From d06e132358d44f02c22527b4f463df1f58da9216 Mon Sep 17 00:00:00 2001 From: Jean-Michel Trivi Date: Wed, 12 Sep 2012 15:47:07 -0700 Subject: Communicate audio session ID to downmixer The audio downmixer effect might need the audio session Id, pass it from the track creation in AudioFlinger to the downmix effect creation in AudioMixer. Change-Id: I836873eebd6711f1048fce81cd2eb29b94f0ad0c --- services/audioflinger/AudioFlinger.cpp | 9 +++++---- services/audioflinger/AudioFlinger.h | 4 ++-- services/audioflinger/AudioMixer.cpp | 5 +++-- services/audioflinger/AudioMixer.h | 6 ++++-- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index a5d4c6c..dffa099 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -3383,9 +3383,9 @@ void AudioFlinger::PlaybackThread::invalidateTracks(audio_stream_type_t streamTy } // getTrackName_l() must be called with ThreadBase::mLock held -int AudioFlinger::MixerThread::getTrackName_l(audio_channel_mask_t channelMask) +int AudioFlinger::MixerThread::getTrackName_l(audio_channel_mask_t channelMask, int sessionId) { - return mAudioMixer->getTrackName(channelMask); + return mAudioMixer->getTrackName(channelMask, sessionId); } // deleteTrackName_l() must be called with ThreadBase::mLock held @@ -3828,7 +3828,8 @@ void AudioFlinger::DirectOutputThread::threadLoop_sleepTime() } // getTrackName_l() must be called with ThreadBase::mLock held -int AudioFlinger::DirectOutputThread::getTrackName_l(audio_channel_mask_t channelMask) +int AudioFlinger::DirectOutputThread::getTrackName_l(audio_channel_mask_t channelMask, + int sessionId) { return 0; } @@ -4293,7 +4294,7 @@ AudioFlinger::PlaybackThread::Track::Track( // 16 bit because data is converted to 16 bit before being stored in buffer by AudioTrack mCblk->frameSize = audio_is_linear_pcm(format) ? mChannelCount * sizeof(int16_t) : sizeof(uint8_t); // to avoid leaking a track name, do not allocate one unless there is an mCblk - mName = thread->getTrackName_l(channelMask); + mName = thread->getTrackName_l(channelMask, sessionId); mCblk->mName = mName; if (mName < 0) { ALOGE("no more track names available"); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 5ffa5a6..216e3fa 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -1086,7 +1086,7 @@ public: // Allocate a track name for a given channel mask. // Returns name >= 0 if successful, -1 on failure. 
- virtual int getTrackName_l(audio_channel_mask_t channelMask) = 0; + virtual int getTrackName_l(audio_channel_mask_t channelMask, int sessionId) = 0; virtual void deleteTrackName_l(int name) = 0; // Time to sleep between cycles when: @@ -1254,7 +1254,7 @@ public: virtual bool checkForNewParameters_l(); protected: - virtual int getTrackName_l(audio_channel_mask_t channelMask); + virtual int getTrackName_l(audio_channel_mask_t channelMask, int sessionId); virtual void deleteTrackName_l(int name); virtual uint32_t activeSleepTimeUs() const; virtual uint32_t idleSleepTimeUs() const; diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 183c1f3..1f13625 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -163,7 +163,7 @@ AudioMixer::~AudioMixer() delete [] mState.resampleTemp; } -int AudioMixer::getTrackName(audio_channel_mask_t channelMask) +int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) { uint32_t names = (~mTrackNames) & mConfiguredNames; if (names != 0) { @@ -189,6 +189,7 @@ int AudioMixer::getTrackName(audio_channel_mask_t channelMask) t->enabled = false; t->format = 16; t->channelMask = AUDIO_CHANNEL_OUT_STEREO; + t->sessionId = sessionId; // setBufferProvider(name, AudioBufferProvider *) is required before enable(name) t->bufferProvider = NULL; t->buffer.raw = NULL; @@ -269,7 +270,7 @@ status_t AudioMixer::prepareTrackForDownmix(track_t* pTrack, int trackName) } if (EffectCreate(&dwnmFxDesc.uuid, - -2 /*sessionId*/, -2 /*ioId*/,// both not relevant here, using random value + pTrack->sessionId /*sessionId*/, -2 /*ioId not relevant here, using random value*/, &pDbp->mDownmixHandle/*pHandle*/) != 0) { ALOGE("prepareTrackForDownmix(%d) fails: error creating downmixer effect", trackName); goto noDownmixForActiveTrack; diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index 6c80253..dc468ff 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -91,7 +91,7 @@ public: // For all APIs with "name": TRACK0 <= name < TRACK0 + MAX_NUM_TRACKS // Allocate a track name. Returns new track name if successful, -1 on failure. - int getTrackName(audio_channel_mask_t channelMask); + int getTrackName(audio_channel_mask_t channelMask, int sessionId); // Free an allocated track by name void deleteTrackName(int name); @@ -190,7 +190,9 @@ private: DownmixerBufferProvider* downmixerBufferProvider; // 4 bytes - int32_t padding[3]; + int32_t sessionId; + + int32_t padding[2]; // 16-byte boundary -- cgit v1.1 From 43ed5337f3eaa909ad2e87df0ccd061c3e6d0096 Mon Sep 17 00:00:00 2001 From: Jean-Michel Trivi Date: Thu, 13 Sep 2012 11:08:37 -0700 Subject: Fix build in AudioFlinger/FastMixer Commit was missing some changes to modifications to send session ID from AudioFlinger to AudioMixer. 
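Read together, the session-ID changes above add one parameter all the way down the call chain: the track's session ID is captured when AudioFlinger allocates a mixer track name, stored on the AudioMixer track state, and finally handed to the downmix effect instead of the old -2 placeholder. Below is a minimal, self-contained sketch of that plumbing using toy stand-in types (not the real AudioFlinger/AudioMixer classes or signatures; the mask and session values are arbitrary):

// Sketch only: simplified stand-ins illustrating how a session ID supplied at
// track-name allocation ends up in the downmixer creation call.
#include <stdint.h>
#include <stdio.h>

typedef uint32_t audio_channel_mask_t;     // stand-in for the system typedef

// stand-in for the effect factory's EffectCreate(): just reports the session
static int createDownmixEffect(int32_t sessionId, int32_t ioId) {
    printf("downmixer created on session %d (ioId %d)\n", sessionId, ioId);
    return 0;
}

struct MixerTrack {                        // stand-in for AudioMixer::track_t
    audio_channel_mask_t channelMask;
    int32_t sessionId;                     // field added by the first patch
};

struct Mixer {                             // stand-in for AudioMixer
    MixerTrack track;

    // was getTrackName(channelMask); the session ID is now recorded as well
    int getTrackName(audio_channel_mask_t mask, int sessionId) {
        track.channelMask = mask;
        track.sessionId   = sessionId;
        return 0;                          // single-track toy: name 0
    }

    void prepareTrackForDownmix() {
        // previously both IDs were the placeholder -2; the track's own session
        // is now passed so the effect is tied to the right audio session
        createDownmixEffect(track.sessionId, -2 /* ioId: still not relevant */);
    }
};

int main() {
    Mixer mixer;
    int name = mixer.getTrackName(/*mask*/ 0x3, /*sessionId*/ 42);
    if (name >= 0) {
        mixer.prepareTrackForDownmix();
    }
    return 0;
}
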
Change-Id: I7daeaf4eff243b5287bef63be86a87acf408374a --- services/audioflinger/AudioFlinger.cpp | 2 +- services/audioflinger/AudioFlinger.h | 2 +- services/audioflinger/FastMixer.cpp | 5 +++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index dffa099..27e2ed2 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -3498,7 +3498,7 @@ bool AudioFlinger::MixerThread::checkForNewParameters_l() readOutputParameters(); mAudioMixer = new AudioMixer(mNormalFrameCount, mSampleRate); for (size_t i = 0; i < mTracks.size() ; i++) { - int name = getTrackName_l(mTracks[i]->mChannelMask); + int name = getTrackName_l(mTracks[i]->mChannelMask, mTracks[i]->mSessionId); if (name < 0) break; mTracks[i]->mName = name; // limit track sample rate to 2 x new output sample rate diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 216e3fa..c956861 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -1202,7 +1202,7 @@ public: protected: virtual mixer_state prepareTracks_l(Vector< sp > *tracksToRemove); - virtual int getTrackName_l(audio_channel_mask_t channelMask); + virtual int getTrackName_l(audio_channel_mask_t channelMask, int sessionId); virtual void deleteTrackName_l(int name); virtual uint32_t idleSleepTimeUs() const; virtual uint32_t suspendSleepTimeUs() const; diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index cdc27a2..13003d9 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -281,8 +281,9 @@ bool FastMixer::threadLoop() AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); if (mixer != NULL) { - // calling getTrackName with default channel mask - name = mixer->getTrackName(AUDIO_CHANNEL_OUT_STEREO); + // calling getTrackName with default channel mask and a random invalid + // sessionId (no effects here) + name = mixer->getTrackName(AUDIO_CHANNEL_OUT_STEREO, -555); ALOG_ASSERT(name >= 0); fastTrackNames[i] = name; mixer->setBufferProvider(name, bufferProvider); -- cgit v1.1 From 879d503a5d4460a4265279985c63af954afe4a2c Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 17 Oct 2012 12:16:50 -0700 Subject: Ignore SIGPIPE during write() to broken pipe This happens occasionally when taking a bugreport. Bug: 6447319 Change-Id: Ia6531a4a3658461f8fd3f7106e7996da7cc5933a --- media/mediaserver/main_mediaserver.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp index 6b1abb1..ddd5b84 100644 --- a/media/mediaserver/main_mediaserver.cpp +++ b/media/mediaserver/main_mediaserver.cpp @@ -33,6 +33,7 @@ using namespace android; int main(int argc, char** argv) { + signal(SIGPIPE, SIG_IGN); sp proc(ProcessState::self()); sp sm = defaultServiceManager(); ALOGI("ServiceManager: %p", sm.get()); -- cgit v1.1 From a15ed9529e70caaf42aae78f9fe530abe38bcc1b Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Wed, 24 Oct 2012 13:43:32 -0700 Subject: Make ThrottledSource more usable Add reconnectAtOffset(), DrmInitialization() and getDrmInfo(). Also rearrange the code a bit so all the methods that just call through to the wrapped DataSource are in the header. 
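The mediaserver change a little further up (signal(SIGPIPE, SIG_IGN)) is a one-liner, but the reason it matters is easy to miss: by default, writing to a pipe whose read end has already closed (as can happen while dumping state for a bugreport) delivers SIGPIPE and terminates the whole process. With the signal ignored, the write simply fails with EPIPE and the caller carries on. A small stand-alone illustration of that POSIX behaviour (not part of the patch itself):

// Demonstrates the effect of ignoring SIGPIPE before writing to a broken pipe.
#include <errno.h>
#include <signal.h>
#include <stdio.h>
#include <unistd.h>

int main() {
    signal(SIGPIPE, SIG_IGN);           // same call the patch adds to mediaserver

    int fds[2];
    if (pipe(fds) != 0) {
        return 1;
    }
    close(fds[0]);                      // close the read end: the pipe is now broken

    ssize_t n = write(fds[1], "x", 1);  // without SIG_IGN this would kill the process
    if (n < 0 && errno == EPIPE) {
        printf("write() failed with EPIPE; process is still running\n");
    }
    close(fds[1]);
    return 0;
}
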
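For the ThrottledSource change whose diff follows, the interesting part is the pacing in readAt(): after each read it works out when that many bytes should have been delivered at the configured bandwidth and sleeps out the difference, while every other method simply forwards to the wrapped source. A rough, self-contained sketch of that pacing idea (toy class and names, not the real DataSource interface):

// Sketch of bandwidth pacing: track total bytes delivered, compute when they
// were "due" at the configured rate, and sleep until that point if we are early.
#include <stdint.h>
#include <chrono>
#include <thread>

class ThrottledReader {
public:
    explicit ThrottledReader(int64_t bytesPerSecond)
        : mBandwidth(bytesPerSecond),
          mStart(std::chrono::steady_clock::now()),
          mTotalTransferred(0) {}

    // caller reports how many bytes it just read from the wrapped source
    void throttle(size_t bytesJustRead) {
        mTotalTransferred += static_cast<int64_t>(bytesJustRead);

        // time by which this many bytes are allowed to have been delivered
        const auto dueAt = mStart + std::chrono::microseconds(
                mTotalTransferred * 1000000 / mBandwidth);

        if (dueAt > std::chrono::steady_clock::now()) {
            std::this_thread::sleep_until(dueAt);   // pace down to the cap
        }
    }

private:
    const int64_t mBandwidth;                               // bytes per second
    const std::chrono::steady_clock::time_point mStart;
    int64_t mTotalTransferred;
};

int main() {
    ThrottledReader reader(64 * 1024);      // cap at 64 KiB/s
    for (int i = 0; i < 4; ++i) {
        // pretend we just read 16 KiB from the wrapped source each iteration
        reader.throttle(16 * 1024);
    }
    return 0;
}
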
Change-Id: If25b674df317b0f6da5d36241c694e32abb0a01c --- media/libstagefright/ThrottledSource.cpp | 12 --------- media/libstagefright/include/ThrottledSource.h | 36 ++++++++++++++++++++++---- 2 files changed, 31 insertions(+), 17 deletions(-) diff --git a/media/libstagefright/ThrottledSource.cpp b/media/libstagefright/ThrottledSource.cpp index 348a9d3..7496752 100644 --- a/media/libstagefright/ThrottledSource.cpp +++ b/media/libstagefright/ThrottledSource.cpp @@ -31,10 +31,6 @@ ThrottledSource::ThrottledSource( CHECK(mBandwidthLimitBytesPerSecond > 0); } -status_t ThrottledSource::initCheck() const { - return mSource->initCheck(); -} - ssize_t ThrottledSource::readAt(off64_t offset, void *data, size_t size) { Mutex::Autolock autoLock(mLock); @@ -62,17 +58,9 @@ ssize_t ThrottledSource::readAt(off64_t offset, void *data, size_t size) { if (whenUs > nowUs) { usleep(whenUs - nowUs); } - return n; } -status_t ThrottledSource::getSize(off64_t *size) { - return mSource->getSize(size); -} - -uint32_t ThrottledSource::flags() { - return mSource->flags(); -} } // namespace android diff --git a/media/libstagefright/include/ThrottledSource.h b/media/libstagefright/include/ThrottledSource.h index 7fe7c06..673268b 100644 --- a/media/libstagefright/include/ThrottledSource.h +++ b/media/libstagefright/include/ThrottledSource.h @@ -28,18 +28,44 @@ struct ThrottledSource : public DataSource { const sp &source, int32_t bandwidthLimitBytesPerSecond); - virtual status_t initCheck() const; - + // implementation of readAt() that sleeps to achieve the desired max throughput virtual ssize_t readAt(off64_t offset, void *data, size_t size); - virtual status_t getSize(off64_t *size); - virtual uint32_t flags(); + // returns an empty string to prevent callers from using the Uri to construct a new datasource + virtual String8 getUri() { + return String8(); + } + + // following methods all call through to the wrapped DataSource's methods + + status_t initCheck() const { + return mSource->initCheck(); + } + + virtual status_t getSize(off64_t *size) { + return mSource->getSize(size); + } + + virtual uint32_t flags() { + return mSource->flags(); + } + + virtual status_t reconnectAtOffset(off64_t offset) { + return mSource->reconnectAtOffset(offset); + } + + virtual sp DrmInitialization(const char *mime = NULL) { + return mSource->DrmInitialization(mime); + } + + virtual void getDrmInfo(sp &handle, DrmManagerClient **client) { + mSource->getDrmInfo(handle, client); + }; virtual String8 getMIMEType() const { return mSource->getMIMEType(); } - private: Mutex mLock; -- cgit v1.1 From 1bb85d27f09cb01b7e43e08600229258edf16e60 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Thu, 25 Oct 2012 11:02:50 -0700 Subject: Switch to new fx library API Change-Id: I6603aef5e3821a8f911e3f33ef8565d04bd1e2e5 --- media/libeffects/downmix/EffectDownmix.c | 21 ------------ media/libeffects/downmix/EffectDownmix.h | 3 -- .../libeffects/lvm/wrapper/Bundle/EffectBundle.cpp | 38 ---------------------- .../libeffects/lvm/wrapper/Reverb/EffectReverb.cpp | 26 --------------- media/libeffects/preprocessing/PreProcessing.cpp | 26 --------------- media/libeffects/testlibs/EffectEqualizer.cpp | 19 ----------- media/libeffects/testlibs/EffectReverb.c | 19 ----------- media/libeffects/testlibs/EffectReverb.h | 3 -- media/libeffects/visualizer/EffectVisualizer.cpp | 19 ----------- 9 files changed, 174 deletions(-) diff --git a/media/libeffects/downmix/EffectDownmix.c b/media/libeffects/downmix/EffectDownmix.c index 5bf052a..aa2134c 100644 --- 
a/media/libeffects/downmix/EffectDownmix.c +++ b/media/libeffects/downmix/EffectDownmix.c @@ -63,8 +63,6 @@ audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { version : EFFECT_LIBRARY_API_VERSION, name : "Downmix Library", implementor : "The Android Open Source Project", - query_num_effects : DownmixLib_QueryNumberEffects, - query_effect : DownmixLib_QueryEffect, create_effect : DownmixLib_Create, release_effect : DownmixLib_Release, get_descriptor : DownmixLib_GetDescriptor, @@ -159,25 +157,6 @@ void Downmix_testIndexComputation(uint32_t mask) { /*--- Effect Library Interface Implementation ---*/ -int32_t DownmixLib_QueryNumberEffects(uint32_t *pNumEffects) { - ALOGV("DownmixLib_QueryNumberEffects()"); - *pNumEffects = kNbEffects; - return 0; -} - -int32_t DownmixLib_QueryEffect(uint32_t index, effect_descriptor_t *pDescriptor) { - ALOGV("DownmixLib_QueryEffect() index=%d", index); - if (pDescriptor == NULL) { - return -EINVAL; - } - if (index >= (uint32_t)kNbEffects) { - return -EINVAL; - } - memcpy(pDescriptor, gDescriptors[index], sizeof(effect_descriptor_t)); - return 0; -} - - int32_t DownmixLib_Create(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, diff --git a/media/libeffects/downmix/EffectDownmix.h b/media/libeffects/downmix/EffectDownmix.h index be3ca3f..cb6b957 100644 --- a/media/libeffects/downmix/EffectDownmix.h +++ b/media/libeffects/downmix/EffectDownmix.h @@ -65,9 +65,6 @@ const uint32_t kUnsupported = * Effect API *------------------------------------ */ -int32_t DownmixLib_QueryNumberEffects(uint32_t *pNumEffects); -int32_t DownmixLib_QueryEffect(uint32_t index, - effect_descriptor_t *pDescriptor); int32_t DownmixLib_Create(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp index d706c2d..3ea3e18 100644 --- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp +++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp @@ -158,42 +158,6 @@ int Volume_getParameter (EffectContext *pContext, int Effect_setEnabled(EffectContext *pContext, bool enabled); /* Effect Library Interface Implementation */ -extern "C" int EffectQueryNumberEffects(uint32_t *pNumEffects){ - ALOGV("\n\tEffectQueryNumberEffects start"); - *pNumEffects = 4; - ALOGV("\tEffectQueryNumberEffects creating %d effects", *pNumEffects); - ALOGV("\tEffectQueryNumberEffects end\n"); - return 0; -} /* end EffectQueryNumberEffects */ - -extern "C" int EffectQueryEffect(uint32_t index, effect_descriptor_t *pDescriptor){ - ALOGV("\n\tEffectQueryEffect start"); - ALOGV("\tEffectQueryEffect processing index %d", index); - - if (pDescriptor == NULL){ - ALOGV("\tLVM_ERROR : EffectQueryEffect was passed NULL pointer"); - return -EINVAL; - } - if (index > 3){ - ALOGV("\tLVM_ERROR : EffectQueryEffect index out of range %d", index); - return -ENOENT; - } - if(index == LVM_BASS_BOOST){ - ALOGV("\tEffectQueryEffect processing LVM_BASS_BOOST"); - *pDescriptor = gBassBoostDescriptor; - }else if(index == LVM_VIRTUALIZER){ - ALOGV("\tEffectQueryEffect processing LVM_VIRTUALIZER"); - *pDescriptor = gVirtualizerDescriptor; - } else if(index == LVM_EQUALIZER){ - ALOGV("\tEffectQueryEffect processing LVM_EQUALIZER"); - *pDescriptor = gEqualizerDescriptor; - } else if(index == LVM_VOLUME){ - ALOGV("\tEffectQueryEffect processing LVM_VOLUME"); - *pDescriptor = gVolumeDescriptor; - } - ALOGV("\tEffectQueryEffect end\n"); - return 0; -} /* end EffectQueryEffect */ extern "C" int 
EffectCreate(const effect_uuid_t *uuid, int32_t sessionId, @@ -3304,8 +3268,6 @@ audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { version : EFFECT_LIBRARY_API_VERSION, name : "Effect Bundle Library", implementor : "NXP Software Ltd.", - query_num_effects : android::EffectQueryNumberEffects, - query_effect : android::EffectQueryEffect, create_effect : android::EffectCreate, release_effect : android::EffectRelease, get_descriptor : android::EffectGetDescriptor, diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp index 941d651..1a2f9dc 100755 --- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp +++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp @@ -186,30 +186,6 @@ int Reverb_getParameter (ReverbContext *pContext, int Reverb_LoadPreset (ReverbContext *pContext); /* Effect Library Interface Implementation */ -extern "C" int EffectQueryNumberEffects(uint32_t *pNumEffects){ - ALOGV("\n\tEffectQueryNumberEffects start"); - *pNumEffects = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *); - ALOGV("\tEffectQueryNumberEffects creating %d effects", *pNumEffects); - ALOGV("\tEffectQueryNumberEffects end\n"); - return 0; -} /* end EffectQueryNumberEffects */ - -extern "C" int EffectQueryEffect(uint32_t index, - effect_descriptor_t *pDescriptor){ - ALOGV("\n\tEffectQueryEffect start"); - ALOGV("\tEffectQueryEffect processing index %d", index); - if (pDescriptor == NULL){ - ALOGV("\tLVM_ERROR : EffectQueryEffect was passed NULL pointer"); - return -EINVAL; - } - if (index >= sizeof(gDescriptors) / sizeof(const effect_descriptor_t *)) { - ALOGV("\tLVM_ERROR : EffectQueryEffect index out of range %d", index); - return -ENOENT; - } - *pDescriptor = *gDescriptors[index]; - ALOGV("\tEffectQueryEffect end\n"); - return 0; -} /* end EffectQueryEffect */ extern "C" int EffectCreate(const effect_uuid_t *uuid, int32_t sessionId, @@ -2175,8 +2151,6 @@ audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { version : EFFECT_LIBRARY_API_VERSION, name : "Reverb Library", implementor : "NXP Software Ltd.", - query_num_effects : android::EffectQueryNumberEffects, - query_effect : android::EffectQueryEffect, create_effect : android::EffectCreate, release_effect : android::EffectRelease, get_descriptor : android::EffectGetDescriptor, diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp index 597866a..58dc413 100755 --- a/media/libeffects/preprocessing/PreProcessing.cpp +++ b/media/libeffects/preprocessing/PreProcessing.cpp @@ -1818,30 +1818,6 @@ const struct effect_interface_s sEffectInterfaceReverse = { // Effect Library Interface Implementation //------------------------------------------------------------------------------ -int PreProcessingLib_QueryNumberEffects(uint32_t *pNumEffects) -{ - if (PreProc_Init() != 0) { - return sInitStatus; - } - if (pNumEffects == NULL) { - return -EINVAL; - } - *pNumEffects = PREPROC_NUM_EFFECTS; - return sInitStatus; -} - -int PreProcessingLib_QueryEffect(uint32_t index, effect_descriptor_t *pDescriptor) -{ - if (PreProc_Init() != 0) { - return sInitStatus; - } - if (index >= PREPROC_NUM_EFFECTS) { - return -EINVAL; - } - *pDescriptor = *sDescriptors[index]; - return 0; -} - int PreProcessingLib_Create(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, @@ -1918,8 +1894,6 @@ audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { version : EFFECT_LIBRARY_API_VERSION, name : "Audio Preprocessing Library", implementor : 
"The Android Open Source Project", - query_num_effects : PreProcessingLib_QueryNumberEffects, - query_effect : PreProcessingLib_QueryEffect, create_effect : PreProcessingLib_Create, release_effect : PreProcessingLib_Release, get_descriptor : PreProcessingLib_GetDescriptor diff --git a/media/libeffects/testlibs/EffectEqualizer.cpp b/media/libeffects/testlibs/EffectEqualizer.cpp index 90ebe1f..c35453b 100644 --- a/media/libeffects/testlibs/EffectEqualizer.cpp +++ b/media/libeffects/testlibs/EffectEqualizer.cpp @@ -123,23 +123,6 @@ int Equalizer_setParameter(AudioEqualizer * pEqualizer, int32_t *pParam, void *p //--- Effect Library Interface Implementation // -extern "C" int EffectQueryNumberEffects(uint32_t *pNumEffects) { - *pNumEffects = 1; - return 0; -} /* end EffectQueryNumberEffects */ - -extern "C" int EffectQueryEffect(uint32_t index, - effect_descriptor_t *pDescriptor) { - if (pDescriptor == NULL) { - return -EINVAL; - } - if (index > 0) { - return -EINVAL; - } - *pDescriptor = gEqualizerDescriptor; - return 0; -} /* end EffectQueryNext */ - extern "C" int EffectCreate(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, @@ -771,8 +754,6 @@ audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { version : EFFECT_LIBRARY_API_VERSION, name : "Test Equalizer Library", implementor : "The Android Open Source Project", - query_num_effects : android::EffectQueryNumberEffects, - query_effect : android::EffectQueryEffect, create_effect : android::EffectCreate, release_effect : android::EffectRelease, get_descriptor : android::EffectGetDescriptor, diff --git a/media/libeffects/testlibs/EffectReverb.c b/media/libeffects/testlibs/EffectReverb.c index a87a834..c37f392 100644 --- a/media/libeffects/testlibs/EffectReverb.c +++ b/media/libeffects/testlibs/EffectReverb.c @@ -94,23 +94,6 @@ static const effect_descriptor_t * const gDescriptors[] = { /*--- Effect Library Interface Implementation ---*/ -int EffectQueryNumberEffects(uint32_t *pNumEffects) { - *pNumEffects = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *); - return 0; -} - -int EffectQueryEffect(uint32_t index, effect_descriptor_t *pDescriptor) { - if (pDescriptor == NULL) { - return -EINVAL; - } - if (index >= sizeof(gDescriptors) / sizeof(const effect_descriptor_t *)) { - return -EINVAL; - } - memcpy(pDescriptor, gDescriptors[index], - sizeof(effect_descriptor_t)); - return 0; -} - int EffectCreate(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, @@ -2222,8 +2205,6 @@ audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { .version = EFFECT_LIBRARY_API_VERSION, .name = "Test Equalizer Library", .implementor = "The Android Open Source Project", - .query_num_effects = EffectQueryNumberEffects, - .query_effect = EffectQueryEffect, .create_effect = EffectCreate, .release_effect = EffectRelease, .get_descriptor = EffectGetDescriptor, diff --git a/media/libeffects/testlibs/EffectReverb.h b/media/libeffects/testlibs/EffectReverb.h index 1fb14a7..e5248fe 100644 --- a/media/libeffects/testlibs/EffectReverb.h +++ b/media/libeffects/testlibs/EffectReverb.h @@ -300,9 +300,6 @@ typedef struct reverb_module_s { * Effect API *------------------------------------ */ -int EffectQueryNumberEffects(uint32_t *pNumEffects); -int EffectQueryEffect(uint32_t index, - effect_descriptor_t *pDescriptor); int EffectCreate(const effect_uuid_t *effectUID, int32_t sessionId, int32_t ioId, diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp index 44baf93..dc1937e 100644 --- 
a/media/libeffects/visualizer/EffectVisualizer.cpp +++ b/media/libeffects/visualizer/EffectVisualizer.cpp @@ -177,23 +177,6 @@ int Visualizer_init(VisualizerContext *pContext) //--- Effect Library Interface Implementation // -int VisualizerLib_QueryNumberEffects(uint32_t *pNumEffects) { - *pNumEffects = 1; - return 0; -} - -int VisualizerLib_QueryEffect(uint32_t index, - effect_descriptor_t *pDescriptor) { - if (pDescriptor == NULL) { - return -EINVAL; - } - if (index > 0) { - return -EINVAL; - } - *pDescriptor = gVisualizerDescriptor; - return 0; -} - int VisualizerLib_Create(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, @@ -580,8 +563,6 @@ audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { version : EFFECT_LIBRARY_API_VERSION, name : "Visualizer Library", implementor : "The Android Open Source Project", - query_num_effects : VisualizerLib_QueryNumberEffects, - query_effect : VisualizerLib_QueryEffect, create_effect : VisualizerLib_Create, release_effect : VisualizerLib_Release, get_descriptor : VisualizerLib_GetDescriptor, -- cgit v1.1 From 655604a7c1ffadc04ec479e4f45345918f44b460 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Thu, 25 Oct 2012 16:05:57 -0700 Subject: Only export the symbols that need to be The effects libraries were exporting many more symbols than needed. This reduces the exported symbols to just the needed ones (basically just "AELI"), which happens to also save about 28KB. Change-Id: I115077e52e8dc845282e6f62a522908d26dd72d6 --- media/libeffects/downmix/Android.mk | 2 ++ media/libeffects/downmix/EffectDownmix.c | 2 ++ media/libeffects/lvm/lib/Android.mk | 9 +++++---- media/libeffects/lvm/wrapper/Android.mk | 11 +++++------ media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp | 2 ++ media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp | 2 ++ media/libeffects/preprocessing/Android.mk | 2 ++ media/libeffects/preprocessing/PreProcessing.cpp | 2 ++ media/libeffects/visualizer/Android.mk | 2 +- media/libeffects/visualizer/EffectVisualizer.cpp | 3 ++- 10 files changed, 25 insertions(+), 12 deletions(-) diff --git a/media/libeffects/downmix/Android.mk b/media/libeffects/downmix/Android.mk index 95ca6fd..3052ad9 100644 --- a/media/libeffects/downmix/Android.mk +++ b/media/libeffects/downmix/Android.mk @@ -25,4 +25,6 @@ LOCAL_C_INCLUDES := \ LOCAL_PRELINK_MODULE := false +LOCAL_CFLAGS += -fvisibility=hidden + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libeffects/downmix/EffectDownmix.c b/media/libeffects/downmix/EffectDownmix.c index aa2134c..f17a6e8 100644 --- a/media/libeffects/downmix/EffectDownmix.c +++ b/media/libeffects/downmix/EffectDownmix.c @@ -58,6 +58,8 @@ const struct effect_interface_s gDownmixInterface = { NULL /* no process_reverse function, no reference stream needed */ }; +// This is the only symbol that needs to be exported +__attribute__ ((visibility ("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { tag : AUDIO_EFFECT_LIBRARY_TAG, version : EFFECT_LIBRARY_API_VERSION, diff --git a/media/libeffects/lvm/lib/Android.mk b/media/libeffects/lvm/lib/Android.mk index f49267e..bb56c75 100644 --- a/media/libeffects/lvm/lib/Android.mk +++ b/media/libeffects/lvm/lib/Android.mk @@ -105,8 +105,6 @@ LOCAL_SRC_FILES:= \ LOCAL_MODULE:= libmusicbundle - - LOCAL_C_INCLUDES += \ $(LOCAL_PATH)/Eq/lib \ $(LOCAL_PATH)/Eq/src \ @@ -121,8 +119,12 @@ LOCAL_C_INCLUDES += \ $(LOCAL_PATH)/StereoWidening/src \ $(LOCAL_PATH)/StereoWidening/lib +LOCAL_CFLAGS += -fvisibility=hidden + include $(BUILD_STATIC_LIBRARY) + + # Reverb library 
include $(CLEAR_VARS) @@ -168,12 +170,11 @@ LOCAL_SRC_FILES:= \ LOCAL_MODULE:= libreverb - - LOCAL_C_INCLUDES += \ $(LOCAL_PATH)/Reverb/lib \ $(LOCAL_PATH)/Reverb/src \ $(LOCAL_PATH)/Common/lib \ $(LOCAL_PATH)/Common/src +LOCAL_CFLAGS += -fvisibility=hidden include $(BUILD_STATIC_LIBRARY) diff --git a/media/libeffects/lvm/wrapper/Android.mk b/media/libeffects/lvm/wrapper/Android.mk index 4313424..f1af389 100644 --- a/media/libeffects/lvm/wrapper/Android.mk +++ b/media/libeffects/lvm/wrapper/Android.mk @@ -9,28 +9,27 @@ LOCAL_ARM_MODE := arm LOCAL_SRC_FILES:= \ Bundle/EffectBundle.cpp +LOCAL_CFLAGS += -fvisibility=hidden + LOCAL_MODULE:= libbundlewrapper LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx - - LOCAL_STATIC_LIBRARIES += libmusicbundle LOCAL_SHARED_LIBRARIES := \ libcutils \ libdl - LOCAL_C_INCLUDES += \ $(LOCAL_PATH)/Bundle \ $(LOCAL_PATH)/../lib/Common/lib/ \ $(LOCAL_PATH)/../lib/Bundle/lib/ \ $(call include-path-for, audio-effects) - include $(BUILD_SHARED_LIBRARY) + # reverb wrapper include $(CLEAR_VARS) @@ -39,12 +38,12 @@ LOCAL_ARM_MODE := arm LOCAL_SRC_FILES:= \ Reverb/EffectReverb.cpp +LOCAL_CFLAGS += -fvisibility=hidden + LOCAL_MODULE:= libreverbwrapper LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx - - LOCAL_STATIC_LIBRARIES += libreverb LOCAL_SHARED_LIBRARIES := \ diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp index 3ea3e18..94b9acf 100644 --- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp +++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp @@ -3263,6 +3263,8 @@ const struct effect_interface_s gLvmEffectInterface = { NULL, }; /* end gLvmEffectInterface */ +// This is the only symbol that needs to be exported +__attribute__ ((visibility ("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { tag : AUDIO_EFFECT_LIBRARY_TAG, version : EFFECT_LIBRARY_API_VERSION, diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp index 1a2f9dc..87e2c85 100755 --- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp +++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp @@ -2146,6 +2146,8 @@ const struct effect_interface_s gReverbInterface = { NULL, }; /* end gReverbInterface */ +// This is the only symbol that needs to be exported +__attribute__ ((visibility ("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { tag : AUDIO_EFFECT_LIBRARY_TAG, version : EFFECT_LIBRARY_API_VERSION, diff --git a/media/libeffects/preprocessing/Android.mk b/media/libeffects/preprocessing/Android.mk index c13b9d4..dfa1711 100755 --- a/media/libeffects/preprocessing/Android.mk +++ b/media/libeffects/preprocessing/Android.mk @@ -29,4 +29,6 @@ else LOCAL_SHARED_LIBRARIES += libdl endif +LOCAL_CFLAGS += -fvisibility=hidden + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp index 58dc413..25586e8 100755 --- a/media/libeffects/preprocessing/PreProcessing.cpp +++ b/media/libeffects/preprocessing/PreProcessing.cpp @@ -1889,6 +1889,8 @@ int PreProcessingLib_GetDescriptor(const effect_uuid_t *uuid, return 0; } +// This is the only symbol that needs to be exported +__attribute__ ((visibility ("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { tag : AUDIO_EFFECT_LIBRARY_TAG, version : EFFECT_LIBRARY_API_VERSION, diff --git a/media/libeffects/visualizer/Android.mk 
b/media/libeffects/visualizer/Android.mk index 76b5110..49cf4fa 100644 --- a/media/libeffects/visualizer/Android.mk +++ b/media/libeffects/visualizer/Android.mk @@ -6,7 +6,7 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ EffectVisualizer.cpp -LOCAL_CFLAGS+= -O2 +LOCAL_CFLAGS+= -O2 -fvisibility=hidden LOCAL_SHARED_LIBRARIES := \ libcutils \ diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp index dc1937e..e7eccf1 100644 --- a/media/libeffects/visualizer/EffectVisualizer.cpp +++ b/media/libeffects/visualizer/EffectVisualizer.cpp @@ -557,7 +557,8 @@ const struct effect_interface_s gVisualizerInterface = { NULL, }; - +// This is the only symbol that needs to be exported +__attribute__ ((visibility ("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { tag : AUDIO_EFFECT_LIBRARY_TAG, version : EFFECT_LIBRARY_API_VERSION, -- cgit v1.1 From 73e90268adf4c9638b8d820a802e5e9a8ebe6597 Mon Sep 17 00:00:00 2001 From: Pixelflinger Date: Thu, 25 Oct 2012 19:43:49 -0700 Subject: improve fir tool: cleanup, better default, bug fixes - all parameters can be changed on the command-line - added float output - added debug option - added an option to generate a polyphase filter coefficients - added an attenuation option in dBFS - added a lot of comments and references - fixed kaiser window parameter also the default should generate a filter with 80 dB rejection (of the 24 KHz aliasing) above 20 KHz and a 15 KHz transition band around ~20 KHz (for 48 KHz sampling rate). It's not very good but corresponds to the current code. --- tools/resampler_tools/fir.cpp | 263 ++++++++++++++++++++++++++++++++++-------- 1 file changed, 212 insertions(+), 51 deletions(-) diff --git a/tools/resampler_tools/fir.cpp b/tools/resampler_tools/fir.cpp index 377814f..14707d1 100644 --- a/tools/resampler_tools/fir.cpp +++ b/tools/resampler_tools/fir.cpp @@ -16,6 +16,9 @@ #include #include +#include +#include +#include static double sinc(double x) { if (fabs(x) == 0.0f) return 1.0f; @@ -34,44 +37,82 @@ static double I0(double x) { y=x/3.75; y*=y; ans=1.0+y*(3.5156229+y*(3.0899424+y*(1.2067492 - +y*(0.2659732+y*(0.360768e-1+y*0.45813e-2))))); + +y*(0.2659732+y*(0.360768e-1+y*0.45813e-2))))); } else { y=3.75/ax; ans=(exp(ax)/sqrt(ax))*(0.39894228+y*(0.1328592e-1 - +y*(0.225319e-2+y*(-0.157565e-2+y*(0.916281e-2 - +y*(-0.2057706e-1+y*(0.2635537e-1+y*(-0.1647633e-1 - +y*0.392377e-2)))))))); + +y*(0.225319e-2+y*(-0.157565e-2+y*(0.916281e-2 + +y*(-0.2057706e-1+y*(0.2635537e-1+y*(-0.1647633e-1 + +y*0.392377e-2)))))))); } return ans; } -static double kaiser(int k, int N, double alpha) { +static double kaiser(int k, int N, double beta) { if (k < 0 || k > N) return 0; - return I0(M_PI*alpha * sqrt(1.0 - sqr((2.0*k)/N - 1.0))) / I0(M_PI*alpha); + return I0(beta * sqrt(1.0 - sqr((2.0*k)/N - 1.0))) / I0(beta); +} + + +static void usage(char* name) { + fprintf(stderr, + "usage: %s [-h] [-d] [-s sample_rate] [-c cut-off_frequency] [-n half_zero_crossings] [-f {float|fixed}] [-b beta] [-v dBFS] [-l lerp]\n" + " %s [-h] [-d] [-s sample_rate] [-c cut-off_frequency] [-n half_zero_crossings] [-f {float|fixed}] [-b beta] [-v dBFS] -p M/N\n" + " -h this help message\n" + " -d debug, print comma-separated coefficient table\n" + " -p generate poly-phase filter coefficients, with sample increment M/N\n" + " -s sample rate (48000)\n" + " -c cut-off frequency (20478)\n" + " -n number of zero-crossings on one side (8)\n" + " -l number of lerping bits (4)\n" + " -f output format, can be 
fixed-point or floating-point (fixed)\n" + " -b kaiser window parameter beta (7.865 [-80dB])\n" + " -v attenuation in dBFS (0)\n", + name, name + ); + exit(0); } int main(int argc, char** argv) { // nc is the number of bits to store the coefficients - int nc = 32; + const int nc = 32; - // ni is the minimum number of bits needed for interpolation - // (not used for generating the coefficients) - const int ni = nc / 2; + bool polyphase = false; + unsigned int polyM = 160; + unsigned int polyN = 147; + bool debug = false; + double Fs = 48000; + double Fc = 24000; + double atten = 1; + int format = 0; - // cut off frequency ratio Fc/Fs - // The bigger the stop-band, the less coefficients we'll need. - double Fcr = 20000.0 / 48000.0; - // nzc is the number of zero-crossing on one half of the filter - int nzc = 8; - - // alpha parameter of the kaiser window - // Larger numbers reduce ripples in the rejection band but increase - // the width of the transition band. - // the table below gives some value of alpha for a given - // stop-band attenuation. + // in order to keep the errors associated with the linear + // interpolation of the coefficients below the quantization error + // we must satisfy: + // 2^nz >= 2^(nc/2) + // + // for 16 bit coefficients that would be 256 + // + // note that increasing nz only increases memory requirements, + // but doesn't increase the amount of computation to do. + // + // + // see: + // Smith, J.O. Digital Audio Resampling Home Page + // https://ccrma.stanford.edu/~jos/resample/, 2011-03-29 + // + int nz = 4; + + // | 0.1102*(A - 8.7) A > 50 + // beta = | 0.5842*(A - 21)^0.4 + 0.07886*(A - 21) 21 <= A <= 50 + // | 0 A < 21 + // with A is the desired stop-band attenuation in dBFS + // + // for eg: + // // 30 dB 2.210 // 40 dB 3.384 // 50 dB 4.538 @@ -80,42 +121,162 @@ int main(int argc, char** argv) // 80 dB 7.865 // 90 dB 8.960 // 100 dB 10.056 - double alpha = 7.865; // -80dB stop-band attenuation - - // 2^nz is the number coefficients per zero-crossing - // (int theory this should be 1<<(nc/2)) - const int nz = 4; + double beta = 7.865; + + + // 2*nzc = (A - 8) / (2.285 * dw) + // with dw the transition width = 2*pi*dF/Fs + // + int nzc = 8; + + // + // Example: + // 44.1 KHz to 48 KHz resampling + // 100 dB rejection above 28 KHz + // (the spectrum will fold around 24 KHz and we want 100 dB rejection + // at the point where the folding reaches 20 KHz) + // ...___|_____ + // | \| + // | ____/|\____ + // |/alias| \ + // ------/------+------\---------> KHz + // 20 24 28 + + // Transition band 8 KHz, or dw = 1.0472 + // + // beta = 10.056 + // nzc = 20 + // + + int ch; + while ((ch = getopt(argc, argv, ":hds:c:n:f:l:b:p:v:")) != -1) { + switch (ch) { + case 'd': + debug = true; + break; + case 'p': + if (sscanf(optarg, "%u/%u", &polyM, &polyN) != 2) { + usage(argv[0]); + } + polyphase = true; + break; + case 's': + Fs = atof(optarg); + break; + case 'c': + Fc = atof(optarg); + break; + case 'n': + nzc = atoi(optarg); + break; + case 'l': + nz = atoi(optarg); + break; + case 'f': + if (!strcmp(optarg,"fixed")) format = 0; + else if (!strcmp(optarg,"float")) format = 1; + else usage(argv[0]); + break; + case 'b': + beta = atof(optarg); + break; + case 'v': + atten = pow(10, -fabs(atof(optarg))*0.05 ); + break; + case 'h': + default: + usage(argv[0]); + break; + } + } + + // cut off frequency ratio Fc/Fs + double Fcr = Fc / Fs; + - // total number of coefficients + // total number of coefficients (one side) const int N = (1 << nz) * nzc; // generate the right half 
of the filter - printf("const int32_t RESAMPLE_FIR_SIZE = %d;\n", N); - printf("const int32_t RESAMPLE_FIR_NUM_COEF = %d;\n", nzc); - printf("const int32_t RESAMPLE_FIR_COEF_BITS = %d;\n", nc); - printf("const int32_t RESAMPLE_FIR_LERP_FRAC_BITS = %d;\n", ni); - printf("const int32_t RESAMPLE_FIR_LERP_INT_BITS = %d;\n", nz); - printf("\n"); - printf("static int16_t resampleFIR[%d] = {", N); - for (int i=0 ; i= (1LL<<(nc-1))) yi = (1LL<<(nc-1))-1; - - if ((i % (1 << 4)) == 0) printf("\n "); - if (nc > 16) - printf("0x%08x, ", int(yi)); - else - printf("0x%04x, ", int(yi)&0xFFFF); + if (!debug) { + printf("// cmd-line: "); + for (int i=1 ; i= (1LL<<(nc-1))) yi = (1LL<<(nc-1))-1; + printf("0x%08x, ", int32_t(yi)); + } else { + printf("%.9g%s ", y, debug ? "," : "f,"); + } + } + } else { + for (int j=0 ; j= (1LL<<(nc-1))) yi = (1LL<<(nc-1))-1; + printf("0x%08x", int32_t(yi)); + } else { + printf("%.9g%s", y, debug ? "" : "f"); + } + + if (debug && (i==nzc-1)) { + } else { + printf(", "); + } + } + } + } + + if (!debug) { + if (!format) { + printf("\n 0x%08x ", 0); + } else { + printf("\n %.9g ", 0.0f); + } + printf("\n};"); } - printf("\n};\n"); + printf("\n"); return 0; - } +} -// http://www.dsptutor.freeuk.com/KaiserFilterDesign/KaiserFilterDesign.html // http://www.csee.umbc.edu/help/sound/AFsp-V2R1/html/audio/ResampAudio.html - + -- cgit v1.1 From 443e69625d598ea578e2c838960778ce498fd773 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Fri, 26 Oct 2012 13:48:42 -0700 Subject: improve SINC resampler coefficients - we increase the interpolation precision from 4 to 7 bits this doesn't increase CPU power required, it only increases the size of the filter table but significantly reduces the noise introduced by the quantization of the impulse response. - the parameters of the filter are set such that aliasing is rejected at 80 dB below 20 KHz. Because we don't use a lot of coefficient (to save compute power), there are quite a bit of attenuation in the pass-band: starting at 9KHz for the down-sampler (48 to 44.1), and starting at 13 KHz for the up-sampler (44.1 to 48) -- the transition band is about 15 KHz. Change-Id: I855548d2aab8a0fb0d2a2da3a364b6842d7d3838 --- services/audioflinger/AudioResamplerSinc.cpp | 41 +++++++++++++--------------- services/audioflinger/AudioResamplerSinc.h | 2 +- 2 files changed, 20 insertions(+), 23 deletions(-) diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index 9e8447a..8dad250 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -31,36 +31,33 @@ namespace android { /* * These coeficients are computed with the "fir" utility found in * tools/resampler_tools - * TODO: A good optimization would be to transpose this matrix, to take - * better advantage of the data-cache. 
+ * cmd-line: fir -v 0.3 -l 7 -s 48000 -c 20478 */ const int32_t AudioResamplerSinc::mFirCoefsUp[] = { - 0x7fffffff, 0x7f15d078, 0x7c5e0da6, 0x77ecd867, 0x71e2e251, 0x6a6c304a, 0x61be7269, 0x58170412, 0x4db8ab05, 0x42e92ea6, 0x37eee214, 0x2d0e3bb1, 0x22879366, 0x18951e95, 0x0f693d0d, 0x072d2621, - 0x00000000, 0xf9f66655, 0xf51a5fd7, 0xf16bbd84, 0xeee0d9ac, 0xed67a922, 0xece70de6, 0xed405897, 0xee50e505, 0xeff3be30, 0xf203370f, 0xf45a6741, 0xf6d67d53, 0xf957db66, 0xfbc2f647, 0xfe00f2b9, - 0x00000000, 0x01b37218, 0x0313a0c6, 0x041d930d, 0x04d28057, 0x053731b0, 0x05534dff, 0x05309bfd, 0x04da440d, 0x045c1aee, 0x03c1fcdd, 0x03173ef5, 0x02663ae8, 0x01b7f736, 0x0113ec79, 0x007fe6a9, - 0x00000000, 0xff96b229, 0xff44f99f, 0xff0a86be, 0xfee5f803, 0xfed518fd, 0xfed521fd, 0xfee2f4fd, 0xfefb54f8, 0xff1b159b, 0xff3f4203, 0xff6539e0, 0xff8ac502, 0xffae1ddd, 0xffcdf3f9, 0xffe96798, - 0x00000000, 0x00119de6, 0x001e6b7e, 0x0026cb7a, 0x002b4830, 0x002c83d6, 0x002b2a82, 0x0027e67a, 0x002356f9, 0x001e098e, 0x001875e4, 0x0012fbbe, 0x000de2d1, 0x00095c10, 0x00058414, 0x00026636, - 0x00000000, 0xfffe44a9, 0xfffd206d, 0xfffc7b7f, 0xfffc3c8f, 0xfffc4ac2, 0xfffc8f2b, 0xfffcf5c4, 0xfffd6df3, 0xfffdeab2, 0xfffe6275, 0xfffececf, 0xffff2c07, 0xffff788c, 0xffffb471, 0xffffe0f2, - 0x00000000, 0x000013e6, 0x00001f03, 0x00002396, 0x00002399, 0x000020b6, 0x00001c3c, 0x00001722, 0x00001216, 0x00000d81, 0x0000099c, 0x0000067c, 0x00000419, 0x0000025f, 0x00000131, 0x00000070, - 0x00000000, 0xffffffc7, 0xffffffb3, 0xffffffb3, 0xffffffbe, 0xffffffcd, 0xffffffdb, 0xffffffe7, 0xfffffff0, 0xfffffff7, 0xfffffffb, 0xfffffffe, 0xffffffff, 0x00000000, 0x00000000, 0x00000000, + 0x7ba78e22, 0x7ba44428, 0x7b9a668f, 0x7b89f654, 0x7b72f51b, 0x7b556530, 0x7b314988, 0x7b06a5be, 0x7ad57e15, 0x7a9dd776, 0x7a5fb76f, 0x7a1b2433, 0x79d0249b, 0x797ec022, 0x7926fee5, 0x78c8e9a4, 0x786489be, 0x77f9e933, 0x7789129f, 0x7712113b, 0x7694f0de, 0x7611bdf6, 0x7588858a, 0x74f95538, 0x74643b33, 0x73c94642, 0x732885bc, 0x72820989, 0x71d5e21d, 0x7124207a, 0x706cd62a, 0x6fb0153e, 0x6eedf04d, 0x6e267a71, 0x6d59c744, 0x6c87eade, 0x6bb0f9d4, 0x6ad50932, 0x69f42e7d, 0x690e7fac, 0x68241328, 0x6734ffca, 0x66415cd4, 0x654941f4, 0x644cc73b, 0x634c051f, 0x62471477, 0x613e0e75, 0x60310ca7, 0x5f2028f0, 0x5e0b7d8a, 0x5cf324fd, 0x5bd73a21, 0x5ab7d815, 0x59951a42, 0x586f1c54, 0x5745fa37, 0x5619d015, 0x54eaba51, 0x53b8d585, 0x52843e7f, 0x514d1239, 0x50136ddd, 0x4ed76ebc, 0x4d99324c, 0x4c58d626, 0x4b167800, 0x49d235ab, 0x488c2d10, 0x47447c2b, 0x45fb410a, 0x44b099c4, 0x4364a47d, 0x42177f5c, 0x40c9488e, 0x3f7a1e3b, 0x3e2a1e87, 0x3cd96790, 0x3b881766, 0x3a364c0b, 0x38e4236f, 0x3791bb6b, 0x363f31c0, 0x34eca410, 0x339a2fe1, 0x3247f291, 0x30f6095c, 0x2fa49150, 0x2e53a752, 0x2d036813, 0x2bb3f012, 0x2a655b9a, 0x2917c6b7, 0x27cb4d3e, 0x26800abf, 0x25361a8c, 0x23ed97ae, 0x22a69ce7, 0x216144ad, 0x201da929, 0x1edbe431, 0x1d9c0f49, 0x1c5e439e, 0x1b229a04, 0x19e92af4, 0x18b20e89, 0x177d5c7e, 0x164b2c2a, 0x151b947f, 0x13eeac09, 0x12c488ea, 0x119d40d7, 0x1078e918, 0x0f579686, 0x0e395d87, 0x0d1e520d, 0x0c068797, 0x0af21128, 0x09e10150, 0x08d36a1f, 0x07c95d2c, 0x06c2eb8e, 0x05c025df, 0x04c11c38, 0x03c5de2f, 0x02ce7ad8, 0x01db00c3, 0x00eb7df9, + 0x00000000, 0xff1893d3, 0xfe3545e8, 0xfd56222b, 0xfc7b33fd, 0xfba48638, 0xfad22328, 0xfa04148f, 0xf93a63a3, 0xf8751910, 0xf7b43cf2, 0xf6f7d6db, 0xf63fedcf, 0xf58c8845, 0xf4ddac29, 0xf4335ed9, 0xf38da525, 0xf2ec8353, 0xf24ffd1c, 0xf1b815ad, 0xf124cfa8, 0xf0962d24, 0xf00c2fad, 0xef86d846, 0xef06276a, 0xee8a1d0b, 0xee12b892, 0xed9ff8e4, 0xed31dc5f, 0xecc860de, 
0xec6383b6, 0xec0341bc, 0xeba79744, 0xeb50801f, 0xeafdf7a4, 0xeaaff8aa, 0xea667d8d, 0xea21802e, 0xe9e0f9f7, 0xe9a4e3da, 0xe96d3654, 0xe939e96d, 0xe90af4be, 0xe8e04f6d, 0xe8b9f032, 0xe897cd5b, 0xe879dcc9, 0xe86013f5, 0xe84a67f4, 0xe838cd74, 0xe82b38c1, 0xe8219dc9, 0xe81bf01a, 0xe81a22e7, 0xe81c290b, 0xe821f507, 0xe82b790b, 0xe838a6f1, 0xe8497046, 0xe85dc648, 0xe87599e9, 0xe890dbd5, 0xe8af7c6e, 0xe8d16bd7, 0xe8f699ee, 0xe91ef654, 0xe94a706f, 0xe978f769, 0xe9aa7a37, 0xe9dee79a, 0xea162e21, 0xea503c2b, 0xea8cffec, 0xeacc676e, 0xeb0e6095, 0xeb52d91e, 0xeb99bea7, 0xebe2fead, 0xec2e8693, 0xec7c439f, 0xeccc2303, 0xed1e11db, 0xed71fd31, 0xedc7d201, 0xee1f7d3a, 0xee78ebc1, 0xeed40a74, 0xef30c62b, 0xef8f0bbd, 0xefeec803, 0xf04fe7d5, 0xf0b25814, 0xf11605a8, 0xf17add82, 0xf1e0cca2, 0xf247c017, 0xf2afa4ff, 0xf3186891, 0xf381f816, 0xf3ec40f2, 0xf45730a4, 0xf4c2b4c9, 0xf52ebb1b, 0xf59b3178, 0xf60805e2, 0xf6752681, 0xf6e281a4, 0xf75005c5, 0xf7bda18b, 0xf82b43c9, 0xf898db86, 0xf90657f7, 0xf973a887, 0xf9e0bcd7, 0xfa4d84bf, 0xfab9f050, 0xfb25efd6, 0xfb9173db, 0xfbfc6d24, 0xfc66ccb9, 0xfcd083e2, 0xfd398428, 0xfda1bf5c, 0xfe092790, 0xfe6faf21, 0xfed548af, 0xff39e729, 0xff9d7dc3, + 0x00000000, 0x006161ae, 0x00c196e9, 0x0120941c, 0x017e4e00, 0x01dab9a0, 0x0235cc56, 0x028f7bcf, 0x02e7be0b, 0x033e895c, 0x0393d469, 0x03e7962c, 0x0439c5f4, 0x048a5b66, 0x04d94e7c, 0x05269785, 0x05722f29, 0x05bc0e63, 0x06042e86, 0x064a893e, 0x068f1889, 0x06d1d6c1, 0x0712be93, 0x0751cb05, 0x078ef772, 0x07ca3f8d, 0x08039f5e, 0x083b1344, 0x087097f3, 0x08a42a74, 0x08d5c827, 0x09056ebd, 0x09331c3e, 0x095ecf04, 0x098885bc, 0x09b03f66, 0x09d5fb52, 0x09f9b923, 0x0a1b78cb, 0x0a3b3a8a, 0x0a58fef1, 0x0a74c6dd, 0x0a8e9378, 0x0aa66638, 0x0abc40dd, 0x0ad02573, 0x0ae2164c, 0x0af21603, 0x0b00277a, 0x0b0c4dd8, 0x0b168c87, 0x0b1ee736, 0x0b2561d5, 0x0b2a0093, 0x0b2cc7e1, 0x0b2dbc6d, 0x0b2ce320, 0x0b2a411f, 0x0b25dbcb, 0x0b1fb8ba, 0x0b17ddbc, 0x0b0e50d4, 0x0b03183b, 0x0af63a5b, 0x0ae7bdd1, 0x0ad7a969, 0x0ac6041a, 0x0ab2d50d, 0x0a9e2391, 0x0a87f721, 0x0a70575f, 0x0a574c14, 0x0a3cdd2c, 0x0a2112b7, 0x0a03f4e7, 0x09e58c0d, 0x09c5e097, 0x09a4fb12, 0x0982e424, 0x095fa48c, 0x093b4523, 0x0915ced5, 0x08ef4aa5, 0x08c7c1a7, 0x089f3d00, 0x0875c5e5, 0x084b659a, 0x0820256e, 0x07f40ebb, 0x07c72ae4, 0x07998354, 0x076b217a, 0x073c0ecd, 0x070c54c3, 0x06dbfcd7, 0x06ab1080, 0x06799936, 0x0647a06e, 0x06152f96, 0x05e25018, 0x05af0b56, 0x057b6aa7, 0x0547775b, 0x05133ab3, 0x04debde6, 0x04aa0a19, 0x04752865, 0x044021d0, 0x040aff4c, 0x03d5c9ba, 0x03a089e5, 0x036b4882, 0x03360e30, 0x0300e373, 0x02cbd0b8, 0x0296de51, 0x02621472, 0x022d7b36, 0x01f91a98, 0x01c4fa74, 0x01912288, 0x015d9a6f, 0x012a69a6, 0x00f79784, 0x00c52b40, 0x00932be9, 0x0061a06e, 0x00308f96, + 0x00000000, 0xffcff828, 0xffa07e5f, 0xff7198d0, 0xff434d7c, 0xff15a23b, 0xfee89cbb, 0xfebc4281, 0xfe9098e4, 0xfe65a513, 0xfe3b6c10, 0xfe11f2b0, 0xfde93d9e, 0xfdc15155, 0xfd9a3226, 0xfd73e434, 0xfd4e6b74, 0xfd29cbad, 0xfd06087a, 0xfce32547, 0xfcc12551, 0xfca00ba9, 0xfc7fdb31, 0xfc60969d, 0xfc424073, 0xfc24db0b, 0xfc086890, 0xfbeceafd, 0xfbd26423, 0xfbb8d5a2, 0xfba040ee, 0xfb88a750, 0xfb7209e1, 0xfb5c6990, 0xfb47c71f, 0xfb342324, 0xfb217e0a, 0xfb0fd810, 0xfaff314c, 0xfaef89a9, 0xfae0e0e7, 0xfad3369e, 0xfac68a3e, 0xfabadb0d, 0xfab0282a, 0xfaa6708d, 0xfa9db307, 0xfa95ee44, 0xfa8f20c9, 0xfa8948f7, 0xfa84650c, 0xfa807321, 0xfa7d712d, 0xfa7b5d05, 0xfa7a345d, 0xfa79f4c8, 0xfa7a9bb9, 0xfa7c2684, 0xfa7e925f, 0xfa81dc63, 0xfa86018b, 0xfa8afeb7, 0xfa90d0ac, 0xfa977415, 0xfa9ee582, 0xfaa7216d, 0xfab02435, 0xfab9ea25, 0xfac46f71, 
0xfacfb037, 0xfadba883, 0xfae8544b, 0xfaf5af73, 0xfb03b5d1, 0xfb126324, 0xfb21b321, 0xfb31a16a, 0xfb422996, 0xfb53472c, 0xfb64f5a9, 0xfb77307e, 0xfb89f311, 0xfb9d38bf, 0xfbb0fcdb, 0xfbc53ab3, 0xfbd9ed8a, 0xfbef10a2, 0xfc049f32, 0xfc1a9472, 0xfc30eb93, 0xfc479fc5, 0xfc5eac35, 0xfc760c11, 0xfc8dba84, 0xfca5b2bc, 0xfcbdefe8, 0xfcd66d38, 0xfcef25e2, 0xfd08151d, 0xfd213624, 0xfd3a843c, 0xfd53faab, 0xfd6d94c0, 0xfd874dd3, 0xfda12141, 0xfdbb0a73, 0xfdd504da, 0xfdef0bf1, 0xfe091b41, 0xfe232e5a, 0xfe3d40dc, 0xfe574e72, 0xfe7152d5, 0xfe8b49cc, 0xfea52f2d, 0xfebefedd, 0xfed8b4cf, 0xfef24d09, 0xff0bc3a0, 0xff2514ba, 0xff3e3c8f, 0xff57376b, 0xff7001a9, 0xff8897b9, 0xffa0f61e, 0xffb91970, 0xffd0fe58, 0xffe8a197, + 0x00000000, 0x0017167d, 0x002de20b, 0x00445fc0, 0x005a8cc4, 0x00706659, 0x0085e9d5, 0x009b14a4, 0x00afe44b, 0x00c45665, 0x00d868a3, 0x00ec18cd, 0x00ff64c6, 0x01124a84, 0x0124c816, 0x0136dba4, 0x0148836b, 0x0159bdc2, 0x016a8918, 0x017ae3f0, 0x018acce9, 0x019a42b6, 0x01a94425, 0x01b7d01a, 0x01c5e58e, 0x01d38396, 0x01e0a95a, 0x01ed561d, 0x01f98935, 0x02054211, 0x02108037, 0x021b4343, 0x02258ae6, 0x022f56e9, 0x0238a72b, 0x02417b9e, 0x0249d44b, 0x0251b151, 0x025912e4, 0x025ff949, 0x026664de, 0x026c5612, 0x0271cd69, 0x0276cb79, 0x027b50ed, 0x027f5e80, 0x0282f503, 0x02861556, 0x0288c06b, 0x028af746, 0x028cbafc, 0x028e0cb1, 0x028eed9b, 0x028f5eff, 0x028f6231, 0x028ef893, 0x028e2396, 0x028ce4bb, 0x028b3d8e, 0x02892fa9, 0x0286bcb2, 0x0283e65d, 0x0280ae68, 0x027d169e, 0x027920d3, 0x0274cee6, 0x027022c2, 0x026b1e58, 0x0265c3a6, 0x026014b0, 0x025a1382, 0x0253c232, 0x024d22da, 0x0246379d, 0x023f02a5, 0x0237861f, 0x022fc441, 0x0227bf42, 0x021f7961, 0x0216f4de, 0x020e3400, 0x0205390e, 0x01fc0653, 0x01f29e1c, 0x01e902b9, 0x01df367a, 0x01d53bb1, 0x01cb14ae, 0x01c0c3c5, 0x01b64b47, 0x01abad84, 0x01a0ecce, 0x01960b72, 0x018b0bbd, 0x017feffa, 0x0174ba6f, 0x01696d61, 0x015e0b11, 0x015295be, 0x01470f9f, 0x013b7aea, 0x012fd9cf, 0x01242e78, 0x01187b0c, 0x010cc1a9, 0x01010469, 0x00f54560, 0x00e9869a, 0x00ddca1c, 0x00d211e5, 0x00c65fec, 0x00bab620, 0x00af1668, 0x00a382a2, 0x0097fca6, 0x008c8641, 0x00812137, 0x0075cf45, 0x006a921e, 0x005f6b69, 0x00545cc6, 0x004967cc, 0x003e8e04, 0x0033d0f1, 0x0029320a, 0x001eb2bb, 0x00145466, 0x000a1864, + 0x00000000, 0xfff60c7d, 0xffec3f11, 0xffe298e9, 0xffd91b25, 0xffcfc6dc, 0xffc69d18, 0xffbd9ed7, 0xffb4cd10, 0xffac28a9, 0xffa3b281, 0xff9b6b6a, 0xff93542b, 0xff8b6d80, 0xff83b819, 0xff7c349b, 0xff74e3a0, 0xff6dc5b6, 0xff66db62, 0xff60251c, 0xff59a351, 0xff535664, 0xff4d3ead, 0xff475c78, 0xff41b008, 0xff3c3995, 0xff36f94b, 0xff31ef4d, 0xff2d1bb4, 0xff287e8e, 0xff2417e0, 0xff1fe7a4, 0xff1bedca, 0xff182a3b, 0xff149cd2, 0xff114566, 0xff0e23c0, 0xff0b37a4, 0xff0880cb, 0xff05fee6, 0xff03b19d, 0xff01988f, 0xfeffb356, 0xfefe0182, 0xfefc829a, 0xfefb3620, 0xfefa1b8d, 0xfef93254, 0xfef879e0, 0xfef7f196, 0xfef798d4, 0xfef76ef3, 0xfef77344, 0xfef7a513, 0xfef803a8, 0xfef88e42, 0xfef9441d, 0xfefa2471, 0xfefb2e6f, 0xfefc6144, 0xfefdbc19, 0xfeff3e14, 0xff00e655, 0xff02b3f9, 0xff04a61b, 0xff06bbd0, 0xff08f42c, 0xff0b4e40, 0xff0dc91a, 0xff1063c5, 0xff131d4a, 0xff15f4b2, 0xff18e902, 0xff1bf93e, 0xff1f2469, 0xff226984, 0xff25c791, 0xff293d8f, 0xff2cca7e, 0xff306d5d, 0xff34252b, 0xff37f0e7, 0xff3bcf91, 0xff3fc028, 0xff43c1ad, 0xff47d321, 0xff4bf386, 0xff5021df, 0xff545d32, 0xff58a483, 0xff5cf6dc, 0xff615345, 0xff65b8ca, 0xff6a267a, 0xff6e9b62, 0xff731697, 0xff77972d, 0xff7c1c3b, 0xff80a4dd, 0xff85302f, 0xff89bd52, 0xff8e4b69, 0xff92d99c, 0xff976715, 0xff9bf301, 0xffa07c94, 0xffa50301, 0xffa98582, 
0xffae0354, 0xffb27bb9, 0xffb6edf5, 0xffbb5953, 0xffbfbd1e, 0xffc418ab, 0xffc86b4e, 0xffccb463, 0xffd0f349, 0xffd52763, 0xffd9501b, 0xffdd6cde, 0xffe17d1d, 0xffe5804e, 0xffe975ed, 0xffed5d7b, 0xfff1367b, 0xfff50077, 0xfff8baff, 0xfffc65a4, + 0x00000000, 0x000389af, 0x00070254, 0x000a6994, 0x000dbf1c, 0x0011029c, 0x001433c8, 0x0017525c, 0x001a5e15, 0x001d56b7, 0x00203c0a, 0x00230ddc, 0x0025cbfd, 0x00287644, 0x002b0c8b, 0x002d8eb2, 0x002ffc9c, 0x00325631, 0x00349b5d, 0x0036cc12, 0x0038e844, 0x003aefed, 0x003ce309, 0x003ec19a, 0x00408ba5, 0x00424134, 0x0043e254, 0x00456f15, 0x0046e78c, 0x00484bd1, 0x00499c00, 0x004ad839, 0x004c009d, 0x004d1552, 0x004e1683, 0x004f045a, 0x004fdf07, 0x0050a6bc, 0x00515bae, 0x0051fe16, 0x00528e2d, 0x00530c30, 0x00537860, 0x0053d2fd, 0x00541c4d, 0x00545496, 0x00547c20, 0x00549337, 0x00549a25, 0x0054913b, 0x005478c8, 0x0054511d, 0x00541a8f, 0x0053d572, 0x0053821b, 0x005320e4, 0x0052b223, 0x00523633, 0x0051ad6f, 0x00511831, 0x005076d8, 0x004fc9bf, 0x004f1144, 0x004e4dc6, 0x004d7fa4, 0x004ca73d, 0x004bc4ef, 0x004ad91c, 0x0049e423, 0x0048e663, 0x0047e03c, 0x0046d20f, 0x0045bc3b, 0x00449f1f, 0x00437b19, 0x0042508a, 0x00411fce, 0x003fe943, 0x003ead47, 0x003d6c34, 0x003c2668, 0x003adc3d, 0x00398e0c, 0x00383c2f, 0x0036e6fd, 0x00358ece, 0x003433f7, 0x0032d6cd, 0x003177a3, 0x003016cc, 0x002eb499, 0x002d5159, 0x002bed5b, 0x002a88ec, 0x00292456, 0x0027bfe5, 0x00265be0, 0x0024f88e, 0x00239636, 0x00223519, 0x0020d57b, 0x001f779c, 0x001e1bbb, 0x001cc214, 0x001b6ae3, 0x001a1661, 0x0018c4c7, 0x0017764a, 0x00162b1e, 0x0014e376, 0x00139f83, 0x00125f73, 0x00112373, 0x000febaf, 0x000eb84f, 0x000d897c, 0x000c5f5b, 0x000b3a10, 0x000a19bc, 0x0008fe81, 0x0007e87c, 0x0006d7cb, 0x0005cc88, 0x0004c6cc, 0x0003c6ae, 0x0002cc45, 0x0001d7a4, 0x0000e8dd, + 0x00000000, 0xffff1d1d, 0xfffe4040, 0xfffd6975, 0xfffc98c6, 0xfffbce3b, 0xfffb09db, 0xfffa4bab, 0xfff993af, 0xfff8e1e9, 0xfff83659, 0xfff790ff, 0xfff6f1d9, 0xfff658e3, 0xfff5c619, 0xfff53974, 0xfff4b2ed, 0xfff4327c, 0xfff3b816, 0xfff343b1, 0xfff2d541, 0xfff26cb7, 0xfff20a07, 0xfff1ad20, 0xfff155f3, 0xfff1046d, 0xfff0b87e, 0xfff07211, 0xfff03112, 0xffeff56d, 0xffefbf0d, 0xffef8ddb, 0xffef61c0, 0xffef3aa3, 0xffef186e, 0xffeefb07, 0xffeee254, 0xffeece3c, 0xffeebea4, 0xffeeb371, 0xffeeac88, 0xffeea9cc, 0xffeeab22, 0xffeeb06e, 0xffeeb992, 0xffeec671, 0xffeed6ef, 0xffeeeaef, 0xffef0251, 0xffef1cfa, 0xffef3acb, 0xffef5ba6, 0xffef7f6e, 0xffefa605, 0xffefcf4d, 0xffeffb28, 0xfff02979, 0xfff05a22, 0xfff08d05, 0xfff0c207, 0xfff0f909, 0xfff131ef, 0xfff16c9c, 0xfff1a8f3, 0xfff1e6da, 0xfff22634, 0xfff266e6, 0xfff2a8d5, 0xfff2ebe6, 0xfff32fff, 0xfff37506, 0xfff3bae1, 0xfff40178, 0xfff448b2, 0xfff49077, 0xfff4d8b0, 0xfff52144, 0xfff56a1e, 0xfff5b328, 0xfff5fc4b, 0xfff64574, 0xfff68e8c, 0xfff6d782, 0xfff72040, 0xfff768b5, 0xfff7b0ce, 0xfff7f879, 0xfff83fa6, 0xfff88644, 0xfff8cc43, 0xfff91195, 0xfff95629, 0xfff999f3, 0xfff9dce5, 0xfffa1ef2, 0xfffa600e, 0xfffaa02d, 0xfffadf44, 0xfffb1d49, 0xfffb5a32, 0xfffb95f6, 0xfffbd08c, 0xfffc09ec, 0xfffc420e, 0xfffc78ed, 0xfffcae80, 0xfffce2c3, 0xfffd15b1, 0xfffd4744, 0xfffd7779, 0xfffda64c, 0xfffdd3ba, 0xfffdffc0, 0xfffe2a5c, 0xfffe538d, 0xfffe7b51, 0xfffea1a9, 0xfffec693, 0xfffeea11, 0xffff0c22, 0xffff2cc8, 0xffff4c05, 0xffff69db, 0xffff864b, 0xffffa15a, 0xffffbb09, 0xffffd35c, 0xffffea58, 0x00000000 // this one is needed for lerping the last coefficient }; /* - * These coefficients are optimized for 48KHz -> 44.1KHz (stop-band at 22.050KHz) - * It's possible to use the above coefficient for any down-sampling - * at the 
expense of a slower processing loop (we can interpolate - * these coefficient from the above by "Stretching" them in time). + * These coefficients are optimized for 48KHz -> 44.1KHz + * cmd-line: fir -v 0.3 -l 7 -s 48000 -c 16600 */ const int32_t AudioResamplerSinc::mFirCoefsDown[] = { - 0x7fffffff, 0x7f55e46d, 0x7d5b4c60, 0x7a1b4b98, 0x75a7fb14, 0x7019f0bd, 0x698f875a, 0x622bfd59, 0x5a167256, 0x5178cc54, 0x487e8e6c, 0x3f53aae8, 0x36235ad4, 0x2d17047b, 0x245539ab, 0x1c00d540, - 0x14383e57, 0x0d14d5ca, 0x06aa910b, 0x0107c38b, 0xfc351654, 0xf835abae, 0xf5076b45, 0xf2a37202, 0xf0fe9faa, 0xf00a3bbd, 0xefb4aa81, 0xefea2b05, 0xf0959716, 0xf1a11e83, 0xf2f6f7a0, 0xf481fff4, - 0xf62e48ce, 0xf7e98ca5, 0xf9a38b4c, 0xfb4e4bfa, 0xfcde456f, 0xfe4a6d30, 0xff8c2fdf, 0x009f5555, 0x0181d393, 0x0233940f, 0x02b62f06, 0x030ca07d, 0x033afa62, 0x03461725, 0x03334f83, 0x030835fa, - 0x02ca59cc, 0x027f12d1, 0x022b570d, 0x01d39a49, 0x017bb78f, 0x0126e414, 0x00d7aaaf, 0x008feec7, 0x0050f584, 0x001b73e3, 0xffefa063, 0xffcd46ed, 0xffb3ddcd, 0xffa29aaa, 0xff988691, 0xff949066, - 0xff959d24, 0xff9a959e, 0xffa27195, 0xffac4011, 0xffb72d2b, 0xffc28569, 0xffcdb706, 0xffd85171, 0xffe20364, 0xffea97e9, 0xfff1f2b2, 0xfff80c06, 0xfffcec92, 0x0000a955, 0x00035fd8, 0x000532cf, - 0x00064735, 0x0006c1f9, 0x0006c62d, 0x000673ba, 0x0005e68f, 0x00053630, 0x000475a3, 0x0003b397, 0x0002fac1, 0x00025257, 0x0001be9e, 0x0001417a, 0x0000dafd, 0x000089eb, 0x00004c28, 0x00001f1d, - 0x00000000, 0xffffec10, 0xffffe0be, 0xffffdbc5, 0xffffdb39, 0xffffdd8b, 0xffffe182, 0xffffe638, 0xffffeb0a, 0xffffef8f, 0xfffff38b, 0xfffff6e3, 0xfffff993, 0xfffffba6, 0xfffffd30, 0xfffffe4a, - 0xffffff09, 0xffffff85, 0xffffffd1, 0xfffffffb, 0x0000000f, 0x00000016, 0x00000015, 0x00000012, 0x0000000d, 0x00000009, 0x00000006, 0x00000003, 0x00000002, 0x00000001, 0x00000000, 0x00000000, + 0x7ba78e22, 0x7ba5ec84, 0x7ba107c0, 0x7b98e016, 0x7b8d75f3, 0x7b7ec9ed, 0x7b6cdcc5, 0x7b57af69, 0x7b3f42f0, 0x7b23989d, 0x7b04b1dc, 0x7ae29047, 0x7abd359f, 0x7a94a3d0, 0x7a68dcf4, 0x7a39e349, 0x7a07b93d, 0x79d26164, 0x7999de7d, 0x795e3370, 0x791f6350, 0x78dd7157, 0x789860e9, 0x78503592, 0x7804f307, 0x77b69d25, 0x776537f2, 0x7710c799, 0x76b95070, 0x765ed6f1, 0x76015fbf, 0x75a0efa2, 0x753d8b88, 0x74d73888, 0x746dfbda, 0x7401dade, 0x7392db19, 0x73210234, 0x72ac55fc, 0x7234dc61, 0x71ba9b77, 0x713d9976, 0x70bddcb7, 0x703b6bb6, 0x6fb64d11, 0x6f2e8786, 0x6ea421f5, 0x6e17235e, 0x6d8792e2, 0x6cf577bf, 0x6c60d954, 0x6bc9bf1f, 0x6b3030bb, 0x6a9435e0, 0x69f5d664, 0x69551a39, 0x68b2096e, 0x680cac2d, 0x67650abb, 0x66bb2d77, 0x660f1cda, 0x6560e178, 0x64b083fb, 0x63fe0d27, 0x634985d8, 0x6292f701, 0x61da69ab, 0x611fe6f5, 0x60637814, 0x5fa52650, 0x5ee4fb09, 0x5e22ffae, 0x5d5f3dc5, 0x5c99bee4, 0x5bd28cb4, 0x5b09b0ee, 0x5a3f355e, 0x597323dc, 0x58a58654, 0x57d666bd, 0x5705cf1d, 0x5633c98a, 0x55606024, 0x548b9d17, 0x53b58a9c, 0x52de32f7, 0x5205a075, 0x512bdd6f, 0x5050f443, 0x4f74ef5c, 0x4e97d929, 0x4db9bc22, 0x4cdaa2c5, 0x4bfa9795, 0x4b19a51b, 0x4a37d5e5, 0x49553484, 0x4871cb8b, 0x478da592, 0x46a8cd31, 0x45c34d02, 0x44dd2f9f, 0x43f67fa3, 0x430f47a7, 0x42279244, 0x413f6a10, 0x4056d99f, 0x3f6deb81, 0x3e84aa43, 0x3d9b206d, 0x3cb15882, 0x3bc75d00, 0x3add385c, 0x39f2f507, 0x39089d69, 0x381e3be1, 0x3733dac8, 0x3649846b, 0x355f430d, 0x347520e7, 0x338b2828, 0x32a162f0, 0x31b7db56, 0x30ce9b63, 0x2fe5ad11, 0x2efd1a4d, 0x2e14ecf6, 0x2d2d2eda, + 0x2c45e9b9, 0x2b5f2742, 0x2a78f112, 0x299350b7, 0x28ae4fab, 0x27c9f756, 0x26e6510d, 0x26036613, 0x25213f95, 0x243fe6ac, 0x235f645c, 0x227fc196, 0x21a10731, 0x20c33def, 
0x1fe66e7e, 0x1f0aa171, 0x1e2fdf44, 0x1d56305d, 0x1c7d9d06, 0x1ba62d74, 0x1acfe9be, 0x19fad9e5, 0x192705ce, 0x18547543, 0x17832ff3, 0x16b33d74, 0x15e4a53c, 0x15176ea9, 0x144ba0f9, 0x13814350, 0x12b85cb4, 0x11f0f40c, 0x112b1024, 0x1066b7a7, 0x0fa3f123, 0x0ee2c308, 0x0e2333a7, 0x0d654930, 0x0ca909b5, 0x0bee7b28, 0x0b35a35b, 0x0a7e8800, 0x09c92ea8, 0x09159cc5, 0x0863d7a5, 0x07b3e479, 0x0705c84e, 0x06598811, 0x05af288c, 0x0506ae68, 0x04601e2e, 0x03bb7c42, 0x0318cce7, 0x02781440, 0x01d9564b, 0x013c96e3, 0x00a1d9c5, 0x00092285, 0xff72749a, 0xfeddd356, 0xfe4b41e8, 0xfdbac35c, 0xfd2c5a9c, 0xfca00a6f, 0xfc15d57a, 0xfb8dbe3c, 0xfb07c716, 0xfa83f243, 0xfa0241db, 0xf982b7d4, 0xf9055602, 0xf88a1e16, 0xf811119e, 0xf79a3206, 0xf7258096, 0xf6b2fe76, 0xf642acab, 0xf5d48c16, 0xf5689d79, 0xf4fee173, 0xf4975880, 0xf43202fb, 0xf3cee11f, 0xf36df305, 0xf30f38a2, 0xf2b2b1d0, 0xf2585e42, 0xf2003d8f, 0xf1aa4f2b, 0xf156926c, 0xf1050685, 0xf0b5aa8d, 0xf0687d78, 0xf01d7e1e, 0xefd4ab35, 0xef8e0357, 0xef4984fd, 0xef072e84, 0xeec6fe2b, 0xee88f210, 0xee4d0839, 0xee133e8a, 0xeddb92ce, 0xeda602b0, 0xed728bc3, 0xed412b7b, 0xed11df32, 0xece4a425, 0xecb97779, 0xec905638, 0xec693d4f, 0xec442995, 0xec2117c5, 0xec000482, 0xebe0ec58, 0xebc3cbb7, 0xeba89efa, 0xeb8f6264, 0xeb781221, 0xeb62aa45, 0xeb4f26ce, 0xeb3d83a7, 0xeb2dbca1, 0xeb1fcd7b, 0xeb13b1df, 0xeb096562, 0xeb00e385, 0xeafa27b6, + 0xeaf52d50, 0xeaf1ef9d, 0xeaf069d1, 0xeaf09712, 0xeaf27274, 0xeaf5f6fa, 0xeafb1f95, 0xeb01e728, 0xeb0a4886, 0xeb143e74, 0xeb1fc3a7, 0xeb2cd2c7, 0xeb3b666c, 0xeb4b7925, 0xeb5d0571, 0xeb7005c4, 0xeb847485, 0xeb9a4c11, 0xebb186ba, 0xebca1ec8, 0xebe40e77, 0xebff4ffb, 0xec1bdd80, 0xec39b127, 0xec58c50a, 0xec79133d, 0xec9a95ca, 0xecbd46b7, 0xece12000, 0xed061ba0, 0xed2c3388, 0xed5361a8, 0xed7b9fe8, 0xeda4e830, 0xedcf3461, 0xedfa7e5a, 0xee26bff8, 0xee53f315, 0xee82118a, 0xeeb1152e, 0xeee0f7d8, 0xef11b35d, 0xef434193, 0xef759c51, 0xefa8bd6e, 0xefdc9ec2, 0xf0113a28, 0xf046897c, 0xf07c869d, 0xf0b32b6c, 0xf0ea71cf, 0xf12253af, 0xf15acaf8, 0xf193d19c, 0xf1cd6192, 0xf20774d5, 0xf2420568, 0xf27d0d52, 0xf2b886a1, 0xf2f46b6a, 0xf330b5ca, 0xf36d5fe4, 0xf3aa63e4, 0xf3e7bbfe, 0xf4256270, 0xf463517e, 0xf4a18378, 0xf4dff2b7, 0xf51e999d, 0xf55d7297, 0xf59c781d, 0xf5dba4b2, 0xf61af2e4, 0xf65a5d4c, 0xf699de8f, 0xf6d97160, 0xf719107b, 0xf758b6ab, 0xf7985ec9, 0xf7d803b9, 0xf817a06d, 0xf8572fe6, 0xf896ad32, 0xf8d6136d, 0xf9155dc3, 0xf954876c, 0xf9938bb3, 0xf9d265ef, 0xfa111187, 0xfa4f89f3, 0xfa8dcab9, 0xfacbcf70, 0xfb0993bf, 0xfb47135d, 0xfb844a13, 0xfbc133ba, 0xfbfdcc3b, 0xfc3a0f90, 0xfc75f9c7, 0xfcb186fb, 0xfcecb35d, 0xfd277b2d, 0xfd61dabc, 0xfd9bce6f, 0xfdd552bd, 0xfe0e642d, 0xfe46ff5a, 0xfe7f20f1, 0xfeb6c5b1, 0xfeedea6c, 0xff248c06, 0xff5aa776, 0xff9039c5, 0xffc54010, 0xfff9b786, 0x002d9d69, 0x0060ef0e, 0x0093a9dd, 0x00c5cb50, 0x00f750f6, 0x0128386e, 0x01587f6d, 0x018823b9, 0x01b7232d, 0x01e57bb4, 0x02132b4f, 0x02403010, 0x026c881c, + 0x029831ad, 0x02c32b0d, 0x02ed729c, 0x031706c9, 0x033fe618, 0x03680f20, 0x038f8089, 0x03b63910, 0x03dc3782, 0x04017abf, 0x042601ba, 0x0449cb78, 0x046cd70f, 0x048f23a9, 0x04b0b080, 0x04d17ce2, 0x04f1882b, 0x0510d1cc, 0x052f5947, 0x054d1e2e, 0x056a2024, 0x05865edf, 0x05a1da25, 0x05bc91cb, 0x05d685b9, 0x05efb5e6, 0x0608225b, 0x061fcb2f, 0x0636b08a, 0x064cd2a4, 0x066231c4, 0x0676ce42, 0x068aa883, 0x069dc0fd, 0x06b01833, 0x06c1aeba, 0x06d28532, 0x06e29c4a, 0x06f1f4c2, 0x07008f64, 0x070e6d0a, 0x071b8e9c, 0x0727f50e, 0x0733a162, 0x073e94a5, 0x0748cff4, 0x07525475, 0x075b235d, 0x07633dec, 0x076aa56d, 0x07715b37, 0x077760ae, 0x077cb73f, 
0x07816063, 0x07855d9c, 0x0788b07a, 0x078b5a93, 0x078d5d89, 0x078ebb09, 0x078f74c8, 0x078f8c82, 0x078f0401, 0x078ddd14, 0x078c1993, 0x0789bb60, 0x0786c464, 0x0783368e, 0x077f13d8, 0x077a5e41, 0x077517d0, 0x076f4291, 0x0768e09a, 0x0761f403, 0x075a7eef, 0x07528382, 0x074a03e9, 0x07410255, 0x073780fc, 0x072d8219, 0x072307ec, 0x071814ba, 0x070caaca, 0x0700cc69, 0x06f47be7, 0x06e7bb97, 0x06da8dcf, 0x06ccf4e9, 0x06bef340, 0x06b08b35, 0x06a1bf28, 0x0692917b, 0x06830493, 0x06731ad7, 0x0662d6af, 0x06523a82, 0x064148bc, 0x063003c6, 0x061e6e0c, 0x060c89f8, 0x05fa59f5, 0x05e7e06f, 0x05d51fd0, 0x05c21a83, 0x05aed2ef, 0x059b4b7f, 0x05878698, 0x057386a1, 0x055f4dfc, 0x054adf0e, 0x05363c35, 0x052167d0, 0x050c643b, 0x04f733cf, 0x04e1d8e2, 0x04cc55c8, 0x04b6acd2, 0x04a0e04c, 0x048af281, 0x0474e5b7, 0x045ebc2f, 0x0448782a, 0x04321be1, 0x041ba98b, 0x0405235a, 0x03ee8b7b, 0x03d7e417, 0x03c12f51, 0x03aa6f4a, + 0x0393a61a, 0x037cd5d6, 0x0366008e, 0x034f284c, 0x03384f14, 0x032176e3, 0x030aa1b4, 0x02f3d179, 0x02dd081e, 0x02c64789, 0x02af919c, 0x0298e830, 0x02824d17, 0x026bc220, 0x02554910, 0x023ee3a6, 0x0228939b, 0x02125aa0, 0x01fc3a61, 0x01e63480, 0x01d04a9a, 0x01ba7e44, 0x01a4d10c, 0x018f4478, 0x0179da08, 0x01649334, 0x014f716a, 0x013a7615, 0x0125a295, 0x0110f844, 0x00fc7872, 0x00e8246b, 0x00d3fd70, 0x00c004bc, 0x00ac3b81, 0x0098a2eb, 0x00853c1b, 0x0072082e, 0x005f0837, 0x004c3d40, 0x0039a84d, 0x00274a5a, 0x0015245a, 0x00033739, 0xfff183db, 0xffe00b1b, 0xffcecdcd, 0xffbdccbe, 0xffad08b2, 0xff9c8265, 0xff8c3a8b, 0xff7c31d2, 0xff6c68de, 0xff5ce04c, 0xff4d98b2, 0xff3e929e, 0xff2fce96, 0xff214d18, 0xff130e9b, 0xff05138f, 0xfef75c5b, 0xfee9e960, 0xfedcbaf7, 0xfecfd172, 0xfec32d1a, 0xfeb6ce34, 0xfeaab4fb, 0xfe9ee1a5, 0xfe93545e, 0xfe880d4e, 0xfe7d0c95, 0xfe72524c, 0xfe67de84, 0xfe5db14b, 0xfe53caa3, 0xfe4a2a8d, 0xfe40d0ff, 0xfe37bdec, 0xfe2ef13e, 0xfe266ada, 0xfe1e2a9e, 0xfe163064, 0xfe0e7bfe, 0xfe070d39, 0xfdffe3db, 0xfdf8ffa6, 0xfdf26054, 0xfdec059d, 0xfde5ef30, 0xfde01cb8, 0xfdda8ddc, 0xfdd5423b, 0xfdd03971, 0xfdcb7316, 0xfdc6eeb9, 0xfdc2abe9, 0xfdbeaa2d, 0xfdbae90a, 0xfdb767fd, 0xfdb42681, 0xfdb1240e, 0xfdae6015, 0xfdabda05, 0xfda99147, 0xfda78541, 0xfda5b557, 0xfda420e6, 0xfda2c74b, 0xfda1a7dd, 0xfda0c1f0, 0xfda014d5, 0xfd9f9fdc, 0xfd9f624e, 0xfd9f5b73, 0xfd9f8a91, 0xfd9feeeb, 0xfda087c0, 0xfda1544d, 0xfda253ce, 0xfda3857b, 0xfda4e88a, 0xfda67c31, 0xfda83fa0, 0xfdaa3209, 0xfdac529a, 0xfdaea081, 0xfdb11ae7, 0xfdb3c0f9, + 0xfdb691dc, 0xfdb98cba, 0xfdbcb0b8, 0xfdbffcfa, 0xfdc370a5, 0xfdc70adc, 0xfdcacac1, 0xfdceaf74, 0xfdd2b818, 0xfdd6e3cc, 0xfddb31b0, 0xfddfa0e4, 0xfde43087, 0xfde8dfb8, 0xfdedad97, 0xfdf29942, 0xfdf7a1d8, 0xfdfcc679, 0xfe020645, 0xfe07605b, 0xfe0cd3dc, 0xfe125fe8, 0xfe1803a3, 0xfe1dbe2d, 0xfe238ea9, 0xfe29743c, 0xfe2f6e0a, 0xfe357b39, 0xfe3b9af0, 0xfe41cc56, 0xfe480e94, 0xfe4e60d6, 0xfe54c246, 0xfe5b3212, 0xfe61af68, 0xfe683978, 0xfe6ecf74, 0xfe75708f, 0xfe7c1bff, 0xfe82d0f9, 0xfe898eb7, 0xfe905473, 0xfe972169, 0xfe9df4d8, 0xfea4ce00, 0xfeabac24, 0xfeb28e88, 0xfeb97473, 0xfec05d2d, 0xfec74803, 0xfece3442, 0xfed5213a, 0xfedc0e3c, 0xfee2fa9f, 0xfee9e5b8, 0xfef0cee2, 0xfef7b579, 0xfefe98db, 0xff05786b, 0xff0c538b, 0xff1329a3, 0xff19fa1b, 0xff20c461, 0xff2787e2, 0xff2e4410, 0xff34f85f, 0xff3ba447, 0xff424740, 0xff48e0c9, 0xff4f705f, 0xff55f586, 0xff5c6fc2, 0xff62de9c, 0xff69419f, 0xff6f9858, 0xff75e258, 0xff7c1f32, 0xff824e7e, 0xff886fd4, 0xff8e82d1, 0xff948714, 0xff9a7c40, 0xffa061f8, 0xffa637e6, 0xffabfdb4, 0xffb1b310, 0xffb757ab, 0xffbceb37, 0xffc26d6c, 0xffc7de03, 0xffcd3cb8, 0xffd28949, 
0xffd7c379, 0xffdceb0d, 0xffe1ffcc, 0xffe7017f, 0xffebeff5, 0xfff0cafc, 0xfff59268, 0xfffa460d, 0xfffee5c4, 0x00037166, 0x0007e8d2, 0x000c4be7, 0x00109a87, 0x0014d499, 0x0018fa02, 0x001d0aad, 0x00210688, 0x0024ed80, 0x0028bf89, 0x002c7c95, 0x0030249a, 0x0033b793, 0x00373579, 0x003a9e4b, 0x003df207, 0x004130b0, 0x00445a4a, 0x00476eda, 0x004a6e6a, 0x004d5903, 0x00502eb3, 0x0052ef87, 0x00559b91, 0x005832e3, 0x005ab591, 0x005d23b1, + 0x005f7d5c, 0x0061c2ac, 0x0063f3bc, 0x006610aa, 0x00681995, 0x006a0e9e, 0x006befe8, 0x006dbd95, 0x006f77cd, 0x00711eb5, 0x0072b277, 0x0074333d, 0x0075a131, 0x0076fc81, 0x0078455a, 0x00797bed, 0x007aa068, 0x007bb2fe, 0x007cb3e3, 0x007da349, 0x007e8166, 0x007f4e6f, 0x00800a9d, 0x0080b626, 0x00815144, 0x0081dc31, 0x00825727, 0x0082c261, 0x00831e1c, 0x00836a95, 0x0083a80a, 0x0083d6b7, 0x0083f6dd, 0x008408bb, 0x00840c91, 0x0084029f, 0x0083eb26, 0x0083c667, 0x008394a4, 0x00835620, 0x00830b1d, 0x0082b3dc, 0x008250a3, 0x0081e1b2, 0x0081674f, 0x0080e1bc, 0x0080513c, 0x007fb615, 0x007f1089, 0x007e60dc, 0x007da752, 0x007ce42f, 0x007c17b7, 0x007b422c, 0x007a63d3, 0x00797cef, 0x00788dc2, 0x00779690, 0x0076979c, 0x00759127, 0x00748375, 0x00736ec6, 0x0072535c, 0x00713179, 0x0070095c, 0x006edb47, 0x006da779, 0x006c6e31, 0x006b2faf, 0x0069ec30, 0x0068a3f3, 0x00675735, 0x00660633, 0x0064b129, 0x00635852, 0x0061fbea, 0x00609c2a, 0x005f394d, 0x005dd38c, 0x005c6b1e, 0x005b003c, 0x0059931c, 0x005823f5, 0x0056b2fc, 0x00554066, 0x0053cc66, 0x00525730, 0x0050e0f6, 0x004f69ea, 0x004df23c, 0x004c7a1d, 0x004b01bb, 0x00498945, 0x004810e8, 0x004698d0, 0x0045212a, 0x0043aa20, 0x004233dd, 0x0040be88, 0x003f4a4b, 0x003dd74c, 0x003c65b3, 0x003af5a4, 0x00398744, 0x00381ab7, 0x0036b020, 0x003547a0, 0x0033e15a, 0x00327d6b, 0x00311bf5, 0x002fbd15, 0x002e60e9, 0x002d078c, 0x002bb11b, 0x002a5db0, 0x00290d66, 0x0027c054, 0x00267693, 0x0025303b, 0x0023ed60, 0x0022ae19, 0x0021727a, 0x00203a97, 0x001f0682, 0x001dd64d, 0x001caa0a, 0x001b81c7, 0x001a5d96, + 0x00193d84, 0x0018219f, 0x001709f3, 0x0015f68d, 0x0014e779, 0x0013dcc0, 0x0012d66c, 0x0011d487, 0x0010d717, 0x000fde26, 0x000ee9b8, 0x000df9d5, 0x000d0e82, 0x000c27c2, 0x000b459a, 0x000a680d, 0x00098f1d, 0x0008bacc, 0x0007eb1a, 0x00072009, 0x00065999, 0x000597c7, 0x0004da94, 0x000421fc, 0x00036dfd, 0x0002be95, 0x000213be, 0x00016d76, 0x0000cbb6, 0x00002e7a, 0xffff95bc, 0xffff0175, 0xfffe719f, 0xfffde632, 0xfffd5f26, 0xfffcdc72, 0xfffc5e10, 0xfffbe3f4, 0xfffb6e16, 0xfffafc6b, 0xfffa8eea, 0xfffa2588, 0xfff9c039, 0xfff95ef2, 0xfff901a8, 0xfff8a84e, 0xfff852d8, 0xfff8013a, 0xfff7b366, 0xfff7694f, 0xfff722e9, 0xfff6e024, 0xfff6a0f4, 0xfff66549, 0xfff62d17, 0xfff5f84d, 0xfff5c6de, 0xfff598bb, 0xfff56dd4, 0xfff5461a, 0xfff5217e, 0xfff4fff1, 0xfff4e162, 0xfff4c5c3, 0xfff4ad03, 0xfff49712, 0xfff483e1, 0xfff47360, 0xfff4657e, 0xfff45a2c, 0xfff45159, 0xfff44af5, 0xfff446f1, 0xfff4453b, 0xfff445c5, 0xfff4487d, 0xfff44d54, 0xfff4543a, 0xfff45d1e, 0xfff467f1, 0xfff474a4, 0xfff48325, 0xfff49366, 0xfff4a556, 0xfff4b8e7, 0xfff4ce09, 0xfff4e4ad, 0xfff4fcc2, 0xfff5163b, 0xfff53109, 0xfff54d1b, 0xfff56a65, 0xfff588d7, 0xfff5a863, 0xfff5c8fb, 0xfff5ea91, 0xfff60d16, 0xfff6307e, 0xfff654bb, 0xfff679bf, 0xfff69f7d, 0xfff6c5e9, 0xfff6ecf5, 0xfff71495, 0xfff73cbe, 0xfff76562, 0xfff78e75, 0xfff7b7ed, 0xfff7e1be, 0xfff80bdc, 0xfff8363c, 0xfff860d4, 0xfff88b99, 0xfff8b681, 0xfff8e182, 0xfff90c92, 0xfff937a6, 0xfff962b7, 0xfff98dba, 0xfff9b8a7, 0xfff9e376, 0xfffa0e1d, 0xfffa3895, 0xfffa62d5, 0xfffa8cd6, 0xfffab691, 0xfffadfff, 0xfffb0917, 0x00000000 // this one is needed for lerping 
the last coefficient }; diff --git a/services/audioflinger/AudioResamplerSinc.h b/services/audioflinger/AudioResamplerSinc.h index 25fc025..48bc747 100644 --- a/services/audioflinger/AudioResamplerSinc.h +++ b/services/audioflinger/AudioResamplerSinc.h @@ -71,7 +71,7 @@ private: // ---------------------------------------------------------------------------- static const int32_t RESAMPLE_FIR_NUM_COEF = 8; - static const int32_t RESAMPLE_FIR_LERP_INT_BITS = 4; + static const int32_t RESAMPLE_FIR_LERP_INT_BITS = 7; struct Constants { // we have 16 coefs samples per zero-crossing -- cgit v1.1 From 93d0767a8a9ee9d649eea9afac59f778e29a6a54 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Fri, 26 Oct 2012 14:11:01 -0700 Subject: reenable the cubic resampler cubic resampler was disabled because it hadn't been qualified, however after I did some tests, it does improve significantly the sound quality over the order-1 resampler, even if it is still quite bad. also HIGH_QUALITY resampler was partially disabled, it's now fully enabled. It's a big improvement over the cubic resampler in terms of aliasing noise (it's not as good in the pass-band). Change-Id: I70e3658c255896588642697be9eb594ff4ec0f8b --- services/audioflinger/Android.mk | 4 +--- services/audioflinger/AudioResampler.cpp | 4 ---- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index bd9421c..60f231e 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -19,11 +19,9 @@ LOCAL_SRC_FILES:= \ AudioResampler.cpp.arm \ AudioPolicyService.cpp \ ServiceUtilities.cpp \ + AudioResamplerCubic.cpp.arm \ AudioResamplerSinc.cpp.arm -# uncomment to enable AudioResampler::MED_QUALITY -# LOCAL_SRC_FILES += AudioResamplerCubic.cpp.arm - LOCAL_SRC_FILES += StateQueue.cpp # uncomment for debugging timing problems related to StateQueue::push() diff --git a/services/audioflinger/AudioResampler.cpp b/services/audioflinger/AudioResampler.cpp index ffea9b9..2c3c719 100644 --- a/services/audioflinger/AudioResampler.cpp +++ b/services/audioflinger/AudioResampler.cpp @@ -82,10 +82,8 @@ bool AudioResampler::qualityIsSupported(src_quality quality) switch (quality) { case DEFAULT_QUALITY: case LOW_QUALITY: -#if 0 // these have not been qualified recently so are not supported unless explicitly requested case MED_QUALITY: case HIGH_QUALITY: -#endif case VERY_HIGH_QUALITY: return true; default: @@ -190,12 +188,10 @@ AudioResampler* AudioResampler::create(int bitDepth, int inChannelCount, ALOGV("Create linear Resampler"); resampler = new AudioResamplerOrder1(bitDepth, inChannelCount, sampleRate); break; -#if 0 // disabled because it has not been qualified recently, if requested will use default: case MED_QUALITY: ALOGV("Create cubic Resampler"); resampler = new AudioResamplerCubic(bitDepth, inChannelCount, sampleRate); break; -#endif case HIGH_QUALITY: ALOGV("Create HIGH_QUALITY sinc Resampler"); resampler = new AudioResamplerSinc(bitDepth, inChannelCount, sampleRate); -- cgit v1.1 From 0fc2cb59d5f77412f5922540d67fea81f4d1744b Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Sun, 21 Oct 2012 01:01:38 -0700 Subject: a test app for the resamplers Change-Id: I66852d90d384f1d9e77b51ad1a1ebdbaf61d0607 --- services/audioflinger/Android.mk | 23 ++++ services/audioflinger/test-resample.cpp | 229 ++++++++++++++++++++++++++++++++ 2 files changed, 252 insertions(+) create mode 100644 services/audioflinger/test-resample.cpp diff --git 
a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index 60f231e..2899953 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -78,4 +78,27 @@ LOCAL_CFLAGS += -UFAST_TRACKS_AT_NON_NATIVE_SAMPLE_RATE include $(BUILD_SHARED_LIBRARY) +# +# build audio resampler test tool +# +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + test-resample.cpp \ + AudioResampler.cpp.arm \ + AudioResamplerCubic.cpp.arm \ + AudioResamplerSinc.cpp.arm + +LOCAL_SHARED_LIBRARIES := \ + libdl \ + libcutils \ + libutils + +LOCAL_MODULE:= test-resample + +LOCAL_MODULE_TAGS := optional + +include $(BUILD_EXECUTABLE) + + include $(call all-makefiles-under,$(LOCAL_PATH)) diff --git a/services/audioflinger/test-resample.cpp b/services/audioflinger/test-resample.cpp new file mode 100644 index 0000000..a55a32b --- /dev/null +++ b/services/audioflinger/test-resample.cpp @@ -0,0 +1,229 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "AudioResampler.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace android; + +struct HeaderWav { + HeaderWav(size_t size, int nc, int sr, int bits) { + strncpy(RIFF, "RIFF", 4); + chunkSize = size + sizeof(HeaderWav); + strncpy(WAVE, "WAVE", 4); + strncpy(fmt, "fmt ", 4); + fmtSize = 16; + audioFormat = 1; + numChannels = nc; + samplesRate = sr; + byteRate = sr * numChannels * (bits/8); + align = nc*(bits/8); + bitsPerSample = bits; + strncpy(data, "data", 4); + dataSize = size; + } + + char RIFF[4]; // RIFF + uint32_t chunkSize; // File size + char WAVE[4]; // WAVE + char fmt[4]; // fmt\0 + uint32_t fmtSize; // fmt size + uint16_t audioFormat; // 1=PCM + uint16_t numChannels; // num channels + uint32_t samplesRate; // sample rate in hz + uint32_t byteRate; // Bps + uint16_t align; // 2=16-bit mono, 4=16-bit stereo + uint16_t bitsPerSample; // bits per sample + char data[4]; // "data" + uint32_t dataSize; // size +}; + +static int usage(const char* name) { + fprintf(stderr,"Usage: %s [-p] [-h] [-q ] [-i ] [-o ] \n", name); + fprintf(stderr,"-p - enable profiling\n"); + fprintf(stderr,"-h - create wav file\n"); + fprintf(stderr,"-q - resampler quality\n"); + fprintf(stderr," dq : default quality\n"); + fprintf(stderr," lq : low quality\n"); + fprintf(stderr," mq : medium quality\n"); + fprintf(stderr," hq : high quality\n"); + fprintf(stderr," vhq : very high quality\n"); + fprintf(stderr,"-i - input file sample rate\n"); + fprintf(stderr,"-o - output file sample rate\n"); + return -1; +} + +int main(int argc, char* argv[]) { + + bool profiling = false; + bool writeHeader = false; + int input_freq = 0; + int output_freq = 0; + AudioResampler::src_quality quality = AudioResampler::DEFAULT_QUALITY; + + int ch; + while ((ch = getopt(argc, argv, "phq:i:o:")) != -1) { + switch (ch) { + case 'p': + profiling = true; + break; + case 'h': + writeHeader = true; + break; + case 'q': + if 
(!strcmp(optarg, "dq")) + quality = AudioResampler::DEFAULT_QUALITY; + else if (!strcmp(optarg, "lq")) + quality = AudioResampler::LOW_QUALITY; + else if (!strcmp(optarg, "mq")) + quality = AudioResampler::MED_QUALITY; + else if (!strcmp(optarg, "hq")) + quality = AudioResampler::HIGH_QUALITY; + else if (!strcmp(optarg, "vhq")) + quality = AudioResampler::VERY_HIGH_QUALITY; + else { + usage(argv[0]); + return -1; + } + break; + case 'i': + input_freq = atoi(optarg); + break; + case 'o': + output_freq = atoi(optarg); + break; + case '?': + default: + usage(argv[0]); + return -1; + } + } + argc -= optind; + + if (argc != 2) { + usage(argv[0]); + return -1; + } + + argv += optind; + + // ---------------------------------------------------------- + + struct stat st; + if (stat(argv[0], &st) < 0) { + fprintf(stderr, "stat: %s\n", strerror(errno)); + return -1; + } + + int input_fd = open(argv[0], O_RDONLY); + if (input_fd < 0) { + fprintf(stderr, "open: %s\n", strerror(errno)); + return -1; + } + + size_t input_size = st.st_size; + void* input_vaddr = mmap(0, input_size, PROT_READ, MAP_PRIVATE, input_fd, + 0); + if (input_vaddr == MAP_FAILED ) { + fprintf(stderr, "mmap: %s\n", strerror(errno)); + return -1; + } + +// printf("input sample rate: %d Hz\n", input_freq); +// printf("output sample rate: %d Hz\n", output_freq); +// printf("input mmap: %p, size=%u\n", input_vaddr, input_size); + + // ---------------------------------------------------------- + + class Provider: public AudioBufferProvider { + int16_t* mAddr; + size_t mNumFrames; + public: + Provider(const void* addr, size_t size) { + mAddr = (int16_t*) addr; + mNumFrames = size / sizeof(int16_t); + } + virtual status_t getNextBuffer(Buffer* buffer, + int64_t pts = kInvalidPTS) { + buffer->frameCount = mNumFrames; + buffer->i16 = mAddr; + return NO_ERROR; + } + virtual void releaseBuffer(Buffer* buffer) { + } + } provider(input_vaddr, input_size); + + size_t output_size = 2 * 2 * ((int64_t) input_size * output_freq) + / input_freq; + output_size &= ~7; // always stereo, 32-bits + + void* output_vaddr = malloc(output_size); + memset(output_vaddr, 0, output_size); + + AudioResampler* resampler = AudioResampler::create(16, 1, output_freq, + quality); + + size_t out_frames = output_size/8; + resampler->setSampleRate(input_freq); + resampler->setVolume(0x1000, 0x1000); + resampler->resample((int*) output_vaddr, out_frames, &provider); + + if (profiling) { + memset(output_vaddr, 0, output_size); + timespec start, end; + clock_gettime(CLOCK_MONOTONIC_HR, &start); + resampler->resample((int*) output_vaddr, out_frames, &provider); + clock_gettime(CLOCK_MONOTONIC_HR, &end); + int64_t start_ns = start.tv_sec * 1000000000LL + start.tv_nsec; + int64_t end_ns = end.tv_sec * 1000000000LL + end.tv_nsec; + int64_t time = end_ns - start_ns; + printf("%f Mspl/s\n", out_frames/(time/1e9)/1e6); + } + + // down-mix (we just truncate and keep the left channel) + int32_t* out = (int32_t*) output_vaddr; + int16_t* convert = (int16_t*) malloc(out_frames * sizeof(int16_t)); + for (size_t i = 0; i < out_frames; i++) { + convert[i] = out[i * 2] >> 12; + } + + // write output to disk + int output_fd = open(argv[1], O_WRONLY | O_CREAT | O_TRUNC, + S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH); + if (output_fd < 0) { + fprintf(stderr, "open: %s\n", strerror(errno)); + return -1; + } + + if (writeHeader) { + HeaderWav wav(out_frames*sizeof(int16_t), 1, output_freq, 16); + write(output_fd, &wav, sizeof(wav)); + } + + write(output_fd, convert, out_frames * sizeof(int16_t)); + 
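// [Editor's note -- illustrative aside, not part of the original patch. A typical
// invocation of this tool, assuming a raw 16-bit mono PCM input file (the format
// the Provider above expects), would be something like:
//   test-resample -h -q vhq -i 44100 -o 48000 input_mono_16bit.raw output.wav
// With -h, the WAV header written just above describes a mono 16-bit file at the
// -o rate; without it the output is headerless 16-bit PCM. The file names here
// are hypothetical.]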
close(output_fd); + + return 0; +} -- cgit v1.1 From ddf3c5025e2f6f35a4c188c19f30142c64a092c4 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Fri, 12 Oct 2012 16:56:11 -0700 Subject: Camera2: Compile with warnings, fix warnings Change-Id: I557350abb32b0480f5da7dcecadfbe9edbe53361 --- services/camera/libcameraservice/Android.mk | 3 +++ services/camera/libcameraservice/Camera2Client.cpp | 15 +++++++-------- services/camera/libcameraservice/Camera2Device.cpp | 14 +++++++------- services/camera/libcameraservice/CameraClient.cpp | 4 ---- .../camera/libcameraservice/CameraHardwareInterface.h | 5 +++-- services/camera/libcameraservice/camera2/BurstCapture.cpp | 9 +++++---- .../camera/libcameraservice/camera2/CallbackProcessor.cpp | 4 +--- .../camera/libcameraservice/camera2/CaptureSequencer.cpp | 14 +++++++------- .../camera/libcameraservice/camera2/FrameProcessor.cpp | 5 ++--- .../camera/libcameraservice/camera2/JpegCompressor.cpp | 6 +++--- .../camera/libcameraservice/camera2/JpegProcessor.cpp | 5 ++--- services/camera/libcameraservice/camera2/Parameters.cpp | 3 +-- .../libcameraservice/camera2/StreamingProcessor.cpp | 3 +-- services/camera/libcameraservice/camera2/ZslProcessor.cpp | 5 +++-- 14 files changed, 45 insertions(+), 50 deletions(-) diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index eff47c8..801afe9 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -40,6 +40,9 @@ LOCAL_C_INCLUDES += \ system/media/camera/include \ external/jpeg + +LOCAL_CFLAGS += -Wall -Wextra + LOCAL_MODULE:= libcameraservice include $(BUILD_SHARED_LIBRARY) diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index e59a240..9627416 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -37,10 +37,6 @@ static int getCallingPid() { return IPCThreadState::self()->getCallingPid(); } -static int getCallingUid() { - return IPCThreadState::self()->getCallingUid(); -} - // Interface used by CameraService Camera2Client::Camera2Client(const sp& cameraService, @@ -370,7 +366,6 @@ status_t Camera2Client::dump(int fd, const Vector& args) { void Camera2Client::disconnect() { ATRACE_CALL(); Mutex::Autolock icl(mICameraLock); - status_t res; // Allow both client and the media server to disconnect at all times int callingPid = getCallingPid(); @@ -575,7 +570,7 @@ void Camera2Client::setPreviewCallbackFlag(int flag) { ATRACE_CALL(); ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag); Mutex::Autolock icl(mICameraLock); - status_t res; + if ( checkPid(__FUNCTION__) != OK) return; SharedParameters::Lock l(mParameters); @@ -1062,7 +1057,7 @@ status_t Camera2Client::cancelAutoFocus() { return OK; } -status_t Camera2Client::takePicture(int msgType) { +status_t Camera2Client::takePicture(int /*msgType*/) { ATRACE_CALL(); Mutex::Autolock icl(mICameraLock); status_t res; @@ -1244,7 +1239,7 @@ status_t Camera2Client::commandPlayRecordingSoundL() { return OK; } -status_t Camera2Client::commandStartFaceDetectionL(int type) { +status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) { ALOGV("%s: Camera %d: Starting face detection", __FUNCTION__, mCameraId); status_t res; @@ -1331,6 +1326,8 @@ void Camera2Client::notifyError(int errorCode, int arg1, int arg2) { } void Camera2Client::notifyShutter(int frameNumber, nsecs_t timestamp) { + (void)frameNumber; + (void)timestamp; 
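// [Editor's note -- not part of the original change: with -Wall -Wextra now enabled
// in libcameraservice's Android.mk (earlier in this patch), parameters that are only
// consumed by ALOGV() count as unused whenever verbose logging is compiled out. The
// patch therefore silences -Wunused-parameter either by (void)-casting the arguments,
// as done just above, or by commenting out the parameter name in the signature, e.g.
//   void notifyShutter(int /*frameNumber*/, nsecs_t /*timestamp*/);
// Neither form changes behavior.]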
ALOGV("%s: Shutter notification for frame %d at time %lld", __FUNCTION__, frameNumber, timestamp); } @@ -1452,6 +1449,8 @@ void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) { } void Camera2Client::notifyAutoWhitebalance(uint8_t newState, int triggerId) { + (void)newState; + (void)triggerId; ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", __FUNCTION__, newState, triggerId); } diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/Camera2Device.cpp index d6445c1..5bfa085 100644 --- a/services/camera/libcameraservice/Camera2Device.cpp +++ b/services/camera/libcameraservice/Camera2Device.cpp @@ -765,7 +765,6 @@ status_t Camera2Device::MetadataQueue::setStreamSlot( ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); Mutex::Autolock l(mMutex); - status_t res; if (mStreamSlotCount > 0) { freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); @@ -785,7 +784,7 @@ status_t Camera2Device::MetadataQueue::setStreamSlot( } status_t Camera2Device::MetadataQueue::dump(int fd, - const Vector& args) { + const Vector& /*args*/) { ATRACE_CALL(); String8 result; status_t notLocked; @@ -894,12 +893,13 @@ int Camera2Device::MetadataQueue::consumer_free( { ATRACE_CALL(); MetadataQueue *queue = getInstance(q); + (void)queue; free_camera_metadata(old_buffer); return OK; } int Camera2Device::MetadataQueue::producer_dequeue( - const camera2_frame_queue_dst_ops_t *q, + const camera2_frame_queue_dst_ops_t * /*q*/, size_t entries, size_t bytes, camera_metadata_t **buffer) { @@ -912,7 +912,7 @@ int Camera2Device::MetadataQueue::producer_dequeue( } int Camera2Device::MetadataQueue::producer_cancel( - const camera2_frame_queue_dst_ops_t *q, + const camera2_frame_queue_dst_ops_t * /*q*/, camera_metadata_t *old_buffer) { ATRACE_CALL(); @@ -1184,7 +1184,7 @@ status_t Camera2Device::StreamAdapter::setTransform(int transform) { } status_t Camera2Device::StreamAdapter::dump(int fd, - const Vector& args) { + const Vector& /*args*/) { ATRACE_CALL(); String8 result = String8::format(" Stream %d: %d x %d, format 0x%x\n", mId, mWidth, mHeight, mFormat); @@ -1423,7 +1423,7 @@ status_t Camera2Device::ReprocessStreamAdapter::pushIntoStream( } status_t Camera2Device::ReprocessStreamAdapter::dump(int fd, - const Vector& args) { + const Vector& /*args*/) { ATRACE_CALL(); String8 result = String8::format(" Reprocess stream %d: %d x %d, fmt 0x%x\n", @@ -1444,7 +1444,7 @@ int Camera2Device::ReprocessStreamAdapter::acquire_buffer( const camera2_stream_in_ops_t *w, buffer_handle_t** buffer) { ATRACE_CALL(); - int res; + ReprocessStreamAdapter* stream = const_cast( static_cast(w)); diff --git a/services/camera/libcameraservice/CameraClient.cpp b/services/camera/libcameraservice/CameraClient.cpp index b930c02..006a9c9 100644 --- a/services/camera/libcameraservice/CameraClient.cpp +++ b/services/camera/libcameraservice/CameraClient.cpp @@ -34,10 +34,6 @@ static int getCallingPid() { return IPCThreadState::self()->getCallingPid(); } -static int getCallingUid() { - return IPCThreadState::self()->getCallingUid(); -} - CameraClient::CameraClient(const sp& cameraService, const sp& cameraClient, int cameraId, int cameraFacing, int clientPid, int servicePid): diff --git a/services/camera/libcameraservice/CameraHardwareInterface.h b/services/camera/libcameraservice/CameraHardwareInterface.h index 05ac9fa..167b37c 100644 --- a/services/camera/libcameraservice/CameraHardwareInterface.h +++ b/services/camera/libcameraservice/CameraHardwareInterface.h @@ -427,7 +427,7 @@ public: /** * Dump 
state of the camera hardware */ - status_t dump(int fd, const Vector& args) const + status_t dump(int fd, const Vector& /*args*/) const { ALOGV("%s(%s)", __FUNCTION__, mName.string()); if (mDevice->ops->dump) @@ -584,9 +584,10 @@ private: #endif static int __lock_buffer(struct preview_stream_ops* w, - buffer_handle_t* buffer) + buffer_handle_t* /*buffer*/) { ANativeWindow *a = anw(w); + (void)a; return 0; } diff --git a/services/camera/libcameraservice/camera2/BurstCapture.cpp b/services/camera/libcameraservice/camera2/BurstCapture.cpp index f56c50c..192d419 100644 --- a/services/camera/libcameraservice/camera2/BurstCapture.cpp +++ b/services/camera/libcameraservice/camera2/BurstCapture.cpp @@ -38,7 +38,8 @@ BurstCapture::BurstCapture(wp client, wp sequen BurstCapture::~BurstCapture() { } -status_t BurstCapture::start(Vector &metadatas, int32_t firstCaptureId) { +status_t BurstCapture::start(Vector &/*metadatas*/, + int32_t /*firstCaptureId*/) { ALOGE("Not completely implemented"); return INVALID_OPERATION; } @@ -75,7 +76,7 @@ bool BurstCapture::threadLoop() { CpuConsumer::LockedBuffer* BurstCapture::jpegEncode( CpuConsumer::LockedBuffer *imgBuffer, - int quality) + int /*quality*/) { ALOGV("%s", __FUNCTION__); @@ -91,7 +92,7 @@ CpuConsumer::LockedBuffer* BurstCapture::jpegEncode( buffers.push_back(imgEncoded); sp jpeg = new JpegCompressor(); - status_t res = jpeg->start(buffers, 1); + jpeg->start(buffers, 1); bool success = jpeg->waitForDone(10 * 1e9); if(success) { @@ -103,7 +104,7 @@ CpuConsumer::LockedBuffer* BurstCapture::jpegEncode( } } -status_t BurstCapture::processFrameAvailable(sp &client) { +status_t BurstCapture::processFrameAvailable(sp &/*client*/) { ALOGE("Not implemented"); return INVALID_OPERATION; } diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp index 3e9c255..307cfab 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp @@ -119,7 +119,6 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { status_t CallbackProcessor::deleteStream() { ATRACE_CALL(); - status_t res; Mutex::Autolock l(mInputMutex); @@ -144,7 +143,7 @@ int CallbackProcessor::getStreamId() const { return mCallbackStreamId; } -void CallbackProcessor::dump(int fd, const Vector& args) const { +void CallbackProcessor::dump(int /*fd*/, const Vector& /*args*/) const { } bool CallbackProcessor::threadLoop() { @@ -173,7 +172,6 @@ status_t CallbackProcessor::processNewCallback(sp &client) { ATRACE_CALL(); status_t res; - int callbackHeapId; sp callbackHeap; size_t heapIdx; diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp index fe4abc0..b228faf 100644 --- a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp +++ b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp @@ -128,7 +128,7 @@ void CaptureSequencer::onCaptureAvailable(nsecs_t timestamp, } -void CaptureSequencer::dump(int fd, const Vector& args) { +void CaptureSequencer::dump(int fd, const Vector& /*args*/) { String8 result; if (mCaptureRequest.entryCount() != 0) { result = " Capture request:\n"; @@ -182,7 +182,6 @@ const CaptureSequencer::StateManager }; bool CaptureSequencer::threadLoop() { - status_t res; sp client = mClient.promote(); if (client == 0) return false; @@ -213,7 +212,8 @@ bool CaptureSequencer::threadLoop() { return true; } 
-CaptureSequencer::CaptureState CaptureSequencer::manageIdle(sp &client) { +CaptureSequencer::CaptureState CaptureSequencer::manageIdle( + sp &/*client*/) { status_t res; Mutex::Autolock l(mInputMutex); while (!mStartCapture) { @@ -350,13 +350,13 @@ CaptureSequencer::CaptureState CaptureSequencer::manageZslStart( } CaptureSequencer::CaptureState CaptureSequencer::manageZslWaiting( - sp &client) { + sp &/*client*/) { ALOGV("%s", __FUNCTION__); return DONE; } CaptureSequencer::CaptureState CaptureSequencer::manageZslReprocessing( - sp &client) { + sp &/*client*/) { ALOGV("%s", __FUNCTION__); return START; } @@ -378,7 +378,7 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardStart( } CaptureSequencer::CaptureState CaptureSequencer::manageStandardPrecaptureWait( - sp &client) { + sp &/*client*/) { status_t res; ATRACE_CALL(); Mutex::Autolock l(mInputMutex); @@ -578,7 +578,7 @@ CaptureSequencer::CaptureState CaptureSequencer::manageBurstCaptureStart( } CaptureSequencer::CaptureState CaptureSequencer::manageBurstCaptureWait( - sp &client) { + sp &/*client*/) { status_t res; ATRACE_CALL(); diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp index 064607c..e032522 100644 --- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp +++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp @@ -62,7 +62,7 @@ status_t FrameProcessor::removeListener(int32_t minId, return OK; } -void FrameProcessor::dump(int fd, const Vector& args) { +void FrameProcessor::dump(int fd, const Vector& /*args*/) { String8 result(" Latest received frame:\n"); write(fd, result.string(), result.size()); mLastFrame.dump(fd, 2, 6); @@ -128,7 +128,6 @@ void FrameProcessor::processNewFrames(sp &client) { status_t FrameProcessor::processListeners(const CameraMetadata &frame, sp &client) { - status_t res; ATRACE_CALL(); camera_metadata_ro_entry_t entry; @@ -173,7 +172,7 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, ATRACE_CALL(); camera_metadata_ro_entry_t entry; bool enableFaceDetect; - int maxFaces; + { SharedParameters::Lock l(client->getParameters()); enableFaceDetect = l.mParameters.enableFaceDetect; diff --git a/services/camera/libcameraservice/camera2/JpegCompressor.cpp b/services/camera/libcameraservice/camera2/JpegCompressor.cpp index 702ef58..c9af71e 100644 --- a/services/camera/libcameraservice/camera2/JpegCompressor.cpp +++ b/services/camera/libcameraservice/camera2/JpegCompressor.cpp @@ -144,7 +144,7 @@ bool JpegCompressor::isBusy() { } // old function -- TODO: update for new buffer type -bool JpegCompressor::isStreamInUse(uint32_t id) { +bool JpegCompressor::isStreamInUse(uint32_t /*id*/) { ALOGV("%s", __FUNCTION__); Mutex::Autolock lock(mBusyMutex); @@ -203,14 +203,14 @@ void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) { dest->free_in_buffer = kMaxJpegSize; } -boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr cinfo) { +boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr /*cinfo*/) { ALOGV("%s", __FUNCTION__); ALOGE("%s: JPEG destination buffer overflow!", __FUNCTION__); return true; } -void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) { +void JpegCompressor::jpegTermDestination(j_compress_ptr /*cinfo*/) { ALOGV("%s", __FUNCTION__); ALOGV("%s: Done writing JPEG data. 
%d bytes left in buffer", __FUNCTION__, cinfo->dest->free_in_buffer); diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.cpp b/services/camera/libcameraservice/camera2/JpegProcessor.cpp index ffc072b..6280f83 100644 --- a/services/camera/libcameraservice/camera2/JpegProcessor.cpp +++ b/services/camera/libcameraservice/camera2/JpegProcessor.cpp @@ -139,7 +139,6 @@ status_t JpegProcessor::updateStream(const Parameters ¶ms) { status_t JpegProcessor::deleteStream() { ATRACE_CALL(); - status_t res; Mutex::Autolock l(mInputMutex); @@ -164,7 +163,7 @@ int JpegProcessor::getStreamId() const { return mCaptureStreamId; } -void JpegProcessor::dump(int fd, const Vector& args) const { +void JpegProcessor::dump(int /*fd*/, const Vector& /*args*/) const { } bool JpegProcessor::threadLoop() { @@ -356,7 +355,7 @@ size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) { // Find End of Image // Scan JPEG buffer until End of Image (EOI) bool foundEnd = false; - for (size; size <= maxSize - MARKER_LENGTH; size++) { + for ( ; size <= maxSize - MARKER_LENGTH; size++) { if ( checkJpegEnd(jpegBuffer + size) ) { foundEnd = true; size += MARKER_LENGTH; diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index e9e5e79..93927e6 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -951,7 +951,6 @@ status_t Parameters::buildQuirks() { camera_metadata_ro_entry_t Parameters::staticInfo(uint32_t tag, size_t minCount, size_t maxCount) const { - status_t res; camera_metadata_ro_entry_t entry = info->find(tag); if (CC_UNLIKELY( entry.count == 0 )) { @@ -2430,7 +2429,7 @@ Parameters::CropRegion Parameters::calculateCropRegion( return crop; } -int32_t Parameters::fpsFromRange(int32_t min, int32_t max) const { +int32_t Parameters::fpsFromRange(int32_t /*min*/, int32_t max) const { return max; } diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp index 207f780..6ea27b2 100644 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp @@ -447,7 +447,6 @@ status_t StreamingProcessor::incrementStreamingIds() { ATRACE_CALL(); Mutex::Autolock m(mMutex); - status_t res; mPreviewRequestId++; if (mPreviewRequestId >= Camera2Client::kPreviewRequestIdEnd) { mPreviewRequestId = Camera2Client::kPreviewRequestIdStart; @@ -628,7 +627,7 @@ void StreamingProcessor::releaseRecordingFrame(const sp& mem) { } -status_t StreamingProcessor::dump(int fd, const Vector& args) { +status_t StreamingProcessor::dump(int fd, const Vector& /*args*/) { String8 result; result.append(" Current requests:\n"); diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp index 1937955..9584028 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp @@ -69,11 +69,12 @@ void ZslProcessor::onFrameAvailable() { } } -void ZslProcessor::onFrameAvailable(int32_t frameId, const CameraMetadata &frame) { +void ZslProcessor::onFrameAvailable(int32_t /*frameId*/, const CameraMetadata &frame) { Mutex::Autolock l(mInputMutex); camera_metadata_ro_entry_t entry; entry = frame.find(ANDROID_SENSOR_TIMESTAMP); nsecs_t timestamp = entry.data.i64[0]; + (void)timestamp; ALOGVV("Got 
preview frame for timestamp %lld", timestamp); if (mState != RUNNING) return; @@ -367,7 +368,7 @@ status_t ZslProcessor::clearZslQueueLocked() { return OK; } -void ZslProcessor::dump(int fd, const Vector& args) const { +void ZslProcessor::dump(int fd, const Vector& /*args*/) const { Mutex::Autolock l(mInputMutex); if (!mLatestCapturedRequest.isEmpty()) { String8 result(" Latest ZSL capture request:\n"); -- cgit v1.1 From dc8a0d75bd7b1343cd65c3c7f6e0f91ca0fa6946 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 6 Mar 2012 11:34:22 -0800 Subject: Mark volume fields private Change-Id: I8ffca0460195263d159aa13015c246122d8556a2 --- services/audioflinger/AudioFlinger.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 49e2b2c..55c2b8c 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -1326,11 +1326,11 @@ public: virtual void threadLoop_mix(); virtual void threadLoop_sleepTime(); + private: // volumes last sent to audio HAL with stream->set_volume() float mLeftVolFloat; float mRightVolFloat; -private: // prepareTracks_l() tells threadLoop_mix() the name of the single active track sp mActiveTrack; public: -- cgit v1.1 From 274c02ee1464d8948913ac70e64e8dbb80f82ad7 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 10 Jul 2012 11:44:31 -0700 Subject: Clean up constructor and derivation whitespace Change-Id: I47d688a9c10c4c3c868accc34102fb402ebcac62 --- services/audioflinger/AudioFlinger.h | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 49e2b2c..1081009 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -356,7 +356,7 @@ private: RECORD // Thread class is RecordThread }; - ThreadBase (const sp& audioFlinger, audio_io_handle_t id, + ThreadBase(const sp& audioFlinger, audio_io_handle_t id, audio_devices_t outDevice, audio_devices_t inDevice, type_t type); virtual ~ThreadBase(); @@ -810,6 +810,7 @@ private: // implement FastMixerState::VolumeProvider interface virtual uint32_t getVolumeLR(); + virtual status_t setSyncEvent(const sp& event); protected: @@ -986,7 +987,7 @@ private: class OutputTrack : public Track { public: - class Buffer: public AudioBufferProvider::Buffer { + class Buffer : public AudioBufferProvider::Buffer { public: int16_t *mBuffer; }; @@ -1025,8 +1026,8 @@ private: DuplicatingThread* const mSourceThread; // for waitTimeMs() in write() }; // end of OutputTrack - PlaybackThread (const sp& audioFlinger, AudioStreamOut* output, - audio_io_handle_t id, audio_devices_t device, type_t type); + PlaybackThread(const sp& audioFlinger, AudioStreamOut* output, + audio_io_handle_t id, audio_devices_t device, type_t type); virtual ~PlaybackThread(); void dump(int fd, const Vector& args); @@ -1248,11 +1249,11 @@ public: class MixerThread : public PlaybackThread { public: - MixerThread (const sp& audioFlinger, - AudioStreamOut* output, - audio_io_handle_t id, - audio_devices_t device, - type_t type = MIXER); + MixerThread(const sp& audioFlinger, + AudioStreamOut* output, + audio_io_handle_t id, + audio_devices_t device, + type_t type = MIXER); virtual ~MixerThread(); // Thread virtuals @@ -1305,8 +1306,8 @@ public: class DirectOutputThread : public PlaybackThread { public: - DirectOutputThread (const sp& audioFlinger, AudioStreamOut* output, - audio_io_handle_t id, 
audio_devices_t device); + DirectOutputThread(const sp& audioFlinger, AudioStreamOut* output, + audio_io_handle_t id, audio_devices_t device); virtual ~DirectOutputThread(); // Thread virtuals @@ -1339,8 +1340,8 @@ private: class DuplicatingThread : public MixerThread { public: - DuplicatingThread (const sp& audioFlinger, MixerThread* mainThread, - audio_io_handle_t id); + DuplicatingThread(const sp& audioFlinger, MixerThread* mainThread, + audio_io_handle_t id); virtual ~DuplicatingThread(); // Thread virtuals @@ -1607,7 +1608,7 @@ private: // ramping when effects are activated/deactivated. // When controlling an auxiliary effect, the EffectModule also provides an input buffer used by // the attached track(s) to accumulate their auxiliary channel. - class EffectModule: public RefBase { + class EffectModule : public RefBase { public: EffectModule(ThreadBase *thread, const wp& chain, @@ -1798,7 +1799,7 @@ mutable Mutex mLock; // mutex for process, commands and handl // are insert only. The EffectChain maintains an ordered list of effect module, the order corresponding // in the effect process order. When attached to a track (session ID != 0), it also provide it's own // input buffer used by the track as accumulation buffer. - class EffectChain: public RefBase { + class EffectChain : public RefBase { public: EffectChain(const wp& wThread, int sessionId); EffectChain(ThreadBase *thread, int sessionId); -- cgit v1.1 From 599fabc596687efa4b71b8f3ebbb957c7cae0c72 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 8 Mar 2012 12:33:37 -0800 Subject: Document AudioMixer hard-coded limits Change-Id: I83ea8bed375f251260945db788bdb5f280dba12d --- services/audioflinger/AudioMixer.cpp | 6 ++++++ services/audioflinger/AudioMixer.h | 7 +++++++ 2 files changed, 13 insertions(+) diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index e652d14..a4ed445 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -106,6 +106,12 @@ AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTr ALOG_ASSERT(maxNumTracks <= MAX_NUM_TRACKS, "maxNumTracks %u > MAX_NUM_TRACKS %u", maxNumTracks, MAX_NUM_TRACKS); + // AudioMixer is not yet capable of more than 32 active track inputs + ALOG_ASSERT(32 >= MAX_NUM_TRACKS, "bad MAX_NUM_TRACKS %d", MAX_NUM_TRACKS); + + // AudioMixer is not yet capable of multi-channel output beyond stereo + ALOG_ASSERT(2 == MAX_NUM_CHANNELS, "bad MAX_NUM_CHANNELS %d", MAX_NUM_CHANNELS); + LocalClock lc; pthread_once(&sOnceControl, &sInitRoutine); diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index dc468ff..e60a298 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -41,8 +41,15 @@ public: /*virtual*/ ~AudioMixer(); // non-virtual saves a v-table, restore if sub-classed + + // This mixer has a hard-coded upper limit of 32 active track inputs. + // Adding support for > 32 tracks would require more than simply changing this value. static const uint32_t MAX_NUM_TRACKS = 32; // maximum number of channels supported by the mixer + + // This mixer has a hard-coded upper limit of 2 channels for output. + // There is support for > 2 channel tracks down-mixed to 2 channel output via a down-mix effect. + // Adding support for > 2 channel output would require more than simply changing this value. 
static const uint32_t MAX_NUM_CHANNELS = 2; // maximum number of channels supported for the content static const uint32_t MAX_NUM_CHANNELS_TO_DOWNMIX = 8; -- cgit v1.1 From 2188bc912a56d9bc577fcec7bf2208f49455e744 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 26 Oct 2012 16:10:30 -0700 Subject: StateQueue comments Change-Id: Id8c76d1e2ab5201e7ab3875f1dbcc0a126e611a4 --- services/audioflinger/StateQueue.h | 66 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/services/audioflinger/StateQueue.h b/services/audioflinger/StateQueue.h index eba190c..c9b5111 100644 --- a/services/audioflinger/StateQueue.h +++ b/services/audioflinger/StateQueue.h @@ -17,6 +17,72 @@ #ifndef ANDROID_AUDIO_STATE_QUEUE_H #define ANDROID_AUDIO_STATE_QUEUE_H +// The state queue template class was originally driven by this use case / requirements: +// There are two threads: a fast mixer, and a normal mixer, and they share state. +// The interesting part of the shared state is a set of active fast tracks, +// and the output HAL configuration (buffer size in frames, sample rate, etc.). +// Fast mixer thread: +// periodic with typical period < 10 ms +// FIFO/RR scheduling policy and a low fixed priority +// ok to block for bounded time using nanosleep() to achieve desired period +// must not block on condition wait, mutex lock, atomic operation spin, I/O, etc. +// under typical operations of mixing, writing, or adding/removing tracks +// ok to block for unbounded time when the output HAL configuration changes, +// and this may result in an audible artifact +// needs read-only access to a recent stable state, +// but not necessarily the most current one +// Normal mixer thread: +// periodic with typical period ~40 ms +// SCHED_OTHER scheduling policy and nice priority == urgent audio +// ok to block, but prefer to avoid as much as possible +// needs read/write access to state +// The normal mixer may need to temporarily suspend the fast mixer thread during mode changes. +// It will do this using the state -- one of the fields tells the fast mixer to idle. + +// Additional requirements: +// - observer must always be able to poll for and view the latest pushed state; it must never be +// blocked from seeing that state +// - observer does not need to see every state in sequence; it is OK for it to skip states +// [see below for more on this] +// - mutator must always be able to read/modify a state, it must never be blocked from reading or +// modifying state +// - reduce memcpy where possible +// - work well if the observer runs more frequently than the mutator, +// as is the case with fast mixer/normal mixer. +// It is not a requirement to work well if the roles were reversed, +// and the mutator were to run more frequently than the observer. +// In this case, the mutator could get blocked waiting for a slot to fill up for +// it to work with. This could be solved somewhat by increasing the depth of the queue, but it would +// still limit the mutator to a finite number of changes before it would block. A future +// possibility, not implemented here, would be to allow the mutator to safely overwrite an already +// pushed state. This could be done by the mutator overwriting mNext, but then being prepared to +// read an mAck which is actually for the earlier mNext (since there is a race). + +// Solution: +// Let's call the fast mixer thread the "observer" and normal mixer thread the "mutator". +// We assume there is only a single observer and a single mutator; this is critical. 
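[Editorial aside -- not part of the patch; the StateQueue design comment continues right after this sketch. The single-mutator / single-observer, never-block-the-reader pattern described above can be illustrated with a minimal lock-free triple buffer. This is only a conceptual analogue written for this document: the real StateQueue uses a deeper circular FIFO plus an acknowledgement index (mAck) rather than three fixed slots, so do not read this as its implementation.

#include <atomic>
#include <cstdint>

// Latest-state-wins exchange between one mutator and one observer.
// Three slots rotate between the writer, the reader, and a shared "pending"
// cell; ownership only changes hands via atomic exchanges on mPending, so
// neither side ever blocks the other.
template <typename T>
class LatestState {
public:
    // Mutator side: fill edit(), then publish(). Never blocks.
    T& edit() { return mSlots[mWriteIndex]; }

    void publish() {
        // Hand the freshly written slot to the observer (DIRTY = unseen) and
        // take back whichever slot was pending, to be overwritten next time.
        const uint32_t prev =
                mPending.exchange(mWriteIndex | DIRTY, std::memory_order_acq_rel);
        mWriteIndex = prev & INDEX_MASK;
    }

    // Observer side: returns the newest published state. Never blocks, and it
    // may legitimately skip intermediate states, as the requirements allow.
    const T& poll() {
        if (mPending.load(std::memory_order_acquire) & DIRTY) {
            // Swap our current read slot for the pending one.
            const uint32_t prev =
                    mPending.exchange(mReadIndex, std::memory_order_acq_rel);
            mReadIndex = prev & INDEX_MASK;
        }
        return mSlots[mReadIndex];
    }

private:
    static const uint32_t DIRTY = 0x4;       // set while the observer has not seen the slot
    static const uint32_t INDEX_MASK = 0x3;  // slot index lives in the low two bits

    T mSlots[3] {};
    uint32_t mWriteIndex = 0;                // touched by the mutator only
    uint32_t mReadIndex = 1;                 // touched by the observer only
    std::atomic<uint32_t> mPending{2};       // the shared hand-off cell
};

A deeper FIFO, as in the real StateQueue, additionally lets the observer keep a previous state to diff against the current one, and tells the mutator, through the acknowledgement index, when a pushed slot may be reused. End of editorial aside.]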
+// Each state is of type , and should contain only POD (Plain Old Data) and raw pointers, as +// memcpy() may be used to copy state, and the destructors are run in unpredictable order. +// The states in chronological order are: previous, current, next, and mutating: +// previous read-only, observer can compare vs. current to see the subset that changed +// current read-only, this is the primary state for observer +// next read-only, when observer is ready to accept a new state it will shift it in: +// previous = current +// current = next +// and the slot formerly used by previous is now available to the mutator. +// mutating invisible to observer, read/write to mutator +// Initialization is tricky, especially for the observer. If the observer starts execution +// before the mutator, there are no previous, current, or next states. And even if the observer +// starts execution after the mutator, there is a next state but no previous or current states. +// To solve this, we'll have the observer idle until there is a next state, +// and it will have to deal with the case where there is no previous state. +// The states are stored in a shared FIFO queue represented using a circular array. +// The observer polls for mutations, and receives a new state pointer after a +// a mutation is pushed onto the queue. To the observer, the state pointers are +// effectively in random order, that is the observer should not do address +// arithmetic on the state pointers. However to the mutator, the state pointers +// are in a definite circular order. + namespace android { #ifdef STATE_QUEUE_DUMP -- cgit v1.1 From 18a6d9029e18a93748d3d9c33f04c1b360aeb7ae Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 24 Sep 2012 11:27:56 -0700 Subject: Fix typo Change-Id: I8cc2969eb329a830ee866622a8633adcb4e967cc --- media/libmedia/AudioSystem.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 207f96f..0e5c149 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -510,7 +510,7 @@ sp AudioSystem::gAudioPolicyService; sp AudioSystem::gAudioPolicyServiceClient; -// establish binder interface to AudioFlinger service +// establish binder interface to AudioPolicy service const sp& AudioSystem::get_audio_policy_service() { gLock.lock(); -- cgit v1.1 From 26dd66e8ea7a1abf28c33196fc77822f68a718af Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 18 Oct 2012 15:51:03 -0700 Subject: Use ALOGVV instead of commented ALOGV Change-Id: I17bdb5274877760c7edaa31416020d349082e593 --- services/audioflinger/AudioFlinger.cpp | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 76d6447..096a0f0 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -941,8 +941,8 @@ status_t AudioFlinger::setParameters(audio_io_handle_t ioHandle, const String8& String8 AudioFlinger::getParameters(audio_io_handle_t ioHandle, const String8& keys) const { -// ALOGV("getParameters() io %d, keys %s, tid %d, calling pid %d", -// ioHandle, keys.string(), gettid(), IPCThreadState::self()->getCallingPid()); + ALOGVV("getParameters() io %d, keys %s, tid %d, calling pid %d", + ioHandle, keys.string(), gettid(), IPCThreadState::self()->getCallingPid()); Mutex::Autolock _l(mLock); @@ -3125,7 +3125,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac 
if ((track->framesReady() >= minFrames) && track->isReady() && !track->isPaused() && !track->isTerminated()) { - //ALOGV("track %d u=%08x, s=%08x [OK] on thread %p", name, cblk->user, cblk->server, this); + ALOGVV("track %d u=%08x, s=%08x [OK] on thread %p", name, cblk->user, cblk->server, this); mixedTracks++; @@ -3268,7 +3268,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac chain->clearInputBuffer(); } - //ALOGV("track %d u=%08x, s=%08x [NOT READY] on thread %p", name, cblk->user, cblk->server, this); + ALOGVV("track %d u=%08x, s=%08x [NOT READY] on thread %p", name, cblk->user, cblk->server, this); if ((track->sharedBuffer() != 0) || track->isTerminated() || track->isStopped() || track->isPaused()) { // We have consumed all the buffers of this track. @@ -3731,7 +3731,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep if ((track->framesReady() >= minFrames) && track->isReady() && !track->isPaused() && !track->isTerminated()) { - //ALOGV("track %d u=%08x, s=%08x [OK]", track->name(), cblk->user, cblk->server); + ALOGVV("track %d u=%08x, s=%08x [OK]", track->name(), cblk->user, cblk->server); if (track->mFillingUpStatus == Track::FS_FILLED) { track->mFillingUpStatus = Track::FS_ACTIVE; @@ -3792,7 +3792,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep mEffectChains[0]->clearInputBuffer(); } - //ALOGV("track %d u=%08x, s=%08x [NOT READY]", track->name(), cblk->user, cblk->server); + ALOGVV("track %d u=%08x, s=%08x [NOT READY]", track->name(), cblk->user, cblk->server); if ((track->sharedBuffer() != 0) || track->isTerminated() || track->isStopped() || track->isPaused()) { // We have consumed all the buffers of this track. @@ -5664,7 +5664,7 @@ status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer(AudioBufferProv audio_track_cblk_t* cblk = mCblk; uint32_t framesReq = buffer->frameCount; -// ALOGV("OutputTrack::obtainBuffer user %d, server %d", cblk->user, cblk->server); + ALOGVV("OutputTrack::obtainBuffer user %d, server %d", cblk->user, cblk->server); buffer->frameCount = 0; uint32_t framesAvail = cblk->framesAvailable(); @@ -8594,7 +8594,7 @@ status_t AudioFlinger::EffectModule::command(uint32_t cmdCode, void *pReplyData) { Mutex::Autolock _l(mLock); -// ALOGV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface); + ALOGVV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface); if (mState == DESTROYED || mEffectInterface == NULL) { return NO_INIT; @@ -9049,8 +9049,8 @@ status_t AudioFlinger::EffectHandle::command(uint32_t cmdCode, uint32_t *replySize, void *pReplyData) { -// ALOGV("command(), cmdCode: %d, mHasControl: %d, mEffect: %p", -// cmdCode, mHasControl, (mEffect == 0) ? 0 : mEffect.get()); + ALOGVV("command(), cmdCode: %d, mHasControl: %d, mEffect: %p", + cmdCode, mHasControl, (mEffect == 0) ? 
0 : mEffect.get()); // only get parameter command is permitted for applications not controlling the effect if (!mHasControl && cmdCode != EFFECT_CMD_GET_PARAM) { -- cgit v1.1 From fd88f86ec6788170fb4d903c1b0932a18ce1197c Mon Sep 17 00:00:00 2001 From: Johann Date: Mon, 29 Oct 2012 16:48:23 -0700 Subject: Match new paths and organization in external/libvpx See I739f99d48b8d7e6354c416ef2ca79c954826307f Change-Id: I42b51e2845a696a6e211dde00951afc8f571336f --- media/libstagefright/Android.mk | 1 + media/libstagefright/codecs/on2/dec/Android.mk | 7 +++---- media/libstagefright/matroska/Android.mk | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index faa0f31..cc0581e 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -95,6 +95,7 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_matroska \ libstagefright_timedtext \ libvpx \ + libwebm \ libstagefright_mpeg2ts \ libstagefright_httplive \ libstagefright_id3 \ diff --git a/media/libstagefright/codecs/on2/dec/Android.mk b/media/libstagefright/codecs/on2/dec/Android.mk index 3223871..0082d7c 100644 --- a/media/libstagefright/codecs/on2/dec/Android.mk +++ b/media/libstagefright/codecs/on2/dec/Android.mk @@ -5,9 +5,9 @@ LOCAL_SRC_FILES := \ SoftVPX.cpp LOCAL_C_INCLUDES := \ - $(TOP)/external/libvpx \ - $(TOP)/external/libvpx/vpx_codec \ - $(TOP)/external/libvpx/vpx_ports \ + $(TOP)/external/libvpx/libvpx \ + $(TOP)/external/libvpx/libvpx/vpx_codec \ + $(TOP)/external/libvpx/libvpx/vpx_ports \ frameworks/av/media/libstagefright/include \ frameworks/native/include/media/openmax \ @@ -21,4 +21,3 @@ LOCAL_MODULE := libstagefright_soft_vpxdec LOCAL_MODULE_TAGS := optional include $(BUILD_SHARED_LIBRARY) - diff --git a/media/libstagefright/matroska/Android.mk b/media/libstagefright/matroska/Android.mk index 2cccb4f..2d8c1e1 100644 --- a/media/libstagefright/matroska/Android.mk +++ b/media/libstagefright/matroska/Android.mk @@ -5,7 +5,7 @@ LOCAL_SRC_FILES:= \ MatroskaExtractor.cpp LOCAL_C_INCLUDES:= \ - $(TOP)/external/libvpx/mkvparser \ + $(TOP)/external/libvpx/libwebm \ $(TOP)/frameworks/native/include/media/openmax \ LOCAL_CFLAGS += -Wno-multichar -- cgit v1.1 From 9aec8c3f7f72cd36a8e3d7aafc1149f50514087a Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Mon, 29 Oct 2012 17:13:16 -0700 Subject: test-resample: clip instead of overflowing Change-Id: I550e5a59e51c11e1095ca338222b094f92b96878 --- services/audioflinger/test-resample.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/audioflinger/test-resample.cpp b/services/audioflinger/test-resample.cpp index a55a32b..a8e23e4 100644 --- a/services/audioflinger/test-resample.cpp +++ b/services/audioflinger/test-resample.cpp @@ -206,7 +206,10 @@ int main(int argc, char* argv[]) { int32_t* out = (int32_t*) output_vaddr; int16_t* convert = (int16_t*) malloc(out_frames * sizeof(int16_t)); for (size_t i = 0; i < out_frames; i++) { - convert[i] = out[i * 2] >> 12; + int32_t s = out[i * 2] >> 12; + if (s > 32767) s = 32767; + else if (s < -32768) s = -32768; + convert[i] = int16_t(s); } // write output to disk -- cgit v1.1 From b4b75b47c2a4248e60bbc3229d6acc4d5f872431 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Mon, 29 Oct 2012 17:13:20 -0700 Subject: fix a typo that caused up-sampling coefficients to be wrong The up-sampling coefficients were generated with a cut-off frequency of 24KHz instead of ~20KHz, which caused more aliasing in the audible band. 
also increased the attenuation to 1.3 dB on both up and down sampling coefficient to avoid clipping. Change-Id: Ie8aeecf1429190541b656810c6716b6aae5ece2e --- services/audioflinger/AudioResamplerSinc.cpp | 34 ++++++++++++++-------------- tools/resampler_tools/fir.cpp | 2 +- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index 8dad250..0e1acb4 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -31,17 +31,17 @@ namespace android { /* * These coeficients are computed with the "fir" utility found in * tools/resampler_tools - * cmd-line: fir -v 0.3 -l 7 -s 48000 -c 20478 + * cmd-line: fir -v 1.3 -l 7 -s 48000 -c 20478 */ const int32_t AudioResamplerSinc::mFirCoefsUp[] = { - 0x7ba78e22, 0x7ba44428, 0x7b9a668f, 0x7b89f654, 0x7b72f51b, 0x7b556530, 0x7b314988, 0x7b06a5be, 0x7ad57e15, 0x7a9dd776, 0x7a5fb76f, 0x7a1b2433, 0x79d0249b, 0x797ec022, 0x7926fee5, 0x78c8e9a4, 0x786489be, 0x77f9e933, 0x7789129f, 0x7712113b, 0x7694f0de, 0x7611bdf6, 0x7588858a, 0x74f95538, 0x74643b33, 0x73c94642, 0x732885bc, 0x72820989, 0x71d5e21d, 0x7124207a, 0x706cd62a, 0x6fb0153e, 0x6eedf04d, 0x6e267a71, 0x6d59c744, 0x6c87eade, 0x6bb0f9d4, 0x6ad50932, 0x69f42e7d, 0x690e7fac, 0x68241328, 0x6734ffca, 0x66415cd4, 0x654941f4, 0x644cc73b, 0x634c051f, 0x62471477, 0x613e0e75, 0x60310ca7, 0x5f2028f0, 0x5e0b7d8a, 0x5cf324fd, 0x5bd73a21, 0x5ab7d815, 0x59951a42, 0x586f1c54, 0x5745fa37, 0x5619d015, 0x54eaba51, 0x53b8d585, 0x52843e7f, 0x514d1239, 0x50136ddd, 0x4ed76ebc, 0x4d99324c, 0x4c58d626, 0x4b167800, 0x49d235ab, 0x488c2d10, 0x47447c2b, 0x45fb410a, 0x44b099c4, 0x4364a47d, 0x42177f5c, 0x40c9488e, 0x3f7a1e3b, 0x3e2a1e87, 0x3cd96790, 0x3b881766, 0x3a364c0b, 0x38e4236f, 0x3791bb6b, 0x363f31c0, 0x34eca410, 0x339a2fe1, 0x3247f291, 0x30f6095c, 0x2fa49150, 0x2e53a752, 0x2d036813, 0x2bb3f012, 0x2a655b9a, 0x2917c6b7, 0x27cb4d3e, 0x26800abf, 0x25361a8c, 0x23ed97ae, 0x22a69ce7, 0x216144ad, 0x201da929, 0x1edbe431, 0x1d9c0f49, 0x1c5e439e, 0x1b229a04, 0x19e92af4, 0x18b20e89, 0x177d5c7e, 0x164b2c2a, 0x151b947f, 0x13eeac09, 0x12c488ea, 0x119d40d7, 0x1078e918, 0x0f579686, 0x0e395d87, 0x0d1e520d, 0x0c068797, 0x0af21128, 0x09e10150, 0x08d36a1f, 0x07c95d2c, 0x06c2eb8e, 0x05c025df, 0x04c11c38, 0x03c5de2f, 0x02ce7ad8, 0x01db00c3, 0x00eb7df9, - 0x00000000, 0xff1893d3, 0xfe3545e8, 0xfd56222b, 0xfc7b33fd, 0xfba48638, 0xfad22328, 0xfa04148f, 0xf93a63a3, 0xf8751910, 0xf7b43cf2, 0xf6f7d6db, 0xf63fedcf, 0xf58c8845, 0xf4ddac29, 0xf4335ed9, 0xf38da525, 0xf2ec8353, 0xf24ffd1c, 0xf1b815ad, 0xf124cfa8, 0xf0962d24, 0xf00c2fad, 0xef86d846, 0xef06276a, 0xee8a1d0b, 0xee12b892, 0xed9ff8e4, 0xed31dc5f, 0xecc860de, 0xec6383b6, 0xec0341bc, 0xeba79744, 0xeb50801f, 0xeafdf7a4, 0xeaaff8aa, 0xea667d8d, 0xea21802e, 0xe9e0f9f7, 0xe9a4e3da, 0xe96d3654, 0xe939e96d, 0xe90af4be, 0xe8e04f6d, 0xe8b9f032, 0xe897cd5b, 0xe879dcc9, 0xe86013f5, 0xe84a67f4, 0xe838cd74, 0xe82b38c1, 0xe8219dc9, 0xe81bf01a, 0xe81a22e7, 0xe81c290b, 0xe821f507, 0xe82b790b, 0xe838a6f1, 0xe8497046, 0xe85dc648, 0xe87599e9, 0xe890dbd5, 0xe8af7c6e, 0xe8d16bd7, 0xe8f699ee, 0xe91ef654, 0xe94a706f, 0xe978f769, 0xe9aa7a37, 0xe9dee79a, 0xea162e21, 0xea503c2b, 0xea8cffec, 0xeacc676e, 0xeb0e6095, 0xeb52d91e, 0xeb99bea7, 0xebe2fead, 0xec2e8693, 0xec7c439f, 0xeccc2303, 0xed1e11db, 0xed71fd31, 0xedc7d201, 0xee1f7d3a, 0xee78ebc1, 0xeed40a74, 0xef30c62b, 0xef8f0bbd, 0xefeec803, 0xf04fe7d5, 0xf0b25814, 0xf11605a8, 0xf17add82, 0xf1e0cca2, 0xf247c017, 0xf2afa4ff, 
0xf3186891, 0xf381f816, 0xf3ec40f2, 0xf45730a4, 0xf4c2b4c9, 0xf52ebb1b, 0xf59b3178, 0xf60805e2, 0xf6752681, 0xf6e281a4, 0xf75005c5, 0xf7bda18b, 0xf82b43c9, 0xf898db86, 0xf90657f7, 0xf973a887, 0xf9e0bcd7, 0xfa4d84bf, 0xfab9f050, 0xfb25efd6, 0xfb9173db, 0xfbfc6d24, 0xfc66ccb9, 0xfcd083e2, 0xfd398428, 0xfda1bf5c, 0xfe092790, 0xfe6faf21, 0xfed548af, 0xff39e729, 0xff9d7dc3, - 0x00000000, 0x006161ae, 0x00c196e9, 0x0120941c, 0x017e4e00, 0x01dab9a0, 0x0235cc56, 0x028f7bcf, 0x02e7be0b, 0x033e895c, 0x0393d469, 0x03e7962c, 0x0439c5f4, 0x048a5b66, 0x04d94e7c, 0x05269785, 0x05722f29, 0x05bc0e63, 0x06042e86, 0x064a893e, 0x068f1889, 0x06d1d6c1, 0x0712be93, 0x0751cb05, 0x078ef772, 0x07ca3f8d, 0x08039f5e, 0x083b1344, 0x087097f3, 0x08a42a74, 0x08d5c827, 0x09056ebd, 0x09331c3e, 0x095ecf04, 0x098885bc, 0x09b03f66, 0x09d5fb52, 0x09f9b923, 0x0a1b78cb, 0x0a3b3a8a, 0x0a58fef1, 0x0a74c6dd, 0x0a8e9378, 0x0aa66638, 0x0abc40dd, 0x0ad02573, 0x0ae2164c, 0x0af21603, 0x0b00277a, 0x0b0c4dd8, 0x0b168c87, 0x0b1ee736, 0x0b2561d5, 0x0b2a0093, 0x0b2cc7e1, 0x0b2dbc6d, 0x0b2ce320, 0x0b2a411f, 0x0b25dbcb, 0x0b1fb8ba, 0x0b17ddbc, 0x0b0e50d4, 0x0b03183b, 0x0af63a5b, 0x0ae7bdd1, 0x0ad7a969, 0x0ac6041a, 0x0ab2d50d, 0x0a9e2391, 0x0a87f721, 0x0a70575f, 0x0a574c14, 0x0a3cdd2c, 0x0a2112b7, 0x0a03f4e7, 0x09e58c0d, 0x09c5e097, 0x09a4fb12, 0x0982e424, 0x095fa48c, 0x093b4523, 0x0915ced5, 0x08ef4aa5, 0x08c7c1a7, 0x089f3d00, 0x0875c5e5, 0x084b659a, 0x0820256e, 0x07f40ebb, 0x07c72ae4, 0x07998354, 0x076b217a, 0x073c0ecd, 0x070c54c3, 0x06dbfcd7, 0x06ab1080, 0x06799936, 0x0647a06e, 0x06152f96, 0x05e25018, 0x05af0b56, 0x057b6aa7, 0x0547775b, 0x05133ab3, 0x04debde6, 0x04aa0a19, 0x04752865, 0x044021d0, 0x040aff4c, 0x03d5c9ba, 0x03a089e5, 0x036b4882, 0x03360e30, 0x0300e373, 0x02cbd0b8, 0x0296de51, 0x02621472, 0x022d7b36, 0x01f91a98, 0x01c4fa74, 0x01912288, 0x015d9a6f, 0x012a69a6, 0x00f79784, 0x00c52b40, 0x00932be9, 0x0061a06e, 0x00308f96, - 0x00000000, 0xffcff828, 0xffa07e5f, 0xff7198d0, 0xff434d7c, 0xff15a23b, 0xfee89cbb, 0xfebc4281, 0xfe9098e4, 0xfe65a513, 0xfe3b6c10, 0xfe11f2b0, 0xfde93d9e, 0xfdc15155, 0xfd9a3226, 0xfd73e434, 0xfd4e6b74, 0xfd29cbad, 0xfd06087a, 0xfce32547, 0xfcc12551, 0xfca00ba9, 0xfc7fdb31, 0xfc60969d, 0xfc424073, 0xfc24db0b, 0xfc086890, 0xfbeceafd, 0xfbd26423, 0xfbb8d5a2, 0xfba040ee, 0xfb88a750, 0xfb7209e1, 0xfb5c6990, 0xfb47c71f, 0xfb342324, 0xfb217e0a, 0xfb0fd810, 0xfaff314c, 0xfaef89a9, 0xfae0e0e7, 0xfad3369e, 0xfac68a3e, 0xfabadb0d, 0xfab0282a, 0xfaa6708d, 0xfa9db307, 0xfa95ee44, 0xfa8f20c9, 0xfa8948f7, 0xfa84650c, 0xfa807321, 0xfa7d712d, 0xfa7b5d05, 0xfa7a345d, 0xfa79f4c8, 0xfa7a9bb9, 0xfa7c2684, 0xfa7e925f, 0xfa81dc63, 0xfa86018b, 0xfa8afeb7, 0xfa90d0ac, 0xfa977415, 0xfa9ee582, 0xfaa7216d, 0xfab02435, 0xfab9ea25, 0xfac46f71, 0xfacfb037, 0xfadba883, 0xfae8544b, 0xfaf5af73, 0xfb03b5d1, 0xfb126324, 0xfb21b321, 0xfb31a16a, 0xfb422996, 0xfb53472c, 0xfb64f5a9, 0xfb77307e, 0xfb89f311, 0xfb9d38bf, 0xfbb0fcdb, 0xfbc53ab3, 0xfbd9ed8a, 0xfbef10a2, 0xfc049f32, 0xfc1a9472, 0xfc30eb93, 0xfc479fc5, 0xfc5eac35, 0xfc760c11, 0xfc8dba84, 0xfca5b2bc, 0xfcbdefe8, 0xfcd66d38, 0xfcef25e2, 0xfd08151d, 0xfd213624, 0xfd3a843c, 0xfd53faab, 0xfd6d94c0, 0xfd874dd3, 0xfda12141, 0xfdbb0a73, 0xfdd504da, 0xfdef0bf1, 0xfe091b41, 0xfe232e5a, 0xfe3d40dc, 0xfe574e72, 0xfe7152d5, 0xfe8b49cc, 0xfea52f2d, 0xfebefedd, 0xfed8b4cf, 0xfef24d09, 0xff0bc3a0, 0xff2514ba, 0xff3e3c8f, 0xff57376b, 0xff7001a9, 0xff8897b9, 0xffa0f61e, 0xffb91970, 0xffd0fe58, 0xffe8a197, - 0x00000000, 0x0017167d, 0x002de20b, 0x00445fc0, 0x005a8cc4, 0x00706659, 0x0085e9d5, 0x009b14a4, 
0x00afe44b, 0x00c45665, 0x00d868a3, 0x00ec18cd, 0x00ff64c6, 0x01124a84, 0x0124c816, 0x0136dba4, 0x0148836b, 0x0159bdc2, 0x016a8918, 0x017ae3f0, 0x018acce9, 0x019a42b6, 0x01a94425, 0x01b7d01a, 0x01c5e58e, 0x01d38396, 0x01e0a95a, 0x01ed561d, 0x01f98935, 0x02054211, 0x02108037, 0x021b4343, 0x02258ae6, 0x022f56e9, 0x0238a72b, 0x02417b9e, 0x0249d44b, 0x0251b151, 0x025912e4, 0x025ff949, 0x026664de, 0x026c5612, 0x0271cd69, 0x0276cb79, 0x027b50ed, 0x027f5e80, 0x0282f503, 0x02861556, 0x0288c06b, 0x028af746, 0x028cbafc, 0x028e0cb1, 0x028eed9b, 0x028f5eff, 0x028f6231, 0x028ef893, 0x028e2396, 0x028ce4bb, 0x028b3d8e, 0x02892fa9, 0x0286bcb2, 0x0283e65d, 0x0280ae68, 0x027d169e, 0x027920d3, 0x0274cee6, 0x027022c2, 0x026b1e58, 0x0265c3a6, 0x026014b0, 0x025a1382, 0x0253c232, 0x024d22da, 0x0246379d, 0x023f02a5, 0x0237861f, 0x022fc441, 0x0227bf42, 0x021f7961, 0x0216f4de, 0x020e3400, 0x0205390e, 0x01fc0653, 0x01f29e1c, 0x01e902b9, 0x01df367a, 0x01d53bb1, 0x01cb14ae, 0x01c0c3c5, 0x01b64b47, 0x01abad84, 0x01a0ecce, 0x01960b72, 0x018b0bbd, 0x017feffa, 0x0174ba6f, 0x01696d61, 0x015e0b11, 0x015295be, 0x01470f9f, 0x013b7aea, 0x012fd9cf, 0x01242e78, 0x01187b0c, 0x010cc1a9, 0x01010469, 0x00f54560, 0x00e9869a, 0x00ddca1c, 0x00d211e5, 0x00c65fec, 0x00bab620, 0x00af1668, 0x00a382a2, 0x0097fca6, 0x008c8641, 0x00812137, 0x0075cf45, 0x006a921e, 0x005f6b69, 0x00545cc6, 0x004967cc, 0x003e8e04, 0x0033d0f1, 0x0029320a, 0x001eb2bb, 0x00145466, 0x000a1864, - 0x00000000, 0xfff60c7d, 0xffec3f11, 0xffe298e9, 0xffd91b25, 0xffcfc6dc, 0xffc69d18, 0xffbd9ed7, 0xffb4cd10, 0xffac28a9, 0xffa3b281, 0xff9b6b6a, 0xff93542b, 0xff8b6d80, 0xff83b819, 0xff7c349b, 0xff74e3a0, 0xff6dc5b6, 0xff66db62, 0xff60251c, 0xff59a351, 0xff535664, 0xff4d3ead, 0xff475c78, 0xff41b008, 0xff3c3995, 0xff36f94b, 0xff31ef4d, 0xff2d1bb4, 0xff287e8e, 0xff2417e0, 0xff1fe7a4, 0xff1bedca, 0xff182a3b, 0xff149cd2, 0xff114566, 0xff0e23c0, 0xff0b37a4, 0xff0880cb, 0xff05fee6, 0xff03b19d, 0xff01988f, 0xfeffb356, 0xfefe0182, 0xfefc829a, 0xfefb3620, 0xfefa1b8d, 0xfef93254, 0xfef879e0, 0xfef7f196, 0xfef798d4, 0xfef76ef3, 0xfef77344, 0xfef7a513, 0xfef803a8, 0xfef88e42, 0xfef9441d, 0xfefa2471, 0xfefb2e6f, 0xfefc6144, 0xfefdbc19, 0xfeff3e14, 0xff00e655, 0xff02b3f9, 0xff04a61b, 0xff06bbd0, 0xff08f42c, 0xff0b4e40, 0xff0dc91a, 0xff1063c5, 0xff131d4a, 0xff15f4b2, 0xff18e902, 0xff1bf93e, 0xff1f2469, 0xff226984, 0xff25c791, 0xff293d8f, 0xff2cca7e, 0xff306d5d, 0xff34252b, 0xff37f0e7, 0xff3bcf91, 0xff3fc028, 0xff43c1ad, 0xff47d321, 0xff4bf386, 0xff5021df, 0xff545d32, 0xff58a483, 0xff5cf6dc, 0xff615345, 0xff65b8ca, 0xff6a267a, 0xff6e9b62, 0xff731697, 0xff77972d, 0xff7c1c3b, 0xff80a4dd, 0xff85302f, 0xff89bd52, 0xff8e4b69, 0xff92d99c, 0xff976715, 0xff9bf301, 0xffa07c94, 0xffa50301, 0xffa98582, 0xffae0354, 0xffb27bb9, 0xffb6edf5, 0xffbb5953, 0xffbfbd1e, 0xffc418ab, 0xffc86b4e, 0xffccb463, 0xffd0f349, 0xffd52763, 0xffd9501b, 0xffdd6cde, 0xffe17d1d, 0xffe5804e, 0xffe975ed, 0xffed5d7b, 0xfff1367b, 0xfff50077, 0xfff8baff, 0xfffc65a4, - 0x00000000, 0x000389af, 0x00070254, 0x000a6994, 0x000dbf1c, 0x0011029c, 0x001433c8, 0x0017525c, 0x001a5e15, 0x001d56b7, 0x00203c0a, 0x00230ddc, 0x0025cbfd, 0x00287644, 0x002b0c8b, 0x002d8eb2, 0x002ffc9c, 0x00325631, 0x00349b5d, 0x0036cc12, 0x0038e844, 0x003aefed, 0x003ce309, 0x003ec19a, 0x00408ba5, 0x00424134, 0x0043e254, 0x00456f15, 0x0046e78c, 0x00484bd1, 0x00499c00, 0x004ad839, 0x004c009d, 0x004d1552, 0x004e1683, 0x004f045a, 0x004fdf07, 0x0050a6bc, 0x00515bae, 0x0051fe16, 0x00528e2d, 0x00530c30, 0x00537860, 0x0053d2fd, 0x00541c4d, 0x00545496, 0x00547c20, 
0x00549337, 0x00549a25, 0x0054913b, 0x005478c8, 0x0054511d, 0x00541a8f, 0x0053d572, 0x0053821b, 0x005320e4, 0x0052b223, 0x00523633, 0x0051ad6f, 0x00511831, 0x005076d8, 0x004fc9bf, 0x004f1144, 0x004e4dc6, 0x004d7fa4, 0x004ca73d, 0x004bc4ef, 0x004ad91c, 0x0049e423, 0x0048e663, 0x0047e03c, 0x0046d20f, 0x0045bc3b, 0x00449f1f, 0x00437b19, 0x0042508a, 0x00411fce, 0x003fe943, 0x003ead47, 0x003d6c34, 0x003c2668, 0x003adc3d, 0x00398e0c, 0x00383c2f, 0x0036e6fd, 0x00358ece, 0x003433f7, 0x0032d6cd, 0x003177a3, 0x003016cc, 0x002eb499, 0x002d5159, 0x002bed5b, 0x002a88ec, 0x00292456, 0x0027bfe5, 0x00265be0, 0x0024f88e, 0x00239636, 0x00223519, 0x0020d57b, 0x001f779c, 0x001e1bbb, 0x001cc214, 0x001b6ae3, 0x001a1661, 0x0018c4c7, 0x0017764a, 0x00162b1e, 0x0014e376, 0x00139f83, 0x00125f73, 0x00112373, 0x000febaf, 0x000eb84f, 0x000d897c, 0x000c5f5b, 0x000b3a10, 0x000a19bc, 0x0008fe81, 0x0007e87c, 0x0006d7cb, 0x0005cc88, 0x0004c6cc, 0x0003c6ae, 0x0002cc45, 0x0001d7a4, 0x0000e8dd, - 0x00000000, 0xffff1d1d, 0xfffe4040, 0xfffd6975, 0xfffc98c6, 0xfffbce3b, 0xfffb09db, 0xfffa4bab, 0xfff993af, 0xfff8e1e9, 0xfff83659, 0xfff790ff, 0xfff6f1d9, 0xfff658e3, 0xfff5c619, 0xfff53974, 0xfff4b2ed, 0xfff4327c, 0xfff3b816, 0xfff343b1, 0xfff2d541, 0xfff26cb7, 0xfff20a07, 0xfff1ad20, 0xfff155f3, 0xfff1046d, 0xfff0b87e, 0xfff07211, 0xfff03112, 0xffeff56d, 0xffefbf0d, 0xffef8ddb, 0xffef61c0, 0xffef3aa3, 0xffef186e, 0xffeefb07, 0xffeee254, 0xffeece3c, 0xffeebea4, 0xffeeb371, 0xffeeac88, 0xffeea9cc, 0xffeeab22, 0xffeeb06e, 0xffeeb992, 0xffeec671, 0xffeed6ef, 0xffeeeaef, 0xffef0251, 0xffef1cfa, 0xffef3acb, 0xffef5ba6, 0xffef7f6e, 0xffefa605, 0xffefcf4d, 0xffeffb28, 0xfff02979, 0xfff05a22, 0xfff08d05, 0xfff0c207, 0xfff0f909, 0xfff131ef, 0xfff16c9c, 0xfff1a8f3, 0xfff1e6da, 0xfff22634, 0xfff266e6, 0xfff2a8d5, 0xfff2ebe6, 0xfff32fff, 0xfff37506, 0xfff3bae1, 0xfff40178, 0xfff448b2, 0xfff49077, 0xfff4d8b0, 0xfff52144, 0xfff56a1e, 0xfff5b328, 0xfff5fc4b, 0xfff64574, 0xfff68e8c, 0xfff6d782, 0xfff72040, 0xfff768b5, 0xfff7b0ce, 0xfff7f879, 0xfff83fa6, 0xfff88644, 0xfff8cc43, 0xfff91195, 0xfff95629, 0xfff999f3, 0xfff9dce5, 0xfffa1ef2, 0xfffa600e, 0xfffaa02d, 0xfffadf44, 0xfffb1d49, 0xfffb5a32, 0xfffb95f6, 0xfffbd08c, 0xfffc09ec, 0xfffc420e, 0xfffc78ed, 0xfffcae80, 0xfffce2c3, 0xfffd15b1, 0xfffd4744, 0xfffd7779, 0xfffda64c, 0xfffdd3ba, 0xfffdffc0, 0xfffe2a5c, 0xfffe538d, 0xfffe7b51, 0xfffea1a9, 0xfffec693, 0xfffeea11, 0xffff0c22, 0xffff2cc8, 0xffff4c05, 0xffff69db, 0xffff864b, 0xffffa15a, 0xffffbb09, 0xffffd35c, 0xffffea58, + 0x6e350b14, 0x6e32e1de, 0x6e2c6665, 0x6e219925, 0x6e127aed, 0x6dff0cdf, 0x6de7506d, 0x6dcb475f, 0x6daaf3cd, 0x6d865820, 0x6d5d7714, 0x6d3053b9, 0x6cfef16b, 0x6cc953db, 0x6c8f7f09, 0x6c517746, 0x6c0f4131, 0x6bc8e1ba, 0x6b7e5e1f, 0x6b2fbbee, 0x6add0100, 0x6a86337e, 0x6a2b59dc, 0x69cc7adc, 0x69699d89, 0x6902c93b, 0x68980593, 0x68295a7c, 0x67b6d02a, 0x67406f18, 0x66c64009, 0x66484c06, 0x65c69c5e, 0x65413aa3, 0x64b830ad, 0x642b8896, 0x639b4cb9, 0x630787b3, 0x62704460, 0x61d58ddd, 0x61376f83, 0x6095f4e8, 0x5ff129de, 0x5f491a73, 0x5e9dd2ed, 0x5def5fcc, 0x5d3dcdc4, 0x5c8929c4, 0x5bd180ec, 0x5b16e092, 0x5a59563d, 0x5998efa7, 0x58d5bab8, 0x580fc589, 0x57471e5f, 0x567bd3ac, 0x55adf40b, 0x54dd8e43, 0x540ab142, 0x53356c1d, 0x525dce0d, 0x5183e672, 0x50a7c4cd, 0x4fc978c0, 0x4ee9120c, 0x4e06a094, 0x4d223454, 0x4c3bdd65, 0x4b53abfc, 0x4a69b064, 0x497dfb00, 0x48909c49, 0x47a1a4cd, 0x46b1252c, 0x45bf2e16, 0x44cbd04d, 0x43d71ca1, 0x42e123ed, 0x41e9f71a, 0x40f1a71a, 0x3ff844e5, 0x3efde17e, 0x3e028dea, 0x3d065b33, 0x3c095a67, 0x3b0b9c92, 
0x3a0d32c2, 0x390e2e01, 0x380e9f57, 0x370e97c6, 0x360e284c, 0x350d61da, 0x340c555d, 0x330b13b5, 0x3209adb4, 0x31083422, 0x3006b7b5, 0x2f054914, 0x2e03f8d4, 0x2d02d775, 0x2c01f564, 0x2b0162f9, 0x2a013072, 0x29016df5, 0x28022b90, 0x27037934, 0x260566b4, 0x250803c7, 0x240b6004, 0x230f8ae2, 0x221493b5, 0x211a89b0, 0x20217be0, 0x1f29792e, 0x1e32905a, 0x1d3ccfff, 0x1c48468e, 0x1b55024e, 0x1a63115b, 0x197281a5, 0x188360ef, 0x1795bccc, 0x16a9a2a1, 0x15bf1fa4, 0x14d640d8, 0x13ef130e, 0x1309a2e4, 0x1225fcc4, + 0x11442ce3, 0x10643f3f, 0x0f863fa3, 0x0eaa399d, 0x0dd03888, 0x0cf84782, 0x0c227171, 0x0b4ec101, 0x0a7d40a1, 0x09adfa86, 0x08e0f8a7, 0x081644be, 0x074de849, 0x0687ec84, 0x05c45a70, 0x05033acc, 0x04449618, 0x03887494, 0x02cede3e, 0x0217dad5, 0x016371d3, 0x00b1aa73, 0x00028bac, 0xff561c34, 0xfeac627c, 0xfe0564b3, 0xfd6128c4, 0xfcbfb457, 0xfc210cd1, 0xfb853752, 0xfaec38b4, 0xfa561591, 0xf9c2d23c, 0xf93272c4, 0xf8a4faf4, 0xf81a6e54, 0xf792d025, 0xf70e2367, 0xf68c6ad3, 0xf60da8df, 0xf591dfbf, 0xf5191161, 0xf4a33f70, 0xf4306b54, 0xf3c09632, 0xf353c0ed, 0xf2e9ec23, 0xf2831834, 0xf21f4539, 0xf1be730e, 0xf160a14c, 0xf105cf4a, 0xf0adfc22, 0xf05926ab, 0xf0074d7e, 0xefb86ef7, 0xef6c892f, 0xef239a07, 0xeedd9f1e, 0xee9a95d8, 0xee5a7b5f, 0xee1d4c9f, 0xede3064a, 0xedaba4d8, 0xed772488, 0xed458160, 0xed16b72d, 0xeceac186, 0xecc19bcc, 0xec9b4129, 0xec77ac93, 0xec56d8cb, 0xec38c05f, 0xec1d5dac, 0xec04aadb, 0xebeea1e6, 0xebdb3c96, 0xebca7487, 0xebbc4325, 0xebb0a1af, 0xeba78939, 0xeba0f2ab, 0xeb9cd6c3, 0xeb9b2e16, 0xeb9bf110, 0xeb9f17f7, 0xeba49ae9, 0xebac71df, 0xebb694b0, 0xebc2fb0d, 0xebd19c85, 0xebe27089, 0xebf56e66, 0xec0a8d4c, 0xec21c44d, 0xec3b0a5e, 0xec565658, 0xec739ef9, 0xec92dae5, 0xecb400aa, 0xecd706bc, 0xecfbe378, 0xed228d2a, 0xed4afa04, 0xed75202a, 0xeda0f5ab, 0xedce7087, 0xedfd86ac, 0xee2e2dfd, 0xee605c4c, 0xee940760, 0xeec924f5, 0xeeffaabc, 0xef378e5d, 0xef70c579, 0xefab45a8, 0xefe7047f, 0xf023f78b, 0xf0621459, 0xf0a1506f, 0xf0e1a155, 0xf122fc90, 0xf16557a8, 0xf1a8a824, 0xf1ece390, 0xf231ff7a, 0xf277f176, 0xf2beaf1b, + 0xf3062e09, 0xf34e63e8, 0xf3974666, 0xf3e0cb3f, 0xf42ae835, 0xf4759318, 0xf4c0c1c3, 0xf50c6a21, 0xf5588229, 0xf5a4ffe1, 0xf5f1d961, 0xf63f04d0, 0xf68c7868, 0xf6da2a76, 0xf7281159, 0xf7762387, 0xf7c45788, 0xf812a3fd, 0xf860ff9b, 0xf8af6130, 0xf8fdbfa2, 0xf94c11f0, 0xf99a4f34, 0xf9e86ea0, 0xfa366783, 0xfa843147, 0xfad1c372, 0xfb1f15a8, 0xfb6c1fab, 0xfbb8d95a, 0xfc053ab4, 0xfc513bd7, 0xfc9cd501, 0xfce7fe91, 0xfd32b105, 0xfd7ce501, 0xfdc69347, 0xfe0fb4bc, 0xfe58426a, 0xfea0357e, 0xfee78746, 0xff2e3137, 0xff742ceb, 0xffb9741e, 0xfffe00b5, 0x0041ccb6, 0x0084d252, 0x00c70bdc, 0x010873cf, 0x014904ce, 0x0188b9a0, 0x01c78d36, 0x02057aa6, 0x02427d2e, 0x027e9035, 0x02b9af49, 0x02f3d61f, 0x032d0094, 0x03652aae, 0x039c509c, 0x03d26eb2, 0x0407816e, 0x043b8577, 0x046e779c, 0x04a054d1, 0x04d11a37, 0x0500c513, 0x052f52d5, 0x055cc111, 0x05890d88, 0x05b4361d, 0x05de38df, 0x06071402, 0x062ec5e2, 0x06554d01, 0x067aa809, 0x069ed5ca, 0x06c1d539, 0x06e3a573, 0x070445ba, 0x0723b575, 0x0741f431, 0x075f01a0, 0x077add97, 0x07958811, 0x07af012c, 0x07c7492a, 0x07de606f, 0x07f44784, 0x0808ff11, 0x081c87e3, 0x082ee2e6, 0x0840112b, 0x085013de, 0x085eec50, 0x086c9bf0, 0x0879244c, 0x08848710, 0x088ec607, 0x0897e31b, 0x089fe052, 0x08a6bfcd, 0x08ac83ce, 0x08b12eac, 0x08b4c2e0, 0x08b742f8, 0x08b8b19f, 0x08b91198, 0x08b865bf, 0x08b6b10a, 0x08b3f685, 0x08b03952, 0x08ab7cac, 0x08a5c3e1, 0x089f1258, 0x08976b89, 0x088ed303, 0x08854c65, 0x087adb64, 0x086f83c5, 0x0863495f, 0x0856301c, 0x08483bf4, 0x083970ee, 0x0829d322, 
0x081966b5, 0x08082fdb, 0x07f632d4, + 0x07e373ec, 0x07cff77c, 0x07bbc1e7, 0x07a6d79d, 0x07913d14, 0x077af6ce, 0x07640956, 0x074c793d, 0x07344b1b, 0x071b8393, 0x07022749, 0x06e83ae8, 0x06cdc322, 0x06b2c4ac, 0x0697443c, 0x067b4690, 0x065ed064, 0x0641e678, 0x06248d8e, 0x0606ca65, 0x05e8a1c1, 0x05ca1862, 0x05ab3308, 0x058bf671, 0x056c675a, 0x054c8a7b, 0x052c648a, 0x050bfa3b, 0x04eb5039, 0x04ca6b2f, 0x04a94fbf, 0x04880288, 0x0466881f, 0x0444e515, 0x04231df2, 0x04013738, 0x03df355d, 0x03bd1cd3, 0x039af1fe, 0x0378b93c, 0x035676dd, 0x03342f2b, 0x0311e661, 0x02efa0b0, 0x02cd623e, 0x02ab2f23, 0x02890b6c, 0x0266fb19, 0x0245021b, 0x02232456, 0x020165a2, 0x01dfc9c5, 0x01be547a, 0x019d096b, 0x017bec31, 0x015b005a, 0x013a4960, 0x0119caaf, 0x00f987a2, 0x00d98384, 0x00b9c18c, 0x009a44e6, 0x007b10a6, 0x005c27d3, 0x003d8d61, 0x001f4432, 0x00014f15, 0xffe3b0c9, 0xffc66bf8, 0xffa9833b, 0xff8cf919, 0xff70d004, 0xff550a5e, 0xff39aa73, 0xff1eb27e, 0xff0424a6, 0xfeea02ff, 0xfed04f89, 0xfeb70c33, 0xfe9e3ad6, 0xfe85dd38, 0xfe6df50d, 0xfe5683f5, 0xfe3f8b7b, 0xfe290d18, 0xfe130a32, 0xfdfd841c, 0xfde87c13, 0xfdd3f344, 0xfdbfeac5, 0xfdac639e, 0xfd995ebf, 0xfd86dd07, 0xfd74df43, 0xfd63662b, 0xfd527267, 0xfd42048a, 0xfd321d16, 0xfd22bc79, 0xfd13e312, 0xfd05912a, 0xfcf7c6fb, 0xfcea84ac, 0xfcddca53, 0xfcd197f5, 0xfcc5ed84, 0xfcbacae2, 0xfcb02fe3, 0xfca61c45, 0xfc9c8fba, 0xfc9389e1, 0xfc8b0a4c, 0xfc83107b, 0xfc7b9bdf, 0xfc74abda, 0xfc6e3fbf, 0xfc6856d2, 0xfc62f049, 0xfc5e0b4b, 0xfc59a6f2, 0xfc55c249, 0xfc525c50, 0xfc4f73f6, 0xfc4d0822, 0xfc4b17a9, 0xfc49a159, 0xfc48a3ef, 0xfc481e21, + 0xfc480e96, 0xfc4873eb, 0xfc494cb3, 0xfc4a9776, 0xfc4c52b2, 0xfc4e7cd9, 0xfc511457, 0xfc54178c, 0xfc5784d0, 0xfc5b5a72, 0xfc5f96ba, 0xfc6437e5, 0xfc693c2c, 0xfc6ea1be, 0xfc7466c4, 0xfc7a895f, 0xfc8107ac, 0xfc87dfbf, 0xfc8f0fa9, 0xfc969573, 0xfc9e6f23, 0xfca69ab8, 0xfcaf162e, 0xfcb7df7d, 0xfcc0f498, 0xfcca536e, 0xfcd3f9ed, 0xfcdde5fe, 0xfce81587, 0xfcf2866f, 0xfcfd3696, 0xfd0823de, 0xfd134c26, 0xfd1ead4c, 0xfd2a452e, 0xfd3611a8, 0xfd421096, 0xfd4e3fd4, 0xfd5a9d41, 0xfd6726b7, 0xfd73da17, 0xfd80b53d, 0xfd8db60c, 0xfd9ada65, 0xfda8202c, 0xfdb58547, 0xfdc3079e, 0xfdd0a51d, 0xfdde5bb2, 0xfdec294d, 0xfdfa0be4, 0xfe08016e, 0xfe1607e7, 0xfe241d50, 0xfe323fac, 0xfe406d04, 0xfe4ea365, 0xfe5ce0e3, 0xfe6b2393, 0xfe796993, 0xfe87b104, 0xfe95f80f, 0xfea43ce1, 0xfeb27dae, 0xfec0b8af, 0xfeceec25, 0xfedd1655, 0xfeeb358e, 0xfef94823, 0xff074c6f, 0xff1540d4, 0xff2323bc, 0xff30f397, 0xff3eaedd, 0xff4c540e, 0xff59e1b1, 0xff675655, 0xff74b091, 0xff81ef04, 0xff8f1056, 0xff9c1334, 0xffa8f658, 0xffb5b87f, 0xffc25874, 0xffced505, 0xffdb2d0d, 0xffe75f6c, 0xfff36b0d, 0xffff4ee4, 0x000b09ea, 0x00169b25, 0x002201a1, 0x002d3c75, 0x00384abe, 0x00432ba4, 0x004dde58, 0x00586211, 0x0062b611, 0x006cd9a4, 0x0076cc1c, 0x00808cd5, 0x008a1b34, 0x009376a6, 0x009c9ea1, 0x00a592a5, 0x00ae5238, 0x00b6dcea, 0x00bf3254, 0x00c75217, 0x00cf3bdd, 0x00d6ef55, 0x00de6c3c, 0x00e5b252, 0x00ecc163, 0x00f39941, 0x00fa39c5, 0x0100a2d4, 0x0106d456, 0x010cce3d, 0x01129084, 0x01181b2c, 0x011d6e3c, 0x012289c6, 0x01276de1, 0x012c1aab, 0x01309049, 0x0134cee8, 0x0138d6bc, + 0x013ca7ff, 0x014042f1, 0x0143a7d9, 0x0146d705, 0x0149d0c8, 0x014c957d, 0x014f2584, 0x01518141, 0x0153a922, 0x01559d96, 0x01575f14, 0x0158ee18, 0x015a4b23, 0x015b76bb, 0x015c716b, 0x015d3bc3, 0x015dd658, 0x015e41c1, 0x015e7e9d, 0x015e8d8e, 0x015e6f37, 0x015e2444, 0x015dad61, 0x015d0b3e, 0x015c3e92, 0x015b4812, 0x015a287a, 0x0158e089, 0x015770fe, 0x0155da9d, 0x01541e2e, 0x01523c78, 0x01503646, 0x014e0c67, 0x014bbfa8, 0x014950dc, 
0x0146c0d5, 0x01441068, 0x0141406b, 0x013e51b5, 0x013b4520, 0x01381b84, 0x0134d5bd, 0x013174a6, 0x012df91b, 0x012a63f9, 0x0126b61d, 0x0122f065, 0x011f13ad, 0x011b20d4, 0x011718b7, 0x0112fc33, 0x010ecc25, 0x010a896b, 0x010634df, 0x0101cf5d, 0x00fd59bf, 0x00f8d4df, 0x00f44196, 0x00efa0ba, 0x00eaf323, 0x00e639a4, 0x00e17512, 0x00dca63d, 0x00d7cdf8, 0x00d2ed0f, 0x00ce044f, 0x00c91484, 0x00c41e76, 0x00bf22ec, 0x00ba22ab, 0x00b51e74, 0x00b01708, 0x00ab0d25, 0x00a60185, 0x00a0f4e1, 0x009be7f0, 0x0096db64, 0x0091cfed, 0x008cc63b, 0x0087bef8, 0x0082bacb, 0x007dba5a, 0x0078be46, 0x0073c72f, 0x006ed5b1, 0x0069ea64, 0x006505de, 0x006028b0, 0x005b536b, 0x00568698, 0x0051c2c2, 0x004d086c, 0x00485818, 0x0043b244, 0x003f176c, 0x003a8805, 0x00360484, 0x00318d58, 0x002d22ef, 0x0028c5b2, 0x00247606, 0x0020344d, 0x001c00e5, 0x0017dc2b, 0x0013c674, 0x000fc015, 0x000bc95f, 0x0007e29d, 0x00040c1a, 0x0000461a, 0xfffc90e1, 0xfff8ecac, 0xfff559b7, 0xfff1d83a, 0xffee686a, 0xffeb0a76, 0xffe7be8d, 0xffe484d8, 0xffe15d7f, 0xffde48a4, 0xffdb4669, 0xffd856ea, 0xffd57a40, 0xffd2b083, 0xffcff9c5, 0xffcd5617, 0xffcac586, + 0xffc8481c, 0xffc5dde0, 0xffc386d6, 0xffc142ff, 0xffbf125a, 0xffbcf4e3, 0xffbaea92, 0xffb8f35c, 0xffb70f36, 0xffb53e10, 0xffb37fd9, 0xffb1d47b, 0xffb03be0, 0xffaeb5ee, 0xffad428a, 0xffabe196, 0xffaa92f1, 0xffa95679, 0xffa82c09, 0xffa7137b, 0xffa60ca5, 0xffa5175d, 0xffa43376, 0xffa360c1, 0xffa29f0e, 0xffa1ee2b, 0xffa14de4, 0xffa0be03, 0xffa03e51, 0xff9fce96, 0xff9f6e98, 0xff9f1e1a, 0xff9edce1, 0xff9eaaae, 0xff9e8741, 0xff9e725b, 0xff9e6bb9, 0xff9e7319, 0xff9e8837, 0xff9eaace, 0xff9eda9a, 0xff9f1753, 0xff9f60b3, 0xff9fb672, 0xffa01848, 0xffa085ea, 0xffa0ff11, 0xffa18371, 0xffa212c1, 0xffa2acb5, 0xffa35102, 0xffa3ff5d, 0xffa4b779, 0xffa5790c, 0xffa643ca, 0xffa71765, 0xffa7f392, 0xffa8d805, 0xffa9c472, 0xffaab88d, 0xffabb40a, 0xffacb69e, 0xffadbffe, 0xffaecfde, 0xffafe5f5, 0xffb101f7, 0xffb2239b, 0xffb34a98, 0xffb476a5, 0xffb5a779, 0xffb6dccd, 0xffb8165a, 0xffb953d8, 0xffba9503, 0xffbbd996, 0xffbd214b, 0xffbe6bdf, 0xffbfb910, 0xffc1089c, 0xffc25a41, 0xffc3adbf, 0xffc502d8, 0xffc6594c, 0xffc7b0df, 0xffc90953, 0xffca626e, 0xffcbbbf5, 0xffcd15b0, 0xffce6f64, 0xffcfc8dd, 0xffd121e3, 0xffd27a41, 0xffd3d1c4, 0xffd52839, 0xffd67d6f, 0xffd7d135, 0xffd9235c, 0xffda73b6, 0xffdbc216, 0xffdd0e51, 0xffde583d, 0xffdf9fb0, 0xffe0e483, 0xffe2268e, 0xffe365ad, 0xffe4a1bb, 0xffe5da95, 0xffe7101a, 0xffe84229, 0xffe970a2, 0xffea9b69, 0xffebc25f, 0xffece56a, 0xffee046f, 0xffef1f56, 0xfff03605, 0xfff14867, 0xfff25666, 0xfff35fee, 0xfff464ec, 0xfff5654e, 0xfff66102, 0xfff757fa, 0xfff84a27, 0xfff9377c, 0xfffa1fed, 0xfffb036e, 0xfffbe1f5, + 0xfffcbb7a, 0xfffd8ff5, 0xfffe5f5f, 0xffff29b1, 0xffffeee9, 0x0000af01, 0x000169f7, 0x00021fca, 0x0002d077, 0x00037c01, 0x00042267, 0x0004c3ab, 0x00055fd1, 0x0005f6dc, 0x000688d1, 0x000715b4, 0x00079d8d, 0x00082061, 0x00089e39, 0x0009171e, 0x00098b17, 0x0009fa30, 0x000a6472, 0x000ac9ea, 0x000b2aa2, 0x000b86a7, 0x000bde06, 0x000c30cd, 0x000c7f0a, 0x000cc8cc, 0x000d0e21, 0x000d4f1a, 0x000d8bc7, 0x000dc438, 0x000df87f, 0x000e28ac, 0x000e54d2, 0x000e7d03, 0x000ea151, 0x000ec1cf, 0x000ede91, 0x000ef7a9, 0x000f0d2c, 0x000f1f2d, 0x000f2dc1, 0x000f38fb, 0x000f40f1, 0x000f45b7, 0x000f4762, 0x000f4606, 0x000f41ba, 0x000f3a91, 0x000f30a2, 0x000f2401, 0x000f14c4, 0x000f0300, 0x000eeeca, 0x000ed838, 0x000ebf60, 0x000ea455, 0x000e872d, 0x000e67fe, 0x000e46dd, 0x000e23dd, 0x000dff15, 0x000dd898, 0x000db07b, 0x000d86d3, 0x000d5bb3, 0x000d2f30, 0x000d015d, 0x000cd24d, 0x000ca215, 0x000c70c7, 0x000c3e76, 
0x000c0b34, 0x000bd715, 0x000ba229, 0x000b6c83, 0x000b3634, 0x000aff4e, 0x000ac7e1, 0x000a8ffe, 0x000a57b5, 0x000a1f17, 0x0009e632, 0x0009ad16, 0x000973d2, 0x00093a74, 0x0009010b, 0x0008c7a5, 0x00088e4f, 0x00085517, 0x00081c09, 0x0007e331, 0x0007aa9c, 0x00077255, 0x00073a68, 0x000702e0, 0x0006cbc7, 0x00069527, 0x00065f0b, 0x0006297b, 0x0005f481, 0x0005c026, 0x00058c71, 0x0005596b, 0x0005271b, 0x0004f588, 0x0004c4ba, 0x000494b7, 0x00046585, 0x00043729, 0x000409aa, 0x0003dd0b, 0x0003b152, 0x00038683, 0x00035ca3, 0x000333b4, 0x00030bba, 0x0002e4b8, 0x0002beb1, 0x000299a7, 0x0002759c, 0x00025292, 0x0002308a, 0x00020f86, 0x0001ef85, 0x00000000 // this one is needed for lerping the last coefficient }; @@ -50,14 +50,14 @@ const int32_t AudioResamplerSinc::mFirCoefsUp[] = { * cmd-line: fir -v 0.3 -l 7 -s 48000 -c 16600 */ const int32_t AudioResamplerSinc::mFirCoefsDown[] = { - 0x7ba78e22, 0x7ba5ec84, 0x7ba107c0, 0x7b98e016, 0x7b8d75f3, 0x7b7ec9ed, 0x7b6cdcc5, 0x7b57af69, 0x7b3f42f0, 0x7b23989d, 0x7b04b1dc, 0x7ae29047, 0x7abd359f, 0x7a94a3d0, 0x7a68dcf4, 0x7a39e349, 0x7a07b93d, 0x79d26164, 0x7999de7d, 0x795e3370, 0x791f6350, 0x78dd7157, 0x789860e9, 0x78503592, 0x7804f307, 0x77b69d25, 0x776537f2, 0x7710c799, 0x76b95070, 0x765ed6f1, 0x76015fbf, 0x75a0efa2, 0x753d8b88, 0x74d73888, 0x746dfbda, 0x7401dade, 0x7392db19, 0x73210234, 0x72ac55fc, 0x7234dc61, 0x71ba9b77, 0x713d9976, 0x70bddcb7, 0x703b6bb6, 0x6fb64d11, 0x6f2e8786, 0x6ea421f5, 0x6e17235e, 0x6d8792e2, 0x6cf577bf, 0x6c60d954, 0x6bc9bf1f, 0x6b3030bb, 0x6a9435e0, 0x69f5d664, 0x69551a39, 0x68b2096e, 0x680cac2d, 0x67650abb, 0x66bb2d77, 0x660f1cda, 0x6560e178, 0x64b083fb, 0x63fe0d27, 0x634985d8, 0x6292f701, 0x61da69ab, 0x611fe6f5, 0x60637814, 0x5fa52650, 0x5ee4fb09, 0x5e22ffae, 0x5d5f3dc5, 0x5c99bee4, 0x5bd28cb4, 0x5b09b0ee, 0x5a3f355e, 0x597323dc, 0x58a58654, 0x57d666bd, 0x5705cf1d, 0x5633c98a, 0x55606024, 0x548b9d17, 0x53b58a9c, 0x52de32f7, 0x5205a075, 0x512bdd6f, 0x5050f443, 0x4f74ef5c, 0x4e97d929, 0x4db9bc22, 0x4cdaa2c5, 0x4bfa9795, 0x4b19a51b, 0x4a37d5e5, 0x49553484, 0x4871cb8b, 0x478da592, 0x46a8cd31, 0x45c34d02, 0x44dd2f9f, 0x43f67fa3, 0x430f47a7, 0x42279244, 0x413f6a10, 0x4056d99f, 0x3f6deb81, 0x3e84aa43, 0x3d9b206d, 0x3cb15882, 0x3bc75d00, 0x3add385c, 0x39f2f507, 0x39089d69, 0x381e3be1, 0x3733dac8, 0x3649846b, 0x355f430d, 0x347520e7, 0x338b2828, 0x32a162f0, 0x31b7db56, 0x30ce9b63, 0x2fe5ad11, 0x2efd1a4d, 0x2e14ecf6, 0x2d2d2eda, - 0x2c45e9b9, 0x2b5f2742, 0x2a78f112, 0x299350b7, 0x28ae4fab, 0x27c9f756, 0x26e6510d, 0x26036613, 0x25213f95, 0x243fe6ac, 0x235f645c, 0x227fc196, 0x21a10731, 0x20c33def, 0x1fe66e7e, 0x1f0aa171, 0x1e2fdf44, 0x1d56305d, 0x1c7d9d06, 0x1ba62d74, 0x1acfe9be, 0x19fad9e5, 0x192705ce, 0x18547543, 0x17832ff3, 0x16b33d74, 0x15e4a53c, 0x15176ea9, 0x144ba0f9, 0x13814350, 0x12b85cb4, 0x11f0f40c, 0x112b1024, 0x1066b7a7, 0x0fa3f123, 0x0ee2c308, 0x0e2333a7, 0x0d654930, 0x0ca909b5, 0x0bee7b28, 0x0b35a35b, 0x0a7e8800, 0x09c92ea8, 0x09159cc5, 0x0863d7a5, 0x07b3e479, 0x0705c84e, 0x06598811, 0x05af288c, 0x0506ae68, 0x04601e2e, 0x03bb7c42, 0x0318cce7, 0x02781440, 0x01d9564b, 0x013c96e3, 0x00a1d9c5, 0x00092285, 0xff72749a, 0xfeddd356, 0xfe4b41e8, 0xfdbac35c, 0xfd2c5a9c, 0xfca00a6f, 0xfc15d57a, 0xfb8dbe3c, 0xfb07c716, 0xfa83f243, 0xfa0241db, 0xf982b7d4, 0xf9055602, 0xf88a1e16, 0xf811119e, 0xf79a3206, 0xf7258096, 0xf6b2fe76, 0xf642acab, 0xf5d48c16, 0xf5689d79, 0xf4fee173, 0xf4975880, 0xf43202fb, 0xf3cee11f, 0xf36df305, 0xf30f38a2, 0xf2b2b1d0, 0xf2585e42, 0xf2003d8f, 0xf1aa4f2b, 0xf156926c, 0xf1050685, 0xf0b5aa8d, 0xf0687d78, 0xf01d7e1e, 
0xefd4ab35, 0xef8e0357, 0xef4984fd, 0xef072e84, 0xeec6fe2b, 0xee88f210, 0xee4d0839, 0xee133e8a, 0xeddb92ce, 0xeda602b0, 0xed728bc3, 0xed412b7b, 0xed11df32, 0xece4a425, 0xecb97779, 0xec905638, 0xec693d4f, 0xec442995, 0xec2117c5, 0xec000482, 0xebe0ec58, 0xebc3cbb7, 0xeba89efa, 0xeb8f6264, 0xeb781221, 0xeb62aa45, 0xeb4f26ce, 0xeb3d83a7, 0xeb2dbca1, 0xeb1fcd7b, 0xeb13b1df, 0xeb096562, 0xeb00e385, 0xeafa27b6, - 0xeaf52d50, 0xeaf1ef9d, 0xeaf069d1, 0xeaf09712, 0xeaf27274, 0xeaf5f6fa, 0xeafb1f95, 0xeb01e728, 0xeb0a4886, 0xeb143e74, 0xeb1fc3a7, 0xeb2cd2c7, 0xeb3b666c, 0xeb4b7925, 0xeb5d0571, 0xeb7005c4, 0xeb847485, 0xeb9a4c11, 0xebb186ba, 0xebca1ec8, 0xebe40e77, 0xebff4ffb, 0xec1bdd80, 0xec39b127, 0xec58c50a, 0xec79133d, 0xec9a95ca, 0xecbd46b7, 0xece12000, 0xed061ba0, 0xed2c3388, 0xed5361a8, 0xed7b9fe8, 0xeda4e830, 0xedcf3461, 0xedfa7e5a, 0xee26bff8, 0xee53f315, 0xee82118a, 0xeeb1152e, 0xeee0f7d8, 0xef11b35d, 0xef434193, 0xef759c51, 0xefa8bd6e, 0xefdc9ec2, 0xf0113a28, 0xf046897c, 0xf07c869d, 0xf0b32b6c, 0xf0ea71cf, 0xf12253af, 0xf15acaf8, 0xf193d19c, 0xf1cd6192, 0xf20774d5, 0xf2420568, 0xf27d0d52, 0xf2b886a1, 0xf2f46b6a, 0xf330b5ca, 0xf36d5fe4, 0xf3aa63e4, 0xf3e7bbfe, 0xf4256270, 0xf463517e, 0xf4a18378, 0xf4dff2b7, 0xf51e999d, 0xf55d7297, 0xf59c781d, 0xf5dba4b2, 0xf61af2e4, 0xf65a5d4c, 0xf699de8f, 0xf6d97160, 0xf719107b, 0xf758b6ab, 0xf7985ec9, 0xf7d803b9, 0xf817a06d, 0xf8572fe6, 0xf896ad32, 0xf8d6136d, 0xf9155dc3, 0xf954876c, 0xf9938bb3, 0xf9d265ef, 0xfa111187, 0xfa4f89f3, 0xfa8dcab9, 0xfacbcf70, 0xfb0993bf, 0xfb47135d, 0xfb844a13, 0xfbc133ba, 0xfbfdcc3b, 0xfc3a0f90, 0xfc75f9c7, 0xfcb186fb, 0xfcecb35d, 0xfd277b2d, 0xfd61dabc, 0xfd9bce6f, 0xfdd552bd, 0xfe0e642d, 0xfe46ff5a, 0xfe7f20f1, 0xfeb6c5b1, 0xfeedea6c, 0xff248c06, 0xff5aa776, 0xff9039c5, 0xffc54010, 0xfff9b786, 0x002d9d69, 0x0060ef0e, 0x0093a9dd, 0x00c5cb50, 0x00f750f6, 0x0128386e, 0x01587f6d, 0x018823b9, 0x01b7232d, 0x01e57bb4, 0x02132b4f, 0x02403010, 0x026c881c, - 0x029831ad, 0x02c32b0d, 0x02ed729c, 0x031706c9, 0x033fe618, 0x03680f20, 0x038f8089, 0x03b63910, 0x03dc3782, 0x04017abf, 0x042601ba, 0x0449cb78, 0x046cd70f, 0x048f23a9, 0x04b0b080, 0x04d17ce2, 0x04f1882b, 0x0510d1cc, 0x052f5947, 0x054d1e2e, 0x056a2024, 0x05865edf, 0x05a1da25, 0x05bc91cb, 0x05d685b9, 0x05efb5e6, 0x0608225b, 0x061fcb2f, 0x0636b08a, 0x064cd2a4, 0x066231c4, 0x0676ce42, 0x068aa883, 0x069dc0fd, 0x06b01833, 0x06c1aeba, 0x06d28532, 0x06e29c4a, 0x06f1f4c2, 0x07008f64, 0x070e6d0a, 0x071b8e9c, 0x0727f50e, 0x0733a162, 0x073e94a5, 0x0748cff4, 0x07525475, 0x075b235d, 0x07633dec, 0x076aa56d, 0x07715b37, 0x077760ae, 0x077cb73f, 0x07816063, 0x07855d9c, 0x0788b07a, 0x078b5a93, 0x078d5d89, 0x078ebb09, 0x078f74c8, 0x078f8c82, 0x078f0401, 0x078ddd14, 0x078c1993, 0x0789bb60, 0x0786c464, 0x0783368e, 0x077f13d8, 0x077a5e41, 0x077517d0, 0x076f4291, 0x0768e09a, 0x0761f403, 0x075a7eef, 0x07528382, 0x074a03e9, 0x07410255, 0x073780fc, 0x072d8219, 0x072307ec, 0x071814ba, 0x070caaca, 0x0700cc69, 0x06f47be7, 0x06e7bb97, 0x06da8dcf, 0x06ccf4e9, 0x06bef340, 0x06b08b35, 0x06a1bf28, 0x0692917b, 0x06830493, 0x06731ad7, 0x0662d6af, 0x06523a82, 0x064148bc, 0x063003c6, 0x061e6e0c, 0x060c89f8, 0x05fa59f5, 0x05e7e06f, 0x05d51fd0, 0x05c21a83, 0x05aed2ef, 0x059b4b7f, 0x05878698, 0x057386a1, 0x055f4dfc, 0x054adf0e, 0x05363c35, 0x052167d0, 0x050c643b, 0x04f733cf, 0x04e1d8e2, 0x04cc55c8, 0x04b6acd2, 0x04a0e04c, 0x048af281, 0x0474e5b7, 0x045ebc2f, 0x0448782a, 0x04321be1, 0x041ba98b, 0x0405235a, 0x03ee8b7b, 0x03d7e417, 0x03c12f51, 0x03aa6f4a, - 0x0393a61a, 0x037cd5d6, 0x0366008e, 0x034f284c, 0x03384f14, 
0x032176e3, 0x030aa1b4, 0x02f3d179, 0x02dd081e, 0x02c64789, 0x02af919c, 0x0298e830, 0x02824d17, 0x026bc220, 0x02554910, 0x023ee3a6, 0x0228939b, 0x02125aa0, 0x01fc3a61, 0x01e63480, 0x01d04a9a, 0x01ba7e44, 0x01a4d10c, 0x018f4478, 0x0179da08, 0x01649334, 0x014f716a, 0x013a7615, 0x0125a295, 0x0110f844, 0x00fc7872, 0x00e8246b, 0x00d3fd70, 0x00c004bc, 0x00ac3b81, 0x0098a2eb, 0x00853c1b, 0x0072082e, 0x005f0837, 0x004c3d40, 0x0039a84d, 0x00274a5a, 0x0015245a, 0x00033739, 0xfff183db, 0xffe00b1b, 0xffcecdcd, 0xffbdccbe, 0xffad08b2, 0xff9c8265, 0xff8c3a8b, 0xff7c31d2, 0xff6c68de, 0xff5ce04c, 0xff4d98b2, 0xff3e929e, 0xff2fce96, 0xff214d18, 0xff130e9b, 0xff05138f, 0xfef75c5b, 0xfee9e960, 0xfedcbaf7, 0xfecfd172, 0xfec32d1a, 0xfeb6ce34, 0xfeaab4fb, 0xfe9ee1a5, 0xfe93545e, 0xfe880d4e, 0xfe7d0c95, 0xfe72524c, 0xfe67de84, 0xfe5db14b, 0xfe53caa3, 0xfe4a2a8d, 0xfe40d0ff, 0xfe37bdec, 0xfe2ef13e, 0xfe266ada, 0xfe1e2a9e, 0xfe163064, 0xfe0e7bfe, 0xfe070d39, 0xfdffe3db, 0xfdf8ffa6, 0xfdf26054, 0xfdec059d, 0xfde5ef30, 0xfde01cb8, 0xfdda8ddc, 0xfdd5423b, 0xfdd03971, 0xfdcb7316, 0xfdc6eeb9, 0xfdc2abe9, 0xfdbeaa2d, 0xfdbae90a, 0xfdb767fd, 0xfdb42681, 0xfdb1240e, 0xfdae6015, 0xfdabda05, 0xfda99147, 0xfda78541, 0xfda5b557, 0xfda420e6, 0xfda2c74b, 0xfda1a7dd, 0xfda0c1f0, 0xfda014d5, 0xfd9f9fdc, 0xfd9f624e, 0xfd9f5b73, 0xfd9f8a91, 0xfd9feeeb, 0xfda087c0, 0xfda1544d, 0xfda253ce, 0xfda3857b, 0xfda4e88a, 0xfda67c31, 0xfda83fa0, 0xfdaa3209, 0xfdac529a, 0xfdaea081, 0xfdb11ae7, 0xfdb3c0f9, - 0xfdb691dc, 0xfdb98cba, 0xfdbcb0b8, 0xfdbffcfa, 0xfdc370a5, 0xfdc70adc, 0xfdcacac1, 0xfdceaf74, 0xfdd2b818, 0xfdd6e3cc, 0xfddb31b0, 0xfddfa0e4, 0xfde43087, 0xfde8dfb8, 0xfdedad97, 0xfdf29942, 0xfdf7a1d8, 0xfdfcc679, 0xfe020645, 0xfe07605b, 0xfe0cd3dc, 0xfe125fe8, 0xfe1803a3, 0xfe1dbe2d, 0xfe238ea9, 0xfe29743c, 0xfe2f6e0a, 0xfe357b39, 0xfe3b9af0, 0xfe41cc56, 0xfe480e94, 0xfe4e60d6, 0xfe54c246, 0xfe5b3212, 0xfe61af68, 0xfe683978, 0xfe6ecf74, 0xfe75708f, 0xfe7c1bff, 0xfe82d0f9, 0xfe898eb7, 0xfe905473, 0xfe972169, 0xfe9df4d8, 0xfea4ce00, 0xfeabac24, 0xfeb28e88, 0xfeb97473, 0xfec05d2d, 0xfec74803, 0xfece3442, 0xfed5213a, 0xfedc0e3c, 0xfee2fa9f, 0xfee9e5b8, 0xfef0cee2, 0xfef7b579, 0xfefe98db, 0xff05786b, 0xff0c538b, 0xff1329a3, 0xff19fa1b, 0xff20c461, 0xff2787e2, 0xff2e4410, 0xff34f85f, 0xff3ba447, 0xff424740, 0xff48e0c9, 0xff4f705f, 0xff55f586, 0xff5c6fc2, 0xff62de9c, 0xff69419f, 0xff6f9858, 0xff75e258, 0xff7c1f32, 0xff824e7e, 0xff886fd4, 0xff8e82d1, 0xff948714, 0xff9a7c40, 0xffa061f8, 0xffa637e6, 0xffabfdb4, 0xffb1b310, 0xffb757ab, 0xffbceb37, 0xffc26d6c, 0xffc7de03, 0xffcd3cb8, 0xffd28949, 0xffd7c379, 0xffdceb0d, 0xffe1ffcc, 0xffe7017f, 0xffebeff5, 0xfff0cafc, 0xfff59268, 0xfffa460d, 0xfffee5c4, 0x00037166, 0x0007e8d2, 0x000c4be7, 0x00109a87, 0x0014d499, 0x0018fa02, 0x001d0aad, 0x00210688, 0x0024ed80, 0x0028bf89, 0x002c7c95, 0x0030249a, 0x0033b793, 0x00373579, 0x003a9e4b, 0x003df207, 0x004130b0, 0x00445a4a, 0x00476eda, 0x004a6e6a, 0x004d5903, 0x00502eb3, 0x0052ef87, 0x00559b91, 0x005832e3, 0x005ab591, 0x005d23b1, - 0x005f7d5c, 0x0061c2ac, 0x0063f3bc, 0x006610aa, 0x00681995, 0x006a0e9e, 0x006befe8, 0x006dbd95, 0x006f77cd, 0x00711eb5, 0x0072b277, 0x0074333d, 0x0075a131, 0x0076fc81, 0x0078455a, 0x00797bed, 0x007aa068, 0x007bb2fe, 0x007cb3e3, 0x007da349, 0x007e8166, 0x007f4e6f, 0x00800a9d, 0x0080b626, 0x00815144, 0x0081dc31, 0x00825727, 0x0082c261, 0x00831e1c, 0x00836a95, 0x0083a80a, 0x0083d6b7, 0x0083f6dd, 0x008408bb, 0x00840c91, 0x0084029f, 0x0083eb26, 0x0083c667, 0x008394a4, 0x00835620, 0x00830b1d, 0x0082b3dc, 0x008250a3, 0x0081e1b2, 
0x0081674f, 0x0080e1bc, 0x0080513c, 0x007fb615, 0x007f1089, 0x007e60dc, 0x007da752, 0x007ce42f, 0x007c17b7, 0x007b422c, 0x007a63d3, 0x00797cef, 0x00788dc2, 0x00779690, 0x0076979c, 0x00759127, 0x00748375, 0x00736ec6, 0x0072535c, 0x00713179, 0x0070095c, 0x006edb47, 0x006da779, 0x006c6e31, 0x006b2faf, 0x0069ec30, 0x0068a3f3, 0x00675735, 0x00660633, 0x0064b129, 0x00635852, 0x0061fbea, 0x00609c2a, 0x005f394d, 0x005dd38c, 0x005c6b1e, 0x005b003c, 0x0059931c, 0x005823f5, 0x0056b2fc, 0x00554066, 0x0053cc66, 0x00525730, 0x0050e0f6, 0x004f69ea, 0x004df23c, 0x004c7a1d, 0x004b01bb, 0x00498945, 0x004810e8, 0x004698d0, 0x0045212a, 0x0043aa20, 0x004233dd, 0x0040be88, 0x003f4a4b, 0x003dd74c, 0x003c65b3, 0x003af5a4, 0x00398744, 0x00381ab7, 0x0036b020, 0x003547a0, 0x0033e15a, 0x00327d6b, 0x00311bf5, 0x002fbd15, 0x002e60e9, 0x002d078c, 0x002bb11b, 0x002a5db0, 0x00290d66, 0x0027c054, 0x00267693, 0x0025303b, 0x0023ed60, 0x0022ae19, 0x0021727a, 0x00203a97, 0x001f0682, 0x001dd64d, 0x001caa0a, 0x001b81c7, 0x001a5d96, - 0x00193d84, 0x0018219f, 0x001709f3, 0x0015f68d, 0x0014e779, 0x0013dcc0, 0x0012d66c, 0x0011d487, 0x0010d717, 0x000fde26, 0x000ee9b8, 0x000df9d5, 0x000d0e82, 0x000c27c2, 0x000b459a, 0x000a680d, 0x00098f1d, 0x0008bacc, 0x0007eb1a, 0x00072009, 0x00065999, 0x000597c7, 0x0004da94, 0x000421fc, 0x00036dfd, 0x0002be95, 0x000213be, 0x00016d76, 0x0000cbb6, 0x00002e7a, 0xffff95bc, 0xffff0175, 0xfffe719f, 0xfffde632, 0xfffd5f26, 0xfffcdc72, 0xfffc5e10, 0xfffbe3f4, 0xfffb6e16, 0xfffafc6b, 0xfffa8eea, 0xfffa2588, 0xfff9c039, 0xfff95ef2, 0xfff901a8, 0xfff8a84e, 0xfff852d8, 0xfff8013a, 0xfff7b366, 0xfff7694f, 0xfff722e9, 0xfff6e024, 0xfff6a0f4, 0xfff66549, 0xfff62d17, 0xfff5f84d, 0xfff5c6de, 0xfff598bb, 0xfff56dd4, 0xfff5461a, 0xfff5217e, 0xfff4fff1, 0xfff4e162, 0xfff4c5c3, 0xfff4ad03, 0xfff49712, 0xfff483e1, 0xfff47360, 0xfff4657e, 0xfff45a2c, 0xfff45159, 0xfff44af5, 0xfff446f1, 0xfff4453b, 0xfff445c5, 0xfff4487d, 0xfff44d54, 0xfff4543a, 0xfff45d1e, 0xfff467f1, 0xfff474a4, 0xfff48325, 0xfff49366, 0xfff4a556, 0xfff4b8e7, 0xfff4ce09, 0xfff4e4ad, 0xfff4fcc2, 0xfff5163b, 0xfff53109, 0xfff54d1b, 0xfff56a65, 0xfff588d7, 0xfff5a863, 0xfff5c8fb, 0xfff5ea91, 0xfff60d16, 0xfff6307e, 0xfff654bb, 0xfff679bf, 0xfff69f7d, 0xfff6c5e9, 0xfff6ecf5, 0xfff71495, 0xfff73cbe, 0xfff76562, 0xfff78e75, 0xfff7b7ed, 0xfff7e1be, 0xfff80bdc, 0xfff8363c, 0xfff860d4, 0xfff88b99, 0xfff8b681, 0xfff8e182, 0xfff90c92, 0xfff937a6, 0xfff962b7, 0xfff98dba, 0xfff9b8a7, 0xfff9e376, 0xfffa0e1d, 0xfffa3895, 0xfffa62d5, 0xfffa8cd6, 0xfffab691, 0xfffadfff, 0xfffb0917, + 0x6e350b14, 0x6e3396e0, 0x6e2f3a59, 0x6e27f5b7, 0x6e1dc95c, 0x6e10b5cc, 0x6e00bbb6, 0x6deddbeb, 0x6dd81765, 0x6dbf6f44, 0x6da3e4cd, 0x6d85796c, 0x6d642eb2, 0x6d400656, 0x6d190235, 0x6cef2453, 0x6cc26ed6, 0x6c92e40c, 0x6c608668, 0x6c2b5882, 0x6bf35d14, 0x6bb89701, 0x6b7b094e, 0x6b3ab724, 0x6af7a3d1, 0x6ab1d2c7, 0x6a69479a, 0x6a1e0603, 0x69d011df, 0x697f6f2a, 0x692c2207, 0x68d62eb9, 0x687d99a6, 0x68226753, 0x67c49c6c, 0x67643db8, 0x67015023, 0x669bd8b9, 0x6633dca5, 0x65c96131, 0x655c6bca, 0x64ed01f9, 0x647b2967, 0x6406e7dc, 0x6390433c, 0x6317418a, 0x629be8e8, 0x621e3f92, 0x619e4be2, 0x611c144d, 0x60979f66, 0x6010f3d8, 0x5f88186c, 0x5efd1402, 0x5e6fed97, 0x5de0ac3f, 0x5d4f5729, 0x5cbbf59c, 0x5c268ef6, 0x5b8f2aad, 0x5af5d04f, 0x5a5a8780, 0x59bd57fa, 0x591e498d, 0x587d641d, 0x57daafa5, 0x57363432, 0x568ff9e4, 0x55e808f2, 0x553e69a1, 0x5493244a, 0x53e64158, 0x5337c946, 0x5287c4a0, 0x51d63c02, 0x51233816, 0x506ec197, 0x4fb8e14d, 0x4f01a00d, 0x4e4906ba, 0x4d8f1e43, 0x4cd3efa4, 0x4c1783e5, 
0x4b59e415, 0x4a9b1952, 0x49db2cc2, 0x491a2792, 0x485812fb, 0x4794f83d, 0x46d0e09d, 0x460bd56a, 0x4545dff9, 0x447f09a2, 0x43b75bc5, 0x42eedfc5, 0x42259f0a, 0x415ba2fe, 0x4090f510, 0x3fc59eaf, 0x3ef9a94d, 0x3e2d1e5c, 0x3d600751, 0x3c926d9d, 0x3bc45ab4, 0x3af5d807, 0x3a26ef05, 0x3957a91c, 0x38880fb6, 0x37b82c3b, 0x36e8080c, 0x3617ac89, 0x3547230a, 0x347674e3, 0x33a5ab62, 0x32d4cfcc, 0x3203eb61, 0x31330758, 0x30622ce0, 0x2f91651f, 0x2ec0b931, 0x2df03228, 0x2d1fd90d, 0x2c4fb6dd, 0x2b7fd488, 0x2ab03af3, 0x29e0f2f8, 0x29120562, 0x28437aef, + 0x27755c4f, 0x26a7b223, 0x25da84fc, 0x250ddd60, 0x2441c3bf, 0x2376407c, 0x22ab5be9, 0x21e11e45, 0x21178fc0, 0x204eb874, 0x1f86a06c, 0x1ebf4f9d, 0x1df8cdea, 0x1d332322, 0x1c6e56ff, 0x1baa7127, 0x1ae77929, 0x1a257682, 0x19647094, 0x18a46eb0, 0x17e5780d, 0x172793cc, 0x166ac8f6, 0x15af1e7c, 0x14f49b39, 0x143b45ed, 0x13832540, 0x12cc3fc3, 0x12169bea, 0x11624012, 0x10af327e, 0x0ffd7955, 0x0f4d1aa4, 0x0e9e1c5e, 0x0df0845b, 0x0d445856, 0x0c999df2, 0x0bf05ab1, 0x0b4893fd, 0x0aa24f23, 0x09fd9152, 0x095a5f9d, 0x08b8befc, 0x0818b447, 0x077a443b, 0x06dd7376, 0x0642467a, 0x05a8c1a9, 0x0510e949, 0x047ac182, 0x03e64e5e, 0x035393c6, 0x02c29588, 0x02335752, 0x01a5dcb3, 0x011a291e, 0x00903fe2, 0x00082435, 0xff81d92b, 0xfefd61b9, 0xfe7ac0b7, 0xfdf9f8da, 0xfd7b0cbe, 0xfcfdfeda, 0xfc82d18a, 0xfc098709, 0xfb922173, 0xfb1ca2c7, 0xfaa90ce2, 0xfa376185, 0xf9c7a24e, 0xf959d0c2, 0xf8edee41, 0xf883fc11, 0xf81bfb56, 0xf7b5ed19, 0xf751d240, 0xf6efab96, 0xf68f79c7, 0xf6313d60, 0xf5d4f6d0, 0xf57aa669, 0xf5224c5f, 0xf4cbe8c7, 0xf4777b9a, 0xf42504b2, 0xf3d483cf, 0xf385f890, 0xf339627b, 0xf2eec0f5, 0xf2a6134a, 0xf25f58aa, 0xf21a9025, 0xf1d7b8b4, 0xf196d132, 0xf157d85d, 0xf11accdc, 0xf0dfad37, 0xf0a677de, 0xf06f2b25, 0xf039c548, 0xf0064466, 0xefd4a686, 0xefa4e997, 0xef770b6c, 0xef4b09c2, 0xef20e23d, 0xeef89266, 0xeed217b2, 0xeead6f7d, 0xee8a9709, 0xee698b87, 0xee4a4a0b, 0xee2ccf97, 0xee111915, 0xedf7235b, 0xeddeeb29, 0xedc86d28, 0xedb3a5ef, 0xeda09201, 0xed8f2dcb, 0xed7f75a8, 0xed7165e0, 0xed64faa7, 0xed5a301f, 0xed510259, 0xed496d53, 0xed436cfb, + 0xed3efd2c, 0xed3c19b3, 0xed3abe4b, 0xed3ae6a1, 0xed3c8e50, 0xed3fb0e7, 0xed4449e5, 0xed4a54ba, 0xed51ccca, 0xed5aad6a, 0xed64f1e4, 0xed709574, 0xed7d934a, 0xed8be68a, 0xed9b8a4d, 0xedac79a2, 0xedbeaf8c, 0xedd22705, 0xede6dafd, 0xedfcc65a, 0xee13e3f9, 0xee2c2eb1, 0xee45a14f, 0xee603697, 0xee7be949, 0xee98b41b, 0xeeb691c0, 0xeed57ce1, 0xeef57025, 0xef16662c, 0xef385992, 0xef5b44ed, 0xef7f22d2, 0xefa3edcf, 0xefc9a072, 0xeff03543, 0xf017a6cb, 0xf03fef90, 0xf0690a14, 0xf092f0da, 0xf0bd9e66, 0xf0e90d37, 0xf11537d0, 0xf14218b3, 0xf16faa62, 0xf19de762, 0xf1ccca37, 0xf1fc4d68, 0xf22c6b80, 0xf25d1f0a, 0xf28e6296, 0xf2c030b5, 0xf2f283fe, 0xf325570c, 0xf358a47c, 0xf38c66f3, 0xf3c09918, 0xf3f5359a, 0xf42a372c, 0xf45f9888, 0xf495546e, 0xf4cb65a6, 0xf501c6fd, 0xf5387349, 0xf56f6567, 0xf5a6983c, 0xf5de06b6, 0xf615abcb, 0xf64d827a, 0xf68585cb, 0xf6bdb0d0, 0xf6f5fea5, 0xf72e6a6e, 0xf766ef5c, 0xf79f88a9, 0xf7d8319a, 0xf810e580, 0xf8499fb6, 0xf8825ba4, 0xf8bb14bc, 0xf8f3c67e, 0xf92c6c75, 0xf9650239, 0xf99d836f, 0xf9d5ebc7, 0xfa0e3702, 0xfa4660ea, 0xfa7e6559, 0xfab64038, 0xfaeded7a, 0xfb256924, 0xfb5caf47, 0xfb93bc03, 0xfbca8b87, 0xfc011a11, 0xfc3763ec, 0xfc6d6574, 0xfca31b14, 0xfcd88147, 0xfd0d9494, 0xfd425197, 0xfd76b4f7, 0xfdaabb6d, 0xfdde61c2, 0xfe11a4ce, 0xfe44817b, 0xfe76f4c2, 0xfea8fbab, 0xfeda9350, 0xff0bb8dd, 0xff3c698b, 0xff6ca2a6, 0xff9c618a, 0xffcba3a4, 0xfffa6672, 0x0028a781, 0x00566471, 0x00839af2, 0x00b048c5, 0x00dc6bbc, 0x010801b9, 0x013308b1, 
0x015d7ea7, 0x018761b2, 0x01b0aff7, 0x01d967af, 0x02018722, 0x02290ca8, + 0x024ff6ac, 0x027643a9, 0x029bf22a, 0x02c100cc, 0x02e56e3b, 0x03093935, 0x032c608a, 0x034ee316, 0x0370bfca, 0x0391f5a5, 0x03b283b7, 0x03d26920, 0x03f1a510, 0x041036c8, 0x042e1d98, 0x044b58e0, 0x0467e810, 0x0483caa7, 0x049f0035, 0x04b98858, 0x04d362bd, 0x04ec8f23, 0x05050d54, 0x051cdd2c, 0x0533fe94, 0x054a7186, 0x05603607, 0x05754c2f, 0x0589b41f, 0x059d6e0a, 0x05b07a30, 0x05c2d8dd, 0x05d48a6e, 0x05e58f49, 0x05f5e7e6, 0x060594c8, 0x0614967d, 0x0622eda3, 0x06309ae3, 0x063d9ef2, 0x0649fa93, 0x0655ae94, 0x0660bbce, 0x066b2327, 0x0674e591, 0x067e0406, 0x06867f90, 0x068e5940, 0x06959234, 0x069c2b92, 0x06a2268e, 0x06a78463, 0x06ac4657, 0x06b06dba, 0x06b3fbe5, 0x06b6f23a, 0x06b95226, 0x06bb1d1c, 0x06bc549a, 0x06bcfa25, 0x06bd0f4b, 0x06bc95a2, 0x06bb8ec8, 0x06b9fc61, 0x06b7e01a, 0x06b53ba8, 0x06b210c4, 0x06ae6130, 0x06aa2eb4, 0x06a57b1f, 0x06a04844, 0x069a97fe, 0x06946c2c, 0x068dc6b3, 0x0686a97f, 0x067f167e, 0x06770fa5, 0x066e96eb, 0x0665ae50, 0x065c57d2, 0x06529578, 0x0648694a, 0x063dd555, 0x0632dba8, 0x06277e55, 0x061bbf72, 0x060fa118, 0x0603255f, 0x05f64e66, 0x05e91e4b, 0x05db972f, 0x05cdbb32, 0x05bf8c7a, 0x05b10d2b, 0x05a23f6a, 0x0593255f, 0x0583c12f, 0x05741504, 0x05642304, 0x0553ed58, 0x05437626, 0x0532bf98, 0x0521cbd2, 0x05109cfc, 0x04ff353b, 0x04ed96b2, 0x04dbc386, 0x04c9bdd6, 0x04b787c3, 0x04a5236c, 0x049292eb, 0x047fd85d, 0x046cf5d7, 0x0459ed70, 0x0446c13b, 0x04337347, 0x042005a1, 0x040c7a55, 0x03f8d368, 0x03e512de, 0x03d13ab8, 0x03bd4cf2, 0x03a94b85, 0x03953865, 0x03811584, 0x036ce4cd, 0x0358a82a, 0x0344617e, + 0x033012a8, 0x031bbd84, 0x030763e8, 0x02f307a3, 0x02deaa84, 0x02ca4e50, 0x02b5f4c9, 0x02a19fac, 0x028d50b0, 0x02790987, 0x0264cbdc, 0x02509956, 0x023c7395, 0x02285c34, 0x021454c8, 0x02005edf, 0x01ec7c02, 0x01d8adb3, 0x01c4f56f, 0x01b154ab, 0x019dccd7, 0x018a5f5c, 0x01770d9d, 0x0163d8f6, 0x0150c2bb, 0x013dcc3d, 0x012af6c3, 0x0118438f, 0x0105b3da, 0x00f348da, 0x00e103ba, 0x00cee5a2, 0x00bcefb0, 0x00ab22fc, 0x00998098, 0x0088098e, 0x0076bee2, 0x0065a18f, 0x0054b28b, 0x0043f2c4, 0x00336322, 0x00230484, 0x0012d7c4, 0x0002ddb3, 0xfff3171d, 0xffe384c5, 0xffd42769, 0xffc4ffbe, 0xffb60e73, 0xffa75430, 0xff98d197, 0xff8a8740, 0xff7c75bf, 0xff6e9da0, 0xff60ff68, 0xff539b96, 0xff4672a1, 0xff3984fa, 0xff2cd30a, 0xff205d36, 0xff1423d9, 0xff08274a, 0xfefc67d8, 0xfef0e5cb, 0xfee5a167, 0xfeda9ae7, 0xfecfd280, 0xfec54861, 0xfebafcb4, 0xfeb0ef9c, 0xfea72133, 0xfe9d9192, 0xfe9440c8, 0xfe8b2edf, 0xfe825bdc, 0xfe79c7bd, 0xfe71727b, 0xfe695c0a, 0xfe618456, 0xfe59eb47, 0xfe5290bf, 0xfe4b749d, 0xfe4496b6, 0xfe3df6dd, 0xfe3794e0, 0xfe317085, 0xfe2b8991, 0xfe25dfc1, 0xfe2072ce, 0xfe1b426d, 0xfe164e4c, 0xfe119618, 0xfe0d1976, 0xfe08d808, 0xfe04d16d, 0xfe01053d, 0xfdfd730d, 0xfdfa1a6f, 0xfdf6faf0, 0xfdf41419, 0xfdf1656f, 0xfdeeee74, 0xfdecaea5, 0xfdeaa57e, 0xfde8d275, 0xfde734fe, 0xfde5cc89, 0xfde49884, 0xfde39857, 0xfde2cb6c, 0xfde23124, 0xfde1c8e3, 0xfde19207, 0xfde18beb, 0xfde1b5ea, 0xfde20f5a, 0xfde29790, 0xfde34dde, 0xfde43196, 0xfde54205, 0xfde67e78, 0xfde7e638, 0xfde97890, 0xfdeb34c5, 0xfded1a1e, 0xfdef27de, 0xfdf15d47, 0xfdf3b99b, + 0xfdf63c19, 0xfdf8e401, 0xfdfbb090, 0xfdfea102, 0xfe01b494, 0xfe04ea81, 0xfe084202, 0xfe0bba52, 0xfe0f52a9, 0xfe130a40, 0xfe16e050, 0xfe1ad410, 0xfe1ee4b9, 0xfe231181, 0xfe2759a0, 0xfe2bbc4e, 0xfe3038c2, 0xfe34ce35, 0xfe397bdd, 0xfe3e40f4, 0xfe431cb3, 0xfe480e52, 0xfe4d150b, 0xfe523019, 0xfe575eb6, 0xfe5ca01f, 0xfe61f390, 0xfe675846, 0xfe6ccd7f, 0xfe72527c, 0xfe77e67d, 0xfe7d88c3, 0xfe833890, 
0xfe88f52a, 0xfe8ebdd5, 0xfe9491d7, 0xfe9a707a, 0xfea05906, 0xfea64ac7, 0xfeac4509, 0xfeb2471b, 0xfeb8504c, 0xfebe5fee, 0xfec47555, 0xfeca8fd6, 0xfed0aec7, 0xfed6d183, 0xfedcf763, 0xfee31fc4, 0xfee94a07, 0xfeef758b, 0xfef5a1b3, 0xfefbcde6, 0xff01f989, 0xff082408, 0xff0e4ccd, 0xff147346, 0xff1a96e4, 0xff20b71a, 0xff26d35c, 0xff2ceb21, 0xff32fde4, 0xff390b21, 0xff3f1255, 0xff451303, 0xff4b0cac, 0xff50fed8, 0xff56e90f, 0xff5ccadb, 0xff62a3ca, 0xff68736c, 0xff6e3954, 0xff73f516, 0xff79a64a, 0xff7f4c8b, 0xff84e775, 0xff8a76a7, 0xff8ff9c4, 0xff957070, 0xff9ada53, 0xffa03716, 0xffa58665, 0xffaac7ef, 0xffaffb66, 0xffb5207e, 0xffba36ee, 0xffbf3e6f, 0xffc436bd, 0xffc91f96, 0xffcdf8bc, 0xffd2c1f1, 0xffd77afc, 0xffdc23a6, 0xffe0bbb9, 0xffe54303, 0xffe9b954, 0xffee1e7f, 0xfff27259, 0xfff6b4b9, 0xfffae579, 0xffff0475, 0x0003118c, 0x00070ca0, 0x000af592, 0x000ecc4a, 0x001290ae, 0x001642aa, 0x0019e22a, 0x001d6f1c, 0x0020e971, 0x0024511d, 0x0027a615, 0x002ae850, 0x002e17c8, 0x00313479, 0x00343e5f, 0x0037357c, 0x003a19cf, 0x003ceb5e, 0x003faa2d, 0x00425644, 0x0044efac, 0x00477671, 0x0049ea9f, 0x004c4c45, 0x004e9b74, 0x0050d83e, 0x005302b6, + 0x00551af3, 0x0057210c, 0x00591518, 0x005af733, 0x005cc777, 0x005e8604, 0x006032f6, 0x0061ce6f, 0x0063588f, 0x0064d17a, 0x00663953, 0x00679041, 0x0068d669, 0x006a0bf4, 0x006b310a, 0x006c45d6, 0x006d4a83, 0x006e3f3d, 0x006f2431, 0x006ff98f, 0x0070bf84, 0x00717641, 0x00721df8, 0x0072b6da, 0x00734119, 0x0073bcea, 0x00742a81, 0x00748a12, 0x0074dbd4, 0x00751ffc, 0x007556c1, 0x0075805b, 0x00759d03, 0x0075acef, 0x0075b05a, 0x0075a77d, 0x00759291, 0x007571d1, 0x00754578, 0x00750dc0, 0x0074cae5, 0x00747d22, 0x007424b3, 0x0073c1d3, 0x007354bf, 0x0072ddb2, 0x00725cea, 0x0071d2a2, 0x00713f16, 0x0070a284, 0x006ffd28, 0x006f4f3d, 0x006e9901, 0x006ddaaf, 0x006d1484, 0x006c46bc, 0x006b7192, 0x006a9542, 0x0069b208, 0x0068c81e, 0x0067d7c0, 0x0066e128, 0x0065e490, 0x0064e233, 0x0063da4a, 0x0062cd0f, 0x0061baba, 0x0060a384, 0x005f87a5, 0x005e6754, 0x005d42c9, 0x005c1a3b, 0x005aeddf, 0x0059bdeb, 0x00588a94, 0x00575410, 0x00561a91, 0x0054de4b, 0x00539f71, 0x00525e36, 0x00511acb, 0x004fd560, 0x004e8e26, 0x004d454d, 0x004bfb04, 0x004aaf78, 0x004962d9, 0x00481551, 0x0046c70e, 0x0045783b, 0x00442903, 0x0042d990, 0x00418a0a, 0x00403a9b, 0x003eeb6a, 0x003d9c9e, 0x003c4e5d, 0x003b00cc, 0x0039b411, 0x0038684f, 0x00371da9, 0x0035d441, 0x00348c38, 0x003345b0, 0x003200c9, 0x0030bda0, 0x002f7c55, 0x002e3d04, 0x002cffcb, 0x002bc4c5, 0x002a8c0d, 0x002955be, 0x002821f0, 0x0026f0bc, 0x0025c23b, 0x00249683, 0x00236daa, 0x002247c5, 0x002124ea, 0x0020052b, 0x001ee89d, 0x001dcf51, 0x001cb958, 0x001ba6c4, 0x001a97a5, 0x00198c08, 0x001883fe, 0x00177f93, + 0x00167ed5, 0x001581cf, 0x0014888e, 0x0013931b, 0x0012a181, 0x0011b3ca, 0x0010c9fd, 0x000fe423, 0x000f0244, 0x000e2465, 0x000d4a8c, 0x000c74bf, 0x000ba303, 0x000ad55c, 0x000a0bcc, 0x00094656, 0x000884fe, 0x0007c7c3, 0x00070ea8, 0x000659ad, 0x0005a8d0, 0x0004fc13, 0x00045373, 0x0003aeee, 0x00030e82, 0x0002722d, 0x0001d9eb, 0x000145b8, 0x0000b58f, 0x0000296d, 0xffffa14b, 0xffff1d24, 0xfffe9cf2, 0xfffe20ae, 0xfffda852, 0xfffd33d5, 0xfffcc331, 0xfffc565d, 0xfffbed50, 0xfffb8802, 0xfffb2669, 0xfffac87d, 0xfffa6e32, 0xfffa1780, 0xfff9c45a, 0xfff974b8, 0xfff9288e, 0xfff8dfcf, 0xfff89a72, 0xfff8586a, 0xfff819ac, 0xfff7de2a, 0xfff7a5d9, 0xfff770ab, 0xfff73e95, 0xfff70f89, 0xfff6e37b, 0xfff6ba5c, 0xfff6941f, 0xfff670b7, 0xfff65016, 0xfff6322f, 0xfff616f3, 0xfff5fe55, 0xfff5e846, 0xfff5d4b8, 0xfff5c39d, 0xfff5b4e7, 0xfff5a888, 0xfff59e71, 0xfff59694, 0xfff590e2, 
0xfff58d4d, 0xfff58bc8, 0xfff58c42, 0xfff58eaf, 0xfff592ff, 0xfff59925, 0xfff5a112, 0xfff5aab7, 0xfff5b608, 0xfff5c2f6, 0xfff5d172, 0xfff5e16f, 0xfff5f2df, 0xfff605b5, 0xfff619e2, 0xfff62f59, 0xfff6460d, 0xfff65df0, 0xfff676f6, 0xfff69110, 0xfff6ac32, 0xfff6c850, 0xfff6e55d, 0xfff7034b, 0xfff72210, 0xfff7419e, 0xfff761ea, 0xfff782e7, 0xfff7a48b, 0xfff7c6c9, 0xfff7e996, 0xfff80ce7, 0xfff830b1, 0xfff854ea, 0xfff87986, 0xfff89e7c, 0xfff8c3c0, 0xfff8e949, 0xfff90f0e, 0xfff93504, 0xfff95b23, 0xfff98160, 0xfff9a7b4, 0xfff9ce14, 0xfff9f47a, 0xfffa1adc, 0xfffa4131, 0xfffa6774, 0xfffa8d9a, 0xfffab39e, 0xfffad977, 0xfffaff1f, 0xfffb248f, 0xfffb49c1, 0xfffb6eac, 0xfffb934d, 0x00000000 // this one is needed for lerping the last coefficient }; diff --git a/tools/resampler_tools/fir.cpp b/tools/resampler_tools/fir.cpp index 14707d1..acd9911 100644 --- a/tools/resampler_tools/fir.cpp +++ b/tools/resampler_tools/fir.cpp @@ -84,7 +84,7 @@ int main(int argc, char** argv) unsigned int polyN = 147; bool debug = false; double Fs = 48000; - double Fc = 24000; + double Fc = 20478; double atten = 1; int format = 0; -- cgit v1.1 From c41590251aa84c078c942d258e838aad814b73a5 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 30 Oct 2012 10:51:39 -0700 Subject: Remove obsolete references to libmedia_native Bug: 6654403 Change-Id: I3993d62987cf0dd85db10bf002a5cce53d4f01bd --- cmds/stagefright/Android.mk | 8 ++++---- libvideoeditor/lvpp/Android.mk | 1 - media/libmedia/Android.mk | 2 +- media/libmediaplayerservice/Android.mk | 1 - media/libstagefright/Android.mk | 1 - services/audioflinger/Android.mk | 2 -- services/camera/libcameraservice/Android.mk | 1 - 7 files changed, 5 insertions(+), 11 deletions(-) diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk index 1247588..f60b1a4 100644 --- a/cmds/stagefright/Android.mk +++ b/cmds/stagefright/Android.mk @@ -8,7 +8,7 @@ LOCAL_SRC_FILES:= \ SineSource.cpp LOCAL_SHARED_LIBRARIES := \ - libstagefright libmedia libmedia_native libutils libbinder libstagefright_foundation \ + libstagefright libmedia libutils libbinder libstagefright_foundation \ libjpeg libgui LOCAL_C_INCLUDES:= \ @@ -104,7 +104,7 @@ LOCAL_SRC_FILES:= \ LOCAL_SHARED_LIBRARIES := \ libstagefright liblog libutils libbinder libgui \ - libstagefright_foundation libmedia libmedia_native libcutils + libstagefright_foundation libmedia libcutils LOCAL_C_INCLUDES:= \ frameworks/av/media/libstagefright \ @@ -127,7 +127,7 @@ LOCAL_SRC_FILES:= \ LOCAL_SHARED_LIBRARIES := \ libstagefright liblog libutils libbinder libstagefright_foundation \ - libmedia libmedia_native libgui libcutils libui + libmedia libgui libcutils libui LOCAL_C_INCLUDES:= \ frameworks/av/media/libstagefright \ @@ -151,7 +151,7 @@ LOCAL_SRC_FILES:= \ LOCAL_SHARED_LIBRARIES := \ libstagefright liblog libutils libbinder libstagefright_foundation \ - libmedia libmedia_native libgui libcutils libui + libmedia libgui libcutils libui LOCAL_C_INCLUDES:= \ frameworks/av/media/libstagefright \ diff --git a/libvideoeditor/lvpp/Android.mk b/libvideoeditor/lvpp/Android.mk index 0ed7e6c..778c5ac 100755 --- a/libvideoeditor/lvpp/Android.mk +++ b/libvideoeditor/lvpp/Android.mk @@ -54,7 +54,6 @@ LOCAL_SHARED_LIBRARIES := \ libGLESv2 \ libgui \ libmedia \ - libmedia_native \ libdrmframework \ libstagefright \ libstagefright_foundation \ diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index 54666fb..f2b6441 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -54,7 +54,7 @@ LOCAL_SRC_FILES:= \ 
LOCAL_SHARED_LIBRARIES := \ libui libcutils libutils libbinder libsonivox libicuuc libexpat \ libcamera_client libstagefright_foundation \ - libgui libdl libaudioutils libmedia_native + libgui libdl libaudioutils LOCAL_WHOLE_STATIC_LIBRARY := libmedia_helper diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk index 5b5ed71..48f48e4 100644 --- a/media/libmediaplayerservice/Android.mk +++ b/media/libmediaplayerservice/Android.mk @@ -28,7 +28,6 @@ LOCAL_SHARED_LIBRARIES := \ libdl \ libgui \ libmedia \ - libmedia_native \ libsonivox \ libstagefright \ libstagefright_foundation \ diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index faa0f31..a056706 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -78,7 +78,6 @@ LOCAL_SHARED_LIBRARIES := \ libicuuc \ liblog \ libmedia \ - libmedia_native \ libsonivox \ libssl \ libstagefright_omx \ diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index 2899953..58c4be4 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -31,7 +31,6 @@ LOCAL_C_INCLUDES := \ $(call include-path-for, audio-effects) \ $(call include-path-for, audio-utils) -# FIXME keep libmedia_native but remove libmedia after split LOCAL_SHARED_LIBRARIES := \ libaudioutils \ libcommon_time_client \ @@ -39,7 +38,6 @@ LOCAL_SHARED_LIBRARIES := \ libutils \ libbinder \ libmedia \ - libmedia_native \ libnbaio \ libhardware \ libhardware_legacy \ diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index 801afe9..5245983 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -28,7 +28,6 @@ LOCAL_SHARED_LIBRARIES:= \ libbinder \ libcutils \ libmedia \ - libmedia_native \ libcamera_client \ libgui \ libhardware \ -- cgit v1.1 From d88a051aff15fdf5c57e1e5a4083bbd9635af3ad Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Tue, 30 Oct 2012 12:49:07 -0700 Subject: fix another issue with generating FIR coefficients the impulse response of a low-pass is 2*f*sinc(2*pi*f*k), we were missing the 2*f scale factor. This explains why we were seeing clipping and had to manually scale the filter down. 
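To make the missing 2*f factor concrete, here is a minimal windowed-sinc sketch. It is not the actual tools/resampler_tools/fir.cpp: the Hamming window, the tap count and the plain printf output are assumptions made for illustration, and only the h[k] = 2*f*sinc(2*pi*f*k) form plus the -s 48000 / -c 20478 figures come from this patch series.

// Minimal illustration only -- not the real fir coefficient generator.
// It shows where the 2*f scale factor of the ideal low-pass impulse
// response h[k] = 2*f*sinc(2*pi*f*k) enters a windowed-sinc design.
#include <cmath>
#include <cstdio>

static double sinc(double x) {
    return x == 0.0 ? 1.0 : std::sin(x) / x;
}

int main() {
    const double kPi = 3.14159265358979323846;
    const double Fs  = 48000.0;   // sample rate used in the patch
    const double Fc  = 20478.0;   // cut-off used in the patch
    const int    N   = 255;       // tap count: arbitrary choice for this sketch
    const double f   = Fc / Fs;   // normalized cut-off

    for (int i = 0; i < N; i++) {
        const double k = i - (N - 1) / 2.0;
        // Ideal low-pass: without the leading 2*f the passband gain is
        // about 1/(2*f) ~= 1.17 instead of 1, which is why the earlier
        // tables had to be scaled down by hand to avoid clipping.
        const double h = 2.0 * f * sinc(2.0 * kPi * f * k);
        // Hamming window -- an assumption for this sketch, not what the
        // real tool uses.
        const double w = 0.54 - 0.46 * std::cos(2.0 * kPi * i / (N - 1));
        std::printf("%.9f\n", h * w);
    }
    return 0;
}

The real generator's windowing, attenuation handling and fixed-point scaling are more involved; the sketch only isolates where the 2*f term belongs.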
Change-Id: I86d0bb82ecdd99681c8ba5a8112a8257bf6f0186 --- services/audioflinger/AudioResamplerSinc.cpp | 36 ++++++++++++++-------------- tools/resampler_tools/fir.cpp | 4 ++-- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index 0e1acb4..edfed49 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -31,33 +31,33 @@ namespace android { /* * These coeficients are computed with the "fir" utility found in * tools/resampler_tools - * cmd-line: fir -v 1.3 -l 7 -s 48000 -c 20478 + * cmd-line: fir -l 7 -s 48000 -c 20478 */ const int32_t AudioResamplerSinc::mFirCoefsUp[] = { - 0x6e350b14, 0x6e32e1de, 0x6e2c6665, 0x6e219925, 0x6e127aed, 0x6dff0cdf, 0x6de7506d, 0x6dcb475f, 0x6daaf3cd, 0x6d865820, 0x6d5d7714, 0x6d3053b9, 0x6cfef16b, 0x6cc953db, 0x6c8f7f09, 0x6c517746, 0x6c0f4131, 0x6bc8e1ba, 0x6b7e5e1f, 0x6b2fbbee, 0x6add0100, 0x6a86337e, 0x6a2b59dc, 0x69cc7adc, 0x69699d89, 0x6902c93b, 0x68980593, 0x68295a7c, 0x67b6d02a, 0x67406f18, 0x66c64009, 0x66484c06, 0x65c69c5e, 0x65413aa3, 0x64b830ad, 0x642b8896, 0x639b4cb9, 0x630787b3, 0x62704460, 0x61d58ddd, 0x61376f83, 0x6095f4e8, 0x5ff129de, 0x5f491a73, 0x5e9dd2ed, 0x5def5fcc, 0x5d3dcdc4, 0x5c8929c4, 0x5bd180ec, 0x5b16e092, 0x5a59563d, 0x5998efa7, 0x58d5bab8, 0x580fc589, 0x57471e5f, 0x567bd3ac, 0x55adf40b, 0x54dd8e43, 0x540ab142, 0x53356c1d, 0x525dce0d, 0x5183e672, 0x50a7c4cd, 0x4fc978c0, 0x4ee9120c, 0x4e06a094, 0x4d223454, 0x4c3bdd65, 0x4b53abfc, 0x4a69b064, 0x497dfb00, 0x48909c49, 0x47a1a4cd, 0x46b1252c, 0x45bf2e16, 0x44cbd04d, 0x43d71ca1, 0x42e123ed, 0x41e9f71a, 0x40f1a71a, 0x3ff844e5, 0x3efde17e, 0x3e028dea, 0x3d065b33, 0x3c095a67, 0x3b0b9c92, 0x3a0d32c2, 0x390e2e01, 0x380e9f57, 0x370e97c6, 0x360e284c, 0x350d61da, 0x340c555d, 0x330b13b5, 0x3209adb4, 0x31083422, 0x3006b7b5, 0x2f054914, 0x2e03f8d4, 0x2d02d775, 0x2c01f564, 0x2b0162f9, 0x2a013072, 0x29016df5, 0x28022b90, 0x27037934, 0x260566b4, 0x250803c7, 0x240b6004, 0x230f8ae2, 0x221493b5, 0x211a89b0, 0x20217be0, 0x1f29792e, 0x1e32905a, 0x1d3ccfff, 0x1c48468e, 0x1b55024e, 0x1a63115b, 0x197281a5, 0x188360ef, 0x1795bccc, 0x16a9a2a1, 0x15bf1fa4, 0x14d640d8, 0x13ef130e, 0x1309a2e4, 0x1225fcc4, - 0x11442ce3, 0x10643f3f, 0x0f863fa3, 0x0eaa399d, 0x0dd03888, 0x0cf84782, 0x0c227171, 0x0b4ec101, 0x0a7d40a1, 0x09adfa86, 0x08e0f8a7, 0x081644be, 0x074de849, 0x0687ec84, 0x05c45a70, 0x05033acc, 0x04449618, 0x03887494, 0x02cede3e, 0x0217dad5, 0x016371d3, 0x00b1aa73, 0x00028bac, 0xff561c34, 0xfeac627c, 0xfe0564b3, 0xfd6128c4, 0xfcbfb457, 0xfc210cd1, 0xfb853752, 0xfaec38b4, 0xfa561591, 0xf9c2d23c, 0xf93272c4, 0xf8a4faf4, 0xf81a6e54, 0xf792d025, 0xf70e2367, 0xf68c6ad3, 0xf60da8df, 0xf591dfbf, 0xf5191161, 0xf4a33f70, 0xf4306b54, 0xf3c09632, 0xf353c0ed, 0xf2e9ec23, 0xf2831834, 0xf21f4539, 0xf1be730e, 0xf160a14c, 0xf105cf4a, 0xf0adfc22, 0xf05926ab, 0xf0074d7e, 0xefb86ef7, 0xef6c892f, 0xef239a07, 0xeedd9f1e, 0xee9a95d8, 0xee5a7b5f, 0xee1d4c9f, 0xede3064a, 0xedaba4d8, 0xed772488, 0xed458160, 0xed16b72d, 0xeceac186, 0xecc19bcc, 0xec9b4129, 0xec77ac93, 0xec56d8cb, 0xec38c05f, 0xec1d5dac, 0xec04aadb, 0xebeea1e6, 0xebdb3c96, 0xebca7487, 0xebbc4325, 0xebb0a1af, 0xeba78939, 0xeba0f2ab, 0xeb9cd6c3, 0xeb9b2e16, 0xeb9bf110, 0xeb9f17f7, 0xeba49ae9, 0xebac71df, 0xebb694b0, 0xebc2fb0d, 0xebd19c85, 0xebe27089, 0xebf56e66, 0xec0a8d4c, 0xec21c44d, 0xec3b0a5e, 0xec565658, 0xec739ef9, 0xec92dae5, 0xecb400aa, 0xecd706bc, 0xecfbe378, 0xed228d2a, 0xed4afa04, 0xed75202a, 0xeda0f5ab, 
0xedce7087, 0xedfd86ac, 0xee2e2dfd, 0xee605c4c, 0xee940760, 0xeec924f5, 0xeeffaabc, 0xef378e5d, 0xef70c579, 0xefab45a8, 0xefe7047f, 0xf023f78b, 0xf0621459, 0xf0a1506f, 0xf0e1a155, 0xf122fc90, 0xf16557a8, 0xf1a8a824, 0xf1ece390, 0xf231ff7a, 0xf277f176, 0xf2beaf1b, - 0xf3062e09, 0xf34e63e8, 0xf3974666, 0xf3e0cb3f, 0xf42ae835, 0xf4759318, 0xf4c0c1c3, 0xf50c6a21, 0xf5588229, 0xf5a4ffe1, 0xf5f1d961, 0xf63f04d0, 0xf68c7868, 0xf6da2a76, 0xf7281159, 0xf7762387, 0xf7c45788, 0xf812a3fd, 0xf860ff9b, 0xf8af6130, 0xf8fdbfa2, 0xf94c11f0, 0xf99a4f34, 0xf9e86ea0, 0xfa366783, 0xfa843147, 0xfad1c372, 0xfb1f15a8, 0xfb6c1fab, 0xfbb8d95a, 0xfc053ab4, 0xfc513bd7, 0xfc9cd501, 0xfce7fe91, 0xfd32b105, 0xfd7ce501, 0xfdc69347, 0xfe0fb4bc, 0xfe58426a, 0xfea0357e, 0xfee78746, 0xff2e3137, 0xff742ceb, 0xffb9741e, 0xfffe00b5, 0x0041ccb6, 0x0084d252, 0x00c70bdc, 0x010873cf, 0x014904ce, 0x0188b9a0, 0x01c78d36, 0x02057aa6, 0x02427d2e, 0x027e9035, 0x02b9af49, 0x02f3d61f, 0x032d0094, 0x03652aae, 0x039c509c, 0x03d26eb2, 0x0407816e, 0x043b8577, 0x046e779c, 0x04a054d1, 0x04d11a37, 0x0500c513, 0x052f52d5, 0x055cc111, 0x05890d88, 0x05b4361d, 0x05de38df, 0x06071402, 0x062ec5e2, 0x06554d01, 0x067aa809, 0x069ed5ca, 0x06c1d539, 0x06e3a573, 0x070445ba, 0x0723b575, 0x0741f431, 0x075f01a0, 0x077add97, 0x07958811, 0x07af012c, 0x07c7492a, 0x07de606f, 0x07f44784, 0x0808ff11, 0x081c87e3, 0x082ee2e6, 0x0840112b, 0x085013de, 0x085eec50, 0x086c9bf0, 0x0879244c, 0x08848710, 0x088ec607, 0x0897e31b, 0x089fe052, 0x08a6bfcd, 0x08ac83ce, 0x08b12eac, 0x08b4c2e0, 0x08b742f8, 0x08b8b19f, 0x08b91198, 0x08b865bf, 0x08b6b10a, 0x08b3f685, 0x08b03952, 0x08ab7cac, 0x08a5c3e1, 0x089f1258, 0x08976b89, 0x088ed303, 0x08854c65, 0x087adb64, 0x086f83c5, 0x0863495f, 0x0856301c, 0x08483bf4, 0x083970ee, 0x0829d322, 0x081966b5, 0x08082fdb, 0x07f632d4, - 0x07e373ec, 0x07cff77c, 0x07bbc1e7, 0x07a6d79d, 0x07913d14, 0x077af6ce, 0x07640956, 0x074c793d, 0x07344b1b, 0x071b8393, 0x07022749, 0x06e83ae8, 0x06cdc322, 0x06b2c4ac, 0x0697443c, 0x067b4690, 0x065ed064, 0x0641e678, 0x06248d8e, 0x0606ca65, 0x05e8a1c1, 0x05ca1862, 0x05ab3308, 0x058bf671, 0x056c675a, 0x054c8a7b, 0x052c648a, 0x050bfa3b, 0x04eb5039, 0x04ca6b2f, 0x04a94fbf, 0x04880288, 0x0466881f, 0x0444e515, 0x04231df2, 0x04013738, 0x03df355d, 0x03bd1cd3, 0x039af1fe, 0x0378b93c, 0x035676dd, 0x03342f2b, 0x0311e661, 0x02efa0b0, 0x02cd623e, 0x02ab2f23, 0x02890b6c, 0x0266fb19, 0x0245021b, 0x02232456, 0x020165a2, 0x01dfc9c5, 0x01be547a, 0x019d096b, 0x017bec31, 0x015b005a, 0x013a4960, 0x0119caaf, 0x00f987a2, 0x00d98384, 0x00b9c18c, 0x009a44e6, 0x007b10a6, 0x005c27d3, 0x003d8d61, 0x001f4432, 0x00014f15, 0xffe3b0c9, 0xffc66bf8, 0xffa9833b, 0xff8cf919, 0xff70d004, 0xff550a5e, 0xff39aa73, 0xff1eb27e, 0xff0424a6, 0xfeea02ff, 0xfed04f89, 0xfeb70c33, 0xfe9e3ad6, 0xfe85dd38, 0xfe6df50d, 0xfe5683f5, 0xfe3f8b7b, 0xfe290d18, 0xfe130a32, 0xfdfd841c, 0xfde87c13, 0xfdd3f344, 0xfdbfeac5, 0xfdac639e, 0xfd995ebf, 0xfd86dd07, 0xfd74df43, 0xfd63662b, 0xfd527267, 0xfd42048a, 0xfd321d16, 0xfd22bc79, 0xfd13e312, 0xfd05912a, 0xfcf7c6fb, 0xfcea84ac, 0xfcddca53, 0xfcd197f5, 0xfcc5ed84, 0xfcbacae2, 0xfcb02fe3, 0xfca61c45, 0xfc9c8fba, 0xfc9389e1, 0xfc8b0a4c, 0xfc83107b, 0xfc7b9bdf, 0xfc74abda, 0xfc6e3fbf, 0xfc6856d2, 0xfc62f049, 0xfc5e0b4b, 0xfc59a6f2, 0xfc55c249, 0xfc525c50, 0xfc4f73f6, 0xfc4d0822, 0xfc4b17a9, 0xfc49a159, 0xfc48a3ef, 0xfc481e21, - 0xfc480e96, 0xfc4873eb, 0xfc494cb3, 0xfc4a9776, 0xfc4c52b2, 0xfc4e7cd9, 0xfc511457, 0xfc54178c, 0xfc5784d0, 0xfc5b5a72, 0xfc5f96ba, 0xfc6437e5, 0xfc693c2c, 0xfc6ea1be, 0xfc7466c4, 0xfc7a895f, 0xfc8107ac, 
0xfc87dfbf, 0xfc8f0fa9, 0xfc969573, 0xfc9e6f23, 0xfca69ab8, 0xfcaf162e, 0xfcb7df7d, 0xfcc0f498, 0xfcca536e, 0xfcd3f9ed, 0xfcdde5fe, 0xfce81587, 0xfcf2866f, 0xfcfd3696, 0xfd0823de, 0xfd134c26, 0xfd1ead4c, 0xfd2a452e, 0xfd3611a8, 0xfd421096, 0xfd4e3fd4, 0xfd5a9d41, 0xfd6726b7, 0xfd73da17, 0xfd80b53d, 0xfd8db60c, 0xfd9ada65, 0xfda8202c, 0xfdb58547, 0xfdc3079e, 0xfdd0a51d, 0xfdde5bb2, 0xfdec294d, 0xfdfa0be4, 0xfe08016e, 0xfe1607e7, 0xfe241d50, 0xfe323fac, 0xfe406d04, 0xfe4ea365, 0xfe5ce0e3, 0xfe6b2393, 0xfe796993, 0xfe87b104, 0xfe95f80f, 0xfea43ce1, 0xfeb27dae, 0xfec0b8af, 0xfeceec25, 0xfedd1655, 0xfeeb358e, 0xfef94823, 0xff074c6f, 0xff1540d4, 0xff2323bc, 0xff30f397, 0xff3eaedd, 0xff4c540e, 0xff59e1b1, 0xff675655, 0xff74b091, 0xff81ef04, 0xff8f1056, 0xff9c1334, 0xffa8f658, 0xffb5b87f, 0xffc25874, 0xffced505, 0xffdb2d0d, 0xffe75f6c, 0xfff36b0d, 0xffff4ee4, 0x000b09ea, 0x00169b25, 0x002201a1, 0x002d3c75, 0x00384abe, 0x00432ba4, 0x004dde58, 0x00586211, 0x0062b611, 0x006cd9a4, 0x0076cc1c, 0x00808cd5, 0x008a1b34, 0x009376a6, 0x009c9ea1, 0x00a592a5, 0x00ae5238, 0x00b6dcea, 0x00bf3254, 0x00c75217, 0x00cf3bdd, 0x00d6ef55, 0x00de6c3c, 0x00e5b252, 0x00ecc163, 0x00f39941, 0x00fa39c5, 0x0100a2d4, 0x0106d456, 0x010cce3d, 0x01129084, 0x01181b2c, 0x011d6e3c, 0x012289c6, 0x01276de1, 0x012c1aab, 0x01309049, 0x0134cee8, 0x0138d6bc, - 0x013ca7ff, 0x014042f1, 0x0143a7d9, 0x0146d705, 0x0149d0c8, 0x014c957d, 0x014f2584, 0x01518141, 0x0153a922, 0x01559d96, 0x01575f14, 0x0158ee18, 0x015a4b23, 0x015b76bb, 0x015c716b, 0x015d3bc3, 0x015dd658, 0x015e41c1, 0x015e7e9d, 0x015e8d8e, 0x015e6f37, 0x015e2444, 0x015dad61, 0x015d0b3e, 0x015c3e92, 0x015b4812, 0x015a287a, 0x0158e089, 0x015770fe, 0x0155da9d, 0x01541e2e, 0x01523c78, 0x01503646, 0x014e0c67, 0x014bbfa8, 0x014950dc, 0x0146c0d5, 0x01441068, 0x0141406b, 0x013e51b5, 0x013b4520, 0x01381b84, 0x0134d5bd, 0x013174a6, 0x012df91b, 0x012a63f9, 0x0126b61d, 0x0122f065, 0x011f13ad, 0x011b20d4, 0x011718b7, 0x0112fc33, 0x010ecc25, 0x010a896b, 0x010634df, 0x0101cf5d, 0x00fd59bf, 0x00f8d4df, 0x00f44196, 0x00efa0ba, 0x00eaf323, 0x00e639a4, 0x00e17512, 0x00dca63d, 0x00d7cdf8, 0x00d2ed0f, 0x00ce044f, 0x00c91484, 0x00c41e76, 0x00bf22ec, 0x00ba22ab, 0x00b51e74, 0x00b01708, 0x00ab0d25, 0x00a60185, 0x00a0f4e1, 0x009be7f0, 0x0096db64, 0x0091cfed, 0x008cc63b, 0x0087bef8, 0x0082bacb, 0x007dba5a, 0x0078be46, 0x0073c72f, 0x006ed5b1, 0x0069ea64, 0x006505de, 0x006028b0, 0x005b536b, 0x00568698, 0x0051c2c2, 0x004d086c, 0x00485818, 0x0043b244, 0x003f176c, 0x003a8805, 0x00360484, 0x00318d58, 0x002d22ef, 0x0028c5b2, 0x00247606, 0x0020344d, 0x001c00e5, 0x0017dc2b, 0x0013c674, 0x000fc015, 0x000bc95f, 0x0007e29d, 0x00040c1a, 0x0000461a, 0xfffc90e1, 0xfff8ecac, 0xfff559b7, 0xfff1d83a, 0xffee686a, 0xffeb0a76, 0xffe7be8d, 0xffe484d8, 0xffe15d7f, 0xffde48a4, 0xffdb4669, 0xffd856ea, 0xffd57a40, 0xffd2b083, 0xffcff9c5, 0xffcd5617, 0xffcac586, - 0xffc8481c, 0xffc5dde0, 0xffc386d6, 0xffc142ff, 0xffbf125a, 0xffbcf4e3, 0xffbaea92, 0xffb8f35c, 0xffb70f36, 0xffb53e10, 0xffb37fd9, 0xffb1d47b, 0xffb03be0, 0xffaeb5ee, 0xffad428a, 0xffabe196, 0xffaa92f1, 0xffa95679, 0xffa82c09, 0xffa7137b, 0xffa60ca5, 0xffa5175d, 0xffa43376, 0xffa360c1, 0xffa29f0e, 0xffa1ee2b, 0xffa14de4, 0xffa0be03, 0xffa03e51, 0xff9fce96, 0xff9f6e98, 0xff9f1e1a, 0xff9edce1, 0xff9eaaae, 0xff9e8741, 0xff9e725b, 0xff9e6bb9, 0xff9e7319, 0xff9e8837, 0xff9eaace, 0xff9eda9a, 0xff9f1753, 0xff9f60b3, 0xff9fb672, 0xffa01848, 0xffa085ea, 0xffa0ff11, 0xffa18371, 0xffa212c1, 0xffa2acb5, 0xffa35102, 0xffa3ff5d, 0xffa4b779, 0xffa5790c, 0xffa643ca, 0xffa71765, 
0xffa7f392, 0xffa8d805, 0xffa9c472, 0xffaab88d, 0xffabb40a, 0xffacb69e, 0xffadbffe, 0xffaecfde, 0xffafe5f5, 0xffb101f7, 0xffb2239b, 0xffb34a98, 0xffb476a5, 0xffb5a779, 0xffb6dccd, 0xffb8165a, 0xffb953d8, 0xffba9503, 0xffbbd996, 0xffbd214b, 0xffbe6bdf, 0xffbfb910, 0xffc1089c, 0xffc25a41, 0xffc3adbf, 0xffc502d8, 0xffc6594c, 0xffc7b0df, 0xffc90953, 0xffca626e, 0xffcbbbf5, 0xffcd15b0, 0xffce6f64, 0xffcfc8dd, 0xffd121e3, 0xffd27a41, 0xffd3d1c4, 0xffd52839, 0xffd67d6f, 0xffd7d135, 0xffd9235c, 0xffda73b6, 0xffdbc216, 0xffdd0e51, 0xffde583d, 0xffdf9fb0, 0xffe0e483, 0xffe2268e, 0xffe365ad, 0xffe4a1bb, 0xffe5da95, 0xffe7101a, 0xffe84229, 0xffe970a2, 0xffea9b69, 0xffebc25f, 0xffece56a, 0xffee046f, 0xffef1f56, 0xfff03605, 0xfff14867, 0xfff25666, 0xfff35fee, 0xfff464ec, 0xfff5654e, 0xfff66102, 0xfff757fa, 0xfff84a27, 0xfff9377c, 0xfffa1fed, 0xfffb036e, 0xfffbe1f5, - 0xfffcbb7a, 0xfffd8ff5, 0xfffe5f5f, 0xffff29b1, 0xffffeee9, 0x0000af01, 0x000169f7, 0x00021fca, 0x0002d077, 0x00037c01, 0x00042267, 0x0004c3ab, 0x00055fd1, 0x0005f6dc, 0x000688d1, 0x000715b4, 0x00079d8d, 0x00082061, 0x00089e39, 0x0009171e, 0x00098b17, 0x0009fa30, 0x000a6472, 0x000ac9ea, 0x000b2aa2, 0x000b86a7, 0x000bde06, 0x000c30cd, 0x000c7f0a, 0x000cc8cc, 0x000d0e21, 0x000d4f1a, 0x000d8bc7, 0x000dc438, 0x000df87f, 0x000e28ac, 0x000e54d2, 0x000e7d03, 0x000ea151, 0x000ec1cf, 0x000ede91, 0x000ef7a9, 0x000f0d2c, 0x000f1f2d, 0x000f2dc1, 0x000f38fb, 0x000f40f1, 0x000f45b7, 0x000f4762, 0x000f4606, 0x000f41ba, 0x000f3a91, 0x000f30a2, 0x000f2401, 0x000f14c4, 0x000f0300, 0x000eeeca, 0x000ed838, 0x000ebf60, 0x000ea455, 0x000e872d, 0x000e67fe, 0x000e46dd, 0x000e23dd, 0x000dff15, 0x000dd898, 0x000db07b, 0x000d86d3, 0x000d5bb3, 0x000d2f30, 0x000d015d, 0x000cd24d, 0x000ca215, 0x000c70c7, 0x000c3e76, 0x000c0b34, 0x000bd715, 0x000ba229, 0x000b6c83, 0x000b3634, 0x000aff4e, 0x000ac7e1, 0x000a8ffe, 0x000a57b5, 0x000a1f17, 0x0009e632, 0x0009ad16, 0x000973d2, 0x00093a74, 0x0009010b, 0x0008c7a5, 0x00088e4f, 0x00085517, 0x00081c09, 0x0007e331, 0x0007aa9c, 0x00077255, 0x00073a68, 0x000702e0, 0x0006cbc7, 0x00069527, 0x00065f0b, 0x0006297b, 0x0005f481, 0x0005c026, 0x00058c71, 0x0005596b, 0x0005271b, 0x0004f588, 0x0004c4ba, 0x000494b7, 0x00046585, 0x00043729, 0x000409aa, 0x0003dd0b, 0x0003b152, 0x00038683, 0x00035ca3, 0x000333b4, 0x00030bba, 0x0002e4b8, 0x0002beb1, 0x000299a7, 0x0002759c, 0x00025292, 0x0002308a, 0x00020f86, 0x0001ef85, + 0x6d374bc7, 0x6d35278a, 0x6d2ebafe, 0x6d24069d, 0x6d150b35, 0x6d01c9e3, 0x6cea4418, 0x6cce7b97, 0x6cae7272, 0x6c8a2b0f, 0x6c61a823, 0x6c34ecb5, 0x6c03fc1c, 0x6bced9ff, 0x6b958a54, 0x6b581163, 0x6b1673c1, 0x6ad0b652, 0x6a86de48, 0x6a38f123, 0x69e6f4b1, 0x6990ef0b, 0x6936e697, 0x68d8e206, 0x6876e855, 0x681100c9, 0x67a732f4, 0x673986ac, 0x66c80413, 0x6652b392, 0x65d99dd5, 0x655ccbd3, 0x64dc46c3, 0x64581823, 0x63d049b4, 0x6344e578, 0x62b5f5b2, 0x622384e8, 0x618d9ddc, 0x60f44b91, 0x60579947, 0x5fb79278, 0x5f1442dc, 0x5e6db665, 0x5dc3f93c, 0x5d1717c4, 0x5c671e96, 0x5bb41a80, 0x5afe1886, 0x5a4525df, 0x59894ff3, 0x58caa45b, 0x580930e1, 0x5745037c, 0x567e2a51, 0x55b4b3af, 0x54e8ae13, 0x541a281e, 0x5349309e, 0x5275d684, 0x51a028e8, 0x50c83704, 0x4fee1037, 0x4f11c3fe, 0x4e3361f7, 0x4d52f9df, 0x4c709b8e, 0x4b8c56f8, 0x4aa63c2c, 0x49be5b50, 0x48d4c4a2, 0x47e98874, 0x46fcb72d, 0x460e6148, 0x451e9750, 0x442d69de, 0x433ae99c, 0x4247273f, 0x41523389, 0x405c1f43, 0x3f64fb40, 0x3e6cd85b, 0x3d73c772, 0x3c79d968, 0x3b7f1f23, 0x3a83a989, 0x3987897f, 0x388acfe9, 0x378d8da8, 0x368fd397, 0x3591b28b, 0x34933b50, 0x33947eab, 0x32958d55, 0x319677fa, 
0x30974f3b, 0x2f9823a8, 0x2e9905c1, 0x2d9a05f4, 0x2c9b349e, 0x2b9ca203, 0x2a9e5e57, 0x29a079b2, 0x28a30416, 0x27a60d6a, 0x26a9a57b, 0x25addbf9, 0x24b2c075, 0x23b86263, 0x22bed116, 0x21c61bc0, 0x20ce516f, 0x1fd7810f, 0x1ee1b965, 0x1ded0911, 0x1cf97e8b, 0x1c072823, 0x1b1613ff, 0x1a26501b, 0x1937ea47, 0x184af025, 0x175f6f2b, 0x1675749e, 0x158d0d95, 0x14a646f6, 0x13c12d73, 0x12ddcd8f, 0x11fc3395, + 0x111c6ba0, 0x103e8192, 0x0f62811a, 0x0e8875ad, 0x0db06a89, 0x0cda6ab5, 0x0c0680fe, 0x0b34b7f5, 0x0a6519f4, 0x0997b116, 0x08cc873c, 0x0803a60a, 0x073d16e7, 0x0678e2fc, 0x05b71332, 0x04f7b037, 0x043ac276, 0x0380521c, 0x02c86715, 0x0213090c, 0x01603f6e, 0x00b01162, 0x000285d0, 0xff57a35e, 0xfeaf706f, 0xfe09f323, 0xfd673159, 0xfcc730aa, 0xfc29f670, 0xfb8f87bd, 0xfaf7e963, 0xfa631fef, 0xf9d12fab, 0xf9421c9d, 0xf8b5ea87, 0xf82c9ce7, 0xf7a636fa, 0xf722bbb5, 0xf6a22dcf, 0xf6248fb6, 0xf5a9e398, 0xf5322b61, 0xf4bd68b6, 0xf44b9cfe, 0xf3dcc959, 0xf370eea9, 0xf3080d8c, 0xf2a2265e, 0xf23f393b, 0xf1df45fd, 0xf1824c3e, 0xf1284b58, 0xf0d14267, 0xf07d3043, 0xf02c138a, 0xefddea9a, 0xef92b393, 0xef4a6c58, 0xef051290, 0xeec2a3a3, 0xee831cc3, 0xee467ae1, 0xee0cbab9, 0xedd5d8ca, 0xeda1d15c, 0xed70a07d, 0xed424205, 0xed16b196, 0xecedea99, 0xecc7e845, 0xeca4a59b, 0xec841d68, 0xec664a48, 0xec4b26a2, 0xec32acb0, 0xec1cd677, 0xec099dcf, 0xebf8fc64, 0xebeaebaf, 0xebdf6500, 0xebd6617b, 0xebcfda19, 0xebcbc7a7, 0xebca22cc, 0xebcae405, 0xebce03aa, 0xebd379eb, 0xebdb3ed5, 0xebe54a4f, 0xebf1941f, 0xec0013e8, 0xec10c12c, 0xec23934f, 0xec388194, 0xec4f8322, 0xec688f02, 0xec839c22, 0xeca0a156, 0xecbf9558, 0xece06ecb, 0xed032439, 0xed27ac16, 0xed4dfcc2, 0xed760c88, 0xed9fd1a2, 0xedcb4237, 0xedf8545b, 0xee26fe17, 0xee573562, 0xee88f026, 0xeebc2444, 0xeef0c78d, 0xef26cfca, 0xef5e32bd, 0xef96e61c, 0xefd0df9a, 0xf00c14e1, 0xf0487b98, 0xf0860962, 0xf0c4b3e0, 0xf10470b0, 0xf1453571, 0xf186f7c0, 0xf1c9ad40, 0xf20d4b92, 0xf251c85d, 0xf297194d, 0xf2dd3411, + 0xf3240e61, 0xf36b9dfd, 0xf3b3d8ac, 0xf3fcb43e, 0xf4462690, 0xf4902587, 0xf4daa718, 0xf525a143, 0xf5710a17, 0xf5bcd7b1, 0xf609003f, 0xf6557a00, 0xf6a23b44, 0xf6ef3a6e, 0xf73c6df4, 0xf789cc61, 0xf7d74c53, 0xf824e480, 0xf8728bb3, 0xf8c038d0, 0xf90de2d1, 0xf95b80cb, 0xf9a909ea, 0xf9f67577, 0xfa43bad2, 0xfa90d17b, 0xfaddb10c, 0xfb2a513b, 0xfb76a9dd, 0xfbc2b2e4, 0xfc0e6461, 0xfc59b685, 0xfca4a19f, 0xfcef1e20, 0xfd392498, 0xfd82adba, 0xfdcbb25a, 0xfe142b6e, 0xfe5c120f, 0xfea35f79, 0xfeea0d0c, 0xff30144a, 0xff756edc, 0xffba168d, 0xfffe054e, 0x00413536, 0x0083a081, 0x00c54190, 0x010612eb, 0x01460f41, 0x01853165, 0x01c37452, 0x0200d32c, 0x023d493c, 0x0278d1f2, 0x02b368e6, 0x02ed09d7, 0x0325b0ad, 0x035d5977, 0x0394006a, 0x03c9a1e5, 0x03fe3a6f, 0x0431c6b5, 0x0464438c, 0x0495adf2, 0x04c6030d, 0x04f54029, 0x052362ba, 0x0550685d, 0x057c4ed4, 0x05a7140b, 0x05d0b612, 0x05f93324, 0x0620899e, 0x0646b808, 0x066bbd0d, 0x068f9781, 0x06b2465b, 0x06d3c8bb, 0x06f41de3, 0x0713453d, 0x07313e56, 0x074e08e0, 0x0769a4b2, 0x078411c7, 0x079d503b, 0x07b56051, 0x07cc426c, 0x07e1f712, 0x07f67eec, 0x0809dac3, 0x081c0b84, 0x082d1239, 0x083cf010, 0x084ba654, 0x08593671, 0x0865a1f1, 0x0870ea7e, 0x087b11de, 0x088419f6, 0x088c04c8, 0x0892d470, 0x08988b2a, 0x089d2b4a, 0x08a0b740, 0x08a33196, 0x08a49cf0, 0x08a4fc0d, 0x08a451c0, 0x08a2a0f8, 0x089fecbb, 0x089c3824, 0x08978666, 0x0891dac8, 0x088b38a9, 0x0883a378, 0x087b1ebc, 0x0871ae0d, 0x08675516, 0x085c1794, 0x084ff957, 0x0842fe3d, 0x08352a35, 0x0826813e, 0x08170767, 0x0806c0cb, 0x07f5b193, 0x07e3ddf7, + 0x07d14a38, 0x07bdfaa5, 0x07a9f399, 0x07953976, 0x077fd0ac, 0x0769bdaf, 
0x07530501, 0x073bab28, 0x0723b4b4, 0x070b2639, 0x06f20453, 0x06d853a2, 0x06be18cd, 0x06a3587e, 0x06881761, 0x066c5a27, 0x06502583, 0x06337e2a, 0x061668d2, 0x05f8ea30, 0x05db06fc, 0x05bcc3ed, 0x059e25b5, 0x057f310a, 0x055fea9d, 0x0540571a, 0x05207b2f, 0x05005b82, 0x04dffcb6, 0x04bf6369, 0x049e9433, 0x047d93a8, 0x045c6654, 0x043b10bd, 0x04199760, 0x03f7feb4, 0x03d64b27, 0x03b4811d, 0x0392a4f4, 0x0370bafc, 0x034ec77f, 0x032ccebb, 0x030ad4e1, 0x02e8de19, 0x02c6ee7f, 0x02a50a22, 0x02833506, 0x02617321, 0x023fc85c, 0x021e3891, 0x01fcc78f, 0x01db7914, 0x01ba50d2, 0x0199526b, 0x01788170, 0x0157e166, 0x013775bf, 0x011741df, 0x00f7491a, 0x00d78eb3, 0x00b815da, 0x0098e1b3, 0x0079f54c, 0x005b53a4, 0x003cffa9, 0x001efc35, 0x00014c12, 0xffe3f1f7, 0xffc6f08a, 0xffaa4a5d, 0xff8e01f1, 0xff7219b3, 0xff5693fe, 0xff3b731b, 0xff20b93e, 0xff066889, 0xfeec830d, 0xfed30ac5, 0xfeba0199, 0xfea16960, 0xfe8943dc, 0xfe7192bd, 0xfe5a579d, 0xfe439407, 0xfe2d496f, 0xfe177937, 0xfe0224b0, 0xfded4d13, 0xfdd8f38b, 0xfdc5192d, 0xfdb1befc, 0xfd9ee5e7, 0xfd8c8ecc, 0xfd7aba74, 0xfd696998, 0xfd589cdc, 0xfd4854d3, 0xfd3891fd, 0xfd2954c8, 0xfd1a9d91, 0xfd0c6ca2, 0xfcfec233, 0xfcf19e6b, 0xfce50161, 0xfcd8eb17, 0xfccd5b82, 0xfcc25285, 0xfcb7cff0, 0xfcadd386, 0xfca45cf7, 0xfc9b6be5, 0xfc92ffe1, 0xfc8b186d, 0xfc83b4fc, 0xfc7cd4f0, 0xfc76779e, 0xfc709c4d, 0xfc6b4233, 0xfc66687a, 0xfc620e3d, 0xfc5e328c, 0xfc5ad465, 0xfc57f2be, 0xfc558c7c, 0xfc53a07b, 0xfc522d88, 0xfc513266, 0xfc50adcc, + 0xfc509e64, 0xfc5102d0, 0xfc51d9a6, 0xfc53216f, 0xfc54d8ae, 0xfc56fdda, 0xfc598f60, 0xfc5c8ba5, 0xfc5ff105, 0xfc63bdd3, 0xfc67f05a, 0xfc6c86dd, 0xfc717f97, 0xfc76d8bc, 0xfc7c9079, 0xfc82a4f4, 0xfc89144d, 0xfc8fdc9f, 0xfc96fbfc, 0xfc9e7074, 0xfca63810, 0xfcae50d6, 0xfcb6b8c4, 0xfcbf6dd8, 0xfcc86e09, 0xfcd1b74c, 0xfcdb4793, 0xfce51ccb, 0xfcef34e1, 0xfcf98dbe, 0xfd04254a, 0xfd0ef969, 0xfd1a0801, 0xfd254ef4, 0xfd30cc24, 0xfd3c7d73, 0xfd4860c2, 0xfd5473f3, 0xfd60b4e7, 0xfd6d2180, 0xfd79b7a1, 0xfd86752e, 0xfd93580d, 0xfda05e23, 0xfdad855b, 0xfdbacb9e, 0xfdc82edb, 0xfdd5ad01, 0xfde34403, 0xfdf0f1d6, 0xfdfeb475, 0xfe0c89db, 0xfe1a7009, 0xfe286505, 0xfe3666d5, 0xfe447389, 0xfe528931, 0xfe60a5e5, 0xfe6ec7c0, 0xfe7cece2, 0xfe8b1373, 0xfe99399f, 0xfea75d97, 0xfeb57d92, 0xfec397cf, 0xfed1aa92, 0xfedfb425, 0xfeedb2da, 0xfefba508, 0xff09890f, 0xff175d53, 0xff252042, 0xff32d04f, 0xff406bf8, 0xff4df1be, 0xff5b602c, 0xff68b5d5, 0xff75f153, 0xff831148, 0xff90145e, 0xff9cf947, 0xffa9bebe, 0xffb66386, 0xffc2e669, 0xffcf463a, 0xffdb81d6, 0xffe79820, 0xfff38806, 0xffff507b, 0x000af07f, 0x00166718, 0x0021b355, 0x002cd44d, 0x0037c922, 0x004290fc, 0x004d2b0e, 0x00579691, 0x0061d2ca, 0x006bdf05, 0x0075ba95, 0x007f64da, 0x0088dd38, 0x0092231e, 0x009b3605, 0x00a4156b, 0x00acc0da, 0x00b537e1, 0x00bd7a1c, 0x00c5872a, 0x00cd5eb7, 0x00d50075, 0x00dc6c1e, 0x00e3a175, 0x00eaa045, 0x00f16861, 0x00f7f9a3, 0x00fe53ef, 0x0104772e, 0x010a6353, 0x01101858, 0x0115963d, 0x011add0b, 0x011fecd3, 0x0124c5ab, 0x012967b1, 0x012dd30a, 0x013207e4, 0x01360670, + 0x0139cee9, 0x013d618d, 0x0140bea5, 0x0143e67c, 0x0146d965, 0x014997bb, 0x014c21db, 0x014e782a, 0x01509b14, 0x01528b08, 0x0154487b, 0x0155d3e8, 0x01572dcf, 0x015856b6, 0x01594f25, 0x015a17ab, 0x015ab0db, 0x015b1b4e, 0x015b579e, 0x015b666c, 0x015b485b, 0x015afe14, 0x015a8843, 0x0159e796, 0x01591cc0, 0x01582878, 0x01570b77, 0x0155c678, 0x01545a3c, 0x0152c783, 0x01510f13, 0x014f31b2, 0x014d3029, 0x014b0b45, 0x0148c3d2, 0x01465a9f, 0x0143d07f, 0x01412643, 0x013e5cc0, 0x013b74ca, 0x01386f3a, 0x01354ce7, 0x01320ea9, 0x012eb55a, 0x012b41d3, 
0x0127b4f1, 0x01240f8e, 0x01205285, 0x011c7eb2, 0x011894f0, 0x0114961b, 0x0110830f, 0x010c5ca6, 0x010823ba, 0x0103d927, 0x00ff7dc4, 0x00fb126b, 0x00f697f3, 0x00f20f32, 0x00ed78ff, 0x00e8d62d, 0x00e4278f, 0x00df6df7, 0x00daaa34, 0x00d5dd16, 0x00d10769, 0x00cc29f7, 0x00c7458a, 0x00c25ae8, 0x00bd6ad7, 0x00b87619, 0x00b37d70, 0x00ae8198, 0x00a9834e, 0x00a4834c, 0x009f8249, 0x009a80f8, 0x0095800c, 0x00908034, 0x008b821b, 0x0086866b, 0x00818dcb, 0x007c98de, 0x0077a845, 0x0072bc9d, 0x006dd680, 0x0068f687, 0x00641d44, 0x005f4b4a, 0x005a8125, 0x0055bf60, 0x00510682, 0x004c570f, 0x0047b186, 0x00431666, 0x003e8628, 0x003a0141, 0x00358824, 0x00311b41, 0x002cbb03, 0x002867d2, 0x00242213, 0x001fea27, 0x001bc06b, 0x0017a53b, 0x001398ec, 0x000f9bd2, 0x000bae3c, 0x0007d075, 0x000402c8, 0x00004579, 0xfffc98c9, 0xfff8fcf7, 0xfff5723d, 0xfff1f8d2, 0xffee90eb, 0xffeb3ab8, 0xffe7f666, 0xffe4c41e, 0xffe1a408, 0xffde9646, 0xffdb9af8, 0xffd8b23b, 0xffd5dc28, 0xffd318d6, 0xffd06858, 0xffcdcabe, 0xffcb4014, + 0xffc8c866, 0xffc663b9, 0xffc41212, 0xffc1d373, 0xffbfa7d9, 0xffbd8f40, 0xffbb89a1, 0xffb996f3, 0xffb7b728, 0xffb5ea31, 0xffb42ffc, 0xffb28876, 0xffb0f388, 0xffaf7118, 0xffae010b, 0xffaca344, 0xffab57a1, 0xffaa1e02, 0xffa8f641, 0xffa7e039, 0xffa6dbc0, 0xffa5e8ad, 0xffa506d2, 0xffa43603, 0xffa3760e, 0xffa2c6c2, 0xffa227ec, 0xffa19957, 0xffa11acb, 0xffa0ac11, 0xffa04cf0, 0xff9ffd2c, 0xff9fbc89, 0xff9f8ac9, 0xff9f67ae, 0xff9f52f7, 0xff9f4c65, 0xff9f53b4, 0xff9f68a1, 0xff9f8ae9, 0xff9fba47, 0xff9ff674, 0xffa03f2b, 0xffa09425, 0xffa0f519, 0xffa161bf, 0xffa1d9cf, 0xffa25cfe, 0xffa2eb04, 0xffa38395, 0xffa42668, 0xffa4d332, 0xffa589a6, 0xffa6497c, 0xffa71266, 0xffa7e41a, 0xffa8be4c, 0xffa9a0b1, 0xffaa8afe, 0xffab7ce7, 0xffac7621, 0xffad7662, 0xffae7d5f, 0xffaf8acd, 0xffb09e63, 0xffb1b7d8, 0xffb2d6e1, 0xffb3fb37, 0xffb52490, 0xffb652a7, 0xffb78533, 0xffb8bbed, 0xffb9f691, 0xffbb34d8, 0xffbc767f, 0xffbdbb42, 0xffbf02dd, 0xffc04d0f, 0xffc19996, 0xffc2e832, 0xffc438a3, 0xffc58aaa, 0xffc6de09, 0xffc83285, 0xffc987e0, 0xffcadde1, 0xffcc344c, 0xffcd8aeb, 0xffcee183, 0xffd037e0, 0xffd18dcc, 0xffd2e311, 0xffd4377d, 0xffd58ade, 0xffd6dd02, 0xffd82dba, 0xffd97cd6, 0xffdaca2a, 0xffdc1588, 0xffdd5ec6, 0xffdea5bb, 0xffdfea3c, 0xffe12c22, 0xffe26b48, 0xffe3a788, 0xffe4e0bf, 0xffe616c8, 0xffe74984, 0xffe878d3, 0xffe9a494, 0xffeaccaa, 0xffebf0fa, 0xffed1166, 0xffee2dd7, 0xffef4632, 0xfff05a60, 0xfff16a4a, 0xfff275db, 0xfff37d00, 0xfff47fa5, 0xfff57db8, 0xfff67729, 0xfff76be9, 0xfff85be8, 0xfff9471b, 0xfffa2d74, 0xfffb0ee9, 0xfffbeb70, + 0xfffcc300, 0xfffd9592, 0xfffe631e, 0xffff2b9f, 0xffffef10, 0x0000ad6e, 0x000166b6, 0x00021ae5, 0x0002c9fd, 0x000373fb, 0x000418e2, 0x0004b8b3, 0x00055371, 0x0005e921, 0x000679c5, 0x00070564, 0x00078c04, 0x00080dab, 0x00088a62, 0x00090230, 0x0009751e, 0x0009e337, 0x000a4c85, 0x000ab112, 0x000b10ec, 0x000b6c1d, 0x000bc2b3, 0x000c14bb, 0x000c6244, 0x000cab5c, 0x000cf012, 0x000d3075, 0x000d6c97, 0x000da486, 0x000dd854, 0x000e0812, 0x000e33d3, 0x000e5ba7, 0x000e7fa1, 0x000e9fd5, 0x000ebc54, 0x000ed533, 0x000eea84, 0x000efc5c, 0x000f0ace, 0x000f15ef, 0x000f1dd2, 0x000f228d, 0x000f2434, 0x000f22dc, 0x000f1e99, 0x000f1781, 0x000f0da8, 0x000f0125, 0x000ef20b, 0x000ee070, 0x000ecc69, 0x000eb60b, 0x000e9d6b, 0x000e829e, 0x000e65ba, 0x000e46d3, 0x000e25fd, 0x000e034f, 0x000ddedb, 0x000db8b7, 0x000d90f6, 0x000d67ae, 0x000d3cf1, 0x000d10d5, 0x000ce36b, 0x000cb4c8, 0x000c84ff, 0x000c5422, 0x000c2245, 0x000bef79, 0x000bbbd2, 0x000b8760, 0x000b5235, 0x000b1c64, 0x000ae5fc, 0x000aaf0f, 0x000a77ac, 0x000a3fe5, 
0x000a07c9, 0x0009cf67, 0x000996ce, 0x00095e0e, 0x00092535, 0x0008ec50, 0x0008b36e, 0x00087a9c, 0x000841e8, 0x0008095d, 0x0007d108, 0x000798f5, 0x00076130, 0x000729c4, 0x0006f2bb, 0x0006bc21, 0x000685ff, 0x0006505f, 0x00061b4b, 0x0005e6cb, 0x0005b2e8, 0x00057faa, 0x00054d1a, 0x00051b3e, 0x0004ea1d, 0x0004b9c0, 0x00048a2b, 0x00045b65, 0x00042d74, 0x0004005e, 0x0003d426, 0x0003a8d2, 0x00037e65, 0x000354e5, 0x00032c54, 0x000304b7, 0x0002de0e, 0x0002b85f, 0x000293aa, 0x00026ff2, 0x00024d39, 0x00022b7f, 0x00020ac7, 0x0001eb10, 0x00000000 // this one is needed for lerping the last coefficient }; /* * These coefficients are optimized for 48KHz -> 44.1KHz - * cmd-line: fir -v 0.3 -l 7 -s 48000 -c 16600 + * cmd-line: fir -l 7 -s 48000 -c 16600 */ const int32_t AudioResamplerSinc::mFirCoefsDown[] = { - 0x6e350b14, 0x6e3396e0, 0x6e2f3a59, 0x6e27f5b7, 0x6e1dc95c, 0x6e10b5cc, 0x6e00bbb6, 0x6deddbeb, 0x6dd81765, 0x6dbf6f44, 0x6da3e4cd, 0x6d85796c, 0x6d642eb2, 0x6d400656, 0x6d190235, 0x6cef2453, 0x6cc26ed6, 0x6c92e40c, 0x6c608668, 0x6c2b5882, 0x6bf35d14, 0x6bb89701, 0x6b7b094e, 0x6b3ab724, 0x6af7a3d1, 0x6ab1d2c7, 0x6a69479a, 0x6a1e0603, 0x69d011df, 0x697f6f2a, 0x692c2207, 0x68d62eb9, 0x687d99a6, 0x68226753, 0x67c49c6c, 0x67643db8, 0x67015023, 0x669bd8b9, 0x6633dca5, 0x65c96131, 0x655c6bca, 0x64ed01f9, 0x647b2967, 0x6406e7dc, 0x6390433c, 0x6317418a, 0x629be8e8, 0x621e3f92, 0x619e4be2, 0x611c144d, 0x60979f66, 0x6010f3d8, 0x5f88186c, 0x5efd1402, 0x5e6fed97, 0x5de0ac3f, 0x5d4f5729, 0x5cbbf59c, 0x5c268ef6, 0x5b8f2aad, 0x5af5d04f, 0x5a5a8780, 0x59bd57fa, 0x591e498d, 0x587d641d, 0x57daafa5, 0x57363432, 0x568ff9e4, 0x55e808f2, 0x553e69a1, 0x5493244a, 0x53e64158, 0x5337c946, 0x5287c4a0, 0x51d63c02, 0x51233816, 0x506ec197, 0x4fb8e14d, 0x4f01a00d, 0x4e4906ba, 0x4d8f1e43, 0x4cd3efa4, 0x4c1783e5, 0x4b59e415, 0x4a9b1952, 0x49db2cc2, 0x491a2792, 0x485812fb, 0x4794f83d, 0x46d0e09d, 0x460bd56a, 0x4545dff9, 0x447f09a2, 0x43b75bc5, 0x42eedfc5, 0x42259f0a, 0x415ba2fe, 0x4090f510, 0x3fc59eaf, 0x3ef9a94d, 0x3e2d1e5c, 0x3d600751, 0x3c926d9d, 0x3bc45ab4, 0x3af5d807, 0x3a26ef05, 0x3957a91c, 0x38880fb6, 0x37b82c3b, 0x36e8080c, 0x3617ac89, 0x3547230a, 0x347674e3, 0x33a5ab62, 0x32d4cfcc, 0x3203eb61, 0x31330758, 0x30622ce0, 0x2f91651f, 0x2ec0b931, 0x2df03228, 0x2d1fd90d, 0x2c4fb6dd, 0x2b7fd488, 0x2ab03af3, 0x29e0f2f8, 0x29120562, 0x28437aef, - 0x27755c4f, 0x26a7b223, 0x25da84fc, 0x250ddd60, 0x2441c3bf, 0x2376407c, 0x22ab5be9, 0x21e11e45, 0x21178fc0, 0x204eb874, 0x1f86a06c, 0x1ebf4f9d, 0x1df8cdea, 0x1d332322, 0x1c6e56ff, 0x1baa7127, 0x1ae77929, 0x1a257682, 0x19647094, 0x18a46eb0, 0x17e5780d, 0x172793cc, 0x166ac8f6, 0x15af1e7c, 0x14f49b39, 0x143b45ed, 0x13832540, 0x12cc3fc3, 0x12169bea, 0x11624012, 0x10af327e, 0x0ffd7955, 0x0f4d1aa4, 0x0e9e1c5e, 0x0df0845b, 0x0d445856, 0x0c999df2, 0x0bf05ab1, 0x0b4893fd, 0x0aa24f23, 0x09fd9152, 0x095a5f9d, 0x08b8befc, 0x0818b447, 0x077a443b, 0x06dd7376, 0x0642467a, 0x05a8c1a9, 0x0510e949, 0x047ac182, 0x03e64e5e, 0x035393c6, 0x02c29588, 0x02335752, 0x01a5dcb3, 0x011a291e, 0x00903fe2, 0x00082435, 0xff81d92b, 0xfefd61b9, 0xfe7ac0b7, 0xfdf9f8da, 0xfd7b0cbe, 0xfcfdfeda, 0xfc82d18a, 0xfc098709, 0xfb922173, 0xfb1ca2c7, 0xfaa90ce2, 0xfa376185, 0xf9c7a24e, 0xf959d0c2, 0xf8edee41, 0xf883fc11, 0xf81bfb56, 0xf7b5ed19, 0xf751d240, 0xf6efab96, 0xf68f79c7, 0xf6313d60, 0xf5d4f6d0, 0xf57aa669, 0xf5224c5f, 0xf4cbe8c7, 0xf4777b9a, 0xf42504b2, 0xf3d483cf, 0xf385f890, 0xf339627b, 0xf2eec0f5, 0xf2a6134a, 0xf25f58aa, 0xf21a9025, 0xf1d7b8b4, 0xf196d132, 0xf157d85d, 0xf11accdc, 0xf0dfad37, 0xf0a677de, 0xf06f2b25, 0xf039c548, 
0xf0064466, 0xefd4a686, 0xefa4e997, 0xef770b6c, 0xef4b09c2, 0xef20e23d, 0xeef89266, 0xeed217b2, 0xeead6f7d, 0xee8a9709, 0xee698b87, 0xee4a4a0b, 0xee2ccf97, 0xee111915, 0xedf7235b, 0xeddeeb29, 0xedc86d28, 0xedb3a5ef, 0xeda09201, 0xed8f2dcb, 0xed7f75a8, 0xed7165e0, 0xed64faa7, 0xed5a301f, 0xed510259, 0xed496d53, 0xed436cfb, - 0xed3efd2c, 0xed3c19b3, 0xed3abe4b, 0xed3ae6a1, 0xed3c8e50, 0xed3fb0e7, 0xed4449e5, 0xed4a54ba, 0xed51ccca, 0xed5aad6a, 0xed64f1e4, 0xed709574, 0xed7d934a, 0xed8be68a, 0xed9b8a4d, 0xedac79a2, 0xedbeaf8c, 0xedd22705, 0xede6dafd, 0xedfcc65a, 0xee13e3f9, 0xee2c2eb1, 0xee45a14f, 0xee603697, 0xee7be949, 0xee98b41b, 0xeeb691c0, 0xeed57ce1, 0xeef57025, 0xef16662c, 0xef385992, 0xef5b44ed, 0xef7f22d2, 0xefa3edcf, 0xefc9a072, 0xeff03543, 0xf017a6cb, 0xf03fef90, 0xf0690a14, 0xf092f0da, 0xf0bd9e66, 0xf0e90d37, 0xf11537d0, 0xf14218b3, 0xf16faa62, 0xf19de762, 0xf1ccca37, 0xf1fc4d68, 0xf22c6b80, 0xf25d1f0a, 0xf28e6296, 0xf2c030b5, 0xf2f283fe, 0xf325570c, 0xf358a47c, 0xf38c66f3, 0xf3c09918, 0xf3f5359a, 0xf42a372c, 0xf45f9888, 0xf495546e, 0xf4cb65a6, 0xf501c6fd, 0xf5387349, 0xf56f6567, 0xf5a6983c, 0xf5de06b6, 0xf615abcb, 0xf64d827a, 0xf68585cb, 0xf6bdb0d0, 0xf6f5fea5, 0xf72e6a6e, 0xf766ef5c, 0xf79f88a9, 0xf7d8319a, 0xf810e580, 0xf8499fb6, 0xf8825ba4, 0xf8bb14bc, 0xf8f3c67e, 0xf92c6c75, 0xf9650239, 0xf99d836f, 0xf9d5ebc7, 0xfa0e3702, 0xfa4660ea, 0xfa7e6559, 0xfab64038, 0xfaeded7a, 0xfb256924, 0xfb5caf47, 0xfb93bc03, 0xfbca8b87, 0xfc011a11, 0xfc3763ec, 0xfc6d6574, 0xfca31b14, 0xfcd88147, 0xfd0d9494, 0xfd425197, 0xfd76b4f7, 0xfdaabb6d, 0xfdde61c2, 0xfe11a4ce, 0xfe44817b, 0xfe76f4c2, 0xfea8fbab, 0xfeda9350, 0xff0bb8dd, 0xff3c698b, 0xff6ca2a6, 0xff9c618a, 0xffcba3a4, 0xfffa6672, 0x0028a781, 0x00566471, 0x00839af2, 0x00b048c5, 0x00dc6bbc, 0x010801b9, 0x013308b1, 0x015d7ea7, 0x018761b2, 0x01b0aff7, 0x01d967af, 0x02018722, 0x02290ca8, - 0x024ff6ac, 0x027643a9, 0x029bf22a, 0x02c100cc, 0x02e56e3b, 0x03093935, 0x032c608a, 0x034ee316, 0x0370bfca, 0x0391f5a5, 0x03b283b7, 0x03d26920, 0x03f1a510, 0x041036c8, 0x042e1d98, 0x044b58e0, 0x0467e810, 0x0483caa7, 0x049f0035, 0x04b98858, 0x04d362bd, 0x04ec8f23, 0x05050d54, 0x051cdd2c, 0x0533fe94, 0x054a7186, 0x05603607, 0x05754c2f, 0x0589b41f, 0x059d6e0a, 0x05b07a30, 0x05c2d8dd, 0x05d48a6e, 0x05e58f49, 0x05f5e7e6, 0x060594c8, 0x0614967d, 0x0622eda3, 0x06309ae3, 0x063d9ef2, 0x0649fa93, 0x0655ae94, 0x0660bbce, 0x066b2327, 0x0674e591, 0x067e0406, 0x06867f90, 0x068e5940, 0x06959234, 0x069c2b92, 0x06a2268e, 0x06a78463, 0x06ac4657, 0x06b06dba, 0x06b3fbe5, 0x06b6f23a, 0x06b95226, 0x06bb1d1c, 0x06bc549a, 0x06bcfa25, 0x06bd0f4b, 0x06bc95a2, 0x06bb8ec8, 0x06b9fc61, 0x06b7e01a, 0x06b53ba8, 0x06b210c4, 0x06ae6130, 0x06aa2eb4, 0x06a57b1f, 0x06a04844, 0x069a97fe, 0x06946c2c, 0x068dc6b3, 0x0686a97f, 0x067f167e, 0x06770fa5, 0x066e96eb, 0x0665ae50, 0x065c57d2, 0x06529578, 0x0648694a, 0x063dd555, 0x0632dba8, 0x06277e55, 0x061bbf72, 0x060fa118, 0x0603255f, 0x05f64e66, 0x05e91e4b, 0x05db972f, 0x05cdbb32, 0x05bf8c7a, 0x05b10d2b, 0x05a23f6a, 0x0593255f, 0x0583c12f, 0x05741504, 0x05642304, 0x0553ed58, 0x05437626, 0x0532bf98, 0x0521cbd2, 0x05109cfc, 0x04ff353b, 0x04ed96b2, 0x04dbc386, 0x04c9bdd6, 0x04b787c3, 0x04a5236c, 0x049292eb, 0x047fd85d, 0x046cf5d7, 0x0459ed70, 0x0446c13b, 0x04337347, 0x042005a1, 0x040c7a55, 0x03f8d368, 0x03e512de, 0x03d13ab8, 0x03bd4cf2, 0x03a94b85, 0x03953865, 0x03811584, 0x036ce4cd, 0x0358a82a, 0x0344617e, - 0x033012a8, 0x031bbd84, 0x030763e8, 0x02f307a3, 0x02deaa84, 0x02ca4e50, 0x02b5f4c9, 0x02a19fac, 0x028d50b0, 0x02790987, 0x0264cbdc, 0x02509956, 
0x023c7395, 0x02285c34, 0x021454c8, 0x02005edf, 0x01ec7c02, 0x01d8adb3, 0x01c4f56f, 0x01b154ab, 0x019dccd7, 0x018a5f5c, 0x01770d9d, 0x0163d8f6, 0x0150c2bb, 0x013dcc3d, 0x012af6c3, 0x0118438f, 0x0105b3da, 0x00f348da, 0x00e103ba, 0x00cee5a2, 0x00bcefb0, 0x00ab22fc, 0x00998098, 0x0088098e, 0x0076bee2, 0x0065a18f, 0x0054b28b, 0x0043f2c4, 0x00336322, 0x00230484, 0x0012d7c4, 0x0002ddb3, 0xfff3171d, 0xffe384c5, 0xffd42769, 0xffc4ffbe, 0xffb60e73, 0xffa75430, 0xff98d197, 0xff8a8740, 0xff7c75bf, 0xff6e9da0, 0xff60ff68, 0xff539b96, 0xff4672a1, 0xff3984fa, 0xff2cd30a, 0xff205d36, 0xff1423d9, 0xff08274a, 0xfefc67d8, 0xfef0e5cb, 0xfee5a167, 0xfeda9ae7, 0xfecfd280, 0xfec54861, 0xfebafcb4, 0xfeb0ef9c, 0xfea72133, 0xfe9d9192, 0xfe9440c8, 0xfe8b2edf, 0xfe825bdc, 0xfe79c7bd, 0xfe71727b, 0xfe695c0a, 0xfe618456, 0xfe59eb47, 0xfe5290bf, 0xfe4b749d, 0xfe4496b6, 0xfe3df6dd, 0xfe3794e0, 0xfe317085, 0xfe2b8991, 0xfe25dfc1, 0xfe2072ce, 0xfe1b426d, 0xfe164e4c, 0xfe119618, 0xfe0d1976, 0xfe08d808, 0xfe04d16d, 0xfe01053d, 0xfdfd730d, 0xfdfa1a6f, 0xfdf6faf0, 0xfdf41419, 0xfdf1656f, 0xfdeeee74, 0xfdecaea5, 0xfdeaa57e, 0xfde8d275, 0xfde734fe, 0xfde5cc89, 0xfde49884, 0xfde39857, 0xfde2cb6c, 0xfde23124, 0xfde1c8e3, 0xfde19207, 0xfde18beb, 0xfde1b5ea, 0xfde20f5a, 0xfde29790, 0xfde34dde, 0xfde43196, 0xfde54205, 0xfde67e78, 0xfde7e638, 0xfde97890, 0xfdeb34c5, 0xfded1a1e, 0xfdef27de, 0xfdf15d47, 0xfdf3b99b, - 0xfdf63c19, 0xfdf8e401, 0xfdfbb090, 0xfdfea102, 0xfe01b494, 0xfe04ea81, 0xfe084202, 0xfe0bba52, 0xfe0f52a9, 0xfe130a40, 0xfe16e050, 0xfe1ad410, 0xfe1ee4b9, 0xfe231181, 0xfe2759a0, 0xfe2bbc4e, 0xfe3038c2, 0xfe34ce35, 0xfe397bdd, 0xfe3e40f4, 0xfe431cb3, 0xfe480e52, 0xfe4d150b, 0xfe523019, 0xfe575eb6, 0xfe5ca01f, 0xfe61f390, 0xfe675846, 0xfe6ccd7f, 0xfe72527c, 0xfe77e67d, 0xfe7d88c3, 0xfe833890, 0xfe88f52a, 0xfe8ebdd5, 0xfe9491d7, 0xfe9a707a, 0xfea05906, 0xfea64ac7, 0xfeac4509, 0xfeb2471b, 0xfeb8504c, 0xfebe5fee, 0xfec47555, 0xfeca8fd6, 0xfed0aec7, 0xfed6d183, 0xfedcf763, 0xfee31fc4, 0xfee94a07, 0xfeef758b, 0xfef5a1b3, 0xfefbcde6, 0xff01f989, 0xff082408, 0xff0e4ccd, 0xff147346, 0xff1a96e4, 0xff20b71a, 0xff26d35c, 0xff2ceb21, 0xff32fde4, 0xff390b21, 0xff3f1255, 0xff451303, 0xff4b0cac, 0xff50fed8, 0xff56e90f, 0xff5ccadb, 0xff62a3ca, 0xff68736c, 0xff6e3954, 0xff73f516, 0xff79a64a, 0xff7f4c8b, 0xff84e775, 0xff8a76a7, 0xff8ff9c4, 0xff957070, 0xff9ada53, 0xffa03716, 0xffa58665, 0xffaac7ef, 0xffaffb66, 0xffb5207e, 0xffba36ee, 0xffbf3e6f, 0xffc436bd, 0xffc91f96, 0xffcdf8bc, 0xffd2c1f1, 0xffd77afc, 0xffdc23a6, 0xffe0bbb9, 0xffe54303, 0xffe9b954, 0xffee1e7f, 0xfff27259, 0xfff6b4b9, 0xfffae579, 0xffff0475, 0x0003118c, 0x00070ca0, 0x000af592, 0x000ecc4a, 0x001290ae, 0x001642aa, 0x0019e22a, 0x001d6f1c, 0x0020e971, 0x0024511d, 0x0027a615, 0x002ae850, 0x002e17c8, 0x00313479, 0x00343e5f, 0x0037357c, 0x003a19cf, 0x003ceb5e, 0x003faa2d, 0x00425644, 0x0044efac, 0x00477671, 0x0049ea9f, 0x004c4c45, 0x004e9b74, 0x0050d83e, 0x005302b6, - 0x00551af3, 0x0057210c, 0x00591518, 0x005af733, 0x005cc777, 0x005e8604, 0x006032f6, 0x0061ce6f, 0x0063588f, 0x0064d17a, 0x00663953, 0x00679041, 0x0068d669, 0x006a0bf4, 0x006b310a, 0x006c45d6, 0x006d4a83, 0x006e3f3d, 0x006f2431, 0x006ff98f, 0x0070bf84, 0x00717641, 0x00721df8, 0x0072b6da, 0x00734119, 0x0073bcea, 0x00742a81, 0x00748a12, 0x0074dbd4, 0x00751ffc, 0x007556c1, 0x0075805b, 0x00759d03, 0x0075acef, 0x0075b05a, 0x0075a77d, 0x00759291, 0x007571d1, 0x00754578, 0x00750dc0, 0x0074cae5, 0x00747d22, 0x007424b3, 0x0073c1d3, 0x007354bf, 0x0072ddb2, 0x00725cea, 0x0071d2a2, 0x00713f16, 0x0070a284, 0x006ffd28, 
0x006f4f3d, 0x006e9901, 0x006ddaaf, 0x006d1484, 0x006c46bc, 0x006b7192, 0x006a9542, 0x0069b208, 0x0068c81e, 0x0067d7c0, 0x0066e128, 0x0065e490, 0x0064e233, 0x0063da4a, 0x0062cd0f, 0x0061baba, 0x0060a384, 0x005f87a5, 0x005e6754, 0x005d42c9, 0x005c1a3b, 0x005aeddf, 0x0059bdeb, 0x00588a94, 0x00575410, 0x00561a91, 0x0054de4b, 0x00539f71, 0x00525e36, 0x00511acb, 0x004fd560, 0x004e8e26, 0x004d454d, 0x004bfb04, 0x004aaf78, 0x004962d9, 0x00481551, 0x0046c70e, 0x0045783b, 0x00442903, 0x0042d990, 0x00418a0a, 0x00403a9b, 0x003eeb6a, 0x003d9c9e, 0x003c4e5d, 0x003b00cc, 0x0039b411, 0x0038684f, 0x00371da9, 0x0035d441, 0x00348c38, 0x003345b0, 0x003200c9, 0x0030bda0, 0x002f7c55, 0x002e3d04, 0x002cffcb, 0x002bc4c5, 0x002a8c0d, 0x002955be, 0x002821f0, 0x0026f0bc, 0x0025c23b, 0x00249683, 0x00236daa, 0x002247c5, 0x002124ea, 0x0020052b, 0x001ee89d, 0x001dcf51, 0x001cb958, 0x001ba6c4, 0x001a97a5, 0x00198c08, 0x001883fe, 0x00177f93, - 0x00167ed5, 0x001581cf, 0x0014888e, 0x0013931b, 0x0012a181, 0x0011b3ca, 0x0010c9fd, 0x000fe423, 0x000f0244, 0x000e2465, 0x000d4a8c, 0x000c74bf, 0x000ba303, 0x000ad55c, 0x000a0bcc, 0x00094656, 0x000884fe, 0x0007c7c3, 0x00070ea8, 0x000659ad, 0x0005a8d0, 0x0004fc13, 0x00045373, 0x0003aeee, 0x00030e82, 0x0002722d, 0x0001d9eb, 0x000145b8, 0x0000b58f, 0x0000296d, 0xffffa14b, 0xffff1d24, 0xfffe9cf2, 0xfffe20ae, 0xfffda852, 0xfffd33d5, 0xfffcc331, 0xfffc565d, 0xfffbed50, 0xfffb8802, 0xfffb2669, 0xfffac87d, 0xfffa6e32, 0xfffa1780, 0xfff9c45a, 0xfff974b8, 0xfff9288e, 0xfff8dfcf, 0xfff89a72, 0xfff8586a, 0xfff819ac, 0xfff7de2a, 0xfff7a5d9, 0xfff770ab, 0xfff73e95, 0xfff70f89, 0xfff6e37b, 0xfff6ba5c, 0xfff6941f, 0xfff670b7, 0xfff65016, 0xfff6322f, 0xfff616f3, 0xfff5fe55, 0xfff5e846, 0xfff5d4b8, 0xfff5c39d, 0xfff5b4e7, 0xfff5a888, 0xfff59e71, 0xfff59694, 0xfff590e2, 0xfff58d4d, 0xfff58bc8, 0xfff58c42, 0xfff58eaf, 0xfff592ff, 0xfff59925, 0xfff5a112, 0xfff5aab7, 0xfff5b608, 0xfff5c2f6, 0xfff5d172, 0xfff5e16f, 0xfff5f2df, 0xfff605b5, 0xfff619e2, 0xfff62f59, 0xfff6460d, 0xfff65df0, 0xfff676f6, 0xfff69110, 0xfff6ac32, 0xfff6c850, 0xfff6e55d, 0xfff7034b, 0xfff72210, 0xfff7419e, 0xfff761ea, 0xfff782e7, 0xfff7a48b, 0xfff7c6c9, 0xfff7e996, 0xfff80ce7, 0xfff830b1, 0xfff854ea, 0xfff87986, 0xfff89e7c, 0xfff8c3c0, 0xfff8e949, 0xfff90f0e, 0xfff93504, 0xfff95b23, 0xfff98160, 0xfff9a7b4, 0xfff9ce14, 0xfff9f47a, 0xfffa1adc, 0xfffa4131, 0xfffa6774, 0xfffa8d9a, 0xfffab39e, 0xfffad977, 0xfffaff1f, 0xfffb248f, 0xfffb49c1, 0xfffb6eac, 0xfffb934d, + 0x58888889, 0x58875d88, 0x5883dc96, 0x587e05e0, 0x5875d9b3, 0x586b587d, 0x585e82c6, 0x584f593a, 0x583ddc9f, 0x582a0dde, 0x5813edfb, 0x57fb7e1a, 0x57e0bf7f, 0x57c3b389, 0x57a45bb8, 0x5782b9aa, 0x575ecf1a, 0x57389de0, 0x571027f6, 0x56e56f6f, 0x56b8767e, 0x56893f73, 0x5657ccbb, 0x562420e2, 0x55ee3e8d, 0x55b62882, 0x557be1a0, 0x553f6ce6, 0x5500cd6d, 0x54c0066a, 0x547d1b2e, 0x54380f26, 0x53f0e5da, 0x53a7a2ed, 0x535c4a1e, 0x530edf46, 0x52bf6657, 0x526de360, 0x521a5a86, 0x51c4d00c, 0x516d484a, 0x5113c7b6, 0x50b852d9, 0x505aee59, 0x4ffb9ef2, 0x4f9a6979, 0x4f3752d9, 0x4ed26016, 0x4e6b9649, 0x4e02faa3, 0x4d98926b, 0x4d2c62fd, 0x4cbe71cc, 0x4c4ec45e, 0x4bdd6050, 0x4b6a4b53, 0x4af58b2b, 0x4a7f25b0, 0x4a0720cd, 0x498d8283, 0x491250e1, 0x4895920c, 0x48174c37, 0x479785ab, 0x471644bd, 0x46938fd7, 0x460f6d70, 0x4589e411, 0x4502fa51, 0x447ab6d5, 0x43f12053, 0x43663d8d, 0x42da1554, 0x424cae85, 0x41be100a, 0x412e40db, 0x409d47f9, 0x400b2c72, 0x3f77f561, 0x3ee3a9e7, 0x3e4e5132, 0x3db7f27a, 0x3d2094ff, 0x3c88400b, 0x3beefaee, 0x3b54cd01, 0x3ab9bda6, 0x3a1dd444, 0x39811848, 0x38e39127, 
0x3845465a, 0x37a63f5f, 0x370683ba, 0x36661af1, 0x35c50c90, 0x35236024, 0x34811d3f, 0x33de4b72, 0x333af253, 0x32971979, 0x31f2c87a, 0x314e06ed, 0x30a8dc6a, 0x30035089, 0x2f5d6ade, 0x2eb732fe, 0x2e10b07d, 0x2d69eaeb, 0x2cc2e9d4, 0x2c1bb4c4, 0x2b745340, 0x2acccccc, 0x2a2528e6, 0x297d6f06, 0x28d5a6a0, 0x282dd722, 0x278607f2, 0x26de4072, 0x263687fa, 0x258ee5dd, 0x24e76163, 0x244001cf, 0x2398ce58, 0x22f1ce2e, 0x224b0876, 0x21a4844b, 0x20fe48be, 0x20585cd5, + 0x1fb2c78a, 0x1f0d8fcb, 0x1e68bc7d, 0x1dc45475, 0x1d205e7d, 0x1c7ce150, 0x1bd9e39e, 0x1b376c06, 0x1a95811c, 0x19f42964, 0x19536b51, 0x18b34d4a, 0x1813d5a3, 0x17750aa3, 0x16d6f27f, 0x1639935b, 0x159cf34b, 0x15011851, 0x1466085d, 0x13cbc94f, 0x133260f3, 0x1299d502, 0x12022b24, 0x116b68ed, 0x10d593dd, 0x1040b162, 0x0facc6d4, 0x0f19d979, 0x0e87ee81, 0x0df70b09, 0x0d673417, 0x0cd86e9d, 0x0c4abf78, 0x0bbe2b70, 0x0b32b735, 0x0aa86763, 0x0a1f407f, 0x099746f9, 0x09107f29, 0x088aed4f, 0x08069598, 0x07837c17, 0x0701a4c8, 0x06811392, 0x0601cc40, 0x0583d28b, 0x05072a0f, 0x048bd653, 0x0411dac7, 0x03993abf, 0x0321f97b, 0x02ac1a20, 0x02379fbb, 0x01c48d42, 0x0152e590, 0x00e2ab69, 0x0073e179, 0x00068a52, 0xff9aa86c, 0xff303e29, 0xfec74dd1, 0xfe5fd993, 0xfdf9e383, 0xfd956da0, 0xfd3279cd, 0xfcd109d6, 0xfc711f6d, 0xfc12bc2a, 0xfbb5e18f, 0xfb5a9103, 0xfb00cbd4, 0xfaa89339, 0xfa51e84e, 0xf9fccc18, 0xf9a93f82, 0xf9574361, 0xf906d86d, 0xf8b7ff4b, 0xf86ab883, 0xf81f0487, 0xf7d4e3b0, 0xf78c5641, 0xf7455c62, 0xf6fff625, 0xf6bc2385, 0xf679e463, 0xf639388a, 0xf5fa1fae, 0xf5bc996b, 0xf580a547, 0xf54642b1, 0xf50d70ff, 0xf4d62f74, 0xf4a07d3b, 0xf46c5967, 0xf439c2f9, 0xf408b8d8, 0xf3d939d9, 0xf3ab44b9, 0xf37ed821, 0xf353f2a5, 0xf32a92c3, 0xf302b6e6, 0xf2dc5d64, 0xf2b7847f, 0xf2942a64, 0xf2724d2e, 0xf251eae4, 0xf2330179, 0xf2158ece, 0xf1f990b1, 0xf1df04de, 0xf1c5e8ff, 0xf1ae3aaa, 0xf197f765, 0xf1831ca6, 0xf16fa7d0, 0xf15d9634, 0xf14ce516, 0xf13d91a7, 0xf12f9909, 0xf122f84e, 0xf117ac79, 0xf10db27d, 0xf1050741, 0xf0fda799, 0xf0f7904e, 0xf0f2be1a, + 0xf0ef2dab, 0xf0ecdba0, 0xf0ebc48a, 0xf0ebe4f1, 0xf0ed394e, 0xf0efbe0d, 0xf0f36f92, 0xf0f84a32, 0xf0fe4a39, 0xf1056be8, 0xf10dab74, 0xf117050a, 0xf12174cd, 0xf12cf6d5, 0xf1398732, 0xf14721ec, 0xf155c300, 0xf1656666, 0xf176080d, 0xf187a3db, 0xf19a35b1, 0xf1adb969, 0xf1c22ad4, 0xf1d785c1, 0xf1edc5f5, 0xf204e733, 0xf21ce537, 0xf235bbb8, 0xf24f6669, 0xf269e0fa, 0xf2852715, 0xf2a13462, 0xf2be0485, 0xf2db9321, 0xf2f9dbd3, 0xf318da38, 0xf33889ec, 0xf358e688, 0xf379eba4, 0xf39b94d7, 0xf3bdddb7, 0xf3e0c1db, 0xf4043cd8, 0xf4284a45, 0xf44ce5ba, 0xf4720ace, 0xf497b51a, 0xf4bde03a, 0xf4e487c9, 0xf50ba766, 0xf5333ab3, 0xf55b3d52, 0xf583aaec, 0xf5ac7f29, 0xf5d5b5b7, 0xf5ff4a47, 0xf6293890, 0xf6537c4a, 0xf67e1134, 0xf6a8f311, 0xf6d41dab, 0xf6ff8cce, 0xf72b3c4f, 0xf7572808, 0xf7834bd7, 0xf7afa3a3, 0xf7dc2b58, 0xf808deec, 0xf835ba59, 0xf862b9a0, 0xf88fd8cc, 0xf8bd13f0, 0xf8ea6724, 0xf917ce8a, 0xf945464f, 0xf972caa4, 0xf9a057c6, 0xf9cde9fb, 0xf9fb7d90, 0xfa290edf, 0xfa569a49, 0xfa841c3a, 0xfab19127, 0xfadef591, 0xfb0c4601, 0xfb397f0d, 0xfb669d55, 0xfb939d83, 0xfbc07c4c, 0xfbed3671, 0xfc19c8bf, 0xfc46300d, 0xfc72693e, 0xfc9e7141, 0xfcca4511, 0xfcf5e1b4, 0xfd21443e, 0xfd4c69cd, 0xfd774f8e, 0xfda1f2b7, 0xfdcc508d, 0xfdf66662, 0xfe203193, 0xfe49af8a, 0xfe72ddbf, 0xfe9bb9b7, 0xfec44103, 0xfeec7141, 0xff14481d, 0xff3bc351, 0xff62e0a2, 0xff899de5, 0xffaff8f9, 0xffd5efce, 0xfffb8060, 0x0020a8b7, 0x004566eb, 0x0069b920, 0x008d9d89, 0x00b11264, 0x00d415ff, 0x00f6a6b5, 0x0118c2ef, 0x013a6922, 0x015b97d1, 0x017c4d8f, 0x019c88f9, 0x01bc48bd, + 0x01db8b94, 
0x01fa5045, 0x021895a6, 0x02365a98, 0x02539e0b, 0x02705efd, 0x028c9c77, 0x02a85592, 0x02c38972, 0x02de3749, 0x02f85e57, 0x0311fde7, 0x032b1552, 0x0343a3ff, 0x035ba961, 0x037324f6, 0x038a164c, 0x03a07cfa, 0x03b658a7, 0x03cba904, 0x03e06dcf, 0x03f4a6d1, 0x040853e2, 0x041b74e4, 0x042e09c4, 0x0440127d, 0x04518f14, 0x04627f9b, 0x0472e42e, 0x0482bcf5, 0x04920a24, 0x04a0cbf7, 0x04af02ba, 0x04bcaebe, 0x04c9d064, 0x04d66814, 0x04e27642, 0x04edfb6c, 0x04f8f819, 0x05036cdc, 0x050d5a51, 0x0516c11c, 0x051fa1ee, 0x0527fd7e, 0x052fd48d, 0x053727e8, 0x053df861, 0x054446d5, 0x054a1429, 0x054f614a, 0x05542f2f, 0x05587ed5, 0x055c5141, 0x055fa783, 0x056282ae, 0x0564e3e1, 0x0566cc3e, 0x05683cf1, 0x0569372c, 0x0569bc29, 0x0569cd27, 0x05696b6b, 0x05689842, 0x056754fe, 0x0565a2f9, 0x0563838f, 0x0560f824, 0x055e0222, 0x055aa2f6, 0x0556dc14, 0x0552aef5, 0x054e1d14, 0x054927f4, 0x0543d11a, 0x053e1a11, 0x05380465, 0x053191aa, 0x052ac373, 0x05239b5b, 0x051c1afe, 0x051443fa, 0x050c17f3, 0x0503988d, 0x04fac770, 0x04f1a647, 0x04e836bd, 0x04de7a82, 0x04d47346, 0x04ca22bc, 0x04bf8a97, 0x04b4ac8c, 0x04a98a54, 0x049e25a4, 0x04928037, 0x04869bc6, 0x047a7a0b, 0x046e1cc1, 0x046185a3, 0x0454b66c, 0x0447b0d7, 0x043a76a1, 0x042d0983, 0x041f6b39, 0x04119d7b, 0x0403a204, 0x03f57a8c, 0x03e728c9, 0x03d8ae73, 0x03ca0d3e, 0x03bb46dd, 0x03ac5d03, 0x039d5160, 0x038e25a2, 0x037edb76, 0x036f7486, 0x035ff27a, 0x035056f9, 0x0340a3a5, 0x0330da20, 0x0320fc08, 0x03110af8, 0x03010889, 0x02f0f64f, 0x02e0d5df, 0x02d0a8c6, 0x02c07090, 0x02b02ec6, 0x029fe4ec, + 0x028f9484, 0x027f3f0b, 0x026ee5fa, 0x025e8ac8, 0x024e2ee5, 0x023dd3c0, 0x022d7ac1, 0x021d254d, 0x020cd4c6, 0x01fc8a88, 0x01ec47ea, 0x01dc0e40, 0x01cbded8, 0x01bbbafd, 0x01aba3f2, 0x019b9afa, 0x018ba14e, 0x017bb826, 0x016be0b3, 0x015c1c20, 0x014c6b97, 0x013cd038, 0x012d4b20, 0x011ddd67, 0x010e8820, 0x00ff4c57, 0x00f02b13, 0x00e12558, 0x00d23c22, 0x00c37068, 0x00b4c31c, 0x00a6352a, 0x0097c778, 0x00897ae9, 0x007b5057, 0x006d4899, 0x005f647f, 0x0051a4d3, 0x00440a5a, 0x003695d5, 0x002947fc, 0x001c2183, 0x000f231a, 0x00024d68, 0xfff5a111, 0xffe91eb2, 0xffdcc6e4, 0xffd09a37, 0xffc49939, 0xffb8c471, 0xffad1c5f, 0xffa1a180, 0xff965449, 0xff8b352a, 0xff804490, 0xff7582e0, 0xff6af079, 0xff608db6, 0xff565aec, 0xff4c586c, 0xff42867e, 0xff38e569, 0xff2f756c, 0xff2636c2, 0xff1d29a0, 0xff144e36, 0xff0ba4ae, 0xff032d30, 0xfefae7db, 0xfef2d4cc, 0xfeeaf419, 0xfee345d5, 0xfedbca0b, 0xfed480c6, 0xfecd6a07, 0xfec685cf, 0xfebfd416, 0xfeb954d4, 0xfeb307f8, 0xfeaced6f, 0xfea70522, 0xfea14ef4, 0xfe9bcac5, 0xfe96786f, 0xfe9157cb, 0xfe8c68ab, 0xfe87aadd, 0xfe831e2e, 0xfe7ec263, 0xfe7a9741, 0xfe769c85, 0xfe72d1ed, 0xfe6f3731, 0xfe6bcc04, 0xfe689017, 0xfe658319, 0xfe62a4b3, 0xfe5ff48d, 0xfe5d7249, 0xfe5b1d89, 0xfe58f5ea, 0xfe56fb06, 0xfe552c76, 0xfe5389cc, 0xfe52129d, 0xfe50c676, 0xfe4fa4e5, 0xfe4ead73, 0xfe4ddfa8, 0xfe4d3b09, 0xfe4cbf19, 0xfe4c6b59, 0xfe4c3f47, 0xfe4c3a5e, 0xfe4c5c1b, 0xfe4ca3f4, 0xfe4d1160, 0xfe4da3d4, 0xfe4e5ac3, 0xfe4f359e, 0xfe5033d5, 0xfe5154d6, 0xfe52980d, 0xfe53fce6, 0xfe5582cb, 0xfe572926, 0xfe58ef5d, 0xfe5ad4d7, + 0xfe5cd8fa, 0xfe5efb2b, 0xfe613ace, 0xfe639746, 0xfe660ff5, 0xfe68a43c, 0xfe6b537e, 0xfe6e1d1b, 0xfe710072, 0xfe73fce5, 0xfe7711d2, 0xfe7a3e98, 0xfe7d8297, 0xfe80dd2e, 0xfe844dbc, 0xfe87d39f, 0xfe8b6e37, 0xfe8f1ce3, 0xfe92df02, 0xfe96b3f4, 0xfe9a9b19, 0xfe9e93d1, 0xfea29d7d, 0xfea6b77d, 0xfeaae135, 0xfeaf1a05, 0xfeb36152, 0xfeb7b67e, 0xfebc18ef, 0xfec0880a, 0xfec50334, 0xfec989d5, 0xfece1b54, 0xfed2b71b, 0xfed75c94, 0xfedc0b2a, 0xfee0c249, 0xfee5815e, 0xfeea47d8, 0xfeef1528, 
0xfef3e8be, 0xfef8c20c, 0xfefda088, 0xff0283a5, 0xff076adc, 0xff0c55a4, 0xff114377, 0xff1633d0, 0xff1b262d, 0xff201a0c, 0xff250eee, 0xff2a0453, 0xff2ef9c1, 0xff33eebc, 0xff38e2cb, 0xff3dd578, 0xff42c64c, 0xff47b4d6, 0xff4ca0a2, 0xff518941, 0xff566e47, 0xff5b4f45, 0xff602bd4, 0xff65038a, 0xff69d601, 0xff6ea2d6, 0xff7369a7, 0xff782a12, 0xff7ce3bb, 0xff819645, 0xff864157, 0xff8ae498, 0xff8f7fb2, 0xff941251, 0xff989c25, 0xff9d1cdc, 0xffa1942a, 0xffa601c3, 0xffaa655e, 0xffaebeb2, 0xffb30d7c, 0xffb75177, 0xffbb8a62, 0xffbfb7ff, 0xffc3da11, 0xffc7f05c, 0xffcbfaa8, 0xffcff8be, 0xffd3ea6a, 0xffd7cf79, 0xffdba7b9, 0xffdf72fe, 0xffe33119, 0xffe6e1e1, 0xffea852e, 0xffee1ad8, 0xfff1a2bb, 0xfff51cb5, 0xfff888a4, 0xfffbe66b, 0xffff35ed, 0x0002770f, 0x0005a9b8, 0x0008cdd0, 0x000be344, 0x000ee9ff, 0x0011e1f0, 0x0014cb08, 0x0017a538, 0x001a7075, 0x001d2cb3, 0x001fd9eb, 0x00227816, 0x0025072f, 0x00278731, 0x0029f81b, 0x002c59ed, 0x002eaca8, 0x0030f04f, 0x003324e6, 0x00354a74, 0x003760ff, 0x00396892, 0x003b6135, 0x003d4af6, 0x003f25e1, 0x0040f206, 0x0042af73, + 0x00445e3a, 0x0045fe6e, 0x00479023, 0x0049136d, 0x004a8864, 0x004bef1e, 0x004d47b5, 0x004e9242, 0x004fcedf, 0x0050fdaa, 0x00521ebe, 0x0053323b, 0x0054383e, 0x005530e9, 0x00561c5b, 0x0056fab7, 0x0057cc20, 0x005890b9, 0x005948a7, 0x0059f40e, 0x005a9315, 0x005b25e2, 0x005bac9d, 0x005c276d, 0x005c967d, 0x005cf9f4, 0x005d51fd, 0x005d9ec3, 0x005de071, 0x005e1731, 0x005e4331, 0x005e649d, 0x005e7ba1, 0x005e886c, 0x005e8b2b, 0x005e840c, 0x005e733e, 0x005e58ef, 0x005e354e, 0x005e088c, 0x005dd2d6, 0x005d945e, 0x005d4d53, 0x005cfde5, 0x005ca645, 0x005c46a2, 0x005bdf2d, 0x005b7017, 0x005af990, 0x005a7bc9, 0x0059f6f2, 0x00596b3b, 0x0058d8d6, 0x00583ff2, 0x0057a0c0, 0x0056fb70, 0x00565032, 0x00559f36, 0x0054e8ac, 0x00542cc2, 0x00536baa, 0x0052a591, 0x0051daa6, 0x00510b19, 0x00503717, 0x004f5ece, 0x004e826d, 0x004da220, 0x004cbe15, 0x004bd678, 0x004aeb75, 0x0049fd39, 0x00490bef, 0x004817c2, 0x004720dd, 0x0046276a, 0x00452b92, 0x00442d80, 0x00432d5b, 0x00422b4c, 0x0041277c, 0x00402210, 0x003f1b31, 0x003e1304, 0x003d09b0, 0x003bff58, 0x003af423, 0x0039e833, 0x0038dbad, 0x0037ceb3, 0x0036c168, 0x0035b3ed, 0x0034a664, 0x003398ed, 0x00328ba7, 0x00317eb3, 0x0030722e, 0x002f6638, 0x002e5aec, 0x002d5069, 0x002c46c9, 0x002b3e2a, 0x002a36a5, 0x00293054, 0x00282b52, 0x002727b7, 0x0026259c, 0x00252518, 0x00242641, 0x00232930, 0x00222df8, 0x002134b0, 0x00203d6b, 0x001f483d, 0x001e5539, 0x001d6473, 0x001c75fb, 0x001b89e3, 0x001aa03b, 0x0019b913, 0x0018d47b, 0x0017f281, 0x00171334, 0x001636a0, 0x00155cd2, 0x001485d7, 0x0013b1ba, 0x0012e086, + 0x00121246, 0x00114703, 0x00107ec6, 0x000fb999, 0x000ef783, 0x000e388c, 0x000d7cba, 0x000cc414, 0x000c0ea0, 0x000b5c64, 0x000aad63, 0x000a01a2, 0x00095925, 0x0008b3f0, 0x00081204, 0x00077364, 0x0006d811, 0x0006400e, 0x0005ab5a, 0x000519f6, 0x00048be2, 0x0004011d, 0x000379a7, 0x0002f57d, 0x0002749e, 0x0001f708, 0x00017cb7, 0x000105a9, 0x000091da, 0x00002147, 0xffffb3eb, 0xffff49c1, 0xfffee2c6, 0xfffe7ef2, 0xfffe1e41, 0xfffdc0ad, 0xfffd6630, 0xfffd0ec3, 0xfffcba5f, 0xfffc68fd, 0xfffc1a97, 0xfffbcf23, 0xfffb869a, 0xfffb40f4, 0xfffafe29, 0xfffabe30, 0xfffa8100, 0xfffa4690, 0xfffa0ed7, 0xfff9d9cc, 0xfff9a764, 0xfff97796, 0xfff94a58, 0xfff91fa0, 0xfff8f764, 0xfff8d199, 0xfff8ae34, 0xfff88d2b, 0xfff86e74, 0xfff85203, 0xfff837cd, 0xfff81fc7, 0xfff809e6, 0xfff7f61f, 0xfff7e467, 0xfff7d4b1, 0xfff7c6f4, 0xfff7bb22, 0xfff7b132, 0xfff7a917, 0xfff7a2c6, 0xfff79e33, 0xfff79b52, 0xfff79a19, 0xfff79a7b, 0xfff79c6e, 0xfff79fe5, 0xfff7a4d5, 0xfff7ab33, 
0xfff7b2f3, 0xfff7bc0a, 0xfff7c66d, 0xfff7d210, 0xfff7dee8, 0xfff7eceb, 0xfff7fc0c, 0xfff80c41, 0xfff81d80, 0xfff82fbc, 0xfff842ed, 0xfff85707, 0xfff86bff, 0xfff881cb, 0xfff89861, 0xfff8afb7, 0xfff8c7c3, 0xfff8e07b, 0xfff8f9d4, 0xfff913c6, 0xfff92e46, 0xfff9494c, 0xfff964ce, 0xfff980c3, 0xfff99d23, 0xfff9b9e3, 0xfff9d6fc, 0xfff9f465, 0xfffa1216, 0xfffa3006, 0xfffa4e2d, 0xfffa6c84, 0xfffa8b03, 0xfffaa9a3, 0xfffac85b, 0xfffae725, 0xfffb05f9, 0xfffb24d2, 0xfffb43a7, 0xfffb6273, 0xfffb812f, 0xfffb9fd5, 0xfffbbe5f, 0xfffbdcc6, 0xfffbfb07, 0xfffc191a, 0xfffc36fa, 0xfffc54a4, 0xfffc7210, 0x00000000 // this one is needed for lerping the last coefficient }; diff --git a/tools/resampler_tools/fir.cpp b/tools/resampler_tools/fir.cpp index acd9911..ea3ef50 100644 --- a/tools/resampler_tools/fir.cpp +++ b/tools/resampler_tools/fir.cpp @@ -222,7 +222,7 @@ int main(int argc, char** argv) if (!polyphase) { for (int i=0 ; i Date: Tue, 30 Oct 2012 13:51:44 -0700 Subject: fix SINC resampler on non ARM architectures make sure the C version of the code generates the same output than the ARM assemply version. Change-Id: Ide218785c35d02598b2d7278e646b1b178148698 --- services/audioflinger/AudioResamplerSinc.cpp | 20 +++++--------------- 1 file changed, 5 insertions(+), 15 deletions(-) diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index edfed49..e0ea4a4 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -141,11 +141,8 @@ int32_t mulRL(int left, int32_t in, uint32_t vRL) } return out; #else - if (left) { - return int16_t(in>>16) * int16_t(vRL&0xFFFF); - } else { - return int16_t(in>>16) * int16_t(vRL>>16); - } + int16_t v = left ? int16_t(vRL) : int16_t(vRL>>16); + return int32_t((int64_t(in) * v) >> 16); #endif } @@ -160,9 +157,7 @@ int32_t mulAdd(int16_t in, int32_t v, int32_t a) : ); return out; #else - return a + in * (v>>16); - // improved precision - // return a + in * (v>>16) + ((in * (v & 0xffff)) >> 16); + return a + int32_t((int64_t(v) * in) >> 16); #endif } @@ -184,13 +179,8 @@ int32_t mulAddRL(int left, uint32_t inRL, int32_t v, int32_t a) } return out; #else - if (left) { - return a + (int16_t(inRL&0xFFFF) * (v>>16)); - //improved precision - // return a + (int16_t(inRL&0xFFFF) * (v>>16)) + ((int16_t(inRL&0xFFFF) * (v & 0xffff)) >> 16); - } else { - return a + (int16_t(inRL>>16) * (v>>16)); - } + int16_t s = left ? int16_t(inRL) : int16_t(inRL>>16); + return a + int32_t((int64_t(v) * s) >> 16); #endif } -- cgit v1.1 From 77536f9f8fc030379102c9e36ad21ce5b2ab234c Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 30 Oct 2012 10:55:15 -0700 Subject: Remove obsolete libmedia_native Bug: 6654403 Change-Id: Ic979a7890e2f4ef3f5409af14372eb52196e6dea --- CleanSpec.mk | 52 ++++++++++++++++++++++++++++++++++++++++ media/libmedia_native/Android.mk | 11 --------- 2 files changed, 52 insertions(+), 11 deletions(-) create mode 100644 CleanSpec.mk delete mode 100644 media/libmedia_native/Android.mk diff --git a/CleanSpec.mk b/CleanSpec.mk new file mode 100644 index 0000000..e6d9ebf --- /dev/null +++ b/CleanSpec.mk @@ -0,0 +1,52 @@ +# Copyright (C) 2012 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# If you don't need to do a full clean build but would like to touch +# a file or delete some intermediate files, add a clean step to the end +# of the list. These steps will only be run once, if they haven't been +# run before. +# +# E.g.: +# $(call add-clean-step, touch -c external/sqlite/sqlite3.h) +# $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/STATIC_LIBRARIES/libz_intermediates) +# +# Always use "touch -c" and "rm -f" or "rm -rf" to gracefully deal with +# files that are missing or have been moved. +# +# Use $(PRODUCT_OUT) to get to the "out/target/product/blah/" directory. +# Use $(OUT_DIR) to refer to the "out" directory. +# +# If you need to re-do something that's already mentioned, just copy +# the command and add it to the bottom of the list. E.g., if a change +# that you made last week required touching a file and a change you +# made today requires touching the same file, just copy the old +# touch step and add it to the end of the list. +# +# ************************************************ +# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST +# ************************************************ + +# For example: +#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/AndroidTests_intermediates) +#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/core_intermediates) +#$(call add-clean-step, find $(OUT_DIR) -type f -name "IGTalkSession*" -print0 | xargs -0 rm -f) +#$(call add-clean-step, rm -rf $(PRODUCT_OUT)/data/*) +$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/SHARED_LIBRARIES/libmedia_native_intermediates) +$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/lib/libmedia_native.so) +$(call add-clean-step, rm -rf $(PRODUCT_OUT)/symbols/system/lib/libmedia_native.so) +$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib/libmedia_native.so) +# ************************************************ +# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST +# ************************************************ diff --git a/media/libmedia_native/Android.mk b/media/libmedia_native/Android.mk deleted file mode 100644 index 065a90f..0000000 --- a/media/libmedia_native/Android.mk +++ /dev/null @@ -1,11 +0,0 @@ -LOCAL_PATH := $(call my-dir) - -include $(CLEAR_VARS) - -LOCAL_SRC_FILES := - -LOCAL_MODULE:= libmedia_native - -LOCAL_MODULE_TAGS := optional - -include $(BUILD_SHARED_LIBRARY) -- cgit v1.1 From b64497eb8724c4c372fffdbf3ee30543432953c5 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 1 Oct 2012 09:47:30 -0700 Subject: Add NBAIO support for more sample rates This will be used for audio capture but it is not scalable, and we need a better approach in the long term. 
Change-Id: I8b12f6b64a3fd8e8a8c425c82574260fe8ffbed6 --- include/media/nbaio/NBAIO.h | 14 +++-- media/libnbaio/NBAIO.cpp | 124 ++++++++++++++++++++++++++++++-------------- 2 files changed, 91 insertions(+), 47 deletions(-) diff --git a/include/media/nbaio/NBAIO.h b/include/media/nbaio/NBAIO.h index 81f42ed..f5d6eb5 100644 --- a/include/media/nbaio/NBAIO.h +++ b/include/media/nbaio/NBAIO.h @@ -45,17 +45,15 @@ enum { // Negotiation of format is based on the data provider and data sink, or the data consumer and // data source, exchanging prioritized arrays of offers and counter-offers until a single offer is // mutually agreed upon. Each offer is an NBAIO_Format. For simplicity and performance, -// NBAIO_Format is an enum that ties together the most important combinations of the various +// NBAIO_Format is a typedef that ties together the most important combinations of the various // attributes, rather than a struct with separate fields for format, sample rate, channel count, // interleave, packing, alignment, etc. The reason is that NBAIO_Format tries to abstract out only -// the combinations that are actually needed within AudioFligner. If the list of combinations grows +// the combinations that are actually needed within AudioFlinger. If the list of combinations grows // too large, then this decision should be re-visited. -enum NBAIO_Format { - Format_Invalid, - Format_SR44_1_C2_I16, // 44.1 kHz PCM stereo interleaved 16-bit signed - Format_SR48_C2_I16, // 48 kHz PCM stereo interleaved 16-bit signed - Format_SR44_1_C1_I16, // 44.1 kHz PCM mono interleaved 16-bit signed - Format_SR48_C1_I16, // 48 kHz PCM mono interleaved 16-bit signed +// Sample rate and channel count are explicit, PCM interleaved 16-bit is assumed. +typedef unsigned NBAIO_Format; +enum { + Format_Invalid }; // Return the frame size of an NBAIO_Format in bytes diff --git a/media/libnbaio/NBAIO.cpp b/media/libnbaio/NBAIO.cpp index 00d2017..e0d2c21 100644 --- a/media/libnbaio/NBAIO.cpp +++ b/media/libnbaio/NBAIO.cpp @@ -24,44 +24,55 @@ namespace android { size_t Format_frameSize(NBAIO_Format format) { - switch (format) { - case Format_SR44_1_C2_I16: - case Format_SR48_C2_I16: - return 2 * sizeof(short); - case Format_SR44_1_C1_I16: - case Format_SR48_C1_I16: - return 1 * sizeof(short); - case Format_Invalid: - default: - return 0; - } + return Format_channelCount(format) * sizeof(short); } size_t Format_frameBitShift(NBAIO_Format format) { - switch (format) { - case Format_SR44_1_C2_I16: - case Format_SR48_C2_I16: - return 2; // 1 << 2 == 2 * sizeof(short) - case Format_SR44_1_C1_I16: - case Format_SR48_C1_I16: - return 1; // 1 << 1 == 1 * sizeof(short) - case Format_Invalid: - default: - return 0; - } + // sizeof(short) == 2, so frame size == 1 << channels + return Format_channelCount(format); } +enum { + Format_SR_8000, + Format_SR_11025, + Format_SR_16000, + Format_SR_22050, + Format_SR_24000, + Format_SR_32000, + Format_SR_44100, + Format_SR_48000, + Format_SR_Mask = 7 +}; + +enum { + Format_C_1 = 0x08, + Format_C_2 = 0x10, + Format_C_Mask = 0x18 +}; + unsigned Format_sampleRate(NBAIO_Format format) { - switch (format) { - case Format_SR44_1_C1_I16: - case Format_SR44_1_C2_I16: + if (format == Format_Invalid) { + return 0; + } + switch (format & Format_SR_Mask) { + case Format_SR_8000: + return 8000; + case Format_SR_11025: + return 11025; + case Format_SR_16000: + return 16000; + case Format_SR_22050: + return 22050; + case Format_SR_24000: + return 24000; + case Format_SR_32000: + return 32000; + case Format_SR_44100: 
return 44100; - case Format_SR48_C1_I16: - case Format_SR48_C2_I16: + case Format_SR_48000: return 48000; - case Format_Invalid: default: return 0; } @@ -69,14 +80,14 @@ unsigned Format_sampleRate(NBAIO_Format format) unsigned Format_channelCount(NBAIO_Format format) { - switch (format) { - case Format_SR44_1_C1_I16: - case Format_SR48_C1_I16: + if (format == Format_Invalid) { + return 0; + } + switch (format & Format_C_Mask) { + case Format_C_1: return 1; - case Format_SR44_1_C2_I16: - case Format_SR48_C2_I16: + case Format_C_2: return 2; - case Format_Invalid: default: return 0; } @@ -84,11 +95,46 @@ unsigned Format_channelCount(NBAIO_Format format) NBAIO_Format Format_from_SR_C(unsigned sampleRate, unsigned channelCount) { - if (sampleRate == 44100 && channelCount == 2) return Format_SR44_1_C2_I16; - if (sampleRate == 48000 && channelCount == 2) return Format_SR48_C2_I16; - if (sampleRate == 44100 && channelCount == 1) return Format_SR44_1_C1_I16; - if (sampleRate == 48000 && channelCount == 1) return Format_SR48_C1_I16; - return Format_Invalid; + NBAIO_Format format; + switch (sampleRate) { + case 8000: + format = Format_SR_8000; + break; + case 11025: + format = Format_SR_11025; + break; + case 16000: + format = Format_SR_16000; + break; + case 22050: + format = Format_SR_22050; + break; + case 24000: + format = Format_SR_24000; + break; + case 32000: + format = Format_SR_32000; + break; + case 44100: + format = Format_SR_44100; + break; + case 48000: + format = Format_SR_48000; + break; + default: + return Format_Invalid; + } + switch (channelCount) { + case 1: + format |= Format_C_1; + break; + case 2: + format |= Format_C_2; + break; + default: + return Format_Invalid; + } + return format; } // This is a default implementation; it is expected that subclasses will optimize this. 
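(A rough usage sketch of the bit-packed format introduced above — the numeric values, e.g. Format_SR_44100 == 6 and Format_C_2 == 0x10, are read off this patch and are an internal detail rather than a stable ABI; nbaioFormatExample() is an illustrative name:)

#include <cassert>
#include <media/nbaio/NBAIO.h>

using namespace android;

static void nbaioFormatExample() {
    // 44.1 kHz stereo packs the rate index into the low 3 bits and the
    // channel flag into bits 3-4: 6 | 0x10 == 0x16.
    NBAIO_Format fmt = Format_from_SR_C(44100, 2);
    assert(Format_sampleRate(fmt) == 44100);
    assert(Format_channelCount(fmt) == 2);
    assert(Format_frameSize(fmt) == 2 * sizeof(short));            // 16-bit stereo PCM
    assert((1u << Format_frameBitShift(fmt)) == Format_frameSize(fmt));
    // Rates outside the table still negotiate down to Format_Invalid.
    assert(Format_from_SR_C(96000, 2) == Format_Invalid);
}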
-- cgit v1.1 From d06785bebf7e43d4a011b62a252771373ada910c Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Sun, 30 Sep 2012 12:29:28 -0700 Subject: Save copy of mic input, disabled by default Change-Id: I4f5e95a5ddf016530d1b2747a0a5ca0962caabda --- services/audioflinger/Android.mk | 4 + services/audioflinger/AudioFlinger.cpp | 133 +++++++++++++++++++++++++-------- services/audioflinger/AudioFlinger.h | 13 +++- 3 files changed, 117 insertions(+), 33 deletions(-) diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index 58c4be4..4416b52 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -70,6 +70,10 @@ LOCAL_CFLAGS += -UFAST_TRACKS_AT_NON_NATIVE_SAMPLE_RATE # 47.5 seconds at 44.1 kHz, 8 megabytes # LOCAL_CFLAGS += -DTEE_SINK_FRAMES=0x200000 +# uncomment for dumpsys to write most recent audio input to .wav file +# 47.5 seconds at 44.1 kHz, 8 megabytes +# LOCAL_CFLAGS += -DTEE_SINK_INPUT_FRAMES=0x200000 + # uncomment to enable the audio watchdog # LOCAL_SRC_FILES += AudioWatchdog.cpp # LOCAL_CFLAGS += -DAUDIO_WATCHDOG diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 096a0f0..9bdab2f 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -417,6 +417,12 @@ status_t AudioFlinger::dump(int fd, const Vector& args) audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice(); dev->dump(dev, fd); } + + // dump the serially shared record tee sink + if (mRecordTeeSource != 0) { + dumpTee(fd, mRecordTeeSource); + } + if (locked) mLock.unlock(); } return NO_ERROR; @@ -3580,39 +3586,18 @@ bool AudioFlinger::MixerThread::checkForNewParameters_l() return reconfig; } -void AudioFlinger::MixerThread::dumpInternals(int fd, const Vector& args) +void AudioFlinger::dumpTee(int fd, const sp& source, audio_io_handle_t id) { - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - PlaybackThread::dumpInternals(fd, args); - - snprintf(buffer, SIZE, "AudioMixer tracks: %08x\n", mAudioMixer->trackNames()); - result.append(buffer); - write(fd, result.string(), result.size()); - - // Make a non-atomic copy of fast mixer dump state so it won't change underneath us - FastMixerDumpState copy = mFastMixerDumpState; - copy.dump(fd); - -#ifdef STATE_QUEUE_DUMP - // Similar for state queue - StateQueueObserverDump observerCopy = mStateQueueObserverDump; - observerCopy.dump(fd); - StateQueueMutatorDump mutatorCopy = mStateQueueMutatorDump; - mutatorCopy.dump(fd); -#endif - - // Write the tee output to a .wav file - NBAIO_Source *teeSource = mTeeSource.get(); + NBAIO_Source *teeSource = source.get(); if (teeSource != NULL) { - char teePath[64]; + char teeTime[16]; struct timeval tv; gettimeofday(&tv, NULL); struct tm tm; localtime_r(&tv.tv_sec, &tm); - strftime(teePath, sizeof(teePath), "/data/misc/media/%T.wav", &tm); + strftime(teeTime, sizeof(teeTime), "%T", &tm); + char teePath[64]; + sprintf(teePath, "/data/misc/media/%s_%d.wav", teeTime, id); int teeFd = open(teePath, O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR); if (teeFd >= 0) { char wavHeader[44]; @@ -3660,6 +3645,34 @@ void AudioFlinger::MixerThread::dumpInternals(int fd, const Vector& ar fdprintf(fd, "FastMixer unable to create tee %s: \n", strerror(errno)); } } +} + +void AudioFlinger::MixerThread::dumpInternals(int fd, const Vector& args) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + PlaybackThread::dumpInternals(fd, args); + + snprintf(buffer, SIZE, "AudioMixer tracks: 
%08x\n", mAudioMixer->trackNames()); + result.append(buffer); + write(fd, result.string(), result.size()); + + // Make a non-atomic copy of fast mixer dump state so it won't change underneath us + FastMixerDumpState copy = mFastMixerDumpState; + copy.dump(fd); + +#ifdef STATE_QUEUE_DUMP + // Similar for state queue + StateQueueObserverDump observerCopy = mStateQueueObserverDump; + observerCopy.dump(fd); + StateQueueMutatorDump mutatorCopy = mStateQueueMutatorDump; + mutatorCopy.dump(fd); +#endif + + // Write the tee output to a .wav file + dumpTee(fd, mTeeSource, mId); #ifdef AUDIO_WATCHDOG if (mAudioWatchdog != 0) { @@ -5988,18 +6001,21 @@ AudioFlinger::RecordThread::RecordThread(const sp& audioFlinger, uint32_t sampleRate, audio_channel_mask_t channelMask, audio_io_handle_t id, - audio_devices_t device) : + audio_devices_t device, + const sp& teeSink) : ThreadBase(audioFlinger, id, AUDIO_DEVICE_NONE, device, RECORD), mInput(input), mResampler(NULL), mRsmpOutBuffer(NULL), mRsmpInBuffer(NULL), // mRsmpInIndex and mInputBytes set by readInputParameters() mReqChannelCount(popcount(channelMask)), - mReqSampleRate(sampleRate) + mReqSampleRate(sampleRate), // mBytesRead is only meaningful while active, and so is cleared in start() // (but might be better to also clear here for dump?) + mTeeSink(teeSink) { snprintf(mName, kNameLength, "AudioIn_%X", id); readInputParameters(); + } @@ -6125,14 +6141,16 @@ bool AudioFlinger::RecordThread::threadLoop() } } if (framesOut && mFrameCount == mRsmpInIndex) { + void *readInto; if (framesOut == mFrameCount && ((int)mChannelCount == mReqChannelCount || mFormat != AUDIO_FORMAT_PCM_16_BIT)) { - mBytesRead = mInput->stream->read(mInput->stream, buffer.raw, mInputBytes); + readInto = buffer.raw; framesOut = 0; } else { - mBytesRead = mInput->stream->read(mInput->stream, mRsmpInBuffer, mInputBytes); + readInto = mRsmpInBuffer; mRsmpInIndex = 0; } + mBytesRead = mInput->stream->read(mInput->stream, readInto, mInputBytes); if (mBytesRead <= 0) { if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE)) { @@ -6145,6 +6163,9 @@ bool AudioFlinger::RecordThread::threadLoop() mRsmpInIndex = mFrameCount; framesOut = 0; buffer.frameCount = 0; + } else if (mTeeSink != 0) { + (void) mTeeSink->write(readInto, + mBytesRead >> Format_frameBitShift(mTeeSink->format())); } } } @@ -7184,18 +7205,66 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, } if (status == NO_ERROR && inStream != NULL) { + + // Try to re-use most recently used Pipe to archive a copy of input for dumpsys, + // or (re-)create if current Pipe is idle and does not match the new format + sp teeSink; +#ifdef TEE_SINK_INPUT_FRAMES + enum { + TEE_SINK_NO, // don't copy input + TEE_SINK_NEW, // copy input using a new pipe + TEE_SINK_OLD, // copy input using an existing pipe + } kind; + NBAIO_Format format = Format_from_SR_C(inStream->common.get_sample_rate(&inStream->common), + popcount(inStream->common.get_channels(&inStream->common))); + if (format == Format_Invalid) { + kind = TEE_SINK_NO; + } else if (mRecordTeeSink == 0) { + kind = TEE_SINK_NEW; + } else if (mRecordTeeSink->getStrongCount() != 1) { + kind = TEE_SINK_NO; + } else if (format == mRecordTeeSink->format()) { + kind = TEE_SINK_OLD; + } else { + kind = TEE_SINK_NEW; + } + switch (kind) { + case TEE_SINK_NEW: { + Pipe *pipe = new Pipe(TEE_SINK_INPUT_FRAMES, format); + size_t numCounterOffers = 0; + const NBAIO_Format offers[1] = {format}; + ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers); + 
ALOG_ASSERT(index == 0); + PipeReader *pipeReader = new PipeReader(*pipe); + numCounterOffers = 0; + index = pipeReader->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + mRecordTeeSink = pipe; + mRecordTeeSource = pipeReader; + teeSink = pipe; + } + break; + case TEE_SINK_OLD: + teeSink = mRecordTeeSink; + break; + case TEE_SINK_NO: + default: + break; + } +#endif AudioStreamIn *input = new AudioStreamIn(inHwDev, inStream); // Start record thread // RecorThread require both input and output device indication to forward to audio // pre processing modules audio_devices_t device = (*pDevices) | primaryOutputDevice_l(); + thread = new RecordThread(this, input, reqSamplingRate, reqChannels, id, - device); + device, teeSink); mRecordThreads.add(id, thread); ALOGV("openInput() created record thread: ID %d thread %p", id, thread); if (pSamplingRate != NULL) *pSamplingRate = reqSamplingRate; diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 8c55f7c..116820f 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -1477,7 +1477,8 @@ public: uint32_t sampleRate, audio_channel_mask_t channelMask, audio_io_handle_t id, - audio_devices_t device); + audio_devices_t device, + const sp& teeSink); virtual ~RecordThread(); // no addTrack_l ? @@ -1573,6 +1574,9 @@ public: // when < 0, maximum frames to drop before starting capture even if sync event is // not received ssize_t mFramestoDrop; + + // For dumpsys + const sp mTeeSink; }; // server side of the client's IAudioRecord @@ -2065,6 +2069,13 @@ private: // for use from destructor status_t closeOutput_nonvirtual(audio_io_handle_t output); status_t closeInput_nonvirtual(audio_io_handle_t input); + + // all record threads serially share a common tee sink, which is re-created on format change + sp mRecordTeeSink; + sp mRecordTeeSource; + +public: + static void dumpTee(int fd, const sp& source, audio_io_handle_t id = 0); }; -- cgit v1.1 From 85ab62c4b433df3f1a9826bed1c9bec07a86c750 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 1 Nov 2012 11:11:38 -0700 Subject: Line length 100 Change-Id: Ib28fd7b9ce951a6933f006e7f8812ba617625530 --- include/media/AudioSystem.h | 26 ++-- include/media/AudioTrack.h | 24 ++- include/media/EffectsFactoryApi.h | 18 ++- include/media/IAudioFlinger.h | 3 +- include/media/IAudioPolicyService.h | 5 +- include/media/SoundPool.h | 6 +- include/private/media/AudioTrackShared.h | 6 +- media/libmedia/AudioEffect.cpp | 21 ++- media/libmedia/AudioRecord.cpp | 6 +- media/libmedia/AudioSystem.cpp | 18 ++- media/libmedia/AudioTrack.cpp | 28 ++-- media/libmedia/IAudioFlinger.cpp | 6 +- media/libmedia/IAudioFlingerClient.cpp | 3 +- media/libmedia/IAudioPolicyService.cpp | 9 +- media/libmedia/Visualizer.cpp | 6 +- services/audioflinger/AudioFlinger.cpp | 212 +++++++++++++++++---------- services/audioflinger/AudioFlinger.h | 45 +++--- services/audioflinger/AudioMixer.cpp | 30 ++-- services/audioflinger/AudioMixer.h | 18 ++- services/audioflinger/AudioPolicyService.cpp | 5 +- services/audioflinger/AudioPolicyService.h | 10 +- services/audioflinger/test-resample.cpp | 3 +- 22 files changed, 326 insertions(+), 182 deletions(-) diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index 49e1afc..2218fad 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -87,9 +87,12 @@ public: static float linearToLog(int volume); static int logToLinear(float volume); - static status_t 
getOutputSamplingRate(int* samplingRate, audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); - static status_t getOutputFrameCount(int* frameCount, audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); - static status_t getOutputLatency(uint32_t* latency, audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); + static status_t getOutputSamplingRate(int* samplingRate, + audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); + static status_t getOutputFrameCount(int* frameCount, + audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); + static status_t getOutputLatency(uint32_t* latency, + audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); static status_t getSamplingRate(audio_io_handle_t output, audio_stream_type_t streamType, int* samplingRate); @@ -126,7 +129,8 @@ public: // - BAD_VALUE: invalid parameter // NOTE: this feature is not supported on all hardware platforms and it is // necessary to check returned status before using the returned values. - static status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames, audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); + static status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames, + audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); // return the number of input frames lost by HAL implementation, or 0 if the handle is invalid static unsigned int getInputFramesLost(audio_io_handle_t ioHandle); @@ -147,8 +151,8 @@ public: NUM_CONFIG_EVENTS }; - // audio output descriptor used to cache output configurations in client process to avoid frequent calls - // through IAudioFlinger + // audio output descriptor used to cache output configurations in client process to avoid + // frequent calls through IAudioFlinger class OutputDescriptor { public: OutputDescriptor() @@ -162,8 +166,8 @@ public: }; // Events used to synchronize actions between audio sessions. - // For instance SYNC_EVENT_PRESENTATION_COMPLETE can be used to delay recording start until playback - // is complete on another audio session. + // For instance SYNC_EVENT_PRESENTATION_COMPLETE can be used to delay recording start until + // playback is complete on another audio session. // See definitions in MediaSyncEvent.java enum sync_event_t { SYNC_EVENT_SAME = -1, // used internally to indicate restart with same event @@ -183,8 +187,10 @@ public: // // IAudioPolicyService interface (see AudioPolicyInterface for method descriptions) // - static status_t setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state, const char *device_address); - static audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device, const char *device_address); + static status_t setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state, + const char *device_address); + static audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device, + const char *device_address); static status_t setPhoneState(audio_mode_t state); static status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config); static audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage); diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 34108b3..7dd22e8 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -53,9 +53,12 @@ public: enum event_type { EVENT_MORE_DATA = 0, // Request to write more data to PCM buffer. EVENT_UNDERRUN = 1, // PCM buffer underrun occured. - EVENT_LOOP_END = 2, // Sample loop end was reached; playback restarted from loop start if loop count was not 0. 
- EVENT_MARKER = 3, // Playback head is at the specified marker position (See setMarkerPosition()). - EVENT_NEW_POS = 4, // Playback head is at a new position (See setPositionUpdatePeriod()). + EVENT_LOOP_END = 2, // Sample loop end was reached; playback restarted from + // loop start if loop count was not 0. + EVENT_MARKER = 3, // Playback head is at the specified marker position + // (See setMarkerPosition()). + EVENT_NEW_POS = 4, // Playback head is at a new position + // (See setPositionUpdatePeriod()). EVENT_BUFFER_END = 5 // Playback head is at the end of the buffer. }; @@ -312,7 +315,8 @@ public: /* Sets marker position. When playback reaches the number of frames specified, a callback with * event type EVENT_MARKER is called. Calling setMarkerPosition with marker == 0 cancels marker * notification callback. - * If the AudioTrack has been opened with no callback function associated, the operation will fail. + * If the AudioTrack has been opened with no callback function associated, the operation will + * fail. * * Parameters: * @@ -330,7 +334,8 @@ public: * a callback with event type EVENT_NEW_POS is called. * Calling setPositionUpdatePeriod with updatePeriod == 0 cancels new position notification * callback. - * If the AudioTrack has been opened with no callback function associated, the operation will fail. + * If the AudioTrack has been opened with no callback function associated, the operation will + * fail. * * Parameters: * @@ -359,7 +364,8 @@ public: * Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful operation * - INVALID_OPERATION: the AudioTrack is not stopped. - * - BAD_VALUE: The specified position is beyond the number of frames present in AudioTrack buffer + * - BAD_VALUE: The specified position is beyond the number of frames present in AudioTrack + * buffer */ status_t setPosition(uint32_t position); status_t getPosition(uint32_t *position); @@ -518,8 +524,10 @@ protected: callback_t mCbf; // callback handler for events, or NULL void* mUserData; - uint32_t mNotificationFramesReq; // requested number of frames between each notification callback - uint32_t mNotificationFramesAct; // actual number of frames between each notification callback + uint32_t mNotificationFramesReq; // requested number of frames between each + // notification callback + uint32_t mNotificationFramesAct; // actual number of frames between each + // notification callback sp mSharedBuffer; int mLoopCount; uint32_t mRemainingFrames; diff --git a/include/media/EffectsFactoryApi.h b/include/media/EffectsFactoryApi.h index 65c26f4..b1ed7b0 100644 --- a/include/media/EffectsFactoryApi.h +++ b/include/media/EffectsFactoryApi.h @@ -74,7 +74,8 @@ int EffectQueryNumberEffects(uint32_t *pNumEffects); // -ENOENT no more effect available // -ENODEV factory failed to initialize // -EINVAL invalid pDescriptor -// -ENOSYS effect list has changed since last execution of EffectQueryNumberEffects() +// -ENOSYS effect list has changed since last execution of +// EffectQueryNumberEffects() // *pDescriptor: updated with the effect descriptor. // //////////////////////////////////////////////////////////////////////////////// @@ -91,12 +92,12 @@ int EffectQueryEffect(uint32_t index, effect_descriptor_t *pDescriptor); // // Input: // pEffectUuid: pointer to the effect uuid. -// sessionId: audio session to which this effect instance will be attached. All effects created -// with the same session ID are connected in series and process the same signal stream. 
-// Knowing that two effects are part of the same effect chain can help the library implement -// some kind of optimizations. -// ioId: identifies the output or input stream this effect is directed to at audio HAL. For future -// use especially with tunneled HW accelerated effects +// sessionId: audio session to which this effect instance will be attached. All effects +// created with the same session ID are connected in series and process the same signal +// stream. Knowing that two effects are part of the same effect chain can help the +// library implement some kind of optimizations. +// ioId: identifies the output or input stream this effect is directed to at audio HAL. +// For future use especially with tunneled HW accelerated effects // // Input/Output: // pHandle: address where to return the effect handle. @@ -109,7 +110,8 @@ int EffectQueryEffect(uint32_t index, effect_descriptor_t *pDescriptor); // *pHandle: updated with the effect handle. // //////////////////////////////////////////////////////////////////////////////// -int EffectCreate(const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t ioId, effect_handle_t *pHandle); +int EffectCreate(const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t ioId, + effect_handle_t *pHandle); //////////////////////////////////////////////////////////////////////////////// // diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index 5170a87..359780e 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -123,7 +123,8 @@ public: virtual status_t setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) = 0; - virtual String8 getParameters(audio_io_handle_t ioHandle, const String8& keys) const = 0; + virtual String8 getParameters(audio_io_handle_t ioHandle, const String8& keys) + const = 0; // register a current process for audio output change notifications virtual void registerClient(const sp& client) = 0; diff --git a/include/media/IAudioPolicyService.h b/include/media/IAudioPolicyService.h index cc2e069..f5b0604 100644 --- a/include/media/IAudioPolicyService.h +++ b/include/media/IAudioPolicyService.h @@ -44,9 +44,10 @@ public: audio_policy_dev_state_t state, const char *device_address) = 0; virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device, - const char *device_address) = 0; + const char *device_address) = 0; virtual status_t setPhoneState(audio_mode_t state) = 0; - virtual status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config) = 0; + virtual status_t setForceUse(audio_policy_force_use_t usage, + audio_policy_forced_cfg_t config) = 0; virtual audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage) = 0; virtual audio_io_handle_t getOutput(audio_stream_type_t stream, uint32_t samplingRate = 0, diff --git a/include/media/SoundPool.h b/include/media/SoundPool.h index 002b045..7bf3069 100644 --- a/include/media/SoundPool.h +++ b/include/media/SoundPool.h @@ -65,8 +65,10 @@ public: sp getIMemory() { return mData; } // hack - void init(int numChannels, int sampleRate, audio_format_t format, size_t size, sp data ) { - mNumChannels = numChannels; mSampleRate = sampleRate; mFormat = format; mSize = size; mData = data; } + void init(int numChannels, int sampleRate, audio_format_t format, size_t size, + sp data ) { + mNumChannels = numChannels; mSampleRate = sampleRate; mFormat = format; mSize = size; + mData = data; } private: void init(); diff --git a/include/private/media/AudioTrackShared.h 
b/include/private/media/AudioTrackShared.h index 5b133f3..fe42afa 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -27,7 +27,8 @@ namespace android { // ---------------------------------------------------------------------------- // Maximum cumulated timeout milliseconds before restarting audioflinger thread -#define MAX_STARTUP_TIMEOUT_MS 3000 // Longer timeout period at startup to cope with A2DP init time +#define MAX_STARTUP_TIMEOUT_MS 3000 // Longer timeout period at startup to cope with A2DP + // init time #define MAX_RUN_TIMEOUT_MS 1000 #define WAIT_PERIOD_MS 10 #define RESTORE_TIMEOUT_MS 5000 // Maximum waiting time for a track to be restored @@ -100,7 +101,8 @@ public: uint8_t mName; // normal tracks: track name, fast tracks: track index // used by client only - uint16_t bufferTimeoutMs; // Maximum cumulated timeout before restarting audioflinger + uint16_t bufferTimeoutMs; // Maximum cumulated timeout before restarting + // audioflinger uint16_t waitTimeMs; // Cumulated wait time, used by client only private: diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp index 680604b..3317d57 100644 --- a/media/libmedia/AudioEffect.cpp +++ b/media/libmedia/AudioEffect.cpp @@ -152,7 +152,8 @@ status_t AudioEffect::set(const effect_uuid_t *type, mCblk->buffer = (uint8_t *)mCblk + bufOffset; iEffect->asBinder()->linkToDeath(mIEffectClient); - ALOGV("set() %p OK effect: %s id: %d status %d enabled %d", this, mDescriptor.name, mId, mStatus, mEnabled); + ALOGV("set() %p OK effect: %s id: %d status %d enabled %d", this, mDescriptor.name, mId, + mStatus, mEnabled); return mStatus; } @@ -266,9 +267,11 @@ status_t AudioEffect::setParameter(effect_param_t *param) uint32_t size = sizeof(int); uint32_t psize = ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) + param->vsize; - ALOGV("setParameter: param: %d, param2: %d", *(int *)param->data, (param->psize == 8) ? *((int *)param->data + 1): -1); + ALOGV("setParameter: param: %d, param2: %d", *(int *)param->data, + (param->psize == 8) ? *((int *)param->data + 1): -1); - return mIEffect->command(EFFECT_CMD_SET_PARAM, sizeof (effect_param_t) + psize, param, &size, ¶m->status); + return mIEffect->command(EFFECT_CMD_SET_PARAM, sizeof (effect_param_t) + psize, param, &size, + ¶m->status); } status_t AudioEffect::setParameterDeferred(effect_param_t *param) @@ -321,11 +324,14 @@ status_t AudioEffect::getParameter(effect_param_t *param) return BAD_VALUE; } - ALOGV("getParameter: param: %d, param2: %d", *(int *)param->data, (param->psize == 8) ? *((int *)param->data + 1): -1); + ALOGV("getParameter: param: %d, param2: %d", *(int *)param->data, + (param->psize == 8) ? 
*((int *)param->data + 1): -1); - uint32_t psize = sizeof(effect_param_t) + ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) + param->vsize; + uint32_t psize = sizeof(effect_param_t) + ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) + + param->vsize; - return mIEffect->command(EFFECT_CMD_GET_PARAM, sizeof(effect_param_t) + param->psize, param, &psize, param); + return mIEffect->command(EFFECT_CMD_GET_PARAM, sizeof(effect_param_t) + param->psize, param, + &psize, param); } @@ -346,7 +352,8 @@ void AudioEffect::binderDied() void AudioEffect::controlStatusChanged(bool controlGranted) { - ALOGV("controlStatusChanged %p control %d callback %p mUserData %p", this, controlGranted, mCbf, mUserData); + ALOGV("controlStatusChanged %p control %d callback %p mUserData %p", this, controlGranted, mCbf, + mUserData); if (controlGranted) { if (mStatus == ALREADY_EXISTS) { mStatus = NO_ERROR; diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 8ea6306..bdbee0d 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -127,7 +127,8 @@ status_t AudioRecord::set( int sessionId) { - ALOGV("set(): sampleRate %d, channelMask %#x, frameCount %d",sampleRate, channelMask, frameCount); + ALOGV("set(): sampleRate %d, channelMask %#x, frameCount %d",sampleRate, channelMask, + frameCount); AutoMutex lock(mLock); @@ -701,7 +702,8 @@ bool AudioRecord::processAudioBuffer(const sp& thread) status_t err = obtainBuffer(&audioBuffer, 1); if (err < NO_ERROR) { if (err != TIMED_OUT) { - ALOGE_IF(err != status_t(NO_MORE_BUFFERS), "Error obtaining an audio buffer, giving up."); + ALOGE_IF(err != status_t(NO_MORE_BUFFERS), + "Error obtaining an audio buffer, giving up."); return false; } break; diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 0e5c149..767c452 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -246,7 +246,8 @@ status_t AudioSystem::getSamplingRate(audio_io_handle_t output, gLock.unlock(); } - ALOGV("getSamplingRate() streamType %d, output %d, sampling rate %d", streamType, output, *samplingRate); + ALOGV("getSamplingRate() streamType %d, output %d, sampling rate %d", streamType, output, + *samplingRate); return NO_ERROR; } @@ -290,7 +291,8 @@ status_t AudioSystem::getFrameCount(audio_io_handle_t output, gLock.unlock(); } - ALOGV("getFrameCount() streamType %d, output %d, frameCount %d", streamType, output, *frameCount); + ALOGV("getFrameCount() streamType %d, output %d, frameCount %d", streamType, output, + *frameCount); return NO_ERROR; } @@ -369,7 +371,8 @@ status_t AudioSystem::setVoiceVolume(float value) return af->setVoiceVolume(value); } -status_t AudioSystem::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames, audio_stream_type_t stream) +status_t AudioSystem::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames, + audio_stream_type_t stream) { const sp& af = AudioSystem::get_audio_flinger(); if (af == 0) return PERMISSION_DENIED; @@ -449,8 +452,10 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle OutputDescriptor *outputDesc = new OutputDescriptor(*desc); gOutputs.add(ioHandle, outputDesc); - ALOGV("ioConfigChanged() new output samplingRate %d, format %d channels %#x frameCount %d latency %d", - outputDesc->samplingRate, outputDesc->format, outputDesc->channels, outputDesc->frameCount, outputDesc->latency); + ALOGV("ioConfigChanged() new output samplingRate %d, format %d channels %#x frameCount %d " + "latency %d", + 
outputDesc->samplingRate, outputDesc->format, outputDesc->channels, + outputDesc->frameCount, outputDesc->latency); } break; case OUTPUT_CLOSED: { if (gOutputs.indexOfKey(ioHandle) < 0) { @@ -471,7 +476,8 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle if (param2 == NULL) break; desc = (const OutputDescriptor *)param2; - ALOGV("ioConfigChanged() new config for output %d samplingRate %d, format %d channels %#x frameCount %d latency %d", + ALOGV("ioConfigChanged() new config for output %d samplingRate %d, format %d channels %#x " + "frameCount %d latency %d", ioHandle, desc->samplingRate, desc->format, desc->channels, desc->frameCount, desc->latency); OutputDescriptor *outputDesc = gOutputs.valueAt(index); diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 362d022..5fc9b07 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -198,7 +198,8 @@ status_t AudioTrack::set( int sessionId) { - ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), sharedBuffer->size()); + ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), + sharedBuffer->size()); ALOGV("set() streamType %d frameCount %d flags %04x", streamType, frameCount, flags); @@ -617,12 +618,14 @@ status_t AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCou if (loopStart >= loopEnd || loopEnd - loopStart > cblk->frameCount || cblk->server > loopStart) { - ALOGE("setLoop invalid value: loopStart %d, loopEnd %d, loopCount %d, framecount %d, user %d", loopStart, loopEnd, loopCount, cblk->frameCount, cblk->user); + ALOGE("setLoop invalid value: loopStart %d, loopEnd %d, loopCount %d, framecount %d, " + "user %d", loopStart, loopEnd, loopCount, cblk->frameCount, cblk->user); return BAD_VALUE; } if ((mSharedBuffer != 0) && (loopEnd > cblk->frameCount)) { - ALOGE("setLoop invalid value: loop markers beyond data: loopStart %d, loopEnd %d, framecount %d", + ALOGE("setLoop invalid value: loop markers beyond data: loopStart %d, loopEnd %d, " + "framecount %d", loopStart, loopEnd, cblk->frameCount); return BAD_VALUE; } @@ -924,7 +927,8 @@ status_t AudioTrack::createTrack_l( mCblk->stepUser(mCblk->frameCount); } - mCblk->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) | uint16_t(mVolume[LEFT] * 0x1000)); + mCblk->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) | + uint16_t(mVolume[LEFT] * 0x1000)); mCblk->setSendLevel(mSendLevel); mAudioTrack->attachAuxEffect(mAuxEffectId); mCblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; @@ -994,8 +998,8 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) // timing out when a loop has been set and we have already written upto loop end // is a normal condition: no need to wake AudioFlinger up. if (cblk->user < cblk->loopEnd) { - ALOGW( "obtainBuffer timed out (is the CPU pegged?) %p name=%#x" - "user=%08x, server=%08x", this, cblk->mName, cblk->user, cblk->server); + ALOGW("obtainBuffer timed out (is the CPU pegged?) 
%p name=%#x user=%08x, " + "server=%08x", this, cblk->mName, cblk->user, cblk->server); //unlock cblk mutex before calling mAudioTrack->start() (see issue #1617140) cblk->lock.unlock(); result = mAudioTrack->start(); @@ -1265,7 +1269,8 @@ bool AudioTrack::processAudioBuffer(const sp& thread) status_t err = obtainBuffer(&audioBuffer, waitCount); if (err < NO_ERROR) { if (err != TIMED_OUT) { - ALOGE_IF(err != status_t(NO_MORE_BUFFERS), "Error obtaining an audio buffer, giving up."); + ALOGE_IF(err != status_t(NO_MORE_BUFFERS), + "Error obtaining an audio buffer, giving up."); return false; } break; @@ -1439,11 +1444,14 @@ status_t AudioTrack::dump(int fd, const Vector& args) const String8 result; result.append(" AudioTrack::dump\n"); - snprintf(buffer, 255, " stream type(%d), left - right volume(%f, %f)\n", mStreamType, mVolume[0], mVolume[1]); + snprintf(buffer, 255, " stream type(%d), left - right volume(%f, %f)\n", mStreamType, + mVolume[0], mVolume[1]); result.append(buffer); - snprintf(buffer, 255, " format(%d), channel count(%d), frame count(%d)\n", mFormat, mChannelCount, mCblk->frameCount); + snprintf(buffer, 255, " format(%d), channel count(%d), frame count(%d)\n", mFormat, + mChannelCount, mCblk->frameCount); result.append(buffer); - snprintf(buffer, 255, " sample rate(%d), status(%d), muted(%d)\n", (mCblk == 0) ? 0 : mCblk->sampleRate, mStatus, mMuted); + snprintf(buffer, 255, " sample rate(%d), status(%d), muted(%d)\n", + (mCblk == 0) ? 0 : mCblk->sampleRate, mStatus, mMuted); result.append(buffer); snprintf(buffer, 255, " active(%d), latency (%d)\n", mActive, mLatency); result.append(buffer); diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index ce8ffc4..f412591 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -865,7 +865,8 @@ status_t BnAudioFlinger::onTransact( case REGISTER_CLIENT: { CHECK_INTERFACE(IAudioFlinger, data, reply); - sp client = interface_cast(data.readStrongBinder()); + sp client = interface_cast( + data.readStrongBinder()); registerClient(client); return NO_ERROR; } break; @@ -1043,7 +1044,8 @@ status_t BnAudioFlinger::onTransact( int id; int enabled; - sp effect = createEffect(pid, &desc, client, priority, output, sessionId, &status, &id, &enabled); + sp effect = createEffect(pid, &desc, client, priority, output, sessionId, + &status, &id, &enabled); reply->writeInt32(status); reply->writeInt32(id); reply->writeInt32(enabled); diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp index 4178b29..2d1e0f8 100644 --- a/media/libmedia/IAudioFlingerClient.cpp +++ b/media/libmedia/IAudioFlingerClient.cpp @@ -50,7 +50,8 @@ public: ALOGV("ioConfigChanged stream %d", stream); data.writeInt32(stream); } else if (event != AudioSystem::OUTPUT_CLOSED && event != AudioSystem::INPUT_CLOSED) { - const AudioSystem::OutputDescriptor *desc = (const AudioSystem::OutputDescriptor *)param2; + const AudioSystem::OutputDescriptor *desc = + (const AudioSystem::OutputDescriptor *)param2; data.writeInt32(desc->samplingRate); data.writeInt32(desc->format); data.writeInt32(desc->channels); diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp index 401437c..769deae 100644 --- a/media/libmedia/IAudioPolicyService.cpp +++ b/media/libmedia/IAudioPolicyService.cpp @@ -399,13 +399,15 @@ status_t BnAudioPolicyService::onTransact( case SET_PHONE_STATE: { CHECK_INTERFACE(IAudioPolicyService, data, reply); - reply->writeInt32(static_cast 
(setPhoneState((audio_mode_t) data.readInt32()))); + reply->writeInt32(static_cast (setPhoneState( + (audio_mode_t) data.readInt32()))); return NO_ERROR; } break; case SET_FORCE_USE: { CHECK_INTERFACE(IAudioPolicyService, data, reply); - audio_policy_force_use_t usage = static_cast (data.readInt32()); + audio_policy_force_use_t usage = static_cast ( + data.readInt32()); audio_policy_forced_cfg_t config = static_cast (data.readInt32()); reply->writeInt32(static_cast (setForceUse(usage, config))); @@ -414,7 +416,8 @@ status_t BnAudioPolicyService::onTransact( case GET_FORCE_USE: { CHECK_INTERFACE(IAudioPolicyService, data, reply); - audio_policy_force_use_t usage = static_cast (data.readInt32()); + audio_policy_force_use_t usage = static_cast ( + data.readInt32()); reply->writeInt32(static_cast (getForceUse(usage))); return NO_ERROR; } break; diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp index 8196e10..5b4071b 100644 --- a/media/libmedia/Visualizer.cpp +++ b/media/libmedia/Visualizer.cpp @@ -88,7 +88,8 @@ status_t Visualizer::setEnabled(bool enabled) return status; } -status_t Visualizer::setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t flags, uint32_t rate) +status_t Visualizer::setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t flags, + uint32_t rate) { if (rate > CAPTURE_RATE_MAX) { return BAD_VALUE; @@ -334,7 +335,8 @@ void Visualizer::controlStatusChanged(bool controlGranted) { //------------------------------------------------------------------------- -Visualizer::CaptureThread::CaptureThread(Visualizer& receiver, uint32_t captureRate, bool bCanCallJava) +Visualizer::CaptureThread::CaptureThread(Visualizer& receiver, uint32_t captureRate, + bool bCanCallJava) : Thread(bCanCallJava), mReceiver(receiver) { mSleepTimeUs = 1000000000 / captureRate; diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 9bdab2f..35bd431 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1118,7 +1118,8 @@ void AudioFlinger::audioConfigChanged_l(int event, audio_io_handle_t ioHandle, c // removeClient_l() must be called with AudioFlinger::mLock held void AudioFlinger::removeClient_l(pid_t pid) { - ALOGV("removeClient_l() pid %d, tid %d, calling tid %d", pid, gettid(), IPCThreadState::self()->getCallingPid()); + ALOGV("removeClient_l() pid %d, tid %d, calling tid %d", pid, gettid(), + IPCThreadState::self()->getCallingPid()); mClients.removeItem(pid); } @@ -1221,7 +1222,8 @@ void AudioFlinger::ThreadBase::sendIoConfigEvent_l(int event, int param) { IoConfigEvent *ioEvent = new IoConfigEvent(event, param); mConfigEvents.add(static_cast(ioEvent)); - ALOGV("sendIoConfigEvent() num events %d event %d, param %d", mConfigEvents.size(), event, param); + ALOGV("sendIoConfigEvent() num events %d event %d, param %d", mConfigEvents.size(), event, + param); mWaitWorkCV.signal(); } @@ -1250,7 +1252,8 @@ void AudioFlinger::ThreadBase::processConfigEvents() PrioConfigEvent *prioEvent = static_cast(event); int err = requestPriority(prioEvent->pid(), prioEvent->tid(), prioEvent->prio()); if (err != 0) { - ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; error %d", + ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; " + "error %d", prioEvent->prio(), prioEvent->pid(), prioEvent->tid(), err); } } break; @@ -1667,7 +1670,8 @@ void AudioFlinger::PlaybackThread::dumpInternals(int fd, const Vector& snprintf(buffer, SIZE, "\nOutput thread %p 
internals\n", this); result.append(buffer); - snprintf(buffer, SIZE, "last write occurred (msecs): %llu\n", ns2ms(systemTime() - mLastWriteTime)); + snprintf(buffer, SIZE, "last write occurred (msecs): %llu\n", + ns2ms(systemTime() - mLastWriteTime)); result.append(buffer); snprintf(buffer, SIZE, "total writes: %d\n", mNumWrites); result.append(buffer); @@ -1797,7 +1801,7 @@ sp AudioFlinger::PlaybackThread::createTrac if (mType == DIRECT) { if ((format & AUDIO_FORMAT_MAIN_MASK) == AUDIO_FORMAT_PCM) { if (sampleRate != mSampleRate || format != mFormat || channelMask != mChannelMask) { - ALOGE("createTrack_l() Bad parameter: sampleRate %d format %d, channelMask 0x%08x \"" + ALOGE("createTrack_l() Bad parameter: sampleRate %d format %d, channelMask 0x%08x " "for output %p with format %d", sampleRate, format, channelMask, mOutput, mFormat); lStatus = BAD_VALUE; @@ -1965,7 +1969,8 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) if (track->mainBuffer() != mMixBuffer) { sp chain = getEffectChain_l(track->sessionId()); if (chain != 0) { - ALOGV("addTrack_l() starting track on chain %p for session %d", chain.get(), track->sessionId()); + ALOGV("addTrack_l() starting track on chain %p for session %d", chain.get(), + track->sessionId()); chain->incActiveTrackCnt(); } } @@ -2031,7 +2036,8 @@ void AudioFlinger::PlaybackThread::audioConfigChanged_l(int event, int param) { AudioSystem::OutputDescriptor desc; void *param2 = NULL; - ALOGV("PlaybackThread::audioConfigChanged_l, thread %p, event %d, param %d", this, event, param); + ALOGV("PlaybackThread::audioConfigChanged_l, thread %p, event %d, param %d", this, event, + param); switch (event) { case AudioSystem::OUTPUT_OPENED: @@ -2039,7 +2045,8 @@ void AudioFlinger::PlaybackThread::audioConfigChanged_l(int event, int param) { desc.channels = mChannelMask; desc.samplingRate = mSampleRate; desc.format = mFormat; - desc.frameCount = mNormalFrameCount; // FIXME see AudioFlinger::frameCount(audio_io_handle_t) + desc.frameCount = mNormalFrameCount; // FIXME see + // AudioFlinger::frameCount(audio_io_handle_t) desc.latency = latency(); param2 = &desc; break; @@ -2068,7 +2075,8 @@ void AudioFlinger::PlaybackThread::readOutputParameters() // Calculate size of normal mix buffer relative to the HAL output buffer size double multiplier = 1.0; - if (mType == MIXER && (kUseFastMixer == FastMixer_Static || kUseFastMixer == FastMixer_Dynamic)) { + if (mType == MIXER && (kUseFastMixer == FastMixer_Static || + kUseFastMixer == FastMixer_Dynamic)) { size_t minNormalFrameCount = (kMinNormalMixBufferSizeMs * mSampleRate) / 1000; size_t maxNormalFrameCount = (kMaxNormalMixBufferSizeMs * mSampleRate) / 1000; // round up minimum and round down maximum to nearest 16 frames to satisfy AudioMixer @@ -2087,9 +2095,10 @@ void AudioFlinger::PlaybackThread::readOutputParameters() multiplier = (double) maxNormalFrameCount / (double) mFrameCount; } } else { - // prefer an even multiplier, for compatibility with doubling of fast tracks due to HAL SRC - // (it would be unusual for the normal mix buffer size to not be a multiple of fast - // track, but we sometimes have to do this to satisfy the maximum frame count constraint) + // prefer an even multiplier, for compatibility with doubling of fast tracks due to HAL + // SRC (it would be unusual for the normal mix buffer size to not be a multiple of fast + // track, but we sometimes have to do this to satisfy the maximum frame count + // constraint) // FIXME this rounding up should not be done if no HAL SRC uint32_t 
truncMult = (uint32_t) multiplier; if ((truncMult & 1)) { @@ -2103,7 +2112,8 @@ void AudioFlinger::PlaybackThread::readOutputParameters() mNormalFrameCount = multiplier * mFrameCount; // round up to nearest 16 frames to satisfy AudioMixer mNormalFrameCount = (mNormalFrameCount + 15) & ~15; - ALOGI("HAL output buffer size %u frames, normal mix buffer size %u frames", mFrameCount, mNormalFrameCount); + ALOGI("HAL output buffer size %u frames, normal mix buffer size %u frames", mFrameCount, + mNormalFrameCount); delete[] mMixBuffer; mMixBuffer = new int16_t[mNormalFrameCount * mChannelCount]; @@ -2241,7 +2251,8 @@ bool AudioFlinger::PlaybackThread::isValidSyncEvent(const sp& event) return event->type() == AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE; } -void AudioFlinger::PlaybackThread::threadLoop_removeTracks(const Vector< sp >& tracksToRemove) +void AudioFlinger::PlaybackThread::threadLoop_removeTracks( + const Vector< sp >& tracksToRemove) { size_t count = tracksToRemove.size(); if (CC_UNLIKELY(count)) { @@ -2897,7 +2908,8 @@ void AudioFlinger::MixerThread::threadLoop_sleepTime() } else if (mBytesWritten != 0 || (mMixerStatus == MIXER_TRACKS_ENABLED)) { memset (mMixBuffer, 0, mixBufferSize); sleepTime = 0; - ALOGV_IF((mBytesWritten == 0 && (mMixerStatus == MIXER_TRACKS_ENABLED)), "anticipated start"); + ALOGV_IF((mBytesWritten == 0 && (mMixerStatus == MIXER_TRACKS_ENABLED)), + "anticipated start"); } // TODO add standby time extension fct of effect tail } @@ -3131,7 +3143,8 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac if ((track->framesReady() >= minFrames) && track->isReady() && !track->isPaused() && !track->isTerminated()) { - ALOGVV("track %d u=%08x, s=%08x [OK] on thread %p", name, cblk->user, cblk->server, this); + ALOGVV("track %d u=%08x, s=%08x [OK] on thread %p", name, cblk->user, cblk->server, + this); mixedTracks++; @@ -3144,7 +3157,8 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac if (chain != 0) { tracksWithEffect++; } else { - ALOGW("prepareTracks_l(): track %d attached to effect but no chain found on session %d", + ALOGW("prepareTracks_l(): track %d attached to effect but no chain found on " + "session %d", name, track->sessionId()); } } @@ -3274,7 +3288,8 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac chain->clearInputBuffer(); } - ALOGVV("track %d u=%08x, s=%08x [NOT READY] on thread %p", name, cblk->user, cblk->server, this); + ALOGVV("track %d u=%08x, s=%08x [NOT READY] on thread %p", name, cblk->user, + cblk->server, this); if ((track->sharedBuffer() != 0) || track->isTerminated() || track->isStopped() || track->isPaused()) { // We have consumed all the buffers of this track. 
@@ -3368,7 +3383,8 @@ track_is_ready: ; if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); if (chain != 0) { - ALOGV("stopping track on chain %p for session Id: %d", chain.get(), track->sessionId()); + ALOGV("stopping track on chain %p for session Id: %d", chain.get(), + track->sessionId()); chain->decActiveTrackCnt(); } } @@ -3381,7 +3397,8 @@ track_is_ready: ; // mix buffer must be cleared if all tracks are connected to an // effect chain as in this case the mixer will not write to // mix buffer and track effects will accumulate into it - if ((mixedTracks != 0 && mixedTracks == tracksWithEffect) || (mixedTracks == 0 && fastTracks > 0)) { + if ((mixedTracks != 0 && mixedTracks == tracksWithEffect) || + (mixedTracks == 0 && fastTracks > 0)) { // FIXME as a performance optimization, should remember previous zero status memset(mMixBuffer, 0, mNormalFrameCount * mChannelCount * sizeof(int16_t)); } @@ -3995,7 +4012,8 @@ void AudioFlinger::DirectOutputThread::cacheParameters_l() AudioFlinger::DuplicatingThread::DuplicatingThread(const sp& audioFlinger, AudioFlinger::MixerThread* mainThread, audio_io_handle_t id) - : MixerThread(audioFlinger, mainThread->getOutput(), id, mainThread->outDevice(), DUPLICATING), + : MixerThread(audioFlinger, mainThread->getOutput(), id, mainThread->outDevice(), + DUPLICATING), mWaitTimeMs(UINT_MAX) { addOutputTrack(mainThread); @@ -4116,18 +4134,21 @@ void AudioFlinger::DuplicatingThread::updateWaitTime_l() } -bool AudioFlinger::DuplicatingThread::outputsReady(const SortedVector< sp > &outputTracks) +bool AudioFlinger::DuplicatingThread::outputsReady( + const SortedVector< sp > &outputTracks) { for (size_t i = 0; i < outputTracks.size(); i++) { sp thread = outputTracks[i]->thread().promote(); if (thread == 0) { - ALOGW("DuplicatingThread::outputsReady() could not promote thread on output track %p", outputTracks[i].get()); + ALOGW("DuplicatingThread::outputsReady() could not promote thread on output track %p", + outputTracks[i].get()); return false; } PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); // see note at standby() declaration if (playbackThread->standby() && !playbackThread->isSuspended()) { - ALOGV("DuplicatingThread output track %p on thread %p Not Ready", outputTracks[i].get(), thread.get()); + ALOGV("DuplicatingThread output track %p on thread %p Not Ready", outputTracks[i].get(), + thread.get()); return false; } } @@ -4174,7 +4195,8 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( // mChannelCount // mChannelMask { - ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), sharedBuffer->size()); + ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), + sharedBuffer->size()); // ALOGD("Creating track with %d buffers @ %d bytes", bufferCount, bufferSize); size_t size = sizeof(audio_track_cblk_t); @@ -4335,7 +4357,8 @@ AudioFlinger::PlaybackThread::Track::Track( const sp& sharedBuffer, int sessionId, IAudioFlinger::track_flags_t flags) - : TrackBase(thread, client, sampleRate, format, channelMask, frameCount, sharedBuffer, sessionId), + : TrackBase(thread, client, sampleRate, format, channelMask, frameCount, sharedBuffer, + sessionId), mMute(false), mFillingUpStatus(FS_INVALID), // mRetryCount initialized later when needed @@ -4354,7 +4377,8 @@ AudioFlinger::PlaybackThread::Track::Track( if (mCblk != NULL) { // NOTE: audio_track_cblk_t::frameSize for 8 bit PCM data is based on a sample size of // 16 bit because data is converted to 16 bit before 
being stored in buffer by AudioTrack - mCblk->frameSize = audio_is_linear_pcm(format) ? mChannelCount * sizeof(int16_t) : sizeof(uint8_t); + mCblk->frameSize = audio_is_linear_pcm(format) ? mChannelCount * sizeof(int16_t) : + sizeof(uint8_t); // to avoid leaking a track name, do not allocate one unless there is an mCblk mName = thread->getTrackName_l(channelMask, sessionId); mCblk->mName = mName; @@ -4379,7 +4403,8 @@ AudioFlinger::PlaybackThread::Track::Track( thread->mFastTrackAvailMask &= ~(1 << i); } } - ALOGV("Track constructor name %d, calling pid %d", mName, IPCThreadState::self()->getCallingPid()); + ALOGV("Track constructor name %d, calling pid %d", mName, + IPCThreadState::self()->getCallingPid()); } AudioFlinger::PlaybackThread::Track::~Track() @@ -4421,8 +4446,8 @@ void AudioFlinger::PlaybackThread::Track::destroy() /*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result) { - result.append(" Name Client Type Fmt Chn mask Session mFrCnt fCount S M F SRate L dB R dB " - " Server User Main buf Aux Buf Flags Underruns\n"); + result.append(" Name Client Type Fmt Chn mask Session mFrCnt fCount S M F SRate " + "L dB R dB Server User Main buf Aux Buf Flags Underruns\n"); } void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) @@ -4649,7 +4674,8 @@ void AudioFlinger::PlaybackThread::Track::stop() // and then to STOPPED and reset() when presentation is complete mState = STOPPING_1; } - ALOGV("not stopping/stopped => stopping/stopped (%d) on thread %p", mName, playbackThread); + ALOGV("not stopping/stopped => stopping/stopped (%d) on thread %p", mName, + playbackThread); } if (!isOutputTrack() && (state == ACTIVE || state == RESUMING)) { thread->mLock.unlock(); @@ -5408,7 +5434,8 @@ AudioFlinger::RecordThread::RecordTrack::~RecordTrack() } // AudioBufferProvider interface -status_t AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts) +status_t AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvider::Buffer* buffer, + int64_t pts) { audio_track_cblk_t* cblk = this->cblk(); uint32_t framesAvail; @@ -5600,7 +5627,8 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr mOutBuffer.frameCount = pInBuffer->frameCount; nsecs_t startTime = systemTime(); if (obtainBuffer(&mOutBuffer, waitTimeLeftMs) == (status_t)NO_MORE_BUFFERS) { - ALOGV ("OutputTrack::write() %p thread %p no more output buffers", this, mThread.unsafe_get()); + ALOGV ("OutputTrack::write() %p thread %p no more output buffers", this, + mThread.unsafe_get()); outputBufferFull = true; break; } @@ -5612,7 +5640,8 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr } } - uint32_t outFrames = pInBuffer->frameCount > mOutBuffer.frameCount ? mOutBuffer.frameCount : pInBuffer->frameCount; + uint32_t outFrames = pInBuffer->frameCount > mOutBuffer.frameCount ? 
mOutBuffer.frameCount : + pInBuffer->frameCount; memcpy(mOutBuffer.raw, pInBuffer->raw, outFrames * channelCount * sizeof(int16_t)); mCblk->stepUser(outFrames); pInBuffer->frameCount -= outFrames; @@ -5625,7 +5654,8 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr mBufferQueue.removeAt(0); delete [] pInBuffer->mBuffer; delete pInBuffer; - ALOGV("OutputTrack::write() %p thread %p released overflow buffer %d", this, mThread.unsafe_get(), mBufferQueue.size()); + ALOGV("OutputTrack::write() %p thread %p released overflow buffer %d", this, + mThread.unsafe_get(), mBufferQueue.size()); } else { break; } @@ -5641,11 +5671,14 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr pInBuffer->mBuffer = new int16_t[inBuffer.frameCount * channelCount]; pInBuffer->frameCount = inBuffer.frameCount; pInBuffer->i16 = pInBuffer->mBuffer; - memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * channelCount * sizeof(int16_t)); + memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * channelCount * + sizeof(int16_t)); mBufferQueue.add(pInBuffer); - ALOGV("OutputTrack::write() %p thread %p adding overflow buffer %d", this, mThread.unsafe_get(), mBufferQueue.size()); + ALOGV("OutputTrack::write() %p thread %p adding overflow buffer %d", this, + mThread.unsafe_get(), mBufferQueue.size()); } else { - ALOGW("OutputTrack::write() %p thread %p no more overflow buffers", mThread.unsafe_get(), this); + ALOGW("OutputTrack::write() %p thread %p no more overflow buffers", + mThread.unsafe_get(), this); } } } @@ -5670,7 +5703,8 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr return outputBufferFull; } -status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer(AudioBufferProvider::Buffer* buffer, uint32_t waitTimeMs) +status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( + AudioBufferProvider::Buffer* buffer, uint32_t waitTimeMs) { int active; status_t result; @@ -5934,13 +5968,14 @@ sp AudioFlinger::openRecord( *sessionId = lSessionId; } } - // create new record track. The record track uses one track in mHardwareMixerThread by convention. + // create new record track. + // The record track uses one track in mHardwareMixerThread by convention. 
recordTrack = thread->createRecordTrack_l(client, sampleRate, format, channelMask, frameCount, lSessionId, flags, tid, &lStatus); } if (lStatus != NO_ERROR) { - // remove local strong reference to Client before deleting the RecordTrack so that the Client - // destructor is called by the TrackBase destructor with mLock held + // remove local strong reference to Client before deleting the RecordTrack so that the + // Client destructor is called by the TrackBase destructor with mLock held client.clear(); recordTrack.clear(); goto Exit; @@ -5959,7 +5994,8 @@ Exit: // ---------------------------------------------------------------------------- -AudioFlinger::RecordHandle::RecordHandle(const sp& recordTrack) +AudioFlinger::RecordHandle::RecordHandle( + const sp& recordTrack) : BnAudioRecord(), mRecordTrack(recordTrack) { @@ -5974,7 +6010,8 @@ sp AudioFlinger::RecordHandle::getCblk() const { return mRecordTrack->getCblk(); } -status_t AudioFlinger::RecordHandle::start(int /*AudioSystem::sync_event_t*/ event, int triggerSession) { +status_t AudioFlinger::RecordHandle::start(int /*AudioSystem::sync_event_t*/ event, + int triggerSession) { ALOGV("RecordHandle::start()"); return mRecordTrack->start((AudioSystem::sync_event_t)event, triggerSession); } @@ -6122,7 +6159,8 @@ bool AudioFlinger::RecordThread::threadLoop() size_t framesIn = mFrameCount - mRsmpInIndex; if (framesIn) { int8_t *src = (int8_t *)mRsmpInBuffer + mRsmpInIndex * mFrameSize; - int8_t *dst = buffer.i8 + (buffer.frameCount - framesOut) * mActiveTrack->mCblk->frameSize; + int8_t *dst = buffer.i8 + (buffer.frameCount - framesOut) * + mActiveTrack->mCblk->frameSize; if (framesIn > framesOut) framesIn = framesOut; mRsmpInIndex += framesIn; @@ -6143,7 +6181,8 @@ bool AudioFlinger::RecordThread::threadLoop() if (framesOut && mFrameCount == mRsmpInIndex) { void *readInto; if (framesOut == mFrameCount && - ((int)mChannelCount == mReqChannelCount || mFormat != AUDIO_FORMAT_PCM_16_BIT)) { + ((int)mChannelCount == mReqChannelCount || + mFormat != AUDIO_FORMAT_PCM_16_BIT)) { readInto = buffer.raw; framesOut = 0; } else { @@ -6177,12 +6216,14 @@ bool AudioFlinger::RecordThread::threadLoop() if (mChannelCount == 1 && mReqChannelCount == 1) { framesOut >>= 1; } - mResampler->resample(mRsmpOutBuffer, framesOut, this /* AudioBufferProvider* */); - // ditherAndClamp() works as long as all buffers returned by mActiveTrack->getNextBuffer() - // are 32 bit aligned which should be always true. + mResampler->resample(mRsmpOutBuffer, framesOut, + this /* AudioBufferProvider* */); + // ditherAndClamp() works as long as all buffers returned by + // mActiveTrack->getNextBuffer() are 32 bit aligned which should be always true. 
if (mChannelCount == 2 && mReqChannelCount == 1) { ditherAndClamp(mRsmpOutBuffer, mRsmpOutBuffer, framesOut); - // the resampler always outputs stereo samples: do post stereo to mono conversion + // the resampler always outputs stereo samples: + // do post stereo to mono conversion downmix_to_mono_i16_from_stereo_i16(buffer.i16, (int16_t *)mRsmpOutBuffer, framesOut); } else { @@ -6656,7 +6697,8 @@ bool AudioFlinger::RecordThread::checkForNewParameters_l() status = BAD_VALUE; } else { mInDevice = value; - // disable AEC and NS if the device is a BT SCO headset supporting those pre processings + // disable AEC and NS if the device is a BT SCO headset supporting those + // pre processings if (mTracks.size() > 0) { bool suspend = audio_is_bluetooth_sco_device(mInDevice) && mAudioFlinger->btNrecIsOff(); @@ -6678,7 +6720,8 @@ bool AudioFlinger::RecordThread::checkForNewParameters_l() mAudioSource = (audio_source_t)value; } if (status == NO_ERROR) { - status = mInput->stream->common.set_parameters(&mInput->stream->common, keyValuePair.string()); + status = mInput->stream->common.set_parameters(&mInput->stream->common, + keyValuePair.string()); if (status == INVALID_OPERATION) { inputStandBy(); status = mInput->stream->common.set_parameters(&mInput->stream->common, @@ -6688,8 +6731,10 @@ bool AudioFlinger::RecordThread::checkForNewParameters_l() if (status == BAD_VALUE && reqFormat == mInput->stream->common.get_format(&mInput->stream->common) && reqFormat == AUDIO_FORMAT_PCM_16_BIT && - ((int)mInput->stream->common.get_sample_rate(&mInput->stream->common) <= (2 * reqSamplingRate)) && - popcount(mInput->stream->common.get_channels(&mInput->stream->common)) <= FCC_2 && + ((int)mInput->stream->common.get_sample_rate(&mInput->stream->common) + <= (2 * reqSamplingRate)) && + popcount(mInput->stream->common.get_channels(&mInput->stream->common)) + <= FCC_2 && (reqChannelCount <= FCC_2)) { status = NO_ERROR; } @@ -6783,7 +6828,8 @@ void AudioFlinger::RecordThread::readInputParameters() mResampler->setVolume(AudioMixer::UNITY_GAIN, AudioMixer::UNITY_GAIN); mRsmpOutBuffer = new int32_t[mFrameCount * 2]; - // optmization: if mono to mono, alter input frame count as if we were inputing stereo samples + // optmization: if mono to mono, alter input frame count as if we were inputing + // stereo samples if (mChannelCount == 1 && mReqChannelCount == 1) { mFrameCount >>= 1; } @@ -7010,7 +7056,8 @@ audio_io_handle_t AudioFlinger::openOutput(audio_module_handle_t module, &outStream); mHardwareStatus = AUDIO_HW_IDLE; - ALOGV("openOutput() openOutputStream returned output %p, SamplingRate %d, Format %d, Channels %x, status %d", + ALOGV("openOutput() openOutputStream returned output %p, SamplingRate %d, Format %d, " + "Channels %x, status %d", outStream, config.sample_rate, config.format, @@ -7063,7 +7110,8 @@ audio_io_handle_t AudioFlinger::openDuplicateOutput(audio_io_handle_t output1, MixerThread *thread2 = checkMixerThread_l(output2); if (thread1 == NULL || thread2 == NULL) { - ALOGW("openDuplicateOutput() wrong output mixer type for output %d or %d", output1, output2); + ALOGW("openDuplicateOutput() wrong output mixer type for output %d or %d", output1, + output2); return 0; } @@ -7098,7 +7146,8 @@ status_t AudioFlinger::closeOutput_nonvirtual(audio_io_handle_t output) if (thread->type() == ThreadBase::MIXER) { for (size_t i = 0; i < mPlaybackThreads.size(); i++) { if (mPlaybackThreads.valueAt(i)->type() == ThreadBase::DUPLICATING) { - DuplicatingThread *dupThread = (DuplicatingThread 
*)mPlaybackThreads.valueAt(i).get(); + DuplicatingThread *dupThread = + (DuplicatingThread *)mPlaybackThreads.valueAt(i).get(); dupThread->removeOutputTrack((MixerThread *)thread.get()); } } @@ -7185,16 +7234,17 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, status = inHwHal->open_input_stream(inHwHal, id, *pDevices, &config, &inStream); - ALOGV("openInput() openInputStream returned input %p, SamplingRate %d, Format %d, Channels %x, status %d", + ALOGV("openInput() openInputStream returned input %p, SamplingRate %d, Format %d, Channels %x, " + "status %d", inStream, config.sample_rate, config.format, config.channel_mask, status); - // If the input could not be opened with the requested parameters and we can handle the conversion internally, - // try to open again with the proposed parameters. The AudioFlinger can resample the input and do mono to stereo - // or stereo to mono conversions on 16 bit PCM inputs. + // If the input could not be opened with the requested parameters and we can handle the + // conversion internally, try to open again with the proposed parameters. The AudioFlinger can + // resample the input and do mono to stereo or stereo to mono conversions on 16 bit PCM inputs. if (status == BAD_VALUE && reqFormat == config.format && config.format == AUDIO_FORMAT_PCM_16_BIT && (config.sample_rate <= 2 * reqSamplingRate) && @@ -8072,7 +8122,8 @@ status_t AudioFlinger::PlaybackThread::addEffectChain_l(const sp& c for (size_t i = 0; i < mTracks.size(); ++i) { sp track = mTracks[i]; if (session == track->sessionId()) { - ALOGV("addEffectChain_l() track->setMainBuffer track %p buffer %p", track.get(), buffer); + ALOGV("addEffectChain_l() track->setMainBuffer track %p buffer %p", track.get(), + buffer); track->setMainBuffer(buffer); chain->incTrackCnt(); } @@ -8914,12 +8965,15 @@ void AudioFlinger::EffectModule::dump(int fd, const Vector& args) result.append("\t\tDescriptor:\n"); snprintf(buffer, SIZE, "\t\t- UUID: %08X-%04X-%04X-%04X-%02X%02X%02X%02X%02X%02X\n", mDescriptor.uuid.timeLow, mDescriptor.uuid.timeMid, mDescriptor.uuid.timeHiAndVersion, - mDescriptor.uuid.clockSeq, mDescriptor.uuid.node[0], mDescriptor.uuid.node[1],mDescriptor.uuid.node[2], + mDescriptor.uuid.clockSeq, mDescriptor.uuid.node[0], mDescriptor.uuid.node[1], + mDescriptor.uuid.node[2], mDescriptor.uuid.node[3],mDescriptor.uuid.node[4],mDescriptor.uuid.node[5]); result.append(buffer); snprintf(buffer, SIZE, "\t\t- TYPE: %08X-%04X-%04X-%04X-%02X%02X%02X%02X%02X%02X\n", - mDescriptor.type.timeLow, mDescriptor.type.timeMid, mDescriptor.type.timeHiAndVersion, - mDescriptor.type.clockSeq, mDescriptor.type.node[0], mDescriptor.type.node[1],mDescriptor.type.node[2], + mDescriptor.type.timeLow, mDescriptor.type.timeMid, + mDescriptor.type.timeHiAndVersion, + mDescriptor.type.clockSeq, mDescriptor.type.node[0], mDescriptor.type.node[1], + mDescriptor.type.node[2], mDescriptor.type.node[3],mDescriptor.type.node[4],mDescriptor.type.node[5]); result.append(buffer); snprintf(buffer, SIZE, "\t\t- apiVersion: %08X\n\t\t- flags: %08X\n", @@ -9003,7 +9057,8 @@ AudioFlinger::EffectHandle::EffectHandle(const sp& effect, mBuffer = (uint8_t *)mCblk + bufOffset; } } else { - ALOGE("not enough memory for Effect size=%u", EFFECT_PARAM_BUFFER_SIZE + sizeof(effect_param_cblk_t)); + ALOGE("not enough memory for Effect size=%u", EFFECT_PARAM_BUFFER_SIZE + + sizeof(effect_param_cblk_t)); return; } } @@ -9130,8 +9185,9 @@ status_t AudioFlinger::EffectHandle::command(uint32_t cmdCode, // handle commands that are 
not forwarded transparently to effect engine if (cmdCode == EFFECT_CMD_SET_PARAM_COMMIT) { - // No need to trylock() here as this function is executed in the binder thread serving a particular client process: - // no risk to block the whole media server process or mixer threads is we are stuck here + // No need to trylock() here as this function is executed in the binder thread serving a + // particular client process: no risk to block the whole media server process or mixer + // threads if we are stuck here Mutex::Autolock _l(mCblk->lock); if (mCblk->clientIndex > EFFECT_PARAM_BUFFER_SIZE || mCblk->serverIndex > EFFECT_PARAM_BUFFER_SIZE) { @@ -9271,7 +9327,8 @@ AudioFlinger::EffectChain::~EffectChain() } // getEffectFromDesc_l() must be called with ThreadBase::mLock held -sp AudioFlinger::EffectChain::getEffectFromDesc_l(effect_descriptor_t *descriptor) +sp AudioFlinger::EffectChain::getEffectFromDesc_l( + effect_descriptor_t *descriptor) { size_t size = mEffects.size(); @@ -9430,7 +9487,8 @@ status_t AudioFlinger::EffectChain::addEffect_l(const sp& effect) // check invalid effect chaining combinations if (insertPref == EFFECT_FLAG_INSERT_EXCLUSIVE || iPref == EFFECT_FLAG_INSERT_EXCLUSIVE) { - ALOGW("addEffect_l() could not insert effect %s: exclusive conflict with %s", desc.name, d.name); + ALOGW("addEffect_l() could not insert effect %s: exclusive conflict with %s", + desc.name, d.name); return INVALID_OPERATION; } // remember position of first insert effect and by default @@ -9481,7 +9539,8 @@ status_t AudioFlinger::EffectChain::addEffect_l(const sp& effect) } mEffects.insertAt(effect, idx_insert); - ALOGV("addEffect_l() effect %p, added in chain %p at rank %d", effect.get(), this, idx_insert); + ALOGV("addEffect_l() effect %p, added in chain %p at rank %d", effect.get(), this, + idx_insert); } effect->configure(); return NO_ERROR; @@ -9512,7 +9571,8 @@ size_t AudioFlinger::EffectChain::removeEffect_l(const sp& effect) } } mEffects.removeAt(i); - ALOGV("removeEffect_l() effect %p, removed from chain %p at rank %d", effect.get(), this, i); + ALOGV("removeEffect_l() effect %p, removed from chain %p at rank %d", effect.get(), + this, i); break; } } @@ -9736,7 +9796,8 @@ void AudioFlinger::EffectChain::setEffectSuspendedAll_l(bool suspend) for (size_t i = 0; i < types.size(); i++) { setEffectSuspended_l(types[i], false); } - ALOGV("setEffectSuspendedAll_l() remove entry for %08x", mSuspendedEffects.keyAt(index)); + ALOGV("setEffectSuspendedAll_l() remove entry for %08x", + mSuspendedEffects.keyAt(index)); mSuspendedEffects.removeItem((int)kKeyForSuspendAll); } } @@ -9762,7 +9823,8 @@ bool AudioFlinger::EffectChain::isEffectEligibleForSuspend(const effect_descript return true; } -void AudioFlinger::EffectChain::getSuspendEligibleEffects(Vector< sp > &effects) +void AudioFlinger::EffectChain::getSuspendEligibleEffects( + Vector< sp > &effects) { effects.clear(); for (size_t i = 0; i < mEffects.size(); i++) { diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 116820f..2251b45 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -269,12 +269,14 @@ private: virtual ~AudioFlinger(); // call in any IAudioFlinger method that accesses mPrimaryHardwareDev - status_t initCheck() const { return mPrimaryHardwareDev == NULL ? NO_INIT : NO_ERROR; } + status_t initCheck() const { return mPrimaryHardwareDev == NULL ? 
+ NO_INIT : NO_ERROR; } // RefBase virtual void onFirstRef(); - AudioHwDevice* findSuitableHwDev_l(audio_module_handle_t module, audio_devices_t devices); + AudioHwDevice* findSuitableHwDev_l(audio_module_handle_t module, + audio_devices_t devices); void purgeStaleEffects_l(); // standby delay for MIXER and DUPLICATING playback threads is read from property @@ -746,7 +748,8 @@ private: const sp mDeathRecipient; // list of suspended effects per session and per type. The first vector is // keyed by session ID, the second by type UUID timeLow field - KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; + KeyedVector< int, KeyedVector< int, sp > > + mSuspendedSessions; }; struct stream_type_t { @@ -788,7 +791,8 @@ private: static void appendDumpHeader(String8& result); void dump(char* buffer, size_t size); - virtual status_t start(AudioSystem::sync_event_t event = AudioSystem::SYNC_EVENT_NONE, + virtual status_t start(AudioSystem::sync_event_t event = + AudioSystem::SYNC_EVENT_NONE, int triggerSession = 0); virtual void stop(); void pause(); @@ -823,7 +827,8 @@ private: Track& operator = (const Track&); // AudioBufferProvider interface - virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts = kInvalidPTS); + virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, + int64_t pts = kInvalidPTS); // releaseBuffer() not overridden virtual size_t framesReady() const; @@ -877,8 +882,8 @@ private: int32_t *mAuxBuffer; int mAuxEffectId; bool mHasVolumeController; - size_t mPresentationCompleteFrames; // number of frames written to the audio HAL - // when this track will be fully rendered + size_t mPresentationCompleteFrames; // number of frames written to the + // audio HAL when this track will be fully rendered private: IAudioFlinger::track_flags_t mFlags; @@ -1000,7 +1005,8 @@ private: int frameCount); virtual ~OutputTrack(); - virtual status_t start(AudioSystem::sync_event_t event = AudioSystem::SYNC_EVENT_NONE, + virtual status_t start(AudioSystem::sync_event_t event = + AudioSystem::SYNC_EVENT_NONE, int triggerSession = 0); virtual void stop(); bool write(int16_t* data, uint32_t frames); @@ -1014,7 +1020,8 @@ private: NO_MORE_BUFFERS = 0x80000001, // same in AudioTrack.h, ok to be different value }; - status_t obtainBuffer(AudioBufferProvider::Buffer* buffer, uint32_t waitTimeMs); + status_t obtainBuffer(AudioBufferProvider::Buffer* buffer, + uint32_t waitTimeMs); void clearBufferQueue(); // Maximum number of pending buffers allocated by OutputTrack::write() @@ -1186,7 +1193,8 @@ public: void dumpTracks(int fd, const Vector& args); SortedVector< sp > mTracks; - // mStreamTypes[] uses 1 additional stream type internally for the OutputTrack used by DuplicatingThread + // mStreamTypes[] uses 1 additional stream type internally for the OutputTrack used by + // DuplicatingThread stream_type_t mStreamTypes[AUDIO_STREAM_CNT + 1]; AudioStreamOut *mOutput; @@ -1454,7 +1462,8 @@ public: // clear the buffer overflow flag void clearOverflow() { mOverflow = false; } // set the buffer overflow flag and return previous value - bool setOverflow() { bool tmp = mOverflow; mOverflow = true; return tmp; } + bool setOverflow() { bool tmp = mOverflow; mOverflow = true; + return tmp; } static void appendDumpHeader(String8& result); void dump(char* buffer, size_t size); @@ -1466,7 +1475,8 @@ public: RecordTrack& operator = (const RecordTrack&); // AudioBufferProvider interface - virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts = kInvalidPTS); 
+ virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, + int64_t pts = kInvalidPTS); // releaseBuffer() not overridden bool mOverflow; // overflow on most recent attempt to fill client buffer @@ -1786,7 +1796,8 @@ mutable Mutex mLock; // mutex for process, commands and handl sp mEffectClient; // callback interface for client notifications /*const*/ sp mClient; // client for shared memory allocation, see disconnect() sp mCblkMemory; // shared memory for control block - effect_param_cblk_t* mCblk; // control block for deferred parameter setting via shared memory + effect_param_cblk_t* mCblk; // control block for deferred parameter setting via + // shared memory uint8_t* mBuffer; // pointer to parameter area in shared memory int mPriority; // client application priority to control the effect bool mHasControl; // true if this handle is controlling the effect @@ -1799,10 +1810,10 @@ mutable Mutex mLock; // mutex for process, commands and handl // the EffectChain class represents a group of effects associated to one audio session. // There can be any number of EffectChain objects per output mixer thread (PlaybackThread). // The EffecChain with session ID 0 contains global effects applied to the output mix. - // Effects in this chain can be insert or auxiliary. Effects in other chains (attached to tracks) - // are insert only. The EffectChain maintains an ordered list of effect module, the order corresponding - // in the effect process order. When attached to a track (session ID != 0), it also provide it's own - // input buffer used by the track as accumulation buffer. + // Effects in this chain can be insert or auxiliary. Effects in other chains (attached to + // tracks) are insert only. The EffectChain maintains an ordered list of effect module, the + // order corresponding in the effect process order. When attached to a track (session ID != 0), + // it also provide it's own input buffer used by the track as accumulation buffer. 
class EffectChain : public RefBase { public: EffectChain(const wp& wThread, int sessionId); diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index a4ed445..b3ca877 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -765,7 +765,8 @@ void AudioMixer::process__validate(state_t* state, int64_t pts) } -void AudioMixer::track__genericResample(track_t* t, int32_t* out, size_t outFrameCount, int32_t* temp, int32_t* aux) +void AudioMixer::track__genericResample(track_t* t, int32_t* out, size_t outFrameCount, + int32_t* temp, int32_t* aux) { t->resampler->setSampleRate(t->sampleRate); @@ -798,11 +799,13 @@ void AudioMixer::track__genericResample(track_t* t, int32_t* out, size_t outFram } } -void AudioMixer::track__nop(track_t* t, int32_t* out, size_t outFrameCount, int32_t* temp, int32_t* aux) +void AudioMixer::track__nop(track_t* t, int32_t* out, size_t outFrameCount, int32_t* temp, + int32_t* aux) { } -void AudioMixer::volumeRampStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp, int32_t* aux) +void AudioMixer::volumeRampStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp, + int32_t* aux) { int32_t vl = t->prevVolume[0]; int32_t vr = t->prevVolume[1]; @@ -844,7 +847,8 @@ void AudioMixer::volumeRampStereo(track_t* t, int32_t* out, size_t frameCount, i t->adjustVolumeRamp(aux != NULL); } -void AudioMixer::volumeStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp, int32_t* aux) +void AudioMixer::volumeStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp, + int32_t* aux) { const int16_t vl = t->volume[0]; const int16_t vr = t->volume[1]; @@ -872,7 +876,8 @@ void AudioMixer::volumeStereo(track_t* t, int32_t* out, size_t frameCount, int32 } } -void AudioMixer::track__16BitsStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp, int32_t* aux) +void AudioMixer::track__16BitsStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp, + int32_t* aux) { const int16_t *in = static_cast(t->in); @@ -962,7 +967,8 @@ void AudioMixer::track__16BitsStereo(track_t* t, int32_t* out, size_t frameCount t->in = in; } -void AudioMixer::track__16BitsMono(track_t* t, int32_t* out, size_t frameCount, int32_t* temp, int32_t* aux) +void AudioMixer::track__16BitsMono(track_t* t, int32_t* out, size_t frameCount, int32_t* temp, + int32_t* aux) { const int16_t *in = static_cast(t->in); @@ -1147,7 +1153,8 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) while (outFrames) { size_t inFrames = (t.frameCount > outFrames)?outFrames:t.frameCount; if (inFrames) { - t.hook(&t, outTemp + (BLOCKSIZE-outFrames)*MAX_NUM_CHANNELS, inFrames, state->resampleTemp, aux); + t.hook(&t, outTemp + (BLOCKSIZE-outFrames)*MAX_NUM_CHANNELS, inFrames, + state->resampleTemp, aux); t.frameCount -= inFrames; outFrames -= inFrames; if (CC_UNLIKELY(aux != NULL)) { @@ -1156,7 +1163,8 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) } if (t.frameCount == 0 && outFrames) { t.bufferProvider->releaseBuffer(&t.buffer); - t.buffer.frameCount = (state->frameCount - numFrames) - (BLOCKSIZE - outFrames); + t.buffer.frameCount = (state->frameCount - numFrames) - + (BLOCKSIZE - outFrames); int64_t outputPTS = calculateOutputPTS( t, pts, numFrames + (BLOCKSIZE - outFrames)); t.bufferProvider->getNextBuffer(&t.buffer, outputPTS); @@ -1246,7 +1254,8 @@ void AudioMixer::process__genericResampling(state_t* state, int64_t pts) if (CC_UNLIKELY(aux != NULL)) { aux += 
outFrames; } - t.hook(&t, outTemp + outFrames*MAX_NUM_CHANNELS, t.buffer.frameCount, state->resampleTemp, aux); + t.hook(&t, outTemp + outFrames*MAX_NUM_CHANNELS, t.buffer.frameCount, + state->resampleTemp, aux); outFrames += t.buffer.frameCount; t.bufferProvider->releaseBuffer(&t.buffer); } @@ -1286,7 +1295,8 @@ void AudioMixer::process__OneTrack16BitsStereoNoResampling(state_t* state, // been enabled for mixing. if (in == NULL || ((unsigned long)in & 3)) { memset(out, 0, numFrames*MAX_NUM_CHANNELS*sizeof(int16_t)); - ALOGE_IF(((unsigned long)in & 3), "process stereo track: input buffer alignment pb: buffer %p track %d, channels %d, needs %08x", + ALOGE_IF(((unsigned long)in & 3), "process stereo track: input buffer alignment pb: " + "buffer %p track %d, channels %d, needs %08x", in, i, t.channelCount, t.needs); return; } diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index e60a298..fd21fda 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -146,7 +146,8 @@ private: struct track_t; class DownmixerBufferProvider; - typedef void (*hook_t)(track_t* t, int32_t* output, size_t numOutFrames, int32_t* temp, int32_t* aux); + typedef void (*hook_t)(track_t* t, int32_t* output, size_t numOutFrames, int32_t* temp, + int32_t* aux); static const int BLOCKSIZE = 16; // 4 cache lines struct track_t { @@ -261,12 +262,17 @@ private: static status_t prepareTrackForDownmix(track_t* pTrack, int trackNum); static void unprepareTrackForDownmix(track_t* pTrack, int trackName); - static void track__genericResample(track_t* t, int32_t* out, size_t numFrames, int32_t* temp, int32_t* aux); + static void track__genericResample(track_t* t, int32_t* out, size_t numFrames, int32_t* temp, + int32_t* aux); static void track__nop(track_t* t, int32_t* out, size_t numFrames, int32_t* temp, int32_t* aux); - static void track__16BitsStereo(track_t* t, int32_t* out, size_t numFrames, int32_t* temp, int32_t* aux); - static void track__16BitsMono(track_t* t, int32_t* out, size_t numFrames, int32_t* temp, int32_t* aux); - static void volumeRampStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp, int32_t* aux); - static void volumeStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp, int32_t* aux); + static void track__16BitsStereo(track_t* t, int32_t* out, size_t numFrames, int32_t* temp, + int32_t* aux); + static void track__16BitsMono(track_t* t, int32_t* out, size_t numFrames, int32_t* temp, + int32_t* aux); + static void volumeRampStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp, + int32_t* aux); + static void volumeStereo(track_t* t, int32_t* out, size_t frameCount, int32_t* temp, + int32_t* aux); static void process__validate(state_t* state, int64_t pts); static void process__nop(state_t* state, int64_t pts); diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp index 8b99bd2..ea130ba 100644 --- a/services/audioflinger/AudioPolicyService.cpp +++ b/services/audioflinger/AudioPolicyService.cpp @@ -227,7 +227,8 @@ audio_io_handle_t AudioPolicyService::getOutput(audio_stream_type_t stream, } ALOGV("getOutput() tid %d", gettid()); Mutex::Autolock _l(mLock); - return mpAudioPolicy->get_output(mpAudioPolicy, stream, samplingRate, format, channelMask, flags); + return mpAudioPolicy->get_output(mpAudioPolicy, stream, samplingRate, format, channelMask, + flags); } status_t AudioPolicyService::startOutput(audio_io_handle_t output, @@ -280,7 +281,7 @@ 
audio_io_handle_t AudioPolicyService::getInput(audio_source_t inputSource, Mutex::Autolock _l(mLock); // the audio_in_acoustics_t parameter is ignored by get_input() audio_io_handle_t input = mpAudioPolicy->get_input(mpAudioPolicy, inputSource, samplingRate, - format, channelMask, (audio_in_acoustics_t) 0); + format, channelMask, (audio_in_acoustics_t) 0); if (input == 0) { return input; diff --git a/services/audioflinger/AudioPolicyService.h b/services/audioflinger/AudioPolicyService.h index 63f9549..92653c1 100644 --- a/services/audioflinger/AudioPolicyService.h +++ b/services/audioflinger/AudioPolicyService.h @@ -142,11 +142,11 @@ private: status_t dumpInternals(int fd); // Thread used for tone playback and to send audio config commands to audio flinger - // For tone playback, using a separate thread is necessary to avoid deadlock with mLock because startTone() - // and stopTone() are normally called with mLock locked and requesting a tone start or stop will cause - // calls to AudioPolicyService and an attempt to lock mLock. - // For audio config commands, it is necessary because audio flinger requires that the calling process (user) - // has permission to modify audio settings. + // For tone playback, using a separate thread is necessary to avoid deadlock with mLock because + // startTone() and stopTone() are normally called with mLock locked and requesting a tone start + // or stop will cause calls to AudioPolicyService and an attempt to lock mLock. + // For audio config commands, it is necessary because audio flinger requires that the calling + // process (user) has permission to modify audio settings. class AudioCommandThread : public Thread { class AudioCommand; public: diff --git a/services/audioflinger/test-resample.cpp b/services/audioflinger/test-resample.cpp index a8e23e4..151313b 100644 --- a/services/audioflinger/test-resample.cpp +++ b/services/audioflinger/test-resample.cpp @@ -61,7 +61,8 @@ struct HeaderWav { }; static int usage(const char* name) { - fprintf(stderr,"Usage: %s [-p] [-h] [-q ] [-i ] [-o ] \n", name); + fprintf(stderr,"Usage: %s [-p] [-h] [-q ] [-i ] " + "[-o ] \n", name); fprintf(stderr,"-p - enable profiling\n"); fprintf(stderr,"-h - create wav file\n"); fprintf(stderr,"-q - resampler quality\n"); -- cgit v1.1 From 2e136686cd60556b681480079142142ab4a7e07c Mon Sep 17 00:00:00 2001 From: James Dong Date: Thu, 1 Nov 2012 18:54:43 -0700 Subject: Remove unused default parameter for copyBuffer() Change-Id: I8002986ccb926165af393ca80cece60dee011204 --- media/libstagefright/include/FragmentedMP4Parser.h | 2 +- media/libstagefright/mp4/FragmentedMP4Parser.cpp | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/include/FragmentedMP4Parser.h b/media/libstagefright/include/FragmentedMP4Parser.h index 0edafb9..dbe02b8 100644 --- a/media/libstagefright/include/FragmentedMP4Parser.h +++ b/media/libstagefright/include/FragmentedMP4Parser.h @@ -263,7 +263,7 @@ private: void copyBuffer( sp *dst, - size_t offset, uint64_t size, size_t extra = 0) const; + size_t offset, uint64_t size) const; DISALLOW_EVIL_CONSTRUCTORS(FragmentedMP4Parser); }; diff --git a/media/libstagefright/mp4/FragmentedMP4Parser.cpp b/media/libstagefright/mp4/FragmentedMP4Parser.cpp index 7fe4e63..54c3d63 100644 --- a/media/libstagefright/mp4/FragmentedMP4Parser.cpp +++ b/media/libstagefright/mp4/FragmentedMP4Parser.cpp @@ -1971,8 +1971,8 @@ status_t FragmentedMP4Parser::parseTrackFragmentRun( } void FragmentedMP4Parser::copyBuffer( - sp *dst, size_t offset, 
uint64_t size, size_t extra) const { - sp buf = new ABuffer(size + extra); + sp *dst, size_t offset, uint64_t size) const { + sp buf = new ABuffer(size); memcpy(buf->data(), mBuffer->data() + offset, size); *dst = buf; -- cgit v1.1 From 4ed475d3ad4231370371e14a94779c5d300eb3c5 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Thu, 1 Nov 2012 21:03:46 -0700 Subject: new coefficients for the vhq resampler previous coefficients were provided by a 3rd party and didn't have a way to re-generate them. we're now using the 'fir' utility. the performance of the filter is virtually identical, except for the down-sampling case which seems slightly better now: It looks like both the previous and new coefficients are generating some sort of clipping for full-scale signals in the down-sampling case (although the new ones seem better), the reason for that is unknown (see bug: 7453062) Also updated the HQ coefficients for the down-samplers, previous ones were a little bit too conservative -- the new ones push the cut-off frequency up by about 1 KHz. Change-Id: I54a827b5c707c7cc41268ed01283758dce1d7647 --- services/audioflinger/AudioResamplerSinc.cpp | 18 +- .../audio-resampler/AudioResamplerCoefficients.cpp | 29 +- .../dnsampler_filter_coefficients_x128_10112011.h | 2585 -------------------- .../audio-resampler/filter_coefficients.h | 61 + .../resampler_filter_coefficients_10042011.h | 2071 ---------------- 5 files changed, 84 insertions(+), 4680 deletions(-) delete mode 100644 services/audioflinger/audio-resampler/dnsampler_filter_coefficients_x128_10112011.h create mode 100644 services/audioflinger/audio-resampler/filter_coefficients.h delete mode 100644 services/audioflinger/audio-resampler/resampler_filter_coefficients_10042011.h diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index e0ea4a4..5f25760 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -47,17 +47,17 @@ const int32_t AudioResamplerSinc::mFirCoefsUp[] = { /* * These coefficients are optimized for 48KHz -> 44.1KHz - * cmd-line: fir -l 7 -s 48000 -c 16600 + * cmd-line: fir -l 7 -s 48000 -c 17189 */ const int32_t AudioResamplerSinc::mFirCoefsDown[] = { - 0x58888889, 0x58875d88, 0x5883dc96, 0x587e05e0, 0x5875d9b3, 0x586b587d, 0x585e82c6, 0x584f593a, 0x583ddc9f, 0x582a0dde, 0x5813edfb, 0x57fb7e1a, 0x57e0bf7f, 0x57c3b389, 0x57a45bb8, 0x5782b9aa, 0x575ecf1a, 0x57389de0, 0x571027f6, 0x56e56f6f, 0x56b8767e, 0x56893f73, 0x5657ccbb, 0x562420e2, 0x55ee3e8d, 0x55b62882, 0x557be1a0, 0x553f6ce6, 0x5500cd6d, 0x54c0066a, 0x547d1b2e, 0x54380f26, 0x53f0e5da, 0x53a7a2ed, 0x535c4a1e, 0x530edf46, 0x52bf6657, 0x526de360, 0x521a5a86, 0x51c4d00c, 0x516d484a, 0x5113c7b6, 0x50b852d9, 0x505aee59, 0x4ffb9ef2, 0x4f9a6979, 0x4f3752d9, 0x4ed26016, 0x4e6b9649, 0x4e02faa3, 0x4d98926b, 0x4d2c62fd, 0x4cbe71cc, 0x4c4ec45e, 0x4bdd6050, 0x4b6a4b53, 0x4af58b2b, 0x4a7f25b0, 0x4a0720cd, 0x498d8283, 0x491250e1, 0x4895920c, 0x48174c37, 0x479785ab, 0x471644bd, 0x46938fd7, 0x460f6d70, 0x4589e411, 0x4502fa51, 0x447ab6d5, 0x43f12053, 0x43663d8d, 0x42da1554, 0x424cae85, 0x41be100a, 0x412e40db, 0x409d47f9, 0x400b2c72, 0x3f77f561, 0x3ee3a9e7, 0x3e4e5132, 0x3db7f27a, 0x3d2094ff, 0x3c88400b, 0x3beefaee, 0x3b54cd01, 0x3ab9bda6, 0x3a1dd444, 0x39811848, 0x38e39127, 0x3845465a, 0x37a63f5f, 0x370683ba, 0x36661af1, 0x35c50c90, 0x35236024, 0x34811d3f, 0x33de4b72, 0x333af253, 0x32971979, 0x31f2c87a, 0x314e06ed, 0x30a8dc6a, 0x30035089, 0x2f5d6ade, 0x2eb732fe, 0x2e10b07d, 
0x2d69eaeb, 0x2cc2e9d4, 0x2c1bb4c4, 0x2b745340, 0x2acccccc, 0x2a2528e6, 0x297d6f06, 0x28d5a6a0, 0x282dd722, 0x278607f2, 0x26de4072, 0x263687fa, 0x258ee5dd, 0x24e76163, 0x244001cf, 0x2398ce58, 0x22f1ce2e, 0x224b0876, 0x21a4844b, 0x20fe48be, 0x20585cd5, - 0x1fb2c78a, 0x1f0d8fcb, 0x1e68bc7d, 0x1dc45475, 0x1d205e7d, 0x1c7ce150, 0x1bd9e39e, 0x1b376c06, 0x1a95811c, 0x19f42964, 0x19536b51, 0x18b34d4a, 0x1813d5a3, 0x17750aa3, 0x16d6f27f, 0x1639935b, 0x159cf34b, 0x15011851, 0x1466085d, 0x13cbc94f, 0x133260f3, 0x1299d502, 0x12022b24, 0x116b68ed, 0x10d593dd, 0x1040b162, 0x0facc6d4, 0x0f19d979, 0x0e87ee81, 0x0df70b09, 0x0d673417, 0x0cd86e9d, 0x0c4abf78, 0x0bbe2b70, 0x0b32b735, 0x0aa86763, 0x0a1f407f, 0x099746f9, 0x09107f29, 0x088aed4f, 0x08069598, 0x07837c17, 0x0701a4c8, 0x06811392, 0x0601cc40, 0x0583d28b, 0x05072a0f, 0x048bd653, 0x0411dac7, 0x03993abf, 0x0321f97b, 0x02ac1a20, 0x02379fbb, 0x01c48d42, 0x0152e590, 0x00e2ab69, 0x0073e179, 0x00068a52, 0xff9aa86c, 0xff303e29, 0xfec74dd1, 0xfe5fd993, 0xfdf9e383, 0xfd956da0, 0xfd3279cd, 0xfcd109d6, 0xfc711f6d, 0xfc12bc2a, 0xfbb5e18f, 0xfb5a9103, 0xfb00cbd4, 0xfaa89339, 0xfa51e84e, 0xf9fccc18, 0xf9a93f82, 0xf9574361, 0xf906d86d, 0xf8b7ff4b, 0xf86ab883, 0xf81f0487, 0xf7d4e3b0, 0xf78c5641, 0xf7455c62, 0xf6fff625, 0xf6bc2385, 0xf679e463, 0xf639388a, 0xf5fa1fae, 0xf5bc996b, 0xf580a547, 0xf54642b1, 0xf50d70ff, 0xf4d62f74, 0xf4a07d3b, 0xf46c5967, 0xf439c2f9, 0xf408b8d8, 0xf3d939d9, 0xf3ab44b9, 0xf37ed821, 0xf353f2a5, 0xf32a92c3, 0xf302b6e6, 0xf2dc5d64, 0xf2b7847f, 0xf2942a64, 0xf2724d2e, 0xf251eae4, 0xf2330179, 0xf2158ece, 0xf1f990b1, 0xf1df04de, 0xf1c5e8ff, 0xf1ae3aaa, 0xf197f765, 0xf1831ca6, 0xf16fa7d0, 0xf15d9634, 0xf14ce516, 0xf13d91a7, 0xf12f9909, 0xf122f84e, 0xf117ac79, 0xf10db27d, 0xf1050741, 0xf0fda799, 0xf0f7904e, 0xf0f2be1a, - 0xf0ef2dab, 0xf0ecdba0, 0xf0ebc48a, 0xf0ebe4f1, 0xf0ed394e, 0xf0efbe0d, 0xf0f36f92, 0xf0f84a32, 0xf0fe4a39, 0xf1056be8, 0xf10dab74, 0xf117050a, 0xf12174cd, 0xf12cf6d5, 0xf1398732, 0xf14721ec, 0xf155c300, 0xf1656666, 0xf176080d, 0xf187a3db, 0xf19a35b1, 0xf1adb969, 0xf1c22ad4, 0xf1d785c1, 0xf1edc5f5, 0xf204e733, 0xf21ce537, 0xf235bbb8, 0xf24f6669, 0xf269e0fa, 0xf2852715, 0xf2a13462, 0xf2be0485, 0xf2db9321, 0xf2f9dbd3, 0xf318da38, 0xf33889ec, 0xf358e688, 0xf379eba4, 0xf39b94d7, 0xf3bdddb7, 0xf3e0c1db, 0xf4043cd8, 0xf4284a45, 0xf44ce5ba, 0xf4720ace, 0xf497b51a, 0xf4bde03a, 0xf4e487c9, 0xf50ba766, 0xf5333ab3, 0xf55b3d52, 0xf583aaec, 0xf5ac7f29, 0xf5d5b5b7, 0xf5ff4a47, 0xf6293890, 0xf6537c4a, 0xf67e1134, 0xf6a8f311, 0xf6d41dab, 0xf6ff8cce, 0xf72b3c4f, 0xf7572808, 0xf7834bd7, 0xf7afa3a3, 0xf7dc2b58, 0xf808deec, 0xf835ba59, 0xf862b9a0, 0xf88fd8cc, 0xf8bd13f0, 0xf8ea6724, 0xf917ce8a, 0xf945464f, 0xf972caa4, 0xf9a057c6, 0xf9cde9fb, 0xf9fb7d90, 0xfa290edf, 0xfa569a49, 0xfa841c3a, 0xfab19127, 0xfadef591, 0xfb0c4601, 0xfb397f0d, 0xfb669d55, 0xfb939d83, 0xfbc07c4c, 0xfbed3671, 0xfc19c8bf, 0xfc46300d, 0xfc72693e, 0xfc9e7141, 0xfcca4511, 0xfcf5e1b4, 0xfd21443e, 0xfd4c69cd, 0xfd774f8e, 0xfda1f2b7, 0xfdcc508d, 0xfdf66662, 0xfe203193, 0xfe49af8a, 0xfe72ddbf, 0xfe9bb9b7, 0xfec44103, 0xfeec7141, 0xff14481d, 0xff3bc351, 0xff62e0a2, 0xff899de5, 0xffaff8f9, 0xffd5efce, 0xfffb8060, 0x0020a8b7, 0x004566eb, 0x0069b920, 0x008d9d89, 0x00b11264, 0x00d415ff, 0x00f6a6b5, 0x0118c2ef, 0x013a6922, 0x015b97d1, 0x017c4d8f, 0x019c88f9, 0x01bc48bd, - 0x01db8b94, 0x01fa5045, 0x021895a6, 0x02365a98, 0x02539e0b, 0x02705efd, 0x028c9c77, 0x02a85592, 0x02c38972, 0x02de3749, 0x02f85e57, 0x0311fde7, 0x032b1552, 0x0343a3ff, 0x035ba961, 0x037324f6, 0x038a164c, 0x03a07cfa, 
0x03b658a7, 0x03cba904, 0x03e06dcf, 0x03f4a6d1, 0x040853e2, 0x041b74e4, 0x042e09c4, 0x0440127d, 0x04518f14, 0x04627f9b, 0x0472e42e, 0x0482bcf5, 0x04920a24, 0x04a0cbf7, 0x04af02ba, 0x04bcaebe, 0x04c9d064, 0x04d66814, 0x04e27642, 0x04edfb6c, 0x04f8f819, 0x05036cdc, 0x050d5a51, 0x0516c11c, 0x051fa1ee, 0x0527fd7e, 0x052fd48d, 0x053727e8, 0x053df861, 0x054446d5, 0x054a1429, 0x054f614a, 0x05542f2f, 0x05587ed5, 0x055c5141, 0x055fa783, 0x056282ae, 0x0564e3e1, 0x0566cc3e, 0x05683cf1, 0x0569372c, 0x0569bc29, 0x0569cd27, 0x05696b6b, 0x05689842, 0x056754fe, 0x0565a2f9, 0x0563838f, 0x0560f824, 0x055e0222, 0x055aa2f6, 0x0556dc14, 0x0552aef5, 0x054e1d14, 0x054927f4, 0x0543d11a, 0x053e1a11, 0x05380465, 0x053191aa, 0x052ac373, 0x05239b5b, 0x051c1afe, 0x051443fa, 0x050c17f3, 0x0503988d, 0x04fac770, 0x04f1a647, 0x04e836bd, 0x04de7a82, 0x04d47346, 0x04ca22bc, 0x04bf8a97, 0x04b4ac8c, 0x04a98a54, 0x049e25a4, 0x04928037, 0x04869bc6, 0x047a7a0b, 0x046e1cc1, 0x046185a3, 0x0454b66c, 0x0447b0d7, 0x043a76a1, 0x042d0983, 0x041f6b39, 0x04119d7b, 0x0403a204, 0x03f57a8c, 0x03e728c9, 0x03d8ae73, 0x03ca0d3e, 0x03bb46dd, 0x03ac5d03, 0x039d5160, 0x038e25a2, 0x037edb76, 0x036f7486, 0x035ff27a, 0x035056f9, 0x0340a3a5, 0x0330da20, 0x0320fc08, 0x03110af8, 0x03010889, 0x02f0f64f, 0x02e0d5df, 0x02d0a8c6, 0x02c07090, 0x02b02ec6, 0x029fe4ec, - 0x028f9484, 0x027f3f0b, 0x026ee5fa, 0x025e8ac8, 0x024e2ee5, 0x023dd3c0, 0x022d7ac1, 0x021d254d, 0x020cd4c6, 0x01fc8a88, 0x01ec47ea, 0x01dc0e40, 0x01cbded8, 0x01bbbafd, 0x01aba3f2, 0x019b9afa, 0x018ba14e, 0x017bb826, 0x016be0b3, 0x015c1c20, 0x014c6b97, 0x013cd038, 0x012d4b20, 0x011ddd67, 0x010e8820, 0x00ff4c57, 0x00f02b13, 0x00e12558, 0x00d23c22, 0x00c37068, 0x00b4c31c, 0x00a6352a, 0x0097c778, 0x00897ae9, 0x007b5057, 0x006d4899, 0x005f647f, 0x0051a4d3, 0x00440a5a, 0x003695d5, 0x002947fc, 0x001c2183, 0x000f231a, 0x00024d68, 0xfff5a111, 0xffe91eb2, 0xffdcc6e4, 0xffd09a37, 0xffc49939, 0xffb8c471, 0xffad1c5f, 0xffa1a180, 0xff965449, 0xff8b352a, 0xff804490, 0xff7582e0, 0xff6af079, 0xff608db6, 0xff565aec, 0xff4c586c, 0xff42867e, 0xff38e569, 0xff2f756c, 0xff2636c2, 0xff1d29a0, 0xff144e36, 0xff0ba4ae, 0xff032d30, 0xfefae7db, 0xfef2d4cc, 0xfeeaf419, 0xfee345d5, 0xfedbca0b, 0xfed480c6, 0xfecd6a07, 0xfec685cf, 0xfebfd416, 0xfeb954d4, 0xfeb307f8, 0xfeaced6f, 0xfea70522, 0xfea14ef4, 0xfe9bcac5, 0xfe96786f, 0xfe9157cb, 0xfe8c68ab, 0xfe87aadd, 0xfe831e2e, 0xfe7ec263, 0xfe7a9741, 0xfe769c85, 0xfe72d1ed, 0xfe6f3731, 0xfe6bcc04, 0xfe689017, 0xfe658319, 0xfe62a4b3, 0xfe5ff48d, 0xfe5d7249, 0xfe5b1d89, 0xfe58f5ea, 0xfe56fb06, 0xfe552c76, 0xfe5389cc, 0xfe52129d, 0xfe50c676, 0xfe4fa4e5, 0xfe4ead73, 0xfe4ddfa8, 0xfe4d3b09, 0xfe4cbf19, 0xfe4c6b59, 0xfe4c3f47, 0xfe4c3a5e, 0xfe4c5c1b, 0xfe4ca3f4, 0xfe4d1160, 0xfe4da3d4, 0xfe4e5ac3, 0xfe4f359e, 0xfe5033d5, 0xfe5154d6, 0xfe52980d, 0xfe53fce6, 0xfe5582cb, 0xfe572926, 0xfe58ef5d, 0xfe5ad4d7, - 0xfe5cd8fa, 0xfe5efb2b, 0xfe613ace, 0xfe639746, 0xfe660ff5, 0xfe68a43c, 0xfe6b537e, 0xfe6e1d1b, 0xfe710072, 0xfe73fce5, 0xfe7711d2, 0xfe7a3e98, 0xfe7d8297, 0xfe80dd2e, 0xfe844dbc, 0xfe87d39f, 0xfe8b6e37, 0xfe8f1ce3, 0xfe92df02, 0xfe96b3f4, 0xfe9a9b19, 0xfe9e93d1, 0xfea29d7d, 0xfea6b77d, 0xfeaae135, 0xfeaf1a05, 0xfeb36152, 0xfeb7b67e, 0xfebc18ef, 0xfec0880a, 0xfec50334, 0xfec989d5, 0xfece1b54, 0xfed2b71b, 0xfed75c94, 0xfedc0b2a, 0xfee0c249, 0xfee5815e, 0xfeea47d8, 0xfeef1528, 0xfef3e8be, 0xfef8c20c, 0xfefda088, 0xff0283a5, 0xff076adc, 0xff0c55a4, 0xff114377, 0xff1633d0, 0xff1b262d, 0xff201a0c, 0xff250eee, 0xff2a0453, 0xff2ef9c1, 0xff33eebc, 0xff38e2cb, 0xff3dd578, 0xff42c64c, 
0xff47b4d6, 0xff4ca0a2, 0xff518941, 0xff566e47, 0xff5b4f45, 0xff602bd4, 0xff65038a, 0xff69d601, 0xff6ea2d6, 0xff7369a7, 0xff782a12, 0xff7ce3bb, 0xff819645, 0xff864157, 0xff8ae498, 0xff8f7fb2, 0xff941251, 0xff989c25, 0xff9d1cdc, 0xffa1942a, 0xffa601c3, 0xffaa655e, 0xffaebeb2, 0xffb30d7c, 0xffb75177, 0xffbb8a62, 0xffbfb7ff, 0xffc3da11, 0xffc7f05c, 0xffcbfaa8, 0xffcff8be, 0xffd3ea6a, 0xffd7cf79, 0xffdba7b9, 0xffdf72fe, 0xffe33119, 0xffe6e1e1, 0xffea852e, 0xffee1ad8, 0xfff1a2bb, 0xfff51cb5, 0xfff888a4, 0xfffbe66b, 0xffff35ed, 0x0002770f, 0x0005a9b8, 0x0008cdd0, 0x000be344, 0x000ee9ff, 0x0011e1f0, 0x0014cb08, 0x0017a538, 0x001a7075, 0x001d2cb3, 0x001fd9eb, 0x00227816, 0x0025072f, 0x00278731, 0x0029f81b, 0x002c59ed, 0x002eaca8, 0x0030f04f, 0x003324e6, 0x00354a74, 0x003760ff, 0x00396892, 0x003b6135, 0x003d4af6, 0x003f25e1, 0x0040f206, 0x0042af73, - 0x00445e3a, 0x0045fe6e, 0x00479023, 0x0049136d, 0x004a8864, 0x004bef1e, 0x004d47b5, 0x004e9242, 0x004fcedf, 0x0050fdaa, 0x00521ebe, 0x0053323b, 0x0054383e, 0x005530e9, 0x00561c5b, 0x0056fab7, 0x0057cc20, 0x005890b9, 0x005948a7, 0x0059f40e, 0x005a9315, 0x005b25e2, 0x005bac9d, 0x005c276d, 0x005c967d, 0x005cf9f4, 0x005d51fd, 0x005d9ec3, 0x005de071, 0x005e1731, 0x005e4331, 0x005e649d, 0x005e7ba1, 0x005e886c, 0x005e8b2b, 0x005e840c, 0x005e733e, 0x005e58ef, 0x005e354e, 0x005e088c, 0x005dd2d6, 0x005d945e, 0x005d4d53, 0x005cfde5, 0x005ca645, 0x005c46a2, 0x005bdf2d, 0x005b7017, 0x005af990, 0x005a7bc9, 0x0059f6f2, 0x00596b3b, 0x0058d8d6, 0x00583ff2, 0x0057a0c0, 0x0056fb70, 0x00565032, 0x00559f36, 0x0054e8ac, 0x00542cc2, 0x00536baa, 0x0052a591, 0x0051daa6, 0x00510b19, 0x00503717, 0x004f5ece, 0x004e826d, 0x004da220, 0x004cbe15, 0x004bd678, 0x004aeb75, 0x0049fd39, 0x00490bef, 0x004817c2, 0x004720dd, 0x0046276a, 0x00452b92, 0x00442d80, 0x00432d5b, 0x00422b4c, 0x0041277c, 0x00402210, 0x003f1b31, 0x003e1304, 0x003d09b0, 0x003bff58, 0x003af423, 0x0039e833, 0x0038dbad, 0x0037ceb3, 0x0036c168, 0x0035b3ed, 0x0034a664, 0x003398ed, 0x00328ba7, 0x00317eb3, 0x0030722e, 0x002f6638, 0x002e5aec, 0x002d5069, 0x002c46c9, 0x002b3e2a, 0x002a36a5, 0x00293054, 0x00282b52, 0x002727b7, 0x0026259c, 0x00252518, 0x00242641, 0x00232930, 0x00222df8, 0x002134b0, 0x00203d6b, 0x001f483d, 0x001e5539, 0x001d6473, 0x001c75fb, 0x001b89e3, 0x001aa03b, 0x0019b913, 0x0018d47b, 0x0017f281, 0x00171334, 0x001636a0, 0x00155cd2, 0x001485d7, 0x0013b1ba, 0x0012e086, - 0x00121246, 0x00114703, 0x00107ec6, 0x000fb999, 0x000ef783, 0x000e388c, 0x000d7cba, 0x000cc414, 0x000c0ea0, 0x000b5c64, 0x000aad63, 0x000a01a2, 0x00095925, 0x0008b3f0, 0x00081204, 0x00077364, 0x0006d811, 0x0006400e, 0x0005ab5a, 0x000519f6, 0x00048be2, 0x0004011d, 0x000379a7, 0x0002f57d, 0x0002749e, 0x0001f708, 0x00017cb7, 0x000105a9, 0x000091da, 0x00002147, 0xffffb3eb, 0xffff49c1, 0xfffee2c6, 0xfffe7ef2, 0xfffe1e41, 0xfffdc0ad, 0xfffd6630, 0xfffd0ec3, 0xfffcba5f, 0xfffc68fd, 0xfffc1a97, 0xfffbcf23, 0xfffb869a, 0xfffb40f4, 0xfffafe29, 0xfffabe30, 0xfffa8100, 0xfffa4690, 0xfffa0ed7, 0xfff9d9cc, 0xfff9a764, 0xfff97796, 0xfff94a58, 0xfff91fa0, 0xfff8f764, 0xfff8d199, 0xfff8ae34, 0xfff88d2b, 0xfff86e74, 0xfff85203, 0xfff837cd, 0xfff81fc7, 0xfff809e6, 0xfff7f61f, 0xfff7e467, 0xfff7d4b1, 0xfff7c6f4, 0xfff7bb22, 0xfff7b132, 0xfff7a917, 0xfff7a2c6, 0xfff79e33, 0xfff79b52, 0xfff79a19, 0xfff79a7b, 0xfff79c6e, 0xfff79fe5, 0xfff7a4d5, 0xfff7ab33, 0xfff7b2f3, 0xfff7bc0a, 0xfff7c66d, 0xfff7d210, 0xfff7dee8, 0xfff7eceb, 0xfff7fc0c, 0xfff80c41, 0xfff81d80, 0xfff82fbc, 0xfff842ed, 0xfff85707, 0xfff86bff, 0xfff881cb, 0xfff89861, 0xfff8afb7, 0xfff8c7c3, 
0xfff8e07b, 0xfff8f9d4, 0xfff913c6, 0xfff92e46, 0xfff9494c, 0xfff964ce, 0xfff980c3, 0xfff99d23, 0xfff9b9e3, 0xfff9d6fc, 0xfff9f465, 0xfffa1216, 0xfffa3006, 0xfffa4e2d, 0xfffa6c84, 0xfffa8b03, 0xfffaa9a3, 0xfffac85b, 0xfffae725, 0xfffb05f9, 0xfffb24d2, 0xfffb43a7, 0xfffb6273, 0xfffb812f, 0xfffb9fd5, 0xfffbbe5f, 0xfffbdcc6, 0xfffbfb07, 0xfffc191a, 0xfffc36fa, 0xfffc54a4, 0xfffc7210, + 0x5bacb6f4, 0x5bab6c81, 0x5ba78d37, 0x5ba1194f, 0x5b981122, 0x5b8c7530, 0x5b7e461a, 0x5b6d84a8, 0x5b5a31c6, 0x5b444e81, 0x5b2bdc0e, 0x5b10dbc2, 0x5af34f18, 0x5ad337af, 0x5ab09748, 0x5a8b6fc7, 0x5a63c336, 0x5a3993c0, 0x5a0ce3b2, 0x59ddb57f, 0x59ac0bba, 0x5977e919, 0x59415075, 0x590844c9, 0x58ccc930, 0x588ee0ea, 0x584e8f56, 0x580bd7f4, 0x57c6be67, 0x577f4670, 0x573573f2, 0x56e94af1, 0x569acf90, 0x564a0610, 0x55f6f2d3, 0x55a19a5c, 0x554a0148, 0x54f02c56, 0x54942061, 0x5435e263, 0x53d57774, 0x5372e4c6, 0x530e2fac, 0x52a75d90, 0x523e73fd, 0x51d37897, 0x5166711c, 0x50f76368, 0x5086556f, 0x50134d3e, 0x4f9e50ff, 0x4f2766f2, 0x4eae9571, 0x4e33e2ee, 0x4db755f3, 0x4d38f520, 0x4cb8c72e, 0x4c36d2eb, 0x4bb31f3c, 0x4b2db31a, 0x4aa69594, 0x4a1dcdce, 0x499362ff, 0x49075c72, 0x4879c185, 0x47ea99a9, 0x4759ec60, 0x46c7c140, 0x46341fed, 0x459f101d, 0x45089996, 0x4470c42d, 0x43d797c7, 0x433d1c56, 0x42a159dc, 0x42045865, 0x4166200e, 0x40c6b8fd, 0x40262b65, 0x3f847f83, 0x3ee1bda2, 0x3e3dee13, 0x3d991932, 0x3cf34766, 0x3c4c811c, 0x3ba4cec9, 0x3afc38eb, 0x3a52c805, 0x39a884a1, 0x38fd774e, 0x3851a8a2, 0x37a52135, 0x36f7e9a4, 0x364a0a90, 0x359b8c9d, 0x34ec786f, 0x343cd6af, 0x338cb004, 0x32dc0d17, 0x322af693, 0x3179751f, 0x30c79163, 0x30155404, 0x2f62c5a7, 0x2eafeeed, 0x2dfcd873, 0x2d498ad3, 0x2c960ea3, 0x2be26c73, 0x2b2eaccf, 0x2a7ad83c, 0x29c6f738, 0x2913123c, 0x285f31b7, 0x27ab5e12, 0x26f79fab, 0x2643feda, 0x259083eb, 0x24dd3721, 0x242a20b3, 0x237748cf, 0x22c4b795, 0x2212751a, 0x21608968, 0x20aefc79, 0x1ffdd63b, 0x1f4d1e8e, 0x1e9cdd43, + 0x1ded1a1d, 0x1d3ddccd, 0x1c8f2cf9, 0x1be11231, 0x1b3393f8, 0x1a86b9bf, 0x19da8ae5, 0x192f0eb7, 0x18844c70, 0x17da4b37, 0x17311222, 0x1688a832, 0x15e11453, 0x153a5d5e, 0x14948a16, 0x13efa12c, 0x134ba937, 0x12a8a8bb, 0x1206a625, 0x1165a7cc, 0x10c5b3ef, 0x1026d0b8, 0x0f890437, 0x0eec5465, 0x0e50c723, 0x0db6623b, 0x0d1d2b5d, 0x0c85281f, 0x0bee5dff, 0x0b58d262, 0x0ac48a92, 0x0a318bc1, 0x099fdb04, 0x090f7d57, 0x0880779d, 0x07f2ce9b, 0x076686fc, 0x06dba551, 0x06522e0f, 0x05ca258f, 0x0543900d, 0x04be71ab, 0x043ace6e, 0x03b8aa40, 0x033808eb, 0x02b8ee22, 0x023b5d76, 0x01bf5a5e, 0x0144e834, 0x00cc0a36, 0x0054c382, 0xffdf171b, 0xff6b07e7, 0xfef898ae, 0xfe87cc1b, 0xfe18a4bc, 0xfdab2501, 0xfd3f4f3d, 0xfcd525a5, 0xfc6caa53, 0xfc05df40, 0xfba0c64b, 0xfb3d6133, 0xfadbb19a, 0xfa7bb908, 0xfa1d78e3, 0xf9c0f276, 0xf96626f0, 0xf90d1761, 0xf8b5c4be, 0xf8602fdc, 0xf80c5977, 0xf7ba422b, 0xf769ea78, 0xf71b52c4, 0xf6ce7b57, 0xf683645a, 0xf63a0ddf, 0xf5f277d9, 0xf5aca21f, 0xf5688c6d, 0xf5263665, 0xf4e59f8a, 0xf4a6c748, 0xf469aced, 0xf42e4faf, 0xf3f4aea6, 0xf3bcc8d3, 0xf3869d1a, 0xf3522a49, 0xf31f6f0f, 0xf2ee6a07, 0xf2bf19ae, 0xf2917c6d, 0xf265908f, 0xf23b544b, 0xf212c5be, 0xf1ebe2ec, 0xf1c6a9c3, 0xf1a3181a, 0xf1812bb0, 0xf160e22d, 0xf1423924, 0xf1252e0f, 0xf109be56, 0xf0efe748, 0xf0d7a622, 0xf0c0f808, 0xf0abda0e, 0xf0984931, 0xf086425a, 0xf075c260, 0xf066c606, 0xf05949fb, 0xf04d4ade, 0xf042c539, 0xf039b587, 0xf032182f, 0xf02be98a, 0xf02725dc, 0xf023c95d, 0xf021d031, 0xf0213671, 0xf021f823, 0xf0241140, 0xf0277db1, 0xf02c3953, 0xf0323ff5, + 0xf0398d56, 0xf0421d2c, 0xf04beb1d, 0xf056f2c7, 0xf0632fb7, 0xf0709d74, 0xf07f3776, 
0xf08ef92d, 0xf09fddfe, 0xf0b1e143, 0xf0c4fe50, 0xf0d9306d, 0xf0ee72db, 0xf104c0d2, 0xf11c1583, 0xf1346c17, 0xf14dbfb1, 0xf1680b6e, 0xf1834a63, 0xf19f77a0, 0xf1bc8e31, 0xf1da891b, 0xf1f96360, 0xf21917ff, 0xf239a1ef, 0xf25afc29, 0xf27d219f, 0xf2a00d43, 0xf2c3ba04, 0xf2e822ce, 0xf30d428e, 0xf333142f, 0xf359929a, 0xf380b8ba, 0xf3a88179, 0xf3d0e7c2, 0xf3f9e680, 0xf42378a0, 0xf44d9912, 0xf47842c5, 0xf4a370ad, 0xf4cf1dbf, 0xf4fb44f4, 0xf527e149, 0xf554edbd, 0xf5826555, 0xf5b0431a, 0xf5de8218, 0xf60d1d63, 0xf63c1012, 0xf66b5544, 0xf69ae81d, 0xf6cac3c7, 0xf6fae373, 0xf72b425b, 0xf75bdbbd, 0xf78caae0, 0xf7bdab16, 0xf7eed7b4, 0xf8202c1c, 0xf851a3b6, 0xf88339f5, 0xf8b4ea55, 0xf8e6b059, 0xf9188793, 0xf94a6b9b, 0xf97c5815, 0xf9ae48af, 0xf9e03924, 0xfa122537, 0xfa4408ba, 0xfa75df87, 0xfaa7a586, 0xfad956ab, 0xfb0aeef6, 0xfb3c6a73, 0xfb6dc53c, 0xfb9efb77, 0xfbd00956, 0xfc00eb1b, 0xfc319d13, 0xfc621b9a, 0xfc926319, 0xfcc27008, 0xfcf23eec, 0xfd21cc59, 0xfd5114f0, 0xfd801564, 0xfdaeca73, 0xfddd30eb, 0xfe0b45aa, 0xfe39059b, 0xfe666dbc, 0xfe937b15, 0xfec02ac2, 0xfeec79ec, 0xff1865cd, 0xff43ebac, 0xff6f08e4, 0xff99badb, 0xffc3ff0c, 0xffedd2fd, 0x00173447, 0x00402092, 0x00689598, 0x0090911f, 0x00b81102, 0x00df1328, 0x0105958c, 0x012b9635, 0x0151133e, 0x01760ad1, 0x019a7b27, 0x01be628c, 0x01e1bf58, 0x02048ff8, 0x0226d2e6, 0x024886ad, 0x0269a9e9, 0x028a3b44, 0x02aa397b, 0x02c9a359, 0x02e877b9, 0x0306b586, 0x03245bbc, 0x03416966, 0x035ddd9e, 0x0379b790, + 0x0394f674, 0x03af9995, 0x03c9a04a, 0x03e309fe, 0x03fbd625, 0x04140449, 0x042b93fd, 0x044284e6, 0x0458d6b7, 0x046e8933, 0x04839c29, 0x04980f79, 0x04abe310, 0x04bf16e9, 0x04d1ab0d, 0x04e39f93, 0x04f4f4a2, 0x0505aa6a, 0x0515c12d, 0x05253938, 0x053412e4, 0x05424e9b, 0x054feccf, 0x055cee03, 0x056952c3, 0x05751baa, 0x0580495c, 0x058adc8d, 0x0594d5fa, 0x059e366c, 0x05a6feb9, 0x05af2fbf, 0x05b6ca6b, 0x05bdcfb2, 0x05c44095, 0x05ca1e1f, 0x05cf6965, 0x05d42387, 0x05d84daf, 0x05dbe90f, 0x05def6e4, 0x05e17873, 0x05e36f0d, 0x05e4dc08, 0x05e5c0c6, 0x05e61eae, 0x05e5f733, 0x05e54bcd, 0x05e41dfe, 0x05e26f4e, 0x05e0414d, 0x05dd9593, 0x05da6dbe, 0x05d6cb72, 0x05d2b05c, 0x05ce1e2d, 0x05c9169d, 0x05c39b6a, 0x05bdae57, 0x05b7512e, 0x05b085bc, 0x05a94dd5, 0x05a1ab52, 0x0599a00e, 0x05912dea, 0x058856cd, 0x057f1c9e, 0x0575814c, 0x056b86c6, 0x05612f00, 0x05567bf1, 0x054b6f92, 0x05400be1, 0x053452dc, 0x05284685, 0x051be8dd, 0x050f3bec, 0x050241b6, 0x04f4fc46, 0x04e76da3, 0x04d997d8, 0x04cb7cf2, 0x04bd1efb, 0x04ae8000, 0x049fa20f, 0x04908733, 0x0481317a, 0x0471a2ef, 0x0461dda0, 0x0451e396, 0x0441b6dd, 0x0431597d, 0x0420cd80, 0x041014eb, 0x03ff31c3, 0x03ee260d, 0x03dcf3ca, 0x03cb9cf9, 0x03ba2398, 0x03a889a1, 0x0396d10c, 0x0384fbd1, 0x03730be0, 0x0361032a, 0x034ee39b, 0x033caf1d, 0x032a6796, 0x03180ee7, 0x0305a6f0, 0x02f3318a, 0x02e0b08d, 0x02ce25ca, 0x02bb9310, 0x02a8fa2a, 0x02965cdb, 0x0283bce6, 0x02711c05, 0x025e7bf0, 0x024bde5a, 0x023944ee, 0x0226b156, 0x02142533, 0x0201a223, 0x01ef29be, 0x01dcbd96, 0x01ca5f37, 0x01b81028, 0x01a5d1ea, + 0x0193a5f9, 0x01818dc9, 0x016f8aca, 0x015d9e64, 0x014bc9fa, 0x013a0ee9, 0x01286e86, 0x0116ea22, 0x01058306, 0x00f43a74, 0x00e311a9, 0x00d209db, 0x00c12439, 0x00b061eb, 0x009fc413, 0x008f4bcb, 0x007efa29, 0x006ed038, 0x005ecf01, 0x004ef782, 0x003f4ab4, 0x002fc98a, 0x002074ed, 0x00114dc3, 0x000254e8, 0xfff38b32, 0xffe4f171, 0xffd6886d, 0xffc850e6, 0xffba4b98, 0xffac7936, 0xff9eda6d, 0xff916fe1, 0xff843a32, 0xff7739f7, 0xff6a6fc1, 0xff5ddc1a, 0xff517f86, 0xff455a80, 0xff396d7f, 0xff2db8f2, 0xff223d40, 0xff16faca, 0xff0bf1ed, 0xff0122fc, 0xfef68e45, 
0xfeec340f, 0xfee2149b, 0xfed83023, 0xfece86db, 0xfec518f1, 0xfebbe68c, 0xfeb2efcd, 0xfeaa34d0, 0xfea1b5a9, 0xfe997268, 0xfe916b15, 0xfe899fb2, 0xfe82103f, 0xfe7abcb1, 0xfe73a4fb, 0xfe6cc909, 0xfe6628c1, 0xfe5fc405, 0xfe599aaf, 0xfe53ac97, 0xfe4df98e, 0xfe48815e, 0xfe4343d0, 0xfe3e40a6, 0xfe39779a, 0xfe34e867, 0xfe3092bf, 0xfe2c7650, 0xfe2892c5, 0xfe24e7c3, 0xfe2174ec, 0xfe1e39da, 0xfe1b3628, 0xfe18696a, 0xfe15d32f, 0xfe137304, 0xfe114872, 0xfe0f52fc, 0xfe0d9224, 0xfe0c0567, 0xfe0aac3f, 0xfe098622, 0xfe089283, 0xfe07d0d3, 0xfe07407d, 0xfe06e0eb, 0xfe06b184, 0xfe06b1ac, 0xfe06e0c4, 0xfe073e2a, 0xfe07c93a, 0xfe08814e, 0xfe0965bc, 0xfe0a75da, 0xfe0bb0f9, 0xfe0d166b, 0xfe0ea57e, 0xfe105d7e, 0xfe123db6, 0xfe144570, 0xfe1673f2, 0xfe18c884, 0xfe1b4268, 0xfe1de0e2, 0xfe20a335, 0xfe2388a1, 0xfe269065, 0xfe29b9c1, 0xfe2d03f2, 0xfe306e35, 0xfe33f7c7, 0xfe379fe3, 0xfe3b65c4, 0xfe3f48a5, 0xfe4347c0, 0xfe476250, 0xfe4b978e, 0xfe4fe6b3, 0xfe544efb, 0xfe58cf9d, 0xfe5d67d4, 0xfe6216db, + 0xfe66dbeb, 0xfe6bb63e, 0xfe70a511, 0xfe75a79f, 0xfe7abd23, 0xfe7fe4db, 0xfe851e05, 0xfe8a67dd, 0xfe8fc1a5, 0xfe952a9b, 0xfe9aa201, 0xfea02719, 0xfea5b926, 0xfeab576d, 0xfeb10134, 0xfeb6b5c0, 0xfebc745c, 0xfec23c50, 0xfec80ce8, 0xfecde571, 0xfed3c538, 0xfed9ab8f, 0xfedf97c6, 0xfee58932, 0xfeeb7f27, 0xfef178fc, 0xfef7760c, 0xfefd75af, 0xff037744, 0xff097a29, 0xff0f7dbf, 0xff15816a, 0xff1b848e, 0xff218692, 0xff2786e1, 0xff2d84e5, 0xff33800e, 0xff3977cb, 0xff3f6b8f, 0xff455acf, 0xff4b4503, 0xff5129a3, 0xff57082e, 0xff5ce021, 0xff62b0fd, 0xff687a47, 0xff6e3b84, 0xff73f43d, 0xff79a3fe, 0xff7f4a54, 0xff84e6d0, 0xff8a7905, 0xff900089, 0xff957cf4, 0xff9aede0, 0xffa052ec, 0xffa5abb8, 0xffaaf7e6, 0xffb0371c, 0xffb56902, 0xffba8d44, 0xffbfa38d, 0xffc4ab8f, 0xffc9a4fc, 0xffce8f8a, 0xffd36af1, 0xffd836eb, 0xffdcf336, 0xffe19f91, 0xffe63bc0, 0xffeac787, 0xffef42af, 0xfff3ad01, 0xfff8064b, 0xfffc4e5c, 0x00008507, 0x0004aa1f, 0x0008bd7c, 0x000cbef7, 0x0010ae6e, 0x00148bbd, 0x001856c7, 0x001c0f6e, 0x001fb599, 0x0023492f, 0x0026ca1c, 0x002a384c, 0x002d93ae, 0x0030dc34, 0x003411d2, 0x0037347d, 0x003a442e, 0x003d40e0, 0x00402a8e, 0x00430137, 0x0045c4dd, 0x00487582, 0x004b132b, 0x004d9dde, 0x005015a5, 0x00527a8a, 0x0054cc9a, 0x00570be4, 0x00593877, 0x005b5267, 0x005d59c6, 0x005f4eac, 0x0061312e, 0x00630167, 0x0064bf71, 0x00666b68, 0x0068056b, 0x00698d98, 0x006b0411, 0x006c68f8, 0x006dbc71, 0x006efea0, 0x00702fae, 0x00714fc0, 0x00725f02, 0x00735d9c, 0x00744bba, 0x0075298a, 0x0075f739, 0x0076b4f5, 0x007762f0, 0x0078015a, 0x00789065, + 0x00791043, 0x0079812a, 0x0079e34d, 0x007a36e2, 0x007a7c20, 0x007ab33d, 0x007adc72, 0x007af7f6, 0x007b0603, 0x007b06d4, 0x007afaa1, 0x007ae1a7, 0x007abc20, 0x007a8a49, 0x007a4c5d, 0x007a029a, 0x0079ad3d, 0x00794c82, 0x0078e0a9, 0x007869ee, 0x0077e891, 0x00775ccf, 0x0076c6e8, 0x00762719, 0x00757da3, 0x0074cac4, 0x00740ebb, 0x007349c7, 0x00727c27, 0x0071a61b, 0x0070c7e1, 0x006fe1b8, 0x006ef3df, 0x006dfe94, 0x006d0217, 0x006bfea4, 0x006af47b, 0x0069e3d9, 0x0068ccfa, 0x0067b01e, 0x00668d80, 0x0065655d, 0x006437f1, 0x00630577, 0x0061ce2c, 0x00609249, 0x005f520a, 0x005e0da8, 0x005cc55c, 0x005b7961, 0x005a29ed, 0x0058d738, 0x0057817b, 0x005628ec, 0x0054cdc0, 0x0053702d, 0x00521068, 0x0050aea5, 0x004f4b17, 0x004de5f1, 0x004c7f66, 0x004b17a6, 0x0049aee3, 0x0048454b, 0x0046db0f, 0x0045705c, 0x00440561, 0x00429a4a, 0x00412f43, 0x003fc478, 0x003e5a12, 0x003cf03d, 0x003b871f, 0x003a1ee3, 0x0038b7ae, 0x003751a7, 0x0035ecf4, 0x003489b9, 0x0033281a, 0x0031c83a, 0x00306a3b, 0x002f0e3f, 0x002db466, 0x002c5cd0, 0x002b079a, 
0x0029b4e4, 0x002864c9, 0x00271766, 0x0025ccd7, 0x00248535, 0x0023409a, 0x0021ff1f, 0x0020c0dc, 0x001f85e6, 0x001e4e56, 0x001d1a3f, 0x001be9b7, 0x001abcd0, 0x0019939d, 0x00186e31, 0x00174c9c, 0x00162eef, 0x00151538, 0x0013ff88, 0x0012edea, 0x0011e06d, 0x0010d71d, 0x000fd205, 0x000ed130, 0x000dd4a7, 0x000cdc74, 0x000be89f, 0x000af931, 0x000a0e2f, 0x000927a0, 0x00084589, 0x000767f0, 0x00068ed8, 0x0005ba46, 0x0004ea3a, 0x00041eb9, 0x000357c2, 0x00029558, 0x0001d779, 0x00011e26, 0x0000695e, 0xffffb91f, 0xffff0d66, + 0xfffe6631, 0xfffdc37d, 0xfffd2545, 0xfffc8b86, 0xfffbf639, 0xfffb655b, 0xfffad8e4, 0xfffa50ce, 0xfff9cd12, 0xfff94da9, 0xfff8d28c, 0xfff85bb1, 0xfff7e910, 0xfff77a9f, 0xfff71057, 0xfff6aa2b, 0xfff64812, 0xfff5ea02, 0xfff58ff0, 0xfff539cf, 0xfff4e794, 0xfff49934, 0xfff44ea3, 0xfff407d2, 0xfff3c4b7, 0xfff38542, 0xfff34968, 0xfff3111b, 0xfff2dc4c, 0xfff2aaef, 0xfff27cf3, 0xfff2524c, 0xfff22aea, 0xfff206bf, 0xfff1e5bb, 0xfff1c7d0, 0xfff1acef, 0xfff19508, 0xfff1800b, 0xfff16de9, 0xfff15e93, 0xfff151f9, 0xfff1480b, 0xfff140b9, 0xfff13bf3, 0xfff139aa, 0xfff139cd, 0xfff13c4c, 0xfff14119, 0xfff14821, 0xfff15156, 0xfff15ca8, 0xfff16a07, 0xfff17962, 0xfff18aab, 0xfff19dd1, 0xfff1b2c5, 0xfff1c976, 0xfff1e1d6, 0xfff1fbd5, 0xfff21764, 0xfff23473, 0xfff252f3, 0xfff272d6, 0xfff2940b, 0xfff2b686, 0xfff2da36, 0xfff2ff0d, 0xfff324fd, 0xfff34bf9, 0xfff373f0, 0xfff39cd7, 0xfff3c69f, 0xfff3f13a, 0xfff41c9c, 0xfff448b7, 0xfff4757e, 0xfff4a2e5, 0xfff4d0de, 0xfff4ff5d, 0xfff52e57, 0xfff55dbf, 0xfff58d89, 0xfff5bdaa, 0xfff5ee17, 0xfff61ec5, 0xfff64fa8, 0xfff680b5, 0xfff6b1e4, 0xfff6e329, 0xfff7147a, 0xfff745cd, 0xfff7771a, 0xfff7a857, 0xfff7d97a, 0xfff80a7c, 0xfff83b52, 0xfff86bf6, 0xfff89c60, 0xfff8cc86, 0xfff8fc62, 0xfff92bec, 0xfff95b1e, 0xfff989ef, 0xfff9b85b, 0xfff9e65a, 0xfffa13e5, 0xfffa40f8, 0xfffa6d8d, 0xfffa999d, 0xfffac525, 0xfffaf01e, 0xfffb1a84, 0xfffb4453, 0xfffb6d86, 0xfffb961a, 0xfffbbe09, 0xfffbe552, 0xfffc0bef, 0xfffc31df, 0xfffc571e, 0xfffc7ba9, 0xfffc9f7e, 0xfffcc29a, 0xfffce4fc, 0xfffd06a1, 0xfffd2787, 0xfffd47ae, 0x00000000 // this one is needed for lerping the last coefficient }; diff --git a/services/audioflinger/audio-resampler/AudioResamplerCoefficients.cpp b/services/audioflinger/audio-resampler/AudioResamplerCoefficients.cpp index ade58a7..af3e40d 100644 --- a/services/audioflinger/audio-resampler/AudioResamplerCoefficients.cpp +++ b/services/audioflinger/audio-resampler/AudioResamplerCoefficients.cpp @@ -14,42 +14,41 @@ * limitations under the License. 
*/ -#include -#include -#undef LOG_TAG -#include -//#include "common_log.h" #define LOG_TAG "ResamplerCoefficients" #define LOG_NDEBUG 0 -const int32_t RESAMPLE_FIR_NUM_COEF = 16; -const int32_t RESAMPLE_FIR_LERP_INT_BITS = 7; +#include + +#include "filter_coefficients.h" + +const int32_t RESAMPLE_FIR_NUM_COEF = 16; +const int32_t RESAMPLE_FIR_LERP_INT_BITS = 7; using namespace android; + #ifdef __cplusplus extern "C" { #endif + const int32_t* readResamplerCoefficients(bool upSample) { ALOGV("readResamplerCoefficients"); - if(upSample) { - return resampler_filter_coefficients_10042011; + if (upSample) { + return up_sampler_filter_coefficients; + } else { + return dn_sampler_filter_coefficients; } - else { - return dnsampler_filter_coefficients_x128_10112011; - } } int32_t readResampleFirNumCoeff() { - return RESAMPLE_FIR_NUM_COEF; } int32_t readResampleFirLerpIntBits() { - - return RESAMPLE_FIR_LERP_INT_BITS; + return RESAMPLE_FIR_LERP_INT_BITS; } + #ifdef __cplusplus } #endif diff --git a/services/audioflinger/audio-resampler/dnsampler_filter_coefficients_x128_10112011.h b/services/audioflinger/audio-resampler/dnsampler_filter_coefficients_x128_10112011.h deleted file mode 100644 index eb2944c..0000000 --- a/services/audioflinger/audio-resampler/dnsampler_filter_coefficients_x128_10112011.h +++ /dev/null @@ -1,2585 +0,0 @@ - -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#include - -namespace android { - -const int32_t dnsampler_filter_coefficients_x128_10112011[] = { -1849391518, -1849249650, -1848824221, -1848115177, -1847122891, -1845847499, -1844289491, -1842449129, -1840327103, -1837923861, -1835240203, -1832276710, -1829034388, -1825513999, -1821716652, -1817643240, -1813295074, -1808673214, -1803779065, -1798613825, -1793179109, -1787476282, -1781507048, -1775272909, -1768775774, -1762017295, -1754999460, -1747724062, -1740193297, -1732409109, -1724373764, -1716089338, -1707558293, -1698782831, -1689765473, -1680508558, -1671014814, -1661286715, -1651327042, -1641138399, -1630723762, -1620085842, -1609227651, -1598152036, -1586862206, -1575361116, -1563652006, -1551737947, -1539622358, -1527308388, -1514799466, -1502098869, -1489210218, -1476136877, -1462882477, -1449450498, -1435844739, -1422068735, -1408126274, -1394021008, -1379756900, -1365337657, -1350767225, -1336049408, -1321188297, -1306187721, -1291051731, -1275784259, -1260389523, -1244871491, -1229234343, -1213482137, -1197619187, -1181649550, -1165577481, -1149407125, -1133142876, -1116788884, -1100349476, -1083828868, -1067231493, -1050561533, -1033823337, -1017021160, -1000159475, -983242522, -966274693, -949260282, -932203771, -915109401, -897981548, -880824504, -863642743, -846440509, -829222164, -811991981, -794754382, -777513558, -760273800, -743039333, -725814532, -708603558, -691410662, -674240023, -657095935, -639982476, -622903794, -605863979, -588867231, -571917545, -555018972, -538175499, -521391190, -504669905, -488015541, -471431958, -454923090, -438492688, -422144531, -405882352, -389709927, -373630848, -357648715, -341767096, -325989596, -310319649, -294760687, -279316098, -263989284, -248783473, -233701878, -218747691, -203924116, -189234208, -174680993, -160267473, -145996632, -131871305, -117894282, -104068338, -90396230, -76880575, -63523937, -50328860, -37297842, -24433248, -11737378, --787473, --13139049, --25315210, --37313887, --49133021, --60770615, --72224791, --83493750, --94575698, --105468905, --116171744, --126682678, --137000178, --147122799, --157049194, --166778112, --176308299, --185638576, --194767849, --203695121, --212419395, --220939772, --229255432, --237365662, --245269744, --252967058, --260457050, --267739278, --274813300, --281678782, --288335451, --294783151, --301021713, --307051072, --312871207, --318482217, --323884189, --329077326, --334061875, --338838202, --343406662, --347767725, --351921891, --355869785, --359612019, --363149327, --366482470, --369612329, --372539768, --375265766, --377791316, --380117528, --382245500, --384176449, --385911601, --387452302, --388799883, --389955791, --390921477, --391698507, --392288434, --392692926, --392913649, --392952381, --392810881, --392491020, --391994654, --391323748, --390480249, --389466214, --388283683, --386934801, --385421695, --383746599, --381911725, --379919384, --377771869, --375471577, --373020872, --370422215, --367678046, --364790904, --361763287, --358597785, --355296968, --351863496, --348299986, --344609141, --340793645, --336856270, --332799742, --328626863, --324340418, --319943269, --315438225, --310828168, --306115960, --301304533, --296396768, --291395607, --286303973, --281124849, --275861163, --270515897, --265092015, --259592541, --254020439, --248378720, --242670378, --236898450, --231065914, --225175783, --219231057, --213234773, --207189910, --201099474, --194966455, --188793871, --182584674, --176341840, --170068330, --163767128, --157441156, --151093349, 
--144726627, --138343919, --131948090, --125542009, --119128533, --112710525, --106290787, --99872116, --93457296, --87049105, --80650255, --74263449, --67891377, --61536721, --55202101, --48890119, --42603362, --36344397, --30115726, --23919828, --17759168, --11636191, --5553280, -487212, -6482947, -12431620, -18330990, -24178851, -29973011, -35711312, -41391655, -47011985, -52570260, -58064483, -63492718, -68853076, -74143678, -79362692, -84508338, -89578892, -94572639, -99487923, -104323138, -109076739, -113747190, -118333012, -122832775, -127245111, -131568663, -135802141, -139944301, -143993966, -147949965, -151811195, -155576593, -159245166, -162815931, -166287976, -169660430, -172932498, -176103387, -179172383, -182138803, -185002039, -187761495, -190416648, -192967011, -195412171, -197751720, -199985330, -202112693, -204133584, -206047780, -207855140, -209555545, -211148956, -212635338, -214014737, -215287214, -216452911, -217511975, -218464633, -219311127, -220051778, -220686909, -221216923, -221642232, -221963326, -222180699, -222294922, -222306577, -222216320, -222024810, -221732784, -221340984, -220850224, -220261321, -219575167, -218792653, -217914741, -216942395, -215876649, -214718535, -213469149, -212129590, -210701024, -209184611, -207581573, -205893134, -204120584, -202265202, -200328328, -198311300, -196215518, -194042369, -191793295, -189469738, -187073198, -184605160, -182067159, -179460730, -176787462, -174048923, -171246730, -168382500, -165457899, -162474572, -159434208, -156338493, -153189156, -149987902, -146736470, -143436603, -140090079, -136698650, -133264101, -129788215, -126272805, -122719654, -119130572, -115507371, -111851884, -108165922, -104451311, -100709877, -96943466, -93153889, -89342976, -85512554, -81664466, -77800525, -73922553, -70032372, -66131809, -62222659, -58306723, -54385799, -50461691, -46536172, -42611011, -38687978, -34768834, -30855311, -26949132, -23052017, -19165682, -15291809, -11432068, -7588126, -3761633, --45792, --3832550, --7597044, --11337694, --15052951, --18741290, --22401190, --26031153, --29629717, --33195444, --36726898, --40222670, --43681381, --47101682, --50482228, --53821707, --57118836, --60372370, --63581062, --66743703, --69859111, --72926141, --75943654, --78910550, --81825759, --84688251, --87496999, --90251018, --92949348, --95591074, --98175284, --100701113, --103167725, --105574326, --107920130, --110204396, --112426408, --114585497, --116681001, --118712307, --120678826, --122580018, --124415349, --126184333, --127886505, --129521449, --131088758, --132588075, --134019062, --135381436, --136674915, --137899273, --139054300, --140139836, --141155730, --142101884, --142978213, --143784688, --144521281, --145188022, --145784950, --146312156, --146769738, --147157848, --147476649, --147726356, --147907187, --148019419, --148063331, --148039258, --147947536, --147788557, --147562717, --147270463, --146912244, --146488563, --145999923, --145446877, --144829983, --144149847, --143407076, --142602324, --141736248, --140809557, --139822954, --138777189, --137673015, --136511230, --135292629, --134018047, --132688326, --131304347, --129866988, --128377166, --126835801, --125243852, --123602271, --121912047, --120174172, --118389676, --116559578, --114684934, --112766800, --110806267, --108804411, --106762342, --104681172, --102562041, --100406081, --98214447, --95988299, --93728821, --91437183, --89114578, --86762204, --84381279, --81973009, --79538619, --77079333, --74596397, --72091035, --69564491, --67018008, 
--64452847, --61870249, --59271470, --56657765, --54030397, --51390615, --48739670, --46078819, --43409324, --40732431, --38049385, --35361437, --32669835, --29975809, --27280589, --24585404, --21891486, --19200045, --16512287, --13829420, --11152644, --8483140, --5822079, --3170634, --529970, -2098768, -4714446, -7315931, -9902105, -12471869, -15024139, -17557833, -20071880, -22565234, -25036861, -27485734, -29910841, -32311195, -34685828, -37033773, -39354083, -41645834, -43908123, -46140049, -48340738, -50509337, -52645015, -54746948, -56814333, -58846388, -60842359, -62801494, -64723072, -66606390, -68450776, -70255559, -72020101, -73743779, -75426002, -77066186, -78663776, -80218238, -81729070, -83195771, -84617877, -85994939, -87326540, -88612269, -89851753, -91044631, -92190579, -93289279, -94340447, -95343813, -96299144, -97206213, -98064827, -98874809, -99636019, -100348318, -101011607, -101625796, -102190834, -102706673, -103173303, -103590724, -103958975, -104278097, -104548170, -104769282, -104941559, -105065128, -105140157, -105166820, -105145327, -105075891, -104958764, -104794201, -104582494, -104323937, -104018863, -103667606, -103270537, -102828031, -102340497, -101808346, -101232024, -100611979, -99948695, -99242655, -98494373, -97704368, -96873190, -96001391, -95089549, -94138248, -93148103, -92119724, -91053751, -89950827, -88811624, -87636812, -86427086, -85183142, -83905705, -82595493, -81253249, -79879717, -78475668, -77041865, -75579093, -74088140, -72569815, -71024919, -69454270, -67858693, -66239031, -64596117, -62930801, -61243936, -59536392, -57809025, -56062708, -54298314, -52516733, -50718843, -48905532, -47077692, -45236223, -43382015, -41515965, -39638972, -37751945, -35855780, -33951378, -32039643, -30121481, -28197786, -26269454, -24337383, -22402475, -20465617, -18527696, -16589601, -14652220, -12716424, -10783083, -8853070, -6927251, -5006483, -3091612, -1183489, --717043, --2609159, --4492039, --6364862, --8226816, --10077103, --11914934, --13739523, --15550093, --17345884, --19126146, --20890132, --22637106, --24366346, --26077146, --27768800, --29440618, --31091927, --32722068, --34330384, --35916235, --37478994, --39018053, --40532806, --42022667, --43487063, --44925444, --46337259, --47721979, --49079087, --50408090, --51708495, --52979834, --54221651, --55433514, --56614992, --57765679, --58885178, --59973119, --61029133, --62052877, --63044020, --64002256, --64927281, --65818818, --66676601, --67500387, --68289938, --69045045, --69765506, --70451147, --71101796, --71717309, --72297549, --72842408, --73351780, --73825587, --74263759, --74666254, --75033033, --75364085, --75659405, --75919016, --76142943, --76331242, --76483972, --76601219, --76683074, --76729655, --76741083, --76717507, --76659078, --76565976, --76438382, --76276507, --76080561, --75850784, --75587416, --75290725, --74960980, --74598477, --74203511, --73776405, --73317481, --72827089, --72305578, --71753320, --71170691, --70558089, --69915913, --69244582, --68544519, --67816171, --67059982, --66276416, --65465942, --64629046, --63766215, --62877953, --61964766, --61027182, --60065722, --59080928, --58073342, --57043524, --55992029, --54919428, --53826296, --52713221, --51580788, --50429596, --49260245, --48073350, --46869516, --45649365, --44413516, --43162604, --41897255, --40618108, --39325799, --38020978, --36704285, --35376367, --34037875, --32689467, --31331794, --29965512, --28591278, --27209755, --25821597, --24427461, --23028006, --21623896, --20215785, --18804329, 
--17390186, --15974013, --14556459, --13138172, --11719802, --10301999, --8885401, --7470647, --6058375, --4649221, --3243807, --1842755, --446687, -943782, -2328043, -3705495, -5075537, -6437575, -7791026, -9135312, -10469860, -11794099, -13107474, -14409434, -15699433, -16976932, -18241406, -19492338, -20729214, -21951528, -23158786, -24350507, -25526208, -26685422, -27827692, -28952575, -30059626, -31148419, -32218533, -33269565, -34301113, -35312788, -36304216, -37275034, -38224885, -39153423, -40060316, -40945246, -41807897, -42647972, -43465182, -44259258, -45029930, -45776947, -46500068, -47199069, -47873729, -48523844, -49149222, -49749687, -50325066, -50875204, -51399956, -51899193, -52372790, -52820642, -53242651, -53638739, -54008828, -54352864, -54670795, -54962591, -55228223, -55467686, -55680976, -55868110, -56029109, -56164013, -56272864, -56355729, -56412671, -56443780, -56449143, -56428871, -56383077, -56311893, -56215452, -56093910, -55947423, -55776167, -55580318, -55360074, -55115633, -54847211, -54555028, -54239319, -53900322, -53538294, -53153491, -52746187, -52316657, -51865197, -51392097, -50897668, -50382220, -49846081, -49289577, -48713047, -48116836, -47501302, -46866801, -46213703, -45542381, -44853220, -44146603, -43422927, -42682590, -41926002, -41153572, -40365719, -39562864, -38745438, -37913870, -37068598, -36210060, -35338709, -34454988, -33559353, -32652259, -31734171, -30805546, -29866852, -28918556, -27961133, -26995053, -26020792, -25038826, -24049638, -23053702, -22051499, -21043509, -20030219, -19012108, -17989658, -16963352, -15933674, -14901104, -13866119, -12829202, -11790833, -10751487, -9711638, -8671763, -7632335, -6593820, -5556683, -4521391, -3488407, -2458187, -1431187, -407859, --611345, --1625983, --2635618, --3639811, --4638127, --5630141, --6615431, --7593576, --8564163, --9526785, --10481043, --11426537, --12362876, --13289673, --14206552, --15113136, --16009059, --16893959, --17767487, --18629291, --19479032, --20316375, --21140997, --21952575, --22750798, --23535360, --24305970, --25062334, --25804171, --26531205, --27243175, --27939817, --28620882, --29286128, --29935325, --30568244, --31184668, --31784388, --32367206, --32932927, --33481369, --34012357, --34525729, --35021324, --35498994, --35958600, --36400012, --36823105, --37227767, --37613893, --37981391, --38330171, --38660157, --38971278, --39263479, --39536704, --39790914, --40026074, --40242163, --40439161, --40617065, --40775874, --40915600, --41036259, --41137883, --41220502, --41284167, --41328926, --41354844, --41361987, --41350437, --41320276, --41271601, --41204513, --41119123, --41015546, --40893911, --40754348, --40597001, --40422014, --40229546, --40019757, --39792819, --39548906, --39288206, --39010907, --38717208, --38407310, --38081429, --37739776, --37382578, --37010062, --36622464, --36220024, --35802990, --35371611, --34926149, --34466863, --33994021, --33507895, --33008767, --32496915, --31972628, --31436196, --30887917, --30328089, --29757014, --29175000, --28582360, --27979405, --27366455, --26743828, --26111852, --25470849, --24821150, --24163084, --23496990, --22823200, --22142053, --21453889, --20759052, --20057882, --19350724, --18637922, --17919826, --17196782, --16469137, --15737241, --15001445, --14262095, --13519542, --12774133, --12026222, --11276155, --10524279, --9770942, --9016494, --8261277, --7505635, --6749910, --5994447, --5239585, --4485659, --3733007, --2981966, --2232865, --1486032, --741794, --479, -737595, -1472108, -2202745, -2929192, 
-3651141, -4368289, -5080331, -5786969, -6487909, -7182860, -7871535, -8553649, -9228926, -9897092, -10557878, -11211015, -11856245, -12493314, -13121967, -13741959, -14353050, -14955005, -15547593, -16130588, -16703768, -17266923, -17819839, -18362313, -18894148, -19415155, -19925144, -20423935, -20911353, -21387231, -21851406, -22303720, -22744023, -23172174, -23588031, -23991464, -24382346, -24760559, -25125987, -25478524, -25818069, -26144531, -26457817, -26757849, -27044549, -27317851, -27577690, -27824011, -28056763, -28275905, -28481398, -28673213, -28851323, -29015713, -29166367, -29303283, -29426459, -29535904, -29631629, -29713655, -29782006, -29836715, -29877818, -29905359, -29919387, -29919958, -29907132, -29880978, -29841566, -29788975, -29723288, -29644596, -29552991, -29448575, -29331452, -29201735, -29059537, -28904981, -28738192, -28559303, -28368447, -28165766, -27951404, -27725513, -27488244, -27239759, -26980219, -26709793, -26428650, -26136967, -25834922, -25522700, -25200487, -24868475, -24526855, -24175830, -23815596, -23446360, -23068326, -22681709, -22286719, -21883572, -21472486, -21053686, -20627392, -20193830, -19753228, -19305819, -18851833, -18391506, -17925071, -17452770, -16974838, -16491518, -16003049, -15509677, -15011645, -14509198, -14002580, -13492042, -12977828, -12460185, -11939362, -11415608, -10889171, -10360299, -9829240, -9296245, -8761559, -8225429, -7688103, -7149828, -6610849, -6071411, -5531756, -4992131, -4452774, -3913926, -3375826, -2838712, -2302821, -1768384, -1235637, -704811, -176132, --350173, --873879, --1394763, --1912606, --2427192, --2938303, --3445727, --3949255, --4448681, --4943802, --5434414, --5920319, --6401326, --6877239, --7347870, --7813034, --8272552, --8726243, --9173932, --9615448, --10050623, --10479293, --10901295, --11316474, --11724679, --12125759, --12519569, --12905967, --13284818, --13655986, --14019344, --14374766, --14722134, --15061330, --15392240, --15714758, --16028781, --16334207, --16630942, --16918895, --17197981, --17468118, --17729228, --17981237, --18224079, --18457687, --18682004, --18896973, --19102546, --19298674, --19485318, --19662437, --19830002, --19987982, --20136353, --20275096, --20404197, --20523643, --20633430, --20733554, --20824018, --20904829, --20975997, --21037538, --21089473, --21131822, --21164617, --21187886, --21201668, --21206001, --21200931, --21186504, --21162774, --21129796, --21087631, --21036341, --20975996, --20906666, --20828426, --20741355, --20645535, --20541051, --20427994, --20306454, --20176529, --20038316, --19891920, --19737444, --19574998, --19404692, --19226642, --19040965, --18847782, --18647215, --18439392, --18224439, --18002487, --17773669, --17538123, --17295986, --17047397, --16792499, --16531439, --16264360, --15991413, --15712746, --15428514, --15138869, --14843968, --14543967, --14239027, --13929305, --13614963, --13296164, --12973072, --12645852, --12314669, --11979690, --11641084, --11299018, --10953660, --10605181, --10253752, --9899543, --9542724, --9183468, --8821947, --8458330, --8092791, --7725499, --7356629, --6986351, --6614835, --6242252, --5868775, --5494571, --5119810, --4744660, --4369291, --3993868, --3618559, --3243529, --2868942, --2494962, --2121750, --1749467, --1378274, --1008329, --639789, --272809, -92455, -455851, -817231, -1176442, -1533338, -1887773, -2239604, -2588688, -2934884, -3278056, -3618068, -3954787, -4288079, -4617816, -4943871, -5266119, -5584437, -5898705, -6208806, -6514625, -6816049, -7112968, -7405275, -7692864, 
-7975633, -8253483, -8526319, -8794045, -9056570, -9313805, -9565666, -9812069, -10052933, -10288182, -10517742, -10741541, -10959512, -11171587, -11377707, -11577809, -11771839, -11959741, -12141468, -12316971, -12486205, -12649128, -12805703, -12955894, -13099669, -13236996, -13367853, -13492214, -13610059, -13721371, -13826136, -13924343, -14015982, -14101050, -14179544, -14251463, -14316813, -14375598, -14427829, -14473516, -14512676, -14545324, -14571484, -14591176, -14604428, -14611268, -14611727, -14605840, -14593643, -14575174, -14550478, -14519596, -14482577, -14439469, -14390325, -14335196, -14274142, -14207219, -14134490, -14056016, -13971864, -13882101, -13786797, -13686023, -13579854, -13468364, -13351632, -13229736, -13102759, -12970783, -12833894, -12692177, -12545721, -12394615, -12238951, -12078822, -11914322, -11745546, -11572592, -11395558, -11214544, -11029649, -10840977, -10648630, -10452712, -10253328, -10050584, -9844586, -9635444, -9423263, -9208154, -8990226, -8769590, -8546356, -8320637, -8092543, -7862188, -7629684, -7395143, -7158678, -6920405, -6680434, -6438881, -6195859, -5951482, -5705863, -5459115, -5211352, -4962688, -4713234, -4463103, -4212408, -3961262, -3709774, -3458055, -3206216, -2954367, -2702617, -2451073, -2199845, -1949039, -1698760, -1449115, -1200206, -952139, -705015, -458936, -214002, --29687, --272033, --512941, --752313, --990055, --1226074, --1460278, --1692576, --1922878, --2151097, --2377147, --2600942, --2822399, --3041435, --3257970, --3471925, --3683223, --3891787, --4097545, --4300424, --4500352, --4697261, --4891083, --5081754, --5269208, --5453384, --5634222, --5811664, --5985653, --6156134, --6323055, --6486364, --6646013, --6801954, --6954144, --7102537, --7247093, --7387773, --7524538, --7657353, --7786184, --7910999, --8031770, --8148467, --8261064, --8369539, --8473868, --8574031, --8670011, --8761790, --8849356, --8932694, --9011796, --9086651, --9157254, --9223598, --9285682, --9343504, --9397065, --9446367, --9491415, --9532214, --9568774, --9601103, --9629214, --9653119, --9672835, --9688376, --9699763, --9707015, --9710154, --9709202, --9704187, --9695132, --9682068, --9665024, --9644031, --9619121, --9590330, --9557692, --9521245, --9481028, --9437080, --9389443, --9338159, --9283272, --9224828, --9162872, --9097452, --9028617, --8956417, --8880903, --8802127, --8720142, --8635003, --8546764, --8455484, --8361217, --8264022, --8163959, --8061088, --7955468, --7847161, --7736229, --7622737, --7506745, --7388320, --7267525, --7144428, --7019092, --6891585, --6761974, --6630327, --6496711, --6361194, --6223845, --6084734, --5943929, --5801500, --5657517, --5512050, --5365168, --5216942, --5067443, --4916741, --4764906, --4612009, --4458120, --4303311, --4147650, --3991209, --3834057, --3676265, --3517901, --3359037, --3199740, --3040080, --2880126, --2719945, --2559606, --2399177, --2238724, --2078315, --1918015, --1757891, --1598007, --1438428, --1279218, --1120441, --962159, --804435, --647330, --490906, --335223, --180338, --26312, -126797, -278933, -430040, -580062, -728943, -876632, -1023074, -1168219, -1312015, -1454411, -1595360, -1734812, -1872720, -2009038, -2143721, -2276725, -2408006, -2537522, -2665233, -2791097, -2915076, -3037131, -3157228, -3275328, -3391399, -3505405, -3617315, -3727097, -3834720, -3940156, -4043377, -4144356, -4243067, -4339485, -4433587, -4525351, -4614755, -4701780, -4786406, -4868616, -4948393, -5025721, -5100587, -5172977, -5242878, -5310279, -5375172, -5437547, -5497396, -5554712, 
-5609491, -5661728, -5711419, -5758562, -5803157, -5845203, -5884701, -5921652, -5956061, -5987931, -6017267, -6044075, -6068363, -6090138, -6109409, -6126186, -6140482, -6152306, -6161673, -6168595, -6173088, -6175167, -6174849, -6172151, -6167091, -6159687, -6149960, -6137930, -6123618, -6107046, -6088238, -6067217, -6044007, -6018632, -5991120, -5961496, -5929787, -5896020, -5860225, -5822429, -5782663, -5740956, -5697338, -5651841, -5604497, -5555336, -5504393, -5451699, -5397289, -5341196, -5283454, -5224099, -5163164, -5100686, -5036700, -4971242, -4904348, -4836055, -4766400, -4695420, -4623152, -4549633, -4474901, -4398995, -4321952, -4243810, -4164608, -4084383, -4003175, -3921021, -3837960, -3754032, -3669273, -3583724, -3497422, -3410406, -3322714, -3234384, -3145456, -3055967, -2965954, -2875457, -2784513, -2693159, -2601433, -2509371, -2417012, -2324391, -2231545, -2138511, -2045325, -1952022, -1858638, -1765208, -1671768, -1578350, -1484991, -1391723, -1298581, -1205597, -1112804, -1020235, -927921, -835894, -744186, -652826, -561846, -471276, -381143, -291479, -202310, -113666, -25572, --61942, --148851, --235129, --320751, --405691, --489926, --573430, --656181, --738156, --819332, --899688, --979202, --1057854, --1135623, --1212489, --1288435, --1363441, --1437489, --1510563, --1582644, --1653718, --1723769, --1792781, --1860740, --1927631, --1993443, --2058161, --2121774, --2184270, --2245638, --2305868, --2364949, --2422873, --2479630, --2535212, --2589613, --2642824, --2694840, --2745654, --2795261, --2843656, --2890834, --2936793, --2981528, --3025036, --3067316, --3108366, --3148185, --3186772, --3224126, --3260249, --3295140, --3328802, --3361236, --3392444, --3422430, --3451195, --3478744, --3505081, --3530210, --3554136, --3576865, --3598402, --3618754, --3637927, --3655929, --3672765, --3688446, --3702978, --3716370, --3728631, --3739770, --3749797, --3758721, --3766553, --3773304, --3778983, --3783603, --3787174, --3789709, --3791219, --3791717, --3791215, --3789725, --3787262, --3783838, --3779466, --3774160, --3767934, --3760802, --3752778, --3743877, --3734113, --3723501, --3712056, --3699792, --3686725, --3672871, --3658244, --3642860, --3626735, --3609884, --3592324, --3574069, --3555137, --3535542, --3515302, --3494432, --3472947, --3450866, --3428203, --3404974, --3381197, --3356887, --3332060, --3306732, --3280921, --3254641, --3227909, --3200741, --3173153, --3145162, --3116781, --3088029, --3058920, --3029469, --2999694, --2969608, --2939228, --2908568, --2877644, --2846472, --2815064, --2783438, --2751607, --2719586, --2687389, --2655030, --2622524, --2589885, --2557126, --2524261, --2491304, --2458268, --2425166, --2392011, --2358815, --2325592, --2292354, --2259114, --2225882, --2192671, --2159493, --2126359, --2093281, --2060269, --2027334, --1994487, --1961738, --1929098, --1896576, --1864183, --1831927, --1799818, --1767865, --1736078, --1704464, --1673032, --1641791, --1610748, --1579911, --1549288, --1518886, --1488713, --1458774, --1429078, --1399630, --1370436, --1341503, --1312836, --1284441, --1256324, --1228489, --1200941, --1173686, --1146727, --1120069, --1093717, --1067673, --1041942, --1016527, --991432, --966659, --942211, --918092, --894303, --870846, --847724, --824940, --802493, --780386, --758621, --737199, --716120, --695385, --674996, --654951, --635253, --615901, --596895, --578234, --559919, --541949, --524324, --507042, --490103, --473505, --457248, --441330, --425750, --410506, --395596, --381019, --366773, --352855, 
--339263, --325995, --313049, --300421, --288110, --276112, --264425, --253046, --241972, --231199, --220725, --210546, --200659, --191060, --181746, --172713, --163958, --155477, --147266, --139321, --131639, --124215, --117045, --110126, --103453, --97022, --90829, --84870, --79140, --73636, --68352, --63285, --58431, --53784, --49341, --45097, --41048, --37188, --33515, --30023, --26708, --23566, --20592, --17782, --15130, --12634, --10289, --8089, --6031, --4111, --2324, --666, -868, -2281, -3578, -4763, -5839, -6812, -7685, -8462, -9146, -9743, -10255, -10686, -11041, -11322, -11533, -11679, -11762, -11785, -11753, -11668, -11533, -11352, -11128, -10864, -10563, -10227, -9860, -9464, -9042, -8596, -8129, -7644, -7142, -6626, -6099, -5562, -5017, -4467, -3913, -3357, -2800, -2245, -1694, -1146, -605, -71, --454, --970, --1474, --1966, --2446, --2911, --3362, --3797, --4215, --4617, --5001, --5367, --5714, --6043, --6352, --6641, --6910, --7160, --7389, --7598, --7786, --7954, --8102, --8230, --8338, --8426, --8495, --8545, --8575, --8587, --8582, --8558, --8517, --8460, --8386, --8297, --8192, --8073, --7940, --7794, --7635, --7464, --7281, --7088, --6885, --6672, --6450, --6221, --5984, --5740, --5490, --5235, --4975, --4711, --4443, --4173, --3901, --3627, --3352, --3077, --2803, --2529, --2257, --1986, --1719, --1454, --1193, --935, --683, --435, --192, -45, -276, -501, -719, -930, -1134, -1331, -1519, -1700, -1873, -2038, -2194, -2342, -2481, -2611, -2733, -2846, -2950, -3046, -3133, -3211, -3281, -3343, -3396, -3441, -3477, -3506, -3527, -3541, -3547, -3546, -3538, -3523, -3502, -3474, -3441, -3401, -3357, -3307, -3252, -3192, -3128, -3060, -2989, -2913, -2835, -2753, -2669, -2583, -2494, -2403, -2311, -2218, -2124, -2028, -1933, -1837, -1741, -1645, -1550, -1455, -1361, -1268, -1176, -1086, -997, -910, -825, -741, -660, -581, -504, -429, -357, -287, -220, -156, -94, -35, --22, --75, --126, --175, --220, --263, --303, --341, --375, --408, --437, --464, --489, --511, --531, --548, --564, --577, --588, --597, --604, --610, --613, --615, --616, --614, --612, --608, --603, --597, --590, --582, --573, --563, --553, --542, --530, --518, --506, --493, --480, --466, --453, --439, --425, --411, --398, --384, --370, --357, --344, --331, --318, --305, --293, --281, --269, --258, --247, --237, --227, --217, --208, --199, --190, --182, --174, --167, --160, --154, --147, --142, --136, --131, --126, --121, --117, --113, --109, --106, --102, --99, --96, --93, --90, --87, --85, --82, --80, --78, --76, --74, --72, --70, --68, --66, --64, --62, --60, --58, --57, --55, --53, --51, --50, --48, --46, --45, --43, --41, --40, --38, --36, --35, --33, --31, --30, --28, --27, --25, --24, --22, --21, --20, --18, --17, --16, --15, --13, --12, --11, --10, --9, --9, --8, --7, --6, -}; -} diff --git a/services/audioflinger/audio-resampler/filter_coefficients.h b/services/audioflinger/audio-resampler/filter_coefficients.h new file mode 100644 index 0000000..bd98136 --- /dev/null +++ b/services/audioflinger/audio-resampler/filter_coefficients.h @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include + +namespace android { + +// cmd-line: fir -l 7 -s 48000 -c 23400 -n 16 -b 9.62 +const int32_t up_sampler_filter_coefficients[] = { + 0x7ccccccd, 0x7cc9b757, 0x7cc0773c, 0x7cb10d52, 0x7c9b7afd, 0x7c7fc22f, 0x7c5de56a, 0x7c35e7bb, 0x7c07ccbe, 0x7bd3989d, 0x7b99500c, 0x7b58f84d, 0x7b12972d, 0x7ac63304, 0x7a73d2b5, 0x7a1b7daa, 0x79bd3bd8, 0x795915bc, 0x78ef1457, 0x787f4134, 0x7809a65e, 0x778e4e68, 0x770d4466, 0x768693ec, 0x75fa4911, 0x75687068, 0x74d11703, 0x74344a70, 0x739218b8, 0x72ea905a, 0x723dc051, 0x718bb80b, 0x70d4876b, 0x70183ec5, 0x6f56eee1, 0x6e90a8f2, 0x6dc57e9b, 0x6cf581e8, 0x6c20c550, 0x6b475bb0, 0x6a69584a, 0x6986cec4, 0x689fd324, 0x67b479cf, 0x66c4d787, 0x65d10168, 0x64d90ce7, 0x63dd0fcd, 0x62dd2039, 0x61d95497, 0x60d1c3a6, 0x5fc68470, 0x5eb7ae46, 0x5da558c5, 0x5c8f9bcb, 0x5b768f7a, 0x5a5a4c32, 0x593aea93, 0x58188376, 0x56f32fea, 0x55cb0935, 0x54a028d0, 0x5372a862, 0x5242a1c1, 0x51102eec, 0x4fdb6a09, 0x4ea46d66, 0x4d6b536f, 0x4c3036b2, 0x4af331d9, 0x49b45fa8, 0x4873daf7, 0x4731beb7, 0x45ee25e7, 0x44a92b96, 0x4362eadc, 0x421b7edf, 0x40d302c5, 0x3f8991bd, 0x3e3f46f2, 0x3cf43d8f, 0x3ba890b9, 0x3a5c5b8e, 0x390fb920, 0x37c2c474, 0x36759880, 0x35285026, 0x33db0631, 0x328dd556, 0x3140d82e, 0x2ff42933, 0x2ea7e2c0, 0x2d5c1f0e, 0x2c10f82d, 0x2ac68807, 0x297ce85a, 0x283432b9, 0x26ec8083, 0x25a5eae8, 0x24608ae2, 0x231c7932, 0x21d9ce63, 0x2098a2bf, 0x1f590e55, 0x1e1b28f2, 0x1cdf0a20, 0x1ba4c923, 0x1a6c7cf9, 0x19363c54, 0x18021d9d, 0x16d036eb, 0x15a09e09, 0x1473686d, 0x1348ab3a, 0x12207b3e, 0x10faecee, 0x0fd81464, 0x0eb80562, 0x0d9ad348, 0x0c80911b, 0x0b69517e, 0x0a5526b0, 0x0944228e, 0x08365690, 0x072bd3c5, 0x0624aad6, 0x0520ec00, 0x0420a716, + 0x0323eb7f, 0x022ac835, 0x01354bc1, 0x0043843f, 0xff557f58, 0xfe6b4a44, 0xfd84f1c8, 0xfca28234, 0xfbc40766, 0xfae98cc5, 0xfa131d41, 0xf940c355, 0xf8728902, 0xf7a877d4, 0xf6e298db, 0xf620f4b2, 0xf5639376, 0xf4aa7cce, 0xf3f5b7e4, 0xf3454b6a, 0xf2993d95, 0xf1f19421, 0xf14e544f, 0xf0af82e4, 0xf015242b, 0xef7f3bf5, 0xeeedcd98, 0xee60dbee, 0xedd86958, 0xed5477be, 0xecd5088e, 0xec5a1cbc, 0xebe3b4c5, 0xeb71d0ab, 0xeb046ffc, 0xea9b91cc, 0xea3734bb, 0xe9d756f3, 0xe97bf627, 0xe9250f99, 0xe8d2a017, 0xe884a3fb, 0xe83b1731, 0xe7f5f531, 0xe7b53908, 0xe778dd50, 0xe740dc3c, 0xe70d2f8d, 0xe6ddd09f, 0xe6b2b862, 0xe68bdf5e, 0xe6693db5, 0xe64acb24, 0xe6307f05, 0xe61a504f, 0xe6083599, 0xe5fa2519, 0xe5f014aa, 0xe5e9f9ca, 0xe5e7c99e, 0xe5e978f0, 0xe5eefc35, 0xe5f8478d, 0xe6054ec6, 0xe616055a, 0xe62a5e76, 0xe6424cf8, 0xe65dc373, 0xe67cb42f, 0xe69f112f, 0xe6c4cc2e, 0xe6edd6a4, 0xe71a21c7, 0xe7499e8f, 0xe77c3db4, 0xe7b1efb4, 0xe7eaa4d4, 0xe8264d21, 0xe864d874, 0xe8a63671, 0xe8ea568f, 0xe9312813, 0xe97a9a17, 0xe9c69b8c, 0xea151b3a, 0xea6607c4, 0xeab94fa9, 0xeb0ee148, 0xeb66aae0, 0xebc09a94, 0xec1c9e6d, 0xec7aa45b, 0xecda9a39, 0xed3c6dce, 0xeda00cd1, 0xee0564e8, 0xee6c63ad, 0xeed4f6b0, 0xef3f0b78, 0xefaa8f87, 0xf017705a, 0xf0859b6e, 0xf0f4fe3d, 0xf1658649, 0xf1d72114, 0xf249bc2c, 0xf2bd4523, 0xf331a99b, 0xf3a6d741, 0xf41cbbd3, 0xf493451f, 0xf50a610a, 0xf581fd8b, 0xf5fa08b5, 0xf67270b1, 0xf6eb23c6, 0xf7641059, 0xf7dd24ef, 0xf856502d, 0xf8cf80de, 
0xf948a5f0, 0xf9c1ae7b, 0xfa3a89be, 0xfab32723, 0xfb2b7641, 0xfba366df, 0xfc1ae8f2, 0xfc91eca1, + 0xfd086246, 0xfd7e3a71, 0xfdf365e8, 0xfe67d5a8, 0xfedb7ae9, 0xff4e471d, 0xffc02bf2, 0x00311b54, 0x00a1076e, 0x010fe2ab, 0x017d9fb8, 0x01ea3184, 0x02558b43, 0x02bfa06d, 0x032864c1, 0x038fcc44, 0x03f5cb46, 0x045a565c, 0x04bd6269, 0x051ee498, 0x057ed264, 0x05dd218f, 0x0639c82d, 0x0694bca0, 0x06edf595, 0x07456a0e, 0x079b1158, 0x07eee314, 0x0840d732, 0x0890e5f7, 0x08df07f6, 0x092b3617, 0x09756994, 0x09bd9bfb, 0x0a03c72b, 0x0a47e559, 0x0a89f10c, 0x0ac9e521, 0x0b07bcc6, 0x0b437380, 0x0b7d0525, 0x0bb46de2, 0x0be9aa34, 0x0c1cb6ef, 0x0c4d913a, 0x0c7c368d, 0x0ca8a4b7, 0x0cd2d9d5, 0x0cfad45a, 0x0d209309, 0x0d4414f9, 0x0d65598f, 0x0d846084, 0x0da129df, 0x0dbbb5f6, 0x0dd40571, 0x0dea1943, 0x0dfdf2ae, 0x0e0f9342, 0x0e1efcdb, 0x0e2c319d, 0x0e3733fc, 0x0e4006b2, 0x0e46acc4, 0x0e4b297c, 0x0e4d806f, 0x0e4db575, 0x0e4bccac, 0x0e47ca78, 0x0e41b37c, 0x0e398c9f, 0x0e2f5b0b, 0x0e232425, 0x0e14ed93, 0x0e04bd39, 0x0df29936, 0x0dde87e2, 0x0dc88fd2, 0x0db0b7d1, 0x0d9706e1, 0x0d7b843b, 0x0d5e3749, 0x0d3f27ab, 0x0d1e5d32, 0x0cfbdfdd, 0x0cd7b7dd, 0x0cb1ed8c, 0x0c8a8973, 0x0c619444, 0x0c3716da, 0x0c0b1a37, 0x0bdda783, 0x0baec80a, 0x0b7e853c, 0x0b4ce8a8, 0x0b19fbfe, 0x0ae5c90b, 0x0ab059bc, 0x0a79b814, 0x0a41ee32, 0x0a09064e, 0x09cf0ab4, 0x099405c6, 0x095801f8, 0x091b09d1, 0x08dd27e6, 0x089e66dd, 0x085ed167, 0x081e7241, 0x07dd5430, 0x079b8203, 0x0759068f, 0x0715ecae, 0x06d23f3d, 0x068e091c, 0x0649552a, 0x06042e45, 0x05be9f49, 0x0578b30e, 0x05327467, 0x04ebee1c, 0x04a52af2, 0x045e359f, 0x041718d2, 0x03cfdf29, 0x03889336, 0x03413f7b, 0x02f9ee68, + 0x02b2aa5c, 0x026b7da1, 0x0224726d, 0x01dd92df, 0x0196e8fe, 0x01507eb8, 0x010a5de2, 0x00c49034, 0x007f1f4b, 0x003a14a6, 0xfff579a3, 0xffb15783, 0xff6db764, 0xff2aa243, 0xfee820f8, 0xfea63c38, 0xfe64fc93, 0xfe246a72, 0xfde48e17, 0xfda56f9c, 0xfd6716f2, 0xfd298be0, 0xfcecd602, 0xfcb0fcca, 0xfc76077b, 0xfc3bfd2e, 0xfc02e4cc, 0xfbcac510, 0xfb93a486, 0xfb5d898c, 0xfb287a4d, 0xfaf47cc4, 0xfac196bb, 0xfa8fcdca, 0xfa5f2755, 0xfa2fa890, 0xfa015679, 0xf9d435dc, 0xf9a84b50, 0xf97d9b37, 0xf95429c0, 0xf92bfae4, 0xf9051266, 0xf8df73d6, 0xf8bb228c, 0xf89821ac, 0xf8767422, 0xf8561ca7, 0xf8371dbb, 0xf81979ab, 0xf7fd328c, 0xf7e24a3c, 0xf7c8c267, 0xf7b09c7f, 0xf799d9c4, 0xf7847b3d, 0xf77081be, 0xf75dede5, 0xf74cc01c, 0xf73cf898, 0xf72e9758, 0xf7219c2a, 0xf71606a6, 0xf70bd632, 0xf7030a01, 0xf6fba113, 0xf6f59a36, 0xf6f0f407, 0xf6edacf2, 0xf6ebc332, 0xf6eb34d4, 0xf6ebffb2, 0xf6ee217b, 0xf6f197ad, 0xf6f65f9b, 0xf6fc766a, 0xf703d912, 0xf70c8461, 0xf71674fa, 0xf721a756, 0xf72e17c4, 0xf73bc26b, 0xf74aa34c, 0xf75ab63f, 0xf76bf6f7, 0xf77e6103, 0xf791efcb, 0xf7a69e96, 0xf7bc6889, 0xf7d348a4, 0xf7eb39cc, 0xf80436c0, 0xf81e3a25, 0xf8393e81, 0xf8553e3c, 0xf87233a4, 0xf89018eb, 0xf8aee828, 0xf8ce9b5d, 0xf8ef2c71, 0xf9109535, 0xf932cf65, 0xf955d4a7, 0xf9799e8f, 0xf99e269e, 0xf9c36642, 0xf9e956da, 0xfa0ff1b6, 0xfa373017, 0xfa5f0b30, 0xfa877c29, 0xfab07c1d, 0xfada0420, 0xfb040d3b, 0xfb2e906f, 0xfb5986b6, 0xfb84e906, 0xfbb0b04e, 0xfbdcd57a, 0xfc095174, 0xfc361d25, 0xfc633173, 0xfc908746, 0xfcbe1789, 0xfcebdb26, 0xfd19cb0e, 0xfd47e035, 0xfd761395, + 0xfda45e2c, 0xfdd2b905, 0xfe011d2e, 0xfe2f83c1, 0xfe5de5e3, 0xfe8c3cc3, 0xfeba819d, 0xfee8adba, 0xff16ba71, 0xff44a128, 0xff725b54, 0xff9fe27d, 0xffcd303b, 0xfffa3e37, 0x00270631, 0x005381fa, 0x007fab77, 0x00ab7ca6, 0x00d6ef99, 0x0101fe7a, 0x012ca389, 0x0156d920, 0x018099b2, 0x01a9dfcc, 0x01d2a615, 0x01fae74e, 0x02229e57, 0x0249c629, 0x027059da, 0x029654a0, 0x02bbb1cc, 
0x02e06ccf, 0x03048139, 0x0327eab8, 0x034aa51b, 0x036cac52, 0x038dfc6c, 0x03ae919a, 0x03ce682d, 0x03ed7c9a, 0x040bcb77, 0x0429517b, 0x04460b81, 0x0461f688, 0x047d0fb1, 0x0497543f, 0x04b0c19a, 0x04c9554e, 0x04e10d0a, 0x04f7e6a2, 0x050de00d, 0x0522f766, 0x05372aee, 0x054a7909, 0x055ce03f, 0x056e5f3d, 0x057ef4d3, 0x058e9ff8, 0x059d5fc5, 0x05ab3377, 0x05b81a70, 0x05c41435, 0x05cf2070, 0x05d93eee, 0x05e26f9f, 0x05eab296, 0x05f20809, 0x05f87053, 0x05fdebee, 0x06027b78, 0x06061fb2, 0x0608d97c, 0x060aa9da, 0x060b91ee, 0x060b92ff, 0x060aae6e, 0x0608e5c2, 0x06063a9d, 0x0602aec3, 0x05fe4414, 0x05f8fc8f, 0x05f2da52, 0x05ebdf97, 0x05e40eb3, 0x05db6a19, 0x05d1f459, 0x05c7b01a, 0x05bca021, 0x05b0c74b, 0x05a42890, 0x0596c6ff, 0x0588a5bf, 0x0579c812, 0x056a314b, 0x0559e4da, 0x0548e63f, 0x05373912, 0x0524e100, 0x0511e1c6, 0x04fe3f39, 0x04e9fd3c, 0x04d51fc6, 0x04bfaadf, 0x04a9a29e, 0x04930b2b, 0x047be8bc, 0x04643f95, 0x044c1409, 0x04336a75, 0x041a4744, 0x0400aeec, 0x03e6a5ee, 0x03cc30d4, 0x03b15431, 0x039614a1, 0x037a76c7, 0x035e7f4e, 0x034232e6, 0x03259644, 0x0308ae24, 0x02eb7f44, 0x02ce0e67, 0x02b0604f, 0x029279c4, 0x02745f8c, 0x02561670, 0x0237a337, 0x02190aa6, + 0x01fa5183, 0x01db7c90, 0x01bc908b, 0x019d9230, 0x017e8635, 0x015f714d, 0x01405821, 0x01213f58, 0x01022b90, 0x00e3215e, 0x00c42551, 0x00a53bed, 0x008669ae, 0x0067b303, 0x00491c54, 0x002aa9fa, 0x000c6043, 0xffee4372, 0xffd057bb, 0xffb2a145, 0xff952429, 0xff77e470, 0xff5ae614, 0xff3e2d01, 0xff21bd11, 0xff059a0e, 0xfee9c7af, 0xfece499d, 0xfeb3236b, 0xfe98589b, 0xfe7dec9c, 0xfe63e2cc, 0xfe4a3e70, 0xfe3102bd, 0xfe1832d4, 0xfdffd1bd, 0xfde7e26f, 0xfdd067ca, 0xfdb96498, 0xfda2db8c, 0xfd8ccf46, 0xfd77424c, 0xfd62370e, 0xfd4dafe6, 0xfd39af17, 0xfd2636ca, 0xfd134913, 0xfd00e7ec, 0xfcef153a, 0xfcddd2c7, 0xfccd2246, 0xfcbd0551, 0xfcad7d6b, 0xfc9e8bfd, 0xfc903258, 0xfc8271b4, 0xfc754b32, 0xfc68bfd7, 0xfc5cd092, 0xfc517e38, 0xfc46c987, 0xfc3cb323, 0xfc333b97, 0xfc2a6356, 0xfc222abb, 0xfc1a9208, 0xfc139968, 0xfc0d40ec, 0xfc07888e, 0xfc027031, 0xfbfdf79e, 0xfbfa1e88, 0xfbf6e48c, 0xfbf4492d, 0xfbf24bd9, 0xfbf0ebe7, 0xfbf02896, 0xfbf00112, 0xfbf0746e, 0xfbf181a9, 0xfbf327ab, 0xfbf56549, 0xfbf83941, 0xfbfba23f, 0xfbff9ed7, 0xfc042d8e, 0xfc094cd2, 0xfc0efafe, 0xfc15365c, 0xfc1bfd22, 0xfc234d75, 0xfc2b2567, 0xfc3382fb, 0xfc3c6420, 0xfc45c6b6, 0xfc4fa88f, 0xfc5a076a, 0xfc64e0f9, 0xfc7032de, 0xfc7bfaad, 0xfc8835ed, 0xfc94e216, 0xfca1fc96, 0xfcaf82ca, 0xfcbd7206, 0xfccbc793, 0xfcda80ad, 0xfce99a86, 0xfcf91246, 0xfd08e50c, 0xfd190fed, 0xfd298ff6, 0xfd3a622b, 0xfd4b8389, 0xfd5cf105, 0xfd6ea790, 0xfd80a411, 0xfd92e36c, 0xfda5627e, 0xfdb81e22, 0xfdcb132d, 0xfdde3e6f, 0xfdf19cb9, 0xfe052ad4, 0xfe18e58c, 0xfe2cc9a7, 0xfe40d3ed, 0xfe550124, + 0xfe694e12, 0xfe7db77c, 0xfe923a2b, 0xfea6d2e5, 0xfebb7e75, 0xfed039a8, 0xfee5014c, 0xfef9d232, 0xff0ea931, 0xff238322, 0xff385ce3, 0xff4d3358, 0xff620368, 0xff76ca02, 0xff8b841a, 0xffa02eac, 0xffb4c6b9, 0xffc9494b, 0xffddb374, 0xfff2024e, 0x000632fa, 0x001a42a4, 0x002e2e82, 0x0041f3d2, 0x00558fdc, 0x0068fff3, 0x007c4177, 0x008f51cf, 0x00a22e71, 0x00b4d4dd, 0x00c7429f, 0x00d97550, 0x00eb6a95, 0x00fd2022, 0x010e93b5, 0x011fc31c, 0x0130ac31, 0x01414cdd, 0x0151a317, 0x0161ace5, 0x01716859, 0x0180d397, 0x018fecd1, 0x019eb246, 0x01ad2249, 0x01bb3b37, 0x01c8fb81, 0x01d661a6, 0x01e36c34, 0x01f019cb, 0x01fc691b, 0x020858e2, 0x0213e7f0, 0x021f1526, 0x0229df75, 0x023445dd, 0x023e4772, 0x0247e354, 0x025118b8, 0x0259e6e1, 0x02624d23, 0x026a4ae5, 0x0271df9c, 0x02790ace, 0x027fcc12, 0x02862311, 0x028c0f83, 0x0291912f, 0x0296a7f0, 0x029b53af, 
0x029f9466, 0x02a36a1e, 0x02a6d4f0, 0x02a9d508, 0x02ac6a9e, 0x02ae95fb, 0x02b05779, 0x02b1af7f, 0x02b29e84, 0x02b3250f, 0x02b343b5, 0x02b2fb1a, 0x02b24bf1, 0x02b136f9, 0x02afbd02, 0x02addee8, 0x02ab9d96, 0x02a8fa03, 0x02a5f535, 0x02a2903e, 0x029ecc3c, 0x029aaa5a, 0x02962bd1, 0x029151e3, 0x028c1de0, 0x02869122, 0x0280ad0f, 0x027a7318, 0x0273e4b8, 0x026d0374, 0x0265d0dd, 0x025e4e8b, 0x02567e22, 0x024e614c, 0x0245f9bf, 0x023d4937, 0x0234517a, 0x022b1455, 0x0221939d, 0x0217d12d, 0x020dcee8, 0x02038eb7, 0x01f9128a, 0x01ee5c55, 0x01e36e14, 0x01d849c7, 0x01ccf173, 0x01c16720, 0x01b5acdd, 0x01a9c4bc, 0x019db0d0, 0x01917334, 0x01850e00, 0x01788354, 0x016bd54f, 0x015f0612, 0x015217c0, 0x01450c7f, + 0x0137e672, 0x012aa7bf, 0x011d528d, 0x010fe901, 0x01026d40, 0x00f4e16f, 0x00e747b0, 0x00d9a226, 0x00cbf2f0, 0x00be3c2d, 0x00b07ff8, 0x00a2c06b, 0x0094ff9b, 0x00873f9b, 0x0079827a, 0x006bca44, 0x005e1900, 0x005070b0, 0x0042d353, 0x003542e2, 0x0027c151, 0x001a508e, 0x000cf281, 0xffffa90e, 0xfff27611, 0xffe55b60, 0xffd85ac9, 0xffcb7615, 0xffbeaf06, 0xffb20754, 0xffa580b1, 0xff991cc9, 0xff8cdd3c, 0xff80c3a4, 0xff74d194, 0xff690894, 0xff5d6a24, 0xff51f7bb, 0xff46b2c7, 0xff3b9cad, 0xff30b6c8, 0xff260269, 0xff1b80da, 0xff113358, 0xff071b16, 0xfefd3941, 0xfef38ef6, 0xfeea1d4c, 0xfee0e54e, 0xfed7e7fd, 0xfecf2650, 0xfec6a130, 0xfebe5980, 0xfeb65015, 0xfeae85bb, 0xfea6fb32, 0xfe9fb12e, 0xfe98a85b, 0xfe91e159, 0xfe8b5cba, 0xfe851b09, 0xfe7f1cc4, 0xfe79625e, 0xfe73ec40, 0xfe6ebac6, 0xfe69ce43, 0xfe6526fe, 0xfe60c533, 0xfe5ca913, 0xfe58d2c5, 0xfe554265, 0xfe51f802, 0xfe4ef3a4, 0xfe4c3546, 0xfe49bcd9, 0xfe478a42, 0xfe459d5e, 0xfe43f5ff, 0xfe4293ec, 0xfe4176e2, 0xfe409e95, 0xfe400aae, 0xfe3fbacd, 0xfe3fae87, 0xfe3fe569, 0xfe405ef6, 0xfe411aa8, 0xfe4217ef, 0xfe435633, 0xfe44d4d3, 0xfe469325, 0xfe489077, 0xfe4acc0e, 0xfe4d4526, 0xfe4ffaf6, 0xfe52ecab, 0xfe561969, 0xfe598050, 0xfe5d2075, 0xfe60f8ea, 0xfe6508b6, 0xfe694edd, 0xfe6dca58, 0xfe727a1f, 0xfe775d1f, 0xfe7c7243, 0xfe81b86d, 0xfe872e7c, 0xfe8cd349, 0xfe92a5a7, 0xfe98a466, 0xfe9ece4f, 0xfea52227, 0xfeab9eb2, 0xfeb242ac, 0xfeb90cce, 0xfebffbd0, 0xfec70e64, 0xfece433a, 0xfed598fe, 0xfedd0e5c, 0xfee4a1fa, 0xfeec527e, 0xfef41e8c, 0xfefc04c6, 0xff0403cc, 0xff0c1a3c, 0xff1446b5, + 0xff1c87d3, 0xff24dc32, 0xff2d426f, 0xff35b924, 0xff3e3eed, 0xff46d266, 0xff4f722b, 0xff581cd8, 0xff60d10b, 0xff698d62, 0xff72507e, 0xff7b18fe, 0xff83e586, 0xff8cb4bb, 0xff958542, 0xff9e55c6, 0xffa724f0, 0xffaff16f, 0xffb8b9f3, 0xffc17d30, 0xffca39dd, 0xffd2eeb3, 0xffdb9a70, 0xffe43bd5, 0xffecd1a6, 0xfff55aae, 0xfffdd5b8, 0x00064197, 0x000e9d1f, 0x0016e72c, 0x001f1e9b, 0x00274253, 0x002f513a, 0x00374a40, 0x003f2c57, 0x0046f679, 0x004ea7a3, 0x00563edb, 0x005dbb29, 0x00651b9c, 0x006c5f4b, 0x00738551, 0x007a8cd0, 0x008174ef, 0x00883cdc, 0x008ee3cd, 0x009568fc, 0x009bcbab, 0x00a20b23, 0x00a826b2, 0x00ae1dae, 0x00b3ef73, 0x00b99b65, 0x00bf20ee, 0x00c47f7f, 0x00c9b691, 0x00cec5a1, 0x00d3ac38, 0x00d869e1, 0x00dcfe32, 0x00e168c5, 0x00e5a93c, 0x00e9bf43, 0x00edaa88, 0x00f16ac4, 0x00f4ffb6, 0x00f86924, 0x00fba6da, 0x00feb8ad, 0x01019e78, 0x0104581c, 0x0106e583, 0x0109469d, 0x010b7b61, 0x010d83cb, 0x010f5fe2, 0x01110faf, 0x01129344, 0x0113eabb, 0x01151632, 0x011615ce, 0x0116e9bc, 0x0117922f, 0x01180f5d, 0x01186187, 0x011888f2, 0x011885e7, 0x011858b9, 0x011801be, 0x01178152, 0x0116d7d7, 0x011605b5, 0x01150b5a, 0x0113e937, 0x01129fc5, 0x01112f81, 0x010f98eb, 0x010ddc8c, 0x010bfaee, 0x0109f4a2, 0x0107ca3c, 0x01057c57, 0x01030b8e, 0x01007885, 0x00fdc3e0, 0x00faee49, 0x00f7f86e, 0x00f4e2ff, 0x00f1aeb2, 
0x00ee5c3e, 0x00eaec5e, 0x00e75fd1, 0x00e3b758, 0x00dff3b7, 0x00dc15b4, 0x00d81e1a, 0x00d40db3, 0x00cfe54f, 0x00cba5bc, 0x00c74fce, 0x00c2e457, 0x00be642f, 0x00b9d02b, 0x00b52925, 0x00b06ff7, 0x00aba57c, 0x00a6ca90, 0x00a1e00f, + 0x009ce6d8, 0x0097dfc9, 0x0092cbc0, 0x008dab9d, 0x0088803e, 0x00834a83, 0x007e0b4b, 0x0078c375, 0x007373de, 0x006e1d66, 0x0068c0e9, 0x00635f45, 0x005df954, 0x00588ff1, 0x005323f7, 0x004db63c, 0x00484799, 0x0042d8e1, 0x003d6aea, 0x0037fe85, 0x00329483, 0x002d2db0, 0x0027cada, 0x00226ccb, 0x001d144a, 0x0017c21c, 0x00127704, 0x000d33c3, 0x0007f915, 0x0002c7b6, 0xfffda05c, 0xfff883be, 0xfff3728d, 0xffee6d78, 0xffe97529, 0xffe48a4a, 0xffdfad7f, 0xffdadf69, 0xffd620a6, 0xffd171d1, 0xffccd380, 0xffc84645, 0xffc3cab1, 0xffbf614e, 0xffbb0aa3, 0xffb6c735, 0xffb29782, 0xffae7c06, 0xffaa7538, 0xffa6838c, 0xffa2a770, 0xff9ee150, 0xff9b3192, 0xff979898, 0xff9416c1, 0xff90ac66, 0xff8d59dd, 0xff8a1f77, 0xff86fd81, 0xff83f443, 0xff810401, 0xff7e2cfb, 0xff7b6f6c, 0xff78cb8c, 0xff76418b, 0xff73d199, 0xff717bdf, 0xff6f4083, 0xff6d1fa5, 0xff6b1961, 0xff692dd2, 0xff675d09, 0xff65a718, 0xff640c08, 0xff628be3, 0xff6126a9, 0xff5fdc5b, 0xff5eacf3, 0xff5d9867, 0xff5c9eaa, 0xff5bbfaa, 0xff5afb53, 0xff5a5189, 0xff59c230, 0xff594d27, 0xff58f249, 0xff58b16c, 0xff588a65, 0xff587d03, 0xff588913, 0xff58ae5d, 0xff58eca8, 0xff5943b4, 0xff59b340, 0xff5a3b09, 0xff5adac6, 0xff5b922d, 0xff5c60ee, 0xff5d46bb, 0xff5e433e, 0xff5f5621, 0xff607f0b, 0xff61bd9f, 0xff631180, 0xff647a4b, 0xff65f79e, 0xff678912, 0xff692e3f, 0xff6ae6ba, 0xff6cb218, 0xff6e8fe9, 0xff707fbd, 0xff728121, 0xff7493a2, 0xff76b6ca, 0xff78ea20, 0xff7b2d2d, 0xff7d7f76, 0xff7fe07f, 0xff824fca, 0xff84ccdb, 0xff875731, 0xff89ee4d, 0xff8c91ad, 0xff8f40d0, 0xff91fb31, 0xff94c04f, 0xff978fa6, + 0xff9a68b0, 0xff9d4ae9, 0xffa035cc, 0xffa328d4, 0xffa6237a, 0xffa9253b, 0xffac2d8f, 0xffaf3bf2, 0xffb24fde, 0xffb568ce, 0xffb8863e, 0xffbba7aa, 0xffbecc8d, 0xffc1f465, 0xffc51eaf, 0xffc84ae9, 0xffcb7893, 0xffcea72c, 0xffd1d635, 0xffd50530, 0xffd833a0, 0xffdb6109, 0xffde8cf1, 0xffe1b6dd, 0xffe4de56, 0xffe802e6, 0xffeb2416, 0xffee4174, 0xfff15a8d, 0xfff46ef1, 0xfff77e31, 0xfffa87df, 0xfffd8b92, 0x000088df, 0x00037f60, 0x00066eae, 0x00095666, 0x000c3627, 0x000f0d91, 0x0011dc47, 0x0014a1ee, 0x00175e2d, 0x001a10ad, 0x001cb91a, 0x001f5723, 0x0021ea76, 0x002472c8, 0x0026efcc, 0x0029613a, 0x002bc6cd, 0x002e2040, 0x00306d52, 0x0032adc4, 0x0034e15b, 0x003707dc, 0x00392111, 0x003b2cc5, 0x003d2ac6, 0x003f1ae4, 0x0040fcf3, 0x0042d0c9, 0x0044963d, 0x00464d2b, 0x0047f571, 0x00498eed, 0x004b1984, 0x004c951b, 0x004e0199, 0x004f5ee9, 0x0050acf7, 0x0051ebb4, 0x00531b12, 0x00543b04, 0x00554b83, 0x00564c88, 0x00573e0f, 0x00582016, 0x0058f29f, 0x0059b5ad, 0x005a6946, 0x005b0d72, 0x005ba23b, 0x005c27af, 0x005c9ddc, 0x005d04d4, 0x005d5cab, 0x005da575, 0x005ddf4c, 0x005e0a48, 0x005e2687, 0x005e3427, 0x005e3347, 0x005e240a, 0x005e0694, 0x005ddb0b, 0x005da198, 0x005d5a62, 0x005d0597, 0x005ca363, 0x005c33f6, 0x005bb77f, 0x005b2e31, 0x005a9840, 0x0059f5e1, 0x0059474a, 0x00588cb4, 0x0057c658, 0x0056f471, 0x0056173b, 0x00552ef3, 0x00543bd8, 0x00533e29, 0x00523626, 0x00512412, 0x0050082f, 0x004ee2c1, 0x004db40c, 0x004c7c55, 0x004b3be3, 0x0049f2fc, 0x0048a1e7, 0x004748ed, 0x0045e856, 0x0044806c, 0x00431177, 0x00419bc2, 0x00401f98, 0x003e9d42, + 0x003d150d, 0x003b8742, 0x0039f42e, 0x00385c1d, 0x0036bf58, 0x00351e2d, 0x003378e7, 0x0031cfd1, 0x00302337, 0x002e7363, 0x002cc0a2, 0x002b0b3d, 0x00295380, 0x002799b3, 0x0025de22, 0x00242115, 0x002262d6, 0x0020a3ad, 0x001ee3e1, 0x001d23b9, 
0x001b637e, 0x0019a373, 0x0017e3df, 0x00162507, 0x0014672d, 0x0012aa95, 0x0010ef82, 0x000f3633, 0x000d7eea, 0x000bc9e6, 0x000a1765, 0x000867a5, 0x0006bae1, 0x00051157, 0x00036b3f, 0x0001c8d2, 0x00002a4a, 0xfffe8fdc, 0xfffcf9be, 0xfffb6825, 0xfff9db44, 0xfff8534d, 0xfff6d070, 0xfff552de, 0xfff3dac3, 0xfff2684e, 0xfff0fba9, 0xffef94fe, 0xffee3477, 0xffecda3b, 0xffeb866f, 0xffea3939, 0xffe8f2bb, 0xffe7b317, 0xffe67a6f, 0xffe548e0, 0xffe41e88, 0xffe2fb83, 0xffe1dfec, 0xffe0cbdc, 0xffdfbf6b, 0xffdebaaf, 0xffddbdbd, 0xffdcc8a9, 0xffdbdb84, 0xffdaf65e, 0xffda1948, 0xffd9444e, 0xffd8777d, 0xffd7b2e0, 0xffd6f67f, 0xffd64264, 0xffd59695, 0xffd4f316, 0xffd457ec, 0xffd3c519, 0xffd33a9e, 0xffd2b87c, 0xffd23eaf, 0xffd1cd37, 0xffd1640e, 0xffd1032f, 0xffd0aa93, 0xffd05a33, 0xffd01205, 0xffcfd1ff, 0xffcf9a15, 0xffcf6a3b, 0xffcf4262, 0xffcf227b, 0xffcf0a77, 0xffcefa44, 0xffcef1cf, 0xffcef106, 0xffcef7d4, 0xffcf0623, 0xffcf1bde, 0xffcf38ec, 0xffcf5d36, 0xffcf88a2, 0xffcfbb17, 0xffcff478, 0xffd034ac, 0xffd07b95, 0xffd0c915, 0xffd11d0f, 0xffd17764, 0xffd1d7f5, 0xffd23ea1, 0xffd2ab47, 0xffd31dc7, 0xffd395fd, 0xffd413c9, 0xffd49705, 0xffd51f90, 0xffd5ad44, 0xffd63ffe, 0xffd6d798, 0xffd773ed, 0xffd814d7, 0xffd8ba31, 0xffd963d4, 0xffda1199, 0xffdac35a, 0xffdb78ef, 0xffdc3231, 0xffdceef9, 0xffddaf1e, + 0xffde727a, 0xffdf38e5, 0xffe00236, 0xffe0ce46, 0xffe19cec, 0xffe26e01, 0xffe3415d, 0xffe416d8, 0xffe4ee4b, 0xffe5c78d, 0xffe6a277, 0xffe77ee2, 0xffe85ca7, 0xffe93b9e, 0xffea1ba2, 0xffeafc8b, 0xffebde33, 0xffecc075, 0xffeda32a, 0xffee862e, 0xffef695c, 0xfff04c8f, 0xfff12fa3, 0xfff21275, 0xfff2f4e0, 0xfff3d6c3, 0xfff4b7fb, 0xfff59866, 0xfff677e2, 0xfff75650, 0xfff8338e, 0xfff90f7c, 0xfff9e9fd, 0xfffac2f0, 0xfffb9a38, 0xfffc6fb8, 0xfffd4352, 0xfffe14eb, 0xfffee466, 0xffffb1aa, 0x00007c9c, 0x00014521, 0x00020b23, 0x0002ce87, 0x00038f37, 0x00044d1b, 0x0005081f, 0x0005c02c, 0x0006752d, 0x0007270f, 0x0007d5bf, 0x0008812a, 0x0009293e, 0x0009cdeb, 0x000a6f20, 0x000b0cce, 0x000ba6e5, 0x000c3d59, 0x000cd01b, 0x000d5f1f, 0x000dea5a, 0x000e71c1, 0x000ef549, 0x000f74e9, 0x000ff098, 0x0010684e, 0x0010dc05, 0x00114bb4, 0x0011b757, 0x00121ee9, 0x00128265, 0x0012e1c8, 0x00133d0e, 0x00139436, 0x0013e73e, 0x00143626, 0x001480ec, 0x0014c792, 0x00150a19, 0x00154883, 0x001582d3, 0x0015b90b, 0x0015eb2f, 0x00161944, 0x0016434f, 0x00166956, 0x00168b5e, 0x0016a96f, 0x0016c390, 0x0016d9c9, 0x0016ec22, 0x0016faa5, 0x0017055b, 0x00170c4f, 0x00170f8a, 0x00170f18, 0x00170b04, 0x0017035a, 0x0016f828, 0x0016e979, 0x0016d75b, 0x0016c1dc, 0x0016a90a, 0x00168cf2, 0x00166da5, 0x00164b32, 0x001625a7, 0x0015fd15, 0x0015d18b, 0x0015a31b, 0x001571d5, 0x00153dca, 0x0015070b, 0x0014cdab, 0x001491b9, 0x00145349, 0x0014126c, 0x0013cf36, 0x001389b7, 0x00134204, 0x0012f82e, 0x0012ac48, 0x00125e66, 0x00120e9b, 0x0011bcf9, 0x00116994, 0x00111480, 0x0010bdcf, + 0x00106595, 0x00100be5, 0x000fb0d2, 0x000f5471, 0x000ef6d4, 0x000e980f, 0x000e3834, 0x000dd758, 0x000d758d, 0x000d12e6, 0x000caf76, 0x000c4b50, 0x000be687, 0x000b812d, 0x000b1b55, 0x000ab510, 0x000a4e72, 0x0009e78c, 0x00098070, 0x0009192f, 0x0008b1db, 0x00084a86, 0x0007e33f, 0x00077c17, 0x00071520, 0x0006ae6a, 0x00064804, 0x0005e1fe, 0x00057c68, 0x00051750, 0x0004b2c7, 0x00044eda, 0x0003eb98, 0x0003890e, 0x0003274c, 0x0002c65d, 0x00026650, 0x00020730, 0x0001a90b, 0x00014bed, 0x0000efe1, 0x000094f3, 0x00003b2e, 0xffffe29d, 0xffff8b4b, 0xffff3540, 0xfffee088, 0xfffe8d2c, 0xfffe3b35, 0xfffdeaaa, 0xfffd9b96, 0xfffd4dff, 0xfffd01ee, 0xfffcb769, 0xfffc6e78, 0xfffc2720, 0xfffbe169, 0xfffb9d59, 0xfffb5af3, 
0xfffb1a3f, 0xfffadb40, 0xfffa9dfa, 0xfffa6273, 0xfffa28ad, 0xfff9f0ac, 0xfff9ba73, 0xfff98604, 0xfff95363, 0xfff92290, 0xfff8f38e, 0xfff8c65d, 0xfff89b00, 0xfff87176, 0xfff849c0, 0xfff823dd, 0xfff7ffce, 0xfff7dd92, 0xfff7bd28, 0xfff79e8f, 0xfff781c5, 0xfff766c8, 0xfff74d97, 0xfff7362f, 0xfff7208d, 0xfff70caf, 0xfff6fa92, 0xfff6ea31, 0xfff6db89, 0xfff6ce97, 0xfff6c356, 0xfff6b9c1, 0xfff6b1d5, 0xfff6ab8c, 0xfff6a6e2, 0xfff6a3d0, 0xfff6a252, 0xfff6a262, 0xfff6a3f9, 0xfff6a713, 0xfff6aba9, 0xfff6b1b4, 0xfff6b92d, 0xfff6c20f, 0xfff6cc52, 0xfff6d7f0, 0xfff6e4e1, 0xfff6f31d, 0xfff7029f, 0xfff7135d, 0xfff72551, 0xfff73873, 0xfff74cba, 0xfff76220, 0xfff7789c, 0xfff79026, 0xfff7a8b6, 0xfff7c245, 0xfff7dcc8, 0xfff7f83a, 0xfff81490, 0xfff831c3, 0xfff84fcb, 0xfff86e9e, 0xfff88e35, 0xfff8ae88, 0xfff8cf8d, 0xfff8f13c, 0xfff9138e, + 0xfff93679, 0xfff959f5, 0xfff97dfa, 0xfff9a27f, 0xfff9c77d, 0xfff9eceb, 0xfffa12c0, 0xfffa38f5, 0xfffa5f81, 0xfffa865d, 0xfffaad81, 0xfffad4e4, 0xfffafc7f, 0xfffb244a, 0xfffb4c3e, 0xfffb7452, 0xfffb9c80, 0xfffbc4bf, 0xfffbed0a, 0xfffc1558, 0xfffc3da2, 0xfffc65e2, 0xfffc8e11, 0xfffcb628, 0xfffcde20, 0xfffd05f3, 0xfffd2d9b, 0xfffd5511, 0xfffd7c4f, 0xfffda350, 0xfffdca0d, 0xfffdf080, 0xfffe16a6, 0xfffe3c76, 0xfffe61ee, 0xfffe8707, 0xfffeabbd, 0xfffed00a, 0xfffef3ea, 0xffff1759, 0xffff3a53, 0xffff5cd2, 0xffff7ed3, 0xffffa052, 0xffffc14b, 0xffffe1bc, 0x0000019f, 0x000020f3, 0x00003fb3, 0x00005ddd, 0x00007b6f, 0x00009865, 0x0000b4bd, 0x0000d074, 0x0000eb89, 0x000105f9, 0x00011fc3, 0x000138e4, 0x0001515c, 0x00016928, 0x00018048, 0x000196ba, 0x0001ac7d, 0x0001c191, 0x0001d5f4, 0x0001e9a7, 0x0001fca8, 0x00020ef7, 0x00022095, 0x00023181, 0x000241bb, 0x00025143, 0x0002601b, 0x00026e41, 0x00027bb8, 0x0002887f, 0x00029497, 0x0002a002, 0x0002aac0, 0x0002b4d2, 0x0002be3b, 0x0002c6fa, 0x0002cf12, 0x0002d684, 0x0002dd53, 0x0002e37e, 0x0002e90a, 0x0002edf6, 0x0002f246, 0x0002f5fc, 0x0002f919, 0x0002fba0, 0x0002fd94, 0x0002fef6, 0x0002ffc9, 0x00030010, 0x0002ffcd, 0x0002ff03, 0x0002fdb4, 0x0002fbe4, 0x0002f995, 0x0002f6c9, 0x0002f385, 0x0002efca, 0x0002eb9c, 0x0002e6fe, 0x0002e1f3, 0x0002dc7d, 0x0002d6a0, 0x0002d060, 0x0002c9be, 0x0002c2be, 0x0002bb64, 0x0002b3b3, 0x0002abad, 0x0002a357, 0x00029ab2, 0x000291c3, 0x0002888c, 0x00027f11, 0x00027555, 0x00026b5b, 0x00026126, 0x000256b9, 0x00024c18, 0x00024146, 0x00023645, 0x00022b19, + 0x00021fc5, 0x0002144b, 0x000208b0, 0x0001fcf5, 0x0001f11e, 0x0001e52e, 0x0001d927, 0x0001cd0d, 0x0001c0e1, 0x0001b4a8, 0x0001a863, 0x00019c15, 0x00018fc1, 0x0001836a, 0x00017712, 0x00016abb, 0x00015e68, 0x0001521b, 0x000145d7, 0x0001399e, 0x00012d72, 0x00012155, 0x0001154a, 0x00010952, 0x0000fd6f, 0x0000f1a4, 0x0000e5f3, 0x0000da5c, 0x0000cee3, 0x0000c388, 0x0000b84d, 0x0000ad34, 0x0000a23f, 0x0000976e, 0x00008cc4, 0x00008241, 0x000077e8, 0x00006db9, 0x000063b5, 0x000059dd, 0x00005033, 0x000046b8, 0x00003d6c, 0x00003450, 0x00002b66, 0x000022ad, 0x00001a28, 0x000011d5, 0x000009b6, 0x000001cc, 0xfffffa17, 0xfffff297, 0xffffeb4c, 0xffffe438, 0xffffdd5a, 0xffffd6b2, 0xffffd041, 0xffffca06, 0xffffc402, 0xffffbe35, 0xffffb89f, 0xffffb340, 0xffffae17, 0xffffa924, 0xffffa467, 0xffff9fe0, 0xffff9b8f, 0xffff9773, 0xffff938c, 0xffff8fd9, 0xffff8c5a, 0xffff890e, 0xffff85f5, 0xffff830e, 0xffff805a, 0xffff7dd6, 0xffff7b82, 0xffff795f, 0xffff776a, 0xffff75a3, 0xffff740a, 0xffff729e, 0xffff715d, 0xffff7047, 0xffff6f5c, 0xffff6e99, 0xffff6dff, 0xffff6d8d, 0xffff6d40, 0xffff6d1a, 0xffff6d17, 0xffff6d38, 0xffff6d7c, 0xffff6de2, 0xffff6e67, 0xffff6f0d, 0xffff6fd1, 0xffff70b2, 
0xffff71b0, 0xffff72c9, 0xffff73fc, 0xffff7549, 0xffff76ae, 0xffff782a, 0xffff79bc, 0xffff7b63, 0xffff7d1f, 0xffff7eed, 0xffff80cd, 0xffff82bf, 0xffff84c0, 0xffff86d0, 0xffff88ee, 0xffff8b19, 0xffff8d50, 0xffff8f92, 0xffff91de, 0xffff9434, 0xffff9691, 0xffff98f5, 0xffff9b60, 0xffff9dd0, 0xffffa045, 0xffffa2be, 0xffffa539, 0xffffa7b7, 0xffffaa35, 0xffffacb4, + 0x00000000 +}; + +// cmd-line: fir -l 7 -s 44100 -c 19876 -n 16 -b 9.62 +const int32_t dn_sampler_filter_coefficients[] = { + 0x736144b5, 0x735ed3aa, 0x735780bb, 0x734b4c77, 0x733a37d2, 0x7324441e, 0x7309730f, 0x72e9c6b8, 0x72c5418e, 0x729be665, 0x726db871, 0x723abb44, 0x7202f2d3, 0x71c6636d, 0x718511c2, 0x713f02e0, 0x70f43c32, 0x70a4c37f, 0x70509eec, 0x6ff7d4f8, 0x6f9a6c7f, 0x6f386cb6, 0x6ed1dd2e, 0x6e66c5ce, 0x6df72ed9, 0x6d8320e6, 0x6d0aa4e6, 0x6c8dc41f, 0x6c0c882a, 0x6b86faf8, 0x6afd26cb, 0x6a6f1638, 0x69dcd425, 0x69466bc8, 0x68abe8a8, 0x680d5698, 0x676ac1bb, 0x66c4367d, 0x6619c197, 0x656b700a, 0x64b94f22, 0x64036c6f, 0x6349d5c9, 0x628c994c, 0x61cbc559, 0x61076890, 0x603f91d5, 0x5f745049, 0x5ea5b34c, 0x5dd3ca7a, 0x5cfea5aa, 0x5c2654ed, 0x5b4ae88d, 0x5a6c7108, 0x598aff13, 0x58a6a397, 0x57bf6fae, 0x56d574a2, 0x55e8c3ee, 0x54f96f37, 0x54078851, 0x53132138, 0x521c4c10, 0x51231b26, 0x5027a0e9, 0x4f29efed, 0x4e2a1ae8, 0x4d2834b0, 0x4c245038, 0x4b1e8091, 0x4a16d8e5, 0x490d6c79, 0x48024ea7, 0x46f592e2, 0x45e74cad, 0x44d78fa0, 0x43c66f62, 0x42b3ffa9, 0x41a05437, 0x408b80d9, 0x3f759967, 0x3e5eb1bd, 0x3d46ddc1, 0x3c2e315a, 0x3b14c072, 0x39fa9ef3, 0x38dfe0c6, 0x37c499d0, 0x36a8ddf3, 0x358cc109, 0x347056e3, 0x3353b349, 0x3236e9f7, 0x311a0e9b, 0x2ffd34d4, 0x2ee07030, 0x2dc3d429, 0x2ca77428, 0x2b8b637b, 0x2a6fb55e, 0x29547ced, 0x2839cd30, 0x271fb90d, 0x2606534e, 0x24edae9c, 0x23d5dd81, 0x22bef262, 0x21a8ff7e, 0x209416f2, 0x1f804ab0, 0x1e6dac83, 0x1d5c4e09, 0x1c4c40b6, 0x1b3d95d1, 0x1a305e70, 0x1924ab7b, 0x181a8da5, 0x17121573, 0x160b5331, 0x150656f8, 0x140330a9, 0x1301efed, 0x1202a434, 0x11055cb4, 0x100a2864, 0x0f111603, 0x0e1a340d, 0x0d2590c3, + 0x0c333a22, 0x0b433de8, 0x0a55a98f, 0x096a8a51, 0x0881ed1f, 0x079bdea7, 0x06b86b52, 0x05d79f40, 0x04f98649, 0x041e2bfe, 0x03459ba4, 0x026fe039, 0x019d046d, 0x00cd12a4, 0x000014f8, 0xff361534, 0xfe6f1cd7, 0xfdab350f, 0xfcea66be, 0xfc2cba75, 0xfb723876, 0xfabae8b2, 0xfa06d2ca, 0xf955fe0c, 0xf8a87178, 0xf7fe33ba, 0xf7574b2b, 0xf6b3bdd3, 0xf6139169, 0xf576cb4e, 0xf4dd7092, 0xf44785f1, 0xf3b50fd6, 0xf3261255, 0xf29a9133, 0xf2128fde, 0xf18e1174, 0xf10d18bd, 0xf08fa82f, 0xf015c1ee, 0xef9f67cb, 0xef2c9b43, 0xeebd5d81, 0xee51af5f, 0xede99165, 0xed8503c7, 0xed24066b, 0xecc698e6, 0xec6cba79, 0xec166a19, 0xebc3a669, 0xeb746dbe, 0xeb28be1f, 0xeae09544, 0xea9bf097, 0xea5acd38, 0xea1d27f7, 0xe9e2fd5b, 0xe9ac49a0, 0xe97908b8, 0xe9493649, 0xe91ccdb5, 0xe8f3ca12, 0xe8ce2631, 0xe8abdc9d, 0xe88ce79a, 0xe871412a, 0xe858e30a, 0xe843c6b5, 0xe831e563, 0xe823380d, 0xe817b76c, 0xe80f5bfb, 0xe80a1df5, 0xe807f55b, 0xe808d9f1, 0xe80cc342, 0xe813a89f, 0xe81d8122, 0xe82a43ac, 0xe839e6e9, 0xe84c6152, 0xe861a92b, 0xe879b487, 0xe8947947, 0xe8b1ed1c, 0xe8d2058b, 0xe8f4b7e9, 0xe919f961, 0xe941bef3, 0xe96bfd76, 0xe998a999, 0xe9c7b7e3, 0xe9f91cb9, 0xea2ccc59, 0xea62bae0, 0xea9adc49, 0xead52471, 0xeb118714, 0xeb4ff7d4, 0xeb906a35, 0xebd2d1a1, 0xec17216b, 0xec5d4ccd, 0xeca546eb, 0xecef02d5, 0xed3a7388, 0xed878bf0, 0xedd63ee5, 0xee267f35, 0xee783f9e, 0xeecb72d1, 0xef200b76, 0xef75fc2b, 0xefcd3787, 0xf025b01a, 0xf07f586e, 0xf0da230b, 0xf1360276, 0xf192e932, 0xf1f0c9c5, 0xf24f96b5, 0xf2af428c, 0xf30fbfd7, 0xf371012c, 0xf3d2f926, 0xf4359a6a, 
0xf498d7a5, + 0xf4fca390, 0xf560f0f3, 0xf5c5b2a1, 0xf62adb7c, 0xf6905e79, 0xf6f62e9d, 0xf75c3eff, 0xf7c282cb, 0xf828ed43, 0xf88f71bf, 0xf8f603ae, 0xf95c9699, 0xf9c31e22, 0xfa298e07, 0xfa8fda21, 0xfaf5f669, 0xfb5bd6f4, 0xfbc16ff6, 0xfc26b5c5, 0xfc8b9cda, 0xfcf019cd, 0xfd54215c, 0xfdb7a869, 0xfe1aa3fc, 0xfe7d0942, 0xfedecd90, 0xff3fe663, 0xffa04963, 0xffffec5f, 0x005ec552, 0x00bcca63, 0x0119f1e4, 0x01763256, 0x01d18265, 0x022bd8ee, 0x02852cfc, 0x02dd75ca, 0x0334aac4, 0x038ac385, 0x03dfb7dd, 0x04337fcb, 0x04861383, 0x04d76b6b, 0x0527801d, 0x05764a68, 0x05c3c34e, 0x060fe408, 0x065aa604, 0x06a402e4, 0x06ebf483, 0x073274f1, 0x07777e74, 0x07bb0b8b, 0x07fd16eb, 0x083d9b81, 0x087c9471, 0x08b9fd18, 0x08f5d10a, 0x09300c14, 0x0968aa3b, 0x099fa7bb, 0x09d5010b, 0x0a08b2d9, 0x0a3aba09, 0x0a6b13bc, 0x0a99bd47, 0x0ac6b43a, 0x0af1f65d, 0x0b1b81ad, 0x0b435462, 0x0b696ceb, 0x0b8dc9ed, 0x0bb06a47, 0x0bd14d0b, 0x0bf07186, 0x0c0dd738, 0x0c297dd9, 0x0c436557, 0x0c5b8dd4, 0x0c71f7a9, 0x0c86a361, 0x0c9991be, 0x0caac3b5, 0x0cba3a6d, 0x0cc7f742, 0x0cd3fbc0, 0x0cde49a8, 0x0ce6e2ea, 0x0cedc9a7, 0x0cf30031, 0x0cf6890a, 0x0cf866e1, 0x0cf89c96, 0x0cf72d34, 0x0cf41bf7, 0x0cef6c43, 0x0ce921ab, 0x0ce13feb, 0x0cd7caec, 0x0cccc6bc, 0x0cc03797, 0x0cb221de, 0x0ca28a1a, 0x0c9174fa, 0x0c7ee754, 0x0c6ae622, 0x0c557681, 0x0c3e9db5, 0x0c26611f, 0x0c0cc646, 0x0bf1d2d0, 0x0bd58c81, 0x0bb7f940, 0x0b991f0f, 0x0b79040c, 0x0b57ae75, 0x0b3524a0, 0x0b116cff, 0x0aec8e1c, 0x0ac68e9b, 0x0a9f7537, 0x0a7748c0, 0x0a4e101f, 0x0a23d24e, 0x09f8965d, 0x09cc636e, 0x099f40b5, 0x09713575, + 0x09424904, 0x091282c4, 0x08e1ea27, 0x08b086aa, 0x087e5fd7, 0x084b7d43, 0x0817e68c, 0x07e3a35a, 0x07aebb5d, 0x0779364a, 0x07431bdf, 0x070c73dd, 0x06d5460b, 0x069d9a31, 0x0665781b, 0x062ce795, 0x05f3f06b, 0x05ba9a6b, 0x0580ed5f, 0x0546f10f, 0x050cad3f, 0x04d229b1, 0x04976e20, 0x045c8240, 0x04216dc0, 0x03e63846, 0x03aae970, 0x036f88d2, 0x03341df4, 0x02f8b055, 0x02bd4768, 0x0281ea90, 0x0246a125, 0x020b726f, 0x01d065a8, 0x019581f9, 0x015ace79, 0x0120522f, 0x00e6140f, 0x00ac1af9, 0x00726dbb, 0x0039130c, 0x00001191, 0xffc76fd5, 0xff8f344f, 0xff576560, 0xff20094d, 0xfee92646, 0xfeb2c261, 0xfe7ce399, 0xfe478fd2, 0xfe12ccd1, 0xfddea042, 0xfdab0fb6, 0xfd7820a0, 0xfd45d856, 0xfd143c12, 0xfce350f0, 0xfcb31bec, 0xfc83a1e5, 0xfc54e79a, 0xfc26f1ad, 0xfbf9c49d, 0xfbcd64ca, 0xfba1d673, 0xfb771db9, 0xfb4d3e97, 0xfb243cea, 0xfafc1c6e, 0xfad4e0b9, 0xfaae8d43, 0xfa89255f, 0xfa64ac3f, 0xfa4124f2, 0xfa1e9262, 0xf9fcf758, 0xf9dc567b, 0xf9bcb24a, 0xf99e0d26, 0xf980694a, 0xf963c8cc, 0xf9482da0, 0xf92d9997, 0xf9140e5e, 0xf8fb8d7d, 0xf8e4185a, 0xf8cdb036, 0xf8b85631, 0xf8a40b44, 0xf890d048, 0xf87ea5f1, 0xf86d8cd1, 0xf85d8555, 0xf84e8fc9, 0xf840ac57, 0xf833db04, 0xf8281bb6, 0xf81d6e2e, 0xf813d20d, 0xf80b46d3, 0xf803cbdc, 0xf7fd6065, 0xf7f8038c, 0xf7f3b44b, 0xf7f0717e, 0xf7ee39e2, 0xf7ed0c12, 0xf7ece68c, 0xf7edc7af, 0xf7efadbd, 0xf7f296d7, 0xf7f68103, 0xf7fb6a29, 0xf8015015, 0xf8083077, 0xf81008e2, 0xf818d6cf, 0xf822979b, 0xf82d488c, 0xf838e6c9, 0xf8456f65, 0xf852df56, 0xf861337c, 0xf870689f, 0xf8807b70, 0xf8916889, 0xf8a32c6e, 0xf8b5c38d, + 0xf8c92a41, 0xf8dd5ccf, 0xf8f25767, 0xf9081629, 0xf91e9521, 0xf935d048, 0xf94dc388, 0xf9666ab7, 0xf97fc19e, 0xf999c3f4, 0xf9b46d64, 0xf9cfb988, 0xf9eba3ef, 0xfa082817, 0xfa254176, 0xfa42eb75, 0xfa61216f, 0xfa7fdeba, 0xfa9f1e9e, 0xfabedc5a, 0xfadf1328, 0xfaffbe36, 0xfb20d8ad, 0xfb425db0, 0xfb64485b, 0xfb8693c6, 0xfba93b01, 0xfbcc391d, 0xfbef8924, 0xfc13261f, 0xfc370b14, 0xfc5b3309, 0xfc7f9902, 0xfca43803, 0xfcc90b12, 0xfcee0d33, 0xfd133970, 0xfd388ad1, 
0xfd5dfc63, 0xfd838938, 0xfda92c63, 0xfdcee0ff, 0xfdf4a22a, 0xfe1a6b08, 0xfe4036c5, 0xfe660094, 0xfe8bc3ad, 0xfeb17b53, 0xfed722d0, 0xfefcb57a, 0xff222eac, 0xff4789d1, 0xff6cc25a, 0xff91d3c6, 0xffb6b99f, 0xffdb6f7c, 0xfffff100, 0x002439db, 0x004845cc, 0x006c10a0, 0x008f9631, 0x00b2d26b, 0x00d5c147, 0x00f85ecf, 0x011aa71d, 0x013c965b, 0x015e28c7, 0x017f5aad, 0x01a0286c, 0x01c08e78, 0x01e08952, 0x02001593, 0x021f2fe5, 0x023dd505, 0x025c01c5, 0x0279b30b, 0x0296e5d0, 0x02b39724, 0x02cfc429, 0x02eb6a18, 0x03068640, 0x03211603, 0x033b16dc, 0x03548659, 0x036d621f, 0x0385a7eb, 0x039d558e, 0x03b468f1, 0x03cae014, 0x03e0b90d, 0x03f5f20a, 0x040a894e, 0x041e7d34, 0x0431cc31, 0x044474ce, 0x045675ab, 0x0467cd83, 0x04787b24, 0x04887d76, 0x0497d378, 0x04a67c41, 0x04b476fe, 0x04c1c2f3, 0x04ce5f7d, 0x04da4c10, 0x04e58836, 0x04f01392, 0x04f9edda, 0x050316e0, 0x050b8e8a, 0x051354d5, 0x051a69d4, 0x0520cdb1, 0x052680ae, 0x052b8320, 0x052fd573, 0x0533782a, 0x05366bdc, 0x0538b136, 0x053a48fa, 0x053b3400, 0x053b7332, 0x053b0791, 0x0539f231, 0x0538343a, 0x0535cee9, 0x0532c38c, 0x052f1386, + 0x052ac04c, 0x0525cb66, 0x0520366d, 0x051a030f, 0x05133308, 0x050bc828, 0x0503c44d, 0x04fb2969, 0x04f1f97c, 0x04e83697, 0x04dde2da, 0x04d30074, 0x04c791a4, 0x04bb98b5, 0x04af1804, 0x04a211f8, 0x04948906, 0x04867fb3, 0x0477f88d, 0x0468f62e, 0x04597b40, 0x04498a72, 0x04392684, 0x0428523d, 0x0417106e, 0x040563f4, 0x03f34fb2, 0x03e0d697, 0x03cdfb99, 0x03bac1b4, 0x03a72bf0, 0x03933d58, 0x037ef900, 0x036a6201, 0x03557b7a, 0x03404890, 0x032acc6d, 0x03150a3f, 0x02ff0538, 0x02e8c08e, 0x02d23f7a, 0x02bb8537, 0x02a49505, 0x028d7223, 0x02761fd3, 0x025ea157, 0x0246f9f3, 0x022f2cea, 0x02173d81, 0x01ff2ef9, 0x01e70494, 0x01cec194, 0x01b66936, 0x019dfeb6, 0x0185854f, 0x016d0037, 0x015472a1, 0x013bdfbc, 0x01234ab4, 0x010ab6b0, 0x00f226d0, 0x00d99e31, 0x00c11feb, 0x00a8af0c, 0x00904ea0, 0x007801aa, 0x005fcb26, 0x0047ae09, 0x002fad3f, 0x0017cbae, 0x00000c33, 0xffe871a0, 0xffd0fec1, 0xffb9b656, 0xffa29b18, 0xff8bafb3, 0xff74f6cc, 0xff5e72fb, 0xff4826cf, 0xff3214c9, 0xff1c3f63, 0xff06a907, 0xfef15417, 0xfedc42e7, 0xfec777be, 0xfeb2f4d9, 0xfe9ebc66, 0xfe8ad087, 0xfe773351, 0xfe63e6cb, 0xfe50ecf0, 0xfe3e47ac, 0xfe2bf8de, 0xfe1a0256, 0xfe0865d7, 0xfdf72515, 0xfde641b7, 0xfdd5bd53, 0xfdc59972, 0xfdb5d78f, 0xfda67913, 0xfd977f5d, 0xfd88ebb9, 0xfd7abf64, 0xfd6cfb8e, 0xfd5fa157, 0xfd52b1cf, 0xfd462df6, 0xfd3a16c0, 0xfd2e6d0d, 0xfd2331b0, 0xfd18656f, 0xfd0e08fb, 0xfd041cfa, 0xfcfaa200, 0xfcf19894, 0xfce9012c, 0xfce0dc2f, 0xfcd929f4, 0xfcd1eac3, 0xfccb1ed7, 0xfcc4c658, 0xfcbee162, 0xfcb97001, 0xfcb47232, 0xfcafe7e2, 0xfcabd0f2, 0xfca82d32, + 0xfca4fc64, 0xfca23e3d, 0xfc9ff262, 0xfc9e186a, 0xfc9cafe0, 0xfc9bb83e, 0xfc9b30f3, 0xfc9b195f, 0xfc9b70d6, 0xfc9c369c, 0xfc9d69eb, 0xfc9f09ee, 0xfca115c5, 0xfca38c83, 0xfca66d2e, 0xfca9b6bf, 0xfcad6827, 0xfcb18047, 0xfcb5fdf7, 0xfcbae002, 0xfcc0252b, 0xfcc5cc26, 0xfccbd3a0, 0xfcd23a3a, 0xfcd8fe8b, 0xfce01f21, 0xfce79a7f, 0xfcef6f20, 0xfcf79b75, 0xfd001de8, 0xfd08f4d6, 0xfd121e99, 0xfd1b9980, 0xfd2563d3, 0xfd2f7bd1, 0xfd39dfb4, 0xfd448dae, 0xfd4f83eb, 0xfd5ac08e, 0xfd6641b8, 0xfd720581, 0xfd7e09fc, 0xfd8a4d37, 0xfd96cd3d, 0xfda3880f, 0xfdb07bb0, 0xfdbda61a, 0xfdcb0546, 0xfdd89727, 0xfde659af, 0xfdf44acc, 0xfe026869, 0xfe10b06f, 0xfe1f20c5, 0xfe2db74f, 0xfe3c71f1, 0xfe4b4e8c, 0xfe5a4b03, 0xfe696534, 0xfe789b01, 0xfe87ea47, 0xfe9750e8, 0xfea6ccc3, 0xfeb65bb9, 0xfec5fbac, 0xfed5aa7e, 0xfee56614, 0xfef52c54, 0xff04fb25, 0xff14d073, 0xff24aa2a, 0xff348639, 0xff446293, 0xff543d2e, 0xff641402, 0xff73e50e, 0xff83ae52, 
0xff936dd2, 0xffa3219a, 0xffb2c7b6, 0xffc25e3b, 0xffd1e340, 0xffe154e3, 0xfff0b148, 0xfffff697, 0x000f22fe, 0x001e34b4, 0x002d29f3, 0x003c00fd, 0x004ab81b, 0x00594d9d, 0x0067bfd8, 0x00760d2a, 0x008433f9, 0x009232b2, 0x00a007c9, 0x00adb1bb, 0x00bb2f0b, 0x00c87e47, 0x00d59e03, 0x00e28cdd, 0x00ef497a, 0x00fbd28a, 0x010826c4, 0x011444e7, 0x01202bbe, 0x012bda1b, 0x01374eda, 0x014288e0, 0x014d871b, 0x01584883, 0x0162cc19, 0x016d10e9, 0x01771608, 0x0180da94, 0x018a5db5, 0x01939e9e, 0x019c9c8b, 0x01a556c1, 0x01adcc91, 0x01b5fd54, 0x01bde86f, 0x01c58d50, 0x01cceb6e, 0x01d4024c, 0x01dad175, 0x01e15880, 0x01e7970e, + 0x01ed8cc7, 0x01f33960, 0x01f89c98, 0x01fdb637, 0x0202860e, 0x02070bf9, 0x020b47dd, 0x020f39ab, 0x0212e15c, 0x02163ef1, 0x02195278, 0x021c1c06, 0x021e9bbb, 0x0220d1bf, 0x0222be45, 0x02246187, 0x0225bbca, 0x0226cd5b, 0x02279691, 0x022817ca, 0x0228516f, 0x022843f0, 0x0227efc6, 0x02275572, 0x0226757e, 0x0225507c, 0x0223e706, 0x022239bc, 0x02204949, 0x021e165d, 0x021ba1b2, 0x0218ec06, 0x0215f621, 0x0212c0d2, 0x020f4cec, 0x020b9b4c, 0x0207acd4, 0x0203826c, 0x01ff1d04, 0x01fa7d91, 0x01f5a50d, 0x01f0947a, 0x01eb4cde, 0x01e5cf44, 0x01e01cbe, 0x01da3661, 0x01d41d4a, 0x01cdd297, 0x01c7576d, 0x01c0acf5, 0x01b9d45b, 0x01b2ced1, 0x01ab9d8b, 0x01a441c2, 0x019cbcb1, 0x01950f98, 0x018d3bb8, 0x01854258, 0x017d24bf, 0x0174e437, 0x016c820d, 0x0163ff90, 0x015b5e11, 0x01529ee3, 0x0149c35a, 0x0140cccb, 0x0137bc8f, 0x012e93fc, 0x0125546c, 0x011bff38, 0x011295bb, 0x0109194f, 0x00ff8b4f, 0x00f5ed15, 0x00ec3ffc, 0x00e2855d, 0x00d8be92, 0x00ceecf5, 0x00c511dc, 0x00bb2e9f, 0x00b14493, 0x00a7550c, 0x009d615d, 0x00936ad6, 0x008972c7, 0x007f7a7c, 0x00758341, 0x006b8e5c, 0x00619d15, 0x0057b0ae, 0x004dca68, 0x0043eb7f, 0x003a152f, 0x003048ae, 0x0026872f, 0x001cd1e4, 0x001329f7, 0x00099093, 0x000006db, 0xfff68df1, 0xffed26f0, 0xffe3d2f2, 0xffda930a, 0xffd16848, 0xffc853b6, 0xffbf565a, 0xffb67137, 0xffada547, 0xffa4f383, 0xff9c5cdc, 0xff93e241, 0xff8b8498, 0xff8344c4, 0xff7b23a1, 0xff732209, 0xff6b40cb, 0xff6380b5, 0xff5be28d, 0xff546713, 0xff4d0f02, 0xff45db10, 0xff3ecbea, 0xff37e23b, 0xff311ea4, 0xff2a81c4, 0xff240c2f, 0xff1dbe77, 0xff179926, + 0xff119cc0, 0xff0bc9c2, 0xff0620a4, 0xff00a1d8, 0xfefb4dc7, 0xfef624d8, 0xfef12766, 0xfeec55cc, 0xfee7b059, 0xfee33759, 0xfedeeb11, 0xfedacbbf, 0xfed6d99c, 0xfed314da, 0xfecf7da3, 0xfecc141d, 0xfec8d867, 0xfec5ca9a, 0xfec2eaca, 0xfec03901, 0xfebdb547, 0xfebb5f9b, 0xfeb937f9, 0xfeb73e54, 0xfeb5729b, 0xfeb3d4b7, 0xfeb26489, 0xfeb121ee, 0xfeb00cbf, 0xfeaf24cc, 0xfeae69e1, 0xfeaddbc4, 0xfead7a37, 0xfead44f4, 0xfead3bb2, 0xfead5e22, 0xfeadabef, 0xfeae24c1, 0xfeaec838, 0xfeaf95f2, 0xfeb08d86, 0xfeb1ae87, 0xfeb2f884, 0xfeb46b07, 0xfeb60596, 0xfeb7c7b0, 0xfeb9b0d3, 0xfebbc078, 0xfebdf613, 0xfec05114, 0xfec2d0e8, 0xfec574f9, 0xfec83caa, 0xfecb275e, 0xfece3472, 0xfed16342, 0xfed4b325, 0xfed82370, 0xfedbb373, 0xfedf627d, 0xfee32fdb, 0xfee71ad4, 0xfeeb22af, 0xfeef46b0, 0xfef3861a, 0xfef7e02a, 0xfefc541e, 0xff00e133, 0xff0586a0, 0xff0a439e, 0xff0f1762, 0xff140121, 0xff19000e, 0xff1e135b, 0xff233a39, 0xff2873d6, 0xff2dbf61, 0xff331c08, 0xff3888f8, 0xff3e055d, 0xff439064, 0xff492937, 0xff4ecf02, 0xff5480f0, 0xff5a3e2c, 0xff6005e1, 0xff65d73a, 0xff6bb163, 0xff719388, 0xff777cd6, 0xff7d6c79, 0xff83619f, 0xff895b77, 0xff8f5930, 0xff9559fb, 0xff9b5d0a, 0xffa16190, 0xffa766c0, 0xffad6bd0, 0xffb36ff9, 0xffb97271, 0xffbf7274, 0xffc56f3e, 0xffcb680e, 0xffd15c22, 0xffd74abe, 0xffdd3325, 0xffe3149e, 0xffe8ee72, 0xffeebfec, 0xfff48859, 0xfffa470a, 0xfffffb51, 0x0005a483, 0x000b41fa, 0x0010d30e, 
0x00165720, 0x001bcd8e, 0x002135bd, 0x00268f13, 0x002bd8fa, 0x003112e0, 0x00363c35, 0x003b546b, 0x00405afa, 0x00454f5d, 0x004a310f, 0x004eff94, + 0x0053ba6e, 0x00586127, 0x005cf349, 0x00617065, 0x0065d80c, 0x006a29d6, 0x006e655c, 0x00728a3d, 0x0076981a, 0x007a8e98, 0x007e6d61, 0x00823422, 0x0085e28b, 0x00897851, 0x008cf52d, 0x009058da, 0x0093a31a, 0x0096d3af, 0x0099ea62, 0x009ce6fe, 0x009fc954, 0x00a29136, 0x00a53e7b, 0x00a7d0ff, 0x00aa48a0, 0x00aca542, 0x00aee6ca, 0x00b10d23, 0x00b3183c, 0x00b50805, 0x00b6dc75, 0x00b89584, 0x00ba3330, 0x00bbb579, 0x00bd1c63, 0x00be67f6, 0x00bf983d, 0x00c0ad48, 0x00c1a728, 0x00c285f4, 0x00c349c4, 0x00c3f2b6, 0x00c480e9, 0x00c4f480, 0x00c54da2, 0x00c58c79, 0x00c5b132, 0x00c5bbfc, 0x00c5ad0a, 0x00c58494, 0x00c542d1, 0x00c4e7fe, 0x00c47459, 0x00c3e824, 0x00c343a4, 0x00c2871f, 0x00c1b2e0, 0x00c0c731, 0x00bfc463, 0x00beaac6, 0x00bd7aae, 0x00bc3470, 0x00bad866, 0x00b966e9, 0x00b7e055, 0x00b6450a, 0x00b49568, 0x00b2d1d1, 0x00b0faaa, 0x00af1059, 0x00ad1346, 0x00ab03da, 0x00a8e282, 0x00a6afa8, 0x00a46bbc, 0x00a2172d, 0x009fb26c, 0x009d3deb, 0x009aba1d, 0x00982778, 0x0095866f, 0x0092d77b, 0x00901b11, 0x008d51ab, 0x008a7bc1, 0x008799cd, 0x0084ac48, 0x0081b3af, 0x007eb07b, 0x007ba32a, 0x00788c36, 0x00756c1d, 0x0072435b, 0x006f126b, 0x006bd9cd, 0x006899fb, 0x00655372, 0x006206b1, 0x005eb431, 0x005b5c71, 0x0057ffec, 0x00549f1c, 0x00513a7e, 0x004dd28c, 0x004a67c0, 0x0046fa93, 0x00438b7e, 0x00401af9, 0x003ca97b, 0x0039377a, 0x0035c56c, 0x003253c6, 0x002ee2fa, 0x002b737b, 0x002805ba, 0x00249a28, 0x00213134, 0x001dcb4a, 0x001a68d8, 0x00170a47, 0x0013b003, 0x00105a72, 0x000d09fc, 0x0009bf05, 0x000679f2, 0x00033b23, 0x000002f9, 0xfffcd1d3, + 0xfff9a80d, 0xfff68603, 0xfff36c0d, 0xfff05a84, 0xffed51bc, 0xffea520a, 0xffe75bbe, 0xffe46f2a, 0xffe18c9a, 0xffdeb45b, 0xffdbe6b6, 0xffd923f4, 0xffd66c59, 0xffd3c02a, 0xffd11fa9, 0xffce8b13, 0xffcc02a8, 0xffc986a1, 0xffc71738, 0xffc4b4a4, 0xffc25f1a, 0xffc016cb, 0xffbddbe8, 0xffbbae9f, 0xffb98f1c, 0xffb77d88, 0xffb57a0b, 0xffb384ca, 0xffb19de7, 0xffafc584, 0xffadfbbe, 0xffac40b3, 0xffaa947c, 0xffa8f730, 0xffa768e6, 0xffa5e9b1, 0xffa479a2, 0xffa318c7, 0xffa1c72f, 0xffa084e3, 0xff9f51eb, 0xff9e2e50, 0xff9d1a14, 0xff9c1539, 0xff9b1fc1, 0xff9a39a9, 0xff9962ec, 0xff989b85, 0xff97e36c, 0xff973a96, 0xff96a0f8, 0xff961684, 0xff959b29, 0xff952ed7, 0xff94d178, 0xff9482f8, 0xff944340, 0xff941236, 0xff93efbf, 0xff93dbc0, 0xff93d618, 0xff93deaa, 0xff93f552, 0xff9419ef, 0xff944c5a, 0xff948c6e, 0xff94da03, 0xff9534f0, 0xff959d0a, 0xff961224, 0xff969412, 0xff9722a5, 0xff97bdac, 0xff9864f6, 0xff991851, 0xff99d789, 0xff9aa268, 0xff9b78ba, 0xff9c5a47, 0xff9d46d6, 0xff9e3e30, 0xff9f4019, 0xffa04c57, 0xffa162ae, 0xffa282e1, 0xffa3acb4, 0xffa4dfe8, 0xffa61c3e, 0xffa76176, 0xffa8af51, 0xffaa058d, 0xffab63ea, 0xffacca25, 0xffae37fd, 0xffafad2e, 0xffb12976, 0xffb2ac90, 0xffb4363a, 0xffb5c630, 0xffb75c2c, 0xffb8f7ea, 0xffba9927, 0xffbc3f9d, 0xffbdeb07, 0xffbf9b21, 0xffc14fa5, 0xffc3084f, 0xffc4c4da, 0xffc68502, 0xffc84881, 0xffca0f14, 0xffcbd876, 0xffcda463, 0xffcf7299, 0xffd142d3, 0xffd314cf, 0xffd4e84a, 0xffd6bd01, 0xffd892b4, 0xffda6921, 0xffdc4007, 0xffde1726, 0xffdfee3f, 0xffe1c511, 0xffe39b60, 0xffe570ed, 0xffe7457c, 0xffe918ce, + 0xffeaeaab, 0xffecbad5, 0xffee8913, 0xfff0552d, 0xfff21ee8, 0xfff3e60f, 0xfff5aa69, 0xfff76bc2, 0xfff929e3, 0xfffae49b, 0xfffc9bb4, 0xfffe4efd, 0xfffffe46, 0x0001a95d, 0x00035015, 0x0004f23e, 0x00068fad, 0x00082835, 0x0009bbab, 0x000b49e6, 0x000cd2bd, 0x000e5609, 0x000fd3a3, 0x00114b67, 0x0012bd30, 0x001428db, 0x00158e47, 
0x0016ed53, 0x001845e0, 0x001997d0, 0x001ae306, 0x001c2765, 0x001d64d5, 0x001e9b3a, 0x001fca7d, 0x0020f288, 0x00221344, 0x00232c9d, 0x00243e7f, 0x002548d9, 0x00264b9a, 0x002746b2, 0x00283a12, 0x002925ae, 0x002a0979, 0x002ae568, 0x002bb971, 0x002c858d, 0x002d49b4, 0x002e05df, 0x002eba0a, 0x002f6630, 0x00300a4f, 0x0030a665, 0x00313a72, 0x0031c677, 0x00324a74, 0x0032c66e, 0x00333a67, 0x0033a665, 0x00340a6d, 0x00346687, 0x0034babb, 0x00350711, 0x00354b94, 0x0035884f, 0x0035bd4e, 0x0035ea9d, 0x0036104b, 0x00362e66, 0x003644fd, 0x00365422, 0x00365be6, 0x00365c5b, 0x00365594, 0x003647a5, 0x003632a2, 0x003616a2, 0x0035f3b9, 0x0035ca00, 0x0035998d, 0x00356279, 0x003524dd, 0x0034e0d3, 0x00349674, 0x003445dc, 0x0033ef25, 0x0033926d, 0x00332fcf, 0x0032c769, 0x00325958, 0x0031e5ba, 0x00316cae, 0x0030ee53, 0x00306ac8, 0x002fe22c, 0x002f54a1, 0x002ec246, 0x002e2b3c, 0x002d8fa4, 0x002cefa1, 0x002c4b53, 0x002ba2dc, 0x002af65f, 0x002a45fe, 0x002991db, 0x0028da1a, 0x00281edd, 0x00276046, 0x00269e7a, 0x0025d99b, 0x002511cd, 0x00244733, 0x002379ef, 0x0022aa26, 0x0021d7fa, 0x00210390, 0x00202d09, 0x001f5489, 0x001e7a33, 0x001d9e2a, 0x001cc091, 0x001be18a, 0x001b0138, 0x001a1fbc, 0x00193d3a, 0x001859d2, 0x001775a7, + 0x001690d9, 0x0015ab8b, 0x0014c5dc, 0x0013dfed, 0x0012f9de, 0x001213d0, 0x00112de1, 0x00104831, 0x000f62de, 0x000e7e08, 0x000d99cc, 0x000cb647, 0x000bd397, 0x000af1d9, 0x000a1129, 0x000931a3, 0x00085362, 0x00077681, 0x00069b1b, 0x0005c149, 0x0004e926, 0x000412c9, 0x00033e4c, 0x00026bc6, 0x00019b4e, 0x0000ccfc, 0x000000e6, 0xffff3721, 0xfffe6fc3, 0xfffdaadf, 0xfffce88b, 0xfffc28d9, 0xfffb6bdd, 0xfffab1a8, 0xfff9fa4d, 0xfff945dc, 0xfff89465, 0xfff7e5f9, 0xfff73aa7, 0xfff6927e, 0xfff5ed8b, 0xfff54bdc, 0xfff4ad7e, 0xfff4127d, 0xfff37ae4, 0xfff2e6bf, 0xfff25619, 0xfff1c8fa, 0xfff13f6c, 0xfff0b977, 0xfff03724, 0xffefb87a, 0xffef3d7f, 0xffeec63a, 0xffee52b1, 0xffede2e7, 0xffed76e3, 0xffed0ea7, 0xffecaa36, 0xffec4994, 0xffebecc2, 0xffeb93c3, 0xffeb3e96, 0xffeaed3c, 0xffea9fb6, 0xffea5602, 0xffea1020, 0xffe9ce0d, 0xffe98fc8, 0xffe9554c, 0xffe91e99, 0xffe8eba8, 0xffe8bc77, 0xffe89101, 0xffe8693f, 0xffe8452d, 0xffe824c5, 0xffe807ff, 0xffe7eed5, 0xffe7d93f, 0xffe7c735, 0xffe7b8af, 0xffe7ada5, 0xffe7a60d, 0xffe7a1de, 0xffe7a10d, 0xffe7a391, 0xffe7a95f, 0xffe7b26c, 0xffe7bead, 0xffe7ce16, 0xffe7e09c, 0xffe7f631, 0xffe80eca, 0xffe82a59, 0xffe848d3, 0xffe86a29, 0xffe88e4d, 0xffe8b532, 0xffe8decb, 0xffe90b08, 0xffe939db, 0xffe96b35, 0xffe99f08, 0xffe9d545, 0xffea0ddc, 0xffea48be, 0xffea85dc, 0xffeac525, 0xffeb068a, 0xffeb49fc, 0xffeb8f6a, 0xffebd6c4, 0xffec1ffa, 0xffec6afc, 0xffecb7b9, 0xffed0621, 0xffed5624, 0xffeda7b1, 0xffedfab8, 0xffee4f29, 0xffeea4f2, 0xffeefc04, 0xffef544e, 0xffefadc0, 0xfff00849, 0xfff063d9, 0xfff0c060, + 0xfff11dcd, 0xfff17c10, 0xfff1db1a, 0xfff23ada, 0xfff29b40, 0xfff2fc3d, 0xfff35dc1, 0xfff3bfbc, 0xfff4221f, 0xfff484db, 0xfff4e7e1, 0xfff54b20, 0xfff5ae8c, 0xfff61214, 0xfff675ab, 0xfff6d942, 0xfff73ccb, 0xfff7a037, 0xfff8037a, 0xfff86686, 0xfff8c94c, 0xfff92bc0, 0xfff98dd6, 0xfff9ef80, 0xfffa50b1, 0xfffab15e, 0xfffb117a, 0xfffb70fa, 0xfffbcfd2, 0xfffc2df6, 0xfffc8b5c, 0xfffce7f8, 0xfffd43c1, 0xfffd9eab, 0xfffdf8ae, 0xfffe51be, 0xfffea9d2, 0xffff00e1, 0xffff56e3, 0xffffabcd, 0xffffff99, 0x0000523d, 0x0000a3b3, 0x0000f3f1, 0x000142f1, 0x000190ac, 0x0001dd1b, 0x00022837, 0x000271fa, 0x0002ba5f, 0x0003015f, 0x000346f6, 0x00038b1d, 0x0003cdd1, 0x00040f0d, 0x00044ecb, 0x00048d0a, 0x0004c9c4, 0x000504f6, 0x00053e9e, 0x000576b8, 0x0005ad41, 0x0005e238, 0x00061599, 0x00064764, 0x00067797, 
0x0006a630, 0x0006d32f, 0x0006fe92, 0x00072859, 0x00075084, 0x00077712, 0x00079c04, 0x0007bf5b, 0x0007e116, 0x00080137, 0x00081fbf, 0x00083cb0, 0x0008580a, 0x000871cf, 0x00088a02, 0x0008a0a5, 0x0008b5ba, 0x0008c944, 0x0008db46, 0x0008ebc1, 0x0008fabb, 0x00090836, 0x00091435, 0x00091ebd, 0x000927d1, 0x00092f75, 0x000935ad, 0x00093a7f, 0x00093ded, 0x00093ffe, 0x000940b6, 0x00094019, 0x00093e2e, 0x00093af8, 0x0009367e, 0x000930c4, 0x000929d1, 0x000921aa, 0x00091854, 0x00090dd6, 0x00090236, 0x0008f57a, 0x0008e7a7, 0x0008d8c4, 0x0008c8d7, 0x0008b7e7, 0x0008a5fa, 0x00089316, 0x00087f43, 0x00086a86, 0x000854e6, 0x00083e6a, 0x00082718, 0x00080ef7, 0x0007f60f, 0x0007dc65, 0x0007c201, 0x0007a6e9, 0x00078b24, 0x00076eba, 0x000751b0, 0x0007340d, + 0x000715d9, 0x0006f71a, 0x0006d7d7, 0x0006b817, 0x000697e0, 0x00067739, 0x00065629, 0x000634b6, 0x000612e8, 0x0005f0c4, 0x0005ce51, 0x0005ab95, 0x00058898, 0x0005655e, 0x000541f0, 0x00051e52, 0x0004fa8b, 0x0004d6a1, 0x0004b29a, 0x00048e7c, 0x00046a4c, 0x00044612, 0x000421d2, 0x0003fd92, 0x0003d957, 0x0003b527, 0x00039108, 0x00036cfe, 0x00034910, 0x00032541, 0x00030196, 0x0002de16, 0x0002bac4, 0x000297a5, 0x000274be, 0x00025214, 0x00022fa9, 0x00020d84, 0x0001eba8, 0x0001ca18, 0x0001a8da, 0x000187f0, 0x0001675f, 0x00014729, 0x00012754, 0x000107e1, 0x0000e8d4, 0x0000ca30, 0x0000abf8, 0x00008e30, 0x000070d9, 0x000053f7, 0x0000378c, 0x00001b9a, 0x00000024, 0xffffe52d, 0xffffcab5, 0xffffb0bf, 0xffff974d, 0xffff7e61, 0xffff65fc, 0xffff4e20, 0xffff36ce, 0xffff2007, 0xffff09ce, 0xfffef421, 0xfffedf04, 0xfffeca76, 0xfffeb678, 0xfffea30b, 0xfffe9030, 0xfffe7de7, 0xfffe6c2f, 0xfffe5b0b, 0xfffe4a79, 0xfffe3a79, 0xfffe2b0d, 0xfffe1c32, 0xfffe0dea, 0xfffe0034, 0xfffdf310, 0xfffde67c, 0xfffdda79, 0xfffdcf05, 0xfffdc421, 0xfffdb9cb, 0xfffdb002, 0xfffda6c5, 0xfffd9e13, 0xfffd95eb, 0xfffd8e4d, 0xfffd8735, 0xfffd80a4, 0xfffd7a98, 0xfffd750f, 0xfffd7008, 0xfffd6b81, 0xfffd6779, 0xfffd63ed, 0xfffd60dd, 0xfffd5e46, 0xfffd5c26, 0xfffd5a7c, 0xfffd5945, 0xfffd5880, 0xfffd582a, 0xfffd5842, 0xfffd58c5, 0xfffd59b2, 0xfffd5b05, 0xfffd5cbe, 0xfffd5ed8, 0xfffd6154, 0xfffd642d, 0xfffd6762, 0xfffd6af1, 0xfffd6ed6, 0xfffd7310, 0xfffd779d, 0xfffd7c7a, 0xfffd81a4, 0xfffd8719, 0xfffd8cd7, 0xfffd92db, 0xfffd9923, 0xfffd9fac, 0xfffda675, 0xfffdad79, + 0xfffdb4b9, 0xfffdbc2f, 0xfffdc3db, 0xfffdcbba, 0xfffdd3ca, 0xfffddc07, 0xfffde470, 0xfffded03, 0xfffdf5bc, 0xfffdfe9b, 0xfffe079b, 0xfffe10bc, 0xfffe19fa, 0xfffe2354, 0xfffe2cc8, 0xfffe3652, 0xfffe3ff2, 0xfffe49a4, 0xfffe5367, 0xfffe5d38, 0xfffe6716, 0xfffe70ff, 0xfffe7aef, 0xfffe84e7, 0xfffe8ee3, 0xfffe98e2, 0xfffea2e1, 0xfffeacdf, 0xfffeb6db, 0xfffec0d2, 0xfffecac3, 0xfffed4ab, 0xfffede8a, 0xfffee85e, 0xfffef225, 0xfffefbde, 0xffff0587, 0xffff0f1f, 0xffff18a4, 0xffff2215, 0xffff2b70, 0xffff34b6, 0xffff3de3, 0xffff46f7, 0xffff4ff1, 0xffff58d0, 0xffff6192, 0xffff6a38, 0xffff72be, 0xffff7b26, 0xffff836d, 0xffff8b93, 0xffff9398, 0xffff9b7a, 0xffffa339, 0xffffaad3, 0xffffb249, 0xffffb99a, 0xffffc0c5, 0xffffc7ca, 0xffffcea8, 0xffffd55f, 0xffffdbee, 0xffffe255, 0xffffe894, 0xffffeeaa, 0xfffff498, 0xfffffa5d, 0xfffffff8, 0x0000056a, 0x00000ab3, 0x00000fd2, 0x000014c8, 0x00001994, 0x00001e37, 0x000022b1, 0x00002701, 0x00002b28, 0x00002f26, 0x000032fb, 0x000036a8, 0x00003a2d, 0x00003d89, 0x000040be, 0x000043cc, 0x000046b2, 0x00004972, 0x00004c0b, 0x00004e7f, 0x000050cd, 0x000052f7, 0x000054fc, 0x000056dd, 0x0000589b, 0x00005a36, 0x00005baf, 0x00005d06, 0x00005e3d, 0x00005f52, 0x00006048, 0x0000611f, 0x000061d8, 0x00006272, 0x000062f0, 0x00006351, 
0x00006396, 0x000063c0, 0x000063d0, 0x000063c6, 0x000063a3, 0x00006368, 0x00006316, 0x000062ad, 0x0000622e, 0x0000619a, 0x000060f1, 0x00006035, 0x00005f66, 0x00005e84, 0x00005d92, 0x00005c8e, 0x00005b7b, 0x00005a58, 0x00005927, 0x000057e9, 0x0000569d, 0x00005545, 0x000053e2, + 0x00000000 +}; +} diff --git a/services/audioflinger/audio-resampler/resampler_filter_coefficients_10042011.h b/services/audioflinger/audio-resampler/resampler_filter_coefficients_10042011.h deleted file mode 100644 index 8c6a899..0000000 --- a/services/audioflinger/audio-resampler/resampler_filter_coefficients_10042011.h +++ /dev/null @@ -1,2071 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -#include - -namespace android { - -const int32_t resampler_filter_coefficients_10042011[] = { -2075076504, -2074870219, -2074269557, -2073262841, -2071862786, -2070051926, -2067849110, -2065243563, -2062248465, -2058846262, -2055055548, -2050866069, -2046291635, -2041315273, -2035955897, -2030204167, -2024074532, -2017550518, -2010651175, -2003368165, -1995716923, -1987682243, -1979283938, -1970514752, -1961390076, -1951894181, -1942045775, -1931837943, -1921286728, -1910377736, -1899130344, -1887538902, -1875619770, -1863358864, -1850775104, -1837863721, -1824641940, -1811097380, -1797249776, -1783095528, -1768651291, -1753903742, -1738870726, -1723548746, -1707955054, -1692078274, -1675937190, -1659529795, -1642873313, -1625956474, -1608797063, -1591393772, -1573764495, -1555899971, -1537818785, -1519520784, -1501022796, -1482314240, -1463411160, -1444313227, -1425037741, -1405576492, -1385946622, -1366149382, -1346201706, -1326095287, -1305845828, -1285455045, -1264940308, -1244295464, -1223536951, -1202667530, -1181703085, -1160635857, -1139479271, -1118235636, -1096921134, -1075530585, -1054078577, -1032568949, -1011017224, -989417949, -967783970, -946119336, -924439752, -902741900, -881039249, -859336703, -837648175, -815968557, -794308053, -772670861, -751070988, -729505896, -707986911, -686519594, -665117063, -643776375, -622506966, -601314359, -580211607, -559197735, -538282662, -517472516, -496778402, -476197491, -455736426, -435400430, -415200497, -395136166, -375215078, -355443561, -335831583, -316378199, -297089183, -277970603, -259032151, -240274557, -221703918, -203326638, -185150446, -167174280, -149401179, -131836509, -114487708, -97355714, -80444376, -63759844, -47308498, -31090767, -15108809, --631654, --16124677, --31368536, --46360908, --61095999, --75569692, --89781876, --103732869, --117417959, --130833461, --143977734, --156850513, --169446641, --181763420, --193799687, --205556559, --217029409, --228216068, --239114500, --249725794, --260045485, --270072966, --279807538, --289252364, --298404017, --307262396, --315825804, --324097052, --332072584, --339753137, --347137398, --354229167, --361025450, --367527527, --373733643, --379647676, --385266970, --390594013, --395628043, --400374357, --404831217, --409001628, 
--412884357, --416484569, --419800667, --422836272, --425590412, --428068799, --430270358, --432199204, --433854305, --435241518, --436360214, --437215297, --437806349, --438139967, --438216244, --438040549, --437612462, --436938620, --436019455, --434860689, --433462013, --431830172, --429966019, --427875663, --425559045, --423023081, --420269108, --417303590, --414126712, --410745497, --407161766, --403382348, --399407797, --395245305, --390897144, --386370224, --381665062, --376788564, --371743279, --366536360, --361168782, --355647611, --349975843, --344160581, --338202705, --332108851, --325882239, --319530194, --313054213, --306461164, --299754744, --292942106, --286024571, --279008412, --271897436, --264698872, --257414606, --250051032, --242612307, --235105276, --227531464, --219896432, --212204319, --204462036, --196671840, --188839540, --180969700, --173068854, --165138974, --157185076, --149211664, --141225186, --133228184, --125225726, --117222535, --109224429, --101233397, --93253442, --85289050, --77345947, --69426846, --61535955, --53678071, --45858404, --38079310, --30344121, --22657427, --15024209, --7447312, -69952, -7522929, -14907403, -22221174, -29462063, -36625813, -43708432, -50707112, -57619541, -64441301, -71168979, -77800149, -84333341, -90764454, -97090405, -103308422, -109417126, -115412500, -121292248, -127054242, -132698149, -138220442, -143619140, -148891701, -154037743, -159053739, -163938275, -168689170, -173306749, -177787867, -182131495, -186335286, -190399716, -194321853, -198101380, -201736519, -205228586, -208575166, -211776310, -214830056, -217737773, -220497194, -223108861, -225571114, -227885824, -230051117, -232067922, -233934597, -235653195, -237222146, -238642925, -239914316, -241038916, -242015623, -242846277, -243529706, -244068632, -244462216, -244712654, -244818983, -244784181, -244607765, -244292268, -243836913, -243244874, -242516010, -241653201, -240655918, -239527557, -238268354, -236881508, -235366743, -233727638, -231964766, -230081521, -228077731, -225956985, -223720128, -221370808, -218909157, -216338938, -213661333, -210880096, -207995414, -205010946, -201928111, -198750891, -195479864, -192118892, -188669741, -185136417, -181519490, -177822602, -174047687, -170198894, -166277172, -162286302, -158228508, -154107834, -149925117, -145683777, -141386129, -137036351, -132635752, -128187958, -123695608, -119162747, -114590586, -109982363, -105340765, -100669861, -95971242, -91248228, -86503703, -81741443, -76962776, -72170457, -67367307, -62557108, -57741653, -52923856, -48106777, -43293939, -38486954, -33688240, -28900789, -24128036, -19371931, -14634917, -9920080, -5230446, -567618, --4066612, --8669336, --13237622, --17769457, --22262939, --26715017, --31123091, --35485370, --39800471, --44065504, --48278050, --52436071, --56538223, --60581634, --64564356, --68484740, --72342096, --76133828, --79858170, --83513191, --87098163, --90610469, --94048704, --97411174, --100697628, --103905676, --107034151, --110081227, --113046738, --115928406, --118725517, --121436605, --124062060, --126599916, --129049689, --131409767, --133680567, --135860199, --137948517, --139944122, --141847799, --143657917, --145374591, --146996418, --148524310, --149956828, --151294475, --152536152, --153683178, --154734431, --155690674, --156550825, --157316298, --157986149, --158561410, --159041176, --159427086, --159718451, --159916548, --160020582, --160032332, --159951337, --159779143, --159515147, --159161329, --158717499, --158185432, --157564678, --156857347, 
--156063474, --155185008, --154221601, --153175426, --152046727, --150837654, --149548057, --148180246, --146734704, --145213706, --143617181, --141947434, --140205146, --138392776, --136510514, --134560819, --132544622, --130464445, --128320507, --126115178, --123849532, --121526221, --119145720, --116710510, --114221879, --111682456, --109092675, --106454825, --103770291, --101041818, --98270160, --95457755, --92606223, --89718259, --86794577, --83837395, --80848397, --77830331, --74784179, --71712237, --68616354, --65499123, --62361390, --59205117, --56032152, --52845129, --49645224, --46434530, --43215084, --39989380, --36758496, --33524217, --30288565, --27054001, --23821849, --20593926, --17372347, --14159320, --10955954, --7763647, --4584416, --1420431, -1726908, -4856115, -7965053, -11051765, -14114989, -17153587, -20165512, -23148908, -26102328, -29024647, -31913805, -34768253, -37586799, -40368755, -43112232, -45815788, -48478013, -51098183, -53674369, -56205373, -58689945, -61127693, -63516834, -65856322, -68144797, -70381920, -72565965, -74696201, -76771518, -78791971, -80756044, -82663155, -84512076, -86302872, -88034067, -89705316, -91315544, -92865092, -94352657, -95778072, -97140241, -98439585, -99674922, -100846363, -101953029, -102995643, -103973242, -104886114, -105733366, -106515783, -107232507, -107884029, -108469586, -108990146, -109445031, -109834907, -110159075, -110418598, -110612956, -110743021, -110808247, -110809873, -110747573, -110622392, -110433872, -110183341, -109870630, -109496925, -109061853, -108566821, -108011814, -107398172, -106725651, -105995758, -105208649, -104365776, -103466973, -102513780, -101506507, -100446746, -99334505, -98171433, -96958015, -95695913, -94385172, -93027406, -91623219, -90174378, -88681103, -87145094, -85567114, -83948941, -82290788, -80594255, -78860190, -77090470, -75285527, -73447070, -71576122, -69674546, -67742761, -65782346, -63794384, -61780793, -59742181, -57680191, -55596034, -53491542, -51367245, -49224577, -47064766, -44889688, -42700109, -40497558, -38283410, -36059459, -33826411, -31585598, -29338397, -27086596, -24831081, -22573215, -20314456, -18056430, -15799888, -13545910, -11295904, -9051483, -6813612, -4583444, -2362482, -152205, --2046514, --4232760, --6405078, --8562051, --10702661, --12825994, --14930573, --17015184, --19078982, --21121361, --23140947, --25136591, --27107282, --29052375, --30970456, --32860543, --34721731, --36553615, --38354874, --40124623, --41861868, --43566233, --45236425, --46871781, --48471484, --50035443, --51562506, --53052109, --54503334, --55916090, --57289239, --58622387, --59914726, --61166369, --62376298, --63544241, --64669360, --65751822, --66790682, --67785875, --68736719, --69643614, --70505769, --71323237, --72095315, --72822435, --73503872, --74139832, --74729705, --75274073, --75772337, --76224828, --76630970, --76991410, --77305656, --77574197, --77796578, --77973591, --78104888, --78191082, --78231765, --78227787, --78178906, --78085845, --77948266, --77767093, --77542200, --77274423, --76963506, --76610446, --76215234, --75778805, --75300971, --74782775, --74224327, --73626669, --72989720, --72314602, --71601546, --70851655, --70064886, --69242354, --68384382, --67492158, --66565755, --65606354, --64614392, --63591082, --62536507, --61451798, --60337466, --59194800, --58024034, --56826376, --55602460, --54353584, --53079977, --51782773, --50462662, --49120984, --47758105, --46375208, --44973078, --43553013, --42115335, --40661095, --39191102, --37706696, --36208364, 
--34697230, --33174211, --31640600, --30096851, --28543956, --26982847, --25414822, --23840464, --22260795, --20676812, --19089709, --17499984, --15908468, --14316132, --12724175, --11133249, --9544240, --7958198, --6376234, --4798939, --3227038, --1661555, --103565, -1446230, -2987101, -4517998, -6037984, -7546475, -9042956, -10526443, -11996036, -13451023, -14890857, -16314514, -17721207, -19110300, -20481414, -21833586, -23166092, -24478220, -25769611, -27039309, -28286749, -29511342, -30712939, -31890678, -33044057, -34172405, -35275563, -36352672, -37403349, -38427003, -39423626, -40392440, -41333144, -42245116, -43128383, -43982211, -44806449, -45600591, -46364840, -47098569, -47801711, -48473727, -49114838, -49724455, -50302623, -50848874, -51363545, -51846135, -52296779, -52715020, -53101240, -53455010, -53776585, -54065600, -54322557, -54547131, -54739663, -54899810, -55028111, -55124307, -55188827, -55221380, -55222572, -55192228, -55130859, -55038225, -54914980, -54761033, -54576974, -54362622, -54118682, -53845150, -53542695, -53211202, -52851427, -52463450, -52047991, -51604968, -51135146, -50638673, -50116337, -49568129, -48994861, -48396765, -47774658, -47128548, -46459228, -45766990, -45052710, -44316493, -43559189, -42781174, -41983339, -41165794, -40329347, -39474419, -38601939, -37712110, -36805777, -35883432, -34945984, -33993615, -33027089, -32046926, -31054070, -30048815, -29031979, -28004160, -26966277, -25918608, -24861885, -23796723, -22724052, -21644243, -20558050, -19466141, -18369382, -17268088, -16162890, -15054445, -13943628, -12830865, -11716830, -10602238, -9487911, -8374236, -7261776, -6151235, -5043417, -3938790, -2837924, -1741550, -650377, --435204, --1514773, --2587638, --3653109, --4710700, --5759963, --6800171, --7830712, --8851149, --9861153, --10860035, --11847221, --12822216, --13784698, --14733981, --15669597, --16591140, --17498438, --18390866, --19267997, --20129356, --20974762, --21803581, --22615471, --23410012, --24187134, --24946258, --25687095, --26409195, --27112509, --27796483, --28460937, --29105505, --29730269, --30334749, --30918819, --31482081, --32024624, --32545988, --33046126, --33524693, --33981867, --34417247, --34830850, --35222329, --35591891, --35939181, --36264307, --36566990, --36847536, --37105661, --37341533, --37554878, --37746021, --37914721, --38061211, --38185256, --38287239, --38366977, --38424762, --38460388, --38474270, --38466280, --38436775, --38385595, --38313205, --38219543, --38105021, --37969515, --37813522, --37637035, --37440507, --37223843, --36987554, --36731686, --36456739, --36162666, --35850010, --35518873, --35169788, --34802724, --34418227, --34016443, --33597949, --33162777, --32711507, --32244345, --31761885, --31264164, --30751745, --30224868, --29684158, --29129713, --28562123, --27981679, --27389002, --26784181, --26167762, --25540059, --24901722, --24252917, --23594223, --22926012, --22248923, --21563113, --20869110, --20167303, --19458342, --18742448, --18020168, --17291929, --16558346, --15819604, --15076174, --14328481, --13577150, --12822445, --12064865, --11304882, --10543086, --9779720, --9015209, --8250021, --7484741, --6719667, --5955234, --5191932, --4430285, --3670541, --2913036, --2158236, --1406657, --658617, -85527, -825280, -1560174, -2289926, -3014260, -3732701, -4444806, -5150245, -5848747, -6539832, -7223129, -7898368, -8565383, -9223733, -9873071, -10513075, -11143565, -11764092, -12374365, -12974099, -13563194, -14141234, -14707964, -15263070, -15806466, -16337748, -16856735, 
-17363175, -17857079, -18338088, -18806059, -19260708, -19702046, -20129723, -20543652, -20943584, -21329597, -21701383, -22058894, -22401875, -22730421, -23044252, -23343385, -23627618, -23897120, -24151661, -24391299, -24615824, -24825417, -25019868, -25199285, -25363488, -25512702, -25646760, -25765809, -25869680, -25958622, -26032502, -26091517, -26135536, -26164850, -26179375, -26179344, -26164644, -26135585, -26092113, -26034498, -25962646, -25876887, -25777203, -25663899, -25536906, -25396578, -25242934, -25076305, -24896644, -24704311, -24499364, -24282163, -24052697, -23811348, -23558211, -23293664, -23017702, -22730703, -22432787, -22124357, -21805445, -21476448, -21137523, -20789074, -20431134, -20064079, -19688086, -19303582, -18910646, -18509679, -18100894, -17684716, -17261223, -16830785, -16393630, -15950197, -15500602, -15045231, -14584340, -14118348, -13647355, -13171699, -12691641, -12207609, -11719754, -11228437, -10733948, -10236700, -9736830, -9234652, -8730459, -8224662, -7717439, -7209110, -6699986, -6190443, -5680626, -5170792, -4661242, -4152348, -3644304, -3137385, -2631912, -2128229, -1626510, -1126973, -629931, -135715, --355469, --843402, --1327763, --1808270, --2284755, --2757069, --3224916, --3688026, --4146190, --4599250, --5046898, --5488901, --5925074, --6355314, --6779333, --7196918, --7607862, --8012069, --8409254, --8799255, --9181907, --9557181, --9924822, --10284691, --10636595, --10980501, --11316157, --11643459, --11962240, --12272514, --12574056, --12866786, --13150528, --13425307, --13690910, --13947306, --14194356, --14432139, --14660477, --14879363, --15088645, --15288408, --15478483, --15658898, --15829522, --15990474, --16141613, --16282992, --16414484, --16536222, --16648083, --16750160, --16842353, --16924831, --16997506, --17060493, --17113697, --17157297, --17191222, --17215613, --17230389, --17235750, --17231648, --17218249, --17195484, --17163567, --17122473, --17072392, --17013271, --16945340, --16868599, --16783258, --16689284, --16586915, --16476178, --16357294, --16230239, --16095254, --15952382, --15801866, --15643700, --15478137, --15305245, --15125274, --14938221, --14744336, --14543701, --14336583, --14123005, --13903232, --13677369, --13445685, --13208205, --12965179, --12716724, --12463120, --12204418, --11940874, --11672628, --11399952, --11122887, --10841668, --10556438, --10267479, --9974863, --9678839, --9379572, --9077334, --8772193, --8464370, --8154033, --7841459, --7526740, --7210102, --6891729, --6571875, --6250617, --5928143, --5604631, --5280339, --4955374, --4629936, --4304221, --3978468, --3652773, --3327303, --3002250, --2677848, --2354215, --2031520, --1709962, --1389747, --1070968, --753751, --438281, --124760, -186691, -495939, -802786, -1107056, -1408639, -1707439, -2003270, -2295964, -2585396, -2871472, -3154006, -3432861, -3707940, -3979193, -4246453, -4509594, -4768498, -5023110, -5273258, -5518843, -5759760, -5995988, -6227371, -6453823, -6675229, -6891575, -7102708, -7308575, -7509086, -7704267, -7893987, -8078205, -8256820, -8429855, -8597183, -8758786, -8914577, -9064605, -9208760, -9347039, -9479353, -9605758, -9726154, -9840566, -9948924, -10051316, -10147660, -10237999, -10322257, -10400525, -10472731, -10538934, -10599070, -10653248, -10701410, -10743630, -10779849, -10810183, -10834587, -10853155, -10865840, -10872775, -10873933, -10869421, -10859197, -10843400, -10822013, -10795156, -10762793, -10725068, -10681980, -10633658, -10580078, -10521391, -10457608, -10388869, -10315156, -10236621, 
-10153290, -10065313, -9972683, -9875561, -9773986, -9668111, -9557933, -9443609, -9325184, -9202823, -9076535, -8946481, -8812721, -8675419, -8534581, -8390359, -8242821, -8092134, -7938325, -7781551, -7621894, -7459519, -7294449, -7126832, -6956750, -6784374, -6609741, -6433003, -6254252, -6073651, -5891227, -5707114, -5521403, -5334259, -5145731, -4955957, -4765041, -4573140, -4380296, -4186630, -3992245, -3797298, -3601843, -3406004, -3209891, -3013645, -2817308, -2620977, -2424757, -2228787, -2033128, -1837884, -1643168, -1449107, -1255755, -1063192, -871528, -680885, -491329, -302942, -115833, --69891, --254183, --436990, --618208, --797740, --975518, --1151487, --1325539, --1497588, --1667578, --1835474, --2001175, --2164603, --2325693, --2484411, --2640658, --2794375, --2945514, --3094068, --3239949, --3383105, --3523475, --3661049, --3795739, --3927507, --4056301, --4182128, --4304909, --4424616, --4541189, --4654642, --4764898, --4871946, --4975742, --5076322, --5173620, --5267633, --5358312, --5445689, --5529702, --5610360, --5687620, --5761528, --5832032, --5899149, --5962833, --6023137, --6080011, --6133489, --6183535, --6230216, --6273495, --6313411, --6349927, --6383111, --6412929, --6439430, --6462582, --6482459, --6499037, --6512371, --6522432, --6529297, --6532949, --6533453, --6530785, --6525030, --6516181, --6504306, --6489387, --6471510, --6450672, --6426949, --6400323, --6370884, --6338633, --6303653, --6265930, --6225556, --6182542, --6136972, --6088835, --6038222, --5985148, --5929704, --5871883, --5811781, --5749418, --5684887, --5618180, --5549390, --5478541, --5405727, --5330951, --5254303, --5175817, --5095584, --5013604, --4929960, --4844689, --4757882, --4669550, --4579780, --4488613, --4396142, --4302370, --4207380, --4111213, --4013962, --3915640, --3816329, --3716076, --3614967, --3513009, --3410272, --3306801, --3202684, --3097938, --2992636, --2886829, --2780600, --2673962, --2566977, --2459696, --2352199, --2244507, --2136684, --2028782, --1920874, --1812971, --1705121, --1597373, --1489799, --1382421, --1275290, --1168459, --1061994, --955913, --850253, --745067, --640414, --536322, --432828, --329984, --227841, --126414, --25726, -74177, -173245, -271455, -368780, -465170, -560585, -655003, -748412, -840765, -932024, -1022164, -1111173, -1199003, -1285630, -1371035, -1455222, -1538151, -1619799, -1700140, -1779174, -1856860, -1933182, -2008118, -2081679, -2153827, -2224550, -2293824, -2361659, -2428020, -2492903, -2556292, -2618211, -2678631, -2737549, -2794945, -2850839, -2905201, -2958036, -3009325, -3059097, -3107325, -3154018, -3199156, -3242768, -3284829, -3325357, -3364336, -3401806, -3437747, -3472179, -3505082, -3536494, -3566396, -3594812, -3621723, -3647174, -3671149, -3693673, -3714729, -3734362, -3752557, -3769346, -3784715, -3798715, -3811334, -3822606, -3832516, -3841112, -3848384, -3854368, -3859049, -3862479, -3864649, -3865599, -3865314, -3863845, -3861188, -3857383, -3852418, -3846346, -3839164, -3830915, -3821584, -3811225, -3799836, -3787460, -3774084, -3759760, -3744487, -3728310, -3711218, -3693263, -3674447, -3654814, -3634352, -3613113, -3591098, -3568353, -3544866, -3520689, -3495824, -3470316, -3444153, -3417383, -3390012, -3362082, -3333586, -3304570, -3275042, -3245044, -3214563, -3183645, -3152295, -3120556, -3088416, -3055921, -3023078, -2989927, -2956457, -2922708, -2888688, -2854437, -2819945, -2785255, -2750373, -2715338, -2680135, -2644802, -2609344, -2573798, -2538153, -2502446, -2466684, -2430901, -2395085, -2359269, 
-2323458, -2287688, -2251947, -2216270, -2180661, -2145152, -2109728, -2074417, -2039222, -2004173, -1969258, -1934504, -1899918, -1865524, -1831311, -1797303, -1763503, -1729936, -1696594, -1663499, -1630656, -1598085, -1565774, -1533740, -1501984, -1470527, -1439358, -1408495, -1377941, -1347715, -1317803, -1288221, -1258970, -1230067, -1201504, -1173294, -1145440, -1117954, -1090824, -1064057, -1037655, -1011629, -985969, -960683, -935774, -911250, -887101, -863334, -839947, -816951, -794337, -772110, -750271, -728827, -707764, -687085, -666787, -646876, -627343, -608189, -589415, -571023, -553003, -535356, -518081, -501181, -484646, -468479, -452677, -437242, -422161, -407434, -393055, -379026, -365337, -351987, -338974, -326296, -313944, -301916, -290208, -278820, -267744, -256978, -246518, -236362, -226500, -216928, -207640, -198636, -189906, -181447, -173255, -165327, -157655, -150236, -143064, -136137, -129449, -122995, -116772, -110776, -104999, -99436, -94081, -88932, -83981, -79224, -74658, -70277, -66077, -62052, -58198, -54512, -50989, -47624, -44413, -41353, -38438, -35663, -33023, -30515, -28134, -25876, -23736, -21712, -19799, -17992, -16290, -14687, -13182, -11769, -10446, -9210, -8057, -6982, -5983, -5056, -4198, -3407, -2678, -2010, -1400, -844, -341, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -0, -}; -} -- cgit v1.1 From ad2f6dbe14e601ae69f3697bc17106f9fd7d59d6 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 1 Nov 2012 15:45:06 -0700 Subject: AudioTrack comments Change-Id: I08988f635c5bc6f4c35e9c4188f3d6fa557bc4c2 --- include/media/AudioTrack.h | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 7dd22e8..3d45503 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -49,10 +49,11 @@ public: }; /* Events used by AudioTrack callback function (audio_track_cblk_t). + * Keep in sync with frameworks/base/media/java/android/media/AudioTrack.java NATIVE_EVENT_*. */ enum event_type { EVENT_MORE_DATA = 0, // Request to write more data to PCM buffer. - EVENT_UNDERRUN = 1, // PCM buffer underrun occured. + EVENT_UNDERRUN = 1, // PCM buffer underrun occurred. EVENT_LOOP_END = 2, // Sample loop end was reached; playback restarted from // loop start if loop count was not 0. EVENT_MARKER = 3, // Playback head is at the specified marker position @@ -73,7 +74,7 @@ public: MUTE = 0x00000001 }; uint32_t flags; // 0 or MUTE - audio_format_t format; // but AUDIO_FORMAT_PCM_8_BIT -> AUDIO_FORMAT_PCM_16_BIT + audio_format_t format; // but AUDIO_FORMAT_PCM_8_BIT -> AUDIO_FORMAT_PCM_16_BIT // accessed directly by WebKit ANP callback int channelCount; // will be removed in the future, do not use @@ -126,7 +127,7 @@ public: */ AudioTrack(); - /* Creates an audio track and registers it with AudioFlinger. + /* Creates an AudioTrack object and registers it with AudioFlinger. 
* Once created, the track needs to be started before it can be used. * Unspecified values are set to the audio hardware's current * values. @@ -140,12 +141,13 @@ public: * 16 bits per sample). * channelMask: Channel mask. * frameCount: Minimum size of track PCM buffer in frames. This defines the + * application's contribution to the * latency of the track. The actual size selected by the AudioTrack could be * larger if the requested size is not compatible with current audio HAL * latency. Zero means to use a default value. * flags: See comments on audio_output_flags_t in . * cbf: Callback function. If not null, this function is called periodically - * to request new PCM data. + * to provide new PCM data. * user: Context for use by the callback receiver. * notificationFrames: The callback function is called each time notificationFrames PCM * frames have been consumed from track input buffer. @@ -209,7 +211,7 @@ public: * - INVALID_OPERATION: AudioTrack is already initialized * - BAD_VALUE: invalid parameter (channelMask, format, sampleRate...) * - NO_INIT: audio server or audio hardware not initialized - * */ + */ status_t set(audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT, uint32_t sampleRate = 0, audio_format_t format = AUDIO_FORMAT_DEFAULT, @@ -293,7 +295,7 @@ public: status_t setAuxEffectSendLevel(float level); void getAuxEffectSendLevel(float* level) const; - /* Set sample rate for this track, mostly used for games' sound effects + /* Set sample rate for this track in Hz, mostly used for games' sound effects */ status_t setSampleRate(int sampleRate); uint32_t getSampleRate() const; @@ -419,7 +421,7 @@ public: * If the track is stopped, obtainBuffer() returns * STOPPED instead of NO_ERROR as long as there are buffers available, * at which point NO_MORE_BUFFERS is returned. - * Buffers will be returned until the pool (buffercount()) + * Buffers will be returned until the pool * is exhausted, at which point obtainBuffer() will either block * or return WOULD_BLOCK depending on the value of the "blocking" * parameter. @@ -523,7 +525,9 @@ protected: bool mActive; // protected by mLock callback_t mCbf; // callback handler for events, or NULL - void* mUserData; + void* mUserData; // for client callback handler + + // for notification APIs uint32_t mNotificationFramesReq; // requested number of frames between each // notification callback uint32_t mNotificationFramesAct; // actual number of frames between each @@ -531,10 +535,11 @@ protected: sp mSharedBuffer; int mLoopCount; uint32_t mRemainingFrames; - uint32_t mMarkerPosition; + uint32_t mMarkerPosition; // in frames bool mMarkerReached; - uint32_t mNewPosition; - uint32_t mUpdatePeriod; + uint32_t mNewPosition; // in frames + uint32_t mUpdatePeriod; // in frames + bool mFlushed; // FIXME will be made obsolete by making flush() synchronous audio_output_flags_t mFlags; int mSessionId; -- cgit v1.1 From c28c03b0b819d705522929852ecdb5a8bb50b13b Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 1 Nov 2012 15:41:48 -0700 Subject: AudioRecord comments Change-Id: Ibec910608948d778dc655d900255a80384e9b06f --- include/media/AudioRecord.h | 75 +++++++++++++++++++++++++++------------------ 1 file changed, 45 insertions(+), 30 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index 156c592..f9f6e8d 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -43,15 +43,15 @@ public: */ enum event_type { EVENT_MORE_DATA = 0, // Request to read more data from PCM buffer. 
- EVENT_OVERRUN = 1, // PCM buffer overrun occured. + EVENT_OVERRUN = 1, // PCM buffer overrun occurred. EVENT_MARKER = 2, // Record head is at the specified marker position // (See setMarkerPosition()). EVENT_NEW_POS = 3, // Record head is at a new position // (See setPositionUpdatePeriod()). }; - /* Create Buffer on the stack and pass it to obtainBuffer() - * and releaseBuffer(). + /* Client should declare Buffer on the stack and pass address to obtainBuffer() + * and releaseBuffer(). See also callback_t for EVENT_MORE_DATA. */ class Buffer @@ -63,26 +63,30 @@ public: uint32_t flags; int channelCount; audio_format_t format; - size_t frameCount; + + size_t frameCount; // number of sample frames corresponding to size; + // on input it is the number of frames available, + // on output is the number of frames actually drained + size_t size; // total size in bytes == frameCount * frameSize union { void* raw; - short* i16; - int8_t* i8; + short* i16; // signed 16-bit + int8_t* i8; // unsigned 8-bit, offset by 0x80 }; }; /* As a convenience, if a callback is supplied, a handler thread * is automatically created with the appropriate priority. This thread - * invokes the callback when a new buffer becomes ready or an overrun condition occurs. + * invokes the callback when a new buffer becomes ready or various conditions occur. * Parameters: * * event: type of event notified (see enum AudioRecord::event_type). * user: Pointer to context for use by the callback receiver. * info: Pointer to optional parameter according to event type: * - EVENT_MORE_DATA: pointer to AudioRecord::Buffer struct. The callback must not read - * more bytes than indicated by 'size' field and update 'size' if less bytes are - * read. + * more bytes than indicated by 'size' field and update 'size' if fewer bytes are + * consumed. * - EVENT_OVERRUN: unused. * - EVENT_MARKER: pointer to const uint32_t containing the marker position in frames. * - EVENT_NEW_POS: pointer to const uint32_t containing the new position in frames. @@ -108,7 +112,7 @@ public: */ AudioRecord(); - /* Creates an AudioRecord track and registers it with AudioFlinger. + /* Creates an AudioRecord object and registers it with AudioFlinger. * Once created, the track needs to be started before it can be used. * Unspecified values are set to the audio hardware's current * values. @@ -120,10 +124,13 @@ public: * format: Audio format (e.g AUDIO_FORMAT_PCM_16_BIT for signed * 16 bits per sample). * channelMask: Channel mask. - * frameCount: Total size of track PCM buffer in frames. This defines the - * latency of the track. + * frameCount: Minimum size of track PCM buffer in frames. This defines the + * application's contribution to the + * latency of the track. The actual size selected by the AudioRecord could + * be larger if the requested size is not compatible with current audio HAL + * latency. Zero means to use a default value. * cbf: Callback function. If not null, this function is called periodically - * to provide new PCM data. + * to consume new PCM data. * user: Context for use by the callback receiver. * notificationFrames: The callback function is called each time notificationFrames PCM * frames are ready in record track output buffer. @@ -154,7 +161,7 @@ public: * - BAD_VALUE: invalid parameter (channels, format, sampleRate...) 
* - NO_INIT: audio server or audio hardware not initialized * - PERMISSION_DENIED: recording is not allowed for the requesting process - * */ + */ status_t set(audio_source_t inputSource = AUDIO_SOURCE_DEFAULT, uint32_t sampleRate = 0, audio_format_t format = AUDIO_FORMAT_DEFAULT, @@ -168,14 +175,14 @@ public: /* Result of constructing the AudioRecord. This must be checked - * before using any AudioRecord API (except for set()), using + * before using any AudioRecord API (except for set()), because using * an uninitialized AudioRecord produces undefined results. * See set() method above for possible return codes. */ status_t initCheck() const; - /* Returns this track's latency in milliseconds. - * This includes the latency due to AudioRecord buffer size + /* Returns this track's estimated latency in milliseconds. + * This includes the latency due to AudioRecord buffer size, * and audio hardware driver. */ uint32_t latency() const; @@ -191,7 +198,7 @@ public: /* After it's created the track is not active. Call start() to * make it active. If set, the callback will start being called. - * if event is not AudioSystem::SYNC_EVENT_NONE, the capture start will be delayed until + * If event is not AudioSystem::SYNC_EVENT_NONE, the capture start will be delayed until * the specified event occurs on the specified trigger session. */ status_t start(AudioSystem::sync_event_t event = AudioSystem::SYNC_EVENT_NONE, @@ -199,12 +206,12 @@ public: /* Stop a track. If set, the callback will cease being called and * obtainBuffer returns STOPPED. Note that obtainBuffer() still works - * and will fill up buffers until the pool is exhausted. + * and will drain buffers until the pool is exhausted. */ void stop(); bool stopped() const; - /* get sample rate for this record track + /* Get sample rate for this record track in Hz. */ uint32_t getSampleRate() const; @@ -258,7 +265,7 @@ public: */ status_t getPosition(uint32_t *position) const; - /* returns a handle on the audio input used by this AudioRecord. + /* Returns a handle on the audio input used by this AudioRecord. * * Parameters: * none. @@ -268,7 +275,7 @@ public: */ audio_io_handle_t getInput() const; - /* returns the audio session ID associated with this AudioRecord. + /* Returns the audio session ID associated with this AudioRecord. * * Parameters: * none. @@ -278,22 +285,30 @@ public: */ int getSessionId() const; - /* obtains a buffer of "frameCount" frames. The buffer must be - * filled entirely. If the track is stopped, obtainBuffer() returns + /* Obtains a buffer of "frameCount" frames. The buffer must be + * drained entirely, and then released with releaseBuffer(). + * If the track is stopped, obtainBuffer() returns * STOPPED instead of NO_ERROR as long as there are buffers available, * at which point NO_MORE_BUFFERS is returned. - * Buffers will be returned until the pool (buffercount()) + * Buffers will be returned until the pool * is exhausted, at which point obtainBuffer() will either block * or return WOULD_BLOCK depending on the value of the "blocking" * parameter. + * + * Interpretation of waitCount: + * +n limits wait time to n * WAIT_PERIOD_MS, + * -1 causes an (almost) infinite wait time, + * 0 non-blocking. */ enum { - NO_MORE_BUFFERS = 0x80000001, + NO_MORE_BUFFERS = 0x80000001, // same name in AudioFlinger.h, ok to be different value STOPPED = 1 }; status_t obtainBuffer(Buffer* audioBuffer, int32_t waitCount); + + /* Release an emptied buffer of "frameCount" frames for AudioFlinger to re-fill. 
*/ void releaseBuffer(Buffer* audioBuffer); @@ -302,16 +317,16 @@ public: */ ssize_t read(void* buffer, size_t size); - /* Return the amount of input frames lost in the audio driver since the last call of this + /* Return the number of input frames lost in the audio driver since the last call of this * function. Audio driver is expected to reset the value to 0 and restart counting upon * returning the current value by this function call. Such loss typically occurs when the * user space process is blocked longer than the capacity of audio driver buffers. - * Unit: the number of input audio frames + * Units: the number of input audio frames. */ unsigned int getInputFramesLost() const; private: - /* copying audio tracks is not allowed */ + /* copying audio record objects is not allowed */ AudioRecord(const AudioRecord& other); AudioRecord& operator = (const AudioRecord& other); @@ -355,7 +370,7 @@ private: bool mActive; // protected by mLock // for client callback handler - callback_t mCbf; + callback_t mCbf; // callback handler for events, or NULL void* mUserData; // for notification APIs -- cgit v1.1 From 9a08ebc8de71e260efb86cd1a04559b075b38ebc Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 2 Nov 2012 09:59:51 -0700 Subject: Fix spurious wakeup waiting for new IAudioTrack If there was a spurious wakeup while waiting for another thread to create a new IAudioTrack, we assumed that the track has been created when it might not have been. Change-Id: I5f3999b4f7a06a00aabd65a746cc7222fff396ab --- media/libmedia/AudioTrack.cpp | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 5fc9b07..ffed161 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1407,19 +1407,27 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& cblk, bool fromStart) android_atomic_or(CBLK_RESTORED_ON, &cblk->flags); cblk->cv.broadcast(); } else { - if (!(cblk->flags & CBLK_RESTORED_MSK)) { - ALOGW("dead IAudioTrack, waiting for a new one TID %d", gettid()); - mLock.unlock(); - result = cblk->cv.waitRelative(cblk->lock, milliseconds(RESTORE_TIMEOUT_MS)); - if (result == NO_ERROR) { + bool haveLogged = false; + for (;;) { + if (cblk->flags & CBLK_RESTORED_MSK) { + ALOGW("dead IAudioTrack restored"); result = mRestoreStatus; + cblk->lock.unlock(); + break; + } + if (!haveLogged) { + ALOGW("dead IAudioTrack, waiting for a new one"); + haveLogged = true; } + mLock.unlock(); + result = cblk->cv.waitRelative(cblk->lock, milliseconds(RESTORE_TIMEOUT_MS)); cblk->lock.unlock(); mLock.lock(); - } else { - ALOGW("dead IAudioTrack, already restored TID %d", gettid()); - result = mRestoreStatus; - cblk->lock.unlock(); + if (result != NO_ERROR) { + ALOGW("timed out"); + break; + } + cblk->lock.lock(); } } ALOGV("restoreTrack_l() status %d mActive %d cblk %p, old cblk %p flags %08x old flags %08x", -- cgit v1.1 From ad4e408b8ea397caadbfee85e1e39515e7e08104 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 26 Oct 2012 14:28:05 -0700 Subject: Turn off executable bit on ordinary files Change-Id: I0abea25b58fb1d03975bed9cca40f826fcd4c5e4 --- media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp | 0 media/libeffects/preprocessing/Android.mk | 0 media/libeffects/preprocessing/PreProcessing.cpp | 0 media/libstagefright/CameraSource.cpp | 0 media/libstagefright/MPEG4Writer.cpp | 0 media/libstagefright/OMXCodec.cpp | 0 media/libstagefright/SkipCutBuffer.cpp | 0 7 files changed, 0 insertions(+), 
0 deletions(-) mode change 100755 => 100644 media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp mode change 100755 => 100644 media/libeffects/preprocessing/Android.mk mode change 100755 => 100644 media/libeffects/preprocessing/PreProcessing.cpp mode change 100755 => 100644 media/libstagefright/CameraSource.cpp mode change 100755 => 100644 media/libstagefright/MPEG4Writer.cpp mode change 100755 => 100644 media/libstagefright/OMXCodec.cpp mode change 100755 => 100644 media/libstagefright/SkipCutBuffer.cpp diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp old mode 100755 new mode 100644 diff --git a/media/libeffects/preprocessing/Android.mk b/media/libeffects/preprocessing/Android.mk old mode 100755 new mode 100644 diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp old mode 100755 new mode 100644 diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp old mode 100755 new mode 100644 diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp old mode 100755 new mode 100644 diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp old mode 100755 new mode 100644 diff --git a/media/libstagefright/SkipCutBuffer.cpp b/media/libstagefright/SkipCutBuffer.cpp old mode 100755 new mode 100644 -- cgit v1.1 From a798c97386a842d06d290797ba5dce95d031332a Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Sat, 3 Nov 2012 23:37:53 -0700 Subject: improve SINC resampler performance The improvement is about 60% by just tweaking a few things to help the compiler generate better code. It turns out that inlining too much stuff manually was hurting us. 
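As a rough illustration of the change described above (an editorial sketch, not part of the patch itself): with the channel count available as a compile-time template parameter, a plain per-channel loop is enough for the compiler to unroll and schedule on its own, which is the pattern the hunks below move toward instead of hand-written per-channel stores. The sketch is minimal and self-contained; copyFrame, head and in are illustrative names under that assumption, not the actual AudioFlinger symbols.

#include <cstddef>
#include <cstdint>

// Minimal sketch: CHANNELS is a compile-time constant (1 or 2), so the compiler
// fully unrolls this loop, producing the same stores as explicit head[0]/head[1]
// code but without a runtime "if (CHANNELS == 2)" branch.
template <int CHANNELS>
static inline void copyFrame(int16_t* head, const int16_t* in, size_t inputIndex) {
    for (size_t i = 0; i < CHANNELS; i++) {
        head[i] = in[inputIndex * CHANNELS + i];
    }
}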
Change-Id: I8068f0f75051f95ac600e50ce552572dd1e8c304 --- services/audioflinger/AudioResamplerSinc.cpp | 64 +++++++++++----------------- services/audioflinger/AudioResamplerSinc.h | 2 +- 2 files changed, 26 insertions(+), 40 deletions(-) diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index 5f25760..b478e8d 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -23,6 +23,7 @@ #include #include #include +#include namespace android { // ---------------------------------------------------------------------------- @@ -305,38 +306,28 @@ void AudioResamplerSinc::resample(int32_t* out, size_t outFrameCount, // Always read-in the first samples from the input buffer int16_t* head = impulse + c->halfNumCoefs*CHANNELS; - head[0] = in[inputIndex*CHANNELS + 0]; - if (CHANNELS == 2) - head[1] = in[inputIndex*CHANNELS + 1]; + for (size_t i=0 ; i(l, r, phaseFraction, impulse); - out[outputIndex++] += 2 * mulRL(1, l, vRL); - out[outputIndex++] += 2 * mulRL(0, r, vRL); + while (CC_LIKELY(outputIndex < outputSampleCount)) { + filterCoefficient(l, r, phaseFraction, impulse, vRL); + out[outputIndex++] += l; + out[outputIndex++] += r; phaseFraction += phaseIncrement; - const uint32_t phaseIndex = phaseFraction >> kNumPhaseBits; - if (phaseIndex == 1) { - inputIndex++; - if (inputIndex >= frameCount) - break; // need a new buffer - read(impulse, phaseFraction, in, inputIndex); - } else if (phaseIndex == 2) { // maximum value - inputIndex++; - if (inputIndex >= frameCount) - break; // 0 frame available, 2 frames needed - // read first frame - read(impulse, phaseFraction, in, inputIndex); + const size_t phaseIndex = phaseFraction >> kNumPhaseBits; + for (size_t i=0 ; i= frameCount) - break; // 0 frame available, 1 frame needed - // read second frame + if (inputIndex >= frameCount) { + goto done; // need a new buffer + } read(impulse, phaseFraction, in, inputIndex); } } - +done: // if done with buffer, save samples if (inputIndex >= frameCount) { inputIndex -= frameCount; @@ -366,20 +357,20 @@ void AudioResamplerSinc::read( const uint32_t phaseIndex = phaseFraction >> kNumPhaseBits; impulse += CHANNELS; phaseFraction -= 1LU<= mRingFull) { + if (CC_UNLIKELY(impulse >= mRingFull)) { const size_t stateSize = (c->halfNumCoefs*2)*CHANNELS; memcpy(mState, mState+stateSize, sizeof(int16_t)*stateSize); impulse -= stateSize; } int16_t* head = impulse + c->halfNumCoefs*CHANNELS; - head[0] = in[inputIndex*CHANNELS + 0]; - if (CHANNELS == 2) - head[1] = in[inputIndex*CHANNELS + 1]; + for (size_t i=0 ; i void AudioResamplerSinc::filterCoefficient( - int32_t& l, int32_t& r, uint32_t phase, const int16_t *samples) + int32_t& l, int32_t& r, uint32_t phase, const int16_t *samples, uint32_t vRL) { const Constants *c = mConstants; @@ -399,20 +390,15 @@ void AudioResamplerSinc::filterCoefficient( const int32_t* coefs = mFirCoefs; const int16_t *sP = samples; const int16_t *sN = samples+CHANNELS; - for (unsigned int i=0 ; i < c->halfNumCoefs/4 ; i++) { - interpolate(l, r, coefs+indexP, lerpP, sP); - interpolate(l, r, coefs+indexN, lerpN, sN); - sP -= CHANNELS; sN += CHANNELS; coefs += 1 << c->coefsBits; - interpolate(l, r, coefs+indexP, lerpP, sP); - interpolate(l, r, coefs+indexN, lerpN, sN); - sP -= CHANNELS; sN += CHANNELS; coefs += 1 << c->coefsBits; - interpolate(l, r, coefs+indexP, lerpP, sP); - interpolate(l, r, coefs+indexN, lerpN, sN); - sP -= CHANNELS; sN += CHANNELS; coefs += 1 << c->coefsBits; + const size_t 
offset = 1 << c->coefsBits; + const size_t count = c->halfNumCoefs; + for (size_t i=0 ; i < count ; i++) { interpolate(l, r, coefs+indexP, lerpP, sP); interpolate(l, r, coefs+indexN, lerpN, sN); - sP -= CHANNELS; sN += CHANNELS; coefs += 1 << c->coefsBits; + sP -= CHANNELS; sN += CHANNELS; coefs += offset; } + l = 2 * mulRL(1, l, vRL); + r = 2 * mulRL(0, r, vRL); } template diff --git a/services/audioflinger/AudioResamplerSinc.h b/services/audioflinger/AudioResamplerSinc.h index 48bc747..3a6e356 100644 --- a/services/audioflinger/AudioResamplerSinc.h +++ b/services/audioflinger/AudioResamplerSinc.h @@ -50,7 +50,7 @@ private: template inline void filterCoefficient( - int32_t& l, int32_t& r, uint32_t phase, const int16_t *samples); + int32_t& l, int32_t& r, uint32_t phase, const int16_t *samples, uint32_t vRL); template inline void interpolate( -- cgit v1.1 From 46afbec3743f1d799f185273ff897d1f8e0175dd Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Sun, 4 Nov 2012 02:03:49 -0800 Subject: change how we store the FIR coefficients The coefficient table is now transposed and shows much better its polyphase nature: we now have a FIR per line, each line corresponding to a phase. This doesn't change at all the results produced by the filter, but allows us to make slightly better use of the data cache and improves performance a bit (although not as much as I thought it would). The main benefit is that it is the first step before we can make much larger optimizations (like using NEON). Change-Id: Iebf7695825dcbd41f25861efcaefbaa3365ecb43 --- services/audioflinger/AudioResamplerSinc.cpp | 327 ++++++++++++++++++--- services/audioflinger/AudioResamplerSinc.h | 3 +- .../audio-resampler/filter_coefficients.h | 296 ++++++++++++++++--- tools/resampler_tools/fir.cpp | 39 ++- 4 files changed, 570 insertions(+), 95 deletions(-) diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index b478e8d..8d9168b 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -18,12 +18,16 @@ //#define LOG_NDEBUG 0 #include -#include "AudioResamplerSinc.h" +#include #include + +#include #include -#include + #include -#include + +#include "AudioResamplerSinc.h" + namespace android { // ---------------------------------------------------------------------------- @@ -34,32 +38,272 @@ namespace android { * tools/resampler_tools * cmd-line: fir -l 7 -s 48000 -c 20478 */ -const int32_t AudioResamplerSinc::mFirCoefsUp[] = { - 0x6d374bc7, 0x6d35278a, 0x6d2ebafe, 0x6d24069d, 0x6d150b35, 0x6d01c9e3, 0x6cea4418, 0x6cce7b97, 0x6cae7272, 0x6c8a2b0f, 0x6c61a823, 0x6c34ecb5, 0x6c03fc1c, 0x6bced9ff, 0x6b958a54, 0x6b581163, 0x6b1673c1, 0x6ad0b652, 0x6a86de48, 0x6a38f123, 0x69e6f4b1, 0x6990ef0b, 0x6936e697, 0x68d8e206, 0x6876e855, 0x681100c9, 0x67a732f4, 0x673986ac, 0x66c80413, 0x6652b392, 0x65d99dd5, 0x655ccbd3, 0x64dc46c3, 0x64581823, 0x63d049b4, 0x6344e578, 0x62b5f5b2, 0x622384e8, 0x618d9ddc, 0x60f44b91, 0x60579947, 0x5fb79278, 0x5f1442dc, 0x5e6db665, 0x5dc3f93c, 0x5d1717c4, 0x5c671e96, 0x5bb41a80, 0x5afe1886, 0x5a4525df, 0x59894ff3, 0x58caa45b, 0x580930e1, 0x5745037c, 0x567e2a51, 0x55b4b3af, 0x54e8ae13, 0x541a281e, 0x5349309e, 0x5275d684, 0x51a028e8, 0x50c83704, 0x4fee1037, 0x4f11c3fe, 0x4e3361f7, 0x4d52f9df, 0x4c709b8e, 0x4b8c56f8, 0x4aa63c2c, 0x49be5b50, 0x48d4c4a2, 0x47e98874, 0x46fcb72d, 0x460e6148, 0x451e9750, 0x442d69de, 0x433ae99c, 0x4247273f, 0x41523389, 0x405c1f43, 0x3f64fb40, 0x3e6cd85b, 0x3d73c772, 0x3c79d968, 
0x3b7f1f23, 0x3a83a989, 0x3987897f, 0x388acfe9, 0x378d8da8, 0x368fd397, 0x3591b28b, 0x34933b50, 0x33947eab, 0x32958d55, 0x319677fa, 0x30974f3b, 0x2f9823a8, 0x2e9905c1, 0x2d9a05f4, 0x2c9b349e, 0x2b9ca203, 0x2a9e5e57, 0x29a079b2, 0x28a30416, 0x27a60d6a, 0x26a9a57b, 0x25addbf9, 0x24b2c075, 0x23b86263, 0x22bed116, 0x21c61bc0, 0x20ce516f, 0x1fd7810f, 0x1ee1b965, 0x1ded0911, 0x1cf97e8b, 0x1c072823, 0x1b1613ff, 0x1a26501b, 0x1937ea47, 0x184af025, 0x175f6f2b, 0x1675749e, 0x158d0d95, 0x14a646f6, 0x13c12d73, 0x12ddcd8f, 0x11fc3395, - 0x111c6ba0, 0x103e8192, 0x0f62811a, 0x0e8875ad, 0x0db06a89, 0x0cda6ab5, 0x0c0680fe, 0x0b34b7f5, 0x0a6519f4, 0x0997b116, 0x08cc873c, 0x0803a60a, 0x073d16e7, 0x0678e2fc, 0x05b71332, 0x04f7b037, 0x043ac276, 0x0380521c, 0x02c86715, 0x0213090c, 0x01603f6e, 0x00b01162, 0x000285d0, 0xff57a35e, 0xfeaf706f, 0xfe09f323, 0xfd673159, 0xfcc730aa, 0xfc29f670, 0xfb8f87bd, 0xfaf7e963, 0xfa631fef, 0xf9d12fab, 0xf9421c9d, 0xf8b5ea87, 0xf82c9ce7, 0xf7a636fa, 0xf722bbb5, 0xf6a22dcf, 0xf6248fb6, 0xf5a9e398, 0xf5322b61, 0xf4bd68b6, 0xf44b9cfe, 0xf3dcc959, 0xf370eea9, 0xf3080d8c, 0xf2a2265e, 0xf23f393b, 0xf1df45fd, 0xf1824c3e, 0xf1284b58, 0xf0d14267, 0xf07d3043, 0xf02c138a, 0xefddea9a, 0xef92b393, 0xef4a6c58, 0xef051290, 0xeec2a3a3, 0xee831cc3, 0xee467ae1, 0xee0cbab9, 0xedd5d8ca, 0xeda1d15c, 0xed70a07d, 0xed424205, 0xed16b196, 0xecedea99, 0xecc7e845, 0xeca4a59b, 0xec841d68, 0xec664a48, 0xec4b26a2, 0xec32acb0, 0xec1cd677, 0xec099dcf, 0xebf8fc64, 0xebeaebaf, 0xebdf6500, 0xebd6617b, 0xebcfda19, 0xebcbc7a7, 0xebca22cc, 0xebcae405, 0xebce03aa, 0xebd379eb, 0xebdb3ed5, 0xebe54a4f, 0xebf1941f, 0xec0013e8, 0xec10c12c, 0xec23934f, 0xec388194, 0xec4f8322, 0xec688f02, 0xec839c22, 0xeca0a156, 0xecbf9558, 0xece06ecb, 0xed032439, 0xed27ac16, 0xed4dfcc2, 0xed760c88, 0xed9fd1a2, 0xedcb4237, 0xedf8545b, 0xee26fe17, 0xee573562, 0xee88f026, 0xeebc2444, 0xeef0c78d, 0xef26cfca, 0xef5e32bd, 0xef96e61c, 0xefd0df9a, 0xf00c14e1, 0xf0487b98, 0xf0860962, 0xf0c4b3e0, 0xf10470b0, 0xf1453571, 0xf186f7c0, 0xf1c9ad40, 0xf20d4b92, 0xf251c85d, 0xf297194d, 0xf2dd3411, - 0xf3240e61, 0xf36b9dfd, 0xf3b3d8ac, 0xf3fcb43e, 0xf4462690, 0xf4902587, 0xf4daa718, 0xf525a143, 0xf5710a17, 0xf5bcd7b1, 0xf609003f, 0xf6557a00, 0xf6a23b44, 0xf6ef3a6e, 0xf73c6df4, 0xf789cc61, 0xf7d74c53, 0xf824e480, 0xf8728bb3, 0xf8c038d0, 0xf90de2d1, 0xf95b80cb, 0xf9a909ea, 0xf9f67577, 0xfa43bad2, 0xfa90d17b, 0xfaddb10c, 0xfb2a513b, 0xfb76a9dd, 0xfbc2b2e4, 0xfc0e6461, 0xfc59b685, 0xfca4a19f, 0xfcef1e20, 0xfd392498, 0xfd82adba, 0xfdcbb25a, 0xfe142b6e, 0xfe5c120f, 0xfea35f79, 0xfeea0d0c, 0xff30144a, 0xff756edc, 0xffba168d, 0xfffe054e, 0x00413536, 0x0083a081, 0x00c54190, 0x010612eb, 0x01460f41, 0x01853165, 0x01c37452, 0x0200d32c, 0x023d493c, 0x0278d1f2, 0x02b368e6, 0x02ed09d7, 0x0325b0ad, 0x035d5977, 0x0394006a, 0x03c9a1e5, 0x03fe3a6f, 0x0431c6b5, 0x0464438c, 0x0495adf2, 0x04c6030d, 0x04f54029, 0x052362ba, 0x0550685d, 0x057c4ed4, 0x05a7140b, 0x05d0b612, 0x05f93324, 0x0620899e, 0x0646b808, 0x066bbd0d, 0x068f9781, 0x06b2465b, 0x06d3c8bb, 0x06f41de3, 0x0713453d, 0x07313e56, 0x074e08e0, 0x0769a4b2, 0x078411c7, 0x079d503b, 0x07b56051, 0x07cc426c, 0x07e1f712, 0x07f67eec, 0x0809dac3, 0x081c0b84, 0x082d1239, 0x083cf010, 0x084ba654, 0x08593671, 0x0865a1f1, 0x0870ea7e, 0x087b11de, 0x088419f6, 0x088c04c8, 0x0892d470, 0x08988b2a, 0x089d2b4a, 0x08a0b740, 0x08a33196, 0x08a49cf0, 0x08a4fc0d, 0x08a451c0, 0x08a2a0f8, 0x089fecbb, 0x089c3824, 0x08978666, 0x0891dac8, 0x088b38a9, 0x0883a378, 0x087b1ebc, 0x0871ae0d, 0x08675516, 0x085c1794, 0x084ff957, 0x0842fe3d, 0x08352a35, 
0x0826813e, 0x08170767, 0x0806c0cb, 0x07f5b193, 0x07e3ddf7, - 0x07d14a38, 0x07bdfaa5, 0x07a9f399, 0x07953976, 0x077fd0ac, 0x0769bdaf, 0x07530501, 0x073bab28, 0x0723b4b4, 0x070b2639, 0x06f20453, 0x06d853a2, 0x06be18cd, 0x06a3587e, 0x06881761, 0x066c5a27, 0x06502583, 0x06337e2a, 0x061668d2, 0x05f8ea30, 0x05db06fc, 0x05bcc3ed, 0x059e25b5, 0x057f310a, 0x055fea9d, 0x0540571a, 0x05207b2f, 0x05005b82, 0x04dffcb6, 0x04bf6369, 0x049e9433, 0x047d93a8, 0x045c6654, 0x043b10bd, 0x04199760, 0x03f7feb4, 0x03d64b27, 0x03b4811d, 0x0392a4f4, 0x0370bafc, 0x034ec77f, 0x032ccebb, 0x030ad4e1, 0x02e8de19, 0x02c6ee7f, 0x02a50a22, 0x02833506, 0x02617321, 0x023fc85c, 0x021e3891, 0x01fcc78f, 0x01db7914, 0x01ba50d2, 0x0199526b, 0x01788170, 0x0157e166, 0x013775bf, 0x011741df, 0x00f7491a, 0x00d78eb3, 0x00b815da, 0x0098e1b3, 0x0079f54c, 0x005b53a4, 0x003cffa9, 0x001efc35, 0x00014c12, 0xffe3f1f7, 0xffc6f08a, 0xffaa4a5d, 0xff8e01f1, 0xff7219b3, 0xff5693fe, 0xff3b731b, 0xff20b93e, 0xff066889, 0xfeec830d, 0xfed30ac5, 0xfeba0199, 0xfea16960, 0xfe8943dc, 0xfe7192bd, 0xfe5a579d, 0xfe439407, 0xfe2d496f, 0xfe177937, 0xfe0224b0, 0xfded4d13, 0xfdd8f38b, 0xfdc5192d, 0xfdb1befc, 0xfd9ee5e7, 0xfd8c8ecc, 0xfd7aba74, 0xfd696998, 0xfd589cdc, 0xfd4854d3, 0xfd3891fd, 0xfd2954c8, 0xfd1a9d91, 0xfd0c6ca2, 0xfcfec233, 0xfcf19e6b, 0xfce50161, 0xfcd8eb17, 0xfccd5b82, 0xfcc25285, 0xfcb7cff0, 0xfcadd386, 0xfca45cf7, 0xfc9b6be5, 0xfc92ffe1, 0xfc8b186d, 0xfc83b4fc, 0xfc7cd4f0, 0xfc76779e, 0xfc709c4d, 0xfc6b4233, 0xfc66687a, 0xfc620e3d, 0xfc5e328c, 0xfc5ad465, 0xfc57f2be, 0xfc558c7c, 0xfc53a07b, 0xfc522d88, 0xfc513266, 0xfc50adcc, - 0xfc509e64, 0xfc5102d0, 0xfc51d9a6, 0xfc53216f, 0xfc54d8ae, 0xfc56fdda, 0xfc598f60, 0xfc5c8ba5, 0xfc5ff105, 0xfc63bdd3, 0xfc67f05a, 0xfc6c86dd, 0xfc717f97, 0xfc76d8bc, 0xfc7c9079, 0xfc82a4f4, 0xfc89144d, 0xfc8fdc9f, 0xfc96fbfc, 0xfc9e7074, 0xfca63810, 0xfcae50d6, 0xfcb6b8c4, 0xfcbf6dd8, 0xfcc86e09, 0xfcd1b74c, 0xfcdb4793, 0xfce51ccb, 0xfcef34e1, 0xfcf98dbe, 0xfd04254a, 0xfd0ef969, 0xfd1a0801, 0xfd254ef4, 0xfd30cc24, 0xfd3c7d73, 0xfd4860c2, 0xfd5473f3, 0xfd60b4e7, 0xfd6d2180, 0xfd79b7a1, 0xfd86752e, 0xfd93580d, 0xfda05e23, 0xfdad855b, 0xfdbacb9e, 0xfdc82edb, 0xfdd5ad01, 0xfde34403, 0xfdf0f1d6, 0xfdfeb475, 0xfe0c89db, 0xfe1a7009, 0xfe286505, 0xfe3666d5, 0xfe447389, 0xfe528931, 0xfe60a5e5, 0xfe6ec7c0, 0xfe7cece2, 0xfe8b1373, 0xfe99399f, 0xfea75d97, 0xfeb57d92, 0xfec397cf, 0xfed1aa92, 0xfedfb425, 0xfeedb2da, 0xfefba508, 0xff09890f, 0xff175d53, 0xff252042, 0xff32d04f, 0xff406bf8, 0xff4df1be, 0xff5b602c, 0xff68b5d5, 0xff75f153, 0xff831148, 0xff90145e, 0xff9cf947, 0xffa9bebe, 0xffb66386, 0xffc2e669, 0xffcf463a, 0xffdb81d6, 0xffe79820, 0xfff38806, 0xffff507b, 0x000af07f, 0x00166718, 0x0021b355, 0x002cd44d, 0x0037c922, 0x004290fc, 0x004d2b0e, 0x00579691, 0x0061d2ca, 0x006bdf05, 0x0075ba95, 0x007f64da, 0x0088dd38, 0x0092231e, 0x009b3605, 0x00a4156b, 0x00acc0da, 0x00b537e1, 0x00bd7a1c, 0x00c5872a, 0x00cd5eb7, 0x00d50075, 0x00dc6c1e, 0x00e3a175, 0x00eaa045, 0x00f16861, 0x00f7f9a3, 0x00fe53ef, 0x0104772e, 0x010a6353, 0x01101858, 0x0115963d, 0x011add0b, 0x011fecd3, 0x0124c5ab, 0x012967b1, 0x012dd30a, 0x013207e4, 0x01360670, - 0x0139cee9, 0x013d618d, 0x0140bea5, 0x0143e67c, 0x0146d965, 0x014997bb, 0x014c21db, 0x014e782a, 0x01509b14, 0x01528b08, 0x0154487b, 0x0155d3e8, 0x01572dcf, 0x015856b6, 0x01594f25, 0x015a17ab, 0x015ab0db, 0x015b1b4e, 0x015b579e, 0x015b666c, 0x015b485b, 0x015afe14, 0x015a8843, 0x0159e796, 0x01591cc0, 0x01582878, 0x01570b77, 0x0155c678, 0x01545a3c, 0x0152c783, 0x01510f13, 0x014f31b2, 0x014d3029, 0x014b0b45, 
0x0148c3d2, 0x01465a9f, 0x0143d07f, 0x01412643, 0x013e5cc0, 0x013b74ca, 0x01386f3a, 0x01354ce7, 0x01320ea9, 0x012eb55a, 0x012b41d3, 0x0127b4f1, 0x01240f8e, 0x01205285, 0x011c7eb2, 0x011894f0, 0x0114961b, 0x0110830f, 0x010c5ca6, 0x010823ba, 0x0103d927, 0x00ff7dc4, 0x00fb126b, 0x00f697f3, 0x00f20f32, 0x00ed78ff, 0x00e8d62d, 0x00e4278f, 0x00df6df7, 0x00daaa34, 0x00d5dd16, 0x00d10769, 0x00cc29f7, 0x00c7458a, 0x00c25ae8, 0x00bd6ad7, 0x00b87619, 0x00b37d70, 0x00ae8198, 0x00a9834e, 0x00a4834c, 0x009f8249, 0x009a80f8, 0x0095800c, 0x00908034, 0x008b821b, 0x0086866b, 0x00818dcb, 0x007c98de, 0x0077a845, 0x0072bc9d, 0x006dd680, 0x0068f687, 0x00641d44, 0x005f4b4a, 0x005a8125, 0x0055bf60, 0x00510682, 0x004c570f, 0x0047b186, 0x00431666, 0x003e8628, 0x003a0141, 0x00358824, 0x00311b41, 0x002cbb03, 0x002867d2, 0x00242213, 0x001fea27, 0x001bc06b, 0x0017a53b, 0x001398ec, 0x000f9bd2, 0x000bae3c, 0x0007d075, 0x000402c8, 0x00004579, 0xfffc98c9, 0xfff8fcf7, 0xfff5723d, 0xfff1f8d2, 0xffee90eb, 0xffeb3ab8, 0xffe7f666, 0xffe4c41e, 0xffe1a408, 0xffde9646, 0xffdb9af8, 0xffd8b23b, 0xffd5dc28, 0xffd318d6, 0xffd06858, 0xffcdcabe, 0xffcb4014, - 0xffc8c866, 0xffc663b9, 0xffc41212, 0xffc1d373, 0xffbfa7d9, 0xffbd8f40, 0xffbb89a1, 0xffb996f3, 0xffb7b728, 0xffb5ea31, 0xffb42ffc, 0xffb28876, 0xffb0f388, 0xffaf7118, 0xffae010b, 0xffaca344, 0xffab57a1, 0xffaa1e02, 0xffa8f641, 0xffa7e039, 0xffa6dbc0, 0xffa5e8ad, 0xffa506d2, 0xffa43603, 0xffa3760e, 0xffa2c6c2, 0xffa227ec, 0xffa19957, 0xffa11acb, 0xffa0ac11, 0xffa04cf0, 0xff9ffd2c, 0xff9fbc89, 0xff9f8ac9, 0xff9f67ae, 0xff9f52f7, 0xff9f4c65, 0xff9f53b4, 0xff9f68a1, 0xff9f8ae9, 0xff9fba47, 0xff9ff674, 0xffa03f2b, 0xffa09425, 0xffa0f519, 0xffa161bf, 0xffa1d9cf, 0xffa25cfe, 0xffa2eb04, 0xffa38395, 0xffa42668, 0xffa4d332, 0xffa589a6, 0xffa6497c, 0xffa71266, 0xffa7e41a, 0xffa8be4c, 0xffa9a0b1, 0xffaa8afe, 0xffab7ce7, 0xffac7621, 0xffad7662, 0xffae7d5f, 0xffaf8acd, 0xffb09e63, 0xffb1b7d8, 0xffb2d6e1, 0xffb3fb37, 0xffb52490, 0xffb652a7, 0xffb78533, 0xffb8bbed, 0xffb9f691, 0xffbb34d8, 0xffbc767f, 0xffbdbb42, 0xffbf02dd, 0xffc04d0f, 0xffc19996, 0xffc2e832, 0xffc438a3, 0xffc58aaa, 0xffc6de09, 0xffc83285, 0xffc987e0, 0xffcadde1, 0xffcc344c, 0xffcd8aeb, 0xffcee183, 0xffd037e0, 0xffd18dcc, 0xffd2e311, 0xffd4377d, 0xffd58ade, 0xffd6dd02, 0xffd82dba, 0xffd97cd6, 0xffdaca2a, 0xffdc1588, 0xffdd5ec6, 0xffdea5bb, 0xffdfea3c, 0xffe12c22, 0xffe26b48, 0xffe3a788, 0xffe4e0bf, 0xffe616c8, 0xffe74984, 0xffe878d3, 0xffe9a494, 0xffeaccaa, 0xffebf0fa, 0xffed1166, 0xffee2dd7, 0xffef4632, 0xfff05a60, 0xfff16a4a, 0xfff275db, 0xfff37d00, 0xfff47fa5, 0xfff57db8, 0xfff67729, 0xfff76be9, 0xfff85be8, 0xfff9471b, 0xfffa2d74, 0xfffb0ee9, 0xfffbeb70, - 0xfffcc300, 0xfffd9592, 0xfffe631e, 0xffff2b9f, 0xffffef10, 0x0000ad6e, 0x000166b6, 0x00021ae5, 0x0002c9fd, 0x000373fb, 0x000418e2, 0x0004b8b3, 0x00055371, 0x0005e921, 0x000679c5, 0x00070564, 0x00078c04, 0x00080dab, 0x00088a62, 0x00090230, 0x0009751e, 0x0009e337, 0x000a4c85, 0x000ab112, 0x000b10ec, 0x000b6c1d, 0x000bc2b3, 0x000c14bb, 0x000c6244, 0x000cab5c, 0x000cf012, 0x000d3075, 0x000d6c97, 0x000da486, 0x000dd854, 0x000e0812, 0x000e33d3, 0x000e5ba7, 0x000e7fa1, 0x000e9fd5, 0x000ebc54, 0x000ed533, 0x000eea84, 0x000efc5c, 0x000f0ace, 0x000f15ef, 0x000f1dd2, 0x000f228d, 0x000f2434, 0x000f22dc, 0x000f1e99, 0x000f1781, 0x000f0da8, 0x000f0125, 0x000ef20b, 0x000ee070, 0x000ecc69, 0x000eb60b, 0x000e9d6b, 0x000e829e, 0x000e65ba, 0x000e46d3, 0x000e25fd, 0x000e034f, 0x000ddedb, 0x000db8b7, 0x000d90f6, 0x000d67ae, 0x000d3cf1, 0x000d10d5, 0x000ce36b, 0x000cb4c8, 0x000c84ff, 
0x000c5422, 0x000c2245, 0x000bef79, 0x000bbbd2, 0x000b8760, 0x000b5235, 0x000b1c64, 0x000ae5fc, 0x000aaf0f, 0x000a77ac, 0x000a3fe5, 0x000a07c9, 0x0009cf67, 0x000996ce, 0x00095e0e, 0x00092535, 0x0008ec50, 0x0008b36e, 0x00087a9c, 0x000841e8, 0x0008095d, 0x0007d108, 0x000798f5, 0x00076130, 0x000729c4, 0x0006f2bb, 0x0006bc21, 0x000685ff, 0x0006505f, 0x00061b4b, 0x0005e6cb, 0x0005b2e8, 0x00057faa, 0x00054d1a, 0x00051b3e, 0x0004ea1d, 0x0004b9c0, 0x00048a2b, 0x00045b65, 0x00042d74, 0x0004005e, 0x0003d426, 0x0003a8d2, 0x00037e65, 0x000354e5, 0x00032c54, 0x000304b7, 0x0002de0e, 0x0002b85f, 0x000293aa, 0x00026ff2, 0x00024d39, 0x00022b7f, 0x00020ac7, 0x0001eb10, - 0x00000000 // this one is needed for lerping the last coefficient +const int32_t AudioResamplerSinc::mFirCoefsUp[] __attribute__ ((aligned (32))) = { + 0x6d374bc7, 0x111c6ba0, 0xf3240e61, 0x07d14a38, 0xfc509e64, 0x0139cee9, 0xffc8c866, 0xfffcc300, + 0x6d35278a, 0x103e8192, 0xf36b9dfd, 0x07bdfaa5, 0xfc5102d0, 0x013d618d, 0xffc663b9, 0xfffd9592, + 0x6d2ebafe, 0x0f62811a, 0xf3b3d8ac, 0x07a9f399, 0xfc51d9a6, 0x0140bea5, 0xffc41212, 0xfffe631e, + 0x6d24069d, 0x0e8875ad, 0xf3fcb43e, 0x07953976, 0xfc53216f, 0x0143e67c, 0xffc1d373, 0xffff2b9f, + 0x6d150b35, 0x0db06a89, 0xf4462690, 0x077fd0ac, 0xfc54d8ae, 0x0146d965, 0xffbfa7d9, 0xffffef10, + 0x6d01c9e3, 0x0cda6ab5, 0xf4902587, 0x0769bdaf, 0xfc56fdda, 0x014997bb, 0xffbd8f40, 0x0000ad6e, + 0x6cea4418, 0x0c0680fe, 0xf4daa718, 0x07530501, 0xfc598f60, 0x014c21db, 0xffbb89a1, 0x000166b6, + 0x6cce7b97, 0x0b34b7f5, 0xf525a143, 0x073bab28, 0xfc5c8ba5, 0x014e782a, 0xffb996f3, 0x00021ae5, + 0x6cae7272, 0x0a6519f4, 0xf5710a17, 0x0723b4b4, 0xfc5ff105, 0x01509b14, 0xffb7b728, 0x0002c9fd, + 0x6c8a2b0f, 0x0997b116, 0xf5bcd7b1, 0x070b2639, 0xfc63bdd3, 0x01528b08, 0xffb5ea31, 0x000373fb, + 0x6c61a823, 0x08cc873c, 0xf609003f, 0x06f20453, 0xfc67f05a, 0x0154487b, 0xffb42ffc, 0x000418e2, + 0x6c34ecb5, 0x0803a60a, 0xf6557a00, 0x06d853a2, 0xfc6c86dd, 0x0155d3e8, 0xffb28876, 0x0004b8b3, + 0x6c03fc1c, 0x073d16e7, 0xf6a23b44, 0x06be18cd, 0xfc717f97, 0x01572dcf, 0xffb0f388, 0x00055371, + 0x6bced9ff, 0x0678e2fc, 0xf6ef3a6e, 0x06a3587e, 0xfc76d8bc, 0x015856b6, 0xffaf7118, 0x0005e921, + 0x6b958a54, 0x05b71332, 0xf73c6df4, 0x06881761, 0xfc7c9079, 0x01594f25, 0xffae010b, 0x000679c5, + 0x6b581163, 0x04f7b037, 0xf789cc61, 0x066c5a27, 0xfc82a4f4, 0x015a17ab, 0xffaca344, 0x00070564, + 0x6b1673c1, 0x043ac276, 0xf7d74c53, 0x06502583, 0xfc89144d, 0x015ab0db, 0xffab57a1, 0x00078c04, + 0x6ad0b652, 0x0380521c, 0xf824e480, 0x06337e2a, 0xfc8fdc9f, 0x015b1b4e, 0xffaa1e02, 0x00080dab, + 0x6a86de48, 0x02c86715, 0xf8728bb3, 0x061668d2, 0xfc96fbfc, 0x015b579e, 0xffa8f641, 0x00088a62, + 0x6a38f123, 0x0213090c, 0xf8c038d0, 0x05f8ea30, 0xfc9e7074, 0x015b666c, 0xffa7e039, 0x00090230, + 0x69e6f4b1, 0x01603f6e, 0xf90de2d1, 0x05db06fc, 0xfca63810, 0x015b485b, 0xffa6dbc0, 0x0009751e, + 0x6990ef0b, 0x00b01162, 0xf95b80cb, 0x05bcc3ed, 0xfcae50d6, 0x015afe14, 0xffa5e8ad, 0x0009e337, + 0x6936e697, 0x000285d0, 0xf9a909ea, 0x059e25b5, 0xfcb6b8c4, 0x015a8843, 0xffa506d2, 0x000a4c85, + 0x68d8e206, 0xff57a35e, 0xf9f67577, 0x057f310a, 0xfcbf6dd8, 0x0159e796, 0xffa43603, 0x000ab112, + 0x6876e855, 0xfeaf706f, 0xfa43bad2, 0x055fea9d, 0xfcc86e09, 0x01591cc0, 0xffa3760e, 0x000b10ec, + 0x681100c9, 0xfe09f323, 0xfa90d17b, 0x0540571a, 0xfcd1b74c, 0x01582878, 0xffa2c6c2, 0x000b6c1d, + 0x67a732f4, 0xfd673159, 0xfaddb10c, 0x05207b2f, 0xfcdb4793, 0x01570b77, 0xffa227ec, 0x000bc2b3, + 0x673986ac, 0xfcc730aa, 0xfb2a513b, 0x05005b82, 0xfce51ccb, 0x0155c678, 0xffa19957, 
0x000c14bb, + 0x66c80413, 0xfc29f670, 0xfb76a9dd, 0x04dffcb6, 0xfcef34e1, 0x01545a3c, 0xffa11acb, 0x000c6244, + 0x6652b392, 0xfb8f87bd, 0xfbc2b2e4, 0x04bf6369, 0xfcf98dbe, 0x0152c783, 0xffa0ac11, 0x000cab5c, + 0x65d99dd5, 0xfaf7e963, 0xfc0e6461, 0x049e9433, 0xfd04254a, 0x01510f13, 0xffa04cf0, 0x000cf012, + 0x655ccbd3, 0xfa631fef, 0xfc59b685, 0x047d93a8, 0xfd0ef969, 0x014f31b2, 0xff9ffd2c, 0x000d3075, + 0x64dc46c3, 0xf9d12fab, 0xfca4a19f, 0x045c6654, 0xfd1a0801, 0x014d3029, 0xff9fbc89, 0x000d6c97, + 0x64581823, 0xf9421c9d, 0xfcef1e20, 0x043b10bd, 0xfd254ef4, 0x014b0b45, 0xff9f8ac9, 0x000da486, + 0x63d049b4, 0xf8b5ea87, 0xfd392498, 0x04199760, 0xfd30cc24, 0x0148c3d2, 0xff9f67ae, 0x000dd854, + 0x6344e578, 0xf82c9ce7, 0xfd82adba, 0x03f7feb4, 0xfd3c7d73, 0x01465a9f, 0xff9f52f7, 0x000e0812, + 0x62b5f5b2, 0xf7a636fa, 0xfdcbb25a, 0x03d64b27, 0xfd4860c2, 0x0143d07f, 0xff9f4c65, 0x000e33d3, + 0x622384e8, 0xf722bbb5, 0xfe142b6e, 0x03b4811d, 0xfd5473f3, 0x01412643, 0xff9f53b4, 0x000e5ba7, + 0x618d9ddc, 0xf6a22dcf, 0xfe5c120f, 0x0392a4f4, 0xfd60b4e7, 0x013e5cc0, 0xff9f68a1, 0x000e7fa1, + 0x60f44b91, 0xf6248fb6, 0xfea35f79, 0x0370bafc, 0xfd6d2180, 0x013b74ca, 0xff9f8ae9, 0x000e9fd5, + 0x60579947, 0xf5a9e398, 0xfeea0d0c, 0x034ec77f, 0xfd79b7a1, 0x01386f3a, 0xff9fba47, 0x000ebc54, + 0x5fb79278, 0xf5322b61, 0xff30144a, 0x032ccebb, 0xfd86752e, 0x01354ce7, 0xff9ff674, 0x000ed533, + 0x5f1442dc, 0xf4bd68b6, 0xff756edc, 0x030ad4e1, 0xfd93580d, 0x01320ea9, 0xffa03f2b, 0x000eea84, + 0x5e6db665, 0xf44b9cfe, 0xffba168d, 0x02e8de19, 0xfda05e23, 0x012eb55a, 0xffa09425, 0x000efc5c, + 0x5dc3f93c, 0xf3dcc959, 0xfffe054e, 0x02c6ee7f, 0xfdad855b, 0x012b41d3, 0xffa0f519, 0x000f0ace, + 0x5d1717c4, 0xf370eea9, 0x00413536, 0x02a50a22, 0xfdbacb9e, 0x0127b4f1, 0xffa161bf, 0x000f15ef, + 0x5c671e96, 0xf3080d8c, 0x0083a081, 0x02833506, 0xfdc82edb, 0x01240f8e, 0xffa1d9cf, 0x000f1dd2, + 0x5bb41a80, 0xf2a2265e, 0x00c54190, 0x02617321, 0xfdd5ad01, 0x01205285, 0xffa25cfe, 0x000f228d, + 0x5afe1886, 0xf23f393b, 0x010612eb, 0x023fc85c, 0xfde34403, 0x011c7eb2, 0xffa2eb04, 0x000f2434, + 0x5a4525df, 0xf1df45fd, 0x01460f41, 0x021e3891, 0xfdf0f1d6, 0x011894f0, 0xffa38395, 0x000f22dc, + 0x59894ff3, 0xf1824c3e, 0x01853165, 0x01fcc78f, 0xfdfeb475, 0x0114961b, 0xffa42668, 0x000f1e99, + 0x58caa45b, 0xf1284b58, 0x01c37452, 0x01db7914, 0xfe0c89db, 0x0110830f, 0xffa4d332, 0x000f1781, + 0x580930e1, 0xf0d14267, 0x0200d32c, 0x01ba50d2, 0xfe1a7009, 0x010c5ca6, 0xffa589a6, 0x000f0da8, + 0x5745037c, 0xf07d3043, 0x023d493c, 0x0199526b, 0xfe286505, 0x010823ba, 0xffa6497c, 0x000f0125, + 0x567e2a51, 0xf02c138a, 0x0278d1f2, 0x01788170, 0xfe3666d5, 0x0103d927, 0xffa71266, 0x000ef20b, + 0x55b4b3af, 0xefddea9a, 0x02b368e6, 0x0157e166, 0xfe447389, 0x00ff7dc4, 0xffa7e41a, 0x000ee070, + 0x54e8ae13, 0xef92b393, 0x02ed09d7, 0x013775bf, 0xfe528931, 0x00fb126b, 0xffa8be4c, 0x000ecc69, + 0x541a281e, 0xef4a6c58, 0x0325b0ad, 0x011741df, 0xfe60a5e5, 0x00f697f3, 0xffa9a0b1, 0x000eb60b, + 0x5349309e, 0xef051290, 0x035d5977, 0x00f7491a, 0xfe6ec7c0, 0x00f20f32, 0xffaa8afe, 0x000e9d6b, + 0x5275d684, 0xeec2a3a3, 0x0394006a, 0x00d78eb3, 0xfe7cece2, 0x00ed78ff, 0xffab7ce7, 0x000e829e, + 0x51a028e8, 0xee831cc3, 0x03c9a1e5, 0x00b815da, 0xfe8b1373, 0x00e8d62d, 0xffac7621, 0x000e65ba, + 0x50c83704, 0xee467ae1, 0x03fe3a6f, 0x0098e1b3, 0xfe99399f, 0x00e4278f, 0xffad7662, 0x000e46d3, + 0x4fee1037, 0xee0cbab9, 0x0431c6b5, 0x0079f54c, 0xfea75d97, 0x00df6df7, 0xffae7d5f, 0x000e25fd, + 0x4f11c3fe, 0xedd5d8ca, 0x0464438c, 0x005b53a4, 0xfeb57d92, 0x00daaa34, 0xffaf8acd, 0x000e034f, + 0x4e3361f7, 
0xeda1d15c, 0x0495adf2, 0x003cffa9, 0xfec397cf, 0x00d5dd16, 0xffb09e63, 0x000ddedb, + 0x4d52f9df, 0xed70a07d, 0x04c6030d, 0x001efc35, 0xfed1aa92, 0x00d10769, 0xffb1b7d8, 0x000db8b7, + 0x4c709b8e, 0xed424205, 0x04f54029, 0x00014c12, 0xfedfb425, 0x00cc29f7, 0xffb2d6e1, 0x000d90f6, + 0x4b8c56f8, 0xed16b196, 0x052362ba, 0xffe3f1f7, 0xfeedb2da, 0x00c7458a, 0xffb3fb37, 0x000d67ae, + 0x4aa63c2c, 0xecedea99, 0x0550685d, 0xffc6f08a, 0xfefba508, 0x00c25ae8, 0xffb52490, 0x000d3cf1, + 0x49be5b50, 0xecc7e845, 0x057c4ed4, 0xffaa4a5d, 0xff09890f, 0x00bd6ad7, 0xffb652a7, 0x000d10d5, + 0x48d4c4a2, 0xeca4a59b, 0x05a7140b, 0xff8e01f1, 0xff175d53, 0x00b87619, 0xffb78533, 0x000ce36b, + 0x47e98874, 0xec841d68, 0x05d0b612, 0xff7219b3, 0xff252042, 0x00b37d70, 0xffb8bbed, 0x000cb4c8, + 0x46fcb72d, 0xec664a48, 0x05f93324, 0xff5693fe, 0xff32d04f, 0x00ae8198, 0xffb9f691, 0x000c84ff, + 0x460e6148, 0xec4b26a2, 0x0620899e, 0xff3b731b, 0xff406bf8, 0x00a9834e, 0xffbb34d8, 0x000c5422, + 0x451e9750, 0xec32acb0, 0x0646b808, 0xff20b93e, 0xff4df1be, 0x00a4834c, 0xffbc767f, 0x000c2245, + 0x442d69de, 0xec1cd677, 0x066bbd0d, 0xff066889, 0xff5b602c, 0x009f8249, 0xffbdbb42, 0x000bef79, + 0x433ae99c, 0xec099dcf, 0x068f9781, 0xfeec830d, 0xff68b5d5, 0x009a80f8, 0xffbf02dd, 0x000bbbd2, + 0x4247273f, 0xebf8fc64, 0x06b2465b, 0xfed30ac5, 0xff75f153, 0x0095800c, 0xffc04d0f, 0x000b8760, + 0x41523389, 0xebeaebaf, 0x06d3c8bb, 0xfeba0199, 0xff831148, 0x00908034, 0xffc19996, 0x000b5235, + 0x405c1f43, 0xebdf6500, 0x06f41de3, 0xfea16960, 0xff90145e, 0x008b821b, 0xffc2e832, 0x000b1c64, + 0x3f64fb40, 0xebd6617b, 0x0713453d, 0xfe8943dc, 0xff9cf947, 0x0086866b, 0xffc438a3, 0x000ae5fc, + 0x3e6cd85b, 0xebcfda19, 0x07313e56, 0xfe7192bd, 0xffa9bebe, 0x00818dcb, 0xffc58aaa, 0x000aaf0f, + 0x3d73c772, 0xebcbc7a7, 0x074e08e0, 0xfe5a579d, 0xffb66386, 0x007c98de, 0xffc6de09, 0x000a77ac, + 0x3c79d968, 0xebca22cc, 0x0769a4b2, 0xfe439407, 0xffc2e669, 0x0077a845, 0xffc83285, 0x000a3fe5, + 0x3b7f1f23, 0xebcae405, 0x078411c7, 0xfe2d496f, 0xffcf463a, 0x0072bc9d, 0xffc987e0, 0x000a07c9, + 0x3a83a989, 0xebce03aa, 0x079d503b, 0xfe177937, 0xffdb81d6, 0x006dd680, 0xffcadde1, 0x0009cf67, + 0x3987897f, 0xebd379eb, 0x07b56051, 0xfe0224b0, 0xffe79820, 0x0068f687, 0xffcc344c, 0x000996ce, + 0x388acfe9, 0xebdb3ed5, 0x07cc426c, 0xfded4d13, 0xfff38806, 0x00641d44, 0xffcd8aeb, 0x00095e0e, + 0x378d8da8, 0xebe54a4f, 0x07e1f712, 0xfdd8f38b, 0xffff507b, 0x005f4b4a, 0xffcee183, 0x00092535, + 0x368fd397, 0xebf1941f, 0x07f67eec, 0xfdc5192d, 0x000af07f, 0x005a8125, 0xffd037e0, 0x0008ec50, + 0x3591b28b, 0xec0013e8, 0x0809dac3, 0xfdb1befc, 0x00166718, 0x0055bf60, 0xffd18dcc, 0x0008b36e, + 0x34933b50, 0xec10c12c, 0x081c0b84, 0xfd9ee5e7, 0x0021b355, 0x00510682, 0xffd2e311, 0x00087a9c, + 0x33947eab, 0xec23934f, 0x082d1239, 0xfd8c8ecc, 0x002cd44d, 0x004c570f, 0xffd4377d, 0x000841e8, + 0x32958d55, 0xec388194, 0x083cf010, 0xfd7aba74, 0x0037c922, 0x0047b186, 0xffd58ade, 0x0008095d, + 0x319677fa, 0xec4f8322, 0x084ba654, 0xfd696998, 0x004290fc, 0x00431666, 0xffd6dd02, 0x0007d108, + 0x30974f3b, 0xec688f02, 0x08593671, 0xfd589cdc, 0x004d2b0e, 0x003e8628, 0xffd82dba, 0x000798f5, + 0x2f9823a8, 0xec839c22, 0x0865a1f1, 0xfd4854d3, 0x00579691, 0x003a0141, 0xffd97cd6, 0x00076130, + 0x2e9905c1, 0xeca0a156, 0x0870ea7e, 0xfd3891fd, 0x0061d2ca, 0x00358824, 0xffdaca2a, 0x000729c4, + 0x2d9a05f4, 0xecbf9558, 0x087b11de, 0xfd2954c8, 0x006bdf05, 0x00311b41, 0xffdc1588, 0x0006f2bb, + 0x2c9b349e, 0xece06ecb, 0x088419f6, 0xfd1a9d91, 0x0075ba95, 0x002cbb03, 0xffdd5ec6, 0x0006bc21, + 0x2b9ca203, 0xed032439, 0x088c04c8, 
0xfd0c6ca2, 0x007f64da, 0x002867d2, 0xffdea5bb, 0x000685ff, + 0x2a9e5e57, 0xed27ac16, 0x0892d470, 0xfcfec233, 0x0088dd38, 0x00242213, 0xffdfea3c, 0x0006505f, + 0x29a079b2, 0xed4dfcc2, 0x08988b2a, 0xfcf19e6b, 0x0092231e, 0x001fea27, 0xffe12c22, 0x00061b4b, + 0x28a30416, 0xed760c88, 0x089d2b4a, 0xfce50161, 0x009b3605, 0x001bc06b, 0xffe26b48, 0x0005e6cb, + 0x27a60d6a, 0xed9fd1a2, 0x08a0b740, 0xfcd8eb17, 0x00a4156b, 0x0017a53b, 0xffe3a788, 0x0005b2e8, + 0x26a9a57b, 0xedcb4237, 0x08a33196, 0xfccd5b82, 0x00acc0da, 0x001398ec, 0xffe4e0bf, 0x00057faa, + 0x25addbf9, 0xedf8545b, 0x08a49cf0, 0xfcc25285, 0x00b537e1, 0x000f9bd2, 0xffe616c8, 0x00054d1a, + 0x24b2c075, 0xee26fe17, 0x08a4fc0d, 0xfcb7cff0, 0x00bd7a1c, 0x000bae3c, 0xffe74984, 0x00051b3e, + 0x23b86263, 0xee573562, 0x08a451c0, 0xfcadd386, 0x00c5872a, 0x0007d075, 0xffe878d3, 0x0004ea1d, + 0x22bed116, 0xee88f026, 0x08a2a0f8, 0xfca45cf7, 0x00cd5eb7, 0x000402c8, 0xffe9a494, 0x0004b9c0, + 0x21c61bc0, 0xeebc2444, 0x089fecbb, 0xfc9b6be5, 0x00d50075, 0x00004579, 0xffeaccaa, 0x00048a2b, + 0x20ce516f, 0xeef0c78d, 0x089c3824, 0xfc92ffe1, 0x00dc6c1e, 0xfffc98c9, 0xffebf0fa, 0x00045b65, + 0x1fd7810f, 0xef26cfca, 0x08978666, 0xfc8b186d, 0x00e3a175, 0xfff8fcf7, 0xffed1166, 0x00042d74, + 0x1ee1b965, 0xef5e32bd, 0x0891dac8, 0xfc83b4fc, 0x00eaa045, 0xfff5723d, 0xffee2dd7, 0x0004005e, + 0x1ded0911, 0xef96e61c, 0x088b38a9, 0xfc7cd4f0, 0x00f16861, 0xfff1f8d2, 0xffef4632, 0x0003d426, + 0x1cf97e8b, 0xefd0df9a, 0x0883a378, 0xfc76779e, 0x00f7f9a3, 0xffee90eb, 0xfff05a60, 0x0003a8d2, + 0x1c072823, 0xf00c14e1, 0x087b1ebc, 0xfc709c4d, 0x00fe53ef, 0xffeb3ab8, 0xfff16a4a, 0x00037e65, + 0x1b1613ff, 0xf0487b98, 0x0871ae0d, 0xfc6b4233, 0x0104772e, 0xffe7f666, 0xfff275db, 0x000354e5, + 0x1a26501b, 0xf0860962, 0x08675516, 0xfc66687a, 0x010a6353, 0xffe4c41e, 0xfff37d00, 0x00032c54, + 0x1937ea47, 0xf0c4b3e0, 0x085c1794, 0xfc620e3d, 0x01101858, 0xffe1a408, 0xfff47fa5, 0x000304b7, + 0x184af025, 0xf10470b0, 0x084ff957, 0xfc5e328c, 0x0115963d, 0xffde9646, 0xfff57db8, 0x0002de0e, + 0x175f6f2b, 0xf1453571, 0x0842fe3d, 0xfc5ad465, 0x011add0b, 0xffdb9af8, 0xfff67729, 0x0002b85f, + 0x1675749e, 0xf186f7c0, 0x08352a35, 0xfc57f2be, 0x011fecd3, 0xffd8b23b, 0xfff76be9, 0x000293aa, + 0x158d0d95, 0xf1c9ad40, 0x0826813e, 0xfc558c7c, 0x0124c5ab, 0xffd5dc28, 0xfff85be8, 0x00026ff2, + 0x14a646f6, 0xf20d4b92, 0x08170767, 0xfc53a07b, 0x012967b1, 0xffd318d6, 0xfff9471b, 0x00024d39, + 0x13c12d73, 0xf251c85d, 0x0806c0cb, 0xfc522d88, 0x012dd30a, 0xffd06858, 0xfffa2d74, 0x00022b7f, + 0x12ddcd8f, 0xf297194d, 0x07f5b193, 0xfc513266, 0x013207e4, 0xffcdcabe, 0xfffb0ee9, 0x00020ac7, + 0x11fc3395, 0xf2dd3411, 0x07e3ddf7, 0xfc50adcc, 0x01360670, 0xffcb4014, 0xfffbeb70, 0x0001eb10, + 0x111c6ba0, 0xf3240e61, 0x07d14a38, 0xfc509e64, 0x0139cee9, 0xffc8c866, 0xfffcc300, 0x0001cc5c, }; /* * These coefficients are optimized for 48KHz -> 44.1KHz * cmd-line: fir -l 7 -s 48000 -c 17189 */ -const int32_t AudioResamplerSinc::mFirCoefsDown[] = { - 0x5bacb6f4, 0x5bab6c81, 0x5ba78d37, 0x5ba1194f, 0x5b981122, 0x5b8c7530, 0x5b7e461a, 0x5b6d84a8, 0x5b5a31c6, 0x5b444e81, 0x5b2bdc0e, 0x5b10dbc2, 0x5af34f18, 0x5ad337af, 0x5ab09748, 0x5a8b6fc7, 0x5a63c336, 0x5a3993c0, 0x5a0ce3b2, 0x59ddb57f, 0x59ac0bba, 0x5977e919, 0x59415075, 0x590844c9, 0x58ccc930, 0x588ee0ea, 0x584e8f56, 0x580bd7f4, 0x57c6be67, 0x577f4670, 0x573573f2, 0x56e94af1, 0x569acf90, 0x564a0610, 0x55f6f2d3, 0x55a19a5c, 0x554a0148, 0x54f02c56, 0x54942061, 0x5435e263, 0x53d57774, 0x5372e4c6, 0x530e2fac, 0x52a75d90, 0x523e73fd, 0x51d37897, 0x5166711c, 0x50f76368, 0x5086556f, 
0x50134d3e, 0x4f9e50ff, 0x4f2766f2, 0x4eae9571, 0x4e33e2ee, 0x4db755f3, 0x4d38f520, 0x4cb8c72e, 0x4c36d2eb, 0x4bb31f3c, 0x4b2db31a, 0x4aa69594, 0x4a1dcdce, 0x499362ff, 0x49075c72, 0x4879c185, 0x47ea99a9, 0x4759ec60, 0x46c7c140, 0x46341fed, 0x459f101d, 0x45089996, 0x4470c42d, 0x43d797c7, 0x433d1c56, 0x42a159dc, 0x42045865, 0x4166200e, 0x40c6b8fd, 0x40262b65, 0x3f847f83, 0x3ee1bda2, 0x3e3dee13, 0x3d991932, 0x3cf34766, 0x3c4c811c, 0x3ba4cec9, 0x3afc38eb, 0x3a52c805, 0x39a884a1, 0x38fd774e, 0x3851a8a2, 0x37a52135, 0x36f7e9a4, 0x364a0a90, 0x359b8c9d, 0x34ec786f, 0x343cd6af, 0x338cb004, 0x32dc0d17, 0x322af693, 0x3179751f, 0x30c79163, 0x30155404, 0x2f62c5a7, 0x2eafeeed, 0x2dfcd873, 0x2d498ad3, 0x2c960ea3, 0x2be26c73, 0x2b2eaccf, 0x2a7ad83c, 0x29c6f738, 0x2913123c, 0x285f31b7, 0x27ab5e12, 0x26f79fab, 0x2643feda, 0x259083eb, 0x24dd3721, 0x242a20b3, 0x237748cf, 0x22c4b795, 0x2212751a, 0x21608968, 0x20aefc79, 0x1ffdd63b, 0x1f4d1e8e, 0x1e9cdd43, - 0x1ded1a1d, 0x1d3ddccd, 0x1c8f2cf9, 0x1be11231, 0x1b3393f8, 0x1a86b9bf, 0x19da8ae5, 0x192f0eb7, 0x18844c70, 0x17da4b37, 0x17311222, 0x1688a832, 0x15e11453, 0x153a5d5e, 0x14948a16, 0x13efa12c, 0x134ba937, 0x12a8a8bb, 0x1206a625, 0x1165a7cc, 0x10c5b3ef, 0x1026d0b8, 0x0f890437, 0x0eec5465, 0x0e50c723, 0x0db6623b, 0x0d1d2b5d, 0x0c85281f, 0x0bee5dff, 0x0b58d262, 0x0ac48a92, 0x0a318bc1, 0x099fdb04, 0x090f7d57, 0x0880779d, 0x07f2ce9b, 0x076686fc, 0x06dba551, 0x06522e0f, 0x05ca258f, 0x0543900d, 0x04be71ab, 0x043ace6e, 0x03b8aa40, 0x033808eb, 0x02b8ee22, 0x023b5d76, 0x01bf5a5e, 0x0144e834, 0x00cc0a36, 0x0054c382, 0xffdf171b, 0xff6b07e7, 0xfef898ae, 0xfe87cc1b, 0xfe18a4bc, 0xfdab2501, 0xfd3f4f3d, 0xfcd525a5, 0xfc6caa53, 0xfc05df40, 0xfba0c64b, 0xfb3d6133, 0xfadbb19a, 0xfa7bb908, 0xfa1d78e3, 0xf9c0f276, 0xf96626f0, 0xf90d1761, 0xf8b5c4be, 0xf8602fdc, 0xf80c5977, 0xf7ba422b, 0xf769ea78, 0xf71b52c4, 0xf6ce7b57, 0xf683645a, 0xf63a0ddf, 0xf5f277d9, 0xf5aca21f, 0xf5688c6d, 0xf5263665, 0xf4e59f8a, 0xf4a6c748, 0xf469aced, 0xf42e4faf, 0xf3f4aea6, 0xf3bcc8d3, 0xf3869d1a, 0xf3522a49, 0xf31f6f0f, 0xf2ee6a07, 0xf2bf19ae, 0xf2917c6d, 0xf265908f, 0xf23b544b, 0xf212c5be, 0xf1ebe2ec, 0xf1c6a9c3, 0xf1a3181a, 0xf1812bb0, 0xf160e22d, 0xf1423924, 0xf1252e0f, 0xf109be56, 0xf0efe748, 0xf0d7a622, 0xf0c0f808, 0xf0abda0e, 0xf0984931, 0xf086425a, 0xf075c260, 0xf066c606, 0xf05949fb, 0xf04d4ade, 0xf042c539, 0xf039b587, 0xf032182f, 0xf02be98a, 0xf02725dc, 0xf023c95d, 0xf021d031, 0xf0213671, 0xf021f823, 0xf0241140, 0xf0277db1, 0xf02c3953, 0xf0323ff5, - 0xf0398d56, 0xf0421d2c, 0xf04beb1d, 0xf056f2c7, 0xf0632fb7, 0xf0709d74, 0xf07f3776, 0xf08ef92d, 0xf09fddfe, 0xf0b1e143, 0xf0c4fe50, 0xf0d9306d, 0xf0ee72db, 0xf104c0d2, 0xf11c1583, 0xf1346c17, 0xf14dbfb1, 0xf1680b6e, 0xf1834a63, 0xf19f77a0, 0xf1bc8e31, 0xf1da891b, 0xf1f96360, 0xf21917ff, 0xf239a1ef, 0xf25afc29, 0xf27d219f, 0xf2a00d43, 0xf2c3ba04, 0xf2e822ce, 0xf30d428e, 0xf333142f, 0xf359929a, 0xf380b8ba, 0xf3a88179, 0xf3d0e7c2, 0xf3f9e680, 0xf42378a0, 0xf44d9912, 0xf47842c5, 0xf4a370ad, 0xf4cf1dbf, 0xf4fb44f4, 0xf527e149, 0xf554edbd, 0xf5826555, 0xf5b0431a, 0xf5de8218, 0xf60d1d63, 0xf63c1012, 0xf66b5544, 0xf69ae81d, 0xf6cac3c7, 0xf6fae373, 0xf72b425b, 0xf75bdbbd, 0xf78caae0, 0xf7bdab16, 0xf7eed7b4, 0xf8202c1c, 0xf851a3b6, 0xf88339f5, 0xf8b4ea55, 0xf8e6b059, 0xf9188793, 0xf94a6b9b, 0xf97c5815, 0xf9ae48af, 0xf9e03924, 0xfa122537, 0xfa4408ba, 0xfa75df87, 0xfaa7a586, 0xfad956ab, 0xfb0aeef6, 0xfb3c6a73, 0xfb6dc53c, 0xfb9efb77, 0xfbd00956, 0xfc00eb1b, 0xfc319d13, 0xfc621b9a, 0xfc926319, 0xfcc27008, 0xfcf23eec, 0xfd21cc59, 0xfd5114f0, 0xfd801564, 
0xfdaeca73, 0xfddd30eb, 0xfe0b45aa, 0xfe39059b, 0xfe666dbc, 0xfe937b15, 0xfec02ac2, 0xfeec79ec, 0xff1865cd, 0xff43ebac, 0xff6f08e4, 0xff99badb, 0xffc3ff0c, 0xffedd2fd, 0x00173447, 0x00402092, 0x00689598, 0x0090911f, 0x00b81102, 0x00df1328, 0x0105958c, 0x012b9635, 0x0151133e, 0x01760ad1, 0x019a7b27, 0x01be628c, 0x01e1bf58, 0x02048ff8, 0x0226d2e6, 0x024886ad, 0x0269a9e9, 0x028a3b44, 0x02aa397b, 0x02c9a359, 0x02e877b9, 0x0306b586, 0x03245bbc, 0x03416966, 0x035ddd9e, 0x0379b790, - 0x0394f674, 0x03af9995, 0x03c9a04a, 0x03e309fe, 0x03fbd625, 0x04140449, 0x042b93fd, 0x044284e6, 0x0458d6b7, 0x046e8933, 0x04839c29, 0x04980f79, 0x04abe310, 0x04bf16e9, 0x04d1ab0d, 0x04e39f93, 0x04f4f4a2, 0x0505aa6a, 0x0515c12d, 0x05253938, 0x053412e4, 0x05424e9b, 0x054feccf, 0x055cee03, 0x056952c3, 0x05751baa, 0x0580495c, 0x058adc8d, 0x0594d5fa, 0x059e366c, 0x05a6feb9, 0x05af2fbf, 0x05b6ca6b, 0x05bdcfb2, 0x05c44095, 0x05ca1e1f, 0x05cf6965, 0x05d42387, 0x05d84daf, 0x05dbe90f, 0x05def6e4, 0x05e17873, 0x05e36f0d, 0x05e4dc08, 0x05e5c0c6, 0x05e61eae, 0x05e5f733, 0x05e54bcd, 0x05e41dfe, 0x05e26f4e, 0x05e0414d, 0x05dd9593, 0x05da6dbe, 0x05d6cb72, 0x05d2b05c, 0x05ce1e2d, 0x05c9169d, 0x05c39b6a, 0x05bdae57, 0x05b7512e, 0x05b085bc, 0x05a94dd5, 0x05a1ab52, 0x0599a00e, 0x05912dea, 0x058856cd, 0x057f1c9e, 0x0575814c, 0x056b86c6, 0x05612f00, 0x05567bf1, 0x054b6f92, 0x05400be1, 0x053452dc, 0x05284685, 0x051be8dd, 0x050f3bec, 0x050241b6, 0x04f4fc46, 0x04e76da3, 0x04d997d8, 0x04cb7cf2, 0x04bd1efb, 0x04ae8000, 0x049fa20f, 0x04908733, 0x0481317a, 0x0471a2ef, 0x0461dda0, 0x0451e396, 0x0441b6dd, 0x0431597d, 0x0420cd80, 0x041014eb, 0x03ff31c3, 0x03ee260d, 0x03dcf3ca, 0x03cb9cf9, 0x03ba2398, 0x03a889a1, 0x0396d10c, 0x0384fbd1, 0x03730be0, 0x0361032a, 0x034ee39b, 0x033caf1d, 0x032a6796, 0x03180ee7, 0x0305a6f0, 0x02f3318a, 0x02e0b08d, 0x02ce25ca, 0x02bb9310, 0x02a8fa2a, 0x02965cdb, 0x0283bce6, 0x02711c05, 0x025e7bf0, 0x024bde5a, 0x023944ee, 0x0226b156, 0x02142533, 0x0201a223, 0x01ef29be, 0x01dcbd96, 0x01ca5f37, 0x01b81028, 0x01a5d1ea, - 0x0193a5f9, 0x01818dc9, 0x016f8aca, 0x015d9e64, 0x014bc9fa, 0x013a0ee9, 0x01286e86, 0x0116ea22, 0x01058306, 0x00f43a74, 0x00e311a9, 0x00d209db, 0x00c12439, 0x00b061eb, 0x009fc413, 0x008f4bcb, 0x007efa29, 0x006ed038, 0x005ecf01, 0x004ef782, 0x003f4ab4, 0x002fc98a, 0x002074ed, 0x00114dc3, 0x000254e8, 0xfff38b32, 0xffe4f171, 0xffd6886d, 0xffc850e6, 0xffba4b98, 0xffac7936, 0xff9eda6d, 0xff916fe1, 0xff843a32, 0xff7739f7, 0xff6a6fc1, 0xff5ddc1a, 0xff517f86, 0xff455a80, 0xff396d7f, 0xff2db8f2, 0xff223d40, 0xff16faca, 0xff0bf1ed, 0xff0122fc, 0xfef68e45, 0xfeec340f, 0xfee2149b, 0xfed83023, 0xfece86db, 0xfec518f1, 0xfebbe68c, 0xfeb2efcd, 0xfeaa34d0, 0xfea1b5a9, 0xfe997268, 0xfe916b15, 0xfe899fb2, 0xfe82103f, 0xfe7abcb1, 0xfe73a4fb, 0xfe6cc909, 0xfe6628c1, 0xfe5fc405, 0xfe599aaf, 0xfe53ac97, 0xfe4df98e, 0xfe48815e, 0xfe4343d0, 0xfe3e40a6, 0xfe39779a, 0xfe34e867, 0xfe3092bf, 0xfe2c7650, 0xfe2892c5, 0xfe24e7c3, 0xfe2174ec, 0xfe1e39da, 0xfe1b3628, 0xfe18696a, 0xfe15d32f, 0xfe137304, 0xfe114872, 0xfe0f52fc, 0xfe0d9224, 0xfe0c0567, 0xfe0aac3f, 0xfe098622, 0xfe089283, 0xfe07d0d3, 0xfe07407d, 0xfe06e0eb, 0xfe06b184, 0xfe06b1ac, 0xfe06e0c4, 0xfe073e2a, 0xfe07c93a, 0xfe08814e, 0xfe0965bc, 0xfe0a75da, 0xfe0bb0f9, 0xfe0d166b, 0xfe0ea57e, 0xfe105d7e, 0xfe123db6, 0xfe144570, 0xfe1673f2, 0xfe18c884, 0xfe1b4268, 0xfe1de0e2, 0xfe20a335, 0xfe2388a1, 0xfe269065, 0xfe29b9c1, 0xfe2d03f2, 0xfe306e35, 0xfe33f7c7, 0xfe379fe3, 0xfe3b65c4, 0xfe3f48a5, 0xfe4347c0, 0xfe476250, 0xfe4b978e, 0xfe4fe6b3, 0xfe544efb, 0xfe58cf9d, 0xfe5d67d4, 
0xfe6216db, - 0xfe66dbeb, 0xfe6bb63e, 0xfe70a511, 0xfe75a79f, 0xfe7abd23, 0xfe7fe4db, 0xfe851e05, 0xfe8a67dd, 0xfe8fc1a5, 0xfe952a9b, 0xfe9aa201, 0xfea02719, 0xfea5b926, 0xfeab576d, 0xfeb10134, 0xfeb6b5c0, 0xfebc745c, 0xfec23c50, 0xfec80ce8, 0xfecde571, 0xfed3c538, 0xfed9ab8f, 0xfedf97c6, 0xfee58932, 0xfeeb7f27, 0xfef178fc, 0xfef7760c, 0xfefd75af, 0xff037744, 0xff097a29, 0xff0f7dbf, 0xff15816a, 0xff1b848e, 0xff218692, 0xff2786e1, 0xff2d84e5, 0xff33800e, 0xff3977cb, 0xff3f6b8f, 0xff455acf, 0xff4b4503, 0xff5129a3, 0xff57082e, 0xff5ce021, 0xff62b0fd, 0xff687a47, 0xff6e3b84, 0xff73f43d, 0xff79a3fe, 0xff7f4a54, 0xff84e6d0, 0xff8a7905, 0xff900089, 0xff957cf4, 0xff9aede0, 0xffa052ec, 0xffa5abb8, 0xffaaf7e6, 0xffb0371c, 0xffb56902, 0xffba8d44, 0xffbfa38d, 0xffc4ab8f, 0xffc9a4fc, 0xffce8f8a, 0xffd36af1, 0xffd836eb, 0xffdcf336, 0xffe19f91, 0xffe63bc0, 0xffeac787, 0xffef42af, 0xfff3ad01, 0xfff8064b, 0xfffc4e5c, 0x00008507, 0x0004aa1f, 0x0008bd7c, 0x000cbef7, 0x0010ae6e, 0x00148bbd, 0x001856c7, 0x001c0f6e, 0x001fb599, 0x0023492f, 0x0026ca1c, 0x002a384c, 0x002d93ae, 0x0030dc34, 0x003411d2, 0x0037347d, 0x003a442e, 0x003d40e0, 0x00402a8e, 0x00430137, 0x0045c4dd, 0x00487582, 0x004b132b, 0x004d9dde, 0x005015a5, 0x00527a8a, 0x0054cc9a, 0x00570be4, 0x00593877, 0x005b5267, 0x005d59c6, 0x005f4eac, 0x0061312e, 0x00630167, 0x0064bf71, 0x00666b68, 0x0068056b, 0x00698d98, 0x006b0411, 0x006c68f8, 0x006dbc71, 0x006efea0, 0x00702fae, 0x00714fc0, 0x00725f02, 0x00735d9c, 0x00744bba, 0x0075298a, 0x0075f739, 0x0076b4f5, 0x007762f0, 0x0078015a, 0x00789065, - 0x00791043, 0x0079812a, 0x0079e34d, 0x007a36e2, 0x007a7c20, 0x007ab33d, 0x007adc72, 0x007af7f6, 0x007b0603, 0x007b06d4, 0x007afaa1, 0x007ae1a7, 0x007abc20, 0x007a8a49, 0x007a4c5d, 0x007a029a, 0x0079ad3d, 0x00794c82, 0x0078e0a9, 0x007869ee, 0x0077e891, 0x00775ccf, 0x0076c6e8, 0x00762719, 0x00757da3, 0x0074cac4, 0x00740ebb, 0x007349c7, 0x00727c27, 0x0071a61b, 0x0070c7e1, 0x006fe1b8, 0x006ef3df, 0x006dfe94, 0x006d0217, 0x006bfea4, 0x006af47b, 0x0069e3d9, 0x0068ccfa, 0x0067b01e, 0x00668d80, 0x0065655d, 0x006437f1, 0x00630577, 0x0061ce2c, 0x00609249, 0x005f520a, 0x005e0da8, 0x005cc55c, 0x005b7961, 0x005a29ed, 0x0058d738, 0x0057817b, 0x005628ec, 0x0054cdc0, 0x0053702d, 0x00521068, 0x0050aea5, 0x004f4b17, 0x004de5f1, 0x004c7f66, 0x004b17a6, 0x0049aee3, 0x0048454b, 0x0046db0f, 0x0045705c, 0x00440561, 0x00429a4a, 0x00412f43, 0x003fc478, 0x003e5a12, 0x003cf03d, 0x003b871f, 0x003a1ee3, 0x0038b7ae, 0x003751a7, 0x0035ecf4, 0x003489b9, 0x0033281a, 0x0031c83a, 0x00306a3b, 0x002f0e3f, 0x002db466, 0x002c5cd0, 0x002b079a, 0x0029b4e4, 0x002864c9, 0x00271766, 0x0025ccd7, 0x00248535, 0x0023409a, 0x0021ff1f, 0x0020c0dc, 0x001f85e6, 0x001e4e56, 0x001d1a3f, 0x001be9b7, 0x001abcd0, 0x0019939d, 0x00186e31, 0x00174c9c, 0x00162eef, 0x00151538, 0x0013ff88, 0x0012edea, 0x0011e06d, 0x0010d71d, 0x000fd205, 0x000ed130, 0x000dd4a7, 0x000cdc74, 0x000be89f, 0x000af931, 0x000a0e2f, 0x000927a0, 0x00084589, 0x000767f0, 0x00068ed8, 0x0005ba46, 0x0004ea3a, 0x00041eb9, 0x000357c2, 0x00029558, 0x0001d779, 0x00011e26, 0x0000695e, 0xffffb91f, 0xffff0d66, - 0xfffe6631, 0xfffdc37d, 0xfffd2545, 0xfffc8b86, 0xfffbf639, 0xfffb655b, 0xfffad8e4, 0xfffa50ce, 0xfff9cd12, 0xfff94da9, 0xfff8d28c, 0xfff85bb1, 0xfff7e910, 0xfff77a9f, 0xfff71057, 0xfff6aa2b, 0xfff64812, 0xfff5ea02, 0xfff58ff0, 0xfff539cf, 0xfff4e794, 0xfff49934, 0xfff44ea3, 0xfff407d2, 0xfff3c4b7, 0xfff38542, 0xfff34968, 0xfff3111b, 0xfff2dc4c, 0xfff2aaef, 0xfff27cf3, 0xfff2524c, 0xfff22aea, 0xfff206bf, 0xfff1e5bb, 0xfff1c7d0, 0xfff1acef, 0xfff19508, 
0xfff1800b, 0xfff16de9, 0xfff15e93, 0xfff151f9, 0xfff1480b, 0xfff140b9, 0xfff13bf3, 0xfff139aa, 0xfff139cd, 0xfff13c4c, 0xfff14119, 0xfff14821, 0xfff15156, 0xfff15ca8, 0xfff16a07, 0xfff17962, 0xfff18aab, 0xfff19dd1, 0xfff1b2c5, 0xfff1c976, 0xfff1e1d6, 0xfff1fbd5, 0xfff21764, 0xfff23473, 0xfff252f3, 0xfff272d6, 0xfff2940b, 0xfff2b686, 0xfff2da36, 0xfff2ff0d, 0xfff324fd, 0xfff34bf9, 0xfff373f0, 0xfff39cd7, 0xfff3c69f, 0xfff3f13a, 0xfff41c9c, 0xfff448b7, 0xfff4757e, 0xfff4a2e5, 0xfff4d0de, 0xfff4ff5d, 0xfff52e57, 0xfff55dbf, 0xfff58d89, 0xfff5bdaa, 0xfff5ee17, 0xfff61ec5, 0xfff64fa8, 0xfff680b5, 0xfff6b1e4, 0xfff6e329, 0xfff7147a, 0xfff745cd, 0xfff7771a, 0xfff7a857, 0xfff7d97a, 0xfff80a7c, 0xfff83b52, 0xfff86bf6, 0xfff89c60, 0xfff8cc86, 0xfff8fc62, 0xfff92bec, 0xfff95b1e, 0xfff989ef, 0xfff9b85b, 0xfff9e65a, 0xfffa13e5, 0xfffa40f8, 0xfffa6d8d, 0xfffa999d, 0xfffac525, 0xfffaf01e, 0xfffb1a84, 0xfffb4453, 0xfffb6d86, 0xfffb961a, 0xfffbbe09, 0xfffbe552, 0xfffc0bef, 0xfffc31df, 0xfffc571e, 0xfffc7ba9, 0xfffc9f7e, 0xfffcc29a, 0xfffce4fc, 0xfffd06a1, 0xfffd2787, 0xfffd47ae, - 0x00000000 // this one is needed for lerping the last coefficient +const int32_t AudioResamplerSinc::mFirCoefsDown[] __attribute__ ((aligned (32))) = { + 0x5bacb6f4, 0x1ded1a1d, 0xf0398d56, 0x0394f674, 0x0193a5f9, 0xfe66dbeb, 0x00791043, 0xfffe6631, + 0x5bab6c81, 0x1d3ddccd, 0xf0421d2c, 0x03af9995, 0x01818dc9, 0xfe6bb63e, 0x0079812a, 0xfffdc37d, + 0x5ba78d37, 0x1c8f2cf9, 0xf04beb1d, 0x03c9a04a, 0x016f8aca, 0xfe70a511, 0x0079e34d, 0xfffd2545, + 0x5ba1194f, 0x1be11231, 0xf056f2c7, 0x03e309fe, 0x015d9e64, 0xfe75a79f, 0x007a36e2, 0xfffc8b86, + 0x5b981122, 0x1b3393f8, 0xf0632fb7, 0x03fbd625, 0x014bc9fa, 0xfe7abd23, 0x007a7c20, 0xfffbf639, + 0x5b8c7530, 0x1a86b9bf, 0xf0709d74, 0x04140449, 0x013a0ee9, 0xfe7fe4db, 0x007ab33d, 0xfffb655b, + 0x5b7e461a, 0x19da8ae5, 0xf07f3776, 0x042b93fd, 0x01286e86, 0xfe851e05, 0x007adc72, 0xfffad8e4, + 0x5b6d84a8, 0x192f0eb7, 0xf08ef92d, 0x044284e6, 0x0116ea22, 0xfe8a67dd, 0x007af7f6, 0xfffa50ce, + 0x5b5a31c6, 0x18844c70, 0xf09fddfe, 0x0458d6b7, 0x01058306, 0xfe8fc1a5, 0x007b0603, 0xfff9cd12, + 0x5b444e81, 0x17da4b37, 0xf0b1e143, 0x046e8933, 0x00f43a74, 0xfe952a9b, 0x007b06d4, 0xfff94da9, + 0x5b2bdc0e, 0x17311222, 0xf0c4fe50, 0x04839c29, 0x00e311a9, 0xfe9aa201, 0x007afaa1, 0xfff8d28c, + 0x5b10dbc2, 0x1688a832, 0xf0d9306d, 0x04980f79, 0x00d209db, 0xfea02719, 0x007ae1a7, 0xfff85bb1, + 0x5af34f18, 0x15e11453, 0xf0ee72db, 0x04abe310, 0x00c12439, 0xfea5b926, 0x007abc20, 0xfff7e910, + 0x5ad337af, 0x153a5d5e, 0xf104c0d2, 0x04bf16e9, 0x00b061eb, 0xfeab576d, 0x007a8a49, 0xfff77a9f, + 0x5ab09748, 0x14948a16, 0xf11c1583, 0x04d1ab0d, 0x009fc413, 0xfeb10134, 0x007a4c5d, 0xfff71057, + 0x5a8b6fc7, 0x13efa12c, 0xf1346c17, 0x04e39f93, 0x008f4bcb, 0xfeb6b5c0, 0x007a029a, 0xfff6aa2b, + 0x5a63c336, 0x134ba937, 0xf14dbfb1, 0x04f4f4a2, 0x007efa29, 0xfebc745c, 0x0079ad3d, 0xfff64812, + 0x5a3993c0, 0x12a8a8bb, 0xf1680b6e, 0x0505aa6a, 0x006ed038, 0xfec23c50, 0x00794c82, 0xfff5ea02, + 0x5a0ce3b2, 0x1206a625, 0xf1834a63, 0x0515c12d, 0x005ecf01, 0xfec80ce8, 0x0078e0a9, 0xfff58ff0, + 0x59ddb57f, 0x1165a7cc, 0xf19f77a0, 0x05253938, 0x004ef782, 0xfecde571, 0x007869ee, 0xfff539cf, + 0x59ac0bba, 0x10c5b3ef, 0xf1bc8e31, 0x053412e4, 0x003f4ab4, 0xfed3c538, 0x0077e891, 0xfff4e794, + 0x5977e919, 0x1026d0b8, 0xf1da891b, 0x05424e9b, 0x002fc98a, 0xfed9ab8f, 0x00775ccf, 0xfff49934, + 0x59415075, 0x0f890437, 0xf1f96360, 0x054feccf, 0x002074ed, 0xfedf97c6, 0x0076c6e8, 0xfff44ea3, + 0x590844c9, 0x0eec5465, 0xf21917ff, 0x055cee03, 0x00114dc3, 
0xfee58932, 0x00762719, 0xfff407d2, + 0x58ccc930, 0x0e50c723, 0xf239a1ef, 0x056952c3, 0x000254e8, 0xfeeb7f27, 0x00757da3, 0xfff3c4b7, + 0x588ee0ea, 0x0db6623b, 0xf25afc29, 0x05751baa, 0xfff38b32, 0xfef178fc, 0x0074cac4, 0xfff38542, + 0x584e8f56, 0x0d1d2b5d, 0xf27d219f, 0x0580495c, 0xffe4f171, 0xfef7760c, 0x00740ebb, 0xfff34968, + 0x580bd7f4, 0x0c85281f, 0xf2a00d43, 0x058adc8d, 0xffd6886d, 0xfefd75af, 0x007349c7, 0xfff3111b, + 0x57c6be67, 0x0bee5dff, 0xf2c3ba04, 0x0594d5fa, 0xffc850e6, 0xff037744, 0x00727c27, 0xfff2dc4c, + 0x577f4670, 0x0b58d262, 0xf2e822ce, 0x059e366c, 0xffba4b98, 0xff097a29, 0x0071a61b, 0xfff2aaef, + 0x573573f2, 0x0ac48a92, 0xf30d428e, 0x05a6feb9, 0xffac7936, 0xff0f7dbf, 0x0070c7e1, 0xfff27cf3, + 0x56e94af1, 0x0a318bc1, 0xf333142f, 0x05af2fbf, 0xff9eda6d, 0xff15816a, 0x006fe1b8, 0xfff2524c, + 0x569acf90, 0x099fdb04, 0xf359929a, 0x05b6ca6b, 0xff916fe1, 0xff1b848e, 0x006ef3df, 0xfff22aea, + 0x564a0610, 0x090f7d57, 0xf380b8ba, 0x05bdcfb2, 0xff843a32, 0xff218692, 0x006dfe94, 0xfff206bf, + 0x55f6f2d3, 0x0880779d, 0xf3a88179, 0x05c44095, 0xff7739f7, 0xff2786e1, 0x006d0217, 0xfff1e5bb, + 0x55a19a5c, 0x07f2ce9b, 0xf3d0e7c2, 0x05ca1e1f, 0xff6a6fc1, 0xff2d84e5, 0x006bfea4, 0xfff1c7d0, + 0x554a0148, 0x076686fc, 0xf3f9e680, 0x05cf6965, 0xff5ddc1a, 0xff33800e, 0x006af47b, 0xfff1acef, + 0x54f02c56, 0x06dba551, 0xf42378a0, 0x05d42387, 0xff517f86, 0xff3977cb, 0x0069e3d9, 0xfff19508, + 0x54942061, 0x06522e0f, 0xf44d9912, 0x05d84daf, 0xff455a80, 0xff3f6b8f, 0x0068ccfa, 0xfff1800b, + 0x5435e263, 0x05ca258f, 0xf47842c5, 0x05dbe90f, 0xff396d7f, 0xff455acf, 0x0067b01e, 0xfff16de9, + 0x53d57774, 0x0543900d, 0xf4a370ad, 0x05def6e4, 0xff2db8f2, 0xff4b4503, 0x00668d80, 0xfff15e93, + 0x5372e4c6, 0x04be71ab, 0xf4cf1dbf, 0x05e17873, 0xff223d40, 0xff5129a3, 0x0065655d, 0xfff151f9, + 0x530e2fac, 0x043ace6e, 0xf4fb44f4, 0x05e36f0d, 0xff16faca, 0xff57082e, 0x006437f1, 0xfff1480b, + 0x52a75d90, 0x03b8aa40, 0xf527e149, 0x05e4dc08, 0xff0bf1ed, 0xff5ce021, 0x00630577, 0xfff140b9, + 0x523e73fd, 0x033808eb, 0xf554edbd, 0x05e5c0c6, 0xff0122fc, 0xff62b0fd, 0x0061ce2c, 0xfff13bf3, + 0x51d37897, 0x02b8ee22, 0xf5826555, 0x05e61eae, 0xfef68e45, 0xff687a47, 0x00609249, 0xfff139aa, + 0x5166711c, 0x023b5d76, 0xf5b0431a, 0x05e5f733, 0xfeec340f, 0xff6e3b84, 0x005f520a, 0xfff139cd, + 0x50f76368, 0x01bf5a5e, 0xf5de8218, 0x05e54bcd, 0xfee2149b, 0xff73f43d, 0x005e0da8, 0xfff13c4c, + 0x5086556f, 0x0144e834, 0xf60d1d63, 0x05e41dfe, 0xfed83023, 0xff79a3fe, 0x005cc55c, 0xfff14119, + 0x50134d3e, 0x00cc0a36, 0xf63c1012, 0x05e26f4e, 0xfece86db, 0xff7f4a54, 0x005b7961, 0xfff14821, + 0x4f9e50ff, 0x0054c382, 0xf66b5544, 0x05e0414d, 0xfec518f1, 0xff84e6d0, 0x005a29ed, 0xfff15156, + 0x4f2766f2, 0xffdf171b, 0xf69ae81d, 0x05dd9593, 0xfebbe68c, 0xff8a7905, 0x0058d738, 0xfff15ca8, + 0x4eae9571, 0xff6b07e7, 0xf6cac3c7, 0x05da6dbe, 0xfeb2efcd, 0xff900089, 0x0057817b, 0xfff16a07, + 0x4e33e2ee, 0xfef898ae, 0xf6fae373, 0x05d6cb72, 0xfeaa34d0, 0xff957cf4, 0x005628ec, 0xfff17962, + 0x4db755f3, 0xfe87cc1b, 0xf72b425b, 0x05d2b05c, 0xfea1b5a9, 0xff9aede0, 0x0054cdc0, 0xfff18aab, + 0x4d38f520, 0xfe18a4bc, 0xf75bdbbd, 0x05ce1e2d, 0xfe997268, 0xffa052ec, 0x0053702d, 0xfff19dd1, + 0x4cb8c72e, 0xfdab2501, 0xf78caae0, 0x05c9169d, 0xfe916b15, 0xffa5abb8, 0x00521068, 0xfff1b2c5, + 0x4c36d2eb, 0xfd3f4f3d, 0xf7bdab16, 0x05c39b6a, 0xfe899fb2, 0xffaaf7e6, 0x0050aea5, 0xfff1c976, + 0x4bb31f3c, 0xfcd525a5, 0xf7eed7b4, 0x05bdae57, 0xfe82103f, 0xffb0371c, 0x004f4b17, 0xfff1e1d6, + 0x4b2db31a, 0xfc6caa53, 0xf8202c1c, 0x05b7512e, 0xfe7abcb1, 0xffb56902, 0x004de5f1, 
0xfff1fbd5, + 0x4aa69594, 0xfc05df40, 0xf851a3b6, 0x05b085bc, 0xfe73a4fb, 0xffba8d44, 0x004c7f66, 0xfff21764, + 0x4a1dcdce, 0xfba0c64b, 0xf88339f5, 0x05a94dd5, 0xfe6cc909, 0xffbfa38d, 0x004b17a6, 0xfff23473, + 0x499362ff, 0xfb3d6133, 0xf8b4ea55, 0x05a1ab52, 0xfe6628c1, 0xffc4ab8f, 0x0049aee3, 0xfff252f3, + 0x49075c72, 0xfadbb19a, 0xf8e6b059, 0x0599a00e, 0xfe5fc405, 0xffc9a4fc, 0x0048454b, 0xfff272d6, + 0x4879c185, 0xfa7bb908, 0xf9188793, 0x05912dea, 0xfe599aaf, 0xffce8f8a, 0x0046db0f, 0xfff2940b, + 0x47ea99a9, 0xfa1d78e3, 0xf94a6b9b, 0x058856cd, 0xfe53ac97, 0xffd36af1, 0x0045705c, 0xfff2b686, + 0x4759ec60, 0xf9c0f276, 0xf97c5815, 0x057f1c9e, 0xfe4df98e, 0xffd836eb, 0x00440561, 0xfff2da36, + 0x46c7c140, 0xf96626f0, 0xf9ae48af, 0x0575814c, 0xfe48815e, 0xffdcf336, 0x00429a4a, 0xfff2ff0d, + 0x46341fed, 0xf90d1761, 0xf9e03924, 0x056b86c6, 0xfe4343d0, 0xffe19f91, 0x00412f43, 0xfff324fd, + 0x459f101d, 0xf8b5c4be, 0xfa122537, 0x05612f00, 0xfe3e40a6, 0xffe63bc0, 0x003fc478, 0xfff34bf9, + 0x45089996, 0xf8602fdc, 0xfa4408ba, 0x05567bf1, 0xfe39779a, 0xffeac787, 0x003e5a12, 0xfff373f0, + 0x4470c42d, 0xf80c5977, 0xfa75df87, 0x054b6f92, 0xfe34e867, 0xffef42af, 0x003cf03d, 0xfff39cd7, + 0x43d797c7, 0xf7ba422b, 0xfaa7a586, 0x05400be1, 0xfe3092bf, 0xfff3ad01, 0x003b871f, 0xfff3c69f, + 0x433d1c56, 0xf769ea78, 0xfad956ab, 0x053452dc, 0xfe2c7650, 0xfff8064b, 0x003a1ee3, 0xfff3f13a, + 0x42a159dc, 0xf71b52c4, 0xfb0aeef6, 0x05284685, 0xfe2892c5, 0xfffc4e5c, 0x0038b7ae, 0xfff41c9c, + 0x42045865, 0xf6ce7b57, 0xfb3c6a73, 0x051be8dd, 0xfe24e7c3, 0x00008507, 0x003751a7, 0xfff448b7, + 0x4166200e, 0xf683645a, 0xfb6dc53c, 0x050f3bec, 0xfe2174ec, 0x0004aa1f, 0x0035ecf4, 0xfff4757e, + 0x40c6b8fd, 0xf63a0ddf, 0xfb9efb77, 0x050241b6, 0xfe1e39da, 0x0008bd7c, 0x003489b9, 0xfff4a2e5, + 0x40262b65, 0xf5f277d9, 0xfbd00956, 0x04f4fc46, 0xfe1b3628, 0x000cbef7, 0x0033281a, 0xfff4d0de, + 0x3f847f83, 0xf5aca21f, 0xfc00eb1b, 0x04e76da3, 0xfe18696a, 0x0010ae6e, 0x0031c83a, 0xfff4ff5d, + 0x3ee1bda2, 0xf5688c6d, 0xfc319d13, 0x04d997d8, 0xfe15d32f, 0x00148bbd, 0x00306a3b, 0xfff52e57, + 0x3e3dee13, 0xf5263665, 0xfc621b9a, 0x04cb7cf2, 0xfe137304, 0x001856c7, 0x002f0e3f, 0xfff55dbf, + 0x3d991932, 0xf4e59f8a, 0xfc926319, 0x04bd1efb, 0xfe114872, 0x001c0f6e, 0x002db466, 0xfff58d89, + 0x3cf34766, 0xf4a6c748, 0xfcc27008, 0x04ae8000, 0xfe0f52fc, 0x001fb599, 0x002c5cd0, 0xfff5bdaa, + 0x3c4c811c, 0xf469aced, 0xfcf23eec, 0x049fa20f, 0xfe0d9224, 0x0023492f, 0x002b079a, 0xfff5ee17, + 0x3ba4cec9, 0xf42e4faf, 0xfd21cc59, 0x04908733, 0xfe0c0567, 0x0026ca1c, 0x0029b4e4, 0xfff61ec5, + 0x3afc38eb, 0xf3f4aea6, 0xfd5114f0, 0x0481317a, 0xfe0aac3f, 0x002a384c, 0x002864c9, 0xfff64fa8, + 0x3a52c805, 0xf3bcc8d3, 0xfd801564, 0x0471a2ef, 0xfe098622, 0x002d93ae, 0x00271766, 0xfff680b5, + 0x39a884a1, 0xf3869d1a, 0xfdaeca73, 0x0461dda0, 0xfe089283, 0x0030dc34, 0x0025ccd7, 0xfff6b1e4, + 0x38fd774e, 0xf3522a49, 0xfddd30eb, 0x0451e396, 0xfe07d0d3, 0x003411d2, 0x00248535, 0xfff6e329, + 0x3851a8a2, 0xf31f6f0f, 0xfe0b45aa, 0x0441b6dd, 0xfe07407d, 0x0037347d, 0x0023409a, 0xfff7147a, + 0x37a52135, 0xf2ee6a07, 0xfe39059b, 0x0431597d, 0xfe06e0eb, 0x003a442e, 0x0021ff1f, 0xfff745cd, + 0x36f7e9a4, 0xf2bf19ae, 0xfe666dbc, 0x0420cd80, 0xfe06b184, 0x003d40e0, 0x0020c0dc, 0xfff7771a, + 0x364a0a90, 0xf2917c6d, 0xfe937b15, 0x041014eb, 0xfe06b1ac, 0x00402a8e, 0x001f85e6, 0xfff7a857, + 0x359b8c9d, 0xf265908f, 0xfec02ac2, 0x03ff31c3, 0xfe06e0c4, 0x00430137, 0x001e4e56, 0xfff7d97a, + 0x34ec786f, 0xf23b544b, 0xfeec79ec, 0x03ee260d, 0xfe073e2a, 0x0045c4dd, 0x001d1a3f, 0xfff80a7c, + 0x343cd6af, 
0xf212c5be, 0xff1865cd, 0x03dcf3ca, 0xfe07c93a, 0x00487582, 0x001be9b7, 0xfff83b52, + 0x338cb004, 0xf1ebe2ec, 0xff43ebac, 0x03cb9cf9, 0xfe08814e, 0x004b132b, 0x001abcd0, 0xfff86bf6, + 0x32dc0d17, 0xf1c6a9c3, 0xff6f08e4, 0x03ba2398, 0xfe0965bc, 0x004d9dde, 0x0019939d, 0xfff89c60, + 0x322af693, 0xf1a3181a, 0xff99badb, 0x03a889a1, 0xfe0a75da, 0x005015a5, 0x00186e31, 0xfff8cc86, + 0x3179751f, 0xf1812bb0, 0xffc3ff0c, 0x0396d10c, 0xfe0bb0f9, 0x00527a8a, 0x00174c9c, 0xfff8fc62, + 0x30c79163, 0xf160e22d, 0xffedd2fd, 0x0384fbd1, 0xfe0d166b, 0x0054cc9a, 0x00162eef, 0xfff92bec, + 0x30155404, 0xf1423924, 0x00173447, 0x03730be0, 0xfe0ea57e, 0x00570be4, 0x00151538, 0xfff95b1e, + 0x2f62c5a7, 0xf1252e0f, 0x00402092, 0x0361032a, 0xfe105d7e, 0x00593877, 0x0013ff88, 0xfff989ef, + 0x2eafeeed, 0xf109be56, 0x00689598, 0x034ee39b, 0xfe123db6, 0x005b5267, 0x0012edea, 0xfff9b85b, + 0x2dfcd873, 0xf0efe748, 0x0090911f, 0x033caf1d, 0xfe144570, 0x005d59c6, 0x0011e06d, 0xfff9e65a, + 0x2d498ad3, 0xf0d7a622, 0x00b81102, 0x032a6796, 0xfe1673f2, 0x005f4eac, 0x0010d71d, 0xfffa13e5, + 0x2c960ea3, 0xf0c0f808, 0x00df1328, 0x03180ee7, 0xfe18c884, 0x0061312e, 0x000fd205, 0xfffa40f8, + 0x2be26c73, 0xf0abda0e, 0x0105958c, 0x0305a6f0, 0xfe1b4268, 0x00630167, 0x000ed130, 0xfffa6d8d, + 0x2b2eaccf, 0xf0984931, 0x012b9635, 0x02f3318a, 0xfe1de0e2, 0x0064bf71, 0x000dd4a7, 0xfffa999d, + 0x2a7ad83c, 0xf086425a, 0x0151133e, 0x02e0b08d, 0xfe20a335, 0x00666b68, 0x000cdc74, 0xfffac525, + 0x29c6f738, 0xf075c260, 0x01760ad1, 0x02ce25ca, 0xfe2388a1, 0x0068056b, 0x000be89f, 0xfffaf01e, + 0x2913123c, 0xf066c606, 0x019a7b27, 0x02bb9310, 0xfe269065, 0x00698d98, 0x000af931, 0xfffb1a84, + 0x285f31b7, 0xf05949fb, 0x01be628c, 0x02a8fa2a, 0xfe29b9c1, 0x006b0411, 0x000a0e2f, 0xfffb4453, + 0x27ab5e12, 0xf04d4ade, 0x01e1bf58, 0x02965cdb, 0xfe2d03f2, 0x006c68f8, 0x000927a0, 0xfffb6d86, + 0x26f79fab, 0xf042c539, 0x02048ff8, 0x0283bce6, 0xfe306e35, 0x006dbc71, 0x00084589, 0xfffb961a, + 0x2643feda, 0xf039b587, 0x0226d2e6, 0x02711c05, 0xfe33f7c7, 0x006efea0, 0x000767f0, 0xfffbbe09, + 0x259083eb, 0xf032182f, 0x024886ad, 0x025e7bf0, 0xfe379fe3, 0x00702fae, 0x00068ed8, 0xfffbe552, + 0x24dd3721, 0xf02be98a, 0x0269a9e9, 0x024bde5a, 0xfe3b65c4, 0x00714fc0, 0x0005ba46, 0xfffc0bef, + 0x242a20b3, 0xf02725dc, 0x028a3b44, 0x023944ee, 0xfe3f48a5, 0x00725f02, 0x0004ea3a, 0xfffc31df, + 0x237748cf, 0xf023c95d, 0x02aa397b, 0x0226b156, 0xfe4347c0, 0x00735d9c, 0x00041eb9, 0xfffc571e, + 0x22c4b795, 0xf021d031, 0x02c9a359, 0x02142533, 0xfe476250, 0x00744bba, 0x000357c2, 0xfffc7ba9, + 0x2212751a, 0xf0213671, 0x02e877b9, 0x0201a223, 0xfe4b978e, 0x0075298a, 0x00029558, 0xfffc9f7e, + 0x21608968, 0xf021f823, 0x0306b586, 0x01ef29be, 0xfe4fe6b3, 0x0075f739, 0x0001d779, 0xfffcc29a, + 0x20aefc79, 0xf0241140, 0x03245bbc, 0x01dcbd96, 0xfe544efb, 0x0076b4f5, 0x00011e26, 0xfffce4fc, + 0x1ffdd63b, 0xf0277db1, 0x03416966, 0x01ca5f37, 0xfe58cf9d, 0x007762f0, 0x0000695e, 0xfffd06a1, + 0x1f4d1e8e, 0xf02c3953, 0x035ddd9e, 0x01b81028, 0xfe5d67d4, 0x0078015a, 0xffffb91f, 0xfffd2787, + 0x1e9cdd43, 0xf0323ff5, 0x0379b790, 0x01a5d1ea, 0xfe6216db, 0x00789065, 0xffff0d66, 0xfffd47ae, + 0x1ded1a1d, 0xf0398d56, 0x0394f674, 0x0193a5f9, 0xfe66dbeb, 0x00791043, 0xfffe6631, 0xfffd6713, }; // we use 15 bits to interpolate between these samples @@ -376,26 +620,32 @@ void AudioResamplerSinc::filterCoefficient( // compute the index of the coefficient on the positive side and // negative side - uint32_t indexP = (phase & c->cMask) >> c->cShift; - uint16_t lerpP = (phase & c->pMask) >> c->pShift; + uint32_t indexP = ( 
phase & c->cMask) >> c->cShift; uint32_t indexN = (-phase & c->cMask) >> c->cShift; - uint16_t lerpN = (-phase & c->pMask) >> c->pShift; + uint32_t lerpP = ( phase & c->pMask) >> c->pShift; + uint32_t lerpN = (-phase & c->pMask) >> c->pShift; if ((indexP == 0) && (lerpP == 0)) { indexN = c->cMask >> c->cShift; - lerpN = c->pMask >> c->pShift; + lerpN = c->pMask >> c->pShift; } + const size_t offset = c->halfNumCoefs; + indexP *= offset; + indexN *= offset; + + int32_t const* const coefs = mFirCoefs; + int32_t const* coefsP = coefs + indexP; + int32_t const* coefsN = coefs + indexN; + int16_t const* sP = samples; + int16_t const* sN = samples + CHANNELS; l = 0; r = 0; - const int32_t* coefs = mFirCoefs; - const int16_t *sP = samples; - const int16_t *sN = samples+CHANNELS; - const size_t offset = 1 << c->coefsBits; - const size_t count = c->halfNumCoefs; - for (size_t i=0 ; i < count ; i++) { - interpolate(l, r, coefs+indexP, lerpP, sP); - interpolate(l, r, coefs+indexN, lerpN, sN); - sP -= CHANNELS; sN += CHANNELS; coefs += offset; + size_t count = offset; + for (size_t i=0 ; i(l, r, coefsP++, offset, lerpP, sP); + sP -= CHANNELS; + interpolate(l, r, coefsN++, offset, lerpN, sN); + sN += CHANNELS; } l = 2 * mulRL(1, l, vRL); r = 2 * mulRL(0, r, vRL); @@ -404,10 +654,11 @@ void AudioResamplerSinc::filterCoefficient( template void AudioResamplerSinc::interpolate( int32_t& l, int32_t& r, - const int32_t* coefs, int16_t lerp, const int16_t* samples) + const int32_t* coefs, size_t offset, + int32_t lerp, const int16_t* samples) { int32_t c0 = coefs[0]; - int32_t c1 = coefs[1]; + int32_t c1 = coefs[offset]; int32_t sinc = mulAdd(lerp, (c1-c0)<<1, c0); if (CHANNELS == 2) { uint32_t rl = *reinterpret_cast(samples); diff --git a/services/audioflinger/AudioResamplerSinc.h b/services/audioflinger/AudioResamplerSinc.h index 3a6e356..1b14019 100644 --- a/services/audioflinger/AudioResamplerSinc.h +++ b/services/audioflinger/AudioResamplerSinc.h @@ -55,7 +55,8 @@ private: template inline void interpolate( int32_t& l, int32_t& r, - const int32_t* coefs, int16_t lerp, const int16_t* samples); + const int32_t* coefs, size_t offset, + int32_t lerp, const int16_t* samples); template inline void read(int16_t*& impulse, uint32_t& phaseFraction, diff --git a/services/audioflinger/audio-resampler/filter_coefficients.h b/services/audioflinger/audio-resampler/filter_coefficients.h index bd98136..bf70c63 100644 --- a/services/audioflinger/audio-resampler/filter_coefficients.h +++ b/services/audioflinger/audio-resampler/filter_coefficients.h @@ -18,44 +18,268 @@ namespace android { // cmd-line: fir -l 7 -s 48000 -c 23400 -n 16 -b 9.62 -const int32_t up_sampler_filter_coefficients[] = { - 0x7ccccccd, 0x7cc9b757, 0x7cc0773c, 0x7cb10d52, 0x7c9b7afd, 0x7c7fc22f, 0x7c5de56a, 0x7c35e7bb, 0x7c07ccbe, 0x7bd3989d, 0x7b99500c, 0x7b58f84d, 0x7b12972d, 0x7ac63304, 0x7a73d2b5, 0x7a1b7daa, 0x79bd3bd8, 0x795915bc, 0x78ef1457, 0x787f4134, 0x7809a65e, 0x778e4e68, 0x770d4466, 0x768693ec, 0x75fa4911, 0x75687068, 0x74d11703, 0x74344a70, 0x739218b8, 0x72ea905a, 0x723dc051, 0x718bb80b, 0x70d4876b, 0x70183ec5, 0x6f56eee1, 0x6e90a8f2, 0x6dc57e9b, 0x6cf581e8, 0x6c20c550, 0x6b475bb0, 0x6a69584a, 0x6986cec4, 0x689fd324, 0x67b479cf, 0x66c4d787, 0x65d10168, 0x64d90ce7, 0x63dd0fcd, 0x62dd2039, 0x61d95497, 0x60d1c3a6, 0x5fc68470, 0x5eb7ae46, 0x5da558c5, 0x5c8f9bcb, 0x5b768f7a, 0x5a5a4c32, 0x593aea93, 0x58188376, 0x56f32fea, 0x55cb0935, 0x54a028d0, 0x5372a862, 0x5242a1c1, 0x51102eec, 0x4fdb6a09, 0x4ea46d66, 0x4d6b536f, 0x4c3036b2, 0x4af331d9, 
0x49b45fa8, 0x4873daf7, 0x4731beb7, 0x45ee25e7, 0x44a92b96, 0x4362eadc, 0x421b7edf, 0x40d302c5, 0x3f8991bd, 0x3e3f46f2, 0x3cf43d8f, 0x3ba890b9, 0x3a5c5b8e, 0x390fb920, 0x37c2c474, 0x36759880, 0x35285026, 0x33db0631, 0x328dd556, 0x3140d82e, 0x2ff42933, 0x2ea7e2c0, 0x2d5c1f0e, 0x2c10f82d, 0x2ac68807, 0x297ce85a, 0x283432b9, 0x26ec8083, 0x25a5eae8, 0x24608ae2, 0x231c7932, 0x21d9ce63, 0x2098a2bf, 0x1f590e55, 0x1e1b28f2, 0x1cdf0a20, 0x1ba4c923, 0x1a6c7cf9, 0x19363c54, 0x18021d9d, 0x16d036eb, 0x15a09e09, 0x1473686d, 0x1348ab3a, 0x12207b3e, 0x10faecee, 0x0fd81464, 0x0eb80562, 0x0d9ad348, 0x0c80911b, 0x0b69517e, 0x0a5526b0, 0x0944228e, 0x08365690, 0x072bd3c5, 0x0624aad6, 0x0520ec00, 0x0420a716, - 0x0323eb7f, 0x022ac835, 0x01354bc1, 0x0043843f, 0xff557f58, 0xfe6b4a44, 0xfd84f1c8, 0xfca28234, 0xfbc40766, 0xfae98cc5, 0xfa131d41, 0xf940c355, 0xf8728902, 0xf7a877d4, 0xf6e298db, 0xf620f4b2, 0xf5639376, 0xf4aa7cce, 0xf3f5b7e4, 0xf3454b6a, 0xf2993d95, 0xf1f19421, 0xf14e544f, 0xf0af82e4, 0xf015242b, 0xef7f3bf5, 0xeeedcd98, 0xee60dbee, 0xedd86958, 0xed5477be, 0xecd5088e, 0xec5a1cbc, 0xebe3b4c5, 0xeb71d0ab, 0xeb046ffc, 0xea9b91cc, 0xea3734bb, 0xe9d756f3, 0xe97bf627, 0xe9250f99, 0xe8d2a017, 0xe884a3fb, 0xe83b1731, 0xe7f5f531, 0xe7b53908, 0xe778dd50, 0xe740dc3c, 0xe70d2f8d, 0xe6ddd09f, 0xe6b2b862, 0xe68bdf5e, 0xe6693db5, 0xe64acb24, 0xe6307f05, 0xe61a504f, 0xe6083599, 0xe5fa2519, 0xe5f014aa, 0xe5e9f9ca, 0xe5e7c99e, 0xe5e978f0, 0xe5eefc35, 0xe5f8478d, 0xe6054ec6, 0xe616055a, 0xe62a5e76, 0xe6424cf8, 0xe65dc373, 0xe67cb42f, 0xe69f112f, 0xe6c4cc2e, 0xe6edd6a4, 0xe71a21c7, 0xe7499e8f, 0xe77c3db4, 0xe7b1efb4, 0xe7eaa4d4, 0xe8264d21, 0xe864d874, 0xe8a63671, 0xe8ea568f, 0xe9312813, 0xe97a9a17, 0xe9c69b8c, 0xea151b3a, 0xea6607c4, 0xeab94fa9, 0xeb0ee148, 0xeb66aae0, 0xebc09a94, 0xec1c9e6d, 0xec7aa45b, 0xecda9a39, 0xed3c6dce, 0xeda00cd1, 0xee0564e8, 0xee6c63ad, 0xeed4f6b0, 0xef3f0b78, 0xefaa8f87, 0xf017705a, 0xf0859b6e, 0xf0f4fe3d, 0xf1658649, 0xf1d72114, 0xf249bc2c, 0xf2bd4523, 0xf331a99b, 0xf3a6d741, 0xf41cbbd3, 0xf493451f, 0xf50a610a, 0xf581fd8b, 0xf5fa08b5, 0xf67270b1, 0xf6eb23c6, 0xf7641059, 0xf7dd24ef, 0xf856502d, 0xf8cf80de, 0xf948a5f0, 0xf9c1ae7b, 0xfa3a89be, 0xfab32723, 0xfb2b7641, 0xfba366df, 0xfc1ae8f2, 0xfc91eca1, - 0xfd086246, 0xfd7e3a71, 0xfdf365e8, 0xfe67d5a8, 0xfedb7ae9, 0xff4e471d, 0xffc02bf2, 0x00311b54, 0x00a1076e, 0x010fe2ab, 0x017d9fb8, 0x01ea3184, 0x02558b43, 0x02bfa06d, 0x032864c1, 0x038fcc44, 0x03f5cb46, 0x045a565c, 0x04bd6269, 0x051ee498, 0x057ed264, 0x05dd218f, 0x0639c82d, 0x0694bca0, 0x06edf595, 0x07456a0e, 0x079b1158, 0x07eee314, 0x0840d732, 0x0890e5f7, 0x08df07f6, 0x092b3617, 0x09756994, 0x09bd9bfb, 0x0a03c72b, 0x0a47e559, 0x0a89f10c, 0x0ac9e521, 0x0b07bcc6, 0x0b437380, 0x0b7d0525, 0x0bb46de2, 0x0be9aa34, 0x0c1cb6ef, 0x0c4d913a, 0x0c7c368d, 0x0ca8a4b7, 0x0cd2d9d5, 0x0cfad45a, 0x0d209309, 0x0d4414f9, 0x0d65598f, 0x0d846084, 0x0da129df, 0x0dbbb5f6, 0x0dd40571, 0x0dea1943, 0x0dfdf2ae, 0x0e0f9342, 0x0e1efcdb, 0x0e2c319d, 0x0e3733fc, 0x0e4006b2, 0x0e46acc4, 0x0e4b297c, 0x0e4d806f, 0x0e4db575, 0x0e4bccac, 0x0e47ca78, 0x0e41b37c, 0x0e398c9f, 0x0e2f5b0b, 0x0e232425, 0x0e14ed93, 0x0e04bd39, 0x0df29936, 0x0dde87e2, 0x0dc88fd2, 0x0db0b7d1, 0x0d9706e1, 0x0d7b843b, 0x0d5e3749, 0x0d3f27ab, 0x0d1e5d32, 0x0cfbdfdd, 0x0cd7b7dd, 0x0cb1ed8c, 0x0c8a8973, 0x0c619444, 0x0c3716da, 0x0c0b1a37, 0x0bdda783, 0x0baec80a, 0x0b7e853c, 0x0b4ce8a8, 0x0b19fbfe, 0x0ae5c90b, 0x0ab059bc, 0x0a79b814, 0x0a41ee32, 0x0a09064e, 0x09cf0ab4, 0x099405c6, 0x095801f8, 0x091b09d1, 0x08dd27e6, 0x089e66dd, 0x085ed167, 0x081e7241, 
0x07dd5430, 0x079b8203, 0x0759068f, 0x0715ecae, 0x06d23f3d, 0x068e091c, 0x0649552a, 0x06042e45, 0x05be9f49, 0x0578b30e, 0x05327467, 0x04ebee1c, 0x04a52af2, 0x045e359f, 0x041718d2, 0x03cfdf29, 0x03889336, 0x03413f7b, 0x02f9ee68, - 0x02b2aa5c, 0x026b7da1, 0x0224726d, 0x01dd92df, 0x0196e8fe, 0x01507eb8, 0x010a5de2, 0x00c49034, 0x007f1f4b, 0x003a14a6, 0xfff579a3, 0xffb15783, 0xff6db764, 0xff2aa243, 0xfee820f8, 0xfea63c38, 0xfe64fc93, 0xfe246a72, 0xfde48e17, 0xfda56f9c, 0xfd6716f2, 0xfd298be0, 0xfcecd602, 0xfcb0fcca, 0xfc76077b, 0xfc3bfd2e, 0xfc02e4cc, 0xfbcac510, 0xfb93a486, 0xfb5d898c, 0xfb287a4d, 0xfaf47cc4, 0xfac196bb, 0xfa8fcdca, 0xfa5f2755, 0xfa2fa890, 0xfa015679, 0xf9d435dc, 0xf9a84b50, 0xf97d9b37, 0xf95429c0, 0xf92bfae4, 0xf9051266, 0xf8df73d6, 0xf8bb228c, 0xf89821ac, 0xf8767422, 0xf8561ca7, 0xf8371dbb, 0xf81979ab, 0xf7fd328c, 0xf7e24a3c, 0xf7c8c267, 0xf7b09c7f, 0xf799d9c4, 0xf7847b3d, 0xf77081be, 0xf75dede5, 0xf74cc01c, 0xf73cf898, 0xf72e9758, 0xf7219c2a, 0xf71606a6, 0xf70bd632, 0xf7030a01, 0xf6fba113, 0xf6f59a36, 0xf6f0f407, 0xf6edacf2, 0xf6ebc332, 0xf6eb34d4, 0xf6ebffb2, 0xf6ee217b, 0xf6f197ad, 0xf6f65f9b, 0xf6fc766a, 0xf703d912, 0xf70c8461, 0xf71674fa, 0xf721a756, 0xf72e17c4, 0xf73bc26b, 0xf74aa34c, 0xf75ab63f, 0xf76bf6f7, 0xf77e6103, 0xf791efcb, 0xf7a69e96, 0xf7bc6889, 0xf7d348a4, 0xf7eb39cc, 0xf80436c0, 0xf81e3a25, 0xf8393e81, 0xf8553e3c, 0xf87233a4, 0xf89018eb, 0xf8aee828, 0xf8ce9b5d, 0xf8ef2c71, 0xf9109535, 0xf932cf65, 0xf955d4a7, 0xf9799e8f, 0xf99e269e, 0xf9c36642, 0xf9e956da, 0xfa0ff1b6, 0xfa373017, 0xfa5f0b30, 0xfa877c29, 0xfab07c1d, 0xfada0420, 0xfb040d3b, 0xfb2e906f, 0xfb5986b6, 0xfb84e906, 0xfbb0b04e, 0xfbdcd57a, 0xfc095174, 0xfc361d25, 0xfc633173, 0xfc908746, 0xfcbe1789, 0xfcebdb26, 0xfd19cb0e, 0xfd47e035, 0xfd761395, - 0xfda45e2c, 0xfdd2b905, 0xfe011d2e, 0xfe2f83c1, 0xfe5de5e3, 0xfe8c3cc3, 0xfeba819d, 0xfee8adba, 0xff16ba71, 0xff44a128, 0xff725b54, 0xff9fe27d, 0xffcd303b, 0xfffa3e37, 0x00270631, 0x005381fa, 0x007fab77, 0x00ab7ca6, 0x00d6ef99, 0x0101fe7a, 0x012ca389, 0x0156d920, 0x018099b2, 0x01a9dfcc, 0x01d2a615, 0x01fae74e, 0x02229e57, 0x0249c629, 0x027059da, 0x029654a0, 0x02bbb1cc, 0x02e06ccf, 0x03048139, 0x0327eab8, 0x034aa51b, 0x036cac52, 0x038dfc6c, 0x03ae919a, 0x03ce682d, 0x03ed7c9a, 0x040bcb77, 0x0429517b, 0x04460b81, 0x0461f688, 0x047d0fb1, 0x0497543f, 0x04b0c19a, 0x04c9554e, 0x04e10d0a, 0x04f7e6a2, 0x050de00d, 0x0522f766, 0x05372aee, 0x054a7909, 0x055ce03f, 0x056e5f3d, 0x057ef4d3, 0x058e9ff8, 0x059d5fc5, 0x05ab3377, 0x05b81a70, 0x05c41435, 0x05cf2070, 0x05d93eee, 0x05e26f9f, 0x05eab296, 0x05f20809, 0x05f87053, 0x05fdebee, 0x06027b78, 0x06061fb2, 0x0608d97c, 0x060aa9da, 0x060b91ee, 0x060b92ff, 0x060aae6e, 0x0608e5c2, 0x06063a9d, 0x0602aec3, 0x05fe4414, 0x05f8fc8f, 0x05f2da52, 0x05ebdf97, 0x05e40eb3, 0x05db6a19, 0x05d1f459, 0x05c7b01a, 0x05bca021, 0x05b0c74b, 0x05a42890, 0x0596c6ff, 0x0588a5bf, 0x0579c812, 0x056a314b, 0x0559e4da, 0x0548e63f, 0x05373912, 0x0524e100, 0x0511e1c6, 0x04fe3f39, 0x04e9fd3c, 0x04d51fc6, 0x04bfaadf, 0x04a9a29e, 0x04930b2b, 0x047be8bc, 0x04643f95, 0x044c1409, 0x04336a75, 0x041a4744, 0x0400aeec, 0x03e6a5ee, 0x03cc30d4, 0x03b15431, 0x039614a1, 0x037a76c7, 0x035e7f4e, 0x034232e6, 0x03259644, 0x0308ae24, 0x02eb7f44, 0x02ce0e67, 0x02b0604f, 0x029279c4, 0x02745f8c, 0x02561670, 0x0237a337, 0x02190aa6, - 0x01fa5183, 0x01db7c90, 0x01bc908b, 0x019d9230, 0x017e8635, 0x015f714d, 0x01405821, 0x01213f58, 0x01022b90, 0x00e3215e, 0x00c42551, 0x00a53bed, 0x008669ae, 0x0067b303, 0x00491c54, 0x002aa9fa, 0x000c6043, 0xffee4372, 0xffd057bb, 0xffb2a145, 
0xff952429, 0xff77e470, 0xff5ae614, 0xff3e2d01, 0xff21bd11, 0xff059a0e, 0xfee9c7af, 0xfece499d, 0xfeb3236b, 0xfe98589b, 0xfe7dec9c, 0xfe63e2cc, 0xfe4a3e70, 0xfe3102bd, 0xfe1832d4, 0xfdffd1bd, 0xfde7e26f, 0xfdd067ca, 0xfdb96498, 0xfda2db8c, 0xfd8ccf46, 0xfd77424c, 0xfd62370e, 0xfd4dafe6, 0xfd39af17, 0xfd2636ca, 0xfd134913, 0xfd00e7ec, 0xfcef153a, 0xfcddd2c7, 0xfccd2246, 0xfcbd0551, 0xfcad7d6b, 0xfc9e8bfd, 0xfc903258, 0xfc8271b4, 0xfc754b32, 0xfc68bfd7, 0xfc5cd092, 0xfc517e38, 0xfc46c987, 0xfc3cb323, 0xfc333b97, 0xfc2a6356, 0xfc222abb, 0xfc1a9208, 0xfc139968, 0xfc0d40ec, 0xfc07888e, 0xfc027031, 0xfbfdf79e, 0xfbfa1e88, 0xfbf6e48c, 0xfbf4492d, 0xfbf24bd9, 0xfbf0ebe7, 0xfbf02896, 0xfbf00112, 0xfbf0746e, 0xfbf181a9, 0xfbf327ab, 0xfbf56549, 0xfbf83941, 0xfbfba23f, 0xfbff9ed7, 0xfc042d8e, 0xfc094cd2, 0xfc0efafe, 0xfc15365c, 0xfc1bfd22, 0xfc234d75, 0xfc2b2567, 0xfc3382fb, 0xfc3c6420, 0xfc45c6b6, 0xfc4fa88f, 0xfc5a076a, 0xfc64e0f9, 0xfc7032de, 0xfc7bfaad, 0xfc8835ed, 0xfc94e216, 0xfca1fc96, 0xfcaf82ca, 0xfcbd7206, 0xfccbc793, 0xfcda80ad, 0xfce99a86, 0xfcf91246, 0xfd08e50c, 0xfd190fed, 0xfd298ff6, 0xfd3a622b, 0xfd4b8389, 0xfd5cf105, 0xfd6ea790, 0xfd80a411, 0xfd92e36c, 0xfda5627e, 0xfdb81e22, 0xfdcb132d, 0xfdde3e6f, 0xfdf19cb9, 0xfe052ad4, 0xfe18e58c, 0xfe2cc9a7, 0xfe40d3ed, 0xfe550124, - 0xfe694e12, 0xfe7db77c, 0xfe923a2b, 0xfea6d2e5, 0xfebb7e75, 0xfed039a8, 0xfee5014c, 0xfef9d232, 0xff0ea931, 0xff238322, 0xff385ce3, 0xff4d3358, 0xff620368, 0xff76ca02, 0xff8b841a, 0xffa02eac, 0xffb4c6b9, 0xffc9494b, 0xffddb374, 0xfff2024e, 0x000632fa, 0x001a42a4, 0x002e2e82, 0x0041f3d2, 0x00558fdc, 0x0068fff3, 0x007c4177, 0x008f51cf, 0x00a22e71, 0x00b4d4dd, 0x00c7429f, 0x00d97550, 0x00eb6a95, 0x00fd2022, 0x010e93b5, 0x011fc31c, 0x0130ac31, 0x01414cdd, 0x0151a317, 0x0161ace5, 0x01716859, 0x0180d397, 0x018fecd1, 0x019eb246, 0x01ad2249, 0x01bb3b37, 0x01c8fb81, 0x01d661a6, 0x01e36c34, 0x01f019cb, 0x01fc691b, 0x020858e2, 0x0213e7f0, 0x021f1526, 0x0229df75, 0x023445dd, 0x023e4772, 0x0247e354, 0x025118b8, 0x0259e6e1, 0x02624d23, 0x026a4ae5, 0x0271df9c, 0x02790ace, 0x027fcc12, 0x02862311, 0x028c0f83, 0x0291912f, 0x0296a7f0, 0x029b53af, 0x029f9466, 0x02a36a1e, 0x02a6d4f0, 0x02a9d508, 0x02ac6a9e, 0x02ae95fb, 0x02b05779, 0x02b1af7f, 0x02b29e84, 0x02b3250f, 0x02b343b5, 0x02b2fb1a, 0x02b24bf1, 0x02b136f9, 0x02afbd02, 0x02addee8, 0x02ab9d96, 0x02a8fa03, 0x02a5f535, 0x02a2903e, 0x029ecc3c, 0x029aaa5a, 0x02962bd1, 0x029151e3, 0x028c1de0, 0x02869122, 0x0280ad0f, 0x027a7318, 0x0273e4b8, 0x026d0374, 0x0265d0dd, 0x025e4e8b, 0x02567e22, 0x024e614c, 0x0245f9bf, 0x023d4937, 0x0234517a, 0x022b1455, 0x0221939d, 0x0217d12d, 0x020dcee8, 0x02038eb7, 0x01f9128a, 0x01ee5c55, 0x01e36e14, 0x01d849c7, 0x01ccf173, 0x01c16720, 0x01b5acdd, 0x01a9c4bc, 0x019db0d0, 0x01917334, 0x01850e00, 0x01788354, 0x016bd54f, 0x015f0612, 0x015217c0, 0x01450c7f, - 0x0137e672, 0x012aa7bf, 0x011d528d, 0x010fe901, 0x01026d40, 0x00f4e16f, 0x00e747b0, 0x00d9a226, 0x00cbf2f0, 0x00be3c2d, 0x00b07ff8, 0x00a2c06b, 0x0094ff9b, 0x00873f9b, 0x0079827a, 0x006bca44, 0x005e1900, 0x005070b0, 0x0042d353, 0x003542e2, 0x0027c151, 0x001a508e, 0x000cf281, 0xffffa90e, 0xfff27611, 0xffe55b60, 0xffd85ac9, 0xffcb7615, 0xffbeaf06, 0xffb20754, 0xffa580b1, 0xff991cc9, 0xff8cdd3c, 0xff80c3a4, 0xff74d194, 0xff690894, 0xff5d6a24, 0xff51f7bb, 0xff46b2c7, 0xff3b9cad, 0xff30b6c8, 0xff260269, 0xff1b80da, 0xff113358, 0xff071b16, 0xfefd3941, 0xfef38ef6, 0xfeea1d4c, 0xfee0e54e, 0xfed7e7fd, 0xfecf2650, 0xfec6a130, 0xfebe5980, 0xfeb65015, 0xfeae85bb, 0xfea6fb32, 0xfe9fb12e, 0xfe98a85b, 0xfe91e159, 
0xfe8b5cba, 0xfe851b09, 0xfe7f1cc4, 0xfe79625e, 0xfe73ec40, 0xfe6ebac6, 0xfe69ce43, 0xfe6526fe, 0xfe60c533, 0xfe5ca913, 0xfe58d2c5, 0xfe554265, 0xfe51f802, 0xfe4ef3a4, 0xfe4c3546, 0xfe49bcd9, 0xfe478a42, 0xfe459d5e, 0xfe43f5ff, 0xfe4293ec, 0xfe4176e2, 0xfe409e95, 0xfe400aae, 0xfe3fbacd, 0xfe3fae87, 0xfe3fe569, 0xfe405ef6, 0xfe411aa8, 0xfe4217ef, 0xfe435633, 0xfe44d4d3, 0xfe469325, 0xfe489077, 0xfe4acc0e, 0xfe4d4526, 0xfe4ffaf6, 0xfe52ecab, 0xfe561969, 0xfe598050, 0xfe5d2075, 0xfe60f8ea, 0xfe6508b6, 0xfe694edd, 0xfe6dca58, 0xfe727a1f, 0xfe775d1f, 0xfe7c7243, 0xfe81b86d, 0xfe872e7c, 0xfe8cd349, 0xfe92a5a7, 0xfe98a466, 0xfe9ece4f, 0xfea52227, 0xfeab9eb2, 0xfeb242ac, 0xfeb90cce, 0xfebffbd0, 0xfec70e64, 0xfece433a, 0xfed598fe, 0xfedd0e5c, 0xfee4a1fa, 0xfeec527e, 0xfef41e8c, 0xfefc04c6, 0xff0403cc, 0xff0c1a3c, 0xff1446b5, - 0xff1c87d3, 0xff24dc32, 0xff2d426f, 0xff35b924, 0xff3e3eed, 0xff46d266, 0xff4f722b, 0xff581cd8, 0xff60d10b, 0xff698d62, 0xff72507e, 0xff7b18fe, 0xff83e586, 0xff8cb4bb, 0xff958542, 0xff9e55c6, 0xffa724f0, 0xffaff16f, 0xffb8b9f3, 0xffc17d30, 0xffca39dd, 0xffd2eeb3, 0xffdb9a70, 0xffe43bd5, 0xffecd1a6, 0xfff55aae, 0xfffdd5b8, 0x00064197, 0x000e9d1f, 0x0016e72c, 0x001f1e9b, 0x00274253, 0x002f513a, 0x00374a40, 0x003f2c57, 0x0046f679, 0x004ea7a3, 0x00563edb, 0x005dbb29, 0x00651b9c, 0x006c5f4b, 0x00738551, 0x007a8cd0, 0x008174ef, 0x00883cdc, 0x008ee3cd, 0x009568fc, 0x009bcbab, 0x00a20b23, 0x00a826b2, 0x00ae1dae, 0x00b3ef73, 0x00b99b65, 0x00bf20ee, 0x00c47f7f, 0x00c9b691, 0x00cec5a1, 0x00d3ac38, 0x00d869e1, 0x00dcfe32, 0x00e168c5, 0x00e5a93c, 0x00e9bf43, 0x00edaa88, 0x00f16ac4, 0x00f4ffb6, 0x00f86924, 0x00fba6da, 0x00feb8ad, 0x01019e78, 0x0104581c, 0x0106e583, 0x0109469d, 0x010b7b61, 0x010d83cb, 0x010f5fe2, 0x01110faf, 0x01129344, 0x0113eabb, 0x01151632, 0x011615ce, 0x0116e9bc, 0x0117922f, 0x01180f5d, 0x01186187, 0x011888f2, 0x011885e7, 0x011858b9, 0x011801be, 0x01178152, 0x0116d7d7, 0x011605b5, 0x01150b5a, 0x0113e937, 0x01129fc5, 0x01112f81, 0x010f98eb, 0x010ddc8c, 0x010bfaee, 0x0109f4a2, 0x0107ca3c, 0x01057c57, 0x01030b8e, 0x01007885, 0x00fdc3e0, 0x00faee49, 0x00f7f86e, 0x00f4e2ff, 0x00f1aeb2, 0x00ee5c3e, 0x00eaec5e, 0x00e75fd1, 0x00e3b758, 0x00dff3b7, 0x00dc15b4, 0x00d81e1a, 0x00d40db3, 0x00cfe54f, 0x00cba5bc, 0x00c74fce, 0x00c2e457, 0x00be642f, 0x00b9d02b, 0x00b52925, 0x00b06ff7, 0x00aba57c, 0x00a6ca90, 0x00a1e00f, - 0x009ce6d8, 0x0097dfc9, 0x0092cbc0, 0x008dab9d, 0x0088803e, 0x00834a83, 0x007e0b4b, 0x0078c375, 0x007373de, 0x006e1d66, 0x0068c0e9, 0x00635f45, 0x005df954, 0x00588ff1, 0x005323f7, 0x004db63c, 0x00484799, 0x0042d8e1, 0x003d6aea, 0x0037fe85, 0x00329483, 0x002d2db0, 0x0027cada, 0x00226ccb, 0x001d144a, 0x0017c21c, 0x00127704, 0x000d33c3, 0x0007f915, 0x0002c7b6, 0xfffda05c, 0xfff883be, 0xfff3728d, 0xffee6d78, 0xffe97529, 0xffe48a4a, 0xffdfad7f, 0xffdadf69, 0xffd620a6, 0xffd171d1, 0xffccd380, 0xffc84645, 0xffc3cab1, 0xffbf614e, 0xffbb0aa3, 0xffb6c735, 0xffb29782, 0xffae7c06, 0xffaa7538, 0xffa6838c, 0xffa2a770, 0xff9ee150, 0xff9b3192, 0xff979898, 0xff9416c1, 0xff90ac66, 0xff8d59dd, 0xff8a1f77, 0xff86fd81, 0xff83f443, 0xff810401, 0xff7e2cfb, 0xff7b6f6c, 0xff78cb8c, 0xff76418b, 0xff73d199, 0xff717bdf, 0xff6f4083, 0xff6d1fa5, 0xff6b1961, 0xff692dd2, 0xff675d09, 0xff65a718, 0xff640c08, 0xff628be3, 0xff6126a9, 0xff5fdc5b, 0xff5eacf3, 0xff5d9867, 0xff5c9eaa, 0xff5bbfaa, 0xff5afb53, 0xff5a5189, 0xff59c230, 0xff594d27, 0xff58f249, 0xff58b16c, 0xff588a65, 0xff587d03, 0xff588913, 0xff58ae5d, 0xff58eca8, 0xff5943b4, 0xff59b340, 0xff5a3b09, 0xff5adac6, 0xff5b922d, 0xff5c60ee, 
0xff5d46bb, 0xff5e433e, 0xff5f5621, 0xff607f0b, 0xff61bd9f, 0xff631180, 0xff647a4b, 0xff65f79e, 0xff678912, 0xff692e3f, 0xff6ae6ba, 0xff6cb218, 0xff6e8fe9, 0xff707fbd, 0xff728121, 0xff7493a2, 0xff76b6ca, 0xff78ea20, 0xff7b2d2d, 0xff7d7f76, 0xff7fe07f, 0xff824fca, 0xff84ccdb, 0xff875731, 0xff89ee4d, 0xff8c91ad, 0xff8f40d0, 0xff91fb31, 0xff94c04f, 0xff978fa6, - 0xff9a68b0, 0xff9d4ae9, 0xffa035cc, 0xffa328d4, 0xffa6237a, 0xffa9253b, 0xffac2d8f, 0xffaf3bf2, 0xffb24fde, 0xffb568ce, 0xffb8863e, 0xffbba7aa, 0xffbecc8d, 0xffc1f465, 0xffc51eaf, 0xffc84ae9, 0xffcb7893, 0xffcea72c, 0xffd1d635, 0xffd50530, 0xffd833a0, 0xffdb6109, 0xffde8cf1, 0xffe1b6dd, 0xffe4de56, 0xffe802e6, 0xffeb2416, 0xffee4174, 0xfff15a8d, 0xfff46ef1, 0xfff77e31, 0xfffa87df, 0xfffd8b92, 0x000088df, 0x00037f60, 0x00066eae, 0x00095666, 0x000c3627, 0x000f0d91, 0x0011dc47, 0x0014a1ee, 0x00175e2d, 0x001a10ad, 0x001cb91a, 0x001f5723, 0x0021ea76, 0x002472c8, 0x0026efcc, 0x0029613a, 0x002bc6cd, 0x002e2040, 0x00306d52, 0x0032adc4, 0x0034e15b, 0x003707dc, 0x00392111, 0x003b2cc5, 0x003d2ac6, 0x003f1ae4, 0x0040fcf3, 0x0042d0c9, 0x0044963d, 0x00464d2b, 0x0047f571, 0x00498eed, 0x004b1984, 0x004c951b, 0x004e0199, 0x004f5ee9, 0x0050acf7, 0x0051ebb4, 0x00531b12, 0x00543b04, 0x00554b83, 0x00564c88, 0x00573e0f, 0x00582016, 0x0058f29f, 0x0059b5ad, 0x005a6946, 0x005b0d72, 0x005ba23b, 0x005c27af, 0x005c9ddc, 0x005d04d4, 0x005d5cab, 0x005da575, 0x005ddf4c, 0x005e0a48, 0x005e2687, 0x005e3427, 0x005e3347, 0x005e240a, 0x005e0694, 0x005ddb0b, 0x005da198, 0x005d5a62, 0x005d0597, 0x005ca363, 0x005c33f6, 0x005bb77f, 0x005b2e31, 0x005a9840, 0x0059f5e1, 0x0059474a, 0x00588cb4, 0x0057c658, 0x0056f471, 0x0056173b, 0x00552ef3, 0x00543bd8, 0x00533e29, 0x00523626, 0x00512412, 0x0050082f, 0x004ee2c1, 0x004db40c, 0x004c7c55, 0x004b3be3, 0x0049f2fc, 0x0048a1e7, 0x004748ed, 0x0045e856, 0x0044806c, 0x00431177, 0x00419bc2, 0x00401f98, 0x003e9d42, - 0x003d150d, 0x003b8742, 0x0039f42e, 0x00385c1d, 0x0036bf58, 0x00351e2d, 0x003378e7, 0x0031cfd1, 0x00302337, 0x002e7363, 0x002cc0a2, 0x002b0b3d, 0x00295380, 0x002799b3, 0x0025de22, 0x00242115, 0x002262d6, 0x0020a3ad, 0x001ee3e1, 0x001d23b9, 0x001b637e, 0x0019a373, 0x0017e3df, 0x00162507, 0x0014672d, 0x0012aa95, 0x0010ef82, 0x000f3633, 0x000d7eea, 0x000bc9e6, 0x000a1765, 0x000867a5, 0x0006bae1, 0x00051157, 0x00036b3f, 0x0001c8d2, 0x00002a4a, 0xfffe8fdc, 0xfffcf9be, 0xfffb6825, 0xfff9db44, 0xfff8534d, 0xfff6d070, 0xfff552de, 0xfff3dac3, 0xfff2684e, 0xfff0fba9, 0xffef94fe, 0xffee3477, 0xffecda3b, 0xffeb866f, 0xffea3939, 0xffe8f2bb, 0xffe7b317, 0xffe67a6f, 0xffe548e0, 0xffe41e88, 0xffe2fb83, 0xffe1dfec, 0xffe0cbdc, 0xffdfbf6b, 0xffdebaaf, 0xffddbdbd, 0xffdcc8a9, 0xffdbdb84, 0xffdaf65e, 0xffda1948, 0xffd9444e, 0xffd8777d, 0xffd7b2e0, 0xffd6f67f, 0xffd64264, 0xffd59695, 0xffd4f316, 0xffd457ec, 0xffd3c519, 0xffd33a9e, 0xffd2b87c, 0xffd23eaf, 0xffd1cd37, 0xffd1640e, 0xffd1032f, 0xffd0aa93, 0xffd05a33, 0xffd01205, 0xffcfd1ff, 0xffcf9a15, 0xffcf6a3b, 0xffcf4262, 0xffcf227b, 0xffcf0a77, 0xffcefa44, 0xffcef1cf, 0xffcef106, 0xffcef7d4, 0xffcf0623, 0xffcf1bde, 0xffcf38ec, 0xffcf5d36, 0xffcf88a2, 0xffcfbb17, 0xffcff478, 0xffd034ac, 0xffd07b95, 0xffd0c915, 0xffd11d0f, 0xffd17764, 0xffd1d7f5, 0xffd23ea1, 0xffd2ab47, 0xffd31dc7, 0xffd395fd, 0xffd413c9, 0xffd49705, 0xffd51f90, 0xffd5ad44, 0xffd63ffe, 0xffd6d798, 0xffd773ed, 0xffd814d7, 0xffd8ba31, 0xffd963d4, 0xffda1199, 0xffdac35a, 0xffdb78ef, 0xffdc3231, 0xffdceef9, 0xffddaf1e, - 0xffde727a, 0xffdf38e5, 0xffe00236, 0xffe0ce46, 0xffe19cec, 0xffe26e01, 0xffe3415d, 0xffe416d8, 0xffe4ee4b, 
0xffe5c78d, 0xffe6a277, 0xffe77ee2, 0xffe85ca7, 0xffe93b9e, 0xffea1ba2, 0xffeafc8b, 0xffebde33, 0xffecc075, 0xffeda32a, 0xffee862e, 0xffef695c, 0xfff04c8f, 0xfff12fa3, 0xfff21275, 0xfff2f4e0, 0xfff3d6c3, 0xfff4b7fb, 0xfff59866, 0xfff677e2, 0xfff75650, 0xfff8338e, 0xfff90f7c, 0xfff9e9fd, 0xfffac2f0, 0xfffb9a38, 0xfffc6fb8, 0xfffd4352, 0xfffe14eb, 0xfffee466, 0xffffb1aa, 0x00007c9c, 0x00014521, 0x00020b23, 0x0002ce87, 0x00038f37, 0x00044d1b, 0x0005081f, 0x0005c02c, 0x0006752d, 0x0007270f, 0x0007d5bf, 0x0008812a, 0x0009293e, 0x0009cdeb, 0x000a6f20, 0x000b0cce, 0x000ba6e5, 0x000c3d59, 0x000cd01b, 0x000d5f1f, 0x000dea5a, 0x000e71c1, 0x000ef549, 0x000f74e9, 0x000ff098, 0x0010684e, 0x0010dc05, 0x00114bb4, 0x0011b757, 0x00121ee9, 0x00128265, 0x0012e1c8, 0x00133d0e, 0x00139436, 0x0013e73e, 0x00143626, 0x001480ec, 0x0014c792, 0x00150a19, 0x00154883, 0x001582d3, 0x0015b90b, 0x0015eb2f, 0x00161944, 0x0016434f, 0x00166956, 0x00168b5e, 0x0016a96f, 0x0016c390, 0x0016d9c9, 0x0016ec22, 0x0016faa5, 0x0017055b, 0x00170c4f, 0x00170f8a, 0x00170f18, 0x00170b04, 0x0017035a, 0x0016f828, 0x0016e979, 0x0016d75b, 0x0016c1dc, 0x0016a90a, 0x00168cf2, 0x00166da5, 0x00164b32, 0x001625a7, 0x0015fd15, 0x0015d18b, 0x0015a31b, 0x001571d5, 0x00153dca, 0x0015070b, 0x0014cdab, 0x001491b9, 0x00145349, 0x0014126c, 0x0013cf36, 0x001389b7, 0x00134204, 0x0012f82e, 0x0012ac48, 0x00125e66, 0x00120e9b, 0x0011bcf9, 0x00116994, 0x00111480, 0x0010bdcf, - 0x00106595, 0x00100be5, 0x000fb0d2, 0x000f5471, 0x000ef6d4, 0x000e980f, 0x000e3834, 0x000dd758, 0x000d758d, 0x000d12e6, 0x000caf76, 0x000c4b50, 0x000be687, 0x000b812d, 0x000b1b55, 0x000ab510, 0x000a4e72, 0x0009e78c, 0x00098070, 0x0009192f, 0x0008b1db, 0x00084a86, 0x0007e33f, 0x00077c17, 0x00071520, 0x0006ae6a, 0x00064804, 0x0005e1fe, 0x00057c68, 0x00051750, 0x0004b2c7, 0x00044eda, 0x0003eb98, 0x0003890e, 0x0003274c, 0x0002c65d, 0x00026650, 0x00020730, 0x0001a90b, 0x00014bed, 0x0000efe1, 0x000094f3, 0x00003b2e, 0xffffe29d, 0xffff8b4b, 0xffff3540, 0xfffee088, 0xfffe8d2c, 0xfffe3b35, 0xfffdeaaa, 0xfffd9b96, 0xfffd4dff, 0xfffd01ee, 0xfffcb769, 0xfffc6e78, 0xfffc2720, 0xfffbe169, 0xfffb9d59, 0xfffb5af3, 0xfffb1a3f, 0xfffadb40, 0xfffa9dfa, 0xfffa6273, 0xfffa28ad, 0xfff9f0ac, 0xfff9ba73, 0xfff98604, 0xfff95363, 0xfff92290, 0xfff8f38e, 0xfff8c65d, 0xfff89b00, 0xfff87176, 0xfff849c0, 0xfff823dd, 0xfff7ffce, 0xfff7dd92, 0xfff7bd28, 0xfff79e8f, 0xfff781c5, 0xfff766c8, 0xfff74d97, 0xfff7362f, 0xfff7208d, 0xfff70caf, 0xfff6fa92, 0xfff6ea31, 0xfff6db89, 0xfff6ce97, 0xfff6c356, 0xfff6b9c1, 0xfff6b1d5, 0xfff6ab8c, 0xfff6a6e2, 0xfff6a3d0, 0xfff6a252, 0xfff6a262, 0xfff6a3f9, 0xfff6a713, 0xfff6aba9, 0xfff6b1b4, 0xfff6b92d, 0xfff6c20f, 0xfff6cc52, 0xfff6d7f0, 0xfff6e4e1, 0xfff6f31d, 0xfff7029f, 0xfff7135d, 0xfff72551, 0xfff73873, 0xfff74cba, 0xfff76220, 0xfff7789c, 0xfff79026, 0xfff7a8b6, 0xfff7c245, 0xfff7dcc8, 0xfff7f83a, 0xfff81490, 0xfff831c3, 0xfff84fcb, 0xfff86e9e, 0xfff88e35, 0xfff8ae88, 0xfff8cf8d, 0xfff8f13c, 0xfff9138e, - 0xfff93679, 0xfff959f5, 0xfff97dfa, 0xfff9a27f, 0xfff9c77d, 0xfff9eceb, 0xfffa12c0, 0xfffa38f5, 0xfffa5f81, 0xfffa865d, 0xfffaad81, 0xfffad4e4, 0xfffafc7f, 0xfffb244a, 0xfffb4c3e, 0xfffb7452, 0xfffb9c80, 0xfffbc4bf, 0xfffbed0a, 0xfffc1558, 0xfffc3da2, 0xfffc65e2, 0xfffc8e11, 0xfffcb628, 0xfffcde20, 0xfffd05f3, 0xfffd2d9b, 0xfffd5511, 0xfffd7c4f, 0xfffda350, 0xfffdca0d, 0xfffdf080, 0xfffe16a6, 0xfffe3c76, 0xfffe61ee, 0xfffe8707, 0xfffeabbd, 0xfffed00a, 0xfffef3ea, 0xffff1759, 0xffff3a53, 0xffff5cd2, 0xffff7ed3, 0xffffa052, 0xffffc14b, 0xffffe1bc, 0x0000019f, 0x000020f3, 
0x00003fb3, 0x00005ddd, 0x00007b6f, 0x00009865, 0x0000b4bd, 0x0000d074, 0x0000eb89, 0x000105f9, 0x00011fc3, 0x000138e4, 0x0001515c, 0x00016928, 0x00018048, 0x000196ba, 0x0001ac7d, 0x0001c191, 0x0001d5f4, 0x0001e9a7, 0x0001fca8, 0x00020ef7, 0x00022095, 0x00023181, 0x000241bb, 0x00025143, 0x0002601b, 0x00026e41, 0x00027bb8, 0x0002887f, 0x00029497, 0x0002a002, 0x0002aac0, 0x0002b4d2, 0x0002be3b, 0x0002c6fa, 0x0002cf12, 0x0002d684, 0x0002dd53, 0x0002e37e, 0x0002e90a, 0x0002edf6, 0x0002f246, 0x0002f5fc, 0x0002f919, 0x0002fba0, 0x0002fd94, 0x0002fef6, 0x0002ffc9, 0x00030010, 0x0002ffcd, 0x0002ff03, 0x0002fdb4, 0x0002fbe4, 0x0002f995, 0x0002f6c9, 0x0002f385, 0x0002efca, 0x0002eb9c, 0x0002e6fe, 0x0002e1f3, 0x0002dc7d, 0x0002d6a0, 0x0002d060, 0x0002c9be, 0x0002c2be, 0x0002bb64, 0x0002b3b3, 0x0002abad, 0x0002a357, 0x00029ab2, 0x000291c3, 0x0002888c, 0x00027f11, 0x00027555, 0x00026b5b, 0x00026126, 0x000256b9, 0x00024c18, 0x00024146, 0x00023645, 0x00022b19, - 0x00021fc5, 0x0002144b, 0x000208b0, 0x0001fcf5, 0x0001f11e, 0x0001e52e, 0x0001d927, 0x0001cd0d, 0x0001c0e1, 0x0001b4a8, 0x0001a863, 0x00019c15, 0x00018fc1, 0x0001836a, 0x00017712, 0x00016abb, 0x00015e68, 0x0001521b, 0x000145d7, 0x0001399e, 0x00012d72, 0x00012155, 0x0001154a, 0x00010952, 0x0000fd6f, 0x0000f1a4, 0x0000e5f3, 0x0000da5c, 0x0000cee3, 0x0000c388, 0x0000b84d, 0x0000ad34, 0x0000a23f, 0x0000976e, 0x00008cc4, 0x00008241, 0x000077e8, 0x00006db9, 0x000063b5, 0x000059dd, 0x00005033, 0x000046b8, 0x00003d6c, 0x00003450, 0x00002b66, 0x000022ad, 0x00001a28, 0x000011d5, 0x000009b6, 0x000001cc, 0xfffffa17, 0xfffff297, 0xffffeb4c, 0xffffe438, 0xffffdd5a, 0xffffd6b2, 0xffffd041, 0xffffca06, 0xffffc402, 0xffffbe35, 0xffffb89f, 0xffffb340, 0xffffae17, 0xffffa924, 0xffffa467, 0xffff9fe0, 0xffff9b8f, 0xffff9773, 0xffff938c, 0xffff8fd9, 0xffff8c5a, 0xffff890e, 0xffff85f5, 0xffff830e, 0xffff805a, 0xffff7dd6, 0xffff7b82, 0xffff795f, 0xffff776a, 0xffff75a3, 0xffff740a, 0xffff729e, 0xffff715d, 0xffff7047, 0xffff6f5c, 0xffff6e99, 0xffff6dff, 0xffff6d8d, 0xffff6d40, 0xffff6d1a, 0xffff6d17, 0xffff6d38, 0xffff6d7c, 0xffff6de2, 0xffff6e67, 0xffff6f0d, 0xffff6fd1, 0xffff70b2, 0xffff71b0, 0xffff72c9, 0xffff73fc, 0xffff7549, 0xffff76ae, 0xffff782a, 0xffff79bc, 0xffff7b63, 0xffff7d1f, 0xffff7eed, 0xffff80cd, 0xffff82bf, 0xffff84c0, 0xffff86d0, 0xffff88ee, 0xffff8b19, 0xffff8d50, 0xffff8f92, 0xffff91de, 0xffff9434, 0xffff9691, 0xffff98f5, 0xffff9b60, 0xffff9dd0, 0xffffa045, 0xffffa2be, 0xffffa539, 0xffffa7b7, 0xffffaa35, 0xffffacb4, - 0x00000000 +const int32_t up_sampler_filter_coefficients[] __attribute__ ((aligned (32))) = { + 0x7ccccccd, 0x0323eb7f, 0xfd086246, 0x02b2aa5c, 0xfda45e2c, 0x01fa5183, 0xfe694e12, 0x0137e672, 0xff1c87d3, 0x009ce6d8, 0xff9a68b0, 0x003d150d, 0xffde727a, 0x00106595, 0xfff93679, 0x00021fc5, + 0x7cc9b757, 0x022ac835, 0xfd7e3a71, 0x026b7da1, 0xfdd2b905, 0x01db7c90, 0xfe7db77c, 0x012aa7bf, 0xff24dc32, 0x0097dfc9, 0xff9d4ae9, 0x003b8742, 0xffdf38e5, 0x00100be5, 0xfff959f5, 0x0002144b, + 0x7cc0773c, 0x01354bc1, 0xfdf365e8, 0x0224726d, 0xfe011d2e, 0x01bc908b, 0xfe923a2b, 0x011d528d, 0xff2d426f, 0x0092cbc0, 0xffa035cc, 0x0039f42e, 0xffe00236, 0x000fb0d2, 0xfff97dfa, 0x000208b0, + 0x7cb10d52, 0x0043843f, 0xfe67d5a8, 0x01dd92df, 0xfe2f83c1, 0x019d9230, 0xfea6d2e5, 0x010fe901, 0xff35b924, 0x008dab9d, 0xffa328d4, 0x00385c1d, 0xffe0ce46, 0x000f5471, 0xfff9a27f, 0x0001fcf5, + 0x7c9b7afd, 0xff557f58, 0xfedb7ae9, 0x0196e8fe, 0xfe5de5e3, 0x017e8635, 0xfebb7e75, 0x01026d40, 0xff3e3eed, 0x0088803e, 0xffa6237a, 0x0036bf58, 0xffe19cec, 0x000ef6d4, 0xfff9c77d, 
0x0001f11e, + 0x7c7fc22f, 0xfe6b4a44, 0xff4e471d, 0x01507eb8, 0xfe8c3cc3, 0x015f714d, 0xfed039a8, 0x00f4e16f, 0xff46d266, 0x00834a83, 0xffa9253b, 0x00351e2d, 0xffe26e01, 0x000e980f, 0xfff9eceb, 0x0001e52e, + 0x7c5de56a, 0xfd84f1c8, 0xffc02bf2, 0x010a5de2, 0xfeba819d, 0x01405821, 0xfee5014c, 0x00e747b0, 0xff4f722b, 0x007e0b4b, 0xffac2d8f, 0x003378e7, 0xffe3415d, 0x000e3834, 0xfffa12c0, 0x0001d927, + 0x7c35e7bb, 0xfca28234, 0x00311b54, 0x00c49034, 0xfee8adba, 0x01213f58, 0xfef9d232, 0x00d9a226, 0xff581cd8, 0x0078c375, 0xffaf3bf2, 0x0031cfd1, 0xffe416d8, 0x000dd758, 0xfffa38f5, 0x0001cd0d, + 0x7c07ccbe, 0xfbc40766, 0x00a1076e, 0x007f1f4b, 0xff16ba71, 0x01022b90, 0xff0ea931, 0x00cbf2f0, 0xff60d10b, 0x007373de, 0xffb24fde, 0x00302337, 0xffe4ee4b, 0x000d758d, 0xfffa5f81, 0x0001c0e1, + 0x7bd3989d, 0xfae98cc5, 0x010fe2ab, 0x003a14a6, 0xff44a128, 0x00e3215e, 0xff238322, 0x00be3c2d, 0xff698d62, 0x006e1d66, 0xffb568ce, 0x002e7363, 0xffe5c78d, 0x000d12e6, 0xfffa865d, 0x0001b4a8, + 0x7b99500c, 0xfa131d41, 0x017d9fb8, 0xfff579a3, 0xff725b54, 0x00c42551, 0xff385ce3, 0x00b07ff8, 0xff72507e, 0x0068c0e9, 0xffb8863e, 0x002cc0a2, 0xffe6a277, 0x000caf76, 0xfffaad81, 0x0001a863, + 0x7b58f84d, 0xf940c355, 0x01ea3184, 0xffb15783, 0xff9fe27d, 0x00a53bed, 0xff4d3358, 0x00a2c06b, 0xff7b18fe, 0x00635f45, 0xffbba7aa, 0x002b0b3d, 0xffe77ee2, 0x000c4b50, 0xfffad4e4, 0x00019c15, + 0x7b12972d, 0xf8728902, 0x02558b43, 0xff6db764, 0xffcd303b, 0x008669ae, 0xff620368, 0x0094ff9b, 0xff83e586, 0x005df954, 0xffbecc8d, 0x00295380, 0xffe85ca7, 0x000be687, 0xfffafc7f, 0x00018fc1, + 0x7ac63304, 0xf7a877d4, 0x02bfa06d, 0xff2aa243, 0xfffa3e37, 0x0067b303, 0xff76ca02, 0x00873f9b, 0xff8cb4bb, 0x00588ff1, 0xffc1f465, 0x002799b3, 0xffe93b9e, 0x000b812d, 0xfffb244a, 0x0001836a, + 0x7a73d2b5, 0xf6e298db, 0x032864c1, 0xfee820f8, 0x00270631, 0x00491c54, 0xff8b841a, 0x0079827a, 0xff958542, 0x005323f7, 0xffc51eaf, 0x0025de22, 0xffea1ba2, 0x000b1b55, 0xfffb4c3e, 0x00017712, + 0x7a1b7daa, 0xf620f4b2, 0x038fcc44, 0xfea63c38, 0x005381fa, 0x002aa9fa, 0xffa02eac, 0x006bca44, 0xff9e55c6, 0x004db63c, 0xffc84ae9, 0x00242115, 0xffeafc8b, 0x000ab510, 0xfffb7452, 0x00016abb, + 0x79bd3bd8, 0xf5639376, 0x03f5cb46, 0xfe64fc93, 0x007fab77, 0x000c6043, 0xffb4c6b9, 0x005e1900, 0xffa724f0, 0x00484799, 0xffcb7893, 0x002262d6, 0xffebde33, 0x000a4e72, 0xfffb9c80, 0x00015e68, + 0x795915bc, 0xf4aa7cce, 0x045a565c, 0xfe246a72, 0x00ab7ca6, 0xffee4372, 0xffc9494b, 0x005070b0, 0xffaff16f, 0x0042d8e1, 0xffcea72c, 0x0020a3ad, 0xffecc075, 0x0009e78c, 0xfffbc4bf, 0x0001521b, + 0x78ef1457, 0xf3f5b7e4, 0x04bd6269, 0xfde48e17, 0x00d6ef99, 0xffd057bb, 0xffddb374, 0x0042d353, 0xffb8b9f3, 0x003d6aea, 0xffd1d635, 0x001ee3e1, 0xffeda32a, 0x00098070, 0xfffbed0a, 0x000145d7, + 0x787f4134, 0xf3454b6a, 0x051ee498, 0xfda56f9c, 0x0101fe7a, 0xffb2a145, 0xfff2024e, 0x003542e2, 0xffc17d30, 0x0037fe85, 0xffd50530, 0x001d23b9, 0xffee862e, 0x0009192f, 0xfffc1558, 0x0001399e, + 0x7809a65e, 0xf2993d95, 0x057ed264, 0xfd6716f2, 0x012ca389, 0xff952429, 0x000632fa, 0x0027c151, 0xffca39dd, 0x00329483, 0xffd833a0, 0x001b637e, 0xffef695c, 0x0008b1db, 0xfffc3da2, 0x00012d72, + 0x778e4e68, 0xf1f19421, 0x05dd218f, 0xfd298be0, 0x0156d920, 0xff77e470, 0x001a42a4, 0x001a508e, 0xffd2eeb3, 0x002d2db0, 0xffdb6109, 0x0019a373, 0xfff04c8f, 0x00084a86, 0xfffc65e2, 0x00012155, + 0x770d4466, 0xf14e544f, 0x0639c82d, 0xfcecd602, 0x018099b2, 0xff5ae614, 0x002e2e82, 0x000cf281, 0xffdb9a70, 0x0027cada, 0xffde8cf1, 0x0017e3df, 0xfff12fa3, 0x0007e33f, 0xfffc8e11, 0x0001154a, + 0x768693ec, 0xf0af82e4, 0x0694bca0, 0xfcb0fcca, 
0x01a9dfcc, 0xff3e2d01, 0x0041f3d2, 0xffffa90e, 0xffe43bd5, 0x00226ccb, 0xffe1b6dd, 0x00162507, 0xfff21275, 0x00077c17, 0xfffcb628, 0x00010952, + 0x75fa4911, 0xf015242b, 0x06edf595, 0xfc76077b, 0x01d2a615, 0xff21bd11, 0x00558fdc, 0xfff27611, 0xffecd1a6, 0x001d144a, 0xffe4de56, 0x0014672d, 0xfff2f4e0, 0x00071520, 0xfffcde20, 0x0000fd6f, + 0x75687068, 0xef7f3bf5, 0x07456a0e, 0xfc3bfd2e, 0x01fae74e, 0xff059a0e, 0x0068fff3, 0xffe55b60, 0xfff55aae, 0x0017c21c, 0xffe802e6, 0x0012aa95, 0xfff3d6c3, 0x0006ae6a, 0xfffd05f3, 0x0000f1a4, + 0x74d11703, 0xeeedcd98, 0x079b1158, 0xfc02e4cc, 0x02229e57, 0xfee9c7af, 0x007c4177, 0xffd85ac9, 0xfffdd5b8, 0x00127704, 0xffeb2416, 0x0010ef82, 0xfff4b7fb, 0x00064804, 0xfffd2d9b, 0x0000e5f3, + 0x74344a70, 0xee60dbee, 0x07eee314, 0xfbcac510, 0x0249c629, 0xfece499d, 0x008f51cf, 0xffcb7615, 0x00064197, 0x000d33c3, 0xffee4174, 0x000f3633, 0xfff59866, 0x0005e1fe, 0xfffd5511, 0x0000da5c, + 0x739218b8, 0xedd86958, 0x0840d732, 0xfb93a486, 0x027059da, 0xfeb3236b, 0x00a22e71, 0xffbeaf06, 0x000e9d1f, 0x0007f915, 0xfff15a8d, 0x000d7eea, 0xfff677e2, 0x00057c68, 0xfffd7c4f, 0x0000cee3, + 0x72ea905a, 0xed5477be, 0x0890e5f7, 0xfb5d898c, 0x029654a0, 0xfe98589b, 0x00b4d4dd, 0xffb20754, 0x0016e72c, 0x0002c7b6, 0xfff46ef1, 0x000bc9e6, 0xfff75650, 0x00051750, 0xfffda350, 0x0000c388, + 0x723dc051, 0xecd5088e, 0x08df07f6, 0xfb287a4d, 0x02bbb1cc, 0xfe7dec9c, 0x00c7429f, 0xffa580b1, 0x001f1e9b, 0xfffda05c, 0xfff77e31, 0x000a1765, 0xfff8338e, 0x0004b2c7, 0xfffdca0d, 0x0000b84d, + 0x718bb80b, 0xec5a1cbc, 0x092b3617, 0xfaf47cc4, 0x02e06ccf, 0xfe63e2cc, 0x00d97550, 0xff991cc9, 0x00274253, 0xfff883be, 0xfffa87df, 0x000867a5, 0xfff90f7c, 0x00044eda, 0xfffdf080, 0x0000ad34, + 0x70d4876b, 0xebe3b4c5, 0x09756994, 0xfac196bb, 0x03048139, 0xfe4a3e70, 0x00eb6a95, 0xff8cdd3c, 0x002f513a, 0xfff3728d, 0xfffd8b92, 0x0006bae1, 0xfff9e9fd, 0x0003eb98, 0xfffe16a6, 0x0000a23f, + 0x70183ec5, 0xeb71d0ab, 0x09bd9bfb, 0xfa8fcdca, 0x0327eab8, 0xfe3102bd, 0x00fd2022, 0xff80c3a4, 0x00374a40, 0xffee6d78, 0x000088df, 0x00051157, 0xfffac2f0, 0x0003890e, 0xfffe3c76, 0x0000976e, + 0x6f56eee1, 0xeb046ffc, 0x0a03c72b, 0xfa5f2755, 0x034aa51b, 0xfe1832d4, 0x010e93b5, 0xff74d194, 0x003f2c57, 0xffe97529, 0x00037f60, 0x00036b3f, 0xfffb9a38, 0x0003274c, 0xfffe61ee, 0x00008cc4, + 0x6e90a8f2, 0xea9b91cc, 0x0a47e559, 0xfa2fa890, 0x036cac52, 0xfdffd1bd, 0x011fc31c, 0xff690894, 0x0046f679, 0xffe48a4a, 0x00066eae, 0x0001c8d2, 0xfffc6fb8, 0x0002c65d, 0xfffe8707, 0x00008241, + 0x6dc57e9b, 0xea3734bb, 0x0a89f10c, 0xfa015679, 0x038dfc6c, 0xfde7e26f, 0x0130ac31, 0xff5d6a24, 0x004ea7a3, 0xffdfad7f, 0x00095666, 0x00002a4a, 0xfffd4352, 0x00026650, 0xfffeabbd, 0x000077e8, + 0x6cf581e8, 0xe9d756f3, 0x0ac9e521, 0xf9d435dc, 0x03ae919a, 0xfdd067ca, 0x01414cdd, 0xff51f7bb, 0x00563edb, 0xffdadf69, 0x000c3627, 0xfffe8fdc, 0xfffe14eb, 0x00020730, 0xfffed00a, 0x00006db9, + 0x6c20c550, 0xe97bf627, 0x0b07bcc6, 0xf9a84b50, 0x03ce682d, 0xfdb96498, 0x0151a317, 0xff46b2c7, 0x005dbb29, 0xffd620a6, 0x000f0d91, 0xfffcf9be, 0xfffee466, 0x0001a90b, 0xfffef3ea, 0x000063b5, + 0x6b475bb0, 0xe9250f99, 0x0b437380, 0xf97d9b37, 0x03ed7c9a, 0xfda2db8c, 0x0161ace5, 0xff3b9cad, 0x00651b9c, 0xffd171d1, 0x0011dc47, 0xfffb6825, 0xffffb1aa, 0x00014bed, 0xffff1759, 0x000059dd, + 0x6a69584a, 0xe8d2a017, 0x0b7d0525, 0xf95429c0, 0x040bcb77, 0xfd8ccf46, 0x01716859, 0xff30b6c8, 0x006c5f4b, 0xffccd380, 0x0014a1ee, 0xfff9db44, 0x00007c9c, 0x0000efe1, 0xffff3a53, 0x00005033, + 0x6986cec4, 0xe884a3fb, 0x0bb46de2, 0xf92bfae4, 0x0429517b, 0xfd77424c, 0x0180d397, 0xff260269, 0x00738551, 
0xffc84645, 0x00175e2d, 0xfff8534d, 0x00014521, 0x000094f3, 0xffff5cd2, 0x000046b8, + 0x689fd324, 0xe83b1731, 0x0be9aa34, 0xf9051266, 0x04460b81, 0xfd62370e, 0x018fecd1, 0xff1b80da, 0x007a8cd0, 0xffc3cab1, 0x001a10ad, 0xfff6d070, 0x00020b23, 0x00003b2e, 0xffff7ed3, 0x00003d6c, + 0x67b479cf, 0xe7f5f531, 0x0c1cb6ef, 0xf8df73d6, 0x0461f688, 0xfd4dafe6, 0x019eb246, 0xff113358, 0x008174ef, 0xffbf614e, 0x001cb91a, 0xfff552de, 0x0002ce87, 0xffffe29d, 0xffffa052, 0x00003450, + 0x66c4d787, 0xe7b53908, 0x0c4d913a, 0xf8bb228c, 0x047d0fb1, 0xfd39af17, 0x01ad2249, 0xff071b16, 0x00883cdc, 0xffbb0aa3, 0x001f5723, 0xfff3dac3, 0x00038f37, 0xffff8b4b, 0xffffc14b, 0x00002b66, + 0x65d10168, 0xe778dd50, 0x0c7c368d, 0xf89821ac, 0x0497543f, 0xfd2636ca, 0x01bb3b37, 0xfefd3941, 0x008ee3cd, 0xffb6c735, 0x0021ea76, 0xfff2684e, 0x00044d1b, 0xffff3540, 0xffffe1bc, 0x000022ad, + 0x64d90ce7, 0xe740dc3c, 0x0ca8a4b7, 0xf8767422, 0x04b0c19a, 0xfd134913, 0x01c8fb81, 0xfef38ef6, 0x009568fc, 0xffb29782, 0x002472c8, 0xfff0fba9, 0x0005081f, 0xfffee088, 0x0000019f, 0x00001a28, + 0x63dd0fcd, 0xe70d2f8d, 0x0cd2d9d5, 0xf8561ca7, 0x04c9554e, 0xfd00e7ec, 0x01d661a6, 0xfeea1d4c, 0x009bcbab, 0xffae7c06, 0x0026efcc, 0xffef94fe, 0x0005c02c, 0xfffe8d2c, 0x000020f3, 0x000011d5, + 0x62dd2039, 0xe6ddd09f, 0x0cfad45a, 0xf8371dbb, 0x04e10d0a, 0xfcef153a, 0x01e36c34, 0xfee0e54e, 0x00a20b23, 0xffaa7538, 0x0029613a, 0xffee3477, 0x0006752d, 0xfffe3b35, 0x00003fb3, 0x000009b6, + 0x61d95497, 0xe6b2b862, 0x0d209309, 0xf81979ab, 0x04f7e6a2, 0xfcddd2c7, 0x01f019cb, 0xfed7e7fd, 0x00a826b2, 0xffa6838c, 0x002bc6cd, 0xffecda3b, 0x0007270f, 0xfffdeaaa, 0x00005ddd, 0x000001cc, + 0x60d1c3a6, 0xe68bdf5e, 0x0d4414f9, 0xf7fd328c, 0x050de00d, 0xfccd2246, 0x01fc691b, 0xfecf2650, 0x00ae1dae, 0xffa2a770, 0x002e2040, 0xffeb866f, 0x0007d5bf, 0xfffd9b96, 0x00007b6f, 0xfffffa17, + 0x5fc68470, 0xe6693db5, 0x0d65598f, 0xf7e24a3c, 0x0522f766, 0xfcbd0551, 0x020858e2, 0xfec6a130, 0x00b3ef73, 0xff9ee150, 0x00306d52, 0xffea3939, 0x0008812a, 0xfffd4dff, 0x00009865, 0xfffff297, + 0x5eb7ae46, 0xe64acb24, 0x0d846084, 0xf7c8c267, 0x05372aee, 0xfcad7d6b, 0x0213e7f0, 0xfebe5980, 0x00b99b65, 0xff9b3192, 0x0032adc4, 0xffe8f2bb, 0x0009293e, 0xfffd01ee, 0x0000b4bd, 0xffffeb4c, + 0x5da558c5, 0xe6307f05, 0x0da129df, 0xf7b09c7f, 0x054a7909, 0xfc9e8bfd, 0x021f1526, 0xfeb65015, 0x00bf20ee, 0xff979898, 0x0034e15b, 0xffe7b317, 0x0009cdeb, 0xfffcb769, 0x0000d074, 0xffffe438, + 0x5c8f9bcb, 0xe61a504f, 0x0dbbb5f6, 0xf799d9c4, 0x055ce03f, 0xfc903258, 0x0229df75, 0xfeae85bb, 0x00c47f7f, 0xff9416c1, 0x003707dc, 0xffe67a6f, 0x000a6f20, 0xfffc6e78, 0x0000eb89, 0xffffdd5a, + 0x5b768f7a, 0xe6083599, 0x0dd40571, 0xf7847b3d, 0x056e5f3d, 0xfc8271b4, 0x023445dd, 0xfea6fb32, 0x00c9b691, 0xff90ac66, 0x00392111, 0xffe548e0, 0x000b0cce, 0xfffc2720, 0x000105f9, 0xffffd6b2, + 0x5a5a4c32, 0xe5fa2519, 0x0dea1943, 0xf77081be, 0x057ef4d3, 0xfc754b32, 0x023e4772, 0xfe9fb12e, 0x00cec5a1, 0xff8d59dd, 0x003b2cc5, 0xffe41e88, 0x000ba6e5, 0xfffbe169, 0x00011fc3, 0xffffd041, + 0x593aea93, 0xe5f014aa, 0x0dfdf2ae, 0xf75dede5, 0x058e9ff8, 0xfc68bfd7, 0x0247e354, 0xfe98a85b, 0x00d3ac38, 0xff8a1f77, 0x003d2ac6, 0xffe2fb83, 0x000c3d59, 0xfffb9d59, 0x000138e4, 0xffffca06, + 0x58188376, 0xe5e9f9ca, 0x0e0f9342, 0xf74cc01c, 0x059d5fc5, 0xfc5cd092, 0x025118b8, 0xfe91e159, 0x00d869e1, 0xff86fd81, 0x003f1ae4, 0xffe1dfec, 0x000cd01b, 0xfffb5af3, 0x0001515c, 0xffffc402, + 0x56f32fea, 0xe5e7c99e, 0x0e1efcdb, 0xf73cf898, 0x05ab3377, 0xfc517e38, 0x0259e6e1, 0xfe8b5cba, 0x00dcfe32, 0xff83f443, 0x0040fcf3, 0xffe0cbdc, 0x000d5f1f, 0xfffb1a3f, 
0x00016928, 0xffffbe35, + 0x55cb0935, 0xe5e978f0, 0x0e2c319d, 0xf72e9758, 0x05b81a70, 0xfc46c987, 0x02624d23, 0xfe851b09, 0x00e168c5, 0xff810401, 0x0042d0c9, 0xffdfbf6b, 0x000dea5a, 0xfffadb40, 0x00018048, 0xffffb89f, + 0x54a028d0, 0xe5eefc35, 0x0e3733fc, 0xf7219c2a, 0x05c41435, 0xfc3cb323, 0x026a4ae5, 0xfe7f1cc4, 0x00e5a93c, 0xff7e2cfb, 0x0044963d, 0xffdebaaf, 0x000e71c1, 0xfffa9dfa, 0x000196ba, 0xffffb340, + 0x5372a862, 0xe5f8478d, 0x0e4006b2, 0xf71606a6, 0x05cf2070, 0xfc333b97, 0x0271df9c, 0xfe79625e, 0x00e9bf43, 0xff7b6f6c, 0x00464d2b, 0xffddbdbd, 0x000ef549, 0xfffa6273, 0x0001ac7d, 0xffffae17, + 0x5242a1c1, 0xe6054ec6, 0x0e46acc4, 0xf70bd632, 0x05d93eee, 0xfc2a6356, 0x02790ace, 0xfe73ec40, 0x00edaa88, 0xff78cb8c, 0x0047f571, 0xffdcc8a9, 0x000f74e9, 0xfffa28ad, 0x0001c191, 0xffffa924, + 0x51102eec, 0xe616055a, 0x0e4b297c, 0xf7030a01, 0x05e26f9f, 0xfc222abb, 0x027fcc12, 0xfe6ebac6, 0x00f16ac4, 0xff76418b, 0x00498eed, 0xffdbdb84, 0x000ff098, 0xfff9f0ac, 0x0001d5f4, 0xffffa467, + 0x4fdb6a09, 0xe62a5e76, 0x0e4d806f, 0xf6fba113, 0x05eab296, 0xfc1a9208, 0x02862311, 0xfe69ce43, 0x00f4ffb6, 0xff73d199, 0x004b1984, 0xffdaf65e, 0x0010684e, 0xfff9ba73, 0x0001e9a7, 0xffff9fe0, + 0x4ea46d66, 0xe6424cf8, 0x0e4db575, 0xf6f59a36, 0x05f20809, 0xfc139968, 0x028c0f83, 0xfe6526fe, 0x00f86924, 0xff717bdf, 0x004c951b, 0xffda1948, 0x0010dc05, 0xfff98604, 0x0001fca8, 0xffff9b8f, + 0x4d6b536f, 0xe65dc373, 0x0e4bccac, 0xf6f0f407, 0x05f87053, 0xfc0d40ec, 0x0291912f, 0xfe60c533, 0x00fba6da, 0xff6f4083, 0x004e0199, 0xffd9444e, 0x00114bb4, 0xfff95363, 0x00020ef7, 0xffff9773, + 0x4c3036b2, 0xe67cb42f, 0x0e47ca78, 0xf6edacf2, 0x05fdebee, 0xfc07888e, 0x0296a7f0, 0xfe5ca913, 0x00feb8ad, 0xff6d1fa5, 0x004f5ee9, 0xffd8777d, 0x0011b757, 0xfff92290, 0x00022095, 0xffff938c, + 0x4af331d9, 0xe69f112f, 0x0e41b37c, 0xf6ebc332, 0x06027b78, 0xfc027031, 0x029b53af, 0xfe58d2c5, 0x01019e78, 0xff6b1961, 0x0050acf7, 0xffd7b2e0, 0x00121ee9, 0xfff8f38e, 0x00023181, 0xffff8fd9, + 0x49b45fa8, 0xe6c4cc2e, 0x0e398c9f, 0xf6eb34d4, 0x06061fb2, 0xfbfdf79e, 0x029f9466, 0xfe554265, 0x0104581c, 0xff692dd2, 0x0051ebb4, 0xffd6f67f, 0x00128265, 0xfff8c65d, 0x000241bb, 0xffff8c5a, + 0x4873daf7, 0xe6edd6a4, 0x0e2f5b0b, 0xf6ebffb2, 0x0608d97c, 0xfbfa1e88, 0x02a36a1e, 0xfe51f802, 0x0106e583, 0xff675d09, 0x00531b12, 0xffd64264, 0x0012e1c8, 0xfff89b00, 0x00025143, 0xffff890e, + 0x4731beb7, 0xe71a21c7, 0x0e232425, 0xf6ee217b, 0x060aa9da, 0xfbf6e48c, 0x02a6d4f0, 0xfe4ef3a4, 0x0109469d, 0xff65a718, 0x00543b04, 0xffd59695, 0x00133d0e, 0xfff87176, 0x0002601b, 0xffff85f5, + 0x45ee25e7, 0xe7499e8f, 0x0e14ed93, 0xf6f197ad, 0x060b91ee, 0xfbf4492d, 0x02a9d508, 0xfe4c3546, 0x010b7b61, 0xff640c08, 0x00554b83, 0xffd4f316, 0x00139436, 0xfff849c0, 0x00026e41, 0xffff830e, + 0x44a92b96, 0xe77c3db4, 0x0e04bd39, 0xf6f65f9b, 0x060b92ff, 0xfbf24bd9, 0x02ac6a9e, 0xfe49bcd9, 0x010d83cb, 0xff628be3, 0x00564c88, 0xffd457ec, 0x0013e73e, 0xfff823dd, 0x00027bb8, 0xffff805a, + 0x4362eadc, 0xe7b1efb4, 0x0df29936, 0xf6fc766a, 0x060aae6e, 0xfbf0ebe7, 0x02ae95fb, 0xfe478a42, 0x010f5fe2, 0xff6126a9, 0x00573e0f, 0xffd3c519, 0x00143626, 0xfff7ffce, 0x0002887f, 0xffff7dd6, + 0x421b7edf, 0xe7eaa4d4, 0x0dde87e2, 0xf703d912, 0x0608e5c2, 0xfbf02896, 0x02b05779, 0xfe459d5e, 0x01110faf, 0xff5fdc5b, 0x00582016, 0xffd33a9e, 0x001480ec, 0xfff7dd92, 0x00029497, 0xffff7b82, + 0x40d302c5, 0xe8264d21, 0x0dc88fd2, 0xf70c8461, 0x06063a9d, 0xfbf00112, 0x02b1af7f, 0xfe43f5ff, 0x01129344, 0xff5eacf3, 0x0058f29f, 0xffd2b87c, 0x0014c792, 0xfff7bd28, 0x0002a002, 0xffff795f, + 0x3f8991bd, 0xe864d874, 0x0db0b7d1, 
0xf71674fa, 0x0602aec3, 0xfbf0746e, 0x02b29e84, 0xfe4293ec, 0x0113eabb, 0xff5d9867, 0x0059b5ad, 0xffd23eaf, 0x00150a19, 0xfff79e8f, 0x0002aac0, 0xffff776a, + 0x3e3f46f2, 0xe8a63671, 0x0d9706e1, 0xf721a756, 0x05fe4414, 0xfbf181a9, 0x02b3250f, 0xfe4176e2, 0x01151632, 0xff5c9eaa, 0x005a6946, 0xffd1cd37, 0x00154883, 0xfff781c5, 0x0002b4d2, 0xffff75a3, + 0x3cf43d8f, 0xe8ea568f, 0x0d7b843b, 0xf72e17c4, 0x05f8fc8f, 0xfbf327ab, 0x02b343b5, 0xfe409e95, 0x011615ce, 0xff5bbfaa, 0x005b0d72, 0xffd1640e, 0x001582d3, 0xfff766c8, 0x0002be3b, 0xffff740a, + 0x3ba890b9, 0xe9312813, 0x0d5e3749, 0xf73bc26b, 0x05f2da52, 0xfbf56549, 0x02b2fb1a, 0xfe400aae, 0x0116e9bc, 0xff5afb53, 0x005ba23b, 0xffd1032f, 0x0015b90b, 0xfff74d97, 0x0002c6fa, 0xffff729e, + 0x3a5c5b8e, 0xe97a9a17, 0x0d3f27ab, 0xf74aa34c, 0x05ebdf97, 0xfbf83941, 0x02b24bf1, 0xfe3fbacd, 0x0117922f, 0xff5a5189, 0x005c27af, 0xffd0aa93, 0x0015eb2f, 0xfff7362f, 0x0002cf12, 0xffff715d, + 0x390fb920, 0xe9c69b8c, 0x0d1e5d32, 0xf75ab63f, 0x05e40eb3, 0xfbfba23f, 0x02b136f9, 0xfe3fae87, 0x01180f5d, 0xff59c230, 0x005c9ddc, 0xffd05a33, 0x00161944, 0xfff7208d, 0x0002d684, 0xffff7047, + 0x37c2c474, 0xea151b3a, 0x0cfbdfdd, 0xf76bf6f7, 0x05db6a19, 0xfbff9ed7, 0x02afbd02, 0xfe3fe569, 0x01186187, 0xff594d27, 0x005d04d4, 0xffd01205, 0x0016434f, 0xfff70caf, 0x0002dd53, 0xffff6f5c, + 0x36759880, 0xea6607c4, 0x0cd7b7dd, 0xf77e6103, 0x05d1f459, 0xfc042d8e, 0x02addee8, 0xfe405ef6, 0x011888f2, 0xff58f249, 0x005d5cab, 0xffcfd1ff, 0x00166956, 0xfff6fa92, 0x0002e37e, 0xffff6e99, + 0x35285026, 0xeab94fa9, 0x0cb1ed8c, 0xf791efcb, 0x05c7b01a, 0xfc094cd2, 0x02ab9d96, 0xfe411aa8, 0x011885e7, 0xff58b16c, 0x005da575, 0xffcf9a15, 0x00168b5e, 0xfff6ea31, 0x0002e90a, 0xffff6dff, + 0x33db0631, 0xeb0ee148, 0x0c8a8973, 0xf7a69e96, 0x05bca021, 0xfc0efafe, 0x02a8fa03, 0xfe4217ef, 0x011858b9, 0xff588a65, 0x005ddf4c, 0xffcf6a3b, 0x0016a96f, 0xfff6db89, 0x0002edf6, 0xffff6d8d, + 0x328dd556, 0xeb66aae0, 0x0c619444, 0xf7bc6889, 0x05b0c74b, 0xfc15365c, 0x02a5f535, 0xfe435633, 0x011801be, 0xff587d03, 0x005e0a48, 0xffcf4262, 0x0016c390, 0xfff6ce97, 0x0002f246, 0xffff6d40, + 0x3140d82e, 0xebc09a94, 0x0c3716da, 0xf7d348a4, 0x05a42890, 0xfc1bfd22, 0x02a2903e, 0xfe44d4d3, 0x01178152, 0xff588913, 0x005e2687, 0xffcf227b, 0x0016d9c9, 0xfff6c356, 0x0002f5fc, 0xffff6d1a, + 0x2ff42933, 0xec1c9e6d, 0x0c0b1a37, 0xf7eb39cc, 0x0596c6ff, 0xfc234d75, 0x029ecc3c, 0xfe469325, 0x0116d7d7, 0xff58ae5d, 0x005e3427, 0xffcf0a77, 0x0016ec22, 0xfff6b9c1, 0x0002f919, 0xffff6d17, + 0x2ea7e2c0, 0xec7aa45b, 0x0bdda783, 0xf80436c0, 0x0588a5bf, 0xfc2b2567, 0x029aaa5a, 0xfe489077, 0x011605b5, 0xff58eca8, 0x005e3347, 0xffcefa44, 0x0016faa5, 0xfff6b1d5, 0x0002fba0, 0xffff6d38, + 0x2d5c1f0e, 0xecda9a39, 0x0baec80a, 0xf81e3a25, 0x0579c812, 0xfc3382fb, 0x02962bd1, 0xfe4acc0e, 0x01150b5a, 0xff5943b4, 0x005e240a, 0xffcef1cf, 0x0017055b, 0xfff6ab8c, 0x0002fd94, 0xffff6d7c, + 0x2c10f82d, 0xed3c6dce, 0x0b7e853c, 0xf8393e81, 0x056a314b, 0xfc3c6420, 0x029151e3, 0xfe4d4526, 0x0113e937, 0xff59b340, 0x005e0694, 0xffcef106, 0x00170c4f, 0xfff6a6e2, 0x0002fef6, 0xffff6de2, + 0x2ac68807, 0xeda00cd1, 0x0b4ce8a8, 0xf8553e3c, 0x0559e4da, 0xfc45c6b6, 0x028c1de0, 0xfe4ffaf6, 0x01129fc5, 0xff5a3b09, 0x005ddb0b, 0xffcef7d4, 0x00170f8a, 0xfff6a3d0, 0x0002ffc9, 0xffff6e67, + 0x297ce85a, 0xee0564e8, 0x0b19fbfe, 0xf87233a4, 0x0548e63f, 0xfc4fa88f, 0x02869122, 0xfe52ecab, 0x01112f81, 0xff5adac6, 0x005da198, 0xffcf0623, 0x00170f18, 0xfff6a252, 0x00030010, 0xffff6f0d, + 0x283432b9, 0xee6c63ad, 0x0ae5c90b, 0xf89018eb, 0x05373912, 0xfc5a076a, 0x0280ad0f, 0xfe561969, 
0x010f98eb, 0xff5b922d, 0x005d5a62, 0xffcf1bde, 0x00170b04, 0xfff6a262, 0x0002ffcd, 0xffff6fd1, + 0x26ec8083, 0xeed4f6b0, 0x0ab059bc, 0xf8aee828, 0x0524e100, 0xfc64e0f9, 0x027a7318, 0xfe598050, 0x010ddc8c, 0xff5c60ee, 0x005d0597, 0xffcf38ec, 0x0017035a, 0xfff6a3f9, 0x0002ff03, 0xffff70b2, + 0x25a5eae8, 0xef3f0b78, 0x0a79b814, 0xf8ce9b5d, 0x0511e1c6, 0xfc7032de, 0x0273e4b8, 0xfe5d2075, 0x010bfaee, 0xff5d46bb, 0x005ca363, 0xffcf5d36, 0x0016f828, 0xfff6a713, 0x0002fdb4, 0xffff71b0, + 0x24608ae2, 0xefaa8f87, 0x0a41ee32, 0xf8ef2c71, 0x04fe3f39, 0xfc7bfaad, 0x026d0374, 0xfe60f8ea, 0x0109f4a2, 0xff5e433e, 0x005c33f6, 0xffcf88a2, 0x0016e979, 0xfff6aba9, 0x0002fbe4, 0xffff72c9, + 0x231c7932, 0xf017705a, 0x0a09064e, 0xf9109535, 0x04e9fd3c, 0xfc8835ed, 0x0265d0dd, 0xfe6508b6, 0x0107ca3c, 0xff5f5621, 0x005bb77f, 0xffcfbb17, 0x0016d75b, 0xfff6b1b4, 0x0002f995, 0xffff73fc, + 0x21d9ce63, 0xf0859b6e, 0x09cf0ab4, 0xf932cf65, 0x04d51fc6, 0xfc94e216, 0x025e4e8b, 0xfe694edd, 0x01057c57, 0xff607f0b, 0x005b2e31, 0xffcff478, 0x0016c1dc, 0xfff6b92d, 0x0002f6c9, 0xffff7549, + 0x2098a2bf, 0xf0f4fe3d, 0x099405c6, 0xf955d4a7, 0x04bfaadf, 0xfca1fc96, 0x02567e22, 0xfe6dca58, 0x01030b8e, 0xff61bd9f, 0x005a9840, 0xffd034ac, 0x0016a90a, 0xfff6c20f, 0x0002f385, 0xffff76ae, + 0x1f590e55, 0xf1658649, 0x095801f8, 0xf9799e8f, 0x04a9a29e, 0xfcaf82ca, 0x024e614c, 0xfe727a1f, 0x01007885, 0xff631180, 0x0059f5e1, 0xffd07b95, 0x00168cf2, 0xfff6cc52, 0x0002efca, 0xffff782a, + 0x1e1b28f2, 0xf1d72114, 0x091b09d1, 0xf99e269e, 0x04930b2b, 0xfcbd7206, 0x0245f9bf, 0xfe775d1f, 0x00fdc3e0, 0xff647a4b, 0x0059474a, 0xffd0c915, 0x00166da5, 0xfff6d7f0, 0x0002eb9c, 0xffff79bc, + 0x1cdf0a20, 0xf249bc2c, 0x08dd27e6, 0xf9c36642, 0x047be8bc, 0xfccbc793, 0x023d4937, 0xfe7c7243, 0x00faee49, 0xff65f79e, 0x00588cb4, 0xffd11d0f, 0x00164b32, 0xfff6e4e1, 0x0002e6fe, 0xffff7b63, + 0x1ba4c923, 0xf2bd4523, 0x089e66dd, 0xf9e956da, 0x04643f95, 0xfcda80ad, 0x0234517a, 0xfe81b86d, 0x00f7f86e, 0xff678912, 0x0057c658, 0xffd17764, 0x001625a7, 0xfff6f31d, 0x0002e1f3, 0xffff7d1f, + 0x1a6c7cf9, 0xf331a99b, 0x085ed167, 0xfa0ff1b6, 0x044c1409, 0xfce99a86, 0x022b1455, 0xfe872e7c, 0x00f4e2ff, 0xff692e3f, 0x0056f471, 0xffd1d7f5, 0x0015fd15, 0xfff7029f, 0x0002dc7d, 0xffff7eed, + 0x19363c54, 0xf3a6d741, 0x081e7241, 0xfa373017, 0x04336a75, 0xfcf91246, 0x0221939d, 0xfe8cd349, 0x00f1aeb2, 0xff6ae6ba, 0x0056173b, 0xffd23ea1, 0x0015d18b, 0xfff7135d, 0x0002d6a0, 0xffff80cd, + 0x18021d9d, 0xf41cbbd3, 0x07dd5430, 0xfa5f0b30, 0x041a4744, 0xfd08e50c, 0x0217d12d, 0xfe92a5a7, 0x00ee5c3e, 0xff6cb218, 0x00552ef3, 0xffd2ab47, 0x0015a31b, 0xfff72551, 0x0002d060, 0xffff82bf, + 0x16d036eb, 0xf493451f, 0x079b8203, 0xfa877c29, 0x0400aeec, 0xfd190fed, 0x020dcee8, 0xfe98a466, 0x00eaec5e, 0xff6e8fe9, 0x00543bd8, 0xffd31dc7, 0x001571d5, 0xfff73873, 0x0002c9be, 0xffff84c0, + 0x15a09e09, 0xf50a610a, 0x0759068f, 0xfab07c1d, 0x03e6a5ee, 0xfd298ff6, 0x02038eb7, 0xfe9ece4f, 0x00e75fd1, 0xff707fbd, 0x00533e29, 0xffd395fd, 0x00153dca, 0xfff74cba, 0x0002c2be, 0xffff86d0, + 0x1473686d, 0xf581fd8b, 0x0715ecae, 0xfada0420, 0x03cc30d4, 0xfd3a622b, 0x01f9128a, 0xfea52227, 0x00e3b758, 0xff728121, 0x00523626, 0xffd413c9, 0x0015070b, 0xfff76220, 0x0002bb64, 0xffff88ee, + 0x1348ab3a, 0xf5fa08b5, 0x06d23f3d, 0xfb040d3b, 0x03b15431, 0xfd4b8389, 0x01ee5c55, 0xfeab9eb2, 0x00dff3b7, 0xff7493a2, 0x00512412, 0xffd49705, 0x0014cdab, 0xfff7789c, 0x0002b3b3, 0xffff8b19, + 0x12207b3e, 0xf67270b1, 0x068e091c, 0xfb2e906f, 0x039614a1, 0xfd5cf105, 0x01e36e14, 0xfeb242ac, 0x00dc15b4, 0xff76b6ca, 0x0050082f, 0xffd51f90, 0x001491b9, 
0xfff79026, 0x0002abad, 0xffff8d50, + 0x10faecee, 0xf6eb23c6, 0x0649552a, 0xfb5986b6, 0x037a76c7, 0xfd6ea790, 0x01d849c7, 0xfeb90cce, 0x00d81e1a, 0xff78ea20, 0x004ee2c1, 0xffd5ad44, 0x00145349, 0xfff7a8b6, 0x0002a357, 0xffff8f92, + 0x0fd81464, 0xf7641059, 0x06042e45, 0xfb84e906, 0x035e7f4e, 0xfd80a411, 0x01ccf173, 0xfebffbd0, 0x00d40db3, 0xff7b2d2d, 0x004db40c, 0xffd63ffe, 0x0014126c, 0xfff7c245, 0x00029ab2, 0xffff91de, + 0x0eb80562, 0xf7dd24ef, 0x05be9f49, 0xfbb0b04e, 0x034232e6, 0xfd92e36c, 0x01c16720, 0xfec70e64, 0x00cfe54f, 0xff7d7f76, 0x004c7c55, 0xffd6d798, 0x0013cf36, 0xfff7dcc8, 0x000291c3, 0xffff9434, + 0x0d9ad348, 0xf856502d, 0x0578b30e, 0xfbdcd57a, 0x03259644, 0xfda5627e, 0x01b5acdd, 0xfece433a, 0x00cba5bc, 0xff7fe07f, 0x004b3be3, 0xffd773ed, 0x001389b7, 0xfff7f83a, 0x0002888c, 0xffff9691, + 0x0c80911b, 0xf8cf80de, 0x05327467, 0xfc095174, 0x0308ae24, 0xfdb81e22, 0x01a9c4bc, 0xfed598fe, 0x00c74fce, 0xff824fca, 0x0049f2fc, 0xffd814d7, 0x00134204, 0xfff81490, 0x00027f11, 0xffff98f5, + 0x0b69517e, 0xf948a5f0, 0x04ebee1c, 0xfc361d25, 0x02eb7f44, 0xfdcb132d, 0x019db0d0, 0xfedd0e5c, 0x00c2e457, 0xff84ccdb, 0x0048a1e7, 0xffd8ba31, 0x0012f82e, 0xfff831c3, 0x00027555, 0xffff9b60, + 0x0a5526b0, 0xf9c1ae7b, 0x04a52af2, 0xfc633173, 0x02ce0e67, 0xfdde3e6f, 0x01917334, 0xfee4a1fa, 0x00be642f, 0xff875731, 0x004748ed, 0xffd963d4, 0x0012ac48, 0xfff84fcb, 0x00026b5b, 0xffff9dd0, + 0x0944228e, 0xfa3a89be, 0x045e359f, 0xfc908746, 0x02b0604f, 0xfdf19cb9, 0x01850e00, 0xfeec527e, 0x00b9d02b, 0xff89ee4d, 0x0045e856, 0xffda1199, 0x00125e66, 0xfff86e9e, 0x00026126, 0xffffa045, + 0x08365690, 0xfab32723, 0x041718d2, 0xfcbe1789, 0x029279c4, 0xfe052ad4, 0x01788354, 0xfef41e8c, 0x00b52925, 0xff8c91ad, 0x0044806c, 0xffdac35a, 0x00120e9b, 0xfff88e35, 0x000256b9, 0xffffa2be, + 0x072bd3c5, 0xfb2b7641, 0x03cfdf29, 0xfcebdb26, 0x02745f8c, 0xfe18e58c, 0x016bd54f, 0xfefc04c6, 0x00b06ff7, 0xff8f40d0, 0x00431177, 0xffdb78ef, 0x0011bcf9, 0xfff8ae88, 0x00024c18, 0xffffa539, + 0x0624aad6, 0xfba366df, 0x03889336, 0xfd19cb0e, 0x02561670, 0xfe2cc9a7, 0x015f0612, 0xff0403cc, 0x00aba57c, 0xff91fb31, 0x00419bc2, 0xffdc3231, 0x00116994, 0xfff8cf8d, 0x00024146, 0xffffa7b7, + 0x0520ec00, 0xfc1ae8f2, 0x03413f7b, 0xfd47e035, 0x0237a337, 0xfe40d3ed, 0x015217c0, 0xff0c1a3c, 0x00a6ca90, 0xff94c04f, 0x00401f98, 0xffdceef9, 0x00111480, 0xfff8f13c, 0x00023645, 0xffffaa35, + 0x0420a716, 0xfc91eca1, 0x02f9ee68, 0xfd761395, 0x02190aa6, 0xfe550124, 0x01450c7f, 0xff1446b5, 0x00a1e00f, 0xff978fa6, 0x003e9d42, 0xffddaf1e, 0x0010bdcf, 0xfff9138e, 0x00022b19, 0xffffacb4, + 0x0323eb7f, 0xfd086246, 0x02b2aa5c, 0xfda45e2c, 0x01fa5183, 0xfe694e12, 0x0137e672, 0xff1c87d3, 0x009ce6d8, 0xff9a68b0, 0x003d150d, 0xffde727a, 0x00106595, 0xfff93679, 0x00021fc5, 0xffffaf33, }; // cmd-line: fir -l 7 -s 44100 -c 19876 -n 16 -b 9.62 -const int32_t dn_sampler_filter_coefficients[] = { - 0x736144b5, 0x735ed3aa, 0x735780bb, 0x734b4c77, 0x733a37d2, 0x7324441e, 0x7309730f, 0x72e9c6b8, 0x72c5418e, 0x729be665, 0x726db871, 0x723abb44, 0x7202f2d3, 0x71c6636d, 0x718511c2, 0x713f02e0, 0x70f43c32, 0x70a4c37f, 0x70509eec, 0x6ff7d4f8, 0x6f9a6c7f, 0x6f386cb6, 0x6ed1dd2e, 0x6e66c5ce, 0x6df72ed9, 0x6d8320e6, 0x6d0aa4e6, 0x6c8dc41f, 0x6c0c882a, 0x6b86faf8, 0x6afd26cb, 0x6a6f1638, 0x69dcd425, 0x69466bc8, 0x68abe8a8, 0x680d5698, 0x676ac1bb, 0x66c4367d, 0x6619c197, 0x656b700a, 0x64b94f22, 0x64036c6f, 0x6349d5c9, 0x628c994c, 0x61cbc559, 0x61076890, 0x603f91d5, 0x5f745049, 0x5ea5b34c, 0x5dd3ca7a, 0x5cfea5aa, 0x5c2654ed, 0x5b4ae88d, 0x5a6c7108, 0x598aff13, 0x58a6a397, 0x57bf6fae, 
0x56d574a2, 0x55e8c3ee, 0x54f96f37, 0x54078851, 0x53132138, 0x521c4c10, 0x51231b26, 0x5027a0e9, 0x4f29efed, 0x4e2a1ae8, 0x4d2834b0, 0x4c245038, 0x4b1e8091, 0x4a16d8e5, 0x490d6c79, 0x48024ea7, 0x46f592e2, 0x45e74cad, 0x44d78fa0, 0x43c66f62, 0x42b3ffa9, 0x41a05437, 0x408b80d9, 0x3f759967, 0x3e5eb1bd, 0x3d46ddc1, 0x3c2e315a, 0x3b14c072, 0x39fa9ef3, 0x38dfe0c6, 0x37c499d0, 0x36a8ddf3, 0x358cc109, 0x347056e3, 0x3353b349, 0x3236e9f7, 0x311a0e9b, 0x2ffd34d4, 0x2ee07030, 0x2dc3d429, 0x2ca77428, 0x2b8b637b, 0x2a6fb55e, 0x29547ced, 0x2839cd30, 0x271fb90d, 0x2606534e, 0x24edae9c, 0x23d5dd81, 0x22bef262, 0x21a8ff7e, 0x209416f2, 0x1f804ab0, 0x1e6dac83, 0x1d5c4e09, 0x1c4c40b6, 0x1b3d95d1, 0x1a305e70, 0x1924ab7b, 0x181a8da5, 0x17121573, 0x160b5331, 0x150656f8, 0x140330a9, 0x1301efed, 0x1202a434, 0x11055cb4, 0x100a2864, 0x0f111603, 0x0e1a340d, 0x0d2590c3, - 0x0c333a22, 0x0b433de8, 0x0a55a98f, 0x096a8a51, 0x0881ed1f, 0x079bdea7, 0x06b86b52, 0x05d79f40, 0x04f98649, 0x041e2bfe, 0x03459ba4, 0x026fe039, 0x019d046d, 0x00cd12a4, 0x000014f8, 0xff361534, 0xfe6f1cd7, 0xfdab350f, 0xfcea66be, 0xfc2cba75, 0xfb723876, 0xfabae8b2, 0xfa06d2ca, 0xf955fe0c, 0xf8a87178, 0xf7fe33ba, 0xf7574b2b, 0xf6b3bdd3, 0xf6139169, 0xf576cb4e, 0xf4dd7092, 0xf44785f1, 0xf3b50fd6, 0xf3261255, 0xf29a9133, 0xf2128fde, 0xf18e1174, 0xf10d18bd, 0xf08fa82f, 0xf015c1ee, 0xef9f67cb, 0xef2c9b43, 0xeebd5d81, 0xee51af5f, 0xede99165, 0xed8503c7, 0xed24066b, 0xecc698e6, 0xec6cba79, 0xec166a19, 0xebc3a669, 0xeb746dbe, 0xeb28be1f, 0xeae09544, 0xea9bf097, 0xea5acd38, 0xea1d27f7, 0xe9e2fd5b, 0xe9ac49a0, 0xe97908b8, 0xe9493649, 0xe91ccdb5, 0xe8f3ca12, 0xe8ce2631, 0xe8abdc9d, 0xe88ce79a, 0xe871412a, 0xe858e30a, 0xe843c6b5, 0xe831e563, 0xe823380d, 0xe817b76c, 0xe80f5bfb, 0xe80a1df5, 0xe807f55b, 0xe808d9f1, 0xe80cc342, 0xe813a89f, 0xe81d8122, 0xe82a43ac, 0xe839e6e9, 0xe84c6152, 0xe861a92b, 0xe879b487, 0xe8947947, 0xe8b1ed1c, 0xe8d2058b, 0xe8f4b7e9, 0xe919f961, 0xe941bef3, 0xe96bfd76, 0xe998a999, 0xe9c7b7e3, 0xe9f91cb9, 0xea2ccc59, 0xea62bae0, 0xea9adc49, 0xead52471, 0xeb118714, 0xeb4ff7d4, 0xeb906a35, 0xebd2d1a1, 0xec17216b, 0xec5d4ccd, 0xeca546eb, 0xecef02d5, 0xed3a7388, 0xed878bf0, 0xedd63ee5, 0xee267f35, 0xee783f9e, 0xeecb72d1, 0xef200b76, 0xef75fc2b, 0xefcd3787, 0xf025b01a, 0xf07f586e, 0xf0da230b, 0xf1360276, 0xf192e932, 0xf1f0c9c5, 0xf24f96b5, 0xf2af428c, 0xf30fbfd7, 0xf371012c, 0xf3d2f926, 0xf4359a6a, 0xf498d7a5, - 0xf4fca390, 0xf560f0f3, 0xf5c5b2a1, 0xf62adb7c, 0xf6905e79, 0xf6f62e9d, 0xf75c3eff, 0xf7c282cb, 0xf828ed43, 0xf88f71bf, 0xf8f603ae, 0xf95c9699, 0xf9c31e22, 0xfa298e07, 0xfa8fda21, 0xfaf5f669, 0xfb5bd6f4, 0xfbc16ff6, 0xfc26b5c5, 0xfc8b9cda, 0xfcf019cd, 0xfd54215c, 0xfdb7a869, 0xfe1aa3fc, 0xfe7d0942, 0xfedecd90, 0xff3fe663, 0xffa04963, 0xffffec5f, 0x005ec552, 0x00bcca63, 0x0119f1e4, 0x01763256, 0x01d18265, 0x022bd8ee, 0x02852cfc, 0x02dd75ca, 0x0334aac4, 0x038ac385, 0x03dfb7dd, 0x04337fcb, 0x04861383, 0x04d76b6b, 0x0527801d, 0x05764a68, 0x05c3c34e, 0x060fe408, 0x065aa604, 0x06a402e4, 0x06ebf483, 0x073274f1, 0x07777e74, 0x07bb0b8b, 0x07fd16eb, 0x083d9b81, 0x087c9471, 0x08b9fd18, 0x08f5d10a, 0x09300c14, 0x0968aa3b, 0x099fa7bb, 0x09d5010b, 0x0a08b2d9, 0x0a3aba09, 0x0a6b13bc, 0x0a99bd47, 0x0ac6b43a, 0x0af1f65d, 0x0b1b81ad, 0x0b435462, 0x0b696ceb, 0x0b8dc9ed, 0x0bb06a47, 0x0bd14d0b, 0x0bf07186, 0x0c0dd738, 0x0c297dd9, 0x0c436557, 0x0c5b8dd4, 0x0c71f7a9, 0x0c86a361, 0x0c9991be, 0x0caac3b5, 0x0cba3a6d, 0x0cc7f742, 0x0cd3fbc0, 0x0cde49a8, 0x0ce6e2ea, 0x0cedc9a7, 0x0cf30031, 0x0cf6890a, 0x0cf866e1, 0x0cf89c96, 0x0cf72d34, 0x0cf41bf7, 0x0cef6c43, 
0x0ce921ab, 0x0ce13feb, 0x0cd7caec, 0x0cccc6bc, 0x0cc03797, 0x0cb221de, 0x0ca28a1a, 0x0c9174fa, 0x0c7ee754, 0x0c6ae622, 0x0c557681, 0x0c3e9db5, 0x0c26611f, 0x0c0cc646, 0x0bf1d2d0, 0x0bd58c81, 0x0bb7f940, 0x0b991f0f, 0x0b79040c, 0x0b57ae75, 0x0b3524a0, 0x0b116cff, 0x0aec8e1c, 0x0ac68e9b, 0x0a9f7537, 0x0a7748c0, 0x0a4e101f, 0x0a23d24e, 0x09f8965d, 0x09cc636e, 0x099f40b5, 0x09713575, - 0x09424904, 0x091282c4, 0x08e1ea27, 0x08b086aa, 0x087e5fd7, 0x084b7d43, 0x0817e68c, 0x07e3a35a, 0x07aebb5d, 0x0779364a, 0x07431bdf, 0x070c73dd, 0x06d5460b, 0x069d9a31, 0x0665781b, 0x062ce795, 0x05f3f06b, 0x05ba9a6b, 0x0580ed5f, 0x0546f10f, 0x050cad3f, 0x04d229b1, 0x04976e20, 0x045c8240, 0x04216dc0, 0x03e63846, 0x03aae970, 0x036f88d2, 0x03341df4, 0x02f8b055, 0x02bd4768, 0x0281ea90, 0x0246a125, 0x020b726f, 0x01d065a8, 0x019581f9, 0x015ace79, 0x0120522f, 0x00e6140f, 0x00ac1af9, 0x00726dbb, 0x0039130c, 0x00001191, 0xffc76fd5, 0xff8f344f, 0xff576560, 0xff20094d, 0xfee92646, 0xfeb2c261, 0xfe7ce399, 0xfe478fd2, 0xfe12ccd1, 0xfddea042, 0xfdab0fb6, 0xfd7820a0, 0xfd45d856, 0xfd143c12, 0xfce350f0, 0xfcb31bec, 0xfc83a1e5, 0xfc54e79a, 0xfc26f1ad, 0xfbf9c49d, 0xfbcd64ca, 0xfba1d673, 0xfb771db9, 0xfb4d3e97, 0xfb243cea, 0xfafc1c6e, 0xfad4e0b9, 0xfaae8d43, 0xfa89255f, 0xfa64ac3f, 0xfa4124f2, 0xfa1e9262, 0xf9fcf758, 0xf9dc567b, 0xf9bcb24a, 0xf99e0d26, 0xf980694a, 0xf963c8cc, 0xf9482da0, 0xf92d9997, 0xf9140e5e, 0xf8fb8d7d, 0xf8e4185a, 0xf8cdb036, 0xf8b85631, 0xf8a40b44, 0xf890d048, 0xf87ea5f1, 0xf86d8cd1, 0xf85d8555, 0xf84e8fc9, 0xf840ac57, 0xf833db04, 0xf8281bb6, 0xf81d6e2e, 0xf813d20d, 0xf80b46d3, 0xf803cbdc, 0xf7fd6065, 0xf7f8038c, 0xf7f3b44b, 0xf7f0717e, 0xf7ee39e2, 0xf7ed0c12, 0xf7ece68c, 0xf7edc7af, 0xf7efadbd, 0xf7f296d7, 0xf7f68103, 0xf7fb6a29, 0xf8015015, 0xf8083077, 0xf81008e2, 0xf818d6cf, 0xf822979b, 0xf82d488c, 0xf838e6c9, 0xf8456f65, 0xf852df56, 0xf861337c, 0xf870689f, 0xf8807b70, 0xf8916889, 0xf8a32c6e, 0xf8b5c38d, - 0xf8c92a41, 0xf8dd5ccf, 0xf8f25767, 0xf9081629, 0xf91e9521, 0xf935d048, 0xf94dc388, 0xf9666ab7, 0xf97fc19e, 0xf999c3f4, 0xf9b46d64, 0xf9cfb988, 0xf9eba3ef, 0xfa082817, 0xfa254176, 0xfa42eb75, 0xfa61216f, 0xfa7fdeba, 0xfa9f1e9e, 0xfabedc5a, 0xfadf1328, 0xfaffbe36, 0xfb20d8ad, 0xfb425db0, 0xfb64485b, 0xfb8693c6, 0xfba93b01, 0xfbcc391d, 0xfbef8924, 0xfc13261f, 0xfc370b14, 0xfc5b3309, 0xfc7f9902, 0xfca43803, 0xfcc90b12, 0xfcee0d33, 0xfd133970, 0xfd388ad1, 0xfd5dfc63, 0xfd838938, 0xfda92c63, 0xfdcee0ff, 0xfdf4a22a, 0xfe1a6b08, 0xfe4036c5, 0xfe660094, 0xfe8bc3ad, 0xfeb17b53, 0xfed722d0, 0xfefcb57a, 0xff222eac, 0xff4789d1, 0xff6cc25a, 0xff91d3c6, 0xffb6b99f, 0xffdb6f7c, 0xfffff100, 0x002439db, 0x004845cc, 0x006c10a0, 0x008f9631, 0x00b2d26b, 0x00d5c147, 0x00f85ecf, 0x011aa71d, 0x013c965b, 0x015e28c7, 0x017f5aad, 0x01a0286c, 0x01c08e78, 0x01e08952, 0x02001593, 0x021f2fe5, 0x023dd505, 0x025c01c5, 0x0279b30b, 0x0296e5d0, 0x02b39724, 0x02cfc429, 0x02eb6a18, 0x03068640, 0x03211603, 0x033b16dc, 0x03548659, 0x036d621f, 0x0385a7eb, 0x039d558e, 0x03b468f1, 0x03cae014, 0x03e0b90d, 0x03f5f20a, 0x040a894e, 0x041e7d34, 0x0431cc31, 0x044474ce, 0x045675ab, 0x0467cd83, 0x04787b24, 0x04887d76, 0x0497d378, 0x04a67c41, 0x04b476fe, 0x04c1c2f3, 0x04ce5f7d, 0x04da4c10, 0x04e58836, 0x04f01392, 0x04f9edda, 0x050316e0, 0x050b8e8a, 0x051354d5, 0x051a69d4, 0x0520cdb1, 0x052680ae, 0x052b8320, 0x052fd573, 0x0533782a, 0x05366bdc, 0x0538b136, 0x053a48fa, 0x053b3400, 0x053b7332, 0x053b0791, 0x0539f231, 0x0538343a, 0x0535cee9, 0x0532c38c, 0x052f1386, - 0x052ac04c, 0x0525cb66, 0x0520366d, 0x051a030f, 0x05133308, 0x050bc828, 0x0503c44d, 
0x04fb2969, 0x04f1f97c, 0x04e83697, 0x04dde2da, 0x04d30074, 0x04c791a4, 0x04bb98b5, 0x04af1804, 0x04a211f8, 0x04948906, 0x04867fb3, 0x0477f88d, 0x0468f62e, 0x04597b40, 0x04498a72, 0x04392684, 0x0428523d, 0x0417106e, 0x040563f4, 0x03f34fb2, 0x03e0d697, 0x03cdfb99, 0x03bac1b4, 0x03a72bf0, 0x03933d58, 0x037ef900, 0x036a6201, 0x03557b7a, 0x03404890, 0x032acc6d, 0x03150a3f, 0x02ff0538, 0x02e8c08e, 0x02d23f7a, 0x02bb8537, 0x02a49505, 0x028d7223, 0x02761fd3, 0x025ea157, 0x0246f9f3, 0x022f2cea, 0x02173d81, 0x01ff2ef9, 0x01e70494, 0x01cec194, 0x01b66936, 0x019dfeb6, 0x0185854f, 0x016d0037, 0x015472a1, 0x013bdfbc, 0x01234ab4, 0x010ab6b0, 0x00f226d0, 0x00d99e31, 0x00c11feb, 0x00a8af0c, 0x00904ea0, 0x007801aa, 0x005fcb26, 0x0047ae09, 0x002fad3f, 0x0017cbae, 0x00000c33, 0xffe871a0, 0xffd0fec1, 0xffb9b656, 0xffa29b18, 0xff8bafb3, 0xff74f6cc, 0xff5e72fb, 0xff4826cf, 0xff3214c9, 0xff1c3f63, 0xff06a907, 0xfef15417, 0xfedc42e7, 0xfec777be, 0xfeb2f4d9, 0xfe9ebc66, 0xfe8ad087, 0xfe773351, 0xfe63e6cb, 0xfe50ecf0, 0xfe3e47ac, 0xfe2bf8de, 0xfe1a0256, 0xfe0865d7, 0xfdf72515, 0xfde641b7, 0xfdd5bd53, 0xfdc59972, 0xfdb5d78f, 0xfda67913, 0xfd977f5d, 0xfd88ebb9, 0xfd7abf64, 0xfd6cfb8e, 0xfd5fa157, 0xfd52b1cf, 0xfd462df6, 0xfd3a16c0, 0xfd2e6d0d, 0xfd2331b0, 0xfd18656f, 0xfd0e08fb, 0xfd041cfa, 0xfcfaa200, 0xfcf19894, 0xfce9012c, 0xfce0dc2f, 0xfcd929f4, 0xfcd1eac3, 0xfccb1ed7, 0xfcc4c658, 0xfcbee162, 0xfcb97001, 0xfcb47232, 0xfcafe7e2, 0xfcabd0f2, 0xfca82d32, - 0xfca4fc64, 0xfca23e3d, 0xfc9ff262, 0xfc9e186a, 0xfc9cafe0, 0xfc9bb83e, 0xfc9b30f3, 0xfc9b195f, 0xfc9b70d6, 0xfc9c369c, 0xfc9d69eb, 0xfc9f09ee, 0xfca115c5, 0xfca38c83, 0xfca66d2e, 0xfca9b6bf, 0xfcad6827, 0xfcb18047, 0xfcb5fdf7, 0xfcbae002, 0xfcc0252b, 0xfcc5cc26, 0xfccbd3a0, 0xfcd23a3a, 0xfcd8fe8b, 0xfce01f21, 0xfce79a7f, 0xfcef6f20, 0xfcf79b75, 0xfd001de8, 0xfd08f4d6, 0xfd121e99, 0xfd1b9980, 0xfd2563d3, 0xfd2f7bd1, 0xfd39dfb4, 0xfd448dae, 0xfd4f83eb, 0xfd5ac08e, 0xfd6641b8, 0xfd720581, 0xfd7e09fc, 0xfd8a4d37, 0xfd96cd3d, 0xfda3880f, 0xfdb07bb0, 0xfdbda61a, 0xfdcb0546, 0xfdd89727, 0xfde659af, 0xfdf44acc, 0xfe026869, 0xfe10b06f, 0xfe1f20c5, 0xfe2db74f, 0xfe3c71f1, 0xfe4b4e8c, 0xfe5a4b03, 0xfe696534, 0xfe789b01, 0xfe87ea47, 0xfe9750e8, 0xfea6ccc3, 0xfeb65bb9, 0xfec5fbac, 0xfed5aa7e, 0xfee56614, 0xfef52c54, 0xff04fb25, 0xff14d073, 0xff24aa2a, 0xff348639, 0xff446293, 0xff543d2e, 0xff641402, 0xff73e50e, 0xff83ae52, 0xff936dd2, 0xffa3219a, 0xffb2c7b6, 0xffc25e3b, 0xffd1e340, 0xffe154e3, 0xfff0b148, 0xfffff697, 0x000f22fe, 0x001e34b4, 0x002d29f3, 0x003c00fd, 0x004ab81b, 0x00594d9d, 0x0067bfd8, 0x00760d2a, 0x008433f9, 0x009232b2, 0x00a007c9, 0x00adb1bb, 0x00bb2f0b, 0x00c87e47, 0x00d59e03, 0x00e28cdd, 0x00ef497a, 0x00fbd28a, 0x010826c4, 0x011444e7, 0x01202bbe, 0x012bda1b, 0x01374eda, 0x014288e0, 0x014d871b, 0x01584883, 0x0162cc19, 0x016d10e9, 0x01771608, 0x0180da94, 0x018a5db5, 0x01939e9e, 0x019c9c8b, 0x01a556c1, 0x01adcc91, 0x01b5fd54, 0x01bde86f, 0x01c58d50, 0x01cceb6e, 0x01d4024c, 0x01dad175, 0x01e15880, 0x01e7970e, - 0x01ed8cc7, 0x01f33960, 0x01f89c98, 0x01fdb637, 0x0202860e, 0x02070bf9, 0x020b47dd, 0x020f39ab, 0x0212e15c, 0x02163ef1, 0x02195278, 0x021c1c06, 0x021e9bbb, 0x0220d1bf, 0x0222be45, 0x02246187, 0x0225bbca, 0x0226cd5b, 0x02279691, 0x022817ca, 0x0228516f, 0x022843f0, 0x0227efc6, 0x02275572, 0x0226757e, 0x0225507c, 0x0223e706, 0x022239bc, 0x02204949, 0x021e165d, 0x021ba1b2, 0x0218ec06, 0x0215f621, 0x0212c0d2, 0x020f4cec, 0x020b9b4c, 0x0207acd4, 0x0203826c, 0x01ff1d04, 0x01fa7d91, 0x01f5a50d, 0x01f0947a, 0x01eb4cde, 0x01e5cf44, 0x01e01cbe, 0x01da3661, 
0x01d41d4a, 0x01cdd297, 0x01c7576d, 0x01c0acf5, 0x01b9d45b, 0x01b2ced1, 0x01ab9d8b, 0x01a441c2, 0x019cbcb1, 0x01950f98, 0x018d3bb8, 0x01854258, 0x017d24bf, 0x0174e437, 0x016c820d, 0x0163ff90, 0x015b5e11, 0x01529ee3, 0x0149c35a, 0x0140cccb, 0x0137bc8f, 0x012e93fc, 0x0125546c, 0x011bff38, 0x011295bb, 0x0109194f, 0x00ff8b4f, 0x00f5ed15, 0x00ec3ffc, 0x00e2855d, 0x00d8be92, 0x00ceecf5, 0x00c511dc, 0x00bb2e9f, 0x00b14493, 0x00a7550c, 0x009d615d, 0x00936ad6, 0x008972c7, 0x007f7a7c, 0x00758341, 0x006b8e5c, 0x00619d15, 0x0057b0ae, 0x004dca68, 0x0043eb7f, 0x003a152f, 0x003048ae, 0x0026872f, 0x001cd1e4, 0x001329f7, 0x00099093, 0x000006db, 0xfff68df1, 0xffed26f0, 0xffe3d2f2, 0xffda930a, 0xffd16848, 0xffc853b6, 0xffbf565a, 0xffb67137, 0xffada547, 0xffa4f383, 0xff9c5cdc, 0xff93e241, 0xff8b8498, 0xff8344c4, 0xff7b23a1, 0xff732209, 0xff6b40cb, 0xff6380b5, 0xff5be28d, 0xff546713, 0xff4d0f02, 0xff45db10, 0xff3ecbea, 0xff37e23b, 0xff311ea4, 0xff2a81c4, 0xff240c2f, 0xff1dbe77, 0xff179926, - 0xff119cc0, 0xff0bc9c2, 0xff0620a4, 0xff00a1d8, 0xfefb4dc7, 0xfef624d8, 0xfef12766, 0xfeec55cc, 0xfee7b059, 0xfee33759, 0xfedeeb11, 0xfedacbbf, 0xfed6d99c, 0xfed314da, 0xfecf7da3, 0xfecc141d, 0xfec8d867, 0xfec5ca9a, 0xfec2eaca, 0xfec03901, 0xfebdb547, 0xfebb5f9b, 0xfeb937f9, 0xfeb73e54, 0xfeb5729b, 0xfeb3d4b7, 0xfeb26489, 0xfeb121ee, 0xfeb00cbf, 0xfeaf24cc, 0xfeae69e1, 0xfeaddbc4, 0xfead7a37, 0xfead44f4, 0xfead3bb2, 0xfead5e22, 0xfeadabef, 0xfeae24c1, 0xfeaec838, 0xfeaf95f2, 0xfeb08d86, 0xfeb1ae87, 0xfeb2f884, 0xfeb46b07, 0xfeb60596, 0xfeb7c7b0, 0xfeb9b0d3, 0xfebbc078, 0xfebdf613, 0xfec05114, 0xfec2d0e8, 0xfec574f9, 0xfec83caa, 0xfecb275e, 0xfece3472, 0xfed16342, 0xfed4b325, 0xfed82370, 0xfedbb373, 0xfedf627d, 0xfee32fdb, 0xfee71ad4, 0xfeeb22af, 0xfeef46b0, 0xfef3861a, 0xfef7e02a, 0xfefc541e, 0xff00e133, 0xff0586a0, 0xff0a439e, 0xff0f1762, 0xff140121, 0xff19000e, 0xff1e135b, 0xff233a39, 0xff2873d6, 0xff2dbf61, 0xff331c08, 0xff3888f8, 0xff3e055d, 0xff439064, 0xff492937, 0xff4ecf02, 0xff5480f0, 0xff5a3e2c, 0xff6005e1, 0xff65d73a, 0xff6bb163, 0xff719388, 0xff777cd6, 0xff7d6c79, 0xff83619f, 0xff895b77, 0xff8f5930, 0xff9559fb, 0xff9b5d0a, 0xffa16190, 0xffa766c0, 0xffad6bd0, 0xffb36ff9, 0xffb97271, 0xffbf7274, 0xffc56f3e, 0xffcb680e, 0xffd15c22, 0xffd74abe, 0xffdd3325, 0xffe3149e, 0xffe8ee72, 0xffeebfec, 0xfff48859, 0xfffa470a, 0xfffffb51, 0x0005a483, 0x000b41fa, 0x0010d30e, 0x00165720, 0x001bcd8e, 0x002135bd, 0x00268f13, 0x002bd8fa, 0x003112e0, 0x00363c35, 0x003b546b, 0x00405afa, 0x00454f5d, 0x004a310f, 0x004eff94, - 0x0053ba6e, 0x00586127, 0x005cf349, 0x00617065, 0x0065d80c, 0x006a29d6, 0x006e655c, 0x00728a3d, 0x0076981a, 0x007a8e98, 0x007e6d61, 0x00823422, 0x0085e28b, 0x00897851, 0x008cf52d, 0x009058da, 0x0093a31a, 0x0096d3af, 0x0099ea62, 0x009ce6fe, 0x009fc954, 0x00a29136, 0x00a53e7b, 0x00a7d0ff, 0x00aa48a0, 0x00aca542, 0x00aee6ca, 0x00b10d23, 0x00b3183c, 0x00b50805, 0x00b6dc75, 0x00b89584, 0x00ba3330, 0x00bbb579, 0x00bd1c63, 0x00be67f6, 0x00bf983d, 0x00c0ad48, 0x00c1a728, 0x00c285f4, 0x00c349c4, 0x00c3f2b6, 0x00c480e9, 0x00c4f480, 0x00c54da2, 0x00c58c79, 0x00c5b132, 0x00c5bbfc, 0x00c5ad0a, 0x00c58494, 0x00c542d1, 0x00c4e7fe, 0x00c47459, 0x00c3e824, 0x00c343a4, 0x00c2871f, 0x00c1b2e0, 0x00c0c731, 0x00bfc463, 0x00beaac6, 0x00bd7aae, 0x00bc3470, 0x00bad866, 0x00b966e9, 0x00b7e055, 0x00b6450a, 0x00b49568, 0x00b2d1d1, 0x00b0faaa, 0x00af1059, 0x00ad1346, 0x00ab03da, 0x00a8e282, 0x00a6afa8, 0x00a46bbc, 0x00a2172d, 0x009fb26c, 0x009d3deb, 0x009aba1d, 0x00982778, 0x0095866f, 0x0092d77b, 0x00901b11, 0x008d51ab, 0x008a7bc1, 
0x008799cd, 0x0084ac48, 0x0081b3af, 0x007eb07b, 0x007ba32a, 0x00788c36, 0x00756c1d, 0x0072435b, 0x006f126b, 0x006bd9cd, 0x006899fb, 0x00655372, 0x006206b1, 0x005eb431, 0x005b5c71, 0x0057ffec, 0x00549f1c, 0x00513a7e, 0x004dd28c, 0x004a67c0, 0x0046fa93, 0x00438b7e, 0x00401af9, 0x003ca97b, 0x0039377a, 0x0035c56c, 0x003253c6, 0x002ee2fa, 0x002b737b, 0x002805ba, 0x00249a28, 0x00213134, 0x001dcb4a, 0x001a68d8, 0x00170a47, 0x0013b003, 0x00105a72, 0x000d09fc, 0x0009bf05, 0x000679f2, 0x00033b23, 0x000002f9, 0xfffcd1d3, - 0xfff9a80d, 0xfff68603, 0xfff36c0d, 0xfff05a84, 0xffed51bc, 0xffea520a, 0xffe75bbe, 0xffe46f2a, 0xffe18c9a, 0xffdeb45b, 0xffdbe6b6, 0xffd923f4, 0xffd66c59, 0xffd3c02a, 0xffd11fa9, 0xffce8b13, 0xffcc02a8, 0xffc986a1, 0xffc71738, 0xffc4b4a4, 0xffc25f1a, 0xffc016cb, 0xffbddbe8, 0xffbbae9f, 0xffb98f1c, 0xffb77d88, 0xffb57a0b, 0xffb384ca, 0xffb19de7, 0xffafc584, 0xffadfbbe, 0xffac40b3, 0xffaa947c, 0xffa8f730, 0xffa768e6, 0xffa5e9b1, 0xffa479a2, 0xffa318c7, 0xffa1c72f, 0xffa084e3, 0xff9f51eb, 0xff9e2e50, 0xff9d1a14, 0xff9c1539, 0xff9b1fc1, 0xff9a39a9, 0xff9962ec, 0xff989b85, 0xff97e36c, 0xff973a96, 0xff96a0f8, 0xff961684, 0xff959b29, 0xff952ed7, 0xff94d178, 0xff9482f8, 0xff944340, 0xff941236, 0xff93efbf, 0xff93dbc0, 0xff93d618, 0xff93deaa, 0xff93f552, 0xff9419ef, 0xff944c5a, 0xff948c6e, 0xff94da03, 0xff9534f0, 0xff959d0a, 0xff961224, 0xff969412, 0xff9722a5, 0xff97bdac, 0xff9864f6, 0xff991851, 0xff99d789, 0xff9aa268, 0xff9b78ba, 0xff9c5a47, 0xff9d46d6, 0xff9e3e30, 0xff9f4019, 0xffa04c57, 0xffa162ae, 0xffa282e1, 0xffa3acb4, 0xffa4dfe8, 0xffa61c3e, 0xffa76176, 0xffa8af51, 0xffaa058d, 0xffab63ea, 0xffacca25, 0xffae37fd, 0xffafad2e, 0xffb12976, 0xffb2ac90, 0xffb4363a, 0xffb5c630, 0xffb75c2c, 0xffb8f7ea, 0xffba9927, 0xffbc3f9d, 0xffbdeb07, 0xffbf9b21, 0xffc14fa5, 0xffc3084f, 0xffc4c4da, 0xffc68502, 0xffc84881, 0xffca0f14, 0xffcbd876, 0xffcda463, 0xffcf7299, 0xffd142d3, 0xffd314cf, 0xffd4e84a, 0xffd6bd01, 0xffd892b4, 0xffda6921, 0xffdc4007, 0xffde1726, 0xffdfee3f, 0xffe1c511, 0xffe39b60, 0xffe570ed, 0xffe7457c, 0xffe918ce, - 0xffeaeaab, 0xffecbad5, 0xffee8913, 0xfff0552d, 0xfff21ee8, 0xfff3e60f, 0xfff5aa69, 0xfff76bc2, 0xfff929e3, 0xfffae49b, 0xfffc9bb4, 0xfffe4efd, 0xfffffe46, 0x0001a95d, 0x00035015, 0x0004f23e, 0x00068fad, 0x00082835, 0x0009bbab, 0x000b49e6, 0x000cd2bd, 0x000e5609, 0x000fd3a3, 0x00114b67, 0x0012bd30, 0x001428db, 0x00158e47, 0x0016ed53, 0x001845e0, 0x001997d0, 0x001ae306, 0x001c2765, 0x001d64d5, 0x001e9b3a, 0x001fca7d, 0x0020f288, 0x00221344, 0x00232c9d, 0x00243e7f, 0x002548d9, 0x00264b9a, 0x002746b2, 0x00283a12, 0x002925ae, 0x002a0979, 0x002ae568, 0x002bb971, 0x002c858d, 0x002d49b4, 0x002e05df, 0x002eba0a, 0x002f6630, 0x00300a4f, 0x0030a665, 0x00313a72, 0x0031c677, 0x00324a74, 0x0032c66e, 0x00333a67, 0x0033a665, 0x00340a6d, 0x00346687, 0x0034babb, 0x00350711, 0x00354b94, 0x0035884f, 0x0035bd4e, 0x0035ea9d, 0x0036104b, 0x00362e66, 0x003644fd, 0x00365422, 0x00365be6, 0x00365c5b, 0x00365594, 0x003647a5, 0x003632a2, 0x003616a2, 0x0035f3b9, 0x0035ca00, 0x0035998d, 0x00356279, 0x003524dd, 0x0034e0d3, 0x00349674, 0x003445dc, 0x0033ef25, 0x0033926d, 0x00332fcf, 0x0032c769, 0x00325958, 0x0031e5ba, 0x00316cae, 0x0030ee53, 0x00306ac8, 0x002fe22c, 0x002f54a1, 0x002ec246, 0x002e2b3c, 0x002d8fa4, 0x002cefa1, 0x002c4b53, 0x002ba2dc, 0x002af65f, 0x002a45fe, 0x002991db, 0x0028da1a, 0x00281edd, 0x00276046, 0x00269e7a, 0x0025d99b, 0x002511cd, 0x00244733, 0x002379ef, 0x0022aa26, 0x0021d7fa, 0x00210390, 0x00202d09, 0x001f5489, 0x001e7a33, 0x001d9e2a, 0x001cc091, 0x001be18a, 0x001b0138, 
0x001a1fbc, 0x00193d3a, 0x001859d2, 0x001775a7, - 0x001690d9, 0x0015ab8b, 0x0014c5dc, 0x0013dfed, 0x0012f9de, 0x001213d0, 0x00112de1, 0x00104831, 0x000f62de, 0x000e7e08, 0x000d99cc, 0x000cb647, 0x000bd397, 0x000af1d9, 0x000a1129, 0x000931a3, 0x00085362, 0x00077681, 0x00069b1b, 0x0005c149, 0x0004e926, 0x000412c9, 0x00033e4c, 0x00026bc6, 0x00019b4e, 0x0000ccfc, 0x000000e6, 0xffff3721, 0xfffe6fc3, 0xfffdaadf, 0xfffce88b, 0xfffc28d9, 0xfffb6bdd, 0xfffab1a8, 0xfff9fa4d, 0xfff945dc, 0xfff89465, 0xfff7e5f9, 0xfff73aa7, 0xfff6927e, 0xfff5ed8b, 0xfff54bdc, 0xfff4ad7e, 0xfff4127d, 0xfff37ae4, 0xfff2e6bf, 0xfff25619, 0xfff1c8fa, 0xfff13f6c, 0xfff0b977, 0xfff03724, 0xffefb87a, 0xffef3d7f, 0xffeec63a, 0xffee52b1, 0xffede2e7, 0xffed76e3, 0xffed0ea7, 0xffecaa36, 0xffec4994, 0xffebecc2, 0xffeb93c3, 0xffeb3e96, 0xffeaed3c, 0xffea9fb6, 0xffea5602, 0xffea1020, 0xffe9ce0d, 0xffe98fc8, 0xffe9554c, 0xffe91e99, 0xffe8eba8, 0xffe8bc77, 0xffe89101, 0xffe8693f, 0xffe8452d, 0xffe824c5, 0xffe807ff, 0xffe7eed5, 0xffe7d93f, 0xffe7c735, 0xffe7b8af, 0xffe7ada5, 0xffe7a60d, 0xffe7a1de, 0xffe7a10d, 0xffe7a391, 0xffe7a95f, 0xffe7b26c, 0xffe7bead, 0xffe7ce16, 0xffe7e09c, 0xffe7f631, 0xffe80eca, 0xffe82a59, 0xffe848d3, 0xffe86a29, 0xffe88e4d, 0xffe8b532, 0xffe8decb, 0xffe90b08, 0xffe939db, 0xffe96b35, 0xffe99f08, 0xffe9d545, 0xffea0ddc, 0xffea48be, 0xffea85dc, 0xffeac525, 0xffeb068a, 0xffeb49fc, 0xffeb8f6a, 0xffebd6c4, 0xffec1ffa, 0xffec6afc, 0xffecb7b9, 0xffed0621, 0xffed5624, 0xffeda7b1, 0xffedfab8, 0xffee4f29, 0xffeea4f2, 0xffeefc04, 0xffef544e, 0xffefadc0, 0xfff00849, 0xfff063d9, 0xfff0c060, - 0xfff11dcd, 0xfff17c10, 0xfff1db1a, 0xfff23ada, 0xfff29b40, 0xfff2fc3d, 0xfff35dc1, 0xfff3bfbc, 0xfff4221f, 0xfff484db, 0xfff4e7e1, 0xfff54b20, 0xfff5ae8c, 0xfff61214, 0xfff675ab, 0xfff6d942, 0xfff73ccb, 0xfff7a037, 0xfff8037a, 0xfff86686, 0xfff8c94c, 0xfff92bc0, 0xfff98dd6, 0xfff9ef80, 0xfffa50b1, 0xfffab15e, 0xfffb117a, 0xfffb70fa, 0xfffbcfd2, 0xfffc2df6, 0xfffc8b5c, 0xfffce7f8, 0xfffd43c1, 0xfffd9eab, 0xfffdf8ae, 0xfffe51be, 0xfffea9d2, 0xffff00e1, 0xffff56e3, 0xffffabcd, 0xffffff99, 0x0000523d, 0x0000a3b3, 0x0000f3f1, 0x000142f1, 0x000190ac, 0x0001dd1b, 0x00022837, 0x000271fa, 0x0002ba5f, 0x0003015f, 0x000346f6, 0x00038b1d, 0x0003cdd1, 0x00040f0d, 0x00044ecb, 0x00048d0a, 0x0004c9c4, 0x000504f6, 0x00053e9e, 0x000576b8, 0x0005ad41, 0x0005e238, 0x00061599, 0x00064764, 0x00067797, 0x0006a630, 0x0006d32f, 0x0006fe92, 0x00072859, 0x00075084, 0x00077712, 0x00079c04, 0x0007bf5b, 0x0007e116, 0x00080137, 0x00081fbf, 0x00083cb0, 0x0008580a, 0x000871cf, 0x00088a02, 0x0008a0a5, 0x0008b5ba, 0x0008c944, 0x0008db46, 0x0008ebc1, 0x0008fabb, 0x00090836, 0x00091435, 0x00091ebd, 0x000927d1, 0x00092f75, 0x000935ad, 0x00093a7f, 0x00093ded, 0x00093ffe, 0x000940b6, 0x00094019, 0x00093e2e, 0x00093af8, 0x0009367e, 0x000930c4, 0x000929d1, 0x000921aa, 0x00091854, 0x00090dd6, 0x00090236, 0x0008f57a, 0x0008e7a7, 0x0008d8c4, 0x0008c8d7, 0x0008b7e7, 0x0008a5fa, 0x00089316, 0x00087f43, 0x00086a86, 0x000854e6, 0x00083e6a, 0x00082718, 0x00080ef7, 0x0007f60f, 0x0007dc65, 0x0007c201, 0x0007a6e9, 0x00078b24, 0x00076eba, 0x000751b0, 0x0007340d, - 0x000715d9, 0x0006f71a, 0x0006d7d7, 0x0006b817, 0x000697e0, 0x00067739, 0x00065629, 0x000634b6, 0x000612e8, 0x0005f0c4, 0x0005ce51, 0x0005ab95, 0x00058898, 0x0005655e, 0x000541f0, 0x00051e52, 0x0004fa8b, 0x0004d6a1, 0x0004b29a, 0x00048e7c, 0x00046a4c, 0x00044612, 0x000421d2, 0x0003fd92, 0x0003d957, 0x0003b527, 0x00039108, 0x00036cfe, 0x00034910, 0x00032541, 0x00030196, 0x0002de16, 0x0002bac4, 0x000297a5, 0x000274be, 
0x00025214, 0x00022fa9, 0x00020d84, 0x0001eba8, 0x0001ca18, 0x0001a8da, 0x000187f0, 0x0001675f, 0x00014729, 0x00012754, 0x000107e1, 0x0000e8d4, 0x0000ca30, 0x0000abf8, 0x00008e30, 0x000070d9, 0x000053f7, 0x0000378c, 0x00001b9a, 0x00000024, 0xffffe52d, 0xffffcab5, 0xffffb0bf, 0xffff974d, 0xffff7e61, 0xffff65fc, 0xffff4e20, 0xffff36ce, 0xffff2007, 0xffff09ce, 0xfffef421, 0xfffedf04, 0xfffeca76, 0xfffeb678, 0xfffea30b, 0xfffe9030, 0xfffe7de7, 0xfffe6c2f, 0xfffe5b0b, 0xfffe4a79, 0xfffe3a79, 0xfffe2b0d, 0xfffe1c32, 0xfffe0dea, 0xfffe0034, 0xfffdf310, 0xfffde67c, 0xfffdda79, 0xfffdcf05, 0xfffdc421, 0xfffdb9cb, 0xfffdb002, 0xfffda6c5, 0xfffd9e13, 0xfffd95eb, 0xfffd8e4d, 0xfffd8735, 0xfffd80a4, 0xfffd7a98, 0xfffd750f, 0xfffd7008, 0xfffd6b81, 0xfffd6779, 0xfffd63ed, 0xfffd60dd, 0xfffd5e46, 0xfffd5c26, 0xfffd5a7c, 0xfffd5945, 0xfffd5880, 0xfffd582a, 0xfffd5842, 0xfffd58c5, 0xfffd59b2, 0xfffd5b05, 0xfffd5cbe, 0xfffd5ed8, 0xfffd6154, 0xfffd642d, 0xfffd6762, 0xfffd6af1, 0xfffd6ed6, 0xfffd7310, 0xfffd779d, 0xfffd7c7a, 0xfffd81a4, 0xfffd8719, 0xfffd8cd7, 0xfffd92db, 0xfffd9923, 0xfffd9fac, 0xfffda675, 0xfffdad79, - 0xfffdb4b9, 0xfffdbc2f, 0xfffdc3db, 0xfffdcbba, 0xfffdd3ca, 0xfffddc07, 0xfffde470, 0xfffded03, 0xfffdf5bc, 0xfffdfe9b, 0xfffe079b, 0xfffe10bc, 0xfffe19fa, 0xfffe2354, 0xfffe2cc8, 0xfffe3652, 0xfffe3ff2, 0xfffe49a4, 0xfffe5367, 0xfffe5d38, 0xfffe6716, 0xfffe70ff, 0xfffe7aef, 0xfffe84e7, 0xfffe8ee3, 0xfffe98e2, 0xfffea2e1, 0xfffeacdf, 0xfffeb6db, 0xfffec0d2, 0xfffecac3, 0xfffed4ab, 0xfffede8a, 0xfffee85e, 0xfffef225, 0xfffefbde, 0xffff0587, 0xffff0f1f, 0xffff18a4, 0xffff2215, 0xffff2b70, 0xffff34b6, 0xffff3de3, 0xffff46f7, 0xffff4ff1, 0xffff58d0, 0xffff6192, 0xffff6a38, 0xffff72be, 0xffff7b26, 0xffff836d, 0xffff8b93, 0xffff9398, 0xffff9b7a, 0xffffa339, 0xffffaad3, 0xffffb249, 0xffffb99a, 0xffffc0c5, 0xffffc7ca, 0xffffcea8, 0xffffd55f, 0xffffdbee, 0xffffe255, 0xffffe894, 0xffffeeaa, 0xfffff498, 0xfffffa5d, 0xfffffff8, 0x0000056a, 0x00000ab3, 0x00000fd2, 0x000014c8, 0x00001994, 0x00001e37, 0x000022b1, 0x00002701, 0x00002b28, 0x00002f26, 0x000032fb, 0x000036a8, 0x00003a2d, 0x00003d89, 0x000040be, 0x000043cc, 0x000046b2, 0x00004972, 0x00004c0b, 0x00004e7f, 0x000050cd, 0x000052f7, 0x000054fc, 0x000056dd, 0x0000589b, 0x00005a36, 0x00005baf, 0x00005d06, 0x00005e3d, 0x00005f52, 0x00006048, 0x0000611f, 0x000061d8, 0x00006272, 0x000062f0, 0x00006351, 0x00006396, 0x000063c0, 0x000063d0, 0x000063c6, 0x000063a3, 0x00006368, 0x00006316, 0x000062ad, 0x0000622e, 0x0000619a, 0x000060f1, 0x00006035, 0x00005f66, 0x00005e84, 0x00005d92, 0x00005c8e, 0x00005b7b, 0x00005a58, 0x00005927, 0x000057e9, 0x0000569d, 0x00005545, 0x000053e2, - 0x00000000 +const int32_t dn_sampler_filter_coefficients[] __attribute__ ((aligned (32))) = { + 0x736144b5, 0x0c333a22, 0xf4fca390, 0x09424904, 0xf8c92a41, 0x052ac04c, 0xfca4fc64, 0x01ed8cc7, 0xff119cc0, 0x0053ba6e, 0xfff9a80d, 0xffeaeaab, 0x001690d9, 0xfff11dcd, 0x000715d9, 0xfffdb4b9, + 0x735ed3aa, 0x0b433de8, 0xf560f0f3, 0x091282c4, 0xf8dd5ccf, 0x0525cb66, 0xfca23e3d, 0x01f33960, 0xff0bc9c2, 0x00586127, 0xfff68603, 0xffecbad5, 0x0015ab8b, 0xfff17c10, 0x0006f71a, 0xfffdbc2f, + 0x735780bb, 0x0a55a98f, 0xf5c5b2a1, 0x08e1ea27, 0xf8f25767, 0x0520366d, 0xfc9ff262, 0x01f89c98, 0xff0620a4, 0x005cf349, 0xfff36c0d, 0xffee8913, 0x0014c5dc, 0xfff1db1a, 0x0006d7d7, 0xfffdc3db, + 0x734b4c77, 0x096a8a51, 0xf62adb7c, 0x08b086aa, 0xf9081629, 0x051a030f, 0xfc9e186a, 0x01fdb637, 0xff00a1d8, 0x00617065, 0xfff05a84, 0xfff0552d, 0x0013dfed, 0xfff23ada, 0x0006b817, 0xfffdcbba, + 0x733a37d2, 0x0881ed1f, 
0xf6905e79, 0x087e5fd7, 0xf91e9521, 0x05133308, 0xfc9cafe0, 0x0202860e, 0xfefb4dc7, 0x0065d80c, 0xffed51bc, 0xfff21ee8, 0x0012f9de, 0xfff29b40, 0x000697e0, 0xfffdd3ca, + 0x7324441e, 0x079bdea7, 0xf6f62e9d, 0x084b7d43, 0xf935d048, 0x050bc828, 0xfc9bb83e, 0x02070bf9, 0xfef624d8, 0x006a29d6, 0xffea520a, 0xfff3e60f, 0x001213d0, 0xfff2fc3d, 0x00067739, 0xfffddc07, + 0x7309730f, 0x06b86b52, 0xf75c3eff, 0x0817e68c, 0xf94dc388, 0x0503c44d, 0xfc9b30f3, 0x020b47dd, 0xfef12766, 0x006e655c, 0xffe75bbe, 0xfff5aa69, 0x00112de1, 0xfff35dc1, 0x00065629, 0xfffde470, + 0x72e9c6b8, 0x05d79f40, 0xf7c282cb, 0x07e3a35a, 0xf9666ab7, 0x04fb2969, 0xfc9b195f, 0x020f39ab, 0xfeec55cc, 0x00728a3d, 0xffe46f2a, 0xfff76bc2, 0x00104831, 0xfff3bfbc, 0x000634b6, 0xfffded03, + 0x72c5418e, 0x04f98649, 0xf828ed43, 0x07aebb5d, 0xf97fc19e, 0x04f1f97c, 0xfc9b70d6, 0x0212e15c, 0xfee7b059, 0x0076981a, 0xffe18c9a, 0xfff929e3, 0x000f62de, 0xfff4221f, 0x000612e8, 0xfffdf5bc, + 0x729be665, 0x041e2bfe, 0xf88f71bf, 0x0779364a, 0xf999c3f4, 0x04e83697, 0xfc9c369c, 0x02163ef1, 0xfee33759, 0x007a8e98, 0xffdeb45b, 0xfffae49b, 0x000e7e08, 0xfff484db, 0x0005f0c4, 0xfffdfe9b, + 0x726db871, 0x03459ba4, 0xf8f603ae, 0x07431bdf, 0xf9b46d64, 0x04dde2da, 0xfc9d69eb, 0x02195278, 0xfedeeb11, 0x007e6d61, 0xffdbe6b6, 0xfffc9bb4, 0x000d99cc, 0xfff4e7e1, 0x0005ce51, 0xfffe079b, + 0x723abb44, 0x026fe039, 0xf95c9699, 0x070c73dd, 0xf9cfb988, 0x04d30074, 0xfc9f09ee, 0x021c1c06, 0xfedacbbf, 0x00823422, 0xffd923f4, 0xfffe4efd, 0x000cb647, 0xfff54b20, 0x0005ab95, 0xfffe10bc, + 0x7202f2d3, 0x019d046d, 0xf9c31e22, 0x06d5460b, 0xf9eba3ef, 0x04c791a4, 0xfca115c5, 0x021e9bbb, 0xfed6d99c, 0x0085e28b, 0xffd66c59, 0xfffffe46, 0x000bd397, 0xfff5ae8c, 0x00058898, 0xfffe19fa, + 0x71c6636d, 0x00cd12a4, 0xfa298e07, 0x069d9a31, 0xfa082817, 0x04bb98b5, 0xfca38c83, 0x0220d1bf, 0xfed314da, 0x00897851, 0xffd3c02a, 0x0001a95d, 0x000af1d9, 0xfff61214, 0x0005655e, 0xfffe2354, + 0x718511c2, 0x000014f8, 0xfa8fda21, 0x0665781b, 0xfa254176, 0x04af1804, 0xfca66d2e, 0x0222be45, 0xfecf7da3, 0x008cf52d, 0xffd11fa9, 0x00035015, 0x000a1129, 0xfff675ab, 0x000541f0, 0xfffe2cc8, + 0x713f02e0, 0xff361534, 0xfaf5f669, 0x062ce795, 0xfa42eb75, 0x04a211f8, 0xfca9b6bf, 0x02246187, 0xfecc141d, 0x009058da, 0xffce8b13, 0x0004f23e, 0x000931a3, 0xfff6d942, 0x00051e52, 0xfffe3652, + 0x70f43c32, 0xfe6f1cd7, 0xfb5bd6f4, 0x05f3f06b, 0xfa61216f, 0x04948906, 0xfcad6827, 0x0225bbca, 0xfec8d867, 0x0093a31a, 0xffcc02a8, 0x00068fad, 0x00085362, 0xfff73ccb, 0x0004fa8b, 0xfffe3ff2, + 0x70a4c37f, 0xfdab350f, 0xfbc16ff6, 0x05ba9a6b, 0xfa7fdeba, 0x04867fb3, 0xfcb18047, 0x0226cd5b, 0xfec5ca9a, 0x0096d3af, 0xffc986a1, 0x00082835, 0x00077681, 0xfff7a037, 0x0004d6a1, 0xfffe49a4, + 0x70509eec, 0xfcea66be, 0xfc26b5c5, 0x0580ed5f, 0xfa9f1e9e, 0x0477f88d, 0xfcb5fdf7, 0x02279691, 0xfec2eaca, 0x0099ea62, 0xffc71738, 0x0009bbab, 0x00069b1b, 0xfff8037a, 0x0004b29a, 0xfffe5367, + 0x6ff7d4f8, 0xfc2cba75, 0xfc8b9cda, 0x0546f10f, 0xfabedc5a, 0x0468f62e, 0xfcbae002, 0x022817ca, 0xfec03901, 0x009ce6fe, 0xffc4b4a4, 0x000b49e6, 0x0005c149, 0xfff86686, 0x00048e7c, 0xfffe5d38, + 0x6f9a6c7f, 0xfb723876, 0xfcf019cd, 0x050cad3f, 0xfadf1328, 0x04597b40, 0xfcc0252b, 0x0228516f, 0xfebdb547, 0x009fc954, 0xffc25f1a, 0x000cd2bd, 0x0004e926, 0xfff8c94c, 0x00046a4c, 0xfffe6716, + 0x6f386cb6, 0xfabae8b2, 0xfd54215c, 0x04d229b1, 0xfaffbe36, 0x04498a72, 0xfcc5cc26, 0x022843f0, 0xfebb5f9b, 0x00a29136, 0xffc016cb, 0x000e5609, 0x000412c9, 0xfff92bc0, 0x00044612, 0xfffe70ff, + 0x6ed1dd2e, 0xfa06d2ca, 0xfdb7a869, 0x04976e20, 0xfb20d8ad, 0x04392684, 0xfccbd3a0, 
0x0227efc6, 0xfeb937f9, 0x00a53e7b, 0xffbddbe8, 0x000fd3a3, 0x00033e4c, 0xfff98dd6, 0x000421d2, 0xfffe7aef, + 0x6e66c5ce, 0xf955fe0c, 0xfe1aa3fc, 0x045c8240, 0xfb425db0, 0x0428523d, 0xfcd23a3a, 0x02275572, 0xfeb73e54, 0x00a7d0ff, 0xffbbae9f, 0x00114b67, 0x00026bc6, 0xfff9ef80, 0x0003fd92, 0xfffe84e7, + 0x6df72ed9, 0xf8a87178, 0xfe7d0942, 0x04216dc0, 0xfb64485b, 0x0417106e, 0xfcd8fe8b, 0x0226757e, 0xfeb5729b, 0x00aa48a0, 0xffb98f1c, 0x0012bd30, 0x00019b4e, 0xfffa50b1, 0x0003d957, 0xfffe8ee3, + 0x6d8320e6, 0xf7fe33ba, 0xfedecd90, 0x03e63846, 0xfb8693c6, 0x040563f4, 0xfce01f21, 0x0225507c, 0xfeb3d4b7, 0x00aca542, 0xffb77d88, 0x001428db, 0x0000ccfc, 0xfffab15e, 0x0003b527, 0xfffe98e2, + 0x6d0aa4e6, 0xf7574b2b, 0xff3fe663, 0x03aae970, 0xfba93b01, 0x03f34fb2, 0xfce79a7f, 0x0223e706, 0xfeb26489, 0x00aee6ca, 0xffb57a0b, 0x00158e47, 0x000000e6, 0xfffb117a, 0x00039108, 0xfffea2e1, + 0x6c8dc41f, 0xf6b3bdd3, 0xffa04963, 0x036f88d2, 0xfbcc391d, 0x03e0d697, 0xfcef6f20, 0x022239bc, 0xfeb121ee, 0x00b10d23, 0xffb384ca, 0x0016ed53, 0xffff3721, 0xfffb70fa, 0x00036cfe, 0xfffeacdf, + 0x6c0c882a, 0xf6139169, 0xffffec5f, 0x03341df4, 0xfbef8924, 0x03cdfb99, 0xfcf79b75, 0x02204949, 0xfeb00cbf, 0x00b3183c, 0xffb19de7, 0x001845e0, 0xfffe6fc3, 0xfffbcfd2, 0x00034910, 0xfffeb6db, + 0x6b86faf8, 0xf576cb4e, 0x005ec552, 0x02f8b055, 0xfc13261f, 0x03bac1b4, 0xfd001de8, 0x021e165d, 0xfeaf24cc, 0x00b50805, 0xffafc584, 0x001997d0, 0xfffdaadf, 0xfffc2df6, 0x00032541, 0xfffec0d2, + 0x6afd26cb, 0xf4dd7092, 0x00bcca63, 0x02bd4768, 0xfc370b14, 0x03a72bf0, 0xfd08f4d6, 0x021ba1b2, 0xfeae69e1, 0x00b6dc75, 0xffadfbbe, 0x001ae306, 0xfffce88b, 0xfffc8b5c, 0x00030196, 0xfffecac3, + 0x6a6f1638, 0xf44785f1, 0x0119f1e4, 0x0281ea90, 0xfc5b3309, 0x03933d58, 0xfd121e99, 0x0218ec06, 0xfeaddbc4, 0x00b89584, 0xffac40b3, 0x001c2765, 0xfffc28d9, 0xfffce7f8, 0x0002de16, 0xfffed4ab, + 0x69dcd425, 0xf3b50fd6, 0x01763256, 0x0246a125, 0xfc7f9902, 0x037ef900, 0xfd1b9980, 0x0215f621, 0xfead7a37, 0x00ba3330, 0xffaa947c, 0x001d64d5, 0xfffb6bdd, 0xfffd43c1, 0x0002bac4, 0xfffede8a, + 0x69466bc8, 0xf3261255, 0x01d18265, 0x020b726f, 0xfca43803, 0x036a6201, 0xfd2563d3, 0x0212c0d2, 0xfead44f4, 0x00bbb579, 0xffa8f730, 0x001e9b3a, 0xfffab1a8, 0xfffd9eab, 0x000297a5, 0xfffee85e, + 0x68abe8a8, 0xf29a9133, 0x022bd8ee, 0x01d065a8, 0xfcc90b12, 0x03557b7a, 0xfd2f7bd1, 0x020f4cec, 0xfead3bb2, 0x00bd1c63, 0xffa768e6, 0x001fca7d, 0xfff9fa4d, 0xfffdf8ae, 0x000274be, 0xfffef225, + 0x680d5698, 0xf2128fde, 0x02852cfc, 0x019581f9, 0xfcee0d33, 0x03404890, 0xfd39dfb4, 0x020b9b4c, 0xfead5e22, 0x00be67f6, 0xffa5e9b1, 0x0020f288, 0xfff945dc, 0xfffe51be, 0x00025214, 0xfffefbde, + 0x676ac1bb, 0xf18e1174, 0x02dd75ca, 0x015ace79, 0xfd133970, 0x032acc6d, 0xfd448dae, 0x0207acd4, 0xfeadabef, 0x00bf983d, 0xffa479a2, 0x00221344, 0xfff89465, 0xfffea9d2, 0x00022fa9, 0xffff0587, + 0x66c4367d, 0xf10d18bd, 0x0334aac4, 0x0120522f, 0xfd388ad1, 0x03150a3f, 0xfd4f83eb, 0x0203826c, 0xfeae24c1, 0x00c0ad48, 0xffa318c7, 0x00232c9d, 0xfff7e5f9, 0xffff00e1, 0x00020d84, 0xffff0f1f, + 0x6619c197, 0xf08fa82f, 0x038ac385, 0x00e6140f, 0xfd5dfc63, 0x02ff0538, 0xfd5ac08e, 0x01ff1d04, 0xfeaec838, 0x00c1a728, 0xffa1c72f, 0x00243e7f, 0xfff73aa7, 0xffff56e3, 0x0001eba8, 0xffff18a4, + 0x656b700a, 0xf015c1ee, 0x03dfb7dd, 0x00ac1af9, 0xfd838938, 0x02e8c08e, 0xfd6641b8, 0x01fa7d91, 0xfeaf95f2, 0x00c285f4, 0xffa084e3, 0x002548d9, 0xfff6927e, 0xffffabcd, 0x0001ca18, 0xffff2215, + 0x64b94f22, 0xef9f67cb, 0x04337fcb, 0x00726dbb, 0xfda92c63, 0x02d23f7a, 0xfd720581, 0x01f5a50d, 0xfeb08d86, 0x00c349c4, 0xff9f51eb, 0x00264b9a, 
0xfff5ed8b, 0xffffff99, 0x0001a8da, 0xffff2b70, + 0x64036c6f, 0xef2c9b43, 0x04861383, 0x0039130c, 0xfdcee0ff, 0x02bb8537, 0xfd7e09fc, 0x01f0947a, 0xfeb1ae87, 0x00c3f2b6, 0xff9e2e50, 0x002746b2, 0xfff54bdc, 0x0000523d, 0x000187f0, 0xffff34b6, + 0x6349d5c9, 0xeebd5d81, 0x04d76b6b, 0x00001191, 0xfdf4a22a, 0x02a49505, 0xfd8a4d37, 0x01eb4cde, 0xfeb2f884, 0x00c480e9, 0xff9d1a14, 0x00283a12, 0xfff4ad7e, 0x0000a3b3, 0x0001675f, 0xffff3de3, + 0x628c994c, 0xee51af5f, 0x0527801d, 0xffc76fd5, 0xfe1a6b08, 0x028d7223, 0xfd96cd3d, 0x01e5cf44, 0xfeb46b07, 0x00c4f480, 0xff9c1539, 0x002925ae, 0xfff4127d, 0x0000f3f1, 0x00014729, 0xffff46f7, + 0x61cbc559, 0xede99165, 0x05764a68, 0xff8f344f, 0xfe4036c5, 0x02761fd3, 0xfda3880f, 0x01e01cbe, 0xfeb60596, 0x00c54da2, 0xff9b1fc1, 0x002a0979, 0xfff37ae4, 0x000142f1, 0x00012754, 0xffff4ff1, + 0x61076890, 0xed8503c7, 0x05c3c34e, 0xff576560, 0xfe660094, 0x025ea157, 0xfdb07bb0, 0x01da3661, 0xfeb7c7b0, 0x00c58c79, 0xff9a39a9, 0x002ae568, 0xfff2e6bf, 0x000190ac, 0x000107e1, 0xffff58d0, + 0x603f91d5, 0xed24066b, 0x060fe408, 0xff20094d, 0xfe8bc3ad, 0x0246f9f3, 0xfdbda61a, 0x01d41d4a, 0xfeb9b0d3, 0x00c5b132, 0xff9962ec, 0x002bb971, 0xfff25619, 0x0001dd1b, 0x0000e8d4, 0xffff6192, + 0x5f745049, 0xecc698e6, 0x065aa604, 0xfee92646, 0xfeb17b53, 0x022f2cea, 0xfdcb0546, 0x01cdd297, 0xfebbc078, 0x00c5bbfc, 0xff989b85, 0x002c858d, 0xfff1c8fa, 0x00022837, 0x0000ca30, 0xffff6a38, + 0x5ea5b34c, 0xec6cba79, 0x06a402e4, 0xfeb2c261, 0xfed722d0, 0x02173d81, 0xfdd89727, 0x01c7576d, 0xfebdf613, 0x00c5ad0a, 0xff97e36c, 0x002d49b4, 0xfff13f6c, 0x000271fa, 0x0000abf8, 0xffff72be, + 0x5dd3ca7a, 0xec166a19, 0x06ebf483, 0xfe7ce399, 0xfefcb57a, 0x01ff2ef9, 0xfde659af, 0x01c0acf5, 0xfec05114, 0x00c58494, 0xff973a96, 0x002e05df, 0xfff0b977, 0x0002ba5f, 0x00008e30, 0xffff7b26, + 0x5cfea5aa, 0xebc3a669, 0x073274f1, 0xfe478fd2, 0xff222eac, 0x01e70494, 0xfdf44acc, 0x01b9d45b, 0xfec2d0e8, 0x00c542d1, 0xff96a0f8, 0x002eba0a, 0xfff03724, 0x0003015f, 0x000070d9, 0xffff836d, + 0x5c2654ed, 0xeb746dbe, 0x07777e74, 0xfe12ccd1, 0xff4789d1, 0x01cec194, 0xfe026869, 0x01b2ced1, 0xfec574f9, 0x00c4e7fe, 0xff961684, 0x002f6630, 0xffefb87a, 0x000346f6, 0x000053f7, 0xffff8b93, + 0x5b4ae88d, 0xeb28be1f, 0x07bb0b8b, 0xfddea042, 0xff6cc25a, 0x01b66936, 0xfe10b06f, 0x01ab9d8b, 0xfec83caa, 0x00c47459, 0xff959b29, 0x00300a4f, 0xffef3d7f, 0x00038b1d, 0x0000378c, 0xffff9398, + 0x5a6c7108, 0xeae09544, 0x07fd16eb, 0xfdab0fb6, 0xff91d3c6, 0x019dfeb6, 0xfe1f20c5, 0x01a441c2, 0xfecb275e, 0x00c3e824, 0xff952ed7, 0x0030a665, 0xffeec63a, 0x0003cdd1, 0x00001b9a, 0xffff9b7a, + 0x598aff13, 0xea9bf097, 0x083d9b81, 0xfd7820a0, 0xffb6b99f, 0x0185854f, 0xfe2db74f, 0x019cbcb1, 0xfece3472, 0x00c343a4, 0xff94d178, 0x00313a72, 0xffee52b1, 0x00040f0d, 0x00000024, 0xffffa339, + 0x58a6a397, 0xea5acd38, 0x087c9471, 0xfd45d856, 0xffdb6f7c, 0x016d0037, 0xfe3c71f1, 0x01950f98, 0xfed16342, 0x00c2871f, 0xff9482f8, 0x0031c677, 0xffede2e7, 0x00044ecb, 0xffffe52d, 0xffffaad3, + 0x57bf6fae, 0xea1d27f7, 0x08b9fd18, 0xfd143c12, 0xfffff100, 0x015472a1, 0xfe4b4e8c, 0x018d3bb8, 0xfed4b325, 0x00c1b2e0, 0xff944340, 0x00324a74, 0xffed76e3, 0x00048d0a, 0xffffcab5, 0xffffb249, + 0x56d574a2, 0xe9e2fd5b, 0x08f5d10a, 0xfce350f0, 0x002439db, 0x013bdfbc, 0xfe5a4b03, 0x01854258, 0xfed82370, 0x00c0c731, 0xff941236, 0x0032c66e, 0xffed0ea7, 0x0004c9c4, 0xffffb0bf, 0xffffb99a, + 0x55e8c3ee, 0xe9ac49a0, 0x09300c14, 0xfcb31bec, 0x004845cc, 0x01234ab4, 0xfe696534, 0x017d24bf, 0xfedbb373, 0x00bfc463, 0xff93efbf, 0x00333a67, 0xffecaa36, 0x000504f6, 0xffff974d, 0xffffc0c5, + 0x54f96f37, 
0xe97908b8, 0x0968aa3b, 0xfc83a1e5, 0x006c10a0, 0x010ab6b0, 0xfe789b01, 0x0174e437, 0xfedf627d, 0x00beaac6, 0xff93dbc0, 0x0033a665, 0xffec4994, 0x00053e9e, 0xffff7e61, 0xffffc7ca, + 0x54078851, 0xe9493649, 0x099fa7bb, 0xfc54e79a, 0x008f9631, 0x00f226d0, 0xfe87ea47, 0x016c820d, 0xfee32fdb, 0x00bd7aae, 0xff93d618, 0x00340a6d, 0xffebecc2, 0x000576b8, 0xffff65fc, 0xffffcea8, + 0x53132138, 0xe91ccdb5, 0x09d5010b, 0xfc26f1ad, 0x00b2d26b, 0x00d99e31, 0xfe9750e8, 0x0163ff90, 0xfee71ad4, 0x00bc3470, 0xff93deaa, 0x00346687, 0xffeb93c3, 0x0005ad41, 0xffff4e20, 0xffffd55f, + 0x521c4c10, 0xe8f3ca12, 0x0a08b2d9, 0xfbf9c49d, 0x00d5c147, 0x00c11feb, 0xfea6ccc3, 0x015b5e11, 0xfeeb22af, 0x00bad866, 0xff93f552, 0x0034babb, 0xffeb3e96, 0x0005e238, 0xffff36ce, 0xffffdbee, + 0x51231b26, 0xe8ce2631, 0x0a3aba09, 0xfbcd64ca, 0x00f85ecf, 0x00a8af0c, 0xfeb65bb9, 0x01529ee3, 0xfeef46b0, 0x00b966e9, 0xff9419ef, 0x00350711, 0xffeaed3c, 0x00061599, 0xffff2007, 0xffffe255, + 0x5027a0e9, 0xe8abdc9d, 0x0a6b13bc, 0xfba1d673, 0x011aa71d, 0x00904ea0, 0xfec5fbac, 0x0149c35a, 0xfef3861a, 0x00b7e055, 0xff944c5a, 0x00354b94, 0xffea9fb6, 0x00064764, 0xffff09ce, 0xffffe894, + 0x4f29efed, 0xe88ce79a, 0x0a99bd47, 0xfb771db9, 0x013c965b, 0x007801aa, 0xfed5aa7e, 0x0140cccb, 0xfef7e02a, 0x00b6450a, 0xff948c6e, 0x0035884f, 0xffea5602, 0x00067797, 0xfffef421, 0xffffeeaa, + 0x4e2a1ae8, 0xe871412a, 0x0ac6b43a, 0xfb4d3e97, 0x015e28c7, 0x005fcb26, 0xfee56614, 0x0137bc8f, 0xfefc541e, 0x00b49568, 0xff94da03, 0x0035bd4e, 0xffea1020, 0x0006a630, 0xfffedf04, 0xfffff498, + 0x4d2834b0, 0xe858e30a, 0x0af1f65d, 0xfb243cea, 0x017f5aad, 0x0047ae09, 0xfef52c54, 0x012e93fc, 0xff00e133, 0x00b2d1d1, 0xff9534f0, 0x0035ea9d, 0xffe9ce0d, 0x0006d32f, 0xfffeca76, 0xfffffa5d, + 0x4c245038, 0xe843c6b5, 0x0b1b81ad, 0xfafc1c6e, 0x01a0286c, 0x002fad3f, 0xff04fb25, 0x0125546c, 0xff0586a0, 0x00b0faaa, 0xff959d0a, 0x0036104b, 0xffe98fc8, 0x0006fe92, 0xfffeb678, 0xfffffff8, + 0x4b1e8091, 0xe831e563, 0x0b435462, 0xfad4e0b9, 0x01c08e78, 0x0017cbae, 0xff14d073, 0x011bff38, 0xff0a439e, 0x00af1059, 0xff961224, 0x00362e66, 0xffe9554c, 0x00072859, 0xfffea30b, 0x0000056a, + 0x4a16d8e5, 0xe823380d, 0x0b696ceb, 0xfaae8d43, 0x01e08952, 0x00000c33, 0xff24aa2a, 0x011295bb, 0xff0f1762, 0x00ad1346, 0xff969412, 0x003644fd, 0xffe91e99, 0x00075084, 0xfffe9030, 0x00000ab3, + 0x490d6c79, 0xe817b76c, 0x0b8dc9ed, 0xfa89255f, 0x02001593, 0xffe871a0, 0xff348639, 0x0109194f, 0xff140121, 0x00ab03da, 0xff9722a5, 0x00365422, 0xffe8eba8, 0x00077712, 0xfffe7de7, 0x00000fd2, + 0x48024ea7, 0xe80f5bfb, 0x0bb06a47, 0xfa64ac3f, 0x021f2fe5, 0xffd0fec1, 0xff446293, 0x00ff8b4f, 0xff19000e, 0x00a8e282, 0xff97bdac, 0x00365be6, 0xffe8bc77, 0x00079c04, 0xfffe6c2f, 0x000014c8, + 0x46f592e2, 0xe80a1df5, 0x0bd14d0b, 0xfa4124f2, 0x023dd505, 0xffb9b656, 0xff543d2e, 0x00f5ed15, 0xff1e135b, 0x00a6afa8, 0xff9864f6, 0x00365c5b, 0xffe89101, 0x0007bf5b, 0xfffe5b0b, 0x00001994, + 0x45e74cad, 0xe807f55b, 0x0bf07186, 0xfa1e9262, 0x025c01c5, 0xffa29b18, 0xff641402, 0x00ec3ffc, 0xff233a39, 0x00a46bbc, 0xff991851, 0x00365594, 0xffe8693f, 0x0007e116, 0xfffe4a79, 0x00001e37, + 0x44d78fa0, 0xe808d9f1, 0x0c0dd738, 0xf9fcf758, 0x0279b30b, 0xff8bafb3, 0xff73e50e, 0x00e2855d, 0xff2873d6, 0x00a2172d, 0xff99d789, 0x003647a5, 0xffe8452d, 0x00080137, 0xfffe3a79, 0x000022b1, + 0x43c66f62, 0xe80cc342, 0x0c297dd9, 0xf9dc567b, 0x0296e5d0, 0xff74f6cc, 0xff83ae52, 0x00d8be92, 0xff2dbf61, 0x009fb26c, 0xff9aa268, 0x003632a2, 0xffe824c5, 0x00081fbf, 0xfffe2b0d, 0x00002701, + 0x42b3ffa9, 0xe813a89f, 0x0c436557, 0xf9bcb24a, 0x02b39724, 0xff5e72fb, 
0xff936dd2, 0x00ceecf5, 0xff331c08, 0x009d3deb, 0xff9b78ba, 0x003616a2, 0xffe807ff, 0x00083cb0, 0xfffe1c32, 0x00002b28, + 0x41a05437, 0xe81d8122, 0x0c5b8dd4, 0xf99e0d26, 0x02cfc429, 0xff4826cf, 0xffa3219a, 0x00c511dc, 0xff3888f8, 0x009aba1d, 0xff9c5a47, 0x0035f3b9, 0xffe7eed5, 0x0008580a, 0xfffe0dea, 0x00002f26, + 0x408b80d9, 0xe82a43ac, 0x0c71f7a9, 0xf980694a, 0x02eb6a18, 0xff3214c9, 0xffb2c7b6, 0x00bb2e9f, 0xff3e055d, 0x00982778, 0xff9d46d6, 0x0035ca00, 0xffe7d93f, 0x000871cf, 0xfffe0034, 0x000032fb, + 0x3f759967, 0xe839e6e9, 0x0c86a361, 0xf963c8cc, 0x03068640, 0xff1c3f63, 0xffc25e3b, 0x00b14493, 0xff439064, 0x0095866f, 0xff9e3e30, 0x0035998d, 0xffe7c735, 0x00088a02, 0xfffdf310, 0x000036a8, + 0x3e5eb1bd, 0xe84c6152, 0x0c9991be, 0xf9482da0, 0x03211603, 0xff06a907, 0xffd1e340, 0x00a7550c, 0xff492937, 0x0092d77b, 0xff9f4019, 0x00356279, 0xffe7b8af, 0x0008a0a5, 0xfffde67c, 0x00003a2d, + 0x3d46ddc1, 0xe861a92b, 0x0caac3b5, 0xf92d9997, 0x033b16dc, 0xfef15417, 0xffe154e3, 0x009d615d, 0xff4ecf02, 0x00901b11, 0xffa04c57, 0x003524dd, 0xffe7ada5, 0x0008b5ba, 0xfffdda79, 0x00003d89, + 0x3c2e315a, 0xe879b487, 0x0cba3a6d, 0xf9140e5e, 0x03548659, 0xfedc42e7, 0xfff0b148, 0x00936ad6, 0xff5480f0, 0x008d51ab, 0xffa162ae, 0x0034e0d3, 0xffe7a60d, 0x0008c944, 0xfffdcf05, 0x000040be, + 0x3b14c072, 0xe8947947, 0x0cc7f742, 0xf8fb8d7d, 0x036d621f, 0xfec777be, 0xfffff697, 0x008972c7, 0xff5a3e2c, 0x008a7bc1, 0xffa282e1, 0x00349674, 0xffe7a1de, 0x0008db46, 0xfffdc421, 0x000043cc, + 0x39fa9ef3, 0xe8b1ed1c, 0x0cd3fbc0, 0xf8e4185a, 0x0385a7eb, 0xfeb2f4d9, 0x000f22fe, 0x007f7a7c, 0xff6005e1, 0x008799cd, 0xffa3acb4, 0x003445dc, 0xffe7a10d, 0x0008ebc1, 0xfffdb9cb, 0x000046b2, + 0x38dfe0c6, 0xe8d2058b, 0x0cde49a8, 0xf8cdb036, 0x039d558e, 0xfe9ebc66, 0x001e34b4, 0x00758341, 0xff65d73a, 0x0084ac48, 0xffa4dfe8, 0x0033ef25, 0xffe7a391, 0x0008fabb, 0xfffdb002, 0x00004972, + 0x37c499d0, 0xe8f4b7e9, 0x0ce6e2ea, 0xf8b85631, 0x03b468f1, 0xfe8ad087, 0x002d29f3, 0x006b8e5c, 0xff6bb163, 0x0081b3af, 0xffa61c3e, 0x0033926d, 0xffe7a95f, 0x00090836, 0xfffda6c5, 0x00004c0b, + 0x36a8ddf3, 0xe919f961, 0x0cedc9a7, 0xf8a40b44, 0x03cae014, 0xfe773351, 0x003c00fd, 0x00619d15, 0xff719388, 0x007eb07b, 0xffa76176, 0x00332fcf, 0xffe7b26c, 0x00091435, 0xfffd9e13, 0x00004e7f, + 0x358cc109, 0xe941bef3, 0x0cf30031, 0xf890d048, 0x03e0b90d, 0xfe63e6cb, 0x004ab81b, 0x0057b0ae, 0xff777cd6, 0x007ba32a, 0xffa8af51, 0x0032c769, 0xffe7bead, 0x00091ebd, 0xfffd95eb, 0x000050cd, + 0x347056e3, 0xe96bfd76, 0x0cf6890a, 0xf87ea5f1, 0x03f5f20a, 0xfe50ecf0, 0x00594d9d, 0x004dca68, 0xff7d6c79, 0x00788c36, 0xffaa058d, 0x00325958, 0xffe7ce16, 0x000927d1, 0xfffd8e4d, 0x000052f7, + 0x3353b349, 0xe998a999, 0x0cf866e1, 0xf86d8cd1, 0x040a894e, 0xfe3e47ac, 0x0067bfd8, 0x0043eb7f, 0xff83619f, 0x00756c1d, 0xffab63ea, 0x0031e5ba, 0xffe7e09c, 0x00092f75, 0xfffd8735, 0x000054fc, + 0x3236e9f7, 0xe9c7b7e3, 0x0cf89c96, 0xf85d8555, 0x041e7d34, 0xfe2bf8de, 0x00760d2a, 0x003a152f, 0xff895b77, 0x0072435b, 0xffacca25, 0x00316cae, 0xffe7f631, 0x000935ad, 0xfffd80a4, 0x000056dd, + 0x311a0e9b, 0xe9f91cb9, 0x0cf72d34, 0xf84e8fc9, 0x0431cc31, 0xfe1a0256, 0x008433f9, 0x003048ae, 0xff8f5930, 0x006f126b, 0xffae37fd, 0x0030ee53, 0xffe80eca, 0x00093a7f, 0xfffd7a98, 0x0000589b, + 0x2ffd34d4, 0xea2ccc59, 0x0cf41bf7, 0xf840ac57, 0x044474ce, 0xfe0865d7, 0x009232b2, 0x0026872f, 0xff9559fb, 0x006bd9cd, 0xffafad2e, 0x00306ac8, 0xffe82a59, 0x00093ded, 0xfffd750f, 0x00005a36, + 0x2ee07030, 0xea62bae0, 0x0cef6c43, 0xf833db04, 0x045675ab, 0xfdf72515, 0x00a007c9, 0x001cd1e4, 0xff9b5d0a, 0x006899fb, 0xffb12976, 
0x002fe22c, 0xffe848d3, 0x00093ffe, 0xfffd7008, 0x00005baf, + 0x2dc3d429, 0xea9adc49, 0x0ce921ab, 0xf8281bb6, 0x0467cd83, 0xfde641b7, 0x00adb1bb, 0x001329f7, 0xffa16190, 0x00655372, 0xffb2ac90, 0x002f54a1, 0xffe86a29, 0x000940b6, 0xfffd6b81, 0x00005d06, + 0x2ca77428, 0xead52471, 0x0ce13feb, 0xf81d6e2e, 0x04787b24, 0xfdd5bd53, 0x00bb2f0b, 0x00099093, 0xffa766c0, 0x006206b1, 0xffb4363a, 0x002ec246, 0xffe88e4d, 0x00094019, 0xfffd6779, 0x00005e3d, + 0x2b8b637b, 0xeb118714, 0x0cd7caec, 0xf813d20d, 0x04887d76, 0xfdc59972, 0x00c87e47, 0x000006db, 0xffad6bd0, 0x005eb431, 0xffb5c630, 0x002e2b3c, 0xffe8b532, 0x00093e2e, 0xfffd63ed, 0x00005f52, + 0x2a6fb55e, 0xeb4ff7d4, 0x0cccc6bc, 0xf80b46d3, 0x0497d378, 0xfdb5d78f, 0x00d59e03, 0xfff68df1, 0xffb36ff9, 0x005b5c71, 0xffb75c2c, 0x002d8fa4, 0xffe8decb, 0x00093af8, 0xfffd60dd, 0x00006048, + 0x29547ced, 0xeb906a35, 0x0cc03797, 0xf803cbdc, 0x04a67c41, 0xfda67913, 0x00e28cdd, 0xffed26f0, 0xffb97271, 0x0057ffec, 0xffb8f7ea, 0x002cefa1, 0xffe90b08, 0x0009367e, 0xfffd5e46, 0x0000611f, + 0x2839cd30, 0xebd2d1a1, 0x0cb221de, 0xf7fd6065, 0x04b476fe, 0xfd977f5d, 0x00ef497a, 0xffe3d2f2, 0xffbf7274, 0x00549f1c, 0xffba9927, 0x002c4b53, 0xffe939db, 0x000930c4, 0xfffd5c26, 0x000061d8, + 0x271fb90d, 0xec17216b, 0x0ca28a1a, 0xf7f8038c, 0x04c1c2f3, 0xfd88ebb9, 0x00fbd28a, 0xffda930a, 0xffc56f3e, 0x00513a7e, 0xffbc3f9d, 0x002ba2dc, 0xffe96b35, 0x000929d1, 0xfffd5a7c, 0x00006272, + 0x2606534e, 0xec5d4ccd, 0x0c9174fa, 0xf7f3b44b, 0x04ce5f7d, 0xfd7abf64, 0x010826c4, 0xffd16848, 0xffcb680e, 0x004dd28c, 0xffbdeb07, 0x002af65f, 0xffe99f08, 0x000921aa, 0xfffd5945, 0x000062f0, + 0x24edae9c, 0xeca546eb, 0x0c7ee754, 0xf7f0717e, 0x04da4c10, 0xfd6cfb8e, 0x011444e7, 0xffc853b6, 0xffd15c22, 0x004a67c0, 0xffbf9b21, 0x002a45fe, 0xffe9d545, 0x00091854, 0xfffd5880, 0x00006351, + 0x23d5dd81, 0xecef02d5, 0x0c6ae622, 0xf7ee39e2, 0x04e58836, 0xfd5fa157, 0x01202bbe, 0xffbf565a, 0xffd74abe, 0x0046fa93, 0xffc14fa5, 0x002991db, 0xffea0ddc, 0x00090dd6, 0xfffd582a, 0x00006396, + 0x22bef262, 0xed3a7388, 0x0c557681, 0xf7ed0c12, 0x04f01392, 0xfd52b1cf, 0x012bda1b, 0xffb67137, 0xffdd3325, 0x00438b7e, 0xffc3084f, 0x0028da1a, 0xffea48be, 0x00090236, 0xfffd5842, 0x000063c0, + 0x21a8ff7e, 0xed878bf0, 0x0c3e9db5, 0xf7ece68c, 0x04f9edda, 0xfd462df6, 0x01374eda, 0xffada547, 0xffe3149e, 0x00401af9, 0xffc4c4da, 0x00281edd, 0xffea85dc, 0x0008f57a, 0xfffd58c5, 0x000063d0, + 0x209416f2, 0xedd63ee5, 0x0c26611f, 0xf7edc7af, 0x050316e0, 0xfd3a16c0, 0x014288e0, 0xffa4f383, 0xffe8ee72, 0x003ca97b, 0xffc68502, 0x00276046, 0xffeac525, 0x0008e7a7, 0xfffd59b2, 0x000063c6, + 0x1f804ab0, 0xee267f35, 0x0c0cc646, 0xf7efadbd, 0x050b8e8a, 0xfd2e6d0d, 0x014d871b, 0xff9c5cdc, 0xffeebfec, 0x0039377a, 0xffc84881, 0x00269e7a, 0xffeb068a, 0x0008d8c4, 0xfffd5b05, 0x000063a3, + 0x1e6dac83, 0xee783f9e, 0x0bf1d2d0, 0xf7f296d7, 0x051354d5, 0xfd2331b0, 0x01584883, 0xff93e241, 0xfff48859, 0x0035c56c, 0xffca0f14, 0x0025d99b, 0xffeb49fc, 0x0008c8d7, 0xfffd5cbe, 0x00006368, + 0x1d5c4e09, 0xeecb72d1, 0x0bd58c81, 0xf7f68103, 0x051a69d4, 0xfd18656f, 0x0162cc19, 0xff8b8498, 0xfffa470a, 0x003253c6, 0xffcbd876, 0x002511cd, 0xffeb8f6a, 0x0008b7e7, 0xfffd5ed8, 0x00006316, + 0x1c4c40b6, 0xef200b76, 0x0bb7f940, 0xf7fb6a29, 0x0520cdb1, 0xfd0e08fb, 0x016d10e9, 0xff8344c4, 0xfffffb51, 0x002ee2fa, 0xffcda463, 0x00244733, 0xffebd6c4, 0x0008a5fa, 0xfffd6154, 0x000062ad, + 0x1b3d95d1, 0xef75fc2b, 0x0b991f0f, 0xf8015015, 0x052680ae, 0xfd041cfa, 0x01771608, 0xff7b23a1, 0x0005a483, 0x002b737b, 0xffcf7299, 0x002379ef, 0xffec1ffa, 0x00089316, 0xfffd642d, 0x0000622e, + 
0x1a305e70, 0xefcd3787, 0x0b79040c, 0xf8083077, 0x052b8320, 0xfcfaa200, 0x0180da94, 0xff732209, 0x000b41fa, 0x002805ba, 0xffd142d3, 0x0022aa26, 0xffec6afc, 0x00087f43, 0xfffd6762, 0x0000619a, + 0x1924ab7b, 0xf025b01a, 0x0b57ae75, 0xf81008e2, 0x052fd573, 0xfcf19894, 0x018a5db5, 0xff6b40cb, 0x0010d30e, 0x00249a28, 0xffd314cf, 0x0021d7fa, 0xffecb7b9, 0x00086a86, 0xfffd6af1, 0x000060f1, + 0x181a8da5, 0xf07f586e, 0x0b3524a0, 0xf818d6cf, 0x0533782a, 0xfce9012c, 0x01939e9e, 0xff6380b5, 0x00165720, 0x00213134, 0xffd4e84a, 0x00210390, 0xffed0621, 0x000854e6, 0xfffd6ed6, 0x00006035, + 0x17121573, 0xf0da230b, 0x0b116cff, 0xf822979b, 0x05366bdc, 0xfce0dc2f, 0x019c9c8b, 0xff5be28d, 0x001bcd8e, 0x001dcb4a, 0xffd6bd01, 0x00202d09, 0xffed5624, 0x00083e6a, 0xfffd7310, 0x00005f66, + 0x160b5331, 0xf1360276, 0x0aec8e1c, 0xf82d488c, 0x0538b136, 0xfcd929f4, 0x01a556c1, 0xff546713, 0x002135bd, 0x001a68d8, 0xffd892b4, 0x001f5489, 0xffeda7b1, 0x00082718, 0xfffd779d, 0x00005e84, + 0x150656f8, 0xf192e932, 0x0ac68e9b, 0xf838e6c9, 0x053a48fa, 0xfcd1eac3, 0x01adcc91, 0xff4d0f02, 0x00268f13, 0x00170a47, 0xffda6921, 0x001e7a33, 0xffedfab8, 0x00080ef7, 0xfffd7c7a, 0x00005d92, + 0x140330a9, 0xf1f0c9c5, 0x0a9f7537, 0xf8456f65, 0x053b3400, 0xfccb1ed7, 0x01b5fd54, 0xff45db10, 0x002bd8fa, 0x0013b003, 0xffdc4007, 0x001d9e2a, 0xffee4f29, 0x0007f60f, 0xfffd81a4, 0x00005c8e, + 0x1301efed, 0xf24f96b5, 0x0a7748c0, 0xf852df56, 0x053b7332, 0xfcc4c658, 0x01bde86f, 0xff3ecbea, 0x003112e0, 0x00105a72, 0xffde1726, 0x001cc091, 0xffeea4f2, 0x0007dc65, 0xfffd8719, 0x00005b7b, + 0x1202a434, 0xf2af428c, 0x0a4e101f, 0xf861337c, 0x053b0791, 0xfcbee162, 0x01c58d50, 0xff37e23b, 0x00363c35, 0x000d09fc, 0xffdfee3f, 0x001be18a, 0xffeefc04, 0x0007c201, 0xfffd8cd7, 0x00005a58, + 0x11055cb4, 0xf30fbfd7, 0x0a23d24e, 0xf870689f, 0x0539f231, 0xfcb97001, 0x01cceb6e, 0xff311ea4, 0x003b546b, 0x0009bf05, 0xffe1c511, 0x001b0138, 0xffef544e, 0x0007a6e9, 0xfffd92db, 0x00005927, + 0x100a2864, 0xf371012c, 0x09f8965d, 0xf8807b70, 0x0538343a, 0xfcb47232, 0x01d4024c, 0xff2a81c4, 0x00405afa, 0x000679f2, 0xffe39b60, 0x001a1fbc, 0xffefadc0, 0x00078b24, 0xfffd9923, 0x000057e9, + 0x0f111603, 0xf3d2f926, 0x09cc636e, 0xf8916889, 0x0535cee9, 0xfcafe7e2, 0x01dad175, 0xff240c2f, 0x00454f5d, 0x00033b23, 0xffe570ed, 0x00193d3a, 0xfff00849, 0x00076eba, 0xfffd9fac, 0x0000569d, + 0x0e1a340d, 0xf4359a6a, 0x099f40b5, 0xf8a32c6e, 0x0532c38c, 0xfcabd0f2, 0x01e15880, 0xff1dbe77, 0x004a310f, 0x000002f9, 0xffe7457c, 0x001859d2, 0xfff063d9, 0x000751b0, 0xfffda675, 0x00005545, + 0x0d2590c3, 0xf498d7a5, 0x09713575, 0xf8b5c38d, 0x052f1386, 0xfca82d32, 0x01e7970e, 0xff179926, 0x004eff94, 0xfffcd1d3, 0xffe918ce, 0x001775a7, 0xfff0c060, 0x0007340d, 0xfffdad79, 0x000053e2, + 0x0c333a22, 0xf4fca390, 0x09424904, 0xf8c92a41, 0x052ac04c, 0xfca4fc64, 0x01ed8cc7, 0xff119cc0, 0x0053ba6e, 0xfff9a80d, 0xffeaeaab, 0x001690d9, 0xfff11dcd, 0x000715d9, 0xfffdb4b9, 0x00005274, }; } diff --git a/tools/resampler_tools/fir.cpp b/tools/resampler_tools/fir.cpp index ea3ef50..cc3d509 100644 --- a/tools/resampler_tools/fir.cpp +++ b/tools/resampler_tools/fir.cpp @@ -195,7 +195,8 @@ int main(int argc, char** argv) // total number of coefficients (one side) - const int N = (1 << nz) * nzc; + const int M = (1 << nz); + const int N = M * nzc; // generate the right half of the filter if (!debug) { @@ -220,22 +221,25 @@ int main(int argc, char** argv) } if (!polyphase) { - for (int i=0 ; i= (1LL<<(nc-1))) yi = (1LL<<(nc-1))-1; - printf("0x%08x, ", int32_t(yi)); - } else { - printf("%.9g%s ", y, debug ? 
"," : "f,"); + if (!format) { + int64_t yi = floor(y * ((1ULL<<(nc-1))) + 0.5); + if (yi >= (1LL<<(nc-1))) yi = (1LL<<(nc-1))-1; + printf("0x%08x, ", int32_t(yi)); + } else { + printf("%.9g%s ", y, debug ? "," : "f,"); + } } } } else { @@ -266,11 +270,6 @@ int main(int argc, char** argv) } if (!debug) { - if (!format) { - printf("\n 0x%08x ", 0); - } else { - printf("\n %.9g ", 0.0f); - } printf("\n};"); } printf("\n"); -- cgit v1.1 From 3f71761cab8a08e4ae9e4cf8cb8f1b82643825b2 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Sun, 4 Nov 2012 18:49:14 -0800 Subject: improve resample test - handle stereo input - input file can now be ommited, in this case a linear chirp will be used automatically - better usage information Change-Id: I5d62a6c26a9054a1c1a517a065b4df5a2cdcda22 --- services/audioflinger/test-resample.cpp | 163 ++++++++++++++++++++------------ 1 file changed, 102 insertions(+), 61 deletions(-) diff --git a/services/audioflinger/test-resample.cpp b/services/audioflinger/test-resample.cpp index 151313b..e6d5cbe 100644 --- a/services/audioflinger/test-resample.cpp +++ b/services/audioflinger/test-resample.cpp @@ -25,6 +25,7 @@ #include #include #include +#include using namespace android; @@ -61,31 +62,34 @@ struct HeaderWav { }; static int usage(const char* name) { - fprintf(stderr,"Usage: %s [-p] [-h] [-q ] [-i ] " - "[-o ] \n", name); - fprintf(stderr,"-p - enable profiling\n"); - fprintf(stderr,"-h - create wav file\n"); - fprintf(stderr,"-q - resampler quality\n"); - fprintf(stderr," dq : default quality\n"); - fprintf(stderr," lq : low quality\n"); - fprintf(stderr," mq : medium quality\n"); - fprintf(stderr," hq : high quality\n"); - fprintf(stderr," vhq : very high quality\n"); - fprintf(stderr,"-i - input file sample rate\n"); - fprintf(stderr,"-o - output file sample rate\n"); + fprintf(stderr,"Usage: %s [-p] [-h] [-s] [-q {dq|lq|mq|hq|vhq}] [-i input-sample-rate] " + "[-o output-sample-rate] [] \n", name); + fprintf(stderr," -p enable profiling\n"); + fprintf(stderr," -h create wav file\n"); + fprintf(stderr," -s stereo\n"); + fprintf(stderr," -q resampler quality\n"); + fprintf(stderr," dq : default quality\n"); + fprintf(stderr," lq : low quality\n"); + fprintf(stderr," mq : medium quality\n"); + fprintf(stderr," hq : high quality\n"); + fprintf(stderr," vhq : very high quality\n"); + fprintf(stderr," -i input file sample rate\n"); + fprintf(stderr," -o output file sample rate\n"); return -1; } int main(int argc, char* argv[]) { + const char* const progname = argv[0]; bool profiling = false; bool writeHeader = false; + int channels = 1; int input_freq = 0; int output_freq = 0; AudioResampler::src_quality quality = AudioResampler::DEFAULT_QUALITY; int ch; - while ((ch = getopt(argc, argv, "phq:i:o:")) != -1) { + while ((ch = getopt(argc, argv, "phsq:i:o:")) != -1) { switch (ch) { case 'p': profiling = true; @@ -93,6 +97,9 @@ int main(int argc, char* argv[]) { case 'h': writeHeader = true; break; + case 's': + channels = 2; + break; case 'q': if (!strcmp(optarg, "dq")) quality = AudioResampler::DEFAULT_QUALITY; @@ -105,7 +112,7 @@ int main(int argc, char* argv[]) { else if (!strcmp(optarg, "vhq")) quality = AudioResampler::VERY_HIGH_QUALITY; else { - usage(argv[0]); + usage(progname); return -1; } break; @@ -117,54 +124,74 @@ int main(int argc, char* argv[]) { break; case '?': default: - usage(argv[0]); + usage(progname); return -1; } } argc -= optind; + argv += optind; - if (argc != 2) { - usage(argv[0]); + const char* file_in = NULL; + const char* file_out = NULL; + 
if (argc == 1) { + file_out = argv[0]; + } else if (argc == 2) { + file_in = argv[0]; + file_out = argv[1]; + } else { + usage(progname); return -1; } - argv += optind; - // ---------------------------------------------------------- - struct stat st; - if (stat(argv[0], &st) < 0) { - fprintf(stderr, "stat: %s\n", strerror(errno)); - return -1; - } + size_t input_size; + void* input_vaddr; + if (argc == 2) { + struct stat st; + if (stat(file_in, &st) < 0) { + fprintf(stderr, "stat: %s\n", strerror(errno)); + return -1; + } - int input_fd = open(argv[0], O_RDONLY); - if (input_fd < 0) { - fprintf(stderr, "open: %s\n", strerror(errno)); - return -1; - } + int input_fd = open(file_in, O_RDONLY); + if (input_fd < 0) { + fprintf(stderr, "open: %s\n", strerror(errno)); + return -1; + } - size_t input_size = st.st_size; - void* input_vaddr = mmap(0, input_size, PROT_READ, MAP_PRIVATE, input_fd, - 0); - if (input_vaddr == MAP_FAILED ) { - fprintf(stderr, "mmap: %s\n", strerror(errno)); - return -1; + input_size = st.st_size; + input_vaddr = mmap(0, input_size, PROT_READ, MAP_PRIVATE, input_fd, 0); + if (input_vaddr == MAP_FAILED ) { + fprintf(stderr, "mmap: %s\n", strerror(errno)); + return -1; + } + } else { + double k = 1000; // Hz / s + double time = (input_freq / 2) / k; + size_t input_frames = size_t(input_freq * time); + input_size = channels * sizeof(int16_t) * input_frames; + input_vaddr = malloc(input_size); + int16_t* in = (int16_t*)input_vaddr; + for (size_t i=0 ; isetSampleRate(input_freq); - resampler->setVolume(0x1000, 0x1000); - resampler->resample((int*) output_vaddr, out_frames, &provider); + size_t out_frames = output_size/8; + resampler->setSampleRate(input_freq); + resampler->setVolume(0x1000, 0x1000); - if (profiling) { memset(output_vaddr, 0, output_size); timespec start, end; clock_gettime(CLOCK_MONOTONIC_HR, &start); resampler->resample((int*) output_vaddr, out_frames, &provider); + resampler->resample((int*) output_vaddr, out_frames, &provider); + resampler->resample((int*) output_vaddr, out_frames, &provider); + resampler->resample((int*) output_vaddr, out_frames, &provider); clock_gettime(CLOCK_MONOTONIC_HR, &end); int64_t start_ns = start.tv_sec * 1000000000LL + start.tv_nsec; int64_t end_ns = end.tv_sec * 1000000000LL + end.tv_nsec; - int64_t time = end_ns - start_ns; + int64_t time = (end_ns - start_ns)/4; printf("%f Mspl/s\n", out_frames/(time/1e9)/1e6); + + delete resampler; } + AudioResampler* resampler = AudioResampler::create(16, channels, + output_freq, quality); + size_t out_frames = output_size/8; + resampler->setSampleRate(input_freq); + resampler->setVolume(0x1000, 0x1000); + + memset(output_vaddr, 0, output_size); + resampler->resample((int*) output_vaddr, out_frames, &provider); + // down-mix (we just truncate and keep the left channel) int32_t* out = (int32_t*) output_vaddr; - int16_t* convert = (int16_t*) malloc(out_frames * sizeof(int16_t)); + int16_t* convert = (int16_t*) malloc(out_frames * channels * sizeof(int16_t)); for (size_t i = 0; i < out_frames; i++) { - int32_t s = out[i * 2] >> 12; - if (s > 32767) s = 32767; - else if (s < -32768) s = -32768; - convert[i] = int16_t(s); + for (int j=0 ; j> 12; + if (s > 32767) s = 32767; + else if (s < -32768) s = -32768; + convert[i * channels + j] = int16_t(s); + } } // write output to disk - int output_fd = open(argv[1], O_WRONLY | O_CREAT | O_TRUNC, + int output_fd = open(file_out, O_WRONLY | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH); if (output_fd < 0) { fprintf(stderr, "open: %s\n", 
strerror(errno)); @@ -222,11 +263,11 @@ int main(int argc, char* argv[]) { } if (writeHeader) { - HeaderWav wav(out_frames*sizeof(int16_t), 1, output_freq, 16); + HeaderWav wav(out_frames * channels * sizeof(int16_t), channels, output_freq, 16); write(output_fd, &wav, sizeof(wav)); } - write(output_fd, convert, out_frames * sizeof(int16_t)); + write(output_fd, convert, out_frames * channels * sizeof(int16_t)); close(output_fd); return 0; -- cgit v1.1 From 9c5fdd83f9b9f49be35107971feb33528d60b945 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 5 Nov 2012 13:38:15 -0800 Subject: Simplify control block flag names Use only one symbol per flag Change-Id: Ia3582e2134abd60c896d11337face65383e79c7c --- include/private/media/AudioTrackShared.h | 31 +++++----------- media/libmedia/AudioRecord.cpp | 22 ++++++------ media/libmedia/AudioTrack.cpp | 62 ++++++++++++++++---------------- services/audioflinger/AudioFlinger.cpp | 26 +++++++------- 4 files changed, 64 insertions(+), 77 deletions(-) diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index fe42afa..6a86a00 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -33,28 +33,15 @@ namespace android { #define WAIT_PERIOD_MS 10 #define RESTORE_TIMEOUT_MS 5000 // Maximum waiting time for a track to be restored -#define CBLK_UNDERRUN_MSK 0x0001 -#define CBLK_UNDERRUN_ON 0x0001 // underrun (out) or overrrun (in) indication -#define CBLK_UNDERRUN_OFF 0x0000 // no underrun -#define CBLK_DIRECTION_MSK 0x0002 -#define CBLK_DIRECTION_OUT 0x0002 // this cblk is for an AudioTrack -#define CBLK_DIRECTION_IN 0x0000 // this cblk is for an AudioRecord -#define CBLK_FORCEREADY_MSK 0x0004 -#define CBLK_FORCEREADY_ON 0x0004 // track is considered ready immediately by AudioFlinger -#define CBLK_FORCEREADY_OFF 0x0000 // track is ready when buffer full -#define CBLK_INVALID_MSK 0x0008 -#define CBLK_INVALID_ON 0x0008 // track buffer is invalidated by AudioFlinger: -#define CBLK_INVALID_OFF 0x0000 // must be re-created -#define CBLK_DISABLED_MSK 0x0010 -#define CBLK_DISABLED_ON 0x0010 // track disabled by AudioFlinger due to underrun: -#define CBLK_DISABLED_OFF 0x0000 // must be re-started -#define CBLK_RESTORING_MSK 0x0020 -#define CBLK_RESTORING_ON 0x0020 // track is being restored after invalidation -#define CBLK_RESTORING_OFF 0x0000 // by AudioFlinger -#define CBLK_RESTORED_MSK 0x0040 -#define CBLK_RESTORED_ON 0x0040 // track has been restored after invalidation -#define CBLK_RESTORED_OFF 0x0040 // by AudioFlinger -#define CBLK_FAST 0x0080 // AudioFlinger successfully created a fast track +#define CBLK_UNDERRUN 0x01 // set: underrun (out) or overrrun (in), clear: no underrun or overrun +#define CBLK_DIRECTION 0x02 // set: cblk is for an AudioTrack, clear: for AudioRecord +#define CBLK_FORCEREADY 0x04 // set: track is considered ready immediately by AudioFlinger, + // clear: track is ready when buffer full +#define CBLK_INVALID 0x08 // track buffer invalidated by AudioFlinger, need to re-create +#define CBLK_DISABLED 0x10 // track disabled by AudioFlinger due to underrun, need to re-start +#define CBLK_RESTORING 0x20 // track is being restored after invalidation by AudioFlinger +#define CBLK_RESTORED 0x40 // track has been restored after invalidation by AudioFlinger +#define CBLK_FAST 0x80 // AudioFlinger successfully created a fast track // Important: do not add any virtual methods, including ~ struct audio_track_cblk_t diff --git a/media/libmedia/AudioRecord.cpp 
b/media/libmedia/AudioRecord.cpp index bdbee0d..bd558fa 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -292,16 +292,16 @@ status_t AudioRecord::start(AudioSystem::sync_event_t event, int triggerSession) mActive = true; cblk->lock.lock(); - if (!(cblk->flags & CBLK_INVALID_MSK)) { + if (!(cblk->flags & CBLK_INVALID)) { cblk->lock.unlock(); ALOGV("mAudioRecord->start()"); ret = mAudioRecord->start(event, triggerSession); cblk->lock.lock(); if (ret == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID_ON, &cblk->flags); + android_atomic_or(CBLK_INVALID, &cblk->flags); } } - if (cblk->flags & CBLK_INVALID_MSK) { + if (cblk->flags & CBLK_INVALID) { ret = restoreRecord_l(cblk); } cblk->lock.unlock(); @@ -466,7 +466,7 @@ status_t AudioRecord::openRecord_l( mCblkMemory = cblk; mCblk = static_cast(cblk->pointer()); mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); - android_atomic_and(~CBLK_DIRECTION_MSK, &mCblk->flags); + android_atomic_and(~CBLK_DIRECTION, &mCblk->flags); mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; mCblk->waitTimeMs = 0; return NO_ERROR; @@ -499,7 +499,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) cblk->lock.unlock(); return WOULD_BLOCK; } - if (!(cblk->flags & CBLK_INVALID_MSK)) { + if (!(cblk->flags & CBLK_INVALID)) { mLock.unlock(); result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); cblk->lock.unlock(); @@ -509,7 +509,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) } cblk->lock.lock(); } - if (cblk->flags & CBLK_INVALID_MSK) { + if (cblk->flags & CBLK_INVALID) { goto create_new_record; } if (CC_UNLIKELY(result != NO_ERROR)) { @@ -522,7 +522,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) result = mAudioRecord->start(AudioSystem::SYNC_EVENT_SAME, 0); cblk->lock.lock(); if (result == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID_ON, &cblk->flags); + android_atomic_or(CBLK_INVALID, &cblk->flags); create_new_record: result = AudioRecord::restoreRecord_l(cblk); } @@ -739,7 +739,7 @@ bool AudioRecord::processAudioBuffer(const sp& thread) // The value of active is stale, but we are almost sure to be active here because // otherwise we would have exited when obtainBuffer returned STOPPED earlier. 
ALOGV("Overrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); - if (!(android_atomic_or(CBLK_UNDERRUN_ON, &cblk->flags) & CBLK_UNDERRUN_MSK)) { + if (!(android_atomic_or(CBLK_UNDERRUN, &cblk->flags) & CBLK_UNDERRUN)) { mCbf(EVENT_OVERRUN, mUserData, NULL); } } @@ -759,7 +759,7 @@ status_t AudioRecord::restoreRecord_l(audio_track_cblk_t*& cblk) { status_t result; - if (!(android_atomic_or(CBLK_RESTORING_ON, &cblk->flags) & CBLK_RESTORING_MSK)) { + if (!(android_atomic_or(CBLK_RESTORING, &cblk->flags) & CBLK_RESTORING)) { ALOGW("dead IAudioRecord, creating a new one"); // signal old cblk condition so that other threads waiting for available buffers stop // waiting now @@ -780,10 +780,10 @@ status_t AudioRecord::restoreRecord_l(audio_track_cblk_t*& cblk) } // signal old cblk condition for other threads waiting for restore completion - android_atomic_or(CBLK_RESTORED_ON, &cblk->flags); + android_atomic_or(CBLK_RESTORED, &cblk->flags); cblk->cv.broadcast(); } else { - if (!(cblk->flags & CBLK_RESTORED_MSK)) { + if (!(cblk->flags & CBLK_RESTORED)) { ALOGW("dead IAudioRecord, waiting for a new one to be created"); mLock.unlock(); result = cblk->cv.waitRelative(cblk->lock, milliseconds(RESTORE_TIMEOUT_MS)); diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index ffed161..5bd1aa7 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -391,7 +391,7 @@ void AudioTrack::start() cblk->lock.lock(); cblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; cblk->waitTimeMs = 0; - android_atomic_and(~CBLK_DISABLED_ON, &cblk->flags); + android_atomic_and(~CBLK_DISABLED, &cblk->flags); if (t != 0) { t->resume(); } else { @@ -402,16 +402,16 @@ void AudioTrack::start() ALOGV("start %p before lock cblk %p", this, mCblk); status_t status = NO_ERROR; - if (!(cblk->flags & CBLK_INVALID_MSK)) { + if (!(cblk->flags & CBLK_INVALID)) { cblk->lock.unlock(); ALOGV("mAudioTrack->start()"); status = mAudioTrack->start(); cblk->lock.lock(); if (status == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID_ON, &cblk->flags); + android_atomic_or(CBLK_INVALID, &cblk->flags); } } - if (cblk->flags & CBLK_INVALID_MSK) { + if (cblk->flags & CBLK_INVALID) { status = restoreTrack_l(cblk, true); } cblk->lock.unlock(); @@ -691,7 +691,7 @@ status_t AudioTrack::setPosition(uint32_t position) if (position > mCblk->user) return BAD_VALUE; mCblk->server = position; - android_atomic_or(CBLK_FORCEREADY_ON, &mCblk->flags); + android_atomic_or(CBLK_FORCEREADY, &mCblk->flags); return NO_ERROR; } @@ -905,7 +905,7 @@ status_t AudioTrack::createTrack_l( mCblkMemory = cblk; mCblk = static_cast(cblk->pointer()); // old has the previous value of mCblk->flags before the "or" operation - int32_t old = android_atomic_or(CBLK_DIRECTION_OUT, &mCblk->flags); + int32_t old = android_atomic_or(CBLK_DIRECTION, &mCblk->flags); if (flags & AUDIO_OUTPUT_FLAG_FAST) { if (old & CBLK_FAST) { ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", mCblk->frameCount); @@ -959,7 +959,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) uint32_t framesAvail = cblk->framesAvailable(); cblk->lock.lock(); - if (cblk->flags & CBLK_INVALID_MSK) { + if (cblk->flags & CBLK_INVALID) { goto create_new_track; } cblk->lock.unlock(); @@ -978,7 +978,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) cblk->lock.unlock(); return WOULD_BLOCK; } - if (!(cblk->flags & CBLK_INVALID_MSK)) { + if (!(cblk->flags & CBLK_INVALID)) { mLock.unlock(); result = 
cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); cblk->lock.unlock(); @@ -989,7 +989,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) cblk->lock.lock(); } - if (cblk->flags & CBLK_INVALID_MSK) { + if (cblk->flags & CBLK_INVALID) { goto create_new_track; } if (CC_UNLIKELY(result != NO_ERROR)) { @@ -1005,7 +1005,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) result = mAudioTrack->start(); cblk->lock.lock(); if (result == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID_ON, &cblk->flags); + android_atomic_or(CBLK_INVALID, &cblk->flags); create_new_track: result = restoreTrack_l(cblk, false); } @@ -1063,8 +1063,8 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer) mCblk->stepUser(audioBuffer->frameCount); if (audioBuffer->frameCount > 0) { // restart track if it was disabled by audioflinger due to previous underrun - if (mActive && (mCblk->flags & CBLK_DISABLED_MSK)) { - android_atomic_and(~CBLK_DISABLED_ON, &mCblk->flags); + if (mActive && (mCblk->flags & CBLK_DISABLED)) { + android_atomic_and(~CBLK_DISABLED, &mCblk->flags); ALOGW("releaseBuffer() track %p name=%#x disabled, restarting", this, mCblk->mName); mAudioTrack->start(); } @@ -1149,16 +1149,16 @@ status_t TimedAudioTrack::allocateTimedBuffer(size_t size, sp* buffer) // If the track is not invalid already, try to allocate a buffer. alloc // fails indicating that the server is dead, flag the track as invalid so // we can attempt to restore in just a bit. - if (!(mCblk->flags & CBLK_INVALID_MSK)) { + if (!(mCblk->flags & CBLK_INVALID)) { result = mAudioTrack->allocateTimedBuffer(size, buffer); if (result == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID_ON, &mCblk->flags); + android_atomic_or(CBLK_INVALID, &mCblk->flags); } } // If the track is invalid at this point, attempt to restore it. and try the // allocation one more time. - if (mCblk->flags & CBLK_INVALID_MSK) { + if (mCblk->flags & CBLK_INVALID) { mCblk->lock.lock(); result = restoreTrack_l(mCblk, false); mCblk->lock.unlock(); @@ -1178,8 +1178,8 @@ status_t TimedAudioTrack::queueTimedBuffer(const sp& buffer, AutoMutex lock(mLock); // restart track if it was disabled by audioflinger due to previous underrun if (buffer->size() != 0 && status == NO_ERROR && - mActive && (mCblk->flags & CBLK_DISABLED_MSK)) { - android_atomic_and(~CBLK_DISABLED_ON, &mCblk->flags); + mActive && (mCblk->flags & CBLK_DISABLED)) { + android_atomic_and(~CBLK_DISABLED, &mCblk->flags); ALOGW("queueTimedBuffer() track %p disabled, restarting", this); mAudioTrack->start(); } @@ -1213,7 +1213,7 @@ bool AudioTrack::processAudioBuffer(const sp& thread) // Manage underrun callback if (active && (cblk->framesAvailable() == cblk->frameCount)) { ALOGV("Underrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); - if (!(android_atomic_or(CBLK_UNDERRUN_ON, &cblk->flags) & CBLK_UNDERRUN_MSK)) { + if (!(android_atomic_or(CBLK_UNDERRUN, &cblk->flags) & CBLK_UNDERRUN)) { mCbf(EVENT_UNDERRUN, mUserData, 0); if (cblk->server == cblk->frameCount) { mCbf(EVENT_BUFFER_END, mUserData, 0); @@ -1333,7 +1333,7 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& cblk, bool fromStart) { status_t result; - if (!(android_atomic_or(CBLK_RESTORING_ON, &cblk->flags) & CBLK_RESTORING_MSK)) { + if (!(android_atomic_or(CBLK_RESTORING, &cblk->flags) & CBLK_RESTORING)) { ALOGW("dead IAudioTrack, creating a new one from %s TID %d", fromStart ? 
"start()" : "obtainBuffer()", gettid()); @@ -1381,8 +1381,8 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& cblk, bool fromStart) memset(mCblk->buffers, 0, frames * mCblk->frameSize); } // restart playback even if buffer is not completely filled. - android_atomic_or(CBLK_FORCEREADY_ON, &mCblk->flags); - // stepUser() clears CBLK_UNDERRUN_ON flag enabling underrun callbacks to + android_atomic_or(CBLK_FORCEREADY, &mCblk->flags); + // stepUser() clears CBLK_UNDERRUN flag enabling underrun callbacks to // the client mCblk->stepUser(frames); } @@ -1399,17 +1399,17 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& cblk, bool fromStart) } } if (result != NO_ERROR) { - android_atomic_and(~CBLK_RESTORING_ON, &cblk->flags); + android_atomic_and(~CBLK_RESTORING, &cblk->flags); ALOGW_IF(result != NO_ERROR, "restoreTrack_l() failed status %d", result); } mRestoreStatus = result; // signal old cblk condition for other threads waiting for restore completion - android_atomic_or(CBLK_RESTORED_ON, &cblk->flags); + android_atomic_or(CBLK_RESTORED, &cblk->flags); cblk->cv.broadcast(); } else { bool haveLogged = false; for (;;) { - if (cblk->flags & CBLK_RESTORED_MSK) { + if (cblk->flags & CBLK_RESTORED) { ALOGW("dead IAudioTrack restored"); result = mRestoreStatus; cblk->lock.unlock(); @@ -1534,7 +1534,7 @@ uint32_t audio_track_cblk_t::stepUser(uint32_t frameCount) uint32_t u = user; u += frameCount; // Ensure that user is never ahead of server for AudioRecord - if (flags & CBLK_DIRECTION_MSK) { + if (flags & CBLK_DIRECTION) { // If stepServer() has been called once, switch to normal obtainBuffer() timeout period if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS-1) { bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; @@ -1558,8 +1558,8 @@ uint32_t audio_track_cblk_t::stepUser(uint32_t frameCount) user = u; // Clear flow control error condition as new data has been written/read to/from buffer. - if (flags & CBLK_UNDERRUN_MSK) { - android_atomic_and(~CBLK_UNDERRUN_MSK, &flags); + if (flags & CBLK_UNDERRUN) { + android_atomic_and(~CBLK_UNDERRUN, &flags); } return u; @@ -1578,7 +1578,7 @@ bool audio_track_cblk_t::stepServer(uint32_t frameCount) bool flushed = (s == user); s += frameCount; - if (flags & CBLK_DIRECTION_MSK) { + if (flags & CBLK_DIRECTION) { // Mark that we have read the first buffer so that next time stepUser() is called // we switch to normal obtainBuffer() timeout period if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS) { @@ -1616,7 +1616,7 @@ bool audio_track_cblk_t::stepServer(uint32_t frameCount) server = s; - if (!(flags & CBLK_INVALID_MSK)) { + if (!(flags & CBLK_INVALID)) { cv.signal(); } lock.unlock(); @@ -1639,7 +1639,7 @@ uint32_t audio_track_cblk_t::framesAvailable_l() uint32_t u = user; uint32_t s = server; - if (flags & CBLK_DIRECTION_MSK) { + if (flags & CBLK_DIRECTION) { uint32_t limit = (s < loopStart) ? 
s : loopStart; return limit + frameCount - u; } else { @@ -1652,7 +1652,7 @@ uint32_t audio_track_cblk_t::framesReady() uint32_t u = user; uint32_t s = server; - if (flags & CBLK_DIRECTION_MSK) { + if (flags & CBLK_DIRECTION) { if (u < loopEnd) { return u - s; } else { diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 35bd431..5f2f441 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -2167,7 +2167,7 @@ uint32_t AudioFlinger::PlaybackThread::hasAudioSession(int sessionId) const for (size_t i = 0; i < mTracks.size(); ++i) { sp track = mTracks[i]; if (sessionId == track->sessionId() && - !(track->mCblk->flags & CBLK_INVALID_MSK)) { + !(track->mCblk->flags & CBLK_INVALID)) { result |= TRACK_SESSION; break; } @@ -2186,7 +2186,7 @@ uint32_t AudioFlinger::PlaybackThread::getStrategyForSession_l(int sessionId) for (size_t i = 0; i < mTracks.size(); i++) { sp track = mTracks[i]; if (sessionId == track->sessionId() && - !(track->mCblk->flags & CBLK_INVALID_MSK)) { + !(track->mCblk->flags & CBLK_INVALID)) { return AudioSystem::getStrategyForStream(track->streamType()); } } @@ -3032,7 +3032,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac } // indicate to client process that the track was disabled because of underrun; // it will then automatically call start() when data is available - android_atomic_or(CBLK_DISABLED_ON, &track->mCblk->flags); + android_atomic_or(CBLK_DISABLED, &track->mCblk->flags); // remove from active list, but state remains ACTIVE [confusing but true] isActive = false; break; @@ -3314,7 +3314,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac tracksToRemove->add(track); // indicate to client process that the track was disabled because of underrun; // it will then automatically call start() when data is available - android_atomic_or(CBLK_DISABLED_ON, &cblk->flags); + android_atomic_or(CBLK_DISABLED, &cblk->flags); // If one track is not ready, mark the mixer also not ready if: // - the mixer was ready during previous round OR // - no other track is ready @@ -3447,7 +3447,7 @@ void AudioFlinger::PlaybackThread::invalidateTracks(audio_stream_type_t streamTy for (size_t i = 0; i < size; i++) { sp t = mTracks[i]; if (t->streamType() == streamType) { - android_atomic_or(CBLK_INVALID_ON, &t->mCblk->flags); + android_atomic_or(CBLK_INVALID, &t->mCblk->flags); t->mCblk->cv.signal(); } } @@ -4227,7 +4227,7 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( memset(mBuffer, 0, frameCount*channelCount*sizeof(int16_t)); // Force underrun condition to avoid false underrun callback until first data is // written to buffer (other flags are cleared) - mCblk->flags = CBLK_UNDERRUN_ON; + mCblk->flags = CBLK_UNDERRUN; } else { mBuffer = sharedBuffer->pointer(); } @@ -4256,7 +4256,7 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( memset(mBuffer, 0, frameCount*channelCount*sizeof(int16_t)); // Force underrun condition to avoid false underrun callback until first data is // written to buffer (other flags are cleared) - mCblk->flags = CBLK_UNDERRUN_ON; + mCblk->flags = CBLK_UNDERRUN; mBufferEnd = (uint8_t *)mBuffer + bufferSize; } } @@ -4600,9 +4600,9 @@ bool AudioFlinger::PlaybackThread::Track::isReady() const { if (mFillingUpStatus != FS_FILLING || isStopped() || isPausing()) return true; if (framesReady() >= mCblk->frameCount || - (mCblk->flags & CBLK_FORCEREADY_MSK)) { + (mCblk->flags & CBLK_FORCEREADY)) { mFillingUpStatus = 
FS_FILLED; - android_atomic_and(~CBLK_FORCEREADY_MSK, &mCblk->flags); + android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags); return true; } return false; @@ -4745,8 +4745,8 @@ void AudioFlinger::PlaybackThread::Track::reset() TrackBase::reset(); // Force underrun condition to avoid false underrun callback until first data is // written to buffer - android_atomic_and(~CBLK_FORCEREADY_MSK, &mCblk->flags); - android_atomic_or(CBLK_UNDERRUN_ON, &mCblk->flags); + android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags); + android_atomic_or(CBLK_UNDERRUN, &mCblk->flags); mFillingUpStatus = FS_FILLING; mResetDone = true; if (mState == FLUSHED) { @@ -5495,7 +5495,7 @@ void AudioFlinger::RecordThread::RecordTrack::stop() TrackBase::reset(); // Force overrun condition to avoid false overrun callback until first data is // read from buffer - android_atomic_or(CBLK_UNDERRUN_ON, &mCblk->flags); + android_atomic_or(CBLK_UNDERRUN, &mCblk->flags); } recordThread->mLock.unlock(); if (doStop) { @@ -5540,7 +5540,7 @@ AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( { if (mCblk != NULL) { - mCblk->flags |= CBLK_DIRECTION_OUT; + mCblk->flags |= CBLK_DIRECTION; mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); mOutBuffer.frameCount = 0; playbackThread->mTracks.add(this); -- cgit v1.1 From 847d05dc8fa144dcf8f4f435d6a6ac1727f00937 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 27 Feb 2012 16:05:09 -0800 Subject: Remove deprecated AudioTrack APIs Change-Id: I88be6525f3e33df529c0c3cb701d12a484809477 --- include/media/AudioTrack.h | 12 ------------ media/libmedia/AudioTrack.cpp | 22 ---------------------- 2 files changed, 34 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 3d45503..76af2f8 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -167,18 +167,6 @@ public: int notificationFrames = 0, int sessionId = 0); - // DEPRECATED - explicit AudioTrack( int streamType, - uint32_t sampleRate = 0, - int format = AUDIO_FORMAT_DEFAULT, - int channelMask = 0, - int frameCount = 0, - uint32_t flags = (uint32_t) AUDIO_OUTPUT_FLAG_NONE, - callback_t cbf = 0, - void* user = 0, - int notificationFrames = 0, - int sessionId = 0); - /* Creates an audio track and registers it with AudioFlinger. With this constructor, * the PCM data to be rendered by AudioTrack is passed in a shared memory buffer * identified by the argument sharedBuffer. This prototype is for static buffer playback. 
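Illustrative sketch (not part of the patch): code that used the removed int-typed constructor migrates to the remaining overload that takes the proper enum types; the trailing arguments keep their defaults. The constants are the standard system/audio.h values, and the variable names and parameter choices below are hypothetical.

    // Hypothetical migration example for a simple streaming (non-static) track.
    // Before (removed): the same call, but with plain ints for streamType/format/channelMask.
    // After, using the typed constructor that remains:
    android::AudioTrack track(AUDIO_STREAM_MUSIC,        // audio_stream_type_t
                              44100,                     // sample rate in Hz
                              AUDIO_FORMAT_PCM_16_BIT,   // audio_format_t
                              AUDIO_CHANNEL_OUT_STEREO,  // audio_channel_mask_t
                              0);                        // frameCount: 0 lets AudioTrack pick its default
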
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index ffed161..24db1bd 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -120,28 +120,6 @@ AudioTrack::AudioTrack( 0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId); } -// DEPRECATED -AudioTrack::AudioTrack( - int streamType, - uint32_t sampleRate, - int format, - int channelMask, - int frameCount, - uint32_t flags, - callback_t cbf, - void* user, - int notificationFrames, - int sessionId) - : mStatus(NO_INIT), - mIsTimed(false), - mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT) -{ - mStatus = set((audio_stream_type_t)streamType, sampleRate, (audio_format_t)format, - (audio_channel_mask_t) channelMask, - frameCount, (audio_output_flags_t)flags, cbf, user, notificationFrames, - 0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId); -} - AudioTrack::AudioTrack( audio_stream_type_t streamType, uint32_t sampleRate, -- cgit v1.1 From b1c0993b215c5c3eebd1c6bafc22bba23d57a70b Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 27 Feb 2012 16:21:04 -0800 Subject: Add all-channel AudioTrack::setVolume() API Add combined channel APIs setVolume to AudioTrack, and remove obsolete getVolume. Change-Id: I0c87bfdbff4f4292259fa33e65f67badbafd270b --- include/media/AudioTrack.h | 7 ++++++- media/libmedia/AudioTrack.cpp | 9 ++------- media/libmedia/ToneGenerator.cpp | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 3d45503..6d19092 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -285,9 +285,14 @@ public: /* Set volume for this track, mostly used for games' sound effects * left and right volumes. Levels must be >= 0.0 and <= 1.0. + * This is the older API. New applications should use setVolume(float) when possible. */ status_t setVolume(float left, float right); - void getVolume(float* left, float* right) const; + + /* Set volume for all channels. This is the preferred API for new applications, + * especially for multi-channel content. + */ + status_t setVolume(float volume); /* Set the send level for this track. An auxiliary effect should be attached * to the track with attachEffect(). Level must be >= 0.0 and <= 1.0. 
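Illustrative sketch (not part of the patch): the new single-argument overload applies one level to every channel and, as the AudioTrack.cpp hunk below shows, currently forwards to the two-argument variant. A hypothetical caller, assuming an already-initialized AudioTrack named track:

    status_t err = track.setVolume(0.5f);   // preferred: one level for all channels
    if (err != NO_ERROR) {
        ALOGW("setVolume(all channels) failed: %d", err);
    }
    // The per-channel form remains available for legacy stereo balance use:
    err = track.setVolume(0.25f, 1.0f);     // left, right, each in [0.0, 1.0]
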
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index ffed161..596523d 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -529,14 +529,9 @@ status_t AudioTrack::setVolume(float left, float right) return NO_ERROR; } -void AudioTrack::getVolume(float* left, float* right) const +status_t AudioTrack::setVolume(float volume) { - if (left != NULL) { - *left = mVolume[LEFT]; - } - if (right != NULL) { - *right = mVolume[RIGHT]; - } + return setVolume(volume, volume); } status_t AudioTrack::setAuxEffectSendLevel(float level) diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index 253602d..42584fe 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -1036,7 +1036,7 @@ bool ToneGenerator::initAudioTrack() { goto initAudioTrack_exit; } - mpAudioTrack->setVolume(mVolume, mVolume); + mpAudioTrack->setVolume(mVolume); mState = TONE_INIT; -- cgit v1.1 From 287fedb1b1430f138c1f583869b10294773945be Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 5 Nov 2012 13:39:09 -0800 Subject: Merge duplicate code in TrackBase::TrackBase() Change-Id: Id8a7db4d94888796a9dcbed4cac99941705cd174 --- services/audioflinger/AudioFlinger.cpp | 57 ++++++++++++++-------------------- 1 file changed, 23 insertions(+), 34 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 5f2f441..cb44114 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -4195,6 +4195,9 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( // mChannelCount // mChannelMask { + // client == 0 implies sharedBuffer == 0 + ALOG_ASSERT(!(client == 0 && sharedBuffer != 0)); + ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), sharedBuffer->size()); @@ -4206,33 +4209,11 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( size += bufferSize; } - if (client != NULL) { + if (client != 0) { mCblkMemory = client->heap()->allocate(size); if (mCblkMemory != 0) { mCblk = static_cast(mCblkMemory->pointer()); - if (mCblk != NULL) { // construct the shared structure in-place. - new(mCblk) audio_track_cblk_t(); - // clear all buffers - mCblk->frameCount = frameCount; - mCblk->sampleRate = sampleRate; -// uncomment the following lines to quickly test 32-bit wraparound -// mCblk->user = 0xffff0000; -// mCblk->server = 0xffff0000; -// mCblk->userBase = 0xffff0000; -// mCblk->serverBase = 0xffff0000; - mChannelCount = channelCount; - mChannelMask = channelMask; - if (sharedBuffer == 0) { - mBuffer = (char*)mCblk + sizeof(audio_track_cblk_t); - memset(mBuffer, 0, frameCount*channelCount*sizeof(int16_t)); - // Force underrun condition to avoid false underrun callback until first data is - // written to buffer (other flags are cleared) - mCblk->flags = CBLK_UNDERRUN; - } else { - mBuffer = sharedBuffer->pointer(); - } - mBufferEnd = (uint8_t *)mBuffer + bufferSize; - } + // can't assume mCblk != NULL } else { ALOGE("not enough memory for AudioTrack size=%u", size); client->heap()->dump("AudioTrack"); @@ -4240,23 +4221,31 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( } } else { mCblk = (audio_track_cblk_t *)(new uint8_t[size]); - // construct the shared structure in-place. + // assume mCblk != NULL + } + + // construct the shared structure in-place. 
+ if (mCblk != NULL) { new(mCblk) audio_track_cblk_t(); // clear all buffers mCblk->frameCount = frameCount; mCblk->sampleRate = sampleRate; // uncomment the following lines to quickly test 32-bit wraparound -// mCblk->user = 0xffff0000; -// mCblk->server = 0xffff0000; -// mCblk->userBase = 0xffff0000; -// mCblk->serverBase = 0xffff0000; +// mCblk->user = 0xffff0000; +// mCblk->server = 0xffff0000; +// mCblk->userBase = 0xffff0000; +// mCblk->serverBase = 0xffff0000; mChannelCount = channelCount; mChannelMask = channelMask; - mBuffer = (char*)mCblk + sizeof(audio_track_cblk_t); - memset(mBuffer, 0, frameCount*channelCount*sizeof(int16_t)); - // Force underrun condition to avoid false underrun callback until first data is - // written to buffer (other flags are cleared) - mCblk->flags = CBLK_UNDERRUN; + if (sharedBuffer == 0) { + mBuffer = (char*)mCblk + sizeof(audio_track_cblk_t); + memset(mBuffer, 0, frameCount*channelCount*sizeof(int16_t)); + // Force underrun condition to avoid false underrun callback until first data is + // written to buffer (other flags are cleared) + mCblk->flags = CBLK_UNDERRUN; + } else { + mBuffer = sharedBuffer->pointer(); + } mBufferEnd = (uint8_t *)mBuffer + bufferSize; } } -- cgit v1.1 From d2c38fc4d5dc742d7441444316849510dd2b7363 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 1 Nov 2012 14:58:02 -0700 Subject: Cache mCblk in local variable cblk Use "iMem" for sp Change-Id: I2f1fbbc517fbd77cfc92f6c3b1f253c26bae93b0 --- media/libmedia/AudioTrack.cpp | 141 +++++++++++++++++++++++------------------- 1 file changed, 79 insertions(+), 62 deletions(-) diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 9f087c2..523d844 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -378,7 +378,7 @@ void AudioTrack::start() androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO); } - ALOGV("start %p before lock cblk %p", this, mCblk); + ALOGV("start %p before lock cblk %p", this, cblk); status_t status = NO_ERROR; if (!(cblk->flags & CBLK_INVALID)) { cblk->lock.unlock(); @@ -390,7 +390,9 @@ void AudioTrack::start() } } if (cblk->flags & CBLK_INVALID) { - status = restoreTrack_l(cblk, true); + audio_track_cblk_t* temp = cblk; + status = restoreTrack_l(temp, true); + cblk = temp; } cblk->lock.unlock(); if (status != NO_ERROR) { @@ -664,12 +666,13 @@ status_t AudioTrack::setPosition(uint32_t position) if (!stopped_l()) return INVALID_OPERATION; - Mutex::Autolock _l(mCblk->lock); + audio_track_cblk_t* cblk = mCblk; + Mutex::Autolock _l(cblk->lock); - if (position > mCblk->user) return BAD_VALUE; + if (position > cblk->user) return BAD_VALUE; - mCblk->server = position; - android_atomic_or(CBLK_FORCEREADY, &mCblk->flags); + cblk->server = position; + android_atomic_or(CBLK_FORCEREADY, &cblk->flags); return NO_ERROR; } @@ -691,7 +694,8 @@ status_t AudioTrack::reload() flush_l(); - mCblk->stepUser(mCblk->frameCount); + audio_track_cblk_t* cblk = mCblk; + cblk->stepUser(cblk->frameCount); return NO_ERROR; } @@ -874,50 +878,51 @@ status_t AudioTrack::createTrack_l( ALOGE("AudioFlinger could not create track, status: %d", status); return status; } - sp cblk = track->getCblk(); - if (cblk == 0) { + sp iMem = track->getCblk(); + if (iMem == 0) { ALOGE("Could not get control block"); return NO_INIT; } mAudioTrack = track; - mCblkMemory = cblk; - mCblk = static_cast(cblk->pointer()); - // old has the previous value of mCblk->flags before the "or" operation - int32_t old = android_atomic_or(CBLK_DIRECTION, &mCblk->flags); + 
mCblkMemory = iMem; + audio_track_cblk_t* cblk = static_cast(iMem->pointer()); + mCblk = cblk; + // old has the previous value of cblk->flags before the "or" operation + int32_t old = android_atomic_or(CBLK_DIRECTION, &cblk->flags); if (flags & AUDIO_OUTPUT_FLAG_FAST) { if (old & CBLK_FAST) { - ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", mCblk->frameCount); + ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", cblk->frameCount); } else { - ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %u", mCblk->frameCount); + ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %u", cblk->frameCount); // once denied, do not request again if IAudioTrack is re-created flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST); mFlags = flags; } if (sharedBuffer == 0) { - mNotificationFramesAct = mCblk->frameCount/2; + mNotificationFramesAct = cblk->frameCount/2; } } if (sharedBuffer == 0) { - mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); + cblk->buffers = (char*)cblk + sizeof(audio_track_cblk_t); } else { - mCblk->buffers = sharedBuffer->pointer(); + cblk->buffers = sharedBuffer->pointer(); // Force buffer full condition as data is already present in shared memory - mCblk->stepUser(mCblk->frameCount); + cblk->stepUser(cblk->frameCount); } - mCblk->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) | + cblk->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) | uint16_t(mVolume[LEFT] * 0x1000)); - mCblk->setSendLevel(mSendLevel); + cblk->setSendLevel(mSendLevel); mAudioTrack->attachAuxEffect(mAuxEffectId); - mCblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; - mCblk->waitTimeMs = 0; + cblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; + cblk->waitTimeMs = 0; mRemainingFrames = mNotificationFramesAct; // FIXME don't believe this lie - mLatency = afLatency + (1000*mCblk->frameCount) / sampleRate; + mLatency = afLatency + (1000*cblk->frameCount) / sampleRate; // If IAudioTrack is re-created, don't let the requested frameCount // decrease. This can confuse clients that cache frameCount(). 
- if (mCblk->frameCount > mFrameCount) { - mFrameCount = mCblk->frameCount; + if (cblk->frameCount > mFrameCount) { + mFrameCount = cblk->frameCount; } return NO_ERROR; } @@ -985,7 +990,9 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) if (result == DEAD_OBJECT) { android_atomic_or(CBLK_INVALID, &cblk->flags); create_new_track: - result = restoreTrack_l(cblk, false); + audio_track_cblk_t* temp = cblk; + result = restoreTrack_l(temp, false); + cblk = temp; } if (result != NO_ERROR) { ALOGW("obtainBuffer create Track error %d", result); @@ -1038,12 +1045,13 @@ create_new_track: void AudioTrack::releaseBuffer(Buffer* audioBuffer) { AutoMutex lock(mLock); - mCblk->stepUser(audioBuffer->frameCount); + audio_track_cblk_t* cblk = mCblk; + cblk->stepUser(audioBuffer->frameCount); if (audioBuffer->frameCount > 0) { // restart track if it was disabled by audioflinger due to previous underrun - if (mActive && (mCblk->flags & CBLK_DISABLED)) { - android_atomic_and(~CBLK_DISABLED, &mCblk->flags); - ALOGW("releaseBuffer() track %p name=%#x disabled, restarting", this, mCblk->mName); + if (mActive && (cblk->flags & CBLK_DISABLED)) { + android_atomic_and(~CBLK_DISABLED, &cblk->flags); + ALOGW("releaseBuffer() track %p name=%#x disabled, restarting", this, cblk->mName); mAudioTrack->start(); } } @@ -1127,19 +1135,22 @@ status_t TimedAudioTrack::allocateTimedBuffer(size_t size, sp* buffer) // If the track is not invalid already, try to allocate a buffer. alloc // fails indicating that the server is dead, flag the track as invalid so // we can attempt to restore in just a bit. - if (!(mCblk->flags & CBLK_INVALID)) { + audio_track_cblk_t* cblk = mCblk; + if (!(cblk->flags & CBLK_INVALID)) { result = mAudioTrack->allocateTimedBuffer(size, buffer); if (result == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID, &mCblk->flags); + android_atomic_or(CBLK_INVALID, &cblk->flags); } } // If the track is invalid at this point, attempt to restore it. and try the // allocation one more time. - if (mCblk->flags & CBLK_INVALID) { - mCblk->lock.lock(); - result = restoreTrack_l(mCblk, false); - mCblk->lock.unlock(); + if (cblk->flags & CBLK_INVALID) { + cblk->lock.lock(); + audio_track_cblk_t* temp = cblk; + result = restoreTrack_l(temp, false); + cblk = temp; + cblk->lock.unlock(); if (result == OK) result = mAudioTrack->allocateTimedBuffer(size, buffer); @@ -1154,10 +1165,11 @@ status_t TimedAudioTrack::queueTimedBuffer(const sp& buffer, status_t status = mAudioTrack->queueTimedBuffer(buffer, pts); { AutoMutex lock(mLock); + audio_track_cblk_t* cblk = mCblk; // restart track if it was disabled by audioflinger due to previous underrun if (buffer->size() != 0 && status == NO_ERROR && - mActive && (mCblk->flags & CBLK_DISABLED)) { - android_atomic_and(~CBLK_DISABLED, &mCblk->flags); + mActive && (cblk->flags & CBLK_DISABLED)) { + android_atomic_and(~CBLK_DISABLED, &cblk->flags); ALOGW("queueTimedBuffer() track %p disabled, restarting", this); mAudioTrack->start(); } @@ -1285,10 +1297,10 @@ bool AudioTrack::processAudioBuffer(const sp& thread) } audioBuffer.size = writtenSize; - // NOTE: mCblk->frameSize is not equal to AudioTrack::frameSize() for - // 8 bit PCM data: in this case, mCblk->frameSize is based on a sample size of + // NOTE: cblk->frameSize is not equal to AudioTrack::frameSize() for + // 8 bit PCM data: in this case, cblk->frameSize is based on a sample size of // 16 bit. 
- audioBuffer.frameCount = writtenSize/mCblk->frameSize; + audioBuffer.frameCount = writtenSize/cblk->frameSize; frames -= audioBuffer.frameCount; @@ -1304,13 +1316,16 @@ bool AudioTrack::processAudioBuffer(const sp& thread) return true; } -// must be called with mLock and cblk.lock held. Callers must also hold strong references on +// must be called with mLock and refCblk.lock held. Callers must also hold strong references on // the IAudioTrack and IMemory in case they are recreated here. -// If the IAudioTrack is successfully restored, the cblk pointer is updated -status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& cblk, bool fromStart) +// If the IAudioTrack is successfully restored, the refCblk pointer is updated +// FIXME Don't depend on caller to hold strong references. +status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart) { status_t result; + audio_track_cblk_t* cblk = refCblk; + audio_track_cblk_t* newCblk = cblk; if (!(android_atomic_or(CBLK_RESTORING, &cblk->flags) & CBLK_RESTORING)) { ALOGW("dead IAudioTrack, creating a new one from %s TID %d", fromStart ? "start()" : "obtainBuffer()", gettid()); @@ -1340,40 +1355,41 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& cblk, bool fromStart) uint32_t user = cblk->user; uint32_t server = cblk->server; // restore write index and set other indexes to reflect empty buffer status - mCblk->user = user; - mCblk->server = user; - mCblk->userBase = user; - mCblk->serverBase = user; + newCblk = mCblk; + newCblk->user = user; + newCblk->server = user; + newCblk->userBase = user; + newCblk->serverBase = user; // restore loop: this is not guaranteed to succeed if new frame count is not // compatible with loop length setLoop_l(cblk->loopStart, cblk->loopEnd, cblk->loopCount); if (!fromStart) { - mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; + newCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; // Make sure that a client relying on callback events indicating underrun or // the actual amount of audio frames played (e.g SoundPool) receives them. if (mSharedBuffer == 0) { uint32_t frames = 0; if (user > server) { - frames = ((user - server) > mCblk->frameCount) ? - mCblk->frameCount : (user - server); - memset(mCblk->buffers, 0, frames * mCblk->frameSize); + frames = ((user - server) > newCblk->frameCount) ? + newCblk->frameCount : (user - server); + memset(newCblk->buffers, 0, frames * newCblk->frameSize); } // restart playback even if buffer is not completely filled. 
- android_atomic_or(CBLK_FORCEREADY, &mCblk->flags); + android_atomic_or(CBLK_FORCEREADY, &newCblk->flags); // stepUser() clears CBLK_UNDERRUN flag enabling underrun callbacks to // the client - mCblk->stepUser(frames); + newCblk->stepUser(frames); } } if (mSharedBuffer != 0) { - mCblk->stepUser(mCblk->frameCount); + newCblk->stepUser(newCblk->frameCount); } if (mActive) { result = mAudioTrack->start(); ALOGW_IF(result != NO_ERROR, "restoreTrack_l() start() failed status %d", result); } if (fromStart && result == NO_ERROR) { - mNewPosition = mCblk->server + mUpdatePeriod; + mNewPosition = newCblk->server + mUpdatePeriod; } } if (result != NO_ERROR) { @@ -1409,13 +1425,13 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& cblk, bool fromStart) } } ALOGV("restoreTrack_l() status %d mActive %d cblk %p, old cblk %p flags %08x old flags %08x", - result, mActive, mCblk, cblk, mCblk->flags, cblk->flags); + result, mActive, newCblk, cblk, newCblk->flags, cblk->flags); if (result == NO_ERROR) { // from now on we switch to the newly created cblk - cblk = mCblk; + refCblk = newCblk; } - cblk->lock.lock(); + newCblk->lock.lock(); ALOGW_IF(result != NO_ERROR, "restoreTrack_l() error %d TID %d", result, gettid()); @@ -1429,15 +1445,16 @@ status_t AudioTrack::dump(int fd, const Vector& args) const char buffer[SIZE]; String8 result; + audio_track_cblk_t* cblk = mCblk; result.append(" AudioTrack::dump\n"); snprintf(buffer, 255, " stream type(%d), left - right volume(%f, %f)\n", mStreamType, mVolume[0], mVolume[1]); result.append(buffer); snprintf(buffer, 255, " format(%d), channel count(%d), frame count(%d)\n", mFormat, - mChannelCount, mCblk->frameCount); + mChannelCount, cblk->frameCount); result.append(buffer); snprintf(buffer, 255, " sample rate(%d), status(%d), muted(%d)\n", - (mCblk == 0) ? 0 : mCblk->sampleRate, mStatus, mMuted); + (cblk == 0) ? 
0 : cblk->sampleRate, mStatus, mMuted); result.append(buffer); snprintf(buffer, 255, " active(%d), latency (%d)\n", mActive, mLatency); result.append(buffer); -- cgit v1.1 From 26ba972eafde73a26271ecf027a1d5988ce50eb8 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 21 Jun 2012 16:24:32 -0700 Subject: Removed unused fields in AudioRecord::Buffer Change-Id: I89fc6d8f695b48516d956b0a9a4a43d408f369f9 --- include/media/AudioRecord.h | 7 ------- media/libmedia/AudioRecord.cpp | 3 --- 2 files changed, 10 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index f9f6e8d..0ab26b8 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -57,13 +57,6 @@ public: class Buffer { public: - enum { - MUTE = 0x00000001 - }; - uint32_t flags; - int channelCount; - audio_format_t format; - size_t frameCount; // number of sample frames corresponding to size; // on input it is the number of frames available, // on output is the number of frames actually drained diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index bd558fa..3c28ca7 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -560,9 +560,6 @@ create_new_record: framesReq = bufferEnd - u; } - audioBuffer->flags = 0; - audioBuffer->channelCount= mChannelCount; - audioBuffer->format = mFormat; audioBuffer->frameCount = framesReq; audioBuffer->size = framesReq*cblk->frameSize; audioBuffer->raw = (int8_t*)cblk->buffer(u); -- cgit v1.1 From 05d499958e4030938ed77a924ebdd9899f36752e Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 6 Nov 2012 14:25:20 -0800 Subject: Remove unused fields in AudioTrack::Buffer Change-Id: Iab75f6e2348d8b6d1f3cec95aeb3fcd5135dfb50 --- include/media/AudioTrack.h | 19 +++++++++++-------- media/libmedia/AudioTrack.cpp | 7 ------- 2 files changed, 11 insertions(+), 15 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 76af2f8..529f74e 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -70,14 +70,6 @@ public: class Buffer { public: - enum { - MUTE = 0x00000001 - }; - uint32_t flags; // 0 or MUTE - audio_format_t format; // but AUDIO_FORMAT_PCM_8_BIT -> AUDIO_FORMAT_PCM_16_BIT - // accessed directly by WebKit ANP callback - int channelCount; // will be removed in the future, do not use - size_t frameCount; // number of sample frames corresponding to size; // on input it is the number of frames desired, // on output is the number of frames actually filled @@ -418,6 +410,17 @@ public: * +n limits wait time to n * WAIT_PERIOD_MS, * -1 causes an (almost) infinite wait time, * 0 non-blocking. + * + * Buffer fields + * On entry: + * frameCount number of frames requested + * After error return: + * frameCount 0 + * size 0 + * After successful return: + * frameCount actual number of frames available, <= number requested + * size actual number of bytes available + * raw pointer to the buffer */ enum { diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 523d844..da467ba 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1028,15 +1028,8 @@ create_new_track: framesReq = bufferEnd - u; } - audioBuffer->flags = mMuted ? 
Buffer::MUTE : 0; - audioBuffer->channelCount = mChannelCount; audioBuffer->frameCount = framesReq; audioBuffer->size = framesReq * cblk->frameSize; - if (audio_is_linear_pcm(mFormat)) { - audioBuffer->format = AUDIO_FORMAT_PCM_16_BIT; - } else { - audioBuffer->format = mFormat; - } audioBuffer->raw = (int8_t *)cblk->buffer(u); active = mActive; return active ? status_t(NO_ERROR) : status_t(STOPPED); -- cgit v1.1 From e0b07179a48ee50fda931d2aa1b3c751d167e4d7 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 6 Nov 2012 15:03:34 -0800 Subject: Remove CBLK_FAST from control block flags This is part of a series to clean up the control block. Change-Id: Ic881a3560d9547cb63fcc0cefec87aa3da480e0d --- include/media/IAudioFlinger.h | 2 +- include/private/media/AudioTrackShared.h | 1 - media/libmedia/AudioTrack.cpp | 7 +++---- media/libmedia/IAudioFlinger.cpp | 12 +++++++++--- services/audioflinger/AudioFlinger.cpp | 15 +++++++-------- services/audioflinger/AudioFlinger.h | 4 ++-- 6 files changed, 22 insertions(+), 19 deletions(-) diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index 359780e..0aa48c6 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -62,7 +62,7 @@ public: audio_format_t format, audio_channel_mask_t channelMask, int frameCount, - track_flags_t flags, + track_flags_t *flags, const sp& sharedBuffer, audio_io_handle_t output, pid_t tid, // -1 means unused, otherwise must be valid non-0 diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 6a86a00..141078f 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -41,7 +41,6 @@ namespace android { #define CBLK_DISABLED 0x10 // track disabled by AudioFlinger due to underrun, need to re-start #define CBLK_RESTORING 0x20 // track is being restored after invalidation by AudioFlinger #define CBLK_RESTORED 0x40 // track has been restored after invalidation by AudioFlinger -#define CBLK_FAST 0x80 // AudioFlinger successfully created a fast track // Important: do not add any virtual methods, including ~ struct audio_track_cblk_t diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 523d844..38eaa65 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -867,7 +867,7 @@ status_t AudioTrack::createTrack_l( format, channelMask, frameCount, - trackFlags, + &trackFlags, sharedBuffer, output, tid, @@ -887,10 +887,9 @@ status_t AudioTrack::createTrack_l( mCblkMemory = iMem; audio_track_cblk_t* cblk = static_cast(iMem->pointer()); mCblk = cblk; - // old has the previous value of cblk->flags before the "or" operation - int32_t old = android_atomic_or(CBLK_DIRECTION, &cblk->flags); + android_atomic_or(CBLK_DIRECTION, &cblk->flags); if (flags & AUDIO_OUTPUT_FLAG_FAST) { - if (old & CBLK_FAST) { + if (trackFlags & IAudioFlinger::TRACK_FAST) { ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", cblk->frameCount); } else { ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %u", cblk->frameCount); diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index f412591..bb936ec 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -90,7 +90,7 @@ public: audio_format_t format, audio_channel_mask_t channelMask, int frameCount, - track_flags_t flags, + track_flags_t *flags, const sp& sharedBuffer, audio_io_handle_t output, pid_t tid, @@ -106,7 +106,8 @@ public: data.writeInt32(format); 
data.writeInt32(channelMask); data.writeInt32(frameCount); - data.writeInt32((int32_t) flags); + track_flags_t lFlags = flags != NULL ? *flags : TRACK_DEFAULT; + data.writeInt32(lFlags); data.writeStrongBinder(sharedBuffer->asBinder()); data.writeInt32((int32_t) output); data.writeInt32((int32_t) tid); @@ -119,6 +120,10 @@ public: if (lStatus != NO_ERROR) { ALOGE("createTrack error: %s", strerror(-lStatus)); } else { + lFlags = reply.readInt32(); + if (flags != NULL) { + *flags = lFlags; + } lSessionId = reply.readInt32(); if (sessionId != NULL) { *sessionId = lSessionId; @@ -732,7 +737,8 @@ status_t BnAudioFlinger::onTransact( status_t status; sp track = createTrack(pid, (audio_stream_type_t) streamType, sampleRate, format, - channelMask, bufferCount, flags, buffer, output, tid, &sessionId, &status); + channelMask, bufferCount, &flags, buffer, output, tid, &sessionId, &status); + reply->writeInt32(flags); reply->writeInt32(sessionId); reply->writeInt32(status); reply->writeStrongBinder(track->asBinder()); diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index cb44114..379e936 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -451,7 +451,7 @@ sp AudioFlinger::createTrack( audio_format_t format, audio_channel_mask_t channelMask, int frameCount, - IAudioFlinger::track_flags_t flags, + IAudioFlinger::track_flags_t *flags, const sp& sharedBuffer, audio_io_handle_t output, pid_t tid, @@ -1725,17 +1725,17 @@ sp AudioFlinger::PlaybackThread::createTrac int frameCount, const sp& sharedBuffer, int sessionId, - IAudioFlinger::track_flags_t flags, + IAudioFlinger::track_flags_t *flags, pid_t tid, status_t *status) { sp track; status_t lStatus; - bool isTimed = (flags & IAudioFlinger::TRACK_TIMED) != 0; + bool isTimed = (*flags & IAudioFlinger::TRACK_TIMED) != 0; // client expresses a preference for FAST, but we get the final say - if (flags & IAudioFlinger::TRACK_FAST) { + if (*flags & IAudioFlinger::TRACK_FAST) { if ( // not timed (!isTimed) && @@ -1781,7 +1781,7 @@ sp AudioFlinger::PlaybackThread::createTrac isTimed, sharedBuffer.get(), frameCount, mFrameCount, format, audio_is_linear_pcm(format), channelMask, sampleRate, mSampleRate, hasFastMixer(), tid, mFastTrackAvailMask); - flags &= ~IAudioFlinger::TRACK_FAST; + *flags &= ~IAudioFlinger::TRACK_FAST; // For compatibility with AudioTrack calculation, buffer depth is forced // to be at least 2 x the normal mixer frame count and cover audio hardware latency. // This is probably too conservative, but legacy application code may depend on it. 
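Illustrative sketch (not part of the patch): with flags now passed by pointer through createTrack(), the server can clear TRACK_FAST (as in the hunk above) and the client learns whether the fast path was granted from the returned flags instead of from a CBLK_FAST bit in the control block. A hypothetical client-side calling pattern; the surrounding variables and setup are assumptions:

    IAudioFlinger::track_flags_t trackFlags = IAudioFlinger::TRACK_FAST;
    sp<IAudioTrack> track = audioFlinger->createTrack(getpid(), AUDIO_STREAM_MUSIC,
            sampleRate, format, channelMask, frameCount,
            &trackFlags,            // in: requested flags, out: flags actually granted
            sharedBuffer, output, tid, &sessionId, &status);
    if (!(trackFlags & IAudioFlinger::TRACK_FAST)) {
        // fast path denied by the server: fall back to normal frame-count sizing
    }
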
@@ -1845,7 +1845,7 @@ sp AudioFlinger::PlaybackThread::createTrac if (!isTimed) { track = new Track(this, client, streamType, sampleRate, format, - channelMask, frameCount, sharedBuffer, sessionId, flags); + channelMask, frameCount, sharedBuffer, sessionId, *flags); } else { track = TimedTrack::create(this, client, streamType, sampleRate, format, channelMask, frameCount, sharedBuffer, sessionId); @@ -1864,7 +1864,7 @@ sp AudioFlinger::PlaybackThread::createTrac chain->incTrackCnt(); } - if ((flags & IAudioFlinger::TRACK_FAST) && (tid != -1)) { + if ((*flags & IAudioFlinger::TRACK_FAST) && (tid != -1)) { pid_t callingPid = IPCThreadState::self()->getCallingPid(); // we don't have CAP_SYS_NICE, nor do we want to have it as it's too powerful, // so ask activity manager to do this on our behalf @@ -4377,7 +4377,6 @@ AudioFlinger::PlaybackThread::Track::Track( } // only allocate a fast track index if we were able to allocate a normal track name if (flags & IAudioFlinger::TRACK_FAST) { - mCblk->flags |= CBLK_FAST; // atomic op not needed yet ALOG_ASSERT(thread->mFastTrackAvailMask != 0); int i = __builtin_ctz(thread->mFastTrackAvailMask); ALOG_ASSERT(0 < i && i < (int)FastMixerState::kMaxFastTracks); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 2251b45..de2fbfa 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -93,7 +93,7 @@ public: audio_format_t format, audio_channel_mask_t channelMask, int frameCount, - IAudioFlinger::track_flags_t flags, + IAudioFlinger::track_flags_t *flags, const sp& sharedBuffer, audio_io_handle_t output, pid_t tid, @@ -1089,7 +1089,7 @@ public: int frameCount, const sp& sharedBuffer, int sessionId, - IAudioFlinger::track_flags_t flags, + IAudioFlinger::track_flags_t *flags, pid_t tid, status_t *status); -- cgit v1.1 From 864585df53eb97c31e77b3ad7c0d89e4f9b42588 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 6 Nov 2012 16:15:41 -0800 Subject: Remove CBLK_DIRECTION from control block flags This is part of a series to clean up the control block. 
Change-Id: I0265fece3247356b585d4d48fbda6f37aea8a851 --- include/private/media/AudioTrackShared.h | 38 ++++++++++++++++++++++---------- media/libmedia/AudioRecord.cpp | 9 ++++---- media/libmedia/AudioTrack.cpp | 37 +++++++++++++++---------------- services/audioflinger/AudioFlinger.cpp | 25 ++++++++++++++------- services/audioflinger/AudioFlinger.h | 6 +++++ 5 files changed, 71 insertions(+), 44 deletions(-) diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 141078f..46788c4 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -34,13 +34,12 @@ namespace android { #define RESTORE_TIMEOUT_MS 5000 // Maximum waiting time for a track to be restored #define CBLK_UNDERRUN 0x01 // set: underrun (out) or overrrun (in), clear: no underrun or overrun -#define CBLK_DIRECTION 0x02 // set: cblk is for an AudioTrack, clear: for AudioRecord -#define CBLK_FORCEREADY 0x04 // set: track is considered ready immediately by AudioFlinger, +#define CBLK_FORCEREADY 0x02 // set: track is considered ready immediately by AudioFlinger, // clear: track is ready when buffer full -#define CBLK_INVALID 0x08 // track buffer invalidated by AudioFlinger, need to re-create -#define CBLK_DISABLED 0x10 // track disabled by AudioFlinger due to underrun, need to re-start -#define CBLK_RESTORING 0x20 // track is being restored after invalidation by AudioFlinger -#define CBLK_RESTORED 0x40 // track has been restored after invalidation by AudioFlinger +#define CBLK_INVALID 0x04 // track buffer invalidated by AudioFlinger, need to re-create +#define CBLK_DISABLED 0x08 // track disabled by AudioFlinger due to underrun, need to re-start +#define CBLK_RESTORING 0x10 // track is being restored after invalidation by AudioFlinger +#define CBLK_RESTORED 0x20 // track has been restored after invalidation by AudioFlinger // Important: do not add any virtual methods, including ~ struct audio_track_cblk_t @@ -102,13 +101,22 @@ public: // Since the control block is always located in shared memory, this constructor // is only used for placement new(). It is never used for regular new() or stack. 
audio_track_cblk_t(); - uint32_t stepUser(uint32_t frameCount); // called by client only, where - // client includes regular AudioTrack and AudioFlinger::PlaybackThread::OutputTrack - bool stepServer(uint32_t frameCount); // called by server only + + // called by client only, where client includes regular + // AudioTrack and AudioFlinger::PlaybackThread::OutputTrack + uint32_t stepUserIn(uint32_t frameCount) { return stepUser(frameCount, false); } + uint32_t stepUserOut(uint32_t frameCount) { return stepUser(frameCount, true); } + + bool stepServer(uint32_t frameCount, bool isOut); + void* buffer(uint32_t offset) const; - uint32_t framesAvailable(); - uint32_t framesAvailable_l(); - uint32_t framesReady(); // called by server only + uint32_t framesAvailableIn() { return framesAvailable(false); } + uint32_t framesAvailableOut() { return framesAvailable(true); } + uint32_t framesAvailableIn_l() { return framesAvailable_l(false); } + uint32_t framesAvailableOut_l() { return framesAvailable_l(true); } + uint32_t framesReadyIn() { return framesReady(false); } + uint32_t framesReadyOut() { return framesReady(true); } + bool tryLock(); // No barriers on the following operations, so the ordering of loads/stores @@ -134,6 +142,12 @@ public: return mVolumeLR; } +private: + // isOut == true means AudioTrack, isOut == false means AudioRecord + uint32_t stepUser(uint32_t frameCount, bool isOut); + uint32_t framesAvailable(bool isOut); + uint32_t framesAvailable_l(bool isOut); + uint32_t framesReady(bool isOut); }; diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 3c28ca7..ae1842e 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -466,7 +466,6 @@ status_t AudioRecord::openRecord_l( mCblkMemory = cblk; mCblk = static_cast(cblk->pointer()); mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); - android_atomic_and(~CBLK_DIRECTION, &mCblk->flags); mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; mCblk->waitTimeMs = 0; return NO_ERROR; @@ -484,7 +483,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) audioBuffer->frameCount = 0; audioBuffer->size = 0; - uint32_t framesReady = cblk->framesReady(); + uint32_t framesReady = cblk->framesReadyIn(); if (framesReady == 0) { cblk->lock.lock(); @@ -540,7 +539,7 @@ create_new_record: } // read the server count again start_loop_here: - framesReady = cblk->framesReady(); + framesReady = cblk->framesReadyIn(); } cblk->lock.unlock(); } @@ -570,7 +569,7 @@ create_new_record: void AudioRecord::releaseBuffer(Buffer* audioBuffer) { AutoMutex lock(mLock); - mCblk->stepUser(audioBuffer->frameCount); + mCblk->stepUserIn(audioBuffer->frameCount); } audio_io_handle_t AudioRecord::getInput() const @@ -732,7 +731,7 @@ bool AudioRecord::processAudioBuffer(const sp& thread) // Manage overrun callback - if (active && (cblk->framesAvailable() == 0)) { + if (active && (cblk->framesAvailableIn() == 0)) { // The value of active is stale, but we are almost sure to be active here because // otherwise we would have exited when obtainBuffer returned STOPPED earlier. 
ALOGV("Overrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 5348646..f55ec9a 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -695,7 +695,7 @@ status_t AudioTrack::reload() flush_l(); audio_track_cblk_t* cblk = mCblk; - cblk->stepUser(cblk->frameCount); + cblk->stepUserOut(cblk->frameCount); return NO_ERROR; } @@ -887,7 +887,6 @@ status_t AudioTrack::createTrack_l( mCblkMemory = iMem; audio_track_cblk_t* cblk = static_cast(iMem->pointer()); mCblk = cblk; - android_atomic_or(CBLK_DIRECTION, &cblk->flags); if (flags & AUDIO_OUTPUT_FLAG_FAST) { if (trackFlags & IAudioFlinger::TRACK_FAST) { ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", cblk->frameCount); @@ -906,7 +905,7 @@ status_t AudioTrack::createTrack_l( } else { cblk->buffers = sharedBuffer->pointer(); // Force buffer full condition as data is already present in shared memory - cblk->stepUser(cblk->frameCount); + cblk->stepUserOut(cblk->frameCount); } cblk->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) | @@ -938,7 +937,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) audioBuffer->frameCount = 0; audioBuffer->size = 0; - uint32_t framesAvail = cblk->framesAvailable(); + uint32_t framesAvail = cblk->framesAvailableOut(); cblk->lock.lock(); if (cblk->flags & CBLK_INVALID) { @@ -1009,7 +1008,7 @@ create_new_track: } // read the server count again start_loop_here: - framesAvail = cblk->framesAvailable_l(); + framesAvail = cblk->framesAvailableOut_l(); } cblk->lock.unlock(); } @@ -1038,7 +1037,7 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer) { AutoMutex lock(mLock); audio_track_cblk_t* cblk = mCblk; - cblk->stepUser(audioBuffer->frameCount); + cblk->stepUserOut(audioBuffer->frameCount); if (audioBuffer->frameCount > 0) { // restart track if it was disabled by audioflinger due to previous underrun if (mActive && (cblk->flags & CBLK_DISABLED)) { @@ -1193,7 +1192,7 @@ bool AudioTrack::processAudioBuffer(const sp& thread) mLock.unlock(); // Manage underrun callback - if (active && (cblk->framesAvailable() == cblk->frameCount)) { + if (active && (cblk->framesAvailableOut() == cblk->frameCount)) { ALOGV("Underrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); if (!(android_atomic_or(CBLK_UNDERRUN, &cblk->flags) & CBLK_UNDERRUN)) { mCbf(EVENT_UNDERRUN, mUserData, 0); @@ -1370,11 +1369,11 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart android_atomic_or(CBLK_FORCEREADY, &newCblk->flags); // stepUser() clears CBLK_UNDERRUN flag enabling underrun callbacks to // the client - newCblk->stepUser(frames); + newCblk->stepUserOut(frames); } } if (mSharedBuffer != 0) { - newCblk->stepUser(newCblk->frameCount); + newCblk->stepUserOut(newCblk->frameCount); } if (mActive) { result = mAudioTrack->start(); @@ -1514,14 +1513,14 @@ audio_track_cblk_t::audio_track_cblk_t() { } -uint32_t audio_track_cblk_t::stepUser(uint32_t frameCount) +uint32_t audio_track_cblk_t::stepUser(uint32_t frameCount, bool isOut) { ALOGV("stepuser %08x %08x %d", user, server, frameCount); uint32_t u = user; u += frameCount; // Ensure that user is never ahead of server for AudioRecord - if (flags & CBLK_DIRECTION) { + if (isOut) { // If stepServer() has been called once, switch to normal obtainBuffer() timeout period if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS-1) { bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; @@ -1552,7 
+1551,7 @@ uint32_t audio_track_cblk_t::stepUser(uint32_t frameCount) return u; } -bool audio_track_cblk_t::stepServer(uint32_t frameCount) +bool audio_track_cblk_t::stepServer(uint32_t frameCount, bool isOut) { ALOGV("stepserver %08x %08x %d", user, server, frameCount); @@ -1565,7 +1564,7 @@ bool audio_track_cblk_t::stepServer(uint32_t frameCount) bool flushed = (s == user); s += frameCount; - if (flags & CBLK_DIRECTION) { + if (isOut) { // Mark that we have read the first buffer so that next time stepUser() is called // we switch to normal obtainBuffer() timeout period if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS) { @@ -1615,18 +1614,18 @@ void* audio_track_cblk_t::buffer(uint32_t offset) const return (int8_t *)buffers + (offset - userBase) * frameSize; } -uint32_t audio_track_cblk_t::framesAvailable() +uint32_t audio_track_cblk_t::framesAvailable(bool isOut) { Mutex::Autolock _l(lock); - return framesAvailable_l(); + return framesAvailable_l(isOut); } -uint32_t audio_track_cblk_t::framesAvailable_l() +uint32_t audio_track_cblk_t::framesAvailable_l(bool isOut) { uint32_t u = user; uint32_t s = server; - if (flags & CBLK_DIRECTION) { + if (isOut) { uint32_t limit = (s < loopStart) ? s : loopStart; return limit + frameCount - u; } else { @@ -1634,12 +1633,12 @@ uint32_t audio_track_cblk_t::framesAvailable_l() } } -uint32_t audio_track_cblk_t::framesReady() +uint32_t audio_track_cblk_t::framesReady(bool isOut) { uint32_t u = user; uint32_t s = server; - if (flags & CBLK_DIRECTION) { + if (isOut) { if (u < loopEnd) { return u - s; } else { diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 379e936..69ac3e3 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -4286,7 +4286,7 @@ bool AudioFlinger::ThreadBase::TrackBase::step() { bool result; audio_track_cblk_t* cblk = this->cblk(); - result = cblk->stepServer(mFrameCount); + result = cblk->stepServer(mFrameCount, isOut()); if (!result) { ALOGV("stepServer failed acquiring cblk mutex"); mStepServerFailed = true; @@ -4545,7 +4545,7 @@ status_t AudioFlinger::PlaybackThread::Track::getNextBuffer( } // FIXME Same as above - framesReady = cblk->framesReady(); + framesReady = cblk->framesReadyOut(); if (CC_LIKELY(framesReady)) { uint32_t s = cblk->server; @@ -4580,7 +4580,7 @@ getNextBuffer_exit: // the tryLock() could block for up to 1 ms, and a sequence of these could delay fast mixer. // FIXME Replace AudioTrackShared control block implementation by a non-blocking FIFO queue. 
size_t AudioFlinger::PlaybackThread::Track::framesReady() const { - return mCblk->framesReady(); + return mCblk->framesReadyOut(); } // Don't call for fast tracks; the framesReady() could result in priority inversion @@ -4875,6 +4875,11 @@ status_t AudioFlinger::PlaybackThread::Track::setSyncEvent(const sp& return NO_ERROR; } +bool AudioFlinger::PlaybackThread::Track::isOut() const +{ + return true; +} + // timed audio tracks sp @@ -5436,7 +5441,8 @@ status_t AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvi mStepServerFailed = false; } - framesAvail = cblk->framesAvailable_l(); + // FIXME lock is not actually held, so overrun is possible + framesAvail = cblk->framesAvailableIn_l(); if (CC_LIKELY(framesAvail)) { uint32_t s = cblk->server; @@ -5512,6 +5518,10 @@ void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size) mCblk->frameCount); } +bool AudioFlinger::RecordThread::RecordTrack::isOut() const +{ + return false; +} // ---------------------------------------------------------------------------- @@ -5528,7 +5538,6 @@ AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( { if (mCblk != NULL) { - mCblk->flags |= CBLK_DIRECTION; mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); mOutBuffer.frameCount = 0; playbackThread->mTracks.add(this); @@ -5631,7 +5640,7 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr uint32_t outFrames = pInBuffer->frameCount > mOutBuffer.frameCount ? mOutBuffer.frameCount : pInBuffer->frameCount; memcpy(mOutBuffer.raw, pInBuffer->raw, outFrames * channelCount * sizeof(int16_t)); - mCblk->stepUser(outFrames); + mCblk->stepUserOut(outFrames); pInBuffer->frameCount -= outFrames; pInBuffer->i16 += outFrames * channelCount; mOutBuffer.frameCount -= outFrames; @@ -5702,7 +5711,7 @@ status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( ALOGVV("OutputTrack::obtainBuffer user %d, server %d", cblk->user, cblk->server); buffer->frameCount = 0; - uint32_t framesAvail = cblk->framesAvailable(); + uint32_t framesAvail = cblk->framesAvailableOut(); if (framesAvail == 0) { @@ -5720,7 +5729,7 @@ status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( } // read the server count again start_loop_here: - framesAvail = cblk->framesAvailable_l(); + framesAvail = cblk->framesAvailableOut_l(); } } diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index de2fbfa..1417105 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -456,6 +456,9 @@ private: bool step(); void reset(); + virtual bool isOut() const = 0; // true for Track and TimedTrack, false for RecordTrack, + // this could be a track type if needed later + const wp mThread; /*const*/ sp mClient; // see explanation at ~TrackBase() why not const sp mCblkMemory; @@ -859,6 +862,7 @@ private: void triggerEvents(AudioSystem::sync_event_t type); virtual bool isTimedTrack() const { return false; } bool isFastTrack() const { return (mFlags & IAudioFlinger::TRACK_FAST) != 0; } + virtual bool isOut() const; protected: @@ -1468,6 +1472,8 @@ public: static void appendDumpHeader(String8& result); void dump(char* buffer, size_t size); + virtual bool isOut() const; + private: friend class AudioFlinger; // for mState -- cgit v1.1 From d5ed6e88a9bea1879e41d7defaf1edea7c09f554 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 2 Nov 2012 13:05:14 -0700 Subject: Fix call to restoreTrack_l() without lock held Also document lock order Change-Id: 
I2c1f273a0a51fa79ee3dd766de8d23083e270051 --- include/media/AudioTrack.h | 7 +++++++ media/libmedia/AudioTrack.cpp | 6 ++++++ 2 files changed, 13 insertions(+) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 529f74e..1a19999 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -481,6 +481,7 @@ protected: // body of AudioTrackThread::threadLoop() bool processAudioBuffer(const sp& thread); + // caller must hold lock on mLock for all _l methods status_t createTrack_l(audio_stream_type_t streamType, uint32_t sampleRate, audio_format_t format, @@ -535,7 +536,13 @@ protected: audio_output_flags_t mFlags; int mSessionId; int mAuxEffectId; + + // When locking both mLock and mCblk->lock, must lock in this order to avoid deadlock: + // 1. mLock + // 2. mCblk->lock + // It is OK to lock only mCblk->lock. mutable Mutex mLock; + status_t mRestoreStatus; bool mIsTimed; int mPreviousPriority; // before start() diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 5348646..1f4f3d0 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1122,8 +1122,14 @@ TimedAudioTrack::TimedAudioTrack() { status_t TimedAudioTrack::allocateTimedBuffer(size_t size, sp* buffer) { + AutoMutex lock(mLock); status_t result = UNKNOWN_ERROR; + // acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed + // while we are accessing the cblk + sp audioTrack = mAudioTrack; + sp iMem = mCblkMemory; + // If the track is not invalid already, try to allocate a buffer. alloc // fails indicating that the server is dead, flag the track as invalid so // we can attempt to restore in just a bit. -- cgit v1.1 From a47f3165f53c8e8fb8907a94de7417e2c3047eeb Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 7 Nov 2012 10:13:08 -0800 Subject: Simplify AudioTrack::restoreTrack_l() Remove CBLK_RESTORING and CBLK_RESTORED from control block flags, for AudioTrack only. They are still used by AudioRecord. This is part of a series to clean up the control block. Change-Id: Iae4798f5b527c492bdaf789987ff3a1dadd0cb37 --- include/media/AudioTrack.h | 3 +- include/private/media/AudioTrackShared.h | 2 + media/libmedia/AudioTrack.cpp | 160 +++++++++++++------------------ 3 files changed, 72 insertions(+), 93 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 5f235cb..6e88032 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -509,7 +509,7 @@ protected: float mSendLevel; uint32_t mFrameCount; - audio_track_cblk_t* mCblk; + audio_track_cblk_t* mCblk; // re-load after mLock.unlock() audio_format_t mFormat; audio_stream_type_t mStreamType; uint8_t mChannelCount; @@ -548,7 +548,6 @@ protected: // It is OK to lock only mCblk->lock. 
mutable Mutex mLock; - status_t mRestoreStatus; bool mIsTimed; int mPreviousPriority; // before start() SchedPolicy mPreviousSchedulingGroup; diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 46788c4..90301cd 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -31,6 +31,7 @@ namespace android { // init time #define MAX_RUN_TIMEOUT_MS 1000 #define WAIT_PERIOD_MS 10 +// AudioTrack no longer uses this, it is for AudioRecord only: #define RESTORE_TIMEOUT_MS 5000 // Maximum waiting time for a track to be restored #define CBLK_UNDERRUN 0x01 // set: underrun (out) or overrrun (in), clear: no underrun or overrun @@ -38,6 +39,7 @@ namespace android { // clear: track is ready when buffer full #define CBLK_INVALID 0x04 // track buffer invalidated by AudioFlinger, need to re-create #define CBLK_DISABLED 0x08 // track disabled by AudioFlinger due to underrun, need to re-start +// AudioTrack no longer uses these, they are for AudioRecord only: #define CBLK_RESTORING 0x10 // track is being restored after invalidation by AudioFlinger #define CBLK_RESTORED 0x20 // track has been restored after invalidation by AudioFlinger diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 324fd6d..7ce9879 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -297,7 +297,6 @@ status_t AudioTrack::set( mUpdatePeriod = 0; mFlushed = false; AudioSystem::acquireAudioSessionId(mSessionId); - mRestoreStatus = NO_ERROR; return NO_ERROR; } @@ -956,12 +955,17 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) } if (!(cblk->flags & CBLK_INVALID)) { mLock.unlock(); + // this condition is in shared memory, so if IAudioTrack and control block + // are replaced due to mediaserver death or IAudioTrack invalidation then + // cv won't be signalled, but fortunately the timeout will limit the wait result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); cblk->lock.unlock(); mLock.lock(); if (!mActive) { return status_t(STOPPED); } + // IAudioTrack may have been re-created while mLock was unlocked + cblk = mCblk; cblk->lock.lock(); } @@ -1072,6 +1076,9 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize) sp iMem = mCblkMemory; mLock.unlock(); + // since mLock is unlocked the IAudioTrack and shared memory may be re-created, + // so all cblk references might still refer to old shared memory, but that should be benign + ssize_t written = 0; const int8_t *src = (const int8_t *)buffer; Buffer audioBuffer; @@ -1192,6 +1199,9 @@ bool AudioTrack::processAudioBuffer(const sp& thread) bool active = mActive; mLock.unlock(); + // since mLock is unlocked the IAudioTrack and shared memory may be re-created, + // so all cblk references might still refer to old shared memory, but that should be benign + // Manage underrun callback if (active && (cblk->framesAvailableOut() == cblk->frameCount)) { ALOGV("Underrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); @@ -1318,104 +1328,72 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart audio_track_cblk_t* cblk = refCblk; audio_track_cblk_t* newCblk = cblk; - if (!(android_atomic_or(CBLK_RESTORING, &cblk->flags) & CBLK_RESTORING)) { - ALOGW("dead IAudioTrack, creating a new one from %s TID %d", - fromStart ? "start()" : "obtainBuffer()", gettid()); + ALOGW("dead IAudioTrack, creating a new one from %s TID %d", + fromStart ? 
"start()" : "obtainBuffer()", gettid()); - // signal old cblk condition so that other threads waiting for available buffers stop - // waiting now - cblk->cv.broadcast(); - cblk->lock.unlock(); + // signal old cblk condition so that other threads waiting for available buffers stop + // waiting now + cblk->cv.broadcast(); + cblk->lock.unlock(); - // refresh the audio configuration cache in this process to make sure we get new - // output parameters in getOutput_l() and createTrack_l() - AudioSystem::clearAudioConfigCache(); - - // if the new IAudioTrack is created, createTrack_l() will modify the - // following member variables: mAudioTrack, mCblkMemory and mCblk. - // It will also delete the strong references on previous IAudioTrack and IMemory - result = createTrack_l(mStreamType, - cblk->sampleRate, - mFormat, - mChannelMask, - mFrameCount, - mFlags, - mSharedBuffer, - getOutput_l()); - - if (result == NO_ERROR) { - uint32_t user = cblk->user; - uint32_t server = cblk->server; - // restore write index and set other indexes to reflect empty buffer status - newCblk = mCblk; - newCblk->user = user; - newCblk->server = user; - newCblk->userBase = user; - newCblk->serverBase = user; - // restore loop: this is not guaranteed to succeed if new frame count is not - // compatible with loop length - setLoop_l(cblk->loopStart, cblk->loopEnd, cblk->loopCount); - if (!fromStart) { - newCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; - // Make sure that a client relying on callback events indicating underrun or - // the actual amount of audio frames played (e.g SoundPool) receives them. - if (mSharedBuffer == 0) { - uint32_t frames = 0; - if (user > server) { - frames = ((user - server) > newCblk->frameCount) ? - newCblk->frameCount : (user - server); - memset(newCblk->buffers, 0, frames * newCblk->frameSize); - } - // restart playback even if buffer is not completely filled. - android_atomic_or(CBLK_FORCEREADY, &newCblk->flags); - // stepUser() clears CBLK_UNDERRUN flag enabling underrun callbacks to - // the client - newCblk->stepUserOut(frames); + // refresh the audio configuration cache in this process to make sure we get new + // output parameters in getOutput_l() and createTrack_l() + AudioSystem::clearAudioConfigCache(); + + // if the new IAudioTrack is created, createTrack_l() will modify the + // following member variables: mAudioTrack, mCblkMemory and mCblk. + // It will also delete the strong references on previous IAudioTrack and IMemory + result = createTrack_l(mStreamType, + cblk->sampleRate, + mFormat, + mChannelMask, + mFrameCount, + mFlags, + mSharedBuffer, + getOutput_l()); + + if (result == NO_ERROR) { + uint32_t user = cblk->user; + uint32_t server = cblk->server; + // restore write index and set other indexes to reflect empty buffer status + newCblk = mCblk; + newCblk->user = user; + newCblk->server = user; + newCblk->userBase = user; + newCblk->serverBase = user; + // restore loop: this is not guaranteed to succeed if new frame count is not + // compatible with loop length + setLoop_l(cblk->loopStart, cblk->loopEnd, cblk->loopCount); + if (!fromStart) { + newCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; + // Make sure that a client relying on callback events indicating underrun or + // the actual amount of audio frames played (e.g SoundPool) receives them. + if (mSharedBuffer == 0) { + uint32_t frames = 0; + if (user > server) { + frames = ((user - server) > newCblk->frameCount) ? 
+ newCblk->frameCount : (user - server); + memset(newCblk->buffers, 0, frames * newCblk->frameSize); } - } - if (mSharedBuffer != 0) { - newCblk->stepUserOut(newCblk->frameCount); - } - if (mActive) { - result = mAudioTrack->start(); - ALOGW_IF(result != NO_ERROR, "restoreTrack_l() start() failed status %d", result); - } - if (fromStart && result == NO_ERROR) { - mNewPosition = newCblk->server + mUpdatePeriod; + // restart playback even if buffer is not completely filled. + android_atomic_or(CBLK_FORCEREADY, &newCblk->flags); + // stepUser() clears CBLK_UNDERRUN flag enabling underrun callbacks to + // the client + newCblk->stepUserOut(frames); } } - if (result != NO_ERROR) { - android_atomic_and(~CBLK_RESTORING, &cblk->flags); - ALOGW_IF(result != NO_ERROR, "restoreTrack_l() failed status %d", result); + if (mSharedBuffer != 0) { + newCblk->stepUserOut(newCblk->frameCount); } - mRestoreStatus = result; - // signal old cblk condition for other threads waiting for restore completion - android_atomic_or(CBLK_RESTORED, &cblk->flags); - cblk->cv.broadcast(); - } else { - bool haveLogged = false; - for (;;) { - if (cblk->flags & CBLK_RESTORED) { - ALOGW("dead IAudioTrack restored"); - result = mRestoreStatus; - cblk->lock.unlock(); - break; - } - if (!haveLogged) { - ALOGW("dead IAudioTrack, waiting for a new one"); - haveLogged = true; - } - mLock.unlock(); - result = cblk->cv.waitRelative(cblk->lock, milliseconds(RESTORE_TIMEOUT_MS)); - cblk->lock.unlock(); - mLock.lock(); - if (result != NO_ERROR) { - ALOGW("timed out"); - break; - } - cblk->lock.lock(); + if (mActive) { + result = mAudioTrack->start(); + ALOGW_IF(result != NO_ERROR, "restoreTrack_l() start() failed status %d", result); + } + if (fromStart && result == NO_ERROR) { + mNewPosition = newCblk->server + mUpdatePeriod; } } + ALOGW_IF(result != NO_ERROR, "restoreTrack_l() failed status %d", result); ALOGV("restoreTrack_l() status %d mActive %d cblk %p, old cblk %p flags %08x old flags %08x", result, mActive, newCblk, cblk, newCblk->flags, cblk->flags); -- cgit v1.1 From b929e417853694e37aba1ef4399f188987b709d9 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 8 Nov 2012 12:13:58 -0800 Subject: Move buffers pointer out of the control block This is part of a series to clean up the control block. 
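For reference, the call convention after this change, condensed from the hunks below (frameSize is still read from the control block at this point in the series; a later change in the same cleanup moves it out as well):

    // the buffers base pointer now lives in the client object, not in shared memory
    void* audio_track_cblk_t::buffer(void* buffers, uint32_t offset) const {
        return (int8_t*)buffers + (offset - userBase) * frameSize;
    }

    // caller side, e.g. AudioTrack::obtainBuffer():
    audioBuffer->raw = cblk->buffer(mBuffers, u);  // mBuffers set once in createTrack_l()/openRecord_l()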
Change-Id: Ie474557db7cb360f2d9a0f11600a68f5a3d46f07 --- include/media/AudioRecord.h | 1 + include/media/AudioTrack.h | 1 + include/private/media/AudioTrackShared.h | 9 +++++---- media/libmedia/AudioRecord.cpp | 4 ++-- media/libmedia/AudioTrack.cpp | 12 ++++++------ services/audioflinger/AudioFlinger.cpp | 6 +++--- services/audioflinger/AudioFlinger.h | 1 + 7 files changed, 19 insertions(+), 15 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index 0ab26b8..bfb5d3a 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -389,6 +389,7 @@ private: sp mAudioRecord; sp mCblkMemory; audio_track_cblk_t* mCblk; + void* mBuffers; // starting address of buffers in shared memory int mPreviousPriority; // before start() SchedPolicy mPreviousSchedulingGroup; diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 6e88032..639b567 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -510,6 +510,7 @@ protected: uint32_t mFrameCount; audio_track_cblk_t* mCblk; // re-load after mLock.unlock() + void* mBuffers; // starting address of buffers in shared memory audio_format_t mFormat; audio_stream_type_t mStreamType; uint8_t mChannelCount; diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 90301cd..8ef90c7 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -58,9 +58,7 @@ struct audio_track_cblk_t uint32_t userBase; uint32_t serverBase; - // if there is a shared buffer, "buffers" is the value of pointer() for the shared - // buffer, otherwise "buffers" points immediately after the control block - void* buffers; + int mPad1; // unused, but preserves cache line alignment uint32_t frameCount; // Cache line boundary @@ -111,7 +109,10 @@ public: bool stepServer(uint32_t frameCount, bool isOut); - void* buffer(uint32_t offset) const; + // if there is a shared buffer, "buffers" is the value of pointer() for the shared + // buffer, otherwise "buffers" points immediately after the control block + void* buffer(void *buffers, uint32_t offset) const; + uint32_t framesAvailableIn() { return framesAvailable(false); } uint32_t framesAvailableOut() { return framesAvailable(true); } uint32_t framesAvailableIn_l() { return framesAvailable_l(false); } diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index ae1842e..263a7c7 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -465,7 +465,7 @@ status_t AudioRecord::openRecord_l( mCblkMemory.clear(); mCblkMemory = cblk; mCblk = static_cast(cblk->pointer()); - mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); + mBuffers = (char*)mCblk + sizeof(audio_track_cblk_t); mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; mCblk->waitTimeMs = 0; return NO_ERROR; @@ -561,7 +561,7 @@ create_new_record: audioBuffer->frameCount = framesReq; audioBuffer->size = framesReq*cblk->frameSize; - audioBuffer->raw = (int8_t*)cblk->buffer(u); + audioBuffer->raw = cblk->buffer(mBuffers, u); active = mActive; return active ? 
status_t(NO_ERROR) : status_t(STOPPED); } diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 7ce9879..468bd29 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -895,9 +895,9 @@ status_t AudioTrack::createTrack_l( } } if (sharedBuffer == 0) { - cblk->buffers = (char*)cblk + sizeof(audio_track_cblk_t); + mBuffers = (char*)cblk + sizeof(audio_track_cblk_t); } else { - cblk->buffers = sharedBuffer->pointer(); + mBuffers = sharedBuffer->pointer(); // Force buffer full condition as data is already present in shared memory cblk->stepUserOut(cblk->frameCount); } @@ -1027,7 +1027,7 @@ create_new_track: audioBuffer->frameCount = framesReq; audioBuffer->size = framesReq * cblk->frameSize; - audioBuffer->raw = (int8_t *)cblk->buffer(u); + audioBuffer->raw = cblk->buffer(mBuffers, u); active = mActive; return active ? status_t(NO_ERROR) : status_t(STOPPED); } @@ -1373,7 +1373,7 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart if (user > server) { frames = ((user - server) > newCblk->frameCount) ? newCblk->frameCount : (user - server); - memset(newCblk->buffers, 0, frames * newCblk->frameSize); + memset(mBuffers, 0, frames * newCblk->frameSize); } // restart playback even if buffer is not completely filled. android_atomic_or(CBLK_FORCEREADY, &newCblk->flags); @@ -1486,7 +1486,7 @@ void AudioTrack::AudioTrackThread::resume() audio_track_cblk_t::audio_track_cblk_t() : lock(Mutex::SHARED), cv(Condition::SHARED), user(0), server(0), - userBase(0), serverBase(0), buffers(NULL), frameCount(0), + userBase(0), serverBase(0), frameCount(0), loopStart(UINT_MAX), loopEnd(UINT_MAX), loopCount(0), mVolumeLR(0x10001000), mSendLevel(0), flags(0) { @@ -1588,7 +1588,7 @@ bool audio_track_cblk_t::stepServer(uint32_t frameCount, bool isOut) return true; } -void* audio_track_cblk_t::buffer(uint32_t offset) const +void* audio_track_cblk_t::buffer(void *buffers, uint32_t offset) const { return (int8_t *)buffers + (offset - userBase) * frameSize; } diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 69ac3e3..c0f5c7b 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -5534,11 +5534,11 @@ AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( int frameCount) : Track(playbackThread, NULL, AUDIO_STREAM_CNT, sampleRate, format, channelMask, frameCount, NULL, 0, IAudioFlinger::TRACK_DEFAULT), - mActive(false), mSourceThread(sourceThread) + mActive(false), mSourceThread(sourceThread), mBuffers(NULL) { if (mCblk != NULL) { - mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); + mBuffers = (char*)mCblk + sizeof(audio_track_cblk_t); mOutBuffer.frameCount = 0; playbackThread->mTracks.add(this); ALOGV("OutputTrack constructor mCblk %p, mBuffer %p, mCblk->buffers %p, " \ @@ -5749,7 +5749,7 @@ status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( } buffer->frameCount = framesReq; - buffer->raw = (void *)cblk->buffer(u); + buffer->raw = cblk->buffer(mBuffers, u); return NO_ERROR; } diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 1417105..fc24bed 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -1035,6 +1035,7 @@ private: AudioBufferProvider::Buffer mOutBuffer; bool mActive; DuplicatingThread* const mSourceThread; // for waitTimeMs() in write() + void* mBuffers; // starting address of buffers in plain memory }; // end of OutputTrack 
PlaybackThread(const sp& audioFlinger, AudioStreamOut* output, -- cgit v1.1 From 7aa7ed773040ea60bbe0a2a6ea949d62802304a4 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Mon, 5 Nov 2012 01:51:37 -0800 Subject: minor cleanups Change-Id: Ia12ee4fb59e90221761bec85e6450db29197591f --- services/audioflinger/AudioResamplerSinc.cpp | 80 ++++++++++++++-------------- 1 file changed, 39 insertions(+), 41 deletions(-) diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index 8d9168b..165bb61 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -17,6 +17,7 @@ #define LOG_TAG "AudioResamplerSinc" //#define LOG_NDEBUG 0 +#include #include #include #include @@ -338,12 +339,16 @@ void AudioResamplerSinc::init_routine() return; } - readResampleCoefficients = (readCoefficientsFn) dlsym(resampleCoeffLib, - "readResamplerCoefficients"); - readResampleFirNumCoeffFn readResampleFirNumCoeff = (readResampleFirNumCoeffFn) + readResampleFirNumCoeffFn readResampleFirNumCoeff; + readResampleFirLerpIntBitsFn readResampleFirLerpIntBits; + + readResampleCoefficients = (readCoefficientsFn) + dlsym(resampleCoeffLib, "readResamplerCoefficients"); + readResampleFirNumCoeff = (readResampleFirNumCoeffFn) dlsym(resampleCoeffLib, "readResampleFirNumCoeff"); - readResampleFirLerpIntBitsFn readResampleFirLerpIntBits = (readResampleFirLerpIntBitsFn) + readResampleFirLerpIntBits = (readResampleFirLerpIntBitsFn) dlsym(resampleCoeffLib, "readResampleFirLerpIntBits"); + if (!readResampleCoefficients || !readResampleFirNumCoeff || !readResampleFirLerpIntBits) { readResampleCoefficients = NULL; dlclose(resampleCoeffLib); @@ -353,15 +358,14 @@ void AudioResamplerSinc::init_routine() } c = &veryHighQualityConstants; - // we have 16 coefs samples per zero-crossing c->coefsBits = readResampleFirLerpIntBits(); - ALOGV("coefsBits = %d", c->coefsBits); c->cShift = kNumPhaseBits - c->coefsBits; c->cMask = ((1<coefsBits)-1) << c->cShift; c->pShift = kNumPhaseBits - c->coefsBits - pLerpBits; c->pMask = ((1<pShift; // number of zero-crossing on each side c->halfNumCoefs = readResampleFirNumCoeff(); + ALOGV("coefsBits = %d", c->coefsBits); ALOGV("halfNumCoefs = %d", c->halfNumCoefs); // note that we "leak" resampleCoeffLib until the process exits } @@ -434,7 +438,7 @@ int32_t mulAddRL(int left, uint32_t inRL, int32_t v, int32_t a) AudioResamplerSinc::AudioResamplerSinc(int bitDepth, int inChannelCount, int32_t sampleRate, src_quality quality) : AudioResampler(bitDepth, inChannelCount, sampleRate, quality), - mState(0) + mState(0), mImpulse(0), mRingFull(0), mFirCoefs(0) { /* * Layout of the state buffer for 32 tap: @@ -457,39 +461,34 @@ AudioResamplerSinc::AudioResamplerSinc(int bitDepth, if (ok != 0) { ALOGE("%s pthread_once failed: %d", __func__, ok); } - mConstants = (quality == VERY_HIGH_QUALITY) ? &veryHighQualityConstants : &highQualityConstants; + mConstants = (quality == VERY_HIGH_QUALITY) ? 
+ &veryHighQualityConstants : &highQualityConstants; } -AudioResamplerSinc::~AudioResamplerSinc() -{ - delete[] mState; +AudioResamplerSinc::~AudioResamplerSinc() { + free(mState); } void AudioResamplerSinc::init() { - const Constants *c = mConstants; - - const size_t numCoefs = 2*c->halfNumCoefs; + const Constants& c(*mConstants); + const size_t numCoefs = 2 * c.halfNumCoefs; const size_t stateSize = numCoefs * mChannelCount * 2; - mState = new int16_t[stateSize]; + mState = (int16_t*)memalign(32, stateSize*sizeof(int16_t)); memset(mState, 0, sizeof(int16_t)*stateSize); - mImpulse = mState + (c->halfNumCoefs-1)*mChannelCount; + mImpulse = mState + (c.halfNumCoefs-1)*mChannelCount; mRingFull = mImpulse + (numCoefs+1)*mChannelCount; } void AudioResamplerSinc::resample(int32_t* out, size_t outFrameCount, AudioBufferProvider* provider) { - // FIXME store current state (up or down sample) and only load the coefs when the state // changes. Or load two pointers one for up and one for down in the init function. // Not critical now since the read functions are fast, but would be important if read was slow. if (mConstants == &veryHighQualityConstants && readResampleCoefficients) { - ALOGV("get coefficient from libmm-audio resampler library"); - mFirCoefs = (mInSampleRate <= mSampleRate) ? readResampleCoefficients(true) : - readResampleCoefficients(false); + mFirCoefs = readResampleCoefficients( mInSampleRate <= mSampleRate ); } else { - ALOGV("Use default coefficients"); mFirCoefs = (mInSampleRate <= mSampleRate) ? mFirCoefsUp : mFirCoefsDown; } @@ -502,7 +501,6 @@ void AudioResamplerSinc::resample(int32_t* out, size_t outFrameCount, resample<2>(out, outFrameCount, provider); break; } - } @@ -510,7 +508,8 @@ template void AudioResamplerSinc::resample(int32_t* out, size_t outFrameCount, AudioBufferProvider* provider) { - const Constants *c = mConstants; + const Constants& c(*mConstants); + const size_t headOffset = c.halfNumCoefs*CHANNELS; int16_t* impulse = mImpulse; uint32_t vRL = mVolumeRL; size_t inputIndex = mInputIndex; @@ -545,11 +544,11 @@ void AudioResamplerSinc::resample(int32_t* out, size_t outFrameCount, } } } - int16_t *in = mBuffer.i16; + int16_t const * const in = mBuffer.i16; const size_t frameCount = mBuffer.frameCount; // Always read-in the first samples from the input buffer - int16_t* head = impulse + c->halfNumCoefs*CHANNELS; + int16_t* head = impulse + headOffset; for (size_t i=0 ; i> kNumPhaseBits; impulse += CHANNELS; phaseFraction -= 1LU<= mRingFull)) { - const size_t stateSize = (c->halfNumCoefs*2)*CHANNELS; + const size_t stateSize = (c.halfNumCoefs*2)*CHANNELS; memcpy(mState, mState+stateSize, sizeof(int16_t)*stateSize); impulse -= stateSize; } - int16_t* head = impulse + c->halfNumCoefs*CHANNELS; + + int16_t* head = impulse + c.halfNumCoefs*CHANNELS; for (size_t i=0 ; i void AudioResamplerSinc::filterCoefficient( int32_t& l, int32_t& r, uint32_t phase, const int16_t *samples, uint32_t vRL) { - const Constants *c = mConstants; - // compute the index of the coefficient on the positive side and // negative side - uint32_t indexP = ( phase & c->cMask) >> c->cShift; - uint32_t indexN = (-phase & c->cMask) >> c->cShift; - uint32_t lerpP = ( phase & c->pMask) >> c->pShift; - uint32_t lerpN = (-phase & c->pMask) >> c->pShift; + const Constants& c(*mConstants); + uint32_t indexP = ( phase & c.cMask) >> c.cShift; + uint32_t indexN = (-phase & c.cMask) >> c.cShift; + uint32_t lerpP = ( phase & c.pMask) >> c.pShift; + uint32_t lerpN = (-phase & c.pMask) >> c.pShift; if ((indexP == 
0) && (lerpP == 0)) { - indexN = c->cMask >> c->cShift; - lerpN = c->pMask >> c->pShift; + indexN = c.cMask >> c.cShift; + lerpN = c.pMask >> c.pShift; } - const size_t offset = c->halfNumCoefs; + const size_t offset = c.halfNumCoefs; indexP *= offset; indexN *= offset; - int32_t const* const coefs = mFirCoefs; - int32_t const* coefsP = coefs + indexP; - int32_t const* coefsN = coefs + indexN; + int32_t const* coefsP = mFirCoefs + indexP; + int32_t const* coefsN = mFirCoefs + indexN; int16_t const* sP = samples; int16_t const* sN = samples + CHANNELS; -- cgit v1.1 From ad9af03c4b491912239fc8c97a3ad0d342a33303 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Sun, 4 Nov 2012 15:16:13 -0800 Subject: NEON optimized SINC resampler this currently gives us a 60% to 80% boost depending on the quality level selected. Change-Id: I7db385007e811ed7bffe5fd3403b44e300894f5b --- services/audioflinger/AudioResamplerSinc.cpp | 175 +++++++++++++++++++++++++-- services/audioflinger/test-resample.cpp | 2 +- 2 files changed, 166 insertions(+), 11 deletions(-) diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index 165bb61..7d3681c 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -30,6 +30,20 @@ #include "AudioResamplerSinc.h" + +#if defined(__arm__) && !defined(__thumb__) +#define USE_INLINE_ASSEMBLY (true) +#else +#define USE_INLINE_ASSEMBLY (false) +#endif + +#if USE_INLINE_ASSEMBLY && defined(__ARM_NEON__) +#define USE_NEON (true) +#else +#define USE_NEON (false) +#endif + + namespace android { // ---------------------------------------------------------------------------- @@ -375,7 +389,7 @@ void AudioResamplerSinc::init_routine() static inline int32_t mulRL(int left, int32_t in, uint32_t vRL) { -#if defined(__arm__) && !defined(__thumb__) +#if USE_INLINE_ASSEMBLY int32_t out; if (left) { asm( "smultb %[out], %[in], %[vRL] \n" @@ -398,7 +412,7 @@ int32_t mulRL(int left, int32_t in, uint32_t vRL) static inline int32_t mulAdd(int16_t in, int32_t v, int32_t a) { -#if defined(__arm__) && !defined(__thumb__) +#if USE_INLINE_ASSEMBLY int32_t out; asm( "smlawb %[out], %[v], %[in], %[a] \n" : [out]"=r"(out) @@ -413,7 +427,7 @@ int32_t mulAdd(int16_t in, int32_t v, int32_t a) static inline int32_t mulAddRL(int left, uint32_t inRL, int32_t v, int32_t a) { -#if defined(__arm__) && !defined(__thumb__) +#if USE_INLINE_ASSEMBLY int32_t out; if (left) { asm( "smlawb %[out], %[v], %[inRL], %[a] \n" @@ -639,14 +653,155 @@ void AudioResamplerSinc::filterCoefficient( l = 0; r = 0; size_t count = offset; - for (size_t i=0 ; i(l, r, coefsP++, offset, lerpP, sP); - sP -= CHANNELS; - interpolate(l, r, coefsN++, offset, lerpN, sN); - sN += CHANNELS; + + if (!USE_NEON) { + for (size_t i=0 ; i(l, r, coefsP++, offset, lerpP, sP); + sP -= CHANNELS; + interpolate(l, r, coefsN++, offset, lerpN, sN); + sN += CHANNELS; + } + l = 2 * mulRL(1, l, vRL); + r = 2 * mulRL(0, r, vRL); + } else if (CHANNELS == 1) { + int32_t const* coefsP1 = coefsP + offset; + int32_t const* coefsN1 = coefsN + offset; + sP -= CHANNELS*3; + asm ( + "vmov.32 d2[0], %[lerpP] \n" // load the positive phase + "vmov.32 d2[1], %[lerpN] \n" // load the negative phase + "veor q0, q0 \n" // result, initialize to 0 + + "1: \n" + "vld1.16 { d4}, [%[sP]] \n" // load 4 16-bits stereo samples + "vld1.32 { q8}, [%[coefsP0]]! \n" // load 4 32-bits coefs + "vld1.32 { q9}, [%[coefsP1]]! \n" // load 4 32-bits coefs for interpolation + "vld1.16 { d6}, [%[sN]]! 
\n" // load 4 16-bits stereo samples + "vld1.32 {q10}, [%[coefsN0]]! \n" // load 4 32-bits coefs + "vld1.32 {q11}, [%[coefsN1]]! \n" // load 4 32-bits coefs for interpolation + + "vrev64.16 d4, d4 \n" // reverse 2 frames of the positive side + + "vsub.s32 q9, q9, q8 \n" // interpolate (step1) 1st set of coefs + "vsub.s32 q11, q11, q10 \n" // interpolate (step1) 2nd set of coets + "vshll.s16 q12, d4, #15 \n" // extend samples to 31 bits + + "vqrdmulh.s32 q9, q9, d2[0] \n" // interpolate (step2) 1st set of coefs + "vqrdmulh.s32 q11, q11, d2[1] \n" // interpolate (step3) 2nd set of coefs + "vshll.s16 q14, d6, #15 \n" // extend samples to 31 bits + + "vadd.s32 q8, q8, q9 \n" // interpolate (step3) 1st set + "vadd.s32 q10, q10, q11 \n" // interpolate (step4) 2nd set + "subs %[count], %[count], #4 \n" // update loop counter + + "vqrdmulh.s32 q12, q12, q8 \n" // multiply samples by interpolated coef + "vqrdmulh.s32 q14, q14, q10 \n" // multiply samples by interpolated coef + "sub %[sP], %[sP], #8 \n" // move pointer to next set of samples + + "vadd.s32 q0, q0, q12 \n" // accumulate result + "vadd.s32 q0, q0, q14 \n" // accumulate result + + "bne 1b \n" // loop + + "vpadd.s32 d0, d0, d1 \n" // add all 4 partial sums + "vpadd.s32 d0, d0, d0 \n" // together + + "vmov.s32 %[l], d0[0] \n" // save result in ARM register + + : [l] "=r" (l), + [count] "+r" (count), + [coefsP0] "+r" (coefsP), + [coefsP1] "+r" (coefsP1), + [coefsN0] "+r" (coefsN), + [coefsN1] "+r" (coefsN1), + [sP] "+r" (sP), + [sN] "+r" (sN) + : [lerpP] "r" (lerpP<<16), + [lerpN] "r" (lerpN<<16), + [vRL] "r" (vRL) + : "cc", "memory", + "q0", "q1", "q2", "q3", + "q8", "q9", "q10", "q11", + "q12", "q14" + ); + l = 2 * mulRL(1, l, vRL); + r = l; + } else if (CHANNELS == 2) { + int32_t const* coefsP1 = coefsP + offset; + int32_t const* coefsN1 = coefsN + offset; + sP -= CHANNELS*3; + asm ( + "vmov.32 d2[0], %[lerpP] \n" // load the positive phase + "vmov.32 d2[1], %[lerpN] \n" // load the negative phase + "veor q0, q0 \n" // result, initialize to 0 + "veor q4, q4 \n" // result, initialize to 0 + + "1: \n" + "vld2.16 {d4,d5}, [%[sP]] \n" // load 4 16-bits stereo samples + "vld1.32 { q8}, [%[coefsP0]]! \n" // load 4 32-bits coefs + "vld1.32 { q9}, [%[coefsP1]]! \n" // load 4 32-bits coefs for interpolation + "vld2.16 {d6,d7}, [%[sN]]! \n" // load 4 16-bits stereo samples + "vld1.32 {q10}, [%[coefsN0]]! \n" // load 4 32-bits coefs + "vld1.32 {q11}, [%[coefsN1]]! 
\n" // load 4 32-bits coefs for interpolation + + "vrev64.16 d4, d4 \n" // reverse 2 frames of the positive side + "vrev64.16 d5, d5 \n" // reverse 2 frames of the positive side + + "vsub.s32 q9, q9, q8 \n" // interpolate (step1) 1st set of coefs + "vsub.s32 q11, q11, q10 \n" // interpolate (step1) 2nd set of coets + "vshll.s16 q12, d4, #15 \n" // extend samples to 31 bits + "vshll.s16 q13, d5, #15 \n" // extend samples to 31 bits + + "vqrdmulh.s32 q9, q9, d2[0] \n" // interpolate (step2) 1st set of coefs + "vqrdmulh.s32 q11, q11, d2[1] \n" // interpolate (step3) 2nd set of coefs + "vshll.s16 q14, d6, #15 \n" // extend samples to 31 bits + "vshll.s16 q15, d7, #15 \n" // extend samples to 31 bits + + "vadd.s32 q8, q8, q9 \n" // interpolate (step3) 1st set + "vadd.s32 q10, q10, q11 \n" // interpolate (step4) 2nd set + "subs %[count], %[count], #4 \n" // update loop counter + + "vqrdmulh.s32 q12, q12, q8 \n" // multiply samples by interpolated coef + "vqrdmulh.s32 q13, q13, q8 \n" // multiply samples by interpolated coef + "vqrdmulh.s32 q14, q14, q10 \n" // multiply samples by interpolated coef + "vqrdmulh.s32 q15, q15, q10 \n" // multiply samples by interpolated coef + "sub %[sP], %[sP], #16 \n" // move pointer to next set of samples + + "vadd.s32 q0, q0, q12 \n" // accumulate result + "vadd.s32 q4, q4, q13 \n" // accumulate result + "vadd.s32 q0, q0, q14 \n" // accumulate result + "vadd.s32 q4, q4, q15 \n" // accumulate result + + "bne 1b \n" // loop + + "vpadd.s32 d0, d0, d1 \n" // add all 4 partial sums + "vpadd.s32 d8, d8, d9 \n" // add all 4 partial sums + "vpadd.s32 d0, d0, d0 \n" // together + "vpadd.s32 d8, d8, d8 \n" // together + + "vmov.s32 %[l], d0[0] \n" // save result in ARM register + "vmov.s32 %[r], d8[0] \n" // save result in ARM register + + : [l] "=r" (l), + [r] "=r" (r), + [count] "+r" (count), + [coefsP0] "+r" (coefsP), + [coefsP1] "+r" (coefsP1), + [coefsN0] "+r" (coefsN), + [coefsN1] "+r" (coefsN1), + [sP] "+r" (sP), + [sN] "+r" (sN) + : [lerpP] "r" (lerpP<<16), + [lerpN] "r" (lerpN<<16), + [vRL] "r" (vRL) + : "cc", "memory", + "q0", "q1", "q2", "q3", "q4", + "q8", "q9", "q10", "q11", + "q12", "q13", "q14", "q15" + ); + l = 2 * mulRL(1, l, vRL); + r = 2 * mulRL(0, r, vRL); } - l = 2 * mulRL(1, l, vRL); - r = 2 * mulRL(0, r, vRL); } template diff --git a/services/audioflinger/test-resample.cpp b/services/audioflinger/test-resample.cpp index e6d5cbe..3b66530 100644 --- a/services/audioflinger/test-resample.cpp +++ b/services/audioflinger/test-resample.cpp @@ -178,7 +178,7 @@ int main(int argc, char* argv[]) { double y = sin(M_PI * k * t * t); int16_t yi = floor(y * 32767.0 + 0.5); for (size_t j=0 ; j Date: Sat, 10 Nov 2012 03:26:39 -0800 Subject: refactor code to improve neon code we want to make sure we don't transfer data from the neon unit to the arm register file, as this can be quite slow. instead we do all the calculation on the neon side and write the result directly to main memory. 
Change-Id: Ibb56664d3ab03098ae2798b75e2b6927ac900187 --- services/audioflinger/AudioResamplerSinc.cpp | 75 ++++++++++++++++------------ services/audioflinger/AudioResamplerSinc.h | 5 +- 2 files changed, 46 insertions(+), 34 deletions(-) diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index 7d3681c..952abb4 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -470,6 +470,9 @@ AudioResamplerSinc::AudioResamplerSinc(int bitDepth, * */ + mVolumeSIMD[0] = 0; + mVolumeSIMD[1] = 0; + // Load the constants for coefficients int ok = pthread_once(&once_control, init_routine); if (ok != 0) { @@ -494,6 +497,12 @@ void AudioResamplerSinc::init() { mRingFull = mImpulse + (numCoefs+1)*mChannelCount; } +void AudioResamplerSinc::setVolume(int16_t left, int16_t right) { + AudioResampler::setVolume(left, right); + mVolumeSIMD[0] = int32_t(left)<<16; + mVolumeSIMD[1] = int32_t(right)<<16; +} + void AudioResamplerSinc::resample(int32_t* out, size_t outFrameCount, AudioBufferProvider* provider) { @@ -568,11 +577,9 @@ void AudioResamplerSinc::resample(int32_t* out, size_t outFrameCount, } // handle boundary case - int32_t l, r; while (CC_LIKELY(outputIndex < outputSampleCount)) { - filterCoefficient(l, r, phaseFraction, impulse, vRL); - out[outputIndex++] += l; - out[outputIndex++] += r; + filterCoefficient(&out[outputIndex], phaseFraction, impulse, vRL); + outputIndex += 2; phaseFraction += phaseIncrement; const size_t phaseIndex = phaseFraction >> kNumPhaseBits; @@ -628,7 +635,7 @@ void AudioResamplerSinc::read( template void AudioResamplerSinc::filterCoefficient( - int32_t& l, int32_t& r, uint32_t phase, const int16_t *samples, uint32_t vRL) + int32_t* out, uint32_t phase, const int16_t *samples, uint32_t vRL) { // compute the index of the coefficient on the positive side and // negative side @@ -650,19 +657,19 @@ void AudioResamplerSinc::filterCoefficient( int16_t const* sP = samples; int16_t const* sN = samples + CHANNELS; - l = 0; - r = 0; size_t count = offset; if (!USE_NEON) { + int32_t l = 0; + int32_t r = 0; for (size_t i=0 ; i(l, r, coefsP++, offset, lerpP, sP); sP -= CHANNELS; interpolate(l, r, coefsN++, offset, lerpN, sN); sN += CHANNELS; } - l = 2 * mulRL(1, l, vRL); - r = 2 * mulRL(0, r, vRL); + out[0] += 2 * mulRL(1, l, vRL); + out[1] += 2 * mulRL(0, r, vRL); } else if (CHANNELS == 1) { int32_t const* coefsP1 = coefsP + offset; int32_t const* coefsN1 = coefsN + offset; @@ -674,11 +681,11 @@ void AudioResamplerSinc::filterCoefficient( "1: \n" "vld1.16 { d4}, [%[sP]] \n" // load 4 16-bits stereo samples - "vld1.32 { q8}, [%[coefsP0]]! \n" // load 4 32-bits coefs - "vld1.32 { q9}, [%[coefsP1]]! \n" // load 4 32-bits coefs for interpolation + "vld1.32 { q8}, [%[coefsP0]:128]! \n" // load 4 32-bits coefs + "vld1.32 { q9}, [%[coefsP1]:128]! \n" // load 4 32-bits coefs for interpolation "vld1.16 { d6}, [%[sN]]! \n" // load 4 16-bits stereo samples - "vld1.32 {q10}, [%[coefsN0]]! \n" // load 4 32-bits coefs - "vld1.32 {q11}, [%[coefsN1]]! \n" // load 4 32-bits coefs for interpolation + "vld1.32 {q10}, [%[coefsN0]:128]! \n" // load 4 32-bits coefs + "vld1.32 {q11}, [%[coefsN1]:128]! 
\n" // load 4 32-bits coefs for interpolation "vrev64.16 d4, d4 \n" // reverse 2 frames of the positive side @@ -703,12 +710,16 @@ void AudioResamplerSinc::filterCoefficient( "bne 1b \n" // loop + "vld1.s32 {d2}, [%[vLR]] \n" // load volumes + "vld1.s32 {d3}, %[out] \n" // load the output "vpadd.s32 d0, d0, d1 \n" // add all 4 partial sums "vpadd.s32 d0, d0, d0 \n" // together + "vdup.i32 d0, d0[0] \n" // interleave L,R channels + "vqrdmulh.s32 d0, d0, d2 \n" // apply volume + "vadd.s32 d3, d3, d0 \n" // accumulate result + "vst1.s32 {d0}, %[out] \n" // store result - "vmov.s32 %[l], d0[0] \n" // save result in ARM register - - : [l] "=r" (l), + : [out] "=Uv" (out[0]), [count] "+r" (count), [coefsP0] "+r" (coefsP), [coefsP1] "+r" (coefsP1), @@ -718,14 +729,12 @@ void AudioResamplerSinc::filterCoefficient( [sN] "+r" (sN) : [lerpP] "r" (lerpP<<16), [lerpN] "r" (lerpN<<16), - [vRL] "r" (vRL) + [vLR] "r" (mVolumeSIMD) : "cc", "memory", "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11", "q12", "q14" ); - l = 2 * mulRL(1, l, vRL); - r = l; } else if (CHANNELS == 2) { int32_t const* coefsP1 = coefsP + offset; int32_t const* coefsN1 = coefsN + offset; @@ -738,11 +747,11 @@ void AudioResamplerSinc::filterCoefficient( "1: \n" "vld2.16 {d4,d5}, [%[sP]] \n" // load 4 16-bits stereo samples - "vld1.32 { q8}, [%[coefsP0]]! \n" // load 4 32-bits coefs - "vld1.32 { q9}, [%[coefsP1]]! \n" // load 4 32-bits coefs for interpolation + "vld1.32 { q8}, [%[coefsP0]:128]! \n" // load 4 32-bits coefs + "vld1.32 { q9}, [%[coefsP1]:128]! \n" // load 4 32-bits coefs for interpolation "vld2.16 {d6,d7}, [%[sN]]! \n" // load 4 16-bits stereo samples - "vld1.32 {q10}, [%[coefsN0]]! \n" // load 4 32-bits coefs - "vld1.32 {q11}, [%[coefsN1]]! \n" // load 4 32-bits coefs for interpolation + "vld1.32 {q10}, [%[coefsN0]:128]! \n" // load 4 32-bits coefs + "vld1.32 {q11}, [%[coefsN1]:128]! 
\n" // load 4 32-bits coefs for interpolation "vrev64.16 d4, d4 \n" // reverse 2 frames of the positive side "vrev64.16 d5, d5 \n" // reverse 2 frames of the positive side @@ -774,16 +783,18 @@ void AudioResamplerSinc::filterCoefficient( "bne 1b \n" // loop - "vpadd.s32 d0, d0, d1 \n" // add all 4 partial sums - "vpadd.s32 d8, d8, d9 \n" // add all 4 partial sums + "vld1.s32 {d2}, [%[vLR]] \n" // load volumes + "vld1.s32 {d3}, %[out] \n" // load the output + "vpadd.s32 d0, d0, d1 \n" // add all 4 partial sums from q0 + "vpadd.s32 d8, d8, d9 \n" // add all 4 partial sums from q4 "vpadd.s32 d0, d0, d0 \n" // together "vpadd.s32 d8, d8, d8 \n" // together + "vtrn.s32 d0, d8 \n" // interlace L,R channels + "vqrdmulh.s32 d0, d0, d2 \n" // apply volume + "vadd.s32 d3, d3, d0 \n" // accumulate result + "vst1.s32 {d0}, %[out] \n" // store result - "vmov.s32 %[l], d0[0] \n" // save result in ARM register - "vmov.s32 %[r], d8[0] \n" // save result in ARM register - - : [l] "=r" (l), - [r] "=r" (r), + : [out] "=Uv" (out[0]), [count] "+r" (count), [coefsP0] "+r" (coefsP), [coefsP1] "+r" (coefsP1), @@ -793,14 +804,12 @@ void AudioResamplerSinc::filterCoefficient( [sN] "+r" (sN) : [lerpP] "r" (lerpP<<16), [lerpN] "r" (lerpN<<16), - [vRL] "r" (vRL) + [vLR] "r" (mVolumeSIMD) : "cc", "memory", "q0", "q1", "q2", "q3", "q4", "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15" ); - l = 2 * mulRL(1, l, vRL); - r = 2 * mulRL(0, r, vRL); } } diff --git a/services/audioflinger/AudioResamplerSinc.h b/services/audioflinger/AudioResamplerSinc.h index 1b14019..96c31ee 100644 --- a/services/audioflinger/AudioResamplerSinc.h +++ b/services/audioflinger/AudioResamplerSinc.h @@ -44,13 +44,15 @@ public: private: void init(); + virtual void setVolume(int16_t left, int16_t right); + template void resample(int32_t* out, size_t outFrameCount, AudioBufferProvider* provider); template inline void filterCoefficient( - int32_t& l, int32_t& r, uint32_t phase, const int16_t *samples, uint32_t vRL); + int32_t* out, uint32_t phase, const int16_t *samples, uint32_t vRL); template inline void interpolate( @@ -65,6 +67,7 @@ private: int16_t *mState; int16_t *mImpulse; int16_t *mRingFull; + int32_t mVolumeSIMD[2]; const int32_t * mFirCoefs; static const int32_t mFirCoefsDown[]; -- cgit v1.1 From 7492a7ff46a75b5d8e10ae11d4ad50429cf945ce Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Sat, 10 Nov 2012 04:44:30 -0800 Subject: more optimizations... calculate the offsets from the phase differently, this happens to reduce the register pressure in the main loop, which in turns allows the compiler to generate much better code (doesn't need to spill a lot of stuff on the stack). this gives another 15% performance increase Change-Id: I2ce3479dd48b9e6941adb80e6d443d6e14d64d96 --- services/audioflinger/AudioResamplerSinc.cpp | 31 ++++++++++++++++------------ services/audioflinger/AudioResamplerSinc.h | 3 --- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index 952abb4..d68b839 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -637,17 +637,20 @@ template void AudioResamplerSinc::filterCoefficient( int32_t* out, uint32_t phase, const int16_t *samples, uint32_t vRL) { + // NOTE: be very careful when modifying the code here. register + // pressure is very high and a small change might cause the compiler + // to generate far less efficient code. 
+ // Always sanity check the result with objdump or test-resample. + // compute the index of the coefficient on the positive side and // negative side const Constants& c(*mConstants); + const int32_t ONE = c.cMask | c.pMask; uint32_t indexP = ( phase & c.cMask) >> c.cShift; - uint32_t indexN = (-phase & c.cMask) >> c.cShift; uint32_t lerpP = ( phase & c.pMask) >> c.pShift; - uint32_t lerpN = (-phase & c.pMask) >> c.pShift; - if ((indexP == 0) && (lerpP == 0)) { - indexN = c.cMask >> c.cShift; - lerpN = c.pMask >> c.pShift; - } + uint32_t indexN = ((ONE-phase) & c.cMask) >> c.cShift; + uint32_t lerpN = ((ONE-phase) & c.pMask) >> c.pShift; + const size_t offset = c.halfNumCoefs; indexP *= offset; indexN *= offset; @@ -677,7 +680,8 @@ void AudioResamplerSinc::filterCoefficient( asm ( "vmov.32 d2[0], %[lerpP] \n" // load the positive phase "vmov.32 d2[1], %[lerpN] \n" // load the negative phase - "veor q0, q0 \n" // result, initialize to 0 + "veor q0, q0, q0 \n" // result, initialize to 0 + "vshl.s32 d2, d2, #16 \n" // convert to 32 bits "1: \n" "vld1.16 { d4}, [%[sP]] \n" // load 4 16-bits stereo samples @@ -727,8 +731,8 @@ void AudioResamplerSinc::filterCoefficient( [coefsN1] "+r" (coefsN1), [sP] "+r" (sP), [sN] "+r" (sN) - : [lerpP] "r" (lerpP<<16), - [lerpN] "r" (lerpN<<16), + : [lerpP] "r" (lerpP), + [lerpN] "r" (lerpN), [vLR] "r" (mVolumeSIMD) : "cc", "memory", "q0", "q1", "q2", "q3", @@ -742,8 +746,9 @@ void AudioResamplerSinc::filterCoefficient( asm ( "vmov.32 d2[0], %[lerpP] \n" // load the positive phase "vmov.32 d2[1], %[lerpN] \n" // load the negative phase - "veor q0, q0 \n" // result, initialize to 0 - "veor q4, q4 \n" // result, initialize to 0 + "veor q0, q0, q0 \n" // result, initialize to 0 + "veor q4, q4, q4 \n" // result, initialize to 0 + "vshl.s32 d2, d2, #16 \n" // convert to 32 bits "1: \n" "vld2.16 {d4,d5}, [%[sP]] \n" // load 4 16-bits stereo samples @@ -802,8 +807,8 @@ void AudioResamplerSinc::filterCoefficient( [coefsN1] "+r" (coefsN1), [sP] "+r" (sP), [sN] "+r" (sN) - : [lerpP] "r" (lerpP<<16), - [lerpN] "r" (lerpN<<16), + : [lerpP] "r" (lerpP), + [lerpN] "r" (lerpN), [vLR] "r" (mVolumeSIMD) : "cc", "memory", "q0", "q1", "q2", "q3", "q4", diff --git a/services/audioflinger/AudioResamplerSinc.h b/services/audioflinger/AudioResamplerSinc.h index 96c31ee..09c6866 100644 --- a/services/audioflinger/AudioResamplerSinc.h +++ b/services/audioflinger/AudioResamplerSinc.h @@ -78,14 +78,11 @@ private: static const int32_t RESAMPLE_FIR_LERP_INT_BITS = 7; struct Constants { - // we have 16 coefs samples per zero-crossing int coefsBits; int cShift; uint32_t cMask; - int pShift; uint32_t pMask; - // number of zero-crossing on each side unsigned int halfNumCoefs; }; -- cgit v1.1 From bc0f6b92bba33ca9c2e76f2a520d290f055da6b2 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 12 Nov 2012 14:32:06 -0800 Subject: Fix regression for AudioTrack::write() 8-bit PCM Bug: 7526532 Change-Id: I8ddd1f0e9d035b54401788dcc422591281dcd97a --- media/libmedia/AudioTrack.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 468bd29..26cf877 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1104,8 +1104,8 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize) } else { toWrite = audioBuffer.size; memcpy(audioBuffer.i8, src, toWrite); - src += toWrite; } + src += toWrite; userSize -= toWrite; written += toWrite; -- cgit v1.1 From 71bd6f8faf362659d3a9052549607039585f4922 
Mon Sep 17 00:00:00 2001 From: Jean-Michel Trivi Date: Thu, 25 Oct 2012 12:07:27 -0700 Subject: Stagefright command line tool: input file name last Show in usage that the source file name comes last, and is preceded by the options. Change-Id: I8407fc36c8d19785cb2e6e1f7b7a352a8d86f889 --- cmds/stagefright/stagefright.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index b92a8a0..1e0e7f8 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -589,7 +589,7 @@ static void performSeekTest(const sp &source) { } static void usage(const char *me) { - fprintf(stderr, "usage: %s\n", me); + fprintf(stderr, "usage: %s [options] [input_filename]\n", me); fprintf(stderr, " -h(elp)\n"); fprintf(stderr, " -a(udio)\n"); fprintf(stderr, " -n repetitions\n"); @@ -607,8 +607,8 @@ static void usage(const char *me) { "(video only)\n"); fprintf(stderr, " -S allocate buffers from a surface\n"); fprintf(stderr, " -T allocate buffers from a surface texture\n"); - fprintf(stderr, " -d(ump) filename (raw stream data to a file)\n"); - fprintf(stderr, " -D(ump) filename (decoded PCM data to a file)\n"); + fprintf(stderr, " -d(ump) output_filename (raw stream data to a file)\n"); + fprintf(stderr, " -D(ump) output_filename (decoded PCM data to a file)\n"); } static void dumpCodecProfiles(const sp& omx, bool queryDecoders) { -- cgit v1.1 From 83a0382dc17364567667a4e6135db43f5bd92efc Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 12 Nov 2012 07:58:20 -0800 Subject: Move frame size out of the control block This is part of a series to clean up the control block. Change-Id: Ifab1c42ac0f8be704e571b292713cd2250d12a3f --- include/media/AudioRecord.h | 3 +- include/media/AudioTrack.h | 9 +++++- include/private/media/AudioTrackShared.h | 7 ++--- media/libmedia/AudioRecord.cpp | 20 ++++++------- media/libmedia/AudioTrack.cpp | 28 +++++++++--------- services/audioflinger/AudioFlinger.cpp | 49 ++++++++++++++++---------------- services/audioflinger/AudioFlinger.h | 4 +++ 7 files changed, 63 insertions(+), 57 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index bfb5d3a..2672db1 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -185,7 +185,7 @@ public: audio_format_t format() const; int channelCount() const; uint32_t frameCount() const; - size_t frameSize() const; + size_t frameSize() const { return mFrameSize; } audio_source_t inputSource() const; @@ -378,6 +378,7 @@ private: uint32_t mFrameCount; audio_format_t mFormat; uint8_t mChannelCount; + size_t mFrameSize; // app-level frame size == AudioFlinger frame size audio_source_t mInputSource; status_t mStatus; uint32_t mLatency; diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 639b567..3504f1f 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -229,7 +229,7 @@ public: /* Return channelCount * (bit depth per channel / 8). * channelCount is determined from channelMask, and bit depth comes from format. */ - size_t frameSize() const; + size_t frameSize() const { return mFrameSize; } sp& sharedBuffer(); @@ -517,6 +517,13 @@ protected: uint8_t mMuted; uint8_t mReserved; audio_channel_mask_t mChannelMask; + + // mFrameSize is equal to mFrameSizeAF for non-PCM or 16-bit PCM data. + // For 8-bit PCM data, mFrameSizeAF is + // twice as large because data is expanded to 16-bit before being stored in buffer. 
+ size_t mFrameSize; // app-level frame size + size_t mFrameSizeAF; // AudioFlinger frame size + status_t mStatus; uint32_t mLatency; diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 8ef90c7..ac5372f 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -77,12 +77,9 @@ public: uint32_t sampleRate; - // NOTE: audio_track_cblk_t::frameSize is not equal to AudioTrack::frameSize() for - // 8 bit PCM data: in this case, mCblk->frameSize is based on a sample size of - // 16 bit because data is converted to 16 bit before being stored in buffer + uint8_t mPad2; // unused // read-only for client, server writes once at initialization and is then read-only - uint8_t frameSize; // would normally be size_t, but 8 bits is plenty uint8_t mName; // normal tracks: track name, fast tracks: track index // used by client only @@ -111,7 +108,7 @@ public: // if there is a shared buffer, "buffers" is the value of pointer() for the shared // buffer, otherwise "buffers" points immediately after the control block - void* buffer(void *buffers, uint32_t offset) const; + void* buffer(void *buffers, uint32_t frameSize, uint32_t offset) const; uint32_t framesAvailableIn() { return framesAvailable(false); } uint32_t framesAvailableOut() { return framesAvailable(true); } diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 263a7c7..b40aaf5 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -213,6 +213,13 @@ status_t AudioRecord::set( mFrameCount = mCblk->frameCount; mChannelCount = (uint8_t)channelCount; mChannelMask = channelMask; + + if (audio_is_linear_pcm(mFormat)) { + mFrameSize = channelCount * audio_bytes_per_sample(format); + } else { + mFrameSize = sizeof(uint8_t); + } + mActive = false; mCbf = cbf; mNotificationFrames = notificationFrames; @@ -258,15 +265,6 @@ uint32_t AudioRecord::frameCount() const return mFrameCount; } -size_t AudioRecord::frameSize() const -{ - if (audio_is_linear_pcm(mFormat)) { - return channelCount()*audio_bytes_per_sample(mFormat); - } else { - return sizeof(uint8_t); - } -} - audio_source_t AudioRecord::inputSource() const { return mInputSource; @@ -560,8 +558,8 @@ create_new_record: } audioBuffer->frameCount = framesReq; - audioBuffer->size = framesReq*cblk->frameSize; - audioBuffer->raw = cblk->buffer(mBuffers, u); + audioBuffer->size = framesReq * mFrameSize; + audioBuffer->raw = cblk->buffer(mBuffers, mFrameSize, u); active = mActive; return active ? 
status_t(NO_ERROR) : status_t(STOPPED); } diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 26cf877..4a4759e 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -286,6 +286,15 @@ status_t AudioTrack::set( mFormat = format; mChannelMask = channelMask; mChannelCount = channelCount; + + if (audio_is_linear_pcm(format)) { + mFrameSize = channelCount * audio_bytes_per_sample(format); + mFrameSizeAF = channelCount * sizeof(int16_t); + } else { + mFrameSize = sizeof(uint8_t); + mFrameSizeAF = sizeof(uint8_t); + } + mSharedBuffer = sharedBuffer; mMuted = false; mActive = false; @@ -332,15 +341,6 @@ uint32_t AudioTrack::frameCount() const return mCblk->frameCount; } -size_t AudioTrack::frameSize() const -{ - if (audio_is_linear_pcm(mFormat)) { - return channelCount()*audio_bytes_per_sample(mFormat); - } else { - return sizeof(uint8_t); - } -} - sp& AudioTrack::sharedBuffer() { return mSharedBuffer; @@ -1026,8 +1026,8 @@ create_new_track: } audioBuffer->frameCount = framesReq; - audioBuffer->size = framesReq * cblk->frameSize; - audioBuffer->raw = cblk->buffer(mBuffers, u); + audioBuffer->size = framesReq * mFrameSizeAF; + audioBuffer->raw = cblk->buffer(mBuffers, mFrameSizeAF, u); active = mActive; return active ? status_t(NO_ERROR) : status_t(STOPPED); } @@ -1302,7 +1302,7 @@ bool AudioTrack::processAudioBuffer(const sp& thread) // NOTE: cblk->frameSize is not equal to AudioTrack::frameSize() for // 8 bit PCM data: in this case, cblk->frameSize is based on a sample size of // 16 bit. - audioBuffer.frameCount = writtenSize/cblk->frameSize; + audioBuffer.frameCount = writtenSize / mFrameSizeAF; frames -= audioBuffer.frameCount; @@ -1373,7 +1373,7 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart if (user > server) { frames = ((user - server) > newCblk->frameCount) ? newCblk->frameCount : (user - server); - memset(mBuffers, 0, frames * newCblk->frameSize); + memset(mBuffers, 0, frames * mFrameSizeAF); } // restart playback even if buffer is not completely filled. 
android_atomic_or(CBLK_FORCEREADY, &newCblk->flags); @@ -1588,7 +1588,7 @@ bool audio_track_cblk_t::stepServer(uint32_t frameCount, bool isOut) return true; } -void* audio_track_cblk_t::buffer(void *buffers, uint32_t offset) const +void* audio_track_cblk_t::buffer(void *buffers, size_t frameSize, uint32_t offset) const { return (int8_t *)buffers + (offset - userBase) * frameSize; } diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index c0f5c7b..eb20019 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -4190,6 +4190,7 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mState(IDLE), mSampleRate(sampleRate), mFormat(format), + mFrameSize(0), // will be set to correct value in constructor mStepServerFailed(false), mSessionId(sessionId) // mChannelCount @@ -4311,7 +4312,7 @@ int AudioFlinger::ThreadBase::TrackBase::sampleRate() const { void* AudioFlinger::ThreadBase::TrackBase::getBuffer(uint32_t offset, uint32_t frames) const { audio_track_cblk_t* cblk = this->cblk(); - size_t frameSize = cblk->frameSize; + size_t frameSize = mFrameSize; int8_t *bufferStart = (int8_t *)mBuffer + (offset-cblk->serverBase)*frameSize; int8_t *bufferEnd = bufferStart + frames * frameSize; @@ -4363,11 +4364,11 @@ AudioFlinger::PlaybackThread::Track::Track( mUnderrunCount(0), mCachedVolume(1.0) { + // NOTE: frame size for 8 bit PCM data is based on a sample size of + // 16 bit because data is converted to 16 bit before being stored in buffer by AudioTrack + mFrameSize = audio_is_linear_pcm(format) ? mChannelCount * sizeof(int16_t) : + sizeof(uint8_t); if (mCblk != NULL) { - // NOTE: audio_track_cblk_t::frameSize for 8 bit PCM data is based on a sample size of - // 16 bit because data is converted to 16 bit before being stored in buffer by AudioTrack - mCblk->frameSize = audio_is_linear_pcm(format) ? mChannelCount * sizeof(int16_t) : - sizeof(uint8_t); // to avoid leaking a track name, do not allocate one unless there is an mCblk mName = thread->getTrackName_l(channelMask, sessionId); mCblk->mName = mName; @@ -5014,7 +5015,7 @@ void AudioFlinger::PlaybackThread::TimedTrack::trimTimedBufferQueue_l() { // this frame in media time units and adding it to the PTS of the // buffer. int64_t frameCount = mTimedBufferQueue[trimEnd].buffer()->size() - / mCblk->frameSize; + / mFrameSize; if (!mMediaTimeToSampleTransform.doReverseTransform(frameCount, &bufEnd)) { @@ -5074,7 +5075,7 @@ void AudioFlinger::PlaybackThread::TimedTrack::updateFramesPendingAfterTrim_l( " bytes. (update reason: \"%s\")", bufBytes, consumedAlready, logTag); - uint32_t bufFrames = (bufBytes - consumedAlready) / mCblk->frameSize; + uint32_t bufFrames = (bufBytes - consumedAlready) / mFrameSize; ALOG_ASSERT(mFramesPendingInQueue >= bufFrames, "Bad bookkeeping while updating frames pending. Should have at" " least %u queued frames, but we think we have only %u. 
(update" @@ -5095,7 +5096,7 @@ status_t AudioFlinger::PlaybackThread::TimedTrack::queueTimedBuffer( Mutex::Autolock _l(mTimedBufferQueueLock); - uint32_t bufFrames = buffer->size() / mCblk->frameSize; + uint32_t bufFrames = buffer->size() / mFrameSize; mFramesPendingInQueue += bufFrames; mTimedBufferQueue.add(TimedBuffer(buffer, pts)); @@ -5192,7 +5193,7 @@ status_t AudioFlinger::PlaybackThread::TimedTrack::getNextBuffer( // adjust the head buffer's PTS to reflect the portion of the head buffer // that has already been consumed int64_t effectivePTS = headLocalPTS + - ((head.position() / mCblk->frameSize) * mLocalTimeFreq / sampleRate()); + ((head.position() / mFrameSize) * mLocalTimeFreq / sampleRate()); // Calculate the delta in samples between the head of the input buffer // queue and the start of the next output buffer that will be written. @@ -5257,7 +5258,7 @@ status_t AudioFlinger::PlaybackThread::TimedTrack::getNextBuffer( // the next input sample is late uint32_t lateFrames = static_cast(-((sampleDelta + 0x80000000) >> 32)); size_t onTimeSamplePosition = - head.position() + lateFrames * mCblk->frameSize; + head.position() + lateFrames * mFrameSize; if (onTimeSamplePosition > head.buffer()->size()) { // all the remaining samples in the head are too late, so @@ -5292,7 +5293,7 @@ void AudioFlinger::PlaybackThread::TimedTrack::timedYieldSamples_l( head.position()); uint32_t framesLeftInHead = ((head.buffer()->size() - head.position()) / - mCblk->frameSize); + mFrameSize); size_t framesRequested = buffer->frameCount; buffer->frameCount = min(framesLeftInHead, framesRequested); @@ -5307,9 +5308,9 @@ void AudioFlinger::PlaybackThread::TimedTrack::timedYieldSilence_l( uint32_t numFrames, AudioBufferProvider::Buffer* buffer) { // lazily allocate a buffer filled with silence - if (mTimedSilenceBufferSize < numFrames * mCblk->frameSize) { + if (mTimedSilenceBufferSize < numFrames * mFrameSize) { delete [] mTimedSilenceBuffer; - mTimedSilenceBufferSize = numFrames * mCblk->frameSize; + mTimedSilenceBufferSize = numFrames * mFrameSize; mTimedSilenceBuffer = new uint8_t[mTimedSilenceBufferSize]; memset(mTimedSilenceBuffer, 0, mTimedSilenceBufferSize); } @@ -5357,7 +5358,7 @@ void AudioFlinger::PlaybackThread::TimedTrack::releaseBuffer( start, end, buffer->raw); head.setPosition(head.position() + - (buffer->frameCount * mCblk->frameSize)); + (buffer->frameCount * mFrameSize)); mQueueHeadInFlight = false; ALOG_ASSERT(mFramesPendingInQueue >= buffer->frameCount, @@ -5409,15 +5410,13 @@ AudioFlinger::RecordThread::RecordTrack::RecordTrack( channelMask, frameCount, 0 /*sharedBuffer*/, sessionId), mOverflow(false) { - if (mCblk != NULL) { - ALOGV("RecordTrack constructor, size %d", (int)mBufferEnd - (int)mBuffer); - if (format == AUDIO_FORMAT_PCM_16_BIT) { - mCblk->frameSize = mChannelCount * sizeof(int16_t); - } else if (format == AUDIO_FORMAT_PCM_8_BIT) { - mCblk->frameSize = mChannelCount * sizeof(int8_t); - } else { - mCblk->frameSize = sizeof(int8_t); - } + ALOGV("RecordTrack constructor, size %d", (int)mBufferEnd - (int)mBuffer); + if (format == AUDIO_FORMAT_PCM_16_BIT) { + mFrameSize = mChannelCount * sizeof(int16_t); + } else if (format == AUDIO_FORMAT_PCM_8_BIT) { + mFrameSize = mChannelCount * sizeof(int8_t); + } else { + mFrameSize = sizeof(int8_t); } } @@ -5749,7 +5748,7 @@ status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( } buffer->frameCount = framesReq; - buffer->raw = cblk->buffer(mBuffers, u); + buffer->raw = cblk->buffer(mBuffers, mFrameSize, u); return NO_ERROR; } 
@@ -6157,7 +6156,7 @@ bool AudioFlinger::RecordThread::threadLoop() if (framesIn) { int8_t *src = (int8_t *)mRsmpInBuffer + mRsmpInIndex * mFrameSize; int8_t *dst = buffer.i8 + (buffer.frameCount - framesOut) * - mActiveTrack->mCblk->frameSize; + mActiveTrack->mFrameSize; if (framesIn > framesOut) framesIn = framesOut; mRsmpInIndex += framesIn; diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index fc24bed..38744d0 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -472,6 +472,10 @@ private: const uint32_t mSampleRate; // initial sample rate only; for tracks which // support dynamic rates, the current value is in control block const audio_format_t mFormat; + size_t mFrameSize; // AudioFlinger's view of frame size in shared memory, + // where for AudioTrack (but not AudioRecord), + // 8-bit PCM samples are stored as 16-bit + // FIXME should be const bool mStepServerFailed; const int mSessionId; uint8_t mChannelCount; -- cgit v1.1 From f4fca226d2cb08862d0faa4918e181b3e73f6a0c Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Wed, 7 Nov 2012 15:36:59 -0800 Subject: Scan .awb files too b/6122599 Change-Id: Ied3e0392939231447f1fc5685ca1fade1e55ce08 --- media/libstagefright/StagefrightMediaScanner.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp index b7cf96e..359f2be 100644 --- a/media/libstagefright/StagefrightMediaScanner.cpp +++ b/media/libstagefright/StagefrightMediaScanner.cpp @@ -42,7 +42,7 @@ static bool FileHasAcceptableExtension(const char *extension) { ".mpeg", ".ogg", ".mid", ".smf", ".imy", ".wma", ".aac", ".wav", ".amr", ".midi", ".xmf", ".rtttl", ".rtx", ".ota", ".mkv", ".mka", ".webm", ".ts", ".fl", ".flac", ".mxmf", - ".avi", ".mpeg", ".mpg" + ".avi", ".mpeg", ".mpg", ".awb" }; static const size_t kNumValidExtensions = sizeof(kValidExtensions) / sizeof(kValidExtensions[0]); -- cgit v1.1 From b36a7a68af073b1e7fd5cad6aa2c52223fd30efd Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 12 Nov 2012 15:46:10 -0800 Subject: Simplify AudioRecord::restoreTrack_l() Finish removing CBLK_RESTORING and CBLK_RESTORED from control block flags, and remove constant RESTORE_TIMEOUT_MS. Also minor cleanup: - Cache mCblk in local variable cblk and make cblk allocatable in a register. - Use "iMem" for sp. - Add missing error log to AudioRecord; it was already in AudioTrack. This is part of a series to clean up the control block. 
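The by-reference update pattern this cleanup settles on, condensed from the start()/obtainBuffer() hunks below (fragment, not standalone code):

    audio_track_cblk_t* cblk = mCblk;      // cached in a local, register-friendly
    if (cblk->flags & CBLK_INVALID) {
        audio_track_cblk_t* temp = cblk;   // restoreRecord_l() repoints this when it
        ret = restoreRecord_l(temp);       // re-creates the IAudioRecord and cblk
        cblk = temp;                       // continue with whichever cblk is now valid
    }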
Change-Id: Ia5f5ab4763c392bc06a45851b167ddaee29e3455 --- include/private/media/AudioTrackShared.h | 5 -- media/libmedia/AudioRecord.cpp | 93 +++++++++++++++----------------- 2 files changed, 44 insertions(+), 54 deletions(-) diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index ac5372f..3063448 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -31,17 +31,12 @@ namespace android { // init time #define MAX_RUN_TIMEOUT_MS 1000 #define WAIT_PERIOD_MS 10 -// AudioTrack no longer uses this, it is for AudioRecord only: -#define RESTORE_TIMEOUT_MS 5000 // Maximum waiting time for a track to be restored #define CBLK_UNDERRUN 0x01 // set: underrun (out) or overrrun (in), clear: no underrun or overrun #define CBLK_FORCEREADY 0x02 // set: track is considered ready immediately by AudioFlinger, // clear: track is ready when buffer full #define CBLK_INVALID 0x04 // track buffer invalidated by AudioFlinger, need to re-create #define CBLK_DISABLED 0x08 // track disabled by AudioFlinger due to underrun, need to re-start -// AudioTrack no longer uses these, they are for AudioRecord only: -#define CBLK_RESTORING 0x10 // track is being restored after invalidation by AudioFlinger -#define CBLK_RESTORED 0x20 // track has been restored after invalidation by AudioFlinger // Important: do not add any virtual methods, including ~ struct audio_track_cblk_t diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index b40aaf5..062f546 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -300,7 +300,9 @@ status_t AudioRecord::start(AudioSystem::sync_event_t event, int triggerSession) } } if (cblk->flags & CBLK_INVALID) { - ret = restoreRecord_l(cblk); + audio_track_cblk_t* temp = cblk; + ret = restoreRecord_l(temp); + cblk = temp; } cblk->lock.unlock(); if (ret == NO_ERROR) { @@ -431,6 +433,7 @@ status_t AudioRecord::openRecord_l( status_t status; const sp& audioFlinger = AudioSystem::get_audio_flinger(); if (audioFlinger == 0) { + ALOGE("Could not get audioflinger"); return NO_INIT; } @@ -453,19 +456,20 @@ status_t AudioRecord::openRecord_l( ALOGE("AudioFlinger could not create record track, status: %d", status); return status; } - sp cblk = record->getCblk(); - if (cblk == 0) { + sp iMem = record->getCblk(); + if (iMem == 0) { ALOGE("Could not get control block"); return NO_INIT; } mAudioRecord.clear(); mAudioRecord = record; mCblkMemory.clear(); - mCblkMemory = cblk; - mCblk = static_cast(cblk->pointer()); - mBuffers = (char*)mCblk + sizeof(audio_track_cblk_t); - mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; - mCblk->waitTimeMs = 0; + mCblkMemory = iMem; + audio_track_cblk_t* cblk = static_cast(iMem->pointer()); + mCblk = cblk; + mBuffers = (char*)cblk + sizeof(audio_track_cblk_t); + cblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; + cblk->waitTimeMs = 0; return NO_ERROR; } @@ -498,12 +502,17 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) } if (!(cblk->flags & CBLK_INVALID)) { mLock.unlock(); + // this condition is in shared memory, so if IAudioRecord and control block + // are replaced due to mediaserver death or IAudioRecord invalidation then + // cv won't be signalled, but fortunately the timeout will limit the wait result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); cblk->lock.unlock(); mLock.lock(); if (!mActive) { return status_t(STOPPED); } + // IAudioRecord may have been re-created while mLock was unlocked + cblk 
= mCblk; cblk->lock.lock(); } if (cblk->flags & CBLK_INVALID) { @@ -521,7 +530,9 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) if (result == DEAD_OBJECT) { android_atomic_or(CBLK_INVALID, &cblk->flags); create_new_record: - result = AudioRecord::restoreRecord_l(cblk); + audio_track_cblk_t* temp = cblk; + result = AudioRecord::restoreRecord_l(temp); + cblk = temp; } if (result != NO_ERROR) { ALOGW("obtainBuffer create Track error %d", result); @@ -749,57 +760,41 @@ bool AudioRecord::processAudioBuffer(const sp& thread) // must be called with mLock and cblk.lock held. Callers must also hold strong references on // the IAudioRecord and IMemory in case they are recreated here. // If the IAudioRecord is successfully restored, the cblk pointer is updated -status_t AudioRecord::restoreRecord_l(audio_track_cblk_t*& cblk) +status_t AudioRecord::restoreRecord_l(audio_track_cblk_t*& refCblk) { status_t result; - if (!(android_atomic_or(CBLK_RESTORING, &cblk->flags) & CBLK_RESTORING)) { - ALOGW("dead IAudioRecord, creating a new one"); - // signal old cblk condition so that other threads waiting for available buffers stop - // waiting now - cblk->cv.broadcast(); - cblk->lock.unlock(); + audio_track_cblk_t* cblk = refCblk; + audio_track_cblk_t* newCblk = cblk; + ALOGW("dead IAudioRecord, creating a new one"); - // if the new IAudioRecord is created, openRecord_l() will modify the - // following member variables: mAudioRecord, mCblkMemory and mCblk. - // It will also delete the strong references on previous IAudioRecord and IMemory - result = openRecord_l(cblk->sampleRate, mFormat, mChannelMask, - mFrameCount, getInput_l()); - if (result == NO_ERROR) { - // callback thread or sync event hasn't changed - result = mAudioRecord->start(AudioSystem::SYNC_EVENT_SAME, 0); - } - if (result != NO_ERROR) { - mActive = false; - } + // signal old cblk condition so that other threads waiting for available buffers stop + // waiting now + cblk->cv.broadcast(); + cblk->lock.unlock(); - // signal old cblk condition for other threads waiting for restore completion - android_atomic_or(CBLK_RESTORED, &cblk->flags); - cblk->cv.broadcast(); - } else { - if (!(cblk->flags & CBLK_RESTORED)) { - ALOGW("dead IAudioRecord, waiting for a new one to be created"); - mLock.unlock(); - result = cblk->cv.waitRelative(cblk->lock, milliseconds(RESTORE_TIMEOUT_MS)); - cblk->lock.unlock(); - mLock.lock(); - } else { - ALOGW("dead IAudioRecord, already restored"); - result = NO_ERROR; - cblk->lock.unlock(); - } - if (result != NO_ERROR || !mActive) { - result = status_t(STOPPED); - } + // if the new IAudioRecord is created, openRecord_l() will modify the + // following member variables: mAudioRecord, mCblkMemory and mCblk. 
+ // It will also delete the strong references on previous IAudioRecord and IMemory + result = openRecord_l(cblk->sampleRate, mFormat, mChannelMask, + mFrameCount, getInput_l()); + if (result == NO_ERROR) { + newCblk = mCblk; + // callback thread or sync event hasn't changed + result = mAudioRecord->start(AudioSystem::SYNC_EVENT_SAME, 0); + } + if (result != NO_ERROR) { + mActive = false; } + ALOGV("restoreRecord_l() status %d mActive %d cblk %p, old cblk %p flags %08x old flags %08x", - result, mActive, mCblk, cblk, mCblk->flags, cblk->flags); + result, mActive, newCblk, cblk, newCblk->flags, cblk->flags); if (result == NO_ERROR) { // from now on we switch to the newly created cblk - cblk = mCblk; + refCblk = newCblk; } - cblk->lock.lock(); + newCblk->lock.lock(); ALOGW_IF(result != NO_ERROR, "restoreRecord_l() error %d", result); -- cgit v1.1 From 9f2016d9adfb4f88fa0bbfcfa5954f79160db595 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 13 Nov 2012 09:58:55 -0800 Subject: Rename TrackBase::mFrameCount to mStepCount This prepares for adding a new field TrackBase::mFrameCount with a different meaning. Change-Id: I6bbe2c59f2a882be57caeec2e2e06f439a0e9e83 --- services/audioflinger/AudioFlinger.cpp | 14 +++++++------- services/audioflinger/AudioFlinger.h | 5 +++-- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index eb20019..9353e70 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -4186,7 +4186,7 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mCblk(NULL), // mBuffer // mBufferEnd - mFrameCount(0), + mStepCount(0), mState(IDLE), mSampleRate(sampleRate), mFormat(format), @@ -4277,7 +4277,7 @@ AudioFlinger::ThreadBase::TrackBase::~TrackBase() void AudioFlinger::ThreadBase::TrackBase::releaseBuffer(AudioBufferProvider::Buffer* buffer) { buffer->raw = NULL; - mFrameCount = buffer->frameCount; + mStepCount = buffer->frameCount; // FIXME See note at getNextBuffer() (void) step(); // ignore return value of step() buffer->frameCount = 0; @@ -4287,7 +4287,7 @@ bool AudioFlinger::ThreadBase::TrackBase::step() { bool result; audio_track_cblk_t* cblk = this->cblk(); - result = cblk->stepServer(mFrameCount, isOut()); + result = cblk->stepServer(mStepCount, isOut()); if (!result) { ALOGV("stepServer failed acquiring cblk mutex"); mStepServerFailed = true; @@ -4435,7 +4435,7 @@ void AudioFlinger::PlaybackThread::Track::destroy() /*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result) { - result.append(" Name Client Type Fmt Chn mask Session mFrCnt fCount S M F SRate " + result.append(" Name Client Type Fmt Chn mask Session StpCnt fCount S M F SRate " "L dB R dB Server User Main buf Aux Buf Flags Underruns\n"); } @@ -4506,7 +4506,7 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) mFormat, mChannelMask, mSessionId, - mFrameCount, + mStepCount, mCblk->frameCount, stateChar, mMute, @@ -5499,7 +5499,7 @@ void AudioFlinger::RecordThread::RecordTrack::stop() /*static*/ void AudioFlinger::RecordThread::RecordTrack::appendDumpHeader(String8& result) { - result.append(" Clien Fmt Chn mask Session Buf S SRate Serv User FrameCount\n"); + result.append(" Clien Fmt Chn mask Session Step S SRate Serv User FrameCount\n"); } void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size) @@ -5509,7 +5509,7 @@ void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size) mFormat, 
mChannelMask, mSessionId, - mFrameCount, + mStepCount, mState, mCblk->sampleRate, mCblk->server, diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 38744d0..8cf58b1 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -453,7 +453,7 @@ private: return mState == TERMINATED; } - bool step(); + bool step(); // mStepCount is an implicit input void reset(); virtual bool isOut() const = 0; // true for Track and TimedTrack, false for RecordTrack, @@ -466,7 +466,8 @@ private: void* mBuffer; // start of track buffer, typically in shared memory void* mBufferEnd; // &mBuffer[mFrameCount * frameSize], where frameSize // is based on mChannelCount and 16-bit samples - uint32_t mFrameCount; + uint32_t mStepCount; // saves AudioBufferProvider::Buffer::frameCount as of + // time of releaseBuffer() for later use by step() // we don't really need a lock for these track_state mState; const uint32_t mSampleRate; // initial sample rate only; for tracks which -- cgit v1.1 From a552d6049ccf674b083d011ce7b8a443a9cd68a4 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 13 Nov 2012 15:01:05 -0800 Subject: Remove deprecated AudioSystem methods Change-Id: I952d504e03af9a1d3e1e0aa379c82dfb00197d9f --- include/media/AudioSystem.h | 6 ------ media/libmedia/AudioSystem.cpp | 10 ---------- 2 files changed, 16 deletions(-) diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index 2218fad..d64ecd4 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -107,12 +107,6 @@ public: audio_stream_type_t stream, uint32_t* latency); - // DEPRECATED - static status_t getOutputSamplingRate(int* samplingRate, int stream = AUDIO_STREAM_DEFAULT); - - // DEPRECATED - static status_t getOutputFrameCount(int* frameCount, int stream = AUDIO_STREAM_DEFAULT); - static bool routedToA2dpOutput(audio_stream_type_t streamType); static status_t getInputBufferSize(uint32_t sampleRate, audio_format_t format, diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 767c452..488edac 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -205,11 +205,6 @@ int AudioSystem::logToLinear(float volume) return volume ? 
100 - int(dBConvertInverse * log(volume) + 0.5) : 0; } -// DEPRECATED -status_t AudioSystem::getOutputSamplingRate(int* samplingRate, int streamType) { - return getOutputSamplingRate(samplingRate, (audio_stream_type_t)streamType); -} - status_t AudioSystem::getOutputSamplingRate(int* samplingRate, audio_stream_type_t streamType) { audio_io_handle_t output; @@ -252,11 +247,6 @@ status_t AudioSystem::getSamplingRate(audio_io_handle_t output, return NO_ERROR; } -// DEPRECATED -status_t AudioSystem::getOutputFrameCount(int* frameCount, int streamType) { - return getOutputFrameCount(frameCount, (audio_stream_type_t)streamType); -} - status_t AudioSystem::getOutputFrameCount(int* frameCount, audio_stream_type_t streamType) { audio_io_handle_t output; -- cgit v1.1 From b26e3e9f2ab0334bff21a4fa4851dbf6e57fba5d Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 14 Nov 2012 08:32:08 -0800 Subject: Fix build warnings Change-Id: Ic43bcca166a529a6431711b05a7fa21849b6a38b --- media/libmedia/IAudioFlinger.cpp | 2 +- services/audioflinger/test-resample.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index bb936ec..55658db 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -106,7 +106,7 @@ public: data.writeInt32(format); data.writeInt32(channelMask); data.writeInt32(frameCount); - track_flags_t lFlags = flags != NULL ? *flags : TRACK_DEFAULT; + track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT; data.writeInt32(lFlags); data.writeStrongBinder(sharedBuffer->asBinder()); data.writeInt32((int32_t) output); diff --git a/services/audioflinger/test-resample.cpp b/services/audioflinger/test-resample.cpp index 3b66530..b082e8c 100644 --- a/services/audioflinger/test-resample.cpp +++ b/services/audioflinger/test-resample.cpp @@ -177,7 +177,7 @@ int main(int argc, char* argv[]) { double t = double(i) / input_freq; double y = sin(M_PI * k * t * t); int16_t yi = floor(y * 32767.0 + 0.5); - for (size_t j=0 ; j Date: Wed, 7 Nov 2012 14:03:00 -0800 Subject: Update audio comments Change-Id: I85d7d2f6381b251db5695202fec75128883a8662 --- include/media/AudioTrack.h | 9 ++++++++- include/private/media/AudioTrackShared.h | 3 ++- media/libmedia/AudioTrack.cpp | 6 +++--- services/audioflinger/AudioFlinger.h | 6 +++++- services/audioflinger/StateQueue.h | 2 +- 5 files changed, 19 insertions(+), 7 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 3504f1f..b82f814 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -422,6 +422,7 @@ public: * After error return: * frameCount 0 * size 0 + * raw undefined * After successful return: * frameCount actual number of frames available, <= number requested * size actual number of bytes available @@ -510,7 +511,13 @@ protected: uint32_t mFrameCount; audio_track_cblk_t* mCblk; // re-load after mLock.unlock() - void* mBuffers; // starting address of buffers in shared memory + + // Starting address of buffers in shared memory. If there is a shared buffer, mBuffers + // is the value of pointer() for the shared buffer, otherwise mBuffers points + // immediately after the control block. This address is for the mapping within client + // address space. AudioFlinger::TrackBase::mBuffer is for the server address space. 
+ void* mBuffers; + audio_format_t mFormat; audio_stream_type_t mStreamType; uint8_t mChannelCount; diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 3063448..bbc5e26 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -54,9 +54,10 @@ struct audio_track_cblk_t uint32_t serverBase; int mPad1; // unused, but preserves cache line alignment + uint32_t frameCount; - // Cache line boundary + // Cache line boundary (32 bytes) uint32_t loopStart; uint32_t loopEnd; // read-only for server, read/write for client diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 4a4759e..daf6d07 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -390,7 +390,7 @@ void AudioTrack::start() } if (cblk->flags & CBLK_INVALID) { audio_track_cblk_t* temp = cblk; - status = restoreTrack_l(temp, true); + status = restoreTrack_l(temp, true /*fromStart*/); cblk = temp; } cblk->lock.unlock(); @@ -988,7 +988,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) android_atomic_or(CBLK_INVALID, &cblk->flags); create_new_track: audio_track_cblk_t* temp = cblk; - result = restoreTrack_l(temp, false); + result = restoreTrack_l(temp, false /*fromStart*/); cblk = temp; } if (result != NO_ERROR) { @@ -1147,7 +1147,7 @@ status_t TimedAudioTrack::allocateTimedBuffer(size_t size, sp* buffer) if (cblk->flags & CBLK_INVALID) { cblk->lock.lock(); audio_track_cblk_t* temp = cblk; - result = restoreTrack_l(temp, false); + result = restoreTrack_l(temp, false /*fromStart*/); cblk = temp; cblk->lock.unlock(); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 8cf58b1..54cf239 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -861,6 +861,9 @@ private: sp sharedBuffer() const { return mSharedBuffer; } + // framesWritten is cumulative, never reset, and is shared all tracks + // audioHalFrames is derived from output latency + // FIXME parameters not needed, could get them from the thread bool presentationComplete(size_t framesWritten, size_t audioHalFrames); public: @@ -893,6 +896,7 @@ private: bool mHasVolumeController; size_t mPresentationCompleteFrames; // number of frames written to the // audio HAL when this track will be fully rendered + // zero means not monitoring private: IAudioFlinger::track_flags_t mFlags; @@ -997,7 +1001,7 @@ private: }; - // playback track + // playback track, used by DuplicatingThread class OutputTrack : public Track { public: diff --git a/services/audioflinger/StateQueue.h b/services/audioflinger/StateQueue.h index c9b5111..e33b3c6 100644 --- a/services/audioflinger/StateQueue.h +++ b/services/audioflinger/StateQueue.h @@ -174,7 +174,7 @@ public: #endif private: - static const unsigned kN = 4; // values != 4 are not supported by this code + static const unsigned kN = 4; // values < 4 are not supported by this code T mStates[kN]; // written by mutator, read by observer // "volatile" is meaningless with SMP, but here it indicates that we're using atomic ops -- cgit v1.1 From 3b16c766d1ae2cfd8487e8ffb2b23936fc0a8e17 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 14 Nov 2012 08:44:39 -0800 Subject: Use uint32_t for sample rate Change-Id: Ie240b48fb54b08359f69ecd4e5f8bda3d15cbe80 --- include/media/AudioSystem.h | 6 ++--- include/media/AudioTrack.h | 4 +++- include/media/IAudioFlinger.h | 2 +- include/media/ToneGenerator.h | 2 +- 
libvideoeditor/lvpp/VideoEditorPlayer.cpp | 2 +- media/libmedia/AudioRecord.cpp | 4 ++-- media/libmedia/AudioSystem.cpp | 12 +++++----- media/libmedia/AudioTrack.cpp | 18 +++++++-------- media/libmedia/IAudioFlinger.cpp | 2 +- media/libmedia/SoundPool.cpp | 2 +- media/libmediaplayerservice/MediaPlayerService.cpp | 2 +- services/audioflinger/AudioFlinger.cpp | 26 +++++++++++----------- services/audioflinger/AudioFlinger.h | 4 ++-- 13 files changed, 44 insertions(+), 42 deletions(-) diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index d64ecd4..33078bb 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -87,7 +87,7 @@ public: static float linearToLog(int volume); static int logToLinear(float volume); - static status_t getOutputSamplingRate(int* samplingRate, + static status_t getOutputSamplingRate(uint32_t* samplingRate, audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); static status_t getOutputFrameCount(int* frameCount, audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); @@ -95,7 +95,7 @@ public: audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); static status_t getSamplingRate(audio_io_handle_t output, audio_stream_type_t streamType, - int* samplingRate); + uint32_t* samplingRate); // returns the number of frames per audio HAL write buffer. Corresponds to // audio_stream->get_buffer_size()/audio_stream_frame_size() static status_t getFrameCount(audio_io_handle_t output, @@ -237,7 +237,7 @@ public: static const sp& get_audio_policy_service(); // helpers for android.media.AudioManager.getProperty(), see description there for meaning - static int32_t getPrimaryOutputSamplingRate(); + static uint32_t getPrimaryOutputSamplingRate(); static int32_t getPrimaryOutputFrameCount(); // ---------------------------------------------------------------------------- diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index b82f814..99d583d 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -282,7 +282,9 @@ public: /* Set sample rate for this track in Hz, mostly used for games' sound effects */ - status_t setSampleRate(int sampleRate); + status_t setSampleRate(uint32_t sampleRate); + + /* Return current sample rate in Hz, or 0 if unknown */ uint32_t getSampleRate() const; /* Enables looping and sets the start and end points of looping. diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index 0aa48c6..5fd5044 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -192,7 +192,7 @@ public: // helpers for android.media.AudioManager.getProperty(), see description there for meaning // FIXME move these APIs to AudioPolicy to permit a more accurate implementation // that looks on primary device for a stream with fast flag, primary flag, or first one. 
- virtual int32_t getPrimaryOutputSamplingRate() = 0; + virtual uint32_t getPrimaryOutputSamplingRate() = 0; virtual int32_t getPrimaryOutputFrameCount() = 0; }; diff --git a/include/media/ToneGenerator.h b/include/media/ToneGenerator.h index 29c8fd9..0529bcd 100644 --- a/include/media/ToneGenerator.h +++ b/include/media/ToneGenerator.h @@ -263,7 +263,7 @@ private: unsigned short mLoopCounter; // Current tone loopback count - int mSamplingRate; // AudioFlinger Sampling rate + uint32_t mSamplingRate; // AudioFlinger Sampling rate AudioTrack *mpAudioTrack; // Pointer to audio track used for playback Mutex mLock; // Mutex to control concurent access to ToneGenerator object from audio callback and application API Mutex mCbkCondLock; // Mutex associated to mWaitCbkCond diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp index fc9fb49..d34b6d3 100755 --- a/libvideoeditor/lvpp/VideoEditorPlayer.cpp +++ b/libvideoeditor/lvpp/VideoEditorPlayer.cpp @@ -406,7 +406,7 @@ status_t VideoEditorPlayer::VeAudioOutput::open( } ALOGV("open(%u, %d, %d, %d)", sampleRate, channelCount, format, bufferCount); if (mTrack) close(); - int afSampleRate; + uint32_t afSampleRate; int afFrameCount; int frameCount; diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 062f546..8f45a57 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -54,7 +54,7 @@ status_t AudioRecord::getMinFrameCount( } if (size == 0) { - ALOGE("Unsupported configuration: sampleRate %d, format %d, channelMask %#x", + ALOGE("Unsupported configuration: sampleRate %u, format %d, channelMask %#x", sampleRate, format, channelMask); return BAD_VALUE; } @@ -127,7 +127,7 @@ status_t AudioRecord::set( int sessionId) { - ALOGV("set(): sampleRate %d, channelMask %#x, frameCount %d",sampleRate, channelMask, + ALOGV("set(): sampleRate %u, channelMask %#x, frameCount %d", sampleRate, channelMask, frameCount); AutoMutex lock(mLock); diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 488edac..f3b74a2 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -205,7 +205,7 @@ int AudioSystem::logToLinear(float volume) return volume ? 
100 - int(dBConvertInverse * log(volume) + 0.5) : 0; } -status_t AudioSystem::getOutputSamplingRate(int* samplingRate, audio_stream_type_t streamType) +status_t AudioSystem::getOutputSamplingRate(uint32_t* samplingRate, audio_stream_type_t streamType) { audio_io_handle_t output; @@ -223,7 +223,7 @@ status_t AudioSystem::getOutputSamplingRate(int* samplingRate, audio_stream_type status_t AudioSystem::getSamplingRate(audio_io_handle_t output, audio_stream_type_t streamType, - int* samplingRate) + uint32_t* samplingRate) { OutputDescriptor *outputDesc; @@ -241,7 +241,7 @@ status_t AudioSystem::getSamplingRate(audio_io_handle_t output, gLock.unlock(); } - ALOGV("getSamplingRate() streamType %d, output %d, sampling rate %d", streamType, output, + ALOGV("getSamplingRate() streamType %d, output %d, sampling rate %u", streamType, output, *samplingRate); return NO_ERROR; @@ -442,7 +442,7 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle OutputDescriptor *outputDesc = new OutputDescriptor(*desc); gOutputs.add(ioHandle, outputDesc); - ALOGV("ioConfigChanged() new output samplingRate %d, format %d channels %#x frameCount %d " + ALOGV("ioConfigChanged() new output samplingRate %u, format %d channels %#x frameCount %d " "latency %d", outputDesc->samplingRate, outputDesc->format, outputDesc->channels, outputDesc->frameCount, outputDesc->latency); @@ -466,7 +466,7 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle if (param2 == NULL) break; desc = (const OutputDescriptor *)param2; - ALOGV("ioConfigChanged() new config for output %d samplingRate %d, format %d channels %#x " + ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %d channels %#x " "frameCount %d latency %d", ioHandle, desc->samplingRate, desc->format, desc->channels, desc->frameCount, desc->latency); @@ -740,7 +740,7 @@ status_t AudioSystem::isSourceActive(audio_source_t stream, bool* state) return NO_ERROR; } -int32_t AudioSystem::getPrimaryOutputSamplingRate() +uint32_t AudioSystem::getPrimaryOutputSamplingRate() { const sp& af = AudioSystem::get_audio_flinger(); if (af == 0) return 0; diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index daf6d07..7480807 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -65,7 +65,7 @@ status_t AudioTrack::getMinFrameCount( // audio_format_t format // audio_channel_mask_t channelMask // audio_output_flags_t flags - int afSampleRate; + uint32_t afSampleRate; if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) { return NO_INIT; } @@ -193,7 +193,7 @@ status_t AudioTrack::set( } if (sampleRate == 0) { - int afSampleRate; + uint32_t afSampleRate; if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) { return NO_INIT; } @@ -535,9 +535,9 @@ void AudioTrack::getAuxEffectSendLevel(float* level) const } } -status_t AudioTrack::setSampleRate(int rate) +status_t AudioTrack::setSampleRate(uint32_t rate) { - int afSamplingRate; + uint32_t afSamplingRate; if (mIsTimed) { return INVALID_OPERATION; @@ -547,7 +547,7 @@ status_t AudioTrack::setSampleRate(int rate) return NO_INIT; } // Resampler implementation limits input sampling rate to 2 x output sampling rate. 
- if (rate <= 0 || rate > afSamplingRate*2 ) return BAD_VALUE; + if (rate == 0 || rate > afSamplingRate*2 ) return BAD_VALUE; AutoMutex lock(mLock); mCblk->sampleRate = rate; @@ -557,7 +557,7 @@ status_t AudioTrack::setSampleRate(int rate) uint32_t AudioTrack::getSampleRate() const { if (mIsTimed) { - return INVALID_OPERATION; + return 0; } AutoMutex lock(mLock); @@ -802,7 +802,7 @@ status_t AudioTrack::createTrack_l( } else if (!(flags & AUDIO_OUTPUT_FLAG_FAST)) { // FIXME move these calculations and associated checks to server - int afSampleRate; + uint32_t afSampleRate; if (AudioSystem::getSamplingRate(output, streamType, &afSampleRate) != NO_ERROR) { return NO_INIT; } @@ -816,7 +816,7 @@ status_t AudioTrack::createTrack_l( if (minBufCount < 2) minBufCount = 2; int minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate; - ALOGV("minFrameCount: %d, afFrameCount=%d, minBufCount=%d, sampleRate=%d, afSampleRate=%d" + ALOGV("minFrameCount: %d, afFrameCount=%d, minBufCount=%d, sampleRate=%u, afSampleRate=%u" ", afLatency=%d", minFrameCount, afFrameCount, minBufCount, sampleRate, afSampleRate, afLatency); @@ -1423,7 +1423,7 @@ status_t AudioTrack::dump(int fd, const Vector& args) const snprintf(buffer, 255, " format(%d), channel count(%d), frame count(%d)\n", mFormat, mChannelCount, cblk->frameCount); result.append(buffer); - snprintf(buffer, 255, " sample rate(%d), status(%d), muted(%d)\n", + snprintf(buffer, 255, " sample rate(%u), status(%d), muted(%d)\n", (cblk == 0) ? 0 : cblk->sampleRate, mStatus, mMuted); result.append(buffer); snprintf(buffer, 255, " active(%d), latency (%d)\n", mActive, mLatency); diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index 55658db..0eeb6d9 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -695,7 +695,7 @@ public: return (audio_module_handle_t) reply.readInt32(); } - virtual int32_t getPrimaryOutputSamplingRate() + virtual uint32_t getPrimaryOutputSamplingRate() { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index abc8899..b321e92 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -569,7 +569,7 @@ void SoundChannel::play(const sp& sample, int nextChannelID, float leftV // initialize track int afFrameCount; - int afSampleRate; + uint32_t afSampleRate; audio_stream_type_t streamType = mSoundPool->streamType(); if (AudioSystem::getOutputFrameCount(&afFrameCount, streamType) != NO_ERROR) { afFrameCount = kDefaultFrameCount; diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index 9bedff1..769b322 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -1387,7 +1387,7 @@ status_t MediaPlayerService::AudioOutput::open( } ALOGV("open(%u, %d, 0x%x, %d, %d, %d)", sampleRate, channelCount, channelMask, format, bufferCount, mSessionId); - int afSampleRate; + uint32_t afSampleRate; int afFrameCount; uint32_t frameCount; diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 9353e70..6406b6c 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1291,7 +1291,7 @@ void AudioFlinger::ThreadBase::dumpBase(int fd, const Vector& args) result.append(buffer); snprintf(buffer, SIZE, "standby: %d\n", mStandby); result.append(buffer); - 
snprintf(buffer, SIZE, "Sample rate: %d\n", mSampleRate); + snprintf(buffer, SIZE, "Sample rate: %u\n", mSampleRate); result.append(buffer); snprintf(buffer, SIZE, "HAL frame count: %d\n", mFrameCount); result.append(buffer); @@ -1776,7 +1776,7 @@ sp AudioFlinger::PlaybackThread::createTrac frameCount, mFrameCount); } else { ALOGV("AUDIO_OUTPUT_FLAG_FAST denied: isTimed=%d sharedBuffer=%p frameCount=%d " - "mFrameCount=%d format=%d isLinear=%d channelMask=%#x sampleRate=%d mSampleRate=%d " + "mFrameCount=%d format=%d isLinear=%d channelMask=%#x sampleRate=%u mSampleRate=%u " "hasFastMixer=%d tid=%d fastTrackAvailMask=%#x", isTimed, sharedBuffer.get(), frameCount, mFrameCount, format, audio_is_linear_pcm(format), @@ -1801,7 +1801,7 @@ sp AudioFlinger::PlaybackThread::createTrac if (mType == DIRECT) { if ((format & AUDIO_FORMAT_MAIN_MASK) == AUDIO_FORMAT_PCM) { if (sampleRate != mSampleRate || format != mFormat || channelMask != mChannelMask) { - ALOGE("createTrack_l() Bad parameter: sampleRate %d format %d, channelMask 0x%08x " + ALOGE("createTrack_l() Bad parameter: sampleRate %u format %d, channelMask 0x%08x " "for output %p with format %d", sampleRate, format, channelMask, mOutput, mFormat); lStatus = BAD_VALUE; @@ -1811,7 +1811,7 @@ sp AudioFlinger::PlaybackThread::createTrac } else { // Resampler implementation limits input sampling rate to 2 x output sampling rate. if (sampleRate > mSampleRate*2) { - ALOGE("Sample rate out of range: %d mSampleRate %d", sampleRate, mSampleRate); + ALOGE("Sample rate out of range: %u mSampleRate %u", sampleRate, mSampleRate); lStatus = BAD_VALUE; goto Exit; } @@ -2280,7 +2280,7 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud // mNormalSink below { ALOGV("MixerThread() id=%d device=%#x type=%d", id, device, type); - ALOGV("mSampleRate=%d, mChannelMask=%#x, mChannelCount=%d, mFormat=%d, mFrameSize=%d, " + ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%d, mFormat=%d, mFrameSize=%d, " "mFrameCount=%d, mNormalFrameCount=%d", mSampleRate, mChannelMask, mChannelCount, mFormat, mFrameSize, mFrameCount, mNormalFrameCount); @@ -3126,7 +3126,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac uint32_t minFrames = 1; if ((track->sharedBuffer() == 0) && !track->isStopped() && !track->isPausing() && (mMixerStatusIgnoringFastTracks == MIXER_TRACKS_READY)) { - if (t->sampleRate() == (int)mSampleRate) { + if (t->sampleRate() == mSampleRate) { minFrames = mNormalFrameCount; } else { // +1 for rounding and +1 for additional sample needed for interpolation @@ -3624,7 +3624,7 @@ void AudioFlinger::dumpTee(int fd, const sp& source, audio_io_hand NBAIO_Format format = teeSource->format(); unsigned channelCount = Format_channelCount(format); ALOG_ASSERT(channelCount <= FCC_2); - unsigned sampleRate = Format_sampleRate(format); + uint32_t sampleRate = Format_sampleRate(format); wavHeader[22] = channelCount; // number of channels wavHeader[24] = sampleRate; // sample rate wavHeader[25] = sampleRate >> 8; @@ -4306,8 +4306,8 @@ void AudioFlinger::ThreadBase::TrackBase::reset() { ALOGV("TrackBase::reset"); } -int AudioFlinger::ThreadBase::TrackBase::sampleRate() const { - return (int)mCblk->sampleRate; +uint32_t AudioFlinger::ThreadBase::TrackBase::sampleRate() const { + return mCblk->sampleRate; } void* AudioFlinger::ThreadBase::TrackBase::getBuffer(uint32_t offset, uint32_t frames) const { @@ -5541,7 +5541,7 @@ AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( mOutBuffer.frameCount = 0; 
playbackThread->mTracks.add(this); ALOGV("OutputTrack constructor mCblk %p, mBuffer %p, mCblk->buffers %p, " \ - "mCblk->frameCount %d, mCblk->sampleRate %d, mChannelMask 0x%08x mBufferEnd %p", + "mCblk->frameCount %d, mCblk->sampleRate %u, mChannelMask 0x%08x mBufferEnd %p", mCblk, mBuffer, mCblk->buffers, mCblk->frameCount, mCblk->sampleRate, mChannelMask, mBufferEnd); } else { @@ -6558,7 +6558,7 @@ void AudioFlinger::RecordThread::dumpInternals(int fd, const Vector& a result.append(buffer); snprintf(buffer, SIZE, "Out channel count: %d\n", mReqChannelCount); result.append(buffer); - snprintf(buffer, SIZE, "Out sample rate: %d\n", mReqSampleRate); + snprintf(buffer, SIZE, "Out sample rate: %u\n", mReqSampleRate); result.append(buffer); } else { result.append("No active record client\n"); @@ -6653,7 +6653,7 @@ bool AudioFlinger::RecordThread::checkForNewParameters_l() AudioParameter param = AudioParameter(keyValuePair); int value; audio_format_t reqFormat = mFormat; - int reqSamplingRate = mReqSampleRate; + uint32_t reqSamplingRate = mReqSampleRate; int reqChannelCount = mReqChannelCount; if (param.getInt(String8(AudioParameter::keySamplingRate), value) == NO_ERROR) { @@ -6987,7 +6987,7 @@ audio_module_handle_t AudioFlinger::loadHwModule_l(const char *name) // ---------------------------------------------------------------------------- -int32_t AudioFlinger::getPrimaryOutputSamplingRate() +uint32_t AudioFlinger::getPrimaryOutputSamplingRate() { Mutex::Autolock _l(mLock); PlaybackThread *thread = primaryPlaybackThread_l(); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 54cf239..8816929 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -207,7 +207,7 @@ public: virtual audio_module_handle_t loadHwModule(const char *name); - virtual int32_t getPrimaryOutputSamplingRate(); + virtual uint32_t getPrimaryOutputSamplingRate(); virtual int32_t getPrimaryOutputFrameCount(); virtual status_t onTransact( @@ -423,7 +423,7 @@ private: audio_channel_mask_t channelMask() const { return mChannelMask; } - int sampleRate() const; // FIXME inline after cblk sr moved + uint32_t sampleRate() const; // FIXME inline after cblk sr moved // Return a pointer to the start of a contiguous slice of the track buffer. // Parameter 'offset' is the requested start position, expressed in -- cgit v1.1 From 60a839204713e0f8258d082af83262b1eb33a6c3 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 21 Jun 2012 12:56:37 -0700 Subject: Clean up frame size in AudioTrack and AudioFlinger TrackBase::mFrameSize, mChannelMask, and mChannelCount are now const. Use TrackBase::mFrameSize instead of re-calculating frame size. AudioFlinger only sees 16-bit PCM format, conversion from 8-bit is now entirely on the client side. Previously a small part of the responsibility was on server side also. size_t is unsigned, so use %u in logs. Fix theoretical bug where TrackBase constructor was over-allocating space for non-linear AudioTrack or 8-bit PCM AudioRecord (probably benign). 
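For reference, the sizing rule this change consolidates can be sketched on its own: for linear PCM a frame is channelCount times bytes-per-sample wide (8-bit client data is already promoted to 16-bit before it reaches the server), otherwise one byte, and the shared region is one control block plus frameCount frames only when the client did not pass in a shared buffer. The snippet below is a minimal standalone illustration of that arithmetic, not the AudioFlinger code itself; the enum and helper names are invented for the sketch.

    // Hedged sketch of the frame-size / allocation arithmetic described above.
    #include <cstddef>
    #include <cstdint>

    enum SketchFormat { PCM_8_BIT, PCM_16_BIT, NON_LINEAR };  // stand-in for audio_format_t

    static size_t frameSize(SketchFormat fmt, unsigned channelCount) {
        // Linear PCM: one sample per channel; anything else is treated as a byte stream.
        switch (fmt) {
        case PCM_16_BIT: return channelCount * sizeof(int16_t);
        case PCM_8_BIT:  return channelCount * sizeof(int8_t);
        default:         return sizeof(int8_t);
        }
    }

    static size_t sharedRegionSize(size_t cblkSize, size_t frameCount,
                                   size_t frameSz, bool hasSharedBuffer) {
        // The control block is always allocated; audio data follows it only when
        // the client did not supply its own shared buffer.
        return hasSharedBuffer ? cblkSize : cblkSize + frameCount * frameSz;
    }

    // e.g. frameSize(PCM_16_BIT, 2) == 4 bytes, so 1024 frames after a hypothetical
    // 128-byte control block would need 128 + 4096 bytes when no shared buffer is used.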
Change-Id: I7cbbba0bf4dba29ea751d8af341ab8e5cbbdc206 --- include/media/AudioTrack.h | 2 +- media/libmedia/AudioTrack.cpp | 4 +++- services/audioflinger/AudioFlinger.cpp | 44 +++++++++++++++------------------- services/audioflinger/AudioFlinger.h | 7 +++--- 4 files changed, 26 insertions(+), 31 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 99d583d..6fd1b9e 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -520,7 +520,7 @@ protected: // address space. AudioFlinger::TrackBase::mBuffer is for the server address space. void* mBuffers; - audio_format_t mFormat; + audio_format_t mFormat; // as requested by client, not forced to 16-bit audio_stream_type_t mStreamType; uint8_t mChannelCount; uint8_t mMuted; diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 7480807..5fb36ee 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -858,7 +858,9 @@ status_t AudioTrack::createTrack_l( sp track = audioFlinger->createTrack(getpid(), streamType, sampleRate, - format, + // AudioFlinger only sees 16-bit PCM + format == AUDIO_FORMAT_PCM_8_BIT ? + AUDIO_FORMAT_PCM_16_BIT : format, channelMask, frameCount, &trackFlags, diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 6406b6c..10f4410 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -472,6 +472,14 @@ sp AudioFlinger::createTrack( goto Exit; } + // client is responsible for conversion of 8-bit PCM to 16-bit PCM, + // and we don't yet support 8.24 or 32-bit PCM + if (audio_is_linear_pcm(format) && format != AUDIO_FORMAT_PCM_16_BIT) { + ALOGE("createTrack() invalid format %d", format); + lStatus = BAD_VALUE; + goto Exit; + } + { Mutex::Autolock _l(mLock); PlaybackThread *thread = checkPlaybackThread_l(output); @@ -2280,7 +2288,7 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud // mNormalSink below { ALOGV("MixerThread() id=%d device=%#x type=%d", id, device, type); - ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%d, mFormat=%d, mFrameSize=%d, " + ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%d, mFormat=%d, mFrameSize=%u, " "mFrameCount=%d, mNormalFrameCount=%d", mSampleRate, mChannelMask, mChannelCount, mFormat, mFrameSize, mFrameCount, mNormalFrameCount); @@ -4190,11 +4198,12 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mState(IDLE), mSampleRate(sampleRate), mFormat(format), - mFrameSize(0), // will be set to correct value in constructor + mChannelMask(channelMask), + mChannelCount(popcount(channelMask)), + mFrameSize(audio_is_linear_pcm(format) ? 
+ mChannelCount * audio_bytes_per_sample(format) : sizeof(int8_t)), mStepServerFailed(false), mSessionId(sessionId) - // mChannelCount - // mChannelMask { // client == 0 implies sharedBuffer == 0 ALOG_ASSERT(!(client == 0 && sharedBuffer != 0)); @@ -4204,8 +4213,7 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( // ALOGD("Creating track with %d buffers @ %d bytes", bufferCount, bufferSize); size_t size = sizeof(audio_track_cblk_t); - uint8_t channelCount = popcount(channelMask); - size_t bufferSize = frameCount*channelCount*sizeof(int16_t); + size_t bufferSize = frameCount * mFrameSize; if (sharedBuffer == 0) { size += bufferSize; } @@ -4236,11 +4244,9 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( // mCblk->server = 0xffff0000; // mCblk->userBase = 0xffff0000; // mCblk->serverBase = 0xffff0000; - mChannelCount = channelCount; - mChannelMask = channelMask; if (sharedBuffer == 0) { mBuffer = (char*)mCblk + sizeof(audio_track_cblk_t); - memset(mBuffer, 0, frameCount*channelCount*sizeof(int16_t)); + memset(mBuffer, 0, bufferSize); // Force underrun condition to avoid false underrun callback until first data is // written to buffer (other flags are cleared) mCblk->flags = CBLK_UNDERRUN; @@ -4312,17 +4318,16 @@ uint32_t AudioFlinger::ThreadBase::TrackBase::sampleRate() const { void* AudioFlinger::ThreadBase::TrackBase::getBuffer(uint32_t offset, uint32_t frames) const { audio_track_cblk_t* cblk = this->cblk(); - size_t frameSize = mFrameSize; - int8_t *bufferStart = (int8_t *)mBuffer + (offset-cblk->serverBase)*frameSize; - int8_t *bufferEnd = bufferStart + frames * frameSize; + int8_t *bufferStart = (int8_t *)mBuffer + (offset-cblk->serverBase) * mFrameSize; + int8_t *bufferEnd = bufferStart + frames * mFrameSize; // Check validity of returned pointer in case the track control block would have been corrupted. ALOG_ASSERT(!(bufferStart < mBuffer || bufferStart > bufferEnd || bufferEnd > mBufferEnd), "TrackBase::getBuffer buffer out of range:\n" " start: %p, end %p , mBuffer %p mBufferEnd %p\n" - " server %u, serverBase %u, user %u, userBase %u, frameSize %d", + " server %u, serverBase %u, user %u, userBase %u, frameSize %u", bufferStart, bufferEnd, mBuffer, mBufferEnd, - cblk->server, cblk->serverBase, cblk->user, cblk->userBase, frameSize); + cblk->server, cblk->serverBase, cblk->user, cblk->userBase, mFrameSize); return bufferStart; } @@ -4364,10 +4369,6 @@ AudioFlinger::PlaybackThread::Track::Track( mUnderrunCount(0), mCachedVolume(1.0) { - // NOTE: frame size for 8 bit PCM data is based on a sample size of - // 16 bit because data is converted to 16 bit before being stored in buffer by AudioTrack - mFrameSize = audio_is_linear_pcm(format) ? 
mChannelCount * sizeof(int16_t) : - sizeof(uint8_t); if (mCblk != NULL) { // to avoid leaking a track name, do not allocate one unless there is an mCblk mName = thread->getTrackName_l(channelMask, sessionId); @@ -5411,13 +5412,6 @@ AudioFlinger::RecordThread::RecordTrack::RecordTrack( mOverflow(false) { ALOGV("RecordTrack constructor, size %d", (int)mBufferEnd - (int)mBuffer); - if (format == AUDIO_FORMAT_PCM_16_BIT) { - mFrameSize = mChannelCount * sizeof(int16_t); - } else if (format == AUDIO_FORMAT_PCM_8_BIT) { - mFrameSize = mChannelCount * sizeof(int8_t); - } else { - mFrameSize = sizeof(int8_t); - } } AudioFlinger::RecordThread::RecordTrack::~RecordTrack() diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 8816929..9ddfe28 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -473,14 +473,13 @@ private: const uint32_t mSampleRate; // initial sample rate only; for tracks which // support dynamic rates, the current value is in control block const audio_format_t mFormat; - size_t mFrameSize; // AudioFlinger's view of frame size in shared memory, + const audio_channel_mask_t mChannelMask; + const uint8_t mChannelCount; + const size_t mFrameSize; // AudioFlinger's view of frame size in shared memory, // where for AudioTrack (but not AudioRecord), // 8-bit PCM samples are stored as 16-bit - // FIXME should be const bool mStepServerFailed; const int mSessionId; - uint8_t mChannelCount; - audio_channel_mask_t mChannelMask; Vector < sp >mSyncEvents; }; -- cgit v1.1 From 5ce181568da90c78ba7fad3e084c8479041545df Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 14 Nov 2012 15:24:53 -0800 Subject: The length information of the chunks making up vorbis codec specific info are "Xiph-style-lacing encoded" instead of individual bytes. 
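For readers unfamiliar with the term: Xiph-style lacing encodes each length as a run of 0xff bytes terminated by a byte below 0xff, and the length is the sum of all of those bytes. The fragment below is a small self-contained sketch of that decoding rule, mirroring the bounds-checked loop added in this patch but not taken from MatroskaExtractor itself; the function name is invented for the sketch.

    // Decode one Xiph-style-laced length starting at *offset; returns false if the
    // run of bytes is truncated, which the extractor treats as malformed data.
    #include <cstddef>
    #include <cstdint>

    static bool readXiphLacedLength(const uint8_t* data, size_t size,
                                    size_t* offset, size_t* length) {
        size_t len = 0;
        while (*offset < size && data[*offset] == 0xff) {
            len += 0xff;            // every 0xff byte contributes 255 and continues the run
            ++*offset;
        }
        if (*offset >= size) {
            return false;           // ran off the end of codecPrivate
        }
        len += data[(*offset)++];   // terminating byte (< 0xff) ends the run
        *length = len;
        return true;
    }

    // Example: the byte sequence ff ff 7a encodes 0xff + 0xff + 0x7a = 632.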
Change-Id: Ic1274a5bd8f082197bae6831da04002762a920c5 related-to-bug: 7401329 --- media/libstagefright/codecs/on2/dec/SoftVPX.cpp | 2 +- .../libstagefright/matroska/MatroskaExtractor.cpp | 74 ++++++++++++++++++---- 2 files changed, 61 insertions(+), 15 deletions(-) diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp index bf9ab3a..a400b4c 100644 --- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp @@ -66,7 +66,7 @@ void SoftVPX::initPorts() { def.eDir = OMX_DirInput; def.nBufferCountMin = kNumBuffers; def.nBufferCountActual = def.nBufferCountMin; - def.nBufferSize = 256 * 1024; + def.nBufferSize = 768 * 1024; def.bEnabled = OMX_TRUE; def.bPopulated = OMX_FALSE; def.eDomain = OMX_PortDomainVideo; diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp index 8f7d12b..7fc7037 100644 --- a/media/libstagefright/matroska/MatroskaExtractor.cpp +++ b/media/libstagefright/matroska/MatroskaExtractor.cpp @@ -758,31 +758,69 @@ static void addESDSFromCodecPrivate( esds = NULL; } -void addVorbisCodecInfo( +status_t addVorbisCodecInfo( const sp &meta, const void *_codecPrivate, size_t codecPrivateSize) { - // printf("vorbis private data follows:\n"); // hexdump(_codecPrivate, codecPrivateSize); - CHECK(codecPrivateSize >= 3); + if (codecPrivateSize < 1) { + return ERROR_MALFORMED; + } const uint8_t *codecPrivate = (const uint8_t *)_codecPrivate; - CHECK(codecPrivate[0] == 0x02); - size_t len1 = codecPrivate[1]; - size_t len2 = codecPrivate[2]; + if (codecPrivate[0] != 0x02) { + return ERROR_MALFORMED; + } - CHECK(codecPrivateSize > 3 + len1 + len2); + // codecInfo starts with two lengths, len1 and len2, that are + // "Xiph-style-lacing encoded"... 
- CHECK(codecPrivate[3] == 0x01); - meta->setData(kKeyVorbisInfo, 0, &codecPrivate[3], len1); + size_t offset = 1; + size_t len1 = 0; + while (offset < codecPrivateSize && codecPrivate[offset] == 0xff) { + len1 += 0xff; + ++offset; + } + if (offset >= codecPrivateSize) { + return ERROR_MALFORMED; + } + len1 += codecPrivate[offset++]; - CHECK(codecPrivate[len1 + 3] == 0x03); + size_t len2 = 0; + while (offset < codecPrivateSize && codecPrivate[offset] == 0xff) { + len2 += 0xff; + ++offset; + } + if (offset >= codecPrivateSize) { + return ERROR_MALFORMED; + } + len2 += codecPrivate[offset++]; + + if (codecPrivateSize < offset + len1 + len2) { + return ERROR_MALFORMED; + } + + if (codecPrivate[offset] != 0x01) { + return ERROR_MALFORMED; + } + meta->setData(kKeyVorbisInfo, 0, &codecPrivate[offset], len1); + + offset += len1; + if (codecPrivate[offset] != 0x03) { + return ERROR_MALFORMED; + } + + offset += len2; + if (codecPrivate[offset] != 0x05) { + return ERROR_MALFORMED; + } - CHECK(codecPrivate[len1 + len2 + 3] == 0x05); meta->setData( - kKeyVorbisBooks, 0, &codecPrivate[len1 + len2 + 3], - codecPrivateSize - len1 - len2 - 3); + kKeyVorbisBooks, 0, &codecPrivate[offset], + codecPrivateSize - offset); + + return OK; } void MatroskaExtractor::addTracks() { @@ -809,6 +847,8 @@ void MatroskaExtractor::addTracks() { sp meta = new MetaData; + status_t err = OK; + switch (track->GetType()) { case VIDEO_TRACK: { @@ -855,7 +895,8 @@ void MatroskaExtractor::addTracks() { } else if (!strcmp("A_VORBIS", codecID)) { meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS); - addVorbisCodecInfo(meta, codecPrivate, codecPrivateSize); + err = addVorbisCodecInfo( + meta, codecPrivate, codecPrivateSize); } else if (!strcmp("A_MPEG/L3", codecID)) { meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG); } else { @@ -872,6 +913,11 @@ void MatroskaExtractor::addTracks() { continue; } + if (err != OK) { + ALOGE("skipping track, codec specific data was malformed."); + continue; + } + long long durationNs = mSegment->GetDuration(); meta->setInt64(kKeyDuration, (durationNs + 500) / 1000); -- cgit v1.1 From 22d00b70516f108c3351a29c95d8ba639a8ed520 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 15 Nov 2012 11:16:30 -0800 Subject: wfd sink update. Change-Id: Ib4e41ec1524d045699543536acdddc9a243db741 --- media/libstagefright/wifi-display/sink/TunnelRenderer.cpp | 5 ++++- media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp | 7 ++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp index bc35aef..b913124 100644 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp @@ -271,6 +271,7 @@ sp TunnelRenderer::dequeueBuffer() { if (mFirstFailedAttemptUs + 50000ll > ALooper::GetNowUs()) { // We're willing to wait a little while to get the right packet. 
+#if 0 if (!mRequestedRetransmission) { ALOGI("requesting retransmission of seqNo %d", (mLastDequeuedExtSeqNo + 1) & 0xffff); @@ -280,7 +281,9 @@ sp TunnelRenderer::dequeueBuffer() { notify->post(); mRequestedRetransmission = true; - } else { + } else +#endif + { ALOGI("still waiting for the correct packet to arrive."); } diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index fcd20d4..c3e0470 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -475,9 +475,10 @@ void WifiDisplaySink::onGetParameterRequest( int32_t cseq, const sp &data) { AString body = - "wfd_video_formats: xxx\r\n" - "wfd_audio_codecs: xxx\r\n" - "wfd_client_rtp_ports: RTP/AVP/UDP;unicast xxx 0 mode=play\r\n"; + "wfd_video_formats: " + "28 00 02 02 FFFFFFFF 0000000 00000000 00 0000 0000 00 none none\r\n" + "wfd_audio_codecs: AAC 0000000F 00\r\n" + "wfd_client_rtp_ports: RTP/AVP/UDP;unicast 19000 0 mode=play\r\n"; AString response = "RTSP/1.0 200 OK\r\n"; AppendCommonResponse(&response, cseq); -- cgit v1.1 From 4bd7e5436f9c308503d72e80804fb5637fda1584 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 15 Nov 2012 14:13:16 -0800 Subject: Static AudioTrack plays twice initially Bug: 7528721 Change-Id: I10bc16a26f33dba6572b730a170cb3bf00e68e30 --- services/audioflinger/AudioFlinger.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 10f4410..5bb599b 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -4710,7 +4710,7 @@ void AudioFlinger::PlaybackThread::Track::flush() if (thread != 0) { Mutex::Autolock _l(thread->mLock); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && - mState != PAUSING) { + mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; } // No point remaining in PAUSED state after a flush => go to -- cgit v1.1 From e33054eb968cbf8ccaee1b0ff0301403902deed6 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 14 Nov 2012 12:54:39 -0800 Subject: Use size_t for frame counts Also fix typo: bufferCount should be frameCount. 
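The counts being retyped here feed the client buffer sizing done at track creation: the client needs enough whole HAL buffers to cover the output latency (never fewer than two), rescaled from the HAL sample rate to the client sample rate. A rough standalone sketch of that calculation with size_t throughout follows; it is a simplified illustration, not the createTrack_l code, and the numbers in the usage comment are made up.

    // Minimum client frame count, as used when sizing a track's buffer:
    //   minBufCount   = output latency expressed in whole HAL buffers (at least 2)
    //   minFrameCount = that many HAL buffers, rescaled to the client sample rate
    #include <cstddef>
    #include <cstdint>

    static size_t minClientFrameCount(uint32_t afLatencyMs, size_t afFrameCount,
                                      uint32_t afSampleRate, uint32_t clientRate) {
        size_t halBufferMs = (1000 * afFrameCount) / afSampleRate;  // duration of one HAL buffer
        size_t minBufCount = afLatencyMs / halBufferMs;
        if (minBufCount < 2) {
            minBufCount = 2;
        }
        return (afFrameCount * clientRate * minBufCount) / afSampleRate;
    }

    // e.g. a 20 ms HAL buffer (960 frames @ 48 kHz) and 80 ms of latency give
    // minBufCount 4 and, for a 44.1 kHz client, 3528 frames.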
Change-Id: Ibed539504db75ef99dc21c8ff1bf2987122063a5 --- include/media/AudioRecord.h | 8 +++---- include/media/AudioSystem.h | 10 ++++---- include/media/AudioTrack.h | 4 ++-- include/media/IAudioFlinger.h | 10 ++++---- libvideoeditor/lvpp/VideoEditorPlayer.cpp | 2 +- media/libmedia/AudioRecord.cpp | 18 +++++++++----- media/libmedia/AudioSystem.cpp | 12 +++++----- media/libmedia/AudioTrack.cpp | 28 +++++++++++++--------- media/libmedia/IAudioFlinger.cpp | 20 ++++++++-------- media/libmedia/SoundPool.cpp | 2 +- media/libmediaplayerservice/MediaPlayerService.cpp | 2 +- media/libstagefright/AudioSource.cpp | 2 +- services/audioflinger/AudioFlinger.cpp | 26 ++++++++++---------- services/audioflinger/AudioFlinger.h | 22 ++++++++--------- 14 files changed, 89 insertions(+), 77 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index 2672db1..cd7ff92 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -95,7 +95,7 @@ public: * - BAD_VALUE: unsupported configuration */ - static status_t getMinFrameCount(int* frameCount, + static status_t getMinFrameCount(size_t* frameCount, uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask); @@ -184,7 +184,7 @@ public: audio_format_t format() const; int channelCount() const; - uint32_t frameCount() const; + size_t frameCount() const; size_t frameSize() const { return mFrameSize; } audio_source_t inputSource() const; @@ -352,7 +352,7 @@ private: status_t openRecord_l(uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, audio_io_handle_t input); audio_io_handle_t getInput_l(); status_t restoreRecord_l(audio_track_cblk_t*& cblk); @@ -375,7 +375,7 @@ private: uint32_t mUpdatePeriod; // in ms // constant after constructor or set() - uint32_t mFrameCount; + size_t mFrameCount; audio_format_t mFormat; uint8_t mChannelCount; size_t mFrameSize; // app-level frame size == AudioFlinger frame size diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index 33078bb..126ef12 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -89,7 +89,7 @@ public: static status_t getOutputSamplingRate(uint32_t* samplingRate, audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); - static status_t getOutputFrameCount(int* frameCount, + static status_t getOutputFrameCount(size_t* frameCount, audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); static status_t getOutputLatency(uint32_t* latency, audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); @@ -100,7 +100,7 @@ public: // audio_stream->get_buffer_size()/audio_stream_frame_size() static status_t getFrameCount(audio_io_handle_t output, audio_stream_type_t stream, - int* frameCount); + size_t* frameCount); // returns the audio output stream latency in ms. Corresponds to // audio_stream_out->get_latency() static status_t getLatency(audio_io_handle_t output, @@ -123,11 +123,11 @@ public: // - BAD_VALUE: invalid parameter // NOTE: this feature is not supported on all hardware platforms and it is // necessary to check returned status before using the returned values. 
- static status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames, + static status_t getRenderPosition(size_t *halFrames, size_t *dspFrames, audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); // return the number of input frames lost by HAL implementation, or 0 if the handle is invalid - static unsigned int getInputFramesLost(audio_io_handle_t ioHandle); + static size_t getInputFramesLost(audio_io_handle_t ioHandle); static int newAudioSessionId(); static void acquireAudioSessionId(int audioSession); @@ -238,7 +238,7 @@ public: // helpers for android.media.AudioManager.getProperty(), see description there for meaning static uint32_t getPrimaryOutputSamplingRate(); - static int32_t getPrimaryOutputFrameCount(); + static size_t getPrimaryOutputFrameCount(); // ---------------------------------------------------------------------------- diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 6fd1b9e..f1b26b5 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -110,7 +110,7 @@ public: * - NO_INIT: audio server or audio hardware not initialized */ - static status_t getMinFrameCount(int* frameCount, + static status_t getMinFrameCount(size_t* frameCount, audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT, uint32_t sampleRate = 0); @@ -494,7 +494,7 @@ protected: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, audio_output_flags_t flags, const sp& sharedBuffer, audio_io_handle_t output); diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index 5fd5044..9727143 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -61,7 +61,7 @@ public: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, track_flags_t *flags, const sp& sharedBuffer, audio_io_handle_t output, @@ -75,7 +75,7 @@ public: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, track_flags_t flags, pid_t tid, // -1 means unused, otherwise must be valid non-0 int *sessionId, @@ -157,10 +157,10 @@ public: virtual status_t setVoiceVolume(float volume) = 0; - virtual status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames, + virtual status_t getRenderPosition(size_t *halFrames, size_t *dspFrames, audio_io_handle_t output) const = 0; - virtual unsigned int getInputFramesLost(audio_io_handle_t ioHandle) const = 0; + virtual size_t getInputFramesLost(audio_io_handle_t ioHandle) const = 0; virtual int newAudioSessionId() = 0; @@ -193,7 +193,7 @@ public: // FIXME move these APIs to AudioPolicy to permit a more accurate implementation // that looks on primary device for a stream with fast flag, primary flag, or first one. 
virtual uint32_t getPrimaryOutputSamplingRate() = 0; - virtual int32_t getPrimaryOutputFrameCount() = 0; + virtual size_t getPrimaryOutputFrameCount() = 0; }; diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp index d34b6d3..a47fc15 100755 --- a/libvideoeditor/lvpp/VideoEditorPlayer.cpp +++ b/libvideoeditor/lvpp/VideoEditorPlayer.cpp @@ -407,7 +407,7 @@ status_t VideoEditorPlayer::VeAudioOutput::open( ALOGV("open(%u, %d, %d, %d)", sampleRate, channelCount, format, bufferCount); if (mTrack) close(); uint32_t afSampleRate; - int afFrameCount; + size_t afFrameCount; int frameCount; if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) != diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 8f45a57..0587651 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -36,7 +36,7 @@ namespace android { // static status_t AudioRecord::getMinFrameCount( - int* frameCount, + size_t* frameCount, uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask) @@ -119,15 +119,21 @@ status_t AudioRecord::set( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + int frameCountInt, callback_t cbf, void* user, int notificationFrames, bool threadCanCallJava, int sessionId) { + // FIXME "int" here is legacy and will be replaced by size_t later + if (frameCountInt < 0) { + ALOGE("Invalid frame count %d", frameCountInt); + return BAD_VALUE; + } + size_t frameCount = frameCountInt; - ALOGV("set(): sampleRate %u, channelMask %#x, frameCount %d", sampleRate, channelMask, + ALOGV("set(): sampleRate %u, channelMask %#x, frameCount %u", sampleRate, channelMask, frameCount); AutoMutex lock(mLock); @@ -177,7 +183,7 @@ status_t AudioRecord::set( } // validate framecount - int minFrameCount = 0; + size_t minFrameCount = 0; status_t status = getMinFrameCount(&minFrameCount, sampleRate, format, channelMask); if (status != NO_ERROR) { return status; @@ -260,7 +266,7 @@ int AudioRecord::channelCount() const return mChannelCount; } -uint32_t AudioRecord::frameCount() const +size_t AudioRecord::frameCount() const { return mFrameCount; } @@ -427,7 +433,7 @@ status_t AudioRecord::openRecord_l( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, audio_io_handle_t input) { status_t status; diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index f3b74a2..028e4a3 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -247,7 +247,7 @@ status_t AudioSystem::getSamplingRate(audio_io_handle_t output, return NO_ERROR; } -status_t AudioSystem::getOutputFrameCount(int* frameCount, audio_stream_type_t streamType) +status_t AudioSystem::getOutputFrameCount(size_t* frameCount, audio_stream_type_t streamType) { audio_io_handle_t output; @@ -265,7 +265,7 @@ status_t AudioSystem::getOutputFrameCount(int* frameCount, audio_stream_type_t s status_t AudioSystem::getFrameCount(audio_io_handle_t output, audio_stream_type_t streamType, - int* frameCount) + size_t* frameCount) { OutputDescriptor *outputDesc; @@ -361,7 +361,7 @@ status_t AudioSystem::setVoiceVolume(float value) return af->setVoiceVolume(value); } -status_t AudioSystem::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames, +status_t AudioSystem::getRenderPosition(size_t *halFrames, size_t *dspFrames, audio_stream_type_t stream) { const sp& af = AudioSystem::get_audio_flinger(); @@ -374,7 
+374,7 @@ status_t AudioSystem::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames return af->getRenderPosition(halFrames, dspFrames, getOutput(stream)); } -unsigned int AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) { +size_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) { const sp& af = AudioSystem::get_audio_flinger(); unsigned int result = 0; if (af == 0) return result; @@ -442,7 +442,7 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle OutputDescriptor *outputDesc = new OutputDescriptor(*desc); gOutputs.add(ioHandle, outputDesc); - ALOGV("ioConfigChanged() new output samplingRate %u, format %d channels %#x frameCount %d " + ALOGV("ioConfigChanged() new output samplingRate %u, format %d channels %#x frameCount %u " "latency %d", outputDesc->samplingRate, outputDesc->format, outputDesc->channels, outputDesc->frameCount, outputDesc->latency); @@ -747,7 +747,7 @@ uint32_t AudioSystem::getPrimaryOutputSamplingRate() return af->getPrimaryOutputSamplingRate(); } -int32_t AudioSystem::getPrimaryOutputFrameCount() +size_t AudioSystem::getPrimaryOutputFrameCount() { const sp& af = AudioSystem::get_audio_flinger(); if (af == 0) return 0; diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 5fb36ee..979ee37 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -50,7 +50,7 @@ namespace android { // static status_t AudioTrack::getMinFrameCount( - int* frameCount, + size_t* frameCount, audio_stream_type_t streamType, uint32_t sampleRate) { @@ -69,7 +69,7 @@ status_t AudioTrack::getMinFrameCount( if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) { return NO_INIT; } - int afFrameCount; + size_t afFrameCount; if (AudioSystem::getOutputFrameCount(&afFrameCount, streamType) != NO_ERROR) { return NO_INIT; } @@ -166,7 +166,7 @@ status_t AudioTrack::set( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + int frameCountInt, audio_output_flags_t flags, callback_t cbf, void* user, @@ -175,11 +175,17 @@ status_t AudioTrack::set( bool threadCanCallJava, int sessionId) { + // FIXME "int" here is legacy and will be replaced by size_t later + if (frameCountInt < 0) { + ALOGE("Invalid frame count %d", frameCountInt); + return BAD_VALUE; + } + size_t frameCount = frameCountInt; ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), sharedBuffer->size()); - ALOGV("set() streamType %d frameCount %d flags %04x", streamType, frameCount, flags); + ALOGV("set() streamType %d frameCount %u flags %04x", streamType, frameCount, flags); AutoMutex lock(mLock); if (mAudioTrack != 0) { @@ -336,7 +342,7 @@ int AudioTrack::channelCount() const return mChannelCount; } -uint32_t AudioTrack::frameCount() const +size_t AudioTrack::frameCount() const { return mCblk->frameCount; } @@ -730,7 +736,7 @@ status_t AudioTrack::createTrack_l( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, audio_output_flags_t flags, const sp& sharedBuffer, audio_io_handle_t output) @@ -770,7 +776,7 @@ status_t AudioTrack::createTrack_l( // Same comment as below about ignoring frameCount parameter for set() frameCount = sharedBuffer->size(); } else if (frameCount == 0) { - int afFrameCount; + size_t afFrameCount; if (AudioSystem::getFrameCount(output, streamType, &afFrameCount) != NO_ERROR) { return NO_INIT; } @@ -806,7 +812,7 @@ status_t AudioTrack::createTrack_l( if 
(AudioSystem::getSamplingRate(output, streamType, &afSampleRate) != NO_ERROR) { return NO_INIT; } - int afFrameCount; + size_t afFrameCount; if (AudioSystem::getFrameCount(output, streamType, &afFrameCount) != NO_ERROR) { return NO_INIT; } @@ -815,8 +821,8 @@ status_t AudioTrack::createTrack_l( uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate); if (minBufCount < 2) minBufCount = 2; - int minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate; - ALOGV("minFrameCount: %d, afFrameCount=%d, minBufCount=%d, sampleRate=%u, afSampleRate=%u" + size_t minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate; + ALOGV("minFrameCount: %u, afFrameCount=%d, minBufCount=%d, sampleRate=%u, afSampleRate=%u" ", afLatency=%d", minFrameCount, afFrameCount, minBufCount, sampleRate, afSampleRate, afLatency); @@ -828,7 +834,7 @@ status_t AudioTrack::createTrack_l( } // Make sure that application is notified with sufficient margin // before underrun - if (mNotificationFramesAct > (uint32_t)frameCount/2) { + if (mNotificationFramesAct > frameCount/2) { mNotificationFramesAct = frameCount/2; } if (frameCount < minFrameCount) { diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index 0eeb6d9..79c3361 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -89,7 +89,7 @@ public: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, track_flags_t *flags, const sp& sharedBuffer, audio_io_handle_t output, @@ -143,7 +143,7 @@ public: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, track_flags_t flags, pid_t tid, int *sessionId, @@ -527,7 +527,7 @@ public: return status; } - virtual unsigned int getInputFramesLost(audio_io_handle_t ioHandle) const + virtual size_t getInputFramesLost(audio_io_handle_t ioHandle) const { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); @@ -703,7 +703,7 @@ public: return reply.readInt32(); } - virtual int32_t getPrimaryOutputFrameCount() + virtual size_t getPrimaryOutputFrameCount() { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); @@ -728,7 +728,7 @@ status_t BnAudioFlinger::onTransact( uint32_t sampleRate = data.readInt32(); audio_format_t format = (audio_format_t) data.readInt32(); audio_channel_mask_t channelMask = data.readInt32(); - size_t bufferCount = data.readInt32(); + size_t frameCount = data.readInt32(); track_flags_t flags = (track_flags_t) data.readInt32(); sp buffer = interface_cast(data.readStrongBinder()); audio_io_handle_t output = (audio_io_handle_t) data.readInt32(); @@ -737,7 +737,7 @@ status_t BnAudioFlinger::onTransact( status_t status; sp track = createTrack(pid, (audio_stream_type_t) streamType, sampleRate, format, - channelMask, bufferCount, &flags, buffer, output, tid, &sessionId, &status); + channelMask, frameCount, &flags, buffer, output, tid, &sessionId, &status); reply->writeInt32(flags); reply->writeInt32(sessionId); reply->writeInt32(status); @@ -751,13 +751,13 @@ status_t BnAudioFlinger::onTransact( uint32_t sampleRate = data.readInt32(); audio_format_t format = (audio_format_t) data.readInt32(); audio_channel_mask_t channelMask = data.readInt32(); - size_t bufferCount = data.readInt32(); + size_t frameCount = data.readInt32(); track_flags_t flags = (track_flags_t) data.readInt32(); pid_t tid = (pid_t) data.readInt32(); int sessionId = 
data.readInt32(); status_t status; sp record = openRecord(pid, input, - sampleRate, format, channelMask, bufferCount, flags, tid, &sessionId, &status); + sampleRate, format, channelMask, frameCount, flags, tid, &sessionId, &status); reply->writeInt32(sessionId); reply->writeInt32(status); reply->writeStrongBinder(record->asBinder()); @@ -972,8 +972,8 @@ status_t BnAudioFlinger::onTransact( case GET_RENDER_POSITION: { CHECK_INTERFACE(IAudioFlinger, data, reply); audio_io_handle_t output = (audio_io_handle_t) data.readInt32(); - uint32_t halFrames; - uint32_t dspFrames; + size_t halFrames; + size_t dspFrames; status_t status = getRenderPosition(&halFrames, &dspFrames, output); reply->writeInt32(status); if (status == NO_ERROR) { diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index b321e92..204e0ce 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -568,7 +568,7 @@ void SoundChannel::play(const sp& sample, int nextChannelID, float leftV } // initialize track - int afFrameCount; + size_t afFrameCount; uint32_t afSampleRate; audio_stream_type_t streamType = mSoundPool->streamType(); if (AudioSystem::getOutputFrameCount(&afFrameCount, streamType) != NO_ERROR) { diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index 769b322..c3e5c40 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -1388,7 +1388,7 @@ status_t MediaPlayerService::AudioOutput::open( ALOGV("open(%u, %d, 0x%x, %d, %d, %d)", sampleRate, channelCount, channelMask, format, bufferCount, mSessionId); uint32_t afSampleRate; - int afFrameCount; + size_t afFrameCount; uint32_t frameCount; if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) != NO_ERROR) { diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp index 861aebe..3cf4d5c 100644 --- a/media/libstagefright/AudioSource.cpp +++ b/media/libstagefright/AudioSource.cpp @@ -58,7 +58,7 @@ AudioSource::AudioSource( ALOGV("sampleRate: %d, channelCount: %d", sampleRate, channelCount); CHECK(channelCount == 1 || channelCount == 2); - int minFrameCount; + size_t minFrameCount; status_t status = AudioRecord::getMinFrameCount(&minFrameCount, sampleRate, AUDIO_FORMAT_PCM_16_BIT, diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 10f4410..cee704f 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -450,7 +450,7 @@ sp AudioFlinger::createTrack( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, IAudioFlinger::track_flags_t *flags, const sp& sharedBuffer, audio_io_handle_t output, @@ -1730,7 +1730,7 @@ sp AudioFlinger::PlaybackThread::createTrac uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, const sp& sharedBuffer, int sessionId, IAudioFlinger::track_flags_t *flags, @@ -1799,7 +1799,7 @@ sp AudioFlinger::PlaybackThread::createTrac if (minBufCount < 2) { minBufCount = 2; } - int minFrameCount = mNormalFrameCount * minBufCount; + size_t minFrameCount = mNormalFrameCount * minBufCount; if (frameCount < minFrameCount) { frameCount = minFrameCount; } @@ -4097,7 +4097,7 @@ void AudioFlinger::DuplicatingThread::addOutputTrack(MixerThread *thread) { Mutex::Autolock _l(mLock); // FIXME explain this formula - int frameCount = (3 * 
mNormalFrameCount * mSampleRate) / thread->sampleRate(); + size_t frameCount = (3 * mNormalFrameCount * mSampleRate) / thread->sampleRate(); OutputTrack *outputTrack = new OutputTrack(thread, this, mSampleRate, @@ -4185,7 +4185,7 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, const sp& sharedBuffer, int sessionId) : RefBase(), @@ -4348,7 +4348,7 @@ AudioFlinger::PlaybackThread::Track::Track( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, const sp& sharedBuffer, int sessionId, IAudioFlinger::track_flags_t flags) @@ -4892,7 +4892,7 @@ AudioFlinger::PlaybackThread::TimedTrack::create( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, const sp& sharedBuffer, int sessionId) { if (!client->reserveTimedTrack()) @@ -4910,7 +4910,7 @@ AudioFlinger::PlaybackThread::TimedTrack::TimedTrack( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, const sp& sharedBuffer, int sessionId) : Track(thread, client, streamType, sampleRate, format, channelMask, @@ -5405,7 +5405,7 @@ AudioFlinger::RecordThread::RecordTrack::RecordTrack( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, int sessionId) : TrackBase(thread, client, sampleRate, format, channelMask, frameCount, 0 /*sharedBuffer*/, sessionId), @@ -5524,7 +5524,7 @@ AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount) + size_t frameCount) : Track(playbackThread, NULL, AUDIO_STREAM_CNT, sampleRate, format, channelMask, frameCount, NULL, 0, IAudioFlinger::TRACK_DEFAULT), mActive(false), mSourceThread(sourceThread), mBuffers(NULL) @@ -5918,7 +5918,7 @@ sp AudioFlinger::openRecord( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, IAudioFlinger::track_flags_t flags, pid_t tid, int *sessionId, @@ -6295,7 +6295,7 @@ sp AudioFlinger::RecordThread::createR uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, int sessionId, IAudioFlinger::track_flags_t flags, pid_t tid, @@ -6988,7 +6988,7 @@ uint32_t AudioFlinger::getPrimaryOutputSamplingRate() return thread != NULL ? 
thread->sampleRate() : 0; } -int32_t AudioFlinger::getPrimaryOutputFrameCount() +size_t AudioFlinger::getPrimaryOutputFrameCount() { Mutex::Autolock _l(mLock); PlaybackThread *thread = primaryPlaybackThread_l(); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 9ddfe28..830dfe9 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -92,7 +92,7 @@ public: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, IAudioFlinger::track_flags_t *flags, const sp& sharedBuffer, audio_io_handle_t output, @@ -106,7 +106,7 @@ public: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, IAudioFlinger::track_flags_t flags, pid_t tid, int *sessionId, @@ -208,7 +208,7 @@ public: virtual audio_module_handle_t loadHwModule(const char *name); virtual uint32_t getPrimaryOutputSamplingRate(); - virtual int32_t getPrimaryOutputFrameCount(); + virtual size_t getPrimaryOutputFrameCount(); virtual status_t onTransact( uint32_t code, @@ -390,7 +390,7 @@ private: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, const sp& sharedBuffer, int sessionId); virtual ~TrackBase(); @@ -790,7 +790,7 @@ private: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, const sp& sharedBuffer, int sessionId, IAudioFlinger::track_flags_t flags); @@ -922,7 +922,7 @@ private: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, const sp& sharedBuffer, int sessionId); virtual ~TimedTrack(); @@ -965,7 +965,7 @@ private: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, const sp& sharedBuffer, int sessionId); @@ -1014,7 +1014,7 @@ private: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount); + size_t frameCount); virtual ~OutputTrack(); virtual status_t start(AudioSystem::sync_event_t event = @@ -1099,7 +1099,7 @@ public: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, const sp& sharedBuffer, int sessionId, IAudioFlinger::track_flags_t *flags, @@ -1463,7 +1463,7 @@ public: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, int sessionId); virtual ~RecordTrack(); @@ -1526,7 +1526,7 @@ public: uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, int sessionId, IAudioFlinger::track_flags_t flags, pid_t tid, -- cgit v1.1 From ba933df89521d63f75ca66af12ce9d7ae9496b9e Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Thu, 15 Nov 2012 14:31:56 -0800 Subject: Add GSM 6.10 decoder Supports Microsoft frame packing only, since that's what the sample file used. 
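For reference, the Microsoft packing this decoder targets stores two 160-sample GSM frames (33 bytes plus 32 bytes) in one 65-byte block, so every 65 bytes of data decode to 320 samples at 8000 Hz. The WAVExtractor changes below derive the clip duration and the seek offset from that ratio. A minimal sketch of the arithmetic, with illustrative constant and function names that are not part of the patch:

#include <stdint.h>

static const int64_t kMSGSMBlockSize = 65;        // bytes per packed block (33 + 32)
static const int64_t kMSGSMSamplesPerBlock = 320; // 2 frames x 160 samples
static const int64_t kMSGSMSampleRate = 8000;

// Duration of an MSGSM data chunk, matching the computation in WAVExtractor::init().
static int64_t msgsmDurationUs(int64_t dataSize) {
    return 1000000LL * (dataSize / kMSGSMBlockSize * kMSGSMSamplesPerBlock)
            / kMSGSMSampleRate;
}

// Seek target rounded down to a whole 65-byte block, as in WAVSource::read(),
// so the decoder only ever sees complete packed frames.
static int64_t msgsmSeekOffset(int64_t seekTimeUs) {
    int64_t sampleNumber = (seekTimeUs * kMSGSMSampleRate) / 1000000;
    int64_t blockNumber = sampleNumber / kMSGSMSamplesPerBlock;
    return blockNumber * kMSGSMBlockSize;
}

The same ratio explains the read-size cap in WAVSource::read(): reads are limited to 1024 bytes and rounded down to a multiple of 65 because the decoded output is roughly ten times larger than the compressed input.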
b/6620569 Change-Id: Ia89d95bcbf0f8dcbaad42148a7401728f60e079d --- include/media/stagefright/MediaDefs.h | 1 + media/libstagefright/ACodec.cpp | 2 + media/libstagefright/MediaDefs.cpp | 1 + media/libstagefright/OMXCodec.cpp | 2 + media/libstagefright/WAVExtractor.cpp | 61 ++++- media/libstagefright/codecs/gsm/Android.mk | 4 + media/libstagefright/codecs/gsm/dec/Android.mk | 21 ++ .../codecs/gsm/dec/MODULE_LICENSE_APACHE2 | 0 media/libstagefright/codecs/gsm/dec/NOTICE | 190 +++++++++++++++ media/libstagefright/codecs/gsm/dec/SoftGSM.cpp | 269 +++++++++++++++++++++ media/libstagefright/codecs/gsm/dec/SoftGSM.h | 65 +++++ media/libstagefright/omx/SoftOMXPlugin.cpp | 1 + 12 files changed, 606 insertions(+), 11 deletions(-) create mode 100644 media/libstagefright/codecs/gsm/Android.mk create mode 100644 media/libstagefright/codecs/gsm/dec/Android.mk create mode 100644 media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2 create mode 100644 media/libstagefright/codecs/gsm/dec/NOTICE create mode 100644 media/libstagefright/codecs/gsm/dec/SoftGSM.cpp create mode 100644 media/libstagefright/codecs/gsm/dec/SoftGSM.h diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h index 457d5d7..81de6e4 100644 --- a/include/media/stagefright/MediaDefs.h +++ b/include/media/stagefright/MediaDefs.h @@ -42,6 +42,7 @@ extern const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW; extern const char *MEDIA_MIMETYPE_AUDIO_RAW; extern const char *MEDIA_MIMETYPE_AUDIO_FLAC; extern const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS; +extern const char *MEDIA_MIMETYPE_AUDIO_MSGSM; extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG4; extern const char *MEDIA_MIMETYPE_CONTAINER_WAV; diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 84b4962..a135222 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -803,6 +803,8 @@ status_t ACodec::setComponentRole( "audio_decoder.raw", "audio_encoder.raw" }, { MEDIA_MIMETYPE_AUDIO_FLAC, "audio_decoder.flac", "audio_encoder.flac" }, + { MEDIA_MIMETYPE_AUDIO_MSGSM, + "audio_decoder.gsm", "audio_encoder.gsm" }, }; static const size_t kNumMimeToRole = diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp index e7b5903..5d8029c 100644 --- a/media/libstagefright/MediaDefs.cpp +++ b/media/libstagefright/MediaDefs.cpp @@ -40,6 +40,7 @@ const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW = "audio/g711-mlaw"; const char *MEDIA_MIMETYPE_AUDIO_RAW = "audio/raw"; const char *MEDIA_MIMETYPE_AUDIO_FLAC = "audio/flac"; const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS = "audio/aac-adts"; +const char *MEDIA_MIMETYPE_AUDIO_MSGSM = "audio/gsm"; const char *MEDIA_MIMETYPE_CONTAINER_MPEG4 = "video/mp4"; const char *MEDIA_MIMETYPE_CONTAINER_WAV = "audio/x-wav"; diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 70de174..22aefcc 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -1390,6 +1390,8 @@ void OMXCodec::setComponentRole( "audio_decoder.raw", "audio_encoder.raw" }, { MEDIA_MIMETYPE_AUDIO_FLAC, "audio_decoder.flac", "audio_encoder.flac" }, + { MEDIA_MIMETYPE_AUDIO_MSGSM, + "audio_decoder.gsm", "audio_encoder.gsm" }, }; static const size_t kNumMimeToRole = diff --git a/media/libstagefright/WAVExtractor.cpp b/media/libstagefright/WAVExtractor.cpp index a38400b..d32f4fb 100644 --- a/media/libstagefright/WAVExtractor.cpp +++ b/media/libstagefright/WAVExtractor.cpp @@ -38,6 +38,7 @@ enum { WAVE_FORMAT_PCM = 0x0001, WAVE_FORMAT_ALAW = 
0x0006, WAVE_FORMAT_MULAW = 0x0007, + WAVE_FORMAT_MSGSM = 0x0031, WAVE_FORMAT_EXTENSIBLE = 0xFFFE }; @@ -178,6 +179,7 @@ status_t WAVExtractor::init() { if (mWaveFormat != WAVE_FORMAT_PCM && mWaveFormat != WAVE_FORMAT_ALAW && mWaveFormat != WAVE_FORMAT_MULAW + && mWaveFormat != WAVE_FORMAT_MSGSM && mWaveFormat != WAVE_FORMAT_EXTENSIBLE) { return ERROR_UNSUPPORTED; } @@ -216,6 +218,10 @@ status_t WAVExtractor::init() { && mBitsPerSample != 24) { return ERROR_UNSUPPORTED; } + } else if (mWaveFormat == WAVE_FORMAT_MSGSM) { + if (mBitsPerSample != 0) { + return ERROR_UNSUPPORTED; + } } else { CHECK(mWaveFormat == WAVE_FORMAT_MULAW || mWaveFormat == WAVE_FORMAT_ALAW); @@ -283,6 +289,10 @@ status_t WAVExtractor::init() { mTrackMeta->setCString( kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_G711_ALAW); break; + case WAVE_FORMAT_MSGSM: + mTrackMeta->setCString( + kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MSGSM); + break; default: CHECK_EQ(mWaveFormat, (uint16_t)WAVE_FORMAT_MULAW); mTrackMeta->setCString( @@ -294,11 +304,17 @@ status_t WAVExtractor::init() { mTrackMeta->setInt32(kKeyChannelMask, mChannelMask); mTrackMeta->setInt32(kKeySampleRate, mSampleRate); - size_t bytesPerSample = mBitsPerSample >> 3; - - int64_t durationUs = - 1000000LL * (mDataSize / (mNumChannels * bytesPerSample)) - / mSampleRate; + int64_t durationUs = 0; + if (mWaveFormat == WAVE_FORMAT_MSGSM) { + // 65 bytes decode to 320 8kHz samples + durationUs = + 1000000LL * (mDataSize / 65 * 320) / 8000; + } else { + size_t bytesPerSample = mBitsPerSample >> 3; + durationUs = + 1000000LL * (mDataSize / (mNumChannels * bytesPerSample)) + / mSampleRate; + } mTrackMeta->setInt64(kKeyDuration, durationUs); @@ -388,7 +404,16 @@ status_t WAVSource::read( int64_t seekTimeUs; ReadOptions::SeekMode mode; if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) { - int64_t pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * (mBitsPerSample >> 3); + int64_t pos = 0; + + if (mWaveFormat == WAVE_FORMAT_MSGSM) { + // 65 bytes decode to 320 8kHz samples + int64_t samplenumber = (seekTimeUs * mSampleRate) / 1000000; + int64_t framenumber = samplenumber / 320; + pos = framenumber * 65; + } else { + pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * (mBitsPerSample >> 3); + } if (pos > mSize) { pos = mSize; } @@ -412,6 +437,15 @@ status_t WAVSource::read( maxBytesToRead = maxBytesAvailable; } + if (mWaveFormat == WAVE_FORMAT_MSGSM) { + // Microsoft packs 2 frames into 65 bytes, rather than using separate 33-byte frames, + // so read multiples of 65, and use smaller buffers to account for ~10:1 expansion ratio + if (maxBytesToRead > 1024) { + maxBytesToRead = 1024; + } + maxBytesToRead = (maxBytesToRead / 65) * 65; + } + ssize_t n = mDataSource->readAt( mCurrentPos, buffer->data(), maxBytesToRead); @@ -468,12 +502,17 @@ status_t WAVSource::read( } } - size_t bytesPerSample = mBitsPerSample >> 3; + int64_t timeStampUs = 0; + + if (mWaveFormat == WAVE_FORMAT_MSGSM) { + timeStampUs = 1000000LL * (mCurrentPos - mOffset) * 320 / 65 / mSampleRate; + } else { + size_t bytesPerSample = mBitsPerSample >> 3; + timeStampUs = 1000000LL * (mCurrentPos - mOffset) + / (mNumChannels * bytesPerSample) / mSampleRate; + } - buffer->meta_data()->setInt64( - kKeyTime, - 1000000LL * (mCurrentPos - mOffset) - / (mNumChannels * bytesPerSample) / mSampleRate); + buffer->meta_data()->setInt64(kKeyTime, timeStampUs); buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); mCurrentPos += n; diff --git a/media/libstagefright/codecs/gsm/Android.mk 
b/media/libstagefright/codecs/gsm/Android.mk new file mode 100644 index 0000000..2e43120 --- /dev/null +++ b/media/libstagefright/codecs/gsm/Android.mk @@ -0,0 +1,4 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +include $(call all-makefiles-under,$(LOCAL_PATH)) diff --git a/media/libstagefright/codecs/gsm/dec/Android.mk b/media/libstagefright/codecs/gsm/dec/Android.mk new file mode 100644 index 0000000..9c0c6ae --- /dev/null +++ b/media/libstagefright/codecs/gsm/dec/Android.mk @@ -0,0 +1,21 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + SoftGSM.cpp + +LOCAL_C_INCLUDES := \ + frameworks/av/media/libstagefright/include \ + frameworks/native/include/media/openmax \ + external/libgsm/inc + +LOCAL_SHARED_LIBRARIES := \ + libstagefright libstagefright_omx libstagefright_foundation libutils + +LOCAL_STATIC_LIBRARIES := \ + libgsm + +LOCAL_MODULE := libstagefright_soft_gsmdec +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000..e69de29 diff --git a/media/libstagefright/codecs/gsm/dec/NOTICE b/media/libstagefright/codecs/gsm/dec/NOTICE new file mode 100644 index 0000000..c5b1efa --- /dev/null +++ b/media/libstagefright/codecs/gsm/dec/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp new file mode 100644 index 0000000..00e0c85 --- /dev/null +++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp @@ -0,0 +1,269 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SoftGSM" +#include + +#include "SoftGSM.h" + +#include +#include + +namespace android { + +template +static void InitOMXParams(T *params) { + params->nSize = sizeof(T); + params->nVersion.s.nVersionMajor = 1; + params->nVersion.s.nVersionMinor = 0; + params->nVersion.s.nRevision = 0; + params->nVersion.s.nStep = 0; +} + +SoftGSM::SoftGSM( + const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component) + : SimpleSoftOMXComponent(name, callbacks, appData, component), + mSignalledError(false) { + + CHECK(!strcmp(name, "OMX.google.gsm.decoder")); + + mGsm = gsm_create(); + CHECK(mGsm); + int msopt = 1; + gsm_option(mGsm, GSM_OPT_WAV49, &msopt); + + initPorts(); +} + +SoftGSM::~SoftGSM() { + gsm_destroy(mGsm); +} + +void SoftGSM::initPorts() { + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + + def.nPortIndex = 0; + def.eDir = OMX_DirInput; + def.nBufferCountMin = kNumBuffers; + def.nBufferCountActual = def.nBufferCountMin; + def.nBufferSize = sizeof(gsm_frame); + def.bEnabled = OMX_TRUE; + def.bPopulated = OMX_FALSE; + def.eDomain = OMX_PortDomainAudio; + def.bBuffersContiguous = OMX_FALSE; + def.nBufferAlignment = 1; + + def.format.audio.cMIMEType = + const_cast(MEDIA_MIMETYPE_AUDIO_MSGSM); + + def.format.audio.pNativeRender = NULL; + def.format.audio.bFlagErrorConcealment = OMX_FALSE; + def.format.audio.eEncoding = OMX_AUDIO_CodingGSMFR; + + addPort(def); + + def.nPortIndex = 1; + def.eDir = OMX_DirOutput; + def.nBufferCountMin = kNumBuffers; + def.nBufferCountActual = def.nBufferCountMin; + def.nBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t); + def.bEnabled = OMX_TRUE; + def.bPopulated = OMX_FALSE; + def.eDomain = OMX_PortDomainAudio; + def.bBuffersContiguous = OMX_FALSE; + def.nBufferAlignment = 2; + + def.format.audio.cMIMEType = const_cast("audio/raw"); + def.format.audio.pNativeRender = NULL; + def.format.audio.bFlagErrorConcealment = OMX_FALSE; + def.format.audio.eEncoding = OMX_AUDIO_CodingPCM; + + addPort(def); +} + +OMX_ERRORTYPE SoftGSM::internalGetParameter( + OMX_INDEXTYPE index, OMX_PTR params) { + switch (index) { + case OMX_IndexParamAudioPcm: + { + OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams = + (OMX_AUDIO_PARAM_PCMMODETYPE *)params; + + if (pcmParams->nPortIndex > 1) { + return OMX_ErrorUndefined; + } + + pcmParams->eNumData = OMX_NumericalDataSigned; + pcmParams->eEndian = OMX_EndianBig; + pcmParams->bInterleaved = OMX_TRUE; + pcmParams->nBitPerSample = 16; + pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear; + pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF; + pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF; + + pcmParams->nChannels = 1; + pcmParams->nSamplingRate = 8000; + + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::internalGetParameter(index, params); + } +} + +OMX_ERRORTYPE SoftGSM::internalSetParameter( + OMX_INDEXTYPE index, const OMX_PTR params) { + switch (index) { + case OMX_IndexParamAudioPcm: + { + OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams = + (OMX_AUDIO_PARAM_PCMMODETYPE *)params; + + if (pcmParams->nPortIndex != 0 && pcmParams->nPortIndex != 1) { + return OMX_ErrorUndefined; + } + + if (pcmParams->nChannels != 1) { + return OMX_ErrorUndefined; + } + + if (pcmParams->nSamplingRate != 8000) { + return OMX_ErrorUndefined; + } + + return OMX_ErrorNone; + } + + case OMX_IndexParamStandardComponentRole: + { + const OMX_PARAM_COMPONENTROLETYPE *roleParams = + (const OMX_PARAM_COMPONENTROLETYPE *)params; + + if (strncmp((const char 
*)roleParams->cRole, + "audio_decoder.gsm", + OMX_MAX_STRINGNAME_SIZE - 1)) { + return OMX_ErrorUndefined; + } + + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::internalSetParameter(index, params); + } +} + +void SoftGSM::onQueueFilled(OMX_U32 portIndex) { + if (mSignalledError) { + return; + } + + List &inQueue = getPortQueue(0); + List &outQueue = getPortQueue(1); + + while (!inQueue.empty() && !outQueue.empty()) { + BufferInfo *inInfo = *inQueue.begin(); + OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; + + BufferInfo *outInfo = *outQueue.begin(); + OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; + + if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + inQueue.erase(inQueue.begin()); + inInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inHeader); + + outHeader->nFilledLen = 0; + outHeader->nFlags = OMX_BUFFERFLAG_EOS; + + outQueue.erase(outQueue.begin()); + outInfo->mOwnedByUs = false; + notifyFillBufferDone(outHeader); + return; + } + + if (inHeader->nFilledLen > kMaxNumSamplesPerFrame) { + ALOGE("input buffer too large (%ld).", inHeader->nFilledLen); + notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); + mSignalledError = true; + } + + if(((inHeader->nFilledLen / 65) * 65) != inHeader->nFilledLen) { + ALOGE("input buffer not multiple of 65 (%ld).", inHeader->nFilledLen); + notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); + mSignalledError = true; + } + + uint8_t *inputptr = inHeader->pBuffer + inHeader->nOffset; + + int n = mSignalledError ? 0 : DecodeGSM(mGsm, + reinterpret_cast(outHeader->pBuffer), inputptr, inHeader->nFilledLen); + + outHeader->nTimeStamp = inHeader->nTimeStamp; + outHeader->nOffset = 0; + outHeader->nFilledLen = n * sizeof(int16_t); + outHeader->nFlags = 0; + + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + + outInfo->mOwnedByUs = false; + outQueue.erase(outQueue.begin()); + outInfo = NULL; + notifyFillBufferDone(outHeader); + outHeader = NULL; + } +} + + +// static +int SoftGSM::DecodeGSM(gsm handle, + int16_t *out, uint8_t *in, size_t inSize) { + + int ret = 0; + while (inSize > 0) { + gsm_decode(handle, in, out); + in += 33; + inSize -= 33; + out += 160; + ret += 160; + gsm_decode(handle, in, out); + in += 32; + inSize -= 32; + out += 160; + ret += 160; + } + return ret; +} + + +} // namespace android + +android::SoftOMXComponent *createSoftOMXComponent( + const char *name, const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, OMX_COMPONENTTYPE **component) { + return new android::SoftGSM(name, callbacks, appData, component); +} + diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.h b/media/libstagefright/codecs/gsm/dec/SoftGSM.h new file mode 100644 index 0000000..8ab6116 --- /dev/null +++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.h @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SOFT_GSM_H_ + +#define SOFT_GSM_H_ + +#include "SimpleSoftOMXComponent.h" + +extern "C" { +#include "gsm.h" +} + +namespace android { + +struct SoftGSM : public SimpleSoftOMXComponent { + SoftGSM(const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component); + +protected: + virtual ~SoftGSM(); + + virtual OMX_ERRORTYPE internalGetParameter( + OMX_INDEXTYPE index, OMX_PTR params); + + virtual OMX_ERRORTYPE internalSetParameter( + OMX_INDEXTYPE index, const OMX_PTR params); + + virtual void onQueueFilled(OMX_U32 portIndex); + +private: + enum { + kNumBuffers = 4, + kMaxNumSamplesPerFrame = 16384, + }; + + bool mSignalledError; + gsm mGsm; + + void initPorts(); + + static int DecodeGSM(gsm handle, int16_t *out, uint8_t *in, size_t inSize); + + DISALLOW_EVIL_CONSTRUCTORS(SoftGSM); +}; + +} // namespace android + +#endif // SOFT_GSM_H_ + diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp index 3747b3b..6e1c04d 100644 --- a/media/libstagefright/omx/SoftOMXPlugin.cpp +++ b/media/libstagefright/omx/SoftOMXPlugin.cpp @@ -53,6 +53,7 @@ static const struct { { "OMX.google.vpx.decoder", "vpxdec", "video_decoder.vpx" }, { "OMX.google.raw.decoder", "rawdec", "audio_decoder.raw" }, { "OMX.google.flac.encoder", "flacenc", "audio_encoder.flac" }, + { "OMX.google.gsm.decoder", "gsmdec", "audio_decoder.gsm" }, }; static const size_t kNumComponents = -- cgit v1.1 From 4cf1bdef135d6ebf6ccd404aacc8917f9887a07a Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 16 Nov 2012 11:15:44 -0800 Subject: Only pass the surface to the video decoder. Change-Id: Ice0cfc0021fdd9fe053be6ee324cbc64226ed122 --- cmds/stagefright/SimplePlayer.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/cmds/stagefright/SimplePlayer.cpp b/cmds/stagefright/SimplePlayer.cpp index 7636906..eb3296e 100644 --- a/cmds/stagefright/SimplePlayer.cpp +++ b/cmds/stagefright/SimplePlayer.cpp @@ -297,9 +297,11 @@ status_t SimplePlayer::onPrepare() { AString mime; CHECK(format->findString("mime", &mime)); + bool isVideo = !strncasecmp(mime.c_str(), "video/", 6); + if (!haveAudio && !strncasecmp(mime.c_str(), "audio/", 6)) { haveAudio = true; - } else if (!haveVideo && !strncasecmp(mime.c_str(), "video/", 6)) { + } else if (!haveVideo && isVideo) { haveVideo = true; } else { continue; @@ -320,7 +322,7 @@ status_t SimplePlayer::onPrepare() { err = state->mCodec->configure( format, - mNativeWindow->getSurfaceTextureClient(), + isVideo ? mNativeWindow->getSurfaceTextureClient() : NULL, NULL /* crypto */, 0 /* flags */); -- cgit v1.1 From 26c77556efc30800466b60b3975bc35a70c8c28b Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 16 Nov 2012 12:01:44 -0800 Subject: Fix time vs. bytes units bug in getRenderPosition Rename correctLatency since it requires thread to be locked. Use size_t for byte and frame counts. 
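The unit bug: the old suspended-output estimate subtracted latency_l(), a value in milliseconds, directly from mBytesWritten, a byte count. The fix converts both quantities to frames before subtracting and clamps at zero rather than relying on a signed intermediate. A minimal standalone sketch of the conversion that the patch performs inline in PlaybackThread::getRenderPosition(); the helper name is illustrative only:

#include <stddef.h>
#include <stdint.h>

static size_t estimateDspFrames(size_t bytesWritten, size_t frameSize,
                                uint32_t latencyMs, uint32_t sampleRate) {
    size_t framesWritten = bytesWritten / frameSize;         // bytes -> frames
    size_t latencyFrames = (latencyMs * sampleRate) / 1000;  // ms -> frames
    return framesWritten >= latencyFrames ? framesWritten - latencyFrames : 0;
}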
Change-Id: I178fdd18bdb823813b9563927bdff8c0d28ca5a5 --- media/libmedia/IAudioFlinger.cpp | 2 +- services/audioflinger/AudioFlinger.cpp | 31 +++++++++++++------------------ services/audioflinger/AudioFlinger.h | 12 +++++++----- 3 files changed, 21 insertions(+), 24 deletions(-) diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index 79c3361..a010bb6 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -506,7 +506,7 @@ public: return reply.readInt32(); } - virtual status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames, + virtual status_t getRenderPosition(size_t *halFrames, size_t *dspFrames, audio_io_handle_t output) const { Parcel data, reply; diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 384f268..cb66d21 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1042,7 +1042,7 @@ status_t AudioFlinger::setVoiceVolume(float value) return ret; } -status_t AudioFlinger::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames, +status_t AudioFlinger::getRenderPosition(size_t *halFrames, size_t *dspFrames, audio_io_handle_t output) const { status_t status; @@ -1889,7 +1889,7 @@ Exit: return track; } -uint32_t AudioFlinger::MixerThread::correctLatency(uint32_t latency) const +uint32_t AudioFlinger::MixerThread::correctLatency_l(uint32_t latency) const { if (mFastMixer != NULL) { MonoPipe *pipe = (MonoPipe *)mPipeSink.get(); @@ -1898,7 +1898,7 @@ uint32_t AudioFlinger::MixerThread::correctLatency(uint32_t latency) const return latency; } -uint32_t AudioFlinger::PlaybackThread::correctLatency(uint32_t latency) const +uint32_t AudioFlinger::PlaybackThread::correctLatency_l(uint32_t latency) const { return latency; } @@ -1911,7 +1911,7 @@ uint32_t AudioFlinger::PlaybackThread::latency() const uint32_t AudioFlinger::PlaybackThread::latency_l() const { if (initCheck() == NO_ERROR) { - return correctLatency(mOutput->stream->get_latency(mOutput->stream)); + return correctLatency_l(mOutput->stream->get_latency(mOutput->stream)); } else { return 0; } @@ -2140,7 +2140,7 @@ void AudioFlinger::PlaybackThread::readOutputParameters() } -status_t AudioFlinger::PlaybackThread::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames) +status_t AudioFlinger::PlaybackThread::getRenderPosition(size_t *halFrames, size_t *dspFrames) { if (halFrames == NULL || dspFrames == NULL) { return BAD_VALUE; @@ -2149,15 +2149,13 @@ status_t AudioFlinger::PlaybackThread::getRenderPosition(uint32_t *halFrames, ui if (initCheck() != NO_ERROR) { return INVALID_OPERATION; } - *halFrames = mBytesWritten / audio_stream_frame_size(&mOutput->stream->common); + size_t framesWritten = mBytesWritten / mFrameSize; + *halFrames = framesWritten; if (isSuspended()) { // return an estimation of rendered frames when the output is suspended - int32_t frames = mBytesWritten - latency_l(); - if (frames < 0) { - frames = 0; - } - *dspFrames = (uint32_t)frames; + size_t latencyFrames = (latency_l() * mSampleRate) / 1000; + *dspFrames = framesWritten >= latencyFrames ? 
framesWritten - latencyFrames : 0; return NO_ERROR; } else { return mOutput->stream->get_render_position(mOutput->stream, dspFrames); @@ -2916,7 +2914,7 @@ void AudioFlinger::MixerThread::threadLoop_sleepTime() } else if (mBytesWritten != 0 || (mMixerStatus == MIXER_TRACKS_ENABLED)) { memset (mMixBuffer, 0, mixBufferSize); sleepTime = 0; - ALOGV_IF((mBytesWritten == 0 && (mMixerStatus == MIXER_TRACKS_ENABLED)), + ALOGV_IF(mBytesWritten == 0 && (mMixerStatus == MIXER_TRACKS_ENABLED), "anticipated start"); } // TODO add standby time extension fct of effect tail @@ -3057,8 +3055,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac { size_t audioHALFrames = (mOutput->stream->get_latency(mOutput->stream)*mSampleRate) / 1000; - size_t framesWritten = - mBytesWritten / audio_stream_frame_size(&mOutput->stream->common); + size_t framesWritten = mBytesWritten / mFrameSize; if (!(mStandby || track->presentationComplete(framesWritten, audioHALFrames))) { // track stays in active list until presentation is complete break; @@ -3305,8 +3302,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // TODO: use actual buffer filling status instead of latency when available from // audio HAL size_t audioHALFrames = (latency_l() * mSampleRate) / 1000; - size_t framesWritten = - mBytesWritten / audio_stream_frame_size(&mOutput->stream->common); + size_t framesWritten = mBytesWritten / mFrameSize; if (mStandby || track->presentationComplete(framesWritten, audioHALFrames)) { if (track->isStopped()) { track->reset(); @@ -3837,8 +3833,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep // Remove it from the list of active tracks. // TODO: implement behavior for compressed audio size_t audioHALFrames = (latency_l() * mSampleRate) / 1000; - size_t framesWritten = - mBytesWritten / audio_stream_frame_size(&mOutput->stream->common); + size_t framesWritten = mBytesWritten / mFrameSize; if (mStandby || track->presentationComplete(framesWritten, audioHALFrames)) { if (track->isStopped()) { track->reset(); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 830dfe9..50dbd27 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -174,7 +174,7 @@ public: virtual status_t setVoiceVolume(float volume); - virtual status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames, + virtual status_t getRenderPosition(size_t *halFrames, size_t *dspFrames, audio_io_handle_t output) const; virtual unsigned int getInputFramesLost(audio_io_handle_t ioHandle) const; @@ -1125,7 +1125,7 @@ public: virtual String8 getParameters(const String8& keys); virtual void audioConfigChanged_l(int event, int param = 0); - status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames); + status_t getRenderPosition(size_t *halFrames, size_t *dspFrames); int16_t *mixBuffer() const { return mMixBuffer; }; virtual void detachAuxEffect_l(int effectId); @@ -1155,7 +1155,9 @@ public: // 'volatile' means accessed via atomic operations and no lock. volatile int32_t mSuspended; - int mBytesWritten; + // FIXME overflows every 6+ hours at 44.1 kHz stereo 16-bit samples + // mFramesWritten would be better, or 64-bit even better + size_t mBytesWritten; private: // mMasterMute is in both PlaybackThread and in AudioFlinger. 
When a // PlaybackThread needs to find out if master-muted, it checks it's local @@ -1187,7 +1189,7 @@ public: // Cache various calculated values, at threadLoop() entry and after a parameter change virtual void cacheParameters_l(); - virtual uint32_t correctLatency(uint32_t latency) const; + virtual uint32_t correctLatency_l(uint32_t latency) const; private: @@ -1296,7 +1298,7 @@ public: virtual void threadLoop_mix(); virtual void threadLoop_sleepTime(); virtual void threadLoop_removeTracks(const Vector< sp >& tracksToRemove); - virtual uint32_t correctLatency(uint32_t latency) const; + virtual uint32_t correctLatency_l(uint32_t latency) const; AudioMixer* mAudioMixer; // normal mixer private: -- cgit v1.1 From 827e5f1237757aee78b677efcf0f7c44fd0dd3d8 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 2 Nov 2012 10:00:06 -0700 Subject: Don't explicitly log tid If needed, it can be obtained with adb logcat -v threadtime Change-Id: I91b3911d20f7bcfc3361db4052db21ff9181f1cf --- media/libmedia/AudioTrack.cpp | 6 +++--- services/audioflinger/AudioFlinger.cpp | 14 +++++++------- services/audioflinger/AudioPolicyService.cpp | 16 ++++++++-------- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 979ee37..907f7e6 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1336,8 +1336,8 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart audio_track_cblk_t* cblk = refCblk; audio_track_cblk_t* newCblk = cblk; - ALOGW("dead IAudioTrack, creating a new one from %s TID %d", - fromStart ? "start()" : "obtainBuffer()", gettid()); + ALOGW("dead IAudioTrack, creating a new one from %s", + fromStart ? "start()" : "obtainBuffer()"); // signal old cblk condition so that other threads waiting for available buffers stop // waiting now @@ -1411,7 +1411,7 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart } newCblk->lock.lock(); - ALOGW_IF(result != NO_ERROR, "restoreTrack_l() error %d TID %d", result, gettid()); + ALOGW_IF(result != NO_ERROR, "restoreTrack_l() error %d", result); return result; } diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 384f268..fb62669 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -870,8 +870,8 @@ bool AudioFlinger::streamMute(audio_stream_type_t stream) const status_t AudioFlinger::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) { - ALOGV("setParameters(): io %d, keyvalue %s, tid %d, calling pid %d", - ioHandle, keyValuePairs.string(), gettid(), IPCThreadState::self()->getCallingPid()); + ALOGV("setParameters(): io %d, keyvalue %s, calling pid %d", + ioHandle, keyValuePairs.string(), IPCThreadState::self()->getCallingPid()); // check calling permissions if (!settingsAllowed()) { return PERMISSION_DENIED; @@ -955,8 +955,8 @@ status_t AudioFlinger::setParameters(audio_io_handle_t ioHandle, const String8& String8 AudioFlinger::getParameters(audio_io_handle_t ioHandle, const String8& keys) const { - ALOGVV("getParameters() io %d, keys %s, tid %d, calling pid %d", - ioHandle, keys.string(), gettid(), IPCThreadState::self()->getCallingPid()); + ALOGVV("getParameters() io %d, keys %s, calling pid %d", + ioHandle, keys.string(), IPCThreadState::self()->getCallingPid()); Mutex::Autolock _l(mLock); @@ -1126,7 +1126,7 @@ void AudioFlinger::audioConfigChanged_l(int event, audio_io_handle_t ioHandle, c 
// removeClient_l() must be called with AudioFlinger::mLock held void AudioFlinger::removeClient_l(pid_t pid) { - ALOGV("removeClient_l() pid %d, tid %d, calling tid %d", pid, gettid(), + ALOGV("removeClient_l() pid %d, calling pid %d", pid, IPCThreadState::self()->getCallingPid()); mClients.removeItem(pid); } @@ -5534,9 +5534,9 @@ AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( mBuffers = (char*)mCblk + sizeof(audio_track_cblk_t); mOutBuffer.frameCount = 0; playbackThread->mTracks.add(this); - ALOGV("OutputTrack constructor mCblk %p, mBuffer %p, mCblk->buffers %p, " \ + ALOGV("OutputTrack constructor mCblk %p, mBuffer %p, mBuffers %p, " \ "mCblk->frameCount %d, mCblk->sampleRate %u, mChannelMask 0x%08x mBufferEnd %p", - mCblk, mBuffer, mCblk->buffers, + mCblk, mBuffer, mBuffers, mCblk->frameCount, mCblk->sampleRate, mChannelMask, mBufferEnd); } else { ALOGW("Error creating output track on thread %p", playbackThread); diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp index ea130ba..b86d3ae 100644 --- a/services/audioflinger/AudioPolicyService.cpp +++ b/services/audioflinger/AudioPolicyService.cpp @@ -145,7 +145,7 @@ status_t AudioPolicyService::setDeviceConnectionState(audio_devices_t device, return BAD_VALUE; } - ALOGV("setDeviceConnectionState() tid %d", gettid()); + ALOGV("setDeviceConnectionState()"); Mutex::Autolock _l(mLock); return mpAudioPolicy->set_device_connection_state(mpAudioPolicy, device, state, device_address); @@ -174,7 +174,7 @@ status_t AudioPolicyService::setPhoneState(audio_mode_t state) return BAD_VALUE; } - ALOGV("setPhoneState() tid %d", gettid()); + ALOGV("setPhoneState()"); // TODO: check if it is more appropriate to do it in platform specific policy manager AudioSystem::setMode(state); @@ -199,7 +199,7 @@ status_t AudioPolicyService::setForceUse(audio_policy_force_use_t usage, if (config < 0 || config >= AUDIO_POLICY_FORCE_CFG_CNT) { return BAD_VALUE; } - ALOGV("setForceUse() tid %d", gettid()); + ALOGV("setForceUse()"); Mutex::Autolock _l(mLock); mpAudioPolicy->set_force_use(mpAudioPolicy, usage, config); return NO_ERROR; @@ -225,7 +225,7 @@ audio_io_handle_t AudioPolicyService::getOutput(audio_stream_type_t stream, if (mpAudioPolicy == NULL) { return 0; } - ALOGV("getOutput() tid %d", gettid()); + ALOGV("getOutput()"); Mutex::Autolock _l(mLock); return mpAudioPolicy->get_output(mpAudioPolicy, stream, samplingRate, format, channelMask, flags); @@ -238,7 +238,7 @@ status_t AudioPolicyService::startOutput(audio_io_handle_t output, if (mpAudioPolicy == NULL) { return NO_INIT; } - ALOGV("startOutput() tid %d", gettid()); + ALOGV("startOutput()"); Mutex::Autolock _l(mLock); return mpAudioPolicy->start_output(mpAudioPolicy, output, stream, session); } @@ -250,7 +250,7 @@ status_t AudioPolicyService::stopOutput(audio_io_handle_t output, if (mpAudioPolicy == NULL) { return NO_INIT; } - ALOGV("stopOutput() tid %d", gettid()); + ALOGV("stopOutput()"); Mutex::Autolock _l(mLock); return mpAudioPolicy->stop_output(mpAudioPolicy, output, stream, session); } @@ -260,7 +260,7 @@ void AudioPolicyService::releaseOutput(audio_io_handle_t output) if (mpAudioPolicy == NULL) { return; } - ALOGV("releaseOutput() tid %d", gettid()); + ALOGV("releaseOutput()"); Mutex::Autolock _l(mLock); mpAudioPolicy->release_output(mpAudioPolicy, output); } @@ -534,7 +534,7 @@ status_t AudioPolicyService::queryDefaultPreProcessing(int audioSession, } void AudioPolicyService::binderDied(const wp& who) { - ALOGW("binderDied() %p, tid %d, 
calling pid %d", who.unsafe_get(), gettid(), + ALOGW("binderDied() %p, calling pid %d", who.unsafe_get(), IPCThreadState::self()->getCallingPid()); } -- cgit v1.1 From d65d73c4ae74d084751b417615a78cbe7a51372a Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 22 Jun 2012 17:21:07 -0700 Subject: "if" statements use curly braces per media style Change-Id: I130e7849fd1da7a0b7fe56c3c53919d26e3843b8 --- media/libmedia/AudioRecord.cpp | 6 +- media/libmedia/AudioTrack.cpp | 70 +++++++++++++++------ services/audioflinger/AudioFlinger.cpp | 112 ++++++++++++++++++++++++--------- 3 files changed, 140 insertions(+), 48 deletions(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 0587651..dd0a145 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -643,10 +643,12 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize) status_t err = obtainBuffer(&audioBuffer, ((2 * MAX_RUN_TIMEOUT_MS) / WAIT_PERIOD_MS)); if (err < 0) { // out of buffers, return #bytes written - if (err == status_t(NO_MORE_BUFFERS)) + if (err == status_t(NO_MORE_BUFFERS)) { break; - if (err == status_t(TIMED_OUT)) + } + if (err == status_t(TIMED_OUT)) { err = 0; + } return ssize_t(err); } diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 979ee37..3056b4c 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -54,7 +54,9 @@ status_t AudioTrack::getMinFrameCount( audio_stream_type_t streamType, uint32_t sampleRate) { - if (frameCount == NULL) return BAD_VALUE; + if (frameCount == NULL) { + return BAD_VALUE; + } // default to 0 in case of error *frameCount = 0; @@ -553,7 +555,9 @@ status_t AudioTrack::setSampleRate(uint32_t rate) return NO_INIT; } // Resampler implementation limits input sampling rate to 2 x output sampling rate. 
- if (rate == 0 || rate > afSamplingRate*2 ) return BAD_VALUE; + if (rate == 0 || rate > afSamplingRate*2 ) { + return BAD_VALUE; + } AutoMutex lock(mLock); mCblk->sampleRate = rate; @@ -620,7 +624,9 @@ status_t AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCou status_t AudioTrack::setMarkerPosition(uint32_t marker) { - if (mCbf == NULL) return INVALID_OPERATION; + if (mCbf == NULL) { + return INVALID_OPERATION; + } mMarkerPosition = marker; mMarkerReached = false; @@ -630,7 +636,9 @@ status_t AudioTrack::setMarkerPosition(uint32_t marker) status_t AudioTrack::getMarkerPosition(uint32_t *marker) const { - if (marker == NULL) return BAD_VALUE; + if (marker == NULL) { + return BAD_VALUE; + } *marker = mMarkerPosition; @@ -639,7 +647,9 @@ status_t AudioTrack::getMarkerPosition(uint32_t *marker) const status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod) { - if (mCbf == NULL) return INVALID_OPERATION; + if (mCbf == NULL) { + return INVALID_OPERATION; + } uint32_t curPosition; getPosition(&curPosition); @@ -651,7 +661,9 @@ status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod) status_t AudioTrack::getPositionUpdatePeriod(uint32_t *updatePeriod) const { - if (updatePeriod == NULL) return BAD_VALUE; + if (updatePeriod == NULL) { + return BAD_VALUE; + } *updatePeriod = mUpdatePeriod; @@ -660,16 +672,22 @@ status_t AudioTrack::getPositionUpdatePeriod(uint32_t *updatePeriod) const status_t AudioTrack::setPosition(uint32_t position) { - if (mIsTimed) return INVALID_OPERATION; + if (mIsTimed) { + return INVALID_OPERATION; + } AutoMutex lock(mLock); - if (!stopped_l()) return INVALID_OPERATION; + if (!stopped_l()) { + return INVALID_OPERATION; + } audio_track_cblk_t* cblk = mCblk; Mutex::Autolock _l(cblk->lock); - if (position > cblk->user) return BAD_VALUE; + if (position > cblk->user) { + return BAD_VALUE; + } cblk->server = position; android_atomic_or(CBLK_FORCEREADY, &cblk->flags); @@ -679,7 +697,9 @@ status_t AudioTrack::setPosition(uint32_t position) status_t AudioTrack::getPosition(uint32_t *position) { - if (position == NULL) return BAD_VALUE; + if (position == NULL) { + return BAD_VALUE; + } AutoMutex lock(mLock); *position = mFlushed ? 
0 : mCblk->server; @@ -690,7 +710,9 @@ status_t AudioTrack::reload() { AutoMutex lock(mLock); - if (!stopped_l()) return INVALID_OPERATION; + if (!stopped_l()) { + return INVALID_OPERATION; + } flush_l(); @@ -1060,8 +1082,12 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer) ssize_t AudioTrack::write(const void* buffer, size_t userSize) { - if (mSharedBuffer != 0) return INVALID_OPERATION; - if (mIsTimed) return INVALID_OPERATION; + if (mSharedBuffer != 0) { + return INVALID_OPERATION; + } + if (mIsTimed) { + return INVALID_OPERATION; + } if (ssize_t(userSize) < 0) { // Sanity-check: user is most-likely passing an error code, and it would @@ -1098,8 +1124,9 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize) status_t err = obtainBuffer(&audioBuffer, -1); if (err < 0) { // out of buffers, return #bytes written - if (err == status_t(NO_MORE_BUFFERS)) + if (err == status_t(NO_MORE_BUFFERS)) { break; + } return ssize_t(err); } @@ -1159,8 +1186,9 @@ status_t TimedAudioTrack::allocateTimedBuffer(size_t size, sp* buffer) cblk = temp; cblk->lock.unlock(); - if (result == OK) + if (result == OK) { result = mAudioTrack->allocateTimedBuffer(size, buffer); + } } return result; @@ -1218,7 +1246,9 @@ bool AudioTrack::processAudioBuffer(const sp& thread) if (cblk->server == cblk->frameCount) { mCbf(EVENT_BUFFER_END, mUserData, 0); } - if (mSharedBuffer != 0) return false; + if (mSharedBuffer != 0) { + return false; + } } } @@ -1275,7 +1305,9 @@ bool AudioTrack::processAudioBuffer(const sp& thread) } break; } - if (err == status_t(STOPPED)) return false; + if (err == status_t(STOPPED)) { + return false; + } // Divide buffer size by 2 to take into account the expansion // due to 8 to 16 bit conversion: the callback must fill only half @@ -1298,7 +1330,9 @@ bool AudioTrack::processAudioBuffer(const sp& thread) break; } - if (writtenSize > reqSize) writtenSize = reqSize; + if (writtenSize > reqSize) { + writtenSize = reqSize; + } if (mFormat == AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) { // 8 to 16 bit conversion, note that source and destination are the same address diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 384f268..9baa830 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -423,7 +423,9 @@ status_t AudioFlinger::dump(int fd, const Vector& args) dumpTee(fd, mRecordTeeSource); } - if (locked) mLock.unlock(); + if (locked) { + mLock.unlock(); + } } return NO_ERROR; } @@ -2629,7 +2631,9 @@ bool AudioFlinger::PlaybackThread::threadLoop() clearOutputTracks(); - if (exitPending()) break; + if (exitPending()) { + break; + } releaseWakeLock_l(); // wait until we have something to do... 
@@ -2822,7 +2826,9 @@ void AudioFlinger::PlaybackThread::threadLoop_write() bytesWritten = (int)mOutput->stream->write(mOutput->stream, mMixBuffer, mixBufferSize); } - if (bytesWritten > 0) mBytesWritten += mixBufferSize; + if (bytesWritten > 0) { + mBytesWritten += mixBufferSize; + } mNumWrites++; mInWrite = false; } @@ -2963,7 +2969,9 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac for (size_t i=0 ; i t = mActiveTracks[i].promote(); - if (t == 0) continue; + if (t == 0) { + continue; + } // this const just means the local variable doesn't change Track* const track = t.get(); @@ -3243,11 +3251,17 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // Convert volumes from 8.24 to 4.12 format // This additional clamping is needed in case chain->setVolume_l() overshot vl = (vl + (1 << 11)) >> 12; - if (vl > MAX_GAIN_INT) vl = MAX_GAIN_INT; + if (vl > MAX_GAIN_INT) { + vl = MAX_GAIN_INT; + } vr = (vr + (1 << 11)) >> 12; - if (vr > MAX_GAIN_INT) vr = MAX_GAIN_INT; + if (vr > MAX_GAIN_INT) { + vr = MAX_GAIN_INT; + } - if (va > MAX_GAIN_INT) va = MAX_GAIN_INT; // va is uint32_t, so no need to check for - + if (va > MAX_GAIN_INT) { + va = MAX_GAIN_INT; // va is uint32_t, so no need to check for - + } // XXX: these things DON'T need to be done each time mAudioMixer->setBufferProvider(name, track); @@ -3376,7 +3390,9 @@ track_is_ready: ; ALOG_ASSERT(i < count); resetMask &= ~(1 << i); sp t = mActiveTracks[i].promote(); - if (t == 0) continue; + if (t == 0) { + continue; + } Track* track = t.get(); ALOG_ASSERT(track->isFastTrack() && track->isStopped()); track->reset(); @@ -3578,7 +3594,9 @@ bool AudioFlinger::MixerThread::checkForNewParameters_l() mAudioMixer = new AudioMixer(mNormalFrameCount, mSampleRate); for (size_t i = 0; i < mTracks.size() ; i++) { int name = getTrackName_l(mTracks[i]->mChannelMask, mTracks[i]->mSessionId); - if (name < 0) break; + if (name < 0) { + break; + } mTracks[i]->mName = name; // limit track sample rate to 2 x new output sample rate if (mTracks[i]->mCblk->sampleRate > 2 * sampleRate()) { @@ -3753,7 +3771,9 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep if (mActiveTracks.size() != 0) { sp t = mActiveTracks[0].promote(); // The track died recently - if (t == 0) return MIXER_IDLE; + if (t == 0) { + return MIXER_IDLE; + } Track* const track = t.get(); audio_track_cblk_t* cblk = track->cblk(); @@ -3792,10 +3812,14 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep float v = mMasterVolume * typeVolume; uint32_t vlr = cblk->getVolumeLR(); float v_clamped = v * (vlr & 0xFFFF); - if (v_clamped > MAX_GAIN) v_clamped = MAX_GAIN; + if (v_clamped > MAX_GAIN) { + v_clamped = MAX_GAIN; + } left = v_clamped/MAX_GAIN; v_clamped = v * (vlr >> 16); - if (v_clamped > MAX_GAIN) v_clamped = MAX_GAIN; + if (v_clamped > MAX_GAIN) { + v_clamped = MAX_GAIN; + } right = v_clamped/MAX_GAIN; } @@ -4587,7 +4611,9 @@ size_t AudioFlinger::PlaybackThread::Track::framesReady() const { // Don't call for fast tracks; the framesReady() could result in priority inversion bool AudioFlinger::PlaybackThread::Track::isReady() const { - if (mFillingUpStatus != FS_FILLING || isStopped() || isPausing()) return true; + if (mFillingUpStatus != FS_FILLING || isStopped() || isPausing()) { + return true; + } if (framesReady() >= mCblk->frameCount || (mCblk->flags & CBLK_FORCEREADY)) { @@ -5429,7 +5455,9 @@ status_t 
AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvi // Check if last stepServer failed, try to step now if (mStepServerFailed) { - if (!step()) goto getNextBuffer_exit; + if (!step()) { + goto getNextBuffer_exit; + } ALOGV("stepServer recovered"); mStepServerFailed = false; } @@ -6090,7 +6118,9 @@ bool AudioFlinger::RecordThread::threadLoop() if (mActiveTrack == 0 && mConfigEvents.isEmpty()) { standby(); - if (exitPending()) break; + if (exitPending()) { + break; + } releaseWakeLock_l(); ALOGV("RecordThread: loop stopping"); @@ -8122,7 +8152,9 @@ status_t AudioFlinger::PlaybackThread::addEffectChain_l(const sp& c // indicate all active tracks in the chain for (size_t i = 0 ; i < mActiveTracks.size() ; ++i) { sp track = mActiveTracks[i].promote(); - if (track == 0) continue; + if (track == 0) { + continue; + } if (session == track->sessionId()) { ALOGV("addEffectChain_l() activating track %p on session %d", track.get(), session); chain->incActiveTrackCnt(); @@ -8145,7 +8177,9 @@ status_t AudioFlinger::PlaybackThread::addEffectChain_l(const sp& c size_t size = mEffectChains.size(); size_t i = 0; for (i = 0; i < size; i++) { - if (mEffectChains[i]->sessionId() < session) break; + if (mEffectChains[i]->sessionId() < session) { + break; + } } mEffectChains.insertAt(chain, i); checkSuspendOnAddEffectChain_l(chain); @@ -8165,7 +8199,9 @@ size_t AudioFlinger::PlaybackThread::removeEffectChain_l(const sp& // detach all active tracks from the chain for (size_t i = 0 ; i < mActiveTracks.size() ; ++i) { sp track = mActiveTracks[i].promote(); - if (track == 0) continue; + if (track == 0) { + continue; + } if (session == track->sessionId()) { ALOGV("removeEffectChain_l(): stopping track on chain %p for session Id: %d", chain.get(), session); @@ -8332,11 +8368,15 @@ status_t AudioFlinger::EffectModule::addHandle(EffectHandle *handle) size_t i; for (i = 0; i < size; i++) { EffectHandle *h = mHandles[i]; - if (h == NULL || h->destroyed_l()) continue; + if (h == NULL || h->destroyed_l()) { + continue; + } // first non destroyed handle is considered in control if (controlHandle == NULL) controlHandle = h; - if (h->priority() <= priority) break; + if (h->priority() <= priority) { + break; + } } // if inserted in first place, move effect control from previous owner to this handle if (i == 0) { @@ -8361,7 +8401,9 @@ size_t AudioFlinger::EffectModule::removeHandle(EffectHandle *handle) size_t size = mHandles.size(); size_t i; for (i = 0; i < size; i++) { - if (mHandles[i] == handle) break; + if (mHandles[i] == handle) { + break; + } } if (i == size) { return size; @@ -9070,8 +9112,12 @@ AudioFlinger::EffectHandle::~EffectHandle() status_t AudioFlinger::EffectHandle::enable() { ALOGV("enable %p", this); - if (!mHasControl) return INVALID_OPERATION; - if (mEffect == 0) return DEAD_OBJECT; + if (!mHasControl) { + return INVALID_OPERATION; + } + if (mEffect == 0) { + return DEAD_OBJECT; + } if (mEnabled) { return NO_ERROR; @@ -9102,8 +9148,12 @@ status_t AudioFlinger::EffectHandle::enable() status_t AudioFlinger::EffectHandle::disable() { ALOGV("disable %p", this); - if (!mHasControl) return INVALID_OPERATION; - if (mEffect == 0) return DEAD_OBJECT; + if (!mHasControl) { + return INVALID_OPERATION; + } + if (mEffect == 0) { + return DEAD_OBJECT; + } if (!mEnabled) { return NO_ERROR; @@ -9170,8 +9220,12 @@ status_t AudioFlinger::EffectHandle::command(uint32_t cmdCode, if (!mHasControl && cmdCode != EFFECT_CMD_GET_PARAM) { return INVALID_OPERATION; } - if (mEffect == 0) return DEAD_OBJECT; - if 
(mClient == 0) return INVALID_OPERATION; + if (mEffect == 0) { + return DEAD_OBJECT; + } + if (mClient == 0) { + return INVALID_OPERATION; + } // handle commands that are not forwarded transparently to effect engine if (cmdCode == EFFECT_CMD_SET_PARAM_COMMIT) { @@ -9641,7 +9695,9 @@ bool AudioFlinger::EffectChain::setVolume_l(uint32_t *left, uint32_t *right) uint32_t rVol = newRight; for (size_t i = 0; i < size; i++) { - if ((int)i == ctrlIdx) continue; + if ((int)i == ctrlIdx) { + continue; + } // this also works for ctrlIdx == -1 when there is no volume controller if ((int)i > ctrlIdx) { lVol = *left; -- cgit v1.1 From f6f0f0e313f4d4dc7035e842270cd31303bd91e7 Mon Sep 17 00:00:00 2001 From: James Dong Date: Fri, 16 Nov 2012 14:31:15 -0800 Subject: Fix a crash when the stop might be called due to some error before start in RTSPSource o related-to-bug: 7507224 Change-Id: Ic8bfec13097b824ba337a01c9b00c98af2a33f43 --- media/libmediaplayerservice/nuplayer/RTSPSource.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp index 5a7a785..6df2ddd 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp @@ -57,9 +57,7 @@ NuPlayer::RTSPSource::RTSPSource( } NuPlayer::RTSPSource::~RTSPSource() { - if (mLooper != NULL) { - mLooper->stop(); - } + mLooper->stop(); } void NuPlayer::RTSPSource::start() { @@ -86,6 +84,9 @@ void NuPlayer::RTSPSource::start() { } void NuPlayer::RTSPSource::stop() { + if (mLooper == NULL) { + return; + } sp msg = new AMessage(kWhatDisconnect, mReflector->id()); sp dummy; -- cgit v1.1 From b603744e96b07b1d5bf745bde593fb2c025cefcf Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 14 Nov 2012 13:42:25 -0800 Subject: Don't use control block frame count after create This is part of a series to clean up the control block. 
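To make the intent of this series concrete, here is a minimal sketch of the access pattern the change moves toward (condensed and paraphrased from the hunks below, not additional patch content): the server-adjusted frame count is read from the shared control block exactly once, at create time, and every later query is served from the client-side copy.

    // at createTrack_l(): read the value the server wrote, then cache it
    size_t temp = cblk->frameCount_;   // written by AudioFlinger at create time
    mFrameCount = temp;                // cached; cblk->frameCount_ is not read again

    // afterwards the accessor no longer touches shared memory
    size_t AudioTrack::frameCount() const
    {
        return mFrameCount;            // previously: return mCblk->frameCount;
    }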
Change-Id: I7f4cb05aef63053f8e2ab05b286d302260ef4758 --- include/media/AudioTrack.h | 4 +- include/private/media/AudioTrackShared.h | 29 +++++---- media/libmedia/AudioRecord.cpp | 8 +-- media/libmedia/AudioTrack.cpp | 102 +++++++++++++++++-------------- services/audioflinger/AudioFlinger.cpp | 35 +++++------ services/audioflinger/AudioFlinger.h | 2 + 6 files changed, 100 insertions(+), 80 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index f1b26b5..61214ec 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -510,7 +510,9 @@ protected: float mVolume[2]; float mSendLevel; - uint32_t mFrameCount; + size_t mFrameCount; // corresponds to current IAudioTrack + size_t mReqFrameCount; // frame count to request the next time a new + // IAudioTrack is needed audio_track_cblk_t* mCblk; // re-load after mLock.unlock() diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index bbc5e26..48b6b21 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -55,7 +55,10 @@ struct audio_track_cblk_t int mPad1; // unused, but preserves cache line alignment - uint32_t frameCount; + size_t frameCount_; // used during creation to pass actual track buffer size + // from AudioFlinger to client, and not referenced again + // FIXME remove here and replace by createTrack() in/out parameter + // renamed to "_" to detect incorrect use // Cache line boundary (32 bytes) @@ -97,19 +100,23 @@ public: // called by client only, where client includes regular // AudioTrack and AudioFlinger::PlaybackThread::OutputTrack - uint32_t stepUserIn(uint32_t frameCount) { return stepUser(frameCount, false); } - uint32_t stepUserOut(uint32_t frameCount) { return stepUser(frameCount, true); } + uint32_t stepUserIn(size_t stepCount, size_t frameCount) { return stepUser(stepCount, frameCount, false); } + uint32_t stepUserOut(size_t stepCount, size_t frameCount) { return stepUser(stepCount, frameCount, true); } - bool stepServer(uint32_t frameCount, bool isOut); + bool stepServer(size_t stepCount, size_t frameCount, bool isOut); // if there is a shared buffer, "buffers" is the value of pointer() for the shared // buffer, otherwise "buffers" points immediately after the control block void* buffer(void *buffers, uint32_t frameSize, uint32_t offset) const; - uint32_t framesAvailableIn() { return framesAvailable(false); } - uint32_t framesAvailableOut() { return framesAvailable(true); } - uint32_t framesAvailableIn_l() { return framesAvailable_l(false); } - uint32_t framesAvailableOut_l() { return framesAvailable_l(true); } + uint32_t framesAvailableIn(size_t frameCount) + { return framesAvailable(frameCount, false); } + uint32_t framesAvailableOut(size_t frameCount) + { return framesAvailable(frameCount, true); } + uint32_t framesAvailableIn_l(size_t frameCount) + { return framesAvailable_l(frameCount, false); } + uint32_t framesAvailableOut_l(size_t frameCount) + { return framesAvailable_l(frameCount, true); } uint32_t framesReadyIn() { return framesReady(false); } uint32_t framesReadyOut() { return framesReady(true); } @@ -140,9 +147,9 @@ public: private: // isOut == true means AudioTrack, isOut == false means AudioRecord - uint32_t stepUser(uint32_t frameCount, bool isOut); - uint32_t framesAvailable(bool isOut); - uint32_t framesAvailable_l(bool isOut); + uint32_t stepUser(size_t stepCount, size_t frameCount, bool isOut); + uint32_t framesAvailable(size_t frameCount, bool isOut); + uint32_t 
framesAvailable_l(size_t frameCount, bool isOut); uint32_t framesReady(bool isOut); }; diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 0587651..0731f00 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -216,7 +216,7 @@ status_t AudioRecord::set( mFormat = format; // Update buffer size in case it has been limited by AudioFlinger during track creation - mFrameCount = mCblk->frameCount; + mFrameCount = mCblk->frameCount_; mChannelCount = (uint8_t)channelCount; mChannelMask = channelMask; @@ -568,7 +568,7 @@ create_new_record: } uint32_t u = cblk->user; - uint32_t bufferEnd = cblk->userBase + cblk->frameCount; + uint32_t bufferEnd = cblk->userBase + mFrameCount; if (framesReq > bufferEnd - u) { framesReq = bufferEnd - u; @@ -584,7 +584,7 @@ create_new_record: void AudioRecord::releaseBuffer(Buffer* audioBuffer) { AutoMutex lock(mLock); - mCblk->stepUserIn(audioBuffer->frameCount); + mCblk->stepUserIn(audioBuffer->frameCount, mFrameCount); } audio_io_handle_t AudioRecord::getInput() const @@ -746,7 +746,7 @@ bool AudioRecord::processAudioBuffer(const sp& thread) // Manage overrun callback - if (active && (cblk->framesAvailableIn() == 0)) { + if (active && (cblk->framesAvailableIn(mFrameCount) == 0)) { // The value of active is stale, but we are almost sure to be active here because // otherwise we would have exited when obtainBuffer returned STOPPED earlier. ALOGV("Overrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 979ee37..0be5534 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -257,6 +257,7 @@ status_t AudioTrack::set( mVolume[RIGHT] = 1.0f; mSendLevel = 0.0f; mFrameCount = frameCount; + mReqFrameCount = frameCount; mNotificationFramesReq = notificationFrames; mSessionId = sessionId; mAuxEffectId = 0; @@ -344,7 +345,7 @@ int AudioTrack::channelCount() const size_t AudioTrack::frameCount() const { - return mCblk->frameCount; + return mFrameCount; } sp& AudioTrack::sharedBuffer() @@ -596,17 +597,17 @@ status_t AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCou } if (loopStart >= loopEnd || - loopEnd - loopStart > cblk->frameCount || + loopEnd - loopStart > mFrameCount || cblk->server > loopStart) { ALOGE("setLoop invalid value: loopStart %d, loopEnd %d, loopCount %d, framecount %d, " - "user %d", loopStart, loopEnd, loopCount, cblk->frameCount, cblk->user); + "user %d", loopStart, loopEnd, loopCount, mFrameCount, cblk->user); return BAD_VALUE; } - if ((mSharedBuffer != 0) && (loopEnd > cblk->frameCount)) { + if ((mSharedBuffer != 0) && (loopEnd > mFrameCount)) { ALOGE("setLoop invalid value: loop markers beyond data: loopStart %d, loopEnd %d, " "framecount %d", - loopStart, loopEnd, cblk->frameCount); + loopStart, loopEnd, mFrameCount); return BAD_VALUE; } @@ -695,7 +696,7 @@ status_t AudioTrack::reload() flush_l(); audio_track_cblk_t* cblk = mCblk; - cblk->stepUserOut(cblk->frameCount); + cblk->stepUserOut(mFrameCount, mFrameCount); return NO_ERROR; } @@ -889,17 +890,25 @@ status_t AudioTrack::createTrack_l( mCblkMemory = iMem; audio_track_cblk_t* cblk = static_cast(iMem->pointer()); mCblk = cblk; + size_t temp = cblk->frameCount_; + if (temp < frameCount || (frameCount == 0 && temp == 0)) { + // In current design, AudioTrack client checks and ensures frame count validity before + // passing it to AudioFlinger so AudioFlinger should not return a different value 
except + // for fast track as it uses a special method of assigning frame count. + ALOGW("Requested frameCount %u but received frameCount %u", frameCount, temp); + } + frameCount = temp; if (flags & AUDIO_OUTPUT_FLAG_FAST) { if (trackFlags & IAudioFlinger::TRACK_FAST) { - ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", cblk->frameCount); + ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", frameCount); } else { - ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %u", cblk->frameCount); + ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %u", frameCount); // once denied, do not request again if IAudioTrack is re-created flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST); mFlags = flags; } if (sharedBuffer == 0) { - mNotificationFramesAct = cblk->frameCount/2; + mNotificationFramesAct = frameCount/2; } } if (sharedBuffer == 0) { @@ -907,7 +916,7 @@ status_t AudioTrack::createTrack_l( } else { mBuffers = sharedBuffer->pointer(); // Force buffer full condition as data is already present in shared memory - cblk->stepUserOut(cblk->frameCount); + cblk->stepUserOut(frameCount, frameCount); } cblk->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) | @@ -918,11 +927,12 @@ status_t AudioTrack::createTrack_l( cblk->waitTimeMs = 0; mRemainingFrames = mNotificationFramesAct; // FIXME don't believe this lie - mLatency = afLatency + (1000*cblk->frameCount) / sampleRate; + mLatency = afLatency + (1000*frameCount) / sampleRate; + mFrameCount = frameCount; // If IAudioTrack is re-created, don't let the requested frameCount // decrease. This can confuse clients that cache frameCount(). - if (cblk->frameCount > mFrameCount) { - mFrameCount = cblk->frameCount; + if (frameCount > mReqFrameCount) { + mReqFrameCount = frameCount; } return NO_ERROR; } @@ -939,7 +949,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) audioBuffer->frameCount = 0; audioBuffer->size = 0; - uint32_t framesAvail = cblk->framesAvailableOut(); + uint32_t framesAvail = cblk->framesAvailableOut(mFrameCount); cblk->lock.lock(); if (cblk->flags & CBLK_INVALID) { @@ -1015,7 +1025,7 @@ create_new_track: } // read the server count again start_loop_here: - framesAvail = cblk->framesAvailableOut_l(); + framesAvail = cblk->framesAvailableOut_l(mFrameCount); } cblk->lock.unlock(); } @@ -1027,7 +1037,7 @@ create_new_track: } uint32_t u = cblk->user; - uint32_t bufferEnd = cblk->userBase + cblk->frameCount; + uint32_t bufferEnd = cblk->userBase + mFrameCount; if (framesReq > bufferEnd - u) { framesReq = bufferEnd - u; @@ -1044,7 +1054,7 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer) { AutoMutex lock(mLock); audio_track_cblk_t* cblk = mCblk; - cblk->stepUserOut(audioBuffer->frameCount); + cblk->stepUserOut(audioBuffer->frameCount, mFrameCount); if (audioBuffer->frameCount > 0) { // restart track if it was disabled by audioflinger due to previous underrun if (mActive && (cblk->flags & CBLK_DISABLED)) { @@ -1211,11 +1221,11 @@ bool AudioTrack::processAudioBuffer(const sp& thread) // so all cblk references might still refer to old shared memory, but that should be benign // Manage underrun callback - if (active && (cblk->framesAvailableOut() == cblk->frameCount)) { + if (active && (cblk->framesAvailableOut(mFrameCount) == mFrameCount)) { ALOGV("Underrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); if (!(android_atomic_or(CBLK_UNDERRUN, &cblk->flags) & CBLK_UNDERRUN)) { mCbf(EVENT_UNDERRUN, mUserData, 0); - if (cblk->server == 
cblk->frameCount) { + if (cblk->server == mFrameCount) { mCbf(EVENT_BUFFER_END, mUserData, 0); } if (mSharedBuffer != 0) return false; @@ -1355,7 +1365,7 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart cblk->sampleRate, mFormat, mChannelMask, - mFrameCount, + mReqFrameCount, // so that frame count never goes down mFlags, mSharedBuffer, getOutput_l()); @@ -1379,19 +1389,19 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart if (mSharedBuffer == 0) { uint32_t frames = 0; if (user > server) { - frames = ((user - server) > newCblk->frameCount) ? - newCblk->frameCount : (user - server); + frames = ((user - server) > mFrameCount) ? + mFrameCount : (user - server); memset(mBuffers, 0, frames * mFrameSizeAF); } // restart playback even if buffer is not completely filled. android_atomic_or(CBLK_FORCEREADY, &newCblk->flags); // stepUser() clears CBLK_UNDERRUN flag enabling underrun callbacks to // the client - newCblk->stepUserOut(frames); + newCblk->stepUserOut(frames, mFrameCount); } } if (mSharedBuffer != 0) { - newCblk->stepUserOut(newCblk->frameCount); + newCblk->stepUserOut(mFrameCount, mFrameCount); } if (mActive) { result = mAudioTrack->start(); @@ -1429,7 +1439,7 @@ status_t AudioTrack::dump(int fd, const Vector& args) const mVolume[0], mVolume[1]); result.append(buffer); snprintf(buffer, 255, " format(%d), channel count(%d), frame count(%d)\n", mFormat, - mChannelCount, cblk->frameCount); + mChannelCount, mFrameCount); result.append(buffer); snprintf(buffer, 255, " sample rate(%u), status(%d), muted(%d)\n", (cblk == 0) ? 0 : cblk->sampleRate, mStatus, mMuted); @@ -1494,18 +1504,18 @@ void AudioTrack::AudioTrackThread::resume() audio_track_cblk_t::audio_track_cblk_t() : lock(Mutex::SHARED), cv(Condition::SHARED), user(0), server(0), - userBase(0), serverBase(0), frameCount(0), + userBase(0), serverBase(0), frameCount_(0), loopStart(UINT_MAX), loopEnd(UINT_MAX), loopCount(0), mVolumeLR(0x10001000), mSendLevel(0), flags(0) { } -uint32_t audio_track_cblk_t::stepUser(uint32_t frameCount, bool isOut) +uint32_t audio_track_cblk_t::stepUser(size_t stepCount, size_t frameCount, bool isOut) { - ALOGV("stepuser %08x %08x %d", user, server, frameCount); + ALOGV("stepuser %08x %08x %d", user, server, stepCount); uint32_t u = user; - u += frameCount; + u += stepCount; // Ensure that user is never ahead of server for AudioRecord if (isOut) { // If stepServer() has been called once, switch to normal obtainBuffer() timeout period @@ -1517,15 +1527,14 @@ uint32_t audio_track_cblk_t::stepUser(uint32_t frameCount, bool isOut) u = server; } - uint32_t fc = this->frameCount; - if (u >= fc) { + if (u >= frameCount) { // common case, user didn't just wrap - if (u - fc >= userBase ) { - userBase += fc; + if (u - frameCount >= userBase ) { + userBase += frameCount; } - } else if (u >= userBase + fc) { + } else if (u >= userBase + frameCount) { // user just wrapped - userBase += fc; + userBase += frameCount; } user = u; @@ -1538,9 +1547,9 @@ uint32_t audio_track_cblk_t::stepUser(uint32_t frameCount, bool isOut) return u; } -bool audio_track_cblk_t::stepServer(uint32_t frameCount, bool isOut) +bool audio_track_cblk_t::stepServer(size_t stepCount, size_t frameCount, bool isOut) { - ALOGV("stepserver %08x %08x %d", user, server, frameCount); + ALOGV("stepserver %08x %08x %d", user, server, stepCount); if (!tryLock()) { ALOGW("stepServer() could not lock cblk"); @@ -1550,7 +1559,7 @@ bool audio_track_cblk_t::stepServer(uint32_t frameCount, bool isOut) 
uint32_t s = server; bool flushed = (s == user); - s += frameCount; + s += stepCount; if (isOut) { // Mark that we have read the first buffer so that next time stepUser() is called // we switch to normal obtainBuffer() timeout period @@ -1576,15 +1585,14 @@ bool audio_track_cblk_t::stepServer(uint32_t frameCount, bool isOut) } } - uint32_t fc = this->frameCount; - if (s >= fc) { + if (s >= frameCount) { // common case, server didn't just wrap - if (s - fc >= serverBase ) { - serverBase += fc; + if (s - frameCount >= serverBase ) { + serverBase += frameCount; } - } else if (s >= serverBase + fc) { + } else if (s >= serverBase + frameCount) { // server just wrapped - serverBase += fc; + serverBase += frameCount; } server = s; @@ -1601,13 +1609,13 @@ void* audio_track_cblk_t::buffer(void *buffers, size_t frameSize, uint32_t offse return (int8_t *)buffers + (offset - userBase) * frameSize; } -uint32_t audio_track_cblk_t::framesAvailable(bool isOut) +uint32_t audio_track_cblk_t::framesAvailable(size_t frameCount, bool isOut) { Mutex::Autolock _l(lock); - return framesAvailable_l(isOut); + return framesAvailable_l(frameCount, isOut); } -uint32_t audio_track_cblk_t::framesAvailable_l(bool isOut) +uint32_t audio_track_cblk_t::framesAvailable_l(size_t frameCount, bool isOut) { uint32_t u = user; uint32_t s = server; diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 384f268..97bbd97 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1757,7 +1757,7 @@ sp AudioFlinger::PlaybackThread::createTrac ( (tid != -1) && ((frameCount == 0) || - (frameCount >= (int) (mFrameCount * kFastTrackMultiplier))) + (frameCount >= (mFrameCount * kFastTrackMultiplier))) ) ) && // PCM data @@ -4202,6 +4202,7 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mChannelCount(popcount(channelMask)), mFrameSize(audio_is_linear_pcm(format) ? mChannelCount * audio_bytes_per_sample(format) : sizeof(int8_t)), + mFrameCount(frameCount), mStepServerFailed(false), mSessionId(sessionId) { @@ -4237,7 +4238,7 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( if (mCblk != NULL) { new(mCblk) audio_track_cblk_t(); // clear all buffers - mCblk->frameCount = frameCount; + mCblk->frameCount_ = frameCount; mCblk->sampleRate = sampleRate; // uncomment the following lines to quickly test 32-bit wraparound // mCblk->user = 0xffff0000; @@ -4293,7 +4294,7 @@ bool AudioFlinger::ThreadBase::TrackBase::step() { bool result; audio_track_cblk_t* cblk = this->cblk(); - result = cblk->stepServer(mStepCount, isOut()); + result = cblk->stepServer(mStepCount, mFrameCount, isOut()); if (!result) { ALOGV("stepServer failed acquiring cblk mutex"); mStepServerFailed = true; @@ -4508,7 +4509,7 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) mChannelMask, mSessionId, mStepCount, - mCblk->frameCount, + mFrameCount, stateChar, mMute, mFillingUpStatus, @@ -4551,7 +4552,7 @@ status_t AudioFlinger::PlaybackThread::Track::getNextBuffer( if (CC_LIKELY(framesReady)) { uint32_t s = cblk->server; - uint32_t bufferEnd = cblk->serverBase + cblk->frameCount; + uint32_t bufferEnd = cblk->serverBase + mFrameCount; bufferEnd = (cblk->loopEnd < bufferEnd) ? 
cblk->loopEnd : bufferEnd; if (framesReq > framesReady) { @@ -4589,7 +4590,7 @@ size_t AudioFlinger::PlaybackThread::Track::framesReady() const { bool AudioFlinger::PlaybackThread::Track::isReady() const { if (mFillingUpStatus != FS_FILLING || isStopped() || isPausing()) return true; - if (framesReady() >= mCblk->frameCount || + if (framesReady() >= mFrameCount || (mCblk->flags & CBLK_FORCEREADY)) { mFillingUpStatus = FS_FILLED; android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags); @@ -5435,11 +5436,11 @@ status_t AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvi } // FIXME lock is not actually held, so overrun is possible - framesAvail = cblk->framesAvailableIn_l(); + framesAvail = cblk->framesAvailableIn_l(mFrameCount); if (CC_LIKELY(framesAvail)) { uint32_t s = cblk->server; - uint32_t bufferEnd = cblk->serverBase + cblk->frameCount; + uint32_t bufferEnd = cblk->serverBase + mFrameCount; if (framesReq > framesAvail) { framesReq = framesAvail; @@ -5508,7 +5509,7 @@ void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size) mCblk->sampleRate, mCblk->server, mCblk->user, - mCblk->frameCount); + mFrameCount); } bool AudioFlinger::RecordThread::RecordTrack::isOut() const @@ -5585,9 +5586,9 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr sp thread = mThread.promote(); if (thread != 0) { MixerThread *mixerThread = (MixerThread *)thread.get(); - if (mCblk->frameCount > frames){ + if (mFrameCount > frames){ if (mBufferQueue.size() < kMaxOverFlowBuffers) { - uint32_t startFrames = (mCblk->frameCount - frames); + uint32_t startFrames = (mFrameCount - frames); pInBuffer = new Buffer; pInBuffer->mBuffer = new int16_t[startFrames * channelCount]; pInBuffer->frameCount = startFrames; @@ -5633,7 +5634,7 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr uint32_t outFrames = pInBuffer->frameCount > mOutBuffer.frameCount ? mOutBuffer.frameCount : pInBuffer->frameCount; memcpy(mOutBuffer.raw, pInBuffer->raw, outFrames * channelCount * sizeof(int16_t)); - mCblk->stepUserOut(outFrames); + mCblk->stepUserOut(outFrames, mFrameCount); pInBuffer->frameCount -= outFrames; pInBuffer->i16 += outFrames * channelCount; mOutBuffer.frameCount -= outFrames; @@ -5677,8 +5678,8 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr // If no more buffers are pending, fill output track buffer to make sure it is started // by output mixer. 
if (frames == 0 && mBufferQueue.size() == 0) { - if (mCblk->user < mCblk->frameCount) { - frames = mCblk->frameCount - mCblk->user; + if (mCblk->user < mFrameCount) { + frames = mFrameCount - mCblk->user; pInBuffer = new Buffer; pInBuffer->mBuffer = new int16_t[frames * channelCount]; pInBuffer->frameCount = frames; @@ -5704,7 +5705,7 @@ status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( ALOGVV("OutputTrack::obtainBuffer user %d, server %d", cblk->user, cblk->server); buffer->frameCount = 0; - uint32_t framesAvail = cblk->framesAvailableOut(); + uint32_t framesAvail = cblk->framesAvailableOut(mFrameCount); if (framesAvail == 0) { @@ -5722,7 +5723,7 @@ status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( } // read the server count again start_loop_here: - framesAvail = cblk->framesAvailableOut_l(); + framesAvail = cblk->framesAvailableOut_l(mFrameCount); } } @@ -5735,7 +5736,7 @@ status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( } uint32_t u = cblk->user; - uint32_t bufferEnd = cblk->userBase + cblk->frameCount; + uint32_t bufferEnd = cblk->userBase + mFrameCount; if (framesReq > bufferEnd - u) { framesReq = bufferEnd - u; diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 830dfe9..75bfcfe 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -478,6 +478,8 @@ private: const size_t mFrameSize; // AudioFlinger's view of frame size in shared memory, // where for AudioTrack (but not AudioRecord), // 8-bit PCM samples are stored as 16-bit + const size_t mFrameCount;// size of track buffer given at createTrack() or + // openRecord(), and then adjusted as needed bool mStepServerFailed; const int mSessionId; Vector < sp >mSyncEvents; -- cgit v1.1 From a6b47a17fb1288936b491f30cd751172a572df5c Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Mon, 19 Nov 2012 09:49:18 -0800 Subject: delete -> free Strings duplicated with strdup() should be free()d, not deleted. Change-Id: I42bb3df9625bb8d35f80b02d15364b94c36496f8 --- media/libmedia/SoundPool.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index 204e0ce..ee70ef7 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -489,7 +489,7 @@ Sample::~Sample() ::close(mFd); } mData.clear(); - delete mUrl; + free(mUrl); } status_t Sample::doLoad() -- cgit v1.1 From a42ff007a17d63df22c60dd5e5fd811ee45ca1b3 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 14 Nov 2012 12:47:55 -0800 Subject: Clean up channel count and channel mask Channel count is uint32_t. Remove redundant mask parameter to AudioTrack::createTrack_l() and AudioRecord::openRecord_l(). 
Change-Id: I5dc2b18eb609b2c0dc3091994cbaa4628062c17f --- include/media/AudioRecord.h | 3 +-- include/media/AudioTrack.h | 3 +-- media/libmedia/AudioRecord.cpp | 20 ++++++++------------ media/libmedia/AudioTrack.cpp | 24 ++++++++++-------------- services/audioflinger/AudioFlinger.cpp | 8 ++++---- services/audioflinger/AudioFlinger.h | 6 +++--- 6 files changed, 27 insertions(+), 37 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index cd7ff92..ae444c3 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -183,7 +183,7 @@ public: /* getters, see constructor and set() */ audio_format_t format() const; - int channelCount() const; + uint32_t channelCount() const; size_t frameCount() const; size_t frameSize() const { return mFrameSize; } audio_source_t inputSource() const; @@ -351,7 +351,6 @@ private: status_t openRecord_l(uint32_t sampleRate, audio_format_t format, - audio_channel_mask_t channelMask, size_t frameCount, audio_io_handle_t input); audio_io_handle_t getInput_l(); diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 61214ec..f1b77ab 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -223,7 +223,7 @@ public: audio_stream_type_t streamType() const; audio_format_t format() const; - int channelCount() const; + uint32_t channelCount() const; uint32_t frameCount() const; /* Return channelCount * (bit depth per channel / 8). @@ -493,7 +493,6 @@ protected: status_t createTrack_l(audio_stream_type_t streamType, uint32_t sampleRate, audio_format_t format, - audio_channel_mask_t channelMask, size_t frameCount, audio_output_flags_t flags, const sp& sharedBuffer, diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 2a5a996..c2ef68c 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -63,7 +63,7 @@ status_t AudioRecord::getMinFrameCount( size <<= 1; if (audio_is_linear_pcm(format)) { - int channelCount = popcount(channelMask); + uint32_t channelCount = popcount(channelMask); size /= channelCount * audio_bytes_per_sample(format); } @@ -162,8 +162,9 @@ status_t AudioRecord::set( if (!audio_is_input_channel(channelMask)) { return BAD_VALUE; } - - int channelCount = popcount(channelMask); + mChannelMask = channelMask; + uint32_t channelCount = popcount(channelMask); + mChannelCount = channelCount; if (sessionId == 0 ) { mSessionId = AudioSystem::newAudioSessionId(); @@ -201,8 +202,7 @@ status_t AudioRecord::set( } // create the IAudioRecord - status = openRecord_l(sampleRate, format, channelMask, - frameCount, input); + status = openRecord_l(sampleRate, format, frameCount, input); if (status != NO_ERROR) { return status; } @@ -217,8 +217,6 @@ status_t AudioRecord::set( mFormat = format; // Update buffer size in case it has been limited by AudioFlinger during track creation mFrameCount = mCblk->frameCount_; - mChannelCount = (uint8_t)channelCount; - mChannelMask = channelMask; if (audio_is_linear_pcm(mFormat)) { mFrameSize = channelCount * audio_bytes_per_sample(format); @@ -261,7 +259,7 @@ audio_format_t AudioRecord::format() const return mFormat; } -int AudioRecord::channelCount() const +uint32_t AudioRecord::channelCount() const { return mChannelCount; } @@ -432,7 +430,6 @@ unsigned int AudioRecord::getInputFramesLost() const status_t AudioRecord::openRecord_l( uint32_t sampleRate, audio_format_t format, - audio_channel_mask_t channelMask, size_t frameCount, audio_io_handle_t input) { @@ -449,7 +446,7 @@ status_t 
AudioRecord::openRecord_l( int originalSessionId = mSessionId; sp record = audioFlinger->openRecord(getpid(), input, sampleRate, format, - channelMask, + mChannelMask, frameCount, IAudioFlinger::TRACK_DEFAULT, tid, @@ -784,8 +781,7 @@ status_t AudioRecord::restoreRecord_l(audio_track_cblk_t*& refCblk) // if the new IAudioRecord is created, openRecord_l() will modify the // following member variables: mAudioRecord, mCblkMemory and mCblk. // It will also delete the strong references on previous IAudioRecord and IMemory - result = openRecord_l(cblk->sampleRate, mFormat, mChannelMask, - mFrameCount, getInput_l()); + result = openRecord_l(cblk->sampleRate, mFormat, mFrameCount, getInput_l()); if (result == NO_ERROR) { newCblk = mCblk; // callback thread or sync event hasn't changed diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index ff1b21b..e40895a 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -243,7 +243,9 @@ status_t AudioTrack::set( ALOGE("Invalid channel mask %#x", channelMask); return BAD_VALUE; } + mChannelMask = channelMask; uint32_t channelCount = popcount(channelMask); + mChannelCount = channelCount; audio_io_handle_t output = AudioSystem::getOutput( streamType, @@ -275,7 +277,6 @@ status_t AudioTrack::set( status_t status = createTrack_l(streamType, sampleRate, format, - channelMask, frameCount, flags, sharedBuffer, @@ -293,8 +294,6 @@ status_t AudioTrack::set( mStreamType = streamType; mFormat = format; - mChannelMask = channelMask; - mChannelCount = channelCount; if (audio_is_linear_pcm(format)) { mFrameSize = channelCount * audio_bytes_per_sample(format); @@ -340,7 +339,7 @@ audio_format_t AudioTrack::format() const return mFormat; } -int AudioTrack::channelCount() const +uint32_t AudioTrack::channelCount() const { return mChannelCount; } @@ -758,7 +757,6 @@ status_t AudioTrack::createTrack_l( audio_stream_type_t streamType, uint32_t sampleRate, audio_format_t format, - audio_channel_mask_t channelMask, size_t frameCount, audio_output_flags_t flags, const sp& sharedBuffer, @@ -808,17 +806,16 @@ status_t AudioTrack::createTrack_l( } else if (sharedBuffer != 0) { - // Ensure that buffer alignment matches channelCount - int channelCount = popcount(channelMask); + // Ensure that buffer alignment matches channel count // 8-bit data in shared memory is not currently supported by AudioFlinger size_t alignment = /* format == AUDIO_FORMAT_PCM_8_BIT ? 1 : */ 2; - if (channelCount > 1) { + if (mChannelCount > 1) { // More than 2 channels does not require stronger alignment than stereo alignment <<= 1; } - if (((uint32_t)sharedBuffer->pointer() & (alignment - 1)) != 0) { - ALOGE("Invalid buffer alignment: address %p, channelCount %d", - sharedBuffer->pointer(), channelCount); + if (((size_t)sharedBuffer->pointer() & (alignment - 1)) != 0) { + ALOGE("Invalid buffer alignment: address %p, channel count %u", + sharedBuffer->pointer(), mChannelCount); return BAD_VALUE; } @@ -826,7 +823,7 @@ status_t AudioTrack::createTrack_l( // there's no frameCount parameter. // But when initializing a shared buffer AudioTrack via set(), // there _is_ a frameCount parameter. We silently ignore it. - frameCount = sharedBuffer->size()/channelCount/sizeof(int16_t); + frameCount = sharedBuffer->size()/mChannelCount/sizeof(int16_t); } else if (!(flags & AUDIO_OUTPUT_FLAG_FAST)) { @@ -890,7 +887,7 @@ status_t AudioTrack::createTrack_l( // AudioFlinger only sees 16-bit PCM format == AUDIO_FORMAT_PCM_8_BIT ? 
AUDIO_FORMAT_PCM_16_BIT : format, - channelMask, + mChannelMask, frameCount, &trackFlags, sharedBuffer, @@ -1398,7 +1395,6 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart result = createTrack_l(mStreamType, cblk->sampleRate, mFormat, - mChannelMask, mReqFrameCount, // so that frame count never goes down mFlags, mSharedBuffer, diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 5f3754f..0c1ab3c 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -6181,7 +6181,7 @@ bool AudioFlinger::RecordThread::threadLoop() framesIn = framesOut; mRsmpInIndex += framesIn; framesOut -= framesIn; - if ((int)mChannelCount == mReqChannelCount || + if (mChannelCount == mReqChannelCount || mFormat != AUDIO_FORMAT_PCM_16_BIT) { memcpy(dst, src, framesIn * mFrameSize); } else { @@ -6197,7 +6197,7 @@ bool AudioFlinger::RecordThread::threadLoop() if (framesOut && mFrameCount == mRsmpInIndex) { void *readInto; if (framesOut == mFrameCount && - ((int)mChannelCount == mReqChannelCount || + (mChannelCount == mReqChannelCount || mFormat != AUDIO_FORMAT_PCM_16_BIT)) { readInto = buffer.raw; framesOut = 0; @@ -6576,7 +6576,7 @@ void AudioFlinger::RecordThread::dumpInternals(int fd, const Vector& a result.append(buffer); snprintf(buffer, SIZE, "Resampling: %d\n", (mResampler != NULL)); result.append(buffer); - snprintf(buffer, SIZE, "Out channel count: %d\n", mReqChannelCount); + snprintf(buffer, SIZE, "Out channel count: %u\n", mReqChannelCount); result.append(buffer); snprintf(buffer, SIZE, "Out sample rate: %u\n", mReqSampleRate); result.append(buffer); @@ -6674,7 +6674,7 @@ bool AudioFlinger::RecordThread::checkForNewParameters_l() int value; audio_format_t reqFormat = mFormat; uint32_t reqSamplingRate = mReqSampleRate; - int reqChannelCount = mReqChannelCount; + uint32_t reqChannelCount = mReqChannelCount; if (param.getInt(String8(AudioParameter::keySamplingRate), value) == NO_ERROR) { reqSamplingRate = value; diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 61f459c..2541b15 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -419,7 +419,7 @@ private: return mFormat; } - int channelCount() const { return mChannelCount; } + uint32_t channelCount() const { return mChannelCount; } audio_channel_mask_t channelMask() const { return mChannelMask; } @@ -565,7 +565,7 @@ private: // dynamic externally-visible uint32_t sampleRate() const { return mSampleRate; } - int channelCount() const { return mChannelCount; } + uint32_t channelCount() const { return mChannelCount; } audio_channel_mask_t channelMask() const { return mChannelMask; } audio_format_t format() const { return mFormat; } // Called by AudioFlinger::frameCount(audio_io_handle_t output) and effects, @@ -1593,7 +1593,7 @@ public: int16_t *mRsmpInBuffer; size_t mRsmpInIndex; size_t mInputBytes; - const int mReqChannelCount; + const uint32_t mReqChannelCount; const uint32_t mReqSampleRate; ssize_t mBytesRead; // sync event triggering actual audio capture. Frames read before this event will -- cgit v1.1 From d6864faccd19096b72d32481a3cc475e6d36e50d Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Mon, 19 Nov 2012 15:27:26 -0800 Subject: fix a typo in SINC resampler that prevented tracks to be mixed we were always erasing the current mix instead of mixing into it. 
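In scalar terms the one-register change below amounts to the following (an illustrative C restatement, not code from the tree): the old store wrote the freshly volume-scaled samples and discarded whatever the other tracks had already contributed, while the fixed store writes the accumulator.

    out[i] = vol * sample;              // old: vst1.s32 {d0} overwrites the current mix
    out[i] = out[i] + vol * sample;     // new: vst1.s32 {d3} accumulates into the mix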
Change-Id: Ib229245f9e5a0d384f1727640a59e9f0469211a2 --- services/audioflinger/AudioResamplerSinc.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index d68b839..3f22ca6 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -721,7 +721,7 @@ void AudioResamplerSinc::filterCoefficient( "vdup.i32 d0, d0[0] \n" // interleave L,R channels "vqrdmulh.s32 d0, d0, d2 \n" // apply volume "vadd.s32 d3, d3, d0 \n" // accumulate result - "vst1.s32 {d0}, %[out] \n" // store result + "vst1.s32 {d3}, %[out] \n" // store result : [out] "=Uv" (out[0]), [count] "+r" (count), @@ -797,7 +797,7 @@ void AudioResamplerSinc::filterCoefficient( "vtrn.s32 d0, d8 \n" // interlace L,R channels "vqrdmulh.s32 d0, d0, d2 \n" // apply volume "vadd.s32 d3, d3, d0 \n" // accumulate result - "vst1.s32 {d0}, %[out] \n" // store result + "vst1.s32 {d3}, %[out] \n" // store result : [out] "=Uv" (out[0]), [count] "+r" (count), -- cgit v1.1 From 81784c37c61b09289654b979567a42bf73cd2b12 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Mon, 19 Nov 2012 14:55:58 -0800 Subject: AudioFlinger files reorganization Audioflinger.cpp and Audioflinger.h files must be split to improve readability and maintainability. This CL splits the files as follows: AudioFlinger.cpp split into: - AudioFlinger.cpp: implementation of IAudioflinger interface and global methods - AFThreads.cpp: implementation of ThreadBase, PlaybackThread, MixerThread, DuplicatingThread, DirectOutputThread and RecordThread. - AFTracks.cpp: implementation of TrackBase, Track, TimedTrack, OutputTrack, RecordTrack, TrackHandle and RecordHandle. - AFEffects.cpp: implementation of EffectModule, EffectChain and EffectHandle. 
AudioFlinger.h is modified by inline inclusion of header files containing the declaration of complex inner classes: - AFThreads.h: ThreadBase, PlaybackThread, MixerThread, DuplicatingThread, DirectOutputThread and RecordThread - AFEffects.h: EffectModule, EffectChain and EffectHandle AFThreads.h includes the follownig headers inline: - AFTrackBase.h: TrackBase - AFPlaybackTracks: Track, TimedTrack, OutputTrack - AFRecordTracks: RecordTrack Change-Id: I512ebc3a51813ab7a4afccc9a538b18125165c4c --- services/audioflinger/Android.mk | 3 + services/audioflinger/AudioFlinger.cpp | 9359 +++----------------------------- services/audioflinger/AudioFlinger.h | 1592 +----- services/audioflinger/Effects.cpp | 1684 ++++++ services/audioflinger/Effects.h | 359 ++ services/audioflinger/PlaybackTracks.h | 285 + services/audioflinger/RecordTracks.h | 62 + services/audioflinger/Threads.cpp | 4426 +++++++++++++++ services/audioflinger/Threads.h | 801 +++ services/audioflinger/TrackBase.h | 139 + services/audioflinger/Tracks.cpp | 1789 ++++++ 11 files changed, 10405 insertions(+), 10094 deletions(-) create mode 100644 services/audioflinger/Effects.cpp create mode 100644 services/audioflinger/Effects.h create mode 100644 services/audioflinger/PlaybackTracks.h create mode 100644 services/audioflinger/RecordTracks.h create mode 100644 services/audioflinger/Threads.cpp create mode 100644 services/audioflinger/Threads.h create mode 100644 services/audioflinger/TrackBase.h create mode 100644 services/audioflinger/Tracks.cpp diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index 4416b52..c4050b8 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -15,6 +15,9 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ AudioFlinger.cpp \ + Threads.cpp \ + Tracks.cpp \ + Effects.cpp \ AudioMixer.cpp.arm \ AudioResampler.cpp.arm \ AudioPolicyService.cpp \ diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 0c1ab3c..514fcb1 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -29,7 +29,6 @@ #include #include #include -#include #include #include #include @@ -38,15 +37,8 @@ #include #include -#undef ADD_BATTERY_DATA - -#ifdef ADD_BATTERY_DATA -#include -#include -#endif - -#include -#include +//#include +//#include #include #include @@ -64,26 +56,8 @@ #include -// #define DEBUG_CPU_USAGE 10 // log statistics every n wall clock seconds -#ifdef DEBUG_CPU_USAGE -#include -#include -#endif - #include -#include - -#include "FastMixer.h" - -// NBAIO implementations -#include -#include -#include -#include -#include -#include - -#include "SchedulingPolicyService.h" +//#include // ---------------------------------------------------------------------------- @@ -105,90 +79,13 @@ namespace android { static const char kDeadlockedString[] = "AudioFlinger may be deadlocked\n"; static const char kHardwareLockedString[] = "Hardware lock is taken\n"; -static const float MAX_GAIN = 4096.0f; -static const uint32_t MAX_GAIN_INT = 0x1000; - -// retry counts for buffer fill timeout -// 50 * ~20msecs = 1 second -static const int8_t kMaxTrackRetries = 50; -static const int8_t kMaxTrackStartupRetries = 50; -// allow less retry attempts on direct output thread. -// direct outputs can be a scarce resource in audio hardware and should -// be released as quickly as possible. 
-static const int8_t kMaxTrackRetriesDirect = 2; - -static const int kDumpLockRetries = 50; -static const int kDumpLockSleepUs = 20000; - -// don't warn about blocked writes or record buffer overflows more often than this -static const nsecs_t kWarningThrottleNs = seconds(5); - -// RecordThread loop sleep time upon application overrun or audio HAL read error -static const int kRecordThreadSleepUs = 5000; - -// maximum time to wait for setParameters to complete -static const nsecs_t kSetParametersTimeoutNs = seconds(2); - -// minimum sleep time for the mixer thread loop when tracks are active but in underrun -static const uint32_t kMinThreadSleepTimeUs = 5000; -// maximum divider applied to the active sleep time in the mixer thread loop -static const uint32_t kMaxThreadSleepTimeShift = 2; - -// minimum normal mix buffer size, expressed in milliseconds rather than frames -static const uint32_t kMinNormalMixBufferSizeMs = 20; -// maximum normal mix buffer size -static const uint32_t kMaxNormalMixBufferSizeMs = 24; nsecs_t AudioFlinger::mStandbyTimeInNsecs = kDefaultStandbyTimeInNsecs; -// Whether to use fast mixer -static const enum { - FastMixer_Never, // never initialize or use: for debugging only - FastMixer_Always, // always initialize and use, even if not needed: for debugging only - // normal mixer multiplier is 1 - FastMixer_Static, // initialize if needed, then use all the time if initialized, - // multiplier is calculated based on min & max normal mixer buffer size - FastMixer_Dynamic, // initialize if needed, then use dynamically depending on track load, - // multiplier is calculated based on min & max normal mixer buffer size - // FIXME for FastMixer_Dynamic: - // Supporting this option will require fixing HALs that can't handle large writes. - // For example, one HAL implementation returns an error from a large write, - // and another HAL implementation corrupts memory, possibly in the sample rate converter. - // We could either fix the HAL implementations, or provide a wrapper that breaks - // up large writes into smaller ones, and the wrapper would need to deal with scheduler. -} kUseFastMixer = FastMixer_Static; - -static uint32_t gScreenState; // incremented by 2 when screen state changes, bit 0 == 1 means "off" - // AudioFlinger::setParameters() updates, other threads read w/o lock - -// Priorities for requestPriority -static const int kPriorityAudioApp = 2; -static const int kPriorityFastMixer = 3; - -// IAudioFlinger::createTrack() reports back to client the total size of shared memory area -// for the track. The client then sub-divides this into smaller buffers for its use. -// Currently the client uses double-buffering by default, but doesn't tell us about that. -// So for now we just assume that client is double-buffered. -// FIXME It would be better for client to tell AudioFlinger whether it wants double-buffering or -// N-buffering, so AudioFlinger could allocate the right amount of memory. -// See the client's minBufCount and mNotificationFramesAct calculations for details. 
-static const int kFastTrackMultiplier = 2; +uint32_t AudioFlinger::mScreenState; // ---------------------------------------------------------------------------- -#ifdef ADD_BATTERY_DATA -// To collect the amplifier usage -static void addBatteryData(uint32_t params) { - sp service = IMediaDeathNotifier::getMediaPlayerService(); - if (service == NULL) { - // it already logged - return; - } - - service->addBatteryData(params); -} -#endif - static int load_audio_interface(const char *if_name, audio_hw_device_t **dev) { const hw_module_t *mod; @@ -364,7 +261,7 @@ void AudioFlinger::dumpPermissionDenial(int fd, const Vector& args) write(fd, result.string(), result.size()); } -static bool tryLock(Mutex& mutex) +bool AudioFlinger::dumpTryLock(Mutex& mutex) { bool locked = false; for (int i = 0; i < kDumpLockRetries; ++i) { @@ -383,7 +280,7 @@ status_t AudioFlinger::dump(int fd, const Vector& args) dumpPermissionDenial(fd, args); } else { // get state of hardware lock - bool hardwareLocked = tryLock(mHardwareLock); + bool hardwareLocked = dumpTryLock(mHardwareLock); if (!hardwareLocked) { String8 result(kHardwareLockedString); write(fd, result.string(), result.size()); @@ -391,7 +288,7 @@ status_t AudioFlinger::dump(int fd, const Vector& args) mHardwareLock.unlock(); } - bool locked = tryLock(mLock); + bool locked = dumpTryLock(mLock); // failed to lock - AudioFlinger is probably deadlocked if (!locked) { @@ -874,6 +771,7 @@ status_t AudioFlinger::setParameters(audio_io_handle_t ioHandle, const String8& { ALOGV("setParameters(): io %d, keyvalue %s, calling pid %d", ioHandle, keyValuePairs.string(), IPCThreadState::self()->getCallingPid()); + // check calling permissions if (!settingsAllowed()) { return PERMISSION_DENIED; @@ -922,8 +820,8 @@ status_t AudioFlinger::setParameters(audio_io_handle_t ioHandle, const String8& String8 screenState; if (param.get(String8(AudioParameter::keyScreenState), screenState) == NO_ERROR) { bool isOff = screenState == "off"; - if (isOff != (gScreenState & 1)) { - gScreenState = ((gScreenState & ~1) + 2) | isOff; + if (isOff != (AudioFlinger::mScreenState & 1)) { + AudioFlinger::mScreenState = ((AudioFlinger::mScreenState & ~1) + 2) | isOff; } } return final_result; @@ -1148,2481 +1046,1106 @@ sp AudioFlinger::getEffectThread_l(int sessionId, return thread; } + + // ---------------------------------------------------------------------------- -AudioFlinger::ThreadBase::ThreadBase(const sp& audioFlinger, audio_io_handle_t id, - audio_devices_t outDevice, audio_devices_t inDevice, type_t type) - : Thread(false /*canCallJava*/), - mType(type), - mAudioFlinger(audioFlinger), mSampleRate(0), mFrameCount(0), mNormalFrameCount(0), - // mChannelMask - mChannelCount(0), - mFrameSize(1), mFormat(AUDIO_FORMAT_INVALID), - mParamStatus(NO_ERROR), - mStandby(false), mOutDevice(outDevice), mInDevice(inDevice), - mAudioSource(AUDIO_SOURCE_DEFAULT), mId(id), - // mName will be set by concrete (non-virtual) subclass - mDeathRecipient(new PMDeathRecipient(this)) +AudioFlinger::Client::Client(const sp& audioFlinger, pid_t pid) + : RefBase(), + mAudioFlinger(audioFlinger), + // FIXME should be a "k" constant not hard-coded, in .h or ro. 
property, see 4 lines below + mMemoryDealer(new MemoryDealer(1024*1024, "AudioFlinger::Client")), + mPid(pid), + mTimedTrackCount(0) { + // 1 MB of address space is good for 32 tracks, 8 buffers each, 4 KB/buffer } -AudioFlinger::ThreadBase::~ThreadBase() +// Client destructor must be called with AudioFlinger::mLock held +AudioFlinger::Client::~Client() { - mParamCond.broadcast(); - // do not lock the mutex in destructor - releaseWakeLock_l(); - if (mPowerManager != 0) { - sp binder = mPowerManager->asBinder(); - binder->unlinkToDeath(mDeathRecipient); - } + mAudioFlinger->removeClient_l(mPid); } -void AudioFlinger::ThreadBase::exit() +sp AudioFlinger::Client::heap() const { - ALOGV("ThreadBase::exit"); - // do any cleanup required for exit to succeed - preExit(); - { - // This lock prevents the following race in thread (uniprocessor for illustration): - // if (!exitPending()) { - // // context switch from here to exit() - // // exit() calls requestExit(), what exitPending() observes - // // exit() calls signal(), which is dropped since no waiters - // // context switch back from exit() to here - // mWaitWorkCV.wait(...); - // // now thread is hung - // } - AutoMutex lock(mLock); - requestExit(); - mWaitWorkCV.broadcast(); - } - // When Thread::requestExitAndWait is made virtual and this method is renamed to - // "virtual status_t requestExitAndWait()", replace by "return Thread::requestExitAndWait();" - requestExitAndWait(); + return mMemoryDealer; } -status_t AudioFlinger::ThreadBase::setParameters(const String8& keyValuePairs) +// Reserve one of the limited slots for a timed audio track associated +// with this client +bool AudioFlinger::Client::reserveTimedTrack() { - status_t status; + const int kMaxTimedTracksPerClient = 4; - ALOGV("ThreadBase::setParameters() %s", keyValuePairs.string()); - Mutex::Autolock _l(mLock); + Mutex::Autolock _l(mTimedTrackLock); - mNewParameters.add(keyValuePairs); - mWaitWorkCV.signal(); - // wait condition with timeout in case the thread loop has exited - // before the request could be processed - if (mParamCond.waitRelative(mLock, kSetParametersTimeoutNs) == NO_ERROR) { - status = mParamStatus; - mWaitWorkCV.signal(); - } else { - status = TIMED_OUT; + if (mTimedTrackCount >= kMaxTimedTracksPerClient) { + ALOGW("can not create timed track - pid %d has exceeded the limit", + mPid); + return false; } - return status; -} -void AudioFlinger::ThreadBase::sendIoConfigEvent(int event, int param) -{ - Mutex::Autolock _l(mLock); - sendIoConfigEvent_l(event, param); + mTimedTrackCount++; + return true; } -// sendIoConfigEvent_l() must be called with ThreadBase::mLock held -void AudioFlinger::ThreadBase::sendIoConfigEvent_l(int event, int param) +// Release a slot for a timed audio track +void AudioFlinger::Client::releaseTimedTrack() { - IoConfigEvent *ioEvent = new IoConfigEvent(event, param); - mConfigEvents.add(static_cast(ioEvent)); - ALOGV("sendIoConfigEvent() num events %d event %d, param %d", mConfigEvents.size(), event, - param); - mWaitWorkCV.signal(); + Mutex::Autolock _l(mTimedTrackLock); + mTimedTrackCount--; } -// sendPrioConfigEvent_l() must be called with ThreadBase::mLock held -void AudioFlinger::ThreadBase::sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio) +// ---------------------------------------------------------------------------- + +AudioFlinger::NotificationClient::NotificationClient(const sp& audioFlinger, + const sp& client, + pid_t pid) + : mAudioFlinger(audioFlinger), mPid(pid), mAudioFlingerClient(client) { - PrioConfigEvent 
*prioEvent = new PrioConfigEvent(pid, tid, prio); - mConfigEvents.add(static_cast(prioEvent)); - ALOGV("sendPrioConfigEvent_l() num events %d pid %d, tid %d prio %d", - mConfigEvents.size(), pid, tid, prio); - mWaitWorkCV.signal(); } -void AudioFlinger::ThreadBase::processConfigEvents() +AudioFlinger::NotificationClient::~NotificationClient() { - mLock.lock(); - while (!mConfigEvents.isEmpty()) { - ALOGV("processConfigEvents() remaining events %d", mConfigEvents.size()); - ConfigEvent *event = mConfigEvents[0]; - mConfigEvents.removeAt(0); - // release mLock before locking AudioFlinger mLock: lock order is always - // AudioFlinger then ThreadBase to avoid cross deadlock - mLock.unlock(); - switch(event->type()) { - case CFG_EVENT_PRIO: { - PrioConfigEvent *prioEvent = static_cast(event); - int err = requestPriority(prioEvent->pid(), prioEvent->tid(), prioEvent->prio()); - if (err != 0) { - ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; " - "error %d", - prioEvent->prio(), prioEvent->pid(), prioEvent->tid(), err); - } - } break; - case CFG_EVENT_IO: { - IoConfigEvent *ioEvent = static_cast(event); - mAudioFlinger->mLock.lock(); - audioConfigChanged_l(ioEvent->event(), ioEvent->param()); - mAudioFlinger->mLock.unlock(); - } break; - default: - ALOGE("processConfigEvents() unknown event type %d", event->type()); - break; - } - delete event; - mLock.lock(); - } - mLock.unlock(); } -void AudioFlinger::ThreadBase::dumpBase(int fd, const Vector& args) +void AudioFlinger::NotificationClient::binderDied(const wp& who) { - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - bool locked = tryLock(mLock); - if (!locked) { - snprintf(buffer, SIZE, "thread %p maybe dead locked\n", this); - write(fd, buffer, strlen(buffer)); - } - - snprintf(buffer, SIZE, "io handle: %d\n", mId); - result.append(buffer); - snprintf(buffer, SIZE, "TID: %d\n", getTid()); - result.append(buffer); - snprintf(buffer, SIZE, "standby: %d\n", mStandby); - result.append(buffer); - snprintf(buffer, SIZE, "Sample rate: %u\n", mSampleRate); - result.append(buffer); - snprintf(buffer, SIZE, "HAL frame count: %d\n", mFrameCount); - result.append(buffer); - snprintf(buffer, SIZE, "Normal frame count: %d\n", mNormalFrameCount); - result.append(buffer); - snprintf(buffer, SIZE, "Channel Count: %d\n", mChannelCount); - result.append(buffer); - snprintf(buffer, SIZE, "Channel Mask: 0x%08x\n", mChannelMask); - result.append(buffer); - snprintf(buffer, SIZE, "Format: %d\n", mFormat); - result.append(buffer); - snprintf(buffer, SIZE, "Frame size: %u\n", mFrameSize); - result.append(buffer); - - snprintf(buffer, SIZE, "\nPending setParameters commands: \n"); - result.append(buffer); - result.append(" Index Command"); - for (size_t i = 0; i < mNewParameters.size(); ++i) { - snprintf(buffer, SIZE, "\n %02d ", i); - result.append(buffer); - result.append(mNewParameters[i]); - } - - snprintf(buffer, SIZE, "\n\nPending config events: \n"); - result.append(buffer); - for (size_t i = 0; i < mConfigEvents.size(); i++) { - mConfigEvents[i]->dump(buffer, SIZE); - result.append(buffer); - } - result.append("\n"); + sp keep(this); + mAudioFlinger->removeNotificationClient(mPid); +} - write(fd, result.string(), result.size()); - if (locked) { - mLock.unlock(); - } -} +// ---------------------------------------------------------------------------- -void AudioFlinger::ThreadBase::dumpEffectChains(int fd, const Vector& args) +sp AudioFlinger::openRecord( + pid_t pid, + audio_io_handle_t input, + uint32_t sampleRate, + 
audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + IAudioFlinger::track_flags_t flags, + pid_t tid, + int *sessionId, + status_t *status) { - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; + sp recordTrack; + sp recordHandle; + sp client; + status_t lStatus; + RecordThread *thread; + size_t inFrameCount; + int lSessionId; - snprintf(buffer, SIZE, "\n- %d Effect Chains:\n", mEffectChains.size()); - write(fd, buffer, strlen(buffer)); + // check calling permissions + if (!recordingAllowed()) { + lStatus = PERMISSION_DENIED; + goto Exit; + } - for (size_t i = 0; i < mEffectChains.size(); ++i) { - sp chain = mEffectChains[i]; - if (chain != 0) { - chain->dump(fd, args); + // add client to list + { // scope for mLock + Mutex::Autolock _l(mLock); + thread = checkRecordThread_l(input); + if (thread == NULL) { + lStatus = BAD_VALUE; + goto Exit; } - } -} -void AudioFlinger::ThreadBase::acquireWakeLock() -{ - Mutex::Autolock _l(mLock); - acquireWakeLock_l(); -} + client = registerPid_l(pid); -void AudioFlinger::ThreadBase::acquireWakeLock_l() -{ - if (mPowerManager == 0) { - // use checkService() to avoid blocking if power service is not up yet - sp binder = - defaultServiceManager()->checkService(String16("power")); - if (binder == 0) { - ALOGW("Thread %s cannot connect to the power manager service", mName); + // If no audio session id is provided, create one here + if (sessionId != NULL && *sessionId != AUDIO_SESSION_OUTPUT_MIX) { + lSessionId = *sessionId; } else { - mPowerManager = interface_cast(binder); - binder->linkToDeath(mDeathRecipient); + lSessionId = nextUniqueId(); + if (sessionId != NULL) { + *sessionId = lSessionId; + } } + // create new record track. + // The record track uses one track in mHardwareMixerThread by convention. 
+ recordTrack = thread->createRecordTrack_l(client, sampleRate, format, channelMask, + frameCount, lSessionId, flags, tid, &lStatus); } - if (mPowerManager != 0) { - sp binder = new BBinder(); - status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK, - binder, - String16(mName)); - if (status == NO_ERROR) { - mWakeLockToken = binder; - } - ALOGV("acquireWakeLock_l() %s status %d", mName, status); + if (lStatus != NO_ERROR) { + // remove local strong reference to Client before deleting the RecordTrack so that the + // Client destructor is called by the TrackBase destructor with mLock held + client.clear(); + recordTrack.clear(); + goto Exit; } -} -void AudioFlinger::ThreadBase::releaseWakeLock() -{ - Mutex::Autolock _l(mLock); - releaseWakeLock_l(); -} + // return to handle to client + recordHandle = new RecordHandle(recordTrack); + lStatus = NO_ERROR; -void AudioFlinger::ThreadBase::releaseWakeLock_l() -{ - if (mWakeLockToken != 0) { - ALOGV("releaseWakeLock_l() %s", mName); - if (mPowerManager != 0) { - mPowerManager->releaseWakeLock(mWakeLockToken, 0); - } - mWakeLockToken.clear(); +Exit: + if (status) { + *status = lStatus; } + return recordHandle; } -void AudioFlinger::ThreadBase::clearPowerManager() -{ - Mutex::Autolock _l(mLock); - releaseWakeLock_l(); - mPowerManager.clear(); -} -void AudioFlinger::ThreadBase::PMDeathRecipient::binderDied(const wp& who) -{ - sp thread = mThread.promote(); - if (thread != 0) { - thread->clearPowerManager(); - } - ALOGW("power manager service died !!!"); -} -void AudioFlinger::ThreadBase::setEffectSuspended( - const effect_uuid_t *type, bool suspend, int sessionId) +// ---------------------------------------------------------------------------- + +audio_module_handle_t AudioFlinger::loadHwModule(const char *name) { + if (!settingsAllowed()) { + return 0; + } Mutex::Autolock _l(mLock); - setEffectSuspended_l(type, suspend, sessionId); + return loadHwModule_l(name); } -void AudioFlinger::ThreadBase::setEffectSuspended_l( - const effect_uuid_t *type, bool suspend, int sessionId) +// loadHwModule_l() must be called with AudioFlinger::mLock held +audio_module_handle_t AudioFlinger::loadHwModule_l(const char *name) { - sp chain = getEffectChain_l(sessionId); - if (chain != 0) { - if (type != NULL) { - chain->setEffectSuspended_l(type, suspend); - } else { - chain->setEffectSuspendedAll_l(suspend); + for (size_t i = 0; i < mAudioHwDevs.size(); i++) { + if (strncmp(mAudioHwDevs.valueAt(i)->moduleName(), name, strlen(name)) == 0) { + ALOGW("loadHwModule() module %s already loaded", name); + return mAudioHwDevs.keyAt(i); } } - updateSuspendedSessions_l(type, suspend, sessionId); -} + audio_hw_device_t *dev; -void AudioFlinger::ThreadBase::checkSuspendOnAddEffectChain_l(const sp& chain) -{ - ssize_t index = mSuspendedSessions.indexOfKey(chain->sessionId()); - if (index < 0) { - return; + int rc = load_audio_interface(name, &dev); + if (rc) { + ALOGI("loadHwModule() error %d loading module %s ", rc, name); + return 0; } - const KeyedVector >& sessionEffects = - mSuspendedSessions.valueAt(index); - - for (size_t i = 0; i < sessionEffects.size(); i++) { - sp desc = sessionEffects.valueAt(i); - for (int j = 0; j < desc->mRefCount; j++) { - if (sessionEffects.keyAt(i) == EffectChain::kKeyForSuspendAll) { - chain->setEffectSuspendedAll_l(true); - } else { - ALOGV("checkSuspendOnAddEffectChain_l() suspending effects %08x", - desc->mType.timeLow); - chain->setEffectSuspended_l(&desc->mType, true); - } - } + mHardwareStatus = AUDIO_HW_INIT; + rc 
= dev->init_check(dev); + mHardwareStatus = AUDIO_HW_IDLE; + if (rc) { + ALOGI("loadHwModule() init check error %d for module %s ", rc, name); + return 0; } -} -void AudioFlinger::ThreadBase::updateSuspendedSessions_l(const effect_uuid_t *type, - bool suspend, - int sessionId) -{ - ssize_t index = mSuspendedSessions.indexOfKey(sessionId); + // Check and cache this HAL's level of support for master mute and master + // volume. If this is the first HAL opened, and it supports the get + // methods, use the initial values provided by the HAL as the current + // master mute and volume settings. - KeyedVector > sessionEffects; + AudioHwDevice::Flags flags = static_cast(0); + { // scope for auto-lock pattern + AutoMutex lock(mHardwareLock); - if (suspend) { - if (index >= 0) { - sessionEffects = mSuspendedSessions.valueAt(index); - } else { - mSuspendedSessions.add(sessionId, sessionEffects); + if (0 == mAudioHwDevs.size()) { + mHardwareStatus = AUDIO_HW_GET_MASTER_VOLUME; + if (NULL != dev->get_master_volume) { + float mv; + if (OK == dev->get_master_volume(dev, &mv)) { + mMasterVolume = mv; + } + } + + mHardwareStatus = AUDIO_HW_GET_MASTER_MUTE; + if (NULL != dev->get_master_mute) { + bool mm; + if (OK == dev->get_master_mute(dev, &mm)) { + mMasterMute = mm; + } + } } - } else { - if (index < 0) { - return; + + mHardwareStatus = AUDIO_HW_SET_MASTER_VOLUME; + if ((NULL != dev->set_master_volume) && + (OK == dev->set_master_volume(dev, mMasterVolume))) { + flags = static_cast(flags | + AudioHwDevice::AHWD_CAN_SET_MASTER_VOLUME); } - sessionEffects = mSuspendedSessions.valueAt(index); - } + mHardwareStatus = AUDIO_HW_SET_MASTER_MUTE; + if ((NULL != dev->set_master_mute) && + (OK == dev->set_master_mute(dev, mMasterMute))) { + flags = static_cast(flags | + AudioHwDevice::AHWD_CAN_SET_MASTER_MUTE); + } - int key = EffectChain::kKeyForSuspendAll; - if (type != NULL) { - key = type->timeLow; + mHardwareStatus = AUDIO_HW_IDLE; } - index = sessionEffects.indexOfKey(key); - sp desc; - if (suspend) { - if (index >= 0) { - desc = sessionEffects.valueAt(index); - } else { - desc = new SuspendedSessionDesc(); - if (type != NULL) { - desc->mType = *type; - } - sessionEffects.add(key, desc); - ALOGV("updateSuspendedSessions_l() suspend adding effect %08x", key); - } - desc->mRefCount++; - } else { - if (index < 0) { - return; - } - desc = sessionEffects.valueAt(index); - if (--desc->mRefCount == 0) { - ALOGV("updateSuspendedSessions_l() restore removing effect %08x", key); - sessionEffects.removeItemsAt(index); - if (sessionEffects.isEmpty()) { - ALOGV("updateSuspendedSessions_l() restore removing session %d", - sessionId); - mSuspendedSessions.removeItem(sessionId); - } - } - } - if (!sessionEffects.isEmpty()) { - mSuspendedSessions.replaceValueFor(sessionId, sessionEffects); - } + audio_module_handle_t handle = nextUniqueId(); + mAudioHwDevs.add(handle, new AudioHwDevice(name, dev, flags)); + + ALOGI("loadHwModule() Loaded %s audio interface from %s (%s) handle %d", + name, dev->common.module->name, dev->common.module->id, handle); + + return handle; + } -void AudioFlinger::ThreadBase::checkSuspendOnEffectEnabled(const sp& effect, - bool enabled, - int sessionId) +// ---------------------------------------------------------------------------- + +uint32_t AudioFlinger::getPrimaryOutputSamplingRate() { Mutex::Autolock _l(mLock); - checkSuspendOnEffectEnabled_l(effect, enabled, sessionId); + PlaybackThread *thread = primaryPlaybackThread_l(); + return thread != NULL ? 
thread->sampleRate() : 0; } -void AudioFlinger::ThreadBase::checkSuspendOnEffectEnabled_l(const sp& effect, - bool enabled, - int sessionId) +size_t AudioFlinger::getPrimaryOutputFrameCount() { - if (mType != RECORD) { - // suspend all effects in AUDIO_SESSION_OUTPUT_MIX when enabling any effect on - // another session. This gives the priority to well behaved effect control panels - // and applications not using global effects. - // Enabling post processing in AUDIO_SESSION_OUTPUT_STAGE session does not affect - // global effects - if ((sessionId != AUDIO_SESSION_OUTPUT_MIX) && (sessionId != AUDIO_SESSION_OUTPUT_STAGE)) { - setEffectSuspended_l(NULL, enabled, AUDIO_SESSION_OUTPUT_MIX); - } - } - - sp chain = getEffectChain_l(sessionId); - if (chain != 0) { - chain->checkSuspendOnEffectEnabled(effect, enabled); - } + Mutex::Autolock _l(mLock); + PlaybackThread *thread = primaryPlaybackThread_l(); + return thread != NULL ? thread->frameCountHAL() : 0; } // ---------------------------------------------------------------------------- -AudioFlinger::PlaybackThread::PlaybackThread(const sp& audioFlinger, - AudioStreamOut* output, - audio_io_handle_t id, - audio_devices_t device, - type_t type) - : ThreadBase(audioFlinger, id, device, AUDIO_DEVICE_NONE, type), - mMixBuffer(NULL), mSuspended(0), mBytesWritten(0), - // mStreamTypes[] initialized in constructor body - mOutput(output), - mLastWriteTime(0), mNumWrites(0), mNumDelayedWrites(0), mInWrite(false), - mMixerStatus(MIXER_IDLE), - mMixerStatusIgnoringFastTracks(MIXER_IDLE), - standbyDelay(AudioFlinger::mStandbyTimeInNsecs), - mScreenState(gScreenState), - // index 0 is reserved for normal mixer's submix - mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1) +audio_io_handle_t AudioFlinger::openOutput(audio_module_handle_t module, + audio_devices_t *pDevices, + uint32_t *pSamplingRate, + audio_format_t *pFormat, + audio_channel_mask_t *pChannelMask, + uint32_t *pLatencyMs, + audio_output_flags_t flags) { - snprintf(mName, kNameLength, "AudioOut_%X", id); - - // Assumes constructor is called by AudioFlinger with it's mLock held, but - // it would be safer to explicitly pass initial masterVolume/masterMute as - // parameter. - // - // If the HAL we are using has support for master volume or master mute, - // then do not attenuate or mute during mixing (just leave the volume at 1.0 - // and the mute set to false). - mMasterVolume = audioFlinger->masterVolume_l(); - mMasterMute = audioFlinger->masterMute_l(); - if (mOutput && mOutput->audioHwDev) { - if (mOutput->audioHwDev->canSetMasterVolume()) { - mMasterVolume = 1.0; - } + status_t status; + PlaybackThread *thread = NULL; + struct audio_config config = { + sample_rate: pSamplingRate ? *pSamplingRate : 0, + channel_mask: pChannelMask ? *pChannelMask : 0, + format: pFormat ? *pFormat : AUDIO_FORMAT_DEFAULT, + }; + audio_stream_out_t *outStream = NULL; + AudioHwDevice *outHwDev; - if (mOutput->audioHwDev->canSetMasterMute()) { - mMasterMute = false; - } + ALOGV("openOutput(), module %d Device %x, SamplingRate %d, Format %d, Channels %x, flags %x", + module, + (pDevices != NULL) ? 
*pDevices : 0, + config.sample_rate, + config.format, + config.channel_mask, + flags); + + if (pDevices == NULL || *pDevices == 0) { + return 0; } - readOutputParameters(); + Mutex::Autolock _l(mLock); - // mStreamTypes[AUDIO_STREAM_CNT] is initialized by stream_type_t default constructor - // There is no AUDIO_STREAM_MIN, and ++ operator does not compile - for (audio_stream_type_t stream = (audio_stream_type_t) 0; stream < AUDIO_STREAM_CNT; - stream = (audio_stream_type_t) (stream + 1)) { - mStreamTypes[stream].volume = mAudioFlinger->streamVolume_l(stream); - mStreamTypes[stream].mute = mAudioFlinger->streamMute_l(stream); - } - // mStreamTypes[AUDIO_STREAM_CNT] exists but isn't explicitly initialized here, - // because mAudioFlinger doesn't have one to copy from -} + outHwDev = findSuitableHwDev_l(module, *pDevices); + if (outHwDev == NULL) + return 0; -AudioFlinger::PlaybackThread::~PlaybackThread() -{ - delete [] mMixBuffer; -} + audio_hw_device_t *hwDevHal = outHwDev->hwDevice(); + audio_io_handle_t id = nextUniqueId(); -void AudioFlinger::PlaybackThread::dump(int fd, const Vector& args) -{ - dumpInternals(fd, args); - dumpTracks(fd, args); - dumpEffectChains(fd, args); -} + mHardwareStatus = AUDIO_HW_OUTPUT_OPEN; -void AudioFlinger::PlaybackThread::dumpTracks(int fd, const Vector& args) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; + status = hwDevHal->open_output_stream(hwDevHal, + id, + *pDevices, + (audio_output_flags_t)flags, + &config, + &outStream); - result.appendFormat("Output thread %p stream volumes in dB:\n ", this); - for (int i = 0; i < AUDIO_STREAM_CNT; ++i) { - const stream_type_t *st = &mStreamTypes[i]; - if (i > 0) { - result.appendFormat(", "); - } - result.appendFormat("%d:%.2g", i, 20.0 * log10(st->volume)); - if (st->mute) { - result.append("M"); - } - } - result.append("\n"); - write(fd, result.string(), result.length()); - result.clear(); + mHardwareStatus = AUDIO_HW_IDLE; + ALOGV("openOutput() openOutputStream returned output %p, SamplingRate %d, Format %d, " + "Channels %x, status %d", + outStream, + config.sample_rate, + config.format, + config.channel_mask, + status); - snprintf(buffer, SIZE, "Output thread %p tracks\n", this); - result.append(buffer); - Track::appendDumpHeader(result); - for (size_t i = 0; i < mTracks.size(); ++i) { - sp track = mTracks[i]; - if (track != 0) { - track->dump(buffer, SIZE); - result.append(buffer); - } - } + if (status == NO_ERROR && outStream != NULL) { + AudioStreamOut *output = new AudioStreamOut(outHwDev, outStream); - snprintf(buffer, SIZE, "Output thread %p active tracks\n", this); - result.append(buffer); - Track::appendDumpHeader(result); - for (size_t i = 0; i < mActiveTracks.size(); ++i) { - sp track = mActiveTracks[i].promote(); - if (track != 0) { - track->dump(buffer, SIZE); - result.append(buffer); + if ((flags & AUDIO_OUTPUT_FLAG_DIRECT) || + (config.format != AUDIO_FORMAT_PCM_16_BIT) || + (config.channel_mask != AUDIO_CHANNEL_OUT_STEREO)) { + thread = new DirectOutputThread(this, output, id, *pDevices); + ALOGV("openOutput() created direct output: ID %d thread %p", id, thread); + } else { + thread = new MixerThread(this, output, id, *pDevices); + ALOGV("openOutput() created mixer output: ID %d thread %p", id, thread); } - } - write(fd, result.string(), result.size()); + mPlaybackThreads.add(id, thread); - // These values are "raw"; they will wrap around. See prepareTracks_l() for a better way. 
- FastTrackUnderruns underruns = getFastTrackUnderruns(0); - fdprintf(fd, "Normal mixer raw underrun counters: partial=%u empty=%u\n", - underruns.mBitFields.mPartial, underruns.mBitFields.mEmpty); -} + if (pSamplingRate != NULL) *pSamplingRate = config.sample_rate; + if (pFormat != NULL) *pFormat = config.format; + if (pChannelMask != NULL) *pChannelMask = config.channel_mask; + if (pLatencyMs != NULL) *pLatencyMs = thread->latency(); -void AudioFlinger::PlaybackThread::dumpInternals(int fd, const Vector& args) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; + // notify client processes of the new output creation + thread->audioConfigChanged_l(AudioSystem::OUTPUT_OPENED); - snprintf(buffer, SIZE, "\nOutput thread %p internals\n", this); - result.append(buffer); - snprintf(buffer, SIZE, "last write occurred (msecs): %llu\n", - ns2ms(systemTime() - mLastWriteTime)); - result.append(buffer); - snprintf(buffer, SIZE, "total writes: %d\n", mNumWrites); - result.append(buffer); - snprintf(buffer, SIZE, "delayed writes: %d\n", mNumDelayedWrites); - result.append(buffer); - snprintf(buffer, SIZE, "blocked in write: %d\n", mInWrite); - result.append(buffer); - snprintf(buffer, SIZE, "suspend count: %d\n", mSuspended); - result.append(buffer); - snprintf(buffer, SIZE, "mix buffer : %p\n", mMixBuffer); - result.append(buffer); - write(fd, result.string(), result.size()); - fdprintf(fd, "Fast track availMask=%#x\n", mFastTrackAvailMask); + // the first primary output opened designates the primary hw device + if ((mPrimaryHardwareDev == NULL) && (flags & AUDIO_OUTPUT_FLAG_PRIMARY)) { + ALOGI("Using module %d has the primary audio interface", module); + mPrimaryHardwareDev = outHwDev; + + AutoMutex lock(mHardwareLock); + mHardwareStatus = AUDIO_HW_SET_MODE; + hwDevHal->set_mode(hwDevHal, mMode); + mHardwareStatus = AUDIO_HW_IDLE; + } + return id; + } - dumpBase(fd, args); + return 0; } -// Thread virtuals -status_t AudioFlinger::PlaybackThread::readyToRun() +audio_io_handle_t AudioFlinger::openDuplicateOutput(audio_io_handle_t output1, + audio_io_handle_t output2) { - status_t status = initCheck(); - if (status == NO_ERROR) { - ALOGI("AudioFlinger's thread %p ready to run", this); - } else { - ALOGE("No working audio driver found."); + Mutex::Autolock _l(mLock); + MixerThread *thread1 = checkMixerThread_l(output1); + MixerThread *thread2 = checkMixerThread_l(output2); + + if (thread1 == NULL || thread2 == NULL) { + ALOGW("openDuplicateOutput() wrong output mixer type for output %d or %d", output1, + output2); + return 0; } - return status; -} -void AudioFlinger::PlaybackThread::onFirstRef() -{ - run(mName, ANDROID_PRIORITY_URGENT_AUDIO); + audio_io_handle_t id = nextUniqueId(); + DuplicatingThread *thread = new DuplicatingThread(this, thread1, id); + thread->addOutputTrack(thread2); + mPlaybackThreads.add(id, thread); + // notify client processes of the new output creation + thread->audioConfigChanged_l(AudioSystem::OUTPUT_OPENED); + return id; } -// ThreadBase virtuals -void AudioFlinger::PlaybackThread::preExit() +status_t AudioFlinger::closeOutput(audio_io_handle_t output) { - ALOGV(" preExit()"); - // FIXME this is using hard-coded strings but in the future, this functionality will be - // converted to use audio HAL extensions required to support tunneling - mOutput->stream->common.set_parameters(&mOutput->stream->common, "exiting=1"); + return closeOutput_nonvirtual(output); } -// PlaybackThread::createTrack_l() must be called with AudioFlinger::mLock held -sp 
AudioFlinger::PlaybackThread::createTrack_l( - const sp& client, - audio_stream_type_t streamType, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - const sp& sharedBuffer, - int sessionId, - IAudioFlinger::track_flags_t *flags, - pid_t tid, - status_t *status) +status_t AudioFlinger::closeOutput_nonvirtual(audio_io_handle_t output) { - sp track; - status_t lStatus; - - bool isTimed = (*flags & IAudioFlinger::TRACK_TIMED) != 0; - - // client expresses a preference for FAST, but we get the final say - if (*flags & IAudioFlinger::TRACK_FAST) { - if ( - // not timed - (!isTimed) && - // either of these use cases: - ( - // use case 1: shared buffer with any frame count - ( - (sharedBuffer != 0) - ) || - // use case 2: callback handler and frame count is default or at least as large as HAL - ( - (tid != -1) && - ((frameCount == 0) || - (frameCount >= (mFrameCount * kFastTrackMultiplier))) - ) - ) && - // PCM data - audio_is_linear_pcm(format) && - // mono or stereo - ( (channelMask == AUDIO_CHANNEL_OUT_MONO) || - (channelMask == AUDIO_CHANNEL_OUT_STEREO) ) && -#ifndef FAST_TRACKS_AT_NON_NATIVE_SAMPLE_RATE - // hardware sample rate - (sampleRate == mSampleRate) && -#endif - // normal mixer has an associated fast mixer - hasFastMixer() && - // there are sufficient fast track slots available - (mFastTrackAvailMask != 0) - // FIXME test that MixerThread for this fast track has a capable output HAL - // FIXME add a permission test also? - ) { - // if frameCount not specified, then it defaults to fast mixer (HAL) frame count - if (frameCount == 0) { - frameCount = mFrameCount * kFastTrackMultiplier; - } - ALOGV("AUDIO_OUTPUT_FLAG_FAST accepted: frameCount=%d mFrameCount=%d", - frameCount, mFrameCount); - } else { - ALOGV("AUDIO_OUTPUT_FLAG_FAST denied: isTimed=%d sharedBuffer=%p frameCount=%d " - "mFrameCount=%d format=%d isLinear=%d channelMask=%#x sampleRate=%u mSampleRate=%u " - "hasFastMixer=%d tid=%d fastTrackAvailMask=%#x", - isTimed, sharedBuffer.get(), frameCount, mFrameCount, format, - audio_is_linear_pcm(format), - channelMask, sampleRate, mSampleRate, hasFastMixer(), tid, mFastTrackAvailMask); - *flags &= ~IAudioFlinger::TRACK_FAST; - // For compatibility with AudioTrack calculation, buffer depth is forced - // to be at least 2 x the normal mixer frame count and cover audio hardware latency. - // This is probably too conservative, but legacy application code may depend on it. - // If you change this calculation, also review the start threshold which is related. - uint32_t latencyMs = mOutput->stream->get_latency(mOutput->stream); - uint32_t minBufCount = latencyMs / ((1000 * mNormalFrameCount) / mSampleRate); - if (minBufCount < 2) { - minBufCount = 2; - } - size_t minFrameCount = mNormalFrameCount * minBufCount; - if (frameCount < minFrameCount) { - frameCount = minFrameCount; - } - } - } - - if (mType == DIRECT) { - if ((format & AUDIO_FORMAT_MAIN_MASK) == AUDIO_FORMAT_PCM) { - if (sampleRate != mSampleRate || format != mFormat || channelMask != mChannelMask) { - ALOGE("createTrack_l() Bad parameter: sampleRate %u format %d, channelMask 0x%08x " - "for output %p with format %d", - sampleRate, format, channelMask, mOutput, mFormat); - lStatus = BAD_VALUE; - goto Exit; - } - } - } else { - // Resampler implementation limits input sampling rate to 2 x output sampling rate. 
- if (sampleRate > mSampleRate*2) { - ALOGE("Sample rate out of range: %u mSampleRate %u", sampleRate, mSampleRate); - lStatus = BAD_VALUE; - goto Exit; + // keep strong reference on the playback thread so that + // it is not destroyed while exit() is executed + sp thread; + { + Mutex::Autolock _l(mLock); + thread = checkPlaybackThread_l(output); + if (thread == NULL) { + return BAD_VALUE; } - } - - lStatus = initCheck(); - if (lStatus != NO_ERROR) { - ALOGE("Audio driver not initialized."); - goto Exit; - } - { // scope for mLock - Mutex::Autolock _l(mLock); + ALOGV("closeOutput() %d", output); - // all tracks in same audio session must share the same routing strategy otherwise - // conflicts will happen when tracks are moved from one output to another by audio policy - // manager - uint32_t strategy = AudioSystem::getStrategyForStream(streamType); - for (size_t i = 0; i < mTracks.size(); ++i) { - sp t = mTracks[i]; - if (t != 0 && !t->isOutputTrack()) { - uint32_t actual = AudioSystem::getStrategyForStream(t->streamType()); - if (sessionId == t->sessionId() && strategy != actual) { - ALOGE("createTrack_l() mismatched strategy; expected %u but found %u", - strategy, actual); - lStatus = BAD_VALUE; - goto Exit; + if (thread->type() == ThreadBase::MIXER) { + for (size_t i = 0; i < mPlaybackThreads.size(); i++) { + if (mPlaybackThreads.valueAt(i)->type() == ThreadBase::DUPLICATING) { + DuplicatingThread *dupThread = + (DuplicatingThread *)mPlaybackThreads.valueAt(i).get(); + dupThread->removeOutputTrack((MixerThread *)thread.get()); } } } - - if (!isTimed) { - track = new Track(this, client, streamType, sampleRate, format, - channelMask, frameCount, sharedBuffer, sessionId, *flags); - } else { - track = TimedTrack::create(this, client, streamType, sampleRate, format, - channelMask, frameCount, sharedBuffer, sessionId); - } - if (track == 0 || track->getCblk() == NULL || track->name() < 0) { - lStatus = NO_MEMORY; - goto Exit; - } - mTracks.add(track); - - sp chain = getEffectChain_l(sessionId); - if (chain != 0) { - ALOGV("createTrack_l() setting main buffer %p", chain->inBuffer()); - track->setMainBuffer(chain->inBuffer()); - chain->setStrategy(AudioSystem::getStrategyForStream(track->streamType())); - chain->incTrackCnt(); - } - - if ((*flags & IAudioFlinger::TRACK_FAST) && (tid != -1)) { - pid_t callingPid = IPCThreadState::self()->getCallingPid(); - // we don't have CAP_SYS_NICE, nor do we want to have it as it's too powerful, - // so ask activity manager to do this on our behalf - sendPrioConfigEvent_l(callingPid, tid, kPriorityAudioApp); - } + audioConfigChanged_l(AudioSystem::OUTPUT_CLOSED, output, NULL); + mPlaybackThreads.removeItem(output); } + thread->exit(); + // The thread entity (active unit of execution) is no longer running here, + // but the ThreadBase container still exists. 
- lStatus = NO_ERROR; - -Exit: - if (status) { - *status = lStatus; + if (thread->type() != ThreadBase::DUPLICATING) { + AudioStreamOut *out = thread->clearOutput(); + ALOG_ASSERT(out != NULL, "out shouldn't be NULL"); + // from now on thread->mOutput is NULL + out->hwDev()->close_output_stream(out->hwDev(), out->stream); + delete out; } - return track; + return NO_ERROR; } -uint32_t AudioFlinger::MixerThread::correctLatency_l(uint32_t latency) const +status_t AudioFlinger::suspendOutput(audio_io_handle_t output) { - if (mFastMixer != NULL) { - MonoPipe *pipe = (MonoPipe *)mPipeSink.get(); - latency += (pipe->getAvgFrames() * 1000) / mSampleRate; + Mutex::Autolock _l(mLock); + PlaybackThread *thread = checkPlaybackThread_l(output); + + if (thread == NULL) { + return BAD_VALUE; } - return latency; -} -uint32_t AudioFlinger::PlaybackThread::correctLatency_l(uint32_t latency) const -{ - return latency; -} + ALOGV("suspendOutput() %d", output); + thread->suspend(); -uint32_t AudioFlinger::PlaybackThread::latency() const -{ - Mutex::Autolock _l(mLock); - return latency_l(); -} -uint32_t AudioFlinger::PlaybackThread::latency_l() const -{ - if (initCheck() == NO_ERROR) { - return correctLatency_l(mOutput->stream->get_latency(mOutput->stream)); - } else { - return 0; - } + return NO_ERROR; } -void AudioFlinger::PlaybackThread::setMasterVolume(float value) +status_t AudioFlinger::restoreOutput(audio_io_handle_t output) { Mutex::Autolock _l(mLock); - // Don't apply master volume in SW if our HAL can do it for us. - if (mOutput && mOutput->audioHwDev && - mOutput->audioHwDev->canSetMasterVolume()) { - mMasterVolume = 1.0; - } else { - mMasterVolume = value; - } -} + PlaybackThread *thread = checkPlaybackThread_l(output); -void AudioFlinger::PlaybackThread::setMasterMute(bool muted) -{ - Mutex::Autolock _l(mLock); - // Don't apply master mute in SW if our HAL can do it for us. - if (mOutput && mOutput->audioHwDev && - mOutput->audioHwDev->canSetMasterMute()) { - mMasterMute = false; - } else { - mMasterMute = muted; + if (thread == NULL) { + return BAD_VALUE; } -} -void AudioFlinger::PlaybackThread::setStreamVolume(audio_stream_type_t stream, float value) -{ - Mutex::Autolock _l(mLock); - mStreamTypes[stream].volume = value; -} + ALOGV("restoreOutput() %d", output); -void AudioFlinger::PlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted) -{ - Mutex::Autolock _l(mLock); - mStreamTypes[stream].mute = muted; -} + thread->restore(); -float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) const -{ - Mutex::Autolock _l(mLock); - return mStreamTypes[stream].volume; + return NO_ERROR; } -// addTrack_l() must be called with ThreadBase::mLock held -status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) +audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, + audio_devices_t *pDevices, + uint32_t *pSamplingRate, + audio_format_t *pFormat, + audio_channel_mask_t *pChannelMask) { - status_t status = ALREADY_EXISTS; - - // set retry count for buffer fill - track->mRetryCount = kMaxTrackStartupRetries; - if (mActiveTracks.indexOf(track) < 0) { - // the track is newly added, make sure it fills up all its - // buffers before playing. This is to ensure the client will - // effectively get the latency it requested. 
- track->mFillingUpStatus = Track::FS_FILLING; - track->mResetDone = false; - track->mPresentationCompleteFrames = 0; - mActiveTracks.add(track); - if (track->mainBuffer() != mMixBuffer) { - sp chain = getEffectChain_l(track->sessionId()); - if (chain != 0) { - ALOGV("addTrack_l() starting track on chain %p for session %d", chain.get(), - track->sessionId()); - chain->incActiveTrackCnt(); - } - } - - status = NO_ERROR; - } - - ALOGV("mWaitWorkCV.broadcast"); - mWaitWorkCV.broadcast(); - - return status; -} + status_t status; + RecordThread *thread = NULL; + struct audio_config config = { + sample_rate: pSamplingRate ? *pSamplingRate : 0, + channel_mask: pChannelMask ? *pChannelMask : 0, + format: pFormat ? *pFormat : AUDIO_FORMAT_DEFAULT, + }; + uint32_t reqSamplingRate = config.sample_rate; + audio_format_t reqFormat = config.format; + audio_channel_mask_t reqChannels = config.channel_mask; + audio_stream_in_t *inStream = NULL; + AudioHwDevice *inHwDev; -// destroyTrack_l() must be called with ThreadBase::mLock held -void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) -{ - track->mState = TrackBase::TERMINATED; - // active tracks are removed by threadLoop() - if (mActiveTracks.indexOf(track) < 0) { - removeTrack_l(track); + if (pDevices == NULL || *pDevices == 0) { + return 0; } -} -void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) -{ - track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); - mTracks.remove(track); - deleteTrackName_l(track->name()); - // redundant as track is about to be destroyed, for dumpsys only - track->mName = -1; - if (track->isFastTrack()) { - int index = track->mFastIndex; - ALOG_ASSERT(0 < index && index < (int)FastMixerState::kMaxFastTracks); - ALOG_ASSERT(!(mFastTrackAvailMask & (1 << index))); - mFastTrackAvailMask |= 1 << index; - // redundant as track is about to be destroyed, for dumpsys only - track->mFastIndex = -1; - } - sp chain = getEffectChain_l(track->sessionId()); - if (chain != 0) { - chain->decTrackCnt(); - } -} + Mutex::Autolock _l(mLock); -String8 AudioFlinger::PlaybackThread::getParameters(const String8& keys) -{ - String8 out_s8 = String8(""); - char *s; + inHwDev = findSuitableHwDev_l(module, *pDevices); + if (inHwDev == NULL) + return 0; - Mutex::Autolock _l(mLock); - if (initCheck() != NO_ERROR) { - return out_s8; - } + audio_hw_device_t *inHwHal = inHwDev->hwDevice(); + audio_io_handle_t id = nextUniqueId(); - s = mOutput->stream->common.get_parameters(&mOutput->stream->common, keys.string()); - out_s8 = String8(s); - free(s); - return out_s8; -} + status = inHwHal->open_input_stream(inHwHal, id, *pDevices, &config, + &inStream); + ALOGV("openInput() openInputStream returned input %p, SamplingRate %d, Format %d, Channels %x, " + "status %d", + inStream, + config.sample_rate, + config.format, + config.channel_mask, + status); -// audioConfigChanged_l() must be called with AudioFlinger::mLock held -void AudioFlinger::PlaybackThread::audioConfigChanged_l(int event, int param) { - AudioSystem::OutputDescriptor desc; - void *param2 = NULL; - - ALOGV("PlaybackThread::audioConfigChanged_l, thread %p, event %d, param %d", this, event, - param); - - switch (event) { - case AudioSystem::OUTPUT_OPENED: - case AudioSystem::OUTPUT_CONFIG_CHANGED: - desc.channels = mChannelMask; - desc.samplingRate = mSampleRate; - desc.format = mFormat; - desc.frameCount = mNormalFrameCount; // FIXME see - // AudioFlinger::frameCount(audio_io_handle_t) - desc.latency = latency(); - param2 = &desc; - break; - - case 
AudioSystem::STREAM_CONFIG_CHANGED: - param2 = ¶m; - case AudioSystem::OUTPUT_CLOSED: - default: - break; + // If the input could not be opened with the requested parameters and we can handle the + // conversion internally, try to open again with the proposed parameters. The AudioFlinger can + // resample the input and do mono to stereo or stereo to mono conversions on 16 bit PCM inputs. + if (status == BAD_VALUE && + reqFormat == config.format && config.format == AUDIO_FORMAT_PCM_16_BIT && + (config.sample_rate <= 2 * reqSamplingRate) && + (popcount(config.channel_mask) <= FCC_2) && (popcount(reqChannels) <= FCC_2)) { + ALOGV("openInput() reopening with proposed sampling rate and channel mask"); + inStream = NULL; + status = inHwHal->open_input_stream(inHwHal, id, *pDevices, &config, &inStream); } - mAudioFlinger->audioConfigChanged_l(event, mId, param2); -} -void AudioFlinger::PlaybackThread::readOutputParameters() -{ - mSampleRate = mOutput->stream->common.get_sample_rate(&mOutput->stream->common); - mChannelMask = mOutput->stream->common.get_channels(&mOutput->stream->common); - mChannelCount = (uint16_t)popcount(mChannelMask); - mFormat = mOutput->stream->common.get_format(&mOutput->stream->common); - mFrameSize = audio_stream_frame_size(&mOutput->stream->common); - mFrameCount = mOutput->stream->common.get_buffer_size(&mOutput->stream->common) / mFrameSize; - if (mFrameCount & 15) { - ALOGW("HAL output buffer size is %u frames but AudioMixer requires multiples of 16 frames", - mFrameCount); - } + if (status == NO_ERROR && inStream != NULL) { - // Calculate size of normal mix buffer relative to the HAL output buffer size - double multiplier = 1.0; - if (mType == MIXER && (kUseFastMixer == FastMixer_Static || - kUseFastMixer == FastMixer_Dynamic)) { - size_t minNormalFrameCount = (kMinNormalMixBufferSizeMs * mSampleRate) / 1000; - size_t maxNormalFrameCount = (kMaxNormalMixBufferSizeMs * mSampleRate) / 1000; - // round up minimum and round down maximum to nearest 16 frames to satisfy AudioMixer - minNormalFrameCount = (minNormalFrameCount + 15) & ~15; - maxNormalFrameCount = maxNormalFrameCount & ~15; - if (maxNormalFrameCount < minNormalFrameCount) { - maxNormalFrameCount = minNormalFrameCount; - } - multiplier = (double) minNormalFrameCount / (double) mFrameCount; - if (multiplier <= 1.0) { - multiplier = 1.0; - } else if (multiplier <= 2.0) { - if (2 * mFrameCount <= maxNormalFrameCount) { - multiplier = 2.0; - } else { - multiplier = (double) maxNormalFrameCount / (double) mFrameCount; - } + // Try to re-use most recently used Pipe to archive a copy of input for dumpsys, + // or (re-)create if current Pipe is idle and does not match the new format + sp teeSink; +#ifdef TEE_SINK_INPUT_FRAMES + enum { + TEE_SINK_NO, // don't copy input + TEE_SINK_NEW, // copy input using a new pipe + TEE_SINK_OLD, // copy input using an existing pipe + } kind; + NBAIO_Format format = Format_from_SR_C(inStream->common.get_sample_rate(&inStream->common), + popcount(inStream->common.get_channels(&inStream->common))); + if (format == Format_Invalid) { + kind = TEE_SINK_NO; + } else if (mRecordTeeSink == 0) { + kind = TEE_SINK_NEW; + } else if (mRecordTeeSink->getStrongCount() != 1) { + kind = TEE_SINK_NO; + } else if (format == mRecordTeeSink->format()) { + kind = TEE_SINK_OLD; } else { - // prefer an even multiplier, for compatibility with doubling of fast tracks due to HAL - // SRC (it would be unusual for the normal mix buffer size to not be a multiple of fast - // track, but we sometimes have to do 
this to satisfy the maximum frame count - // constraint) - // FIXME this rounding up should not be done if no HAL SRC - uint32_t truncMult = (uint32_t) multiplier; - if ((truncMult & 1)) { - if ((truncMult + 1) * mFrameCount <= maxNormalFrameCount) { - ++truncMult; - } + kind = TEE_SINK_NEW; + } + switch (kind) { + case TEE_SINK_NEW: { + Pipe *pipe = new Pipe(TEE_SINK_INPUT_FRAMES, format); + size_t numCounterOffers = 0; + const NBAIO_Format offers[1] = {format}; + ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + PipeReader *pipeReader = new PipeReader(*pipe); + numCounterOffers = 0; + index = pipeReader->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + mRecordTeeSink = pipe; + mRecordTeeSource = pipeReader; + teeSink = pipe; } - multiplier = (double) truncMult; + break; + case TEE_SINK_OLD: + teeSink = mRecordTeeSink; + break; + case TEE_SINK_NO: + default: + break; } - } - mNormalFrameCount = multiplier * mFrameCount; - // round up to nearest 16 frames to satisfy AudioMixer - mNormalFrameCount = (mNormalFrameCount + 15) & ~15; - ALOGI("HAL output buffer size %u frames, normal mix buffer size %u frames", mFrameCount, - mNormalFrameCount); - - delete[] mMixBuffer; - mMixBuffer = new int16_t[mNormalFrameCount * mChannelCount]; - memset(mMixBuffer, 0, mNormalFrameCount * mChannelCount * sizeof(int16_t)); - - // force reconfiguration of effect chains and engines to take new buffer size and audio - // parameters into account - // Note that mLock is not held when readOutputParameters() is called from the constructor - // but in this case nothing is done below as no audio sessions have effect yet so it doesn't - // matter. - // create a copy of mEffectChains as calling moveEffectChain_l() can reorder some effect chains - Vector< sp > effectChains = mEffectChains; - for (size_t i = 0; i < effectChains.size(); i ++) { - mAudioFlinger->moveEffectChain_l(effectChains[i]->sessionId(), this, this, false); - } -} +#endif + AudioStreamIn *input = new AudioStreamIn(inHwDev, inStream); + // Start record thread + // RecorThread require both input and output device indication to forward to audio + // pre processing modules + audio_devices_t device = (*pDevices) | primaryOutputDevice_l(); -status_t AudioFlinger::PlaybackThread::getRenderPosition(size_t *halFrames, size_t *dspFrames) -{ - if (halFrames == NULL || dspFrames == NULL) { - return BAD_VALUE; - } - Mutex::Autolock _l(mLock); - if (initCheck() != NO_ERROR) { - return INVALID_OPERATION; - } - size_t framesWritten = mBytesWritten / mFrameSize; - *halFrames = framesWritten; + thread = new RecordThread(this, + input, + reqSamplingRate, + reqChannels, + id, + device, teeSink); + mRecordThreads.add(id, thread); + ALOGV("openInput() created record thread: ID %d thread %p", id, thread); + if (pSamplingRate != NULL) *pSamplingRate = reqSamplingRate; + if (pFormat != NULL) *pFormat = config.format; + if (pChannelMask != NULL) *pChannelMask = reqChannels; - if (isSuspended()) { - // return an estimation of rendered frames when the output is suspended - size_t latencyFrames = (latency_l() * mSampleRate) / 1000; - *dspFrames = framesWritten >= latencyFrames ? 
framesWritten - latencyFrames : 0; - return NO_ERROR; - } else { - return mOutput->stream->get_render_position(mOutput->stream, dspFrames); + // notify client processes of the new input creation + thread->audioConfigChanged_l(AudioSystem::INPUT_OPENED); + return id; } + + return 0; } -uint32_t AudioFlinger::PlaybackThread::hasAudioSession(int sessionId) const +status_t AudioFlinger::closeInput(audio_io_handle_t input) { - Mutex::Autolock _l(mLock); - uint32_t result = 0; - if (getEffectChain_l(sessionId) != 0) { - result = EFFECT_SESSION; - } - - for (size_t i = 0; i < mTracks.size(); ++i) { - sp track = mTracks[i]; - if (sessionId == track->sessionId() && - !(track->mCblk->flags & CBLK_INVALID)) { - result |= TRACK_SESSION; - break; - } - } - - return result; + return closeInput_nonvirtual(input); } -uint32_t AudioFlinger::PlaybackThread::getStrategyForSession_l(int sessionId) +status_t AudioFlinger::closeInput_nonvirtual(audio_io_handle_t input) { - // session AUDIO_SESSION_OUTPUT_MIX is placed in same strategy as MUSIC stream so that - // it is moved to correct output by audio policy manager when A2DP is connected or disconnected - if (sessionId == AUDIO_SESSION_OUTPUT_MIX) { - return AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC); - } - for (size_t i = 0; i < mTracks.size(); i++) { - sp track = mTracks[i]; - if (sessionId == track->sessionId() && - !(track->mCblk->flags & CBLK_INVALID)) { - return AudioSystem::getStrategyForStream(track->streamType()); + // keep strong reference on the record thread so that + // it is not destroyed while exit() is executed + sp thread; + { + Mutex::Autolock _l(mLock); + thread = checkRecordThread_l(input); + if (thread == 0) { + return BAD_VALUE; } + + ALOGV("closeInput() %d", input); + audioConfigChanged_l(AudioSystem::INPUT_CLOSED, input, NULL); + mRecordThreads.removeItem(input); } - return AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC); -} + thread->exit(); + // The thread entity (active unit of execution) is no longer running here, + // but the ThreadBase container still exists. 
+ AudioStreamIn *in = thread->clearInput(); + ALOG_ASSERT(in != NULL, "in shouldn't be NULL"); + // from now on thread->mInput is NULL + in->hwDev()->close_input_stream(in->hwDev(), in->stream); + delete in; -AudioFlinger::AudioStreamOut* AudioFlinger::PlaybackThread::getOutput() const -{ - Mutex::Autolock _l(mLock); - return mOutput; + return NO_ERROR; } -AudioFlinger::AudioStreamOut* AudioFlinger::PlaybackThread::clearOutput() +status_t AudioFlinger::setStreamOutput(audio_stream_type_t stream, audio_io_handle_t output) { Mutex::Autolock _l(mLock); - AudioStreamOut *output = mOutput; - mOutput = NULL; - // FIXME FastMixer might also have a raw ptr to mOutputSink; - // must push a NULL and wait for ack - mOutputSink.clear(); - mPipeSink.clear(); - mNormalSink.clear(); - return output; -} + ALOGV("setStreamOutput() stream %d to output %d", stream, output); -// this method must always be called either with ThreadBase mLock held or inside the thread loop -audio_stream_t* AudioFlinger::PlaybackThread::stream() const -{ - if (mOutput == NULL) { - return NULL; + for (size_t i = 0; i < mPlaybackThreads.size(); i++) { + PlaybackThread *thread = mPlaybackThreads.valueAt(i).get(); + thread->invalidateTracks(stream); } - return &mOutput->stream->common; + + return NO_ERROR; } -uint32_t AudioFlinger::PlaybackThread::activeSleepTimeUs() const + +int AudioFlinger::newAudioSessionId() { - return (uint32_t)((uint32_t)((mNormalFrameCount * 1000) / mSampleRate) * 1000); + return nextUniqueId(); } -status_t AudioFlinger::PlaybackThread::setSyncEvent(const sp& event) +void AudioFlinger::acquireAudioSessionId(int audioSession) { - if (!isValidSyncEvent(event)) { - return BAD_VALUE; - } - Mutex::Autolock _l(mLock); - - for (size_t i = 0; i < mTracks.size(); ++i) { - sp track = mTracks[i]; - if (event->triggerSession() == track->sessionId()) { - (void) track->setSyncEvent(event); - return NO_ERROR; + pid_t caller = IPCThreadState::self()->getCallingPid(); + ALOGV("acquiring %d from %d", audioSession, caller); + size_t num = mAudioSessionRefs.size(); + for (size_t i = 0; i< num; i++) { + AudioSessionRef *ref = mAudioSessionRefs.editItemAt(i); + if (ref->mSessionid == audioSession && ref->mPid == caller) { + ref->mCnt++; + ALOGV(" incremented refcount to %d", ref->mCnt); + return; } } - - return NAME_NOT_FOUND; -} - -bool AudioFlinger::PlaybackThread::isValidSyncEvent(const sp& event) const -{ - return event->type() == AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE; + mAudioSessionRefs.push(new AudioSessionRef(audioSession, caller)); + ALOGV(" added new entry for %d", audioSession); } -void AudioFlinger::PlaybackThread::threadLoop_removeTracks( - const Vector< sp >& tracksToRemove) +void AudioFlinger::releaseAudioSessionId(int audioSession) { - size_t count = tracksToRemove.size(); - if (CC_UNLIKELY(count)) { - for (size_t i = 0 ; i < count ; i++) { - const sp& track = tracksToRemove.itemAt(i); - if ((track->sharedBuffer() != 0) && - (track->mState == TrackBase::ACTIVE || track->mState == TrackBase::RESUMING)) { - AudioSystem::stopOutput(mId, track->streamType(), track->sessionId()); + Mutex::Autolock _l(mLock); + pid_t caller = IPCThreadState::self()->getCallingPid(); + ALOGV("releasing %d from %d", audioSession, caller); + size_t num = mAudioSessionRefs.size(); + for (size_t i = 0; i< num; i++) { + AudioSessionRef *ref = mAudioSessionRefs.itemAt(i); + if (ref->mSessionid == audioSession && ref->mPid == caller) { + ref->mCnt--; + ALOGV(" decremented refcount to %d", ref->mCnt); + if (ref->mCnt == 0) { + 
mAudioSessionRefs.removeAt(i); + delete ref; + purgeStaleEffects_l(); } + return; } } - + ALOGW("session id %d not found for pid %d", audioSession, caller); } -// ---------------------------------------------------------------------------- +void AudioFlinger::purgeStaleEffects_l() { -AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, AudioStreamOut* output, - audio_io_handle_t id, audio_devices_t device, type_t type) - : PlaybackThread(audioFlinger, output, id, device, type), - // mAudioMixer below - // mFastMixer below - mFastMixerFutex(0) - // mOutputSink below - // mPipeSink below - // mNormalSink below -{ - ALOGV("MixerThread() id=%d device=%#x type=%d", id, device, type); - ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%d, mFormat=%d, mFrameSize=%u, " - "mFrameCount=%d, mNormalFrameCount=%d", - mSampleRate, mChannelMask, mChannelCount, mFormat, mFrameSize, mFrameCount, - mNormalFrameCount); - mAudioMixer = new AudioMixer(mNormalFrameCount, mSampleRate); - - // FIXME - Current mixer implementation only supports stereo output - if (mChannelCount != FCC_2) { - ALOGE("Invalid audio hardware channel count %d", mChannelCount); - } - - // create an NBAIO sink for the HAL output stream, and negotiate - mOutputSink = new AudioStreamOutSink(output->stream); - size_t numCounterOffers = 0; - const NBAIO_Format offers[1] = {Format_from_SR_C(mSampleRate, mChannelCount)}; - ssize_t index = mOutputSink->negotiate(offers, 1, NULL, numCounterOffers); - ALOG_ASSERT(index == 0); - - // initialize fast mixer depending on configuration - bool initFastMixer; - switch (kUseFastMixer) { - case FastMixer_Never: - initFastMixer = false; - break; - case FastMixer_Always: - initFastMixer = true; - break; - case FastMixer_Static: - case FastMixer_Dynamic: - initFastMixer = mFrameCount < mNormalFrameCount; - break; - } - if (initFastMixer) { - - // create a MonoPipe to connect our submix to FastMixer - NBAIO_Format format = mOutputSink->format(); - // This pipe depth compensates for scheduling latency of the normal mixer thread. - // When it wakes up after a maximum latency, it runs a few cycles quickly before - // finally blocking. Note the pipe implementation rounds up the request to a power of 2. - MonoPipe *monoPipe = new MonoPipe(mNormalFrameCount * 4, format, true /*writeCanBlock*/); - const NBAIO_Format offers[1] = {format}; - size_t numCounterOffers = 0; - ssize_t index = monoPipe->negotiate(offers, 1, NULL, numCounterOffers); - ALOG_ASSERT(index == 0); - monoPipe->setAvgFrames((mScreenState & 1) ? 
- (monoPipe->maxFrames() * 7) / 8 : mNormalFrameCount * 2); - mPipeSink = monoPipe; - -#ifdef TEE_SINK_FRAMES - // create a Pipe to archive a copy of FastMixer's output for dumpsys - Pipe *teeSink = new Pipe(TEE_SINK_FRAMES, format); - numCounterOffers = 0; - index = teeSink->negotiate(offers, 1, NULL, numCounterOffers); - ALOG_ASSERT(index == 0); - mTeeSink = teeSink; - PipeReader *teeSource = new PipeReader(*teeSink); - numCounterOffers = 0; - index = teeSource->negotiate(offers, 1, NULL, numCounterOffers); - ALOG_ASSERT(index == 0); - mTeeSource = teeSource; -#endif + ALOGV("purging stale effects"); - // create fast mixer and configure it initially with just one fast track for our submix - mFastMixer = new FastMixer(); - FastMixerStateQueue *sq = mFastMixer->sq(); -#ifdef STATE_QUEUE_DUMP - sq->setObserverDump(&mStateQueueObserverDump); - sq->setMutatorDump(&mStateQueueMutatorDump); -#endif - FastMixerState *state = sq->begin(); - FastTrack *fastTrack = &state->mFastTracks[0]; - // wrap the source side of the MonoPipe to make it an AudioBufferProvider - fastTrack->mBufferProvider = new SourceAudioBufferProvider(new MonoPipeReader(monoPipe)); - fastTrack->mVolumeProvider = NULL; - fastTrack->mGeneration++; - state->mFastTracksGen++; - state->mTrackMask = 1; - // fast mixer will use the HAL output sink - state->mOutputSink = mOutputSink.get(); - state->mOutputSinkGen++; - state->mFrameCount = mFrameCount; - state->mCommand = FastMixerState::COLD_IDLE; - // already done in constructor initialization list - //mFastMixerFutex = 0; - state->mColdFutexAddr = &mFastMixerFutex; - state->mColdGen++; - state->mDumpState = &mFastMixerDumpState; - state->mTeeSink = mTeeSink.get(); - sq->end(); - sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED); - - // start the fast mixer - mFastMixer->run("FastMixer", PRIORITY_URGENT_AUDIO); - pid_t tid = mFastMixer->getTid(); - int err = requestPriority(getpid_cached, tid, kPriorityFastMixer); - if (err != 0) { - ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; error %d", - kPriorityFastMixer, getpid_cached, tid, err); - } + Vector< sp > chains; -#ifdef AUDIO_WATCHDOG - // create and start the watchdog - mAudioWatchdog = new AudioWatchdog(); - mAudioWatchdog->setDump(&mAudioWatchdogDump); - mAudioWatchdog->run("AudioWatchdog", PRIORITY_URGENT_AUDIO); - tid = mAudioWatchdog->getTid(); - err = requestPriority(getpid_cached, tid, kPriorityFastMixer); - if (err != 0) { - ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; error %d", - kPriorityFastMixer, getpid_cached, tid, err); + for (size_t i = 0; i < mPlaybackThreads.size(); i++) { + sp t = mPlaybackThreads.valueAt(i); + for (size_t j = 0; j < t->mEffectChains.size(); j++) { + sp ec = t->mEffectChains[j]; + if (ec->sessionId() > AUDIO_SESSION_OUTPUT_MIX) { + chains.push(ec); + } } -#endif - - } else { - mFastMixer = NULL; } - - switch (kUseFastMixer) { - case FastMixer_Never: - case FastMixer_Dynamic: - mNormalSink = mOutputSink; - break; - case FastMixer_Always: - mNormalSink = mPipeSink; - break; - case FastMixer_Static: - mNormalSink = initFastMixer ? 
mPipeSink : mOutputSink;
- break;
+ for (size_t i = 0; i < mRecordThreads.size(); i++) {
+ sp<RecordThread> t = mRecordThreads.valueAt(i);
+ for (size_t j = 0; j < t->mEffectChains.size(); j++) {
+ sp<EffectChain> ec = t->mEffectChains[j];
+ chains.push(ec);
+ }
 }
-}
-AudioFlinger::MixerThread::~MixerThread()
-{
- if (mFastMixer != NULL) {
- FastMixerStateQueue *sq = mFastMixer->sq();
- FastMixerState *state = sq->begin();
- if (state->mCommand == FastMixerState::COLD_IDLE) {
- int32_t old = android_atomic_inc(&mFastMixerFutex);
- if (old == -1) {
- __futex_syscall3(&mFastMixerFutex, FUTEX_WAKE_PRIVATE, 1);
+ for (size_t i = 0; i < chains.size(); i++) {
+ sp<EffectChain> ec = chains[i];
+ int sessionid = ec->sessionId();
+ sp<ThreadBase> t = ec->mThread.promote();
+ if (t == 0) {
+ continue;
+ }
+ size_t numsessionrefs = mAudioSessionRefs.size();
+ bool found = false;
+ for (size_t k = 0; k < numsessionrefs; k++) {
+ AudioSessionRef *ref = mAudioSessionRefs.itemAt(k);
+ if (ref->mSessionid == sessionid) {
+ ALOGV(" session %d still exists for %d with %d refs",
+ sessionid, ref->mPid, ref->mCnt);
+ found = true;
+ break;
 }
 }
- state->mCommand = FastMixerState::EXIT;
- sq->end();
- sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
- mFastMixer->join();
- // Though the fast mixer thread has exited, it's state queue is still valid.
- // We'll use that extract the final state which contains one remaining fast track
- // corresponding to our sub-mix.
- state = sq->begin();
- ALOG_ASSERT(state->mTrackMask == 1);
- FastTrack *fastTrack = &state->mFastTracks[0];
- ALOG_ASSERT(fastTrack->mBufferProvider != NULL);
- delete fastTrack->mBufferProvider;
- sq->end(false /*didModify*/);
- delete mFastMixer;
-#ifdef AUDIO_WATCHDOG
- if (mAudioWatchdog != 0) {
- mAudioWatchdog->requestExit();
- mAudioWatchdog->requestExitAndWait();
- mAudioWatchdog.clear();
+ if (!found) {
+ Mutex::Autolock _l (t->mLock);
+ // remove all effects from the chain
+ while (ec->mEffects.size()) {
+ sp<EffectModule> effect = ec->mEffects[0];
+ effect->unPin();
+ t->removeEffect_l(effect);
+ if (effect->purgeHandles()) {
+ t->checkSuspendOnEffectEnabled_l(effect, false, effect->sessionId());
+ }
+ AudioSystem::unregisterEffect(effect->id());
+ }
 }
-#endif
 }
- delete mAudioMixer;
+ return;
 }
-class CpuStats {
-public:
- CpuStats();
- void sample(const String8 &title);
-#ifdef DEBUG_CPU_USAGE
-private:
- ThreadCpuUsage mCpuUsage; // instantaneous thread CPU usage in wall clock ns
- CentralTendencyStatistics mWcStats; // statistics on thread CPU usage in wall clock ns
-
- CentralTendencyStatistics mHzStats; // statistics on thread CPU usage in cycles
-
- int mCpuNum; // thread's current CPU number
- int mCpukHz; // frequency of thread's current CPU in kHz
-#endif
-};
-
-CpuStats::CpuStats()
-#ifdef DEBUG_CPU_USAGE
- : mCpuNum(-1), mCpukHz(-1)
-#endif
+// checkPlaybackThread_l() must be called with AudioFlinger::mLock held
+AudioFlinger::PlaybackThread *AudioFlinger::checkPlaybackThread_l(audio_io_handle_t output) const
 {
+ return mPlaybackThreads.valueFor(output).get();
 }
-void CpuStats::sample(const String8 &title) {
-#ifdef DEBUG_CPU_USAGE
- // get current thread's delta CPU time in wall clock ns
- double wcNs;
- bool valid = mCpuUsage.sampleAndEnable(wcNs);
-
- // record sample for wall clock statistics
- if (valid) {
- mWcStats.sample(wcNs);
- }
-
- // get the current CPU number
- int cpuNum = sched_getcpu();
-
- // get the current CPU frequency in kHz
- int cpukHz = mCpuUsage.getCpukHz(cpuNum);
-
- // check if either CPU number or frequency changed
- if (cpuNum != mCpuNum || cpukHz !=
mCpukHz) { - mCpuNum = cpuNum; - mCpukHz = cpukHz; - // ignore sample for purposes of cycles - valid = false; - } - - // if no change in CPU number or frequency, then record sample for cycle statistics - if (valid && mCpukHz > 0) { - double cycles = wcNs * cpukHz * 0.000001; - mHzStats.sample(cycles); - } +// checkMixerThread_l() must be called with AudioFlinger::mLock held +AudioFlinger::MixerThread *AudioFlinger::checkMixerThread_l(audio_io_handle_t output) const +{ + PlaybackThread *thread = checkPlaybackThread_l(output); + return thread != NULL && thread->type() != ThreadBase::DIRECT ? (MixerThread *) thread : NULL; +} - unsigned n = mWcStats.n(); - // mCpuUsage.elapsed() is expensive, so don't call it every loop - if ((n & 127) == 1) { - long long elapsed = mCpuUsage.elapsed(); - if (elapsed >= DEBUG_CPU_USAGE * 1000000000LL) { - double perLoop = elapsed / (double) n; - double perLoop100 = perLoop * 0.01; - double perLoop1k = perLoop * 0.001; - double mean = mWcStats.mean(); - double stddev = mWcStats.stddev(); - double minimum = mWcStats.minimum(); - double maximum = mWcStats.maximum(); - double meanCycles = mHzStats.mean(); - double stddevCycles = mHzStats.stddev(); - double minCycles = mHzStats.minimum(); - double maxCycles = mHzStats.maximum(); - mCpuUsage.resetElapsed(); - mWcStats.reset(); - mHzStats.reset(); - ALOGD("CPU usage for %s over past %.1f secs\n" - " (%u mixer loops at %.1f mean ms per loop):\n" - " us per mix loop: mean=%.0f stddev=%.0f min=%.0f max=%.0f\n" - " %% of wall: mean=%.1f stddev=%.1f min=%.1f max=%.1f\n" - " MHz: mean=%.1f, stddev=%.1f, min=%.1f max=%.1f", - title.string(), - elapsed * .000000001, n, perLoop * .000001, - mean * .001, - stddev * .001, - minimum * .001, - maximum * .001, - mean / perLoop100, - stddev / perLoop100, - minimum / perLoop100, - maximum / perLoop100, - meanCycles / perLoop1k, - stddevCycles / perLoop1k, - minCycles / perLoop1k, - maxCycles / perLoop1k); +// checkRecordThread_l() must be called with AudioFlinger::mLock held +AudioFlinger::RecordThread *AudioFlinger::checkRecordThread_l(audio_io_handle_t input) const +{ + return mRecordThreads.valueFor(input).get(); +} - } - } -#endif -}; +uint32_t AudioFlinger::nextUniqueId() +{ + return android_atomic_inc(&mNextUniqueId); +} -void AudioFlinger::PlaybackThread::checkSilentMode_l() +AudioFlinger::PlaybackThread *AudioFlinger::primaryPlaybackThread_l() const { - if (!mMasterMute) { - char value[PROPERTY_VALUE_MAX]; - if (property_get("ro.audio.silent", value, "0") > 0) { - char *endptr; - unsigned long ul = strtoul(value, &endptr, 0); - if (*endptr == '\0' && ul != 0) { - ALOGD("Silence is golden"); - // The setprop command will not allow a property to be changed after - // the first time it is set, so we don't have to worry about un-muting. - setMasterMute_l(true); - } + for (size_t i = 0; i < mPlaybackThreads.size(); i++) { + PlaybackThread *thread = mPlaybackThreads.valueAt(i).get(); + AudioStreamOut *output = thread->getOutput(); + if (output != NULL && output->audioHwDev == mPrimaryHardwareDev) { + return thread; } } + return NULL; } -bool AudioFlinger::PlaybackThread::threadLoop() +audio_devices_t AudioFlinger::primaryOutputDevice_l() const { - Vector< sp > tracksToRemove; - - standbyTime = systemTime(); - - // MIXER - nsecs_t lastWarning = 0; - - // DUPLICATING - // FIXME could this be made local to while loop? 
- writeFrames = 0; - - cacheParameters_l(); - sleepTime = idleSleepTime; + PlaybackThread *thread = primaryPlaybackThread_l(); - if (mType == MIXER) { - sleepTimeShift = 0; + if (thread == NULL) { + return 0; } - CpuStats cpuStats; - const String8 myName(String8::format("thread %p type %d TID %d", this, mType, gettid())); - - acquireWakeLock(); - - while (!exitPending()) - { - cpuStats.sample(myName); - - Vector< sp > effectChains; - - processConfigEvents(); - - { // scope for mLock - - Mutex::Autolock _l(mLock); - - if (checkForNewParameters_l()) { - cacheParameters_l(); - } - - saveOutputTracks(); - - // put audio hardware into standby after short delay - if (CC_UNLIKELY((!mActiveTracks.size() && systemTime() > standbyTime) || - isSuspended())) { - if (!mStandby) { - - threadLoop_standby(); - - mStandby = true; - } - - if (!mActiveTracks.size() && mConfigEvents.isEmpty()) { - // we're about to wait, flush the binder command buffer - IPCThreadState::self()->flushCommands(); - - clearOutputTracks(); - - if (exitPending()) { - break; - } - - releaseWakeLock_l(); - // wait until we have something to do... - ALOGV("%s going to sleep", myName.string()); - mWaitWorkCV.wait(mLock); - ALOGV("%s waking up", myName.string()); - acquireWakeLock_l(); - - mMixerStatus = MIXER_IDLE; - mMixerStatusIgnoringFastTracks = MIXER_IDLE; - mBytesWritten = 0; - - checkSilentMode_l(); - - standbyTime = systemTime() + standbyDelay; - sleepTime = idleSleepTime; - if (mType == MIXER) { - sleepTimeShift = 0; - } - - continue; - } - } - - // mMixerStatusIgnoringFastTracks is also updated internally - mMixerStatus = prepareTracks_l(&tracksToRemove); - - // prevent any changes in effect chain list and in each effect chain - // during mixing and effect process as the audio buffers could be deleted - // or modified if an effect is created or deleted - lockEffectChains_l(effectChains); - } - - if (CC_LIKELY(mMixerStatus == MIXER_TRACKS_READY)) { - threadLoop_mix(); - } else { - threadLoop_sleepTime(); - } - - if (isSuspended()) { - sleepTime = suspendSleepTimeUs(); - mBytesWritten += mixBufferSize; - } - - // only process effects if we're going to write - if (sleepTime == 0) { - for (size_t i = 0; i < effectChains.size(); i ++) { - effectChains[i]->process_l(); - } - } - - // enable changes in effect chain - unlockEffectChains(effectChains); - - // sleepTime == 0 means we must write to audio hardware - if (sleepTime == 0) { - - threadLoop_write(); - -if (mType == MIXER) { - // write blocked detection - nsecs_t now = systemTime(); - nsecs_t delta = now - mLastWriteTime; - if (!mStandby && delta > maxPeriod) { - mNumDelayedWrites++; - if ((now - lastWarning) > kWarningThrottleNs) { -#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) - ScopedTrace st(ATRACE_TAG, "underrun"); -#endif - ALOGW("write blocked for %llu msecs, %d delayed writes, thread %p", - ns2ms(delta), mNumDelayedWrites, this); - lastWarning = now; - } - } + return thread->outDevice(); } - mStandby = false; - } else { - usleep(sleepTime); - } - - // Finally let go of removed track(s), without the lock held - // since we can't guarantee the destructors won't acquire that - // same lock. This will also mutate and push a new fast mixer state. - threadLoop_removeTracks(tracksToRemove); - tracksToRemove.clear(); - - // FIXME I don't understand the need for this here; - // it was in the original code but maybe the - // assignment in saveOutputTracks() makes this unnecessary? 
- clearOutputTracks(); - - // Effect chains will be actually deleted here if they were removed from - // mEffectChains list during mixing or effects processing - effectChains.clear(); +sp AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type, + int triggerSession, + int listenerSession, + sync_event_callback_t callBack, + void *cookie) +{ + Mutex::Autolock _l(mLock); - // FIXME Note that the above .clear() is no longer necessary since effectChains - // is now local to this block, but will keep it for now (at least until merge done). + sp event = new SyncEvent(type, triggerSession, listenerSession, callBack, cookie); + status_t playStatus = NAME_NOT_FOUND; + status_t recStatus = NAME_NOT_FOUND; + for (size_t i = 0; i < mPlaybackThreads.size(); i++) { + playStatus = mPlaybackThreads.valueAt(i)->setSyncEvent(event); + if (playStatus == NO_ERROR) { + return event; + } } - - // for DuplicatingThread, standby mode is handled by the outputTracks, otherwise ... - if (mType == MIXER || mType == DIRECT) { - // put output stream into standby mode - if (!mStandby) { - mOutput->stream->common.standby(&mOutput->stream->common); + for (size_t i = 0; i < mRecordThreads.size(); i++) { + recStatus = mRecordThreads.valueAt(i)->setSyncEvent(event); + if (recStatus == NO_ERROR) { + return event; } } - - releaseWakeLock(); - - ALOGV("Thread %p type %d exiting", this, mType); - return false; + if (playStatus == NAME_NOT_FOUND || recStatus == NAME_NOT_FOUND) { + mPendingSyncEvents.add(event); + } else { + ALOGV("createSyncEvent() invalid event %d", event->type()); + event.clear(); + } + return event; } -void AudioFlinger::MixerThread::threadLoop_removeTracks(const Vector< sp >& tracksToRemove) -{ - PlaybackThread::threadLoop_removeTracks(tracksToRemove); -} +// ---------------------------------------------------------------------------- +// Effect management +// ---------------------------------------------------------------------------- -void AudioFlinger::MixerThread::threadLoop_write() -{ - // FIXME we should only do one push per cycle; confirm this is true - // Start the fast mixer if it's not already running - if (mFastMixer != NULL) { - FastMixerStateQueue *sq = mFastMixer->sq(); - FastMixerState *state = sq->begin(); - if (state->mCommand != FastMixerState::MIX_WRITE && - (kUseFastMixer != FastMixer_Dynamic || state->mTrackMask > 1)) { - if (state->mCommand == FastMixerState::COLD_IDLE) { - int32_t old = android_atomic_inc(&mFastMixerFutex); - if (old == -1) { - __futex_syscall3(&mFastMixerFutex, FUTEX_WAKE_PRIVATE, 1); - } -#ifdef AUDIO_WATCHDOG - if (mAudioWatchdog != 0) { - mAudioWatchdog->resume(); - } -#endif - } - state->mCommand = FastMixerState::MIX_WRITE; - sq->end(); - sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED); - if (kUseFastMixer == FastMixer_Dynamic) { - mNormalSink = mPipeSink; - } - } else { - sq->end(false /*didModify*/); - } - } - PlaybackThread::threadLoop_write(); -} -// shared by MIXER and DIRECT, overridden by DUPLICATING -void AudioFlinger::PlaybackThread::threadLoop_write() +status_t AudioFlinger::queryNumberEffects(uint32_t *numEffects) const { - // FIXME rewrite to reduce number of system calls - mLastWriteTime = systemTime(); - mInWrite = true; - int bytesWritten; - - // If an NBAIO sink is present, use it to write the normal mixer's submix - if (mNormalSink != 0) { -#define mBitShift 2 // FIXME - size_t count = mixBufferSize >> mBitShift; -#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) - Tracer::traceBegin(ATRACE_TAG, "write"); -#endif - // update the 
setpoint when gScreenState changes - uint32_t screenState = gScreenState; - if (screenState != mScreenState) { - mScreenState = screenState; - MonoPipe *pipe = (MonoPipe *)mPipeSink.get(); - if (pipe != NULL) { - pipe->setAvgFrames((mScreenState & 1) ? - (pipe->maxFrames() * 7) / 8 : mNormalFrameCount * 2); - } - } - ssize_t framesWritten = mNormalSink->write(mMixBuffer, count); -#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) - Tracer::traceEnd(ATRACE_TAG); -#endif - if (framesWritten > 0) { - bytesWritten = framesWritten << mBitShift; - } else { - bytesWritten = framesWritten; - } - // otherwise use the HAL / AudioStreamOut directly - } else { - // Direct output thread. - bytesWritten = (int)mOutput->stream->write(mOutput->stream, mMixBuffer, mixBufferSize); - } - - if (bytesWritten > 0) { - mBytesWritten += mixBufferSize; - } - mNumWrites++; - mInWrite = false; + Mutex::Autolock _l(mLock); + return EffectQueryNumberEffects(numEffects); } -void AudioFlinger::MixerThread::threadLoop_standby() +status_t AudioFlinger::queryEffect(uint32_t index, effect_descriptor_t *descriptor) const { - // Idle the fast mixer if it's currently running - if (mFastMixer != NULL) { - FastMixerStateQueue *sq = mFastMixer->sq(); - FastMixerState *state = sq->begin(); - if (!(state->mCommand & FastMixerState::IDLE)) { - state->mCommand = FastMixerState::COLD_IDLE; - state->mColdFutexAddr = &mFastMixerFutex; - state->mColdGen++; - mFastMixerFutex = 0; - sq->end(); - // BLOCK_UNTIL_PUSHED would be insufficient, as we need it to stop doing I/O now - sq->push(FastMixerStateQueue::BLOCK_UNTIL_ACKED); - if (kUseFastMixer == FastMixer_Dynamic) { - mNormalSink = mOutputSink; - } -#ifdef AUDIO_WATCHDOG - if (mAudioWatchdog != 0) { - mAudioWatchdog->pause(); - } -#endif - } else { - sq->end(false /*didModify*/); - } - } - PlaybackThread::threadLoop_standby(); + Mutex::Autolock _l(mLock); + return EffectQueryEffect(index, descriptor); } -// shared by MIXER and DIRECT, overridden by DUPLICATING -void AudioFlinger::PlaybackThread::threadLoop_standby() +status_t AudioFlinger::getEffectDescriptor(const effect_uuid_t *pUuid, + effect_descriptor_t *descriptor) const { - ALOGV("Audio hardware entering standby, mixer %p, suspend count %d", this, mSuspended); - mOutput->stream->common.standby(&mOutput->stream->common); + Mutex::Autolock _l(mLock); + return EffectGetDescriptor(pUuid, descriptor); } -void AudioFlinger::MixerThread::threadLoop_mix() + +sp AudioFlinger::createEffect(pid_t pid, + effect_descriptor_t *pDesc, + const sp& effectClient, + int32_t priority, + audio_io_handle_t io, + int sessionId, + status_t *status, + int *id, + int *enabled) { - // obtain the presentation timestamp of the next output buffer - int64_t pts; - status_t status = INVALID_OPERATION; + status_t lStatus = NO_ERROR; + sp handle; + effect_descriptor_t desc; - if (mNormalSink != 0) { - status = mNormalSink->getNextWriteTimestamp(&pts); - } else { - status = mOutputSink->getNextWriteTimestamp(&pts); - } + ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d", + pid, effectClient.get(), priority, sessionId, io); - if (status != NO_ERROR) { - pts = AudioBufferProvider::kInvalidPTS; + if (pDesc == NULL) { + lStatus = BAD_VALUE; + goto Exit; } - // mix buffers... - mAudioMixer->process(pts); - // increase sleep time progressively when application underrun condition clears. 
- // Only increase sleep time if the mixer is ready for two consecutive times to avoid - // that a steady state of alternating ready/not ready conditions keeps the sleep time - // such that we would underrun the audio HAL. - if ((sleepTime == 0) && (sleepTimeShift > 0)) { - sleepTimeShift--; + // check audio settings permission for global effects + if (sessionId == AUDIO_SESSION_OUTPUT_MIX && !settingsAllowed()) { + lStatus = PERMISSION_DENIED; + goto Exit; } - sleepTime = 0; - standbyTime = systemTime() + standbyDelay; - //TODO: delay standby when effects have a tail -} -void AudioFlinger::MixerThread::threadLoop_sleepTime() -{ - // If no tracks are ready, sleep once for the duration of an output - // buffer size, then write 0s to the output - if (sleepTime == 0) { - if (mMixerStatus == MIXER_TRACKS_ENABLED) { - sleepTime = activeSleepTime >> sleepTimeShift; - if (sleepTime < kMinThreadSleepTimeUs) { - sleepTime = kMinThreadSleepTimeUs; - } - // reduce sleep time in case of consecutive application underruns to avoid - // starving the audio HAL. As activeSleepTimeUs() is larger than a buffer - // duration we would end up writing less data than needed by the audio HAL if - // the condition persists. - if (sleepTimeShift < kMaxThreadSleepTimeShift) { - sleepTimeShift++; - } - } else { - sleepTime = idleSleepTime; - } - } else if (mBytesWritten != 0 || (mMixerStatus == MIXER_TRACKS_ENABLED)) { - memset (mMixBuffer, 0, mixBufferSize); - sleepTime = 0; - ALOGV_IF(mBytesWritten == 0 && (mMixerStatus == MIXER_TRACKS_ENABLED), - "anticipated start"); + // Session AUDIO_SESSION_OUTPUT_STAGE is reserved for output stage effects + // that can only be created by audio policy manager (running in same process) + if (sessionId == AUDIO_SESSION_OUTPUT_STAGE && getpid_cached != pid) { + lStatus = PERMISSION_DENIED; + goto Exit; } - // TODO add standby time extension fct of effect tail -} - -// prepareTracks_l() must be called with ThreadBase::mLock held -AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTracks_l( - Vector< sp > *tracksToRemove) -{ - mixer_state mixerStatus = MIXER_IDLE; - // find out which tracks need to be processed - size_t count = mActiveTracks.size(); - size_t mixedTracks = 0; - size_t tracksWithEffect = 0; - // counts only _active_ fast tracks - size_t fastTracks = 0; - uint32_t resetMask = 0; // bit mask of fast tracks that need to be reset - - float masterVolume = mMasterVolume; - bool masterMute = mMasterMute; - - if (masterMute) { - masterVolume = 0; - } - // Delegate master volume control to effect in output mix effect chain if needed - sp chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX); - if (chain != 0) { - uint32_t v = (uint32_t)(masterVolume * (1 << 24)); - chain->setVolume_l(&v, &v); - masterVolume = (float)((v + (1 << 23)) >> 24); - chain.clear(); + if (io == 0) { + if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) { + // output must be specified by AudioPolicyManager when using session + // AUDIO_SESSION_OUTPUT_STAGE + lStatus = BAD_VALUE; + goto Exit; + } else if (sessionId == AUDIO_SESSION_OUTPUT_MIX) { + // if the output returned by getOutputForEffect() is removed before we lock the + // mutex below, the call to checkPlaybackThread_l(io) below will detect it + // and we will exit safely + io = AudioSystem::getOutputForEffect(&desc); + } } - // prepare a new state to push - FastMixerStateQueue *sq = NULL; - FastMixerState *state = NULL; - bool didModify = false; - FastMixerStateQueue::block_t block = FastMixerStateQueue::BLOCK_UNTIL_PUSHED; - if 
(mFastMixer != NULL) { - sq = mFastMixer->sq(); - state = sq->begin(); - } + { + Mutex::Autolock _l(mLock); - for (size_t i=0 ; i t = mActiveTracks[i].promote(); - if (t == 0) { - continue; - } - // this const just means the local variable doesn't change - Track* const track = t.get(); - - // process fast tracks - if (track->isFastTrack()) { - - // It's theoretically possible (though unlikely) for a fast track to be created - // and then removed within the same normal mix cycle. This is not a problem, as - // the track never becomes active so it's fast mixer slot is never touched. - // The converse, of removing an (active) track and then creating a new track - // at the identical fast mixer slot within the same normal mix cycle, - // is impossible because the slot isn't marked available until the end of each cycle. - int j = track->mFastIndex; - ALOG_ASSERT(0 < j && j < (int)FastMixerState::kMaxFastTracks); - ALOG_ASSERT(!(mFastTrackAvailMask & (1 << j))); - FastTrack *fastTrack = &state->mFastTracks[j]; - - // Determine whether the track is currently in underrun condition, - // and whether it had a recent underrun. - FastTrackDump *ftDump = &mFastMixerDumpState.mTracks[j]; - FastTrackUnderruns underruns = ftDump->mUnderruns; - uint32_t recentFull = (underruns.mBitFields.mFull - - track->mObservedUnderruns.mBitFields.mFull) & UNDERRUN_MASK; - uint32_t recentPartial = (underruns.mBitFields.mPartial - - track->mObservedUnderruns.mBitFields.mPartial) & UNDERRUN_MASK; - uint32_t recentEmpty = (underruns.mBitFields.mEmpty - - track->mObservedUnderruns.mBitFields.mEmpty) & UNDERRUN_MASK; - uint32_t recentUnderruns = recentPartial + recentEmpty; - track->mObservedUnderruns = underruns; - // don't count underruns that occur while stopping or pausing - // or stopped which can occur when flush() is called while active - if (!(track->isStopping() || track->isPausing() || track->isStopped())) { - track->mUnderrunCount += recentUnderruns; + if (!EffectIsNullUuid(&pDesc->uuid)) { + // if uuid is specified, request effect descriptor + lStatus = EffectGetDescriptor(&pDesc->uuid, &desc); + if (lStatus < 0) { + ALOGW("createEffect() error %d from EffectGetDescriptor", lStatus); + goto Exit; + } + } else { + // if uuid is not specified, look for an available implementation + // of the required type in effect factory + if (EffectIsNullUuid(&pDesc->type)) { + ALOGW("createEffect() no effect type"); + lStatus = BAD_VALUE; + goto Exit; } + uint32_t numEffects = 0; + effect_descriptor_t d; + d.flags = 0; // prevent compiler warning + bool found = false; - // This is similar to the state machine for normal tracks, - // with a few modifications for fast tracks. - bool isActive = true; - switch (track->mState) { - case TrackBase::STOPPING_1: - // track stays active in STOPPING_1 state until first underrun - if (recentUnderruns > 0) { - track->mState = TrackBase::STOPPING_2; - } - break; - case TrackBase::PAUSING: - // ramp down is not yet implemented - track->setPaused(); - break; - case TrackBase::RESUMING: - // ramp up is not yet implemented - track->mState = TrackBase::ACTIVE; - break; - case TrackBase::ACTIVE: - if (recentFull > 0 || recentPartial > 0) { - // track has provided at least some frames recently: reset retry count - track->mRetryCount = kMaxTrackRetries; - } - if (recentUnderruns == 0) { - // no recent underruns: stay active - break; - } - // there has recently been an underrun of some kind - if (track->sharedBuffer() == 0) { - // were any of the recent underruns "empty" (no frames available)? 
- if (recentEmpty == 0) { - // no, then ignore the partial underruns as they are allowed indefinitely - break; - } - // there has recently been an "empty" underrun: decrement the retry counter - if (--(track->mRetryCount) > 0) { - break; - } - // indicate to client process that the track was disabled because of underrun; - // it will then automatically call start() when data is available - android_atomic_or(CBLK_DISABLED, &track->mCblk->flags); - // remove from active list, but state remains ACTIVE [confusing but true] - isActive = false; - break; + lStatus = EffectQueryNumberEffects(&numEffects); + if (lStatus < 0) { + ALOGW("createEffect() error %d from EffectQueryNumberEffects", lStatus); + goto Exit; + } + for (uint32_t i = 0; i < numEffects; i++) { + lStatus = EffectQueryEffect(i, &desc); + if (lStatus < 0) { + ALOGW("createEffect() error %d from EffectQueryEffect", lStatus); + continue; } - // fall through - case TrackBase::STOPPING_2: - case TrackBase::PAUSED: - case TrackBase::TERMINATED: - case TrackBase::STOPPED: - case TrackBase::FLUSHED: // flush() while active - // Check for presentation complete if track is inactive - // We have consumed all the buffers of this track. - // This would be incomplete if we auto-paused on underrun - { - size_t audioHALFrames = - (mOutput->stream->get_latency(mOutput->stream)*mSampleRate) / 1000; - size_t framesWritten = mBytesWritten / mFrameSize; - if (!(mStandby || track->presentationComplete(framesWritten, audioHALFrames))) { - // track stays in active list until presentation is complete + if (memcmp(&desc.type, &pDesc->type, sizeof(effect_uuid_t)) == 0) { + // If matching type found save effect descriptor. If the session is + // 0 and the effect is not auxiliary, continue enumeration in case + // an auxiliary version of this effect type is available + found = true; + d = desc; + if (sessionId != AUDIO_SESSION_OUTPUT_MIX || + (desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { break; } } - if (track->isStopping_2()) { - track->mState = TrackBase::STOPPED; - } - if (track->isStopped()) { - // Can't reset directly, as fast mixer is still polling this track - // track->reset(); - // So instead mark this track as needing to be reset after push with ack - resetMask |= 1 << i; - } - isActive = false; - break; - case TrackBase::IDLE: - default: - LOG_FATAL("unexpected track state %d", track->mState); } - - if (isActive) { - // was it previously inactive? - if (!(state->mTrackMask & (1 << j))) { - ExtendedAudioBufferProvider *eabp = track; - VolumeProvider *vp = track; - fastTrack->mBufferProvider = eabp; - fastTrack->mVolumeProvider = vp; - fastTrack->mSampleRate = track->mSampleRate; - fastTrack->mChannelMask = track->mChannelMask; - fastTrack->mGeneration++; - state->mTrackMask |= 1 << j; - didModify = true; - // no acknowledgement required for newly active tracks - } - // cache the combined master volume and stream type volume for fast mixer; this - // lacks any synchronization or barrier so VolumeProvider may read a stale value - track->mCachedVolume = track->isMuted() ? - 0 : masterVolume * mStreamTypes[track->streamType()].volume; - ++fastTracks; - } else { - // was it previously active? - if (state->mTrackMask & (1 << j)) { - fastTrack->mBufferProvider = NULL; - fastTrack->mGeneration++; - state->mTrackMask &= ~(1 << j); - didModify = true; - // If any fast tracks were removed, we must wait for acknowledgement - // because we're about to decrement the last sp<> on those tracks. 
- block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; - } else { - LOG_FATAL("fast track %d should have been active", j); - } - tracksToRemove->add(track); - // Avoids a misleading display in dumpsys - track->mObservedUnderruns.mBitFields.mMostRecent = UNDERRUN_FULL; + if (!found) { + lStatus = BAD_VALUE; + ALOGW("createEffect() effect not found"); + goto Exit; + } + // For same effect type, chose auxiliary version over insert version if + // connect to output mix (Compliance to OpenSL ES) + if (sessionId == AUDIO_SESSION_OUTPUT_MIX && + (d.flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_AUXILIARY) { + desc = d; } - continue; } - { // local variable scope to avoid goto warning - - audio_track_cblk_t* cblk = track->cblk(); - - // The first time a track is added we wait - // for all its buffers to be filled before processing it - int name = track->name(); - // make sure that we have enough frames to mix one full buffer. - // enforce this condition only once to enable draining the buffer in case the client - // app does not call stop() and relies on underrun to stop: - // hence the test on (mMixerStatus == MIXER_TRACKS_READY) meaning the track was mixed - // during last round - uint32_t minFrames = 1; - if ((track->sharedBuffer() == 0) && !track->isStopped() && !track->isPausing() && - (mMixerStatusIgnoringFastTracks == MIXER_TRACKS_READY)) { - if (t->sampleRate() == mSampleRate) { - minFrames = mNormalFrameCount; - } else { - // +1 for rounding and +1 for additional sample needed for interpolation - minFrames = (mNormalFrameCount * t->sampleRate()) / mSampleRate + 1 + 1; - // add frames already consumed but not yet released by the resampler - // because cblk->framesReady() will include these frames - minFrames += mAudioMixer->getUnreleasedFrames(track->name()); - // the minimum track buffer size is normally twice the number of frames necessary - // to fill one buffer and the resampler should not leave more than one buffer worth - // of unreleased frames after each pass, but just in case... 
- ALOG_ASSERT(minFrames <= cblk->frameCount); - } + // Do not allow auxiliary effects on a session different from 0 (output mix) + if (sessionId != AUDIO_SESSION_OUTPUT_MIX && + (desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { + lStatus = INVALID_OPERATION; + goto Exit; } - if ((track->framesReady() >= minFrames) && track->isReady() && - !track->isPaused() && !track->isTerminated()) - { - ALOGVV("track %d u=%08x, s=%08x [OK] on thread %p", name, cblk->user, cblk->server, - this); - - mixedTracks++; - - // track->mainBuffer() != mMixBuffer means there is an effect chain - // connected to the track - chain.clear(); - if (track->mainBuffer() != mMixBuffer) { - chain = getEffectChain_l(track->sessionId()); - // Delegate volume control to effect in track effect chain if needed - if (chain != 0) { - tracksWithEffect++; - } else { - ALOGW("prepareTracks_l(): track %d attached to effect but no chain found on " - "session %d", - name, track->sessionId()); - } - } + // check recording permission for visualizer + if ((memcmp(&desc.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) && + !recordingAllowed()) { + lStatus = PERMISSION_DENIED; + goto Exit; + } - int param = AudioMixer::VOLUME; - if (track->mFillingUpStatus == Track::FS_FILLED) { - // no ramp for the first volume setting - track->mFillingUpStatus = Track::FS_ACTIVE; - if (track->mState == TrackBase::RESUMING) { - track->mState = TrackBase::ACTIVE; - param = AudioMixer::RAMP_VOLUME; - } - mAudioMixer->setParameter(name, AudioMixer::RESAMPLE, AudioMixer::RESET, NULL); - } else if (cblk->server != 0) { - // If the track is stopped before the first frame was mixed, - // do not apply ramp - param = AudioMixer::RAMP_VOLUME; - } + // return effect descriptor + *pDesc = desc; - // compute volume for this track - uint32_t vl, vr, va; - if (track->isMuted() || track->isPausing() || - mStreamTypes[track->streamType()].mute) { - vl = vr = va = 0; - if (track->isPausing()) { - track->setPaused(); - } - } else { - - // read original volumes with volume control - float typeVolume = mStreamTypes[track->streamType()].volume; - float v = masterVolume * typeVolume; - uint32_t vlr = cblk->getVolumeLR(); - vl = vlr & 0xFFFF; - vr = vlr >> 16; - // track volumes come from shared memory, so can't be trusted and must be clamped - if (vl > MAX_GAIN_INT) { - ALOGV("Track left volume out of range: %04X", vl); - vl = MAX_GAIN_INT; - } - if (vr > MAX_GAIN_INT) { - ALOGV("Track right volume out of range: %04X", vr); - vr = MAX_GAIN_INT; - } - // now apply the master volume and stream type volume - vl = (uint32_t)(v * vl) << 12; - vr = (uint32_t)(v * vr) << 12; - // assuming master volume and stream type volume each go up to 1.0, - // vl and vr are now in 8.24 format - - uint16_t sendLevel = cblk->getSendLevel_U4_12(); - // send level comes from shared memory and so may be corrupt - if (sendLevel > MAX_GAIN_INT) { - ALOGV("Track send level out of range: %04X", sendLevel); - sendLevel = MAX_GAIN_INT; - } - va = (uint32_t)(v * sendLevel); - } - // Delegate volume control to effect in track effect chain if needed - if (chain != 0 && chain->setVolume_l(&vl, &vr)) { - // Do not ramp volume if volume is controlled by effect - param = AudioMixer::VOLUME; - track->mHasVolumeController = true; - } else { - // force no volume ramp when volume controller was just disabled or removed - // from effect chain to avoid volume spike - if (track->mHasVolumeController) { - param = AudioMixer::VOLUME; + // If output is not specified try to find a matching audio 
session ID in one of the + // output threads. + // If output is 0 here, sessionId is neither SESSION_OUTPUT_STAGE nor SESSION_OUTPUT_MIX + // because of code checking output when entering the function. + // Note: io is never 0 when creating an effect on an input + if (io == 0) { + // look for the thread where the specified audio session is present + for (size_t i = 0; i < mPlaybackThreads.size(); i++) { + if (mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId) != 0) { + io = mPlaybackThreads.keyAt(i); + break; } - track->mHasVolumeController = false; - } - - // Convert volumes from 8.24 to 4.12 format - // This additional clamping is needed in case chain->setVolume_l() overshot - vl = (vl + (1 << 11)) >> 12; - if (vl > MAX_GAIN_INT) { - vl = MAX_GAIN_INT; - } - vr = (vr + (1 << 11)) >> 12; - if (vr > MAX_GAIN_INT) { - vr = MAX_GAIN_INT; } - - if (va > MAX_GAIN_INT) { - va = MAX_GAIN_INT; // va is uint32_t, so no need to check for - - } - - // XXX: these things DON'T need to be done each time - mAudioMixer->setBufferProvider(name, track); - mAudioMixer->enable(name); - - mAudioMixer->setParameter(name, param, AudioMixer::VOLUME0, (void *)vl); - mAudioMixer->setParameter(name, param, AudioMixer::VOLUME1, (void *)vr); - mAudioMixer->setParameter(name, param, AudioMixer::AUXLEVEL, (void *)va); - mAudioMixer->setParameter( - name, - AudioMixer::TRACK, - AudioMixer::FORMAT, (void *)track->format()); - mAudioMixer->setParameter( - name, - AudioMixer::TRACK, - AudioMixer::CHANNEL_MASK, (void *)track->channelMask()); - mAudioMixer->setParameter( - name, - AudioMixer::RESAMPLE, - AudioMixer::SAMPLE_RATE, - (void *)(cblk->sampleRate)); - mAudioMixer->setParameter( - name, - AudioMixer::TRACK, - AudioMixer::MAIN_BUFFER, (void *)track->mainBuffer()); - mAudioMixer->setParameter( - name, - AudioMixer::TRACK, - AudioMixer::AUX_BUFFER, (void *)track->auxBuffer()); - - // reset retry count - track->mRetryCount = kMaxTrackRetries; - - // If one track is ready, set the mixer ready if: - // - the mixer was not ready during previous round OR - // - no other track is not ready - if (mMixerStatusIgnoringFastTracks != MIXER_TRACKS_READY || - mixerStatus != MIXER_TRACKS_ENABLED) { - mixerStatus = MIXER_TRACKS_READY; - } - } else { - // clear effect chain input buffer if an active track underruns to avoid sending - // previous audio buffer again to effects - chain = getEffectChain_l(track->sessionId()); - if (chain != 0) { - chain->clearInputBuffer(); - } - - ALOGVV("track %d u=%08x, s=%08x [NOT READY] on thread %p", name, cblk->user, - cblk->server, this); - if ((track->sharedBuffer() != 0) || track->isTerminated() || - track->isStopped() || track->isPaused()) { - // We have consumed all the buffers of this track. - // Remove it from the list of active tracks. - // TODO: use actual buffer filling status instead of latency when available from - // audio HAL - size_t audioHALFrames = (latency_l() * mSampleRate) / 1000; - size_t framesWritten = mBytesWritten / mFrameSize; - if (mStandby || track->presentationComplete(framesWritten, audioHALFrames)) { - if (track->isStopped()) { - track->reset(); + if (io == 0) { + for (size_t i = 0; i < mRecordThreads.size(); i++) { + if (mRecordThreads.valueAt(i)->hasAudioSession(sessionId) != 0) { + io = mRecordThreads.keyAt(i); + break; } - tracksToRemove->add(track); - } - } else { - track->mUnderrunCount++; - // No buffers for this track. Give it a few chances to - // fill a buffer, then remove it from active list. 
- if (--(track->mRetryCount) <= 0) { - ALOGV("BUFFER TIMEOUT: remove(%d) from active list on thread %p", name, this); - tracksToRemove->add(track); - // indicate to client process that the track was disabled because of underrun; - // it will then automatically call start() when data is available - android_atomic_or(CBLK_DISABLED, &cblk->flags); - // If one track is not ready, mark the mixer also not ready if: - // - the mixer was ready during previous round OR - // - no other track is ready - } else if (mMixerStatusIgnoringFastTracks == MIXER_TRACKS_READY || - mixerStatus != MIXER_TRACKS_READY) { - mixerStatus = MIXER_TRACKS_ENABLED; } } - mAudioMixer->disable(name); + // If no output thread contains the requested session ID, default to + // first output. The effect chain will be moved to the correct output + // thread when a track with the same session ID is created + if (io == 0 && mPlaybackThreads.size()) { + io = mPlaybackThreads.keyAt(0); + } + ALOGV("createEffect() got io %d for effect %s", io, desc.name); } - - } // local variable scope to avoid goto warning -track_is_ready: ; - - } - - // Push the new FastMixer state if necessary - bool pauseAudioWatchdog = false; - if (didModify) { - state->mFastTracksGen++; - // if the fast mixer was active, but now there are no fast tracks, then put it in cold idle - if (kUseFastMixer == FastMixer_Dynamic && - state->mCommand == FastMixerState::MIX_WRITE && state->mTrackMask <= 1) { - state->mCommand = FastMixerState::COLD_IDLE; - state->mColdFutexAddr = &mFastMixerFutex; - state->mColdGen++; - mFastMixerFutex = 0; - if (kUseFastMixer == FastMixer_Dynamic) { - mNormalSink = mOutputSink; + ThreadBase *thread = checkRecordThread_l(io); + if (thread == NULL) { + thread = checkPlaybackThread_l(io); + if (thread == NULL) { + ALOGE("createEffect() unknown output thread"); + lStatus = BAD_VALUE; + goto Exit; } - // If we go into cold idle, need to wait for acknowledgement - // so that fast mixer stops doing I/O. - block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; - pauseAudioWatchdog = true; } - sq->end(); - } - if (sq != NULL) { - sq->end(didModify); - sq->push(block); - } -#ifdef AUDIO_WATCHDOG - if (pauseAudioWatchdog && mAudioWatchdog != 0) { - mAudioWatchdog->pause(); - } -#endif - // Now perform the deferred reset on fast tracks that have stopped - while (resetMask != 0) { - size_t i = __builtin_ctz(resetMask); - ALOG_ASSERT(i < count); - resetMask &= ~(1 << i); - sp t = mActiveTracks[i].promote(); - if (t == 0) { - continue; - } - Track* track = t.get(); - ALOG_ASSERT(track->isFastTrack() && track->isStopped()); - track->reset(); - } + sp client = registerPid_l(pid); - // remove all the tracks that need to be... 
- count = tracksToRemove->size(); - if (CC_UNLIKELY(count)) { - for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mActiveTracks.remove(track); - if (track->mainBuffer() != mMixBuffer) { - chain = getEffectChain_l(track->sessionId()); - if (chain != 0) { - ALOGV("stopping track on chain %p for session Id: %d", chain.get(), - track->sessionId()); - chain->decActiveTrackCnt(); - } - } - if (track->isTerminated()) { - removeTrack_l(track); - } + // create effect on selected output thread + handle = thread->createEffect_l(client, effectClient, priority, sessionId, + &desc, enabled, &lStatus); + if (handle != 0 && id != NULL) { + *id = handle->id(); } } - // mix buffer must be cleared if all tracks are connected to an - // effect chain as in this case the mixer will not write to - // mix buffer and track effects will accumulate into it - if ((mixedTracks != 0 && mixedTracks == tracksWithEffect) || - (mixedTracks == 0 && fastTracks > 0)) { - // FIXME as a performance optimization, should remember previous zero status - memset(mMixBuffer, 0, mNormalFrameCount * mChannelCount * sizeof(int16_t)); - } - - // if any fast tracks, then status is ready - mMixerStatusIgnoringFastTracks = mixerStatus; - if (fastTracks > 0) { - mixerStatus = MIXER_TRACKS_READY; +Exit: + if (status != NULL) { + *status = lStatus; } - return mixerStatus; -} - -/* -The derived values that are cached: - - mixBufferSize from frame count * frame size - - activeSleepTime from activeSleepTimeUs() - - idleSleepTime from idleSleepTimeUs() - - standbyDelay from mActiveSleepTimeUs (DIRECT only) - - maxPeriod from frame count and sample rate (MIXER only) - -The parameters that affect these derived values are: - - frame count - - frame size - - sample rate - - device type: A2DP or not - - device latency - - format: PCM or not - - active sleep time - - idle sleep time -*/ - -void AudioFlinger::PlaybackThread::cacheParameters_l() -{ - mixBufferSize = mNormalFrameCount * mFrameSize; - activeSleepTime = activeSleepTimeUs(); - idleSleepTime = idleSleepTimeUs(); + return handle; } -void AudioFlinger::PlaybackThread::invalidateTracks(audio_stream_type_t streamType) +status_t AudioFlinger::moveEffects(int sessionId, audio_io_handle_t srcOutput, + audio_io_handle_t dstOutput) { - ALOGV ("MixerThread::invalidateTracks() mixer %p, streamType %d, mTracks.size %d", - this, streamType, mTracks.size()); + ALOGV("moveEffects() session %d, srcOutput %d, dstOutput %d", + sessionId, srcOutput, dstOutput); Mutex::Autolock _l(mLock); - - size_t size = mTracks.size(); - for (size_t i = 0; i < size; i++) { - sp t = mTracks[i]; - if (t->streamType() == streamType) { - android_atomic_or(CBLK_INVALID, &t->mCblk->flags); - t->mCblk->cv.signal(); - } + if (srcOutput == dstOutput) { + ALOGW("moveEffects() same dst and src outputs %d", dstOutput); + return NO_ERROR; + } + PlaybackThread *srcThread = checkPlaybackThread_l(srcOutput); + if (srcThread == NULL) { + ALOGW("moveEffects() bad srcOutput %d", srcOutput); + return BAD_VALUE; + } + PlaybackThread *dstThread = checkPlaybackThread_l(dstOutput); + if (dstThread == NULL) { + ALOGW("moveEffects() bad dstOutput %d", dstOutput); + return BAD_VALUE; } -} -// getTrackName_l() must be called with ThreadBase::mLock held -int AudioFlinger::MixerThread::getTrackName_l(audio_channel_mask_t channelMask, int sessionId) -{ - return mAudioMixer->getTrackName(channelMask, sessionId); -} + Mutex::Autolock _dl(dstThread->mLock); + Mutex::Autolock _sl(srcThread->mLock); + moveEffectChain_l(sessionId, srcThread, dstThread, 
false); -// deleteTrackName_l() must be called with ThreadBase::mLock held -void AudioFlinger::MixerThread::deleteTrackName_l(int name) -{ - ALOGV("remove track (%d) and delete from mixer", name); - mAudioMixer->deleteTrackName(name); + return NO_ERROR; } -// checkForNewParameters_l() must be called with ThreadBase::mLock held -bool AudioFlinger::MixerThread::checkForNewParameters_l() +// moveEffectChain_l must be called with both srcThread and dstThread mLocks held +status_t AudioFlinger::moveEffectChain_l(int sessionId, + AudioFlinger::PlaybackThread *srcThread, + AudioFlinger::PlaybackThread *dstThread, + bool reRegister) { - // if !&IDLE, holds the FastMixer state to restore after new parameters processed - FastMixerState::Command previousCommand = FastMixerState::HOT_IDLE; - bool reconfig = false; - - while (!mNewParameters.isEmpty()) { - - if (mFastMixer != NULL) { - FastMixerStateQueue *sq = mFastMixer->sq(); - FastMixerState *state = sq->begin(); - if (!(state->mCommand & FastMixerState::IDLE)) { - previousCommand = state->mCommand; - state->mCommand = FastMixerState::HOT_IDLE; - sq->end(); - sq->push(FastMixerStateQueue::BLOCK_UNTIL_ACKED); - } else { - sq->end(false /*didModify*/); - } - } - - status_t status = NO_ERROR; - String8 keyValuePair = mNewParameters[0]; - AudioParameter param = AudioParameter(keyValuePair); - int value; + ALOGV("moveEffectChain_l() session %d from thread %p to thread %p", + sessionId, srcThread, dstThread); - if (param.getInt(String8(AudioParameter::keySamplingRate), value) == NO_ERROR) { - reconfig = true; - } - if (param.getInt(String8(AudioParameter::keyFormat), value) == NO_ERROR) { - if ((audio_format_t) value != AUDIO_FORMAT_PCM_16_BIT) { - status = BAD_VALUE; - } else { - reconfig = true; - } - } - if (param.getInt(String8(AudioParameter::keyChannels), value) == NO_ERROR) { - if (value != AUDIO_CHANNEL_OUT_STEREO) { - status = BAD_VALUE; - } else { - reconfig = true; - } - } - if (param.getInt(String8(AudioParameter::keyFrameCount), value) == NO_ERROR) { - // do not accept frame count changes if tracks are open as the track buffer - // size depends on frame count and correct behavior would not be guaranteed - // if frame count is changed after track creation - if (!mTracks.isEmpty()) { - status = INVALID_OPERATION; - } else { - reconfig = true; - } - } - if (param.getInt(String8(AudioParameter::keyRouting), value) == NO_ERROR) { -#ifdef ADD_BATTERY_DATA - // when changing the audio output device, call addBatteryData to notify - // the change - if (mOutDevice != value) { - uint32_t params = 0; - // check whether speaker is on - if (value & AUDIO_DEVICE_OUT_SPEAKER) { - params |= IMediaPlayerService::kBatteryDataSpeakerOn; - } + sp chain = srcThread->getEffectChain_l(sessionId); + if (chain == 0) { + ALOGW("moveEffectChain_l() effect chain for session %d not on source thread %p", + sessionId, srcThread); + return INVALID_OPERATION; + } - audio_devices_t deviceWithoutSpeaker - = AUDIO_DEVICE_OUT_ALL & ~AUDIO_DEVICE_OUT_SPEAKER; - // check if any other device (except speaker) is on - if (value & deviceWithoutSpeaker ) { - params |= IMediaPlayerService::kBatteryDataOtherAudioDeviceOn; - } + // remove chain first. This is useful only if reconfiguring effect chain on same output thread, + // so that a new chain is created with correct parameters when first effect is added. This is + // otherwise unnecessary as removeEffect_l() will remove the chain when last effect is + // removed. 
+ srcThread->removeEffectChain_l(chain); - if (params != 0) { - addBatteryData(params); - } - } -#endif - - // forward device change to effects that have requested to be - // aware of attached audio device. - mOutDevice = value; - for (size_t i = 0; i < mEffectChains.size(); i++) { - mEffectChains[i]->setDevice_l(mOutDevice); - } + // transfer all effects one by one so that new effect chain is created on new thread with + // correct buffer sizes and audio parameters and effect engines reconfigured accordingly + audio_io_handle_t dstOutput = dstThread->id(); + sp dstChain; + uint32_t strategy = 0; // prevent compiler warning + sp effect = chain->getEffectFromId_l(0); + while (effect != 0) { + srcThread->removeEffect_l(effect); + dstThread->addEffect_l(effect); + // removeEffect_l() has stopped the effect if it was active so it must be restarted + if (effect->state() == EffectModule::ACTIVE || + effect->state() == EffectModule::STOPPING) { + effect->start(); } - - if (status == NO_ERROR) { - status = mOutput->stream->common.set_parameters(&mOutput->stream->common, - keyValuePair.string()); - if (!mStandby && status == INVALID_OPERATION) { - mOutput->stream->common.standby(&mOutput->stream->common); - mStandby = true; - mBytesWritten = 0; - status = mOutput->stream->common.set_parameters(&mOutput->stream->common, - keyValuePair.string()); - } - if (status == NO_ERROR && reconfig) { - delete mAudioMixer; - // for safety in case readOutputParameters() accesses mAudioMixer (it doesn't) - mAudioMixer = NULL; - readOutputParameters(); - mAudioMixer = new AudioMixer(mNormalFrameCount, mSampleRate); - for (size_t i = 0; i < mTracks.size() ; i++) { - int name = getTrackName_l(mTracks[i]->mChannelMask, mTracks[i]->mSessionId); - if (name < 0) { - break; - } - mTracks[i]->mName = name; - // limit track sample rate to 2 x new output sample rate - if (mTracks[i]->mCblk->sampleRate > 2 * sampleRate()) { - mTracks[i]->mCblk->sampleRate = 2 * sampleRate(); - } - } - sendIoConfigEvent_l(AudioSystem::OUTPUT_CONFIG_CHANGED); + // if the move request is not received from audio policy manager, the effect must be + // re-registered with the new strategy and output + if (dstChain == 0) { + dstChain = effect->chain().promote(); + if (dstChain == 0) { + ALOGW("moveEffectChain_l() cannot get chain from effect %p", effect.get()); + srcThread->addEffect_l(effect); + return NO_INIT; } + strategy = dstChain->strategy(); } - - mNewParameters.removeAt(0); - - mParamStatus = status; - mParamCond.signal(); - // wait for condition with time out in case the thread calling ThreadBase::setParameters() - // already timed out waiting for the status and will never signal the condition. 
- mWaitWorkCV.waitRelative(mLock, kSetParametersTimeoutNs); - } - - if (!(previousCommand & FastMixerState::IDLE)) { - ALOG_ASSERT(mFastMixer != NULL); - FastMixerStateQueue *sq = mFastMixer->sq(); - FastMixerState *state = sq->begin(); - ALOG_ASSERT(state->mCommand == FastMixerState::HOT_IDLE); - state->mCommand = previousCommand; - sq->end(); - sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED); + if (reRegister) { + AudioSystem::unregisterEffect(effect->id()); + AudioSystem::registerEffect(&effect->desc(), + dstOutput, + strategy, + sessionId, + effect->id()); + } + effect = chain->getEffectFromId_l(0); } - return reconfig; + return NO_ERROR; } void AudioFlinger::dumpTee(int fd, const sp& source, audio_io_handle_t id) @@ -3686,6248 +2209,6 @@ void AudioFlinger::dumpTee(int fd, const sp& source, audio_io_hand } } -void AudioFlinger::MixerThread::dumpInternals(int fd, const Vector& args) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - PlaybackThread::dumpInternals(fd, args); - - snprintf(buffer, SIZE, "AudioMixer tracks: %08x\n", mAudioMixer->trackNames()); - result.append(buffer); - write(fd, result.string(), result.size()); - - // Make a non-atomic copy of fast mixer dump state so it won't change underneath us - FastMixerDumpState copy = mFastMixerDumpState; - copy.dump(fd); - -#ifdef STATE_QUEUE_DUMP - // Similar for state queue - StateQueueObserverDump observerCopy = mStateQueueObserverDump; - observerCopy.dump(fd); - StateQueueMutatorDump mutatorCopy = mStateQueueMutatorDump; - mutatorCopy.dump(fd); -#endif - - // Write the tee output to a .wav file - dumpTee(fd, mTeeSource, mId); - -#ifdef AUDIO_WATCHDOG - if (mAudioWatchdog != 0) { - // Make a non-atomic copy of audio watchdog dump so it won't change underneath us - AudioWatchdogDump wdCopy = mAudioWatchdogDump; - wdCopy.dump(fd); - } -#endif -} - -uint32_t AudioFlinger::MixerThread::idleSleepTimeUs() const -{ - return (uint32_t)(((mNormalFrameCount * 1000) / mSampleRate) * 1000) / 2; -} - -uint32_t AudioFlinger::MixerThread::suspendSleepTimeUs() const -{ - return (uint32_t)(((mNormalFrameCount * 1000) / mSampleRate) * 1000); -} - -void AudioFlinger::MixerThread::cacheParameters_l() -{ - PlaybackThread::cacheParameters_l(); - - // FIXME: Relaxed timing because of a certain device that can't meet latency - // Should be reduced to 2x after the vendor fixes the driver issue - // increase threshold again due to low power audio mode. The way this warning - // threshold is calculated and its usefulness should be reconsidered anyway. 
- maxPeriod = seconds(mNormalFrameCount) / mSampleRate * 15; -} - -// ---------------------------------------------------------------------------- -AudioFlinger::DirectOutputThread::DirectOutputThread(const sp& audioFlinger, - AudioStreamOut* output, audio_io_handle_t id, audio_devices_t device) - : PlaybackThread(audioFlinger, output, id, device, DIRECT) - // mLeftVolFloat, mRightVolFloat -{ -} - -AudioFlinger::DirectOutputThread::~DirectOutputThread() -{ -} - -AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prepareTracks_l( - Vector< sp > *tracksToRemove -) -{ - sp trackToRemove; - - mixer_state mixerStatus = MIXER_IDLE; - - // find out which tracks need to be processed - if (mActiveTracks.size() != 0) { - sp t = mActiveTracks[0].promote(); - // The track died recently - if (t == 0) { - return MIXER_IDLE; - } - - Track* const track = t.get(); - audio_track_cblk_t* cblk = track->cblk(); - - // The first time a track is added we wait - // for all its buffers to be filled before processing it - uint32_t minFrames; - if ((track->sharedBuffer() == 0) && !track->isStopped() && !track->isPausing()) { - minFrames = mNormalFrameCount; - } else { - minFrames = 1; - } - if ((track->framesReady() >= minFrames) && track->isReady() && - !track->isPaused() && !track->isTerminated()) - { - ALOGVV("track %d u=%08x, s=%08x [OK]", track->name(), cblk->user, cblk->server); - - if (track->mFillingUpStatus == Track::FS_FILLED) { - track->mFillingUpStatus = Track::FS_ACTIVE; - mLeftVolFloat = mRightVolFloat = 0; - if (track->mState == TrackBase::RESUMING) { - track->mState = TrackBase::ACTIVE; - } - } - - // compute volume for this track - float left, right; - if (track->isMuted() || mMasterMute || track->isPausing() || - mStreamTypes[track->streamType()].mute) { - left = right = 0; - if (track->isPausing()) { - track->setPaused(); - } - } else { - float typeVolume = mStreamTypes[track->streamType()].volume; - float v = mMasterVolume * typeVolume; - uint32_t vlr = cblk->getVolumeLR(); - float v_clamped = v * (vlr & 0xFFFF); - if (v_clamped > MAX_GAIN) { - v_clamped = MAX_GAIN; - } - left = v_clamped/MAX_GAIN; - v_clamped = v * (vlr >> 16); - if (v_clamped > MAX_GAIN) { - v_clamped = MAX_GAIN; - } - right = v_clamped/MAX_GAIN; - } - - if (left != mLeftVolFloat || right != mRightVolFloat) { - mLeftVolFloat = left; - mRightVolFloat = right; - - // Convert volumes from float to 8.24 - uint32_t vl = (uint32_t)(left * (1 << 24)); - uint32_t vr = (uint32_t)(right * (1 << 24)); - - // Delegate volume control to effect in track effect chain if needed - // only one effect chain can be present on DirectOutputThread, so if - // there is one, the track is connected to it - if (!mEffectChains.isEmpty()) { - // Do not ramp volume if volume is controlled by effect - mEffectChains[0]->setVolume_l(&vl, &vr); - left = (float)vl / (1 << 24); - right = (float)vr / (1 << 24); - } - mOutput->stream->set_volume(mOutput->stream, left, right); - } - - // reset retry count - track->mRetryCount = kMaxTrackRetriesDirect; - mActiveTrack = t; - mixerStatus = MIXER_TRACKS_READY; - } else { - // clear effect chain input buffer if an active track underruns to avoid sending - // previous audio buffer again to effects - if (!mEffectChains.isEmpty()) { - mEffectChains[0]->clearInputBuffer(); - } - - ALOGVV("track %d u=%08x, s=%08x [NOT READY]", track->name(), cblk->user, cblk->server); - if ((track->sharedBuffer() != 0) || track->isTerminated() || - track->isStopped() || track->isPaused()) { - // We have consumed all 
the buffers of this track. - // Remove it from the list of active tracks. - // TODO: implement behavior for compressed audio - size_t audioHALFrames = (latency_l() * mSampleRate) / 1000; - size_t framesWritten = mBytesWritten / mFrameSize; - if (mStandby || track->presentationComplete(framesWritten, audioHALFrames)) { - if (track->isStopped()) { - track->reset(); - } - trackToRemove = track; - } - } else { - // No buffers for this track. Give it a few chances to - // fill a buffer, then remove it from active list. - if (--(track->mRetryCount) <= 0) { - ALOGV("BUFFER TIMEOUT: remove(%d) from active list", track->name()); - trackToRemove = track; - } else { - mixerStatus = MIXER_TRACKS_ENABLED; - } - } - } - } - - // FIXME merge this with similar code for removing multiple tracks - // remove all the tracks that need to be... - if (CC_UNLIKELY(trackToRemove != 0)) { - tracksToRemove->add(trackToRemove); - mActiveTracks.remove(trackToRemove); - if (!mEffectChains.isEmpty()) { - ALOGV("stopping track on chain %p for session Id: %d", mEffectChains[0].get(), - trackToRemove->sessionId()); - mEffectChains[0]->decActiveTrackCnt(); - } - if (trackToRemove->isTerminated()) { - removeTrack_l(trackToRemove); - } - } - - return mixerStatus; -} - -void AudioFlinger::DirectOutputThread::threadLoop_mix() -{ - AudioBufferProvider::Buffer buffer; - size_t frameCount = mFrameCount; - int8_t *curBuf = (int8_t *)mMixBuffer; - // output audio to hardware - while (frameCount) { - buffer.frameCount = frameCount; - mActiveTrack->getNextBuffer(&buffer); - if (CC_UNLIKELY(buffer.raw == NULL)) { - memset(curBuf, 0, frameCount * mFrameSize); - break; - } - memcpy(curBuf, buffer.raw, buffer.frameCount * mFrameSize); - frameCount -= buffer.frameCount; - curBuf += buffer.frameCount * mFrameSize; - mActiveTrack->releaseBuffer(&buffer); - } - sleepTime = 0; - standbyTime = systemTime() + standbyDelay; - mActiveTrack.clear(); - -} - -void AudioFlinger::DirectOutputThread::threadLoop_sleepTime() -{ - if (sleepTime == 0) { - if (mMixerStatus == MIXER_TRACKS_ENABLED) { - sleepTime = activeSleepTime; - } else { - sleepTime = idleSleepTime; - } - } else if (mBytesWritten != 0 && audio_is_linear_pcm(mFormat)) { - memset(mMixBuffer, 0, mFrameCount * mFrameSize); - sleepTime = 0; - } -} - -// getTrackName_l() must be called with ThreadBase::mLock held -int AudioFlinger::DirectOutputThread::getTrackName_l(audio_channel_mask_t channelMask, - int sessionId) -{ - return 0; -} - -// deleteTrackName_l() must be called with ThreadBase::mLock held -void AudioFlinger::DirectOutputThread::deleteTrackName_l(int name) -{ -} - -// checkForNewParameters_l() must be called with ThreadBase::mLock held -bool AudioFlinger::DirectOutputThread::checkForNewParameters_l() -{ - bool reconfig = false; - - while (!mNewParameters.isEmpty()) { - status_t status = NO_ERROR; - String8 keyValuePair = mNewParameters[0]; - AudioParameter param = AudioParameter(keyValuePair); - int value; - - if (param.getInt(String8(AudioParameter::keyFrameCount), value) == NO_ERROR) { - // do not accept frame count changes if tracks are open as the track buffer - // size depends on frame count and correct behavior would not be garantied - // if frame count is changed after track creation - if (!mTracks.isEmpty()) { - status = INVALID_OPERATION; - } else { - reconfig = true; - } - } - if (status == NO_ERROR) { - status = mOutput->stream->common.set_parameters(&mOutput->stream->common, - keyValuePair.string()); - if (!mStandby && status == INVALID_OPERATION) { - 
mOutput->stream->common.standby(&mOutput->stream->common); - mStandby = true; - mBytesWritten = 0; - status = mOutput->stream->common.set_parameters(&mOutput->stream->common, - keyValuePair.string()); - } - if (status == NO_ERROR && reconfig) { - readOutputParameters(); - sendIoConfigEvent_l(AudioSystem::OUTPUT_CONFIG_CHANGED); - } - } - - mNewParameters.removeAt(0); - - mParamStatus = status; - mParamCond.signal(); - // wait for condition with time out in case the thread calling ThreadBase::setParameters() - // already timed out waiting for the status and will never signal the condition. - mWaitWorkCV.waitRelative(mLock, kSetParametersTimeoutNs); - } - return reconfig; -} - -uint32_t AudioFlinger::DirectOutputThread::activeSleepTimeUs() const -{ - uint32_t time; - if (audio_is_linear_pcm(mFormat)) { - time = PlaybackThread::activeSleepTimeUs(); - } else { - time = 10000; - } - return time; -} - -uint32_t AudioFlinger::DirectOutputThread::idleSleepTimeUs() const -{ - uint32_t time; - if (audio_is_linear_pcm(mFormat)) { - time = (uint32_t)(((mFrameCount * 1000) / mSampleRate) * 1000) / 2; - } else { - time = 10000; - } - return time; -} - -uint32_t AudioFlinger::DirectOutputThread::suspendSleepTimeUs() const -{ - uint32_t time; - if (audio_is_linear_pcm(mFormat)) { - time = (uint32_t)(((mFrameCount * 1000) / mSampleRate) * 1000); - } else { - time = 10000; - } - return time; -} - -void AudioFlinger::DirectOutputThread::cacheParameters_l() -{ - PlaybackThread::cacheParameters_l(); - - // use shorter standby delay as on normal output to release - // hardware resources as soon as possible - standbyDelay = microseconds(activeSleepTime*2); -} - -// ---------------------------------------------------------------------------- - -AudioFlinger::DuplicatingThread::DuplicatingThread(const sp& audioFlinger, - AudioFlinger::MixerThread* mainThread, audio_io_handle_t id) - : MixerThread(audioFlinger, mainThread->getOutput(), id, mainThread->outDevice(), - DUPLICATING), - mWaitTimeMs(UINT_MAX) -{ - addOutputTrack(mainThread); -} - -AudioFlinger::DuplicatingThread::~DuplicatingThread() -{ - for (size_t i = 0; i < mOutputTracks.size(); i++) { - mOutputTracks[i]->destroy(); - } -} - -void AudioFlinger::DuplicatingThread::threadLoop_mix() -{ - // mix buffers... 
- if (outputsReady(outputTracks)) { - mAudioMixer->process(AudioBufferProvider::kInvalidPTS); - } else { - memset(mMixBuffer, 0, mixBufferSize); - } - sleepTime = 0; - writeFrames = mNormalFrameCount; - standbyTime = systemTime() + standbyDelay; -} - -void AudioFlinger::DuplicatingThread::threadLoop_sleepTime() -{ - if (sleepTime == 0) { - if (mMixerStatus == MIXER_TRACKS_ENABLED) { - sleepTime = activeSleepTime; - } else { - sleepTime = idleSleepTime; - } - } else if (mBytesWritten != 0) { - if (mMixerStatus == MIXER_TRACKS_ENABLED) { - writeFrames = mNormalFrameCount; - memset(mMixBuffer, 0, mixBufferSize); - } else { - // flush remaining overflow buffers in output tracks - writeFrames = 0; - } - sleepTime = 0; - } -} - -void AudioFlinger::DuplicatingThread::threadLoop_write() -{ - for (size_t i = 0; i < outputTracks.size(); i++) { - outputTracks[i]->write(mMixBuffer, writeFrames); - } - mBytesWritten += mixBufferSize; -} - -void AudioFlinger::DuplicatingThread::threadLoop_standby() -{ - // DuplicatingThread implements standby by stopping all tracks - for (size_t i = 0; i < outputTracks.size(); i++) { - outputTracks[i]->stop(); - } -} - -void AudioFlinger::DuplicatingThread::saveOutputTracks() -{ - outputTracks = mOutputTracks; -} - -void AudioFlinger::DuplicatingThread::clearOutputTracks() -{ - outputTracks.clear(); -} - -void AudioFlinger::DuplicatingThread::addOutputTrack(MixerThread *thread) -{ - Mutex::Autolock _l(mLock); - // FIXME explain this formula - size_t frameCount = (3 * mNormalFrameCount * mSampleRate) / thread->sampleRate(); - OutputTrack *outputTrack = new OutputTrack(thread, - this, - mSampleRate, - mFormat, - mChannelMask, - frameCount); - if (outputTrack->cblk() != NULL) { - thread->setStreamVolume(AUDIO_STREAM_CNT, 1.0f); - mOutputTracks.add(outputTrack); - ALOGV("addOutputTrack() track %p, on thread %p", outputTrack, thread); - updateWaitTime_l(); - } -} - -void AudioFlinger::DuplicatingThread::removeOutputTrack(MixerThread *thread) -{ - Mutex::Autolock _l(mLock); - for (size_t i = 0; i < mOutputTracks.size(); i++) { - if (mOutputTracks[i]->thread() == thread) { - mOutputTracks[i]->destroy(); - mOutputTracks.removeAt(i); - updateWaitTime_l(); - return; - } - } - ALOGV("removeOutputTrack(): unkonwn thread: %p", thread); -} - -// caller must hold mLock -void AudioFlinger::DuplicatingThread::updateWaitTime_l() -{ - mWaitTimeMs = UINT_MAX; - for (size_t i = 0; i < mOutputTracks.size(); i++) { - sp strong = mOutputTracks[i]->thread().promote(); - if (strong != 0) { - uint32_t waitTimeMs = (strong->frameCount() * 2 * 1000) / strong->sampleRate(); - if (waitTimeMs < mWaitTimeMs) { - mWaitTimeMs = waitTimeMs; - } - } - } -} - - -bool AudioFlinger::DuplicatingThread::outputsReady( - const SortedVector< sp > &outputTracks) -{ - for (size_t i = 0; i < outputTracks.size(); i++) { - sp thread = outputTracks[i]->thread().promote(); - if (thread == 0) { - ALOGW("DuplicatingThread::outputsReady() could not promote thread on output track %p", - outputTracks[i].get()); - return false; - } - PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); - // see note at standby() declaration - if (playbackThread->standby() && !playbackThread->isSuspended()) { - ALOGV("DuplicatingThread output track %p on thread %p Not Ready", outputTracks[i].get(), - thread.get()); - return false; - } - } - return true; -} - -uint32_t AudioFlinger::DuplicatingThread::activeSleepTimeUs() const -{ - return (mWaitTimeMs * 1000) / 2; -} - -void AudioFlinger::DuplicatingThread::cacheParameters_l() -{ - 
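// Hypothetical sketch (not from the original sources): updateWaitTime_l() above keeps mWaitTimeMs
// at the smallest two-buffer duration among the attached output threads, and OutputTrack::write()
// later uses that value as the longest it is willing to block in obtainBuffer(). For an assumed
// output with frameCount = 1024 at 44100 Hz:
{
    static constexpr unsigned kFrames = 1024, kRate = 44100;
    static constexpr unsigned kWaitMs = (kFrames * 2u * 1000u) / kRate; // (frameCount * 2 * 1000) / sampleRate
    static_assert(kWaitMs == 46, "two 1024-frame buffers at 44.1 kHz are about 46 ms");
}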
// updateWaitTime_l() sets mWaitTimeMs, which affects activeSleepTimeUs(), so call it first - updateWaitTime_l(); - - MixerThread::cacheParameters_l(); -} - -// ---------------------------------------------------------------------------- - -// TrackBase constructor must be called with AudioFlinger::mLock held -AudioFlinger::ThreadBase::TrackBase::TrackBase( - ThreadBase *thread, - const sp& client, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - const sp& sharedBuffer, - int sessionId) - : RefBase(), - mThread(thread), - mClient(client), - mCblk(NULL), - // mBuffer - // mBufferEnd - mStepCount(0), - mState(IDLE), - mSampleRate(sampleRate), - mFormat(format), - mChannelMask(channelMask), - mChannelCount(popcount(channelMask)), - mFrameSize(audio_is_linear_pcm(format) ? - mChannelCount * audio_bytes_per_sample(format) : sizeof(int8_t)), - mFrameCount(frameCount), - mStepServerFailed(false), - mSessionId(sessionId) -{ - // client == 0 implies sharedBuffer == 0 - ALOG_ASSERT(!(client == 0 && sharedBuffer != 0)); - - ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), - sharedBuffer->size()); - - // ALOGD("Creating track with %d buffers @ %d bytes", bufferCount, bufferSize); - size_t size = sizeof(audio_track_cblk_t); - size_t bufferSize = frameCount * mFrameSize; - if (sharedBuffer == 0) { - size += bufferSize; - } - - if (client != 0) { - mCblkMemory = client->heap()->allocate(size); - if (mCblkMemory != 0) { - mCblk = static_cast(mCblkMemory->pointer()); - // can't assume mCblk != NULL - } else { - ALOGE("not enough memory for AudioTrack size=%u", size); - client->heap()->dump("AudioTrack"); - return; - } - } else { - mCblk = (audio_track_cblk_t *)(new uint8_t[size]); - // assume mCblk != NULL - } - - // construct the shared structure in-place. - if (mCblk != NULL) { - new(mCblk) audio_track_cblk_t(); - // clear all buffers - mCblk->frameCount_ = frameCount; - mCblk->sampleRate = sampleRate; -// uncomment the following lines to quickly test 32-bit wraparound -// mCblk->user = 0xffff0000; -// mCblk->server = 0xffff0000; -// mCblk->userBase = 0xffff0000; -// mCblk->serverBase = 0xffff0000; - if (sharedBuffer == 0) { - mBuffer = (char*)mCblk + sizeof(audio_track_cblk_t); - memset(mBuffer, 0, bufferSize); - // Force underrun condition to avoid false underrun callback until first data is - // written to buffer (other flags are cleared) - mCblk->flags = CBLK_UNDERRUN; - } else { - mBuffer = sharedBuffer->pointer(); - } - mBufferEnd = (uint8_t *)mBuffer + bufferSize; - } -} - -AudioFlinger::ThreadBase::TrackBase::~TrackBase() -{ - if (mCblk != NULL) { - if (mClient == 0) { - delete mCblk; - } else { - mCblk->~audio_track_cblk_t(); // destroy our shared-structure. - } - } - mCblkMemory.clear(); // free the shared memory before releasing the heap it belongs to - if (mClient != 0) { - // Client destructor must run with AudioFlinger mutex locked - Mutex::Autolock _l(mClient->audioFlinger()->mLock); - // If the client's reference count drops to zero, the associated destructor - // must run with AudioFlinger lock held. Thus the explicit clear() rather than - // relying on the automatic clear() at end of scope. 
- mClient.clear(); - } -} - -// AudioBufferProvider interface -// getNextBuffer() = 0; -// This implementation of releaseBuffer() is used by Track and RecordTrack, but not TimedTrack -void AudioFlinger::ThreadBase::TrackBase::releaseBuffer(AudioBufferProvider::Buffer* buffer) -{ - buffer->raw = NULL; - mStepCount = buffer->frameCount; - // FIXME See note at getNextBuffer() - (void) step(); // ignore return value of step() - buffer->frameCount = 0; -} - -bool AudioFlinger::ThreadBase::TrackBase::step() { - bool result; - audio_track_cblk_t* cblk = this->cblk(); - - result = cblk->stepServer(mStepCount, mFrameCount, isOut()); - if (!result) { - ALOGV("stepServer failed acquiring cblk mutex"); - mStepServerFailed = true; - } - return result; -} - -void AudioFlinger::ThreadBase::TrackBase::reset() { - audio_track_cblk_t* cblk = this->cblk(); - - cblk->user = 0; - cblk->server = 0; - cblk->userBase = 0; - cblk->serverBase = 0; - mStepServerFailed = false; - ALOGV("TrackBase::reset"); -} - -uint32_t AudioFlinger::ThreadBase::TrackBase::sampleRate() const { - return mCblk->sampleRate; -} - -void* AudioFlinger::ThreadBase::TrackBase::getBuffer(uint32_t offset, uint32_t frames) const { - audio_track_cblk_t* cblk = this->cblk(); - int8_t *bufferStart = (int8_t *)mBuffer + (offset-cblk->serverBase) * mFrameSize; - int8_t *bufferEnd = bufferStart + frames * mFrameSize; - - // Check validity of returned pointer in case the track control block would have been corrupted. - ALOG_ASSERT(!(bufferStart < mBuffer || bufferStart > bufferEnd || bufferEnd > mBufferEnd), - "TrackBase::getBuffer buffer out of range:\n" - " start: %p, end %p , mBuffer %p mBufferEnd %p\n" - " server %u, serverBase %u, user %u, userBase %u, frameSize %u", - bufferStart, bufferEnd, mBuffer, mBufferEnd, - cblk->server, cblk->serverBase, cblk->user, cblk->userBase, mFrameSize); - - return bufferStart; -} - -status_t AudioFlinger::ThreadBase::TrackBase::setSyncEvent(const sp& event) -{ - mSyncEvents.add(event); - return NO_ERROR; -} - -// ---------------------------------------------------------------------------- - -// Track constructor must be called with AudioFlinger::mLock and ThreadBase::mLock held -AudioFlinger::PlaybackThread::Track::Track( - PlaybackThread *thread, - const sp& client, - audio_stream_type_t streamType, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - const sp& sharedBuffer, - int sessionId, - IAudioFlinger::track_flags_t flags) - : TrackBase(thread, client, sampleRate, format, channelMask, frameCount, sharedBuffer, - sessionId), - mMute(false), - mFillingUpStatus(FS_INVALID), - // mRetryCount initialized later when needed - mSharedBuffer(sharedBuffer), - mStreamType(streamType), - mName(-1), // see note below - mMainBuffer(thread->mixBuffer()), - mAuxBuffer(NULL), - mAuxEffectId(0), mHasVolumeController(false), - mPresentationCompleteFrames(0), - mFlags(flags), - mFastIndex(-1), - mUnderrunCount(0), - mCachedVolume(1.0) -{ - if (mCblk != NULL) { - // to avoid leaking a track name, do not allocate one unless there is an mCblk - mName = thread->getTrackName_l(channelMask, sessionId); - mCblk->mName = mName; - if (mName < 0) { - ALOGE("no more track names available"); - return; - } - // only allocate a fast track index if we were able to allocate a normal track name - if (flags & IAudioFlinger::TRACK_FAST) { - ALOG_ASSERT(thread->mFastTrackAvailMask != 0); - int i = __builtin_ctz(thread->mFastTrackAvailMask); - ALOG_ASSERT(0 < i && i < 
(int)FastMixerState::kMaxFastTracks); - // FIXME This is too eager. We allocate a fast track index before the - // fast track becomes active. Since fast tracks are a scarce resource, - // this means we are potentially denying other more important fast tracks from - // being created. It would be better to allocate the index dynamically. - mFastIndex = i; - mCblk->mName = i; - // Read the initial underruns because this field is never cleared by the fast mixer - mObservedUnderruns = thread->getFastTrackUnderruns(i); - thread->mFastTrackAvailMask &= ~(1 << i); - } - } - ALOGV("Track constructor name %d, calling pid %d", mName, - IPCThreadState::self()->getCallingPid()); -} - -AudioFlinger::PlaybackThread::Track::~Track() -{ - ALOGV("PlaybackThread::Track destructor"); -} - -void AudioFlinger::PlaybackThread::Track::destroy() -{ - // NOTE: destroyTrack_l() can remove a strong reference to this Track - // by removing it from mTracks vector, so there is a risk that this Tracks's - // destructor is called. As the destructor needs to lock mLock, - // we must acquire a strong reference on this Track before locking mLock - // here so that the destructor is called only when exiting this function. - // On the other hand, as long as Track::destroy() is only called by - // TrackHandle destructor, the TrackHandle still holds a strong ref on - // this Track with its member mTrack. - sp keep(this); - { // scope for mLock - sp thread = mThread.promote(); - if (thread != 0) { - if (!isOutputTrack()) { - if (mState == ACTIVE || mState == RESUMING) { - AudioSystem::stopOutput(thread->id(), mStreamType, mSessionId); - -#ifdef ADD_BATTERY_DATA - // to track the speaker usage - addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop); -#endif - } - AudioSystem::releaseOutput(thread->id()); - } - Mutex::Autolock _l(thread->mLock); - PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); - playbackThread->destroyTrack_l(this); - } - } -} - -/*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result) -{ - result.append(" Name Client Type Fmt Chn mask Session StpCnt fCount S M F SRate " - "L dB R dB Server User Main buf Aux Buf Flags Underruns\n"); -} - -void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) -{ - uint32_t vlr = mCblk->getVolumeLR(); - if (isFastTrack()) { - sprintf(buffer, " F %2d", mFastIndex); - } else { - sprintf(buffer, " %4d", mName - AudioMixer::TRACK0); - } - track_state state = mState; - char stateChar; - switch (state) { - case IDLE: - stateChar = 'I'; - break; - case TERMINATED: - stateChar = 'T'; - break; - case STOPPING_1: - stateChar = 's'; - break; - case STOPPING_2: - stateChar = '5'; - break; - case STOPPED: - stateChar = 'S'; - break; - case RESUMING: - stateChar = 'R'; - break; - case ACTIVE: - stateChar = 'A'; - break; - case PAUSING: - stateChar = 'p'; - break; - case PAUSED: - stateChar = 'P'; - break; - case FLUSHED: - stateChar = 'F'; - break; - default: - stateChar = '?'; - break; - } - char nowInUnderrun; - switch (mObservedUnderruns.mBitFields.mMostRecent) { - case UNDERRUN_FULL: - nowInUnderrun = ' '; - break; - case UNDERRUN_PARTIAL: - nowInUnderrun = '<'; - break; - case UNDERRUN_EMPTY: - nowInUnderrun = '*'; - break; - default: - nowInUnderrun = '?'; - break; - } - snprintf(&buffer[7], size-7, " %6d %4u %3u 0x%08x %7u %6u %6u %1c %1d %1d %5u %5.2g %5.2g " - "0x%08x 0x%08x 0x%08x 0x%08x %#5x %9u%c\n", - (mClient == 0) ? 
getpid_cached : mClient->pid(), - mStreamType, - mFormat, - mChannelMask, - mSessionId, - mStepCount, - mFrameCount, - stateChar, - mMute, - mFillingUpStatus, - mCblk->sampleRate, - 20.0 * log10((vlr & 0xFFFF) / 4096.0), - 20.0 * log10((vlr >> 16) / 4096.0), - mCblk->server, - mCblk->user, - (int)mMainBuffer, - (int)mAuxBuffer, - mCblk->flags, - mUnderrunCount, - nowInUnderrun); -} - -// AudioBufferProvider interface -status_t AudioFlinger::PlaybackThread::Track::getNextBuffer( - AudioBufferProvider::Buffer* buffer, int64_t pts) -{ - audio_track_cblk_t* cblk = this->cblk(); - uint32_t framesReady; - uint32_t framesReq = buffer->frameCount; - - // Check if last stepServer failed, try to step now - if (mStepServerFailed) { - // FIXME When called by fast mixer, this takes a mutex with tryLock(). - // Since the fast mixer is higher priority than client callback thread, - // it does not result in priority inversion for client. - // But a non-blocking solution would be preferable to avoid - // fast mixer being unable to tryLock(), and - // to avoid the extra context switches if the client wakes up, - // discovers the mutex is locked, then has to wait for fast mixer to unlock. - if (!step()) goto getNextBuffer_exit; - ALOGV("stepServer recovered"); - mStepServerFailed = false; - } - - // FIXME Same as above - framesReady = cblk->framesReadyOut(); - - if (CC_LIKELY(framesReady)) { - uint32_t s = cblk->server; - uint32_t bufferEnd = cblk->serverBase + mFrameCount; - - bufferEnd = (cblk->loopEnd < bufferEnd) ? cblk->loopEnd : bufferEnd; - if (framesReq > framesReady) { - framesReq = framesReady; - } - if (framesReq > bufferEnd - s) { - framesReq = bufferEnd - s; - } - - buffer->raw = getBuffer(s, framesReq); - buffer->frameCount = framesReq; - return NO_ERROR; - } - -getNextBuffer_exit: - buffer->raw = NULL; - buffer->frameCount = 0; - ALOGV("getNextBuffer() no more data for track %d on thread %p", mName, mThread.unsafe_get()); - return NOT_ENOUGH_DATA; -} - -// Note that framesReady() takes a mutex on the control block using tryLock(). -// This could result in priority inversion if framesReady() is called by the normal mixer, -// as the normal mixer thread runs at lower -// priority than the client's callback thread: there is a short window within framesReady() -// during which the normal mixer could be preempted, and the client callback would block. -// Another problem can occur if framesReady() is called by the fast mixer: -// the tryLock() could block for up to 1 ms, and a sequence of these could delay fast mixer. -// FIXME Replace AudioTrackShared control block implementation by a non-blocking FIFO queue. 
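// Hypothetical sketch (not from the original sources) of the direction the FIXME above points at:
// if the client and the mixer each own one monotonically increasing counter, framesReady() can be
// answered with two atomic loads and no tryLock(). The type and field names below are invented for
// illustration and are not part of AudioTrackShared.
#include <atomic>   // in real code these includes would sit at the top of the file
#include <cstdint>

struct NonBlockingFrameCounters {
    std::atomic<uint32_t> mFramesWritten{0};  // advanced only by the client after filling data
    std::atomic<uint32_t> mFramesRead{0};     // advanced only by the mixer after consuming data

    // Callable from the fast mixer: no locks, no blocking, no I/O.
    uint32_t framesReady() const {
        const uint32_t written = mFramesWritten.load(std::memory_order_acquire);
        const uint32_t read    = mFramesRead.load(std::memory_order_relaxed);
        return written - read;  // unsigned wrap-around keeps the difference correct
    }
};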
-size_t AudioFlinger::PlaybackThread::Track::framesReady() const { - return mCblk->framesReadyOut(); -} - -// Don't call for fast tracks; the framesReady() could result in priority inversion -bool AudioFlinger::PlaybackThread::Track::isReady() const { - if (mFillingUpStatus != FS_FILLING || isStopped() || isPausing()) { - return true; - } - - if (framesReady() >= mFrameCount || - (mCblk->flags & CBLK_FORCEREADY)) { - mFillingUpStatus = FS_FILLED; - android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags); - return true; - } - return false; -} - -status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t event, - int triggerSession) -{ - status_t status = NO_ERROR; - ALOGV("start(%d), calling pid %d session %d", - mName, IPCThreadState::self()->getCallingPid(), mSessionId); - - sp thread = mThread.promote(); - if (thread != 0) { - Mutex::Autolock _l(thread->mLock); - track_state state = mState; - // here the track could be either new, or restarted - // in both cases "unstop" the track - if (mState == PAUSED) { - mState = TrackBase::RESUMING; - ALOGV("PAUSED => RESUMING (%d) on thread %p", mName, this); - } else { - mState = TrackBase::ACTIVE; - ALOGV("? => ACTIVE (%d) on thread %p", mName, this); - } - - if (!isOutputTrack() && state != ACTIVE && state != RESUMING) { - thread->mLock.unlock(); - status = AudioSystem::startOutput(thread->id(), mStreamType, mSessionId); - thread->mLock.lock(); - -#ifdef ADD_BATTERY_DATA - // to track the speaker usage - if (status == NO_ERROR) { - addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStart); - } -#endif - } - if (status == NO_ERROR) { - PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); - playbackThread->addTrack_l(this); - } else { - mState = state; - triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); - } - } else { - status = BAD_VALUE; - } - return status; -} - -void AudioFlinger::PlaybackThread::Track::stop() -{ - ALOGV("stop(%d), calling pid %d", mName, IPCThreadState::self()->getCallingPid()); - sp thread = mThread.promote(); - if (thread != 0) { - Mutex::Autolock _l(thread->mLock); - track_state state = mState; - if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { - // If the track is not active (PAUSED and buffers full), flush buffers - PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); - if (playbackThread->mActiveTracks.indexOf(this) < 0) { - reset(); - mState = STOPPED; - } else if (!isFastTrack()) { - mState = STOPPED; - } else { - // prepareTracks_l() will set state to STOPPING_2 after next underrun, - // and then to STOPPED and reset() when presentation is complete - mState = STOPPING_1; - } - ALOGV("not stopping/stopped => stopping/stopped (%d) on thread %p", mName, - playbackThread); - } - if (!isOutputTrack() && (state == ACTIVE || state == RESUMING)) { - thread->mLock.unlock(); - AudioSystem::stopOutput(thread->id(), mStreamType, mSessionId); - thread->mLock.lock(); - -#ifdef ADD_BATTERY_DATA - // to track the speaker usage - addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop); -#endif - } - } -} - -void AudioFlinger::PlaybackThread::Track::pause() -{ - ALOGV("pause(%d), calling pid %d", mName, IPCThreadState::self()->getCallingPid()); - sp thread = mThread.promote(); - if (thread != 0) { - Mutex::Autolock _l(thread->mLock); - if (mState == ACTIVE || mState == RESUMING) { - mState = PAUSING; - ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); - if (!isOutputTrack()) { - thread->mLock.unlock(); - 
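// The unlock()/lock() bracket around the AudioSystem::stopOutput() call below mirrors the same
// pattern in start() and stop() above: ThreadBase::mLock is dropped before calling into
// AudioSystem because that call reaches the audio policy service over binder and may call back
// into AudioFlinger, which could otherwise deadlock on the same lock. (This is an interpretation
// of the surrounding pattern, stated here as an assumption.)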
AudioSystem::stopOutput(thread->id(), mStreamType, mSessionId); - thread->mLock.lock(); - -#ifdef ADD_BATTERY_DATA - // to track the speaker usage - addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop); -#endif - } - } - } -} - -void AudioFlinger::PlaybackThread::Track::flush() -{ - ALOGV("flush(%d)", mName); - sp thread = mThread.promote(); - if (thread != 0) { - Mutex::Autolock _l(thread->mLock); - if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && - mState != PAUSING && mState != IDLE && mState != FLUSHED) { - return; - } - // No point remaining in PAUSED state after a flush => go to - // FLUSHED state - mState = FLUSHED; - // do not reset the track if it is still in the process of being stopped or paused. - // this will be done by prepareTracks_l() when the track is stopped. - // prepareTracks_l() will see mState == FLUSHED, then - // remove from active track list, reset(), and trigger presentation complete - PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); - if (playbackThread->mActiveTracks.indexOf(this) < 0) { - reset(); - } - } -} - -void AudioFlinger::PlaybackThread::Track::reset() -{ - // Do not reset twice to avoid discarding data written just after a flush and before - // the audioflinger thread detects the track is stopped. - if (!mResetDone) { - TrackBase::reset(); - // Force underrun condition to avoid false underrun callback until first data is - // written to buffer - android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags); - android_atomic_or(CBLK_UNDERRUN, &mCblk->flags); - mFillingUpStatus = FS_FILLING; - mResetDone = true; - if (mState == FLUSHED) { - mState = IDLE; - } - } -} - -void AudioFlinger::PlaybackThread::Track::mute(bool muted) -{ - mMute = muted; -} - -status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId) -{ - status_t status = DEAD_OBJECT; - sp thread = mThread.promote(); - if (thread != 0) { - PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); - sp af = mClient->audioFlinger(); - - Mutex::Autolock _l(af->mLock); - - sp srcThread = af->getEffectThread_l(AUDIO_SESSION_OUTPUT_MIX, EffectId); - - if (EffectId != 0 && srcThread != 0 && playbackThread != srcThread.get()) { - Mutex::Autolock _dl(playbackThread->mLock); - Mutex::Autolock _sl(srcThread->mLock); - sp chain = srcThread->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX); - if (chain == 0) { - return INVALID_OPERATION; - } - - sp effect = chain->getEffectFromId_l(EffectId); - if (effect == 0) { - return INVALID_OPERATION; - } - srcThread->removeEffect_l(effect); - playbackThread->addEffect_l(effect); - // removeEffect_l() has stopped the effect if it was active so it must be restarted - if (effect->state() == EffectModule::ACTIVE || - effect->state() == EffectModule::STOPPING) { - effect->start(); - } - - sp dstChain = effect->chain().promote(); - if (dstChain == 0) { - srcThread->addEffect_l(effect); - return INVALID_OPERATION; - } - AudioSystem::unregisterEffect(effect->id()); - AudioSystem::registerEffect(&effect->desc(), - srcThread->id(), - dstChain->strategy(), - AUDIO_SESSION_OUTPUT_MIX, - effect->id()); - } - status = playbackThread->attachAuxEffect(this, EffectId); - } - return status; -} - -void AudioFlinger::PlaybackThread::Track::setAuxBuffer(int EffectId, int32_t *buffer) -{ - mAuxEffectId = EffectId; - mAuxBuffer = buffer; -} - -bool AudioFlinger::PlaybackThread::Track::presentationComplete(size_t framesWritten, - size_t audioHalFrames) -{ - // a track is considered presented when the total 
number of frames written to audio HAL - // corresponds to the number of frames written when presentationComplete() is called for the - // first time (mPresentationCompleteFrames == 0) plus the buffer filling status at that time. - if (mPresentationCompleteFrames == 0) { - mPresentationCompleteFrames = framesWritten + audioHalFrames; - ALOGV("presentationComplete() reset: mPresentationCompleteFrames %d audioHalFrames %d", - mPresentationCompleteFrames, audioHalFrames); - } - if (framesWritten >= mPresentationCompleteFrames) { - ALOGV("presentationComplete() session %d complete: framesWritten %d", - mSessionId, framesWritten); - triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); - return true; - } - return false; -} - -void AudioFlinger::PlaybackThread::Track::triggerEvents(AudioSystem::sync_event_t type) -{ - for (int i = 0; i < (int)mSyncEvents.size(); i++) { - if (mSyncEvents[i]->type() == type) { - mSyncEvents[i]->trigger(); - mSyncEvents.removeAt(i); - i--; - } - } -} - -// implement VolumeBufferProvider interface - -uint32_t AudioFlinger::PlaybackThread::Track::getVolumeLR() -{ - // called by FastMixer, so not allowed to take any locks, block, or do I/O including logs - ALOG_ASSERT(isFastTrack() && (mCblk != NULL)); - uint32_t vlr = mCblk->getVolumeLR(); - uint32_t vl = vlr & 0xFFFF; - uint32_t vr = vlr >> 16; - // track volumes come from shared memory, so can't be trusted and must be clamped - if (vl > MAX_GAIN_INT) { - vl = MAX_GAIN_INT; - } - if (vr > MAX_GAIN_INT) { - vr = MAX_GAIN_INT; - } - // now apply the cached master volume and stream type volume; - // this is trusted but lacks any synchronization or barrier so may be stale - float v = mCachedVolume; - vl *= v; - vr *= v; - // re-combine into U4.16 - vlr = (vr << 16) | (vl & 0xFFFF); - // FIXME look at mute, pause, and stop flags - return vlr; -} - -status_t AudioFlinger::PlaybackThread::Track::setSyncEvent(const sp& event) -{ - if (mState == TERMINATED || mState == PAUSED || - ((framesReady() == 0) && ((mSharedBuffer != 0) || - (mState == STOPPED)))) { - ALOGW("Track::setSyncEvent() in invalid state %d on session %d %s mode, framesReady %d ", - mState, mSessionId, (mSharedBuffer != 0) ? 
"static" : "stream", framesReady()); - event->cancel(); - return INVALID_OPERATION; - } - (void) TrackBase::setSyncEvent(event); - return NO_ERROR; -} - -bool AudioFlinger::PlaybackThread::Track::isOut() const -{ - return true; -} - -// timed audio tracks - -sp -AudioFlinger::PlaybackThread::TimedTrack::create( - PlaybackThread *thread, - const sp& client, - audio_stream_type_t streamType, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - const sp& sharedBuffer, - int sessionId) { - if (!client->reserveTimedTrack()) - return 0; - - return new TimedTrack( - thread, client, streamType, sampleRate, format, channelMask, frameCount, - sharedBuffer, sessionId); -} - -AudioFlinger::PlaybackThread::TimedTrack::TimedTrack( - PlaybackThread *thread, - const sp& client, - audio_stream_type_t streamType, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - const sp& sharedBuffer, - int sessionId) - : Track(thread, client, streamType, sampleRate, format, channelMask, - frameCount, sharedBuffer, sessionId, IAudioFlinger::TRACK_TIMED), - mQueueHeadInFlight(false), - mTrimQueueHeadOnRelease(false), - mFramesPendingInQueue(0), - mTimedSilenceBuffer(NULL), - mTimedSilenceBufferSize(0), - mTimedAudioOutputOnTime(false), - mMediaTimeTransformValid(false) -{ - LocalClock lc; - mLocalTimeFreq = lc.getLocalFreq(); - - mLocalTimeToSampleTransform.a_zero = 0; - mLocalTimeToSampleTransform.b_zero = 0; - mLocalTimeToSampleTransform.a_to_b_numer = sampleRate; - mLocalTimeToSampleTransform.a_to_b_denom = mLocalTimeFreq; - LinearTransform::reduce(&mLocalTimeToSampleTransform.a_to_b_numer, - &mLocalTimeToSampleTransform.a_to_b_denom); - - mMediaTimeToSampleTransform.a_zero = 0; - mMediaTimeToSampleTransform.b_zero = 0; - mMediaTimeToSampleTransform.a_to_b_numer = sampleRate; - mMediaTimeToSampleTransform.a_to_b_denom = 1000000; - LinearTransform::reduce(&mMediaTimeToSampleTransform.a_to_b_numer, - &mMediaTimeToSampleTransform.a_to_b_denom); -} - -AudioFlinger::PlaybackThread::TimedTrack::~TimedTrack() { - mClient->releaseTimedTrack(); - delete [] mTimedSilenceBuffer; -} - -status_t AudioFlinger::PlaybackThread::TimedTrack::allocateTimedBuffer( - size_t size, sp* buffer) { - - Mutex::Autolock _l(mTimedBufferQueueLock); - - trimTimedBufferQueue_l(); - - // lazily initialize the shared memory heap for timed buffers - if (mTimedMemoryDealer == NULL) { - const int kTimedBufferHeapSize = 512 << 10; - - mTimedMemoryDealer = new MemoryDealer(kTimedBufferHeapSize, - "AudioFlingerTimed"); - if (mTimedMemoryDealer == NULL) - return NO_MEMORY; - } - - sp newBuffer = mTimedMemoryDealer->allocate(size); - if (newBuffer == NULL) { - newBuffer = mTimedMemoryDealer->allocate(size); - if (newBuffer == NULL) - return NO_MEMORY; - } - - *buffer = newBuffer; - return NO_ERROR; -} - -// caller must hold mTimedBufferQueueLock -void AudioFlinger::PlaybackThread::TimedTrack::trimTimedBufferQueue_l() { - int64_t mediaTimeNow; - { - Mutex::Autolock mttLock(mMediaTimeTransformLock); - if (!mMediaTimeTransformValid) - return; - - int64_t targetTimeNow; - status_t res = (mMediaTimeTransformTarget == TimedAudioTrack::COMMON_TIME) - ? 
mCCHelper.getCommonTime(&targetTimeNow) - : mCCHelper.getLocalTime(&targetTimeNow); - - if (OK != res) - return; - - if (!mMediaTimeTransform.doReverseTransform(targetTimeNow, - &mediaTimeNow)) { - return; - } - } - - size_t trimEnd; - for (trimEnd = 0; trimEnd < mTimedBufferQueue.size(); trimEnd++) { - int64_t bufEnd; - - if ((trimEnd + 1) < mTimedBufferQueue.size()) { - // We have a next buffer. Just use its PTS as the PTS of the frame - // following the last frame in this buffer. If the stream is sparse - // (ie, there are deliberate gaps left in the stream which should be - // filled with silence by the TimedAudioTrack), then this can result - // in one extra buffer being left un-trimmed when it could have - // been. In general, this is not typical, and we would rather - // optimized away the TS calculation below for the more common case - // where PTSes are contiguous. - bufEnd = mTimedBufferQueue[trimEnd + 1].pts(); - } else { - // We have no next buffer. Compute the PTS of the frame following - // the last frame in this buffer by computing the duration of of - // this frame in media time units and adding it to the PTS of the - // buffer. - int64_t frameCount = mTimedBufferQueue[trimEnd].buffer()->size() - / mFrameSize; - - if (!mMediaTimeToSampleTransform.doReverseTransform(frameCount, - &bufEnd)) { - ALOGE("Failed to convert frame count of %lld to media time" - " duration" " (scale factor %d/%u) in %s", - frameCount, - mMediaTimeToSampleTransform.a_to_b_numer, - mMediaTimeToSampleTransform.a_to_b_denom, - __PRETTY_FUNCTION__); - break; - } - bufEnd += mTimedBufferQueue[trimEnd].pts(); - } - - if (bufEnd > mediaTimeNow) - break; - - // Is the buffer we want to use in the middle of a mix operation right - // now? If so, don't actually trim it. Just wait for the releaseBuffer - // from the mixer which should be coming back shortly. - if (!trimEnd && mQueueHeadInFlight) { - mTrimQueueHeadOnRelease = true; - } - } - - size_t trimStart = mTrimQueueHeadOnRelease ? 1 : 0; - if (trimStart < trimEnd) { - // Update the bookkeeping for framesReady() - for (size_t i = trimStart; i < trimEnd; ++i) { - updateFramesPendingAfterTrim_l(mTimedBufferQueue[i], "trim"); - } - - // Now actually remove the buffers from the queue. - mTimedBufferQueue.removeItemsAt(trimStart, trimEnd); - } -} - -void AudioFlinger::PlaybackThread::TimedTrack::trimTimedBufferQueueHead_l( - const char* logTag) { - ALOG_ASSERT(mTimedBufferQueue.size() > 0, - "%s called (reason \"%s\"), but timed buffer queue has no" - " elements to trim.", __FUNCTION__, logTag); - - updateFramesPendingAfterTrim_l(mTimedBufferQueue[0], logTag); - mTimedBufferQueue.removeAt(0); -} - -void AudioFlinger::PlaybackThread::TimedTrack::updateFramesPendingAfterTrim_l( - const TimedBuffer& buf, - const char* logTag) { - uint32_t bufBytes = buf.buffer()->size(); - uint32_t consumedAlready = buf.position(); - - ALOG_ASSERT(consumedAlready <= bufBytes, - "Bad bookkeeping while updating frames pending. Timed buffer is" - " only %u bytes long, but claims to have consumed %u" - " bytes. (update reason: \"%s\")", - bufBytes, consumedAlready, logTag); - - uint32_t bufFrames = (bufBytes - consumedAlready) / mFrameSize; - ALOG_ASSERT(mFramesPendingInQueue >= bufFrames, - "Bad bookkeeping while updating frames pending. Should have at" - " least %u queued frames, but we think we have only %u. 
(update" - " reason: \"%s\")", - bufFrames, mFramesPendingInQueue, logTag); - - mFramesPendingInQueue -= bufFrames; -} - -status_t AudioFlinger::PlaybackThread::TimedTrack::queueTimedBuffer( - const sp& buffer, int64_t pts) { - - { - Mutex::Autolock mttLock(mMediaTimeTransformLock); - if (!mMediaTimeTransformValid) - return INVALID_OPERATION; - } - - Mutex::Autolock _l(mTimedBufferQueueLock); - - uint32_t bufFrames = buffer->size() / mFrameSize; - mFramesPendingInQueue += bufFrames; - mTimedBufferQueue.add(TimedBuffer(buffer, pts)); - - return NO_ERROR; -} - -status_t AudioFlinger::PlaybackThread::TimedTrack::setMediaTimeTransform( - const LinearTransform& xform, TimedAudioTrack::TargetTimeline target) { - - ALOGVV("setMediaTimeTransform az=%lld bz=%lld n=%d d=%u tgt=%d", - xform.a_zero, xform.b_zero, xform.a_to_b_numer, xform.a_to_b_denom, - target); - - if (!(target == TimedAudioTrack::LOCAL_TIME || - target == TimedAudioTrack::COMMON_TIME)) { - return BAD_VALUE; - } - - Mutex::Autolock lock(mMediaTimeTransformLock); - mMediaTimeTransform = xform; - mMediaTimeTransformTarget = target; - mMediaTimeTransformValid = true; - - return NO_ERROR; -} - -#define min(a, b) ((a) < (b) ? (a) : (b)) - -// implementation of getNextBuffer for tracks whose buffers have timestamps -status_t AudioFlinger::PlaybackThread::TimedTrack::getNextBuffer( - AudioBufferProvider::Buffer* buffer, int64_t pts) -{ - if (pts == AudioBufferProvider::kInvalidPTS) { - buffer->raw = NULL; - buffer->frameCount = 0; - mTimedAudioOutputOnTime = false; - return INVALID_OPERATION; - } - - Mutex::Autolock _l(mTimedBufferQueueLock); - - ALOG_ASSERT(!mQueueHeadInFlight, - "getNextBuffer called without releaseBuffer!"); - - while (true) { - - // if we have no timed buffers, then fail - if (mTimedBufferQueue.isEmpty()) { - buffer->raw = NULL; - buffer->frameCount = 0; - return NOT_ENOUGH_DATA; - } - - TimedBuffer& head = mTimedBufferQueue.editItemAt(0); - - // calculate the PTS of the head of the timed buffer queue expressed in - // local time - int64_t headLocalPTS; - { - Mutex::Autolock mttLock(mMediaTimeTransformLock); - - ALOG_ASSERT(mMediaTimeTransformValid, "media time transform invalid"); - - if (mMediaTimeTransform.a_to_b_denom == 0) { - // the transform represents a pause, so yield silence - timedYieldSilence_l(buffer->frameCount, buffer); - return NO_ERROR; - } - - int64_t transformedPTS; - if (!mMediaTimeTransform.doForwardTransform(head.pts(), - &transformedPTS)) { - // the transform failed. this shouldn't happen, but if it does - // then just drop this buffer - ALOGW("timedGetNextBuffer transform failed"); - buffer->raw = NULL; - buffer->frameCount = 0; - trimTimedBufferQueueHead_l("getNextBuffer; no transform"); - return NO_ERROR; - } - - if (mMediaTimeTransformTarget == TimedAudioTrack::COMMON_TIME) { - if (OK != mCCHelper.commonTimeToLocalTime(transformedPTS, - &headLocalPTS)) { - buffer->raw = NULL; - buffer->frameCount = 0; - return INVALID_OPERATION; - } - } else { - headLocalPTS = transformedPTS; - } - } - - // adjust the head buffer's PTS to reflect the portion of the head buffer - // that has already been consumed - int64_t effectivePTS = headLocalPTS + - ((head.position() / mFrameSize) * mLocalTimeFreq / sampleRate()); - - // Calculate the delta in samples between the head of the input buffer - // queue and the start of the next output buffer that will be written. 
- // If the transformation fails because of over or underflow, it means - // that the sample's position in the output stream is so far out of - // whack that it should just be dropped. - int64_t sampleDelta; - if (llabs(effectivePTS - pts) >= (static_cast(1) << 31)) { - ALOGV("*** head buffer is too far from PTS: dropped buffer"); - trimTimedBufferQueueHead_l("getNextBuffer, buf pts too far from" - " mix"); - continue; - } - if (!mLocalTimeToSampleTransform.doForwardTransform( - (effectivePTS - pts) << 32, &sampleDelta)) { - ALOGV("*** too late during sample rate transform: dropped buffer"); - trimTimedBufferQueueHead_l("getNextBuffer, bad local to sample"); - continue; - } - - ALOGVV("*** getNextBuffer head.pts=%lld head.pos=%d pts=%lld" - " sampleDelta=[%d.%08x]", - head.pts(), head.position(), pts, - static_cast((sampleDelta >= 0 ? 0 : 1) - + (sampleDelta >> 32)), - static_cast(sampleDelta & 0xFFFFFFFF)); - - // if the delta between the ideal placement for the next input sample and - // the current output position is within this threshold, then we will - // concatenate the next input samples to the previous output - const int64_t kSampleContinuityThreshold = - (static_cast(sampleRate()) << 32) / 250; - - // if this is the first buffer of audio that we're emitting from this track - // then it should be almost exactly on time. - const int64_t kSampleStartupThreshold = 1LL << 32; - - if ((mTimedAudioOutputOnTime && llabs(sampleDelta) <= kSampleContinuityThreshold) || - (!mTimedAudioOutputOnTime && llabs(sampleDelta) <= kSampleStartupThreshold)) { - // the next input is close enough to being on time, so concatenate it - // with the last output - timedYieldSamples_l(buffer); - - ALOGVV("*** on time: head.pos=%d frameCount=%u", - head.position(), buffer->frameCount); - return NO_ERROR; - } - - // Looks like our output is not on time. Reset our on timed status. - // Next time we mix samples from our input queue, then should be within - // the StartupThreshold. - mTimedAudioOutputOnTime = false; - if (sampleDelta > 0) { - // the gap between the current output position and the proper start of - // the next input sample is too big, so fill it with silence - uint32_t framesUntilNextInput = (sampleDelta + 0x80000000) >> 32; - - timedYieldSilence_l(framesUntilNextInput, buffer); - ALOGV("*** silence: frameCount=%u", buffer->frameCount); - return NO_ERROR; - } else { - // the next input sample is late - uint32_t lateFrames = static_cast(-((sampleDelta + 0x80000000) >> 32)); - size_t onTimeSamplePosition = - head.position() + lateFrames * mFrameSize; - - if (onTimeSamplePosition > head.buffer()->size()) { - // all the remaining samples in the head are too late, so - // drop it and move on - ALOGV("*** too late: dropped buffer"); - trimTimedBufferQueueHead_l("getNextBuffer, dropped late buffer"); - continue; - } else { - // skip over the late samples - head.setPosition(onTimeSamplePosition); - - // yield the available samples - timedYieldSamples_l(buffer); - - ALOGV("*** late: head.pos=%d frameCount=%u", head.position(), buffer->frameCount); - return NO_ERROR; - } - } - } -} - -// Yield samples from the timed buffer queue head up to the given output -// buffer's capacity. 
-// -// Caller must hold mTimedBufferQueueLock -void AudioFlinger::PlaybackThread::TimedTrack::timedYieldSamples_l( - AudioBufferProvider::Buffer* buffer) { - - const TimedBuffer& head = mTimedBufferQueue[0]; - - buffer->raw = (static_cast(head.buffer()->pointer()) + - head.position()); - - uint32_t framesLeftInHead = ((head.buffer()->size() - head.position()) / - mFrameSize); - size_t framesRequested = buffer->frameCount; - buffer->frameCount = min(framesLeftInHead, framesRequested); - - mQueueHeadInFlight = true; - mTimedAudioOutputOnTime = true; -} - -// Yield samples of silence up to the given output buffer's capacity -// -// Caller must hold mTimedBufferQueueLock -void AudioFlinger::PlaybackThread::TimedTrack::timedYieldSilence_l( - uint32_t numFrames, AudioBufferProvider::Buffer* buffer) { - - // lazily allocate a buffer filled with silence - if (mTimedSilenceBufferSize < numFrames * mFrameSize) { - delete [] mTimedSilenceBuffer; - mTimedSilenceBufferSize = numFrames * mFrameSize; - mTimedSilenceBuffer = new uint8_t[mTimedSilenceBufferSize]; - memset(mTimedSilenceBuffer, 0, mTimedSilenceBufferSize); - } - - buffer->raw = mTimedSilenceBuffer; - size_t framesRequested = buffer->frameCount; - buffer->frameCount = min(numFrames, framesRequested); - - mTimedAudioOutputOnTime = false; -} - -// AudioBufferProvider interface -void AudioFlinger::PlaybackThread::TimedTrack::releaseBuffer( - AudioBufferProvider::Buffer* buffer) { - - Mutex::Autolock _l(mTimedBufferQueueLock); - - // If the buffer which was just released is part of the buffer at the head - // of the queue, be sure to update the amt of the buffer which has been - // consumed. If the buffer being returned is not part of the head of the - // queue, its either because the buffer is part of the silence buffer, or - // because the head of the timed queue was trimmed after the mixer called - // getNextBuffer but before the mixer called releaseBuffer. - if (buffer->raw == mTimedSilenceBuffer) { - ALOG_ASSERT(!mQueueHeadInFlight, - "Queue head in flight during release of silence buffer!"); - goto done; - } - - ALOG_ASSERT(mQueueHeadInFlight, - "TimedTrack::releaseBuffer of non-silence buffer, but no queue" - " head in flight."); - - if (mTimedBufferQueue.size()) { - TimedBuffer& head = mTimedBufferQueue.editItemAt(0); - - void* start = head.buffer()->pointer(); - void* end = reinterpret_cast( - reinterpret_cast(head.buffer()->pointer()) - + head.buffer()->size()); - - ALOG_ASSERT((buffer->raw >= start) && (buffer->raw < end), - "released buffer not within the head of the timed buffer" - " queue; qHead = [%p, %p], released buffer = %p", - start, end, buffer->raw); - - head.setPosition(head.position() + - (buffer->frameCount * mFrameSize)); - mQueueHeadInFlight = false; - - ALOG_ASSERT(mFramesPendingInQueue >= buffer->frameCount, - "Bad bookkeeping during releaseBuffer! 
Should have at" - " least %u queued frames, but we think we have only %u", - buffer->frameCount, mFramesPendingInQueue); - - mFramesPendingInQueue -= buffer->frameCount; - - if ((static_cast(head.position()) >= head.buffer()->size()) - || mTrimQueueHeadOnRelease) { - trimTimedBufferQueueHead_l("releaseBuffer"); - mTrimQueueHeadOnRelease = false; - } - } else { - LOG_FATAL("TimedTrack::releaseBuffer of non-silence buffer with no" - " buffers in the timed buffer queue"); - } - -done: - buffer->raw = 0; - buffer->frameCount = 0; -} - -size_t AudioFlinger::PlaybackThread::TimedTrack::framesReady() const { - Mutex::Autolock _l(mTimedBufferQueueLock); - return mFramesPendingInQueue; -} - -AudioFlinger::PlaybackThread::TimedTrack::TimedBuffer::TimedBuffer() - : mPTS(0), mPosition(0) {} - -AudioFlinger::PlaybackThread::TimedTrack::TimedBuffer::TimedBuffer( - const sp& buffer, int64_t pts) - : mBuffer(buffer), mPTS(pts), mPosition(0) {} - -// ---------------------------------------------------------------------------- - -// RecordTrack constructor must be called with AudioFlinger::mLock held -AudioFlinger::RecordThread::RecordTrack::RecordTrack( - RecordThread *thread, - const sp& client, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - int sessionId) - : TrackBase(thread, client, sampleRate, format, - channelMask, frameCount, 0 /*sharedBuffer*/, sessionId), - mOverflow(false) -{ - ALOGV("RecordTrack constructor, size %d", (int)mBufferEnd - (int)mBuffer); -} - -AudioFlinger::RecordThread::RecordTrack::~RecordTrack() -{ - ALOGV("%s", __func__); -} - -// AudioBufferProvider interface -status_t AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvider::Buffer* buffer, - int64_t pts) -{ - audio_track_cblk_t* cblk = this->cblk(); - uint32_t framesAvail; - uint32_t framesReq = buffer->frameCount; - - // Check if last stepServer failed, try to step now - if (mStepServerFailed) { - if (!step()) { - goto getNextBuffer_exit; - } - ALOGV("stepServer recovered"); - mStepServerFailed = false; - } - - // FIXME lock is not actually held, so overrun is possible - framesAvail = cblk->framesAvailableIn_l(mFrameCount); - - if (CC_LIKELY(framesAvail)) { - uint32_t s = cblk->server; - uint32_t bufferEnd = cblk->serverBase + mFrameCount; - - if (framesReq > framesAvail) { - framesReq = framesAvail; - } - if (framesReq > bufferEnd - s) { - framesReq = bufferEnd - s; - } - - buffer->raw = getBuffer(s, framesReq); - buffer->frameCount = framesReq; - return NO_ERROR; - } - -getNextBuffer_exit: - buffer->raw = NULL; - buffer->frameCount = 0; - return NOT_ENOUGH_DATA; -} - -status_t AudioFlinger::RecordThread::RecordTrack::start(AudioSystem::sync_event_t event, - int triggerSession) -{ - sp thread = mThread.promote(); - if (thread != 0) { - RecordThread *recordThread = (RecordThread *)thread.get(); - return recordThread->start(this, event, triggerSession); - } else { - return BAD_VALUE; - } -} - -void AudioFlinger::RecordThread::RecordTrack::stop() -{ - sp thread = mThread.promote(); - if (thread != 0) { - RecordThread *recordThread = (RecordThread *)thread.get(); - recordThread->mLock.lock(); - bool doStop = recordThread->stop_l(this); - if (doStop) { - TrackBase::reset(); - // Force overrun condition to avoid false overrun callback until first data is - // read from buffer - android_atomic_or(CBLK_UNDERRUN, &mCblk->flags); - } - recordThread->mLock.unlock(); - if (doStop) { - AudioSystem::stopInput(recordThread->id()); - } - } -} - -/*static*/ void 
AudioFlinger::RecordThread::RecordTrack::appendDumpHeader(String8& result) -{ - result.append(" Clien Fmt Chn mask Session Step S SRate Serv User FrameCount\n"); -} - -void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size) -{ - snprintf(buffer, size, " %05d %03u 0x%08x %05d %04u %01d %05u %08x %08x %05d\n", - (mClient == 0) ? getpid_cached : mClient->pid(), - mFormat, - mChannelMask, - mSessionId, - mStepCount, - mState, - mCblk->sampleRate, - mCblk->server, - mCblk->user, - mFrameCount); -} - -bool AudioFlinger::RecordThread::RecordTrack::isOut() const -{ - return false; -} - -// ---------------------------------------------------------------------------- - -AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( - PlaybackThread *playbackThread, - DuplicatingThread *sourceThread, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount) - : Track(playbackThread, NULL, AUDIO_STREAM_CNT, sampleRate, format, channelMask, frameCount, - NULL, 0, IAudioFlinger::TRACK_DEFAULT), - mActive(false), mSourceThread(sourceThread), mBuffers(NULL) -{ - - if (mCblk != NULL) { - mBuffers = (char*)mCblk + sizeof(audio_track_cblk_t); - mOutBuffer.frameCount = 0; - playbackThread->mTracks.add(this); - ALOGV("OutputTrack constructor mCblk %p, mBuffer %p, mBuffers %p, " \ - "mCblk->frameCount %d, mCblk->sampleRate %u, mChannelMask 0x%08x mBufferEnd %p", - mCblk, mBuffer, mBuffers, - mCblk->frameCount, mCblk->sampleRate, mChannelMask, mBufferEnd); - } else { - ALOGW("Error creating output track on thread %p", playbackThread); - } -} - -AudioFlinger::PlaybackThread::OutputTrack::~OutputTrack() -{ - clearBufferQueue(); -} - -status_t AudioFlinger::PlaybackThread::OutputTrack::start(AudioSystem::sync_event_t event, - int triggerSession) -{ - status_t status = Track::start(event, triggerSession); - if (status != NO_ERROR) { - return status; - } - - mActive = true; - mRetryCount = 127; - return status; -} - -void AudioFlinger::PlaybackThread::OutputTrack::stop() -{ - Track::stop(); - clearBufferQueue(); - mOutBuffer.frameCount = 0; - mActive = false; -} - -bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t frames) -{ - Buffer *pInBuffer; - Buffer inBuffer; - uint32_t channelCount = mChannelCount; - bool outputBufferFull = false; - inBuffer.frameCount = frames; - inBuffer.i16 = data; - - uint32_t waitTimeLeftMs = mSourceThread->waitTimeMs(); - - if (!mActive && frames != 0) { - start(); - sp thread = mThread.promote(); - if (thread != 0) { - MixerThread *mixerThread = (MixerThread *)thread.get(); - if (mFrameCount > frames){ - if (mBufferQueue.size() < kMaxOverFlowBuffers) { - uint32_t startFrames = (mFrameCount - frames); - pInBuffer = new Buffer; - pInBuffer->mBuffer = new int16_t[startFrames * channelCount]; - pInBuffer->frameCount = startFrames; - pInBuffer->i16 = pInBuffer->mBuffer; - memset(pInBuffer->raw, 0, startFrames * channelCount * sizeof(int16_t)); - mBufferQueue.add(pInBuffer); - } else { - ALOGW ("OutputTrack::write() %p no more buffers in queue", this); - } - } - } - } - - while (waitTimeLeftMs) { - // First write pending buffers, then new data - if (mBufferQueue.size()) { - pInBuffer = mBufferQueue.itemAt(0); - } else { - pInBuffer = &inBuffer; - } - - if (pInBuffer->frameCount == 0) { - break; - } - - if (mOutBuffer.frameCount == 0) { - mOutBuffer.frameCount = pInBuffer->frameCount; - nsecs_t startTime = systemTime(); - if (obtainBuffer(&mOutBuffer, waitTimeLeftMs) == (status_t)NO_MORE_BUFFERS) { - ALOGV 
("OutputTrack::write() %p thread %p no more output buffers", this, - mThread.unsafe_get()); - outputBufferFull = true; - break; - } - uint32_t waitTimeMs = (uint32_t)ns2ms(systemTime() - startTime); - if (waitTimeLeftMs >= waitTimeMs) { - waitTimeLeftMs -= waitTimeMs; - } else { - waitTimeLeftMs = 0; - } - } - - uint32_t outFrames = pInBuffer->frameCount > mOutBuffer.frameCount ? mOutBuffer.frameCount : - pInBuffer->frameCount; - memcpy(mOutBuffer.raw, pInBuffer->raw, outFrames * channelCount * sizeof(int16_t)); - mCblk->stepUserOut(outFrames, mFrameCount); - pInBuffer->frameCount -= outFrames; - pInBuffer->i16 += outFrames * channelCount; - mOutBuffer.frameCount -= outFrames; - mOutBuffer.i16 += outFrames * channelCount; - - if (pInBuffer->frameCount == 0) { - if (mBufferQueue.size()) { - mBufferQueue.removeAt(0); - delete [] pInBuffer->mBuffer; - delete pInBuffer; - ALOGV("OutputTrack::write() %p thread %p released overflow buffer %d", this, - mThread.unsafe_get(), mBufferQueue.size()); - } else { - break; - } - } - } - - // If we could not write all frames, allocate a buffer and queue it for next time. - if (inBuffer.frameCount) { - sp thread = mThread.promote(); - if (thread != 0 && !thread->standby()) { - if (mBufferQueue.size() < kMaxOverFlowBuffers) { - pInBuffer = new Buffer; - pInBuffer->mBuffer = new int16_t[inBuffer.frameCount * channelCount]; - pInBuffer->frameCount = inBuffer.frameCount; - pInBuffer->i16 = pInBuffer->mBuffer; - memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * channelCount * - sizeof(int16_t)); - mBufferQueue.add(pInBuffer); - ALOGV("OutputTrack::write() %p thread %p adding overflow buffer %d", this, - mThread.unsafe_get(), mBufferQueue.size()); - } else { - ALOGW("OutputTrack::write() %p thread %p no more overflow buffers", - mThread.unsafe_get(), this); - } - } - } - - // Calling write() with a 0 length buffer, means that no more data will be written: - // If no more buffers are pending, fill output track buffer to make sure it is started - // by output mixer. 
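// Cross-reference, stated as an assumption rather than taken from the original patch:
// DuplicatingThread::threadLoop_sleepTime() earlier in this hunk sets writeFrames to 0 once the
// mixer no longer reports MIXER_TRACKS_ENABLED, and threadLoop_write() then calls
// write(mMixBuffer, 0). The code below handles that case, when no overflow buffers remain, by
// either queueing enough silence to fill the track buffer (so the sink thread starts consuming
// it) or, if the track was already active, stopping it.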
- if (frames == 0 && mBufferQueue.size() == 0) { - if (mCblk->user < mFrameCount) { - frames = mFrameCount - mCblk->user; - pInBuffer = new Buffer; - pInBuffer->mBuffer = new int16_t[frames * channelCount]; - pInBuffer->frameCount = frames; - pInBuffer->i16 = pInBuffer->mBuffer; - memset(pInBuffer->raw, 0, frames * channelCount * sizeof(int16_t)); - mBufferQueue.add(pInBuffer); - } else if (mActive) { - stop(); - } - } - - return outputBufferFull; -} - -status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( - AudioBufferProvider::Buffer* buffer, uint32_t waitTimeMs) -{ - int active; - status_t result; - audio_track_cblk_t* cblk = mCblk; - uint32_t framesReq = buffer->frameCount; - - ALOGVV("OutputTrack::obtainBuffer user %d, server %d", cblk->user, cblk->server); - buffer->frameCount = 0; - - uint32_t framesAvail = cblk->framesAvailableOut(mFrameCount); - - - if (framesAvail == 0) { - Mutex::Autolock _l(cblk->lock); - goto start_loop_here; - while (framesAvail == 0) { - active = mActive; - if (CC_UNLIKELY(!active)) { - ALOGV("Not active and NO_MORE_BUFFERS"); - return NO_MORE_BUFFERS; - } - result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); - if (result != NO_ERROR) { - return NO_MORE_BUFFERS; - } - // read the server count again - start_loop_here: - framesAvail = cblk->framesAvailableOut_l(mFrameCount); - } - } - -// if (framesAvail < framesReq) { -// return NO_MORE_BUFFERS; -// } - - if (framesReq > framesAvail) { - framesReq = framesAvail; - } - - uint32_t u = cblk->user; - uint32_t bufferEnd = cblk->userBase + mFrameCount; - - if (framesReq > bufferEnd - u) { - framesReq = bufferEnd - u; - } - - buffer->frameCount = framesReq; - buffer->raw = cblk->buffer(mBuffers, mFrameSize, u); - return NO_ERROR; -} - - -void AudioFlinger::PlaybackThread::OutputTrack::clearBufferQueue() -{ - size_t size = mBufferQueue.size(); - - for (size_t i = 0; i < size; i++) { - Buffer *pBuffer = mBufferQueue.itemAt(i); - delete [] pBuffer->mBuffer; - delete pBuffer; - } - mBufferQueue.clear(); -} - -// ---------------------------------------------------------------------------- - -AudioFlinger::Client::Client(const sp& audioFlinger, pid_t pid) - : RefBase(), - mAudioFlinger(audioFlinger), - // FIXME should be a "k" constant not hard-coded, in .h or ro. 
property, see 4 lines below - mMemoryDealer(new MemoryDealer(1024*1024, "AudioFlinger::Client")), - mPid(pid), - mTimedTrackCount(0) -{ - // 1 MB of address space is good for 32 tracks, 8 buffers each, 4 KB/buffer -} - -// Client destructor must be called with AudioFlinger::mLock held -AudioFlinger::Client::~Client() -{ - mAudioFlinger->removeClient_l(mPid); -} - -sp AudioFlinger::Client::heap() const -{ - return mMemoryDealer; -} - -// Reserve one of the limited slots for a timed audio track associated -// with this client -bool AudioFlinger::Client::reserveTimedTrack() -{ - const int kMaxTimedTracksPerClient = 4; - - Mutex::Autolock _l(mTimedTrackLock); - - if (mTimedTrackCount >= kMaxTimedTracksPerClient) { - ALOGW("can not create timed track - pid %d has exceeded the limit", - mPid); - return false; - } - - mTimedTrackCount++; - return true; -} - -// Release a slot for a timed audio track -void AudioFlinger::Client::releaseTimedTrack() -{ - Mutex::Autolock _l(mTimedTrackLock); - mTimedTrackCount--; -} - -// ---------------------------------------------------------------------------- - -AudioFlinger::NotificationClient::NotificationClient(const sp& audioFlinger, - const sp& client, - pid_t pid) - : mAudioFlinger(audioFlinger), mPid(pid), mAudioFlingerClient(client) -{ -} - -AudioFlinger::NotificationClient::~NotificationClient() -{ -} - -void AudioFlinger::NotificationClient::binderDied(const wp& who) -{ - sp keep(this); - mAudioFlinger->removeNotificationClient(mPid); -} - -// ---------------------------------------------------------------------------- - -AudioFlinger::TrackHandle::TrackHandle(const sp& track) - : BnAudioTrack(), - mTrack(track) -{ -} - -AudioFlinger::TrackHandle::~TrackHandle() { - // just stop the track on deletion, associated resources - // will be freed from the main thread once all pending buffers have - // been played. Unless it's not in the active track list, in which - // case we free everything now... 
- mTrack->destroy(); -} - -sp AudioFlinger::TrackHandle::getCblk() const { - return mTrack->getCblk(); -} - -status_t AudioFlinger::TrackHandle::start() { - return mTrack->start(); -} - -void AudioFlinger::TrackHandle::stop() { - mTrack->stop(); -} - -void AudioFlinger::TrackHandle::flush() { - mTrack->flush(); -} - -void AudioFlinger::TrackHandle::mute(bool e) { - mTrack->mute(e); -} - -void AudioFlinger::TrackHandle::pause() { - mTrack->pause(); -} - -status_t AudioFlinger::TrackHandle::attachAuxEffect(int EffectId) -{ - return mTrack->attachAuxEffect(EffectId); -} - -status_t AudioFlinger::TrackHandle::allocateTimedBuffer(size_t size, - sp* buffer) { - if (!mTrack->isTimedTrack()) - return INVALID_OPERATION; - - PlaybackThread::TimedTrack* tt = - reinterpret_cast(mTrack.get()); - return tt->allocateTimedBuffer(size, buffer); -} - -status_t AudioFlinger::TrackHandle::queueTimedBuffer(const sp& buffer, - int64_t pts) { - if (!mTrack->isTimedTrack()) - return INVALID_OPERATION; - - PlaybackThread::TimedTrack* tt = - reinterpret_cast(mTrack.get()); - return tt->queueTimedBuffer(buffer, pts); -} - -status_t AudioFlinger::TrackHandle::setMediaTimeTransform( - const LinearTransform& xform, int target) { - - if (!mTrack->isTimedTrack()) - return INVALID_OPERATION; - - PlaybackThread::TimedTrack* tt = - reinterpret_cast(mTrack.get()); - return tt->setMediaTimeTransform( - xform, static_cast(target)); -} - -status_t AudioFlinger::TrackHandle::onTransact( - uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) -{ - return BnAudioTrack::onTransact(code, data, reply, flags); -} - -// ---------------------------------------------------------------------------- - -sp AudioFlinger::openRecord( - pid_t pid, - audio_io_handle_t input, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - IAudioFlinger::track_flags_t flags, - pid_t tid, - int *sessionId, - status_t *status) -{ - sp recordTrack; - sp recordHandle; - sp client; - status_t lStatus; - RecordThread *thread; - size_t inFrameCount; - int lSessionId; - - // check calling permissions - if (!recordingAllowed()) { - lStatus = PERMISSION_DENIED; - goto Exit; - } - - // add client to list - { // scope for mLock - Mutex::Autolock _l(mLock); - thread = checkRecordThread_l(input); - if (thread == NULL) { - lStatus = BAD_VALUE; - goto Exit; - } - - client = registerPid_l(pid); - - // If no audio session id is provided, create one here - if (sessionId != NULL && *sessionId != AUDIO_SESSION_OUTPUT_MIX) { - lSessionId = *sessionId; - } else { - lSessionId = nextUniqueId(); - if (sessionId != NULL) { - *sessionId = lSessionId; - } - } - // create new record track. - // The record track uses one track in mHardwareMixerThread by convention. 
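// Minimal standalone sketch of the "reuse the caller's session id or allocate a
// new one" step that openRecord() performs above. kSessionOutputMix and
// nextUniqueId() are stand-ins for AUDIO_SESSION_OUTPUT_MIX and AudioFlinger's
// unique-id counter; they are assumptions for illustration only.
#include <atomic>
#include <cstdio>

static std::atomic<int> gNextUniqueId(1);
static const int kSessionOutputMix = 0;

static int nextUniqueId() { return gNextUniqueId.fetch_add(1); }

// Returns the session id to use for a new track, writing a freshly allocated id
// back to the caller when none was provided.
static int resolveSessionId(int* sessionId) {
    if (sessionId != nullptr && *sessionId != kSessionOutputMix) {
        return *sessionId;                       // caller supplied a usable id: reuse it
    }
    int id = nextUniqueId();                     // otherwise allocate a new unique id
    if (sessionId != nullptr) {
        *sessionId = id;                         // report it back to the caller
    }
    return id;
}

int main() {
    int provided = 42;
    int none = kSessionOutputMix;
    std::printf("reused: %d\n", resolveSessionId(&provided));
    std::printf("allocated: %d\n", resolveSessionId(&none));
    return 0;
}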
- recordTrack = thread->createRecordTrack_l(client, sampleRate, format, channelMask, - frameCount, lSessionId, flags, tid, &lStatus); - } - if (lStatus != NO_ERROR) { - // remove local strong reference to Client before deleting the RecordTrack so that the - // Client destructor is called by the TrackBase destructor with mLock held - client.clear(); - recordTrack.clear(); - goto Exit; - } - - // return to handle to client - recordHandle = new RecordHandle(recordTrack); - lStatus = NO_ERROR; - -Exit: - if (status) { - *status = lStatus; - } - return recordHandle; -} - -// ---------------------------------------------------------------------------- - -AudioFlinger::RecordHandle::RecordHandle( - const sp& recordTrack) - : BnAudioRecord(), - mRecordTrack(recordTrack) -{ -} - -AudioFlinger::RecordHandle::~RecordHandle() { - stop_nonvirtual(); - mRecordTrack->destroy(); -} - -sp AudioFlinger::RecordHandle::getCblk() const { - return mRecordTrack->getCblk(); -} - -status_t AudioFlinger::RecordHandle::start(int /*AudioSystem::sync_event_t*/ event, - int triggerSession) { - ALOGV("RecordHandle::start()"); - return mRecordTrack->start((AudioSystem::sync_event_t)event, triggerSession); -} - -void AudioFlinger::RecordHandle::stop() { - stop_nonvirtual(); -} - -void AudioFlinger::RecordHandle::stop_nonvirtual() { - ALOGV("RecordHandle::stop()"); - mRecordTrack->stop(); -} - -status_t AudioFlinger::RecordHandle::onTransact( - uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) -{ - return BnAudioRecord::onTransact(code, data, reply, flags); -} - -// ---------------------------------------------------------------------------- - -AudioFlinger::RecordThread::RecordThread(const sp& audioFlinger, - AudioStreamIn *input, - uint32_t sampleRate, - audio_channel_mask_t channelMask, - audio_io_handle_t id, - audio_devices_t device, - const sp& teeSink) : - ThreadBase(audioFlinger, id, AUDIO_DEVICE_NONE, device, RECORD), - mInput(input), mResampler(NULL), mRsmpOutBuffer(NULL), mRsmpInBuffer(NULL), - // mRsmpInIndex and mInputBytes set by readInputParameters() - mReqChannelCount(popcount(channelMask)), - mReqSampleRate(sampleRate), - // mBytesRead is only meaningful while active, and so is cleared in start() - // (but might be better to also clear here for dump?) 
- mTeeSink(teeSink) -{ - snprintf(mName, kNameLength, "AudioIn_%X", id); - - readInputParameters(); - -} - - -AudioFlinger::RecordThread::~RecordThread() -{ - delete[] mRsmpInBuffer; - delete mResampler; - delete[] mRsmpOutBuffer; -} - -void AudioFlinger::RecordThread::onFirstRef() -{ - run(mName, PRIORITY_URGENT_AUDIO); -} - -status_t AudioFlinger::RecordThread::readyToRun() -{ - status_t status = initCheck(); - ALOGW_IF(status != NO_ERROR,"RecordThread %p could not initialize", this); - return status; -} - -bool AudioFlinger::RecordThread::threadLoop() -{ - AudioBufferProvider::Buffer buffer; - sp activeTrack; - Vector< sp > effectChains; - - nsecs_t lastWarning = 0; - - inputStandBy(); - acquireWakeLock(); - - // used to verify we've read at least once before evaluating how many bytes were read - bool readOnce = false; - - // start recording - while (!exitPending()) { - - processConfigEvents(); - - { // scope for mLock - Mutex::Autolock _l(mLock); - checkForNewParameters_l(); - if (mActiveTrack == 0 && mConfigEvents.isEmpty()) { - standby(); - - if (exitPending()) { - break; - } - - releaseWakeLock_l(); - ALOGV("RecordThread: loop stopping"); - // go to sleep - mWaitWorkCV.wait(mLock); - ALOGV("RecordThread: loop starting"); - acquireWakeLock_l(); - continue; - } - if (mActiveTrack != 0) { - if (mActiveTrack->mState == TrackBase::PAUSING) { - standby(); - mActiveTrack.clear(); - mStartStopCond.broadcast(); - } else if (mActiveTrack->mState == TrackBase::RESUMING) { - if (mReqChannelCount != mActiveTrack->channelCount()) { - mActiveTrack.clear(); - mStartStopCond.broadcast(); - } else if (readOnce) { - // record start succeeds only if first read from audio input - // succeeds - if (mBytesRead >= 0) { - mActiveTrack->mState = TrackBase::ACTIVE; - } else { - mActiveTrack.clear(); - } - mStartStopCond.broadcast(); - } - mStandby = false; - } else if (mActiveTrack->mState == TrackBase::TERMINATED) { - removeTrack_l(mActiveTrack); - mActiveTrack.clear(); - } - } - lockEffectChains_l(effectChains); - } - - if (mActiveTrack != 0) { - if (mActiveTrack->mState != TrackBase::ACTIVE && - mActiveTrack->mState != TrackBase::RESUMING) { - unlockEffectChains(effectChains); - usleep(kRecordThreadSleepUs); - continue; - } - for (size_t i = 0; i < effectChains.size(); i ++) { - effectChains[i]->process_l(); - } - - buffer.frameCount = mFrameCount; - if (CC_LIKELY(mActiveTrack->getNextBuffer(&buffer) == NO_ERROR)) { - readOnce = true; - size_t framesOut = buffer.frameCount; - if (mResampler == NULL) { - // no resampling - while (framesOut) { - size_t framesIn = mFrameCount - mRsmpInIndex; - if (framesIn) { - int8_t *src = (int8_t *)mRsmpInBuffer + mRsmpInIndex * mFrameSize; - int8_t *dst = buffer.i8 + (buffer.frameCount - framesOut) * - mActiveTrack->mFrameSize; - if (framesIn > framesOut) - framesIn = framesOut; - mRsmpInIndex += framesIn; - framesOut -= framesIn; - if (mChannelCount == mReqChannelCount || - mFormat != AUDIO_FORMAT_PCM_16_BIT) { - memcpy(dst, src, framesIn * mFrameSize); - } else { - if (mChannelCount == 1) { - upmix_to_stereo_i16_from_mono_i16((int16_t *)dst, - (int16_t *)src, framesIn); - } else { - downmix_to_mono_i16_from_stereo_i16((int16_t *)dst, - (int16_t *)src, framesIn); - } - } - } - if (framesOut && mFrameCount == mRsmpInIndex) { - void *readInto; - if (framesOut == mFrameCount && - (mChannelCount == mReqChannelCount || - mFormat != AUDIO_FORMAT_PCM_16_BIT)) { - readInto = buffer.raw; - framesOut = 0; - } else { - readInto = mRsmpInBuffer; - mRsmpInIndex = 0; - } - mBytesRead 
= mInput->stream->read(mInput->stream, readInto, mInputBytes); - if (mBytesRead <= 0) { - if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE)) - { - ALOGE("Error reading audio input"); - // Force input into standby so that it tries to - // recover at next read attempt - inputStandBy(); - usleep(kRecordThreadSleepUs); - } - mRsmpInIndex = mFrameCount; - framesOut = 0; - buffer.frameCount = 0; - } else if (mTeeSink != 0) { - (void) mTeeSink->write(readInto, - mBytesRead >> Format_frameBitShift(mTeeSink->format())); - } - } - } - } else { - // resampling - - memset(mRsmpOutBuffer, 0, framesOut * 2 * sizeof(int32_t)); - // alter output frame count as if we were expecting stereo samples - if (mChannelCount == 1 && mReqChannelCount == 1) { - framesOut >>= 1; - } - mResampler->resample(mRsmpOutBuffer, framesOut, - this /* AudioBufferProvider* */); - // ditherAndClamp() works as long as all buffers returned by - // mActiveTrack->getNextBuffer() are 32 bit aligned which should be always true. - if (mChannelCount == 2 && mReqChannelCount == 1) { - ditherAndClamp(mRsmpOutBuffer, mRsmpOutBuffer, framesOut); - // the resampler always outputs stereo samples: - // do post stereo to mono conversion - downmix_to_mono_i16_from_stereo_i16(buffer.i16, (int16_t *)mRsmpOutBuffer, - framesOut); - } else { - ditherAndClamp((int32_t *)buffer.raw, mRsmpOutBuffer, framesOut); - } - - } - if (mFramestoDrop == 0) { - mActiveTrack->releaseBuffer(&buffer); - } else { - if (mFramestoDrop > 0) { - mFramestoDrop -= buffer.frameCount; - if (mFramestoDrop <= 0) { - clearSyncStartEvent(); - } - } else { - mFramestoDrop += buffer.frameCount; - if (mFramestoDrop >= 0 || mSyncStartEvent == 0 || - mSyncStartEvent->isCancelled()) { - ALOGW("Synced record %s, session %d, trigger session %d", - (mFramestoDrop >= 0) ? "timed out" : "cancelled", - mActiveTrack->sessionId(), - (mSyncStartEvent != 0) ? mSyncStartEvent->triggerSession() : 0); - clearSyncStartEvent(); - } - } - } - mActiveTrack->clearOverflow(); - } - // client isn't retrieving buffers fast enough - else { - if (!mActiveTrack->setOverflow()) { - nsecs_t now = systemTime(); - if ((now - lastWarning) > kWarningThrottleNs) { - ALOGW("RecordThread: buffer overflow"); - lastWarning = now; - } - } - // Release the processor for a while before asking for a new buffer. - // This will give the application more chance to read from the buffer and - // clear the overflow. 
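// Minimal standalone sketch of the rate-limited "buffer overflow" warning used a
// few lines above (lastWarning/kWarningThrottleNs), rebuilt with std::chrono and
// stderr; the class name and the 5 second interval are illustrative assumptions.
#include <chrono>
#include <cstdio>

class ThrottledWarning {
public:
    explicit ThrottledWarning(std::chrono::seconds minInterval) : mMinInterval(minInterval) {}

    // Emit the message only if enough time has passed since the previous warning,
    // so a persistent overflow does not flood the log.
    void warn(const char* msg) {
        const auto now = std::chrono::steady_clock::now();
        if (now - mLastWarning > mMinInterval) {
            std::fprintf(stderr, "W: %s\n", msg);
            mLastWarning = now;
        }
    }

private:
    std::chrono::seconds mMinInterval;
    std::chrono::steady_clock::time_point mLastWarning{};  // epoch: first warning always fires
};

int main() {
    ThrottledWarning overflowWarning(std::chrono::seconds(5));
    for (int i = 0; i < 3; i++) {
        overflowWarning.warn("RecordThread: buffer overflow");  // only the first call prints
    }
    return 0;
}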
- usleep(kRecordThreadSleepUs); - } - } - // enable changes in effect chain - unlockEffectChains(effectChains); - effectChains.clear(); - } - - standby(); - - { - Mutex::Autolock _l(mLock); - mActiveTrack.clear(); - mStartStopCond.broadcast(); - } - - releaseWakeLock(); - - ALOGV("RecordThread %p exiting", this); - return false; -} - -void AudioFlinger::RecordThread::standby() -{ - if (!mStandby) { - inputStandBy(); - mStandby = true; - } -} - -void AudioFlinger::RecordThread::inputStandBy() -{ - mInput->stream->common.standby(&mInput->stream->common); -} - -sp AudioFlinger::RecordThread::createRecordTrack_l( - const sp& client, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - int sessionId, - IAudioFlinger::track_flags_t flags, - pid_t tid, - status_t *status) -{ - sp track; - status_t lStatus; - - lStatus = initCheck(); - if (lStatus != NO_ERROR) { - ALOGE("Audio driver not initialized."); - goto Exit; - } - - // FIXME use flags and tid similar to createTrack_l() - - { // scope for mLock - Mutex::Autolock _l(mLock); - - track = new RecordTrack(this, client, sampleRate, - format, channelMask, frameCount, sessionId); - - if (track->getCblk() == 0) { - lStatus = NO_MEMORY; - goto Exit; - } - mTracks.add(track); - - // disable AEC and NS if the device is a BT SCO headset supporting those pre processings - bool suspend = audio_is_bluetooth_sco_device(mInDevice) && - mAudioFlinger->btNrecIsOff(); - setEffectSuspended_l(FX_IID_AEC, suspend, sessionId); - setEffectSuspended_l(FX_IID_NS, suspend, sessionId); - } - lStatus = NO_ERROR; - -Exit: - if (status) { - *status = lStatus; - } - return track; -} - -status_t AudioFlinger::RecordThread::start(RecordThread::RecordTrack* recordTrack, - AudioSystem::sync_event_t event, - int triggerSession) -{ - ALOGV("RecordThread::start event %d, triggerSession %d", event, triggerSession); - sp strongMe = this; - status_t status = NO_ERROR; - - if (event == AudioSystem::SYNC_EVENT_NONE) { - clearSyncStartEvent(); - } else if (event != AudioSystem::SYNC_EVENT_SAME) { - mSyncStartEvent = mAudioFlinger->createSyncEvent(event, - triggerSession, - recordTrack->sessionId(), - syncStartEventCallback, - this); - // Sync event can be cancelled by the trigger session if the track is not in a - // compatible state in which case we start record immediately - if (mSyncStartEvent->isCancelled()) { - clearSyncStartEvent(); - } else { - // do not wait for the event for more than AudioSystem::kSyncRecordStartTimeOutMs - mFramestoDrop = - ((AudioSystem::kSyncRecordStartTimeOutMs * mReqSampleRate) / 1000); - } - } - - { - AutoMutex lock(mLock); - if (mActiveTrack != 0) { - if (recordTrack != mActiveTrack.get()) { - status = -EBUSY; - } else if (mActiveTrack->mState == TrackBase::PAUSING) { - mActiveTrack->mState = TrackBase::ACTIVE; - } - return status; - } - - recordTrack->mState = TrackBase::IDLE; - mActiveTrack = recordTrack; - mLock.unlock(); - status_t status = AudioSystem::startInput(mId); - mLock.lock(); - if (status != NO_ERROR) { - mActiveTrack.clear(); - clearSyncStartEvent(); - return status; - } - mRsmpInIndex = mFrameCount; - mBytesRead = 0; - if (mResampler != NULL) { - mResampler->reset(); - } - mActiveTrack->mState = TrackBase::RESUMING; - // signal thread to start - ALOGV("Signal record thread"); - mWaitWorkCV.broadcast(); - // do not wait for mStartStopCond if exiting - if (exitPending()) { - mActiveTrack.clear(); - status = INVALID_OPERATION; - goto startError; - } - mStartStopCond.wait(mLock); - if 
(mActiveTrack == 0) { - ALOGV("Record failed to start"); - status = BAD_VALUE; - goto startError; - } - ALOGV("Record started OK"); - return status; - } -startError: - AudioSystem::stopInput(mId); - clearSyncStartEvent(); - return status; -} - -void AudioFlinger::RecordThread::clearSyncStartEvent() -{ - if (mSyncStartEvent != 0) { - mSyncStartEvent->cancel(); - } - mSyncStartEvent.clear(); - mFramestoDrop = 0; -} - -void AudioFlinger::RecordThread::syncStartEventCallback(const wp& event) -{ - sp strongEvent = event.promote(); - - if (strongEvent != 0) { - RecordThread *me = (RecordThread *)strongEvent->cookie(); - me->handleSyncStartEvent(strongEvent); - } -} - -void AudioFlinger::RecordThread::handleSyncStartEvent(const sp& event) -{ - if (event == mSyncStartEvent) { - // TODO: use actual buffer filling status instead of 2 buffers when info is available - // from audio HAL - mFramestoDrop = mFrameCount * 2; - } -} - -bool AudioFlinger::RecordThread::stop_l(RecordThread::RecordTrack* recordTrack) { - ALOGV("RecordThread::stop"); - if (recordTrack != mActiveTrack.get() || recordTrack->mState == TrackBase::PAUSING) { - return false; - } - recordTrack->mState = TrackBase::PAUSING; - // do not wait for mStartStopCond if exiting - if (exitPending()) { - return true; - } - mStartStopCond.wait(mLock); - // if we have been restarted, recordTrack == mActiveTrack.get() here - if (exitPending() || recordTrack != mActiveTrack.get()) { - ALOGV("Record stopped OK"); - return true; - } - return false; -} - -bool AudioFlinger::RecordThread::isValidSyncEvent(const sp& event) const -{ - return false; -} - -status_t AudioFlinger::RecordThread::setSyncEvent(const sp& event) -{ -#if 0 // This branch is currently dead code, but is preserved in case it will be needed in future - if (!isValidSyncEvent(event)) { - return BAD_VALUE; - } - - int eventSession = event->triggerSession(); - status_t ret = NAME_NOT_FOUND; - - Mutex::Autolock _l(mLock); - - for (size_t i = 0; i < mTracks.size(); i++) { - sp track = mTracks[i]; - if (eventSession == track->sessionId()) { - (void) track->setSyncEvent(event); - ret = NO_ERROR; - } - } - return ret; -#else - return BAD_VALUE; -#endif -} - -void AudioFlinger::RecordThread::RecordTrack::destroy() -{ - // see comments at AudioFlinger::PlaybackThread::Track::destroy() - sp keep(this); - { - sp thread = mThread.promote(); - if (thread != 0) { - if (mState == ACTIVE || mState == RESUMING) { - AudioSystem::stopInput(thread->id()); - } - AudioSystem::releaseInput(thread->id()); - Mutex::Autolock _l(thread->mLock); - RecordThread *recordThread = (RecordThread *) thread.get(); - recordThread->destroyTrack_l(this); - } - } -} - -// destroyTrack_l() must be called with ThreadBase::mLock held -void AudioFlinger::RecordThread::destroyTrack_l(const sp& track) -{ - track->mState = TrackBase::TERMINATED; - // active tracks are removed by threadLoop() - if (mActiveTrack != track) { - removeTrack_l(track); - } -} - -void AudioFlinger::RecordThread::removeTrack_l(const sp& track) -{ - mTracks.remove(track); - // need anything related to effects here? 
-} - -void AudioFlinger::RecordThread::dump(int fd, const Vector& args) -{ - dumpInternals(fd, args); - dumpTracks(fd, args); - dumpEffectChains(fd, args); -} - -void AudioFlinger::RecordThread::dumpInternals(int fd, const Vector& args) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - snprintf(buffer, SIZE, "\nInput thread %p internals\n", this); - result.append(buffer); - - if (mActiveTrack != 0) { - snprintf(buffer, SIZE, "In index: %d\n", mRsmpInIndex); - result.append(buffer); - snprintf(buffer, SIZE, "In size: %d\n", mInputBytes); - result.append(buffer); - snprintf(buffer, SIZE, "Resampling: %d\n", (mResampler != NULL)); - result.append(buffer); - snprintf(buffer, SIZE, "Out channel count: %u\n", mReqChannelCount); - result.append(buffer); - snprintf(buffer, SIZE, "Out sample rate: %u\n", mReqSampleRate); - result.append(buffer); - } else { - result.append("No active record client\n"); - } - - write(fd, result.string(), result.size()); - - dumpBase(fd, args); -} - -void AudioFlinger::RecordThread::dumpTracks(int fd, const Vector& args) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - snprintf(buffer, SIZE, "Input thread %p tracks\n", this); - result.append(buffer); - RecordTrack::appendDumpHeader(result); - for (size_t i = 0; i < mTracks.size(); ++i) { - sp track = mTracks[i]; - if (track != 0) { - track->dump(buffer, SIZE); - result.append(buffer); - } - } - - if (mActiveTrack != 0) { - snprintf(buffer, SIZE, "\nInput thread %p active tracks\n", this); - result.append(buffer); - RecordTrack::appendDumpHeader(result); - mActiveTrack->dump(buffer, SIZE); - result.append(buffer); - - } - write(fd, result.string(), result.size()); -} - -// AudioBufferProvider interface -status_t AudioFlinger::RecordThread::getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts) -{ - size_t framesReq = buffer->frameCount; - size_t framesReady = mFrameCount - mRsmpInIndex; - int channelCount; - - if (framesReady == 0) { - mBytesRead = mInput->stream->read(mInput->stream, mRsmpInBuffer, mInputBytes); - if (mBytesRead <= 0) { - if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE)) { - ALOGE("RecordThread::getNextBuffer() Error reading audio input"); - // Force input into standby so that it tries to - // recover at next read attempt - inputStandBy(); - usleep(kRecordThreadSleepUs); - } - buffer->raw = NULL; - buffer->frameCount = 0; - return NOT_ENOUGH_DATA; - } - mRsmpInIndex = 0; - framesReady = mFrameCount; - } - - if (framesReq > framesReady) { - framesReq = framesReady; - } - - if (mChannelCount == 1 && mReqChannelCount == 2) { - channelCount = 1; - } else { - channelCount = 2; - } - buffer->raw = mRsmpInBuffer + mRsmpInIndex * channelCount; - buffer->frameCount = framesReq; - return NO_ERROR; -} - -// AudioBufferProvider interface -void AudioFlinger::RecordThread::releaseBuffer(AudioBufferProvider::Buffer* buffer) -{ - mRsmpInIndex += buffer->frameCount; - buffer->frameCount = 0; -} - -bool AudioFlinger::RecordThread::checkForNewParameters_l() -{ - bool reconfig = false; - - while (!mNewParameters.isEmpty()) { - status_t status = NO_ERROR; - String8 keyValuePair = mNewParameters[0]; - AudioParameter param = AudioParameter(keyValuePair); - int value; - audio_format_t reqFormat = mFormat; - uint32_t reqSamplingRate = mReqSampleRate; - uint32_t reqChannelCount = mReqChannelCount; - - if (param.getInt(String8(AudioParameter::keySamplingRate), value) == NO_ERROR) { - reqSamplingRate = value; - reconfig = true; - } - if 
(param.getInt(String8(AudioParameter::keyFormat), value) == NO_ERROR) { - reqFormat = (audio_format_t) value; - reconfig = true; - } - if (param.getInt(String8(AudioParameter::keyChannels), value) == NO_ERROR) { - reqChannelCount = popcount(value); - reconfig = true; - } - if (param.getInt(String8(AudioParameter::keyFrameCount), value) == NO_ERROR) { - // do not accept frame count changes if tracks are open as the track buffer - // size depends on frame count and correct behavior would not be guaranteed - // if frame count is changed after track creation - if (mActiveTrack != 0) { - status = INVALID_OPERATION; - } else { - reconfig = true; - } - } - if (param.getInt(String8(AudioParameter::keyRouting), value) == NO_ERROR) { - // forward device change to effects that have requested to be - // aware of attached audio device. - for (size_t i = 0; i < mEffectChains.size(); i++) { - mEffectChains[i]->setDevice_l(value); - } - - // store input device and output device but do not forward output device to audio HAL. - // Note that status is ignored by the caller for output device - // (see AudioFlinger::setParameters() - if (audio_is_output_devices(value)) { - mOutDevice = value; - status = BAD_VALUE; - } else { - mInDevice = value; - // disable AEC and NS if the device is a BT SCO headset supporting those - // pre processings - if (mTracks.size() > 0) { - bool suspend = audio_is_bluetooth_sco_device(mInDevice) && - mAudioFlinger->btNrecIsOff(); - for (size_t i = 0; i < mTracks.size(); i++) { - sp track = mTracks[i]; - setEffectSuspended_l(FX_IID_AEC, suspend, track->sessionId()); - setEffectSuspended_l(FX_IID_NS, suspend, track->sessionId()); - } - } - } - } - if (param.getInt(String8(AudioParameter::keyInputSource), value) == NO_ERROR && - mAudioSource != (audio_source_t)value) { - // forward device change to effects that have requested to be - // aware of attached audio device. - for (size_t i = 0; i < mEffectChains.size(); i++) { - mEffectChains[i]->setAudioSource_l((audio_source_t)value); - } - mAudioSource = (audio_source_t)value; - } - if (status == NO_ERROR) { - status = mInput->stream->common.set_parameters(&mInput->stream->common, - keyValuePair.string()); - if (status == INVALID_OPERATION) { - inputStandBy(); - status = mInput->stream->common.set_parameters(&mInput->stream->common, - keyValuePair.string()); - } - if (reconfig) { - if (status == BAD_VALUE && - reqFormat == mInput->stream->common.get_format(&mInput->stream->common) && - reqFormat == AUDIO_FORMAT_PCM_16_BIT && - ((int)mInput->stream->common.get_sample_rate(&mInput->stream->common) - <= (2 * reqSamplingRate)) && - popcount(mInput->stream->common.get_channels(&mInput->stream->common)) - <= FCC_2 && - (reqChannelCount <= FCC_2)) { - status = NO_ERROR; - } - if (status == NO_ERROR) { - readInputParameters(); - sendIoConfigEvent_l(AudioSystem::INPUT_CONFIG_CHANGED); - } - } - } - - mNewParameters.removeAt(0); - - mParamStatus = status; - mParamCond.signal(); - // wait for condition with time out in case the thread calling ThreadBase::setParameters() - // already timed out waiting for the status and will never signal the condition. 
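// Minimal standalone sketch of the handshake mentioned in the comment above: the
// worker publishes a status, wakes the requester, then waits for the acknowledgement
// with a timeout so a requester that already gave up cannot block it forever.
// ParamHandshake and the 2 second timeout are illustrative assumptions, not the
// real mParamStatus/mParamCond/mWaitWorkCV members.
#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>

struct ParamHandshake {
    std::mutex lock;
    std::condition_variable cond;
    bool statusReady = false;
    bool acked = false;
    int status = 0;
};

static void publishStatus(ParamHandshake& h, int status) {
    std::unique_lock<std::mutex> ul(h.lock);
    h.status = status;
    h.statusReady = true;
    h.cond.notify_all();                         // wake whoever is waiting for the status
    // Bounded wait: if the requester timed out earlier, no acknowledgement will ever come.
    h.cond.wait_for(ul, std::chrono::seconds(2), [&] { return h.acked; });
}

int main() {
    ParamHandshake h;
    publishStatus(h, 0);                         // no requester: returns after the timeout
    std::printf("status %d published, acked=%d\n", h.status, (int)h.acked);
    return 0;
}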
-        mWaitWorkCV.waitRelative(mLock, kSetParametersTimeoutNs);
-    }
-    return reconfig;
-}
-
-String8 AudioFlinger::RecordThread::getParameters(const String8& keys)
-{
-    char *s;
-    String8 out_s8 = String8();
-
-    Mutex::Autolock _l(mLock);
-    if (initCheck() != NO_ERROR) {
-        return out_s8;
-    }
-
-    s = mInput->stream->common.get_parameters(&mInput->stream->common, keys.string());
-    out_s8 = String8(s);
-    free(s);
-    return out_s8;
-}
-
-void AudioFlinger::RecordThread::audioConfigChanged_l(int event, int param) {
-    AudioSystem::OutputDescriptor desc;
-    void *param2 = NULL;
-
-    switch (event) {
-    case AudioSystem::INPUT_OPENED:
-    case AudioSystem::INPUT_CONFIG_CHANGED:
-        desc.channels = mChannelMask;
-        desc.samplingRate = mSampleRate;
-        desc.format = mFormat;
-        desc.frameCount = mFrameCount;
-        desc.latency = 0;
-        param2 = &desc;
-        break;
-
-    case AudioSystem::INPUT_CLOSED:
-    default:
-        break;
-    }
-    mAudioFlinger->audioConfigChanged_l(event, mId, param2);
-}
-
-void AudioFlinger::RecordThread::readInputParameters()
-{
-    delete mRsmpInBuffer;
-    // mRsmpInBuffer is always assigned a new[] below
-    delete mRsmpOutBuffer;
-    mRsmpOutBuffer = NULL;
-    delete mResampler;
-    mResampler = NULL;
-
-    mSampleRate = mInput->stream->common.get_sample_rate(&mInput->stream->common);
-    mChannelMask = mInput->stream->common.get_channels(&mInput->stream->common);
-    mChannelCount = (uint16_t)popcount(mChannelMask);
-    mFormat = mInput->stream->common.get_format(&mInput->stream->common);
-    mFrameSize = audio_stream_frame_size(&mInput->stream->common);
-    mInputBytes = mInput->stream->common.get_buffer_size(&mInput->stream->common);
-    mFrameCount = mInputBytes / mFrameSize;
-    mNormalFrameCount = mFrameCount; // not used by record, but used by input effects
-    mRsmpInBuffer = new int16_t[mFrameCount * mChannelCount];
-
-    if (mSampleRate != mReqSampleRate && mChannelCount <= FCC_2 && mReqChannelCount <= FCC_2)
-    {
-        int channelCount;
-        // optimization: if mono to mono, use the resampler in stereo to stereo mode to avoid
-        // stereo to mono post process as the resampler always outputs stereo.
- if (mChannelCount == 1 && mReqChannelCount == 2) { - channelCount = 1; - } else { - channelCount = 2; - } - mResampler = AudioResampler::create(16, channelCount, mReqSampleRate); - mResampler->setSampleRate(mSampleRate); - mResampler->setVolume(AudioMixer::UNITY_GAIN, AudioMixer::UNITY_GAIN); - mRsmpOutBuffer = new int32_t[mFrameCount * 2]; - - // optmization: if mono to mono, alter input frame count as if we were inputing - // stereo samples - if (mChannelCount == 1 && mReqChannelCount == 1) { - mFrameCount >>= 1; - } - - } - mRsmpInIndex = mFrameCount; -} - -unsigned int AudioFlinger::RecordThread::getInputFramesLost() -{ - Mutex::Autolock _l(mLock); - if (initCheck() != NO_ERROR) { - return 0; - } - - return mInput->stream->get_input_frames_lost(mInput->stream); -} - -uint32_t AudioFlinger::RecordThread::hasAudioSession(int sessionId) const -{ - Mutex::Autolock _l(mLock); - uint32_t result = 0; - if (getEffectChain_l(sessionId) != 0) { - result = EFFECT_SESSION; - } - - for (size_t i = 0; i < mTracks.size(); ++i) { - if (sessionId == mTracks[i]->sessionId()) { - result |= TRACK_SESSION; - break; - } - } - - return result; -} - -KeyedVector AudioFlinger::RecordThread::sessionIds() const -{ - KeyedVector ids; - Mutex::Autolock _l(mLock); - for (size_t j = 0; j < mTracks.size(); ++j) { - sp track = mTracks[j]; - int sessionId = track->sessionId(); - if (ids.indexOfKey(sessionId) < 0) { - ids.add(sessionId, true); - } - } - return ids; -} - -AudioFlinger::AudioStreamIn* AudioFlinger::RecordThread::clearInput() -{ - Mutex::Autolock _l(mLock); - AudioStreamIn *input = mInput; - mInput = NULL; - return input; -} - -// this method must always be called either with ThreadBase mLock held or inside the thread loop -audio_stream_t* AudioFlinger::RecordThread::stream() const -{ - if (mInput == NULL) { - return NULL; - } - return &mInput->stream->common; -} - - -// ---------------------------------------------------------------------------- - -audio_module_handle_t AudioFlinger::loadHwModule(const char *name) -{ - if (!settingsAllowed()) { - return 0; - } - Mutex::Autolock _l(mLock); - return loadHwModule_l(name); -} - -// loadHwModule_l() must be called with AudioFlinger::mLock held -audio_module_handle_t AudioFlinger::loadHwModule_l(const char *name) -{ - for (size_t i = 0; i < mAudioHwDevs.size(); i++) { - if (strncmp(mAudioHwDevs.valueAt(i)->moduleName(), name, strlen(name)) == 0) { - ALOGW("loadHwModule() module %s already loaded", name); - return mAudioHwDevs.keyAt(i); - } - } - - audio_hw_device_t *dev; - - int rc = load_audio_interface(name, &dev); - if (rc) { - ALOGI("loadHwModule() error %d loading module %s ", rc, name); - return 0; - } - - mHardwareStatus = AUDIO_HW_INIT; - rc = dev->init_check(dev); - mHardwareStatus = AUDIO_HW_IDLE; - if (rc) { - ALOGI("loadHwModule() init check error %d for module %s ", rc, name); - return 0; - } - - // Check and cache this HAL's level of support for master mute and master - // volume. If this is the first HAL opened, and it supports the get - // methods, use the initial values provided by the HAL as the current - // master mute and volume settings. 
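// Minimal standalone sketch of the capability probing described in the comment
// above: optional HAL entry points are tried once and the result is cached as
// flags. HalDevice, the flag name and the probe order are illustrative
// assumptions, not the audio_hw_device_t API.
#include <cstdio>

struct HalDevice {
    int (*get_master_volume)(float* volume);     // may be null when unsupported
    int (*set_master_volume)(float volume);      // may be null when unsupported
};

enum Capability {
    CAN_SET_MASTER_VOLUME = 1 << 0
};

// Read the HAL's initial master volume if available, then try set_master_volume
// once so an unsupported device is detected up front rather than on every call.
static unsigned probeCapabilities(const HalDevice& dev, float* masterVolume) {
    unsigned flags = 0;
    if (dev.get_master_volume != nullptr) {
        float v;
        if (dev.get_master_volume(&v) == 0) {
            *masterVolume = v;                   // adopt the HAL's initial master volume
        }
    }
    if (dev.set_master_volume != nullptr && dev.set_master_volume(*masterVolume) == 0) {
        flags |= CAN_SET_MASTER_VOLUME;
    }
    return flags;
}

static int fakeSetMasterVolume(float) { return 0; }

int main() {
    HalDevice dev = { nullptr, fakeSetMasterVolume };
    float masterVolume = 1.0f;
    std::printf("capability flags: %u\n", probeCapabilities(dev, &masterVolume));
    return 0;
}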
- - AudioHwDevice::Flags flags = static_cast(0); - { // scope for auto-lock pattern - AutoMutex lock(mHardwareLock); - - if (0 == mAudioHwDevs.size()) { - mHardwareStatus = AUDIO_HW_GET_MASTER_VOLUME; - if (NULL != dev->get_master_volume) { - float mv; - if (OK == dev->get_master_volume(dev, &mv)) { - mMasterVolume = mv; - } - } - - mHardwareStatus = AUDIO_HW_GET_MASTER_MUTE; - if (NULL != dev->get_master_mute) { - bool mm; - if (OK == dev->get_master_mute(dev, &mm)) { - mMasterMute = mm; - } - } - } - - mHardwareStatus = AUDIO_HW_SET_MASTER_VOLUME; - if ((NULL != dev->set_master_volume) && - (OK == dev->set_master_volume(dev, mMasterVolume))) { - flags = static_cast(flags | - AudioHwDevice::AHWD_CAN_SET_MASTER_VOLUME); - } - - mHardwareStatus = AUDIO_HW_SET_MASTER_MUTE; - if ((NULL != dev->set_master_mute) && - (OK == dev->set_master_mute(dev, mMasterMute))) { - flags = static_cast(flags | - AudioHwDevice::AHWD_CAN_SET_MASTER_MUTE); - } - - mHardwareStatus = AUDIO_HW_IDLE; - } - - audio_module_handle_t handle = nextUniqueId(); - mAudioHwDevs.add(handle, new AudioHwDevice(name, dev, flags)); - - ALOGI("loadHwModule() Loaded %s audio interface from %s (%s) handle %d", - name, dev->common.module->name, dev->common.module->id, handle); - - return handle; - -} - -// ---------------------------------------------------------------------------- - -uint32_t AudioFlinger::getPrimaryOutputSamplingRate() -{ - Mutex::Autolock _l(mLock); - PlaybackThread *thread = primaryPlaybackThread_l(); - return thread != NULL ? thread->sampleRate() : 0; -} - -size_t AudioFlinger::getPrimaryOutputFrameCount() -{ - Mutex::Autolock _l(mLock); - PlaybackThread *thread = primaryPlaybackThread_l(); - return thread != NULL ? thread->frameCountHAL() : 0; -} - -// ---------------------------------------------------------------------------- - -audio_io_handle_t AudioFlinger::openOutput(audio_module_handle_t module, - audio_devices_t *pDevices, - uint32_t *pSamplingRate, - audio_format_t *pFormat, - audio_channel_mask_t *pChannelMask, - uint32_t *pLatencyMs, - audio_output_flags_t flags) -{ - status_t status; - PlaybackThread *thread = NULL; - struct audio_config config = { - sample_rate: pSamplingRate ? *pSamplingRate : 0, - channel_mask: pChannelMask ? *pChannelMask : 0, - format: pFormat ? *pFormat : AUDIO_FORMAT_DEFAULT, - }; - audio_stream_out_t *outStream = NULL; - AudioHwDevice *outHwDev; - - ALOGV("openOutput(), module %d Device %x, SamplingRate %d, Format %d, Channels %x, flags %x", - module, - (pDevices != NULL) ? 
*pDevices : 0, - config.sample_rate, - config.format, - config.channel_mask, - flags); - - if (pDevices == NULL || *pDevices == 0) { - return 0; - } - - Mutex::Autolock _l(mLock); - - outHwDev = findSuitableHwDev_l(module, *pDevices); - if (outHwDev == NULL) - return 0; - - audio_hw_device_t *hwDevHal = outHwDev->hwDevice(); - audio_io_handle_t id = nextUniqueId(); - - mHardwareStatus = AUDIO_HW_OUTPUT_OPEN; - - status = hwDevHal->open_output_stream(hwDevHal, - id, - *pDevices, - (audio_output_flags_t)flags, - &config, - &outStream); - - mHardwareStatus = AUDIO_HW_IDLE; - ALOGV("openOutput() openOutputStream returned output %p, SamplingRate %d, Format %d, " - "Channels %x, status %d", - outStream, - config.sample_rate, - config.format, - config.channel_mask, - status); - - if (status == NO_ERROR && outStream != NULL) { - AudioStreamOut *output = new AudioStreamOut(outHwDev, outStream); - - if ((flags & AUDIO_OUTPUT_FLAG_DIRECT) || - (config.format != AUDIO_FORMAT_PCM_16_BIT) || - (config.channel_mask != AUDIO_CHANNEL_OUT_STEREO)) { - thread = new DirectOutputThread(this, output, id, *pDevices); - ALOGV("openOutput() created direct output: ID %d thread %p", id, thread); - } else { - thread = new MixerThread(this, output, id, *pDevices); - ALOGV("openOutput() created mixer output: ID %d thread %p", id, thread); - } - mPlaybackThreads.add(id, thread); - - if (pSamplingRate != NULL) *pSamplingRate = config.sample_rate; - if (pFormat != NULL) *pFormat = config.format; - if (pChannelMask != NULL) *pChannelMask = config.channel_mask; - if (pLatencyMs != NULL) *pLatencyMs = thread->latency(); - - // notify client processes of the new output creation - thread->audioConfigChanged_l(AudioSystem::OUTPUT_OPENED); - - // the first primary output opened designates the primary hw device - if ((mPrimaryHardwareDev == NULL) && (flags & AUDIO_OUTPUT_FLAG_PRIMARY)) { - ALOGI("Using module %d has the primary audio interface", module); - mPrimaryHardwareDev = outHwDev; - - AutoMutex lock(mHardwareLock); - mHardwareStatus = AUDIO_HW_SET_MODE; - hwDevHal->set_mode(hwDevHal, mMode); - mHardwareStatus = AUDIO_HW_IDLE; - } - return id; - } - - return 0; -} - -audio_io_handle_t AudioFlinger::openDuplicateOutput(audio_io_handle_t output1, - audio_io_handle_t output2) -{ - Mutex::Autolock _l(mLock); - MixerThread *thread1 = checkMixerThread_l(output1); - MixerThread *thread2 = checkMixerThread_l(output2); - - if (thread1 == NULL || thread2 == NULL) { - ALOGW("openDuplicateOutput() wrong output mixer type for output %d or %d", output1, - output2); - return 0; - } - - audio_io_handle_t id = nextUniqueId(); - DuplicatingThread *thread = new DuplicatingThread(this, thread1, id); - thread->addOutputTrack(thread2); - mPlaybackThreads.add(id, thread); - // notify client processes of the new output creation - thread->audioConfigChanged_l(AudioSystem::OUTPUT_OPENED); - return id; -} - -status_t AudioFlinger::closeOutput(audio_io_handle_t output) -{ - return closeOutput_nonvirtual(output); -} - -status_t AudioFlinger::closeOutput_nonvirtual(audio_io_handle_t output) -{ - // keep strong reference on the playback thread so that - // it is not destroyed while exit() is executed - sp thread; - { - Mutex::Autolock _l(mLock); - thread = checkPlaybackThread_l(output); - if (thread == NULL) { - return BAD_VALUE; - } - - ALOGV("closeOutput() %d", output); - - if (thread->type() == ThreadBase::MIXER) { - for (size_t i = 0; i < mPlaybackThreads.size(); i++) { - if (mPlaybackThreads.valueAt(i)->type() == ThreadBase::DUPLICATING) { - 
DuplicatingThread *dupThread = - (DuplicatingThread *)mPlaybackThreads.valueAt(i).get(); - dupThread->removeOutputTrack((MixerThread *)thread.get()); - } - } - } - audioConfigChanged_l(AudioSystem::OUTPUT_CLOSED, output, NULL); - mPlaybackThreads.removeItem(output); - } - thread->exit(); - // The thread entity (active unit of execution) is no longer running here, - // but the ThreadBase container still exists. - - if (thread->type() != ThreadBase::DUPLICATING) { - AudioStreamOut *out = thread->clearOutput(); - ALOG_ASSERT(out != NULL, "out shouldn't be NULL"); - // from now on thread->mOutput is NULL - out->hwDev()->close_output_stream(out->hwDev(), out->stream); - delete out; - } - return NO_ERROR; -} - -status_t AudioFlinger::suspendOutput(audio_io_handle_t output) -{ - Mutex::Autolock _l(mLock); - PlaybackThread *thread = checkPlaybackThread_l(output); - - if (thread == NULL) { - return BAD_VALUE; - } - - ALOGV("suspendOutput() %d", output); - thread->suspend(); - - return NO_ERROR; -} - -status_t AudioFlinger::restoreOutput(audio_io_handle_t output) -{ - Mutex::Autolock _l(mLock); - PlaybackThread *thread = checkPlaybackThread_l(output); - - if (thread == NULL) { - return BAD_VALUE; - } - - ALOGV("restoreOutput() %d", output); - - thread->restore(); - - return NO_ERROR; -} - -audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, - audio_devices_t *pDevices, - uint32_t *pSamplingRate, - audio_format_t *pFormat, - audio_channel_mask_t *pChannelMask) -{ - status_t status; - RecordThread *thread = NULL; - struct audio_config config = { - sample_rate: pSamplingRate ? *pSamplingRate : 0, - channel_mask: pChannelMask ? *pChannelMask : 0, - format: pFormat ? *pFormat : AUDIO_FORMAT_DEFAULT, - }; - uint32_t reqSamplingRate = config.sample_rate; - audio_format_t reqFormat = config.format; - audio_channel_mask_t reqChannels = config.channel_mask; - audio_stream_in_t *inStream = NULL; - AudioHwDevice *inHwDev; - - if (pDevices == NULL || *pDevices == 0) { - return 0; - } - - Mutex::Autolock _l(mLock); - - inHwDev = findSuitableHwDev_l(module, *pDevices); - if (inHwDev == NULL) - return 0; - - audio_hw_device_t *inHwHal = inHwDev->hwDevice(); - audio_io_handle_t id = nextUniqueId(); - - status = inHwHal->open_input_stream(inHwHal, id, *pDevices, &config, - &inStream); - ALOGV("openInput() openInputStream returned input %p, SamplingRate %d, Format %d, Channels %x, " - "status %d", - inStream, - config.sample_rate, - config.format, - config.channel_mask, - status); - - // If the input could not be opened with the requested parameters and we can handle the - // conversion internally, try to open again with the proposed parameters. The AudioFlinger can - // resample the input and do mono to stereo or stereo to mono conversions on 16 bit PCM inputs. 
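// Minimal standalone sketch of the fallback described in the comment above: try the
// requested input configuration, and if the HAL rejects it but proposes something the
// software path can convert (16-bit PCM, at most twice the requested rate, mono or
// stereo on both sides), reopen once with the proposed configuration. Config and
// openStream() are illustrative assumptions, not the audio HAL API.
#include <cstdio>
#include <functional>

struct Config {
    unsigned sampleRate;
    unsigned channelCount;
    bool pcm16;
};

static bool canConvert(const Config& requested, const Config& proposed) {
    return requested.pcm16 && proposed.pcm16 &&
           proposed.sampleRate <= 2 * requested.sampleRate &&
           proposed.channelCount <= 2 && requested.channelCount <= 2;
}

// openStream() may rewrite 'config' with the parameters the HAL would accept.
static bool openWithFallback(const std::function<bool(Config&)>& openStream,
                             const Config& requested) {
    Config config = requested;
    if (openStream(config)) {
        return true;
    }
    return canConvert(requested, config) && openStream(config);
}

int main() {
    const Config requested = { 44100, 1, true };
    bool opened = openWithFallback([](Config& c) {
        if (c.sampleRate != 48000) {             // pretend the HAL only supports 48 kHz stereo
            c = Config{ 48000, 2, true };
            return false;
        }
        return true;
    }, requested);
    std::printf("opened=%d\n", opened);
    return 0;
}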
- if (status == BAD_VALUE && - reqFormat == config.format && config.format == AUDIO_FORMAT_PCM_16_BIT && - (config.sample_rate <= 2 * reqSamplingRate) && - (popcount(config.channel_mask) <= FCC_2) && (popcount(reqChannels) <= FCC_2)) { - ALOGV("openInput() reopening with proposed sampling rate and channel mask"); - inStream = NULL; - status = inHwHal->open_input_stream(inHwHal, id, *pDevices, &config, &inStream); - } - - if (status == NO_ERROR && inStream != NULL) { - - // Try to re-use most recently used Pipe to archive a copy of input for dumpsys, - // or (re-)create if current Pipe is idle and does not match the new format - sp teeSink; -#ifdef TEE_SINK_INPUT_FRAMES - enum { - TEE_SINK_NO, // don't copy input - TEE_SINK_NEW, // copy input using a new pipe - TEE_SINK_OLD, // copy input using an existing pipe - } kind; - NBAIO_Format format = Format_from_SR_C(inStream->common.get_sample_rate(&inStream->common), - popcount(inStream->common.get_channels(&inStream->common))); - if (format == Format_Invalid) { - kind = TEE_SINK_NO; - } else if (mRecordTeeSink == 0) { - kind = TEE_SINK_NEW; - } else if (mRecordTeeSink->getStrongCount() != 1) { - kind = TEE_SINK_NO; - } else if (format == mRecordTeeSink->format()) { - kind = TEE_SINK_OLD; - } else { - kind = TEE_SINK_NEW; - } - switch (kind) { - case TEE_SINK_NEW: { - Pipe *pipe = new Pipe(TEE_SINK_INPUT_FRAMES, format); - size_t numCounterOffers = 0; - const NBAIO_Format offers[1] = {format}; - ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers); - ALOG_ASSERT(index == 0); - PipeReader *pipeReader = new PipeReader(*pipe); - numCounterOffers = 0; - index = pipeReader->negotiate(offers, 1, NULL, numCounterOffers); - ALOG_ASSERT(index == 0); - mRecordTeeSink = pipe; - mRecordTeeSource = pipeReader; - teeSink = pipe; - } - break; - case TEE_SINK_OLD: - teeSink = mRecordTeeSink; - break; - case TEE_SINK_NO: - default: - break; - } -#endif - AudioStreamIn *input = new AudioStreamIn(inHwDev, inStream); - - // Start record thread - // RecorThread require both input and output device indication to forward to audio - // pre processing modules - audio_devices_t device = (*pDevices) | primaryOutputDevice_l(); - - thread = new RecordThread(this, - input, - reqSamplingRate, - reqChannels, - id, - device, teeSink); - mRecordThreads.add(id, thread); - ALOGV("openInput() created record thread: ID %d thread %p", id, thread); - if (pSamplingRate != NULL) *pSamplingRate = reqSamplingRate; - if (pFormat != NULL) *pFormat = config.format; - if (pChannelMask != NULL) *pChannelMask = reqChannels; - - // notify client processes of the new input creation - thread->audioConfigChanged_l(AudioSystem::INPUT_OPENED); - return id; - } - - return 0; -} - -status_t AudioFlinger::closeInput(audio_io_handle_t input) -{ - return closeInput_nonvirtual(input); -} - -status_t AudioFlinger::closeInput_nonvirtual(audio_io_handle_t input) -{ - // keep strong reference on the record thread so that - // it is not destroyed while exit() is executed - sp thread; - { - Mutex::Autolock _l(mLock); - thread = checkRecordThread_l(input); - if (thread == 0) { - return BAD_VALUE; - } - - ALOGV("closeInput() %d", input); - audioConfigChanged_l(AudioSystem::INPUT_CLOSED, input, NULL); - mRecordThreads.removeItem(input); - } - thread->exit(); - // The thread entity (active unit of execution) is no longer running here, - // but the ThreadBase container still exists. 
- - AudioStreamIn *in = thread->clearInput(); - ALOG_ASSERT(in != NULL, "in shouldn't be NULL"); - // from now on thread->mInput is NULL - in->hwDev()->close_input_stream(in->hwDev(), in->stream); - delete in; - - return NO_ERROR; -} - -status_t AudioFlinger::setStreamOutput(audio_stream_type_t stream, audio_io_handle_t output) -{ - Mutex::Autolock _l(mLock); - ALOGV("setStreamOutput() stream %d to output %d", stream, output); - - for (size_t i = 0; i < mPlaybackThreads.size(); i++) { - PlaybackThread *thread = mPlaybackThreads.valueAt(i).get(); - thread->invalidateTracks(stream); - } - - return NO_ERROR; -} - - -int AudioFlinger::newAudioSessionId() -{ - return nextUniqueId(); -} - -void AudioFlinger::acquireAudioSessionId(int audioSession) -{ - Mutex::Autolock _l(mLock); - pid_t caller = IPCThreadState::self()->getCallingPid(); - ALOGV("acquiring %d from %d", audioSession, caller); - size_t num = mAudioSessionRefs.size(); - for (size_t i = 0; i< num; i++) { - AudioSessionRef *ref = mAudioSessionRefs.editItemAt(i); - if (ref->mSessionid == audioSession && ref->mPid == caller) { - ref->mCnt++; - ALOGV(" incremented refcount to %d", ref->mCnt); - return; - } - } - mAudioSessionRefs.push(new AudioSessionRef(audioSession, caller)); - ALOGV(" added new entry for %d", audioSession); -} - -void AudioFlinger::releaseAudioSessionId(int audioSession) -{ - Mutex::Autolock _l(mLock); - pid_t caller = IPCThreadState::self()->getCallingPid(); - ALOGV("releasing %d from %d", audioSession, caller); - size_t num = mAudioSessionRefs.size(); - for (size_t i = 0; i< num; i++) { - AudioSessionRef *ref = mAudioSessionRefs.itemAt(i); - if (ref->mSessionid == audioSession && ref->mPid == caller) { - ref->mCnt--; - ALOGV(" decremented refcount to %d", ref->mCnt); - if (ref->mCnt == 0) { - mAudioSessionRefs.removeAt(i); - delete ref; - purgeStaleEffects_l(); - } - return; - } - } - ALOGW("session id %d not found for pid %d", audioSession, caller); -} - -void AudioFlinger::purgeStaleEffects_l() { - - ALOGV("purging stale effects"); - - Vector< sp > chains; - - for (size_t i = 0; i < mPlaybackThreads.size(); i++) { - sp t = mPlaybackThreads.valueAt(i); - for (size_t j = 0; j < t->mEffectChains.size(); j++) { - sp ec = t->mEffectChains[j]; - if (ec->sessionId() > AUDIO_SESSION_OUTPUT_MIX) { - chains.push(ec); - } - } - } - for (size_t i = 0; i < mRecordThreads.size(); i++) { - sp t = mRecordThreads.valueAt(i); - for (size_t j = 0; j < t->mEffectChains.size(); j++) { - sp ec = t->mEffectChains[j]; - chains.push(ec); - } - } - - for (size_t i = 0; i < chains.size(); i++) { - sp ec = chains[i]; - int sessionid = ec->sessionId(); - sp t = ec->mThread.promote(); - if (t == 0) { - continue; - } - size_t numsessionrefs = mAudioSessionRefs.size(); - bool found = false; - for (size_t k = 0; k < numsessionrefs; k++) { - AudioSessionRef *ref = mAudioSessionRefs.itemAt(k); - if (ref->mSessionid == sessionid) { - ALOGV(" session %d still exists for %d with %d refs", - sessionid, ref->mPid, ref->mCnt); - found = true; - break; - } - } - if (!found) { - Mutex::Autolock _l (t->mLock); - // remove all effects from the chain - while (ec->mEffects.size()) { - sp effect = ec->mEffects[0]; - effect->unPin(); - t->removeEffect_l(effect); - if (effect->purgeHandles()) { - t->checkSuspendOnEffectEnabled_l(effect, false, effect->sessionId()); - } - AudioSystem::unregisterEffect(effect->id()); - } - } - } - return; -} - -// checkPlaybackThread_l() must be called with AudioFlinger::mLock held -AudioFlinger::PlaybackThread 
*AudioFlinger::checkPlaybackThread_l(audio_io_handle_t output) const -{ - return mPlaybackThreads.valueFor(output).get(); -} - -// checkMixerThread_l() must be called with AudioFlinger::mLock held -AudioFlinger::MixerThread *AudioFlinger::checkMixerThread_l(audio_io_handle_t output) const -{ - PlaybackThread *thread = checkPlaybackThread_l(output); - return thread != NULL && thread->type() != ThreadBase::DIRECT ? (MixerThread *) thread : NULL; -} - -// checkRecordThread_l() must be called with AudioFlinger::mLock held -AudioFlinger::RecordThread *AudioFlinger::checkRecordThread_l(audio_io_handle_t input) const -{ - return mRecordThreads.valueFor(input).get(); -} - -uint32_t AudioFlinger::nextUniqueId() -{ - return android_atomic_inc(&mNextUniqueId); -} - -AudioFlinger::PlaybackThread *AudioFlinger::primaryPlaybackThread_l() const -{ - for (size_t i = 0; i < mPlaybackThreads.size(); i++) { - PlaybackThread *thread = mPlaybackThreads.valueAt(i).get(); - AudioStreamOut *output = thread->getOutput(); - if (output != NULL && output->audioHwDev == mPrimaryHardwareDev) { - return thread; - } - } - return NULL; -} - -audio_devices_t AudioFlinger::primaryOutputDevice_l() const -{ - PlaybackThread *thread = primaryPlaybackThread_l(); - - if (thread == NULL) { - return 0; - } - - return thread->outDevice(); -} - -sp AudioFlinger::createSyncEvent(AudioSystem::sync_event_t type, - int triggerSession, - int listenerSession, - sync_event_callback_t callBack, - void *cookie) -{ - Mutex::Autolock _l(mLock); - - sp event = new SyncEvent(type, triggerSession, listenerSession, callBack, cookie); - status_t playStatus = NAME_NOT_FOUND; - status_t recStatus = NAME_NOT_FOUND; - for (size_t i = 0; i < mPlaybackThreads.size(); i++) { - playStatus = mPlaybackThreads.valueAt(i)->setSyncEvent(event); - if (playStatus == NO_ERROR) { - return event; - } - } - for (size_t i = 0; i < mRecordThreads.size(); i++) { - recStatus = mRecordThreads.valueAt(i)->setSyncEvent(event); - if (recStatus == NO_ERROR) { - return event; - } - } - if (playStatus == NAME_NOT_FOUND || recStatus == NAME_NOT_FOUND) { - mPendingSyncEvents.add(event); - } else { - ALOGV("createSyncEvent() invalid event %d", event->type()); - event.clear(); - } - return event; -} - -// ---------------------------------------------------------------------------- -// Effect management -// ---------------------------------------------------------------------------- - - -status_t AudioFlinger::queryNumberEffects(uint32_t *numEffects) const -{ - Mutex::Autolock _l(mLock); - return EffectQueryNumberEffects(numEffects); -} - -status_t AudioFlinger::queryEffect(uint32_t index, effect_descriptor_t *descriptor) const -{ - Mutex::Autolock _l(mLock); - return EffectQueryEffect(index, descriptor); -} - -status_t AudioFlinger::getEffectDescriptor(const effect_uuid_t *pUuid, - effect_descriptor_t *descriptor) const -{ - Mutex::Autolock _l(mLock); - return EffectGetDescriptor(pUuid, descriptor); -} - - -sp AudioFlinger::createEffect(pid_t pid, - effect_descriptor_t *pDesc, - const sp& effectClient, - int32_t priority, - audio_io_handle_t io, - int sessionId, - status_t *status, - int *id, - int *enabled) -{ - status_t lStatus = NO_ERROR; - sp handle; - effect_descriptor_t desc; - - ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d", - pid, effectClient.get(), priority, sessionId, io); - - if (pDesc == NULL) { - lStatus = BAD_VALUE; - goto Exit; - } - - // check audio settings permission for global effects - if (sessionId == AUDIO_SESSION_OUTPUT_MIX 
&& !settingsAllowed()) { - lStatus = PERMISSION_DENIED; - goto Exit; - } - - // Session AUDIO_SESSION_OUTPUT_STAGE is reserved for output stage effects - // that can only be created by audio policy manager (running in same process) - if (sessionId == AUDIO_SESSION_OUTPUT_STAGE && getpid_cached != pid) { - lStatus = PERMISSION_DENIED; - goto Exit; - } - - if (io == 0) { - if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) { - // output must be specified by AudioPolicyManager when using session - // AUDIO_SESSION_OUTPUT_STAGE - lStatus = BAD_VALUE; - goto Exit; - } else if (sessionId == AUDIO_SESSION_OUTPUT_MIX) { - // if the output returned by getOutputForEffect() is removed before we lock the - // mutex below, the call to checkPlaybackThread_l(io) below will detect it - // and we will exit safely - io = AudioSystem::getOutputForEffect(&desc); - } - } - - { - Mutex::Autolock _l(mLock); - - - if (!EffectIsNullUuid(&pDesc->uuid)) { - // if uuid is specified, request effect descriptor - lStatus = EffectGetDescriptor(&pDesc->uuid, &desc); - if (lStatus < 0) { - ALOGW("createEffect() error %d from EffectGetDescriptor", lStatus); - goto Exit; - } - } else { - // if uuid is not specified, look for an available implementation - // of the required type in effect factory - if (EffectIsNullUuid(&pDesc->type)) { - ALOGW("createEffect() no effect type"); - lStatus = BAD_VALUE; - goto Exit; - } - uint32_t numEffects = 0; - effect_descriptor_t d; - d.flags = 0; // prevent compiler warning - bool found = false; - - lStatus = EffectQueryNumberEffects(&numEffects); - if (lStatus < 0) { - ALOGW("createEffect() error %d from EffectQueryNumberEffects", lStatus); - goto Exit; - } - for (uint32_t i = 0; i < numEffects; i++) { - lStatus = EffectQueryEffect(i, &desc); - if (lStatus < 0) { - ALOGW("createEffect() error %d from EffectQueryEffect", lStatus); - continue; - } - if (memcmp(&desc.type, &pDesc->type, sizeof(effect_uuid_t)) == 0) { - // If matching type found save effect descriptor. If the session is - // 0 and the effect is not auxiliary, continue enumeration in case - // an auxiliary version of this effect type is available - found = true; - d = desc; - if (sessionId != AUDIO_SESSION_OUTPUT_MIX || - (desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { - break; - } - } - } - if (!found) { - lStatus = BAD_VALUE; - ALOGW("createEffect() effect not found"); - goto Exit; - } - // For same effect type, chose auxiliary version over insert version if - // connect to output mix (Compliance to OpenSL ES) - if (sessionId == AUDIO_SESSION_OUTPUT_MIX && - (d.flags & EFFECT_FLAG_TYPE_MASK) != EFFECT_FLAG_TYPE_AUXILIARY) { - desc = d; - } - } - - // Do not allow auxiliary effects on a session different from 0 (output mix) - if (sessionId != AUDIO_SESSION_OUTPUT_MIX && - (desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { - lStatus = INVALID_OPERATION; - goto Exit; - } - - // check recording permission for visualizer - if ((memcmp(&desc.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) && - !recordingAllowed()) { - lStatus = PERMISSION_DENIED; - goto Exit; - } - - // return effect descriptor - *pDesc = desc; - - // If output is not specified try to find a matching audio session ID in one of the - // output threads. - // If output is 0 here, sessionId is neither SESSION_OUTPUT_STAGE nor SESSION_OUTPUT_MIX - // because of code checking output when entering the function. 
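// Minimal standalone sketch of the lookup described in the comment above: find the
// output that already hosts the requested audio session, otherwise fall back to the
// first known output (the effect chain is moved later when a matching track appears).
// The io-handle -> session-list map is an illustrative stand-in for the playback and
// record thread vectors.
#include <cstdio>
#include <map>
#include <vector>

static int findIoForSession(const std::map<int, std::vector<int>>& threads, int sessionId) {
    for (const auto& entry : threads) {
        for (int s : entry.second) {
            if (s == sessionId) {
                return entry.first;              // a thread already hosts this session
            }
        }
    }
    return threads.empty() ? 0 : threads.begin()->first;  // default to the first output
}

int main() {
    const std::map<int, std::vector<int>> threads = { { 10, { 7, 8 } }, { 11, { 9 } } };
    std::printf("io for session 9:  %d\n", findIoForSession(threads, 9));   // 11
    std::printf("io for session 99: %d\n", findIoForSession(threads, 99));  // falls back to 10
    return 0;
}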
- // Note: io is never 0 when creating an effect on an input - if (io == 0) { - // look for the thread where the specified audio session is present - for (size_t i = 0; i < mPlaybackThreads.size(); i++) { - if (mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId) != 0) { - io = mPlaybackThreads.keyAt(i); - break; - } - } - if (io == 0) { - for (size_t i = 0; i < mRecordThreads.size(); i++) { - if (mRecordThreads.valueAt(i)->hasAudioSession(sessionId) != 0) { - io = mRecordThreads.keyAt(i); - break; - } - } - } - // If no output thread contains the requested session ID, default to - // first output. The effect chain will be moved to the correct output - // thread when a track with the same session ID is created - if (io == 0 && mPlaybackThreads.size()) { - io = mPlaybackThreads.keyAt(0); - } - ALOGV("createEffect() got io %d for effect %s", io, desc.name); - } - ThreadBase *thread = checkRecordThread_l(io); - if (thread == NULL) { - thread = checkPlaybackThread_l(io); - if (thread == NULL) { - ALOGE("createEffect() unknown output thread"); - lStatus = BAD_VALUE; - goto Exit; - } - } - - sp client = registerPid_l(pid); - - // create effect on selected output thread - handle = thread->createEffect_l(client, effectClient, priority, sessionId, - &desc, enabled, &lStatus); - if (handle != 0 && id != NULL) { - *id = handle->id(); - } - } - -Exit: - if (status != NULL) { - *status = lStatus; - } - return handle; -} - -status_t AudioFlinger::moveEffects(int sessionId, audio_io_handle_t srcOutput, - audio_io_handle_t dstOutput) -{ - ALOGV("moveEffects() session %d, srcOutput %d, dstOutput %d", - sessionId, srcOutput, dstOutput); - Mutex::Autolock _l(mLock); - if (srcOutput == dstOutput) { - ALOGW("moveEffects() same dst and src outputs %d", dstOutput); - return NO_ERROR; - } - PlaybackThread *srcThread = checkPlaybackThread_l(srcOutput); - if (srcThread == NULL) { - ALOGW("moveEffects() bad srcOutput %d", srcOutput); - return BAD_VALUE; - } - PlaybackThread *dstThread = checkPlaybackThread_l(dstOutput); - if (dstThread == NULL) { - ALOGW("moveEffects() bad dstOutput %d", dstOutput); - return BAD_VALUE; - } - - Mutex::Autolock _dl(dstThread->mLock); - Mutex::Autolock _sl(srcThread->mLock); - moveEffectChain_l(sessionId, srcThread, dstThread, false); - - return NO_ERROR; -} - -// moveEffectChain_l must be called with both srcThread and dstThread mLocks held -status_t AudioFlinger::moveEffectChain_l(int sessionId, - AudioFlinger::PlaybackThread *srcThread, - AudioFlinger::PlaybackThread *dstThread, - bool reRegister) -{ - ALOGV("moveEffectChain_l() session %d from thread %p to thread %p", - sessionId, srcThread, dstThread); - - sp chain = srcThread->getEffectChain_l(sessionId); - if (chain == 0) { - ALOGW("moveEffectChain_l() effect chain for session %d not on source thread %p", - sessionId, srcThread); - return INVALID_OPERATION; - } - - // remove chain first. This is useful only if reconfiguring effect chain on same output thread, - // so that a new chain is created with correct parameters when first effect is added. This is - // otherwise unnecessary as removeEffect_l() will remove the chain when last effect is - // removed. 
- srcThread->removeEffectChain_l(chain); - - // transfer all effects one by one so that new effect chain is created on new thread with - // correct buffer sizes and audio parameters and effect engines reconfigured accordingly - audio_io_handle_t dstOutput = dstThread->id(); - sp dstChain; - uint32_t strategy = 0; // prevent compiler warning - sp effect = chain->getEffectFromId_l(0); - while (effect != 0) { - srcThread->removeEffect_l(effect); - dstThread->addEffect_l(effect); - // removeEffect_l() has stopped the effect if it was active so it must be restarted - if (effect->state() == EffectModule::ACTIVE || - effect->state() == EffectModule::STOPPING) { - effect->start(); - } - // if the move request is not received from audio policy manager, the effect must be - // re-registered with the new strategy and output - if (dstChain == 0) { - dstChain = effect->chain().promote(); - if (dstChain == 0) { - ALOGW("moveEffectChain_l() cannot get chain from effect %p", effect.get()); - srcThread->addEffect_l(effect); - return NO_INIT; - } - strategy = dstChain->strategy(); - } - if (reRegister) { - AudioSystem::unregisterEffect(effect->id()); - AudioSystem::registerEffect(&effect->desc(), - dstOutput, - strategy, - sessionId, - effect->id()); - } - effect = chain->getEffectFromId_l(0); - } - - return NO_ERROR; -} - - -// PlaybackThread::createEffect_l() must be called with AudioFlinger::mLock held -sp AudioFlinger::ThreadBase::createEffect_l( - const sp& client, - const sp& effectClient, - int32_t priority, - int sessionId, - effect_descriptor_t *desc, - int *enabled, - status_t *status - ) -{ - sp effect; - sp handle; - status_t lStatus; - sp chain; - bool chainCreated = false; - bool effectCreated = false; - bool effectRegistered = false; - - lStatus = initCheck(); - if (lStatus != NO_ERROR) { - ALOGW("createEffect_l() Audio driver not initialized."); - goto Exit; - } - - // Do not allow effects with session ID 0 on direct output or duplicating threads - // TODO: add rule for hw accelerated effects on direct outputs with non PCM format - if (sessionId == AUDIO_SESSION_OUTPUT_MIX && mType != MIXER) { - ALOGW("createEffect_l() Cannot add auxiliary effect %s to session %d", - desc->name, sessionId); - lStatus = BAD_VALUE; - goto Exit; - } - // Only Pre processor effects are allowed on input threads and only on input threads - if ((mType == RECORD) != ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC)) { - ALOGW("createEffect_l() effect %s (flags %08x) created on wrong thread type %d", - desc->name, desc->flags, mType); - lStatus = BAD_VALUE; - goto Exit; - } - - ALOGV("createEffect_l() thread %p effect %s on session %d", this, desc->name, sessionId); - - { // scope for mLock - Mutex::Autolock _l(mLock); - - // check for existing effect chain with the requested audio session - chain = getEffectChain_l(sessionId); - if (chain == 0) { - // create a new chain for this session - ALOGV("createEffect_l() new effect chain for session %d", sessionId); - chain = new EffectChain(this, sessionId); - addEffectChain_l(chain); - chain->setStrategy(getStrategyForSession_l(sessionId)); - chainCreated = true; - } else { - effect = chain->getEffectFromDesc_l(desc); - } - - ALOGV("createEffect_l() got effect %p on chain %p", effect.get(), chain.get()); - - if (effect == 0) { - int id = mAudioFlinger->nextUniqueId(); - // Check CPU and memory usage - lStatus = AudioSystem::registerEffect(desc, mId, chain->strategy(), sessionId, id); - if (lStatus != NO_ERROR) { - goto Exit; - } - effectRegistered = 
true; - // create a new effect module if none present in the chain - effect = new EffectModule(this, chain, desc, id, sessionId); - lStatus = effect->status(); - if (lStatus != NO_ERROR) { - goto Exit; - } - lStatus = chain->addEffect_l(effect); - if (lStatus != NO_ERROR) { - goto Exit; - } - effectCreated = true; - - effect->setDevice(mOutDevice); - effect->setDevice(mInDevice); - effect->setMode(mAudioFlinger->getMode()); - effect->setAudioSource(mAudioSource); - } - // create effect handle and connect it to effect module - handle = new EffectHandle(effect, client, effectClient, priority); - lStatus = effect->addHandle(handle.get()); - if (enabled != NULL) { - *enabled = (int)effect->isEnabled(); - } - } - -Exit: - if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) { - Mutex::Autolock _l(mLock); - if (effectCreated) { - chain->removeEffect_l(effect); - } - if (effectRegistered) { - AudioSystem::unregisterEffect(effect->id()); - } - if (chainCreated) { - removeEffectChain_l(chain); - } - handle.clear(); - } - - if (status != NULL) { - *status = lStatus; - } - return handle; -} - -sp AudioFlinger::ThreadBase::getEffect(int sessionId, int effectId) -{ - Mutex::Autolock _l(mLock); - return getEffect_l(sessionId, effectId); -} - -sp AudioFlinger::ThreadBase::getEffect_l(int sessionId, int effectId) -{ - sp chain = getEffectChain_l(sessionId); - return chain != 0 ? chain->getEffectFromId_l(effectId) : 0; -} - -// PlaybackThread::addEffect_l() must be called with AudioFlinger::mLock and -// PlaybackThread::mLock held -status_t AudioFlinger::ThreadBase::addEffect_l(const sp& effect) -{ - // check for existing effect chain with the requested audio session - int sessionId = effect->sessionId(); - sp chain = getEffectChain_l(sessionId); - bool chainCreated = false; - - if (chain == 0) { - // create a new chain for this session - ALOGV("addEffect_l() new effect chain for session %d", sessionId); - chain = new EffectChain(this, sessionId); - addEffectChain_l(chain); - chain->setStrategy(getStrategyForSession_l(sessionId)); - chainCreated = true; - } - ALOGV("addEffect_l() %p chain %p effect %p", this, chain.get(), effect.get()); - - if (chain->getEffectFromId_l(effect->id()) != 0) { - ALOGW("addEffect_l() %p effect %s already present in chain %p", - this, effect->desc().name, chain.get()); - return BAD_VALUE; - } - - status_t status = chain->addEffect_l(effect); - if (status != NO_ERROR) { - if (chainCreated) { - removeEffectChain_l(chain); - } - return status; - } - - effect->setDevice(mOutDevice); - effect->setDevice(mInDevice); - effect->setMode(mAudioFlinger->getMode()); - effect->setAudioSource(mAudioSource); - return NO_ERROR; -} - -void AudioFlinger::ThreadBase::removeEffect_l(const sp& effect) { - - ALOGV("removeEffect_l() %p effect %p", this, effect.get()); - effect_descriptor_t desc = effect->desc(); - if ((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { - detachAuxEffect_l(effect->id()); - } - - sp chain = effect->chain().promote(); - if (chain != 0) { - // remove effect chain if removing last effect - if (chain->removeEffect_l(effect) == 0) { - removeEffectChain_l(chain); - } - } else { - ALOGW("removeEffect_l() %p cannot promote chain for effect %p", this, effect.get()); - } -} - -void AudioFlinger::ThreadBase::lockEffectChains_l( - Vector< sp >& effectChains) -{ - effectChains = mEffectChains; - for (size_t i = 0; i < mEffectChains.size(); i++) { - mEffectChains[i]->lock(); - } -} - -void AudioFlinger::ThreadBase::unlockEffectChains( - const Vector< sp >& 
effectChains) -{ - for (size_t i = 0; i < effectChains.size(); i++) { - effectChains[i]->unlock(); - } -} - -sp AudioFlinger::ThreadBase::getEffectChain(int sessionId) -{ - Mutex::Autolock _l(mLock); - return getEffectChain_l(sessionId); -} - -sp AudioFlinger::ThreadBase::getEffectChain_l(int sessionId) const -{ - size_t size = mEffectChains.size(); - for (size_t i = 0; i < size; i++) { - if (mEffectChains[i]->sessionId() == sessionId) { - return mEffectChains[i]; - } - } - return 0; -} - -void AudioFlinger::ThreadBase::setMode(audio_mode_t mode) -{ - Mutex::Autolock _l(mLock); - size_t size = mEffectChains.size(); - for (size_t i = 0; i < size; i++) { - mEffectChains[i]->setMode_l(mode); - } -} - -void AudioFlinger::ThreadBase::disconnectEffect(const sp& effect, - EffectHandle *handle, - bool unpinIfLast) { - - Mutex::Autolock _l(mLock); - ALOGV("disconnectEffect() %p effect %p", this, effect.get()); - // delete the effect module if removing last handle on it - if (effect->removeHandle(handle) == 0) { - if (!effect->isPinned() || unpinIfLast) { - removeEffect_l(effect); - AudioSystem::unregisterEffect(effect->id()); - } - } -} - -status_t AudioFlinger::PlaybackThread::addEffectChain_l(const sp& chain) -{ - int session = chain->sessionId(); - int16_t *buffer = mMixBuffer; - bool ownsBuffer = false; - - ALOGV("addEffectChain_l() %p on thread %p for session %d", chain.get(), this, session); - if (session > 0) { - // Only one effect chain can be present in direct output thread and it uses - // the mix buffer as input - if (mType != DIRECT) { - size_t numSamples = mNormalFrameCount * mChannelCount; - buffer = new int16_t[numSamples]; - memset(buffer, 0, numSamples * sizeof(int16_t)); - ALOGV("addEffectChain_l() creating new input buffer %p session %d", buffer, session); - ownsBuffer = true; - } - - // Attach all tracks with same session ID to this chain. - for (size_t i = 0; i < mTracks.size(); ++i) { - sp track = mTracks[i]; - if (session == track->sessionId()) { - ALOGV("addEffectChain_l() track->setMainBuffer track %p buffer %p", track.get(), - buffer); - track->setMainBuffer(buffer); - chain->incTrackCnt(); - } - } - - // indicate all active tracks in the chain - for (size_t i = 0 ; i < mActiveTracks.size() ; ++i) { - sp track = mActiveTracks[i].promote(); - if (track == 0) { - continue; - } - if (session == track->sessionId()) { - ALOGV("addEffectChain_l() activating track %p on session %d", track.get(), session); - chain->incActiveTrackCnt(); - } - } - } - - chain->setInBuffer(buffer, ownsBuffer); - chain->setOutBuffer(mMixBuffer); - // Effect chain for session AUDIO_SESSION_OUTPUT_STAGE is inserted at end of effect - // chains list in order to be processed last as it contains output stage effects - // Effect chain for session AUDIO_SESSION_OUTPUT_MIX is inserted before - // session AUDIO_SESSION_OUTPUT_STAGE to be processed - // after track specific effects and before output stage - // It is therefore mandatory that AUDIO_SESSION_OUTPUT_MIX == 0 and - // that AUDIO_SESSION_OUTPUT_STAGE < AUDIO_SESSION_OUTPUT_MIX - // Effect chain for other sessions are inserted at beginning of effect - // chains list to be processed before output mix effects. 
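In other words, the chain list is kept sorted by decreasing session ID, so processing it front to back visits the track-specific chains first, then the output mix (session 0), then the output stage last. A minimal sketch of that insertion rule, with plain ints standing in for chains; the only assumption is that the output stage session is some negative constant, as the comment requires:

    #include <vector>
    #include <cstdio>

    constexpr int kSessionOutputMix   = 0;   // stated by the comment above
    constexpr int kSessionOutputStage = -1;  // assumed: only needs to be < kSessionOutputMix

    // Keep the list sorted by descending session ID: insert before the first
    // element whose session is smaller than the new one.
    void insertChain(std::vector<int>& chains, int session) {
        size_t i = 0;
        while (i < chains.size() && chains[i] >= session) ++i;
        chains.insert(chains.begin() + i, session);
    }

    int main() {
        std::vector<int> chains;
        insertChain(chains, kSessionOutputMix);
        insertChain(chains, 42);                  // a track-specific session
        insertChain(chains, kSessionOutputStage);
        insertChain(chains, 17);
        for (int s : chains) std::printf("%d ", s);  // 42 17 0 -1: stage effects run last
        std::printf("\n");
    }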
Relative order between other - // sessions is not important - size_t size = mEffectChains.size(); - size_t i = 0; - for (i = 0; i < size; i++) { - if (mEffectChains[i]->sessionId() < session) { - break; - } - } - mEffectChains.insertAt(chain, i); - checkSuspendOnAddEffectChain_l(chain); - - return NO_ERROR; -} - -size_t AudioFlinger::PlaybackThread::removeEffectChain_l(const sp& chain) -{ - int session = chain->sessionId(); - - ALOGV("removeEffectChain_l() %p from thread %p for session %d", chain.get(), this, session); - - for (size_t i = 0; i < mEffectChains.size(); i++) { - if (chain == mEffectChains[i]) { - mEffectChains.removeAt(i); - // detach all active tracks from the chain - for (size_t i = 0 ; i < mActiveTracks.size() ; ++i) { - sp track = mActiveTracks[i].promote(); - if (track == 0) { - continue; - } - if (session == track->sessionId()) { - ALOGV("removeEffectChain_l(): stopping track on chain %p for session Id: %d", - chain.get(), session); - chain->decActiveTrackCnt(); - } - } - - // detach all tracks with same session ID from this chain - for (size_t i = 0; i < mTracks.size(); ++i) { - sp track = mTracks[i]; - if (session == track->sessionId()) { - track->setMainBuffer(mMixBuffer); - chain->decTrackCnt(); - } - } - break; - } - } - return mEffectChains.size(); -} - -status_t AudioFlinger::PlaybackThread::attachAuxEffect( - const sp track, int EffectId) -{ - Mutex::Autolock _l(mLock); - return attachAuxEffect_l(track, EffectId); -} - -status_t AudioFlinger::PlaybackThread::attachAuxEffect_l( - const sp track, int EffectId) -{ - status_t status = NO_ERROR; - - if (EffectId == 0) { - track->setAuxBuffer(0, NULL); - } else { - // Auxiliary effects are always in audio session AUDIO_SESSION_OUTPUT_MIX - sp effect = getEffect_l(AUDIO_SESSION_OUTPUT_MIX, EffectId); - if (effect != 0) { - if ((effect->desc().flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { - track->setAuxBuffer(EffectId, (int32_t *)effect->inBuffer()); - } else { - status = INVALID_OPERATION; - } - } else { - status = BAD_VALUE; - } - } - return status; -} - -void AudioFlinger::PlaybackThread::detachAuxEffect_l(int effectId) -{ - for (size_t i = 0; i < mTracks.size(); ++i) { - sp track = mTracks[i]; - if (track->auxEffectId() == effectId) { - attachAuxEffect_l(track, 0); - } - } -} - -status_t AudioFlinger::RecordThread::addEffectChain_l(const sp& chain) -{ - // only one chain per input thread - if (mEffectChains.size() != 0) { - return INVALID_OPERATION; - } - ALOGV("addEffectChain_l() %p on thread %p", chain.get(), this); - - chain->setInBuffer(NULL); - chain->setOutBuffer(NULL); - - checkSuspendOnAddEffectChain_l(chain); - - mEffectChains.add(chain); - - return NO_ERROR; -} - -size_t AudioFlinger::RecordThread::removeEffectChain_l(const sp& chain) -{ - ALOGV("removeEffectChain_l() %p from thread %p", chain.get(), this); - ALOGW_IF(mEffectChains.size() != 1, - "removeEffectChain_l() %p invalid chain size %d on thread %p", - chain.get(), mEffectChains.size(), this); - if (mEffectChains.size() == 1) { - mEffectChains.removeAt(0); - } - return 0; -} - -// ---------------------------------------------------------------------------- -// EffectModule implementation -// ---------------------------------------------------------------------------- - -#undef LOG_TAG -#define LOG_TAG "AudioFlinger::EffectModule" - -AudioFlinger::EffectModule::EffectModule(ThreadBase *thread, - const wp& chain, - effect_descriptor_t *desc, - int id, - int sessionId) - : mPinned(sessionId > AUDIO_SESSION_OUTPUT_MIX), - 
mThread(thread), mChain(chain), mId(id), mSessionId(sessionId), - mDescriptor(*desc), - // mConfig is set by configure() and not used before then - mEffectInterface(NULL), - mStatus(NO_INIT), mState(IDLE), - // mMaxDisableWaitCnt is set by configure() and not used before then - // mDisableWaitCnt is set by process() and updateState() and not used before then - mSuspended(false) -{ - ALOGV("Constructor %p", this); - int lStatus; - - // create effect engine from effect factory - mStatus = EffectCreate(&desc->uuid, sessionId, thread->id(), &mEffectInterface); - - if (mStatus != NO_ERROR) { - return; - } - lStatus = init(); - if (lStatus < 0) { - mStatus = lStatus; - goto Error; - } - - ALOGV("Constructor success name %s, Interface %p", mDescriptor.name, mEffectInterface); - return; -Error: - EffectRelease(mEffectInterface); - mEffectInterface = NULL; - ALOGV("Constructor Error %d", mStatus); -} - -AudioFlinger::EffectModule::~EffectModule() -{ - ALOGV("Destructor %p", this); - if (mEffectInterface != NULL) { - if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC || - (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) { - sp thread = mThread.promote(); - if (thread != 0) { - audio_stream_t *stream = thread->stream(); - if (stream != NULL) { - stream->remove_audio_effect(stream, mEffectInterface); - } - } - } - // release effect engine - EffectRelease(mEffectInterface); - } -} - -status_t AudioFlinger::EffectModule::addHandle(EffectHandle *handle) -{ - status_t status; - - Mutex::Autolock _l(mLock); - int priority = handle->priority(); - size_t size = mHandles.size(); - EffectHandle *controlHandle = NULL; - size_t i; - for (i = 0; i < size; i++) { - EffectHandle *h = mHandles[i]; - if (h == NULL || h->destroyed_l()) { - continue; - } - // first non destroyed handle is considered in control - if (controlHandle == NULL) - controlHandle = h; - if (h->priority() <= priority) { - break; - } - } - // if inserted in first place, move effect control from previous owner to this handle - if (i == 0) { - bool enabled = false; - if (controlHandle != NULL) { - enabled = controlHandle->enabled(); - controlHandle->setControl(false/*hasControl*/, true /*signal*/, enabled /*enabled*/); - } - handle->setControl(true /*hasControl*/, false /*signal*/, enabled /*enabled*/); - status = NO_ERROR; - } else { - status = ALREADY_EXISTS; - } - ALOGV("addHandle() %p added handle %p in position %d", this, handle, i); - mHandles.insertAt(handle, i); - return status; -} - -size_t AudioFlinger::EffectModule::removeHandle(EffectHandle *handle) -{ - Mutex::Autolock _l(mLock); - size_t size = mHandles.size(); - size_t i; - for (i = 0; i < size; i++) { - if (mHandles[i] == handle) { - break; - } - } - if (i == size) { - return size; - } - ALOGV("removeHandle() %p removed handle %p in position %d", this, handle, i); - - mHandles.removeAt(i); - // if removed from first place, move effect control from this handle to next in line - if (i == 0) { - EffectHandle *h = controlHandle_l(); - if (h != NULL) { - h->setControl(true /*hasControl*/, true /*signal*/ , handle->enabled() /*enabled*/); - } - } - - // Prevent calls to process() and other functions on effect interface from now on. - // The effect engine will be released by the destructor when the last strong reference on - // this object is released which can happen after next process is called. 
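The handle bookkeeping in addHandle()/removeHandle() above boils down to a list sorted by descending priority whose front element owns the control interface, with control handed over whenever the front changes. A standalone sketch under that reading; the Handle struct is illustrative, not the real EffectHandle:

    #include <vector>
    #include <string>
    #include <cstdio>

    struct Handle { std::string name; int priority; bool hasControl = false; };

    // Insert sorted by descending priority; the handle at index 0 has control.
    void addHandle(std::vector<Handle>& handles, Handle h) {
        size_t i = 0;
        while (i < handles.size() && handles[i].priority > h.priority) ++i;
        if (i == 0) {                         // new front handle takes control
            if (!handles.empty()) handles[0].hasControl = false;
            h.hasControl = true;
        }
        handles.insert(handles.begin() + i, h);
    }

    void removeHandle(std::vector<Handle>& handles, const std::string& name) {
        for (size_t i = 0; i < handles.size(); ++i) {
            if (handles[i].name != name) continue;
            bool hadControl = (i == 0);
            handles.erase(handles.begin() + i);
            if (hadControl && !handles.empty()) handles[0].hasControl = true;
            break;
        }
    }

    int main() {
        std::vector<Handle> h;
        addHandle(h, {"music app", 0});
        addHandle(h, {"system ui", 100});   // higher priority: takes control
        removeHandle(h, "system ui");       // control falls back to "music app"
        std::printf("%s has control: %d\n", h[0].name.c_str(), h[0].hasControl);
    }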
- if (mHandles.size() == 0 && !mPinned) { - mState = DESTROYED; - } - - return mHandles.size(); -} - -// must be called with EffectModule::mLock held -AudioFlinger::EffectHandle *AudioFlinger::EffectModule::controlHandle_l() -{ - // the first valid handle in the list has control over the module - for (size_t i = 0; i < mHandles.size(); i++) { - EffectHandle *h = mHandles[i]; - if (h != NULL && !h->destroyed_l()) { - return h; - } - } - - return NULL; -} - -size_t AudioFlinger::EffectModule::disconnect(EffectHandle *handle, bool unpinIfLast) -{ - ALOGV("disconnect() %p handle %p", this, handle); - // keep a strong reference on this EffectModule to avoid calling the - // destructor before we exit - sp keep(this); - { - sp thread = mThread.promote(); - if (thread != 0) { - thread->disconnectEffect(keep, handle, unpinIfLast); - } - } - return mHandles.size(); -} - -void AudioFlinger::EffectModule::updateState() { - Mutex::Autolock _l(mLock); - - switch (mState) { - case RESTART: - reset_l(); - // FALL THROUGH - - case STARTING: - // clear auxiliary effect input buffer for next accumulation - if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { - memset(mConfig.inputCfg.buffer.raw, - 0, - mConfig.inputCfg.buffer.frameCount*sizeof(int32_t)); - } - start_l(); - mState = ACTIVE; - break; - case STOPPING: - stop_l(); - mDisableWaitCnt = mMaxDisableWaitCnt; - mState = STOPPED; - break; - case STOPPED: - // mDisableWaitCnt is forced to 1 by process() when the engine indicates the end of the - // turn off sequence. - if (--mDisableWaitCnt == 0) { - reset_l(); - mState = IDLE; - } - break; - default: //IDLE , ACTIVE, DESTROYED - break; - } -} - -void AudioFlinger::EffectModule::process() -{ - Mutex::Autolock _l(mLock); - - if (mState == DESTROYED || mEffectInterface == NULL || - mConfig.inputCfg.buffer.raw == NULL || - mConfig.outputCfg.buffer.raw == NULL) { - return; - } - - if (isProcessEnabled()) { - // do 32 bit to 16 bit conversion for auxiliary effect input buffer - if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { - ditherAndClamp(mConfig.inputCfg.buffer.s32, - mConfig.inputCfg.buffer.s32, - mConfig.inputCfg.buffer.frameCount/2); - } - - // do the actual processing in the effect engine - int ret = (*mEffectInterface)->process(mEffectInterface, - &mConfig.inputCfg.buffer, - &mConfig.outputCfg.buffer); - - // force transition to IDLE state when engine is ready - if (mState == STOPPED && ret == -ENODATA) { - mDisableWaitCnt = 1; - } - - // clear auxiliary effect input buffer for next accumulation - if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { - memset(mConfig.inputCfg.buffer.raw, 0, - mConfig.inputCfg.buffer.frameCount*sizeof(int32_t)); - } - } else if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_INSERT && - mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) { - // If an insert effect is idle and input buffer is different from output buffer, - // accumulate input onto output - sp chain = mChain.promote(); - if (chain != 0 && chain->activeTrackCnt() != 0) { - size_t frameCnt = mConfig.inputCfg.buffer.frameCount * 2; //always stereo here - int16_t *in = mConfig.inputCfg.buffer.s16; - int16_t *out = mConfig.outputCfg.buffer.s16; - for (size_t i = 0; i < frameCnt; i++) { - out[i] = clamp16((int32_t)out[i] + (int32_t)in[i]); - } - } - } -} - -void AudioFlinger::EffectModule::reset_l() -{ - if (mEffectInterface == NULL) { - return; - } - 
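The STOPPED handling in updateState() and process() above keeps the engine running for an effect "tail" of at most mMaxDisableWaitCnt output buffers, and the engine can cut that short by returning -ENODATA. A rough model of the countdown; the 10 s / 48 kHz / 1024-frame figures are assumed purely for illustration:

    #include <cstdio>
    #include <cstdint>

    // Buffers needed to cover a disable tail of tailMs milliseconds, mirroring the
    // (time * rate) / (1000 * frameCount) calculation used for mMaxDisableWaitCnt.
    uint32_t buffersForTail(uint32_t tailMs, uint32_t sampleRate, uint32_t frameCount) {
        return (tailMs * sampleRate) / (1000 * frameCount);
    }

    int main() {
        uint32_t maxWait = buffersForTail(10000, 48000, 1024);   // 468 buffers
        std::printf("max disable wait: %u buffers\n", maxWait);

        // STOPPED state: keep processing until the countdown expires, unless the
        // engine reports -ENODATA earlier, which forces the next cycle to go IDLE.
        uint32_t disableWait = maxWait;
        bool engineDrained = false;
        for (uint32_t cycle = 0; disableWait > 0; ++cycle) {
            if (engineDrained) disableWait = 1;     // mirror of the -ENODATA shortcut
            if (--disableWait == 0) std::printf("idle after %u cycles\n", cycle + 1);
            if (cycle == 3) engineDrained = true;   // pretend the tail finished early
        }
    }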
(*mEffectInterface)->command(mEffectInterface, EFFECT_CMD_RESET, 0, NULL, 0, NULL); -} - -status_t AudioFlinger::EffectModule::configure() -{ - if (mEffectInterface == NULL) { - return NO_INIT; - } - - sp thread = mThread.promote(); - if (thread == 0) { - return DEAD_OBJECT; - } - - // TODO: handle configuration of effects replacing track process - audio_channel_mask_t channelMask = thread->channelMask(); - - if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { - mConfig.inputCfg.channels = AUDIO_CHANNEL_OUT_MONO; - } else { - mConfig.inputCfg.channels = channelMask; - } - mConfig.outputCfg.channels = channelMask; - mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT; - mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT; - mConfig.inputCfg.samplingRate = thread->sampleRate(); - mConfig.outputCfg.samplingRate = mConfig.inputCfg.samplingRate; - mConfig.inputCfg.bufferProvider.cookie = NULL; - mConfig.inputCfg.bufferProvider.getBuffer = NULL; - mConfig.inputCfg.bufferProvider.releaseBuffer = NULL; - mConfig.outputCfg.bufferProvider.cookie = NULL; - mConfig.outputCfg.bufferProvider.getBuffer = NULL; - mConfig.outputCfg.bufferProvider.releaseBuffer = NULL; - mConfig.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ; - // Insert effect: - // - in session AUDIO_SESSION_OUTPUT_MIX or AUDIO_SESSION_OUTPUT_STAGE, - // always overwrites output buffer: input buffer == output buffer - // - in other sessions: - // last effect in the chain accumulates in output buffer: input buffer != output buffer - // other effect: overwrites output buffer: input buffer == output buffer - // Auxiliary effect: - // accumulates in output buffer: input buffer != output buffer - // Therefore: accumulate <=> input buffer != output buffer - if (mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) { - mConfig.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE; - } else { - mConfig.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_WRITE; - } - mConfig.inputCfg.mask = EFFECT_CONFIG_ALL; - mConfig.outputCfg.mask = EFFECT_CONFIG_ALL; - mConfig.inputCfg.buffer.frameCount = thread->frameCount(); - mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount; - - ALOGV("configure() %p thread %p buffer %p framecount %d", - this, thread.get(), mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount); - - status_t cmdStatus; - uint32_t size = sizeof(int); - status_t status = (*mEffectInterface)->command(mEffectInterface, - EFFECT_CMD_SET_CONFIG, - sizeof(effect_config_t), - &mConfig, - &size, - &cmdStatus); - if (status == 0) { - status = cmdStatus; - } - - if (status == 0 && - (memcmp(&mDescriptor.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0)) { - uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2]; - effect_param_t *p = (effect_param_t *)buf32; - - p->psize = sizeof(uint32_t); - p->vsize = sizeof(uint32_t); - size = sizeof(int); - *(int32_t *)p->data = VISUALIZER_PARAM_LATENCY; - - uint32_t latency = 0; - PlaybackThread *pbt = thread->mAudioFlinger->checkPlaybackThread_l(thread->mId); - if (pbt != NULL) { - latency = pbt->latency_l(); - } - - *((int32_t *)p->data + 1)= latency; - (*mEffectInterface)->command(mEffectInterface, - EFFECT_CMD_SET_PARAM, - sizeof(effect_param_t) + 8, - &buf32, - &size, - &cmdStatus); - } - - mMaxDisableWaitCnt = (MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate) / - (1000 * mConfig.outputCfg.buffer.frameCount); - - return status; -} - -status_t AudioFlinger::EffectModule::init() -{ - Mutex::Autolock _l(mLock); - if 
(mEffectInterface == NULL) { - return NO_INIT; - } - status_t cmdStatus; - uint32_t size = sizeof(status_t); - status_t status = (*mEffectInterface)->command(mEffectInterface, - EFFECT_CMD_INIT, - 0, - NULL, - &size, - &cmdStatus); - if (status == 0) { - status = cmdStatus; - } - return status; -} - -status_t AudioFlinger::EffectModule::start() -{ - Mutex::Autolock _l(mLock); - return start_l(); -} - -status_t AudioFlinger::EffectModule::start_l() -{ - if (mEffectInterface == NULL) { - return NO_INIT; - } - status_t cmdStatus; - uint32_t size = sizeof(status_t); - status_t status = (*mEffectInterface)->command(mEffectInterface, - EFFECT_CMD_ENABLE, - 0, - NULL, - &size, - &cmdStatus); - if (status == 0) { - status = cmdStatus; - } - if (status == 0 && - ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC || - (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC)) { - sp thread = mThread.promote(); - if (thread != 0) { - audio_stream_t *stream = thread->stream(); - if (stream != NULL) { - stream->add_audio_effect(stream, mEffectInterface); - } - } - } - return status; -} - -status_t AudioFlinger::EffectModule::stop() -{ - Mutex::Autolock _l(mLock); - return stop_l(); -} - -status_t AudioFlinger::EffectModule::stop_l() -{ - if (mEffectInterface == NULL) { - return NO_INIT; - } - status_t cmdStatus; - uint32_t size = sizeof(status_t); - status_t status = (*mEffectInterface)->command(mEffectInterface, - EFFECT_CMD_DISABLE, - 0, - NULL, - &size, - &cmdStatus); - if (status == 0) { - status = cmdStatus; - } - if (status == 0 && - ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC || - (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC)) { - sp thread = mThread.promote(); - if (thread != 0) { - audio_stream_t *stream = thread->stream(); - if (stream != NULL) { - stream->remove_audio_effect(stream, mEffectInterface); - } - } - } - return status; -} - -status_t AudioFlinger::EffectModule::command(uint32_t cmdCode, - uint32_t cmdSize, - void *pCmdData, - uint32_t *replySize, - void *pReplyData) -{ - Mutex::Autolock _l(mLock); - ALOGVV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface); - - if (mState == DESTROYED || mEffectInterface == NULL) { - return NO_INIT; - } - status_t status = (*mEffectInterface)->command(mEffectInterface, - cmdCode, - cmdSize, - pCmdData, - replySize, - pReplyData); - if (cmdCode != EFFECT_CMD_GET_PARAM && status == NO_ERROR) { - uint32_t size = (replySize == NULL) ? 
0 : *replySize; - for (size_t i = 1; i < mHandles.size(); i++) { - EffectHandle *h = mHandles[i]; - if (h != NULL && !h->destroyed_l()) { - h->commandExecuted(cmdCode, cmdSize, pCmdData, size, pReplyData); - } - } - } - return status; -} - -status_t AudioFlinger::EffectModule::setEnabled(bool enabled) -{ - Mutex::Autolock _l(mLock); - return setEnabled_l(enabled); -} - -// must be called with EffectModule::mLock held -status_t AudioFlinger::EffectModule::setEnabled_l(bool enabled) -{ - - ALOGV("setEnabled %p enabled %d", this, enabled); - - if (enabled != isEnabled()) { - status_t status = AudioSystem::setEffectEnabled(mId, enabled); - if (enabled && status != NO_ERROR) { - return status; - } - - switch (mState) { - // going from disabled to enabled - case IDLE: - mState = STARTING; - break; - case STOPPED: - mState = RESTART; - break; - case STOPPING: - mState = ACTIVE; - break; - - // going from enabled to disabled - case RESTART: - mState = STOPPED; - break; - case STARTING: - mState = IDLE; - break; - case ACTIVE: - mState = STOPPING; - break; - case DESTROYED: - return NO_ERROR; // simply ignore as we are being destroyed - } - for (size_t i = 1; i < mHandles.size(); i++) { - EffectHandle *h = mHandles[i]; - if (h != NULL && !h->destroyed_l()) { - h->setEnabled(enabled); - } - } - } - return NO_ERROR; -} - -bool AudioFlinger::EffectModule::isEnabled() const -{ - switch (mState) { - case RESTART: - case STARTING: - case ACTIVE: - return true; - case IDLE: - case STOPPING: - case STOPPED: - case DESTROYED: - default: - return false; - } -} - -bool AudioFlinger::EffectModule::isProcessEnabled() const -{ - switch (mState) { - case RESTART: - case ACTIVE: - case STOPPING: - case STOPPED: - return true; - case IDLE: - case STARTING: - case DESTROYED: - default: - return false; - } -} - -status_t AudioFlinger::EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller) -{ - Mutex::Autolock _l(mLock); - status_t status = NO_ERROR; - - // Send volume indication if EFFECT_FLAG_VOLUME_IND is set and read back altered volume - // if controller flag is set (Note that controller == TRUE => EFFECT_FLAG_VOLUME_CTRL set) - if (isProcessEnabled() && - ((mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_CTRL || - (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_IND)) { - status_t cmdStatus; - uint32_t volume[2]; - uint32_t *pVolume = NULL; - uint32_t size = sizeof(volume); - volume[0] = *left; - volume[1] = *right; - if (controller) { - pVolume = volume; - } - status = (*mEffectInterface)->command(mEffectInterface, - EFFECT_CMD_SET_VOLUME, - size, - volume, - &size, - pVolume); - if (controller && status == NO_ERROR && size == sizeof(volume)) { - *left = volume[0]; - *right = volume[1]; - } - } - return status; -} - -status_t AudioFlinger::EffectModule::setDevice(audio_devices_t device) -{ - if (device == AUDIO_DEVICE_NONE) { - return NO_ERROR; - } - - Mutex::Autolock _l(mLock); - status_t status = NO_ERROR; - if (device && (mDescriptor.flags & EFFECT_FLAG_DEVICE_MASK) == EFFECT_FLAG_DEVICE_IND) { - status_t cmdStatus; - uint32_t size = sizeof(status_t); - uint32_t cmd = audio_is_output_devices(device) ? 
EFFECT_CMD_SET_DEVICE : - EFFECT_CMD_SET_INPUT_DEVICE; - status = (*mEffectInterface)->command(mEffectInterface, - cmd, - sizeof(uint32_t), - &device, - &size, - &cmdStatus); - } - return status; -} - -status_t AudioFlinger::EffectModule::setMode(audio_mode_t mode) -{ - Mutex::Autolock _l(mLock); - status_t status = NO_ERROR; - if ((mDescriptor.flags & EFFECT_FLAG_AUDIO_MODE_MASK) == EFFECT_FLAG_AUDIO_MODE_IND) { - status_t cmdStatus; - uint32_t size = sizeof(status_t); - status = (*mEffectInterface)->command(mEffectInterface, - EFFECT_CMD_SET_AUDIO_MODE, - sizeof(audio_mode_t), - &mode, - &size, - &cmdStatus); - if (status == NO_ERROR) { - status = cmdStatus; - } - } - return status; -} - -status_t AudioFlinger::EffectModule::setAudioSource(audio_source_t source) -{ - Mutex::Autolock _l(mLock); - status_t status = NO_ERROR; - if ((mDescriptor.flags & EFFECT_FLAG_AUDIO_SOURCE_MASK) == EFFECT_FLAG_AUDIO_SOURCE_IND) { - uint32_t size = 0; - status = (*mEffectInterface)->command(mEffectInterface, - EFFECT_CMD_SET_AUDIO_SOURCE, - sizeof(audio_source_t), - &source, - &size, - NULL); - } - return status; -} - -void AudioFlinger::EffectModule::setSuspended(bool suspended) -{ - Mutex::Autolock _l(mLock); - mSuspended = suspended; -} - -bool AudioFlinger::EffectModule::suspended() const -{ - Mutex::Autolock _l(mLock); - return mSuspended; -} - -bool AudioFlinger::EffectModule::purgeHandles() -{ - bool enabled = false; - Mutex::Autolock _l(mLock); - for (size_t i = 0; i < mHandles.size(); i++) { - EffectHandle *handle = mHandles[i]; - if (handle != NULL && !handle->destroyed_l()) { - handle->effect().clear(); - if (handle->hasControl()) { - enabled = handle->enabled(); - } - } - } - return enabled; -} - -void AudioFlinger::EffectModule::dump(int fd, const Vector& args) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - snprintf(buffer, SIZE, "\tEffect ID %d:\n", mId); - result.append(buffer); - - bool locked = tryLock(mLock); - // failed to lock - AudioFlinger is probably deadlocked - if (!locked) { - result.append("\t\tCould not lock Fx mutex:\n"); - } - - result.append("\t\tSession Status State Engine:\n"); - snprintf(buffer, SIZE, "\t\t%05d %03d %03d 0x%08x\n", - mSessionId, mStatus, mState, (uint32_t)mEffectInterface); - result.append(buffer); - - result.append("\t\tDescriptor:\n"); - snprintf(buffer, SIZE, "\t\t- UUID: %08X-%04X-%04X-%04X-%02X%02X%02X%02X%02X%02X\n", - mDescriptor.uuid.timeLow, mDescriptor.uuid.timeMid, mDescriptor.uuid.timeHiAndVersion, - mDescriptor.uuid.clockSeq, mDescriptor.uuid.node[0], mDescriptor.uuid.node[1], - mDescriptor.uuid.node[2], - mDescriptor.uuid.node[3],mDescriptor.uuid.node[4],mDescriptor.uuid.node[5]); - result.append(buffer); - snprintf(buffer, SIZE, "\t\t- TYPE: %08X-%04X-%04X-%04X-%02X%02X%02X%02X%02X%02X\n", - mDescriptor.type.timeLow, mDescriptor.type.timeMid, - mDescriptor.type.timeHiAndVersion, - mDescriptor.type.clockSeq, mDescriptor.type.node[0], mDescriptor.type.node[1], - mDescriptor.type.node[2], - mDescriptor.type.node[3],mDescriptor.type.node[4],mDescriptor.type.node[5]); - result.append(buffer); - snprintf(buffer, SIZE, "\t\t- apiVersion: %08X\n\t\t- flags: %08X\n", - mDescriptor.apiVersion, - mDescriptor.flags); - result.append(buffer); - snprintf(buffer, SIZE, "\t\t- name: %s\n", - mDescriptor.name); - result.append(buffer); - snprintf(buffer, SIZE, "\t\t- implementor: %s\n", - mDescriptor.implementor); - result.append(buffer); - - result.append("\t\t- Input configuration:\n"); - result.append("\t\t\tBuffer Frames Smp 
rate Channels Format\n"); - snprintf(buffer, SIZE, "\t\t\t0x%08x %05d %05d %08x %d\n", - (uint32_t)mConfig.inputCfg.buffer.raw, - mConfig.inputCfg.buffer.frameCount, - mConfig.inputCfg.samplingRate, - mConfig.inputCfg.channels, - mConfig.inputCfg.format); - result.append(buffer); - - result.append("\t\t- Output configuration:\n"); - result.append("\t\t\tBuffer Frames Smp rate Channels Format\n"); - snprintf(buffer, SIZE, "\t\t\t0x%08x %05d %05d %08x %d\n", - (uint32_t)mConfig.outputCfg.buffer.raw, - mConfig.outputCfg.buffer.frameCount, - mConfig.outputCfg.samplingRate, - mConfig.outputCfg.channels, - mConfig.outputCfg.format); - result.append(buffer); - - snprintf(buffer, SIZE, "\t\t%d Clients:\n", mHandles.size()); - result.append(buffer); - result.append("\t\t\tPid Priority Ctrl Locked client server\n"); - for (size_t i = 0; i < mHandles.size(); ++i) { - EffectHandle *handle = mHandles[i]; - if (handle != NULL && !handle->destroyed_l()) { - handle->dump(buffer, SIZE); - result.append(buffer); - } - } - - result.append("\n"); - - write(fd, result.string(), result.length()); - - if (locked) { - mLock.unlock(); - } -} - -// ---------------------------------------------------------------------------- -// EffectHandle implementation -// ---------------------------------------------------------------------------- - -#undef LOG_TAG -#define LOG_TAG "AudioFlinger::EffectHandle" - -AudioFlinger::EffectHandle::EffectHandle(const sp& effect, - const sp& client, - const sp& effectClient, - int32_t priority) - : BnEffect(), - mEffect(effect), mEffectClient(effectClient), mClient(client), mCblk(NULL), - mPriority(priority), mHasControl(false), mEnabled(false), mDestroyed(false) -{ - ALOGV("constructor %p", this); - - if (client == 0) { - return; - } - int bufOffset = ((sizeof(effect_param_cblk_t) - 1) / sizeof(int) + 1) * sizeof(int); - mCblkMemory = client->heap()->allocate(EFFECT_PARAM_BUFFER_SIZE + bufOffset); - if (mCblkMemory != 0) { - mCblk = static_cast(mCblkMemory->pointer()); - - if (mCblk != NULL) { - new(mCblk) effect_param_cblk_t(); - mBuffer = (uint8_t *)mCblk + bufOffset; - } - } else { - ALOGE("not enough memory for Effect size=%u", EFFECT_PARAM_BUFFER_SIZE + - sizeof(effect_param_cblk_t)); - return; - } -} - -AudioFlinger::EffectHandle::~EffectHandle() -{ - ALOGV("Destructor %p", this); - - if (mEffect == 0) { - mDestroyed = true; - return; - } - mEffect->lock(); - mDestroyed = true; - mEffect->unlock(); - disconnect(false); -} - -status_t AudioFlinger::EffectHandle::enable() -{ - ALOGV("enable %p", this); - if (!mHasControl) { - return INVALID_OPERATION; - } - if (mEffect == 0) { - return DEAD_OBJECT; - } - - if (mEnabled) { - return NO_ERROR; - } - - mEnabled = true; - - sp thread = mEffect->thread().promote(); - if (thread != 0) { - thread->checkSuspendOnEffectEnabled(mEffect, true, mEffect->sessionId()); - } - - // checkSuspendOnEffectEnabled() can suspend this same effect when enabled - if (mEffect->suspended()) { - return NO_ERROR; - } - - status_t status = mEffect->setEnabled(true); - if (status != NO_ERROR) { - if (thread != 0) { - thread->checkSuspendOnEffectEnabled(mEffect, false, mEffect->sessionId()); - } - mEnabled = false; - } - return status; -} - -status_t AudioFlinger::EffectHandle::disable() -{ - ALOGV("disable %p", this); - if (!mHasControl) { - return INVALID_OPERATION; - } - if (mEffect == 0) { - return DEAD_OBJECT; - } - - if (!mEnabled) { - return NO_ERROR; - } - mEnabled = false; - - if (mEffect->suspended()) { - return NO_ERROR; - } - - status_t status = 
mEffect->setEnabled(false); - - sp thread = mEffect->thread().promote(); - if (thread != 0) { - thread->checkSuspendOnEffectEnabled(mEffect, false, mEffect->sessionId()); - } - - return status; -} - -void AudioFlinger::EffectHandle::disconnect() -{ - disconnect(true); -} - -void AudioFlinger::EffectHandle::disconnect(bool unpinIfLast) -{ - ALOGV("disconnect(%s)", unpinIfLast ? "true" : "false"); - if (mEffect == 0) { - return; - } - // restore suspended effects if the disconnected handle was enabled and the last one. - if ((mEffect->disconnect(this, unpinIfLast) == 0) && mEnabled) { - sp thread = mEffect->thread().promote(); - if (thread != 0) { - thread->checkSuspendOnEffectEnabled(mEffect, false, mEffect->sessionId()); - } - } - - // release sp on module => module destructor can be called now - mEffect.clear(); - if (mClient != 0) { - if (mCblk != NULL) { - // unlike ~TrackBase(), mCblk is never a local new, so don't delete - mCblk->~effect_param_cblk_t(); // destroy our shared-structure. - } - mCblkMemory.clear(); // free the shared memory before releasing the heap it belongs to - // Client destructor must run with AudioFlinger mutex locked - Mutex::Autolock _l(mClient->audioFlinger()->mLock); - mClient.clear(); - } -} - -status_t AudioFlinger::EffectHandle::command(uint32_t cmdCode, - uint32_t cmdSize, - void *pCmdData, - uint32_t *replySize, - void *pReplyData) -{ - ALOGVV("command(), cmdCode: %d, mHasControl: %d, mEffect: %p", - cmdCode, mHasControl, (mEffect == 0) ? 0 : mEffect.get()); - - // only get parameter command is permitted for applications not controlling the effect - if (!mHasControl && cmdCode != EFFECT_CMD_GET_PARAM) { - return INVALID_OPERATION; - } - if (mEffect == 0) { - return DEAD_OBJECT; - } - if (mClient == 0) { - return INVALID_OPERATION; - } - - // handle commands that are not forwarded transparently to effect engine - if (cmdCode == EFFECT_CMD_SET_PARAM_COMMIT) { - // No need to trylock() here as this function is executed in the binder thread serving a - // particular client process: no risk to block the whole media server process or mixer - // threads if we are stuck here - Mutex::Autolock _l(mCblk->lock); - if (mCblk->clientIndex > EFFECT_PARAM_BUFFER_SIZE || - mCblk->serverIndex > EFFECT_PARAM_BUFFER_SIZE) { - mCblk->serverIndex = 0; - mCblk->clientIndex = 0; - return BAD_VALUE; - } - status_t status = NO_ERROR; - while (mCblk->serverIndex < mCblk->clientIndex) { - int reply; - uint32_t rsize = sizeof(int); - int *p = (int *)(mBuffer + mCblk->serverIndex); - int size = *p++; - if (((uint8_t *)p + size) > mBuffer + mCblk->clientIndex) { - ALOGW("command(): invalid parameter block size"); - break; - } - effect_param_t *param = (effect_param_t *)p; - if (param->psize == 0 || param->vsize == 0) { - ALOGW("command(): null parameter or value size"); - mCblk->serverIndex += size; - continue; - } - uint32_t psize = sizeof(effect_param_t) + - ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) + - param->vsize; - status_t ret = mEffect->command(EFFECT_CMD_SET_PARAM, - psize, - p, - &rsize, - &reply); - // stop at first error encountered - if (ret != NO_ERROR) { - status = ret; - *(int *)pReplyData = reply; - break; - } else if (reply != NO_ERROR) { - *(int *)pReplyData = reply; - break; - } - mCblk->serverIndex += size; - } - mCblk->serverIndex = 0; - mCblk->clientIndex = 0; - return status; - } else if (cmdCode == EFFECT_CMD_ENABLE) { - *(int *)pReplyData = NO_ERROR; - return enable(); - } else if (cmdCode == EFFECT_CMD_DISABLE) { - *(int *)pReplyData = NO_ERROR; - 
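The EFFECT_CMD_SET_PARAM_COMMIT branch above drains a size-prefixed queue of parameter records that the client wrote into shared memory, applying each one in order and resetting both indices afterwards. A simplified standalone model of that pattern; the record layout here is invented for the sketch and is not the real effect_param_cblk_t layout:

    #include <cstdint>
    #include <cstring>
    #include <vector>
    #include <cstdio>

    // Illustrative layout: each queued parameter is [ int32 size ][ size bytes of payload ],
    // appended by the client and drained front to back on "commit".
    struct Queue { std::vector<uint8_t> buf; size_t clientIndex = 0; };

    void queueParam(Queue& q, const void* payload, int32_t size) {
        size_t old = q.buf.size();
        q.buf.resize(old + sizeof(int32_t) + size);
        std::memcpy(&q.buf[old], &size, sizeof(int32_t));
        std::memcpy(&q.buf[old + sizeof(int32_t)], payload, size);
        q.clientIndex = q.buf.size();
    }

    void commit(Queue& q) {
        size_t serverIndex = 0;
        while (serverIndex < q.clientIndex) {
            int32_t size;
            std::memcpy(&size, &q.buf[serverIndex], sizeof(int32_t));
            if (serverIndex + sizeof(int32_t) + size > q.clientIndex) {
                std::printf("invalid parameter block size\n");   // same guard as above
                break;
            }
            std::printf("apply %d-byte parameter\n", size);      // SET_PARAM would go here
            serverIndex += sizeof(int32_t) + size;
        }
        q.buf.clear();
        q.clientIndex = 0;    // both indices reset once the queue is drained
    }

    int main() {
        Queue q;
        int32_t fakeParam[2] = {1, 750};   // made-up parameter/value pair
        queueParam(q, fakeParam, sizeof(fakeParam));
        commit(q);                         // prints: apply 8-byte parameter
    }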
return disable(); - } - - return mEffect->command(cmdCode, cmdSize, pCmdData, replySize, pReplyData); -} - -void AudioFlinger::EffectHandle::setControl(bool hasControl, bool signal, bool enabled) -{ - ALOGV("setControl %p control %d", this, hasControl); - - mHasControl = hasControl; - mEnabled = enabled; - - if (signal && mEffectClient != 0) { - mEffectClient->controlStatusChanged(hasControl); - } -} - -void AudioFlinger::EffectHandle::commandExecuted(uint32_t cmdCode, - uint32_t cmdSize, - void *pCmdData, - uint32_t replySize, - void *pReplyData) -{ - if (mEffectClient != 0) { - mEffectClient->commandExecuted(cmdCode, cmdSize, pCmdData, replySize, pReplyData); - } -} - - - -void AudioFlinger::EffectHandle::setEnabled(bool enabled) -{ - if (mEffectClient != 0) { - mEffectClient->enableStatusChanged(enabled); - } -} - -status_t AudioFlinger::EffectHandle::onTransact( - uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) -{ - return BnEffect::onTransact(code, data, reply, flags); -} - - -void AudioFlinger::EffectHandle::dump(char* buffer, size_t size) -{ - bool locked = mCblk != NULL && tryLock(mCblk->lock); - - snprintf(buffer, size, "\t\t\t%05d %05d %01u %01u %05u %05u\n", - (mClient == 0) ? getpid_cached : mClient->pid(), - mPriority, - mHasControl, - !locked, - mCblk ? mCblk->clientIndex : 0, - mCblk ? mCblk->serverIndex : 0 - ); - - if (locked) { - mCblk->lock.unlock(); - } -} - -#undef LOG_TAG -#define LOG_TAG "AudioFlinger::EffectChain" - -AudioFlinger::EffectChain::EffectChain(ThreadBase *thread, - int sessionId) - : mThread(thread), mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0), mTailBufferCount(0), - mOwnInBuffer(false), mVolumeCtrlIdx(-1), mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX), - mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX) -{ - mStrategy = AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC); - if (thread == NULL) { - return; - } - mMaxTailBuffers = ((kProcessTailDurationMs * thread->sampleRate()) / 1000) / - thread->frameCount(); -} - -AudioFlinger::EffectChain::~EffectChain() -{ - if (mOwnInBuffer) { - delete mInBuffer; - } - -} - -// getEffectFromDesc_l() must be called with ThreadBase::mLock held -sp AudioFlinger::EffectChain::getEffectFromDesc_l( - effect_descriptor_t *descriptor) -{ - size_t size = mEffects.size(); - - for (size_t i = 0; i < size; i++) { - if (memcmp(&mEffects[i]->desc().uuid, &descriptor->uuid, sizeof(effect_uuid_t)) == 0) { - return mEffects[i]; - } - } - return 0; -} - -// getEffectFromId_l() must be called with ThreadBase::mLock held -sp AudioFlinger::EffectChain::getEffectFromId_l(int id) -{ - size_t size = mEffects.size(); - - for (size_t i = 0; i < size; i++) { - // by convention, return first effect if id provided is 0 (0 is never a valid id) - if (id == 0 || mEffects[i]->id() == id) { - return mEffects[i]; - } - } - return 0; -} - -// getEffectFromType_l() must be called with ThreadBase::mLock held -sp AudioFlinger::EffectChain::getEffectFromType_l( - const effect_uuid_t *type) -{ - size_t size = mEffects.size(); - - for (size_t i = 0; i < size; i++) { - if (memcmp(&mEffects[i]->desc().type, type, sizeof(effect_uuid_t)) == 0) { - return mEffects[i]; - } - } - return 0; -} - -void AudioFlinger::EffectChain::clearInputBuffer() -{ - Mutex::Autolock _l(mLock); - sp thread = mThread.promote(); - if (thread == 0) { - ALOGW("clearInputBuffer(): cannot promote mixer thread"); - return; - } - clearInputBuffer_l(thread); -} - -// Must be called with EffectChain::mLock locked -void 
AudioFlinger::EffectChain::clearInputBuffer_l(sp thread) -{ - size_t numSamples = thread->frameCount() * thread->channelCount(); - memset(mInBuffer, 0, numSamples * sizeof(int16_t)); - -} - -// Must be called with EffectChain::mLock locked -void AudioFlinger::EffectChain::process_l() -{ - sp thread = mThread.promote(); - if (thread == 0) { - ALOGW("process_l(): cannot promote mixer thread"); - return; - } - bool isGlobalSession = (mSessionId == AUDIO_SESSION_OUTPUT_MIX) || - (mSessionId == AUDIO_SESSION_OUTPUT_STAGE); - // always process effects unless no more tracks are on the session and the effect tail - // has been rendered - bool doProcess = true; - if (!isGlobalSession) { - bool tracksOnSession = (trackCnt() != 0); - - if (!tracksOnSession && mTailBufferCount == 0) { - doProcess = false; - } - - if (activeTrackCnt() == 0) { - // if no track is active and the effect tail has not been rendered, - // the input buffer must be cleared here as the mixer process will not do it - if (tracksOnSession || mTailBufferCount > 0) { - clearInputBuffer_l(thread); - if (mTailBufferCount > 0) { - mTailBufferCount--; - } - } - } - } - - size_t size = mEffects.size(); - if (doProcess) { - for (size_t i = 0; i < size; i++) { - mEffects[i]->process(); - } - } - for (size_t i = 0; i < size; i++) { - mEffects[i]->updateState(); - } -} - -// addEffect_l() must be called with PlaybackThread::mLock held -status_t AudioFlinger::EffectChain::addEffect_l(const sp& effect) -{ - effect_descriptor_t desc = effect->desc(); - uint32_t insertPref = desc.flags & EFFECT_FLAG_INSERT_MASK; - - Mutex::Autolock _l(mLock); - effect->setChain(this); - sp thread = mThread.promote(); - if (thread == 0) { - return NO_INIT; - } - effect->setThread(thread); - - if ((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { - // Auxiliary effects are inserted at the beginning of mEffects vector as - // they are processed first and accumulated in chain input buffer - mEffects.insertAt(effect, 0); - - // the input buffer for auxiliary effect contains mono samples in - // 32 bit format. This is to avoid saturation in AudoMixer - // accumulation stage. Saturation is done in EffectModule::process() before - // calling the process in effect engine - size_t numSamples = thread->frameCount(); - int32_t *buffer = new int32_t[numSamples]; - memset(buffer, 0, numSamples * sizeof(int32_t)); - effect->setInBuffer((int16_t *)buffer); - // auxiliary effects output samples to chain input buffer for further processing - // by insert effects - effect->setOutBuffer(mInBuffer); - } else { - // Insert effects are inserted at the end of mEffects vector as they are processed - // after track and auxiliary effects. 
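The 32-bit mono input buffer allocated for auxiliary effects above exists to give the send bus headroom: per-track contributions accumulate without wrapping, and saturation happens only once, when converting back to 16 bits. A toy illustration of that headroom argument, with a local clamp equivalent in effect to the clamp16() used in EffectModule::process():

    #include <cstdint>
    #include <cstdio>

    static int16_t clamp16(int32_t x) {
        if (x > 32767)  return 32767;
        if (x < -32768) return -32768;
        return static_cast<int16_t>(x);
    }

    int main() {
        // Two tracks feeding an auxiliary bus: sum in 32 bits, saturate at the end.
        int16_t trackA[4] = {30000,  12000, -25000, 100};
        int16_t trackB[4] = {10000, -30000, -20000, 200};
        int32_t bus[4] = {0};
        int16_t out[4];

        for (int i = 0; i < 4; i++) bus[i] += trackA[i];
        for (int i = 0; i < 4; i++) bus[i] += trackB[i];
        for (int i = 0; i < 4; i++) out[i] = clamp16(bus[i]);

        for (int i = 0; i < 4; i++) std::printf("%d ", out[i]);  // 32767 -18000 -32768 300
        std::printf("\n");
    }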
- // Insert effect order as a function of indicated preference: - // if EFFECT_FLAG_INSERT_EXCLUSIVE, insert in first position or reject if - // another effect is present - // else if EFFECT_FLAG_INSERT_FIRST, insert in first position or after the - // last effect claiming first position - // else if EFFECT_FLAG_INSERT_LAST, insert in last position or before the - // first effect claiming last position - // else if EFFECT_FLAG_INSERT_ANY insert after first or before last - // Reject insertion if an effect with EFFECT_FLAG_INSERT_EXCLUSIVE is - // already present - - size_t size = mEffects.size(); - size_t idx_insert = size; - ssize_t idx_insert_first = -1; - ssize_t idx_insert_last = -1; - - for (size_t i = 0; i < size; i++) { - effect_descriptor_t d = mEffects[i]->desc(); - uint32_t iMode = d.flags & EFFECT_FLAG_TYPE_MASK; - uint32_t iPref = d.flags & EFFECT_FLAG_INSERT_MASK; - if (iMode == EFFECT_FLAG_TYPE_INSERT) { - // check invalid effect chaining combinations - if (insertPref == EFFECT_FLAG_INSERT_EXCLUSIVE || - iPref == EFFECT_FLAG_INSERT_EXCLUSIVE) { - ALOGW("addEffect_l() could not insert effect %s: exclusive conflict with %s", - desc.name, d.name); - return INVALID_OPERATION; - } - // remember position of first insert effect and by default - // select this as insert position for new effect - if (idx_insert == size) { - idx_insert = i; - } - // remember position of last insert effect claiming - // first position - if (iPref == EFFECT_FLAG_INSERT_FIRST) { - idx_insert_first = i; - } - // remember position of first insert effect claiming - // last position - if (iPref == EFFECT_FLAG_INSERT_LAST && - idx_insert_last == -1) { - idx_insert_last = i; - } - } - } - - // modify idx_insert from first position if needed - if (insertPref == EFFECT_FLAG_INSERT_LAST) { - if (idx_insert_last != -1) { - idx_insert = idx_insert_last; - } else { - idx_insert = size; - } - } else { - if (idx_insert_first != -1) { - idx_insert = idx_insert_first + 1; - } - } - - // always read samples from chain input buffer - effect->setInBuffer(mInBuffer); - - // if last effect in the chain, output samples to chain - // output buffer, otherwise to chain input buffer - if (idx_insert == size) { - if (idx_insert != 0) { - mEffects[idx_insert-1]->setOutBuffer(mInBuffer); - mEffects[idx_insert-1]->configure(); - } - effect->setOutBuffer(mOutBuffer); - } else { - effect->setOutBuffer(mInBuffer); - } - mEffects.insertAt(effect, idx_insert); - - ALOGV("addEffect_l() effect %p, added in chain %p at rank %d", effect.get(), this, - idx_insert); - } - effect->configure(); - return NO_ERROR; -} - -// removeEffect_l() must be called with PlaybackThread::mLock held -size_t AudioFlinger::EffectChain::removeEffect_l(const sp& effect) -{ - Mutex::Autolock _l(mLock); - size_t size = mEffects.size(); - uint32_t type = effect->desc().flags & EFFECT_FLAG_TYPE_MASK; - - for (size_t i = 0; i < size; i++) { - if (effect == mEffects[i]) { - // calling stop here will remove pre-processing effect from the audio HAL. 
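The placement rules listed above reduce to a single index computation over the existing insert effects. A sketch under that reading; it only considers insert effects (auxiliary effects always go to index 0, as noted earlier), and the enum values are stand-ins for the EFFECT_FLAG_INSERT_* flags:

    #include <vector>
    #include <cstdio>

    enum Pref { ANY, FIRST, LAST, EXCLUSIVE };

    // Index at which a new insert effect with preference 'pref' should be placed,
    // or -1 if an exclusive-insert conflict forces a rejection.
    int insertPosition(const std::vector<Pref>& chain, Pref pref) {
        int size = (int)chain.size();
        int idxInsert = size, idxFirst = -1, idxLast = -1;
        for (int i = 0; i < size; i++) {
            if (pref == EXCLUSIVE || chain[i] == EXCLUSIVE) return -1;  // exclusive conflict
            if (idxInsert == size) idxInsert = i;          // first insert effect by default
            if (chain[i] == FIRST) idxFirst = i;           // last effect claiming first place
            if (chain[i] == LAST && idxLast == -1) idxLast = i;  // first effect claiming last
        }
        if (pref == LAST)   return idxLast != -1 ? idxLast : size;
        if (idxFirst != -1) return idxFirst + 1;           // just after the "first" claimants
        return idxInsert;
    }

    int main() {
        std::vector<Pref> chain = {FIRST, ANY, LAST};
        std::printf("ANY   -> %d\n", insertPosition(chain, ANY));    // 1: after the FIRST effect
        std::printf("LAST  -> %d\n", insertPosition(chain, LAST));   // 2: before the existing LAST
        std::printf("FIRST -> %d\n", insertPosition(chain, FIRST));  // 1
    }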
- // This is safe as we hold the EffectChain mutex which guarantees that we are not in - // the middle of a read from audio HAL - if (mEffects[i]->state() == EffectModule::ACTIVE || - mEffects[i]->state() == EffectModule::STOPPING) { - mEffects[i]->stop(); - } - if (type == EFFECT_FLAG_TYPE_AUXILIARY) { - delete[] effect->inBuffer(); - } else { - if (i == size - 1 && i != 0) { - mEffects[i - 1]->setOutBuffer(mOutBuffer); - mEffects[i - 1]->configure(); - } - } - mEffects.removeAt(i); - ALOGV("removeEffect_l() effect %p, removed from chain %p at rank %d", effect.get(), - this, i); - break; - } - } - - return mEffects.size(); -} - -// setDevice_l() must be called with PlaybackThread::mLock held -void AudioFlinger::EffectChain::setDevice_l(audio_devices_t device) -{ - size_t size = mEffects.size(); - for (size_t i = 0; i < size; i++) { - mEffects[i]->setDevice(device); - } -} - -// setMode_l() must be called with PlaybackThread::mLock held -void AudioFlinger::EffectChain::setMode_l(audio_mode_t mode) -{ - size_t size = mEffects.size(); - for (size_t i = 0; i < size; i++) { - mEffects[i]->setMode(mode); - } -} - -// setAudioSource_l() must be called with PlaybackThread::mLock held -void AudioFlinger::EffectChain::setAudioSource_l(audio_source_t source) -{ - size_t size = mEffects.size(); - for (size_t i = 0; i < size; i++) { - mEffects[i]->setAudioSource(source); - } -} - -// setVolume_l() must be called with PlaybackThread::mLock held -bool AudioFlinger::EffectChain::setVolume_l(uint32_t *left, uint32_t *right) -{ - uint32_t newLeft = *left; - uint32_t newRight = *right; - bool hasControl = false; - int ctrlIdx = -1; - size_t size = mEffects.size(); - - // first update volume controller - for (size_t i = size; i > 0; i--) { - if (mEffects[i - 1]->isProcessEnabled() && - (mEffects[i - 1]->desc().flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_CTRL) { - ctrlIdx = i - 1; - hasControl = true; - break; - } - } - - if (ctrlIdx == mVolumeCtrlIdx && *left == mLeftVolume && *right == mRightVolume) { - if (hasControl) { - *left = mNewLeftVolume; - *right = mNewRightVolume; - } - return hasControl; - } - - mVolumeCtrlIdx = ctrlIdx; - mLeftVolume = newLeft; - mRightVolume = newRight; - - // second get volume update from volume controller - if (ctrlIdx >= 0) { - mEffects[ctrlIdx]->setVolume(&newLeft, &newRight, true); - mNewLeftVolume = newLeft; - mNewRightVolume = newRight; - } - // then indicate volume to all other effects in chain. 
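Concretely, in the volume handling above the last enabled effect with volume control becomes the controller; effects ahead of it in the chain receive the volume it actually applied, while effects after it receive the originally requested volume. A toy model of that delegation; the halving controller and the 4096 full-scale unit are assumptions for the sketch only:

    #include <vector>
    #include <cstdio>

    struct Fx { bool volumeCtrl; unsigned vol = 0; };   // simplified effect slot

    // Returns true if some effect owns volume control; 'vol' is updated to the
    // value the controller actually applied.
    bool setChainVolume(std::vector<Fx>& fx, unsigned& vol) {
        int ctrl = -1;
        for (int i = (int)fx.size() - 1; i >= 0; i--)    // last enabled controller wins
            if (fx[i].volumeCtrl) { ctrl = i; break; }

        unsigned requested = vol;
        unsigned altered = requested;
        if (ctrl >= 0) altered = requested / 2;          // pretend the controller attenuates

        for (int i = 0; i < (int)fx.size(); i++) {
            if (i == ctrl) continue;
            // Effects before the controller see what it applied; effects after it
            // still see the originally requested volume.
            fx[i].vol = (ctrl >= 0 && i < ctrl) ? altered : requested;
        }
        vol = altered;
        return ctrl >= 0;
    }

    int main() {
        std::vector<Fx> chain = { {false}, {true}, {false} };
        unsigned vol = 4096;
        bool ctrl = setChainVolume(chain, vol);
        std::printf("controller=%d applied=%u fx0=%u fx2=%u\n",
                    ctrl, vol, chain[0].vol, chain[2].vol);  // 1 2048 2048 4096
    }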
- // Pass altered volume to effects before volume controller - // and requested volume to effects after controller - uint32_t lVol = newLeft; - uint32_t rVol = newRight; - - for (size_t i = 0; i < size; i++) { - if ((int)i == ctrlIdx) { - continue; - } - // this also works for ctrlIdx == -1 when there is no volume controller - if ((int)i > ctrlIdx) { - lVol = *left; - rVol = *right; - } - mEffects[i]->setVolume(&lVol, &rVol, false); - } - *left = newLeft; - *right = newRight; - - return hasControl; -} - -void AudioFlinger::EffectChain::dump(int fd, const Vector& args) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - snprintf(buffer, SIZE, "Effects for session %d:\n", mSessionId); - result.append(buffer); - - bool locked = tryLock(mLock); - // failed to lock - AudioFlinger is probably deadlocked - if (!locked) { - result.append("\tCould not lock mutex:\n"); - } - - result.append("\tNum fx In buffer Out buffer Active tracks:\n"); - snprintf(buffer, SIZE, "\t%02d 0x%08x 0x%08x %d\n", - mEffects.size(), - (uint32_t)mInBuffer, - (uint32_t)mOutBuffer, - mActiveTrackCnt); - result.append(buffer); - write(fd, result.string(), result.size()); - - for (size_t i = 0; i < mEffects.size(); ++i) { - sp effect = mEffects[i]; - if (effect != 0) { - effect->dump(fd, args); - } - } - - if (locked) { - mLock.unlock(); - } -} - -// must be called with ThreadBase::mLock held -void AudioFlinger::EffectChain::setEffectSuspended_l( - const effect_uuid_t *type, bool suspend) -{ - sp desc; - // use effect type UUID timelow as key as there is no real risk of identical - // timeLow fields among effect type UUIDs. - ssize_t index = mSuspendedEffects.indexOfKey(type->timeLow); - if (suspend) { - if (index >= 0) { - desc = mSuspendedEffects.valueAt(index); - } else { - desc = new SuspendedEffectDesc(); - desc->mType = *type; - mSuspendedEffects.add(type->timeLow, desc); - ALOGV("setEffectSuspended_l() add entry for %08x", type->timeLow); - } - if (desc->mRefCount++ == 0) { - sp effect = getEffectIfEnabled(type); - if (effect != 0) { - desc->mEffect = effect; - effect->setSuspended(true); - effect->setEnabled(false); - } - } - } else { - if (index < 0) { - return; - } - desc = mSuspendedEffects.valueAt(index); - if (desc->mRefCount <= 0) { - ALOGW("setEffectSuspended_l() restore refcount should not be 0 %d", desc->mRefCount); - desc->mRefCount = 1; - } - if (--desc->mRefCount == 0) { - ALOGV("setEffectSuspended_l() remove entry for %08x", mSuspendedEffects.keyAt(index)); - if (desc->mEffect != 0) { - sp effect = desc->mEffect.promote(); - if (effect != 0) { - effect->setSuspended(false); - effect->lock(); - EffectHandle *handle = effect->controlHandle_l(); - if (handle != NULL && !handle->destroyed_l()) { - effect->setEnabled_l(handle->enabled()); - } - effect->unlock(); - } - desc->mEffect.clear(); - } - mSuspendedEffects.removeItemsAt(index); - } - } -} - -// must be called with ThreadBase::mLock held -void AudioFlinger::EffectChain::setEffectSuspendedAll_l(bool suspend) -{ - sp desc; - - ssize_t index = mSuspendedEffects.indexOfKey((int)kKeyForSuspendAll); - if (suspend) { - if (index >= 0) { - desc = mSuspendedEffects.valueAt(index); - } else { - desc = new SuspendedEffectDesc(); - mSuspendedEffects.add((int)kKeyForSuspendAll, desc); - ALOGV("setEffectSuspendedAll_l() add entry for 0"); - } - if (desc->mRefCount++ == 0) { - Vector< sp > effects; - getSuspendEligibleEffects(effects); - for (size_t i = 0; i < effects.size(); i++) { - setEffectSuspended_l(&effects[i]->desc().type, true); - } - } 
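The suspend bookkeeping above is a per-type reference count: the first suspend request disables the effect and marks it suspended, nested requests only bump the count, and the matching last restore re-enables it from the control handle's state. A minimal registry sketch, keyed by a plain uint32_t rather than the type UUID's timeLow field:

    #include <map>
    #include <cstdio>
    #include <cstdint>

    struct SuspendRegistry {
        std::map<uint32_t, int> refs;

        void setSuspended(uint32_t type, bool suspend) {
            if (suspend) {
                if (refs[type]++ == 0)
                    std::printf("type %08x: disable effect, mark suspended\n", type);
            } else {
                auto it = refs.find(type);
                if (it == refs.end()) return;   // never suspended: nothing to restore
                if (--it->second == 0) {
                    std::printf("type %08x: restore enable state from control handle\n", type);
                    refs.erase(it);
                }
            }
        }
    };

    int main() {
        SuspendRegistry reg;
        const uint32_t kSomeType = 0x12345678;   // hypothetical type key
        reg.setSuspended(kSomeType, true);       // first suspend disables the effect
        reg.setSuspended(kSomeType, true);       // nested request only bumps the count
        reg.setSuspended(kSomeType, false);
        reg.setSuspended(kSomeType, false);      // last restore re-enables it
    }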
- } else { - if (index < 0) { - return; - } - desc = mSuspendedEffects.valueAt(index); - if (desc->mRefCount <= 0) { - ALOGW("setEffectSuspendedAll_l() restore refcount should not be 0 %d", desc->mRefCount); - desc->mRefCount = 1; - } - if (--desc->mRefCount == 0) { - Vector types; - for (size_t i = 0; i < mSuspendedEffects.size(); i++) { - if (mSuspendedEffects.keyAt(i) == (int)kKeyForSuspendAll) { - continue; - } - types.add(&mSuspendedEffects.valueAt(i)->mType); - } - for (size_t i = 0; i < types.size(); i++) { - setEffectSuspended_l(types[i], false); - } - ALOGV("setEffectSuspendedAll_l() remove entry for %08x", - mSuspendedEffects.keyAt(index)); - mSuspendedEffects.removeItem((int)kKeyForSuspendAll); - } - } -} - - -// The volume effect is used for automated tests only -#ifndef OPENSL_ES_H_ -static const effect_uuid_t SL_IID_VOLUME_ = { 0x09e8ede0, 0xddde, 0x11db, 0xb4f6, - { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }; -const effect_uuid_t * const SL_IID_VOLUME = &SL_IID_VOLUME_; -#endif //OPENSL_ES_H_ - -bool AudioFlinger::EffectChain::isEffectEligibleForSuspend(const effect_descriptor_t& desc) -{ - // auxiliary effects and visualizer are never suspended on output mix - if ((mSessionId == AUDIO_SESSION_OUTPUT_MIX) && - (((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) || - (memcmp(&desc.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) || - (memcmp(&desc.type, SL_IID_VOLUME, sizeof(effect_uuid_t)) == 0))) { - return false; - } - return true; -} - -void AudioFlinger::EffectChain::getSuspendEligibleEffects( - Vector< sp > &effects) -{ - effects.clear(); - for (size_t i = 0; i < mEffects.size(); i++) { - if (isEffectEligibleForSuspend(mEffects[i]->desc())) { - effects.add(mEffects[i]); - } - } -} - -sp AudioFlinger::EffectChain::getEffectIfEnabled( - const effect_uuid_t *type) -{ - sp effect = getEffectFromType_l(type); - return effect != 0 && effect->isEnabled() ? effect : 0; -} - -void AudioFlinger::EffectChain::checkSuspendOnEffectEnabled(const sp& effect, - bool enabled) -{ - ssize_t index = mSuspendedEffects.indexOfKey(effect->desc().type.timeLow); - if (enabled) { - if (index < 0) { - // if the effect is not suspend check if all effects are suspended - index = mSuspendedEffects.indexOfKey((int)kKeyForSuspendAll); - if (index < 0) { - return; - } - if (!isEffectEligibleForSuspend(effect->desc())) { - return; - } - setEffectSuspended_l(&effect->desc().type, enabled); - index = mSuspendedEffects.indexOfKey(effect->desc().type.timeLow); - if (index < 0) { - ALOGW("checkSuspendOnEffectEnabled() Fx should be suspended here!"); - return; - } - } - ALOGV("checkSuspendOnEffectEnabled() enable suspending fx %08x", - effect->desc().type.timeLow); - sp desc = mSuspendedEffects.valueAt(index); - // if effect is requested to suspended but was not yet enabled, supend it now. 
- if (desc->mEffect == 0) { - desc->mEffect = effect; - effect->setEnabled(false); - effect->setSuspended(true); - } - } else { - if (index < 0) { - return; - } - ALOGV("checkSuspendOnEffectEnabled() disable restoring fx %08x", - effect->desc().type.timeLow); - sp desc = mSuspendedEffects.valueAt(index); - desc->mEffect.clear(); - effect->setSuspended(false); - } -} - -#undef LOG_TAG -#define LOG_TAG "AudioFlinger" - // ---------------------------------------------------------------------------- status_t AudioFlinger::onTransact( diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 2541b15..46a8e0f 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -75,6 +75,11 @@ class FastMixer; static const nsecs_t kDefaultStandbyTimeInNsecs = seconds(3); +#define MAX_GAIN 4096.0f +#define MAX_GAIN_INT 0x1000 + +#define INCLUDING_FROM_AUDIOFLINGER_H + class AudioFlinger : public BinderService, public BnAudioFlinger @@ -283,7 +288,14 @@ private: // ro.audio.flinger_standbytime_ms or defaults to kDefaultStandbyTimeInNsecs static nsecs_t mStandbyTimeInNsecs; + // incremented by 2 when screen state changes, bit 0 == 1 means "off" + // AudioFlinger::setParameters() updates, other threads read w/o lock + static uint32_t mScreenState; + // Internal dump utilities. + static const int kDumpLockRetries = 50; + static const int kDumpLockSleepUs = 20000; + static bool dumpTryLock(Mutex& mutex); void dumpPermissionDenial(int fd, const Vector& args); void dumpClients(int fd, const Vector& args); void dumpInternals(int fd, const Vector& args); @@ -348,419 +360,6 @@ private: struct AudioStreamOut; struct AudioStreamIn; - class ThreadBase : public Thread { - public: - - enum type_t { - MIXER, // Thread class is MixerThread - DIRECT, // Thread class is DirectOutputThread - DUPLICATING, // Thread class is DuplicatingThread - RECORD // Thread class is RecordThread - }; - - ThreadBase(const sp& audioFlinger, audio_io_handle_t id, - audio_devices_t outDevice, audio_devices_t inDevice, type_t type); - virtual ~ThreadBase(); - - void dumpBase(int fd, const Vector& args); - void dumpEffectChains(int fd, const Vector& args); - - void clearPowerManager(); - - // base for record and playback - class TrackBase : public ExtendedAudioBufferProvider, public RefBase { - - public: - enum track_state { - IDLE, - TERMINATED, - FLUSHED, - STOPPED, - // next 2 states are currently used for fast tracks only - STOPPING_1, // waiting for first underrun - STOPPING_2, // waiting for presentation complete - RESUMING, - ACTIVE, - PAUSING, - PAUSED - }; - - TrackBase(ThreadBase *thread, - const sp& client, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - const sp& sharedBuffer, - int sessionId); - virtual ~TrackBase(); - - virtual status_t start(AudioSystem::sync_event_t event, - int triggerSession) = 0; - virtual void stop() = 0; - sp getCblk() const { return mCblkMemory; } - audio_track_cblk_t* cblk() const { return mCblk; } - int sessionId() const { return mSessionId; } - virtual status_t setSyncEvent(const sp& event); - - protected: - TrackBase(const TrackBase&); - TrackBase& operator = (const TrackBase&); - - // AudioBufferProvider interface - virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts) = 0; - virtual void releaseBuffer(AudioBufferProvider::Buffer* buffer); - - // ExtendedAudioBufferProvider interface is only needed for Track, - // but putting it in TrackBase 
avoids the complexity of virtual inheritance - virtual size_t framesReady() const { return SIZE_MAX; } - - audio_format_t format() const { - return mFormat; - } - - uint32_t channelCount() const { return mChannelCount; } - - audio_channel_mask_t channelMask() const { return mChannelMask; } - - uint32_t sampleRate() const; // FIXME inline after cblk sr moved - - // Return a pointer to the start of a contiguous slice of the track buffer. - // Parameter 'offset' is the requested start position, expressed in - // monotonically increasing frame units relative to the track epoch. - // Parameter 'frames' is the requested length, also in frame units. - // Always returns non-NULL. It is the caller's responsibility to - // verify that this will be successful; the result of calling this - // function with invalid 'offset' or 'frames' is undefined. - void* getBuffer(uint32_t offset, uint32_t frames) const; - - bool isStopped() const { - return (mState == STOPPED || mState == FLUSHED); - } - - // for fast tracks only - bool isStopping() const { - return mState == STOPPING_1 || mState == STOPPING_2; - } - bool isStopping_1() const { - return mState == STOPPING_1; - } - bool isStopping_2() const { - return mState == STOPPING_2; - } - - bool isTerminated() const { - return mState == TERMINATED; - } - - bool step(); // mStepCount is an implicit input - void reset(); - - virtual bool isOut() const = 0; // true for Track and TimedTrack, false for RecordTrack, - // this could be a track type if needed later - - const wp mThread; - /*const*/ sp mClient; // see explanation at ~TrackBase() why not const - sp mCblkMemory; - audio_track_cblk_t* mCblk; - void* mBuffer; // start of track buffer, typically in shared memory - void* mBufferEnd; // &mBuffer[mFrameCount * frameSize], where frameSize - // is based on mChannelCount and 16-bit samples - uint32_t mStepCount; // saves AudioBufferProvider::Buffer::frameCount as of - // time of releaseBuffer() for later use by step() - // we don't really need a lock for these - track_state mState; - const uint32_t mSampleRate; // initial sample rate only; for tracks which - // support dynamic rates, the current value is in control block - const audio_format_t mFormat; - const audio_channel_mask_t mChannelMask; - const uint8_t mChannelCount; - const size_t mFrameSize; // AudioFlinger's view of frame size in shared memory, - // where for AudioTrack (but not AudioRecord), - // 8-bit PCM samples are stored as 16-bit - const size_t mFrameCount;// size of track buffer given at createTrack() or - // openRecord(), and then adjusted as needed - bool mStepServerFailed; - const int mSessionId; - Vector < sp >mSyncEvents; - }; - - enum { - CFG_EVENT_IO, - CFG_EVENT_PRIO - }; - - class ConfigEvent { - public: - ConfigEvent(int type) : mType(type) {} - virtual ~ConfigEvent() {} - - int type() const { return mType; } - - virtual void dump(char *buffer, size_t size) = 0; - - private: - const int mType; - }; - - class IoConfigEvent : public ConfigEvent { - public: - IoConfigEvent(int event, int param) : - ConfigEvent(CFG_EVENT_IO), mEvent(event), mParam(event) {} - virtual ~IoConfigEvent() {} - - int event() const { return mEvent; } - int param() const { return mParam; } - - virtual void dump(char *buffer, size_t size) { - snprintf(buffer, size, "IO event: event %d, param %d\n", mEvent, mParam); - } - - private: - const int mEvent; - const int mParam; - }; - - class PrioConfigEvent : public ConfigEvent { - public: - PrioConfigEvent(pid_t pid, pid_t tid, int32_t prio) : - 
ConfigEvent(CFG_EVENT_PRIO), mPid(pid), mTid(tid), mPrio(prio) {} - virtual ~PrioConfigEvent() {} - - pid_t pid() const { return mPid; } - pid_t tid() const { return mTid; } - int32_t prio() const { return mPrio; } - - virtual void dump(char *buffer, size_t size) { - snprintf(buffer, size, "Prio event: pid %d, tid %d, prio %d\n", mPid, mTid, mPrio); - } - - private: - const pid_t mPid; - const pid_t mTid; - const int32_t mPrio; - }; - - - class PMDeathRecipient : public IBinder::DeathRecipient { - public: - PMDeathRecipient(const wp& thread) : mThread(thread) {} - virtual ~PMDeathRecipient() {} - - // IBinder::DeathRecipient - virtual void binderDied(const wp& who); - - private: - PMDeathRecipient(const PMDeathRecipient&); - PMDeathRecipient& operator = (const PMDeathRecipient&); - - wp mThread; - }; - - virtual status_t initCheck() const = 0; - - // static externally-visible - type_t type() const { return mType; } - audio_io_handle_t id() const { return mId;} - - // dynamic externally-visible - uint32_t sampleRate() const { return mSampleRate; } - uint32_t channelCount() const { return mChannelCount; } - audio_channel_mask_t channelMask() const { return mChannelMask; } - audio_format_t format() const { return mFormat; } - // Called by AudioFlinger::frameCount(audio_io_handle_t output) and effects, - // and returns the normal mix buffer's frame count. - size_t frameCount() const { return mNormalFrameCount; } - // Return's the HAL's frame count i.e. fast mixer buffer size. - size_t frameCountHAL() const { return mFrameCount; } - - // Should be "virtual status_t requestExitAndWait()" and override same - // method in Thread, but Thread::requestExitAndWait() is not yet virtual. - void exit(); - virtual bool checkForNewParameters_l() = 0; - virtual status_t setParameters(const String8& keyValuePairs); - virtual String8 getParameters(const String8& keys) = 0; - virtual void audioConfigChanged_l(int event, int param = 0) = 0; - void sendIoConfigEvent(int event, int param = 0); - void sendIoConfigEvent_l(int event, int param = 0); - void sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio); - void processConfigEvents(); - - // see note at declaration of mStandby, mOutDevice and mInDevice - bool standby() const { return mStandby; } - audio_devices_t outDevice() const { return mOutDevice; } - audio_devices_t inDevice() const { return mInDevice; } - - virtual audio_stream_t* stream() const = 0; - - sp createEffect_l( - const sp& client, - const sp& effectClient, - int32_t priority, - int sessionId, - effect_descriptor_t *desc, - int *enabled, - status_t *status); - void disconnectEffect(const sp< EffectModule>& effect, - EffectHandle *handle, - bool unpinIfLast); - - // return values for hasAudioSession (bit field) - enum effect_state { - EFFECT_SESSION = 0x1, // the audio session corresponds to at least one - // effect - TRACK_SESSION = 0x2 // the audio session corresponds to at least one - // track - }; - - // get effect chain corresponding to session Id. - sp getEffectChain(int sessionId); - // same as getEffectChain() but must be called with ThreadBase mutex locked - sp getEffectChain_l(int sessionId) const; - // add an effect chain to the chain list (mEffectChains) - virtual status_t addEffectChain_l(const sp& chain) = 0; - // remove an effect chain from the chain list (mEffectChains) - virtual size_t removeEffectChain_l(const sp& chain) = 0; - // lock all effect chains Mutexes. Must be called before releasing the - // ThreadBase mutex before processing the mixer and effects. 
This guarantees the - // integrity of the chains during the process. - // Also sets the parameter 'effectChains' to current value of mEffectChains. - void lockEffectChains_l(Vector< sp >& effectChains); - // unlock effect chains after process - void unlockEffectChains(const Vector< sp >& effectChains); - // set audio mode to all effect chains - void setMode(audio_mode_t mode); - // get effect module with corresponding ID on specified audio session - sp getEffect(int sessionId, int effectId); - sp getEffect_l(int sessionId, int effectId); - // add and effect module. Also creates the effect chain is none exists for - // the effects audio session - status_t addEffect_l(const sp< EffectModule>& effect); - // remove and effect module. Also removes the effect chain is this was the last - // effect - void removeEffect_l(const sp< EffectModule>& effect); - // detach all tracks connected to an auxiliary effect - virtual void detachAuxEffect_l(int effectId) {} - // returns either EFFECT_SESSION if effects on this audio session exist in one - // chain, or TRACK_SESSION if tracks on this audio session exist, or both - virtual uint32_t hasAudioSession(int sessionId) const = 0; - // the value returned by default implementation is not important as the - // strategy is only meaningful for PlaybackThread which implements this method - virtual uint32_t getStrategyForSession_l(int sessionId) { return 0; } - - // suspend or restore effect according to the type of effect passed. a NULL - // type pointer means suspend all effects in the session - void setEffectSuspended(const effect_uuid_t *type, - bool suspend, - int sessionId = AUDIO_SESSION_OUTPUT_MIX); - // check if some effects must be suspended/restored when an effect is enabled - // or disabled - void checkSuspendOnEffectEnabled(const sp& effect, - bool enabled, - int sessionId = AUDIO_SESSION_OUTPUT_MIX); - void checkSuspendOnEffectEnabled_l(const sp& effect, - bool enabled, - int sessionId = AUDIO_SESSION_OUTPUT_MIX); - - virtual status_t setSyncEvent(const sp& event) = 0; - virtual bool isValidSyncEvent(const sp& event) const = 0; - - - mutable Mutex mLock; - - protected: - - // entry describing an effect being suspended in mSuspendedSessions keyed vector - class SuspendedSessionDesc : public RefBase { - public: - SuspendedSessionDesc() : mRefCount(0) {} - - int mRefCount; // number of active suspend requests - effect_uuid_t mType; // effect type UUID - }; - - void acquireWakeLock(); - void acquireWakeLock_l(); - void releaseWakeLock(); - void releaseWakeLock_l(); - void setEffectSuspended_l(const effect_uuid_t *type, - bool suspend, - int sessionId); - // updated mSuspendedSessions when an effect suspended or restored - void updateSuspendedSessions_l(const effect_uuid_t *type, - bool suspend, - int sessionId); - // check if some effects must be suspended when an effect chain is added - void checkSuspendOnAddEffectChain_l(const sp& chain); - - virtual void preExit() { } - - friend class AudioFlinger; // for mEffectChains - - const type_t mType; - - // Used by parameters, config events, addTrack_l, exit - Condition mWaitWorkCV; - - const sp mAudioFlinger; - uint32_t mSampleRate; - size_t mFrameCount; // output HAL, direct output, record - size_t mNormalFrameCount; // normal mixer and effects - audio_channel_mask_t mChannelMask; - uint16_t mChannelCount; - size_t mFrameSize; - audio_format_t mFormat; - - // Parameter sequence by client: binder thread calling setParameters(): - // 1. Lock mLock - // 2. Append to mNewParameters - // 3. 
mWaitWorkCV.signal - // 4. mParamCond.waitRelative with timeout - // 5. read mParamStatus - // 6. mWaitWorkCV.signal - // 7. Unlock - // - // Parameter sequence by server: threadLoop calling checkForNewParameters_l(): - // 1. Lock mLock - // 2. If there is an entry in mNewParameters proceed ... - // 2. Read first entry in mNewParameters - // 3. Process - // 4. Remove first entry from mNewParameters - // 5. Set mParamStatus - // 6. mParamCond.signal - // 7. mWaitWorkCV.wait with timeout (this is to avoid overwriting mParamStatus) - // 8. Unlock - Condition mParamCond; - Vector mNewParameters; - status_t mParamStatus; - - Vector mConfigEvents; - - // These fields are written and read by thread itself without lock or barrier, - // and read by other threads without lock or barrier via standby() , outDevice() - // and inDevice(). - // Because of the absence of a lock or barrier, any other thread that reads - // these fields must use the information in isolation, or be prepared to deal - // with possibility that it might be inconsistent with other information. - bool mStandby; // Whether thread is currently in standby. - audio_devices_t mOutDevice; // output device - audio_devices_t mInDevice; // input device - audio_source_t mAudioSource; // (see audio.h, audio_source_t) - - const audio_io_handle_t mId; - Vector< sp > mEffectChains; - - static const int kNameLength = 16; // prctl(PR_SET_NAME) limit - char mName[kNameLength]; - sp mPowerManager; - sp mWakeLockToken; - const sp mDeathRecipient; - // list of suspended effects per session and per type. The first vector is - // keyed by session ID, the second by type UUID timeLow field - KeyedVector< int, KeyedVector< int, sp > > - mSuspendedSessions; - }; - struct stream_type_t { stream_type_t() : volume(1.0f), @@ -772,658 +371,10 @@ private: }; // --- PlaybackThread --- - class PlaybackThread : public ThreadBase { - public: - - enum mixer_state { - MIXER_IDLE, // no active tracks - MIXER_TRACKS_ENABLED, // at least one active track, but no track has any data ready - MIXER_TRACKS_READY // at least one active track, and at least one track has data - // standby mode does not have an enum value - // suspend by audio policy manager is orthogonal to mixer state - }; - - // playback track - class Track : public TrackBase, public VolumeProvider { - public: - Track( PlaybackThread *thread, - const sp& client, - audio_stream_type_t streamType, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - const sp& sharedBuffer, - int sessionId, - IAudioFlinger::track_flags_t flags); - virtual ~Track(); - - static void appendDumpHeader(String8& result); - void dump(char* buffer, size_t size); - virtual status_t start(AudioSystem::sync_event_t event = - AudioSystem::SYNC_EVENT_NONE, - int triggerSession = 0); - virtual void stop(); - void pause(); - - void flush(); - void destroy(); - void mute(bool); - int name() const { return mName; } - - audio_stream_type_t streamType() const { - return mStreamType; - } - status_t attachAuxEffect(int EffectId); - void setAuxBuffer(int EffectId, int32_t *buffer); - int32_t *auxBuffer() const { return mAuxBuffer; } - void setMainBuffer(int16_t *buffer) { mMainBuffer = buffer; } - int16_t *mainBuffer() const { return mMainBuffer; } - int auxEffectId() const { return mAuxEffectId; } - - // implement FastMixerState::VolumeProvider interface - virtual uint32_t getVolumeLR(); - - virtual status_t setSyncEvent(const sp& event); - - protected: - // for numerous - friend class 
PlaybackThread; - friend class MixerThread; - friend class DirectOutputThread; - - Track(const Track&); - Track& operator = (const Track&); - - // AudioBufferProvider interface - virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, - int64_t pts = kInvalidPTS); - // releaseBuffer() not overridden - - virtual size_t framesReady() const; - - bool isMuted() const { return mMute; } - bool isPausing() const { - return mState == PAUSING; - } - bool isPaused() const { - return mState == PAUSED; - } - bool isResuming() const { - return mState == RESUMING; - } - bool isReady() const; - void setPaused() { mState = PAUSED; } - void reset(); - - bool isOutputTrack() const { - return (mStreamType == AUDIO_STREAM_CNT); - } - - sp sharedBuffer() const { return mSharedBuffer; } - - // framesWritten is cumulative, never reset, and is shared all tracks - // audioHalFrames is derived from output latency - // FIXME parameters not needed, could get them from the thread - bool presentationComplete(size_t framesWritten, size_t audioHalFrames); - - public: - void triggerEvents(AudioSystem::sync_event_t type); - virtual bool isTimedTrack() const { return false; } - bool isFastTrack() const { return (mFlags & IAudioFlinger::TRACK_FAST) != 0; } - virtual bool isOut() const; - - protected: - - // written by Track::mute() called by binder thread(s), without a mutex or barrier. - // read by Track::isMuted() called by playback thread, also without a mutex or barrier. - // The lack of mutex or barrier is safe because the mute status is only used by itself. - bool mMute; - - // FILLED state is used for suppressing volume ramp at begin of playing - enum {FS_INVALID, FS_FILLING, FS_FILLED, FS_ACTIVE}; - mutable uint8_t mFillingUpStatus; - int8_t mRetryCount; - const sp mSharedBuffer; - bool mResetDone; - const audio_stream_type_t mStreamType; - int mName; // track name on the normal mixer, - // allocated statically at track creation time, - // and is even allocated (though unused) for fast tracks - // FIXME don't allocate track name for fast tracks - int16_t *mMainBuffer; - int32_t *mAuxBuffer; - int mAuxEffectId; - bool mHasVolumeController; - size_t mPresentationCompleteFrames; // number of frames written to the - // audio HAL when this track will be fully rendered - // zero means not monitoring - private: - IAudioFlinger::track_flags_t mFlags; - - // The following fields are only for fast tracks, and should be in a subclass - int mFastIndex; // index within FastMixerState::mFastTracks[]; - // either mFastIndex == -1 if not isFastTrack() - // or 0 < mFastIndex < FastMixerState::kMaxFast because - // index 0 is reserved for normal mixer's submix; - // index is allocated statically at track creation time - // but the slot is only used if track is active - FastTrackUnderruns mObservedUnderruns; // Most recently observed value of - // mFastMixerDumpState.mTracks[mFastIndex].mUnderruns - uint32_t mUnderrunCount; // Counter of total number of underruns, never reset - volatile float mCachedVolume; // combined master volume and stream type volume; - // 'volatile' means accessed without lock or - // barrier, but is read/written atomically - }; // end of Track - - class TimedTrack : public Track { - public: - static sp create(PlaybackThread *thread, - const sp& client, - audio_stream_type_t streamType, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - const sp& sharedBuffer, - int sessionId); - virtual ~TimedTrack(); - - class TimedBuffer { - public: - 
TimedBuffer(); - TimedBuffer(const sp& buffer, int64_t pts); - const sp& buffer() const { return mBuffer; } - int64_t pts() const { return mPTS; } - uint32_t position() const { return mPosition; } - void setPosition(uint32_t pos) { mPosition = pos; } - private: - sp mBuffer; - int64_t mPTS; - uint32_t mPosition; - }; - - // Mixer facing methods. - virtual bool isTimedTrack() const { return true; } - virtual size_t framesReady() const; - - // AudioBufferProvider interface - virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, - int64_t pts); - virtual void releaseBuffer(AudioBufferProvider::Buffer* buffer); - - // Client/App facing methods. - status_t allocateTimedBuffer(size_t size, - sp* buffer); - status_t queueTimedBuffer(const sp& buffer, - int64_t pts); - status_t setMediaTimeTransform(const LinearTransform& xform, - TimedAudioTrack::TargetTimeline target); - - private: - TimedTrack(PlaybackThread *thread, - const sp& client, - audio_stream_type_t streamType, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - const sp& sharedBuffer, - int sessionId); - - void timedYieldSamples_l(AudioBufferProvider::Buffer* buffer); - void timedYieldSilence_l(uint32_t numFrames, - AudioBufferProvider::Buffer* buffer); - void trimTimedBufferQueue_l(); - void trimTimedBufferQueueHead_l(const char* logTag); - void updateFramesPendingAfterTrim_l(const TimedBuffer& buf, - const char* logTag); - - uint64_t mLocalTimeFreq; - LinearTransform mLocalTimeToSampleTransform; - LinearTransform mMediaTimeToSampleTransform; - sp mTimedMemoryDealer; - - Vector mTimedBufferQueue; - bool mQueueHeadInFlight; - bool mTrimQueueHeadOnRelease; - uint32_t mFramesPendingInQueue; - - uint8_t* mTimedSilenceBuffer; - uint32_t mTimedSilenceBufferSize; - mutable Mutex mTimedBufferQueueLock; - bool mTimedAudioOutputOnTime; - CCHelper mCCHelper; - - Mutex mMediaTimeTransformLock; - LinearTransform mMediaTimeTransform; - bool mMediaTimeTransformValid; - TimedAudioTrack::TargetTimeline mMediaTimeTransformTarget; - }; - - - // playback track, used by DuplicatingThread - class OutputTrack : public Track { - public: - - class Buffer : public AudioBufferProvider::Buffer { - public: - int16_t *mBuffer; - }; - - OutputTrack(PlaybackThread *thread, - DuplicatingThread *sourceThread, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount); - virtual ~OutputTrack(); - - virtual status_t start(AudioSystem::sync_event_t event = - AudioSystem::SYNC_EVENT_NONE, - int triggerSession = 0); - virtual void stop(); - bool write(int16_t* data, uint32_t frames); - bool bufferQueueEmpty() const { return mBufferQueue.size() == 0; } - bool isActive() const { return mActive; } - const wp& thread() const { return mThread; } - - private: - - enum { - NO_MORE_BUFFERS = 0x80000001, // same in AudioTrack.h, ok to be different value - }; - - status_t obtainBuffer(AudioBufferProvider::Buffer* buffer, - uint32_t waitTimeMs); - void clearBufferQueue(); - - // Maximum number of pending buffers allocated by OutputTrack::write() - static const uint8_t kMaxOverFlowBuffers = 10; - - Vector < Buffer* > mBufferQueue; - AudioBufferProvider::Buffer mOutBuffer; - bool mActive; - DuplicatingThread* const mSourceThread; // for waitTimeMs() in write() - void* mBuffers; // starting address of buffers in plain memory - }; // end of OutputTrack - - PlaybackThread(const sp& audioFlinger, AudioStreamOut* output, - audio_io_handle_t id, audio_devices_t device, type_t 
type); - virtual ~PlaybackThread(); - - void dump(int fd, const Vector& args); - - // Thread virtuals - virtual status_t readyToRun(); - virtual bool threadLoop(); - - // RefBase - virtual void onFirstRef(); - -protected: - // Code snippets that were lifted up out of threadLoop() - virtual void threadLoop_mix() = 0; - virtual void threadLoop_sleepTime() = 0; - virtual void threadLoop_write(); - virtual void threadLoop_standby(); - virtual void threadLoop_removeTracks(const Vector< sp >& tracksToRemove); - - // prepareTracks_l reads and writes mActiveTracks, and returns - // the pending set of tracks to remove via Vector 'tracksToRemove'. The caller - // is responsible for clearing or destroying this Vector later on, when it - // is safe to do so. That will drop the final ref count and destroy the tracks. - virtual mixer_state prepareTracks_l(Vector< sp > *tracksToRemove) = 0; - - // ThreadBase virtuals - virtual void preExit(); - -public: - - virtual status_t initCheck() const { return (mOutput == NULL) ? NO_INIT : NO_ERROR; } - - // return estimated latency in milliseconds, as reported by HAL - uint32_t latency() const; - // same, but lock must already be held - uint32_t latency_l() const; - - void setMasterVolume(float value); - void setMasterMute(bool muted); - - void setStreamVolume(audio_stream_type_t stream, float value); - void setStreamMute(audio_stream_type_t stream, bool muted); - - float streamVolume(audio_stream_type_t stream) const; - - sp createTrack_l( - const sp& client, - audio_stream_type_t streamType, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - const sp& sharedBuffer, - int sessionId, - IAudioFlinger::track_flags_t *flags, - pid_t tid, - status_t *status); - - AudioStreamOut* getOutput() const; - AudioStreamOut* clearOutput(); - virtual audio_stream_t* stream() const; - - // a very large number of suspend() will eventually wraparound, but unlikely - void suspend() { (void) android_atomic_inc(&mSuspended); } - void restore() - { - // if restore() is done without suspend(), get back into - // range so that the next suspend() will operate correctly - if (android_atomic_dec(&mSuspended) <= 0) { - android_atomic_release_store(0, &mSuspended); - } - } - bool isSuspended() const - { return android_atomic_acquire_load(&mSuspended) > 0; } - - virtual String8 getParameters(const String8& keys); - virtual void audioConfigChanged_l(int event, int param = 0); - status_t getRenderPosition(size_t *halFrames, size_t *dspFrames); - int16_t *mixBuffer() const { return mMixBuffer; }; - - virtual void detachAuxEffect_l(int effectId); - status_t attachAuxEffect(const sp track, - int EffectId); - status_t attachAuxEffect_l(const sp track, - int EffectId); - - virtual status_t addEffectChain_l(const sp& chain); - virtual size_t removeEffectChain_l(const sp& chain); - virtual uint32_t hasAudioSession(int sessionId) const; - virtual uint32_t getStrategyForSession_l(int sessionId); - - - virtual status_t setSyncEvent(const sp& event); - virtual bool isValidSyncEvent(const sp& event) const; - void invalidateTracks(audio_stream_type_t streamType); - - - protected: - int16_t* mMixBuffer; - - // suspend count, > 0 means suspended. While suspended, the thread continues to pull from - // tracks and mix, but doesn't write to HAL. A2DP and SCO HAL implementations can't handle - // concurrent use of both of them, so Audio Policy Service suspends one of the threads to - // workaround that restriction. 
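// [Editorial sketch, not part of this patch] mSuspended above is a counter, not
// a flag: suspend() increments it, restore() decrements it and snaps back to
// zero when over-decremented, and isSuspended() just checks "> 0". A portable
// restatement with std::atomic (the real code uses the android_atomic_*
// helpers shown in suspend()/restore() above):
#include <atomic>
#include <cstdint>

class SuspendCounter {
public:
    void suspend() { mSuspended.fetch_add(1); }
    void restore() {
        // If restore() is called without a matching suspend(), reset to 0 so
        // the next suspend() still behaves correctly.
        if (mSuspended.fetch_sub(1) <= 0) {   // fetch_sub returns the old value
            mSuspended.store(0);
        }
    }
    bool isSuspended() const { return mSuspended.load() > 0; }
private:
    std::atomic<int32_t> mSuspended{0};
};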
- // 'volatile' means accessed via atomic operations and no lock. - volatile int32_t mSuspended; - - // FIXME overflows every 6+ hours at 44.1 kHz stereo 16-bit samples - // mFramesWritten would be better, or 64-bit even better - size_t mBytesWritten; - private: - // mMasterMute is in both PlaybackThread and in AudioFlinger. When a - // PlaybackThread needs to find out if master-muted, it checks it's local - // copy rather than the one in AudioFlinger. This optimization saves a lock. - bool mMasterMute; - void setMasterMute_l(bool muted) { mMasterMute = muted; } - protected: - SortedVector< wp > mActiveTracks; // FIXME check if this could be sp<> - - // Allocate a track name for a given channel mask. - // Returns name >= 0 if successful, -1 on failure. - virtual int getTrackName_l(audio_channel_mask_t channelMask, int sessionId) = 0; - virtual void deleteTrackName_l(int name) = 0; - - // Time to sleep between cycles when: - virtual uint32_t activeSleepTimeUs() const; // mixer state MIXER_TRACKS_ENABLED - virtual uint32_t idleSleepTimeUs() const = 0; // mixer state MIXER_IDLE - virtual uint32_t suspendSleepTimeUs() const = 0; // audio policy manager suspended us - // No sleep when mixer state == MIXER_TRACKS_READY; relies on audio HAL stream->write() - // No sleep in standby mode; waits on a condition - - // Code snippets that are temporarily lifted up out of threadLoop() until the merge - void checkSilentMode_l(); - - // Non-trivial for DUPLICATING only - virtual void saveOutputTracks() { } - virtual void clearOutputTracks() { } - - // Cache various calculated values, at threadLoop() entry and after a parameter change - virtual void cacheParameters_l(); - - virtual uint32_t correctLatency_l(uint32_t latency) const; - - private: - - friend class AudioFlinger; // for numerous - - PlaybackThread(const Client&); - PlaybackThread& operator = (const PlaybackThread&); - - status_t addTrack_l(const sp& track); - void destroyTrack_l(const sp& track); - void removeTrack_l(const sp& track); - - void readOutputParameters(); - - virtual void dumpInternals(int fd, const Vector& args); - void dumpTracks(int fd, const Vector& args); - - SortedVector< sp > mTracks; - // mStreamTypes[] uses 1 additional stream type internally for the OutputTrack used by - // DuplicatingThread - stream_type_t mStreamTypes[AUDIO_STREAM_CNT + 1]; - AudioStreamOut *mOutput; - - float mMasterVolume; - nsecs_t mLastWriteTime; - int mNumWrites; - int mNumDelayedWrites; - bool mInWrite; - - // FIXME rename these former local variables of threadLoop to standard "m" names - nsecs_t standbyTime; - size_t mixBufferSize; - - // cached copies of activeSleepTimeUs() and idleSleepTimeUs() made by cacheParameters_l() - uint32_t activeSleepTime; - uint32_t idleSleepTime; - - uint32_t sleepTime; - - // mixer status returned by prepareTracks_l() - mixer_state mMixerStatus; // current cycle - // previous cycle when in prepareTracks_l() - mixer_state mMixerStatusIgnoringFastTracks; - // FIXME or a separate ready state per track - - // FIXME move these declarations into the specific sub-class that needs them - // MIXER only - uint32_t sleepTimeShift; - - // same as AudioFlinger::mStandbyTimeInNsecs except for DIRECT which uses a shorter value - nsecs_t standbyDelay; - - // MIXER only - nsecs_t maxPeriod; - - // DUPLICATING only - uint32_t writeFrames; - - private: - // The HAL output sink is treated as non-blocking, but current implementation is blocking - sp mOutputSink; - // If a fast mixer is present, the blocking pipe sink, otherwise clear - 
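// [Editorial sketch, not part of this patch] getTrackName_l() above now takes
// the audio session ID in addition to the channel mask, so the mixer can record
// which session each allocated track slot belongs to. A minimal illustration of
// such an allocator using a free-slot bitmask; the class, limits and builtin
// (__builtin_ctz, GCC/Clang) are hypothetical, not the AudioMixer implementation:
#include <cstdint>

class TrackSlots {
public:
    static const int kMaxTracks = 32;

    // Returns a slot index in [0, kMaxTracks) or -1 if none is free.
    int allocate(uint32_t channelMask, int sessionId) {
        uint32_t free = ~mUsed;                 // bit i set => slot i is free
        if (free == 0) {
            return -1;
        }
        int name = __builtin_ctz(free);         // lowest free slot
        mUsed |= 1u << name;
        mChannelMask[name] = channelMask;
        mSessionId[name]   = sessionId;         // kept with the slot for later use
        return name;
    }

    void release(int name) { mUsed &= ~(1u << name); }

private:
    uint32_t mUsed = 0;
    uint32_t mChannelMask[kMaxTracks] = {};
    int      mSessionId[kMaxTracks]   = {};
};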
sp mPipeSink; - // The current sink for the normal mixer to write it's (sub)mix, mOutputSink or mPipeSink - sp mNormalSink; - // For dumpsys - sp mTeeSink; - sp mTeeSource; - uint32_t mScreenState; // cached copy of gScreenState - public: - virtual bool hasFastMixer() const = 0; - virtual FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex) const - { FastTrackUnderruns dummy; return dummy; } - - protected: - // accessed by both binder threads and within threadLoop(), lock on mutex needed - unsigned mFastTrackAvailMask; // bit i set if fast track [i] is available - - }; - - class MixerThread : public PlaybackThread { - public: - MixerThread(const sp& audioFlinger, - AudioStreamOut* output, - audio_io_handle_t id, - audio_devices_t device, - type_t type = MIXER); - virtual ~MixerThread(); - - // Thread virtuals - - virtual bool checkForNewParameters_l(); - virtual void dumpInternals(int fd, const Vector& args); - - protected: - virtual mixer_state prepareTracks_l(Vector< sp > *tracksToRemove); - virtual int getTrackName_l(audio_channel_mask_t channelMask, int sessionId); - virtual void deleteTrackName_l(int name); - virtual uint32_t idleSleepTimeUs() const; - virtual uint32_t suspendSleepTimeUs() const; - virtual void cacheParameters_l(); - - // threadLoop snippets - virtual void threadLoop_write(); - virtual void threadLoop_standby(); - virtual void threadLoop_mix(); - virtual void threadLoop_sleepTime(); - virtual void threadLoop_removeTracks(const Vector< sp >& tracksToRemove); - virtual uint32_t correctLatency_l(uint32_t latency) const; - - AudioMixer* mAudioMixer; // normal mixer - private: - // one-time initialization, no locks required - FastMixer* mFastMixer; // non-NULL if there is also a fast mixer - sp mAudioWatchdog; // non-0 if there is an audio watchdog thread - - // contents are not guaranteed to be consistent, no locks required - FastMixerDumpState mFastMixerDumpState; -#ifdef STATE_QUEUE_DUMP - StateQueueObserverDump mStateQueueObserverDump; - StateQueueMutatorDump mStateQueueMutatorDump; -#endif - AudioWatchdogDump mAudioWatchdogDump; - - // accessible only within the threadLoop(), no locks required - // mFastMixer->sq() // for mutating and pushing state - int32_t mFastMixerFutex; // for cold idle - - public: - virtual bool hasFastMixer() const { return mFastMixer != NULL; } - virtual FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex) const { - ALOG_ASSERT(fastIndex < FastMixerState::kMaxFastTracks); - return mFastMixerDumpState.mTracks[fastIndex].mUnderruns; - } - }; - - class DirectOutputThread : public PlaybackThread { - public: - - DirectOutputThread(const sp& audioFlinger, AudioStreamOut* output, - audio_io_handle_t id, audio_devices_t device); - virtual ~DirectOutputThread(); - - // Thread virtuals - - virtual bool checkForNewParameters_l(); - - protected: - virtual int getTrackName_l(audio_channel_mask_t channelMask, int sessionId); - virtual void deleteTrackName_l(int name); - virtual uint32_t activeSleepTimeUs() const; - virtual uint32_t idleSleepTimeUs() const; - virtual uint32_t suspendSleepTimeUs() const; - virtual void cacheParameters_l(); - - // threadLoop snippets - virtual mixer_state prepareTracks_l(Vector< sp > *tracksToRemove); - virtual void threadLoop_mix(); - virtual void threadLoop_sleepTime(); - - private: - // volumes last sent to audio HAL with stream->set_volume() - float mLeftVolFloat; - float mRightVolFloat; - - // prepareTracks_l() tells threadLoop_mix() the name of the single active track - sp mActiveTrack; - public: - virtual 
bool hasFastMixer() const { return false; } - }; - - class DuplicatingThread : public MixerThread { - public: - DuplicatingThread(const sp& audioFlinger, MixerThread* mainThread, - audio_io_handle_t id); - virtual ~DuplicatingThread(); - - // Thread virtuals - void addOutputTrack(MixerThread* thread); - void removeOutputTrack(MixerThread* thread); - uint32_t waitTimeMs() const { return mWaitTimeMs; } - protected: - virtual uint32_t activeSleepTimeUs() const; - - private: - bool outputsReady(const SortedVector< sp > &outputTracks); - protected: - // threadLoop snippets - virtual void threadLoop_mix(); - virtual void threadLoop_sleepTime(); - virtual void threadLoop_write(); - virtual void threadLoop_standby(); - virtual void cacheParameters_l(); - - private: - // called from threadLoop, addOutputTrack, removeOutputTrack - virtual void updateWaitTime_l(); - protected: - virtual void saveOutputTracks(); - virtual void clearOutputTracks(); - private: - - uint32_t mWaitTimeMs; - SortedVector < sp > outputTracks; - SortedVector < sp > mOutputTracks; - public: - virtual bool hasFastMixer() const { return false; } - }; - - PlaybackThread *checkPlaybackThread_l(audio_io_handle_t output) const; - MixerThread *checkMixerThread_l(audio_io_handle_t output) const; - RecordThread *checkRecordThread_l(audio_io_handle_t input) const; - // no range check, AudioFlinger::mLock held - bool streamMute_l(audio_stream_type_t stream) const - { return mStreamTypes[stream].mute; } - // no range check, doesn't check per-thread stream volume, AudioFlinger::mLock held - float streamVolume_l(audio_stream_type_t stream) const - { return mStreamTypes[stream].volume; } - void audioConfigChanged_l(int event, audio_io_handle_t ioHandle, const void *param2); - - // allocate an audio_io_handle_t, session ID, or effect ID - uint32_t nextUniqueId(); - status_t moveEffectChain_l(int sessionId, - PlaybackThread *srcThread, - PlaybackThread *dstThread, - bool reRegister); - // return thread associated with primary hardware device, or NULL - PlaybackThread *primaryPlaybackThread_l() const; - audio_devices_t primaryOutputDevice_l() const; +#include "Threads.h" - sp getEffectThread_l(int sessionId, int EffectId); +#include "Effects.h" // server side of the client's IAudioTrack class TrackHandle : public android::BnAudioTrack { @@ -1449,165 +400,6 @@ public: const sp mTrack; }; - void removeClient_l(pid_t pid); - void removeNotificationClient(pid_t pid); - - - // record thread - class RecordThread : public ThreadBase, public AudioBufferProvider - // derives from AudioBufferProvider interface for use by resampler - { - public: - - // record track - class RecordTrack : public TrackBase { - public: - RecordTrack(RecordThread *thread, - const sp& client, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - int sessionId); - virtual ~RecordTrack(); - - virtual status_t start(AudioSystem::sync_event_t event, int triggerSession); - virtual void stop(); - - void destroy(); - - // clear the buffer overflow flag - void clearOverflow() { mOverflow = false; } - // set the buffer overflow flag and return previous value - bool setOverflow() { bool tmp = mOverflow; mOverflow = true; - return tmp; } - - static void appendDumpHeader(String8& result); - void dump(char* buffer, size_t size); - - virtual bool isOut() const; - - private: - friend class AudioFlinger; // for mState - - RecordTrack(const RecordTrack&); - RecordTrack& operator = (const RecordTrack&); - - // AudioBufferProvider interface - 
virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, - int64_t pts = kInvalidPTS); - // releaseBuffer() not overridden - - bool mOverflow; // overflow on most recent attempt to fill client buffer - }; - - RecordThread(const sp& audioFlinger, - AudioStreamIn *input, - uint32_t sampleRate, - audio_channel_mask_t channelMask, - audio_io_handle_t id, - audio_devices_t device, - const sp& teeSink); - virtual ~RecordThread(); - - // no addTrack_l ? - void destroyTrack_l(const sp& track); - void removeTrack_l(const sp& track); - - void dumpInternals(int fd, const Vector& args); - void dumpTracks(int fd, const Vector& args); - - // Thread virtuals - virtual bool threadLoop(); - virtual status_t readyToRun(); - - // RefBase - virtual void onFirstRef(); - - virtual status_t initCheck() const { return (mInput == NULL) ? NO_INIT : NO_ERROR; } - sp createRecordTrack_l( - const sp& client, - uint32_t sampleRate, - audio_format_t format, - audio_channel_mask_t channelMask, - size_t frameCount, - int sessionId, - IAudioFlinger::track_flags_t flags, - pid_t tid, - status_t *status); - - status_t start(RecordTrack* recordTrack, - AudioSystem::sync_event_t event, - int triggerSession); - - // ask the thread to stop the specified track, and - // return true if the caller should then do it's part of the stopping process - bool stop_l(RecordTrack* recordTrack); - - void dump(int fd, const Vector& args); - AudioStreamIn* clearInput(); - virtual audio_stream_t* stream() const; - - // AudioBufferProvider interface - virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts); - virtual void releaseBuffer(AudioBufferProvider::Buffer* buffer); - - virtual bool checkForNewParameters_l(); - virtual String8 getParameters(const String8& keys); - virtual void audioConfigChanged_l(int event, int param = 0); - void readInputParameters(); - virtual unsigned int getInputFramesLost(); - - virtual status_t addEffectChain_l(const sp& chain); - virtual size_t removeEffectChain_l(const sp& chain); - virtual uint32_t hasAudioSession(int sessionId) const; - - // Return the set of unique session IDs across all tracks. - // The keys are the session IDs, and the associated values are meaningless. - // FIXME replace by Set [and implement Bag/Multiset for other uses]. - KeyedVector sessionIds() const; - - virtual status_t setSyncEvent(const sp& event); - virtual bool isValidSyncEvent(const sp& event) const; - - static void syncStartEventCallback(const wp& event); - void handleSyncStartEvent(const sp& event); - - private: - void clearSyncStartEvent(); - - // Enter standby if not already in standby, and set mStandby flag - void standby(); - - // Call the HAL standby method unconditionally, and don't change mStandby flag - void inputStandBy(); - - AudioStreamIn *mInput; - SortedVector < sp > mTracks; - // mActiveTrack has dual roles: it indicates the current active track, and - // is used together with mStartStopCond to indicate start()/stop() progress - sp mActiveTrack; - Condition mStartStopCond; - AudioResampler *mResampler; - int32_t *mRsmpOutBuffer; - int16_t *mRsmpInBuffer; - size_t mRsmpInIndex; - size_t mInputBytes; - const uint32_t mReqChannelCount; - const uint32_t mReqSampleRate; - ssize_t mBytesRead; - // sync event triggering actual audio capture. Frames read before this event will - // be dropped and therefore not read by the application. - sp mSyncStartEvent; - // number of captured frames to drop after the start sync event has been received. 
- // when < 0, maximum frames to drop before starting capture even if sync event is - // not received - ssize_t mFramestoDrop; - - // For dumpsys - const sp mTeeSink; - }; - // server side of the client's IAudioRecord class RecordHandle : public android::BnAudioRecord { public: @@ -1625,344 +417,33 @@ public: void stop_nonvirtual(); }; - //--- Audio Effect Management - - // EffectModule and EffectChain classes both have their own mutex to protect - // state changes or resource modifications. Always respect the following order - // if multiple mutexes must be acquired to avoid cross deadlock: - // AudioFlinger -> ThreadBase -> EffectChain -> EffectModule - - // The EffectModule class is a wrapper object controlling the effect engine implementation - // in the effect library. It prevents concurrent calls to process() and command() functions - // from different client threads. It keeps a list of EffectHandle objects corresponding - // to all client applications using this effect and notifies applications of effect state, - // control or parameter changes. It manages the activation state machine to send appropriate - // reset, enable, disable commands to effect engine and provide volume - // ramping when effects are activated/deactivated. - // When controlling an auxiliary effect, the EffectModule also provides an input buffer used by - // the attached track(s) to accumulate their auxiliary channel. - class EffectModule : public RefBase { - public: - EffectModule(ThreadBase *thread, - const wp& chain, - effect_descriptor_t *desc, - int id, - int sessionId); - virtual ~EffectModule(); - - enum effect_state { - IDLE, - RESTART, - STARTING, - ACTIVE, - STOPPING, - STOPPED, - DESTROYED - }; - - int id() const { return mId; } - void process(); - void updateState(); - status_t command(uint32_t cmdCode, - uint32_t cmdSize, - void *pCmdData, - uint32_t *replySize, - void *pReplyData); - - void reset_l(); - status_t configure(); - status_t init(); - effect_state state() const { - return mState; - } - uint32_t status() { - return mStatus; - } - int sessionId() const { - return mSessionId; - } - status_t setEnabled(bool enabled); - status_t setEnabled_l(bool enabled); - bool isEnabled() const; - bool isProcessEnabled() const; - - void setInBuffer(int16_t *buffer) { mConfig.inputCfg.buffer.s16 = buffer; } - int16_t *inBuffer() { return mConfig.inputCfg.buffer.s16; } - void setOutBuffer(int16_t *buffer) { mConfig.outputCfg.buffer.s16 = buffer; } - int16_t *outBuffer() { return mConfig.outputCfg.buffer.s16; } - void setChain(const wp& chain) { mChain = chain; } - void setThread(const wp& thread) { mThread = thread; } - const wp& thread() { return mThread; } - - status_t addHandle(EffectHandle *handle); - size_t disconnect(EffectHandle *handle, bool unpinIfLast); - size_t removeHandle(EffectHandle *handle); - - const effect_descriptor_t& desc() const { return mDescriptor; } - wp& chain() { return mChain; } - - status_t setDevice(audio_devices_t device); - status_t setVolume(uint32_t *left, uint32_t *right, bool controller); - status_t setMode(audio_mode_t mode); - status_t setAudioSource(audio_source_t source); - status_t start(); - status_t stop(); - void setSuspended(bool suspended); - bool suspended() const; - - EffectHandle* controlHandle_l(); - - bool isPinned() const { return mPinned; } - void unPin() { mPinned = false; } - bool purgeHandles(); - void lock() { mLock.lock(); } - void unlock() { mLock.unlock(); } - - void dump(int fd, const Vector& args); - - protected: - friend class AudioFlinger; // 
for mHandles - bool mPinned; - - // Maximum time allocated to effect engines to complete the turn off sequence - static const uint32_t MAX_DISABLE_TIME_MS = 10000; - - EffectModule(const EffectModule&); - EffectModule& operator = (const EffectModule&); - - status_t start_l(); - status_t stop_l(); - -mutable Mutex mLock; // mutex for process, commands and handles list protection - wp mThread; // parent thread - wp mChain; // parent effect chain - const int mId; // this instance unique ID - const int mSessionId; // audio session ID - const effect_descriptor_t mDescriptor;// effect descriptor received from effect engine - effect_config_t mConfig; // input and output audio configuration - effect_handle_t mEffectInterface; // Effect module C API - status_t mStatus; // initialization status - effect_state mState; // current activation state - Vector mHandles; // list of client handles - // First handle in mHandles has highest priority and controls the effect module - uint32_t mMaxDisableWaitCnt; // maximum grace period before forcing an effect off after - // sending disable command. - uint32_t mDisableWaitCnt; // current process() calls count during disable period. - bool mSuspended; // effect is suspended: temporarily disabled by framework - }; - - // The EffectHandle class implements the IEffect interface. It provides resources - // to receive parameter updates, keeps track of effect control - // ownership and state and has a pointer to the EffectModule object it is controlling. - // There is one EffectHandle object for each application controlling (or using) - // an effect module. - // The EffectHandle is obtained by calling AudioFlinger::createEffect(). - class EffectHandle: public android::BnEffect { - public: - - EffectHandle(const sp& effect, - const sp& client, - const sp& effectClient, - int32_t priority); - virtual ~EffectHandle(); - - // IEffect - virtual status_t enable(); - virtual status_t disable(); - virtual status_t command(uint32_t cmdCode, - uint32_t cmdSize, - void *pCmdData, - uint32_t *replySize, - void *pReplyData); - virtual void disconnect(); - private: - void disconnect(bool unpinIfLast); - public: - virtual sp getCblk() const { return mCblkMemory; } - virtual status_t onTransact(uint32_t code, const Parcel& data, - Parcel* reply, uint32_t flags); - - - // Give or take control of effect module - // - hasControl: true if control is given, false if removed - // - signal: true client app should be signaled of change, false otherwise - // - enabled: state of the effect when control is passed - void setControl(bool hasControl, bool signal, bool enabled); - void commandExecuted(uint32_t cmdCode, - uint32_t cmdSize, - void *pCmdData, - uint32_t replySize, - void *pReplyData); - void setEnabled(bool enabled); - bool enabled() const { return mEnabled; } - - // Getters - int id() const { return mEffect->id(); } - int priority() const { return mPriority; } - bool hasControl() const { return mHasControl; } - sp effect() const { return mEffect; } - // destroyed_l() must be called with the associated EffectModule mLock held - bool destroyed_l() const { return mDestroyed; } - - void dump(char* buffer, size_t size); - - protected: - friend class AudioFlinger; // for mEffect, mHasControl, mEnabled - EffectHandle(const EffectHandle&); - EffectHandle& operator =(const EffectHandle&); - - sp mEffect; // pointer to controlled EffectModule - sp mEffectClient; // callback interface for client notifications - /*const*/ sp mClient; // client for shared memory allocation, see disconnect() - sp 
mCblkMemory; // shared memory for control block - effect_param_cblk_t* mCblk; // control block for deferred parameter setting via - // shared memory - uint8_t* mBuffer; // pointer to parameter area in shared memory - int mPriority; // client application priority to control the effect - bool mHasControl; // true if this handle is controlling the effect - bool mEnabled; // cached enable state: needed when the effect is - // restored after being suspended - bool mDestroyed; // Set to true by destructor. Access with EffectModule - // mLock held - }; - - // the EffectChain class represents a group of effects associated to one audio session. - // There can be any number of EffectChain objects per output mixer thread (PlaybackThread). - // The EffecChain with session ID 0 contains global effects applied to the output mix. - // Effects in this chain can be insert or auxiliary. Effects in other chains (attached to - // tracks) are insert only. The EffectChain maintains an ordered list of effect module, the - // order corresponding in the effect process order. When attached to a track (session ID != 0), - // it also provide it's own input buffer used by the track as accumulation buffer. - class EffectChain : public RefBase { - public: - EffectChain(const wp& wThread, int sessionId); - EffectChain(ThreadBase *thread, int sessionId); - virtual ~EffectChain(); - - // special key used for an entry in mSuspendedEffects keyed vector - // corresponding to a suspend all request. - static const int kKeyForSuspendAll = 0; - - // minimum duration during which we force calling effect process when last track on - // a session is stopped or removed to allow effect tail to be rendered - static const int kProcessTailDurationMs = 1000; - - void process_l(); - - void lock() { - mLock.lock(); - } - void unlock() { - mLock.unlock(); - } - - status_t addEffect_l(const sp& handle); - size_t removeEffect_l(const sp& handle); - - int sessionId() const { return mSessionId; } - void setSessionId(int sessionId) { mSessionId = sessionId; } - - sp getEffectFromDesc_l(effect_descriptor_t *descriptor); - sp getEffectFromId_l(int id); - sp getEffectFromType_l(const effect_uuid_t *type); - bool setVolume_l(uint32_t *left, uint32_t *right); - void setDevice_l(audio_devices_t device); - void setMode_l(audio_mode_t mode); - void setAudioSource_l(audio_source_t source); - - void setInBuffer(int16_t *buffer, bool ownsBuffer = false) { - mInBuffer = buffer; - mOwnInBuffer = ownsBuffer; - } - int16_t *inBuffer() const { - return mInBuffer; - } - void setOutBuffer(int16_t *buffer) { - mOutBuffer = buffer; - } - int16_t *outBuffer() const { - return mOutBuffer; - } - - void incTrackCnt() { android_atomic_inc(&mTrackCnt); } - void decTrackCnt() { android_atomic_dec(&mTrackCnt); } - int32_t trackCnt() const { return android_atomic_acquire_load(&mTrackCnt); } - - void incActiveTrackCnt() { android_atomic_inc(&mActiveTrackCnt); - mTailBufferCount = mMaxTailBuffers; } - void decActiveTrackCnt() { android_atomic_dec(&mActiveTrackCnt); } - int32_t activeTrackCnt() const { return android_atomic_acquire_load(&mActiveTrackCnt); } - - uint32_t strategy() const { return mStrategy; } - void setStrategy(uint32_t strategy) - { mStrategy = strategy; } - - // suspend effect of the given type - void setEffectSuspended_l(const effect_uuid_t *type, - bool suspend); - // suspend all eligible effects - void setEffectSuspendedAll_l(bool suspend); - // check if effects should be suspend or restored when a given effect is enable or disabled - void 
checkSuspendOnEffectEnabled(const sp& effect, - bool enabled); - - void clearInputBuffer(); + PlaybackThread *checkPlaybackThread_l(audio_io_handle_t output) const; + MixerThread *checkMixerThread_l(audio_io_handle_t output) const; + RecordThread *checkRecordThread_l(audio_io_handle_t input) const; + // no range check, AudioFlinger::mLock held + bool streamMute_l(audio_stream_type_t stream) const + { return mStreamTypes[stream].mute; } + // no range check, doesn't check per-thread stream volume, AudioFlinger::mLock held + float streamVolume_l(audio_stream_type_t stream) const + { return mStreamTypes[stream].volume; } + void audioConfigChanged_l(int event, audio_io_handle_t ioHandle, const void *param2); - void dump(int fd, const Vector& args); + // allocate an audio_io_handle_t, session ID, or effect ID + uint32_t nextUniqueId(); - protected: - friend class AudioFlinger; // for mThread, mEffects - EffectChain(const EffectChain&); - EffectChain& operator =(const EffectChain&); + status_t moveEffectChain_l(int sessionId, + PlaybackThread *srcThread, + PlaybackThread *dstThread, + bool reRegister); + // return thread associated with primary hardware device, or NULL + PlaybackThread *primaryPlaybackThread_l() const; + audio_devices_t primaryOutputDevice_l() const; - class SuspendedEffectDesc : public RefBase { - public: - SuspendedEffectDesc() : mRefCount(0) {} + sp getEffectThread_l(int sessionId, int EffectId); - int mRefCount; - effect_uuid_t mType; - wp mEffect; - }; - // get a list of effect modules to suspend when an effect of the type - // passed is enabled. - void getSuspendEligibleEffects(Vector< sp > &effects); - - // get an effect module if it is currently enable - sp getEffectIfEnabled(const effect_uuid_t *type); - // true if the effect whose descriptor is passed can be suspended - // OEMs can modify the rules implemented in this method to exclude specific effect - // types or implementations from the suspend/restore mechanism. - bool isEffectEligibleForSuspend(const effect_descriptor_t& desc); - - void clearInputBuffer_l(sp thread); - - wp mThread; // parent mixer thread - Mutex mLock; // mutex protecting effect list - Vector< sp > mEffects; // list of effect modules - int mSessionId; // audio session ID - int16_t *mInBuffer; // chain input buffer - int16_t *mOutBuffer; // chain output buffer - - // 'volatile' here means these are accessed with atomic operations instead of mutex - volatile int32_t mActiveTrackCnt; // number of active tracks connected - volatile int32_t mTrackCnt; // number of tracks connected - - int32_t mTailBufferCount; // current effect tail buffer count - int32_t mMaxTailBuffers; // maximum effect tail buffers - bool mOwnInBuffer; // true if the chain owns its input buffer - int mVolumeCtrlIdx; // index of insert effect having control over volume - uint32_t mLeftVolume; // previous volume on left channel - uint32_t mRightVolume; // previous volume on right channel - uint32_t mNewLeftVolume; // new volume on left channel - uint32_t mNewRightVolume; // new volume on right channel - uint32_t mStrategy; // strategy for this effect chain - // mSuspendedEffects lists all effects currently suspended in the chain. - // Use effect type UUID timelow field as key. There is no real risk of identical - // timeLow fields among effect type UUIDs. - // Updated by updateSuspendedSessions_l() only. 
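// [Editorial sketch, not part of this patch] mMaxTailBuffers / mTailBufferCount
// below express kProcessTailDurationMs (the effect "tail" still rendered after
// the last active track stops) as a number of mixer buffers. One plausible
// derivation from the thread's sample rate and frame count, presented as an
// assumption rather than the exact AudioFlinger formula:
#include <cstddef>
#include <cstdint>

static int32_t tailBufferCount(int processTailDurationMs,
                               uint32_t sampleRateHz,
                               size_t framesPerBuffer) {
    if (framesPerBuffer == 0) {
        return 0;
    }
    // total tail frames = duration * sample rate; divide by the buffer size,
    // rounding up so the tail is never cut short.
    const uint64_t tailFrames =
            (uint64_t)processTailDurationMs * sampleRateHz / 1000;
    return (int32_t)((tailFrames + framesPerBuffer - 1) / framesPerBuffer);
}

// Example: 1000 ms at 48000 Hz with 960-frame buffers gives 50 tail buffers.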
- KeyedVector< int, sp > mSuspendedEffects; - }; + void removeClient_l(pid_t pid); + void removeNotificationClient(pid_t pid); class AudioHwDevice { public: @@ -2108,6 +589,7 @@ public: static void dumpTee(int fd, const sp& source, audio_io_handle_t id = 0); }; +#undef INCLUDING_FROM_AUDIOFLINGER_H // ---------------------------------------------------------------------------- diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp new file mode 100644 index 0000000..74ba59e --- /dev/null +++ b/services/audioflinger/Effects.cpp @@ -0,0 +1,1684 @@ +/* +** +** Copyright 2012, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + + +#define LOG_TAG "AudioFlinger" +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include + +#include "AudioFlinger.h" +#include "ServiceUtilities.h" + +// ---------------------------------------------------------------------------- + +// Note: the following macro is used for extremely verbose logging message. In +// order to run with ALOG_ASSERT turned on, we need to have LOG_NDEBUG set to +// 0; but one side effect of this is to turn all LOGV's as well. Some messages +// are so verbose that we want to suppress them even when we have ALOG_ASSERT +// turned on. Do not uncomment the #def below unless you really know what you +// are doing and want to see all of the extremely verbose messages. +//#define VERY_VERY_VERBOSE_LOGGING +#ifdef VERY_VERY_VERBOSE_LOGGING +#define ALOGVV ALOGV +#else +#define ALOGVV(a...) 
do { } while(0) +#endif + +namespace android { + +// ---------------------------------------------------------------------------- +// EffectModule implementation +// ---------------------------------------------------------------------------- + +#undef LOG_TAG +#define LOG_TAG "AudioFlinger::EffectModule" + +AudioFlinger::EffectModule::EffectModule(ThreadBase *thread, + const wp& chain, + effect_descriptor_t *desc, + int id, + int sessionId) + : mPinned(sessionId > AUDIO_SESSION_OUTPUT_MIX), + mThread(thread), mChain(chain), mId(id), mSessionId(sessionId), + mDescriptor(*desc), + // mConfig is set by configure() and not used before then + mEffectInterface(NULL), + mStatus(NO_INIT), mState(IDLE), + // mMaxDisableWaitCnt is set by configure() and not used before then + // mDisableWaitCnt is set by process() and updateState() and not used before then + mSuspended(false) +{ + ALOGV("Constructor %p", this); + int lStatus; + + // create effect engine from effect factory + mStatus = EffectCreate(&desc->uuid, sessionId, thread->id(), &mEffectInterface); + + if (mStatus != NO_ERROR) { + return; + } + lStatus = init(); + if (lStatus < 0) { + mStatus = lStatus; + goto Error; + } + + ALOGV("Constructor success name %s, Interface %p", mDescriptor.name, mEffectInterface); + return; +Error: + EffectRelease(mEffectInterface); + mEffectInterface = NULL; + ALOGV("Constructor Error %d", mStatus); +} + +AudioFlinger::EffectModule::~EffectModule() +{ + ALOGV("Destructor %p", this); + if (mEffectInterface != NULL) { + if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC || + (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) { + sp thread = mThread.promote(); + if (thread != 0) { + audio_stream_t *stream = thread->stream(); + if (stream != NULL) { + stream->remove_audio_effect(stream, mEffectInterface); + } + } + } + // release effect engine + EffectRelease(mEffectInterface); + } +} + +status_t AudioFlinger::EffectModule::addHandle(EffectHandle *handle) +{ + status_t status; + + Mutex::Autolock _l(mLock); + int priority = handle->priority(); + size_t size = mHandles.size(); + EffectHandle *controlHandle = NULL; + size_t i; + for (i = 0; i < size; i++) { + EffectHandle *h = mHandles[i]; + if (h == NULL || h->destroyed_l()) { + continue; + } + // first non destroyed handle is considered in control + if (controlHandle == NULL) + controlHandle = h; + if (h->priority() <= priority) { + break; + } + } + // if inserted in first place, move effect control from previous owner to this handle + if (i == 0) { + bool enabled = false; + if (controlHandle != NULL) { + enabled = controlHandle->enabled(); + controlHandle->setControl(false/*hasControl*/, true /*signal*/, enabled /*enabled*/); + } + handle->setControl(true /*hasControl*/, false /*signal*/, enabled /*enabled*/); + status = NO_ERROR; + } else { + status = ALREADY_EXISTS; + } + ALOGV("addHandle() %p added handle %p in position %d", this, handle, i); + mHandles.insertAt(handle, i); + return status; +} + +size_t AudioFlinger::EffectModule::removeHandle(EffectHandle *handle) +{ + Mutex::Autolock _l(mLock); + size_t size = mHandles.size(); + size_t i; + for (i = 0; i < size; i++) { + if (mHandles[i] == handle) { + break; + } + } + if (i == size) { + return size; + } + ALOGV("removeHandle() %p removed handle %p in position %d", this, handle, i); + + mHandles.removeAt(i); + // if removed from first place, move effect control from this handle to next in line + if (i == 0) { + EffectHandle *h = controlHandle_l(); + if (h != 
NULL) { + h->setControl(true /*hasControl*/, true /*signal*/ , handle->enabled() /*enabled*/); + } + } + + // Prevent calls to process() and other functions on effect interface from now on. + // The effect engine will be released by the destructor when the last strong reference on + // this object is released which can happen after next process is called. + if (mHandles.size() == 0 && !mPinned) { + mState = DESTROYED; + } + + return mHandles.size(); +} + +// must be called with EffectModule::mLock held +AudioFlinger::EffectHandle *AudioFlinger::EffectModule::controlHandle_l() +{ + // the first valid handle in the list has control over the module + for (size_t i = 0; i < mHandles.size(); i++) { + EffectHandle *h = mHandles[i]; + if (h != NULL && !h->destroyed_l()) { + return h; + } + } + + return NULL; +} + +size_t AudioFlinger::EffectModule::disconnect(EffectHandle *handle, bool unpinIfLast) +{ + ALOGV("disconnect() %p handle %p", this, handle); + // keep a strong reference on this EffectModule to avoid calling the + // destructor before we exit + sp keep(this); + { + sp thread = mThread.promote(); + if (thread != 0) { + thread->disconnectEffect(keep, handle, unpinIfLast); + } + } + return mHandles.size(); +} + +void AudioFlinger::EffectModule::updateState() { + Mutex::Autolock _l(mLock); + + switch (mState) { + case RESTART: + reset_l(); + // FALL THROUGH + + case STARTING: + // clear auxiliary effect input buffer for next accumulation + if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { + memset(mConfig.inputCfg.buffer.raw, + 0, + mConfig.inputCfg.buffer.frameCount*sizeof(int32_t)); + } + start_l(); + mState = ACTIVE; + break; + case STOPPING: + stop_l(); + mDisableWaitCnt = mMaxDisableWaitCnt; + mState = STOPPED; + break; + case STOPPED: + // mDisableWaitCnt is forced to 1 by process() when the engine indicates the end of the + // turn off sequence. 
+ if (--mDisableWaitCnt == 0) { + reset_l(); + mState = IDLE; + } + break; + default: //IDLE , ACTIVE, DESTROYED + break; + } +} + +void AudioFlinger::EffectModule::process() +{ + Mutex::Autolock _l(mLock); + + if (mState == DESTROYED || mEffectInterface == NULL || + mConfig.inputCfg.buffer.raw == NULL || + mConfig.outputCfg.buffer.raw == NULL) { + return; + } + + if (isProcessEnabled()) { + // do 32 bit to 16 bit conversion for auxiliary effect input buffer + if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { + ditherAndClamp(mConfig.inputCfg.buffer.s32, + mConfig.inputCfg.buffer.s32, + mConfig.inputCfg.buffer.frameCount/2); + } + + // do the actual processing in the effect engine + int ret = (*mEffectInterface)->process(mEffectInterface, + &mConfig.inputCfg.buffer, + &mConfig.outputCfg.buffer); + + // force transition to IDLE state when engine is ready + if (mState == STOPPED && ret == -ENODATA) { + mDisableWaitCnt = 1; + } + + // clear auxiliary effect input buffer for next accumulation + if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { + memset(mConfig.inputCfg.buffer.raw, 0, + mConfig.inputCfg.buffer.frameCount*sizeof(int32_t)); + } + } else if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_INSERT && + mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) { + // If an insert effect is idle and input buffer is different from output buffer, + // accumulate input onto output + sp chain = mChain.promote(); + if (chain != 0 && chain->activeTrackCnt() != 0) { + size_t frameCnt = mConfig.inputCfg.buffer.frameCount * 2; //always stereo here + int16_t *in = mConfig.inputCfg.buffer.s16; + int16_t *out = mConfig.outputCfg.buffer.s16; + for (size_t i = 0; i < frameCnt; i++) { + out[i] = clamp16((int32_t)out[i] + (int32_t)in[i]); + } + } + } +} + +void AudioFlinger::EffectModule::reset_l() +{ + if (mEffectInterface == NULL) { + return; + } + (*mEffectInterface)->command(mEffectInterface, EFFECT_CMD_RESET, 0, NULL, 0, NULL); +} + +status_t AudioFlinger::EffectModule::configure() +{ + if (mEffectInterface == NULL) { + return NO_INIT; + } + + sp thread = mThread.promote(); + if (thread == 0) { + return DEAD_OBJECT; + } + + // TODO: handle configuration of effects replacing track process + audio_channel_mask_t channelMask = thread->channelMask(); + + if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { + mConfig.inputCfg.channels = AUDIO_CHANNEL_OUT_MONO; + } else { + mConfig.inputCfg.channels = channelMask; + } + mConfig.outputCfg.channels = channelMask; + mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT; + mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT; + mConfig.inputCfg.samplingRate = thread->sampleRate(); + mConfig.outputCfg.samplingRate = mConfig.inputCfg.samplingRate; + mConfig.inputCfg.bufferProvider.cookie = NULL; + mConfig.inputCfg.bufferProvider.getBuffer = NULL; + mConfig.inputCfg.bufferProvider.releaseBuffer = NULL; + mConfig.outputCfg.bufferProvider.cookie = NULL; + mConfig.outputCfg.bufferProvider.getBuffer = NULL; + mConfig.outputCfg.bufferProvider.releaseBuffer = NULL; + mConfig.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ; + // Insert effect: + // - in session AUDIO_SESSION_OUTPUT_MIX or AUDIO_SESSION_OUTPUT_STAGE, + // always overwrites output buffer: input buffer == output buffer + // - in other sessions: + // last effect in the chain accumulates in output buffer: input buffer != output buffer + // other effect: overwrites output buffer: input buffer == output 
buffer + // Auxiliary effect: + // accumulates in output buffer: input buffer != output buffer + // Therefore: accumulate <=> input buffer != output buffer + if (mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) { + mConfig.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE; + } else { + mConfig.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_WRITE; + } + mConfig.inputCfg.mask = EFFECT_CONFIG_ALL; + mConfig.outputCfg.mask = EFFECT_CONFIG_ALL; + mConfig.inputCfg.buffer.frameCount = thread->frameCount(); + mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount; + + ALOGV("configure() %p thread %p buffer %p framecount %d", + this, thread.get(), mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount); + + status_t cmdStatus; + uint32_t size = sizeof(int); + status_t status = (*mEffectInterface)->command(mEffectInterface, + EFFECT_CMD_SET_CONFIG, + sizeof(effect_config_t), + &mConfig, + &size, + &cmdStatus); + if (status == 0) { + status = cmdStatus; + } + + if (status == 0 && + (memcmp(&mDescriptor.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0)) { + uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2]; + effect_param_t *p = (effect_param_t *)buf32; + + p->psize = sizeof(uint32_t); + p->vsize = sizeof(uint32_t); + size = sizeof(int); + *(int32_t *)p->data = VISUALIZER_PARAM_LATENCY; + + uint32_t latency = 0; + PlaybackThread *pbt = thread->mAudioFlinger->checkPlaybackThread_l(thread->mId); + if (pbt != NULL) { + latency = pbt->latency_l(); + } + + *((int32_t *)p->data + 1)= latency; + (*mEffectInterface)->command(mEffectInterface, + EFFECT_CMD_SET_PARAM, + sizeof(effect_param_t) + 8, + &buf32, + &size, + &cmdStatus); + } + + mMaxDisableWaitCnt = (MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate) / + (1000 * mConfig.outputCfg.buffer.frameCount); + + return status; +} + +status_t AudioFlinger::EffectModule::init() +{ + Mutex::Autolock _l(mLock); + if (mEffectInterface == NULL) { + return NO_INIT; + } + status_t cmdStatus; + uint32_t size = sizeof(status_t); + status_t status = (*mEffectInterface)->command(mEffectInterface, + EFFECT_CMD_INIT, + 0, + NULL, + &size, + &cmdStatus); + if (status == 0) { + status = cmdStatus; + } + return status; +} + +status_t AudioFlinger::EffectModule::start() +{ + Mutex::Autolock _l(mLock); + return start_l(); +} + +status_t AudioFlinger::EffectModule::start_l() +{ + if (mEffectInterface == NULL) { + return NO_INIT; + } + status_t cmdStatus; + uint32_t size = sizeof(status_t); + status_t status = (*mEffectInterface)->command(mEffectInterface, + EFFECT_CMD_ENABLE, + 0, + NULL, + &size, + &cmdStatus); + if (status == 0) { + status = cmdStatus; + } + if (status == 0 && + ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC || + (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC)) { + sp thread = mThread.promote(); + if (thread != 0) { + audio_stream_t *stream = thread->stream(); + if (stream != NULL) { + stream->add_audio_effect(stream, mEffectInterface); + } + } + } + return status; +} + +status_t AudioFlinger::EffectModule::stop() +{ + Mutex::Autolock _l(mLock); + return stop_l(); +} + +status_t AudioFlinger::EffectModule::stop_l() +{ + if (mEffectInterface == NULL) { + return NO_INIT; + } + status_t cmdStatus; + uint32_t size = sizeof(status_t); + status_t status = (*mEffectInterface)->command(mEffectInterface, + EFFECT_CMD_DISABLE, + 0, + NULL, + &size, + &cmdStatus); + if (status == 0) { + status = cmdStatus; + } + if (status == 0 && + ((mDescriptor.flags & 
EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC || + (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC)) { + sp thread = mThread.promote(); + if (thread != 0) { + audio_stream_t *stream = thread->stream(); + if (stream != NULL) { + stream->remove_audio_effect(stream, mEffectInterface); + } + } + } + return status; +} + +status_t AudioFlinger::EffectModule::command(uint32_t cmdCode, + uint32_t cmdSize, + void *pCmdData, + uint32_t *replySize, + void *pReplyData) +{ + Mutex::Autolock _l(mLock); + ALOGVV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface); + + if (mState == DESTROYED || mEffectInterface == NULL) { + return NO_INIT; + } + status_t status = (*mEffectInterface)->command(mEffectInterface, + cmdCode, + cmdSize, + pCmdData, + replySize, + pReplyData); + if (cmdCode != EFFECT_CMD_GET_PARAM && status == NO_ERROR) { + uint32_t size = (replySize == NULL) ? 0 : *replySize; + for (size_t i = 1; i < mHandles.size(); i++) { + EffectHandle *h = mHandles[i]; + if (h != NULL && !h->destroyed_l()) { + h->commandExecuted(cmdCode, cmdSize, pCmdData, size, pReplyData); + } + } + } + return status; +} + +status_t AudioFlinger::EffectModule::setEnabled(bool enabled) +{ + Mutex::Autolock _l(mLock); + return setEnabled_l(enabled); +} + +// must be called with EffectModule::mLock held +status_t AudioFlinger::EffectModule::setEnabled_l(bool enabled) +{ + + ALOGV("setEnabled %p enabled %d", this, enabled); + + if (enabled != isEnabled()) { + status_t status = AudioSystem::setEffectEnabled(mId, enabled); + if (enabled && status != NO_ERROR) { + return status; + } + + switch (mState) { + // going from disabled to enabled + case IDLE: + mState = STARTING; + break; + case STOPPED: + mState = RESTART; + break; + case STOPPING: + mState = ACTIVE; + break; + + // going from enabled to disabled + case RESTART: + mState = STOPPED; + break; + case STARTING: + mState = IDLE; + break; + case ACTIVE: + mState = STOPPING; + break; + case DESTROYED: + return NO_ERROR; // simply ignore as we are being destroyed + } + for (size_t i = 1; i < mHandles.size(); i++) { + EffectHandle *h = mHandles[i]; + if (h != NULL && !h->destroyed_l()) { + h->setEnabled(enabled); + } + } + } + return NO_ERROR; +} + +bool AudioFlinger::EffectModule::isEnabled() const +{ + switch (mState) { + case RESTART: + case STARTING: + case ACTIVE: + return true; + case IDLE: + case STOPPING: + case STOPPED: + case DESTROYED: + default: + return false; + } +} + +bool AudioFlinger::EffectModule::isProcessEnabled() const +{ + switch (mState) { + case RESTART: + case ACTIVE: + case STOPPING: + case STOPPED: + return true; + case IDLE: + case STARTING: + case DESTROYED: + default: + return false; + } +} + +status_t AudioFlinger::EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller) +{ + Mutex::Autolock _l(mLock); + status_t status = NO_ERROR; + + // Send volume indication if EFFECT_FLAG_VOLUME_IND is set and read back altered volume + // if controller flag is set (Note that controller == TRUE => EFFECT_FLAG_VOLUME_CTRL set) + if (isProcessEnabled() && + ((mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_CTRL || + (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_IND)) { + status_t cmdStatus; + uint32_t volume[2]; + uint32_t *pVolume = NULL; + uint32_t size = sizeof(volume); + volume[0] = *left; + volume[1] = *right; + if (controller) { + pVolume = volume; + } + status = (*mEffectInterface)->command(mEffectInterface, + EFFECT_CMD_SET_VOLUME, + size, + 
volume, + &size, + pVolume); + if (controller && status == NO_ERROR && size == sizeof(volume)) { + *left = volume[0]; + *right = volume[1]; + } + } + return status; +} + +status_t AudioFlinger::EffectModule::setDevice(audio_devices_t device) +{ + if (device == AUDIO_DEVICE_NONE) { + return NO_ERROR; + } + + Mutex::Autolock _l(mLock); + status_t status = NO_ERROR; + if (device && (mDescriptor.flags & EFFECT_FLAG_DEVICE_MASK) == EFFECT_FLAG_DEVICE_IND) { + status_t cmdStatus; + uint32_t size = sizeof(status_t); + uint32_t cmd = audio_is_output_devices(device) ? EFFECT_CMD_SET_DEVICE : + EFFECT_CMD_SET_INPUT_DEVICE; + status = (*mEffectInterface)->command(mEffectInterface, + cmd, + sizeof(uint32_t), + &device, + &size, + &cmdStatus); + } + return status; +} + +status_t AudioFlinger::EffectModule::setMode(audio_mode_t mode) +{ + Mutex::Autolock _l(mLock); + status_t status = NO_ERROR; + if ((mDescriptor.flags & EFFECT_FLAG_AUDIO_MODE_MASK) == EFFECT_FLAG_AUDIO_MODE_IND) { + status_t cmdStatus; + uint32_t size = sizeof(status_t); + status = (*mEffectInterface)->command(mEffectInterface, + EFFECT_CMD_SET_AUDIO_MODE, + sizeof(audio_mode_t), + &mode, + &size, + &cmdStatus); + if (status == NO_ERROR) { + status = cmdStatus; + } + } + return status; +} + +status_t AudioFlinger::EffectModule::setAudioSource(audio_source_t source) +{ + Mutex::Autolock _l(mLock); + status_t status = NO_ERROR; + if ((mDescriptor.flags & EFFECT_FLAG_AUDIO_SOURCE_MASK) == EFFECT_FLAG_AUDIO_SOURCE_IND) { + uint32_t size = 0; + status = (*mEffectInterface)->command(mEffectInterface, + EFFECT_CMD_SET_AUDIO_SOURCE, + sizeof(audio_source_t), + &source, + &size, + NULL); + } + return status; +} + +void AudioFlinger::EffectModule::setSuspended(bool suspended) +{ + Mutex::Autolock _l(mLock); + mSuspended = suspended; +} + +bool AudioFlinger::EffectModule::suspended() const +{ + Mutex::Autolock _l(mLock); + return mSuspended; +} + +bool AudioFlinger::EffectModule::purgeHandles() +{ + bool enabled = false; + Mutex::Autolock _l(mLock); + for (size_t i = 0; i < mHandles.size(); i++) { + EffectHandle *handle = mHandles[i]; + if (handle != NULL && !handle->destroyed_l()) { + handle->effect().clear(); + if (handle->hasControl()) { + enabled = handle->enabled(); + } + } + } + return enabled; +} + +void AudioFlinger::EffectModule::dump(int fd, const Vector& args) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, "\tEffect ID %d:\n", mId); + result.append(buffer); + + bool locked = AudioFlinger::dumpTryLock(mLock); + // failed to lock - AudioFlinger is probably deadlocked + if (!locked) { + result.append("\t\tCould not lock Fx mutex:\n"); + } + + result.append("\t\tSession Status State Engine:\n"); + snprintf(buffer, SIZE, "\t\t%05d %03d %03d 0x%08x\n", + mSessionId, mStatus, mState, (uint32_t)mEffectInterface); + result.append(buffer); + + result.append("\t\tDescriptor:\n"); + snprintf(buffer, SIZE, "\t\t- UUID: %08X-%04X-%04X-%04X-%02X%02X%02X%02X%02X%02X\n", + mDescriptor.uuid.timeLow, mDescriptor.uuid.timeMid, mDescriptor.uuid.timeHiAndVersion, + mDescriptor.uuid.clockSeq, mDescriptor.uuid.node[0], mDescriptor.uuid.node[1], + mDescriptor.uuid.node[2], + mDescriptor.uuid.node[3],mDescriptor.uuid.node[4],mDescriptor.uuid.node[5]); + result.append(buffer); + snprintf(buffer, SIZE, "\t\t- TYPE: %08X-%04X-%04X-%04X-%02X%02X%02X%02X%02X%02X\n", + mDescriptor.type.timeLow, mDescriptor.type.timeMid, + mDescriptor.type.timeHiAndVersion, + mDescriptor.type.clockSeq, mDescriptor.type.node[0], 
mDescriptor.type.node[1], + mDescriptor.type.node[2], + mDescriptor.type.node[3],mDescriptor.type.node[4],mDescriptor.type.node[5]); + result.append(buffer); + snprintf(buffer, SIZE, "\t\t- apiVersion: %08X\n\t\t- flags: %08X\n", + mDescriptor.apiVersion, + mDescriptor.flags); + result.append(buffer); + snprintf(buffer, SIZE, "\t\t- name: %s\n", + mDescriptor.name); + result.append(buffer); + snprintf(buffer, SIZE, "\t\t- implementor: %s\n", + mDescriptor.implementor); + result.append(buffer); + + result.append("\t\t- Input configuration:\n"); + result.append("\t\t\tBuffer Frames Smp rate Channels Format\n"); + snprintf(buffer, SIZE, "\t\t\t0x%08x %05d %05d %08x %d\n", + (uint32_t)mConfig.inputCfg.buffer.raw, + mConfig.inputCfg.buffer.frameCount, + mConfig.inputCfg.samplingRate, + mConfig.inputCfg.channels, + mConfig.inputCfg.format); + result.append(buffer); + + result.append("\t\t- Output configuration:\n"); + result.append("\t\t\tBuffer Frames Smp rate Channels Format\n"); + snprintf(buffer, SIZE, "\t\t\t0x%08x %05d %05d %08x %d\n", + (uint32_t)mConfig.outputCfg.buffer.raw, + mConfig.outputCfg.buffer.frameCount, + mConfig.outputCfg.samplingRate, + mConfig.outputCfg.channels, + mConfig.outputCfg.format); + result.append(buffer); + + snprintf(buffer, SIZE, "\t\t%d Clients:\n", mHandles.size()); + result.append(buffer); + result.append("\t\t\tPid Priority Ctrl Locked client server\n"); + for (size_t i = 0; i < mHandles.size(); ++i) { + EffectHandle *handle = mHandles[i]; + if (handle != NULL && !handle->destroyed_l()) { + handle->dump(buffer, SIZE); + result.append(buffer); + } + } + + result.append("\n"); + + write(fd, result.string(), result.length()); + + if (locked) { + mLock.unlock(); + } +} + +// ---------------------------------------------------------------------------- +// EffectHandle implementation +// ---------------------------------------------------------------------------- + +#undef LOG_TAG +#define LOG_TAG "AudioFlinger::EffectHandle" + +AudioFlinger::EffectHandle::EffectHandle(const sp& effect, + const sp& client, + const sp& effectClient, + int32_t priority) + : BnEffect(), + mEffect(effect), mEffectClient(effectClient), mClient(client), mCblk(NULL), + mPriority(priority), mHasControl(false), mEnabled(false), mDestroyed(false) +{ + ALOGV("constructor %p", this); + + if (client == 0) { + return; + } + int bufOffset = ((sizeof(effect_param_cblk_t) - 1) / sizeof(int) + 1) * sizeof(int); + mCblkMemory = client->heap()->allocate(EFFECT_PARAM_BUFFER_SIZE + bufOffset); + if (mCblkMemory != 0) { + mCblk = static_cast(mCblkMemory->pointer()); + + if (mCblk != NULL) { + new(mCblk) effect_param_cblk_t(); + mBuffer = (uint8_t *)mCblk + bufOffset; + } + } else { + ALOGE("not enough memory for Effect size=%u", EFFECT_PARAM_BUFFER_SIZE + + sizeof(effect_param_cblk_t)); + return; + } +} + +AudioFlinger::EffectHandle::~EffectHandle() +{ + ALOGV("Destructor %p", this); + + if (mEffect == 0) { + mDestroyed = true; + return; + } + mEffect->lock(); + mDestroyed = true; + mEffect->unlock(); + disconnect(false); +} + +status_t AudioFlinger::EffectHandle::enable() +{ + ALOGV("enable %p", this); + if (!mHasControl) { + return INVALID_OPERATION; + } + if (mEffect == 0) { + return DEAD_OBJECT; + } + + if (mEnabled) { + return NO_ERROR; + } + + mEnabled = true; + + sp thread = mEffect->thread().promote(); + if (thread != 0) { + thread->checkSuspendOnEffectEnabled(mEffect, true, mEffect->sessionId()); + } + + // checkSuspendOnEffectEnabled() can suspend this same effect when enabled + if 
(mEffect->suspended()) { + return NO_ERROR; + } + + status_t status = mEffect->setEnabled(true); + if (status != NO_ERROR) { + if (thread != 0) { + thread->checkSuspendOnEffectEnabled(mEffect, false, mEffect->sessionId()); + } + mEnabled = false; + } + return status; +} + +status_t AudioFlinger::EffectHandle::disable() +{ + ALOGV("disable %p", this); + if (!mHasControl) { + return INVALID_OPERATION; + } + if (mEffect == 0) { + return DEAD_OBJECT; + } + + if (!mEnabled) { + return NO_ERROR; + } + mEnabled = false; + + if (mEffect->suspended()) { + return NO_ERROR; + } + + status_t status = mEffect->setEnabled(false); + + sp thread = mEffect->thread().promote(); + if (thread != 0) { + thread->checkSuspendOnEffectEnabled(mEffect, false, mEffect->sessionId()); + } + + return status; +} + +void AudioFlinger::EffectHandle::disconnect() +{ + disconnect(true); +} + +void AudioFlinger::EffectHandle::disconnect(bool unpinIfLast) +{ + ALOGV("disconnect(%s)", unpinIfLast ? "true" : "false"); + if (mEffect == 0) { + return; + } + // restore suspended effects if the disconnected handle was enabled and the last one. + if ((mEffect->disconnect(this, unpinIfLast) == 0) && mEnabled) { + sp thread = mEffect->thread().promote(); + if (thread != 0) { + thread->checkSuspendOnEffectEnabled(mEffect, false, mEffect->sessionId()); + } + } + + // release sp on module => module destructor can be called now + mEffect.clear(); + if (mClient != 0) { + if (mCblk != NULL) { + // unlike ~TrackBase(), mCblk is never a local new, so don't delete + mCblk->~effect_param_cblk_t(); // destroy our shared-structure. + } + mCblkMemory.clear(); // free the shared memory before releasing the heap it belongs to + // Client destructor must run with AudioFlinger mutex locked + Mutex::Autolock _l(mClient->audioFlinger()->mLock); + mClient.clear(); + } +} + +status_t AudioFlinger::EffectHandle::command(uint32_t cmdCode, + uint32_t cmdSize, + void *pCmdData, + uint32_t *replySize, + void *pReplyData) +{ + ALOGVV("command(), cmdCode: %d, mHasControl: %d, mEffect: %p", + cmdCode, mHasControl, (mEffect == 0) ? 
0 : mEffect.get()); + + // only get parameter command is permitted for applications not controlling the effect + if (!mHasControl && cmdCode != EFFECT_CMD_GET_PARAM) { + return INVALID_OPERATION; + } + if (mEffect == 0) { + return DEAD_OBJECT; + } + if (mClient == 0) { + return INVALID_OPERATION; + } + + // handle commands that are not forwarded transparently to effect engine + if (cmdCode == EFFECT_CMD_SET_PARAM_COMMIT) { + // No need to trylock() here as this function is executed in the binder thread serving a + // particular client process: no risk to block the whole media server process or mixer + // threads if we are stuck here + Mutex::Autolock _l(mCblk->lock); + if (mCblk->clientIndex > EFFECT_PARAM_BUFFER_SIZE || + mCblk->serverIndex > EFFECT_PARAM_BUFFER_SIZE) { + mCblk->serverIndex = 0; + mCblk->clientIndex = 0; + return BAD_VALUE; + } + status_t status = NO_ERROR; + while (mCblk->serverIndex < mCblk->clientIndex) { + int reply; + uint32_t rsize = sizeof(int); + int *p = (int *)(mBuffer + mCblk->serverIndex); + int size = *p++; + if (((uint8_t *)p + size) > mBuffer + mCblk->clientIndex) { + ALOGW("command(): invalid parameter block size"); + break; + } + effect_param_t *param = (effect_param_t *)p; + if (param->psize == 0 || param->vsize == 0) { + ALOGW("command(): null parameter or value size"); + mCblk->serverIndex += size; + continue; + } + uint32_t psize = sizeof(effect_param_t) + + ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) + + param->vsize; + status_t ret = mEffect->command(EFFECT_CMD_SET_PARAM, + psize, + p, + &rsize, + &reply); + // stop at first error encountered + if (ret != NO_ERROR) { + status = ret; + *(int *)pReplyData = reply; + break; + } else if (reply != NO_ERROR) { + *(int *)pReplyData = reply; + break; + } + mCblk->serverIndex += size; + } + mCblk->serverIndex = 0; + mCblk->clientIndex = 0; + return status; + } else if (cmdCode == EFFECT_CMD_ENABLE) { + *(int *)pReplyData = NO_ERROR; + return enable(); + } else if (cmdCode == EFFECT_CMD_DISABLE) { + *(int *)pReplyData = NO_ERROR; + return disable(); + } + + return mEffect->command(cmdCode, cmdSize, pCmdData, replySize, pReplyData); +} + +void AudioFlinger::EffectHandle::setControl(bool hasControl, bool signal, bool enabled) +{ + ALOGV("setControl %p control %d", this, hasControl); + + mHasControl = hasControl; + mEnabled = enabled; + + if (signal && mEffectClient != 0) { + mEffectClient->controlStatusChanged(hasControl); + } +} + +void AudioFlinger::EffectHandle::commandExecuted(uint32_t cmdCode, + uint32_t cmdSize, + void *pCmdData, + uint32_t replySize, + void *pReplyData) +{ + if (mEffectClient != 0) { + mEffectClient->commandExecuted(cmdCode, cmdSize, pCmdData, replySize, pReplyData); + } +} + + + +void AudioFlinger::EffectHandle::setEnabled(bool enabled) +{ + if (mEffectClient != 0) { + mEffectClient->enableStatusChanged(enabled); + } +} + +status_t AudioFlinger::EffectHandle::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + return BnEffect::onTransact(code, data, reply, flags); +} + + +void AudioFlinger::EffectHandle::dump(char* buffer, size_t size) +{ + bool locked = mCblk != NULL && AudioFlinger::dumpTryLock(mCblk->lock); + + snprintf(buffer, size, "\t\t\t%05d %05d %01u %01u %05u %05u\n", + (mClient == 0) ? getpid_cached : mClient->pid(), + mPriority, + mHasControl, + !locked, + mCblk ? mCblk->clientIndex : 0, + mCblk ? 
mCblk->serverIndex : 0 + ); + + if (locked) { + mCblk->lock.unlock(); + } +} + +#undef LOG_TAG +#define LOG_TAG "AudioFlinger::EffectChain" + +AudioFlinger::EffectChain::EffectChain(ThreadBase *thread, + int sessionId) + : mThread(thread), mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0), mTailBufferCount(0), + mOwnInBuffer(false), mVolumeCtrlIdx(-1), mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX), + mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX) +{ + mStrategy = AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC); + if (thread == NULL) { + return; + } + mMaxTailBuffers = ((kProcessTailDurationMs * thread->sampleRate()) / 1000) / + thread->frameCount(); +} + +AudioFlinger::EffectChain::~EffectChain() +{ + if (mOwnInBuffer) { + delete mInBuffer; + } + +} + +// getEffectFromDesc_l() must be called with ThreadBase::mLock held +sp AudioFlinger::EffectChain::getEffectFromDesc_l( + effect_descriptor_t *descriptor) +{ + size_t size = mEffects.size(); + + for (size_t i = 0; i < size; i++) { + if (memcmp(&mEffects[i]->desc().uuid, &descriptor->uuid, sizeof(effect_uuid_t)) == 0) { + return mEffects[i]; + } + } + return 0; +} + +// getEffectFromId_l() must be called with ThreadBase::mLock held +sp AudioFlinger::EffectChain::getEffectFromId_l(int id) +{ + size_t size = mEffects.size(); + + for (size_t i = 0; i < size; i++) { + // by convention, return first effect if id provided is 0 (0 is never a valid id) + if (id == 0 || mEffects[i]->id() == id) { + return mEffects[i]; + } + } + return 0; +} + +// getEffectFromType_l() must be called with ThreadBase::mLock held +sp AudioFlinger::EffectChain::getEffectFromType_l( + const effect_uuid_t *type) +{ + size_t size = mEffects.size(); + + for (size_t i = 0; i < size; i++) { + if (memcmp(&mEffects[i]->desc().type, type, sizeof(effect_uuid_t)) == 0) { + return mEffects[i]; + } + } + return 0; +} + +void AudioFlinger::EffectChain::clearInputBuffer() +{ + Mutex::Autolock _l(mLock); + sp thread = mThread.promote(); + if (thread == 0) { + ALOGW("clearInputBuffer(): cannot promote mixer thread"); + return; + } + clearInputBuffer_l(thread); +} + +// Must be called with EffectChain::mLock locked +void AudioFlinger::EffectChain::clearInputBuffer_l(sp thread) +{ + size_t numSamples = thread->frameCount() * thread->channelCount(); + memset(mInBuffer, 0, numSamples * sizeof(int16_t)); + +} + +// Must be called with EffectChain::mLock locked +void AudioFlinger::EffectChain::process_l() +{ + sp thread = mThread.promote(); + if (thread == 0) { + ALOGW("process_l(): cannot promote mixer thread"); + return; + } + bool isGlobalSession = (mSessionId == AUDIO_SESSION_OUTPUT_MIX) || + (mSessionId == AUDIO_SESSION_OUTPUT_STAGE); + // always process effects unless no more tracks are on the session and the effect tail + // has been rendered + bool doProcess = true; + if (!isGlobalSession) { + bool tracksOnSession = (trackCnt() != 0); + + if (!tracksOnSession && mTailBufferCount == 0) { + doProcess = false; + } + + if (activeTrackCnt() == 0) { + // if no track is active and the effect tail has not been rendered, + // the input buffer must be cleared here as the mixer process will not do it + if (tracksOnSession || mTailBufferCount > 0) { + clearInputBuffer_l(thread); + if (mTailBufferCount > 0) { + mTailBufferCount--; + } + } + } + } + + size_t size = mEffects.size(); + if (doProcess) { + for (size_t i = 0; i < size; i++) { + mEffects[i]->process(); + } + } + for (size_t i = 0; i < size; i++) { + mEffects[i]->updateState(); + } +} + +// addEffect_l() must be called with 
PlaybackThread::mLock held +status_t AudioFlinger::EffectChain::addEffect_l(const sp& effect) +{ + effect_descriptor_t desc = effect->desc(); + uint32_t insertPref = desc.flags & EFFECT_FLAG_INSERT_MASK; + + Mutex::Autolock _l(mLock); + effect->setChain(this); + sp thread = mThread.promote(); + if (thread == 0) { + return NO_INIT; + } + effect->setThread(thread); + + if ((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { + // Auxiliary effects are inserted at the beginning of mEffects vector as + // they are processed first and accumulated in chain input buffer + mEffects.insertAt(effect, 0); + + // the input buffer for auxiliary effect contains mono samples in + // 32 bit format. This is to avoid saturation in AudoMixer + // accumulation stage. Saturation is done in EffectModule::process() before + // calling the process in effect engine + size_t numSamples = thread->frameCount(); + int32_t *buffer = new int32_t[numSamples]; + memset(buffer, 0, numSamples * sizeof(int32_t)); + effect->setInBuffer((int16_t *)buffer); + // auxiliary effects output samples to chain input buffer for further processing + // by insert effects + effect->setOutBuffer(mInBuffer); + } else { + // Insert effects are inserted at the end of mEffects vector as they are processed + // after track and auxiliary effects. + // Insert effect order as a function of indicated preference: + // if EFFECT_FLAG_INSERT_EXCLUSIVE, insert in first position or reject if + // another effect is present + // else if EFFECT_FLAG_INSERT_FIRST, insert in first position or after the + // last effect claiming first position + // else if EFFECT_FLAG_INSERT_LAST, insert in last position or before the + // first effect claiming last position + // else if EFFECT_FLAG_INSERT_ANY insert after first or before last + // Reject insertion if an effect with EFFECT_FLAG_INSERT_EXCLUSIVE is + // already present + + size_t size = mEffects.size(); + size_t idx_insert = size; + ssize_t idx_insert_first = -1; + ssize_t idx_insert_last = -1; + + for (size_t i = 0; i < size; i++) { + effect_descriptor_t d = mEffects[i]->desc(); + uint32_t iMode = d.flags & EFFECT_FLAG_TYPE_MASK; + uint32_t iPref = d.flags & EFFECT_FLAG_INSERT_MASK; + if (iMode == EFFECT_FLAG_TYPE_INSERT) { + // check invalid effect chaining combinations + if (insertPref == EFFECT_FLAG_INSERT_EXCLUSIVE || + iPref == EFFECT_FLAG_INSERT_EXCLUSIVE) { + ALOGW("addEffect_l() could not insert effect %s: exclusive conflict with %s", + desc.name, d.name); + return INVALID_OPERATION; + } + // remember position of first insert effect and by default + // select this as insert position for new effect + if (idx_insert == size) { + idx_insert = i; + } + // remember position of last insert effect claiming + // first position + if (iPref == EFFECT_FLAG_INSERT_FIRST) { + idx_insert_first = i; + } + // remember position of first insert effect claiming + // last position + if (iPref == EFFECT_FLAG_INSERT_LAST && + idx_insert_last == -1) { + idx_insert_last = i; + } + } + } + + // modify idx_insert from first position if needed + if (insertPref == EFFECT_FLAG_INSERT_LAST) { + if (idx_insert_last != -1) { + idx_insert = idx_insert_last; + } else { + idx_insert = size; + } + } else { + if (idx_insert_first != -1) { + idx_insert = idx_insert_first + 1; + } + } + + // always read samples from chain input buffer + effect->setInBuffer(mInBuffer); + + // if last effect in the chain, output samples to chain + // output buffer, otherwise to chain input buffer + if (idx_insert == size) { + if (idx_insert 
!= 0) { + mEffects[idx_insert-1]->setOutBuffer(mInBuffer); + mEffects[idx_insert-1]->configure(); + } + effect->setOutBuffer(mOutBuffer); + } else { + effect->setOutBuffer(mInBuffer); + } + mEffects.insertAt(effect, idx_insert); + + ALOGV("addEffect_l() effect %p, added in chain %p at rank %d", effect.get(), this, + idx_insert); + } + effect->configure(); + return NO_ERROR; +} + +// removeEffect_l() must be called with PlaybackThread::mLock held +size_t AudioFlinger::EffectChain::removeEffect_l(const sp& effect) +{ + Mutex::Autolock _l(mLock); + size_t size = mEffects.size(); + uint32_t type = effect->desc().flags & EFFECT_FLAG_TYPE_MASK; + + for (size_t i = 0; i < size; i++) { + if (effect == mEffects[i]) { + // calling stop here will remove pre-processing effect from the audio HAL. + // This is safe as we hold the EffectChain mutex which guarantees that we are not in + // the middle of a read from audio HAL + if (mEffects[i]->state() == EffectModule::ACTIVE || + mEffects[i]->state() == EffectModule::STOPPING) { + mEffects[i]->stop(); + } + if (type == EFFECT_FLAG_TYPE_AUXILIARY) { + delete[] effect->inBuffer(); + } else { + if (i == size - 1 && i != 0) { + mEffects[i - 1]->setOutBuffer(mOutBuffer); + mEffects[i - 1]->configure(); + } + } + mEffects.removeAt(i); + ALOGV("removeEffect_l() effect %p, removed from chain %p at rank %d", effect.get(), + this, i); + break; + } + } + + return mEffects.size(); +} + +// setDevice_l() must be called with PlaybackThread::mLock held +void AudioFlinger::EffectChain::setDevice_l(audio_devices_t device) +{ + size_t size = mEffects.size(); + for (size_t i = 0; i < size; i++) { + mEffects[i]->setDevice(device); + } +} + +// setMode_l() must be called with PlaybackThread::mLock held +void AudioFlinger::EffectChain::setMode_l(audio_mode_t mode) +{ + size_t size = mEffects.size(); + for (size_t i = 0; i < size; i++) { + mEffects[i]->setMode(mode); + } +} + +// setAudioSource_l() must be called with PlaybackThread::mLock held +void AudioFlinger::EffectChain::setAudioSource_l(audio_source_t source) +{ + size_t size = mEffects.size(); + for (size_t i = 0; i < size; i++) { + mEffects[i]->setAudioSource(source); + } +} + +// setVolume_l() must be called with PlaybackThread::mLock held +bool AudioFlinger::EffectChain::setVolume_l(uint32_t *left, uint32_t *right) +{ + uint32_t newLeft = *left; + uint32_t newRight = *right; + bool hasControl = false; + int ctrlIdx = -1; + size_t size = mEffects.size(); + + // first update volume controller + for (size_t i = size; i > 0; i--) { + if (mEffects[i - 1]->isProcessEnabled() && + (mEffects[i - 1]->desc().flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_CTRL) { + ctrlIdx = i - 1; + hasControl = true; + break; + } + } + + if (ctrlIdx == mVolumeCtrlIdx && *left == mLeftVolume && *right == mRightVolume) { + if (hasControl) { + *left = mNewLeftVolume; + *right = mNewRightVolume; + } + return hasControl; + } + + mVolumeCtrlIdx = ctrlIdx; + mLeftVolume = newLeft; + mRightVolume = newRight; + + // second get volume update from volume controller + if (ctrlIdx >= 0) { + mEffects[ctrlIdx]->setVolume(&newLeft, &newRight, true); + mNewLeftVolume = newLeft; + mNewRightVolume = newRight; + } + // then indicate volume to all other effects in chain. 
+ // Pass altered volume to effects before volume controller + // and requested volume to effects after controller + uint32_t lVol = newLeft; + uint32_t rVol = newRight; + + for (size_t i = 0; i < size; i++) { + if ((int)i == ctrlIdx) { + continue; + } + // this also works for ctrlIdx == -1 when there is no volume controller + if ((int)i > ctrlIdx) { + lVol = *left; + rVol = *right; + } + mEffects[i]->setVolume(&lVol, &rVol, false); + } + *left = newLeft; + *right = newRight; + + return hasControl; +} + +void AudioFlinger::EffectChain::dump(int fd, const Vector& args) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, "Effects for session %d:\n", mSessionId); + result.append(buffer); + + bool locked = AudioFlinger::dumpTryLock(mLock); + // failed to lock - AudioFlinger is probably deadlocked + if (!locked) { + result.append("\tCould not lock mutex:\n"); + } + + result.append("\tNum fx In buffer Out buffer Active tracks:\n"); + snprintf(buffer, SIZE, "\t%02d 0x%08x 0x%08x %d\n", + mEffects.size(), + (uint32_t)mInBuffer, + (uint32_t)mOutBuffer, + mActiveTrackCnt); + result.append(buffer); + write(fd, result.string(), result.size()); + + for (size_t i = 0; i < mEffects.size(); ++i) { + sp effect = mEffects[i]; + if (effect != 0) { + effect->dump(fd, args); + } + } + + if (locked) { + mLock.unlock(); + } +} + +// must be called with ThreadBase::mLock held +void AudioFlinger::EffectChain::setEffectSuspended_l( + const effect_uuid_t *type, bool suspend) +{ + sp desc; + // use effect type UUID timelow as key as there is no real risk of identical + // timeLow fields among effect type UUIDs. + ssize_t index = mSuspendedEffects.indexOfKey(type->timeLow); + if (suspend) { + if (index >= 0) { + desc = mSuspendedEffects.valueAt(index); + } else { + desc = new SuspendedEffectDesc(); + desc->mType = *type; + mSuspendedEffects.add(type->timeLow, desc); + ALOGV("setEffectSuspended_l() add entry for %08x", type->timeLow); + } + if (desc->mRefCount++ == 0) { + sp effect = getEffectIfEnabled(type); + if (effect != 0) { + desc->mEffect = effect; + effect->setSuspended(true); + effect->setEnabled(false); + } + } + } else { + if (index < 0) { + return; + } + desc = mSuspendedEffects.valueAt(index); + if (desc->mRefCount <= 0) { + ALOGW("setEffectSuspended_l() restore refcount should not be 0 %d", desc->mRefCount); + desc->mRefCount = 1; + } + if (--desc->mRefCount == 0) { + ALOGV("setEffectSuspended_l() remove entry for %08x", mSuspendedEffects.keyAt(index)); + if (desc->mEffect != 0) { + sp effect = desc->mEffect.promote(); + if (effect != 0) { + effect->setSuspended(false); + effect->lock(); + EffectHandle *handle = effect->controlHandle_l(); + if (handle != NULL && !handle->destroyed_l()) { + effect->setEnabled_l(handle->enabled()); + } + effect->unlock(); + } + desc->mEffect.clear(); + } + mSuspendedEffects.removeItemsAt(index); + } + } +} + +// must be called with ThreadBase::mLock held +void AudioFlinger::EffectChain::setEffectSuspendedAll_l(bool suspend) +{ + sp desc; + + ssize_t index = mSuspendedEffects.indexOfKey((int)kKeyForSuspendAll); + if (suspend) { + if (index >= 0) { + desc = mSuspendedEffects.valueAt(index); + } else { + desc = new SuspendedEffectDesc(); + mSuspendedEffects.add((int)kKeyForSuspendAll, desc); + ALOGV("setEffectSuspendedAll_l() add entry for 0"); + } + if (desc->mRefCount++ == 0) { + Vector< sp > effects; + getSuspendEligibleEffects(effects); + for (size_t i = 0; i < effects.size(); i++) { + 
setEffectSuspended_l(&effects[i]->desc().type, true); + } + } + } else { + if (index < 0) { + return; + } + desc = mSuspendedEffects.valueAt(index); + if (desc->mRefCount <= 0) { + ALOGW("setEffectSuspendedAll_l() restore refcount should not be 0 %d", desc->mRefCount); + desc->mRefCount = 1; + } + if (--desc->mRefCount == 0) { + Vector types; + for (size_t i = 0; i < mSuspendedEffects.size(); i++) { + if (mSuspendedEffects.keyAt(i) == (int)kKeyForSuspendAll) { + continue; + } + types.add(&mSuspendedEffects.valueAt(i)->mType); + } + for (size_t i = 0; i < types.size(); i++) { + setEffectSuspended_l(types[i], false); + } + ALOGV("setEffectSuspendedAll_l() remove entry for %08x", + mSuspendedEffects.keyAt(index)); + mSuspendedEffects.removeItem((int)kKeyForSuspendAll); + } + } +} + + +// The volume effect is used for automated tests only +#ifndef OPENSL_ES_H_ +static const effect_uuid_t SL_IID_VOLUME_ = { 0x09e8ede0, 0xddde, 0x11db, 0xb4f6, + { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }; +const effect_uuid_t * const SL_IID_VOLUME = &SL_IID_VOLUME_; +#endif //OPENSL_ES_H_ + +bool AudioFlinger::EffectChain::isEffectEligibleForSuspend(const effect_descriptor_t& desc) +{ + // auxiliary effects and visualizer are never suspended on output mix + if ((mSessionId == AUDIO_SESSION_OUTPUT_MIX) && + (((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) || + (memcmp(&desc.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) || + (memcmp(&desc.type, SL_IID_VOLUME, sizeof(effect_uuid_t)) == 0))) { + return false; + } + return true; +} + +void AudioFlinger::EffectChain::getSuspendEligibleEffects( + Vector< sp > &effects) +{ + effects.clear(); + for (size_t i = 0; i < mEffects.size(); i++) { + if (isEffectEligibleForSuspend(mEffects[i]->desc())) { + effects.add(mEffects[i]); + } + } +} + +sp AudioFlinger::EffectChain::getEffectIfEnabled( + const effect_uuid_t *type) +{ + sp effect = getEffectFromType_l(type); + return effect != 0 && effect->isEnabled() ? effect : 0; +} + +void AudioFlinger::EffectChain::checkSuspendOnEffectEnabled(const sp& effect, + bool enabled) +{ + ssize_t index = mSuspendedEffects.indexOfKey(effect->desc().type.timeLow); + if (enabled) { + if (index < 0) { + // if the effect is not suspend check if all effects are suspended + index = mSuspendedEffects.indexOfKey((int)kKeyForSuspendAll); + if (index < 0) { + return; + } + if (!isEffectEligibleForSuspend(effect->desc())) { + return; + } + setEffectSuspended_l(&effect->desc().type, enabled); + index = mSuspendedEffects.indexOfKey(effect->desc().type.timeLow); + if (index < 0) { + ALOGW("checkSuspendOnEffectEnabled() Fx should be suspended here!"); + return; + } + } + ALOGV("checkSuspendOnEffectEnabled() enable suspending fx %08x", + effect->desc().type.timeLow); + sp desc = mSuspendedEffects.valueAt(index); + // if effect is requested to suspended but was not yet enabled, supend it now. 
+ if (desc->mEffect == 0) { + desc->mEffect = effect; + effect->setEnabled(false); + effect->setSuspended(true); + } + } else { + if (index < 0) { + return; + } + ALOGV("checkSuspendOnEffectEnabled() disable restoring fx %08x", + effect->desc().type.timeLow); + sp desc = mSuspendedEffects.valueAt(index); + desc->mEffect.clear(); + effect->setSuspended(false); + } +} + +}; // namespace android diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h new file mode 100644 index 0000000..91303ee --- /dev/null +++ b/services/audioflinger/Effects.h @@ -0,0 +1,359 @@ +/* +** +** Copyright 2012, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef INCLUDING_FROM_AUDIOFLINGER_H + #error This header file should only be included from AudioFlinger.h +#endif + +//--- Audio Effect Management + +// EffectModule and EffectChain classes both have their own mutex to protect +// state changes or resource modifications. Always respect the following order +// if multiple mutexes must be acquired to avoid cross deadlock: +// AudioFlinger -> ThreadBase -> EffectChain -> EffectModule + +// The EffectModule class is a wrapper object controlling the effect engine implementation +// in the effect library. It prevents concurrent calls to process() and command() functions +// from different client threads. It keeps a list of EffectHandle objects corresponding +// to all client applications using this effect and notifies applications of effect state, +// control or parameter changes. It manages the activation state machine to send appropriate +// reset, enable, disable commands to effect engine and provide volume +// ramping when effects are activated/deactivated. +// When controlling an auxiliary effect, the EffectModule also provides an input buffer used by +// the attached track(s) to accumulate their auxiliary channel. 
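The EffectModule class comment above describes an activation state machine (IDLE, STARTING, ACTIVE, STOPPING, STOPPED, RESTART, DESTROYED) driven by setEnabled_l() and updateState() in the Effects.cpp hunk earlier in this patch. The standalone sketch below restates those transitions for readers skimming the diff; the nextState() helper and the main() driver are illustrative only and are not part of AudioFlinger or of this patch.

    // Transition table mirroring EffectModule::setEnabled_l(); updateState()
    // then issues the engine commands (reset/enable/disable) for each state.
    #include <cstdio>

    enum effect_state { IDLE, RESTART, STARTING, ACTIVE, STOPPING, STOPPED, DESTROYED };

    static effect_state nextState(effect_state s, bool enable) {
        if (enable) {
            switch (s) {
            case IDLE:     return STARTING;  // updateState() will send EFFECT_CMD_ENABLE
            case STOPPED:  return RESTART;   // reset the engine, then re-enable
            case STOPPING: return ACTIVE;    // cancel the pending disable
            default:       return s;         // already enabled, or DESTROYED (ignored)
            }
        } else {
            switch (s) {
            case RESTART:  return STOPPED;
            case STARTING: return IDLE;
            case ACTIVE:   return STOPPING;  // EFFECT_CMD_DISABLE, then the effect tail drains
            default:       return s;         // already disabled, or DESTROYED (ignored)
            }
        }
    }

    int main() {
        effect_state s = IDLE;
        s = nextState(s, true);        // IDLE -> STARTING; updateState() later moves it to ACTIVE
        s = nextState(ACTIVE, false);  // ACTIVE -> STOPPING; STOPPED then IDLE follow in updateState()
        std::printf("state after disable request: %d\n", s);  // prints 4 (STOPPING)
        return 0;
    }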
+class EffectModule : public RefBase { +public: + EffectModule(ThreadBase *thread, + const wp& chain, + effect_descriptor_t *desc, + int id, + int sessionId); + virtual ~EffectModule(); + + enum effect_state { + IDLE, + RESTART, + STARTING, + ACTIVE, + STOPPING, + STOPPED, + DESTROYED + }; + + int id() const { return mId; } + void process(); + void updateState(); + status_t command(uint32_t cmdCode, + uint32_t cmdSize, + void *pCmdData, + uint32_t *replySize, + void *pReplyData); + + void reset_l(); + status_t configure(); + status_t init(); + effect_state state() const { + return mState; + } + uint32_t status() { + return mStatus; + } + int sessionId() const { + return mSessionId; + } + status_t setEnabled(bool enabled); + status_t setEnabled_l(bool enabled); + bool isEnabled() const; + bool isProcessEnabled() const; + + void setInBuffer(int16_t *buffer) { mConfig.inputCfg.buffer.s16 = buffer; } + int16_t *inBuffer() { return mConfig.inputCfg.buffer.s16; } + void setOutBuffer(int16_t *buffer) { mConfig.outputCfg.buffer.s16 = buffer; } + int16_t *outBuffer() { return mConfig.outputCfg.buffer.s16; } + void setChain(const wp& chain) { mChain = chain; } + void setThread(const wp& thread) { mThread = thread; } + const wp& thread() { return mThread; } + + status_t addHandle(EffectHandle *handle); + size_t disconnect(EffectHandle *handle, bool unpinIfLast); + size_t removeHandle(EffectHandle *handle); + + const effect_descriptor_t& desc() const { return mDescriptor; } + wp& chain() { return mChain; } + + status_t setDevice(audio_devices_t device); + status_t setVolume(uint32_t *left, uint32_t *right, bool controller); + status_t setMode(audio_mode_t mode); + status_t setAudioSource(audio_source_t source); + status_t start(); + status_t stop(); + void setSuspended(bool suspended); + bool suspended() const; + + EffectHandle* controlHandle_l(); + + bool isPinned() const { return mPinned; } + void unPin() { mPinned = false; } + bool purgeHandles(); + void lock() { mLock.lock(); } + void unlock() { mLock.unlock(); } + + void dump(int fd, const Vector& args); + +protected: + friend class AudioFlinger; // for mHandles + bool mPinned; + + // Maximum time allocated to effect engines to complete the turn off sequence + static const uint32_t MAX_DISABLE_TIME_MS = 10000; + + EffectModule(const EffectModule&); + EffectModule& operator = (const EffectModule&); + + status_t start_l(); + status_t stop_l(); + +mutable Mutex mLock; // mutex for process, commands and handles list protection + wp mThread; // parent thread + wp mChain; // parent effect chain + const int mId; // this instance unique ID + const int mSessionId; // audio session ID + const effect_descriptor_t mDescriptor;// effect descriptor received from effect engine + effect_config_t mConfig; // input and output audio configuration + effect_handle_t mEffectInterface; // Effect module C API + status_t mStatus; // initialization status + effect_state mState; // current activation state + Vector mHandles; // list of client handles + // First handle in mHandles has highest priority and controls the effect module + uint32_t mMaxDisableWaitCnt; // maximum grace period before forcing an effect off after + // sending disable command. + uint32_t mDisableWaitCnt; // current process() calls count during disable period. + bool mSuspended; // effect is suspended: temporarily disabled by framework +}; + +// The EffectHandle class implements the IEffect interface. 
It provides resources +// to receive parameter updates, keeps track of effect control +// ownership and state and has a pointer to the EffectModule object it is controlling. +// There is one EffectHandle object for each application controlling (or using) +// an effect module. +// The EffectHandle is obtained by calling AudioFlinger::createEffect(). +class EffectHandle: public android::BnEffect { +public: + + EffectHandle(const sp& effect, + const sp& client, + const sp& effectClient, + int32_t priority); + virtual ~EffectHandle(); + + // IEffect + virtual status_t enable(); + virtual status_t disable(); + virtual status_t command(uint32_t cmdCode, + uint32_t cmdSize, + void *pCmdData, + uint32_t *replySize, + void *pReplyData); + virtual void disconnect(); +private: + void disconnect(bool unpinIfLast); +public: + virtual sp getCblk() const { return mCblkMemory; } + virtual status_t onTransact(uint32_t code, const Parcel& data, + Parcel* reply, uint32_t flags); + + + // Give or take control of effect module + // - hasControl: true if control is given, false if removed + // - signal: true client app should be signaled of change, false otherwise + // - enabled: state of the effect when control is passed + void setControl(bool hasControl, bool signal, bool enabled); + void commandExecuted(uint32_t cmdCode, + uint32_t cmdSize, + void *pCmdData, + uint32_t replySize, + void *pReplyData); + void setEnabled(bool enabled); + bool enabled() const { return mEnabled; } + + // Getters + int id() const { return mEffect->id(); } + int priority() const { return mPriority; } + bool hasControl() const { return mHasControl; } + sp effect() const { return mEffect; } + // destroyed_l() must be called with the associated EffectModule mLock held + bool destroyed_l() const { return mDestroyed; } + + void dump(char* buffer, size_t size); + +protected: + friend class AudioFlinger; // for mEffect, mHasControl, mEnabled + EffectHandle(const EffectHandle&); + EffectHandle& operator =(const EffectHandle&); + + sp mEffect; // pointer to controlled EffectModule + sp mEffectClient; // callback interface for client notifications + /*const*/ sp mClient; // client for shared memory allocation, see disconnect() + sp mCblkMemory; // shared memory for control block + effect_param_cblk_t* mCblk; // control block for deferred parameter setting via + // shared memory + uint8_t* mBuffer; // pointer to parameter area in shared memory + int mPriority; // client application priority to control the effect + bool mHasControl; // true if this handle is controlling the effect + bool mEnabled; // cached enable state: needed when the effect is + // restored after being suspended + bool mDestroyed; // Set to true by destructor. Access with EffectModule + // mLock held +}; + +// the EffectChain class represents a group of effects associated to one audio session. +// There can be any number of EffectChain objects per output mixer thread (PlaybackThread). +// The EffecChain with session ID 0 contains global effects applied to the output mix. +// Effects in this chain can be insert or auxiliary. Effects in other chains (attached to +// tracks) are insert only. The EffectChain maintains an ordered list of effect module, the +// order corresponding in the effect process order. When attached to a track (session ID != 0), +// it also provide it's own input buffer used by the track as accumulation buffer. 
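The EffectChain comment above, together with EffectModule::configure() earlier in this patch, reduces the chain's buffer routing to one rule: an effect accumulates into its output buffer exactly when that buffer differs from its input buffer, and overwrites otherwise. A small self-contained sketch of that rule follows; the buffer names are stand-ins, not the mInBuffer/mOutBuffer members the patch adds.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    enum access_mode { WRITE_MODE, ACCUMULATE_MODE };

    // accumulate <=> input buffer != output buffer (see EffectModule::configure()).
    static access_mode outputAccessMode(const int16_t *in, const int16_t *out) {
        return (in != out) ? ACCUMULATE_MODE : WRITE_MODE;
    }

    int main() {
        const size_t samples = 256 * 2;              // 256 stereo frames, interleaved
        std::vector<int16_t> chainIn(samples, 0);    // plays the role of the chain input buffer
        std::vector<int16_t> chainOut(samples, 0);   // plays the role of the chain (mix) output buffer

        // Intermediate insert effect: reads and overwrites the chain input buffer.
        std::printf("intermediate insert: %s\n",
                outputAccessMode(chainIn.data(), chainIn.data()) == WRITE_MODE
                    ? "write" : "accumulate");
        // Last insert effect of a track session: reads the chain input buffer and
        // accumulates into the mix buffer.
        std::printf("last insert:         %s\n",
                outputAccessMode(chainIn.data(), chainOut.data()) == WRITE_MODE
                    ? "write" : "accumulate");
        return 0;
    }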
+class EffectChain : public RefBase { +public: + EffectChain(const wp& wThread, int sessionId); + EffectChain(ThreadBase *thread, int sessionId); + virtual ~EffectChain(); + + // special key used for an entry in mSuspendedEffects keyed vector + // corresponding to a suspend all request. + static const int kKeyForSuspendAll = 0; + + // minimum duration during which we force calling effect process when last track on + // a session is stopped or removed to allow effect tail to be rendered + static const int kProcessTailDurationMs = 1000; + + void process_l(); + + void lock() { + mLock.lock(); + } + void unlock() { + mLock.unlock(); + } + + status_t addEffect_l(const sp& handle); + size_t removeEffect_l(const sp& handle); + + int sessionId() const { return mSessionId; } + void setSessionId(int sessionId) { mSessionId = sessionId; } + + sp getEffectFromDesc_l(effect_descriptor_t *descriptor); + sp getEffectFromId_l(int id); + sp getEffectFromType_l(const effect_uuid_t *type); + bool setVolume_l(uint32_t *left, uint32_t *right); + void setDevice_l(audio_devices_t device); + void setMode_l(audio_mode_t mode); + void setAudioSource_l(audio_source_t source); + + void setInBuffer(int16_t *buffer, bool ownsBuffer = false) { + mInBuffer = buffer; + mOwnInBuffer = ownsBuffer; + } + int16_t *inBuffer() const { + return mInBuffer; + } + void setOutBuffer(int16_t *buffer) { + mOutBuffer = buffer; + } + int16_t *outBuffer() const { + return mOutBuffer; + } + + void incTrackCnt() { android_atomic_inc(&mTrackCnt); } + void decTrackCnt() { android_atomic_dec(&mTrackCnt); } + int32_t trackCnt() const { return android_atomic_acquire_load(&mTrackCnt); } + + void incActiveTrackCnt() { android_atomic_inc(&mActiveTrackCnt); + mTailBufferCount = mMaxTailBuffers; } + void decActiveTrackCnt() { android_atomic_dec(&mActiveTrackCnt); } + int32_t activeTrackCnt() const { return android_atomic_acquire_load(&mActiveTrackCnt); } + + uint32_t strategy() const { return mStrategy; } + void setStrategy(uint32_t strategy) + { mStrategy = strategy; } + + // suspend effect of the given type + void setEffectSuspended_l(const effect_uuid_t *type, + bool suspend); + // suspend all eligible effects + void setEffectSuspendedAll_l(bool suspend); + // check if effects should be suspend or restored when a given effect is enable or disabled + void checkSuspendOnEffectEnabled(const sp& effect, + bool enabled); + + void clearInputBuffer(); + + void dump(int fd, const Vector& args); + +protected: + friend class AudioFlinger; // for mThread, mEffects + EffectChain(const EffectChain&); + EffectChain& operator =(const EffectChain&); + + class SuspendedEffectDesc : public RefBase { + public: + SuspendedEffectDesc() : mRefCount(0) {} + + int mRefCount; + effect_uuid_t mType; + wp mEffect; + }; + + // get a list of effect modules to suspend when an effect of the type + // passed is enabled. + void getSuspendEligibleEffects(Vector< sp > &effects); + + // get an effect module if it is currently enable + sp getEffectIfEnabled(const effect_uuid_t *type); + // true if the effect whose descriptor is passed can be suspended + // OEMs can modify the rules implemented in this method to exclude specific effect + // types or implementations from the suspend/restore mechanism. 
+ bool isEffectEligibleForSuspend(const effect_descriptor_t& desc); + + void clearInputBuffer_l(sp thread); + + wp mThread; // parent mixer thread + Mutex mLock; // mutex protecting effect list + Vector< sp > mEffects; // list of effect modules + int mSessionId; // audio session ID + int16_t *mInBuffer; // chain input buffer + int16_t *mOutBuffer; // chain output buffer + + // 'volatile' here means these are accessed with atomic operations instead of mutex + volatile int32_t mActiveTrackCnt; // number of active tracks connected + volatile int32_t mTrackCnt; // number of tracks connected + + int32_t mTailBufferCount; // current effect tail buffer count + int32_t mMaxTailBuffers; // maximum effect tail buffers + bool mOwnInBuffer; // true if the chain owns its input buffer + int mVolumeCtrlIdx; // index of insert effect having control over volume + uint32_t mLeftVolume; // previous volume on left channel + uint32_t mRightVolume; // previous volume on right channel + uint32_t mNewLeftVolume; // new volume on left channel + uint32_t mNewRightVolume; // new volume on right channel + uint32_t mStrategy; // strategy for this effect chain + // mSuspendedEffects lists all effects currently suspended in the chain. + // Use effect type UUID timelow field as key. There is no real risk of identical + // timeLow fields among effect type UUIDs. + // Updated by updateSuspendedSessions_l() only. + KeyedVector< int, sp > mSuspendedEffects; +}; diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h new file mode 100644 index 0000000..b898924 --- /dev/null +++ b/services/audioflinger/PlaybackTracks.h @@ -0,0 +1,285 @@ +/* +** +** Copyright 2012, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef INCLUDING_FROM_AUDIOFLINGER_H + #error This header file should only be included from AudioFlinger.h +#endif + +// playback track +class Track : public TrackBase, public VolumeProvider { +public: + Track( PlaybackThread *thread, + const sp& client, + audio_stream_type_t streamType, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + const sp& sharedBuffer, + int sessionId, + IAudioFlinger::track_flags_t flags); + virtual ~Track(); + + static void appendDumpHeader(String8& result); + void dump(char* buffer, size_t size); + virtual status_t start(AudioSystem::sync_event_t event = + AudioSystem::SYNC_EVENT_NONE, + int triggerSession = 0); + virtual void stop(); + void pause(); + + void flush(); + void destroy(); + void mute(bool); + int name() const { return mName; } + + audio_stream_type_t streamType() const { + return mStreamType; + } + status_t attachAuxEffect(int EffectId); + void setAuxBuffer(int EffectId, int32_t *buffer); + int32_t *auxBuffer() const { return mAuxBuffer; } + void setMainBuffer(int16_t *buffer) { mMainBuffer = buffer; } + int16_t *mainBuffer() const { return mMainBuffer; } + int auxEffectId() const { return mAuxEffectId; } + +// implement FastMixerState::VolumeProvider interface + virtual uint32_t getVolumeLR(); + + virtual status_t setSyncEvent(const sp& event); + +protected: + // for numerous + friend class PlaybackThread; + friend class MixerThread; + friend class DirectOutputThread; + + Track(const Track&); + Track& operator = (const Track&); + + // AudioBufferProvider interface + virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, + int64_t pts = kInvalidPTS); + // releaseBuffer() not overridden + + virtual size_t framesReady() const; + + bool isMuted() const { return mMute; } + bool isPausing() const { + return mState == PAUSING; + } + bool isPaused() const { + return mState == PAUSED; + } + bool isResuming() const { + return mState == RESUMING; + } + bool isReady() const; + void setPaused() { mState = PAUSED; } + void reset(); + + bool isOutputTrack() const { + return (mStreamType == AUDIO_STREAM_CNT); + } + + sp sharedBuffer() const { return mSharedBuffer; } + + // framesWritten is cumulative, never reset, and is shared all tracks + // audioHalFrames is derived from output latency + // FIXME parameters not needed, could get them from the thread + bool presentationComplete(size_t framesWritten, size_t audioHalFrames); + +public: + void triggerEvents(AudioSystem::sync_event_t type); + virtual bool isTimedTrack() const { return false; } + bool isFastTrack() const { return (mFlags & IAudioFlinger::TRACK_FAST) != 0; } + virtual bool isOut() const; + +protected: + + // written by Track::mute() called by binder thread(s), without a mutex or barrier. + // read by Track::isMuted() called by playback thread, also without a mutex or barrier. + // The lack of mutex or barrier is safe because the mute status is only used by itself. 
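Note on the lock-free mMute flag declared just below: the comment above relies on a single racing bool whose value is not used to publish any other data. In C++11 and later the same intent is usually spelled with a relaxed std::atomic<bool>, which documents the single-writer/single-reader contract without adding a barrier. A hypothetical sketch, not the Track member itself:

    // illustrative only.
    #include <atomic>

    struct MuteFlag {
        std::atomic<bool> value{false};

        // written from binder threads
        void set(bool muted) { value.store(muted, std::memory_order_relaxed); }

        // read from the playback thread; no other data is published through this flag,
        // so relaxed ordering suffices (mirrors the "no mutex or barrier" comment above)
        bool get() const { return value.load(std::memory_order_relaxed); }
    };

    int main() { MuteFlag f; f.set(true); return f.get() ? 0 : 1; }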
+ bool mMute; + + // FILLED state is used for suppressing volume ramp at begin of playing + enum {FS_INVALID, FS_FILLING, FS_FILLED, FS_ACTIVE}; + mutable uint8_t mFillingUpStatus; + int8_t mRetryCount; + const sp mSharedBuffer; + bool mResetDone; + const audio_stream_type_t mStreamType; + int mName; // track name on the normal mixer, + // allocated statically at track creation time, + // and is even allocated (though unused) for fast tracks + // FIXME don't allocate track name for fast tracks + int16_t *mMainBuffer; + int32_t *mAuxBuffer; + int mAuxEffectId; + bool mHasVolumeController; + size_t mPresentationCompleteFrames; // number of frames written to the + // audio HAL when this track will be fully rendered + // zero means not monitoring +private: + IAudioFlinger::track_flags_t mFlags; + + // The following fields are only for fast tracks, and should be in a subclass + int mFastIndex; // index within FastMixerState::mFastTracks[]; + // either mFastIndex == -1 if not isFastTrack() + // or 0 < mFastIndex < FastMixerState::kMaxFast because + // index 0 is reserved for normal mixer's submix; + // index is allocated statically at track creation time + // but the slot is only used if track is active + FastTrackUnderruns mObservedUnderruns; // Most recently observed value of + // mFastMixerDumpState.mTracks[mFastIndex].mUnderruns + uint32_t mUnderrunCount; // Counter of total number of underruns, never reset + volatile float mCachedVolume; // combined master volume and stream type volume; + // 'volatile' means accessed without lock or + // barrier, but is read/written atomically +}; // end of Track + +class TimedTrack : public Track { + public: + static sp create(PlaybackThread *thread, + const sp& client, + audio_stream_type_t streamType, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + const sp& sharedBuffer, + int sessionId); + virtual ~TimedTrack(); + + class TimedBuffer { + public: + TimedBuffer(); + TimedBuffer(const sp& buffer, int64_t pts); + const sp& buffer() const { return mBuffer; } + int64_t pts() const { return mPTS; } + uint32_t position() const { return mPosition; } + void setPosition(uint32_t pos) { mPosition = pos; } + private: + sp mBuffer; + int64_t mPTS; + uint32_t mPosition; + }; + + // Mixer facing methods. + virtual bool isTimedTrack() const { return true; } + virtual size_t framesReady() const; + + // AudioBufferProvider interface + virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, + int64_t pts); + virtual void releaseBuffer(AudioBufferProvider::Buffer* buffer); + + // Client/App facing methods. 
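Note: a TimedBuffer (declared above) is a shared-memory buffer tagged with a media-time PTS and a consume position, and the mixer-facing getNextBuffer() broadly either plays the head of the queue or pads with silence until its presentation time arrives (compare the timedYieldSamples_l()/timedYieldSilence_l() helpers declared further down). A rough standalone sketch of that head-of-queue decision, with hypothetical names and a deliberately simplified clock expressed in output frames:

    // toy_timed_queue.cpp -- illustrative only; the real TimedTrack maps media time to
    // output frames via LinearTransform and trims the queue under a dedicated lock.
    #include <cstdint>
    #include <cstdio>
    #include <deque>
    #include <vector>

    struct ToyTimedBuffer {
        std::vector<int16_t> pcm;
        int64_t pts;        // presentation time, here in output frames for simplicity
        uint32_t position;  // frames already consumed from this buffer
    };

    enum class Yield { SILENCE, SAMPLES, NOTHING };

    // Decide what the mixer should get at output time 'now' (in frames).
    Yield nextChunk(std::deque<ToyTimedBuffer>& q, int64_t now) {
        if (q.empty()) return Yield::NOTHING;
        ToyTimedBuffer& head = q.front();
        if (now < head.pts) return Yield::SILENCE;        // too early: pad with silence
        head.position += 4;                               // pretend we consumed 4 frames
        if (head.position >= head.pcm.size()) q.pop_front();
        return Yield::SAMPLES;
    }

    int main() {
        std::deque<ToyTimedBuffer> q;
        q.push_back({std::vector<int16_t>(8, 42), /*pts=*/100, 0});
        printf("%d\n", (int)nextChunk(q, 50));   // 0 -> SILENCE (PTS not reached yet)
        printf("%d\n", (int)nextChunk(q, 120));  // 1 -> SAMPLES
        return 0;
    }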
+ status_t allocateTimedBuffer(size_t size, + sp* buffer); + status_t queueTimedBuffer(const sp& buffer, + int64_t pts); + status_t setMediaTimeTransform(const LinearTransform& xform, + TimedAudioTrack::TargetTimeline target); + + private: + TimedTrack(PlaybackThread *thread, + const sp& client, + audio_stream_type_t streamType, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + const sp& sharedBuffer, + int sessionId); + + void timedYieldSamples_l(AudioBufferProvider::Buffer* buffer); + void timedYieldSilence_l(uint32_t numFrames, + AudioBufferProvider::Buffer* buffer); + void trimTimedBufferQueue_l(); + void trimTimedBufferQueueHead_l(const char* logTag); + void updateFramesPendingAfterTrim_l(const TimedBuffer& buf, + const char* logTag); + + uint64_t mLocalTimeFreq; + LinearTransform mLocalTimeToSampleTransform; + LinearTransform mMediaTimeToSampleTransform; + sp mTimedMemoryDealer; + + Vector mTimedBufferQueue; + bool mQueueHeadInFlight; + bool mTrimQueueHeadOnRelease; + uint32_t mFramesPendingInQueue; + + uint8_t* mTimedSilenceBuffer; + uint32_t mTimedSilenceBufferSize; + mutable Mutex mTimedBufferQueueLock; + bool mTimedAudioOutputOnTime; + CCHelper mCCHelper; + + Mutex mMediaTimeTransformLock; + LinearTransform mMediaTimeTransform; + bool mMediaTimeTransformValid; + TimedAudioTrack::TargetTimeline mMediaTimeTransformTarget; +}; + + +// playback track, used by DuplicatingThread +class OutputTrack : public Track { +public: + + class Buffer : public AudioBufferProvider::Buffer { + public: + int16_t *mBuffer; + }; + + OutputTrack(PlaybackThread *thread, + DuplicatingThread *sourceThread, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount); + virtual ~OutputTrack(); + + virtual status_t start(AudioSystem::sync_event_t event = + AudioSystem::SYNC_EVENT_NONE, + int triggerSession = 0); + virtual void stop(); + bool write(int16_t* data, uint32_t frames); + bool bufferQueueEmpty() const { return mBufferQueue.size() == 0; } + bool isActive() const { return mActive; } + const wp& thread() const { return mThread; } + +private: + + enum { + NO_MORE_BUFFERS = 0x80000001, // same in AudioTrack.h, ok to be different value + }; + + status_t obtainBuffer(AudioBufferProvider::Buffer* buffer, + uint32_t waitTimeMs); + void clearBufferQueue(); + + // Maximum number of pending buffers allocated by OutputTrack::write() + static const uint8_t kMaxOverFlowBuffers = 10; + + Vector < Buffer* > mBufferQueue; + AudioBufferProvider::Buffer mOutBuffer; + bool mActive; + DuplicatingThread* const mSourceThread; // for waitTimeMs() in write() + void* mBuffers; // starting address of buffers in plain memory +}; // end of OutputTrack diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h new file mode 100644 index 0000000..fe681d7 --- /dev/null +++ b/services/audioflinger/RecordTracks.h @@ -0,0 +1,62 @@ +/* +** +** Copyright 2012, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef INCLUDING_FROM_AUDIOFLINGER_H + #error This header file should only be included from AudioFlinger.h +#endif + +// record track +class RecordTrack : public TrackBase { +public: + RecordTrack(RecordThread *thread, + const sp& client, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + int sessionId); + virtual ~RecordTrack(); + + virtual status_t start(AudioSystem::sync_event_t event, int triggerSession); + virtual void stop(); + + void destroy(); + + // clear the buffer overflow flag + void clearOverflow() { mOverflow = false; } + // set the buffer overflow flag and return previous value + bool setOverflow() { bool tmp = mOverflow; mOverflow = true; + return tmp; } + + static void appendDumpHeader(String8& result); + void dump(char* buffer, size_t size); + + virtual bool isOut() const; + +private: + friend class AudioFlinger; // for mState + + RecordTrack(const RecordTrack&); + RecordTrack& operator = (const RecordTrack&); + + // AudioBufferProvider interface + virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, + int64_t pts = kInvalidPTS); + // releaseBuffer() not overridden + + bool mOverflow; // overflow on most recent attempt to fill client buffer +}; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp new file mode 100644 index 0000000..1ceb850 --- /dev/null +++ b/services/audioflinger/Threads.cpp @@ -0,0 +1,4426 @@ +/* +** +** Copyright 2012, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + + +#define LOG_TAG "AudioFlinger" +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +// NBAIO implementations +#include +#include +#include +#include +#include +#include + +#include + +#include +#include + +#include "AudioFlinger.h" +#include "AudioMixer.h" +#include "FastMixer.h" +#include "ServiceUtilities.h" +#include "SchedulingPolicyService.h" + +#undef ADD_BATTERY_DATA + +#ifdef ADD_BATTERY_DATA +#include +#include +#endif + +// #define DEBUG_CPU_USAGE 10 // log statistics every n wall clock seconds +#ifdef DEBUG_CPU_USAGE +#include +#include +#endif + +// ---------------------------------------------------------------------------- + +// Note: the following macro is used for extremely verbose logging message. In +// order to run with ALOG_ASSERT turned on, we need to have LOG_NDEBUG set to +// 0; but one side effect of this is to turn all LOGV's as well. Some messages +// are so verbose that we want to suppress them even when we have ALOG_ASSERT +// turned on. Do not uncomment the #def below unless you really know what you +// are doing and want to see all of the extremely verbose messages. +//#define VERY_VERY_VERBOSE_LOGGING +#ifdef VERY_VERY_VERBOSE_LOGGING +#define ALOGVV ALOGV +#else +#define ALOGVV(a...) 
do { } while(0) +#endif + +namespace android { + +// retry counts for buffer fill timeout +// 50 * ~20msecs = 1 second +static const int8_t kMaxTrackRetries = 50; +static const int8_t kMaxTrackStartupRetries = 50; +// allow less retry attempts on direct output thread. +// direct outputs can be a scarce resource in audio hardware and should +// be released as quickly as possible. +static const int8_t kMaxTrackRetriesDirect = 2; + +// don't warn about blocked writes or record buffer overflows more often than this +static const nsecs_t kWarningThrottleNs = seconds(5); + +// RecordThread loop sleep time upon application overrun or audio HAL read error +static const int kRecordThreadSleepUs = 5000; + +// maximum time to wait for setParameters to complete +static const nsecs_t kSetParametersTimeoutNs = seconds(2); + +// minimum sleep time for the mixer thread loop when tracks are active but in underrun +static const uint32_t kMinThreadSleepTimeUs = 5000; +// maximum divider applied to the active sleep time in the mixer thread loop +static const uint32_t kMaxThreadSleepTimeShift = 2; + +// minimum normal mix buffer size, expressed in milliseconds rather than frames +static const uint32_t kMinNormalMixBufferSizeMs = 20; +// maximum normal mix buffer size +static const uint32_t kMaxNormalMixBufferSizeMs = 24; + +// Whether to use fast mixer +static const enum { + FastMixer_Never, // never initialize or use: for debugging only + FastMixer_Always, // always initialize and use, even if not needed: for debugging only + // normal mixer multiplier is 1 + FastMixer_Static, // initialize if needed, then use all the time if initialized, + // multiplier is calculated based on min & max normal mixer buffer size + FastMixer_Dynamic, // initialize if needed, then use dynamically depending on track load, + // multiplier is calculated based on min & max normal mixer buffer size + // FIXME for FastMixer_Dynamic: + // Supporting this option will require fixing HALs that can't handle large writes. + // For example, one HAL implementation returns an error from a large write, + // and another HAL implementation corrupts memory, possibly in the sample rate converter. + // We could either fix the HAL implementations, or provide a wrapper that breaks + // up large writes into smaller ones, and the wrapper would need to deal with scheduler. +} kUseFastMixer = FastMixer_Static; + +// Priorities for requestPriority +static const int kPriorityAudioApp = 2; +static const int kPriorityFastMixer = 3; + +// IAudioFlinger::createTrack() reports back to client the total size of shared memory area +// for the track. The client then sub-divides this into smaller buffers for its use. +// Currently the client uses double-buffering by default, but doesn't tell us about that. +// So for now we just assume that client is double-buffered. +// FIXME It would be better for client to tell AudioFlinger whether it wants double-buffering or +// N-buffering, so AudioFlinger could allocate the right amount of memory. +// See the client's minBufCount and mNotificationFramesAct calculations for details. 
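Note on the constant defined just below: a fast track that does not specify a frame count defaults to kFastTrackMultiplier times the fast mixer (HAL) frame count, as applied in createTrack_l() later in this patch, and the comment above assumes the client then splits the shared memory in two. A worked sketch; the 240-frame HAL buffer (5 ms at 48 kHz) is a hypothetical example value, not a number taken from this patch.

    // illustrative only.
    #include <cstdio>

    int main() {
        const unsigned kFastTrackMultiplier = 2;  // as defined below
        const unsigned halFrameCount = 240;       // hypothetical fast mixer buffer (5 ms @ 48 kHz)

        unsigned requested = 0;                   // client did not specify a frame count
        unsigned frameCount = requested ? requested : halFrameCount * kFastTrackMultiplier;

        // With the double-buffering assumption described above, the client would
        // sub-divide the shared memory into two halves of halFrameCount frames each.
        printf("track frameCount=%u, per client sub-buffer=%u\n", frameCount, frameCount / 2);
        return 0;
    }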
+static const int kFastTrackMultiplier = 2; + +// ---------------------------------------------------------------------------- + +#ifdef ADD_BATTERY_DATA +// To collect the amplifier usage +static void addBatteryData(uint32_t params) { + sp service = IMediaDeathNotifier::getMediaPlayerService(); + if (service == NULL) { + // it already logged + return; + } + + service->addBatteryData(params); +} +#endif + + +// ---------------------------------------------------------------------------- +// CPU Stats +// ---------------------------------------------------------------------------- + +class CpuStats { +public: + CpuStats(); + void sample(const String8 &title); +#ifdef DEBUG_CPU_USAGE +private: + ThreadCpuUsage mCpuUsage; // instantaneous thread CPU usage in wall clock ns + CentralTendencyStatistics mWcStats; // statistics on thread CPU usage in wall clock ns + + CentralTendencyStatistics mHzStats; // statistics on thread CPU usage in cycles + + int mCpuNum; // thread's current CPU number + int mCpukHz; // frequency of thread's current CPU in kHz +#endif +}; + +CpuStats::CpuStats() +#ifdef DEBUG_CPU_USAGE + : mCpuNum(-1), mCpukHz(-1) +#endif +{ +} + +void CpuStats::sample(const String8 &title) { +#ifdef DEBUG_CPU_USAGE + // get current thread's delta CPU time in wall clock ns + double wcNs; + bool valid = mCpuUsage.sampleAndEnable(wcNs); + + // record sample for wall clock statistics + if (valid) { + mWcStats.sample(wcNs); + } + + // get the current CPU number + int cpuNum = sched_getcpu(); + + // get the current CPU frequency in kHz + int cpukHz = mCpuUsage.getCpukHz(cpuNum); + + // check if either CPU number or frequency changed + if (cpuNum != mCpuNum || cpukHz != mCpukHz) { + mCpuNum = cpuNum; + mCpukHz = cpukHz; + // ignore sample for purposes of cycles + valid = false; + } + + // if no change in CPU number or frequency, then record sample for cycle statistics + if (valid && mCpukHz > 0) { + double cycles = wcNs * cpukHz * 0.000001; + mHzStats.sample(cycles); + } + + unsigned n = mWcStats.n(); + // mCpuUsage.elapsed() is expensive, so don't call it every loop + if ((n & 127) == 1) { + long long elapsed = mCpuUsage.elapsed(); + if (elapsed >= DEBUG_CPU_USAGE * 1000000000LL) { + double perLoop = elapsed / (double) n; + double perLoop100 = perLoop * 0.01; + double perLoop1k = perLoop * 0.001; + double mean = mWcStats.mean(); + double stddev = mWcStats.stddev(); + double minimum = mWcStats.minimum(); + double maximum = mWcStats.maximum(); + double meanCycles = mHzStats.mean(); + double stddevCycles = mHzStats.stddev(); + double minCycles = mHzStats.minimum(); + double maxCycles = mHzStats.maximum(); + mCpuUsage.resetElapsed(); + mWcStats.reset(); + mHzStats.reset(); + ALOGD("CPU usage for %s over past %.1f secs\n" + " (%u mixer loops at %.1f mean ms per loop):\n" + " us per mix loop: mean=%.0f stddev=%.0f min=%.0f max=%.0f\n" + " %% of wall: mean=%.1f stddev=%.1f min=%.1f max=%.1f\n" + " MHz: mean=%.1f, stddev=%.1f, min=%.1f max=%.1f", + title.string(), + elapsed * .000000001, n, perLoop * .000001, + mean * .001, + stddev * .001, + minimum * .001, + maximum * .001, + mean / perLoop100, + stddev / perLoop100, + minimum / perLoop100, + maximum / perLoop100, + meanCycles / perLoop1k, + stddevCycles / perLoop1k, + minCycles / perLoop1k, + maxCycles / perLoop1k); + + } + } +#endif +}; + +// ---------------------------------------------------------------------------- +// ThreadBase +// ---------------------------------------------------------------------------- + 
+AudioFlinger::ThreadBase::ThreadBase(const sp& audioFlinger, audio_io_handle_t id, + audio_devices_t outDevice, audio_devices_t inDevice, type_t type) + : Thread(false /*canCallJava*/), + mType(type), + mAudioFlinger(audioFlinger), mSampleRate(0), mFrameCount(0), mNormalFrameCount(0), + // mChannelMask + mChannelCount(0), + mFrameSize(1), mFormat(AUDIO_FORMAT_INVALID), + mParamStatus(NO_ERROR), + mStandby(false), mOutDevice(outDevice), mInDevice(inDevice), + mAudioSource(AUDIO_SOURCE_DEFAULT), mId(id), + // mName will be set by concrete (non-virtual) subclass + mDeathRecipient(new PMDeathRecipient(this)) +{ +} + +AudioFlinger::ThreadBase::~ThreadBase() +{ + mParamCond.broadcast(); + // do not lock the mutex in destructor + releaseWakeLock_l(); + if (mPowerManager != 0) { + sp binder = mPowerManager->asBinder(); + binder->unlinkToDeath(mDeathRecipient); + } +} + +void AudioFlinger::ThreadBase::exit() +{ + ALOGV("ThreadBase::exit"); + // do any cleanup required for exit to succeed + preExit(); + { + // This lock prevents the following race in thread (uniprocessor for illustration): + // if (!exitPending()) { + // // context switch from here to exit() + // // exit() calls requestExit(), what exitPending() observes + // // exit() calls signal(), which is dropped since no waiters + // // context switch back from exit() to here + // mWaitWorkCV.wait(...); + // // now thread is hung + // } + AutoMutex lock(mLock); + requestExit(); + mWaitWorkCV.broadcast(); + } + // When Thread::requestExitAndWait is made virtual and this method is renamed to + // "virtual status_t requestExitAndWait()", replace by "return Thread::requestExitAndWait();" + requestExitAndWait(); +} + +status_t AudioFlinger::ThreadBase::setParameters(const String8& keyValuePairs) +{ + status_t status; + + ALOGV("ThreadBase::setParameters() %s", keyValuePairs.string()); + Mutex::Autolock _l(mLock); + + mNewParameters.add(keyValuePairs); + mWaitWorkCV.signal(); + // wait condition with timeout in case the thread loop has exited + // before the request could be processed + if (mParamCond.waitRelative(mLock, kSetParametersTimeoutNs) == NO_ERROR) { + status = mParamStatus; + mWaitWorkCV.signal(); + } else { + status = TIMED_OUT; + } + return status; +} + +void AudioFlinger::ThreadBase::sendIoConfigEvent(int event, int param) +{ + Mutex::Autolock _l(mLock); + sendIoConfigEvent_l(event, param); +} + +// sendIoConfigEvent_l() must be called with ThreadBase::mLock held +void AudioFlinger::ThreadBase::sendIoConfigEvent_l(int event, int param) +{ + IoConfigEvent *ioEvent = new IoConfigEvent(event, param); + mConfigEvents.add(static_cast(ioEvent)); + ALOGV("sendIoConfigEvent() num events %d event %d, param %d", mConfigEvents.size(), event, + param); + mWaitWorkCV.signal(); +} + +// sendPrioConfigEvent_l() must be called with ThreadBase::mLock held +void AudioFlinger::ThreadBase::sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio) +{ + PrioConfigEvent *prioEvent = new PrioConfigEvent(pid, tid, prio); + mConfigEvents.add(static_cast(prioEvent)); + ALOGV("sendPrioConfigEvent_l() num events %d pid %d, tid %d prio %d", + mConfigEvents.size(), pid, tid, prio); + mWaitWorkCV.signal(); +} + +void AudioFlinger::ThreadBase::processConfigEvents() +{ + mLock.lock(); + while (!mConfigEvents.isEmpty()) { + ALOGV("processConfigEvents() remaining events %d", mConfigEvents.size()); + ConfigEvent *event = mConfigEvents[0]; + mConfigEvents.removeAt(0); + // release mLock before locking AudioFlinger mLock: lock order is always + // AudioFlinger then 
ThreadBase to avoid cross deadlock + mLock.unlock(); + switch(event->type()) { + case CFG_EVENT_PRIO: { + PrioConfigEvent *prioEvent = static_cast(event); + int err = requestPriority(prioEvent->pid(), prioEvent->tid(), prioEvent->prio()); + if (err != 0) { + ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; " + "error %d", + prioEvent->prio(), prioEvent->pid(), prioEvent->tid(), err); + } + } break; + case CFG_EVENT_IO: { + IoConfigEvent *ioEvent = static_cast(event); + mAudioFlinger->mLock.lock(); + audioConfigChanged_l(ioEvent->event(), ioEvent->param()); + mAudioFlinger->mLock.unlock(); + } break; + default: + ALOGE("processConfigEvents() unknown event type %d", event->type()); + break; + } + delete event; + mLock.lock(); + } + mLock.unlock(); +} + +void AudioFlinger::ThreadBase::dumpBase(int fd, const Vector& args) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + bool locked = AudioFlinger::dumpTryLock(mLock); + if (!locked) { + snprintf(buffer, SIZE, "thread %p maybe dead locked\n", this); + write(fd, buffer, strlen(buffer)); + } + + snprintf(buffer, SIZE, "io handle: %d\n", mId); + result.append(buffer); + snprintf(buffer, SIZE, "TID: %d\n", getTid()); + result.append(buffer); + snprintf(buffer, SIZE, "standby: %d\n", mStandby); + result.append(buffer); + snprintf(buffer, SIZE, "Sample rate: %u\n", mSampleRate); + result.append(buffer); + snprintf(buffer, SIZE, "HAL frame count: %d\n", mFrameCount); + result.append(buffer); + snprintf(buffer, SIZE, "Normal frame count: %d\n", mNormalFrameCount); + result.append(buffer); + snprintf(buffer, SIZE, "Channel Count: %d\n", mChannelCount); + result.append(buffer); + snprintf(buffer, SIZE, "Channel Mask: 0x%08x\n", mChannelMask); + result.append(buffer); + snprintf(buffer, SIZE, "Format: %d\n", mFormat); + result.append(buffer); + snprintf(buffer, SIZE, "Frame size: %u\n", mFrameSize); + result.append(buffer); + + snprintf(buffer, SIZE, "\nPending setParameters commands: \n"); + result.append(buffer); + result.append(" Index Command"); + for (size_t i = 0; i < mNewParameters.size(); ++i) { + snprintf(buffer, SIZE, "\n %02d ", i); + result.append(buffer); + result.append(mNewParameters[i]); + } + + snprintf(buffer, SIZE, "\n\nPending config events: \n"); + result.append(buffer); + for (size_t i = 0; i < mConfigEvents.size(); i++) { + mConfigEvents[i]->dump(buffer, SIZE); + result.append(buffer); + } + result.append("\n"); + + write(fd, result.string(), result.size()); + + if (locked) { + mLock.unlock(); + } +} + +void AudioFlinger::ThreadBase::dumpEffectChains(int fd, const Vector& args) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, "\n- %d Effect Chains:\n", mEffectChains.size()); + write(fd, buffer, strlen(buffer)); + + for (size_t i = 0; i < mEffectChains.size(); ++i) { + sp chain = mEffectChains[i]; + if (chain != 0) { + chain->dump(fd, args); + } + } +} + +void AudioFlinger::ThreadBase::acquireWakeLock() +{ + Mutex::Autolock _l(mLock); + acquireWakeLock_l(); +} + +void AudioFlinger::ThreadBase::acquireWakeLock_l() +{ + if (mPowerManager == 0) { + // use checkService() to avoid blocking if power service is not up yet + sp binder = + defaultServiceManager()->checkService(String16("power")); + if (binder == 0) { + ALOGW("Thread %s cannot connect to the power manager service", mName); + } else { + mPowerManager = interface_cast(binder); + binder->linkToDeath(mDeathRecipient); + } + } + if (mPowerManager != 0) { + sp binder = new BBinder(); + 
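Note on the lock-ordering rule spelled out in processConfigEvents() above (AudioFlinger::mLock is always taken before a ThreadBase::mLock, so the thread lock is dropped first): this is the standard discipline for avoiding cross deadlock between two mutexes. A generic standalone sketch with hypothetical names, not AudioFlinger code:

    // illustrative only -- generic lock-ordering discipline.
    #include <mutex>

    std::mutex gServiceLock;   // stands in for AudioFlinger::mLock (always first)
    std::mutex gThreadLock;    // stands in for ThreadBase::mLock  (always second)

    void workNeedingBothLocks() {
        // Correct order: service lock, then thread lock.
        std::lock_guard<std::mutex> s(gServiceLock);
        std::lock_guard<std::mutex> t(gThreadLock);
        // ... touch state protected by both ...
    }

    void workStartedUnderThreadLock() {
        std::unique_lock<std::mutex> t(gThreadLock);
        // ... thread-local bookkeeping ...
        t.unlock();                    // drop the thread lock before taking the service lock,
        workNeedingBothLocks();        // just as processConfigEvents() releases mLock first
        t.lock();
        // ... continue under the thread lock ...
    }

    int main() { workStartedUnderThreadLock(); return 0; }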
status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK, + binder, + String16(mName)); + if (status == NO_ERROR) { + mWakeLockToken = binder; + } + ALOGV("acquireWakeLock_l() %s status %d", mName, status); + } +} + +void AudioFlinger::ThreadBase::releaseWakeLock() +{ + Mutex::Autolock _l(mLock); + releaseWakeLock_l(); +} + +void AudioFlinger::ThreadBase::releaseWakeLock_l() +{ + if (mWakeLockToken != 0) { + ALOGV("releaseWakeLock_l() %s", mName); + if (mPowerManager != 0) { + mPowerManager->releaseWakeLock(mWakeLockToken, 0); + } + mWakeLockToken.clear(); + } +} + +void AudioFlinger::ThreadBase::clearPowerManager() +{ + Mutex::Autolock _l(mLock); + releaseWakeLock_l(); + mPowerManager.clear(); +} + +void AudioFlinger::ThreadBase::PMDeathRecipient::binderDied(const wp& who) +{ + sp thread = mThread.promote(); + if (thread != 0) { + thread->clearPowerManager(); + } + ALOGW("power manager service died !!!"); +} + +void AudioFlinger::ThreadBase::setEffectSuspended( + const effect_uuid_t *type, bool suspend, int sessionId) +{ + Mutex::Autolock _l(mLock); + setEffectSuspended_l(type, suspend, sessionId); +} + +void AudioFlinger::ThreadBase::setEffectSuspended_l( + const effect_uuid_t *type, bool suspend, int sessionId) +{ + sp chain = getEffectChain_l(sessionId); + if (chain != 0) { + if (type != NULL) { + chain->setEffectSuspended_l(type, suspend); + } else { + chain->setEffectSuspendedAll_l(suspend); + } + } + + updateSuspendedSessions_l(type, suspend, sessionId); +} + +void AudioFlinger::ThreadBase::checkSuspendOnAddEffectChain_l(const sp& chain) +{ + ssize_t index = mSuspendedSessions.indexOfKey(chain->sessionId()); + if (index < 0) { + return; + } + + const KeyedVector >& sessionEffects = + mSuspendedSessions.valueAt(index); + + for (size_t i = 0; i < sessionEffects.size(); i++) { + sp desc = sessionEffects.valueAt(i); + for (int j = 0; j < desc->mRefCount; j++) { + if (sessionEffects.keyAt(i) == EffectChain::kKeyForSuspendAll) { + chain->setEffectSuspendedAll_l(true); + } else { + ALOGV("checkSuspendOnAddEffectChain_l() suspending effects %08x", + desc->mType.timeLow); + chain->setEffectSuspended_l(&desc->mType, true); + } + } + } +} + +void AudioFlinger::ThreadBase::updateSuspendedSessions_l(const effect_uuid_t *type, + bool suspend, + int sessionId) +{ + ssize_t index = mSuspendedSessions.indexOfKey(sessionId); + + KeyedVector > sessionEffects; + + if (suspend) { + if (index >= 0) { + sessionEffects = mSuspendedSessions.valueAt(index); + } else { + mSuspendedSessions.add(sessionId, sessionEffects); + } + } else { + if (index < 0) { + return; + } + sessionEffects = mSuspendedSessions.valueAt(index); + } + + + int key = EffectChain::kKeyForSuspendAll; + if (type != NULL) { + key = type->timeLow; + } + index = sessionEffects.indexOfKey(key); + + sp desc; + if (suspend) { + if (index >= 0) { + desc = sessionEffects.valueAt(index); + } else { + desc = new SuspendedSessionDesc(); + if (type != NULL) { + desc->mType = *type; + } + sessionEffects.add(key, desc); + ALOGV("updateSuspendedSessions_l() suspend adding effect %08x", key); + } + desc->mRefCount++; + } else { + if (index < 0) { + return; + } + desc = sessionEffects.valueAt(index); + if (--desc->mRefCount == 0) { + ALOGV("updateSuspendedSessions_l() restore removing effect %08x", key); + sessionEffects.removeItemsAt(index); + if (sessionEffects.isEmpty()) { + ALOGV("updateSuspendedSessions_l() restore removing session %d", + sessionId); + mSuspendedSessions.removeItem(sessionId); + } + } + } + if 
(!sessionEffects.isEmpty()) { + mSuspendedSessions.replaceValueFor(sessionId, sessionEffects); + } +} + +void AudioFlinger::ThreadBase::checkSuspendOnEffectEnabled(const sp& effect, + bool enabled, + int sessionId) +{ + Mutex::Autolock _l(mLock); + checkSuspendOnEffectEnabled_l(effect, enabled, sessionId); +} + +void AudioFlinger::ThreadBase::checkSuspendOnEffectEnabled_l(const sp& effect, + bool enabled, + int sessionId) +{ + if (mType != RECORD) { + // suspend all effects in AUDIO_SESSION_OUTPUT_MIX when enabling any effect on + // another session. This gives the priority to well behaved effect control panels + // and applications not using global effects. + // Enabling post processing in AUDIO_SESSION_OUTPUT_STAGE session does not affect + // global effects + if ((sessionId != AUDIO_SESSION_OUTPUT_MIX) && (sessionId != AUDIO_SESSION_OUTPUT_STAGE)) { + setEffectSuspended_l(NULL, enabled, AUDIO_SESSION_OUTPUT_MIX); + } + } + + sp chain = getEffectChain_l(sessionId); + if (chain != 0) { + chain->checkSuspendOnEffectEnabled(effect, enabled); + } +} + +// ThreadBase::createEffect_l() must be called with AudioFlinger::mLock held +sp AudioFlinger::ThreadBase::createEffect_l( + const sp& client, + const sp& effectClient, + int32_t priority, + int sessionId, + effect_descriptor_t *desc, + int *enabled, + status_t *status + ) +{ + sp effect; + sp handle; + status_t lStatus; + sp chain; + bool chainCreated = false; + bool effectCreated = false; + bool effectRegistered = false; + + lStatus = initCheck(); + if (lStatus != NO_ERROR) { + ALOGW("createEffect_l() Audio driver not initialized."); + goto Exit; + } + + // Do not allow effects with session ID 0 on direct output or duplicating threads + // TODO: add rule for hw accelerated effects on direct outputs with non PCM format + if (sessionId == AUDIO_SESSION_OUTPUT_MIX && mType != MIXER) { + ALOGW("createEffect_l() Cannot add auxiliary effect %s to session %d", + desc->name, sessionId); + lStatus = BAD_VALUE; + goto Exit; + } + // Only Pre processor effects are allowed on input threads and only on input threads + if ((mType == RECORD) != ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC)) { + ALOGW("createEffect_l() effect %s (flags %08x) created on wrong thread type %d", + desc->name, desc->flags, mType); + lStatus = BAD_VALUE; + goto Exit; + } + + ALOGV("createEffect_l() thread %p effect %s on session %d", this, desc->name, sessionId); + + { // scope for mLock + Mutex::Autolock _l(mLock); + + // check for existing effect chain with the requested audio session + chain = getEffectChain_l(sessionId); + if (chain == 0) { + // create a new chain for this session + ALOGV("createEffect_l() new effect chain for session %d", sessionId); + chain = new EffectChain(this, sessionId); + addEffectChain_l(chain); + chain->setStrategy(getStrategyForSession_l(sessionId)); + chainCreated = true; + } else { + effect = chain->getEffectFromDesc_l(desc); + } + + ALOGV("createEffect_l() got effect %p on chain %p", effect.get(), chain.get()); + + if (effect == 0) { + int id = mAudioFlinger->nextUniqueId(); + // Check CPU and memory usage + lStatus = AudioSystem::registerEffect(desc, mId, chain->strategy(), sessionId, id); + if (lStatus != NO_ERROR) { + goto Exit; + } + effectRegistered = true; + // create a new effect module if none present in the chain + effect = new EffectModule(this, chain, desc, id, sessionId); + lStatus = effect->status(); + if (lStatus != NO_ERROR) { + goto Exit; + } + lStatus = chain->addEffect_l(effect); + if (lStatus != 
NO_ERROR) { + goto Exit; + } + effectCreated = true; + + effect->setDevice(mOutDevice); + effect->setDevice(mInDevice); + effect->setMode(mAudioFlinger->getMode()); + effect->setAudioSource(mAudioSource); + } + // create effect handle and connect it to effect module + handle = new EffectHandle(effect, client, effectClient, priority); + lStatus = effect->addHandle(handle.get()); + if (enabled != NULL) { + *enabled = (int)effect->isEnabled(); + } + } + +Exit: + if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) { + Mutex::Autolock _l(mLock); + if (effectCreated) { + chain->removeEffect_l(effect); + } + if (effectRegistered) { + AudioSystem::unregisterEffect(effect->id()); + } + if (chainCreated) { + removeEffectChain_l(chain); + } + handle.clear(); + } + + if (status != NULL) { + *status = lStatus; + } + return handle; +} + +sp AudioFlinger::ThreadBase::getEffect(int sessionId, int effectId) +{ + Mutex::Autolock _l(mLock); + return getEffect_l(sessionId, effectId); +} + +sp AudioFlinger::ThreadBase::getEffect_l(int sessionId, int effectId) +{ + sp chain = getEffectChain_l(sessionId); + return chain != 0 ? chain->getEffectFromId_l(effectId) : 0; +} + +// PlaybackThread::addEffect_l() must be called with AudioFlinger::mLock and +// PlaybackThread::mLock held +status_t AudioFlinger::ThreadBase::addEffect_l(const sp& effect) +{ + // check for existing effect chain with the requested audio session + int sessionId = effect->sessionId(); + sp chain = getEffectChain_l(sessionId); + bool chainCreated = false; + + if (chain == 0) { + // create a new chain for this session + ALOGV("addEffect_l() new effect chain for session %d", sessionId); + chain = new EffectChain(this, sessionId); + addEffectChain_l(chain); + chain->setStrategy(getStrategyForSession_l(sessionId)); + chainCreated = true; + } + ALOGV("addEffect_l() %p chain %p effect %p", this, chain.get(), effect.get()); + + if (chain->getEffectFromId_l(effect->id()) != 0) { + ALOGW("addEffect_l() %p effect %s already present in chain %p", + this, effect->desc().name, chain.get()); + return BAD_VALUE; + } + + status_t status = chain->addEffect_l(effect); + if (status != NO_ERROR) { + if (chainCreated) { + removeEffectChain_l(chain); + } + return status; + } + + effect->setDevice(mOutDevice); + effect->setDevice(mInDevice); + effect->setMode(mAudioFlinger->getMode()); + effect->setAudioSource(mAudioSource); + return NO_ERROR; +} + +void AudioFlinger::ThreadBase::removeEffect_l(const sp& effect) { + + ALOGV("removeEffect_l() %p effect %p", this, effect.get()); + effect_descriptor_t desc = effect->desc(); + if ((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { + detachAuxEffect_l(effect->id()); + } + + sp chain = effect->chain().promote(); + if (chain != 0) { + // remove effect chain if removing last effect + if (chain->removeEffect_l(effect) == 0) { + removeEffectChain_l(chain); + } + } else { + ALOGW("removeEffect_l() %p cannot promote chain for effect %p", this, effect.get()); + } +} + +void AudioFlinger::ThreadBase::lockEffectChains_l( + Vector< sp >& effectChains) +{ + effectChains = mEffectChains; + for (size_t i = 0; i < mEffectChains.size(); i++) { + mEffectChains[i]->lock(); + } +} + +void AudioFlinger::ThreadBase::unlockEffectChains( + const Vector< sp >& effectChains) +{ + for (size_t i = 0; i < effectChains.size(); i++) { + effectChains[i]->unlock(); + } +} + +sp AudioFlinger::ThreadBase::getEffectChain(int sessionId) +{ + Mutex::Autolock _l(mLock); + return getEffectChain_l(sessionId); +} + +sp 
AudioFlinger::ThreadBase::getEffectChain_l(int sessionId) const +{ + size_t size = mEffectChains.size(); + for (size_t i = 0; i < size; i++) { + if (mEffectChains[i]->sessionId() == sessionId) { + return mEffectChains[i]; + } + } + return 0; +} + +void AudioFlinger::ThreadBase::setMode(audio_mode_t mode) +{ + Mutex::Autolock _l(mLock); + size_t size = mEffectChains.size(); + for (size_t i = 0; i < size; i++) { + mEffectChains[i]->setMode_l(mode); + } +} + +void AudioFlinger::ThreadBase::disconnectEffect(const sp& effect, + EffectHandle *handle, + bool unpinIfLast) { + + Mutex::Autolock _l(mLock); + ALOGV("disconnectEffect() %p effect %p", this, effect.get()); + // delete the effect module if removing last handle on it + if (effect->removeHandle(handle) == 0) { + if (!effect->isPinned() || unpinIfLast) { + removeEffect_l(effect); + AudioSystem::unregisterEffect(effect->id()); + } + } +} + +// ---------------------------------------------------------------------------- +// Playback +// ---------------------------------------------------------------------------- + +AudioFlinger::PlaybackThread::PlaybackThread(const sp& audioFlinger, + AudioStreamOut* output, + audio_io_handle_t id, + audio_devices_t device, + type_t type) + : ThreadBase(audioFlinger, id, device, AUDIO_DEVICE_NONE, type), + mMixBuffer(NULL), mSuspended(0), mBytesWritten(0), + // mStreamTypes[] initialized in constructor body + mOutput(output), + mLastWriteTime(0), mNumWrites(0), mNumDelayedWrites(0), mInWrite(false), + mMixerStatus(MIXER_IDLE), + mMixerStatusIgnoringFastTracks(MIXER_IDLE), + standbyDelay(AudioFlinger::mStandbyTimeInNsecs), + mScreenState(AudioFlinger::mScreenState), + // index 0 is reserved for normal mixer's submix + mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1) +{ + snprintf(mName, kNameLength, "AudioOut_%X", id); + + // Assumes constructor is called by AudioFlinger with it's mLock held, but + // it would be safer to explicitly pass initial masterVolume/masterMute as + // parameter. + // + // If the HAL we are using has support for master volume or master mute, + // then do not attenuate or mute during mixing (just leave the volume at 1.0 + // and the mute set to false). 
+ mMasterVolume = audioFlinger->masterVolume_l(); + mMasterMute = audioFlinger->masterMute_l(); + if (mOutput && mOutput->audioHwDev) { + if (mOutput->audioHwDev->canSetMasterVolume()) { + mMasterVolume = 1.0; + } + + if (mOutput->audioHwDev->canSetMasterMute()) { + mMasterMute = false; + } + } + + readOutputParameters(); + + // mStreamTypes[AUDIO_STREAM_CNT] is initialized by stream_type_t default constructor + // There is no AUDIO_STREAM_MIN, and ++ operator does not compile + for (audio_stream_type_t stream = (audio_stream_type_t) 0; stream < AUDIO_STREAM_CNT; + stream = (audio_stream_type_t) (stream + 1)) { + mStreamTypes[stream].volume = mAudioFlinger->streamVolume_l(stream); + mStreamTypes[stream].mute = mAudioFlinger->streamMute_l(stream); + } + // mStreamTypes[AUDIO_STREAM_CNT] exists but isn't explicitly initialized here, + // because mAudioFlinger doesn't have one to copy from +} + +AudioFlinger::PlaybackThread::~PlaybackThread() +{ + delete [] mMixBuffer; +} + +void AudioFlinger::PlaybackThread::dump(int fd, const Vector& args) +{ + dumpInternals(fd, args); + dumpTracks(fd, args); + dumpEffectChains(fd, args); +} + +void AudioFlinger::PlaybackThread::dumpTracks(int fd, const Vector& args) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + result.appendFormat("Output thread %p stream volumes in dB:\n ", this); + for (int i = 0; i < AUDIO_STREAM_CNT; ++i) { + const stream_type_t *st = &mStreamTypes[i]; + if (i > 0) { + result.appendFormat(", "); + } + result.appendFormat("%d:%.2g", i, 20.0 * log10(st->volume)); + if (st->mute) { + result.append("M"); + } + } + result.append("\n"); + write(fd, result.string(), result.length()); + result.clear(); + + snprintf(buffer, SIZE, "Output thread %p tracks\n", this); + result.append(buffer); + Track::appendDumpHeader(result); + for (size_t i = 0; i < mTracks.size(); ++i) { + sp track = mTracks[i]; + if (track != 0) { + track->dump(buffer, SIZE); + result.append(buffer); + } + } + + snprintf(buffer, SIZE, "Output thread %p active tracks\n", this); + result.append(buffer); + Track::appendDumpHeader(result); + for (size_t i = 0; i < mActiveTracks.size(); ++i) { + sp track = mActiveTracks[i].promote(); + if (track != 0) { + track->dump(buffer, SIZE); + result.append(buffer); + } + } + write(fd, result.string(), result.size()); + + // These values are "raw"; they will wrap around. See prepareTracks_l() for a better way. 
+ FastTrackUnderruns underruns = getFastTrackUnderruns(0); + fdprintf(fd, "Normal mixer raw underrun counters: partial=%u empty=%u\n", + underruns.mBitFields.mPartial, underruns.mBitFields.mEmpty); +} + +void AudioFlinger::PlaybackThread::dumpInternals(int fd, const Vector& args) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, "\nOutput thread %p internals\n", this); + result.append(buffer); + snprintf(buffer, SIZE, "last write occurred (msecs): %llu\n", + ns2ms(systemTime() - mLastWriteTime)); + result.append(buffer); + snprintf(buffer, SIZE, "total writes: %d\n", mNumWrites); + result.append(buffer); + snprintf(buffer, SIZE, "delayed writes: %d\n", mNumDelayedWrites); + result.append(buffer); + snprintf(buffer, SIZE, "blocked in write: %d\n", mInWrite); + result.append(buffer); + snprintf(buffer, SIZE, "suspend count: %d\n", mSuspended); + result.append(buffer); + snprintf(buffer, SIZE, "mix buffer : %p\n", mMixBuffer); + result.append(buffer); + write(fd, result.string(), result.size()); + fdprintf(fd, "Fast track availMask=%#x\n", mFastTrackAvailMask); + + dumpBase(fd, args); +} + +// Thread virtuals +status_t AudioFlinger::PlaybackThread::readyToRun() +{ + status_t status = initCheck(); + if (status == NO_ERROR) { + ALOGI("AudioFlinger's thread %p ready to run", this); + } else { + ALOGE("No working audio driver found."); + } + return status; +} + +void AudioFlinger::PlaybackThread::onFirstRef() +{ + run(mName, ANDROID_PRIORITY_URGENT_AUDIO); +} + +// ThreadBase virtuals +void AudioFlinger::PlaybackThread::preExit() +{ + ALOGV(" preExit()"); + // FIXME this is using hard-coded strings but in the future, this functionality will be + // converted to use audio HAL extensions required to support tunneling + mOutput->stream->common.set_parameters(&mOutput->stream->common, "exiting=1"); +} + +// PlaybackThread::createTrack_l() must be called with AudioFlinger::mLock held +sp AudioFlinger::PlaybackThread::createTrack_l( + const sp& client, + audio_stream_type_t streamType, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + const sp& sharedBuffer, + int sessionId, + IAudioFlinger::track_flags_t *flags, + pid_t tid, + status_t *status) +{ + sp track; + status_t lStatus; + + bool isTimed = (*flags & IAudioFlinger::TRACK_TIMED) != 0; + + // client expresses a preference for FAST, but we get the final say + if (*flags & IAudioFlinger::TRACK_FAST) { + if ( + // not timed + (!isTimed) && + // either of these use cases: + ( + // use case 1: shared buffer with any frame count + ( + (sharedBuffer != 0) + ) || + // use case 2: callback handler and frame count is default or at least as large as HAL + ( + (tid != -1) && + ((frameCount == 0) || + (frameCount >= (mFrameCount * kFastTrackMultiplier))) + ) + ) && + // PCM data + audio_is_linear_pcm(format) && + // mono or stereo + ( (channelMask == AUDIO_CHANNEL_OUT_MONO) || + (channelMask == AUDIO_CHANNEL_OUT_STEREO) ) && +#ifndef FAST_TRACKS_AT_NON_NATIVE_SAMPLE_RATE + // hardware sample rate + (sampleRate == mSampleRate) && +#endif + // normal mixer has an associated fast mixer + hasFastMixer() && + // there are sufficient fast track slots available + (mFastTrackAvailMask != 0) + // FIXME test that MixerThread for this fast track has a capable output HAL + // FIXME add a permission test also? 
+ ) { + // if frameCount not specified, then it defaults to fast mixer (HAL) frame count + if (frameCount == 0) { + frameCount = mFrameCount * kFastTrackMultiplier; + } + ALOGV("AUDIO_OUTPUT_FLAG_FAST accepted: frameCount=%d mFrameCount=%d", + frameCount, mFrameCount); + } else { + ALOGV("AUDIO_OUTPUT_FLAG_FAST denied: isTimed=%d sharedBuffer=%p frameCount=%d " + "mFrameCount=%d format=%d isLinear=%d channelMask=%#x sampleRate=%u mSampleRate=%u " + "hasFastMixer=%d tid=%d fastTrackAvailMask=%#x", + isTimed, sharedBuffer.get(), frameCount, mFrameCount, format, + audio_is_linear_pcm(format), + channelMask, sampleRate, mSampleRate, hasFastMixer(), tid, mFastTrackAvailMask); + *flags &= ~IAudioFlinger::TRACK_FAST; + // For compatibility with AudioTrack calculation, buffer depth is forced + // to be at least 2 x the normal mixer frame count and cover audio hardware latency. + // This is probably too conservative, but legacy application code may depend on it. + // If you change this calculation, also review the start threshold which is related. + uint32_t latencyMs = mOutput->stream->get_latency(mOutput->stream); + uint32_t minBufCount = latencyMs / ((1000 * mNormalFrameCount) / mSampleRate); + if (minBufCount < 2) { + minBufCount = 2; + } + size_t minFrameCount = mNormalFrameCount * minBufCount; + if (frameCount < minFrameCount) { + frameCount = minFrameCount; + } + } + } + + if (mType == DIRECT) { + if ((format & AUDIO_FORMAT_MAIN_MASK) == AUDIO_FORMAT_PCM) { + if (sampleRate != mSampleRate || format != mFormat || channelMask != mChannelMask) { + ALOGE("createTrack_l() Bad parameter: sampleRate %u format %d, channelMask 0x%08x " + "for output %p with format %d", + sampleRate, format, channelMask, mOutput, mFormat); + lStatus = BAD_VALUE; + goto Exit; + } + } + } else { + // Resampler implementation limits input sampling rate to 2 x output sampling rate. 
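Note: to make the fallback buffer sizing above concrete, here is the same minBufCount/minFrameCount arithmetic as a standalone sketch; the 96 ms latency, 960-frame normal mix buffer and 48 kHz rate are hypothetical example values, not numbers taken from this patch.

    // illustrative only -- mirrors the calculation above.
    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint32_t sampleRate = 48000;        // hypothetical output rate
        const uint32_t normalFrameCount = 960;    // hypothetical 20 ms normal mix buffer
        const uint32_t latencyMs = 96;            // hypothetical HAL latency

        uint32_t bufferMs = (1000 * normalFrameCount) / sampleRate;     // 20 ms
        uint32_t minBufCount = latencyMs / bufferMs;                    // 4
        minBufCount = std::max(minBufCount, 2u);                        // never below double-buffering
        size_t minFrameCount = (size_t)normalFrameCount * minBufCount;  // 3840 frames

        size_t frameCount = 2048;                                       // what the client asked for
        frameCount = std::max(frameCount, minFrameCount);               // bumped up to 3840
        printf("minBufCount=%u minFrameCount=%zu frameCount=%zu\n",
               minBufCount, minFrameCount, frameCount);
        return 0;
    }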
+ if (sampleRate > mSampleRate*2) { + ALOGE("Sample rate out of range: %u mSampleRate %u", sampleRate, mSampleRate); + lStatus = BAD_VALUE; + goto Exit; + } + } + + lStatus = initCheck(); + if (lStatus != NO_ERROR) { + ALOGE("Audio driver not initialized."); + goto Exit; + } + + { // scope for mLock + Mutex::Autolock _l(mLock); + + // all tracks in same audio session must share the same routing strategy otherwise + // conflicts will happen when tracks are moved from one output to another by audio policy + // manager + uint32_t strategy = AudioSystem::getStrategyForStream(streamType); + for (size_t i = 0; i < mTracks.size(); ++i) { + sp t = mTracks[i]; + if (t != 0 && !t->isOutputTrack()) { + uint32_t actual = AudioSystem::getStrategyForStream(t->streamType()); + if (sessionId == t->sessionId() && strategy != actual) { + ALOGE("createTrack_l() mismatched strategy; expected %u but found %u", + strategy, actual); + lStatus = BAD_VALUE; + goto Exit; + } + } + } + + if (!isTimed) { + track = new Track(this, client, streamType, sampleRate, format, + channelMask, frameCount, sharedBuffer, sessionId, *flags); + } else { + track = TimedTrack::create(this, client, streamType, sampleRate, format, + channelMask, frameCount, sharedBuffer, sessionId); + } + if (track == 0 || track->getCblk() == NULL || track->name() < 0) { + lStatus = NO_MEMORY; + goto Exit; + } + mTracks.add(track); + + sp chain = getEffectChain_l(sessionId); + if (chain != 0) { + ALOGV("createTrack_l() setting main buffer %p", chain->inBuffer()); + track->setMainBuffer(chain->inBuffer()); + chain->setStrategy(AudioSystem::getStrategyForStream(track->streamType())); + chain->incTrackCnt(); + } + + if ((*flags & IAudioFlinger::TRACK_FAST) && (tid != -1)) { + pid_t callingPid = IPCThreadState::self()->getCallingPid(); + // we don't have CAP_SYS_NICE, nor do we want to have it as it's too powerful, + // so ask activity manager to do this on our behalf + sendPrioConfigEvent_l(callingPid, tid, kPriorityAudioApp); + } + } + + lStatus = NO_ERROR; + +Exit: + if (status) { + *status = lStatus; + } + return track; +} + +uint32_t AudioFlinger::PlaybackThread::correctLatency_l(uint32_t latency) const +{ + return latency; +} + +uint32_t AudioFlinger::PlaybackThread::latency() const +{ + Mutex::Autolock _l(mLock); + return latency_l(); +} +uint32_t AudioFlinger::PlaybackThread::latency_l() const +{ + if (initCheck() == NO_ERROR) { + return correctLatency_l(mOutput->stream->get_latency(mOutput->stream)); + } else { + return 0; + } +} + +void AudioFlinger::PlaybackThread::setMasterVolume(float value) +{ + Mutex::Autolock _l(mLock); + // Don't apply master volume in SW if our HAL can do it for us. + if (mOutput && mOutput->audioHwDev && + mOutput->audioHwDev->canSetMasterVolume()) { + mMasterVolume = 1.0; + } else { + mMasterVolume = value; + } +} + +void AudioFlinger::PlaybackThread::setMasterMute(bool muted) +{ + Mutex::Autolock _l(mLock); + // Don't apply master mute in SW if our HAL can do it for us. 
+ if (mOutput && mOutput->audioHwDev && + mOutput->audioHwDev->canSetMasterMute()) { + mMasterMute = false; + } else { + mMasterMute = muted; + } +} + +void AudioFlinger::PlaybackThread::setStreamVolume(audio_stream_type_t stream, float value) +{ + Mutex::Autolock _l(mLock); + mStreamTypes[stream].volume = value; +} + +void AudioFlinger::PlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted) +{ + Mutex::Autolock _l(mLock); + mStreamTypes[stream].mute = muted; +} + +float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) const +{ + Mutex::Autolock _l(mLock); + return mStreamTypes[stream].volume; +} + +// addTrack_l() must be called with ThreadBase::mLock held +status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) +{ + status_t status = ALREADY_EXISTS; + + // set retry count for buffer fill + track->mRetryCount = kMaxTrackStartupRetries; + if (mActiveTracks.indexOf(track) < 0) { + // the track is newly added, make sure it fills up all its + // buffers before playing. This is to ensure the client will + // effectively get the latency it requested. + track->mFillingUpStatus = Track::FS_FILLING; + track->mResetDone = false; + track->mPresentationCompleteFrames = 0; + mActiveTracks.add(track); + if (track->mainBuffer() != mMixBuffer) { + sp chain = getEffectChain_l(track->sessionId()); + if (chain != 0) { + ALOGV("addTrack_l() starting track on chain %p for session %d", chain.get(), + track->sessionId()); + chain->incActiveTrackCnt(); + } + } + + status = NO_ERROR; + } + + ALOGV("mWaitWorkCV.broadcast"); + mWaitWorkCV.broadcast(); + + return status; +} + +// destroyTrack_l() must be called with ThreadBase::mLock held +void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) +{ + track->mState = TrackBase::TERMINATED; + // active tracks are removed by threadLoop() + if (mActiveTracks.indexOf(track) < 0) { + removeTrack_l(track); + } +} + +void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) +{ + track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); + mTracks.remove(track); + deleteTrackName_l(track->name()); + // redundant as track is about to be destroyed, for dumpsys only + track->mName = -1; + if (track->isFastTrack()) { + int index = track->mFastIndex; + ALOG_ASSERT(0 < index && index < (int)FastMixerState::kMaxFastTracks); + ALOG_ASSERT(!(mFastTrackAvailMask & (1 << index))); + mFastTrackAvailMask |= 1 << index; + // redundant as track is about to be destroyed, for dumpsys only + track->mFastIndex = -1; + } + sp chain = getEffectChain_l(track->sessionId()); + if (chain != 0) { + chain->decTrackCnt(); + } +} + +String8 AudioFlinger::PlaybackThread::getParameters(const String8& keys) +{ + String8 out_s8 = String8(""); + char *s; + + Mutex::Autolock _l(mLock); + if (initCheck() != NO_ERROR) { + return out_s8; + } + + s = mOutput->stream->common.get_parameters(&mOutput->stream->common, keys.string()); + out_s8 = String8(s); + free(s); + return out_s8; +} + +// audioConfigChanged_l() must be called with AudioFlinger::mLock held +void AudioFlinger::PlaybackThread::audioConfigChanged_l(int event, int param) { + AudioSystem::OutputDescriptor desc; + void *param2 = NULL; + + ALOGV("PlaybackThread::audioConfigChanged_l, thread %p, event %d, param %d", this, event, + param); + + switch (event) { + case AudioSystem::OUTPUT_OPENED: + case AudioSystem::OUTPUT_CONFIG_CHANGED: + desc.channels = mChannelMask; + desc.samplingRate = mSampleRate; + desc.format = mFormat; + desc.frameCount = mNormalFrameCount; // FIXME see + 
// AudioFlinger::frameCount(audio_io_handle_t) + desc.latency = latency(); + param2 = &desc; + break; + + case AudioSystem::STREAM_CONFIG_CHANGED: + param2 = ¶m; + case AudioSystem::OUTPUT_CLOSED: + default: + break; + } + mAudioFlinger->audioConfigChanged_l(event, mId, param2); +} + +void AudioFlinger::PlaybackThread::readOutputParameters() +{ + mSampleRate = mOutput->stream->common.get_sample_rate(&mOutput->stream->common); + mChannelMask = mOutput->stream->common.get_channels(&mOutput->stream->common); + mChannelCount = (uint16_t)popcount(mChannelMask); + mFormat = mOutput->stream->common.get_format(&mOutput->stream->common); + mFrameSize = audio_stream_frame_size(&mOutput->stream->common); + mFrameCount = mOutput->stream->common.get_buffer_size(&mOutput->stream->common) / mFrameSize; + if (mFrameCount & 15) { + ALOGW("HAL output buffer size is %u frames but AudioMixer requires multiples of 16 frames", + mFrameCount); + } + + // Calculate size of normal mix buffer relative to the HAL output buffer size + double multiplier = 1.0; + if (mType == MIXER && (kUseFastMixer == FastMixer_Static || + kUseFastMixer == FastMixer_Dynamic)) { + size_t minNormalFrameCount = (kMinNormalMixBufferSizeMs * mSampleRate) / 1000; + size_t maxNormalFrameCount = (kMaxNormalMixBufferSizeMs * mSampleRate) / 1000; + // round up minimum and round down maximum to nearest 16 frames to satisfy AudioMixer + minNormalFrameCount = (minNormalFrameCount + 15) & ~15; + maxNormalFrameCount = maxNormalFrameCount & ~15; + if (maxNormalFrameCount < minNormalFrameCount) { + maxNormalFrameCount = minNormalFrameCount; + } + multiplier = (double) minNormalFrameCount / (double) mFrameCount; + if (multiplier <= 1.0) { + multiplier = 1.0; + } else if (multiplier <= 2.0) { + if (2 * mFrameCount <= maxNormalFrameCount) { + multiplier = 2.0; + } else { + multiplier = (double) maxNormalFrameCount / (double) mFrameCount; + } + } else { + // prefer an even multiplier, for compatibility with doubling of fast tracks due to HAL + // SRC (it would be unusual for the normal mix buffer size to not be a multiple of fast + // track, but we sometimes have to do this to satisfy the maximum frame count + // constraint) + // FIXME this rounding up should not be done if no HAL SRC + uint32_t truncMult = (uint32_t) multiplier; + if ((truncMult & 1)) { + if ((truncMult + 1) * mFrameCount <= maxNormalFrameCount) { + ++truncMult; + } + } + multiplier = (double) truncMult; + } + } + mNormalFrameCount = multiplier * mFrameCount; + // round up to nearest 16 frames to satisfy AudioMixer + mNormalFrameCount = (mNormalFrameCount + 15) & ~15; + ALOGI("HAL output buffer size %u frames, normal mix buffer size %u frames", mFrameCount, + mNormalFrameCount); + + delete[] mMixBuffer; + mMixBuffer = new int16_t[mNormalFrameCount * mChannelCount]; + memset(mMixBuffer, 0, mNormalFrameCount * mChannelCount * sizeof(int16_t)); + + // force reconfiguration of effect chains and engines to take new buffer size and audio + // parameters into account + // Note that mLock is not held when readOutputParameters() is called from the constructor + // but in this case nothing is done below as no audio sessions have effect yet so it doesn't + // matter. 
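Note: the normal-mix-buffer sizing in readOutputParameters() above can be exercised on its own. This sketch reproduces only the main steps (the min/max millisecond window, the preference for a 1x or 2x multiplier, the even-multiplier rounding, and rounding frame counts to multiples of 16); the 192-frame HAL buffer at 48 kHz is a hypothetical input, not a value from this patch.

    // illustrative only -- condensed from readOutputParameters() above.
    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint32_t kMinNormalMixBufferSizeMs = 20;
        const uint32_t kMaxNormalMixBufferSizeMs = 24;
        const uint32_t sampleRate = 48000;   // hypothetical output rate
        const size_t halFrameCount = 192;    // hypothetical 4 ms HAL buffer

        size_t minNormal = ((kMinNormalMixBufferSizeMs * sampleRate) / 1000 + 15) & ~15;  // 960
        size_t maxNormal = ((kMaxNormalMixBufferSizeMs * sampleRate) / 1000) & ~15;       // 1152
        if (maxNormal < minNormal) maxNormal = minNormal;

        double multiplier = (double)minNormal / (double)halFrameCount;   // 5.0
        if (multiplier <= 1.0) {
            multiplier = 1.0;
        } else if (multiplier <= 2.0) {
            multiplier = (2 * halFrameCount <= maxNormal) ? 2.0
                       : (double)maxNormal / (double)halFrameCount;
        } else {
            uint32_t truncMult = (uint32_t)multiplier;                   // 5
            if ((truncMult & 1) && (truncMult + 1) * halFrameCount <= maxNormal) {
                ++truncMult;                                             // prefer an even multiple: 6
            }
            multiplier = (double)truncMult;
        }
        size_t normalFrameCount = ((size_t)(multiplier * halFrameCount) + 15) & ~15;
        printf("normal mix buffer = %zu frames (multiplier %.1f)\n", normalFrameCount, multiplier);
        return 0;
    }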
+ // create a copy of mEffectChains as calling moveEffectChain_l() can reorder some effect chains + Vector< sp > effectChains = mEffectChains; + for (size_t i = 0; i < effectChains.size(); i ++) { + mAudioFlinger->moveEffectChain_l(effectChains[i]->sessionId(), this, this, false); + } +} + + +status_t AudioFlinger::PlaybackThread::getRenderPosition(size_t *halFrames, size_t *dspFrames) +{ + if (halFrames == NULL || dspFrames == NULL) { + return BAD_VALUE; + } + Mutex::Autolock _l(mLock); + if (initCheck() != NO_ERROR) { + return INVALID_OPERATION; + } + size_t framesWritten = mBytesWritten / mFrameSize; + *halFrames = framesWritten; + + if (isSuspended()) { + // return an estimation of rendered frames when the output is suspended + size_t latencyFrames = (latency_l() * mSampleRate) / 1000; + *dspFrames = framesWritten >= latencyFrames ? framesWritten - latencyFrames : 0; + return NO_ERROR; + } else { + return mOutput->stream->get_render_position(mOutput->stream, dspFrames); + } +} + +uint32_t AudioFlinger::PlaybackThread::hasAudioSession(int sessionId) const +{ + Mutex::Autolock _l(mLock); + uint32_t result = 0; + if (getEffectChain_l(sessionId) != 0) { + result = EFFECT_SESSION; + } + + for (size_t i = 0; i < mTracks.size(); ++i) { + sp track = mTracks[i]; + if (sessionId == track->sessionId() && + !(track->mCblk->flags & CBLK_INVALID)) { + result |= TRACK_SESSION; + break; + } + } + + return result; +} + +uint32_t AudioFlinger::PlaybackThread::getStrategyForSession_l(int sessionId) +{ + // session AUDIO_SESSION_OUTPUT_MIX is placed in same strategy as MUSIC stream so that + // it is moved to correct output by audio policy manager when A2DP is connected or disconnected + if (sessionId == AUDIO_SESSION_OUTPUT_MIX) { + return AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC); + } + for (size_t i = 0; i < mTracks.size(); i++) { + sp track = mTracks[i]; + if (sessionId == track->sessionId() && + !(track->mCblk->flags & CBLK_INVALID)) { + return AudioSystem::getStrategyForStream(track->streamType()); + } + } + return AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC); +} + + +AudioFlinger::AudioStreamOut* AudioFlinger::PlaybackThread::getOutput() const +{ + Mutex::Autolock _l(mLock); + return mOutput; +} + +AudioFlinger::AudioStreamOut* AudioFlinger::PlaybackThread::clearOutput() +{ + Mutex::Autolock _l(mLock); + AudioStreamOut *output = mOutput; + mOutput = NULL; + // FIXME FastMixer might also have a raw ptr to mOutputSink; + // must push a NULL and wait for ack + mOutputSink.clear(); + mPipeSink.clear(); + mNormalSink.clear(); + return output; +} + +// this method must always be called either with ThreadBase mLock held or inside the thread loop +audio_stream_t* AudioFlinger::PlaybackThread::stream() const +{ + if (mOutput == NULL) { + return NULL; + } + return &mOutput->stream->common; +} + +uint32_t AudioFlinger::PlaybackThread::activeSleepTimeUs() const +{ + return (uint32_t)((uint32_t)((mNormalFrameCount * 1000) / mSampleRate) * 1000); +} + +status_t AudioFlinger::PlaybackThread::setSyncEvent(const sp& event) +{ + if (!isValidSyncEvent(event)) { + return BAD_VALUE; + } + + Mutex::Autolock _l(mLock); + + for (size_t i = 0; i < mTracks.size(); ++i) { + sp track = mTracks[i]; + if (event->triggerSession() == track->sessionId()) { + (void) track->setSyncEvent(event); + return NO_ERROR; + } + } + + return NAME_NOT_FOUND; +} + +bool AudioFlinger::PlaybackThread::isValidSyncEvent(const sp& event) const +{ + return event->type() == AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE; +} + +void 
AudioFlinger::PlaybackThread::threadLoop_removeTracks(
+        const Vector< sp<Track> >& tracksToRemove)
+{
+    size_t count = tracksToRemove.size();
+    if (CC_UNLIKELY(count)) {
+        for (size_t i = 0 ; i < count ; i++) {
+            const sp<Track>& track = tracksToRemove.itemAt(i);
+            if ((track->sharedBuffer() != 0) &&
+                (track->mState == TrackBase::ACTIVE || track->mState == TrackBase::RESUMING)) {
+                AudioSystem::stopOutput(mId, track->streamType(), track->sessionId());
+            }
+        }
+    }
+
+}
+
+void AudioFlinger::PlaybackThread::checkSilentMode_l()
+{
+    if (!mMasterMute) {
+        char value[PROPERTY_VALUE_MAX];
+        if (property_get("ro.audio.silent", value, "0") > 0) {
+            char *endptr;
+            unsigned long ul = strtoul(value, &endptr, 0);
+            if (*endptr == '\0' && ul != 0) {
+                ALOGD("Silence is golden");
+                // The setprop command will not allow a property to be changed after
+                // the first time it is set, so we don't have to worry about un-muting.
+                setMasterMute_l(true);
+            }
+        }
+    }
+}
+
+// shared by MIXER and DIRECT, overridden by DUPLICATING
+void AudioFlinger::PlaybackThread::threadLoop_write()
+{
+    // FIXME rewrite to reduce number of system calls
+    mLastWriteTime = systemTime();
+    mInWrite = true;
+    int bytesWritten;
+
+    // If an NBAIO sink is present, use it to write the normal mixer's submix
+    if (mNormalSink != 0) {
+#define mBitShift 2 // FIXME
+        size_t count = mixBufferSize >> mBitShift;
+#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER)
+        Tracer::traceBegin(ATRACE_TAG, "write");
+#endif
+        // update the setpoint when AudioFlinger::mScreenState changes
+        uint32_t screenState = AudioFlinger::mScreenState;
+        if (screenState != mScreenState) {
+            mScreenState = screenState;
+            MonoPipe *pipe = (MonoPipe *)mPipeSink.get();
+            if (pipe != NULL) {
+                pipe->setAvgFrames((mScreenState & 1) ?
+                        (pipe->maxFrames() * 7) / 8 : mNormalFrameCount * 2);
+            }
+        }
+        ssize_t framesWritten = mNormalSink->write(mMixBuffer, count);
+#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER)
+        Tracer::traceEnd(ATRACE_TAG);
+#endif
+        if (framesWritten > 0) {
+            bytesWritten = framesWritten << mBitShift;
+        } else {
+            bytesWritten = framesWritten;
+        }
+        // otherwise use the HAL / AudioStreamOut directly
+    } else {
+        // Direct output thread.
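+        // For illustration: both branches leave bytesWritten in bytes. The NBAIO path above
+        // writes 'count' frames and converts back with mBitShift (2, i.e. 4 bytes per 16-bit
+        // stereo frame), while the HAL path below passes mixBufferSize bytes directly.
+        // E.g. with mNormalFrameCount = 1024:
+        //     count        = mixBufferSize >> 2;    // 4096 bytes -> 1024 frames
+        //     bytesWritten = framesWritten << 2;    // 1024 frames -> 4096 bytes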
+ bytesWritten = (int)mOutput->stream->write(mOutput->stream, mMixBuffer, mixBufferSize); + } + + if (bytesWritten > 0) { + mBytesWritten += mixBufferSize; + } + mNumWrites++; + mInWrite = false; +} + +/* +The derived values that are cached: + - mixBufferSize from frame count * frame size + - activeSleepTime from activeSleepTimeUs() + - idleSleepTime from idleSleepTimeUs() + - standbyDelay from mActiveSleepTimeUs (DIRECT only) + - maxPeriod from frame count and sample rate (MIXER only) + +The parameters that affect these derived values are: + - frame count + - frame size + - sample rate + - device type: A2DP or not + - device latency + - format: PCM or not + - active sleep time + - idle sleep time +*/ + +void AudioFlinger::PlaybackThread::cacheParameters_l() +{ + mixBufferSize = mNormalFrameCount * mFrameSize; + activeSleepTime = activeSleepTimeUs(); + idleSleepTime = idleSleepTimeUs(); +} + +void AudioFlinger::PlaybackThread::invalidateTracks(audio_stream_type_t streamType) +{ + ALOGV ("MixerThread::invalidateTracks() mixer %p, streamType %d, mTracks.size %d", + this, streamType, mTracks.size()); + Mutex::Autolock _l(mLock); + + size_t size = mTracks.size(); + for (size_t i = 0; i < size; i++) { + sp t = mTracks[i]; + if (t->streamType() == streamType) { + android_atomic_or(CBLK_INVALID, &t->mCblk->flags); + t->mCblk->cv.signal(); + } + } +} + +status_t AudioFlinger::PlaybackThread::addEffectChain_l(const sp& chain) +{ + int session = chain->sessionId(); + int16_t *buffer = mMixBuffer; + bool ownsBuffer = false; + + ALOGV("addEffectChain_l() %p on thread %p for session %d", chain.get(), this, session); + if (session > 0) { + // Only one effect chain can be present in direct output thread and it uses + // the mix buffer as input + if (mType != DIRECT) { + size_t numSamples = mNormalFrameCount * mChannelCount; + buffer = new int16_t[numSamples]; + memset(buffer, 0, numSamples * sizeof(int16_t)); + ALOGV("addEffectChain_l() creating new input buffer %p session %d", buffer, session); + ownsBuffer = true; + } + + // Attach all tracks with same session ID to this chain. + for (size_t i = 0; i < mTracks.size(); ++i) { + sp track = mTracks[i]; + if (session == track->sessionId()) { + ALOGV("addEffectChain_l() track->setMainBuffer track %p buffer %p", track.get(), + buffer); + track->setMainBuffer(buffer); + chain->incTrackCnt(); + } + } + + // indicate all active tracks in the chain + for (size_t i = 0 ; i < mActiveTracks.size() ; ++i) { + sp track = mActiveTracks[i].promote(); + if (track == 0) { + continue; + } + if (session == track->sessionId()) { + ALOGV("addEffectChain_l() activating track %p on session %d", track.get(), session); + chain->incActiveTrackCnt(); + } + } + } + + chain->setInBuffer(buffer, ownsBuffer); + chain->setOutBuffer(mMixBuffer); + // Effect chain for session AUDIO_SESSION_OUTPUT_STAGE is inserted at end of effect + // chains list in order to be processed last as it contains output stage effects + // Effect chain for session AUDIO_SESSION_OUTPUT_MIX is inserted before + // session AUDIO_SESSION_OUTPUT_STAGE to be processed + // after track specific effects and before output stage + // It is therefore mandatory that AUDIO_SESSION_OUTPUT_MIX == 0 and + // that AUDIO_SESSION_OUTPUT_STAGE < AUDIO_SESSION_OUTPUT_MIX + // Effect chain for other sessions are inserted at beginning of effect + // chains list to be processed before output mix effects. 
Relative order between other + // sessions is not important + size_t size = mEffectChains.size(); + size_t i = 0; + for (i = 0; i < size; i++) { + if (mEffectChains[i]->sessionId() < session) { + break; + } + } + mEffectChains.insertAt(chain, i); + checkSuspendOnAddEffectChain_l(chain); + + return NO_ERROR; +} + +size_t AudioFlinger::PlaybackThread::removeEffectChain_l(const sp& chain) +{ + int session = chain->sessionId(); + + ALOGV("removeEffectChain_l() %p from thread %p for session %d", chain.get(), this, session); + + for (size_t i = 0; i < mEffectChains.size(); i++) { + if (chain == mEffectChains[i]) { + mEffectChains.removeAt(i); + // detach all active tracks from the chain + for (size_t i = 0 ; i < mActiveTracks.size() ; ++i) { + sp track = mActiveTracks[i].promote(); + if (track == 0) { + continue; + } + if (session == track->sessionId()) { + ALOGV("removeEffectChain_l(): stopping track on chain %p for session Id: %d", + chain.get(), session); + chain->decActiveTrackCnt(); + } + } + + // detach all tracks with same session ID from this chain + for (size_t i = 0; i < mTracks.size(); ++i) { + sp track = mTracks[i]; + if (session == track->sessionId()) { + track->setMainBuffer(mMixBuffer); + chain->decTrackCnt(); + } + } + break; + } + } + return mEffectChains.size(); +} + +status_t AudioFlinger::PlaybackThread::attachAuxEffect( + const sp track, int EffectId) +{ + Mutex::Autolock _l(mLock); + return attachAuxEffect_l(track, EffectId); +} + +status_t AudioFlinger::PlaybackThread::attachAuxEffect_l( + const sp track, int EffectId) +{ + status_t status = NO_ERROR; + + if (EffectId == 0) { + track->setAuxBuffer(0, NULL); + } else { + // Auxiliary effects are always in audio session AUDIO_SESSION_OUTPUT_MIX + sp effect = getEffect_l(AUDIO_SESSION_OUTPUT_MIX, EffectId); + if (effect != 0) { + if ((effect->desc().flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { + track->setAuxBuffer(EffectId, (int32_t *)effect->inBuffer()); + } else { + status = INVALID_OPERATION; + } + } else { + status = BAD_VALUE; + } + } + return status; +} + +void AudioFlinger::PlaybackThread::detachAuxEffect_l(int effectId) +{ + for (size_t i = 0; i < mTracks.size(); ++i) { + sp track = mTracks[i]; + if (track->auxEffectId() == effectId) { + attachAuxEffect_l(track, 0); + } + } +} + +bool AudioFlinger::PlaybackThread::threadLoop() +{ + Vector< sp > tracksToRemove; + + standbyTime = systemTime(); + + // MIXER + nsecs_t lastWarning = 0; + + // DUPLICATING + // FIXME could this be made local to while loop? 
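+    // Sketch of one cycle of the loop below (details vary by thread type):
+    //     processConfigEvents();
+    //     { Mutex::Autolock _l(mLock);
+    //       if (checkForNewParameters_l()) cacheParameters_l();
+    //       // standby and idle wait when there are no active tracks
+    //       mMixerStatus = prepareTracks_l(&tracksToRemove);
+    //       lockEffectChains_l(effectChains); }
+    //     threadLoop_mix() or threadLoop_sleepTime();
+    //     process_l() each effect chain, then unlockEffectChains(effectChains);
+    //     sleepTime == 0 ? threadLoop_write() : usleep(sleepTime);
+    //     threadLoop_removeTracks(tracksToRemove);   // done without mLock held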
+ writeFrames = 0; + + cacheParameters_l(); + sleepTime = idleSleepTime; + + if (mType == MIXER) { + sleepTimeShift = 0; + } + + CpuStats cpuStats; + const String8 myName(String8::format("thread %p type %d TID %d", this, mType, gettid())); + + acquireWakeLock(); + + while (!exitPending()) + { + cpuStats.sample(myName); + + Vector< sp > effectChains; + + processConfigEvents(); + + { // scope for mLock + + Mutex::Autolock _l(mLock); + + if (checkForNewParameters_l()) { + cacheParameters_l(); + } + + saveOutputTracks(); + + // put audio hardware into standby after short delay + if (CC_UNLIKELY((!mActiveTracks.size() && systemTime() > standbyTime) || + isSuspended())) { + if (!mStandby) { + + threadLoop_standby(); + + mStandby = true; + } + + if (!mActiveTracks.size() && mConfigEvents.isEmpty()) { + // we're about to wait, flush the binder command buffer + IPCThreadState::self()->flushCommands(); + + clearOutputTracks(); + + if (exitPending()) { + break; + } + + releaseWakeLock_l(); + // wait until we have something to do... + ALOGV("%s going to sleep", myName.string()); + mWaitWorkCV.wait(mLock); + ALOGV("%s waking up", myName.string()); + acquireWakeLock_l(); + + mMixerStatus = MIXER_IDLE; + mMixerStatusIgnoringFastTracks = MIXER_IDLE; + mBytesWritten = 0; + + checkSilentMode_l(); + + standbyTime = systemTime() + standbyDelay; + sleepTime = idleSleepTime; + if (mType == MIXER) { + sleepTimeShift = 0; + } + + continue; + } + } + + // mMixerStatusIgnoringFastTracks is also updated internally + mMixerStatus = prepareTracks_l(&tracksToRemove); + + // prevent any changes in effect chain list and in each effect chain + // during mixing and effect process as the audio buffers could be deleted + // or modified if an effect is created or deleted + lockEffectChains_l(effectChains); + } + + if (CC_LIKELY(mMixerStatus == MIXER_TRACKS_READY)) { + threadLoop_mix(); + } else { + threadLoop_sleepTime(); + } + + if (isSuspended()) { + sleepTime = suspendSleepTimeUs(); + mBytesWritten += mixBufferSize; + } + + // only process effects if we're going to write + if (sleepTime == 0) { + for (size_t i = 0; i < effectChains.size(); i ++) { + effectChains[i]->process_l(); + } + } + + // enable changes in effect chain + unlockEffectChains(effectChains); + + // sleepTime == 0 means we must write to audio hardware + if (sleepTime == 0) { + + threadLoop_write(); + +if (mType == MIXER) { + // write blocked detection + nsecs_t now = systemTime(); + nsecs_t delta = now - mLastWriteTime; + if (!mStandby && delta > maxPeriod) { + mNumDelayedWrites++; + if ((now - lastWarning) > kWarningThrottleNs) { +#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) + ScopedTrace st(ATRACE_TAG, "underrun"); +#endif + ALOGW("write blocked for %llu msecs, %d delayed writes, thread %p", + ns2ms(delta), mNumDelayedWrites, this); + lastWarning = now; + } + } +} + + mStandby = false; + } else { + usleep(sleepTime); + } + + // Finally let go of removed track(s), without the lock held + // since we can't guarantee the destructors won't acquire that + // same lock. This will also mutate and push a new fast mixer state. + threadLoop_removeTracks(tracksToRemove); + tracksToRemove.clear(); + + // FIXME I don't understand the need for this here; + // it was in the original code but maybe the + // assignment in saveOutputTracks() makes this unnecessary? 
+ clearOutputTracks(); + + // Effect chains will be actually deleted here if they were removed from + // mEffectChains list during mixing or effects processing + effectChains.clear(); + + // FIXME Note that the above .clear() is no longer necessary since effectChains + // is now local to this block, but will keep it for now (at least until merge done). + } + + // for DuplicatingThread, standby mode is handled by the outputTracks, otherwise ... + if (mType == MIXER || mType == DIRECT) { + // put output stream into standby mode + if (!mStandby) { + mOutput->stream->common.standby(&mOutput->stream->common); + } + } + + releaseWakeLock(); + + ALOGV("Thread %p type %d exiting", this, mType); + return false; +} + + +// ---------------------------------------------------------------------------- + +AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, AudioStreamOut* output, + audio_io_handle_t id, audio_devices_t device, type_t type) + : PlaybackThread(audioFlinger, output, id, device, type), + // mAudioMixer below + // mFastMixer below + mFastMixerFutex(0) + // mOutputSink below + // mPipeSink below + // mNormalSink below +{ + ALOGV("MixerThread() id=%d device=%#x type=%d", id, device, type); + ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%d, mFormat=%d, mFrameSize=%u, " + "mFrameCount=%d, mNormalFrameCount=%d", + mSampleRate, mChannelMask, mChannelCount, mFormat, mFrameSize, mFrameCount, + mNormalFrameCount); + mAudioMixer = new AudioMixer(mNormalFrameCount, mSampleRate); + + // FIXME - Current mixer implementation only supports stereo output + if (mChannelCount != FCC_2) { + ALOGE("Invalid audio hardware channel count %d", mChannelCount); + } + + // create an NBAIO sink for the HAL output stream, and negotiate + mOutputSink = new AudioStreamOutSink(output->stream); + size_t numCounterOffers = 0; + const NBAIO_Format offers[1] = {Format_from_SR_C(mSampleRate, mChannelCount)}; + ssize_t index = mOutputSink->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + + // initialize fast mixer depending on configuration + bool initFastMixer; + switch (kUseFastMixer) { + case FastMixer_Never: + initFastMixer = false; + break; + case FastMixer_Always: + initFastMixer = true; + break; + case FastMixer_Static: + case FastMixer_Dynamic: + initFastMixer = mFrameCount < mNormalFrameCount; + break; + } + if (initFastMixer) { + + // create a MonoPipe to connect our submix to FastMixer + NBAIO_Format format = mOutputSink->format(); + // This pipe depth compensates for scheduling latency of the normal mixer thread. + // When it wakes up after a maximum latency, it runs a few cycles quickly before + // finally blocking. Note the pipe implementation rounds up the request to a power of 2. + MonoPipe *monoPipe = new MonoPipe(mNormalFrameCount * 4, format, true /*writeCanBlock*/); + const NBAIO_Format offers[1] = {format}; + size_t numCounterOffers = 0; + ssize_t index = monoPipe->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + monoPipe->setAvgFrames((mScreenState & 1) ? 
+ (monoPipe->maxFrames() * 7) / 8 : mNormalFrameCount * 2); + mPipeSink = monoPipe; + +#ifdef TEE_SINK_FRAMES + // create a Pipe to archive a copy of FastMixer's output for dumpsys + Pipe *teeSink = new Pipe(TEE_SINK_FRAMES, format); + numCounterOffers = 0; + index = teeSink->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + mTeeSink = teeSink; + PipeReader *teeSource = new PipeReader(*teeSink); + numCounterOffers = 0; + index = teeSource->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + mTeeSource = teeSource; +#endif + + // create fast mixer and configure it initially with just one fast track for our submix + mFastMixer = new FastMixer(); + FastMixerStateQueue *sq = mFastMixer->sq(); +#ifdef STATE_QUEUE_DUMP + sq->setObserverDump(&mStateQueueObserverDump); + sq->setMutatorDump(&mStateQueueMutatorDump); +#endif + FastMixerState *state = sq->begin(); + FastTrack *fastTrack = &state->mFastTracks[0]; + // wrap the source side of the MonoPipe to make it an AudioBufferProvider + fastTrack->mBufferProvider = new SourceAudioBufferProvider(new MonoPipeReader(monoPipe)); + fastTrack->mVolumeProvider = NULL; + fastTrack->mGeneration++; + state->mFastTracksGen++; + state->mTrackMask = 1; + // fast mixer will use the HAL output sink + state->mOutputSink = mOutputSink.get(); + state->mOutputSinkGen++; + state->mFrameCount = mFrameCount; + state->mCommand = FastMixerState::COLD_IDLE; + // already done in constructor initialization list + //mFastMixerFutex = 0; + state->mColdFutexAddr = &mFastMixerFutex; + state->mColdGen++; + state->mDumpState = &mFastMixerDumpState; + state->mTeeSink = mTeeSink.get(); + sq->end(); + sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED); + + // start the fast mixer + mFastMixer->run("FastMixer", PRIORITY_URGENT_AUDIO); + pid_t tid = mFastMixer->getTid(); + int err = requestPriority(getpid_cached, tid, kPriorityFastMixer); + if (err != 0) { + ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; error %d", + kPriorityFastMixer, getpid_cached, tid, err); + } + +#ifdef AUDIO_WATCHDOG + // create and start the watchdog + mAudioWatchdog = new AudioWatchdog(); + mAudioWatchdog->setDump(&mAudioWatchdogDump); + mAudioWatchdog->run("AudioWatchdog", PRIORITY_URGENT_AUDIO); + tid = mAudioWatchdog->getTid(); + err = requestPriority(getpid_cached, tid, kPriorityFastMixer); + if (err != 0) { + ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; error %d", + kPriorityFastMixer, getpid_cached, tid, err); + } +#endif + + } else { + mFastMixer = NULL; + } + + switch (kUseFastMixer) { + case FastMixer_Never: + case FastMixer_Dynamic: + mNormalSink = mOutputSink; + break; + case FastMixer_Always: + mNormalSink = mPipeSink; + break; + case FastMixer_Static: + mNormalSink = initFastMixer ? mPipeSink : mOutputSink; + break; + } +} + +AudioFlinger::MixerThread::~MixerThread() +{ + if (mFastMixer != NULL) { + FastMixerStateQueue *sq = mFastMixer->sq(); + FastMixerState *state = sq->begin(); + if (state->mCommand == FastMixerState::COLD_IDLE) { + int32_t old = android_atomic_inc(&mFastMixerFutex); + if (old == -1) { + __futex_syscall3(&mFastMixerFutex, FUTEX_WAKE_PRIVATE, 1); + } + } + state->mCommand = FastMixerState::EXIT; + sq->end(); + sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED); + mFastMixer->join(); + // Though the fast mixer thread has exited, it's state queue is still valid. + // We'll use that extract the final state which contains one remaining fast track + // corresponding to our sub-mix. 
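+        // Sketch of the FastMixerStateQueue pattern used here and in threadLoop_write() /
+        // threadLoop_standby() / prepareTracks_l():
+        //     FastMixerState *state = sq->begin();  // writable copy of the shared state
+        //     ... mutate *state ...                 // e.g. state->mCommand = FastMixerState::EXIT;
+        //     sq->end();                            // or sq->end(false) when nothing changed
+        //     sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
+        // BLOCK_UNTIL_ACKED is used instead when the caller must be sure the fast mixer has
+        // observed the new state (e.g. before tearing down a track it still references).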
+ state = sq->begin(); + ALOG_ASSERT(state->mTrackMask == 1); + FastTrack *fastTrack = &state->mFastTracks[0]; + ALOG_ASSERT(fastTrack->mBufferProvider != NULL); + delete fastTrack->mBufferProvider; + sq->end(false /*didModify*/); + delete mFastMixer; +#ifdef AUDIO_WATCHDOG + if (mAudioWatchdog != 0) { + mAudioWatchdog->requestExit(); + mAudioWatchdog->requestExitAndWait(); + mAudioWatchdog.clear(); + } +#endif + } + delete mAudioMixer; +} + + +uint32_t AudioFlinger::MixerThread::correctLatency_l(uint32_t latency) const +{ + if (mFastMixer != NULL) { + MonoPipe *pipe = (MonoPipe *)mPipeSink.get(); + latency += (pipe->getAvgFrames() * 1000) / mSampleRate; + } + return latency; +} + + +void AudioFlinger::MixerThread::threadLoop_removeTracks(const Vector< sp >& tracksToRemove) +{ + PlaybackThread::threadLoop_removeTracks(tracksToRemove); +} + +void AudioFlinger::MixerThread::threadLoop_write() +{ + // FIXME we should only do one push per cycle; confirm this is true + // Start the fast mixer if it's not already running + if (mFastMixer != NULL) { + FastMixerStateQueue *sq = mFastMixer->sq(); + FastMixerState *state = sq->begin(); + if (state->mCommand != FastMixerState::MIX_WRITE && + (kUseFastMixer != FastMixer_Dynamic || state->mTrackMask > 1)) { + if (state->mCommand == FastMixerState::COLD_IDLE) { + int32_t old = android_atomic_inc(&mFastMixerFutex); + if (old == -1) { + __futex_syscall3(&mFastMixerFutex, FUTEX_WAKE_PRIVATE, 1); + } +#ifdef AUDIO_WATCHDOG + if (mAudioWatchdog != 0) { + mAudioWatchdog->resume(); + } +#endif + } + state->mCommand = FastMixerState::MIX_WRITE; + sq->end(); + sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED); + if (kUseFastMixer == FastMixer_Dynamic) { + mNormalSink = mPipeSink; + } + } else { + sq->end(false /*didModify*/); + } + } + PlaybackThread::threadLoop_write(); +} + +void AudioFlinger::MixerThread::threadLoop_standby() +{ + // Idle the fast mixer if it's currently running + if (mFastMixer != NULL) { + FastMixerStateQueue *sq = mFastMixer->sq(); + FastMixerState *state = sq->begin(); + if (!(state->mCommand & FastMixerState::IDLE)) { + state->mCommand = FastMixerState::COLD_IDLE; + state->mColdFutexAddr = &mFastMixerFutex; + state->mColdGen++; + mFastMixerFutex = 0; + sq->end(); + // BLOCK_UNTIL_PUSHED would be insufficient, as we need it to stop doing I/O now + sq->push(FastMixerStateQueue::BLOCK_UNTIL_ACKED); + if (kUseFastMixer == FastMixer_Dynamic) { + mNormalSink = mOutputSink; + } +#ifdef AUDIO_WATCHDOG + if (mAudioWatchdog != 0) { + mAudioWatchdog->pause(); + } +#endif + } else { + sq->end(false /*didModify*/); + } + } + PlaybackThread::threadLoop_standby(); +} + +// shared by MIXER and DIRECT, overridden by DUPLICATING +void AudioFlinger::PlaybackThread::threadLoop_standby() +{ + ALOGV("Audio hardware entering standby, mixer %p, suspend count %d", this, mSuspended); + mOutput->stream->common.standby(&mOutput->stream->common); +} + +void AudioFlinger::MixerThread::threadLoop_mix() +{ + // obtain the presentation timestamp of the next output buffer + int64_t pts; + status_t status = INVALID_OPERATION; + + if (mNormalSink != 0) { + status = mNormalSink->getNextWriteTimestamp(&pts); + } else { + status = mOutputSink->getNextWriteTimestamp(&pts); + } + + if (status != NO_ERROR) { + pts = AudioBufferProvider::kInvalidPTS; + } + + // mix buffers... + mAudioMixer->process(pts); + // increase sleep time progressively when application underrun condition clears. 
+    // Only increase sleep time if the mixer is ready for two consecutive times to avoid
+    // that a steady state of alternating ready/not ready conditions keeps the sleep time
+    // such that we would underrun the audio HAL.
+    if ((sleepTime == 0) && (sleepTimeShift > 0)) {
+        sleepTimeShift--;
+    }
+    sleepTime = 0;
+    standbyTime = systemTime() + standbyDelay;
+    //TODO: delay standby when effects have a tail
+}
+
+void AudioFlinger::MixerThread::threadLoop_sleepTime()
+{
+    // If no tracks are ready, sleep once for the duration of an output
+    // buffer size, then write 0s to the output
+    if (sleepTime == 0) {
+        if (mMixerStatus == MIXER_TRACKS_ENABLED) {
+            sleepTime = activeSleepTime >> sleepTimeShift;
+            if (sleepTime < kMinThreadSleepTimeUs) {
+                sleepTime = kMinThreadSleepTimeUs;
+            }
+            // reduce sleep time in case of consecutive application underruns to avoid
+            // starving the audio HAL. As activeSleepTimeUs() is larger than a buffer
+            // duration we would end up writing less data than needed by the audio HAL if
+            // the condition persists.
+            if (sleepTimeShift < kMaxThreadSleepTimeShift) {
+                sleepTimeShift++;
+            }
+        } else {
+            sleepTime = idleSleepTime;
+        }
+    } else if (mBytesWritten != 0 || (mMixerStatus == MIXER_TRACKS_ENABLED)) {
+        memset (mMixBuffer, 0, mixBufferSize);
+        sleepTime = 0;
+        ALOGV_IF(mBytesWritten == 0 && (mMixerStatus == MIXER_TRACKS_ENABLED),
+                "anticipated start");
+    }
+    // TODO add standby time extension fct of effect tail
+}
+
+// prepareTracks_l() must be called with ThreadBase::mLock held
+AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTracks_l(
+        Vector< sp<Track> > *tracksToRemove)
+{
+
+    mixer_state mixerStatus = MIXER_IDLE;
+    // find out which tracks need to be processed
+    size_t count = mActiveTracks.size();
+    size_t mixedTracks = 0;
+    size_t tracksWithEffect = 0;
+    // counts only _active_ fast tracks
+    size_t fastTracks = 0;
+    uint32_t resetMask = 0; // bit mask of fast tracks that need to be reset
+
+    float masterVolume = mMasterVolume;
+    bool masterMute = mMasterMute;
+
+    if (masterMute) {
+        masterVolume = 0;
+    }
+    // Delegate master volume control to effect in output mix effect chain if needed
+    sp<EffectChain> chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
+    if (chain != 0) {
+        uint32_t v = (uint32_t)(masterVolume * (1 << 24));
+        chain->setVolume_l(&v, &v);
+        masterVolume = (float)((v + (1 << 23)) >> 24);
+        chain.clear();
+    }
+
+    // prepare a new state to push
+    FastMixerStateQueue *sq = NULL;
+    FastMixerState *state = NULL;
+    bool didModify = false;
+    FastMixerStateQueue::block_t block = FastMixerStateQueue::BLOCK_UNTIL_PUSHED;
+    if (mFastMixer != NULL) {
+        sq = mFastMixer->sq();
+        state = sq->begin();
+    }
+
+    for (size_t i=0 ; i<count ; i++) {
+        sp<Track> t = mActiveTracks[i].promote();
+        if (t == 0) {
+            continue;
+        }
+
+        // this const just means the local variable doesn't change
+        Track* const track = t.get();
+
+        // process fast tracks
+        if (track->isFastTrack()) {
+
+            // It's theoretically possible (though unlikely) for a fast track to be created
+            // and then removed within the same normal mix cycle.  This is not a problem, as
+            // the track never becomes active so its fast mixer slot is never touched.
+            // The converse, of removing an (active) track and then creating a new track
+            // at the identical fast mixer slot within the same normal mix cycle,
+            // is impossible because the slot isn't marked available until the end of each cycle.
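+            // Two bit masks govern the fast mixer slots: mFastTrackAvailMask marks slots that
+            // are free to allocate (removeTrack_l() above returns a slot with
+            // mFastTrackAvailMask |= 1 << index), while state->mTrackMask marks slots the
+            // FastMixer must mix (set and cleared below as the track becomes active or
+            // inactive). Slot 0 appears to be reserved for the normal mixer's own submix
+            // (state->mTrackMask starts at 1 in the MixerThread constructor), hence the
+            // assertion 0 < j below.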
+ int j = track->mFastIndex; + ALOG_ASSERT(0 < j && j < (int)FastMixerState::kMaxFastTracks); + ALOG_ASSERT(!(mFastTrackAvailMask & (1 << j))); + FastTrack *fastTrack = &state->mFastTracks[j]; + + // Determine whether the track is currently in underrun condition, + // and whether it had a recent underrun. + FastTrackDump *ftDump = &mFastMixerDumpState.mTracks[j]; + FastTrackUnderruns underruns = ftDump->mUnderruns; + uint32_t recentFull = (underruns.mBitFields.mFull - + track->mObservedUnderruns.mBitFields.mFull) & UNDERRUN_MASK; + uint32_t recentPartial = (underruns.mBitFields.mPartial - + track->mObservedUnderruns.mBitFields.mPartial) & UNDERRUN_MASK; + uint32_t recentEmpty = (underruns.mBitFields.mEmpty - + track->mObservedUnderruns.mBitFields.mEmpty) & UNDERRUN_MASK; + uint32_t recentUnderruns = recentPartial + recentEmpty; + track->mObservedUnderruns = underruns; + // don't count underruns that occur while stopping or pausing + // or stopped which can occur when flush() is called while active + if (!(track->isStopping() || track->isPausing() || track->isStopped())) { + track->mUnderrunCount += recentUnderruns; + } + + // This is similar to the state machine for normal tracks, + // with a few modifications for fast tracks. + bool isActive = true; + switch (track->mState) { + case TrackBase::STOPPING_1: + // track stays active in STOPPING_1 state until first underrun + if (recentUnderruns > 0) { + track->mState = TrackBase::STOPPING_2; + } + break; + case TrackBase::PAUSING: + // ramp down is not yet implemented + track->setPaused(); + break; + case TrackBase::RESUMING: + // ramp up is not yet implemented + track->mState = TrackBase::ACTIVE; + break; + case TrackBase::ACTIVE: + if (recentFull > 0 || recentPartial > 0) { + // track has provided at least some frames recently: reset retry count + track->mRetryCount = kMaxTrackRetries; + } + if (recentUnderruns == 0) { + // no recent underruns: stay active + break; + } + // there has recently been an underrun of some kind + if (track->sharedBuffer() == 0) { + // were any of the recent underruns "empty" (no frames available)? + if (recentEmpty == 0) { + // no, then ignore the partial underruns as they are allowed indefinitely + break; + } + // there has recently been an "empty" underrun: decrement the retry counter + if (--(track->mRetryCount) > 0) { + break; + } + // indicate to client process that the track was disabled because of underrun; + // it will then automatically call start() when data is available + android_atomic_or(CBLK_DISABLED, &track->mCblk->flags); + // remove from active list, but state remains ACTIVE [confusing but true] + isActive = false; + break; + } + // fall through + case TrackBase::STOPPING_2: + case TrackBase::PAUSED: + case TrackBase::TERMINATED: + case TrackBase::STOPPED: + case TrackBase::FLUSHED: // flush() while active + // Check for presentation complete if track is inactive + // We have consumed all the buffers of this track. 
+ // This would be incomplete if we auto-paused on underrun + { + size_t audioHALFrames = + (mOutput->stream->get_latency(mOutput->stream)*mSampleRate) / 1000; + size_t framesWritten = mBytesWritten / mFrameSize; + if (!(mStandby || track->presentationComplete(framesWritten, audioHALFrames))) { + // track stays in active list until presentation is complete + break; + } + } + if (track->isStopping_2()) { + track->mState = TrackBase::STOPPED; + } + if (track->isStopped()) { + // Can't reset directly, as fast mixer is still polling this track + // track->reset(); + // So instead mark this track as needing to be reset after push with ack + resetMask |= 1 << i; + } + isActive = false; + break; + case TrackBase::IDLE: + default: + LOG_FATAL("unexpected track state %d", track->mState); + } + + if (isActive) { + // was it previously inactive? + if (!(state->mTrackMask & (1 << j))) { + ExtendedAudioBufferProvider *eabp = track; + VolumeProvider *vp = track; + fastTrack->mBufferProvider = eabp; + fastTrack->mVolumeProvider = vp; + fastTrack->mSampleRate = track->mSampleRate; + fastTrack->mChannelMask = track->mChannelMask; + fastTrack->mGeneration++; + state->mTrackMask |= 1 << j; + didModify = true; + // no acknowledgement required for newly active tracks + } + // cache the combined master volume and stream type volume for fast mixer; this + // lacks any synchronization or barrier so VolumeProvider may read a stale value + track->mCachedVolume = track->isMuted() ? + 0 : masterVolume * mStreamTypes[track->streamType()].volume; + ++fastTracks; + } else { + // was it previously active? + if (state->mTrackMask & (1 << j)) { + fastTrack->mBufferProvider = NULL; + fastTrack->mGeneration++; + state->mTrackMask &= ~(1 << j); + didModify = true; + // If any fast tracks were removed, we must wait for acknowledgement + // because we're about to decrement the last sp<> on those tracks. + block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; + } else { + LOG_FATAL("fast track %d should have been active", j); + } + tracksToRemove->add(track); + // Avoids a misleading display in dumpsys + track->mObservedUnderruns.mBitFields.mMostRecent = UNDERRUN_FULL; + } + continue; + } + + { // local variable scope to avoid goto warning + + audio_track_cblk_t* cblk = track->cblk(); + + // The first time a track is added we wait + // for all its buffers to be filled before processing it + int name = track->name(); + // make sure that we have enough frames to mix one full buffer. 
+ // enforce this condition only once to enable draining the buffer in case the client + // app does not call stop() and relies on underrun to stop: + // hence the test on (mMixerStatus == MIXER_TRACKS_READY) meaning the track was mixed + // during last round + uint32_t minFrames = 1; + if ((track->sharedBuffer() == 0) && !track->isStopped() && !track->isPausing() && + (mMixerStatusIgnoringFastTracks == MIXER_TRACKS_READY)) { + if (t->sampleRate() == mSampleRate) { + minFrames = mNormalFrameCount; + } else { + // +1 for rounding and +1 for additional sample needed for interpolation + minFrames = (mNormalFrameCount * t->sampleRate()) / mSampleRate + 1 + 1; + // add frames already consumed but not yet released by the resampler + // because cblk->framesReady() will include these frames + minFrames += mAudioMixer->getUnreleasedFrames(track->name()); + // the minimum track buffer size is normally twice the number of frames necessary + // to fill one buffer and the resampler should not leave more than one buffer worth + // of unreleased frames after each pass, but just in case... + ALOG_ASSERT(minFrames <= cblk->frameCount); + } + } + if ((track->framesReady() >= minFrames) && track->isReady() && + !track->isPaused() && !track->isTerminated()) + { + ALOGVV("track %d u=%08x, s=%08x [OK] on thread %p", name, cblk->user, cblk->server, + this); + + mixedTracks++; + + // track->mainBuffer() != mMixBuffer means there is an effect chain + // connected to the track + chain.clear(); + if (track->mainBuffer() != mMixBuffer) { + chain = getEffectChain_l(track->sessionId()); + // Delegate volume control to effect in track effect chain if needed + if (chain != 0) { + tracksWithEffect++; + } else { + ALOGW("prepareTracks_l(): track %d attached to effect but no chain found on " + "session %d", + name, track->sessionId()); + } + } + + + int param = AudioMixer::VOLUME; + if (track->mFillingUpStatus == Track::FS_FILLED) { + // no ramp for the first volume setting + track->mFillingUpStatus = Track::FS_ACTIVE; + if (track->mState == TrackBase::RESUMING) { + track->mState = TrackBase::ACTIVE; + param = AudioMixer::RAMP_VOLUME; + } + mAudioMixer->setParameter(name, AudioMixer::RESAMPLE, AudioMixer::RESET, NULL); + } else if (cblk->server != 0) { + // If the track is stopped before the first frame was mixed, + // do not apply ramp + param = AudioMixer::RAMP_VOLUME; + } + + // compute volume for this track + uint32_t vl, vr, va; + if (track->isMuted() || track->isPausing() || + mStreamTypes[track->streamType()].mute) { + vl = vr = va = 0; + if (track->isPausing()) { + track->setPaused(); + } + } else { + + // read original volumes with volume control + float typeVolume = mStreamTypes[track->streamType()].volume; + float v = masterVolume * typeVolume; + uint32_t vlr = cblk->getVolumeLR(); + vl = vlr & 0xFFFF; + vr = vlr >> 16; + // track volumes come from shared memory, so can't be trusted and must be clamped + if (vl > MAX_GAIN_INT) { + ALOGV("Track left volume out of range: %04X", vl); + vl = MAX_GAIN_INT; + } + if (vr > MAX_GAIN_INT) { + ALOGV("Track right volume out of range: %04X", vr); + vr = MAX_GAIN_INT; + } + // now apply the master volume and stream type volume + vl = (uint32_t)(v * vl) << 12; + vr = (uint32_t)(v * vr) << 12; + // assuming master volume and stream type volume each go up to 1.0, + // vl and vr are now in 8.24 format + + uint16_t sendLevel = cblk->getSendLevel_U4_12(); + // send level comes from shared memory and so may be corrupt + if (sendLevel > MAX_GAIN_INT) { + ALOGV("Track send level out 
of range: %04X", sendLevel); + sendLevel = MAX_GAIN_INT; + } + va = (uint32_t)(v * sendLevel); + } + // Delegate volume control to effect in track effect chain if needed + if (chain != 0 && chain->setVolume_l(&vl, &vr)) { + // Do not ramp volume if volume is controlled by effect + param = AudioMixer::VOLUME; + track->mHasVolumeController = true; + } else { + // force no volume ramp when volume controller was just disabled or removed + // from effect chain to avoid volume spike + if (track->mHasVolumeController) { + param = AudioMixer::VOLUME; + } + track->mHasVolumeController = false; + } + + // Convert volumes from 8.24 to 4.12 format + // This additional clamping is needed in case chain->setVolume_l() overshot + vl = (vl + (1 << 11)) >> 12; + if (vl > MAX_GAIN_INT) { + vl = MAX_GAIN_INT; + } + vr = (vr + (1 << 11)) >> 12; + if (vr > MAX_GAIN_INT) { + vr = MAX_GAIN_INT; + } + + if (va > MAX_GAIN_INT) { + va = MAX_GAIN_INT; // va is uint32_t, so no need to check for - + } + + // XXX: these things DON'T need to be done each time + mAudioMixer->setBufferProvider(name, track); + mAudioMixer->enable(name); + + mAudioMixer->setParameter(name, param, AudioMixer::VOLUME0, (void *)vl); + mAudioMixer->setParameter(name, param, AudioMixer::VOLUME1, (void *)vr); + mAudioMixer->setParameter(name, param, AudioMixer::AUXLEVEL, (void *)va); + mAudioMixer->setParameter( + name, + AudioMixer::TRACK, + AudioMixer::FORMAT, (void *)track->format()); + mAudioMixer->setParameter( + name, + AudioMixer::TRACK, + AudioMixer::CHANNEL_MASK, (void *)track->channelMask()); + mAudioMixer->setParameter( + name, + AudioMixer::RESAMPLE, + AudioMixer::SAMPLE_RATE, + (void *)(cblk->sampleRate)); + mAudioMixer->setParameter( + name, + AudioMixer::TRACK, + AudioMixer::MAIN_BUFFER, (void *)track->mainBuffer()); + mAudioMixer->setParameter( + name, + AudioMixer::TRACK, + AudioMixer::AUX_BUFFER, (void *)track->auxBuffer()); + + // reset retry count + track->mRetryCount = kMaxTrackRetries; + + // If one track is ready, set the mixer ready if: + // - the mixer was not ready during previous round OR + // - no other track is not ready + if (mMixerStatusIgnoringFastTracks != MIXER_TRACKS_READY || + mixerStatus != MIXER_TRACKS_ENABLED) { + mixerStatus = MIXER_TRACKS_READY; + } + } else { + // clear effect chain input buffer if an active track underruns to avoid sending + // previous audio buffer again to effects + chain = getEffectChain_l(track->sessionId()); + if (chain != 0) { + chain->clearInputBuffer(); + } + + ALOGVV("track %d u=%08x, s=%08x [NOT READY] on thread %p", name, cblk->user, + cblk->server, this); + if ((track->sharedBuffer() != 0) || track->isTerminated() || + track->isStopped() || track->isPaused()) { + // We have consumed all the buffers of this track. + // Remove it from the list of active tracks. + // TODO: use actual buffer filling status instead of latency when available from + // audio HAL + size_t audioHALFrames = (latency_l() * mSampleRate) / 1000; + size_t framesWritten = mBytesWritten / mFrameSize; + if (mStandby || track->presentationComplete(framesWritten, audioHALFrames)) { + if (track->isStopped()) { + track->reset(); + } + tracksToRemove->add(track); + } + } else { + track->mUnderrunCount++; + // No buffers for this track. Give it a few chances to + // fill a buffer, then remove it from active list. 
+ if (--(track->mRetryCount) <= 0) { + ALOGV("BUFFER TIMEOUT: remove(%d) from active list on thread %p", name, this); + tracksToRemove->add(track); + // indicate to client process that the track was disabled because of underrun; + // it will then automatically call start() when data is available + android_atomic_or(CBLK_DISABLED, &cblk->flags); + // If one track is not ready, mark the mixer also not ready if: + // - the mixer was ready during previous round OR + // - no other track is ready + } else if (mMixerStatusIgnoringFastTracks == MIXER_TRACKS_READY || + mixerStatus != MIXER_TRACKS_READY) { + mixerStatus = MIXER_TRACKS_ENABLED; + } + } + mAudioMixer->disable(name); + } + + } // local variable scope to avoid goto warning +track_is_ready: ; + + } + + // Push the new FastMixer state if necessary + bool pauseAudioWatchdog = false; + if (didModify) { + state->mFastTracksGen++; + // if the fast mixer was active, but now there are no fast tracks, then put it in cold idle + if (kUseFastMixer == FastMixer_Dynamic && + state->mCommand == FastMixerState::MIX_WRITE && state->mTrackMask <= 1) { + state->mCommand = FastMixerState::COLD_IDLE; + state->mColdFutexAddr = &mFastMixerFutex; + state->mColdGen++; + mFastMixerFutex = 0; + if (kUseFastMixer == FastMixer_Dynamic) { + mNormalSink = mOutputSink; + } + // If we go into cold idle, need to wait for acknowledgement + // so that fast mixer stops doing I/O. + block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; + pauseAudioWatchdog = true; + } + sq->end(); + } + if (sq != NULL) { + sq->end(didModify); + sq->push(block); + } +#ifdef AUDIO_WATCHDOG + if (pauseAudioWatchdog && mAudioWatchdog != 0) { + mAudioWatchdog->pause(); + } +#endif + + // Now perform the deferred reset on fast tracks that have stopped + while (resetMask != 0) { + size_t i = __builtin_ctz(resetMask); + ALOG_ASSERT(i < count); + resetMask &= ~(1 << i); + sp t = mActiveTracks[i].promote(); + if (t == 0) { + continue; + } + Track* track = t.get(); + ALOG_ASSERT(track->isFastTrack() && track->isStopped()); + track->reset(); + } + + // remove all the tracks that need to be... 
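+    // For illustration of the deferred-reset loop above: __builtin_ctz() returns the index of
+    // the lowest set bit, so with resetMask == 0x14 the loop resets the fast tracks at active
+    // indices 2 and then 4 before the mask reaches 0.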
+    count = tracksToRemove->size();
+    if (CC_UNLIKELY(count)) {
+        for (size_t i=0 ; i<count ; i++) {
+            const sp<Track>& track = tracksToRemove->itemAt(i);
+            mActiveTracks.remove(track);
+            if (track->mainBuffer() != mMixBuffer) {
+                chain = getEffectChain_l(track->sessionId());
+                if (chain != 0) {
+                    ALOGV("stopping track on chain %p for session Id: %d", chain.get(),
+                            track->sessionId());
+                    chain->decActiveTrackCnt();
+                }
+            }
+            if (track->isTerminated()) {
+                removeTrack_l(track);
+            }
+        }
+    }
+
+    // mix buffer must be cleared if all tracks are connected to an
+    // effect chain as in this case the mixer will not write to
+    // mix buffer and track effects will accumulate into it
+    if ((mixedTracks != 0 && mixedTracks == tracksWithEffect) ||
+            (mixedTracks == 0 && fastTracks > 0)) {
+        // FIXME as a performance optimization, should remember previous zero status
+        memset(mMixBuffer, 0, mNormalFrameCount * mChannelCount * sizeof(int16_t));
+    }
+
+    // if any fast tracks, then status is ready
+    mMixerStatusIgnoringFastTracks = mixerStatus;
+    if (fastTracks > 0) {
+        mixerStatus = MIXER_TRACKS_READY;
+    }
+    return mixerStatus;
+}
+
+// getTrackName_l() must be called with ThreadBase::mLock held
+int AudioFlinger::MixerThread::getTrackName_l(audio_channel_mask_t channelMask, int sessionId)
+{
+    return mAudioMixer->getTrackName(channelMask, sessionId);
+}
+
+// deleteTrackName_l() must be called with ThreadBase::mLock held
+void AudioFlinger::MixerThread::deleteTrackName_l(int name)
+{
+    ALOGV("remove track (%d) and delete from mixer", name);
+    mAudioMixer->deleteTrackName(name);
+}
+
+// checkForNewParameters_l() must be called with ThreadBase::mLock held
+bool AudioFlinger::MixerThread::checkForNewParameters_l()
+{
+    // if !&IDLE, holds the FastMixer state to restore after new parameters processed
+    FastMixerState::Command previousCommand = FastMixerState::HOT_IDLE;
+    bool reconfig = false;
+
+    while (!mNewParameters.isEmpty()) {
+
+        if (mFastMixer != NULL) {
+            FastMixerStateQueue *sq = mFastMixer->sq();
+            FastMixerState *state = sq->begin();
+            if (!(state->mCommand & FastMixerState::IDLE)) {
+                previousCommand = state->mCommand;
+                state->mCommand = FastMixerState::HOT_IDLE;
+                sq->end();
+                sq->push(FastMixerStateQueue::BLOCK_UNTIL_ACKED);
+            } else {
+                sq->end(false /*didModify*/);
+            }
+        }
+
+        status_t status = NO_ERROR;
+        String8 keyValuePair = mNewParameters[0];
+        AudioParameter param = AudioParameter(keyValuePair);
+        int value;
+
+        if (param.getInt(String8(AudioParameter::keySamplingRate), value) == NO_ERROR) {
+            reconfig = true;
+        }
+        if (param.getInt(String8(AudioParameter::keyFormat), value) == NO_ERROR) {
+            if ((audio_format_t) value != AUDIO_FORMAT_PCM_16_BIT) {
+                status = BAD_VALUE;
+            } else {
+                reconfig = true;
+            }
+        }
+        if (param.getInt(String8(AudioParameter::keyChannels), value) == NO_ERROR) {
+            if (value != AUDIO_CHANNEL_OUT_STEREO) {
+                status = BAD_VALUE;
+            } else {
+                reconfig = true;
+            }
+        }
+        if (param.getInt(String8(AudioParameter::keyFrameCount), value) == NO_ERROR) {
+            // do not accept frame count changes if tracks are open as the track buffer
+            // size depends on frame count and correct behavior would not be guaranteed
+            // if frame count is changed after track creation
+            if (!mTracks.isEmpty()) {
+                status = INVALID_OPERATION;
+            } else {
+                reconfig = true;
+            }
+        }
+        if (param.getInt(String8(AudioParameter::keyRouting), value) == NO_ERROR) {
+#ifdef ADD_BATTERY_DATA
+            // when changing the audio output device, call addBatteryData to notify
+            // the change
+            if (mOutDevice != value) {
+                uint32_t params = 0;
+                // check
whether speaker is on + if (value & AUDIO_DEVICE_OUT_SPEAKER) { + params |= IMediaPlayerService::kBatteryDataSpeakerOn; + } + + audio_devices_t deviceWithoutSpeaker + = AUDIO_DEVICE_OUT_ALL & ~AUDIO_DEVICE_OUT_SPEAKER; + // check if any other device (except speaker) is on + if (value & deviceWithoutSpeaker ) { + params |= IMediaPlayerService::kBatteryDataOtherAudioDeviceOn; + } + + if (params != 0) { + addBatteryData(params); + } + } +#endif + + // forward device change to effects that have requested to be + // aware of attached audio device. + mOutDevice = value; + for (size_t i = 0; i < mEffectChains.size(); i++) { + mEffectChains[i]->setDevice_l(mOutDevice); + } + } + + if (status == NO_ERROR) { + status = mOutput->stream->common.set_parameters(&mOutput->stream->common, + keyValuePair.string()); + if (!mStandby && status == INVALID_OPERATION) { + mOutput->stream->common.standby(&mOutput->stream->common); + mStandby = true; + mBytesWritten = 0; + status = mOutput->stream->common.set_parameters(&mOutput->stream->common, + keyValuePair.string()); + } + if (status == NO_ERROR && reconfig) { + delete mAudioMixer; + // for safety in case readOutputParameters() accesses mAudioMixer (it doesn't) + mAudioMixer = NULL; + readOutputParameters(); + mAudioMixer = new AudioMixer(mNormalFrameCount, mSampleRate); + for (size_t i = 0; i < mTracks.size() ; i++) { + int name = getTrackName_l(mTracks[i]->mChannelMask, mTracks[i]->mSessionId); + if (name < 0) { + break; + } + mTracks[i]->mName = name; + // limit track sample rate to 2 x new output sample rate + if (mTracks[i]->mCblk->sampleRate > 2 * sampleRate()) { + mTracks[i]->mCblk->sampleRate = 2 * sampleRate(); + } + } + sendIoConfigEvent_l(AudioSystem::OUTPUT_CONFIG_CHANGED); + } + } + + mNewParameters.removeAt(0); + + mParamStatus = status; + mParamCond.signal(); + // wait for condition with time out in case the thread calling ThreadBase::setParameters() + // already timed out waiting for the status and will never signal the condition. 
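+        // Hedged sketch of the other side of this handshake (see ThreadBase::setParameters()
+        // for the exact code): the caller, holding mLock, roughly does
+        //     mNewParameters.add(keyValuePairs);   // consumed at the top of this loop
+        //     mWaitWorkCV.signal();                // wake this thread
+        //     if (mParamCond.waitRelative(mLock, kSetParametersTimeoutNs) == NO_ERROR)
+        //         status = mParamStatus;           // value published just above
+        //     else
+        //         status = TIMED_OUT;              // the case the waitRelative below guards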
+ mWaitWorkCV.waitRelative(mLock, kSetParametersTimeoutNs); + } + + if (!(previousCommand & FastMixerState::IDLE)) { + ALOG_ASSERT(mFastMixer != NULL); + FastMixerStateQueue *sq = mFastMixer->sq(); + FastMixerState *state = sq->begin(); + ALOG_ASSERT(state->mCommand == FastMixerState::HOT_IDLE); + state->mCommand = previousCommand; + sq->end(); + sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED); + } + + return reconfig; +} + + +void AudioFlinger::MixerThread::dumpInternals(int fd, const Vector& args) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + PlaybackThread::dumpInternals(fd, args); + + snprintf(buffer, SIZE, "AudioMixer tracks: %08x\n", mAudioMixer->trackNames()); + result.append(buffer); + write(fd, result.string(), result.size()); + + // Make a non-atomic copy of fast mixer dump state so it won't change underneath us + FastMixerDumpState copy = mFastMixerDumpState; + copy.dump(fd); + +#ifdef STATE_QUEUE_DUMP + // Similar for state queue + StateQueueObserverDump observerCopy = mStateQueueObserverDump; + observerCopy.dump(fd); + StateQueueMutatorDump mutatorCopy = mStateQueueMutatorDump; + mutatorCopy.dump(fd); +#endif + + // Write the tee output to a .wav file + dumpTee(fd, mTeeSource, mId); + +#ifdef AUDIO_WATCHDOG + if (mAudioWatchdog != 0) { + // Make a non-atomic copy of audio watchdog dump so it won't change underneath us + AudioWatchdogDump wdCopy = mAudioWatchdogDump; + wdCopy.dump(fd); + } +#endif +} + +uint32_t AudioFlinger::MixerThread::idleSleepTimeUs() const +{ + return (uint32_t)(((mNormalFrameCount * 1000) / mSampleRate) * 1000) / 2; +} + +uint32_t AudioFlinger::MixerThread::suspendSleepTimeUs() const +{ + return (uint32_t)(((mNormalFrameCount * 1000) / mSampleRate) * 1000); +} + +void AudioFlinger::MixerThread::cacheParameters_l() +{ + PlaybackThread::cacheParameters_l(); + + // FIXME: Relaxed timing because of a certain device that can't meet latency + // Should be reduced to 2x after the vendor fixes the driver issue + // increase threshold again due to low power audio mode. The way this warning + // threshold is calculated and its usefulness should be reconsidered anyway. 
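+    // For illustration: with mNormalFrameCount = 1024 at 48 kHz one normal mix buffer is about
+    // 21.3 ms, so maxPeriod ends up around 320 ms; threadLoop() only warns about a blocked
+    // write when a single HAL write takes longer than that.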
+ maxPeriod = seconds(mNormalFrameCount) / mSampleRate * 15; +} + +// ---------------------------------------------------------------------------- + +AudioFlinger::DirectOutputThread::DirectOutputThread(const sp& audioFlinger, + AudioStreamOut* output, audio_io_handle_t id, audio_devices_t device) + : PlaybackThread(audioFlinger, output, id, device, DIRECT) + // mLeftVolFloat, mRightVolFloat +{ +} + +AudioFlinger::DirectOutputThread::~DirectOutputThread() +{ +} + +AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prepareTracks_l( + Vector< sp > *tracksToRemove +) +{ + sp trackToRemove; + + mixer_state mixerStatus = MIXER_IDLE; + + // find out which tracks need to be processed + if (mActiveTracks.size() != 0) { + sp t = mActiveTracks[0].promote(); + // The track died recently + if (t == 0) { + return MIXER_IDLE; + } + + Track* const track = t.get(); + audio_track_cblk_t* cblk = track->cblk(); + + // The first time a track is added we wait + // for all its buffers to be filled before processing it + uint32_t minFrames; + if ((track->sharedBuffer() == 0) && !track->isStopped() && !track->isPausing()) { + minFrames = mNormalFrameCount; + } else { + minFrames = 1; + } + if ((track->framesReady() >= minFrames) && track->isReady() && + !track->isPaused() && !track->isTerminated()) + { + ALOGVV("track %d u=%08x, s=%08x [OK]", track->name(), cblk->user, cblk->server); + + if (track->mFillingUpStatus == Track::FS_FILLED) { + track->mFillingUpStatus = Track::FS_ACTIVE; + mLeftVolFloat = mRightVolFloat = 0; + if (track->mState == TrackBase::RESUMING) { + track->mState = TrackBase::ACTIVE; + } + } + + // compute volume for this track + float left, right; + if (track->isMuted() || mMasterMute || track->isPausing() || + mStreamTypes[track->streamType()].mute) { + left = right = 0; + if (track->isPausing()) { + track->setPaused(); + } + } else { + float typeVolume = mStreamTypes[track->streamType()].volume; + float v = mMasterVolume * typeVolume; + uint32_t vlr = cblk->getVolumeLR(); + float v_clamped = v * (vlr & 0xFFFF); + if (v_clamped > MAX_GAIN) { + v_clamped = MAX_GAIN; + } + left = v_clamped/MAX_GAIN; + v_clamped = v * (vlr >> 16); + if (v_clamped > MAX_GAIN) { + v_clamped = MAX_GAIN; + } + right = v_clamped/MAX_GAIN; + } + + if (left != mLeftVolFloat || right != mRightVolFloat) { + mLeftVolFloat = left; + mRightVolFloat = right; + + // Convert volumes from float to 8.24 + uint32_t vl = (uint32_t)(left * (1 << 24)); + uint32_t vr = (uint32_t)(right * (1 << 24)); + + // Delegate volume control to effect in track effect chain if needed + // only one effect chain can be present on DirectOutputThread, so if + // there is one, the track is connected to it + if (!mEffectChains.isEmpty()) { + // Do not ramp volume if volume is controlled by effect + mEffectChains[0]->setVolume_l(&vl, &vr); + left = (float)vl / (1 << 24); + right = (float)vr / (1 << 24); + } + mOutput->stream->set_volume(mOutput->stream, left, right); + } + + // reset retry count + track->mRetryCount = kMaxTrackRetriesDirect; + mActiveTrack = t; + mixerStatus = MIXER_TRACKS_READY; + } else { + // clear effect chain input buffer if an active track underruns to avoid sending + // previous audio buffer again to effects + if (!mEffectChains.isEmpty()) { + mEffectChains[0]->clearInputBuffer(); + } + + ALOGVV("track %d u=%08x, s=%08x [NOT READY]", track->name(), cblk->user, cblk->server); + if ((track->sharedBuffer() != 0) || track->isTerminated() || + track->isStopped() || track->isPaused()) { + // We have consumed all 
the buffers of this track. + // Remove it from the list of active tracks. + // TODO: implement behavior for compressed audio + size_t audioHALFrames = (latency_l() * mSampleRate) / 1000; + size_t framesWritten = mBytesWritten / mFrameSize; + if (mStandby || track->presentationComplete(framesWritten, audioHALFrames)) { + if (track->isStopped()) { + track->reset(); + } + trackToRemove = track; + } + } else { + // No buffers for this track. Give it a few chances to + // fill a buffer, then remove it from active list. + if (--(track->mRetryCount) <= 0) { + ALOGV("BUFFER TIMEOUT: remove(%d) from active list", track->name()); + trackToRemove = track; + } else { + mixerStatus = MIXER_TRACKS_ENABLED; + } + } + } + } + + // FIXME merge this with similar code for removing multiple tracks + // remove all the tracks that need to be... + if (CC_UNLIKELY(trackToRemove != 0)) { + tracksToRemove->add(trackToRemove); + mActiveTracks.remove(trackToRemove); + if (!mEffectChains.isEmpty()) { + ALOGV("stopping track on chain %p for session Id: %d", mEffectChains[0].get(), + trackToRemove->sessionId()); + mEffectChains[0]->decActiveTrackCnt(); + } + if (trackToRemove->isTerminated()) { + removeTrack_l(trackToRemove); + } + } + + return mixerStatus; +} + +void AudioFlinger::DirectOutputThread::threadLoop_mix() +{ + AudioBufferProvider::Buffer buffer; + size_t frameCount = mFrameCount; + int8_t *curBuf = (int8_t *)mMixBuffer; + // output audio to hardware + while (frameCount) { + buffer.frameCount = frameCount; + mActiveTrack->getNextBuffer(&buffer); + if (CC_UNLIKELY(buffer.raw == NULL)) { + memset(curBuf, 0, frameCount * mFrameSize); + break; + } + memcpy(curBuf, buffer.raw, buffer.frameCount * mFrameSize); + frameCount -= buffer.frameCount; + curBuf += buffer.frameCount * mFrameSize; + mActiveTrack->releaseBuffer(&buffer); + } + sleepTime = 0; + standbyTime = systemTime() + standbyDelay; + mActiveTrack.clear(); + +} + +void AudioFlinger::DirectOutputThread::threadLoop_sleepTime() +{ + if (sleepTime == 0) { + if (mMixerStatus == MIXER_TRACKS_ENABLED) { + sleepTime = activeSleepTime; + } else { + sleepTime = idleSleepTime; + } + } else if (mBytesWritten != 0 && audio_is_linear_pcm(mFormat)) { + memset(mMixBuffer, 0, mFrameCount * mFrameSize); + sleepTime = 0; + } +} + +// getTrackName_l() must be called with ThreadBase::mLock held +int AudioFlinger::DirectOutputThread::getTrackName_l(audio_channel_mask_t channelMask, + int sessionId) +{ + return 0; +} + +// deleteTrackName_l() must be called with ThreadBase::mLock held +void AudioFlinger::DirectOutputThread::deleteTrackName_l(int name) +{ +} + +// checkForNewParameters_l() must be called with ThreadBase::mLock held +bool AudioFlinger::DirectOutputThread::checkForNewParameters_l() +{ + bool reconfig = false; + + while (!mNewParameters.isEmpty()) { + status_t status = NO_ERROR; + String8 keyValuePair = mNewParameters[0]; + AudioParameter param = AudioParameter(keyValuePair); + int value; + + if (param.getInt(String8(AudioParameter::keyFrameCount), value) == NO_ERROR) { + // do not accept frame count changes if tracks are open as the track buffer + // size depends on frame count and correct behavior would not be garantied + // if frame count is changed after track creation + if (!mTracks.isEmpty()) { + status = INVALID_OPERATION; + } else { + reconfig = true; + } + } + if (status == NO_ERROR) { + status = mOutput->stream->common.set_parameters(&mOutput->stream->common, + keyValuePair.string()); + if (!mStandby && status == INVALID_OPERATION) { + 
mOutput->stream->common.standby(&mOutput->stream->common); + mStandby = true; + mBytesWritten = 0; + status = mOutput->stream->common.set_parameters(&mOutput->stream->common, + keyValuePair.string()); + } + if (status == NO_ERROR && reconfig) { + readOutputParameters(); + sendIoConfigEvent_l(AudioSystem::OUTPUT_CONFIG_CHANGED); + } + } + + mNewParameters.removeAt(0); + + mParamStatus = status; + mParamCond.signal(); + // wait for condition with time out in case the thread calling ThreadBase::setParameters() + // already timed out waiting for the status and will never signal the condition. + mWaitWorkCV.waitRelative(mLock, kSetParametersTimeoutNs); + } + return reconfig; +} + +uint32_t AudioFlinger::DirectOutputThread::activeSleepTimeUs() const +{ + uint32_t time; + if (audio_is_linear_pcm(mFormat)) { + time = PlaybackThread::activeSleepTimeUs(); + } else { + time = 10000; + } + return time; +} + +uint32_t AudioFlinger::DirectOutputThread::idleSleepTimeUs() const +{ + uint32_t time; + if (audio_is_linear_pcm(mFormat)) { + time = (uint32_t)(((mFrameCount * 1000) / mSampleRate) * 1000) / 2; + } else { + time = 10000; + } + return time; +} + +uint32_t AudioFlinger::DirectOutputThread::suspendSleepTimeUs() const +{ + uint32_t time; + if (audio_is_linear_pcm(mFormat)) { + time = (uint32_t)(((mFrameCount * 1000) / mSampleRate) * 1000); + } else { + time = 10000; + } + return time; +} + +void AudioFlinger::DirectOutputThread::cacheParameters_l() +{ + PlaybackThread::cacheParameters_l(); + + // use shorter standby delay as on normal output to release + // hardware resources as soon as possible + standbyDelay = microseconds(activeSleepTime*2); +} + +// ---------------------------------------------------------------------------- + +AudioFlinger::DuplicatingThread::DuplicatingThread(const sp& audioFlinger, + AudioFlinger::MixerThread* mainThread, audio_io_handle_t id) + : MixerThread(audioFlinger, mainThread->getOutput(), id, mainThread->outDevice(), + DUPLICATING), + mWaitTimeMs(UINT_MAX) +{ + addOutputTrack(mainThread); +} + +AudioFlinger::DuplicatingThread::~DuplicatingThread() +{ + for (size_t i = 0; i < mOutputTracks.size(); i++) { + mOutputTracks[i]->destroy(); + } +} + +void AudioFlinger::DuplicatingThread::threadLoop_mix() +{ + // mix buffers... 
+ if (outputsReady(outputTracks)) { + mAudioMixer->process(AudioBufferProvider::kInvalidPTS); + } else { + memset(mMixBuffer, 0, mixBufferSize); + } + sleepTime = 0; + writeFrames = mNormalFrameCount; + standbyTime = systemTime() + standbyDelay; +} + +void AudioFlinger::DuplicatingThread::threadLoop_sleepTime() +{ + if (sleepTime == 0) { + if (mMixerStatus == MIXER_TRACKS_ENABLED) { + sleepTime = activeSleepTime; + } else { + sleepTime = idleSleepTime; + } + } else if (mBytesWritten != 0) { + if (mMixerStatus == MIXER_TRACKS_ENABLED) { + writeFrames = mNormalFrameCount; + memset(mMixBuffer, 0, mixBufferSize); + } else { + // flush remaining overflow buffers in output tracks + writeFrames = 0; + } + sleepTime = 0; + } +} + +void AudioFlinger::DuplicatingThread::threadLoop_write() +{ + for (size_t i = 0; i < outputTracks.size(); i++) { + outputTracks[i]->write(mMixBuffer, writeFrames); + } + mBytesWritten += mixBufferSize; +} + +void AudioFlinger::DuplicatingThread::threadLoop_standby() +{ + // DuplicatingThread implements standby by stopping all tracks + for (size_t i = 0; i < outputTracks.size(); i++) { + outputTracks[i]->stop(); + } +} + +void AudioFlinger::DuplicatingThread::saveOutputTracks() +{ + outputTracks = mOutputTracks; +} + +void AudioFlinger::DuplicatingThread::clearOutputTracks() +{ + outputTracks.clear(); +} + +void AudioFlinger::DuplicatingThread::addOutputTrack(MixerThread *thread) +{ + Mutex::Autolock _l(mLock); + // FIXME explain this formula + size_t frameCount = (3 * mNormalFrameCount * mSampleRate) / thread->sampleRate(); + OutputTrack *outputTrack = new OutputTrack(thread, + this, + mSampleRate, + mFormat, + mChannelMask, + frameCount); + if (outputTrack->cblk() != NULL) { + thread->setStreamVolume(AUDIO_STREAM_CNT, 1.0f); + mOutputTracks.add(outputTrack); + ALOGV("addOutputTrack() track %p, on thread %p", outputTrack, thread); + updateWaitTime_l(); + } +} + +void AudioFlinger::DuplicatingThread::removeOutputTrack(MixerThread *thread) +{ + Mutex::Autolock _l(mLock); + for (size_t i = 0; i < mOutputTracks.size(); i++) { + if (mOutputTracks[i]->thread() == thread) { + mOutputTracks[i]->destroy(); + mOutputTracks.removeAt(i); + updateWaitTime_l(); + return; + } + } + ALOGV("removeOutputTrack(): unkonwn thread: %p", thread); +} + +// caller must hold mLock +void AudioFlinger::DuplicatingThread::updateWaitTime_l() +{ + mWaitTimeMs = UINT_MAX; + for (size_t i = 0; i < mOutputTracks.size(); i++) { + sp strong = mOutputTracks[i]->thread().promote(); + if (strong != 0) { + uint32_t waitTimeMs = (strong->frameCount() * 2 * 1000) / strong->sampleRate(); + if (waitTimeMs < mWaitTimeMs) { + mWaitTimeMs = waitTimeMs; + } + } + } +} + + +bool AudioFlinger::DuplicatingThread::outputsReady( + const SortedVector< sp > &outputTracks) +{ + for (size_t i = 0; i < outputTracks.size(); i++) { + sp thread = outputTracks[i]->thread().promote(); + if (thread == 0) { + ALOGW("DuplicatingThread::outputsReady() could not promote thread on output track %p", + outputTracks[i].get()); + return false; + } + PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); + // see note at standby() declaration + if (playbackThread->standby() && !playbackThread->isSuspended()) { + ALOGV("DuplicatingThread output track %p on thread %p Not Ready", outputTracks[i].get(), + thread.get()); + return false; + } + } + return true; +} + +uint32_t AudioFlinger::DuplicatingThread::activeSleepTimeUs() const +{ + return (mWaitTimeMs * 1000) / 2; +} + +void AudioFlinger::DuplicatingThread::cacheParameters_l() +{ + 
// updateWaitTime_l() sets mWaitTimeMs, which affects activeSleepTimeUs(), so call it first + updateWaitTime_l(); + + MixerThread::cacheParameters_l(); +} + +// ---------------------------------------------------------------------------- +// Record +// ---------------------------------------------------------------------------- + +AudioFlinger::RecordThread::RecordThread(const sp& audioFlinger, + AudioStreamIn *input, + uint32_t sampleRate, + audio_channel_mask_t channelMask, + audio_io_handle_t id, + audio_devices_t device, + const sp& teeSink) : + ThreadBase(audioFlinger, id, AUDIO_DEVICE_NONE, device, RECORD), + mInput(input), mResampler(NULL), mRsmpOutBuffer(NULL), mRsmpInBuffer(NULL), + // mRsmpInIndex and mInputBytes set by readInputParameters() + mReqChannelCount(popcount(channelMask)), + mReqSampleRate(sampleRate), + // mBytesRead is only meaningful while active, and so is cleared in start() + // (but might be better to also clear here for dump?) + mTeeSink(teeSink) +{ + snprintf(mName, kNameLength, "AudioIn_%X", id); + + readInputParameters(); + +} + + +AudioFlinger::RecordThread::~RecordThread() +{ + delete[] mRsmpInBuffer; + delete mResampler; + delete[] mRsmpOutBuffer; +} + +void AudioFlinger::RecordThread::onFirstRef() +{ + run(mName, PRIORITY_URGENT_AUDIO); +} + +status_t AudioFlinger::RecordThread::readyToRun() +{ + status_t status = initCheck(); + ALOGW_IF(status != NO_ERROR,"RecordThread %p could not initialize", this); + return status; +} + +bool AudioFlinger::RecordThread::threadLoop() +{ + AudioBufferProvider::Buffer buffer; + sp activeTrack; + Vector< sp > effectChains; + + nsecs_t lastWarning = 0; + + inputStandBy(); + acquireWakeLock(); + + // used to verify we've read at least once before evaluating how many bytes were read + bool readOnce = false; + + // start recording + while (!exitPending()) { + + processConfigEvents(); + + { // scope for mLock + Mutex::Autolock _l(mLock); + checkForNewParameters_l(); + if (mActiveTrack == 0 && mConfigEvents.isEmpty()) { + standby(); + + if (exitPending()) { + break; + } + + releaseWakeLock_l(); + ALOGV("RecordThread: loop stopping"); + // go to sleep + mWaitWorkCV.wait(mLock); + ALOGV("RecordThread: loop starting"); + acquireWakeLock_l(); + continue; + } + if (mActiveTrack != 0) { + if (mActiveTrack->mState == TrackBase::PAUSING) { + standby(); + mActiveTrack.clear(); + mStartStopCond.broadcast(); + } else if (mActiveTrack->mState == TrackBase::RESUMING) { + if (mReqChannelCount != mActiveTrack->channelCount()) { + mActiveTrack.clear(); + mStartStopCond.broadcast(); + } else if (readOnce) { + // record start succeeds only if first read from audio input + // succeeds + if (mBytesRead >= 0) { + mActiveTrack->mState = TrackBase::ACTIVE; + } else { + mActiveTrack.clear(); + } + mStartStopCond.broadcast(); + } + mStandby = false; + } else if (mActiveTrack->mState == TrackBase::TERMINATED) { + removeTrack_l(mActiveTrack); + mActiveTrack.clear(); + } + } + lockEffectChains_l(effectChains); + } + + if (mActiveTrack != 0) { + if (mActiveTrack->mState != TrackBase::ACTIVE && + mActiveTrack->mState != TrackBase::RESUMING) { + unlockEffectChains(effectChains); + usleep(kRecordThreadSleepUs); + continue; + } + for (size_t i = 0; i < effectChains.size(); i ++) { + effectChains[i]->process_l(); + } + + buffer.frameCount = mFrameCount; + if (CC_LIKELY(mActiveTrack->getNextBuffer(&buffer) == NO_ERROR)) { + readOnce = true; + size_t framesOut = buffer.frameCount; + if (mResampler == NULL) { + // no resampling + while (framesOut) { + size_t 
framesIn = mFrameCount - mRsmpInIndex; + if (framesIn) { + int8_t *src = (int8_t *)mRsmpInBuffer + mRsmpInIndex * mFrameSize; + int8_t *dst = buffer.i8 + (buffer.frameCount - framesOut) * + mActiveTrack->mFrameSize; + if (framesIn > framesOut) + framesIn = framesOut; + mRsmpInIndex += framesIn; + framesOut -= framesIn; + if (mChannelCount == mReqChannelCount || + mFormat != AUDIO_FORMAT_PCM_16_BIT) { + memcpy(dst, src, framesIn * mFrameSize); + } else { + if (mChannelCount == 1) { + upmix_to_stereo_i16_from_mono_i16((int16_t *)dst, + (int16_t *)src, framesIn); + } else { + downmix_to_mono_i16_from_stereo_i16((int16_t *)dst, + (int16_t *)src, framesIn); + } + } + } + if (framesOut && mFrameCount == mRsmpInIndex) { + void *readInto; + if (framesOut == mFrameCount && + (mChannelCount == mReqChannelCount || + mFormat != AUDIO_FORMAT_PCM_16_BIT)) { + readInto = buffer.raw; + framesOut = 0; + } else { + readInto = mRsmpInBuffer; + mRsmpInIndex = 0; + } + mBytesRead = mInput->stream->read(mInput->stream, readInto, mInputBytes); + if (mBytesRead <= 0) { + if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE)) + { + ALOGE("Error reading audio input"); + // Force input into standby so that it tries to + // recover at next read attempt + inputStandBy(); + usleep(kRecordThreadSleepUs); + } + mRsmpInIndex = mFrameCount; + framesOut = 0; + buffer.frameCount = 0; + } else if (mTeeSink != 0) { + (void) mTeeSink->write(readInto, + mBytesRead >> Format_frameBitShift(mTeeSink->format())); + } + } + } + } else { + // resampling + + memset(mRsmpOutBuffer, 0, framesOut * 2 * sizeof(int32_t)); + // alter output frame count as if we were expecting stereo samples + if (mChannelCount == 1 && mReqChannelCount == 1) { + framesOut >>= 1; + } + mResampler->resample(mRsmpOutBuffer, framesOut, + this /* AudioBufferProvider* */); + // ditherAndClamp() works as long as all buffers returned by + // mActiveTrack->getNextBuffer() are 32 bit aligned which should be always true. + if (mChannelCount == 2 && mReqChannelCount == 1) { + ditherAndClamp(mRsmpOutBuffer, mRsmpOutBuffer, framesOut); + // the resampler always outputs stereo samples: + // do post stereo to mono conversion + downmix_to_mono_i16_from_stereo_i16(buffer.i16, (int16_t *)mRsmpOutBuffer, + framesOut); + } else { + ditherAndClamp((int32_t *)buffer.raw, mRsmpOutBuffer, framesOut); + } + + } + if (mFramestoDrop == 0) { + mActiveTrack->releaseBuffer(&buffer); + } else { + if (mFramestoDrop > 0) { + mFramestoDrop -= buffer.frameCount; + if (mFramestoDrop <= 0) { + clearSyncStartEvent(); + } + } else { + mFramestoDrop += buffer.frameCount; + if (mFramestoDrop >= 0 || mSyncStartEvent == 0 || + mSyncStartEvent->isCancelled()) { + ALOGW("Synced record %s, session %d, trigger session %d", + (mFramestoDrop >= 0) ? "timed out" : "cancelled", + mActiveTrack->sessionId(), + (mSyncStartEvent != 0) ? mSyncStartEvent->triggerSession() : 0); + clearSyncStartEvent(); + } + } + } + mActiveTrack->clearOverflow(); + } + // client isn't retrieving buffers fast enough + else { + if (!mActiveTrack->setOverflow()) { + nsecs_t now = systemTime(); + if ((now - lastWarning) > kWarningThrottleNs) { + ALOGW("RecordThread: buffer overflow"); + lastWarning = now; + } + } + // Release the processor for a while before asking for a new buffer. + // This will give the application more chance to read from the buffer and + // clear the overflow. 
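
The no-resampling copy path above relies on two small channel-conversion helpers, upmix_to_stereo_i16_from_mono_i16() and downmix_to_mono_i16_from_stereo_i16(). Their implementations live in the shared audio utilities rather than in this patch; a minimal sketch of what such helpers plausibly do (the mono sample is duplicated into both channels on the way up, channel pairs are averaged on the way down) is shown below. The "_sketch" names are used here to make clear these are illustrative stand-ins, not the library functions themselves.

    #include <stdint.h>
    #include <stddef.h>

    // Hypothetical stand-in for the upmix helper called in the loop above.
    static void upmix_to_stereo_i16_from_mono_i16_sketch(int16_t *dst, const int16_t *src,
            size_t frames)
    {
        for (size_t i = 0; i < frames; i++) {
            dst[2 * i]     = src[i];   // left  = mono sample
            dst[2 * i + 1] = src[i];   // right = mono sample
        }
    }

    // Hypothetical stand-in for the downmix helper called in the loop above.
    static void downmix_to_mono_i16_from_stereo_i16_sketch(int16_t *dst, const int16_t *src,
            size_t frames)
    {
        for (size_t i = 0; i < frames; i++) {
            // average left and right; keep the intermediate sum in 32 bits to avoid overflow
            dst[i] = (int16_t)(((int32_t)src[2 * i] + (int32_t)src[2 * i + 1]) >> 1);
        }
    }
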
+ usleep(kRecordThreadSleepUs); + } + } + // enable changes in effect chain + unlockEffectChains(effectChains); + effectChains.clear(); + } + + standby(); + + { + Mutex::Autolock _l(mLock); + mActiveTrack.clear(); + mStartStopCond.broadcast(); + } + + releaseWakeLock(); + + ALOGV("RecordThread %p exiting", this); + return false; +} + +void AudioFlinger::RecordThread::standby() +{ + if (!mStandby) { + inputStandBy(); + mStandby = true; + } +} + +void AudioFlinger::RecordThread::inputStandBy() +{ + mInput->stream->common.standby(&mInput->stream->common); +} + +sp AudioFlinger::RecordThread::createRecordTrack_l( + const sp& client, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + int sessionId, + IAudioFlinger::track_flags_t flags, + pid_t tid, + status_t *status) +{ + sp track; + status_t lStatus; + + lStatus = initCheck(); + if (lStatus != NO_ERROR) { + ALOGE("Audio driver not initialized."); + goto Exit; + } + + // FIXME use flags and tid similar to createTrack_l() + + { // scope for mLock + Mutex::Autolock _l(mLock); + + track = new RecordTrack(this, client, sampleRate, + format, channelMask, frameCount, sessionId); + + if (track->getCblk() == 0) { + lStatus = NO_MEMORY; + goto Exit; + } + mTracks.add(track); + + // disable AEC and NS if the device is a BT SCO headset supporting those pre processings + bool suspend = audio_is_bluetooth_sco_device(mInDevice) && + mAudioFlinger->btNrecIsOff(); + setEffectSuspended_l(FX_IID_AEC, suspend, sessionId); + setEffectSuspended_l(FX_IID_NS, suspend, sessionId); + } + lStatus = NO_ERROR; + +Exit: + if (status) { + *status = lStatus; + } + return track; +} + +status_t AudioFlinger::RecordThread::start(RecordThread::RecordTrack* recordTrack, + AudioSystem::sync_event_t event, + int triggerSession) +{ + ALOGV("RecordThread::start event %d, triggerSession %d", event, triggerSession); + sp strongMe = this; + status_t status = NO_ERROR; + + if (event == AudioSystem::SYNC_EVENT_NONE) { + clearSyncStartEvent(); + } else if (event != AudioSystem::SYNC_EVENT_SAME) { + mSyncStartEvent = mAudioFlinger->createSyncEvent(event, + triggerSession, + recordTrack->sessionId(), + syncStartEventCallback, + this); + // Sync event can be cancelled by the trigger session if the track is not in a + // compatible state in which case we start record immediately + if (mSyncStartEvent->isCancelled()) { + clearSyncStartEvent(); + } else { + // do not wait for the event for more than AudioSystem::kSyncRecordStartTimeOutMs + mFramestoDrop = - ((AudioSystem::kSyncRecordStartTimeOutMs * mReqSampleRate) / 1000); + } + } + + { + AutoMutex lock(mLock); + if (mActiveTrack != 0) { + if (recordTrack != mActiveTrack.get()) { + status = -EBUSY; + } else if (mActiveTrack->mState == TrackBase::PAUSING) { + mActiveTrack->mState = TrackBase::ACTIVE; + } + return status; + } + + recordTrack->mState = TrackBase::IDLE; + mActiveTrack = recordTrack; + mLock.unlock(); + status_t status = AudioSystem::startInput(mId); + mLock.lock(); + if (status != NO_ERROR) { + mActiveTrack.clear(); + clearSyncStartEvent(); + return status; + } + mRsmpInIndex = mFrameCount; + mBytesRead = 0; + if (mResampler != NULL) { + mResampler->reset(); + } + mActiveTrack->mState = TrackBase::RESUMING; + // signal thread to start + ALOGV("Signal record thread"); + mWaitWorkCV.broadcast(); + // do not wait for mStartStopCond if exiting + if (exitPending()) { + mActiveTrack.clear(); + status = INVALID_OPERATION; + goto startError; + } + mStartStopCond.wait(mLock); + if 
(mActiveTrack == 0) { + ALOGV("Record failed to start"); + status = BAD_VALUE; + goto startError; + } + ALOGV("Record started OK"); + return status; + } +startError: + AudioSystem::stopInput(mId); + clearSyncStartEvent(); + return status; +} + +void AudioFlinger::RecordThread::clearSyncStartEvent() +{ + if (mSyncStartEvent != 0) { + mSyncStartEvent->cancel(); + } + mSyncStartEvent.clear(); + mFramestoDrop = 0; +} + +void AudioFlinger::RecordThread::syncStartEventCallback(const wp& event) +{ + sp strongEvent = event.promote(); + + if (strongEvent != 0) { + RecordThread *me = (RecordThread *)strongEvent->cookie(); + me->handleSyncStartEvent(strongEvent); + } +} + +void AudioFlinger::RecordThread::handleSyncStartEvent(const sp& event) +{ + if (event == mSyncStartEvent) { + // TODO: use actual buffer filling status instead of 2 buffers when info is available + // from audio HAL + mFramestoDrop = mFrameCount * 2; + } +} + +bool AudioFlinger::RecordThread::stop_l(RecordThread::RecordTrack* recordTrack) { + ALOGV("RecordThread::stop"); + if (recordTrack != mActiveTrack.get() || recordTrack->mState == TrackBase::PAUSING) { + return false; + } + recordTrack->mState = TrackBase::PAUSING; + // do not wait for mStartStopCond if exiting + if (exitPending()) { + return true; + } + mStartStopCond.wait(mLock); + // if we have been restarted, recordTrack == mActiveTrack.get() here + if (exitPending() || recordTrack != mActiveTrack.get()) { + ALOGV("Record stopped OK"); + return true; + } + return false; +} + +bool AudioFlinger::RecordThread::isValidSyncEvent(const sp& event) const +{ + return false; +} + +status_t AudioFlinger::RecordThread::setSyncEvent(const sp& event) +{ +#if 0 // This branch is currently dead code, but is preserved in case it will be needed in future + if (!isValidSyncEvent(event)) { + return BAD_VALUE; + } + + int eventSession = event->triggerSession(); + status_t ret = NAME_NOT_FOUND; + + Mutex::Autolock _l(mLock); + + for (size_t i = 0; i < mTracks.size(); i++) { + sp track = mTracks[i]; + if (eventSession == track->sessionId()) { + (void) track->setSyncEvent(event); + ret = NO_ERROR; + } + } + return ret; +#else + return BAD_VALUE; +#endif +} + +// destroyTrack_l() must be called with ThreadBase::mLock held +void AudioFlinger::RecordThread::destroyTrack_l(const sp& track) +{ + track->mState = TrackBase::TERMINATED; + // active tracks are removed by threadLoop() + if (mActiveTrack != track) { + removeTrack_l(track); + } +} + +void AudioFlinger::RecordThread::removeTrack_l(const sp& track) +{ + mTracks.remove(track); + // need anything related to effects here? 
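
The synced-start logic above expresses its timeout as a negative frame count: start() converts AudioSystem::kSyncRecordStartTimeOutMs into frames at the requested sample rate and negates it, while handleSyncStartEvent() switches to a positive budget of roughly two HAL buffers once the trigger fires. A small sketch of the millisecond-to-frame conversion involved (the function name is chosen here for illustration only):

    #include <stdint.h>

    // Convert a duration in milliseconds to a frame count at the given sample rate.
    // Mirrors the arithmetic used for mFramestoDrop above; truncation toward zero is
    // acceptable because the value is only a coarse timeout/drop budget.
    static uint32_t msToFrames(uint32_t durationMs, uint32_t sampleRate)
    {
        return (durationMs * sampleRate) / 1000;
    }

    // Example: a 3000 ms timeout at 48000 Hz yields 144000 frames; the value is stored
    // negated so the sign distinguishes "drop until timeout" from "drop N frames".
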
+} + +void AudioFlinger::RecordThread::dump(int fd, const Vector& args) +{ + dumpInternals(fd, args); + dumpTracks(fd, args); + dumpEffectChains(fd, args); +} + +void AudioFlinger::RecordThread::dumpInternals(int fd, const Vector& args) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, "\nInput thread %p internals\n", this); + result.append(buffer); + + if (mActiveTrack != 0) { + snprintf(buffer, SIZE, "In index: %d\n", mRsmpInIndex); + result.append(buffer); + snprintf(buffer, SIZE, "In size: %d\n", mInputBytes); + result.append(buffer); + snprintf(buffer, SIZE, "Resampling: %d\n", (mResampler != NULL)); + result.append(buffer); + snprintf(buffer, SIZE, "Out channel count: %u\n", mReqChannelCount); + result.append(buffer); + snprintf(buffer, SIZE, "Out sample rate: %u\n", mReqSampleRate); + result.append(buffer); + } else { + result.append("No active record client\n"); + } + + write(fd, result.string(), result.size()); + + dumpBase(fd, args); +} + +void AudioFlinger::RecordThread::dumpTracks(int fd, const Vector& args) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, "Input thread %p tracks\n", this); + result.append(buffer); + RecordTrack::appendDumpHeader(result); + for (size_t i = 0; i < mTracks.size(); ++i) { + sp track = mTracks[i]; + if (track != 0) { + track->dump(buffer, SIZE); + result.append(buffer); + } + } + + if (mActiveTrack != 0) { + snprintf(buffer, SIZE, "\nInput thread %p active tracks\n", this); + result.append(buffer); + RecordTrack::appendDumpHeader(result); + mActiveTrack->dump(buffer, SIZE); + result.append(buffer); + + } + write(fd, result.string(), result.size()); +} + +// AudioBufferProvider interface +status_t AudioFlinger::RecordThread::getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts) +{ + size_t framesReq = buffer->frameCount; + size_t framesReady = mFrameCount - mRsmpInIndex; + int channelCount; + + if (framesReady == 0) { + mBytesRead = mInput->stream->read(mInput->stream, mRsmpInBuffer, mInputBytes); + if (mBytesRead <= 0) { + if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE)) { + ALOGE("RecordThread::getNextBuffer() Error reading audio input"); + // Force input into standby so that it tries to + // recover at next read attempt + inputStandBy(); + usleep(kRecordThreadSleepUs); + } + buffer->raw = NULL; + buffer->frameCount = 0; + return NOT_ENOUGH_DATA; + } + mRsmpInIndex = 0; + framesReady = mFrameCount; + } + + if (framesReq > framesReady) { + framesReq = framesReady; + } + + if (mChannelCount == 1 && mReqChannelCount == 2) { + channelCount = 1; + } else { + channelCount = 2; + } + buffer->raw = mRsmpInBuffer + mRsmpInIndex * channelCount; + buffer->frameCount = framesReq; + return NO_ERROR; +} + +// AudioBufferProvider interface +void AudioFlinger::RecordThread::releaseBuffer(AudioBufferProvider::Buffer* buffer) +{ + mRsmpInIndex += buffer->frameCount; + buffer->frameCount = 0; +} + +bool AudioFlinger::RecordThread::checkForNewParameters_l() +{ + bool reconfig = false; + + while (!mNewParameters.isEmpty()) { + status_t status = NO_ERROR; + String8 keyValuePair = mNewParameters[0]; + AudioParameter param = AudioParameter(keyValuePair); + int value; + audio_format_t reqFormat = mFormat; + uint32_t reqSamplingRate = mReqSampleRate; + uint32_t reqChannelCount = mReqChannelCount; + + if (param.getInt(String8(AudioParameter::keySamplingRate), value) == NO_ERROR) { + reqSamplingRate = value; + reconfig = true; + } + if 
(param.getInt(String8(AudioParameter::keyFormat), value) == NO_ERROR) { + reqFormat = (audio_format_t) value; + reconfig = true; + } + if (param.getInt(String8(AudioParameter::keyChannels), value) == NO_ERROR) { + reqChannelCount = popcount(value); + reconfig = true; + } + if (param.getInt(String8(AudioParameter::keyFrameCount), value) == NO_ERROR) { + // do not accept frame count changes if tracks are open as the track buffer + // size depends on frame count and correct behavior would not be guaranteed + // if frame count is changed after track creation + if (mActiveTrack != 0) { + status = INVALID_OPERATION; + } else { + reconfig = true; + } + } + if (param.getInt(String8(AudioParameter::keyRouting), value) == NO_ERROR) { + // forward device change to effects that have requested to be + // aware of attached audio device. + for (size_t i = 0; i < mEffectChains.size(); i++) { + mEffectChains[i]->setDevice_l(value); + } + + // store input device and output device but do not forward output device to audio HAL. + // Note that status is ignored by the caller for output device + // (see AudioFlinger::setParameters() + if (audio_is_output_devices(value)) { + mOutDevice = value; + status = BAD_VALUE; + } else { + mInDevice = value; + // disable AEC and NS if the device is a BT SCO headset supporting those + // pre processings + if (mTracks.size() > 0) { + bool suspend = audio_is_bluetooth_sco_device(mInDevice) && + mAudioFlinger->btNrecIsOff(); + for (size_t i = 0; i < mTracks.size(); i++) { + sp track = mTracks[i]; + setEffectSuspended_l(FX_IID_AEC, suspend, track->sessionId()); + setEffectSuspended_l(FX_IID_NS, suspend, track->sessionId()); + } + } + } + } + if (param.getInt(String8(AudioParameter::keyInputSource), value) == NO_ERROR && + mAudioSource != (audio_source_t)value) { + // forward device change to effects that have requested to be + // aware of attached audio device. + for (size_t i = 0; i < mEffectChains.size(); i++) { + mEffectChains[i]->setAudioSource_l((audio_source_t)value); + } + mAudioSource = (audio_source_t)value; + } + if (status == NO_ERROR) { + status = mInput->stream->common.set_parameters(&mInput->stream->common, + keyValuePair.string()); + if (status == INVALID_OPERATION) { + inputStandBy(); + status = mInput->stream->common.set_parameters(&mInput->stream->common, + keyValuePair.string()); + } + if (reconfig) { + if (status == BAD_VALUE && + reqFormat == mInput->stream->common.get_format(&mInput->stream->common) && + reqFormat == AUDIO_FORMAT_PCM_16_BIT && + ((int)mInput->stream->common.get_sample_rate(&mInput->stream->common) + <= (2 * reqSamplingRate)) && + popcount(mInput->stream->common.get_channels(&mInput->stream->common)) + <= FCC_2 && + (reqChannelCount <= FCC_2)) { + status = NO_ERROR; + } + if (status == NO_ERROR) { + readInputParameters(); + sendIoConfigEvent_l(AudioSystem::INPUT_CONFIG_CHANGED); + } + } + } + + mNewParameters.removeAt(0); + + mParamStatus = status; + mParamCond.signal(); + // wait for condition with time out in case the thread calling ThreadBase::setParameters() + // already timed out waiting for the status and will never signal the condition. 
+ mWaitWorkCV.waitRelative(mLock, kSetParametersTimeoutNs); + } + return reconfig; +} + +String8 AudioFlinger::RecordThread::getParameters(const String8& keys) +{ + char *s; + String8 out_s8 = String8(); + + Mutex::Autolock _l(mLock); + if (initCheck() != NO_ERROR) { + return out_s8; + } + + s = mInput->stream->common.get_parameters(&mInput->stream->common, keys.string()); + out_s8 = String8(s); + free(s); + return out_s8; +} + +void AudioFlinger::RecordThread::audioConfigChanged_l(int event, int param) { + AudioSystem::OutputDescriptor desc; + void *param2 = NULL; + + switch (event) { + case AudioSystem::INPUT_OPENED: + case AudioSystem::INPUT_CONFIG_CHANGED: + desc.channels = mChannelMask; + desc.samplingRate = mSampleRate; + desc.format = mFormat; + desc.frameCount = mFrameCount; + desc.latency = 0; + param2 = &desc; + break; + + case AudioSystem::INPUT_CLOSED: + default: + break; + } + mAudioFlinger->audioConfigChanged_l(event, mId, param2); +} + +void AudioFlinger::RecordThread::readInputParameters() +{ + delete mRsmpInBuffer; + // mRsmpInBuffer is always assigned a new[] below + delete mRsmpOutBuffer; + mRsmpOutBuffer = NULL; + delete mResampler; + mResampler = NULL; + + mSampleRate = mInput->stream->common.get_sample_rate(&mInput->stream->common); + mChannelMask = mInput->stream->common.get_channels(&mInput->stream->common); + mChannelCount = (uint16_t)popcount(mChannelMask); + mFormat = mInput->stream->common.get_format(&mInput->stream->common); + mFrameSize = audio_stream_frame_size(&mInput->stream->common); + mInputBytes = mInput->stream->common.get_buffer_size(&mInput->stream->common); + mFrameCount = mInputBytes / mFrameSize; + mNormalFrameCount = mFrameCount; // not used by record, but used by input effects + mRsmpInBuffer = new int16_t[mFrameCount * mChannelCount]; + + if (mSampleRate != mReqSampleRate && mChannelCount <= FCC_2 && mReqChannelCount <= FCC_2) + { + int channelCount; + // optimization: if mono to mono, use the resampler in stereo to stereo mode to avoid + // stereo to mono post process as the resampler always outputs stereo. 
+ if (mChannelCount == 1 && mReqChannelCount == 2) { + channelCount = 1; + } else { + channelCount = 2; + } + mResampler = AudioResampler::create(16, channelCount, mReqSampleRate); + mResampler->setSampleRate(mSampleRate); + mResampler->setVolume(AudioMixer::UNITY_GAIN, AudioMixer::UNITY_GAIN); + mRsmpOutBuffer = new int32_t[mFrameCount * 2]; + + // optmization: if mono to mono, alter input frame count as if we were inputing + // stereo samples + if (mChannelCount == 1 && mReqChannelCount == 1) { + mFrameCount >>= 1; + } + + } + mRsmpInIndex = mFrameCount; +} + +unsigned int AudioFlinger::RecordThread::getInputFramesLost() +{ + Mutex::Autolock _l(mLock); + if (initCheck() != NO_ERROR) { + return 0; + } + + return mInput->stream->get_input_frames_lost(mInput->stream); +} + +uint32_t AudioFlinger::RecordThread::hasAudioSession(int sessionId) const +{ + Mutex::Autolock _l(mLock); + uint32_t result = 0; + if (getEffectChain_l(sessionId) != 0) { + result = EFFECT_SESSION; + } + + for (size_t i = 0; i < mTracks.size(); ++i) { + if (sessionId == mTracks[i]->sessionId()) { + result |= TRACK_SESSION; + break; + } + } + + return result; +} + +KeyedVector AudioFlinger::RecordThread::sessionIds() const +{ + KeyedVector ids; + Mutex::Autolock _l(mLock); + for (size_t j = 0; j < mTracks.size(); ++j) { + sp track = mTracks[j]; + int sessionId = track->sessionId(); + if (ids.indexOfKey(sessionId) < 0) { + ids.add(sessionId, true); + } + } + return ids; +} + +AudioFlinger::AudioStreamIn* AudioFlinger::RecordThread::clearInput() +{ + Mutex::Autolock _l(mLock); + AudioStreamIn *input = mInput; + mInput = NULL; + return input; +} + +// this method must always be called either with ThreadBase mLock held or inside the thread loop +audio_stream_t* AudioFlinger::RecordThread::stream() const +{ + if (mInput == NULL) { + return NULL; + } + return &mInput->stream->common; +} + +status_t AudioFlinger::RecordThread::addEffectChain_l(const sp& chain) +{ + // only one chain per input thread + if (mEffectChains.size() != 0) { + return INVALID_OPERATION; + } + ALOGV("addEffectChain_l() %p on thread %p", chain.get(), this); + + chain->setInBuffer(NULL); + chain->setOutBuffer(NULL); + + checkSuspendOnAddEffectChain_l(chain); + + mEffectChains.add(chain); + + return NO_ERROR; +} + +size_t AudioFlinger::RecordThread::removeEffectChain_l(const sp& chain) +{ + ALOGV("removeEffectChain_l() %p from thread %p", chain.get(), this); + ALOGW_IF(mEffectChains.size() != 1, + "removeEffectChain_l() %p invalid chain size %d on thread %p", + chain.get(), mEffectChains.size(), this); + if (mEffectChains.size() == 1) { + mEffectChains.removeAt(0); + } + return 0; +} + +}; // namespace android diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h new file mode 100644 index 0000000..06a1c8c --- /dev/null +++ b/services/audioflinger/Threads.h @@ -0,0 +1,801 @@ +/* +** +** Copyright 2012, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef INCLUDING_FROM_AUDIOFLINGER_H + #error This header file should only be included from AudioFlinger.h +#endif + +class ThreadBase : public Thread { +public: + +#include "TrackBase.h" + + enum type_t { + MIXER, // Thread class is MixerThread + DIRECT, // Thread class is DirectOutputThread + DUPLICATING, // Thread class is DuplicatingThread + RECORD // Thread class is RecordThread + }; + + ThreadBase(const sp& audioFlinger, audio_io_handle_t id, + audio_devices_t outDevice, audio_devices_t inDevice, type_t type); + virtual ~ThreadBase(); + + void dumpBase(int fd, const Vector& args); + void dumpEffectChains(int fd, const Vector& args); + + void clearPowerManager(); + + // base for record and playback + enum { + CFG_EVENT_IO, + CFG_EVENT_PRIO + }; + + class ConfigEvent { + public: + ConfigEvent(int type) : mType(type) {} + virtual ~ConfigEvent() {} + + int type() const { return mType; } + + virtual void dump(char *buffer, size_t size) = 0; + + private: + const int mType; + }; + + class IoConfigEvent : public ConfigEvent { + public: + IoConfigEvent(int event, int param) : + ConfigEvent(CFG_EVENT_IO), mEvent(event), mParam(event) {} + virtual ~IoConfigEvent() {} + + int event() const { return mEvent; } + int param() const { return mParam; } + + virtual void dump(char *buffer, size_t size) { + snprintf(buffer, size, "IO event: event %d, param %d\n", mEvent, mParam); + } + + private: + const int mEvent; + const int mParam; + }; + + class PrioConfigEvent : public ConfigEvent { + public: + PrioConfigEvent(pid_t pid, pid_t tid, int32_t prio) : + ConfigEvent(CFG_EVENT_PRIO), mPid(pid), mTid(tid), mPrio(prio) {} + virtual ~PrioConfigEvent() {} + + pid_t pid() const { return mPid; } + pid_t tid() const { return mTid; } + int32_t prio() const { return mPrio; } + + virtual void dump(char *buffer, size_t size) { + snprintf(buffer, size, "Prio event: pid %d, tid %d, prio %d\n", mPid, mTid, mPrio); + } + + private: + const pid_t mPid; + const pid_t mTid; + const int32_t mPrio; + }; + + + class PMDeathRecipient : public IBinder::DeathRecipient { + public: + PMDeathRecipient(const wp& thread) : mThread(thread) {} + virtual ~PMDeathRecipient() {} + + // IBinder::DeathRecipient + virtual void binderDied(const wp& who); + + private: + PMDeathRecipient(const PMDeathRecipient&); + PMDeathRecipient& operator = (const PMDeathRecipient&); + + wp mThread; + }; + + virtual status_t initCheck() const = 0; + + // static externally-visible + type_t type() const { return mType; } + audio_io_handle_t id() const { return mId;} + + // dynamic externally-visible + uint32_t sampleRate() const { return mSampleRate; } + uint32_t channelCount() const { return mChannelCount; } + audio_channel_mask_t channelMask() const { return mChannelMask; } + audio_format_t format() const { return mFormat; } + // Called by AudioFlinger::frameCount(audio_io_handle_t output) and effects, + // and returns the normal mix buffer's frame count. + size_t frameCount() const { return mNormalFrameCount; } + // Return's the HAL's frame count i.e. fast mixer buffer size. + size_t frameCountHAL() const { return mFrameCount; } + + // Should be "virtual status_t requestExitAndWait()" and override same + // method in Thread, but Thread::requestExitAndWait() is not yet virtual. 
+ void exit(); + virtual bool checkForNewParameters_l() = 0; + virtual status_t setParameters(const String8& keyValuePairs); + virtual String8 getParameters(const String8& keys) = 0; + virtual void audioConfigChanged_l(int event, int param = 0) = 0; + void sendIoConfigEvent(int event, int param = 0); + void sendIoConfigEvent_l(int event, int param = 0); + void sendPrioConfigEvent_l(pid_t pid, pid_t tid, int32_t prio); + void processConfigEvents(); + + // see note at declaration of mStandby, mOutDevice and mInDevice + bool standby() const { return mStandby; } + audio_devices_t outDevice() const { return mOutDevice; } + audio_devices_t inDevice() const { return mInDevice; } + + virtual audio_stream_t* stream() const = 0; + + sp createEffect_l( + const sp& client, + const sp& effectClient, + int32_t priority, + int sessionId, + effect_descriptor_t *desc, + int *enabled, + status_t *status); + void disconnectEffect(const sp< EffectModule>& effect, + EffectHandle *handle, + bool unpinIfLast); + + // return values for hasAudioSession (bit field) + enum effect_state { + EFFECT_SESSION = 0x1, // the audio session corresponds to at least one + // effect + TRACK_SESSION = 0x2 // the audio session corresponds to at least one + // track + }; + + // get effect chain corresponding to session Id. + sp getEffectChain(int sessionId); + // same as getEffectChain() but must be called with ThreadBase mutex locked + sp getEffectChain_l(int sessionId) const; + // add an effect chain to the chain list (mEffectChains) + virtual status_t addEffectChain_l(const sp& chain) = 0; + // remove an effect chain from the chain list (mEffectChains) + virtual size_t removeEffectChain_l(const sp& chain) = 0; + // lock all effect chains Mutexes. Must be called before releasing the + // ThreadBase mutex before processing the mixer and effects. This guarantees the + // integrity of the chains during the process. + // Also sets the parameter 'effectChains' to current value of mEffectChains. + void lockEffectChains_l(Vector< sp >& effectChains); + // unlock effect chains after process + void unlockEffectChains(const Vector< sp >& effectChains); + // set audio mode to all effect chains + void setMode(audio_mode_t mode); + // get effect module with corresponding ID on specified audio session + sp getEffect(int sessionId, int effectId); + sp getEffect_l(int sessionId, int effectId); + // add and effect module. Also creates the effect chain is none exists for + // the effects audio session + status_t addEffect_l(const sp< EffectModule>& effect); + // remove and effect module. Also removes the effect chain is this was the last + // effect + void removeEffect_l(const sp< EffectModule>& effect); + // detach all tracks connected to an auxiliary effect + virtual void detachAuxEffect_l(int effectId) {} + // returns either EFFECT_SESSION if effects on this audio session exist in one + // chain, or TRACK_SESSION if tracks on this audio session exist, or both + virtual uint32_t hasAudioSession(int sessionId) const = 0; + // the value returned by default implementation is not important as the + // strategy is only meaningful for PlaybackThread which implements this method + virtual uint32_t getStrategyForSession_l(int sessionId) { return 0; } + + // suspend or restore effect according to the type of effect passed. 
a NULL + // type pointer means suspend all effects in the session + void setEffectSuspended(const effect_uuid_t *type, + bool suspend, + int sessionId = AUDIO_SESSION_OUTPUT_MIX); + // check if some effects must be suspended/restored when an effect is enabled + // or disabled + void checkSuspendOnEffectEnabled(const sp& effect, + bool enabled, + int sessionId = AUDIO_SESSION_OUTPUT_MIX); + void checkSuspendOnEffectEnabled_l(const sp& effect, + bool enabled, + int sessionId = AUDIO_SESSION_OUTPUT_MIX); + + virtual status_t setSyncEvent(const sp& event) = 0; + virtual bool isValidSyncEvent(const sp& event) const = 0; + + + mutable Mutex mLock; + +protected: + + // entry describing an effect being suspended in mSuspendedSessions keyed vector + class SuspendedSessionDesc : public RefBase { + public: + SuspendedSessionDesc() : mRefCount(0) {} + + int mRefCount; // number of active suspend requests + effect_uuid_t mType; // effect type UUID + }; + + void acquireWakeLock(); + void acquireWakeLock_l(); + void releaseWakeLock(); + void releaseWakeLock_l(); + void setEffectSuspended_l(const effect_uuid_t *type, + bool suspend, + int sessionId); + // updated mSuspendedSessions when an effect suspended or restored + void updateSuspendedSessions_l(const effect_uuid_t *type, + bool suspend, + int sessionId); + // check if some effects must be suspended when an effect chain is added + void checkSuspendOnAddEffectChain_l(const sp& chain); + + virtual void preExit() { } + + friend class AudioFlinger; // for mEffectChains + + const type_t mType; + + // Used by parameters, config events, addTrack_l, exit + Condition mWaitWorkCV; + + const sp mAudioFlinger; + uint32_t mSampleRate; + size_t mFrameCount; // output HAL, direct output, record + size_t mNormalFrameCount; // normal mixer and effects + audio_channel_mask_t mChannelMask; + uint16_t mChannelCount; + size_t mFrameSize; + audio_format_t mFormat; + + // Parameter sequence by client: binder thread calling setParameters(): + // 1. Lock mLock + // 2. Append to mNewParameters + // 3. mWaitWorkCV.signal + // 4. mParamCond.waitRelative with timeout + // 5. read mParamStatus + // 6. mWaitWorkCV.signal + // 7. Unlock + // + // Parameter sequence by server: threadLoop calling checkForNewParameters_l(): + // 1. Lock mLock + // 2. If there is an entry in mNewParameters proceed ... + // 2. Read first entry in mNewParameters + // 3. Process + // 4. Remove first entry from mNewParameters + // 5. Set mParamStatus + // 6. mParamCond.signal + // 7. mWaitWorkCV.wait with timeout (this is to avoid overwriting mParamStatus) + // 8. Unlock + Condition mParamCond; + Vector mNewParameters; + status_t mParamStatus; + + Vector mConfigEvents; + + // These fields are written and read by thread itself without lock or barrier, + // and read by other threads without lock or barrier via standby() , outDevice() + // and inDevice(). + // Because of the absence of a lock or barrier, any other thread that reads + // these fields must use the information in isolation, or be prepared to deal + // with possibility that it might be inconsistent with other information. + bool mStandby; // Whether thread is currently in standby. 
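
The "Parameter sequence" comment above describes a request/acknowledge handshake between a binder thread and the playback or record thread, built on one mutex and two condition variables, with a timed wait on each side so that neither peer can block forever on the other. A standalone sketch of the same pattern, using std::mutex and std::condition_variable purely for illustration (the code in this patch uses Android's Mutex/Condition and its own timeout constant):

    #include <chrono>
    #include <condition_variable>
    #include <deque>
    #include <mutex>
    #include <string>

    struct ParamExchange {
        std::mutex lock;
        std::condition_variable workCV;    // plays the role of mWaitWorkCV
        std::condition_variable paramCV;   // plays the role of mParamCond
        std::deque<std::string> pending;   // plays the role of mNewParameters
        int lastStatus = 0;                // plays the role of mParamStatus

        // Client side (binder thread): queue a request, wake the thread, wait for status.
        int setParameters(const std::string& kvPairs) {
            std::unique_lock<std::mutex> ul(lock);
            pending.push_back(kvPairs);
            workCV.notify_one();
            // Timed wait so a wedged thread cannot block the client forever.
            paramCV.wait_for(ul, std::chrono::seconds(2));
            int status = lastStatus;
            workCV.notify_one();           // let the thread continue past its timed wait
            return status;
        }

        // Server side (thread loop): consume one request and publish its status.
        void checkForNewParameters() {
            std::unique_lock<std::mutex> ul(lock);
            if (pending.empty()) return;
            pending.pop_front();           // process the entry here
            lastStatus = 0;                // publish the result
            paramCV.notify_one();
            // Timed wait so the status is not overwritten before the client reads it,
            // even if the client already timed out and will never signal back.
            workCV.wait_for(ul, std::chrono::seconds(2));
        }
    };
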
+ audio_devices_t mOutDevice; // output device + audio_devices_t mInDevice; // input device + audio_source_t mAudioSource; // (see audio.h, audio_source_t) + + const audio_io_handle_t mId; + Vector< sp > mEffectChains; + + static const int kNameLength = 16; // prctl(PR_SET_NAME) limit + char mName[kNameLength]; + sp mPowerManager; + sp mWakeLockToken; + const sp mDeathRecipient; + // list of suspended effects per session and per type. The first vector is + // keyed by session ID, the second by type UUID timeLow field + KeyedVector< int, KeyedVector< int, sp > > + mSuspendedSessions; +}; + +// --- PlaybackThread --- +class PlaybackThread : public ThreadBase { +public: + +#include "PlaybackTracks.h" + + enum mixer_state { + MIXER_IDLE, // no active tracks + MIXER_TRACKS_ENABLED, // at least one active track, but no track has any data ready + MIXER_TRACKS_READY // at least one active track, and at least one track has data + // standby mode does not have an enum value + // suspend by audio policy manager is orthogonal to mixer state + }; + + PlaybackThread(const sp& audioFlinger, AudioStreamOut* output, + audio_io_handle_t id, audio_devices_t device, type_t type); + virtual ~PlaybackThread(); + + void dump(int fd, const Vector& args); + + // Thread virtuals + virtual status_t readyToRun(); + virtual bool threadLoop(); + + // RefBase + virtual void onFirstRef(); + +protected: + // Code snippets that were lifted up out of threadLoop() + virtual void threadLoop_mix() = 0; + virtual void threadLoop_sleepTime() = 0; + virtual void threadLoop_write(); + virtual void threadLoop_standby(); + virtual void threadLoop_removeTracks(const Vector< sp >& tracksToRemove); + + // prepareTracks_l reads and writes mActiveTracks, and returns + // the pending set of tracks to remove via Vector 'tracksToRemove'. The caller + // is responsible for clearing or destroying this Vector later on, when it + // is safe to do so. That will drop the final ref count and destroy the tracks. + virtual mixer_state prepareTracks_l(Vector< sp > *tracksToRemove) = 0; + + // ThreadBase virtuals + virtual void preExit(); + +public: + + virtual status_t initCheck() const { return (mOutput == NULL) ? 
NO_INIT : NO_ERROR; } + + // return estimated latency in milliseconds, as reported by HAL + uint32_t latency() const; + // same, but lock must already be held + uint32_t latency_l() const; + + void setMasterVolume(float value); + void setMasterMute(bool muted); + + void setStreamVolume(audio_stream_type_t stream, float value); + void setStreamMute(audio_stream_type_t stream, bool muted); + + float streamVolume(audio_stream_type_t stream) const; + + sp createTrack_l( + const sp& client, + audio_stream_type_t streamType, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + const sp& sharedBuffer, + int sessionId, + IAudioFlinger::track_flags_t *flags, + pid_t tid, + status_t *status); + + AudioStreamOut* getOutput() const; + AudioStreamOut* clearOutput(); + virtual audio_stream_t* stream() const; + + // a very large number of suspend() will eventually wraparound, but unlikely + void suspend() { (void) android_atomic_inc(&mSuspended); } + void restore() + { + // if restore() is done without suspend(), get back into + // range so that the next suspend() will operate correctly + if (android_atomic_dec(&mSuspended) <= 0) { + android_atomic_release_store(0, &mSuspended); + } + } + bool isSuspended() const + { return android_atomic_acquire_load(&mSuspended) > 0; } + + virtual String8 getParameters(const String8& keys); + virtual void audioConfigChanged_l(int event, int param = 0); + status_t getRenderPosition(size_t *halFrames, size_t *dspFrames); + int16_t *mixBuffer() const { return mMixBuffer; }; + + virtual void detachAuxEffect_l(int effectId); + status_t attachAuxEffect(const sp track, + int EffectId); + status_t attachAuxEffect_l(const sp track, + int EffectId); + + virtual status_t addEffectChain_l(const sp& chain); + virtual size_t removeEffectChain_l(const sp& chain); + virtual uint32_t hasAudioSession(int sessionId) const; + virtual uint32_t getStrategyForSession_l(int sessionId); + + + virtual status_t setSyncEvent(const sp& event); + virtual bool isValidSyncEvent(const sp& event) const; + void invalidateTracks(audio_stream_type_t streamType); + + +protected: + int16_t* mMixBuffer; + + // suspend count, > 0 means suspended. While suspended, the thread continues to pull from + // tracks and mix, but doesn't write to HAL. A2DP and SCO HAL implementations can't handle + // concurrent use of both of them, so Audio Policy Service suspends one of the threads to + // workaround that restriction. + // 'volatile' means accessed via atomic operations and no lock. + volatile int32_t mSuspended; + + // FIXME overflows every 6+ hours at 44.1 kHz stereo 16-bit samples + // mFramesWritten would be better, or 64-bit even better + size_t mBytesWritten; +private: + // mMasterMute is in both PlaybackThread and in AudioFlinger. When a + // PlaybackThread needs to find out if master-muted, it checks it's local + // copy rather than the one in AudioFlinger. This optimization saves a lock. + bool mMasterMute; + void setMasterMute_l(bool muted) { mMasterMute = muted; } +protected: + SortedVector< wp > mActiveTracks; // FIXME check if this could be sp<> + + // Allocate a track name for a given channel mask. + // Returns name >= 0 if successful, -1 on failure. 
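
The suspend()/restore()/isSuspended() trio above implements a lock-free suspend count that clamps back to zero on underflow, so an unbalanced restore() cannot leave the thread counting negative and break the next suspend(). An equivalent sketch with std::atomic, for illustration only (the patch itself uses the android_atomic_* primitives and keeps the same clamping behavior):

    #include <atomic>
    #include <cstdint>

    class SuspendCount {
    public:
        void suspend() { mSuspended.fetch_add(1, std::memory_order_acq_rel); }

        void restore() {
            // fetch_sub returns the previous value; if restore() is called without a
            // matching suspend(), clamp back to 0 so the next suspend() still works.
            if (mSuspended.fetch_sub(1, std::memory_order_acq_rel) <= 0) {
                mSuspended.store(0, std::memory_order_release);
            }
        }

        bool isSuspended() const {
            return mSuspended.load(std::memory_order_acquire) > 0;
        }

    private:
        std::atomic<int32_t> mSuspended{0};
    };
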
+ virtual int getTrackName_l(audio_channel_mask_t channelMask, int sessionId) = 0; + virtual void deleteTrackName_l(int name) = 0; + + // Time to sleep between cycles when: + virtual uint32_t activeSleepTimeUs() const; // mixer state MIXER_TRACKS_ENABLED + virtual uint32_t idleSleepTimeUs() const = 0; // mixer state MIXER_IDLE + virtual uint32_t suspendSleepTimeUs() const = 0; // audio policy manager suspended us + // No sleep when mixer state == MIXER_TRACKS_READY; relies on audio HAL stream->write() + // No sleep in standby mode; waits on a condition + + // Code snippets that are temporarily lifted up out of threadLoop() until the merge + void checkSilentMode_l(); + + // Non-trivial for DUPLICATING only + virtual void saveOutputTracks() { } + virtual void clearOutputTracks() { } + + // Cache various calculated values, at threadLoop() entry and after a parameter change + virtual void cacheParameters_l(); + + virtual uint32_t correctLatency_l(uint32_t latency) const; + +private: + + friend class AudioFlinger; // for numerous + + PlaybackThread(const Client&); + PlaybackThread& operator = (const PlaybackThread&); + + status_t addTrack_l(const sp& track); + void destroyTrack_l(const sp& track); + void removeTrack_l(const sp& track); + + void readOutputParameters(); + + virtual void dumpInternals(int fd, const Vector& args); + void dumpTracks(int fd, const Vector& args); + + SortedVector< sp > mTracks; + // mStreamTypes[] uses 1 additional stream type internally for the OutputTrack used by + // DuplicatingThread + stream_type_t mStreamTypes[AUDIO_STREAM_CNT + 1]; + AudioStreamOut *mOutput; + + float mMasterVolume; + nsecs_t mLastWriteTime; + int mNumWrites; + int mNumDelayedWrites; + bool mInWrite; + + // FIXME rename these former local variables of threadLoop to standard "m" names + nsecs_t standbyTime; + size_t mixBufferSize; + + // cached copies of activeSleepTimeUs() and idleSleepTimeUs() made by cacheParameters_l() + uint32_t activeSleepTime; + uint32_t idleSleepTime; + + uint32_t sleepTime; + + // mixer status returned by prepareTracks_l() + mixer_state mMixerStatus; // current cycle + // previous cycle when in prepareTracks_l() + mixer_state mMixerStatusIgnoringFastTracks; + // FIXME or a separate ready state per track + + // FIXME move these declarations into the specific sub-class that needs them + // MIXER only + uint32_t sleepTimeShift; + + // same as AudioFlinger::mStandbyTimeInNsecs except for DIRECT which uses a shorter value + nsecs_t standbyDelay; + + // MIXER only + nsecs_t maxPeriod; + + // DUPLICATING only + uint32_t writeFrames; + +private: + // The HAL output sink is treated as non-blocking, but current implementation is blocking + sp mOutputSink; + // If a fast mixer is present, the blocking pipe sink, otherwise clear + sp mPipeSink; + // The current sink for the normal mixer to write it's (sub)mix, mOutputSink or mPipeSink + sp mNormalSink; + // For dumpsys + sp mTeeSink; + sp mTeeSource; + uint32_t mScreenState; // cached copy of gScreenState +public: + virtual bool hasFastMixer() const = 0; + virtual FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex) const + { FastTrackUnderruns dummy; return dummy; } + +protected: + // accessed by both binder threads and within threadLoop(), lock on mutex needed + unsigned mFastTrackAvailMask; // bit i set if fast track [i] is available + +}; + +class MixerThread : public PlaybackThread { +public: + MixerThread(const sp& audioFlinger, + AudioStreamOut* output, + audio_io_handle_t id, + audio_devices_t device, + type_t type = 
MIXER); + virtual ~MixerThread(); + + // Thread virtuals + + virtual bool checkForNewParameters_l(); + virtual void dumpInternals(int fd, const Vector& args); + +protected: + virtual mixer_state prepareTracks_l(Vector< sp > *tracksToRemove); + virtual int getTrackName_l(audio_channel_mask_t channelMask, int sessionId); + virtual void deleteTrackName_l(int name); + virtual uint32_t idleSleepTimeUs() const; + virtual uint32_t suspendSleepTimeUs() const; + virtual void cacheParameters_l(); + + // threadLoop snippets + virtual void threadLoop_write(); + virtual void threadLoop_standby(); + virtual void threadLoop_mix(); + virtual void threadLoop_sleepTime(); + virtual void threadLoop_removeTracks(const Vector< sp >& tracksToRemove); + virtual uint32_t correctLatency_l(uint32_t latency) const; + + AudioMixer* mAudioMixer; // normal mixer +private: + // one-time initialization, no locks required + FastMixer* mFastMixer; // non-NULL if there is also a fast mixer + sp mAudioWatchdog; // non-0 if there is an audio watchdog thread + + // contents are not guaranteed to be consistent, no locks required + FastMixerDumpState mFastMixerDumpState; +#ifdef STATE_QUEUE_DUMP + StateQueueObserverDump mStateQueueObserverDump; + StateQueueMutatorDump mStateQueueMutatorDump; +#endif + AudioWatchdogDump mAudioWatchdogDump; + + // accessible only within the threadLoop(), no locks required + // mFastMixer->sq() // for mutating and pushing state + int32_t mFastMixerFutex; // for cold idle + +public: + virtual bool hasFastMixer() const { return mFastMixer != NULL; } + virtual FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex) const { + ALOG_ASSERT(fastIndex < FastMixerState::kMaxFastTracks); + return mFastMixerDumpState.mTracks[fastIndex].mUnderruns; + } +}; + +class DirectOutputThread : public PlaybackThread { +public: + + DirectOutputThread(const sp& audioFlinger, AudioStreamOut* output, + audio_io_handle_t id, audio_devices_t device); + virtual ~DirectOutputThread(); + + // Thread virtuals + + virtual bool checkForNewParameters_l(); + +protected: + virtual int getTrackName_l(audio_channel_mask_t channelMask, int sessionId); + virtual void deleteTrackName_l(int name); + virtual uint32_t activeSleepTimeUs() const; + virtual uint32_t idleSleepTimeUs() const; + virtual uint32_t suspendSleepTimeUs() const; + virtual void cacheParameters_l(); + + // threadLoop snippets + virtual mixer_state prepareTracks_l(Vector< sp > *tracksToRemove); + virtual void threadLoop_mix(); + virtual void threadLoop_sleepTime(); + +private: + // volumes last sent to audio HAL with stream->set_volume() + float mLeftVolFloat; + float mRightVolFloat; + + // prepareTracks_l() tells threadLoop_mix() the name of the single active track + sp mActiveTrack; +public: + virtual bool hasFastMixer() const { return false; } +}; + +class DuplicatingThread : public MixerThread { +public: + DuplicatingThread(const sp& audioFlinger, MixerThread* mainThread, + audio_io_handle_t id); + virtual ~DuplicatingThread(); + + // Thread virtuals + void addOutputTrack(MixerThread* thread); + void removeOutputTrack(MixerThread* thread); + uint32_t waitTimeMs() const { return mWaitTimeMs; } +protected: + virtual uint32_t activeSleepTimeUs() const; + +private: + bool outputsReady(const SortedVector< sp > &outputTracks); +protected: + // threadLoop snippets + virtual void threadLoop_mix(); + virtual void threadLoop_sleepTime(); + virtual void threadLoop_write(); + virtual void threadLoop_standby(); + virtual void cacheParameters_l(); + +private: + // called from 
threadLoop, addOutputTrack, removeOutputTrack + virtual void updateWaitTime_l(); +protected: + virtual void saveOutputTracks(); + virtual void clearOutputTracks(); +private: + + uint32_t mWaitTimeMs; + SortedVector < sp > outputTracks; + SortedVector < sp > mOutputTracks; +public: + virtual bool hasFastMixer() const { return false; } +}; + + +// record thread +class RecordThread : public ThreadBase, public AudioBufferProvider + // derives from AudioBufferProvider interface for use by resampler +{ +public: + +#include "RecordTracks.h" + + RecordThread(const sp& audioFlinger, + AudioStreamIn *input, + uint32_t sampleRate, + audio_channel_mask_t channelMask, + audio_io_handle_t id, + audio_devices_t device, + const sp& teeSink); + virtual ~RecordThread(); + + // no addTrack_l ? + void destroyTrack_l(const sp& track); + void removeTrack_l(const sp& track); + + void dumpInternals(int fd, const Vector& args); + void dumpTracks(int fd, const Vector& args); + + // Thread virtuals + virtual bool threadLoop(); + virtual status_t readyToRun(); + + // RefBase + virtual void onFirstRef(); + + virtual status_t initCheck() const { return (mInput == NULL) ? NO_INIT : NO_ERROR; } + sp createRecordTrack_l( + const sp& client, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + int sessionId, + IAudioFlinger::track_flags_t flags, + pid_t tid, + status_t *status); + + status_t start(RecordTrack* recordTrack, + AudioSystem::sync_event_t event, + int triggerSession); + + // ask the thread to stop the specified track, and + // return true if the caller should then do it's part of the stopping process + bool stop_l(RecordTrack* recordTrack); + + void dump(int fd, const Vector& args); + AudioStreamIn* clearInput(); + virtual audio_stream_t* stream() const; + + // AudioBufferProvider interface + virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts); + virtual void releaseBuffer(AudioBufferProvider::Buffer* buffer); + + virtual bool checkForNewParameters_l(); + virtual String8 getParameters(const String8& keys); + virtual void audioConfigChanged_l(int event, int param = 0); + void readInputParameters(); + virtual unsigned int getInputFramesLost(); + + virtual status_t addEffectChain_l(const sp& chain); + virtual size_t removeEffectChain_l(const sp& chain); + virtual uint32_t hasAudioSession(int sessionId) const; + + // Return the set of unique session IDs across all tracks. + // The keys are the session IDs, and the associated values are meaningless. + // FIXME replace by Set [and implement Bag/Multiset for other uses]. 
+ KeyedVector sessionIds() const; + + virtual status_t setSyncEvent(const sp& event); + virtual bool isValidSyncEvent(const sp& event) const; + + static void syncStartEventCallback(const wp& event); + void handleSyncStartEvent(const sp& event); + +private: + void clearSyncStartEvent(); + + // Enter standby if not already in standby, and set mStandby flag + void standby(); + + // Call the HAL standby method unconditionally, and don't change mStandby flag + void inputStandBy(); + + AudioStreamIn *mInput; + SortedVector < sp > mTracks; + // mActiveTrack has dual roles: it indicates the current active track, and + // is used together with mStartStopCond to indicate start()/stop() progress + sp mActiveTrack; + Condition mStartStopCond; + AudioResampler *mResampler; + int32_t *mRsmpOutBuffer; + int16_t *mRsmpInBuffer; + size_t mRsmpInIndex; + size_t mInputBytes; + const uint32_t mReqChannelCount; + const uint32_t mReqSampleRate; + ssize_t mBytesRead; + // sync event triggering actual audio capture. Frames read before this event will + // be dropped and therefore not read by the application. + sp mSyncStartEvent; + // number of captured frames to drop after the start sync event has been received. + // when < 0, maximum frames to drop before starting capture even if sync event is + // not received + ssize_t mFramestoDrop; + + // For dumpsys + const sp mTeeSink; +}; diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h new file mode 100644 index 0000000..17de49b --- /dev/null +++ b/services/audioflinger/TrackBase.h @@ -0,0 +1,139 @@ +/* +** +** Copyright 2012, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef INCLUDING_FROM_AUDIOFLINGER_H + #error This header file should only be included from AudioFlinger.h +#endif + +// base for record and playback +class TrackBase : public ExtendedAudioBufferProvider, public RefBase { + +public: + enum track_state { + IDLE, + TERMINATED, + FLUSHED, + STOPPED, + // next 2 states are currently used for fast tracks only + STOPPING_1, // waiting for first underrun + STOPPING_2, // waiting for presentation complete + RESUMING, + ACTIVE, + PAUSING, + PAUSED + }; + + TrackBase(ThreadBase *thread, + const sp& client, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + const sp& sharedBuffer, + int sessionId); + virtual ~TrackBase(); + + virtual status_t start(AudioSystem::sync_event_t event, + int triggerSession) = 0; + virtual void stop() = 0; + sp getCblk() const { return mCblkMemory; } + audio_track_cblk_t* cblk() const { return mCblk; } + int sessionId() const { return mSessionId; } + virtual status_t setSyncEvent(const sp& event); + +protected: + TrackBase(const TrackBase&); + TrackBase& operator = (const TrackBase&); + + // AudioBufferProvider interface + virtual status_t getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts) = 0; + virtual void releaseBuffer(AudioBufferProvider::Buffer* buffer); + + // ExtendedAudioBufferProvider interface is only needed for Track, + // but putting it in TrackBase avoids the complexity of virtual inheritance + virtual size_t framesReady() const { return SIZE_MAX; } + + audio_format_t format() const { + return mFormat; + } + + uint32_t channelCount() const { return mChannelCount; } + + audio_channel_mask_t channelMask() const { return mChannelMask; } + + uint32_t sampleRate() const; // FIXME inline after cblk sr moved + + // Return a pointer to the start of a contiguous slice of the track buffer. + // Parameter 'offset' is the requested start position, expressed in + // monotonically increasing frame units relative to the track epoch. + // Parameter 'frames' is the requested length, also in frame units. + // Always returns non-NULL. It is the caller's responsibility to + // verify that this will be successful; the result of calling this + // function with invalid 'offset' or 'frames' is undefined. 
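The accessor documented above and declared just below is implemented later in Tracks.cpp; stripped of its range assertions, the mapping it performs is only a little pointer arithmetic. A stand-alone paraphrase with hypothetical names, not the exact AOSP code:

#include <cstddef>
#include <cstdint>

// 'offset' is the monotonic frame index; 'serverBase' is the value of that
// index which corresponds to the start of the buffer on the current wrap.
static inline void* sliceStart(void* buffer, uint32_t offset,
                               uint32_t serverBase, size_t frameSize) {
    return static_cast<uint8_t*>(buffer) + (offset - serverBase) * frameSize;
}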
+ void* getBuffer(uint32_t offset, uint32_t frames) const; + + bool isStopped() const { + return (mState == STOPPED || mState == FLUSHED); + } + + // for fast tracks only + bool isStopping() const { + return mState == STOPPING_1 || mState == STOPPING_2; + } + bool isStopping_1() const { + return mState == STOPPING_1; + } + bool isStopping_2() const { + return mState == STOPPING_2; + } + + bool isTerminated() const { + return mState == TERMINATED; + } + + bool step(); // mStepCount is an implicit input + void reset(); + + virtual bool isOut() const = 0; // true for Track and TimedTrack, false for RecordTrack, + // this could be a track type if needed later + + const wp mThread; + /*const*/ sp mClient; // see explanation at ~TrackBase() why not const + sp mCblkMemory; + audio_track_cblk_t* mCblk; + void* mBuffer; // start of track buffer, typically in shared memory + void* mBufferEnd; // &mBuffer[mFrameCount * frameSize], where frameSize + // is based on mChannelCount and 16-bit samples + uint32_t mStepCount; // saves AudioBufferProvider::Buffer::frameCount as of + // time of releaseBuffer() for later use by step() + // we don't really need a lock for these + track_state mState; + const uint32_t mSampleRate; // initial sample rate only; for tracks which + // support dynamic rates, the current value is in control block + const audio_format_t mFormat; + const audio_channel_mask_t mChannelMask; + const uint8_t mChannelCount; + const size_t mFrameSize; // AudioFlinger's view of frame size in shared memory, + // where for AudioTrack (but not AudioRecord), + // 8-bit PCM samples are stored as 16-bit + const size_t mFrameCount;// size of track buffer given at createTrack() or + // openRecord(), and then adjusted as needed + + bool mStepServerFailed; + const int mSessionId; + Vector < sp >mSyncEvents; +}; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp new file mode 100644 index 0000000..2c6ba8b --- /dev/null +++ b/services/audioflinger/Tracks.cpp @@ -0,0 +1,1789 @@ +/* +** +** Copyright 2012, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + + +#define LOG_TAG "AudioFlinger" +//#define LOG_NDEBUG 0 + +#include +#include +#include + +#include + +#include +#include + +#include "AudioMixer.h" +#include "AudioFlinger.h" +#include "ServiceUtilities.h" + +// ---------------------------------------------------------------------------- + +// Note: the following macro is used for extremely verbose logging message. In +// order to run with ALOG_ASSERT turned on, we need to have LOG_NDEBUG set to +// 0; but one side effect of this is to turn all LOGV's as well. Some messages +// are so verbose that we want to suppress them even when we have ALOG_ASSERT +// turned on. Do not uncomment the #def below unless you really know what you +// are doing and want to see all of the extremely verbose messages. +//#define VERY_VERY_VERBOSE_LOGGING +#ifdef VERY_VERY_VERBOSE_LOGGING +#define ALOGVV ALOGV +#else +#define ALOGVV(a...) 
do { } while(0) +#endif + +namespace android { + +// ---------------------------------------------------------------------------- +// TrackBase +// ---------------------------------------------------------------------------- + +// TrackBase constructor must be called with AudioFlinger::mLock held +AudioFlinger::ThreadBase::TrackBase::TrackBase( + ThreadBase *thread, + const sp& client, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + const sp& sharedBuffer, + int sessionId) + : RefBase(), + mThread(thread), + mClient(client), + mCblk(NULL), + // mBuffer + // mBufferEnd + mStepCount(0), + mState(IDLE), + mSampleRate(sampleRate), + mFormat(format), + mChannelMask(channelMask), + mChannelCount(popcount(channelMask)), + mFrameSize(audio_is_linear_pcm(format) ? + mChannelCount * audio_bytes_per_sample(format) : sizeof(int8_t)), + mFrameCount(frameCount), + mStepServerFailed(false), + mSessionId(sessionId) +{ + // client == 0 implies sharedBuffer == 0 + ALOG_ASSERT(!(client == 0 && sharedBuffer != 0)); + + ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), + sharedBuffer->size()); + + // ALOGD("Creating track with %d buffers @ %d bytes", bufferCount, bufferSize); + size_t size = sizeof(audio_track_cblk_t); + size_t bufferSize = frameCount * mFrameSize; + if (sharedBuffer == 0) { + size += bufferSize; + } + + if (client != 0) { + mCblkMemory = client->heap()->allocate(size); + if (mCblkMemory != 0) { + mCblk = static_cast(mCblkMemory->pointer()); + // can't assume mCblk != NULL + } else { + ALOGE("not enough memory for AudioTrack size=%u", size); + client->heap()->dump("AudioTrack"); + return; + } + } else { + mCblk = (audio_track_cblk_t *)(new uint8_t[size]); + // assume mCblk != NULL + } + + // construct the shared structure in-place. + if (mCblk != NULL) { + new(mCblk) audio_track_cblk_t(); + // clear all buffers + mCblk->frameCount_ = frameCount; + mCblk->sampleRate = sampleRate; +// uncomment the following lines to quickly test 32-bit wraparound +// mCblk->user = 0xffff0000; +// mCblk->server = 0xffff0000; +// mCblk->userBase = 0xffff0000; +// mCblk->serverBase = 0xffff0000; + if (sharedBuffer == 0) { + mBuffer = (char*)mCblk + sizeof(audio_track_cblk_t); + memset(mBuffer, 0, bufferSize); + // Force underrun condition to avoid false underrun callback until first data is + // written to buffer (other flags are cleared) + mCblk->flags = CBLK_UNDERRUN; + } else { + mBuffer = sharedBuffer->pointer(); + } + mBufferEnd = (uint8_t *)mBuffer + bufferSize; + } +} + +AudioFlinger::ThreadBase::TrackBase::~TrackBase() +{ + if (mCblk != NULL) { + if (mClient == 0) { + delete mCblk; + } else { + mCblk->~audio_track_cblk_t(); // destroy our shared-structure. + } + } + mCblkMemory.clear(); // free the shared memory before releasing the heap it belongs to + if (mClient != 0) { + // Client destructor must run with AudioFlinger mutex locked + Mutex::Autolock _l(mClient->audioFlinger()->mLock); + // If the client's reference count drops to zero, the associated destructor + // must run with AudioFlinger lock held. Thus the explicit clear() rather than + // relying on the automatic clear() at end of scope. 
+ mClient.clear(); + } +} + +// AudioBufferProvider interface +// getNextBuffer() = 0; +// This implementation of releaseBuffer() is used by Track and RecordTrack, but not TimedTrack +void AudioFlinger::ThreadBase::TrackBase::releaseBuffer(AudioBufferProvider::Buffer* buffer) +{ + buffer->raw = NULL; + mStepCount = buffer->frameCount; + // FIXME See note at getNextBuffer() + (void) step(); // ignore return value of step() + buffer->frameCount = 0; +} + +bool AudioFlinger::ThreadBase::TrackBase::step() { + bool result; + audio_track_cblk_t* cblk = this->cblk(); + + result = cblk->stepServer(mStepCount, mFrameCount, isOut()); + if (!result) { + ALOGV("stepServer failed acquiring cblk mutex"); + mStepServerFailed = true; + } + return result; +} + +void AudioFlinger::ThreadBase::TrackBase::reset() { + audio_track_cblk_t* cblk = this->cblk(); + + cblk->user = 0; + cblk->server = 0; + cblk->userBase = 0; + cblk->serverBase = 0; + mStepServerFailed = false; + ALOGV("TrackBase::reset"); +} + +uint32_t AudioFlinger::ThreadBase::TrackBase::sampleRate() const { + return mCblk->sampleRate; +} + +void* AudioFlinger::ThreadBase::TrackBase::getBuffer(uint32_t offset, uint32_t frames) const { + audio_track_cblk_t* cblk = this->cblk(); + int8_t *bufferStart = (int8_t *)mBuffer + (offset-cblk->serverBase) * mFrameSize; + int8_t *bufferEnd = bufferStart + frames * mFrameSize; + + // Check validity of returned pointer in case the track control block would have been corrupted. + ALOG_ASSERT(!(bufferStart < mBuffer || bufferStart > bufferEnd || bufferEnd > mBufferEnd), + "TrackBase::getBuffer buffer out of range:\n" + " start: %p, end %p , mBuffer %p mBufferEnd %p\n" + " server %u, serverBase %u, user %u, userBase %u, frameSize %u", + bufferStart, bufferEnd, mBuffer, mBufferEnd, + cblk->server, cblk->serverBase, cblk->user, cblk->userBase, mFrameSize); + + return bufferStart; +} + +status_t AudioFlinger::ThreadBase::TrackBase::setSyncEvent(const sp& event) +{ + mSyncEvents.add(event); + return NO_ERROR; +} + +// ---------------------------------------------------------------------------- +// Playback +// ---------------------------------------------------------------------------- + +AudioFlinger::TrackHandle::TrackHandle(const sp& track) + : BnAudioTrack(), + mTrack(track) +{ +} + +AudioFlinger::TrackHandle::~TrackHandle() { + // just stop the track on deletion, associated resources + // will be freed from the main thread once all pending buffers have + // been played. Unless it's not in the active track list, in which + // case we free everything now... 
+ mTrack->destroy(); +} + +sp AudioFlinger::TrackHandle::getCblk() const { + return mTrack->getCblk(); +} + +status_t AudioFlinger::TrackHandle::start() { + return mTrack->start(); +} + +void AudioFlinger::TrackHandle::stop() { + mTrack->stop(); +} + +void AudioFlinger::TrackHandle::flush() { + mTrack->flush(); +} + +void AudioFlinger::TrackHandle::mute(bool e) { + mTrack->mute(e); +} + +void AudioFlinger::TrackHandle::pause() { + mTrack->pause(); +} + +status_t AudioFlinger::TrackHandle::attachAuxEffect(int EffectId) +{ + return mTrack->attachAuxEffect(EffectId); +} + +status_t AudioFlinger::TrackHandle::allocateTimedBuffer(size_t size, + sp* buffer) { + if (!mTrack->isTimedTrack()) + return INVALID_OPERATION; + + PlaybackThread::TimedTrack* tt = + reinterpret_cast(mTrack.get()); + return tt->allocateTimedBuffer(size, buffer); +} + +status_t AudioFlinger::TrackHandle::queueTimedBuffer(const sp& buffer, + int64_t pts) { + if (!mTrack->isTimedTrack()) + return INVALID_OPERATION; + + PlaybackThread::TimedTrack* tt = + reinterpret_cast(mTrack.get()); + return tt->queueTimedBuffer(buffer, pts); +} + +status_t AudioFlinger::TrackHandle::setMediaTimeTransform( + const LinearTransform& xform, int target) { + + if (!mTrack->isTimedTrack()) + return INVALID_OPERATION; + + PlaybackThread::TimedTrack* tt = + reinterpret_cast(mTrack.get()); + return tt->setMediaTimeTransform( + xform, static_cast(target)); +} + +status_t AudioFlinger::TrackHandle::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + return BnAudioTrack::onTransact(code, data, reply, flags); +} + +// ---------------------------------------------------------------------------- + +// Track constructor must be called with AudioFlinger::mLock and ThreadBase::mLock held +AudioFlinger::PlaybackThread::Track::Track( + PlaybackThread *thread, + const sp& client, + audio_stream_type_t streamType, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + const sp& sharedBuffer, + int sessionId, + IAudioFlinger::track_flags_t flags) + : TrackBase(thread, client, sampleRate, format, channelMask, frameCount, sharedBuffer, + sessionId), + mMute(false), + mFillingUpStatus(FS_INVALID), + // mRetryCount initialized later when needed + mSharedBuffer(sharedBuffer), + mStreamType(streamType), + mName(-1), // see note below + mMainBuffer(thread->mixBuffer()), + mAuxBuffer(NULL), + mAuxEffectId(0), mHasVolumeController(false), + mPresentationCompleteFrames(0), + mFlags(flags), + mFastIndex(-1), + mUnderrunCount(0), + mCachedVolume(1.0) +{ + if (mCblk != NULL) { + // to avoid leaking a track name, do not allocate one unless there is an mCblk + mName = thread->getTrackName_l(channelMask, sessionId); + mCblk->mName = mName; + if (mName < 0) { + ALOGE("no more track names available"); + return; + } + // only allocate a fast track index if we were able to allocate a normal track name + if (flags & IAudioFlinger::TRACK_FAST) { + ALOG_ASSERT(thread->mFastTrackAvailMask != 0); + int i = __builtin_ctz(thread->mFastTrackAvailMask); + ALOG_ASSERT(0 < i && i < (int)FastMixerState::kMaxFastTracks); + // FIXME This is too eager. We allocate a fast track index before the + // fast track becomes active. Since fast tracks are a scarce resource, + // this means we are potentially denying other more important fast tracks from + // being created. It would be better to allocate the index dynamically. 
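The free-slot search just above and the mask update just below rely on a small bitmask idiom. In isolation, with hypothetical names, it amounts to the following sketch:

#include <cassert>
#include <cstdint>

// Returns the lowest available slot index and marks it as taken.
static int allocateSlot(uint32_t& availMask) {
    assert(availMask != 0);                    // caller must guarantee a free slot
    const int i = __builtin_ctz(availMask);    // lowest set bit (GCC/Clang builtin)
    availMask &= ~(1u << i);                   // clear it: slot i is now in use
    return i;
}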
+ mFastIndex = i; + mCblk->mName = i; + // Read the initial underruns because this field is never cleared by the fast mixer + mObservedUnderruns = thread->getFastTrackUnderruns(i); + thread->mFastTrackAvailMask &= ~(1 << i); + } + } + ALOGV("Track constructor name %d, calling pid %d", mName, + IPCThreadState::self()->getCallingPid()); +} + +AudioFlinger::PlaybackThread::Track::~Track() +{ + ALOGV("PlaybackThread::Track destructor"); +} + +void AudioFlinger::PlaybackThread::Track::destroy() +{ + // NOTE: destroyTrack_l() can remove a strong reference to this Track + // by removing it from mTracks vector, so there is a risk that this Tracks's + // destructor is called. As the destructor needs to lock mLock, + // we must acquire a strong reference on this Track before locking mLock + // here so that the destructor is called only when exiting this function. + // On the other hand, as long as Track::destroy() is only called by + // TrackHandle destructor, the TrackHandle still holds a strong ref on + // this Track with its member mTrack. + sp keep(this); + { // scope for mLock + sp thread = mThread.promote(); + if (thread != 0) { + if (!isOutputTrack()) { + if (mState == ACTIVE || mState == RESUMING) { + AudioSystem::stopOutput(thread->id(), mStreamType, mSessionId); + +#ifdef ADD_BATTERY_DATA + // to track the speaker usage + addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop); +#endif + } + AudioSystem::releaseOutput(thread->id()); + } + Mutex::Autolock _l(thread->mLock); + PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); + playbackThread->destroyTrack_l(this); + } + } +} + +/*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result) +{ + result.append(" Name Client Type Fmt Chn mask Session StpCnt fCount S M F SRate " + "L dB R dB Server User Main buf Aux Buf Flags Underruns\n"); +} + +void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) +{ + uint32_t vlr = mCblk->getVolumeLR(); + if (isFastTrack()) { + sprintf(buffer, " F %2d", mFastIndex); + } else { + sprintf(buffer, " %4d", mName - AudioMixer::TRACK0); + } + track_state state = mState; + char stateChar; + switch (state) { + case IDLE: + stateChar = 'I'; + break; + case TERMINATED: + stateChar = 'T'; + break; + case STOPPING_1: + stateChar = 's'; + break; + case STOPPING_2: + stateChar = '5'; + break; + case STOPPED: + stateChar = 'S'; + break; + case RESUMING: + stateChar = 'R'; + break; + case ACTIVE: + stateChar = 'A'; + break; + case PAUSING: + stateChar = 'p'; + break; + case PAUSED: + stateChar = 'P'; + break; + case FLUSHED: + stateChar = 'F'; + break; + default: + stateChar = '?'; + break; + } + char nowInUnderrun; + switch (mObservedUnderruns.mBitFields.mMostRecent) { + case UNDERRUN_FULL: + nowInUnderrun = ' '; + break; + case UNDERRUN_PARTIAL: + nowInUnderrun = '<'; + break; + case UNDERRUN_EMPTY: + nowInUnderrun = '*'; + break; + default: + nowInUnderrun = '?'; + break; + } + snprintf(&buffer[7], size-7, " %6d %4u %3u 0x%08x %7u %6u %6u %1c %1d %1d %5u %5.2g %5.2g " + "0x%08x 0x%08x 0x%08x 0x%08x %#5x %9u%c\n", + (mClient == 0) ? 
getpid_cached : mClient->pid(), + mStreamType, + mFormat, + mChannelMask, + mSessionId, + mStepCount, + mFrameCount, + stateChar, + mMute, + mFillingUpStatus, + mCblk->sampleRate, + 20.0 * log10((vlr & 0xFFFF) / 4096.0), + 20.0 * log10((vlr >> 16) / 4096.0), + mCblk->server, + mCblk->user, + (int)mMainBuffer, + (int)mAuxBuffer, + mCblk->flags, + mUnderrunCount, + nowInUnderrun); +} + +// AudioBufferProvider interface +status_t AudioFlinger::PlaybackThread::Track::getNextBuffer( + AudioBufferProvider::Buffer* buffer, int64_t pts) +{ + audio_track_cblk_t* cblk = this->cblk(); + uint32_t framesReady; + uint32_t framesReq = buffer->frameCount; + + // Check if last stepServer failed, try to step now + if (mStepServerFailed) { + // FIXME When called by fast mixer, this takes a mutex with tryLock(). + // Since the fast mixer is higher priority than client callback thread, + // it does not result in priority inversion for client. + // But a non-blocking solution would be preferable to avoid + // fast mixer being unable to tryLock(), and + // to avoid the extra context switches if the client wakes up, + // discovers the mutex is locked, then has to wait for fast mixer to unlock. + if (!step()) goto getNextBuffer_exit; + ALOGV("stepServer recovered"); + mStepServerFailed = false; + } + + // FIXME Same as above + framesReady = cblk->framesReadyOut(); + + if (CC_LIKELY(framesReady)) { + uint32_t s = cblk->server; + uint32_t bufferEnd = cblk->serverBase + mFrameCount; + + bufferEnd = (cblk->loopEnd < bufferEnd) ? cblk->loopEnd : bufferEnd; + if (framesReq > framesReady) { + framesReq = framesReady; + } + if (framesReq > bufferEnd - s) { + framesReq = bufferEnd - s; + } + + buffer->raw = getBuffer(s, framesReq); + buffer->frameCount = framesReq; + return NO_ERROR; + } + +getNextBuffer_exit: + buffer->raw = NULL; + buffer->frameCount = 0; + ALOGV("getNextBuffer() no more data for track %d on thread %p", mName, mThread.unsafe_get()); + return NOT_ENOUGH_DATA; +} + +// Note that framesReady() takes a mutex on the control block using tryLock(). +// This could result in priority inversion if framesReady() is called by the normal mixer, +// as the normal mixer thread runs at lower +// priority than the client's callback thread: there is a short window within framesReady() +// during which the normal mixer could be preempted, and the client callback would block. +// Another problem can occur if framesReady() is called by the fast mixer: +// the tryLock() could block for up to 1 ms, and a sequence of these could delay fast mixer. +// FIXME Replace AudioTrackShared control block implementation by a non-blocking FIFO queue. 
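The FIXME above asks for a non-blocking replacement of the control block. A minimal sketch of the direction it suggests, a single-producer/single-consumer counter pair that the mixer can read without taking any lock; this is illustrative only, not the AudioTrackShared implementation:

#include <atomic>
#include <cstdint>

struct NonBlockingTrackIndices {
    std::atomic<uint32_t> written{0};   // frames written by the client (producer)
    std::atomic<uint32_t> read{0};      // frames consumed by the mixer (consumer)

    // Callable from the mixer thread without a mutex, so a preempted client
    // can never block it; unsigned subtraction handles index wrap-around.
    uint32_t framesReady() const {
        return written.load(std::memory_order_acquire) -
               read.load(std::memory_order_relaxed);
    }
};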
+size_t AudioFlinger::PlaybackThread::Track::framesReady() const { + return mCblk->framesReadyOut(); +} + +// Don't call for fast tracks; the framesReady() could result in priority inversion +bool AudioFlinger::PlaybackThread::Track::isReady() const { + if (mFillingUpStatus != FS_FILLING || isStopped() || isPausing()) { + return true; + } + + if (framesReady() >= mFrameCount || + (mCblk->flags & CBLK_FORCEREADY)) { + mFillingUpStatus = FS_FILLED; + android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags); + return true; + } + return false; +} + +status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t event, + int triggerSession) +{ + status_t status = NO_ERROR; + ALOGV("start(%d), calling pid %d session %d", + mName, IPCThreadState::self()->getCallingPid(), mSessionId); + + sp thread = mThread.promote(); + if (thread != 0) { + Mutex::Autolock _l(thread->mLock); + track_state state = mState; + // here the track could be either new, or restarted + // in both cases "unstop" the track + if (mState == PAUSED) { + mState = TrackBase::RESUMING; + ALOGV("PAUSED => RESUMING (%d) on thread %p", mName, this); + } else { + mState = TrackBase::ACTIVE; + ALOGV("? => ACTIVE (%d) on thread %p", mName, this); + } + + if (!isOutputTrack() && state != ACTIVE && state != RESUMING) { + thread->mLock.unlock(); + status = AudioSystem::startOutput(thread->id(), mStreamType, mSessionId); + thread->mLock.lock(); + +#ifdef ADD_BATTERY_DATA + // to track the speaker usage + if (status == NO_ERROR) { + addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStart); + } +#endif + } + if (status == NO_ERROR) { + PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); + playbackThread->addTrack_l(this); + } else { + mState = state; + triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); + } + } else { + status = BAD_VALUE; + } + return status; +} + +void AudioFlinger::PlaybackThread::Track::stop() +{ + ALOGV("stop(%d), calling pid %d", mName, IPCThreadState::self()->getCallingPid()); + sp thread = mThread.promote(); + if (thread != 0) { + Mutex::Autolock _l(thread->mLock); + track_state state = mState; + if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { + // If the track is not active (PAUSED and buffers full), flush buffers + PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); + if (playbackThread->mActiveTracks.indexOf(this) < 0) { + reset(); + mState = STOPPED; + } else if (!isFastTrack()) { + mState = STOPPED; + } else { + // prepareTracks_l() will set state to STOPPING_2 after next underrun, + // and then to STOPPED and reset() when presentation is complete + mState = STOPPING_1; + } + ALOGV("not stopping/stopped => stopping/stopped (%d) on thread %p", mName, + playbackThread); + } + if (!isOutputTrack() && (state == ACTIVE || state == RESUMING)) { + thread->mLock.unlock(); + AudioSystem::stopOutput(thread->id(), mStreamType, mSessionId); + thread->mLock.lock(); + +#ifdef ADD_BATTERY_DATA + // to track the speaker usage + addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop); +#endif + } + } +} + +void AudioFlinger::PlaybackThread::Track::pause() +{ + ALOGV("pause(%d), calling pid %d", mName, IPCThreadState::self()->getCallingPid()); + sp thread = mThread.promote(); + if (thread != 0) { + Mutex::Autolock _l(thread->mLock); + if (mState == ACTIVE || mState == RESUMING) { + mState = PAUSING; + ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); + if (!isOutputTrack()) { + thread->mLock.unlock(); + 
AudioSystem::stopOutput(thread->id(), mStreamType, mSessionId); + thread->mLock.lock(); + +#ifdef ADD_BATTERY_DATA + // to track the speaker usage + addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop); +#endif + } + } + } +} + +void AudioFlinger::PlaybackThread::Track::flush() +{ + ALOGV("flush(%d)", mName); + sp thread = mThread.promote(); + if (thread != 0) { + Mutex::Autolock _l(thread->mLock); + if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && + mState != PAUSING && mState != IDLE && mState != FLUSHED) { + return; + } + // No point remaining in PAUSED state after a flush => go to + // FLUSHED state + mState = FLUSHED; + // do not reset the track if it is still in the process of being stopped or paused. + // this will be done by prepareTracks_l() when the track is stopped. + // prepareTracks_l() will see mState == FLUSHED, then + // remove from active track list, reset(), and trigger presentation complete + PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); + if (playbackThread->mActiveTracks.indexOf(this) < 0) { + reset(); + } + } +} + +void AudioFlinger::PlaybackThread::Track::reset() +{ + // Do not reset twice to avoid discarding data written just after a flush and before + // the audioflinger thread detects the track is stopped. + if (!mResetDone) { + TrackBase::reset(); + // Force underrun condition to avoid false underrun callback until first data is + // written to buffer + android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags); + android_atomic_or(CBLK_UNDERRUN, &mCblk->flags); + mFillingUpStatus = FS_FILLING; + mResetDone = true; + if (mState == FLUSHED) { + mState = IDLE; + } + } +} + +void AudioFlinger::PlaybackThread::Track::mute(bool muted) +{ + mMute = muted; +} + +status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId) +{ + status_t status = DEAD_OBJECT; + sp thread = mThread.promote(); + if (thread != 0) { + PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); + sp af = mClient->audioFlinger(); + + Mutex::Autolock _l(af->mLock); + + sp srcThread = af->getEffectThread_l(AUDIO_SESSION_OUTPUT_MIX, EffectId); + + if (EffectId != 0 && srcThread != 0 && playbackThread != srcThread.get()) { + Mutex::Autolock _dl(playbackThread->mLock); + Mutex::Autolock _sl(srcThread->mLock); + sp chain = srcThread->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX); + if (chain == 0) { + return INVALID_OPERATION; + } + + sp effect = chain->getEffectFromId_l(EffectId); + if (effect == 0) { + return INVALID_OPERATION; + } + srcThread->removeEffect_l(effect); + playbackThread->addEffect_l(effect); + // removeEffect_l() has stopped the effect if it was active so it must be restarted + if (effect->state() == EffectModule::ACTIVE || + effect->state() == EffectModule::STOPPING) { + effect->start(); + } + + sp dstChain = effect->chain().promote(); + if (dstChain == 0) { + srcThread->addEffect_l(effect); + return INVALID_OPERATION; + } + AudioSystem::unregisterEffect(effect->id()); + AudioSystem::registerEffect(&effect->desc(), + srcThread->id(), + dstChain->strategy(), + AUDIO_SESSION_OUTPUT_MIX, + effect->id()); + } + status = playbackThread->attachAuxEffect(this, EffectId); + } + return status; +} + +void AudioFlinger::PlaybackThread::Track::setAuxBuffer(int EffectId, int32_t *buffer) +{ + mAuxEffectId = EffectId; + mAuxBuffer = buffer; +} + +bool AudioFlinger::PlaybackThread::Track::presentationComplete(size_t framesWritten, + size_t audioHalFrames) +{ + // a track is considered presented when the total 
number of frames written to audio HAL + // corresponds to the number of frames written when presentationComplete() is called for the + // first time (mPresentationCompleteFrames == 0) plus the buffer filling status at that time. + if (mPresentationCompleteFrames == 0) { + mPresentationCompleteFrames = framesWritten + audioHalFrames; + ALOGV("presentationComplete() reset: mPresentationCompleteFrames %d audioHalFrames %d", + mPresentationCompleteFrames, audioHalFrames); + } + if (framesWritten >= mPresentationCompleteFrames) { + ALOGV("presentationComplete() session %d complete: framesWritten %d", + mSessionId, framesWritten); + triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); + return true; + } + return false; +} + +void AudioFlinger::PlaybackThread::Track::triggerEvents(AudioSystem::sync_event_t type) +{ + for (int i = 0; i < (int)mSyncEvents.size(); i++) { + if (mSyncEvents[i]->type() == type) { + mSyncEvents[i]->trigger(); + mSyncEvents.removeAt(i); + i--; + } + } +} + +// implement VolumeBufferProvider interface + +uint32_t AudioFlinger::PlaybackThread::Track::getVolumeLR() +{ + // called by FastMixer, so not allowed to take any locks, block, or do I/O including logs + ALOG_ASSERT(isFastTrack() && (mCblk != NULL)); + uint32_t vlr = mCblk->getVolumeLR(); + uint32_t vl = vlr & 0xFFFF; + uint32_t vr = vlr >> 16; + // track volumes come from shared memory, so can't be trusted and must be clamped + if (vl > MAX_GAIN_INT) { + vl = MAX_GAIN_INT; + } + if (vr > MAX_GAIN_INT) { + vr = MAX_GAIN_INT; + } + // now apply the cached master volume and stream type volume; + // this is trusted but lacks any synchronization or barrier so may be stale + float v = mCachedVolume; + vl *= v; + vr *= v; + // re-combine into U4.16 + vlr = (vr << 16) | (vl & 0xFFFF); + // FIXME look at mute, pause, and stop flags + return vlr; +} + +status_t AudioFlinger::PlaybackThread::Track::setSyncEvent(const sp& event) +{ + if (mState == TERMINATED || mState == PAUSED || + ((framesReady() == 0) && ((mSharedBuffer != 0) || + (mState == STOPPED)))) { + ALOGW("Track::setSyncEvent() in invalid state %d on session %d %s mode, framesReady %d ", + mState, mSessionId, (mSharedBuffer != 0) ? 
"static" : "stream", framesReady()); + event->cancel(); + return INVALID_OPERATION; + } + (void) TrackBase::setSyncEvent(event); + return NO_ERROR; +} + +bool AudioFlinger::PlaybackThread::Track::isOut() const +{ + return true; +} + +// ---------------------------------------------------------------------------- + +sp +AudioFlinger::PlaybackThread::TimedTrack::create( + PlaybackThread *thread, + const sp& client, + audio_stream_type_t streamType, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + const sp& sharedBuffer, + int sessionId) { + if (!client->reserveTimedTrack()) + return 0; + + return new TimedTrack( + thread, client, streamType, sampleRate, format, channelMask, frameCount, + sharedBuffer, sessionId); +} + +AudioFlinger::PlaybackThread::TimedTrack::TimedTrack( + PlaybackThread *thread, + const sp& client, + audio_stream_type_t streamType, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + const sp& sharedBuffer, + int sessionId) + : Track(thread, client, streamType, sampleRate, format, channelMask, + frameCount, sharedBuffer, sessionId, IAudioFlinger::TRACK_TIMED), + mQueueHeadInFlight(false), + mTrimQueueHeadOnRelease(false), + mFramesPendingInQueue(0), + mTimedSilenceBuffer(NULL), + mTimedSilenceBufferSize(0), + mTimedAudioOutputOnTime(false), + mMediaTimeTransformValid(false) +{ + LocalClock lc; + mLocalTimeFreq = lc.getLocalFreq(); + + mLocalTimeToSampleTransform.a_zero = 0; + mLocalTimeToSampleTransform.b_zero = 0; + mLocalTimeToSampleTransform.a_to_b_numer = sampleRate; + mLocalTimeToSampleTransform.a_to_b_denom = mLocalTimeFreq; + LinearTransform::reduce(&mLocalTimeToSampleTransform.a_to_b_numer, + &mLocalTimeToSampleTransform.a_to_b_denom); + + mMediaTimeToSampleTransform.a_zero = 0; + mMediaTimeToSampleTransform.b_zero = 0; + mMediaTimeToSampleTransform.a_to_b_numer = sampleRate; + mMediaTimeToSampleTransform.a_to_b_denom = 1000000; + LinearTransform::reduce(&mMediaTimeToSampleTransform.a_to_b_numer, + &mMediaTimeToSampleTransform.a_to_b_denom); +} + +AudioFlinger::PlaybackThread::TimedTrack::~TimedTrack() { + mClient->releaseTimedTrack(); + delete [] mTimedSilenceBuffer; +} + +status_t AudioFlinger::PlaybackThread::TimedTrack::allocateTimedBuffer( + size_t size, sp* buffer) { + + Mutex::Autolock _l(mTimedBufferQueueLock); + + trimTimedBufferQueue_l(); + + // lazily initialize the shared memory heap for timed buffers + if (mTimedMemoryDealer == NULL) { + const int kTimedBufferHeapSize = 512 << 10; + + mTimedMemoryDealer = new MemoryDealer(kTimedBufferHeapSize, + "AudioFlingerTimed"); + if (mTimedMemoryDealer == NULL) + return NO_MEMORY; + } + + sp newBuffer = mTimedMemoryDealer->allocate(size); + if (newBuffer == NULL) { + newBuffer = mTimedMemoryDealer->allocate(size); + if (newBuffer == NULL) + return NO_MEMORY; + } + + *buffer = newBuffer; + return NO_ERROR; +} + +// caller must hold mTimedBufferQueueLock +void AudioFlinger::PlaybackThread::TimedTrack::trimTimedBufferQueue_l() { + int64_t mediaTimeNow; + { + Mutex::Autolock mttLock(mMediaTimeTransformLock); + if (!mMediaTimeTransformValid) + return; + + int64_t targetTimeNow; + status_t res = (mMediaTimeTransformTarget == TimedAudioTrack::COMMON_TIME) + ? 
mCCHelper.getCommonTime(&targetTimeNow) + : mCCHelper.getLocalTime(&targetTimeNow); + + if (OK != res) + return; + + if (!mMediaTimeTransform.doReverseTransform(targetTimeNow, + &mediaTimeNow)) { + return; + } + } + + size_t trimEnd; + for (trimEnd = 0; trimEnd < mTimedBufferQueue.size(); trimEnd++) { + int64_t bufEnd; + + if ((trimEnd + 1) < mTimedBufferQueue.size()) { + // We have a next buffer. Just use its PTS as the PTS of the frame + // following the last frame in this buffer. If the stream is sparse + // (ie, there are deliberate gaps left in the stream which should be + // filled with silence by the TimedAudioTrack), then this can result + // in one extra buffer being left un-trimmed when it could have + // been. In general, this is not typical, and we would rather + // optimized away the TS calculation below for the more common case + // where PTSes are contiguous. + bufEnd = mTimedBufferQueue[trimEnd + 1].pts(); + } else { + // We have no next buffer. Compute the PTS of the frame following + // the last frame in this buffer by computing the duration of of + // this frame in media time units and adding it to the PTS of the + // buffer. + int64_t frameCount = mTimedBufferQueue[trimEnd].buffer()->size() + / mFrameSize; + + if (!mMediaTimeToSampleTransform.doReverseTransform(frameCount, + &bufEnd)) { + ALOGE("Failed to convert frame count of %lld to media time" + " duration" " (scale factor %d/%u) in %s", + frameCount, + mMediaTimeToSampleTransform.a_to_b_numer, + mMediaTimeToSampleTransform.a_to_b_denom, + __PRETTY_FUNCTION__); + break; + } + bufEnd += mTimedBufferQueue[trimEnd].pts(); + } + + if (bufEnd > mediaTimeNow) + break; + + // Is the buffer we want to use in the middle of a mix operation right + // now? If so, don't actually trim it. Just wait for the releaseBuffer + // from the mixer which should be coming back shortly. + if (!trimEnd && mQueueHeadInFlight) { + mTrimQueueHeadOnRelease = true; + } + } + + size_t trimStart = mTrimQueueHeadOnRelease ? 1 : 0; + if (trimStart < trimEnd) { + // Update the bookkeeping for framesReady() + for (size_t i = trimStart; i < trimEnd; ++i) { + updateFramesPendingAfterTrim_l(mTimedBufferQueue[i], "trim"); + } + + // Now actually remove the buffers from the queue. + mTimedBufferQueue.removeItemsAt(trimStart, trimEnd); + } +} + +void AudioFlinger::PlaybackThread::TimedTrack::trimTimedBufferQueueHead_l( + const char* logTag) { + ALOG_ASSERT(mTimedBufferQueue.size() > 0, + "%s called (reason \"%s\"), but timed buffer queue has no" + " elements to trim.", __FUNCTION__, logTag); + + updateFramesPendingAfterTrim_l(mTimedBufferQueue[0], logTag); + mTimedBufferQueue.removeAt(0); +} + +void AudioFlinger::PlaybackThread::TimedTrack::updateFramesPendingAfterTrim_l( + const TimedBuffer& buf, + const char* logTag) { + uint32_t bufBytes = buf.buffer()->size(); + uint32_t consumedAlready = buf.position(); + + ALOG_ASSERT(consumedAlready <= bufBytes, + "Bad bookkeeping while updating frames pending. Timed buffer is" + " only %u bytes long, but claims to have consumed %u" + " bytes. (update reason: \"%s\")", + bufBytes, consumedAlready, logTag); + + uint32_t bufFrames = (bufBytes - consumedAlready) / mFrameSize; + ALOG_ASSERT(mFramesPendingInQueue >= bufFrames, + "Bad bookkeeping while updating frames pending. Should have at" + " least %u queued frames, but we think we have only %u. 
(update" + " reason: \"%s\")", + bufFrames, mFramesPendingInQueue, logTag); + + mFramesPendingInQueue -= bufFrames; +} + +status_t AudioFlinger::PlaybackThread::TimedTrack::queueTimedBuffer( + const sp& buffer, int64_t pts) { + + { + Mutex::Autolock mttLock(mMediaTimeTransformLock); + if (!mMediaTimeTransformValid) + return INVALID_OPERATION; + } + + Mutex::Autolock _l(mTimedBufferQueueLock); + + uint32_t bufFrames = buffer->size() / mFrameSize; + mFramesPendingInQueue += bufFrames; + mTimedBufferQueue.add(TimedBuffer(buffer, pts)); + + return NO_ERROR; +} + +status_t AudioFlinger::PlaybackThread::TimedTrack::setMediaTimeTransform( + const LinearTransform& xform, TimedAudioTrack::TargetTimeline target) { + + ALOGVV("setMediaTimeTransform az=%lld bz=%lld n=%d d=%u tgt=%d", + xform.a_zero, xform.b_zero, xform.a_to_b_numer, xform.a_to_b_denom, + target); + + if (!(target == TimedAudioTrack::LOCAL_TIME || + target == TimedAudioTrack::COMMON_TIME)) { + return BAD_VALUE; + } + + Mutex::Autolock lock(mMediaTimeTransformLock); + mMediaTimeTransform = xform; + mMediaTimeTransformTarget = target; + mMediaTimeTransformValid = true; + + return NO_ERROR; +} + +#define min(a, b) ((a) < (b) ? (a) : (b)) + +// implementation of getNextBuffer for tracks whose buffers have timestamps +status_t AudioFlinger::PlaybackThread::TimedTrack::getNextBuffer( + AudioBufferProvider::Buffer* buffer, int64_t pts) +{ + if (pts == AudioBufferProvider::kInvalidPTS) { + buffer->raw = NULL; + buffer->frameCount = 0; + mTimedAudioOutputOnTime = false; + return INVALID_OPERATION; + } + + Mutex::Autolock _l(mTimedBufferQueueLock); + + ALOG_ASSERT(!mQueueHeadInFlight, + "getNextBuffer called without releaseBuffer!"); + + while (true) { + + // if we have no timed buffers, then fail + if (mTimedBufferQueue.isEmpty()) { + buffer->raw = NULL; + buffer->frameCount = 0; + return NOT_ENOUGH_DATA; + } + + TimedBuffer& head = mTimedBufferQueue.editItemAt(0); + + // calculate the PTS of the head of the timed buffer queue expressed in + // local time + int64_t headLocalPTS; + { + Mutex::Autolock mttLock(mMediaTimeTransformLock); + + ALOG_ASSERT(mMediaTimeTransformValid, "media time transform invalid"); + + if (mMediaTimeTransform.a_to_b_denom == 0) { + // the transform represents a pause, so yield silence + timedYieldSilence_l(buffer->frameCount, buffer); + return NO_ERROR; + } + + int64_t transformedPTS; + if (!mMediaTimeTransform.doForwardTransform(head.pts(), + &transformedPTS)) { + // the transform failed. this shouldn't happen, but if it does + // then just drop this buffer + ALOGW("timedGetNextBuffer transform failed"); + buffer->raw = NULL; + buffer->frameCount = 0; + trimTimedBufferQueueHead_l("getNextBuffer; no transform"); + return NO_ERROR; + } + + if (mMediaTimeTransformTarget == TimedAudioTrack::COMMON_TIME) { + if (OK != mCCHelper.commonTimeToLocalTime(transformedPTS, + &headLocalPTS)) { + buffer->raw = NULL; + buffer->frameCount = 0; + return INVALID_OPERATION; + } + } else { + headLocalPTS = transformedPTS; + } + } + + // adjust the head buffer's PTS to reflect the portion of the head buffer + // that has already been consumed + int64_t effectivePTS = headLocalPTS + + ((head.position() / mFrameSize) * mLocalTimeFreq / sampleRate()); + + // Calculate the delta in samples between the head of the input buffer + // queue and the start of the next output buffer that will be written. 
+ // If the transformation fails because of over or underflow, it means + // that the sample's position in the output stream is so far out of + // whack that it should just be dropped. + int64_t sampleDelta; + if (llabs(effectivePTS - pts) >= (static_cast(1) << 31)) { + ALOGV("*** head buffer is too far from PTS: dropped buffer"); + trimTimedBufferQueueHead_l("getNextBuffer, buf pts too far from" + " mix"); + continue; + } + if (!mLocalTimeToSampleTransform.doForwardTransform( + (effectivePTS - pts) << 32, &sampleDelta)) { + ALOGV("*** too late during sample rate transform: dropped buffer"); + trimTimedBufferQueueHead_l("getNextBuffer, bad local to sample"); + continue; + } + + ALOGVV("*** getNextBuffer head.pts=%lld head.pos=%d pts=%lld" + " sampleDelta=[%d.%08x]", + head.pts(), head.position(), pts, + static_cast((sampleDelta >= 0 ? 0 : 1) + + (sampleDelta >> 32)), + static_cast(sampleDelta & 0xFFFFFFFF)); + + // if the delta between the ideal placement for the next input sample and + // the current output position is within this threshold, then we will + // concatenate the next input samples to the previous output + const int64_t kSampleContinuityThreshold = + (static_cast(sampleRate()) << 32) / 250; + + // if this is the first buffer of audio that we're emitting from this track + // then it should be almost exactly on time. + const int64_t kSampleStartupThreshold = 1LL << 32; + + if ((mTimedAudioOutputOnTime && llabs(sampleDelta) <= kSampleContinuityThreshold) || + (!mTimedAudioOutputOnTime && llabs(sampleDelta) <= kSampleStartupThreshold)) { + // the next input is close enough to being on time, so concatenate it + // with the last output + timedYieldSamples_l(buffer); + + ALOGVV("*** on time: head.pos=%d frameCount=%u", + head.position(), buffer->frameCount); + return NO_ERROR; + } + + // Looks like our output is not on time. Reset our on timed status. + // Next time we mix samples from our input queue, then should be within + // the StartupThreshold. + mTimedAudioOutputOnTime = false; + if (sampleDelta > 0) { + // the gap between the current output position and the proper start of + // the next input sample is too big, so fill it with silence + uint32_t framesUntilNextInput = (sampleDelta + 0x80000000) >> 32; + + timedYieldSilence_l(framesUntilNextInput, buffer); + ALOGV("*** silence: frameCount=%u", buffer->frameCount); + return NO_ERROR; + } else { + // the next input sample is late + uint32_t lateFrames = static_cast(-((sampleDelta + 0x80000000) >> 32)); + size_t onTimeSamplePosition = + head.position() + lateFrames * mFrameSize; + + if (onTimeSamplePosition > head.buffer()->size()) { + // all the remaining samples in the head are too late, so + // drop it and move on + ALOGV("*** too late: dropped buffer"); + trimTimedBufferQueueHead_l("getNextBuffer, dropped late buffer"); + continue; + } else { + // skip over the late samples + head.setPosition(onTimeSamplePosition); + + // yield the available samples + timedYieldSamples_l(buffer); + + ALOGV("*** late: head.pos=%d frameCount=%u", head.position(), buffer->frameCount); + return NO_ERROR; + } + } + } +} + +// Yield samples from the timed buffer queue head up to the given output +// buffer's capacity. 
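For concreteness, the Q32.32 thresholds used in getNextBuffer() above work out as follows for an assumed 48 kHz track (illustrative values only):

#include <cstdint>

// The '<< 32' places whole samples in the upper 32 bits, matching the
// fixed-point sampleDelta computed in getNextBuffer().
constexpr int64_t kRate       = 48000;
constexpr int64_t kContinuity = (kRate << 32) / 250;  // 4 ms worth of samples
constexpr int64_t kStartup    = 1LL << 32;            // exactly one sample
static_assert((kContinuity >> 32) == 192, "4 ms at 48 kHz is 192 samples");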
+// +// Caller must hold mTimedBufferQueueLock +void AudioFlinger::PlaybackThread::TimedTrack::timedYieldSamples_l( + AudioBufferProvider::Buffer* buffer) { + + const TimedBuffer& head = mTimedBufferQueue[0]; + + buffer->raw = (static_cast(head.buffer()->pointer()) + + head.position()); + + uint32_t framesLeftInHead = ((head.buffer()->size() - head.position()) / + mFrameSize); + size_t framesRequested = buffer->frameCount; + buffer->frameCount = min(framesLeftInHead, framesRequested); + + mQueueHeadInFlight = true; + mTimedAudioOutputOnTime = true; +} + +// Yield samples of silence up to the given output buffer's capacity +// +// Caller must hold mTimedBufferQueueLock +void AudioFlinger::PlaybackThread::TimedTrack::timedYieldSilence_l( + uint32_t numFrames, AudioBufferProvider::Buffer* buffer) { + + // lazily allocate a buffer filled with silence + if (mTimedSilenceBufferSize < numFrames * mFrameSize) { + delete [] mTimedSilenceBuffer; + mTimedSilenceBufferSize = numFrames * mFrameSize; + mTimedSilenceBuffer = new uint8_t[mTimedSilenceBufferSize]; + memset(mTimedSilenceBuffer, 0, mTimedSilenceBufferSize); + } + + buffer->raw = mTimedSilenceBuffer; + size_t framesRequested = buffer->frameCount; + buffer->frameCount = min(numFrames, framesRequested); + + mTimedAudioOutputOnTime = false; +} + +// AudioBufferProvider interface +void AudioFlinger::PlaybackThread::TimedTrack::releaseBuffer( + AudioBufferProvider::Buffer* buffer) { + + Mutex::Autolock _l(mTimedBufferQueueLock); + + // If the buffer which was just released is part of the buffer at the head + // of the queue, be sure to update the amt of the buffer which has been + // consumed. If the buffer being returned is not part of the head of the + // queue, its either because the buffer is part of the silence buffer, or + // because the head of the timed queue was trimmed after the mixer called + // getNextBuffer but before the mixer called releaseBuffer. + if (buffer->raw == mTimedSilenceBuffer) { + ALOG_ASSERT(!mQueueHeadInFlight, + "Queue head in flight during release of silence buffer!"); + goto done; + } + + ALOG_ASSERT(mQueueHeadInFlight, + "TimedTrack::releaseBuffer of non-silence buffer, but no queue" + " head in flight."); + + if (mTimedBufferQueue.size()) { + TimedBuffer& head = mTimedBufferQueue.editItemAt(0); + + void* start = head.buffer()->pointer(); + void* end = reinterpret_cast( + reinterpret_cast(head.buffer()->pointer()) + + head.buffer()->size()); + + ALOG_ASSERT((buffer->raw >= start) && (buffer->raw < end), + "released buffer not within the head of the timed buffer" + " queue; qHead = [%p, %p], released buffer = %p", + start, end, buffer->raw); + + head.setPosition(head.position() + + (buffer->frameCount * mFrameSize)); + mQueueHeadInFlight = false; + + ALOG_ASSERT(mFramesPendingInQueue >= buffer->frameCount, + "Bad bookkeeping during releaseBuffer! 
Should have at" + " least %u queued frames, but we think we have only %u", + buffer->frameCount, mFramesPendingInQueue); + + mFramesPendingInQueue -= buffer->frameCount; + + if ((static_cast(head.position()) >= head.buffer()->size()) + || mTrimQueueHeadOnRelease) { + trimTimedBufferQueueHead_l("releaseBuffer"); + mTrimQueueHeadOnRelease = false; + } + } else { + LOG_FATAL("TimedTrack::releaseBuffer of non-silence buffer with no" + " buffers in the timed buffer queue"); + } + +done: + buffer->raw = 0; + buffer->frameCount = 0; +} + +size_t AudioFlinger::PlaybackThread::TimedTrack::framesReady() const { + Mutex::Autolock _l(mTimedBufferQueueLock); + return mFramesPendingInQueue; +} + +AudioFlinger::PlaybackThread::TimedTrack::TimedBuffer::TimedBuffer() + : mPTS(0), mPosition(0) {} + +AudioFlinger::PlaybackThread::TimedTrack::TimedBuffer::TimedBuffer( + const sp& buffer, int64_t pts) + : mBuffer(buffer), mPTS(pts), mPosition(0) {} + + +// ---------------------------------------------------------------------------- + +AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( + PlaybackThread *playbackThread, + DuplicatingThread *sourceThread, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount) + : Track(playbackThread, NULL, AUDIO_STREAM_CNT, sampleRate, format, channelMask, frameCount, + NULL, 0, IAudioFlinger::TRACK_DEFAULT), + mActive(false), mSourceThread(sourceThread), mBuffers(NULL) +{ + + if (mCblk != NULL) { + mBuffers = (char*)mCblk + sizeof(audio_track_cblk_t); + mOutBuffer.frameCount = 0; + playbackThread->mTracks.add(this); + ALOGV("OutputTrack constructor mCblk %p, mBuffer %p, mBuffers %p, " \ + "mCblk->frameCount %d, mCblk->sampleRate %u, mChannelMask 0x%08x mBufferEnd %p", + mCblk, mBuffer, mBuffers, + mCblk->frameCount, mCblk->sampleRate, mChannelMask, mBufferEnd); + } else { + ALOGW("Error creating output track on thread %p", playbackThread); + } +} + +AudioFlinger::PlaybackThread::OutputTrack::~OutputTrack() +{ + clearBufferQueue(); +} + +status_t AudioFlinger::PlaybackThread::OutputTrack::start(AudioSystem::sync_event_t event, + int triggerSession) +{ + status_t status = Track::start(event, triggerSession); + if (status != NO_ERROR) { + return status; + } + + mActive = true; + mRetryCount = 127; + return status; +} + +void AudioFlinger::PlaybackThread::OutputTrack::stop() +{ + Track::stop(); + clearBufferQueue(); + mOutBuffer.frameCount = 0; + mActive = false; +} + +bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t frames) +{ + Buffer *pInBuffer; + Buffer inBuffer; + uint32_t channelCount = mChannelCount; + bool outputBufferFull = false; + inBuffer.frameCount = frames; + inBuffer.i16 = data; + + uint32_t waitTimeLeftMs = mSourceThread->waitTimeMs(); + + if (!mActive && frames != 0) { + start(); + sp thread = mThread.promote(); + if (thread != 0) { + MixerThread *mixerThread = (MixerThread *)thread.get(); + if (mFrameCount > frames) { + if (mBufferQueue.size() < kMaxOverFlowBuffers) { + uint32_t startFrames = (mFrameCount - frames); + pInBuffer = new Buffer; + pInBuffer->mBuffer = new int16_t[startFrames * channelCount]; + pInBuffer->frameCount = startFrames; + pInBuffer->i16 = pInBuffer->mBuffer; + memset(pInBuffer->raw, 0, startFrames * channelCount * sizeof(int16_t)); + mBufferQueue.add(pInBuffer); + } else { + ALOGW ("OutputTrack::write() %p no more buffers in queue", this); + } + } + } + } + + while (waitTimeLeftMs) { + // First write pending buffers, then new data + if (mBufferQueue.size()) { 
+ pInBuffer = mBufferQueue.itemAt(0); + } else { + pInBuffer = &inBuffer; + } + + if (pInBuffer->frameCount == 0) { + break; + } + + if (mOutBuffer.frameCount == 0) { + mOutBuffer.frameCount = pInBuffer->frameCount; + nsecs_t startTime = systemTime(); + if (obtainBuffer(&mOutBuffer, waitTimeLeftMs) == (status_t)NO_MORE_BUFFERS) { + ALOGV ("OutputTrack::write() %p thread %p no more output buffers", this, + mThread.unsafe_get()); + outputBufferFull = true; + break; + } + uint32_t waitTimeMs = (uint32_t)ns2ms(systemTime() - startTime); + if (waitTimeLeftMs >= waitTimeMs) { + waitTimeLeftMs -= waitTimeMs; + } else { + waitTimeLeftMs = 0; + } + } + + uint32_t outFrames = pInBuffer->frameCount > mOutBuffer.frameCount ? mOutBuffer.frameCount : + pInBuffer->frameCount; + memcpy(mOutBuffer.raw, pInBuffer->raw, outFrames * channelCount * sizeof(int16_t)); + mCblk->stepUserOut(outFrames, mFrameCount); + pInBuffer->frameCount -= outFrames; + pInBuffer->i16 += outFrames * channelCount; + mOutBuffer.frameCount -= outFrames; + mOutBuffer.i16 += outFrames * channelCount; + + if (pInBuffer->frameCount == 0) { + if (mBufferQueue.size()) { + mBufferQueue.removeAt(0); + delete [] pInBuffer->mBuffer; + delete pInBuffer; + ALOGV("OutputTrack::write() %p thread %p released overflow buffer %d", this, + mThread.unsafe_get(), mBufferQueue.size()); + } else { + break; + } + } + } + + // If we could not write all frames, allocate a buffer and queue it for next time. + if (inBuffer.frameCount) { + sp thread = mThread.promote(); + if (thread != 0 && !thread->standby()) { + if (mBufferQueue.size() < kMaxOverFlowBuffers) { + pInBuffer = new Buffer; + pInBuffer->mBuffer = new int16_t[inBuffer.frameCount * channelCount]; + pInBuffer->frameCount = inBuffer.frameCount; + pInBuffer->i16 = pInBuffer->mBuffer; + memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * channelCount * + sizeof(int16_t)); + mBufferQueue.add(pInBuffer); + ALOGV("OutputTrack::write() %p thread %p adding overflow buffer %d", this, + mThread.unsafe_get(), mBufferQueue.size()); + } else { + ALOGW("OutputTrack::write() %p thread %p no more overflow buffers", + mThread.unsafe_get(), this); + } + } + } + + // Calling write() with a 0 length buffer, means that no more data will be written: + // If no more buffers are pending, fill output track buffer to make sure it is started + // by output mixer. 
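A worked example of the comment above, with assumed numbers that mirror the branch implemented just below: a 1024-frame output track of which only 300 frames have been written gets padded with 724 frames of silence by the final write(0), so the mixer sees a startable buffer.

const uint32_t frameCount         = 1024;  // plays the role of mFrameCount
const uint32_t framesWrittenSoFar = 300;   // plays the role of mCblk->user
const uint32_t padFrames = frameCount - framesWrittenSoFar;   // 724 silent frames queued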
+ if (frames == 0 && mBufferQueue.size() == 0) { + if (mCblk->user < mFrameCount) { + frames = mFrameCount - mCblk->user; + pInBuffer = new Buffer; + pInBuffer->mBuffer = new int16_t[frames * channelCount]; + pInBuffer->frameCount = frames; + pInBuffer->i16 = pInBuffer->mBuffer; + memset(pInBuffer->raw, 0, frames * channelCount * sizeof(int16_t)); + mBufferQueue.add(pInBuffer); + } else if (mActive) { + stop(); + } + } + + return outputBufferFull; +} + +status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( + AudioBufferProvider::Buffer* buffer, uint32_t waitTimeMs) +{ + int active; + status_t result; + audio_track_cblk_t* cblk = mCblk; + uint32_t framesReq = buffer->frameCount; + + ALOGVV("OutputTrack::obtainBuffer user %d, server %d", cblk->user, cblk->server); + buffer->frameCount = 0; + + uint32_t framesAvail = cblk->framesAvailableOut(mFrameCount); + + + if (framesAvail == 0) { + Mutex::Autolock _l(cblk->lock); + goto start_loop_here; + while (framesAvail == 0) { + active = mActive; + if (CC_UNLIKELY(!active)) { + ALOGV("Not active and NO_MORE_BUFFERS"); + return NO_MORE_BUFFERS; + } + result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); + if (result != NO_ERROR) { + return NO_MORE_BUFFERS; + } + // read the server count again + start_loop_here: + framesAvail = cblk->framesAvailableOut_l(mFrameCount); + } + } + +// if (framesAvail < framesReq) { +// return NO_MORE_BUFFERS; +// } + + if (framesReq > framesAvail) { + framesReq = framesAvail; + } + + uint32_t u = cblk->user; + uint32_t bufferEnd = cblk->userBase + mFrameCount; + + if (framesReq > bufferEnd - u) { + framesReq = bufferEnd - u; + } + + buffer->frameCount = framesReq; + buffer->raw = cblk->buffer(mBuffers, mFrameSize, u); + return NO_ERROR; +} + + +void AudioFlinger::PlaybackThread::OutputTrack::clearBufferQueue() +{ + size_t size = mBufferQueue.size(); + + for (size_t i = 0; i < size; i++) { + Buffer *pBuffer = mBufferQueue.itemAt(i); + delete [] pBuffer->mBuffer; + delete pBuffer; + } + mBufferQueue.clear(); +} + + +// ---------------------------------------------------------------------------- +// Record +// ---------------------------------------------------------------------------- + +AudioFlinger::RecordHandle::RecordHandle( + const sp& recordTrack) + : BnAudioRecord(), + mRecordTrack(recordTrack) +{ +} + +AudioFlinger::RecordHandle::~RecordHandle() { + stop_nonvirtual(); + mRecordTrack->destroy(); +} + +sp AudioFlinger::RecordHandle::getCblk() const { + return mRecordTrack->getCblk(); +} + +status_t AudioFlinger::RecordHandle::start(int /*AudioSystem::sync_event_t*/ event, + int triggerSession) { + ALOGV("RecordHandle::start()"); + return mRecordTrack->start((AudioSystem::sync_event_t)event, triggerSession); +} + +void AudioFlinger::RecordHandle::stop() { + stop_nonvirtual(); +} + +void AudioFlinger::RecordHandle::stop_nonvirtual() { + ALOGV("RecordHandle::stop()"); + mRecordTrack->stop(); +} + +status_t AudioFlinger::RecordHandle::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + return BnAudioRecord::onTransact(code, data, reply, flags); +} + +// ---------------------------------------------------------------------------- + +// RecordTrack constructor must be called with AudioFlinger::mLock held +AudioFlinger::RecordThread::RecordTrack::RecordTrack( + RecordThread *thread, + const sp& client, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + size_t frameCount, + int sessionId) + : TrackBase(thread, client, sampleRate, 
format, + channelMask, frameCount, 0 /*sharedBuffer*/, sessionId), + mOverflow(false) +{ + ALOGV("RecordTrack constructor, size %d", (int)mBufferEnd - (int)mBuffer); +} + +AudioFlinger::RecordThread::RecordTrack::~RecordTrack() +{ + ALOGV("%s", __func__); +} + +// AudioBufferProvider interface +status_t AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvider::Buffer* buffer, + int64_t pts) +{ + audio_track_cblk_t* cblk = this->cblk(); + uint32_t framesAvail; + uint32_t framesReq = buffer->frameCount; + + // Check if last stepServer failed, try to step now + if (mStepServerFailed) { + if (!step()) { + goto getNextBuffer_exit; + } + ALOGV("stepServer recovered"); + mStepServerFailed = false; + } + + // FIXME lock is not actually held, so overrun is possible + framesAvail = cblk->framesAvailableIn_l(mFrameCount); + + if (CC_LIKELY(framesAvail)) { + uint32_t s = cblk->server; + uint32_t bufferEnd = cblk->serverBase + mFrameCount; + + if (framesReq > framesAvail) { + framesReq = framesAvail; + } + if (framesReq > bufferEnd - s) { + framesReq = bufferEnd - s; + } + + buffer->raw = getBuffer(s, framesReq); + buffer->frameCount = framesReq; + return NO_ERROR; + } + +getNextBuffer_exit: + buffer->raw = NULL; + buffer->frameCount = 0; + return NOT_ENOUGH_DATA; +} + +status_t AudioFlinger::RecordThread::RecordTrack::start(AudioSystem::sync_event_t event, + int triggerSession) +{ + sp thread = mThread.promote(); + if (thread != 0) { + RecordThread *recordThread = (RecordThread *)thread.get(); + return recordThread->start(this, event, triggerSession); + } else { + return BAD_VALUE; + } +} + +void AudioFlinger::RecordThread::RecordTrack::stop() +{ + sp thread = mThread.promote(); + if (thread != 0) { + RecordThread *recordThread = (RecordThread *)thread.get(); + recordThread->mLock.lock(); + bool doStop = recordThread->stop_l(this); + if (doStop) { + TrackBase::reset(); + // Force overrun condition to avoid false overrun callback until first data is + // read from buffer + android_atomic_or(CBLK_UNDERRUN, &mCblk->flags); + } + recordThread->mLock.unlock(); + if (doStop) { + AudioSystem::stopInput(recordThread->id()); + } + } +} + +void AudioFlinger::RecordThread::RecordTrack::destroy() +{ + // see comments at AudioFlinger::PlaybackThread::Track::destroy() + sp keep(this); + { + sp thread = mThread.promote(); + if (thread != 0) { + if (mState == ACTIVE || mState == RESUMING) { + AudioSystem::stopInput(thread->id()); + } + AudioSystem::releaseInput(thread->id()); + Mutex::Autolock _l(thread->mLock); + RecordThread *recordThread = (RecordThread *) thread.get(); + recordThread->destroyTrack_l(this); + } + } +} + + +/*static*/ void AudioFlinger::RecordThread::RecordTrack::appendDumpHeader(String8& result) +{ + result.append(" Clien Fmt Chn mask Session Step S SRate Serv User FrameCount\n"); +} + +void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size) +{ + snprintf(buffer, size, " %05d %03u 0x%08x %05d %04u %01d %05u %08x %08x %05d\n", + (mClient == 0) ? getpid_cached : mClient->pid(), + mFormat, + mChannelMask, + mSessionId, + mStepCount, + mState, + mCblk->sampleRate, + mCblk->server, + mCblk->user, + mFrameCount); +} + +bool AudioFlinger::RecordThread::RecordTrack::isOut() const +{ + return false; +} + +}; // namespace android -- cgit v1.1 From ff82370c7c04340d50673d425f48acafd00f2bc9 Mon Sep 17 00:00:00 2001 From: Ben Murdoch Date: Wed, 28 Nov 2012 13:58:29 +0000 Subject: Fix master build. 
Change-Id: Ia362f74d8cd7df76292473c26c112dffe190c599 --- services/camera/libcameraservice/Camera2Client.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index d468aa6..5a7bb48 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -1057,7 +1057,7 @@ status_t Camera2Client::cancelAutoFocus() { return OK; } -status_t Camera2Client::takePicture(int /*msgType*/) { +status_t Camera2Client::takePicture(int msgType) { ATRACE_CALL(); Mutex::Autolock icl(mICameraLock); status_t res; -- cgit v1.1 From f6f38287b97ec69b169387add6458f859b770e65 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 29 Nov 2012 13:49:07 -0800 Subject: Clear the sticky EOS flags when transitioning to LOADED state instead of transitioning _from_ UNINITIALIZED state. This makes codec instances reusable. Change-Id: I8f0c11923978ffee58b553a5ac59c740b0223c54 --- media/libstagefright/ACodec.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index a135222..8f3dc6c 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -3106,11 +3106,6 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp &msg) { mCodec->mOMX = omx; mCodec->mNode = node; - mCodec->mPortEOS[kPortIndexInput] = - mCodec->mPortEOS[kPortIndexOutput] = false; - - mCodec->mInputEOSResult = OK; - { sp notify = mCodec->mNotify->dup(); notify->setInt32("what", ACodec::kWhatComponentAllocated); @@ -3132,6 +3127,11 @@ ACodec::LoadedState::LoadedState(ACodec *codec) void ACodec::LoadedState::stateEntered() { ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); + mCodec->mPortEOS[kPortIndexInput] = + mCodec->mPortEOS[kPortIndexOutput] = false; + + mCodec->mInputEOSResult = OK; + if (mCodec->mShutdownInProgress) { bool keepComponentAllocated = mCodec->mKeepComponentAllocated; -- cgit v1.1 From 2d590964aa58e137d17a43e095e6443dd0fe2e98 Mon Sep 17 00:00:00 2001 From: Simon Wilson Date: Thu, 29 Nov 2012 15:18:50 -0800 Subject: Use ATRACE macros instead of Tracer statics ATRACE_BEGIN and ATRACE_END have replaced the static Tracer::traceBegin and Tracer::traceEnd functions, so use them instead. Fixes compilation errors when tracing is enabled. 
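For reference, the macro-based API adopted here follows this usage pattern (a minimal sketch, not code from this patch; the writeToSink() helper is only illustrative):

    #define ATRACE_TAG ATRACE_TAG_AUDIO   // must be defined before the include;
    #include <utils/Trace.h>              // with ATRACE_TAG_NEVER the macros become no-ops
    #include <sys/types.h>

    // writeToSink() stands in for whatever blocking call is being measured.
    extern ssize_t writeToSink(const void* buffer, size_t frameCount);

    ssize_t tracedWrite(const void* buffer, size_t frameCount) {
        ATRACE_BEGIN("write");            // opens a named slice in systrace
        ssize_t written = writeToSink(buffer, frameCount);
        ATRACE_END();                     // closes the slice opened above
        return written;
    }

    void onDelayedWrite() {
        ATRACE_NAME("underrun");          // scoped slice; ends when this function returns
        // ... log the stall ...
    }

Unlike the old Tracer statics, these macros are always declared by the header, which is what lets the build succeed whether or not tracing is enabled.
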
Change-Id: I4d1147d2f76afcdf113e9986f0544cb848802b15 --- services/audioflinger/FastMixer.cpp | 4 ++-- services/audioflinger/Threads.cpp | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 3c8a256..2160ea3 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -425,11 +425,11 @@ bool FastMixer::threadLoop() // but this code should be modified to handle both non-blocking and blocking sinks dumpState->mWriteSequence++; #if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) - Tracer::traceBegin(ATRACE_TAG, "write"); + ATRACE_BEGIN("write"); #endif ssize_t framesWritten = outputSink->write(mixBuffer, frameCount); #if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) - Tracer::traceEnd(ATRACE_TAG); + ATRACE_END(); #endif dumpState->mWriteSequence++; if (framesWritten >= 0) { diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 1ceb850..6cada0a 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1653,7 +1653,7 @@ void AudioFlinger::PlaybackThread::threadLoop_write() #define mBitShift 2 // FIXME size_t count = mixBufferSize >> mBitShift; #if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) - Tracer::traceBegin(ATRACE_TAG, "write"); + ATRACE_BEGIN("write"); #endif // update the setpoint when AudioFlinger::mScreenState changes uint32_t screenState = AudioFlinger::mScreenState; @@ -1667,7 +1667,7 @@ void AudioFlinger::PlaybackThread::threadLoop_write() } ssize_t framesWritten = mNormalSink->write(mMixBuffer, count); #if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) - Tracer::traceEnd(ATRACE_TAG); + ATRACE_END(); #endif if (framesWritten > 0) { bytesWritten = framesWritten << mBitShift; -- cgit v1.1 From a3d2628a22f2b3d682495044897a40ea1399a662 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 30 Nov 2012 07:57:43 -0800 Subject: Add warning about following the design rules Change-Id: Ic4895ed5682bad10b03e97d8015e642ee1696533 --- services/audioflinger/FastMixer.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 3c8a256..2005899 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -14,6 +14,12 @@ * limitations under the License. */ +// +// Design rules for threadLoop() are given in the comments at section "Fast mixer thread" of +// StateQueue.h. In particular, avoid library and system calls except at well-known points. +// The design rules are only for threadLoop(), and don't apply to FastMixerDumpState methods. 
+// + #define LOG_TAG "FastMixer" //#define LOG_NDEBUG 0 -- cgit v1.1 From 5876f2f28f31c1bd99864ba3bb1590e3d6765018 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 30 Nov 2012 10:52:16 -0800 Subject: Remove last bits of IAudioFlinger::channel_count Change-Id: I9e13678e0aa32a86eb27367a4aff4b32b8aec8cc --- media/libmedia/IAudioFlinger.cpp | 20 +------------------- 1 file changed, 1 insertion(+), 19 deletions(-) diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index a010bb6..c5fbbf0 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -32,7 +32,7 @@ enum { CREATE_TRACK = IBinder::FIRST_CALL_TRANSACTION, OPEN_RECORD, SAMPLE_RATE, - CHANNEL_COUNT, // obsolete + RESERVED, // obsolete, was CHANNEL_COUNT FORMAT, FRAME_COUNT, LATENCY, @@ -191,17 +191,6 @@ public: return reply.readInt32(); } -#if 0 - virtual int channelCount(audio_io_handle_t output) const - { - Parcel data, reply; - data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeInt32((int32_t) output); - remote()->transact(CHANNEL_COUNT, data, &reply); - return reply.readInt32(); - } -#endif - virtual audio_format_t format(audio_io_handle_t output) const { Parcel data, reply; @@ -768,13 +757,6 @@ status_t BnAudioFlinger::onTransact( reply->writeInt32( sampleRate((audio_io_handle_t) data.readInt32()) ); return NO_ERROR; } break; -#if 0 - case CHANNEL_COUNT: { - CHECK_INTERFACE(IAudioFlinger, data, reply); - reply->writeInt32( channelCount((audio_io_handle_t) data.readInt32()) ); - return NO_ERROR; - } break; -#endif case FORMAT: { CHECK_INTERFACE(IAudioFlinger, data, reply); reply->writeInt32( format((audio_io_handle_t) data.readInt32()) ); -- cgit v1.1 From 371eb9756c32109ea572b91216b19bb623f6d3fd Mon Sep 17 00:00:00 2001 From: Alex Ray Date: Fri, 30 Nov 2012 11:11:54 -0800 Subject: Remove conditional compilation of ATRACE functions Tracing functions are meant to be dynamically controlled via sysprops. Conditional compilation removes this functionality. Change-Id: I26bc473d104d0b3c50a228dddfda3fa2428d157a --- services/audioflinger/Android.mk | 3 --- services/audioflinger/FastMixer.cpp | 5 +++++ services/audioflinger/Threads.cpp | 10 +++------- 3 files changed, 8 insertions(+), 10 deletions(-) diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index c4050b8..dc65833 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -66,9 +66,6 @@ LOCAL_CFLAGS += -DSTATE_QUEUE_INSTANTIATIONS='"StateQueueInstantiations.cpp"' LOCAL_CFLAGS += -UFAST_TRACKS_AT_NON_NATIVE_SAMPLE_RATE -# uncomment for systrace -# LOCAL_CFLAGS += -DATRACE_TAG=ATRACE_TAG_AUDIO - # uncomment for dumpsys to write most recent audio output to .wav file # 47.5 seconds at 44.1 kHz, 8 megabytes # LOCAL_CFLAGS += -DTEE_SINK_FRAMES=0x200000 diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 2160ea3..0366dfe 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -17,6 +17,11 @@ #define LOG_TAG "FastMixer" //#define LOG_NDEBUG 0 +/** Uncomment for systrace. + * ATRACE_TAG will default to ATRACE_TAG_NEVER in the header. 
+ */ +//#define ATRACE_TAG ATRACE_TAG_AUDIO + #include #include #include diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 6cada0a..fd64395 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -18,6 +18,7 @@ #define LOG_TAG "AudioFlinger" //#define LOG_NDEBUG 0 +#define ATRACE_TAG ATRACE_TAG_AUDIO #include #include @@ -25,6 +26,7 @@ #include #include #include +#include #include #include @@ -1652,9 +1654,7 @@ void AudioFlinger::PlaybackThread::threadLoop_write() if (mNormalSink != 0) { #define mBitShift 2 // FIXME size_t count = mixBufferSize >> mBitShift; -#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) ATRACE_BEGIN("write"); -#endif // update the setpoint when AudioFlinger::mScreenState changes uint32_t screenState = AudioFlinger::mScreenState; if (screenState != mScreenState) { @@ -1666,9 +1666,7 @@ void AudioFlinger::PlaybackThread::threadLoop_write() } } ssize_t framesWritten = mNormalSink->write(mMixBuffer, count); -#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) ATRACE_END(); -#endif if (framesWritten > 0) { bytesWritten = framesWritten << mBitShift; } else { @@ -2000,9 +1998,7 @@ if (mType == MIXER) { if (!mStandby && delta > maxPeriod) { mNumDelayedWrites++; if ((now - lastWarning) > kWarningThrottleNs) { -#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) - ScopedTrace st(ATRACE_TAG, "underrun"); -#endif + ATRACE_NAME("underrun"); ALOGW("write blocked for %llu msecs, %d delayed writes, thread %p", ns2ms(delta), mNumDelayedWrites, this); lastWarning = now; -- cgit v1.1 From a1f8ab0ad670c30e57f3f072df13df66fe4f4910 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 30 Nov 2012 10:53:22 -0800 Subject: Fix nuplayer seek jankiness by properly flushing decoders before initiating the seek. Also refactor the nuplayer state machine to make this a little more maintainable and extensible in the future. Change-Id: I36a673bdecff732bca7094c8f72bac24f37c01e9 related-to-bug: 7120373 --- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 279 +++++++++++++++------- media/libmediaplayerservice/nuplayer/NuPlayer.h | 16 +- 2 files changed, 207 insertions(+), 88 deletions(-) diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index d3ec122..f363568 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -50,6 +50,49 @@ namespace android { +struct NuPlayer::Action : public RefBase { + Action() {} + + virtual void execute(NuPlayer *player) = 0; + +private: + DISALLOW_EVIL_CONSTRUCTORS(Action); +}; + +struct NuPlayer::SeekAction : public Action { + SeekAction(int64_t seekTimeUs) + : mSeekTimeUs(seekTimeUs) { + } + + virtual void execute(NuPlayer *player) { + player->performSeek(mSeekTimeUs); + } + +private: + int64_t mSeekTimeUs; + + DISALLOW_EVIL_CONSTRUCTORS(SeekAction); +}; + +// Use this if there's no state necessary to save in order to execute +// the action. 
+struct NuPlayer::SimpleAction : public Action { + typedef void (NuPlayer::*ActionFunc)(); + + SimpleAction(ActionFunc func) + : mFunc(func) { + } + + virtual void execute(NuPlayer *player) { + (player->*mFunc)(); + } + +private: + ActionFunc mFunc; + + DISALLOW_EVIL_CONSTRUCTORS(SimpleAction); +}; + //////////////////////////////////////////////////////////////////////////////// NuPlayer::NuPlayer() @@ -63,8 +106,6 @@ NuPlayer::NuPlayer() mTimeDiscontinuityPending(false), mFlushingAudio(NONE), mFlushingVideo(NONE), - mResetInProgress(false), - mResetPostponed(false), mSkipRenderingAudioUntilMediaTimeUs(-1ll), mSkipRenderingVideoUntilMediaTimeUs(-1ll), mVideoLateByUs(0ll), @@ -495,8 +536,15 @@ void NuPlayer::onMessageReceived(const sp &msg) { mRenderer->queueEOS(audio, UNKNOWN_ERROR); } else if (what == ACodec::kWhatDrainThisBuffer) { renderBuffer(audio, codecRequest); - } else { - ALOGV("Unhandled codec notification %d.", what); + } else if (what != ACodec::kWhatComponentAllocated + && what != ACodec::kWhatComponentConfigured + && what != ACodec::kWhatBuffersAllocated) { + ALOGV("Unhandled codec notification %d '%c%c%c%c'.", + what, + what >> 24, + (what >> 16) & 0xff, + (what >> 8) & 0xff, + what & 0xff); } break; @@ -569,47 +617,13 @@ void NuPlayer::onMessageReceived(const sp &msg) { { ALOGV("kWhatReset"); - cancelPollDuration(); - - if (mRenderer != NULL) { - // There's an edge case where the renderer owns all output - // buffers and is paused, therefore the decoder will not read - // more input data and will never encounter the matching - // discontinuity. To avoid this, we resume the renderer. + mDeferredActions.push_back( + new SimpleAction(&NuPlayer::performDecoderShutdown)); - if (mFlushingAudio == AWAITING_DISCONTINUITY - || mFlushingVideo == AWAITING_DISCONTINUITY) { - mRenderer->resume(); - } - } - - if (mFlushingAudio != NONE || mFlushingVideo != NONE) { - // We're currently flushing, postpone the reset until that's - // completed. 
+ mDeferredActions.push_back( + new SimpleAction(&NuPlayer::performReset)); - ALOGV("postponing reset mFlushingAudio=%d, mFlushingVideo=%d", - mFlushingAudio, mFlushingVideo); - - mResetPostponed = true; - break; - } - - if (mAudioDecoder == NULL && mVideoDecoder == NULL) { - finishReset(); - break; - } - - mTimeDiscontinuityPending = true; - - if (mAudioDecoder != NULL) { - flushDecoder(true /* audio */, true /* needShutdown */); - } - - if (mVideoDecoder != NULL) { - flushDecoder(false /* audio */, true /* needShutdown */); - } - - mResetInProgress = true; + processDeferredActions(); break; } @@ -618,18 +632,14 @@ void NuPlayer::onMessageReceived(const sp &msg) { int64_t seekTimeUs; CHECK(msg->findInt64("seekTimeUs", &seekTimeUs)); - ALOGV("kWhatSeek seekTimeUs=%lld us (%.2f secs)", - seekTimeUs, seekTimeUs / 1E6); + ALOGV("kWhatSeek seekTimeUs=%lld us", seekTimeUs); - mSource->seekTo(seekTimeUs); + mDeferredActions.push_back( + new SimpleAction(&NuPlayer::performDecoderFlush)); - if (mDriver != NULL) { - sp driver = mDriver.promote(); - if (driver != NULL) { - driver->notifySeekComplete(); - } - } + mDeferredActions.push_back(new SeekAction(seekTimeUs)); + processDeferredActions(); break; } @@ -680,39 +690,7 @@ void NuPlayer::finishFlushIfPossible() { mFlushingAudio = NONE; mFlushingVideo = NONE; - if (mResetInProgress) { - ALOGV("reset completed"); - - mResetInProgress = false; - finishReset(); - } else if (mResetPostponed) { - (new AMessage(kWhatReset, id()))->post(); - mResetPostponed = false; - } else if (mAudioDecoder == NULL || mVideoDecoder == NULL) { - postScanSources(); - } -} - -void NuPlayer::finishReset() { - CHECK(mAudioDecoder == NULL); - CHECK(mVideoDecoder == NULL); - - ++mScanSourcesGeneration; - mScanSourcesPending = false; - - mRenderer.clear(); - - if (mSource != NULL) { - mSource->stop(); - mSource.clear(); - } - - if (mDriver != NULL) { - sp driver = mDriver.promote(); - if (driver != NULL) { - driver->notifyResetComplete(); - } - } + processDeferredActions(); } void NuPlayer::postScanSources() { @@ -831,6 +809,14 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp &msg) { mTimeDiscontinuityPending || timeChange; if (formatChange || timeChange) { + if (mFlushingAudio == NONE && mFlushingVideo == NONE) { + // And we'll resume scanning sources once we're done + // flushing. + mDeferredActions.push_front( + new SimpleAction( + &NuPlayer::performScanSources)); + } + flushDecoder(audio, formatChange); } else { // This stream is unaffected by the discontinuity @@ -1023,4 +1009,127 @@ void NuPlayer::cancelPollDuration() { ++mPollDurationGeneration; } +void NuPlayer::processDeferredActions() { + while (!mDeferredActions.empty()) { + // We won't execute any deferred actions until we're no longer in + // an intermediate state, i.e. one more more decoders are currently + // flushing or shutting down. + + if (mRenderer != NULL) { + // There's an edge case where the renderer owns all output + // buffers and is paused, therefore the decoder will not read + // more input data and will never encounter the matching + // discontinuity. To avoid this, we resume the renderer. + + if (mFlushingAudio == AWAITING_DISCONTINUITY + || mFlushingVideo == AWAITING_DISCONTINUITY) { + mRenderer->resume(); + } + } + + if (mFlushingAudio != NONE || mFlushingVideo != NONE) { + // We're currently flushing, postpone the reset until that's + // completed. 
+ + ALOGV("postponing action mFlushingAudio=%d, mFlushingVideo=%d", + mFlushingAudio, mFlushingVideo); + + break; + } + + sp action = *mDeferredActions.begin(); + mDeferredActions.erase(mDeferredActions.begin()); + + action->execute(this); + } +} + +void NuPlayer::performSeek(int64_t seekTimeUs) { + ALOGV("performSeek seekTimeUs=%lld us (%.2f secs)", + seekTimeUs, + seekTimeUs / 1E6); + + mSource->seekTo(seekTimeUs); + + if (mDriver != NULL) { + sp driver = mDriver.promote(); + if (driver != NULL) { + driver->notifyPosition(seekTimeUs); + driver->notifySeekComplete(); + } + } + + // everything's flushed, continue playback. +} + +void NuPlayer::performDecoderFlush() { + ALOGV("performDecoderFlush"); + + if (mAudioDecoder != NULL && mVideoDecoder == NULL) { + return; + } + + mTimeDiscontinuityPending = true; + + if (mAudioDecoder != NULL) { + flushDecoder(true /* audio */, false /* needShutdown */); + } + + if (mVideoDecoder != NULL) { + flushDecoder(false /* audio */, false /* needShutdown */); + } +} + +void NuPlayer::performDecoderShutdown() { + ALOGV("performDecoderShutdown"); + + if (mAudioDecoder != NULL && mVideoDecoder == NULL) { + return; + } + + mTimeDiscontinuityPending = true; + + if (mAudioDecoder != NULL) { + flushDecoder(true /* audio */, true /* needShutdown */); + } + + if (mVideoDecoder != NULL) { + flushDecoder(false /* audio */, true /* needShutdown */); + } +} + +void NuPlayer::performReset() { + ALOGV("performReset"); + + CHECK(mAudioDecoder == NULL); + CHECK(mVideoDecoder == NULL); + + cancelPollDuration(); + + ++mScanSourcesGeneration; + mScanSourcesPending = false; + + mRenderer.clear(); + + if (mSource != NULL) { + mSource->stop(); + mSource.clear(); + } + + if (mDriver != NULL) { + sp driver = mDriver.promote(); + if (driver != NULL) { + driver->notifyResetComplete(); + } + } +} + +void NuPlayer::performScanSources() { + ALOGV("performScanSources"); + + if (mAudioDecoder == NULL || mVideoDecoder == NULL) { + postScanSources(); + } +} + } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h index 31efb2e..6e174e0 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h @@ -73,6 +73,9 @@ private: struct Renderer; struct RTSPSource; struct StreamingSource; + struct Action; + struct SeekAction; + struct SimpleAction; enum { kWhatSetDataSource = '=DaS', @@ -102,6 +105,8 @@ private: sp mAudioDecoder; sp mRenderer; + List > mDeferredActions; + bool mAudioEOS; bool mVideoEOS; @@ -126,8 +131,6 @@ private: FlushStatus mFlushingAudio; FlushStatus mFlushingVideo; - bool mResetInProgress; - bool mResetPostponed; int64_t mSkipRenderingAudioUntilMediaTimeUs; int64_t mSkipRenderingVideoUntilMediaTimeUs; @@ -150,12 +153,19 @@ private: static bool IsFlushingState(FlushStatus state, bool *needShutdown = NULL); - void finishReset(); void postScanSources(); void schedulePollDuration(); void cancelPollDuration(); + void processDeferredActions(); + + void performSeek(int64_t seekTimeUs); + void performDecoderFlush(); + void performDecoderShutdown(); + void performReset(); + void performScanSources(); + DISALLOW_EVIL_CONSTRUCTORS(NuPlayer); }; -- cgit v1.1 From 01437b7cdaecf53acb46b50ff8b5d86b9d36eb20 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 29 Nov 2012 07:32:49 -0800 Subject: AudioTrack inline short const methods Change-Id: I142917edb454d510bbe545e94e6eaea30b650fae --- include/media/AudioTrack.h | 21 ++++++++++--------- 
media/libmedia/AudioTrack.cpp | 47 ------------------------------------------- 2 files changed, 11 insertions(+), 57 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index f1b77ab..d5cd28a 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -211,27 +211,28 @@ public: * an uninitialized AudioTrack produces undefined results. * See set() method above for possible return codes. */ - status_t initCheck() const; + status_t initCheck() const { return mStatus; } /* Returns this track's estimated latency in milliseconds. * This includes the latency due to AudioTrack buffer size, AudioMixer (if any) * and audio hardware driver. */ - uint32_t latency() const; + uint32_t latency() const { return mLatency; } /* getters, see constructors and set() */ - audio_stream_type_t streamType() const; - audio_format_t format() const; - uint32_t channelCount() const; - uint32_t frameCount() const; + audio_stream_type_t streamType() const { return mStreamType; } + audio_format_t format() const { return mFormat; } /* Return channelCount * (bit depth per channel / 8). * channelCount is determined from channelMask, and bit depth comes from format. */ - size_t frameSize() const { return mFrameSize; } + uint32_t channelCount() const { return mChannelCount; } - sp& sharedBuffer(); + uint32_t frameCount() const { return mFrameCount; } + size_t frameSize() const { return mFrameSize; } + + sp sharedBuffer() const { return mSharedBuffer; } /* After it's created the track is not active. Call start() to @@ -261,7 +262,7 @@ public: * While muted, the callback, if set, is still called. */ void mute(bool); - bool muted() const; + bool muted() const { return mMuted; } /* Set volume for this track, mostly used for games' sound effects * left and right volumes. Levels must be >= 0.0 and <= 1.0. @@ -387,7 +388,7 @@ public: * Returned value: * AudioTrack session ID. */ - int getSessionId() const; + int getSessionId() const { return mSessionId; } /* Attach track auxiliary output to specified effect. Use effectId = 0 * to detach track from effect. 
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index e40895a..0463433 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -317,43 +317,6 @@ status_t AudioTrack::set( return NO_ERROR; } -status_t AudioTrack::initCheck() const -{ - return mStatus; -} - -// ------------------------------------------------------------------------- - -uint32_t AudioTrack::latency() const -{ - return mLatency; -} - -audio_stream_type_t AudioTrack::streamType() const -{ - return mStreamType; -} - -audio_format_t AudioTrack::format() const -{ - return mFormat; -} - -uint32_t AudioTrack::channelCount() const -{ - return mChannelCount; -} - -size_t AudioTrack::frameCount() const -{ - return mFrameCount; -} - -sp& AudioTrack::sharedBuffer() -{ - return mSharedBuffer; -} - // ------------------------------------------------------------------------- void AudioTrack::start() @@ -496,11 +459,6 @@ void AudioTrack::mute(bool e) mMuted = e; } -bool AudioTrack::muted() const -{ - return mMuted; -} - status_t AudioTrack::setVolume(float left, float right) { if (left < 0.0f || left > 1.0f || right < 0.0f || right > 1.0f) { @@ -735,11 +693,6 @@ audio_io_handle_t AudioTrack::getOutput_l() mCblk->sampleRate, mFormat, mChannelMask, mFlags); } -int AudioTrack::getSessionId() const -{ - return mSessionId; -} - status_t AudioTrack::attachAuxEffect(int effectId) { ALOGV("attachAuxEffect(%d)", effectId); -- cgit v1.1 From b3a8364eeea621ef63b983e4c1b0771f62069fe0 Mon Sep 17 00:00:00 2001 From: Alex Ray Date: Fri, 30 Nov 2012 19:42:28 -0800 Subject: audioflinger: define ANDROID_SMP, remove conditional tracing With ANDROID_SMP set, tracing functionality is completely inline, and without the performance hits of external library calls, tracing does not need to be conditionally compiled. Change-Id: I4b29a9a52c403f0d2ea137c5b7bc05a518a7ca4b --- services/audioflinger/Android.mk | 7 +++++++ services/audioflinger/FastMixer.cpp | 31 ++++++++++--------------------- 2 files changed, 17 insertions(+), 21 deletions(-) diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index dc65833..6d42143 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -78,6 +78,13 @@ LOCAL_CFLAGS += -UFAST_TRACKS_AT_NON_NATIVE_SAMPLE_RATE # LOCAL_SRC_FILES += AudioWatchdog.cpp # LOCAL_CFLAGS += -DAUDIO_WATCHDOG +# Define ANDROID_SMP appropriately. Used to get inline tracing fast-path. +ifeq ($(TARGET_CPU_SMP),true) + LOCAL_CFLAGS += -DANDROID_SMP=1 +else + LOCAL_CFLAGS += -DANDROID_SMP=0 +endif + include $(BUILD_SHARED_LIBRARY) # diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 0366dfe..5e6af16 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -17,10 +17,7 @@ #define LOG_TAG "FastMixer" //#define LOG_NDEBUG 0 -/** Uncomment for systrace. - * ATRACE_TAG will default to ATRACE_TAG_NEVER in the header. - */ -//#define ATRACE_TAG ATRACE_TAG_AUDIO +#define ATRACE_TAG ATRACE_TAG_AUDIO #include #include @@ -376,14 +373,14 @@ bool FastMixer::threadLoop() // up to 1 ms. If enough active tracks all blocked in sequence, this would result // in the overall fast mix cycle being delayed. Should use a non-blocking FIFO. size_t framesReady = fastTrack->mBufferProvider->framesReady(); -#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) - // I wish we had formatted trace names - char traceName[16]; - strcpy(traceName, "framesReady"); - traceName[11] = i + (i < 10 ? 
'0' : 'A' - 10); - traceName[12] = '\0'; - ATRACE_INT(traceName, framesReady); -#endif + if (ATRACE_ENABLED()) { + // I wish we had formatted trace names + char traceName[16]; + strcpy(traceName, "framesReady"); + traceName[11] = i + (i < 10 ? '0' : 'A' - 10); + traceName[12] = '\0'; + ATRACE_INT(traceName, framesReady); + } FastTrackDump *ftDump = &dumpState->mTracks[i]; FastTrackUnderruns underruns = ftDump->mUnderruns; if (framesReady < frameCount) { @@ -429,13 +426,9 @@ bool FastMixer::threadLoop() // FIXME write() is non-blocking and lock-free for a properly implemented NBAIO sink, // but this code should be modified to handle both non-blocking and blocking sinks dumpState->mWriteSequence++; -#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) ATRACE_BEGIN("write"); -#endif ssize_t framesWritten = outputSink->write(mixBuffer, frameCount); -#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) ATRACE_END(); -#endif dumpState->mWriteSequence++; if (framesWritten >= 0) { ALOG_ASSERT(framesWritten <= frameCount); @@ -490,9 +483,7 @@ bool FastMixer::threadLoop() sleepNs = -1; if (isWarm) { if (sec > 0 || nsec > underrunNs) { -#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) - ScopedTrace st(ATRACE_TAG, "underrun"); -#endif + ATRACE_NAME("underrun"); // FIXME only log occasionally ALOGV("underrun: time since last cycle %d.%03ld sec", (int) sec, nsec / 1000000L); @@ -572,10 +563,8 @@ bool FastMixer::threadLoop() // this store #4 is not atomic with respect to stores #1, #2, #3 above, but // the newest open and oldest closed halves are atomic with respect to each other dumpState->mBounds = bounds; -#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER) ATRACE_INT("cycle_ms", monotonicNs / 1000000); ATRACE_INT("load_us", loadNs / 1000); -#endif } #endif } else { -- cgit v1.1 From e4756fe3a387615acb63c6a05788c8db9b5786cb Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 29 Nov 2012 13:38:14 -0800 Subject: AudioTrack::mute() is unused so remove it If ever needed again, it could be implemented on client side by forcing a track volume of 0. Change-Id: I88a9b4f675b6dca2948549414f9ec2c192d29269 --- include/media/AudioTrack.h | 10 +--------- include/media/IAudioTrack.h | 5 ----- media/libmedia/AudioTrack.cpp | 11 ++--------- media/libmedia/IAudioTrack.cpp | 15 +-------------- services/audioflinger/AudioFlinger.h | 1 - services/audioflinger/PlaybackTracks.h | 7 ------- services/audioflinger/Threads.cpp | 9 +++------ services/audioflinger/Tracks.cpp | 15 ++------------- 8 files changed, 9 insertions(+), 64 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index d5cd28a..fe46a22 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -258,12 +258,6 @@ public: */ void pause(); - /* Mute or unmute this track. - * While muted, the callback, if set, is still called. - */ - void mute(bool); - bool muted() const { return mMuted; } - /* Set volume for this track, mostly used for games' sound effects * left and right volumes. Levels must be >= 0.0 and <= 1.0. * This is the older API. New applications should use setVolume(float) when possible. @@ -524,9 +518,7 @@ protected: audio_format_t mFormat; // as requested by client, not forced to 16-bit audio_stream_type_t mStreamType; - uint8_t mChannelCount; - uint8_t mMuted; - uint8_t mReserved; + uint32_t mChannelCount; audio_channel_mask_t mChannelMask; // mFrameSize is equal to mFrameSizeAF for non-PCM or 16-bit PCM data. 
diff --git a/include/media/IAudioTrack.h b/include/media/IAudioTrack.h index 9e0e389..144be0e 100644 --- a/include/media/IAudioTrack.h +++ b/include/media/IAudioTrack.h @@ -54,11 +54,6 @@ public: */ virtual void flush() = 0; - /* Mute or unmute this track. - * While muted, the callback, if set, is still called. - */ - virtual void mute(bool) = 0; - /* Pause a track. If set, the callback will cease being called and * obtainBuffer will return an error. Buffers that are already released * will continue to be processed, unless/until flush() is called. diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 0463433..f5641e0 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -304,7 +304,6 @@ status_t AudioTrack::set( } mSharedBuffer = sharedBuffer; - mMuted = false; mActive = false; mUserData = user; mLoopCount = 0; @@ -453,12 +452,6 @@ void AudioTrack::pause() } } -void AudioTrack::mute(bool e) -{ - mAudioTrack->mute(e); - mMuted = e; -} - status_t AudioTrack::setVolume(float left, float right) { if (left < 0.0f || left > 1.0f || right < 0.0f || right > 1.0f) { @@ -1424,8 +1417,8 @@ status_t AudioTrack::dump(int fd, const Vector& args) const snprintf(buffer, 255, " format(%d), channel count(%d), frame count(%d)\n", mFormat, mChannelCount, mFrameCount); result.append(buffer); - snprintf(buffer, 255, " sample rate(%u), status(%d), muted(%d)\n", - (cblk == 0) ? 0 : cblk->sampleRate, mStatus, mMuted); + snprintf(buffer, 255, " sample rate(%u), status(%d)\n", + (cblk == 0) ? 0 : cblk->sampleRate, mStatus); result.append(buffer); snprintf(buffer, 255, " active(%d), latency (%d)\n", mActive, mLatency); result.append(buffer); diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp index 867d1a5..e92f8aa 100644 --- a/media/libmedia/IAudioTrack.cpp +++ b/media/libmedia/IAudioTrack.cpp @@ -33,7 +33,7 @@ enum { START, STOP, FLUSH, - MUTE, + RESERVED, // was MUTE PAUSE, ATTACH_AUX_EFFECT, ALLOCATE_TIMED_BUFFER, @@ -88,14 +88,6 @@ public: remote()->transact(FLUSH, data, &reply); } - virtual void mute(bool e) - { - Parcel data, reply; - data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor()); - data.writeInt32(e); - remote()->transact(MUTE, data, &reply); - } - virtual void pause() { Parcel data, reply; @@ -192,11 +184,6 @@ status_t BnAudioTrack::onTransact( flush(); return NO_ERROR; } break; - case MUTE: { - CHECK_INTERFACE(IAudioTrack, data, reply); - mute( data.readInt32() ); - return NO_ERROR; - } break; case PAUSE: { CHECK_INTERFACE(IAudioTrack, data, reply); pause(); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 46a8e0f..6d3f0a1 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -385,7 +385,6 @@ private: virtual status_t start(); virtual void stop(); virtual void flush(); - virtual void mute(bool); virtual void pause(); virtual status_t attachAuxEffect(int effectId); virtual status_t allocateTimedBuffer(size_t size, diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index b898924..37e39a0 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -44,7 +44,6 @@ public: void flush(); void destroy(); - void mute(bool); int name() const { return mName; } audio_stream_type_t streamType() const { @@ -78,7 +77,6 @@ protected: virtual size_t framesReady() const; - bool isMuted() const { return mMute; } bool isPausing() const { return mState == PAUSING; 
} @@ -111,11 +109,6 @@ public: protected: - // written by Track::mute() called by binder thread(s), without a mutex or barrier. - // read by Track::isMuted() called by playback thread, also without a mutex or barrier. - // The lack of mutex or barrier is safe because the mute status is only used by itself. - bool mMute; - // FILLED state is used for suppressing volume ramp at begin of playing enum {FS_INVALID, FS_FILLING, FS_FILLED, FS_ACTIVE}; mutable uint8_t mFillingUpStatus; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index fd64395..a285e6c 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2544,8 +2544,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac } // cache the combined master volume and stream type volume for fast mixer; this // lacks any synchronization or barrier so VolumeProvider may read a stale value - track->mCachedVolume = track->isMuted() ? - 0 : masterVolume * mStreamTypes[track->streamType()].volume; + track->mCachedVolume = masterVolume * mStreamTypes[track->streamType()].volume; ++fastTracks; } else { // was it previously active? @@ -2637,8 +2636,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // compute volume for this track uint32_t vl, vr, va; - if (track->isMuted() || track->isPausing() || - mStreamTypes[track->streamType()].mute) { + if (track->isPausing() || mStreamTypes[track->streamType()].mute) { vl = vr = va = 0; if (track->isPausing()) { track->setPaused(); @@ -3139,8 +3137,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep // compute volume for this track float left, right; - if (track->isMuted() || mMasterMute || track->isPausing() || - mStreamTypes[track->streamType()].mute) { + if (mMasterMute || track->isPausing() || mStreamTypes[track->streamType()].mute) { left = right = 0; if (track->isPausing()) { track->setPaused(); diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 2c6ba8b..e8ca5ee 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -248,10 +248,6 @@ void AudioFlinger::TrackHandle::flush() { mTrack->flush(); } -void AudioFlinger::TrackHandle::mute(bool e) { - mTrack->mute(e); -} - void AudioFlinger::TrackHandle::pause() { mTrack->pause(); } @@ -315,7 +311,6 @@ AudioFlinger::PlaybackThread::Track::Track( IAudioFlinger::track_flags_t flags) : TrackBase(thread, client, sampleRate, format, channelMask, frameCount, sharedBuffer, sessionId), - mMute(false), mFillingUpStatus(FS_INVALID), // mRetryCount initialized later when needed mSharedBuffer(sharedBuffer), @@ -397,7 +392,7 @@ void AudioFlinger::PlaybackThread::Track::destroy() /*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result) { - result.append(" Name Client Type Fmt Chn mask Session StpCnt fCount S M F SRate " + result.append(" Name Client Type Fmt Chn mask Session StpCnt fCount S F SRate " "L dB R dB Server User Main buf Aux Buf Flags Underruns\n"); } @@ -461,7 +456,7 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) nowInUnderrun = '?'; break; } - snprintf(&buffer[7], size-7, " %6d %4u %3u 0x%08x %7u %6u %6u %1c %1d %1d %5u %5.2g %5.2g " + snprintf(&buffer[7], size-7, " %6d %4u %3u 0x%08x %7u %6u %6u %1c %1d %5u %5.2g %5.2g " "0x%08x 0x%08x 0x%08x 0x%08x %#5x %9u%c\n", (mClient == 0) ? 
getpid_cached : mClient->pid(), mStreamType, @@ -471,7 +466,6 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) mStepCount, mFrameCount, stateChar, - mMute, mFillingUpStatus, mCblk->sampleRate, 20.0 * log10((vlr & 0xFFFF) / 4096.0), @@ -708,11 +702,6 @@ void AudioFlinger::PlaybackThread::Track::reset() } } -void AudioFlinger::PlaybackThread::Track::mute(bool muted) -{ - mMute = muted; -} - status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId) { status_t status = DEAD_OBJECT; -- cgit v1.1 From 4bae3649d504d590a546717a8e49f96a30d9a745 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 30 Nov 2012 13:41:12 -0800 Subject: flush() comments and checks flush() is only useful for streaming mode. It is a no-op if track is active or uses a static buffer. Change-Id: I918ac181ffae3d16a0d67d8a7208f4aec61b5bd6 --- include/media/AudioTrack.h | 11 ++++++++--- media/libmedia/AudioTrack.cpp | 19 ++++++++++--------- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index fe46a22..d2739f7 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -163,7 +163,7 @@ public: * the PCM data to be rendered by AudioTrack is passed in a shared memory buffer * identified by the argument sharedBuffer. This prototype is for static buffer playback. * PCM data must be present in memory before the AudioTrack is started. - * The write() and flush() methods are not supported in this case. + * The write() method is not supported in this case. * It is recommended to pass a callback function to be notified of playback end by an * EVENT_UNDERRUN event. */ @@ -247,8 +247,10 @@ public: void stop(); bool stopped() const; - /* Flush a stopped track. All pending buffers are discarded. - * This function has no effect if the track is not stopped. + /* Flush a stopped or paused track. All previously buffered data is discarded immediately. + * This has the effect of draining the buffers without mixing or output. + * Flush is intended for streaming mode, for example before switching to non-contiguous content. + * This function is a no-op if the track is not stopped or paused, or uses a static buffer. */ void flush(); @@ -492,7 +494,10 @@ protected: audio_output_flags_t flags, const sp& sharedBuffer, audio_io_handle_t output); + + // can only be called when !mActive void flush_l(); + status_t setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount); audio_io_handle_t getOutput_l(); status_t restoreTrack_l(audio_track_cblk_t*& cblk, bool fromStart); diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index f5641e0..597d057 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -397,6 +397,7 @@ void AudioTrack::stop() mMarkerReached = false; // Force flush if a shared buffer is used otherwise audioflinger // will not stop before end of buffer is reached. + // It may be needed to make sure that we stop playback, likely in case looping is on. 
if (mSharedBuffer != 0) { flush_l(); } @@ -419,26 +420,26 @@ bool AudioTrack::stopped() const void AudioTrack::flush() { AutoMutex lock(mLock); - flush_l(); + if (!mActive && mSharedBuffer == 0) { + flush_l(); + } } -// must be called with mLock held void AudioTrack::flush_l() { ALOGV("flush"); + ALOG_ASSERT(!mActive); // clear playback marker and periodic update counter mMarkerPosition = 0; mMarkerReached = false; mUpdatePeriod = 0; - if (!mActive) { - mFlushed = true; - mAudioTrack->flush(); - // Release AudioTrack callback thread in case it was waiting for new buffers - // in AudioTrack::obtainBuffer() - mCblk->cv.signal(); - } + mFlushed = true; + mAudioTrack->flush(); + // Release AudioTrack callback thread in case it was waiting for new buffers + // in AudioTrack::obtainBuffer() + mCblk->cv.signal(); } void AudioTrack::pause() -- cgit v1.1 From 083d1c1492d496960d5b28f4664ff02101736677 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 30 Nov 2012 15:00:36 -0800 Subject: Emphasize distinction between streaming and static Update comments and improve error checks to match Change-Id: I7370d6e59a7ef26dfb284a8b058d5ab2e0a42ccf --- include/media/AudioTrack.h | 75 ++++++++++++++++++++++++++++++------------- media/libmedia/AudioTrack.cpp | 24 ++++++++------ 2 files changed, 67 insertions(+), 32 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index d2739f7..6f85527 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -52,8 +52,11 @@ public: * Keep in sync with frameworks/base/media/java/android/media/AudioTrack.java NATIVE_EVENT_*. */ enum event_type { - EVENT_MORE_DATA = 0, // Request to write more data to PCM buffer. - EVENT_UNDERRUN = 1, // PCM buffer underrun occurred. + EVENT_MORE_DATA = 0, // Request to write more data to buffer. + // If this event is delivered but the callback handler + // does not want to write more data, the handler must explicitly + // ignore the event by setting frameCount to zero. + EVENT_UNDERRUN = 1, // Buffer underrun occurred. EVENT_LOOP_END = 2, // Sample loop end was reached; playback restarted from // loop start if loop count was not 0. EVENT_MARKER = 3, // Playback head is at the specified marker position @@ -115,14 +118,16 @@ public: uint32_t sampleRate = 0); /* Constructs an uninitialized AudioTrack. No connection with - * AudioFlinger takes place. + * AudioFlinger takes place. Use set() after this. */ AudioTrack(); /* Creates an AudioTrack object and registers it with AudioFlinger. * Once created, the track needs to be started before it can be used. - * Unspecified values are set to the audio hardware's current - * values. + * Unspecified values are set to appropriate default values. + * With this constructor, the track is configured for streaming mode. + * Data to be rendered is supplied by write() or by the callback EVENT_MORE_DATA. + * Intermixing a combination of write() and non-ignored EVENT_MORE_DATA is deprecated. * * Parameters: * @@ -136,10 +141,10 @@ public: * application's contribution to the * latency of the track. The actual size selected by the AudioTrack could be * larger if the requested size is not compatible with current audio HAL - * latency. Zero means to use a default value. + * configuration. Zero means to use a default value. * flags: See comments on audio_output_flags_t in . * cbf: Callback function. If not null, this function is called periodically - * to provide new PCM data. + * to provide new data and inform of marker, position updates, etc. 
* user: Context for use by the callback receiver. * notificationFrames: The callback function is called each time notificationFrames PCM * frames have been consumed from track input buffer. @@ -159,13 +164,16 @@ public: int notificationFrames = 0, int sessionId = 0); - /* Creates an audio track and registers it with AudioFlinger. With this constructor, - * the PCM data to be rendered by AudioTrack is passed in a shared memory buffer - * identified by the argument sharedBuffer. This prototype is for static buffer playback. - * PCM data must be present in memory before the AudioTrack is started. + /* Creates an audio track and registers it with AudioFlinger. + * With this constructor, the track is configured for static buffer mode. + * The format must not be 8-bit linear PCM. + * Data to be rendered is passed in a shared memory buffer + * identified by the argument sharedBuffer, which must be non-0. + * The memory should be initialized to the desired data before calling start(). * The write() method is not supported in this case. * It is recommended to pass a callback function to be notified of playback end by an * EVENT_UNDERRUN event. + * FIXME EVENT_MORE_DATA still occurs; it must be ignored. */ AudioTrack( audio_stream_type_t streamType, @@ -184,13 +192,14 @@ public: */ ~AudioTrack(); - /* Initialize an uninitialized AudioTrack. * Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful initialization * - INVALID_OPERATION: AudioTrack is already initialized * - BAD_VALUE: invalid parameter (channelMask, format, sampleRate...) * - NO_INIT: audio server or audio hardware not initialized + * If sharedBuffer is non-0, the frameCount parameter is ignored and + * replaced by the shared buffer's total allocated size in frame units. */ status_t set(audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT, uint32_t sampleRate = 0, @@ -205,7 +214,6 @@ public: bool threadCanCallJava = false, int sessionId = 0); - /* Result of constructing the AudioTrack. This must be checked * before using any AudioTrack API (except for set()), because using * an uninitialized AudioTrack produces undefined results. @@ -224,25 +232,31 @@ public: audio_stream_type_t streamType() const { return mStreamType; } audio_format_t format() const { return mFormat; } - /* Return channelCount * (bit depth per channel / 8). + /* Return frame size in bytes, which for linear PCM is channelCount * (bit depth per channel / 8). * channelCount is determined from channelMask, and bit depth comes from format. + * For non-linear formats, the frame size is typically 1 byte. */ uint32_t channelCount() const { return mChannelCount; } uint32_t frameCount() const { return mFrameCount; } size_t frameSize() const { return mFrameSize; } + /* Return the static buffer specified in constructor or set(), or 0 for streaming mode */ sp sharedBuffer() const { return mSharedBuffer; } - /* After it's created the track is not active. Call start() to * make it active. If set, the callback will start being called. + * If the track was previously paused, volume is ramped up over the first mix buffer. */ void start(); - /* Stop a track. If set, the callback will cease being called and + /* Stop a track. + * In static buffer mode, the track is stopped immediately. + * In streaming mode, the callback will cease being called and * obtainBuffer returns STOPPED. Note that obtainBuffer() still works * and will fill up buffers until the pool is exhausted. 
+ * The stop does not occur immediately: any data remaining in the buffer + * is first drained, mixed, and output, and only then is the track marked as stopped. */ void stop(); bool stopped() const; @@ -254,9 +268,11 @@ public: */ void flush(); - /* Pause a track. If set, the callback will cease being called and + /* Pause a track. After pause, the callback will cease being called and * obtainBuffer returns STOPPED. Note that obtainBuffer() still works * and will fill up buffers until the pool is exhausted. + * Volume is ramped down over the next mix buffer following the pause request, + * and then the track is marked as paused. It can be resumed with ramp up by start(). */ void pause(); @@ -285,6 +301,7 @@ public: uint32_t getSampleRate() const; /* Enables looping and sets the start and end points of looping. + * Only supported for static buffer mode. * * Parameters: * @@ -300,13 +317,15 @@ public: /* Sets marker position. When playback reaches the number of frames specified, a callback with * event type EVENT_MARKER is called. Calling setMarkerPosition with marker == 0 cancels marker - * notification callback. + * notification callback. To set a marker at a position which would compute as 0, + * a workaround is to the set the marker at a nearby position such as -1 or 1. * If the AudioTrack has been opened with no callback function associated, the operation will * fail. * * Parameters: * - * marker: marker position expressed in frames. + * marker: marker position expressed in wrapping (overflow) frame units, + * like the return value of getPosition(). * * Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful operation @@ -315,13 +334,13 @@ public: status_t setMarkerPosition(uint32_t marker); status_t getMarkerPosition(uint32_t *marker) const; - /* Sets position update period. Every time the number of frames specified has been played, * a callback with event type EVENT_NEW_POS is called. * Calling setPositionUpdatePeriod with updatePeriod == 0 cancels new position notification * callback. * If the AudioTrack has been opened with no callback function associated, the operation will * fail. + * Extremely small values may be rounded up to a value the implementation can support. * * Parameters: * @@ -349,20 +368,26 @@ public: * * Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful operation - * - INVALID_OPERATION: the AudioTrack is not stopped. + * - INVALID_OPERATION: the AudioTrack is not stopped or paused, or is streaming mode. * - BAD_VALUE: The specified position is beyond the number of frames present in AudioTrack * buffer */ status_t setPosition(uint32_t position); + + /* Return the total number of frames played since playback start. + * The counter will wrap (overflow) periodically, e.g. every ~27 hours at 44.1 kHz. + * It is reset to zero by flush(), reload(), and stop(). + */ status_t getPosition(uint32_t *position); /* Forces AudioTrack buffer full condition. When playing a static buffer, this method avoids * rewriting the buffer before restarting playback after a stop. * This method must be called with the AudioTrack in paused or stopped state. + * Not allowed in streaming mode. * * Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful operation - * - INVALID_OPERATION: the AudioTrack is not stopped. + * - INVALID_OPERATION: the AudioTrack is not stopped or paused, or is streaming mode. */ status_t reload(); @@ -410,6 +435,9 @@ public: * or return WOULD_BLOCK depending on the value of the "blocking" * parameter. 
* + * obtainBuffer() and releaseBuffer() are deprecated for direct use by applications, + * which should use write() or callback EVENT_MORE_DATA instead. + * * Interpretation of waitCount: * +n limits wait time to n * WAIT_PERIOD_MS, * -1 causes an (almost) infinite wait time, @@ -447,6 +475,7 @@ public: * STOPPED AudioTrack was stopped during the write * NO_MORE_BUFFERS when obtainBuffer() returns same * or any other error code returned by IAudioTrack::start() or restoreTrack_l(). + * Not supported for static buffer mode. */ ssize_t write(const void* buffer, size_t size); @@ -548,7 +577,7 @@ protected: sp mSharedBuffer; int mLoopCount; uint32_t mRemainingFrames; - uint32_t mMarkerPosition; // in frames + uint32_t mMarkerPosition; // in wrapping (overflow) frame units bool mMarkerReached; uint32_t mNewPosition; // in frames uint32_t mUpdatePeriod; // in frames diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 597d057..ac672a7 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -138,6 +138,11 @@ AudioTrack::AudioTrack( mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT) { + if (sharedBuffer == 0) { + ALOGE("sharedBuffer must be non-0"); + mStatus = BAD_VALUE; + return; + } mStatus = set(streamType, sampleRate, format, channelMask, 0 /*frameCount*/, flags, cbf, user, notificationFrames, sharedBuffer, false /*threadCanCallJava*/, sessionId); @@ -535,6 +540,10 @@ status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount // must be called with mLock held status_t AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount) { + if (mSharedBuffer == 0 || mIsTimed) { + return INVALID_OPERATION; + } + audio_track_cblk_t* cblk = mCblk; Mutex::Autolock _l(cblk->lock); @@ -547,10 +556,6 @@ status_t AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCou return NO_ERROR; } - if (mIsTimed) { - return INVALID_OPERATION; - } - if (loopStart >= loopEnd || loopEnd - loopStart > mFrameCount || cblk->server > loopStart) { @@ -624,7 +629,7 @@ status_t AudioTrack::getPositionUpdatePeriod(uint32_t *updatePeriod) const status_t AudioTrack::setPosition(uint32_t position) { - if (mIsTimed) { + if (mSharedBuffer == 0 || mIsTimed) { return INVALID_OPERATION; } @@ -660,6 +665,10 @@ status_t AudioTrack::getPosition(uint32_t *position) status_t AudioTrack::reload() { + if (mSharedBuffer == 0 || mIsTimed) { + return INVALID_OPERATION; + } + AutoMutex lock(mLock); if (!stopped_l()) { @@ -1036,10 +1045,7 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer) ssize_t AudioTrack::write(const void* buffer, size_t userSize) { - if (mSharedBuffer != 0) { - return INVALID_OPERATION; - } - if (mIsTimed) { + if (mSharedBuffer != 0 || mIsTimed) { return INVALID_OPERATION; } -- cgit v1.1 From 57a339cdb7524f883de3ceb364c0b5606df0c610 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 3 Dec 2012 11:18:00 -0800 Subject: setVideoSurfaceTexture is now synchronous and applied dynamically while playing. 
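The synchronous behaviour is built from an asynchronous message plus a condition-variable wait, roughly this shape (a simplified sketch with invented names, not the patch code):

    // Sketch of the request/acknowledge pattern: the caller posts an asynchronous
    // request, then sleeps on a condition variable until the player thread reports
    // completion. All names here are illustrative only.
    #include <utils/Errors.h>
    #include <utils/Mutex.h>
    #include <utils/Condition.h>

    class BlockingDriver {
    public:
        BlockingDriver() : mPending(false) {}

        android::status_t setSurfaceBlocking() {
            android::Mutex::Autolock autoLock(mLock);
            mPending = true;
            postSetSurfaceToPlayerThread();   // hypothetical: hands the request off, returns at once
            while (mPending) {                // block the caller until the acknowledgement arrives
                mCondition.wait(mLock);
            }
            return android::OK;
        }

        // invoked on the player thread once the new surface has been applied
        void notifySetSurfaceComplete() {
            android::Mutex::Autolock autoLock(mLock);
            mPending = false;
            mCondition.broadcast();
        }

    private:
        void postSetSurfaceToPlayerThread();  // hypothetical async hand-off
        android::Mutex mLock;
        android::Condition mCondition;
        bool mPending;
    };

The notifySetSurfaceComplete() in the sketch corresponds to the notifySetSurfaceComplete() that the hunks below add to NuPlayerDriver.
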
Change-Id: If9f08659a01bdc7dac0999730368e9dfa5e58d36 related-to-bug: 5666482 --- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 77 +++++++++++++++++++--- media/libmediaplayerservice/nuplayer/NuPlayer.h | 8 ++- .../nuplayer/NuPlayerDriver.cpp | 22 ++++++- .../nuplayer/NuPlayerDriver.h | 2 + 4 files changed, 97 insertions(+), 12 deletions(-) diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index f363568..f9c3283 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -74,6 +74,21 @@ private: DISALLOW_EVIL_CONSTRUCTORS(SeekAction); }; +struct NuPlayer::SetSurfaceAction : public Action { + SetSurfaceAction(const sp &wrapper) + : mWrapper(wrapper) { + } + + virtual void execute(NuPlayer *player) { + player->performSetSurface(mWrapper); + } + +private: + sp mWrapper; + + DISALLOW_EVIL_CONSTRUCTORS(SetSurfaceAction); +}; + // Use this if there's no state necessary to save in order to execute // the action. struct NuPlayer::SimpleAction : public Action { @@ -111,7 +126,8 @@ NuPlayer::NuPlayer() mVideoLateByUs(0ll), mNumFramesTotal(0ll), mNumFramesDropped(0ll), - mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW) { + mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW), + mStarted(false) { } NuPlayer::~NuPlayer() { @@ -181,11 +197,19 @@ void NuPlayer::setDataSource(int fd, int64_t offset, int64_t length) { msg->post(); } -void NuPlayer::setVideoSurfaceTexture(const sp &surfaceTexture) { +void NuPlayer::setVideoSurfaceTextureAsync( + const sp &surfaceTexture) { sp msg = new AMessage(kWhatSetVideoNativeWindow, id()); - sp surfaceTextureClient(surfaceTexture != NULL ? - new SurfaceTextureClient(surfaceTexture) : NULL); - msg->setObject("native-window", new NativeWindowWrapper(surfaceTextureClient)); + + if (surfaceTexture == NULL) { + msg->setObject("native-window", NULL); + } else { + msg->setObject( + "native-window", + new NativeWindowWrapper( + new SurfaceTextureClient(surfaceTexture))); + } + msg->post(); } @@ -278,13 +302,24 @@ void NuPlayer::onMessageReceived(const sp &msg) { { ALOGV("kWhatSetVideoNativeWindow"); + mDeferredActions.push_back( + new SimpleAction(&NuPlayer::performDecoderShutdown)); + sp obj; CHECK(msg->findObject("native-window", &obj)); - mNativeWindow = static_cast(obj.get()); + mDeferredActions.push_back( + new SetSurfaceAction( + static_cast(obj.get()))); + + if (obj != NULL) { + // If there is a new surface texture, instantiate decoders + // again if possible. 
+ mDeferredActions.push_back( + new SimpleAction(&NuPlayer::performScanSources)); + } - // XXX - ignore error from setVideoScalingMode for now - setVideoScalingMode(mVideoScalingMode); + processDeferredActions(); break; } @@ -311,6 +346,7 @@ void NuPlayer::onMessageReceived(const sp &msg) { mVideoLateByUs = 0; mNumFramesTotal = 0; mNumFramesDropped = 0; + mStarted = true; mSource->start(); @@ -986,8 +1022,7 @@ sp NuPlayer::Source::getFormat(bool audio) { status_t NuPlayer::setVideoScalingMode(int32_t mode) { mVideoScalingMode = mode; - if (mNativeWindow != NULL - && mNativeWindow->getNativeWindow() != NULL) { + if (mNativeWindow != NULL) { status_t ret = native_window_set_scaling_mode( mNativeWindow->getNativeWindow().get(), mVideoScalingMode); if (ret != OK) { @@ -1122,14 +1157,36 @@ void NuPlayer::performReset() { driver->notifyResetComplete(); } } + + mStarted = false; } void NuPlayer::performScanSources() { ALOGV("performScanSources"); + if (!mStarted) { + return; + } + if (mAudioDecoder == NULL || mVideoDecoder == NULL) { postScanSources(); } } +void NuPlayer::performSetSurface(const sp &wrapper) { + ALOGV("performSetSurface"); + + mNativeWindow = wrapper; + + // XXX - ignore error from setVideoScalingMode for now + setVideoScalingMode(mVideoScalingMode); + + if (mDriver != NULL) { + sp driver = mDriver.promote(); + if (driver != NULL) { + driver->notifySetSurfaceComplete(); + } + } +} + } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h index 6e174e0..ca87be9 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h @@ -42,7 +42,9 @@ struct NuPlayer : public AHandler { void setDataSource(int fd, int64_t offset, int64_t length); - void setVideoSurfaceTexture(const sp &surfaceTexture); + void setVideoSurfaceTextureAsync( + const sp &surfaceTexture); + void setAudioSink(const sp &sink); void start(); @@ -75,6 +77,7 @@ private: struct StreamingSource; struct Action; struct SeekAction; + struct SetSurfaceAction; struct SimpleAction; enum { @@ -140,6 +143,8 @@ private: int32_t mVideoScalingMode; + bool mStarted; + status_t instantiateDecoder(bool audio, sp *decoder); status_t feedDecoderInputData(bool audio, const sp &msg); @@ -165,6 +170,7 @@ private: void performDecoderShutdown(); void performReset(); void performScanSources(); + void performSetSurface(const sp &wrapper); DISALLOW_EVIL_CONSTRUCTORS(NuPlayer); }; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index d03601f..a485dda 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -29,6 +29,7 @@ namespace android { NuPlayerDriver::NuPlayerDriver() : mResetInProgress(false), + mSetSurfaceInProgress(false), mDurationUs(-1), mPositionUs(-1), mNumFramesTotal(0), @@ -97,7 +98,19 @@ status_t NuPlayerDriver::setDataSource(const sp &source) { status_t NuPlayerDriver::setVideoSurfaceTexture( const sp &surfaceTexture) { - mPlayer->setVideoSurfaceTexture(surfaceTexture); + Mutex::Autolock autoLock(mLock); + + if (mResetInProgress) { + return INVALID_OPERATION; + } + + mSetSurfaceInProgress = true; + + mPlayer->setVideoSurfaceTextureAsync(surfaceTexture); + + while (mSetSurfaceInProgress) { + mCondition.wait(mLock); + } return OK; } @@ -308,6 +321,13 @@ void NuPlayerDriver::notifyResetComplete() { mCondition.broadcast(); } +void 
NuPlayerDriver::notifySetSurfaceComplete() { + Mutex::Autolock autoLock(mLock); + CHECK(mSetSurfaceInProgress); + mSetSurfaceInProgress = false; + mCondition.broadcast(); +} + void NuPlayerDriver::notifyDuration(int64_t durationUs) { Mutex::Autolock autoLock(mLock); mDurationUs = durationUs; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h index 4a0026c..d551bf1 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h @@ -62,6 +62,7 @@ struct NuPlayerDriver : public MediaPlayerInterface { virtual status_t dump(int fd, const Vector &args) const; void notifyResetComplete(); + void notifySetSurfaceComplete(); void notifyDuration(int64_t durationUs); void notifyPosition(int64_t positionUs); void notifySeekComplete(); @@ -78,6 +79,7 @@ private: // The following are protected through "mLock" // >>> bool mResetInProgress; + bool mSetSurfaceInProgress; int64_t mDurationUs; int64_t mPositionUs; int64_t mNumFramesTotal; -- cgit v1.1 From d32b99b1a87497280add6efc0e99bd383e402de1 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 27 Nov 2012 16:25:46 -0800 Subject: Camera2: fix metadata symbols Change-Id: I59e457824782de26b7b489cd92eb33d48e6ee2d9 --- services/camera/libcameraservice/Camera2Client.cpp | 44 ++-- .../libcameraservice/camera2/FrameProcessor.cpp | 16 +- .../camera/libcameraservice/camera2/Parameters.cpp | 232 ++++++++++----------- .../camera/libcameraservice/camera2/Parameters.h | 14 +- 4 files changed, 153 insertions(+), 153 deletions(-) diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index 5a7bb48..e804f77 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -183,37 +183,37 @@ status_t Camera2Client::dump(int fd, const Vector& args) { result.append(" White balance mode: "); switch (p.wbMode) { - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_AUTO) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_INCANDESCENT) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_FLUORESCENT) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_WARM_FLUORESCENT) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_DAYLIGHT) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_CLOUDY_DAYLIGHT) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_TWILIGHT) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_SHADE) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE) default: result.append("UNKNOWN\n"); } result.append(" Effect mode: "); switch (p.effectMode) { - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_OFF) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MONO) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_NEGATIVE) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_SOLARIZE) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_SEPIA) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_POSTERIZE) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_WHITEBOARD) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_BLACKBOARD) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_AQUA) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO) + 
CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA) default: result.append("UNKNOWN\n"); } result.append(" Antibanding mode: "); switch (p.antibandingMode) { - CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_AUTO) - CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_OFF) - CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_50HZ) - CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_60HZ) + CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO) + CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF) + CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ) + CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ) default: result.append("UNKNOWN\n"); } @@ -1260,7 +1260,7 @@ status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) { } // Ignoring type if (l.mParameters.fastInfo.bestFaceDetectMode == - ANDROID_STATS_FACE_DETECTION_OFF) { + ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) { ALOGE("%s: Camera %d: Face detection not supported", __FUNCTION__, mCameraId); return INVALID_OPERATION; diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp index e032522..8ee5de7 100644 --- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp +++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp @@ -177,7 +177,7 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, SharedParameters::Lock l(client->getParameters()); enableFaceDetect = l.mParameters.enableFaceDetect; } - entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE); + entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE); // TODO: This should be an error once implementations are compliant if (entry.count == 0) { @@ -190,9 +190,9 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, Vector faces; metadata.number_of_faces = 0; - if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) { + if (enableFaceDetect && faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) { SharedParameters::Lock l(client->getParameters()); - entry = frame.find(ANDROID_STATS_FACE_RECTANGLES); + entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES); if (entry.count == 0) { // No faces this frame /* warning: locks SharedCameraClient */ @@ -209,7 +209,7 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, } const int32_t *faceRects = entry.data.i32; - entry = frame.find(ANDROID_STATS_FACE_SCORES); + entry = frame.find(ANDROID_STATISTICS_FACE_SCORES); if (entry.count == 0) { ALOGE("%s: Camera %d: Unable to read face scores", __FUNCTION__, client->getCameraId()); @@ -220,8 +220,8 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, const int32_t *faceLandmarks = NULL; const int32_t *faceIds = NULL; - if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) { - entry = frame.find(ANDROID_STATS_FACE_LANDMARKS); + if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) { + entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS); if (entry.count == 0) { ALOGE("%s: Camera %d: Unable to read face landmarks", __FUNCTION__, client->getCameraId()); @@ -229,7 +229,7 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata 
&frame, } faceLandmarks = entry.data.i32; - entry = frame.find(ANDROID_STATS_FACE_IDS); + entry = frame.find(ANDROID_STATISTICS_FACE_IDS); if (entry.count == 0) { ALOGE("%s: Camera %d: Unable to read face IDs", @@ -256,7 +256,7 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]); face.score = faceScores[i]; - if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) { + if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) { face.id = faceIds[i]; face.left_eye[0] = l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]); diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index 93927e6..6ab19b1 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -278,7 +278,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { gpsProcessingMethod = "unknown"; // GPS fields in CameraParameters are not set by implementation - wbMode = ANDROID_CONTROL_AWB_AUTO; + wbMode = ANDROID_CONTROL_AWB_MODE_AUTO; params.set(CameraParameters::KEY_WHITE_BALANCE, CameraParameters::WHITE_BALANCE_AUTO); @@ -291,40 +291,40 @@ status_t Parameters::initialize(const CameraMetadata *info) { if (addComma) supportedWhiteBalance += ","; addComma = true; switch (availableWhiteBalanceModes.data.u8[i]) { - case ANDROID_CONTROL_AWB_AUTO: + case ANDROID_CONTROL_AWB_MODE_AUTO: supportedWhiteBalance += CameraParameters::WHITE_BALANCE_AUTO; break; - case ANDROID_CONTROL_AWB_INCANDESCENT: + case ANDROID_CONTROL_AWB_MODE_INCANDESCENT: supportedWhiteBalance += CameraParameters::WHITE_BALANCE_INCANDESCENT; break; - case ANDROID_CONTROL_AWB_FLUORESCENT: + case ANDROID_CONTROL_AWB_MODE_FLUORESCENT: supportedWhiteBalance += CameraParameters::WHITE_BALANCE_FLUORESCENT; break; - case ANDROID_CONTROL_AWB_WARM_FLUORESCENT: + case ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT: supportedWhiteBalance += CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT; break; - case ANDROID_CONTROL_AWB_DAYLIGHT: + case ANDROID_CONTROL_AWB_MODE_DAYLIGHT: supportedWhiteBalance += CameraParameters::WHITE_BALANCE_DAYLIGHT; break; - case ANDROID_CONTROL_AWB_CLOUDY_DAYLIGHT: + case ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT: supportedWhiteBalance += CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT; break; - case ANDROID_CONTROL_AWB_TWILIGHT: + case ANDROID_CONTROL_AWB_MODE_TWILIGHT: supportedWhiteBalance += CameraParameters::WHITE_BALANCE_TWILIGHT; break; - case ANDROID_CONTROL_AWB_SHADE: + case ANDROID_CONTROL_AWB_MODE_SHADE: supportedWhiteBalance += CameraParameters::WHITE_BALANCE_SHADE; break; // Skipping values not mappable to v1 API - case ANDROID_CONTROL_AWB_OFF: + case ANDROID_CONTROL_AWB_MODE_OFF: addComma = false; break; default: @@ -339,7 +339,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { supportedWhiteBalance); } - effectMode = ANDROID_CONTROL_EFFECT_OFF; + effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; params.set(CameraParameters::KEY_EFFECT, CameraParameters::EFFECT_NONE); @@ -353,39 +353,39 @@ status_t Parameters::initialize(const CameraMetadata *info) { if (addComma) supportedEffects += ","; addComma = true; switch (availableEffects.data.u8[i]) { - case ANDROID_CONTROL_EFFECT_OFF: + case ANDROID_CONTROL_EFFECT_MODE_OFF: supportedEffects += CameraParameters::EFFECT_NONE; break; - case ANDROID_CONTROL_EFFECT_MONO: + case ANDROID_CONTROL_EFFECT_MODE_MONO: supportedEffects += 
CameraParameters::EFFECT_MONO; break; - case ANDROID_CONTROL_EFFECT_NEGATIVE: + case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE: supportedEffects += CameraParameters::EFFECT_NEGATIVE; break; - case ANDROID_CONTROL_EFFECT_SOLARIZE: + case ANDROID_CONTROL_EFFECT_MODE_SOLARIZE: supportedEffects += CameraParameters::EFFECT_SOLARIZE; break; - case ANDROID_CONTROL_EFFECT_SEPIA: + case ANDROID_CONTROL_EFFECT_MODE_SEPIA: supportedEffects += CameraParameters::EFFECT_SEPIA; break; - case ANDROID_CONTROL_EFFECT_POSTERIZE: + case ANDROID_CONTROL_EFFECT_MODE_POSTERIZE: supportedEffects += CameraParameters::EFFECT_POSTERIZE; break; - case ANDROID_CONTROL_EFFECT_WHITEBOARD: + case ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD: supportedEffects += CameraParameters::EFFECT_WHITEBOARD; break; - case ANDROID_CONTROL_EFFECT_BLACKBOARD: + case ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD: supportedEffects += CameraParameters::EFFECT_BLACKBOARD; break; - case ANDROID_CONTROL_EFFECT_AQUA: + case ANDROID_CONTROL_EFFECT_MODE_AQUA: supportedEffects += CameraParameters::EFFECT_AQUA; break; @@ -399,7 +399,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { params.set(CameraParameters::KEY_SUPPORTED_EFFECTS, supportedEffects); } - antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_AUTO; + antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; params.set(CameraParameters::KEY_ANTIBANDING, CameraParameters::ANTIBANDING_AUTO); @@ -413,19 +413,19 @@ status_t Parameters::initialize(const CameraMetadata *info) { if (addComma) supportedAntibanding += ","; addComma = true; switch (availableAntibandingModes.data.u8[i]) { - case ANDROID_CONTROL_AE_ANTIBANDING_OFF: + case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF: supportedAntibanding += CameraParameters::ANTIBANDING_OFF; break; - case ANDROID_CONTROL_AE_ANTIBANDING_50HZ: + case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ: supportedAntibanding += CameraParameters::ANTIBANDING_50HZ; break; - case ANDROID_CONTROL_AE_ANTIBANDING_60HZ: + case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ: supportedAntibanding += CameraParameters::ANTIBANDING_60HZ; break; - case ANDROID_CONTROL_AE_ANTIBANDING_AUTO: + case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO: supportedAntibanding += CameraParameters::ANTIBANDING_AUTO; break; @@ -538,7 +538,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { } camera_metadata_ro_entry_t flashAvailable = - staticInfo(ANDROID_FLASH_AVAILABLE, 1, 1); + staticInfo(ANDROID_FLASH_INFO_AVAILABLE, 1, 1); if (!flashAvailable.count) return NO_INIT; camera_metadata_ro_entry_t availableAeModes = @@ -557,7 +557,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { "," + CameraParameters::FLASH_MODE_TORCH; for (size_t i=0; i < availableAeModes.count; i++) { if (availableAeModes.data.u8[i] == - ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE) { + ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) { supportedFlashModes = supportedFlashModes + "," + CameraParameters::FLASH_MODE_RED_EYE; break; @@ -574,7 +574,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { } camera_metadata_ro_entry_t minFocusDistance = - staticInfo(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE, 1, 1); + staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1, 1); if (!minFocusDistance.count) return NO_INIT; camera_metadata_ro_entry_t availableAfModes = @@ -599,28 +599,28 @@ status_t Parameters::initialize(const CameraMetadata *info) { if (addComma) supportedFocusModes += ","; addComma = true; switch (availableAfModes.data.u8[i]) { - case ANDROID_CONTROL_AF_AUTO: + case ANDROID_CONTROL_AF_MODE_AUTO: 
supportedFocusModes += CameraParameters::FOCUS_MODE_AUTO; break; - case ANDROID_CONTROL_AF_MACRO: + case ANDROID_CONTROL_AF_MODE_MACRO: supportedFocusModes += CameraParameters::FOCUS_MODE_MACRO; break; - case ANDROID_CONTROL_AF_CONTINUOUS_VIDEO: + case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: supportedFocusModes += CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO; break; - case ANDROID_CONTROL_AF_CONTINUOUS_PICTURE: + case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: supportedFocusModes += CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE; break; - case ANDROID_CONTROL_AF_EDOF: + case ANDROID_CONTROL_AF_MODE_EDOF: supportedFocusModes += CameraParameters::FOCUS_MODE_EDOF; break; // Not supported in old API - case ANDROID_CONTROL_AF_OFF: + case ANDROID_CONTROL_AF_MODE_OFF: addComma = false; break; default: @@ -651,14 +651,14 @@ status_t Parameters::initialize(const CameraMetadata *info) { focusingAreas.add(Parameters::Area(0,0,0,0,0)); camera_metadata_ro_entry_t availableFocalLengths = - staticInfo(ANDROID_LENS_AVAILABLE_FOCAL_LENGTHS); + staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS); if (!availableFocalLengths.count) return NO_INIT; float minFocalLength = availableFocalLengths.data.f[0]; params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, minFocalLength); camera_metadata_ro_entry_t sensorSize = - staticInfo(ANDROID_SENSOR_PHYSICAL_SIZE, 2, 2); + staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2); if (!sensorSize.count) return NO_INIT; // The fields of view here assume infinity focus, maximum wide angle @@ -674,7 +674,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { exposureCompensation); camera_metadata_ro_entry_t exposureCompensationRange = - staticInfo(ANDROID_CONTROL_AE_EXP_COMPENSATION_RANGE, 2, 2); + staticInfo(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 2, 2); if (!exposureCompensationRange.count) return NO_INIT; params.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, @@ -683,7 +683,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { exposureCompensationRange.data.i32[0]); camera_metadata_ro_entry_t exposureCompensationStep = - staticInfo(ANDROID_CONTROL_AE_EXP_COMPENSATION_STEP, 1, 1); + staticInfo(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1, 1); if (!exposureCompensationStep.count) return NO_INIT; params.setFloat(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, @@ -713,7 +713,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { params.set(CameraParameters::KEY_MAX_ZOOM, NUM_ZOOM_STEPS - 1); camera_metadata_ro_entry_t maxDigitalZoom = - staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM, /*minCount*/1, /*maxCount*/1); + staticInfo(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, /*minCount*/1, /*maxCount*/1); if (!maxDigitalZoom.count) return NO_INIT; { @@ -811,31 +811,31 @@ String8 Parameters::get() const { status_t Parameters::buildFastInfo() { camera_metadata_ro_entry_t activeArraySize = - staticInfo(ANDROID_SENSOR_ACTIVE_ARRAY_SIZE, 2, 2); + staticInfo(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 2, 2); if (!activeArraySize.count) return NO_INIT; int32_t arrayWidth = activeArraySize.data.i32[0]; int32_t arrayHeight = activeArraySize.data.i32[1]; camera_metadata_ro_entry_t availableFaceDetectModes = - staticInfo(ANDROID_STATS_AVAILABLE_FACE_DETECT_MODES); + staticInfo(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES); if (!availableFaceDetectModes.count) return NO_INIT; uint8_t bestFaceDetectMode = - ANDROID_STATS_FACE_DETECTION_OFF; + ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; for (size_t i = 0 ; i < availableFaceDetectModes.count; i++) { switch 
(availableFaceDetectModes.data.u8[i]) { - case ANDROID_STATS_FACE_DETECTION_OFF: + case ANDROID_STATISTICS_FACE_DETECT_MODE_OFF: break; - case ANDROID_STATS_FACE_DETECTION_SIMPLE: + case ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE: if (bestFaceDetectMode != - ANDROID_STATS_FACE_DETECTION_FULL) { + ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) { bestFaceDetectMode = - ANDROID_STATS_FACE_DETECTION_SIMPLE; + ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE; } break; - case ANDROID_STATS_FACE_DETECTION_FULL: + case ANDROID_STATISTICS_FACE_DETECT_MODE_FULL: bestFaceDetectMode = - ANDROID_STATS_FACE_DETECTION_FULL; + ANDROID_STATISTICS_FACE_DETECT_MODE_FULL; break; default: ALOGE("%s: Camera %d: Unknown face detect mode %d:", @@ -846,7 +846,7 @@ status_t Parameters::buildFastInfo() { } camera_metadata_ro_entry_t maxFacesDetected = - staticInfo(ANDROID_STATS_MAX_FACE_COUNT, 1, 1); + staticInfo(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1, 1); if (!maxFacesDetected.count) return NO_INIT; int32_t maxFaces = maxFacesDetected.data.i32[0]; @@ -856,7 +856,7 @@ status_t Parameters::buildFastInfo() { camera_metadata_ro_entry_t sceneModeOverrides = staticInfo(ANDROID_CONTROL_SCENE_MODE_OVERRIDES); camera_metadata_ro_entry_t minFocusDistance = - staticInfo(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE); + staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE); bool fixedLens = (minFocusDistance.data.f[0] == 0); if (sceneModeOverrides.count > 0) { @@ -877,16 +877,16 @@ status_t Parameters::buildFastInfo() { uint8_t aeMode = sceneModeOverrides.data.u8[i * kModesPerSceneMode + 0]; switch(aeMode) { - case ANDROID_CONTROL_AE_ON: + case ANDROID_CONTROL_AE_MODE_ON: modes.flashMode = FLASH_MODE_OFF; break; - case ANDROID_CONTROL_AE_ON_AUTO_FLASH: + case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH: modes.flashMode = FLASH_MODE_AUTO; break; - case ANDROID_CONTROL_AE_ON_ALWAYS_FLASH: + case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH: modes.flashMode = FLASH_MODE_ON; break; - case ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE: + case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: modes.flashMode = FLASH_MODE_RED_EYE; break; default: @@ -900,15 +900,15 @@ status_t Parameters::buildFastInfo() { uint8_t afMode = sceneModeOverrides.data.u8[i * kModesPerSceneMode + 2]; switch(afMode) { - case ANDROID_CONTROL_AF_OFF: + case ANDROID_CONTROL_AF_MODE_OFF: modes.focusMode = fixedLens ? FOCUS_MODE_FIXED : FOCUS_MODE_INFINITY; break; - case ANDROID_CONTROL_AF_AUTO: - case ANDROID_CONTROL_AF_MACRO: - case ANDROID_CONTROL_AF_CONTINUOUS_VIDEO: - case ANDROID_CONTROL_AF_CONTINUOUS_PICTURE: - case ANDROID_CONTROL_AF_EDOF: + case ANDROID_CONTROL_AF_MODE_AUTO: + case ANDROID_CONTROL_AF_MODE_MACRO: + case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: + case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: + case ANDROID_CONTROL_AF_MODE_EDOF: modes.focusMode = static_cast(afMode); break; default: @@ -1363,7 +1363,7 @@ status_t Parameters::set(const String8& paramString) { if (validatedParams.flashMode != flashMode) { camera_metadata_ro_entry_t flashAvailable = - staticInfo(ANDROID_FLASH_AVAILABLE, 1, 1); + staticInfo(ANDROID_FLASH_INFO_AVAILABLE, 1, 1); if (!flashAvailable.data.u8[0] && validatedParams.flashMode != Parameters::FLASH_MODE_OFF) { ALOGE("%s: Requested flash mode \"%s\" is not supported: " @@ -1400,9 +1400,9 @@ status_t Parameters::set(const String8& paramString) { fastInfo.sceneModeOverrides. 
valueFor(validatedParams.sceneMode).wbMode; } else { - validatedParams.wbMode = ANDROID_CONTROL_AWB_OFF; + validatedParams.wbMode = ANDROID_CONTROL_AWB_MODE_OFF; } - if (validatedParams.wbMode == ANDROID_CONTROL_AWB_OFF) { + if (validatedParams.wbMode == ANDROID_CONTROL_AWB_MODE_OFF) { validatedParams.wbMode = wbModeStringToEnum( newParams.get(CameraParameters::KEY_WHITE_BALANCE) ); } @@ -1439,7 +1439,7 @@ status_t Parameters::set(const String8& paramString) { validatedParams.currentAfTriggerId = -1; if (validatedParams.focusMode != Parameters::FOCUS_MODE_FIXED) { camera_metadata_ro_entry_t minFocusDistance = - staticInfo(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE); + staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE); if (minFocusDistance.data.f[0] == 0) { ALOGE("%s: Requested focus mode \"%s\" is not available: " "fixed focus lens", @@ -1489,7 +1489,7 @@ status_t Parameters::set(const String8& paramString) { validatedParams.exposureCompensation = newParams.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION); camera_metadata_ro_entry_t exposureCompensationRange = - staticInfo(ANDROID_CONTROL_AE_EXP_COMPENSATION_RANGE); + staticInfo(ANDROID_CONTROL_AE_COMPENSATION_RANGE); if ((validatedParams.exposureCompensation < exposureCompensationRange.data.i32[0]) || (validatedParams.exposureCompensation > @@ -1585,7 +1585,7 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { ATRACE_CALL(); status_t res; - uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL; + uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL; res = request->update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1); if (res != OK) return res; @@ -1612,9 +1612,9 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { // to the other. bool sceneModeActive = sceneMode != (uint8_t)ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED; - uint8_t reqControlMode = ANDROID_CONTROL_AUTO; + uint8_t reqControlMode = ANDROID_CONTROL_MODE_AUTO; if (enableFaceDetect || sceneModeActive) { - reqControlMode = ANDROID_CONTROL_USE_SCENE_MODE; + reqControlMode = ANDROID_CONTROL_MODE_USE_SCENE_MODE; } res = request->update(ANDROID_CONTROL_MODE, &reqControlMode, 1); @@ -1628,21 +1628,21 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { &reqSceneMode, 1); if (res != OK) return res; - uint8_t reqFlashMode = ANDROID_FLASH_OFF; - uint8_t reqAeMode = ANDROID_CONTROL_AE_OFF; + uint8_t reqFlashMode = ANDROID_FLASH_MODE_OFF; + uint8_t reqAeMode = ANDROID_CONTROL_AE_MODE_OFF; switch (flashMode) { case Parameters::FLASH_MODE_OFF: - reqAeMode = ANDROID_CONTROL_AE_ON; break; + reqAeMode = ANDROID_CONTROL_AE_MODE_ON; break; case Parameters::FLASH_MODE_AUTO: - reqAeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH; break; + reqAeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH; break; case Parameters::FLASH_MODE_ON: - reqAeMode = ANDROID_CONTROL_AE_ON_ALWAYS_FLASH; break; + reqAeMode = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH; break; case Parameters::FLASH_MODE_TORCH: - reqAeMode = ANDROID_CONTROL_AE_ON; - reqFlashMode = ANDROID_FLASH_TORCH; + reqAeMode = ANDROID_CONTROL_AE_MODE_ON; + reqFlashMode = ANDROID_FLASH_MODE_TORCH; break; case Parameters::FLASH_MODE_RED_EYE: - reqAeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE; break; + reqAeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE; break; default: ALOGE("%s: Camera %d: Unknown flash mode %d", __FUNCTION__, cameraId, flashMode); @@ -1666,7 +1666,7 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { if (res != OK) return res; float reqFocusDistance = 0; // infinity 
focus in diopters - uint8_t reqFocusMode = ANDROID_CONTROL_AF_OFF; + uint8_t reqFocusMode = ANDROID_CONTROL_AF_MODE_OFF; switch (focusMode) { case Parameters::FOCUS_MODE_AUTO: case Parameters::FOCUS_MODE_MACRO: @@ -1677,7 +1677,7 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { break; case Parameters::FOCUS_MODE_INFINITY: case Parameters::FOCUS_MODE_FIXED: - reqFocusMode = ANDROID_CONTROL_AF_OFF; + reqFocusMode = ANDROID_CONTROL_AF_MODE_OFF; break; default: ALOGE("%s: Camera %d: Unknown focus mode %d", __FUNCTION__, @@ -1716,7 +1716,7 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { if (res != OK) return res; delete[] reqFocusingAreas; - res = request->update(ANDROID_CONTROL_AE_EXP_COMPENSATION, + res = request->update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &exposureCompensation, 1); if (res != OK) return res; @@ -1758,16 +1758,16 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { if (res != OK) return res; uint8_t reqVstabMode = videoStabilization ? - ANDROID_CONTROL_VIDEO_STABILIZATION_ON : - ANDROID_CONTROL_VIDEO_STABILIZATION_OFF; + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON : + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &reqVstabMode, 1); if (res != OK) return res; uint8_t reqFaceDetectMode = enableFaceDetect ? fastInfo.bestFaceDetectMode : - (uint8_t)ANDROID_STATS_FACE_DETECTION_OFF; - res = request->update(ANDROID_STATS_FACE_DETECT_MODE, + (uint8_t)ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; + res = request->update(ANDROID_STATISTICS_FACE_DETECT_MODE, &reqFaceDetectMode, 1); if (res != OK) return res; @@ -1891,43 +1891,43 @@ const char* Parameters::formatEnumToString(int format) { int Parameters::wbModeStringToEnum(const char *wbMode) { return !wbMode ? - ANDROID_CONTROL_AWB_AUTO : + ANDROID_CONTROL_AWB_MODE_AUTO : !strcmp(wbMode, CameraParameters::WHITE_BALANCE_AUTO) ? - ANDROID_CONTROL_AWB_AUTO : + ANDROID_CONTROL_AWB_MODE_AUTO : !strcmp(wbMode, CameraParameters::WHITE_BALANCE_INCANDESCENT) ? - ANDROID_CONTROL_AWB_INCANDESCENT : + ANDROID_CONTROL_AWB_MODE_INCANDESCENT : !strcmp(wbMode, CameraParameters::WHITE_BALANCE_FLUORESCENT) ? - ANDROID_CONTROL_AWB_FLUORESCENT : + ANDROID_CONTROL_AWB_MODE_FLUORESCENT : !strcmp(wbMode, CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT) ? - ANDROID_CONTROL_AWB_WARM_FLUORESCENT : + ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT : !strcmp(wbMode, CameraParameters::WHITE_BALANCE_DAYLIGHT) ? - ANDROID_CONTROL_AWB_DAYLIGHT : + ANDROID_CONTROL_AWB_MODE_DAYLIGHT : !strcmp(wbMode, CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT) ? - ANDROID_CONTROL_AWB_CLOUDY_DAYLIGHT : + ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT : !strcmp(wbMode, CameraParameters::WHITE_BALANCE_TWILIGHT) ? - ANDROID_CONTROL_AWB_TWILIGHT : + ANDROID_CONTROL_AWB_MODE_TWILIGHT : !strcmp(wbMode, CameraParameters::WHITE_BALANCE_SHADE) ? 
- ANDROID_CONTROL_AWB_SHADE : + ANDROID_CONTROL_AWB_MODE_SHADE : -1; } const char* Parameters::wbModeEnumToString(uint8_t wbMode) { switch (wbMode) { - case ANDROID_CONTROL_AWB_AUTO: + case ANDROID_CONTROL_AWB_MODE_AUTO: return CameraParameters::WHITE_BALANCE_AUTO; - case ANDROID_CONTROL_AWB_INCANDESCENT: + case ANDROID_CONTROL_AWB_MODE_INCANDESCENT: return CameraParameters::WHITE_BALANCE_INCANDESCENT; - case ANDROID_CONTROL_AWB_FLUORESCENT: + case ANDROID_CONTROL_AWB_MODE_FLUORESCENT: return CameraParameters::WHITE_BALANCE_FLUORESCENT; - case ANDROID_CONTROL_AWB_WARM_FLUORESCENT: + case ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT: return CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT; - case ANDROID_CONTROL_AWB_DAYLIGHT: + case ANDROID_CONTROL_AWB_MODE_DAYLIGHT: return CameraParameters::WHITE_BALANCE_DAYLIGHT; - case ANDROID_CONTROL_AWB_CLOUDY_DAYLIGHT: + case ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT: return CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT; - case ANDROID_CONTROL_AWB_TWILIGHT: + case ANDROID_CONTROL_AWB_MODE_TWILIGHT: return CameraParameters::WHITE_BALANCE_TWILIGHT; - case ANDROID_CONTROL_AWB_SHADE: + case ANDROID_CONTROL_AWB_MODE_SHADE: return CameraParameters::WHITE_BALANCE_SHADE; default: ALOGE("%s: Unknown AWB mode enum: %d", @@ -1939,40 +1939,40 @@ const char* Parameters::wbModeEnumToString(uint8_t wbMode) { int Parameters::effectModeStringToEnum(const char *effectMode) { return !effectMode ? - ANDROID_CONTROL_EFFECT_OFF : + ANDROID_CONTROL_EFFECT_MODE_OFF : !strcmp(effectMode, CameraParameters::EFFECT_NONE) ? - ANDROID_CONTROL_EFFECT_OFF : + ANDROID_CONTROL_EFFECT_MODE_OFF : !strcmp(effectMode, CameraParameters::EFFECT_MONO) ? - ANDROID_CONTROL_EFFECT_MONO : + ANDROID_CONTROL_EFFECT_MODE_MONO : !strcmp(effectMode, CameraParameters::EFFECT_NEGATIVE) ? - ANDROID_CONTROL_EFFECT_NEGATIVE : + ANDROID_CONTROL_EFFECT_MODE_NEGATIVE : !strcmp(effectMode, CameraParameters::EFFECT_SOLARIZE) ? - ANDROID_CONTROL_EFFECT_SOLARIZE : + ANDROID_CONTROL_EFFECT_MODE_SOLARIZE : !strcmp(effectMode, CameraParameters::EFFECT_SEPIA) ? - ANDROID_CONTROL_EFFECT_SEPIA : + ANDROID_CONTROL_EFFECT_MODE_SEPIA : !strcmp(effectMode, CameraParameters::EFFECT_POSTERIZE) ? - ANDROID_CONTROL_EFFECT_POSTERIZE : + ANDROID_CONTROL_EFFECT_MODE_POSTERIZE : !strcmp(effectMode, CameraParameters::EFFECT_WHITEBOARD) ? - ANDROID_CONTROL_EFFECT_WHITEBOARD : + ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD : !strcmp(effectMode, CameraParameters::EFFECT_BLACKBOARD) ? - ANDROID_CONTROL_EFFECT_BLACKBOARD : + ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD : !strcmp(effectMode, CameraParameters::EFFECT_AQUA) ? - ANDROID_CONTROL_EFFECT_AQUA : + ANDROID_CONTROL_EFFECT_MODE_AQUA : -1; } int Parameters::abModeStringToEnum(const char *abMode) { return !abMode ? - ANDROID_CONTROL_AE_ANTIBANDING_AUTO : + ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO : !strcmp(abMode, CameraParameters::ANTIBANDING_AUTO) ? - ANDROID_CONTROL_AE_ANTIBANDING_AUTO : + ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO : !strcmp(abMode, CameraParameters::ANTIBANDING_OFF) ? - ANDROID_CONTROL_AE_ANTIBANDING_OFF : + ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF : !strcmp(abMode, CameraParameters::ANTIBANDING_50HZ) ? - ANDROID_CONTROL_AE_ANTIBANDING_50HZ : + ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ : !strcmp(abMode, CameraParameters::ANTIBANDING_60HZ) ? 
- ANDROID_CONTROL_AE_ANTIBANDING_60HZ : + ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ : -1; } @@ -2329,7 +2329,7 @@ Parameters::CropRegion Parameters::calculateCropRegion( // chosen to maximize its area on the sensor camera_metadata_ro_entry_t maxDigitalZoom = - staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM); + staticInfo(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); // For each zoom step by how many pixels more do we change the zoom float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) / (NUM_ZOOM_STEPS-1); diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h index 6d32bf6..4192e97 100644 --- a/services/camera/libcameraservice/camera2/Parameters.h +++ b/services/camera/libcameraservice/camera2/Parameters.h @@ -73,16 +73,16 @@ struct Parameters { FLASH_MODE_AUTO, FLASH_MODE_ON, FLASH_MODE_TORCH, - FLASH_MODE_RED_EYE = ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE, + FLASH_MODE_RED_EYE = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, FLASH_MODE_INVALID = -1 } flashMode; enum focusMode_t { - FOCUS_MODE_AUTO = ANDROID_CONTROL_AF_AUTO, - FOCUS_MODE_MACRO = ANDROID_CONTROL_AF_MACRO, - FOCUS_MODE_CONTINUOUS_VIDEO = ANDROID_CONTROL_AF_CONTINUOUS_VIDEO, - FOCUS_MODE_CONTINUOUS_PICTURE = ANDROID_CONTROL_AF_CONTINUOUS_PICTURE, - FOCUS_MODE_EDOF = ANDROID_CONTROL_AF_EDOF, + FOCUS_MODE_AUTO = ANDROID_CONTROL_AF_MODE_AUTO, + FOCUS_MODE_MACRO = ANDROID_CONTROL_AF_MODE_MACRO, + FOCUS_MODE_CONTINUOUS_VIDEO = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, + FOCUS_MODE_CONTINUOUS_PICTURE = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, + FOCUS_MODE_EDOF = ANDROID_CONTROL_AF_MODE_EDOF, FOCUS_MODE_INFINITY, FOCUS_MODE_FIXED, FOCUS_MODE_INVALID = -1 @@ -179,7 +179,7 @@ struct Parameters { focusMode_t focusMode; OverrideModes(): flashMode(FLASH_MODE_INVALID), - wbMode(ANDROID_CONTROL_AWB_OFF), + wbMode(ANDROID_CONTROL_AWB_MODE_OFF), focusMode(FOCUS_MODE_INVALID) { } }; -- cgit v1.1 From a8190fc518b6769257896605f3aee091aeb60b50 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 3 Dec 2012 17:06:56 -0800 Subject: Split off the current control block to separate file Prepare for a new implementation of step() etc. 
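For context on the code being moved below: audio_track_cblk_t is essentially a pair of free-running frame counters shared between the client (user, frames written) and AudioFlinger (server, frames mixed), and framesReady()/framesAvailable() are differences of those counters plus loop and wrap bookkeeping. A stripped-down model of just that arithmetic, leaving out looping, the base wrapping and the shared mutex (CblkModel is an illustrative name, not a real type):

#include <cstdint>

// Minimal model of the playback-side counters in audio_track_cblk_t.
// 'user' is stepped by the client as it writes frames, 'server' by the
// mixer as it consumes them; frameCount is the shared buffer capacity.
struct CblkModel {
    uint32_t user = 0;     // frames written so far
    uint32_t server = 0;   // frames consumed so far
    uint32_t frameCount;   // capacity of the shared buffer, in frames

    explicit CblkModel(uint32_t capacity) : frameCount(capacity) {}

    // Frames the mixer can read right now (playback direction).
    uint32_t framesReady() const { return user - server; }

    // Room left for the client to write into.
    uint32_t framesAvailable() const { return frameCount - (user - server); }
};

With a 4096-frame buffer, user at 5000 and server at 3500, framesReady() is 1500 and framesAvailable() is 2596; the real implementation additionally handles loop points and keeps userBase/serverBase so the counters can be mapped back into the buffer.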
Change-Id: I268421976ba577aa1fb5d7015de5441c05861190 --- media/libmedia/Android.mk | 1 + media/libmedia/AudioTrack.cpp | 176 -------------------------------- media/libmedia/AudioTrackShared.cpp | 196 ++++++++++++++++++++++++++++++++++++ 3 files changed, 197 insertions(+), 176 deletions(-) create mode 100644 media/libmedia/AudioTrackShared.cpp diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index f2b6441..a35d562 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -13,6 +13,7 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ AudioTrack.cpp \ + AudioTrackShared.cpp \ IAudioFlinger.cpp \ IAudioFlingerClient.cpp \ IAudioTrack.cpp \ diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index ac672a7..1d87ff8 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1482,180 +1482,4 @@ void AudioTrack::AudioTrackThread::resume() } } -// ========================================================================= - - -audio_track_cblk_t::audio_track_cblk_t() - : lock(Mutex::SHARED), cv(Condition::SHARED), user(0), server(0), - userBase(0), serverBase(0), frameCount_(0), - loopStart(UINT_MAX), loopEnd(UINT_MAX), loopCount(0), mVolumeLR(0x10001000), - mSendLevel(0), flags(0) -{ -} - -uint32_t audio_track_cblk_t::stepUser(size_t stepCount, size_t frameCount, bool isOut) -{ - ALOGV("stepuser %08x %08x %d", user, server, stepCount); - - uint32_t u = user; - u += stepCount; - // Ensure that user is never ahead of server for AudioRecord - if (isOut) { - // If stepServer() has been called once, switch to normal obtainBuffer() timeout period - if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS-1) { - bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; - } - } else if (u > server) { - ALOGW("stepUser occurred after track reset"); - u = server; - } - - if (u >= frameCount) { - // common case, user didn't just wrap - if (u - frameCount >= userBase ) { - userBase += frameCount; - } - } else if (u >= userBase + frameCount) { - // user just wrapped - userBase += frameCount; - } - - user = u; - - // Clear flow control error condition as new data has been written/read to/from buffer. 
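The userBase bookkeeping just above (and its serverBase twin in stepServer() below) exists so that buffer() can turn the free-running counter into an offset inside the frameCount-frame buffer with a subtraction instead of a modulo; the base is simply advanced by frameCount each time the counter passes it. A hedged sketch of that mapping (frameAddress is an illustrative helper, not part of the original code):

#include <cassert>
#include <cstddef>
#include <cstdint>

// Map a free-running frame position to an address inside a circular buffer
// of 'frameCount' frames, mirroring buffer()'s (offset - userBase) computation.
// 'base' starts at 0 and is advanced by frameCount whenever 'position' gets
// frameCount ahead of it, so position - base always stays below frameCount.
inline void* frameAddress(void* buffers, size_t frameSize,
                          uint32_t position, uint32_t base, uint32_t frameCount) {
    assert(position - base < frameCount);      // invariant kept by the step functions
    uint32_t indexInBuffer = position - base;  // equivalent to position % frameCount here
    return static_cast<int8_t*>(buffers) + indexInBuffer * frameSize;
}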
- if (flags & CBLK_UNDERRUN) { - android_atomic_and(~CBLK_UNDERRUN, &flags); - } - - return u; -} - -bool audio_track_cblk_t::stepServer(size_t stepCount, size_t frameCount, bool isOut) -{ - ALOGV("stepserver %08x %08x %d", user, server, stepCount); - - if (!tryLock()) { - ALOGW("stepServer() could not lock cblk"); - return false; - } - - uint32_t s = server; - bool flushed = (s == user); - - s += stepCount; - if (isOut) { - // Mark that we have read the first buffer so that next time stepUser() is called - // we switch to normal obtainBuffer() timeout period - if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS) { - bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS - 1; - } - // It is possible that we receive a flush() - // while the mixer is processing a block: in this case, - // stepServer() is called After the flush() has reset u & s and - // we have s > u - if (flushed) { - ALOGW("stepServer occurred after track reset"); - s = user; - } - } - - if (s >= loopEnd) { - ALOGW_IF(s > loopEnd, "stepServer: s %u > loopEnd %u", s, loopEnd); - s = loopStart; - if (--loopCount == 0) { - loopEnd = UINT_MAX; - loopStart = UINT_MAX; - } - } - - if (s >= frameCount) { - // common case, server didn't just wrap - if (s - frameCount >= serverBase ) { - serverBase += frameCount; - } - } else if (s >= serverBase + frameCount) { - // server just wrapped - serverBase += frameCount; - } - - server = s; - - if (!(flags & CBLK_INVALID)) { - cv.signal(); - } - lock.unlock(); - return true; -} - -void* audio_track_cblk_t::buffer(void *buffers, size_t frameSize, uint32_t offset) const -{ - return (int8_t *)buffers + (offset - userBase) * frameSize; -} - -uint32_t audio_track_cblk_t::framesAvailable(size_t frameCount, bool isOut) -{ - Mutex::Autolock _l(lock); - return framesAvailable_l(frameCount, isOut); -} - -uint32_t audio_track_cblk_t::framesAvailable_l(size_t frameCount, bool isOut) -{ - uint32_t u = user; - uint32_t s = server; - - if (isOut) { - uint32_t limit = (s < loopStart) ? s : loopStart; - return limit + frameCount - u; - } else { - return frameCount + u - s; - } -} - -uint32_t audio_track_cblk_t::framesReady(bool isOut) -{ - uint32_t u = user; - uint32_t s = server; - - if (isOut) { - if (u < loopEnd) { - return u - s; - } else { - // do not block on mutex shared with client on AudioFlinger side - if (!tryLock()) { - ALOGW("framesReady() could not lock cblk"); - return 0; - } - uint32_t frames = UINT_MAX; - if (loopCount >= 0) { - frames = (loopEnd - loopStart)*loopCount + u - s; - } - lock.unlock(); - return frames; - } - } else { - return s - u; - } -} - -bool audio_track_cblk_t::tryLock() -{ - // the code below simulates lock-with-timeout - // we MUST do this to protect the AudioFlinger server - // as this lock is shared with the client. - status_t err; - - err = lock.tryLock(); - if (err == -EBUSY) { // just wait a bit - usleep(1000); - err = lock.tryLock(); - } - if (err != NO_ERROR) { - // probably, the client just died. - return false; - } - return true; -} - -// ------------------------------------------------------------------------- - }; // namespace android diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp new file mode 100644 index 0000000..bee13c8 --- /dev/null +++ b/media/libmedia/AudioTrackShared.cpp @@ -0,0 +1,196 @@ +/* + * Copyright (C) 2007 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AudioTrackShared" +//#define LOG_NDEBUG 0 + +#include +#include + +namespace android { + +audio_track_cblk_t::audio_track_cblk_t() + : lock(Mutex::SHARED), cv(Condition::SHARED), user(0), server(0), + userBase(0), serverBase(0), frameCount_(0), + loopStart(UINT_MAX), loopEnd(UINT_MAX), loopCount(0), mVolumeLR(0x10001000), + mSendLevel(0), flags(0) +{ +} + +uint32_t audio_track_cblk_t::stepUser(size_t stepCount, size_t frameCount, bool isOut) +{ + ALOGV("stepuser %08x %08x %d", user, server, stepCount); + + uint32_t u = user; + u += stepCount; + // Ensure that user is never ahead of server for AudioRecord + if (isOut) { + // If stepServer() has been called once, switch to normal obtainBuffer() timeout period + if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS-1) { + bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; + } + } else if (u > server) { + ALOGW("stepUser occurred after track reset"); + u = server; + } + + if (u >= frameCount) { + // common case, user didn't just wrap + if (u - frameCount >= userBase ) { + userBase += frameCount; + } + } else if (u >= userBase + frameCount) { + // user just wrapped + userBase += frameCount; + } + + user = u; + + // Clear flow control error condition as new data has been written/read to/from buffer. + if (flags & CBLK_UNDERRUN) { + android_atomic_and(~CBLK_UNDERRUN, &flags); + } + + return u; +} + +bool audio_track_cblk_t::stepServer(size_t stepCount, size_t frameCount, bool isOut) +{ + ALOGV("stepserver %08x %08x %d", user, server, stepCount); + + if (!tryLock()) { + ALOGW("stepServer() could not lock cblk"); + return false; + } + + uint32_t s = server; + bool flushed = (s == user); + + s += stepCount; + if (isOut) { + // Mark that we have read the first buffer so that next time stepUser() is called + // we switch to normal obtainBuffer() timeout period + if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS) { + bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS - 1; + } + // It is possible that we receive a flush() + // while the mixer is processing a block: in this case, + // stepServer() is called After the flush() has reset u & s and + // we have s > u + if (flushed) { + ALOGW("stepServer occurred after track reset"); + s = user; + } + } + + if (s >= loopEnd) { + ALOGW_IF(s > loopEnd, "stepServer: s %u > loopEnd %u", s, loopEnd); + s = loopStart; + if (--loopCount == 0) { + loopEnd = UINT_MAX; + loopStart = UINT_MAX; + } + } + + if (s >= frameCount) { + // common case, server didn't just wrap + if (s - frameCount >= serverBase ) { + serverBase += frameCount; + } + } else if (s >= serverBase + frameCount) { + // server just wrapped + serverBase += frameCount; + } + + server = s; + + if (!(flags & CBLK_INVALID)) { + cv.signal(); + } + lock.unlock(); + return true; +} + +void* audio_track_cblk_t::buffer(void *buffers, size_t frameSize, uint32_t offset) const +{ + return (int8_t *)buffers + (offset - userBase) * frameSize; +} + +uint32_t audio_track_cblk_t::framesAvailable(size_t frameCount, bool isOut) +{ + Mutex::Autolock _l(lock); + return framesAvailable_l(frameCount, isOut); +} + +uint32_t audio_track_cblk_t::framesAvailable_l(size_t 
frameCount, bool isOut) +{ + uint32_t u = user; + uint32_t s = server; + + if (isOut) { + uint32_t limit = (s < loopStart) ? s : loopStart; + return limit + frameCount - u; + } else { + return frameCount + u - s; + } +} + +uint32_t audio_track_cblk_t::framesReady(bool isOut) +{ + uint32_t u = user; + uint32_t s = server; + + if (isOut) { + if (u < loopEnd) { + return u - s; + } else { + // do not block on mutex shared with client on AudioFlinger side + if (!tryLock()) { + ALOGW("framesReady() could not lock cblk"); + return 0; + } + uint32_t frames = UINT_MAX; + if (loopCount >= 0) { + frames = (loopEnd - loopStart)*loopCount + u - s; + } + lock.unlock(); + return frames; + } + } else { + return s - u; + } +} + +bool audio_track_cblk_t::tryLock() +{ + // the code below simulates lock-with-timeout + // we MUST do this to protect the AudioFlinger server + // as this lock is shared with the client. + status_t err; + + err = lock.tryLock(); + if (err == -EBUSY) { // just wait a bit + usleep(1000); + err = lock.tryLock(); + } + if (err != NO_ERROR) { + // probably, the client just died. + return false; + } + return true; +} + +} // namespace android -- cgit v1.1 From 3fb57dc603a0d3b2817b91018c03673c8341d6b4 Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Tue, 4 Dec 2012 17:16:22 -0800 Subject: Add explicit cutils to stagefright Without this, some changes I'm making over in frameworks/native result in a link-time failure to find android_atomic_dec. Change-Id: Ieb45a86ef6508816ac51589e249ff89caf7ba309 --- cmds/stagefright/Android.mk | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk index f60b1a4..a59186a 100644 --- a/cmds/stagefright/Android.mk +++ b/cmds/stagefright/Android.mk @@ -9,7 +9,7 @@ LOCAL_SRC_FILES:= \ LOCAL_SHARED_LIBRARIES := \ libstagefright libmedia libutils libbinder libstagefright_foundation \ - libjpeg libgui + libjpeg libgui libcutils LOCAL_C_INCLUDES:= \ frameworks/av/media/libstagefright \ -- cgit v1.1 From 516dacfb02d0b0eafe21114330c98ce0e7d90da9 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 3 Dec 2012 15:20:40 -0800 Subject: Respect sample aspect ratio in NuPlayer. 
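Background for this change: when a stream's sample aspect ratio (SAR) is not 1:1, the coded/cropped width differs from the width the frame should be shown at, so NuPlayer now scales the cropped width by SAR before reporting the video size. A hedged sketch of that scaling (displayWidthWithSar is an illustrative helper, not code from the patch):

#include <cstdint>
#include <cstdio>

// Width at which a cropped frame should be displayed once the sample aspect
// ratio is applied; the height is left unchanged, as in the NuPlayer hunk
// below. A SAR of 0:0 (unspecified) is treated as 1:1.
static int32_t displayWidthWithSar(int32_t croppedWidth,
                                   int32_t sarWidth, int32_t sarHeight) {
    if (sarWidth <= 0 || sarHeight <= 0) {
        return croppedWidth;
    }
    return (croppedWidth * sarWidth) / sarHeight;
}

int main() {
    // Typical anamorphic NTSC case: 720x480 coded with SAR 32:27 is
    // displayed as 853x480, i.e. roughly 16:9 (720 * 32 / 27 = 853).
    std::printf("%d\n", (int)displayWidthWithSar(720, 32, 27));
    return 0;
}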
related-to-bug: 7569402 Change-Id: I302de95d83b180bd2dc72ddd0c69a665dbce2527 --- include/media/stagefright/MetaData.h | 2 + media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 41 +++++++++--- media/libstagefright/ACodec.cpp | 14 +++-- media/libstagefright/Utils.cpp | 14 +++++ media/libstagefright/avc_utils.cpp | 76 +++++++++++++++++++++-- media/libstagefright/include/avc_utils.h | 5 +- 6 files changed, 133 insertions(+), 19 deletions(-) diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h index e91904c..be08c19 100644 --- a/include/media/stagefright/MetaData.h +++ b/include/media/stagefright/MetaData.h @@ -35,6 +35,8 @@ enum { kKeyHeight = 'heig', // int32_t, image pixel kKeyDisplayWidth = 'dWid', // int32_t, display/presentation kKeyDisplayHeight = 'dHgt', // int32_t, display/presentation + kKeySARWidth = 'sarW', // int32_t, sampleAspectRatio width + kKeySARHeight = 'sarH', // int32_t, sampleAspectRatio height // a rectangle, if absent assumed to be (0, 0, width - 1, height - 1) kKeyCropRect = 'crop', diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index f363568..746055c 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -448,7 +448,8 @@ void NuPlayer::onMessageReceived(const sp &msg) { } else if (what == ACodec::kWhatOutputFormatChanged) { if (audio) { int32_t numChannels; - CHECK(codecRequest->findInt32("channel-count", &numChannels)); + CHECK(codecRequest->findInt32( + "channel-count", &numChannels)); int32_t sampleRate; CHECK(codecRequest->findInt32("sample-rate", &sampleRate)); @@ -460,13 +461,15 @@ void NuPlayer::onMessageReceived(const sp &msg) { audio_output_flags_t flags; int64_t durationUs; - // FIXME: we should handle the case where the video decoder is created after - // we receive the format change indication. Current code will just make that - // we select deep buffer with video which should not be a problem as it should + // FIXME: we should handle the case where the video decoder + // is created after we receive the format change indication. + // Current code will just make that we select deep buffer + // with video which should not be a problem as it should // not prevent from keeping A/V sync. 
if (mVideoDecoder == NULL && mSource->getDuration(&durationUs) == OK && - durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) { + durationUs + > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) { flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER; } else { flags = AUDIO_OUTPUT_FLAG_NONE; @@ -502,17 +505,35 @@ void NuPlayer::onMessageReceived(const sp &msg) { "crop", &cropLeft, &cropTop, &cropRight, &cropBottom)); + int32_t displayWidth = cropRight - cropLeft + 1; + int32_t displayHeight = cropBottom - cropTop + 1; + ALOGV("Video output format changed to %d x %d " "(crop: %d x %d @ (%d, %d))", width, height, - (cropRight - cropLeft + 1), - (cropBottom - cropTop + 1), + displayWidth, + displayHeight, cropLeft, cropTop); + sp videoInputFormat = + mSource->getFormat(false /* audio */); + + // Take into account sample aspect ratio if necessary: + int32_t sarWidth, sarHeight; + if (videoInputFormat->findInt32("sar-width", &sarWidth) + && videoInputFormat->findInt32( + "sar-height", &sarHeight)) { + ALOGV("Sample aspect ratio %d : %d", + sarWidth, sarHeight); + + displayWidth = (displayWidth * sarWidth) / sarHeight; + + ALOGV("display dimensions %d x %d", + displayWidth, displayHeight); + } + notifyListener( - MEDIA_SET_VIDEO_SIZE, - cropRight - cropLeft + 1, - cropBottom - cropTop + 1); + MEDIA_SET_VIDEO_SIZE, displayWidth, displayHeight); } } else if (what == ACodec::kWhatShutdownCompleted) { ALOGV("%s shutdown completed", audio ? "audio" : "video"); diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 2a7b2ae..2b20ab0 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -966,17 +966,23 @@ status_t ACodec::configureCodec( err = INVALID_OPERATION; } else { if (encoder) { - if (!msg->findInt32("flac-compression-level", &compressionLevel)) { + if (!msg->findInt32( + "flac-compression-level", &compressionLevel)) { compressionLevel = 5;// default FLAC compression level } else if (compressionLevel < 0) { - ALOGW("compression level %d outside [0..8] range, using 0", compressionLevel); + ALOGW("compression level %d outside [0..8] range, " + "using 0", + compressionLevel); compressionLevel = 0; } else if (compressionLevel > 8) { - ALOGW("compression level %d outside [0..8] range, using 8", compressionLevel); + ALOGW("compression level %d outside [0..8] range, " + "using 8", + compressionLevel); compressionLevel = 8; } } - err = setupFlacCodec(encoder, numChannels, sampleRate, compressionLevel); + err = setupFlacCodec( + encoder, numChannels, sampleRate, compressionLevel); } } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { int32_t numChannels, sampleRate; diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp index 74e9222..1a6ff4b 100644 --- a/media/libstagefright/Utils.cpp +++ b/media/libstagefright/Utils.cpp @@ -85,6 +85,13 @@ status_t convertMetaDataToMessage( msg->setInt32("width", width); msg->setInt32("height", height); + + int32_t sarWidth, sarHeight; + if (meta->findInt32(kKeySARWidth, &sarWidth) + && meta->findInt32(kKeySARHeight, &sarHeight)) { + msg->setInt32("sar-width", sarWidth); + msg->setInt32("sar-height", sarHeight); + } } else if (!strncasecmp("audio/", mime, 6)) { int32_t numChannels, sampleRate; CHECK(meta->findInt32(kKeyChannelCount, &numChannels)); @@ -372,6 +379,13 @@ void convertMessageToMetaData(const sp &msg, sp &meta) { } else { ALOGW("did not find width and/or height"); } + + int32_t sarWidth, sarHeight; + if (msg->findInt32("sar-width", &sarWidth) + && msg->findInt32("sar-height", &sarHeight)) { + 
meta->setInt32(kKeySARWidth, sarWidth); + meta->setInt32(kKeySARHeight, sarHeight); + } } else if (mime.startsWith("audio/")) { int32_t numChannels; if (msg->findInt32("channel-count", &numChannels)) { diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp index a141752..b822868 100644 --- a/media/libstagefright/avc_utils.cpp +++ b/media/libstagefright/avc_utils.cpp @@ -22,6 +22,7 @@ #include #include +#include #include #include #include @@ -41,7 +42,9 @@ unsigned parseUE(ABitReader *br) { // Determine video dimensions from the sequence parameterset. void FindAVCDimensions( - const sp &seqParamSet, int32_t *width, int32_t *height) { + const sp &seqParamSet, + int32_t *width, int32_t *height, + int32_t *sarWidth, int32_t *sarHeight) { ABitReader br(seqParamSet->data() + 1, seqParamSet->size() - 1); unsigned profile_idc = br.getBits(8); @@ -129,6 +132,48 @@ void FindAVCDimensions( *height -= (frame_crop_top_offset + frame_crop_bottom_offset) * cropUnitY; } + + if (sarWidth != NULL) { + *sarWidth = 0; + } + + if (sarHeight != NULL) { + *sarHeight = 0; + } + + if (br.getBits(1)) { // vui_parameters_present_flag + unsigned sar_width = 0, sar_height = 0; + + if (br.getBits(1)) { // aspect_ratio_info_present_flag + unsigned aspect_ratio_idc = br.getBits(8); + + if (aspect_ratio_idc == 255 /* extendedSAR */) { + sar_width = br.getBits(16); + sar_height = br.getBits(16); + } else if (aspect_ratio_idc > 0 && aspect_ratio_idc < 14) { + static const int32_t kFixedSARWidth[] = { + 1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160 + }; + + static const int32_t kFixedSARHeight[] = { + 1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99 + }; + + sar_width = kFixedSARWidth[aspect_ratio_idc - 1]; + sar_height = kFixedSARHeight[aspect_ratio_idc - 1]; + } + } + + ALOGV("sample aspect ratio = %u : %u", sar_width, sar_height); + + if (sarWidth != NULL) { + *sarWidth = sar_width; + } + + if (sarHeight != NULL) { + *sarHeight = sar_height; + } + } } status_t getNextNALUnit( @@ -254,7 +299,9 @@ sp MakeAVCCodecSpecificData(const sp &accessUnit) { } int32_t width, height; - FindAVCDimensions(seqParamSet, &width, &height); + int32_t sarWidth, sarHeight; + FindAVCDimensions( + seqParamSet, &width, &height, &sarWidth, &sarHeight); size_t stopOffset; sp picParamSet = FindNAL(data, size, 8, &stopOffset); @@ -301,8 +348,29 @@ sp MakeAVCCodecSpecificData(const sp &accessUnit) { meta->setInt32(kKeyWidth, width); meta->setInt32(kKeyHeight, height); - ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)", - width, height, AVCProfileToString(profile), level / 10, level % 10); + if (sarWidth > 1 || sarHeight > 1) { + // We treat 0:0 (unspecified) as 1:1. + + meta->setInt32(kKeySARWidth, sarWidth); + meta->setInt32(kKeySARHeight, sarHeight); + + ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d) " + "SAR %d : %d", + width, + height, + AVCProfileToString(profile), + level / 10, + level % 10, + sarWidth, + sarHeight); + } else { + ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)", + width, + height, + AVCProfileToString(profile), + level / 10, + level % 10); + } return meta; } diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h index e418822..d517320 100644 --- a/media/libstagefright/include/avc_utils.h +++ b/media/libstagefright/include/avc_utils.h @@ -36,8 +36,11 @@ enum { kAVCProfileCAVLC444Intra = 0x2c }; +// Optionally returns sample aspect ratio as well. 
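As background for the VUI parsing added above: H.264 signals the sample aspect ratio either as an index into a fixed table (aspect_ratio_idc 1 to 13) or, for index 255 (Extended_SAR), as explicit 16-bit width and height fields. A small lookup sketch reusing the table values from the patch (sarFromIdc is an illustrative helper, not part of the original code):

// Resolve an H.264 aspect_ratio_idc to a sample aspect ratio. Indices 1..13
// use the fixed table (same values as kFixedSARWidth/kFixedSARHeight above);
// 255 means the SAR is carried explicitly in the bitstream. Anything else is
// left as 0:0, i.e. unspecified.
static void sarFromIdc(unsigned idc, unsigned extWidth, unsigned extHeight,
                       unsigned* sarWidth, unsigned* sarHeight) {
    static const unsigned kWidth[]  = { 1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160 };
    static const unsigned kHeight[] = { 1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33,  99 };

    *sarWidth = 0;
    *sarHeight = 0;
    if (idc == 255) {                      // Extended_SAR
        *sarWidth = extWidth;
        *sarHeight = extHeight;
    } else if (idc >= 1 && idc <= 13) {
        *sarWidth = kWidth[idc - 1];
        *sarHeight = kHeight[idc - 1];
    }
}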
void FindAVCDimensions( - const sp &seqParamSet, int32_t *width, int32_t *height); + const sp &seqParamSet, + int32_t *width, int32_t *height, + int32_t *sarWidth = NULL, int32_t *sarHeight = NULL); unsigned parseUE(ABitReader *br); -- cgit v1.1 From efc0cfb61e34c3bc688a7cbcc1ccef23922251c1 Mon Sep 17 00:00:00 2001 From: Dima Zavin Date: Tue, 11 Dec 2012 14:42:55 -0800 Subject: stagefright: recordvideo: add -o flag to specify output filename Change-Id: I6b464a7b3f7dd918565c6abbd432d779eb286fd5 Signed-off-by: Dima Zavin --- cmds/stagefright/recordvideo.cpp | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/cmds/stagefright/recordvideo.cpp b/cmds/stagefright/recordvideo.cpp index e02f111..c30c122 100644 --- a/cmds/stagefright/recordvideo.cpp +++ b/cmds/stagefright/recordvideo.cpp @@ -44,7 +44,7 @@ static void usage(const char *me) { fprintf(stderr, " -p encoder profile. see omx il header (default: encoder specific)\n"); fprintf(stderr, " -v video codec: [0] AVC [1] M4V [2] H263 (default: 0)\n"); fprintf(stderr, " -s(oftware) prefer software codec\n"); - fprintf(stderr, "The output file is /sdcard/output.mp4\n"); + fprintf(stderr, " -o filename: output file (default: /sdcard/output.mp4)\n"); exit(1); } @@ -162,12 +162,12 @@ int main(int argc, char **argv) { int level = -1; // Encoder specific default int profile = -1; // Encoder specific default int codec = 0; - const char *fileName = "/sdcard/output.mp4"; + char *fileName = "/sdcard/output.mp4"; bool preferSoftwareCodec = false; android::ProcessState::self()->startThreadPool(); int res; - while ((res = getopt(argc, argv, "b:c:f:i:n:w:t:l:p:v:hs")) >= 0) { + while ((res = getopt(argc, argv, "b:c:f:i:n:w:t:l:p:v:o:hs")) >= 0) { switch (res) { case 'b': { @@ -235,6 +235,12 @@ int main(int argc, char **argv) { break; } + case 'o': + { + fileName = optarg; + break; + } + case 's': { preferSoftwareCodec = true; -- cgit v1.1 From aef79b0676d8f0ee8ef637ec8be5ba73225b038d Mon Sep 17 00:00:00 2001 From: Jamie Gennis Date: Tue, 11 Dec 2012 17:03:12 -0800 Subject: stop using a deprecated SurfaceTextureClient ctor Change-Id: I792277b0d711da22f6b2abb7f1d837450d6af8e2 --- cmds/stagefright/stagefright.cpp | 2 +- libvideoeditor/lvpp/NativeWindowRenderer.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 1e0e7f8..148b66e 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -941,7 +941,7 @@ int main(int argc, char **argv) { CHECK(useSurfaceTexAlloc); sp texture = new SurfaceTexture(0 /* tex */); - gSurface = new SurfaceTextureClient(texture); + gSurface = new SurfaceTextureClient(texture->getBufferQueue()); } CHECK_EQ((status_t)OK, diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.cpp b/libvideoeditor/lvpp/NativeWindowRenderer.cpp index 2e15ff9..efb45e2 100755 --- a/libvideoeditor/lvpp/NativeWindowRenderer.cpp +++ b/libvideoeditor/lvpp/NativeWindowRenderer.cpp @@ -569,7 +569,7 @@ RenderInput::RenderInput(NativeWindowRenderer* renderer, GLuint textureId) : mRenderer(renderer) , mTextureId(textureId) { mST = new SurfaceTexture(mTextureId); - mSTC = new SurfaceTextureClient(mST); + mSTC = new SurfaceTextureClient(mST->getBufferQueue()); native_window_connect(mSTC.get(), NATIVE_WINDOW_API_MEDIA); } -- cgit v1.1 From 86355f5b1ef6c6434d8717c71428e3165b0fe7b5 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Tue, 11 Dec 2012 15:34:18 -0800 Subject: Fix timestamps after seek Ensure buffers are correctly 
timestamped after a seek. Change-Id: I7d76689138e4f95c0ceb9fb7a4c4d42c48568212 --- media/libstagefright/mp4/FragmentedMP4Parser.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/mp4/FragmentedMP4Parser.cpp b/media/libstagefright/mp4/FragmentedMP4Parser.cpp index 54c3d63..7aa5be9 100644 --- a/media/libstagefright/mp4/FragmentedMP4Parser.cpp +++ b/media/libstagefright/mp4/FragmentedMP4Parser.cpp @@ -319,8 +319,7 @@ status_t FragmentedMP4Parser::onSeekTo(bool wantAudio, int64_t position) { off_t totalOffset = mFirstMoofOffset; for (int i = 0; i < numSidxEntries; i++) { const SidxEntry *se = &info->mSidx[i]; - totalTime += se->mDurationUs; - if (totalTime > position) { + if (totalTime + se->mDurationUs > position) { mBuffer->setRange(0,0); mBufferPos = totalOffset; if (mFinalResult == ERROR_END_OF_STREAM) { @@ -329,9 +328,10 @@ status_t FragmentedMP4Parser::onSeekTo(bool wantAudio, int64_t position) { resumeIfNecessary(); } info->mFragments.clear(); - info->mDecodingTime = position * info->mMediaTimeScale / 1000000ll; + info->mDecodingTime = totalTime * info->mMediaTimeScale / 1000000ll; return OK; } + totalTime += se->mDurationUs; totalOffset += se->mSize; } } -- cgit v1.1 From 5736c35b841de56ce394b4879389f669b61425e6 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 4 Dec 2012 12:12:34 -0800 Subject: Prepare for track invalidation to be done by proxy Don't rely on control block to determine whether track has been marked invalid. Instead, use a local flag that can't be corrupted by client. Change-Id: I783dafe828f93c1c3d2d0e5a08105ea536436efb --- services/audioflinger/PlaybackTracks.h | 3 +++ services/audioflinger/Threads.cpp | 9 +++------ services/audioflinger/Tracks.cpp | 11 ++++++++++- 3 files changed, 16 insertions(+), 7 deletions(-) diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index 37e39a0..aaa5333 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -103,6 +103,8 @@ protected: public: void triggerEvents(AudioSystem::sync_event_t type); + void invalidate(); + bool isInvalid() const { return mIsInvalid; } virtual bool isTimedTrack() const { return false; } bool isFastTrack() const { return (mFlags & IAudioFlinger::TRACK_FAST) != 0; } virtual bool isOut() const; @@ -143,6 +145,7 @@ private: volatile float mCachedVolume; // combined master volume and stream type volume; // 'volatile' means accessed without lock or // barrier, but is read/written atomically + bool mIsInvalid; // non-resettable latch, set by invalidate() }; // end of Track class TimedTrack : public Track { diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index a285e6c..d2b2931 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1524,8 +1524,7 @@ uint32_t AudioFlinger::PlaybackThread::hasAudioSession(int sessionId) const for (size_t i = 0; i < mTracks.size(); ++i) { sp track = mTracks[i]; - if (sessionId == track->sessionId() && - !(track->mCblk->flags & CBLK_INVALID)) { + if (sessionId == track->sessionId() && !track->isInvalid()) { result |= TRACK_SESSION; break; } @@ -1543,8 +1542,7 @@ uint32_t AudioFlinger::PlaybackThread::getStrategyForSession_l(int sessionId) } for (size_t i = 0; i < mTracks.size(); i++) { sp track = mTracks[i]; - if (sessionId == track->sessionId() && - !(track->mCblk->flags & CBLK_INVALID)) { + if (sessionId == track->sessionId() && !track->isInvalid()) { return 
AudioSystem::getStrategyForStream(track->streamType()); } } @@ -1721,8 +1719,7 @@ void AudioFlinger::PlaybackThread::invalidateTracks(audio_stream_type_t streamTy for (size_t i = 0; i < size; i++) { sp t = mTracks[i]; if (t->streamType() == streamType) { - android_atomic_or(CBLK_INVALID, &t->mCblk->flags); - t->mCblk->cv.signal(); + t->invalidate(); } } } diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index e8ca5ee..9b611d2 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -323,7 +323,8 @@ AudioFlinger::PlaybackThread::Track::Track( mFlags(flags), mFastIndex(-1), mUnderrunCount(0), - mCachedVolume(1.0) + mCachedVolume(1.0), + mIsInvalid(false) { if (mCblk != NULL) { // to avoid leaking a track name, do not allocate one unless there is an mCblk @@ -834,6 +835,14 @@ bool AudioFlinger::PlaybackThread::Track::isOut() const return true; } +void AudioFlinger::PlaybackThread::Track::invalidate() +{ + // FIXME should use proxy + android_atomic_or(CBLK_INVALID, &mCblk->flags); + mCblk->cv.signal(); + mIsInvalid = true; +} + // ---------------------------------------------------------------------------- sp -- cgit v1.1 From af0351f930459098d98792d569b1436c43c536d8 Mon Sep 17 00:00:00 2001 From: Jamie Gennis Date: Wed, 12 Dec 2012 12:08:51 -0800 Subject: Revert "stop using a deprecated SurfaceTextureClient ctor" This reverts commit aef79b0676d8f0ee8ef637ec8be5ba73225b038d Change-Id: I5f6cf4d6d5dc895a9426f212dfc8a25b4bf2d23a --- cmds/stagefright/stagefright.cpp | 2 +- libvideoeditor/lvpp/NativeWindowRenderer.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 148b66e..1e0e7f8 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -941,7 +941,7 @@ int main(int argc, char **argv) { CHECK(useSurfaceTexAlloc); sp texture = new SurfaceTexture(0 /* tex */); - gSurface = new SurfaceTextureClient(texture->getBufferQueue()); + gSurface = new SurfaceTextureClient(texture); } CHECK_EQ((status_t)OK, diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.cpp b/libvideoeditor/lvpp/NativeWindowRenderer.cpp index efb45e2..2e15ff9 100755 --- a/libvideoeditor/lvpp/NativeWindowRenderer.cpp +++ b/libvideoeditor/lvpp/NativeWindowRenderer.cpp @@ -569,7 +569,7 @@ RenderInput::RenderInput(NativeWindowRenderer* renderer, GLuint textureId) : mRenderer(renderer) , mTextureId(textureId) { mST = new SurfaceTexture(mTextureId); - mSTC = new SurfaceTextureClient(mST->getBufferQueue()); + mSTC = new SurfaceTextureClient(mST); native_window_connect(mSTC.get(), NATIVE_WINDOW_API_MEDIA); } -- cgit v1.1 From 84b6440fa1d1274267deabe0bd8dc626b5548e4c Mon Sep 17 00:00:00 2001 From: Jamie Gennis Date: Wed, 12 Dec 2012 12:15:23 -0800 Subject: stop using a deprecated SurfaceTextureClient ctor" --- cmds/stagefright/stagefright.cpp | 2 +- libvideoeditor/lvpp/NativeWindowRenderer.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 1e0e7f8..148b66e 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -941,7 +941,7 @@ int main(int argc, char **argv) { CHECK(useSurfaceTexAlloc); sp texture = new SurfaceTexture(0 /* tex */); - gSurface = new SurfaceTextureClient(texture); + gSurface = new SurfaceTextureClient(texture->getBufferQueue()); } CHECK_EQ((status_t)OK, diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.cpp 
b/libvideoeditor/lvpp/NativeWindowRenderer.cpp index 2e15ff9..efb45e2 100755 --- a/libvideoeditor/lvpp/NativeWindowRenderer.cpp +++ b/libvideoeditor/lvpp/NativeWindowRenderer.cpp @@ -569,7 +569,7 @@ RenderInput::RenderInput(NativeWindowRenderer* renderer, GLuint textureId) : mRenderer(renderer) , mTextureId(textureId) { mST = new SurfaceTexture(mTextureId); - mSTC = new SurfaceTextureClient(mST); + mSTC = new SurfaceTextureClient(mST->getBufferQueue()); native_window_connect(mSTC.get(), NATIVE_WINDOW_API_MEDIA); } -- cgit v1.1 From e3aa659e9cee7df5c12a80d285cc29ab3b2cbb39 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 4 Dec 2012 12:22:46 -0800 Subject: Start isolating control block accesses in a proxy The proxy object will eventually be the only code that understands the details of the control block. This should make it easier to change the control block in the future. Initial set of control block fields that are isolated: - sample rate - send level - volume Prepare for streaming/static separation by adding a union to the control block for the new fields. Fix bug in handling of max sample rate on a track. It was only checking at re-configuration, not at each mix. Simplify OutputTrack::obtainBuffer. Change-Id: I2249f9d04f73a911a922ad1d7f6197292c74cd92 --- include/media/AudioRecord.h | 3 + include/media/AudioTrack.h | 3 + include/private/media/AudioTrackShared.h | 228 +++++++++++++++++++++++++------ media/libmedia/AudioRecord.cpp | 47 ++++--- media/libmedia/AudioTrack.cpp | 109 +++++++++------ media/libmedia/AudioTrackShared.cpp | 2 +- services/audioflinger/AudioFlinger.h | 1 + services/audioflinger/PlaybackTracks.h | 3 +- services/audioflinger/RecordTracks.h | 2 - services/audioflinger/Threads.cpp | 21 +-- services/audioflinger/TrackBase.h | 9 +- services/audioflinger/Tracks.cpp | 97 ++++++------- 12 files changed, 355 insertions(+), 170 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index ae444c3..38c6548 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -29,6 +29,7 @@ namespace android { class audio_track_cblk_t; +class AudioRecordClientProxy; // ---------------------------------------------------------------------------- @@ -374,6 +375,7 @@ private: uint32_t mUpdatePeriod; // in ms // constant after constructor or set() + uint32_t mSampleRate; size_t mFrameCount; audio_format_t mFormat; uint8_t mChannelCount; @@ -393,6 +395,7 @@ private: int mPreviousPriority; // before start() SchedPolicy mPreviousSchedulingGroup; + AudioRecordClientProxy* mProxy; }; }; // namespace android diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 6f85527..9d07ed5 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -36,6 +36,7 @@ namespace android { // ---------------------------------------------------------------------------- class audio_track_cblk_t; +class AudioTrackClientProxy; // ---------------------------------------------------------------------------- @@ -538,6 +539,7 @@ protected: float mVolume[2]; float mSendLevel; + uint32_t mSampleRate; size_t mFrameCount; // corresponds to current IAudioTrack size_t mReqFrameCount; // frame count to request the next time a new // IAudioTrack is needed @@ -596,6 +598,7 @@ protected: bool mIsTimed; int mPreviousPriority; // before start() SchedPolicy mPreviousSchedulingGroup; + AudioTrackClientProxy* mProxy; }; class TimedAudioTrack : public AudioTrack diff --git a/include/private/media/AudioTrackShared.h 
b/include/private/media/AudioTrackShared.h index 48b6b21..41e20f8 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -21,6 +21,7 @@ #include #include +#include namespace android { @@ -38,9 +39,26 @@ namespace android { #define CBLK_INVALID 0x04 // track buffer invalidated by AudioFlinger, need to re-create #define CBLK_DISABLED 0x08 // track disabled by AudioFlinger due to underrun, need to re-start +struct AudioTrackSharedStreaming { + // similar to NBAIO MonoPipe + volatile int32_t mFront; + volatile int32_t mRear; +}; + +// future +struct AudioTrackSharedStatic { + int mReserved; +}; + +// ---------------------------------------------------------------------------- + // Important: do not add any virtual methods, including ~ struct audio_track_cblk_t { + friend class Proxy; + friend class AudioTrackClientProxy; + friend class AudioRecordClientProxy; + friend class ServerProxy; // The data members are grouped so that members accessed frequently and in the same context // are in the same line of data cache. @@ -72,12 +90,13 @@ struct audio_track_cblk_t // For AudioTrack only, not used by AudioRecord. private: uint32_t mVolumeLR; -public: - uint32_t sampleRate; + uint32_t mSampleRate; // AudioTrack only: client's requested sample rate in Hz + // or 0 == default. Write-only client, read-only server. uint8_t mPad2; // unused +public: // read-only for client, server writes once at initialization and is then read-only uint8_t mName; // normal tracks: track name, fast tracks: track index @@ -94,65 +113,184 @@ public: // Cache line boundary (32 bytes) +#if 0 + union { + AudioTrackSharedStreaming mStreaming; + AudioTrackSharedStatic mStatic; + int mAlign[8]; + } u; + + // Cache line boundary (32 bytes) +#endif + // Since the control block is always located in shared memory, this constructor // is only used for placement new(). It is never used for regular new() or stack. audio_track_cblk_t(); - // called by client only, where client includes regular - // AudioTrack and AudioFlinger::PlaybackThread::OutputTrack - uint32_t stepUserIn(size_t stepCount, size_t frameCount) { return stepUser(stepCount, frameCount, false); } - uint32_t stepUserOut(size_t stepCount, size_t frameCount) { return stepUser(stepCount, frameCount, true); } - - bool stepServer(size_t stepCount, size_t frameCount, bool isOut); - +private: // if there is a shared buffer, "buffers" is the value of pointer() for the shared // buffer, otherwise "buffers" points immediately after the control block - void* buffer(void *buffers, uint32_t frameSize, uint32_t offset) const; - - uint32_t framesAvailableIn(size_t frameCount) - { return framesAvailable(frameCount, false); } - uint32_t framesAvailableOut(size_t frameCount) - { return framesAvailable(frameCount, true); } - uint32_t framesAvailableIn_l(size_t frameCount) - { return framesAvailable_l(frameCount, false); } - uint32_t framesAvailableOut_l(size_t frameCount) - { return framesAvailable_l(frameCount, true); } - uint32_t framesReadyIn() { return framesReady(false); } - uint32_t framesReadyOut() { return framesReady(true); } + void* buffer(void *buffers, uint32_t frameSize, size_t offset) const; bool tryLock(); - // No barriers on the following operations, so the ordering of loads/stores - // with respect to other parameters is UNPREDICTABLE. That's considered safe. 
- - // for AudioTrack client only, caller must limit to 0.0 <= sendLevel <= 1.0 - void setSendLevel(float sendLevel) { - mSendLevel = uint16_t(sendLevel * 0x1000); - } - - // for AudioFlinger only; the return value must be validated by the caller - uint16_t getSendLevel_U4_12() const { - return mSendLevel; - } - - // for AudioTrack client only, caller must limit to 0 <= volumeLR <= 0x10001000 - void setVolumeLR(uint32_t volumeLR) { - mVolumeLR = volumeLR; - } - - // for AudioFlinger only; the return value must be validated by the caller - uint32_t getVolumeLR() const { - return mVolumeLR; - } - -private: // isOut == true means AudioTrack, isOut == false means AudioRecord + bool stepServer(size_t stepCount, size_t frameCount, bool isOut); uint32_t stepUser(size_t stepCount, size_t frameCount, bool isOut); uint32_t framesAvailable(size_t frameCount, bool isOut); uint32_t framesAvailable_l(size_t frameCount, bool isOut); uint32_t framesReady(bool isOut); }; +// ---------------------------------------------------------------------------- + +// Proxy for shared memory control block, to isolate callers from needing to know the details. +// There is exactly one ClientProxy and one ServerProxy per shared memory control block. +// The proxies are located in normal memory, and are not multi-thread safe within a given side. +class Proxy { +protected: + Proxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize) + : mCblk(cblk), mBuffers(buffers), mFrameCount(frameCount), mFrameSize(frameSize) { } + virtual ~Proxy() { } + +public: + void* buffer(size_t offset) const { + return mCblk->buffer(mBuffers, mFrameSize, offset); + } + +protected: + // These refer to shared memory, and are virtual addresses with respect to the current process. + // They may have different virtual addresses within the other process. + audio_track_cblk_t* const mCblk; // the control block + void* const mBuffers; // starting address of buffers + + const size_t mFrameCount; // not necessarily a power of 2 + const size_t mFrameSize; // in bytes +#if 0 + const size_t mFrameCountP2; // mFrameCount rounded to power of 2, streaming mode +#endif + +}; + +// ---------------------------------------------------------------------------- + +// Proxy seen by AudioTrack client and AudioRecord client +class ClientProxy : public Proxy { +protected: + ClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize) + : Proxy(cblk, buffers, frameCount, frameSize) { } + virtual ~ClientProxy() { } +}; + +// ---------------------------------------------------------------------------- + +// Proxy used by AudioTrack client, which also includes AudioFlinger::PlaybackThread::OutputTrack +class AudioTrackClientProxy : public ClientProxy { +public: + AudioTrackClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize) + : ClientProxy(cblk, buffers, frameCount, frameSize) { } + virtual ~AudioTrackClientProxy() { } + + // No barriers on the following operations, so the ordering of loads/stores + // with respect to other parameters is UNPREDICTABLE. That's considered safe. 
+ + // caller must limit to 0.0 <= sendLevel <= 1.0 + void setSendLevel(float sendLevel) { + mCblk->mSendLevel = uint16_t(sendLevel * 0x1000); + } + + // caller must limit to 0 <= volumeLR <= 0x10001000 + void setVolumeLR(uint32_t volumeLR) { + mCblk->mVolumeLR = volumeLR; + } + + void setSampleRate(uint32_t sampleRate) { + mCblk->mSampleRate = sampleRate; + } + + // called by: + // PlaybackThread::OutputTrack::write + // AudioTrack::createTrack_l + // AudioTrack::releaseBuffer + // AudioTrack::reload + // AudioTrack::restoreTrack_l (2 places) + size_t stepUser(size_t stepCount) { + return mCblk->stepUser(stepCount, mFrameCount, true /*isOut*/); + } + + // called by AudioTrack::obtainBuffer and AudioTrack::processBuffer + size_t framesAvailable() { + return mCblk->framesAvailable(mFrameCount, true /*isOut*/); + } + + // called by AudioTrack::obtainBuffer and PlaybackThread::OutputTrack::obtainBuffer + // FIXME remove this API since it assumes a lock that should be invisible to caller + size_t framesAvailable_l() { + return mCblk->framesAvailable_l(mFrameCount, true /*isOut*/); + } + +}; + +// ---------------------------------------------------------------------------- + +// Proxy used by AudioRecord client +class AudioRecordClientProxy : public ClientProxy { +public: + AudioRecordClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize) + : ClientProxy(cblk, buffers, frameCount, frameSize) { } + ~AudioRecordClientProxy() { } + + // called by AudioRecord::releaseBuffer + size_t stepUser(size_t stepCount) { + return mCblk->stepUser(stepCount, mFrameCount, false /*isOut*/); + } + + // called by AudioRecord::processBuffer + size_t framesAvailable() { + return mCblk->framesAvailable(mFrameCount, false /*isOut*/); + } + + // called by AudioRecord::obtainBuffer + size_t framesReady() { + return mCblk->framesReady(false /*isOut*/); + } + +}; + +// ---------------------------------------------------------------------------- + +// Proxy used by AudioFlinger server +class ServerProxy : public Proxy { +public: + ServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize, bool isOut) + : Proxy(cblk, buffers, frameCount, frameSize), mIsOut(isOut) { } + virtual ~ServerProxy() { } + + // for AudioTrack and AudioRecord + bool step(size_t stepCount) { return mCblk->stepServer(stepCount, mFrameCount, mIsOut); } + + // return value of these methods must be validated by the caller + uint32_t getSampleRate() const { return mCblk->mSampleRate; } + uint16_t getSendLevel_U4_12() const { return mCblk->mSendLevel; } + uint32_t getVolumeLR() const { return mCblk->mVolumeLR; } + + // for AudioTrack only + size_t framesReady() { + ALOG_ASSERT(mIsOut); + return mCblk->framesReady(true); + } + + // for AudioRecord only, called by RecordThread::RecordTrack::getNextBuffer + // FIXME remove this API since it assumes a lock that should be invisible to caller + size_t framesAvailableIn_l() { + ALOG_ASSERT(!mIsOut); + return mCblk->framesAvailable_l(mFrameCount, false); + } + +private: + const bool mIsOut; // true for AudioTrack, false for AudioRecord + +}; // ---------------------------------------------------------------------------- diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index c2ef68c..8eb1656 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -75,7 +75,8 @@ status_t AudioRecord::getMinFrameCount( AudioRecord::AudioRecord() : mStatus(NO_INIT), mSessionId(0), - 
mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT) + mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT), + mProxy(NULL) { } @@ -90,7 +91,9 @@ AudioRecord::AudioRecord( int notificationFrames, int sessionId) : mStatus(NO_INIT), mSessionId(0), - mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT) + mPreviousPriority(ANDROID_PRIORITY_NORMAL), + mPreviousSchedulingGroup(SP_DEFAULT), + mProxy(NULL) { mStatus = set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user, notificationFrames, sessionId); @@ -112,6 +115,7 @@ AudioRecord::~AudioRecord() IPCThreadState::self()->flushCommands(); AudioSystem::releaseAudioSessionId(mSessionId); } + delete mProxy; } status_t AudioRecord::set( @@ -149,6 +153,8 @@ status_t AudioRecord::set( if (sampleRate == 0) { sampleRate = DEFAULT_SAMPLE_RATE; } + mSampleRate = sampleRate; + // these below should probably come from the audioFlinger too... if (format == AUDIO_FORMAT_DEFAULT) { format = AUDIO_FORMAT_PCM_16_BIT; @@ -166,6 +172,12 @@ status_t AudioRecord::set( uint32_t channelCount = popcount(channelMask); mChannelCount = channelCount; + if (audio_is_linear_pcm(mFormat)) { + mFrameSize = channelCount * audio_bytes_per_sample(format); + } else { + mFrameSize = sizeof(uint8_t); + } + if (sessionId == 0 ) { mSessionId = AudioSystem::newAudioSessionId(); } else { @@ -218,12 +230,6 @@ status_t AudioRecord::set( // Update buffer size in case it has been limited by AudioFlinger during track creation mFrameCount = mCblk->frameCount_; - if (audio_is_linear_pcm(mFormat)) { - mFrameSize = channelCount * audio_bytes_per_sample(format); - } else { - mFrameSize = sizeof(uint8_t); - } - mActive = false; mCbf = cbf; mNotificationFrames = notificationFrames; @@ -360,7 +366,7 @@ bool AudioRecord::stopped() const uint32_t AudioRecord::getSampleRate() const { - return mCblk->sampleRate; + return mSampleRate; } status_t AudioRecord::setMarkerPosition(uint32_t marker) @@ -473,11 +479,18 @@ status_t AudioRecord::openRecord_l( mBuffers = (char*)cblk + sizeof(audio_track_cblk_t); cblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; cblk->waitTimeMs = 0; + + // update proxy + delete mProxy; + mProxy = new AudioRecordClientProxy(cblk, mBuffers, frameCount, mFrameSize); + return NO_ERROR; } status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) { + ALOG_ASSERT(mStatus == NO_ERROR && mProxy != NULL); + AutoMutex lock(mLock); bool active; status_t result = NO_ERROR; @@ -488,7 +501,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) audioBuffer->frameCount = 0; audioBuffer->size = 0; - uint32_t framesReady = cblk->framesReadyIn(); + size_t framesReady = mProxy->framesReady(); if (framesReady == 0) { cblk->lock.lock(); @@ -551,7 +564,7 @@ create_new_record: } // read the server count again start_loop_here: - framesReady = cblk->framesReadyIn(); + framesReady = mProxy->framesReady(); } cblk->lock.unlock(); } @@ -573,15 +586,17 @@ create_new_record: audioBuffer->frameCount = framesReq; audioBuffer->size = framesReq * mFrameSize; - audioBuffer->raw = cblk->buffer(mBuffers, mFrameSize, u); + audioBuffer->raw = mProxy->buffer(u); active = mActive; return active ? 
status_t(NO_ERROR) : status_t(STOPPED); } void AudioRecord::releaseBuffer(Buffer* audioBuffer) { + ALOG_ASSERT(mStatus == NO_ERROR && mProxy != NULL); + AutoMutex lock(mLock); - mCblk->stepUserIn(audioBuffer->frameCount, mFrameCount); + (void) mProxy->stepUser(audioBuffer->frameCount); } audio_io_handle_t AudioRecord::getInput() const @@ -594,7 +609,7 @@ audio_io_handle_t AudioRecord::getInput() const audio_io_handle_t AudioRecord::getInput_l() { mInput = AudioSystem::getInput(mInputSource, - mCblk->sampleRate, + mSampleRate, mFormat, mChannelMask, mSessionId); @@ -745,7 +760,7 @@ bool AudioRecord::processAudioBuffer(const sp& thread) // Manage overrun callback - if (active && (cblk->framesAvailableIn(mFrameCount) == 0)) { + if (active && (mProxy->framesAvailable() == 0)) { // The value of active is stale, but we are almost sure to be active here because // otherwise we would have exited when obtainBuffer returned STOPPED earlier. ALOGV("Overrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); @@ -781,7 +796,7 @@ status_t AudioRecord::restoreRecord_l(audio_track_cblk_t*& refCblk) // if the new IAudioRecord is created, openRecord_l() will modify the // following member variables: mAudioRecord, mCblkMemory and mCblk. // It will also delete the strong references on previous IAudioRecord and IMemory - result = openRecord_l(cblk->sampleRate, mFormat, mFrameCount, getInput_l()); + result = openRecord_l(mSampleRate, mFormat, mFrameCount, getInput_l()); if (result == NO_ERROR) { newCblk = mCblk; // callback thread or sync event hasn't changed diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 1d87ff8..86a5579 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -97,7 +97,8 @@ AudioTrack::AudioTrack() : mStatus(NO_INIT), mIsTimed(false), mPreviousPriority(ANDROID_PRIORITY_NORMAL), - mPreviousSchedulingGroup(SP_DEFAULT) + mPreviousSchedulingGroup(SP_DEFAULT), + mProxy(NULL) { } @@ -115,7 +116,8 @@ AudioTrack::AudioTrack( : mStatus(NO_INIT), mIsTimed(false), mPreviousPriority(ANDROID_PRIORITY_NORMAL), - mPreviousSchedulingGroup(SP_DEFAULT) + mPreviousSchedulingGroup(SP_DEFAULT), + mProxy(NULL) { mStatus = set(streamType, sampleRate, format, channelMask, frameCount, flags, cbf, user, notificationFrames, @@ -136,7 +138,8 @@ AudioTrack::AudioTrack( : mStatus(NO_INIT), mIsTimed(false), mPreviousPriority(ANDROID_PRIORITY_NORMAL), - mPreviousSchedulingGroup(SP_DEFAULT) + mPreviousSchedulingGroup(SP_DEFAULT), + mProxy(NULL) { if (sharedBuffer == 0) { ALOGE("sharedBuffer must be non-0"); @@ -166,6 +169,7 @@ AudioTrack::~AudioTrack() IPCThreadState::self()->flushCommands(); AudioSystem::releaseAudioSessionId(mSessionId); } + delete mProxy; } status_t AudioTrack::set( @@ -212,6 +216,7 @@ status_t AudioTrack::set( } sampleRate = afSampleRate; } + mSampleRate = sampleRate; // these below should probably come from the audioFlinger too... 
if (format == AUDIO_FORMAT_DEFAULT) { @@ -252,6 +257,14 @@ status_t AudioTrack::set( uint32_t channelCount = popcount(channelMask); mChannelCount = channelCount; + if (audio_is_linear_pcm(format)) { + mFrameSize = channelCount * audio_bytes_per_sample(format); + mFrameSizeAF = channelCount * sizeof(int16_t); + } else { + mFrameSize = sizeof(uint8_t); + mFrameSizeAF = sizeof(uint8_t); + } + audio_io_handle_t output = AudioSystem::getOutput( streamType, sampleRate, format, channelMask, @@ -300,14 +313,6 @@ status_t AudioTrack::set( mStreamType = streamType; mFormat = format; - if (audio_is_linear_pcm(format)) { - mFrameSize = channelCount * audio_bytes_per_sample(format); - mFrameSizeAF = channelCount * sizeof(int16_t); - } else { - mFrameSize = sizeof(uint8_t); - mFrameSizeAF = sizeof(uint8_t); - } - mSharedBuffer = sharedBuffer; mActive = false; mUserData = user; @@ -460,6 +465,11 @@ void AudioTrack::pause() status_t AudioTrack::setVolume(float left, float right) { + if (mStatus != NO_ERROR) { + return mStatus; + } + ALOG_ASSERT(mProxy != NULL); + if (left < 0.0f || left > 1.0f || right < 0.0f || right > 1.0f) { return BAD_VALUE; } @@ -468,7 +478,7 @@ status_t AudioTrack::setVolume(float left, float right) mVolume[LEFT] = left; mVolume[RIGHT] = right; - mCblk->setVolumeLR((uint32_t(uint16_t(right * 0x1000)) << 16) | uint16_t(left * 0x1000)); + mProxy->setVolumeLR((uint32_t(uint16_t(right * 0x1000)) << 16) | uint16_t(left * 0x1000)); return NO_ERROR; } @@ -481,14 +491,19 @@ status_t AudioTrack::setVolume(float volume) status_t AudioTrack::setAuxEffectSendLevel(float level) { ALOGV("setAuxEffectSendLevel(%f)", level); + + if (mStatus != NO_ERROR) { + return mStatus; + } + ALOG_ASSERT(mProxy != NULL); + if (level < 0.0f || level > 1.0f) { return BAD_VALUE; } AutoMutex lock(mLock); mSendLevel = level; - - mCblk->setSendLevel(level); + mProxy->setSendLevel(level); return NO_ERROR; } @@ -517,7 +532,9 @@ status_t AudioTrack::setSampleRate(uint32_t rate) } AutoMutex lock(mLock); - mCblk->sampleRate = rate; + mSampleRate = rate; + mProxy->setSampleRate(rate); + return NO_ERROR; } @@ -528,7 +545,7 @@ uint32_t AudioTrack::getSampleRate() const } AutoMutex lock(mLock); - return mCblk->sampleRate; + return mSampleRate; } status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount) @@ -665,6 +682,11 @@ status_t AudioTrack::getPosition(uint32_t *position) status_t AudioTrack::reload() { + if (mStatus != NO_ERROR) { + return mStatus; + } + ALOG_ASSERT(mProxy != NULL); + if (mSharedBuffer == 0 || mIsTimed) { return INVALID_OPERATION; } @@ -677,8 +699,7 @@ status_t AudioTrack::reload() flush_l(); - audio_track_cblk_t* cblk = mCblk; - cblk->stepUserOut(mFrameCount, mFrameCount); + (void) mProxy->stepUser(mFrameCount); return NO_ERROR; } @@ -693,7 +714,7 @@ audio_io_handle_t AudioTrack::getOutput() audio_io_handle_t AudioTrack::getOutput_l() { return AudioSystem::getOutput(mStreamType, - mCblk->sampleRate, mFormat, mChannelMask, mFlags); + mSampleRate, mFormat, mChannelMask, mFlags); } status_t AudioTrack::attachAuxEffect(int effectId) @@ -890,13 +911,8 @@ status_t AudioTrack::createTrack_l( mBuffers = (char*)cblk + sizeof(audio_track_cblk_t); } else { mBuffers = sharedBuffer->pointer(); - // Force buffer full condition as data is already present in shared memory - cblk->stepUserOut(frameCount, frameCount); } - cblk->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) | - uint16_t(mVolume[LEFT] * 0x1000)); - cblk->setSendLevel(mSendLevel); 
mAudioTrack->attachAuxEffect(mAuxEffectId); cblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; cblk->waitTimeMs = 0; @@ -909,11 +925,26 @@ status_t AudioTrack::createTrack_l( if (frameCount > mReqFrameCount) { mReqFrameCount = frameCount; } + + // update proxy + delete mProxy; + mProxy = new AudioTrackClientProxy(cblk, mBuffers, frameCount, mFrameSizeAF); + mProxy->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) | + uint16_t(mVolume[LEFT] * 0x1000)); + mProxy->setSendLevel(mSendLevel); + mProxy->setSampleRate(mSampleRate); + if (sharedBuffer != 0) { + // Force buffer full condition as data is already present in shared memory + mProxy->stepUser(frameCount); + } + return NO_ERROR; } status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) { + ALOG_ASSERT(mStatus == NO_ERROR && mProxy != NULL); + AutoMutex lock(mLock); bool active; status_t result = NO_ERROR; @@ -924,7 +955,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) audioBuffer->frameCount = 0; audioBuffer->size = 0; - uint32_t framesAvail = cblk->framesAvailableOut(mFrameCount); + size_t framesAvail = mProxy->framesAvailable(); cblk->lock.lock(); if (cblk->flags & CBLK_INVALID) { @@ -1000,7 +1031,7 @@ create_new_track: } // read the server count again start_loop_here: - framesAvail = cblk->framesAvailableOut_l(mFrameCount); + framesAvail = mProxy->framesAvailable_l(); } cblk->lock.unlock(); } @@ -1020,16 +1051,18 @@ create_new_track: audioBuffer->frameCount = framesReq; audioBuffer->size = framesReq * mFrameSizeAF; - audioBuffer->raw = cblk->buffer(mBuffers, mFrameSizeAF, u); + audioBuffer->raw = mProxy->buffer(u); active = mActive; return active ? status_t(NO_ERROR) : status_t(STOPPED); } void AudioTrack::releaseBuffer(Buffer* audioBuffer) { + ALOG_ASSERT(mStatus == NO_ERROR && mProxy != NULL); + AutoMutex lock(mLock); audio_track_cblk_t* cblk = mCblk; - cblk->stepUserOut(audioBuffer->frameCount, mFrameCount); + (void) mProxy->stepUser(audioBuffer->frameCount); if (audioBuffer->frameCount > 0) { // restart track if it was disabled by audioflinger due to previous underrun if (mActive && (cblk->flags & CBLK_DISABLED)) { @@ -1199,7 +1232,7 @@ bool AudioTrack::processAudioBuffer(const sp& thread) // so all cblk references might still refer to old shared memory, but that should be benign // Manage underrun callback - if (active && (cblk->framesAvailableOut(mFrameCount) == mFrameCount)) { + if (active && (mProxy->framesAvailable() == mFrameCount)) { ALOGV("Underrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); if (!(android_atomic_or(CBLK_UNDERRUN, &cblk->flags) & CBLK_UNDERRUN)) { mCbf(EVENT_UNDERRUN, mUserData, 0); @@ -1346,7 +1379,7 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart // following member variables: mAudioTrack, mCblkMemory and mCblk. 
// It will also delete the strong references on previous IAudioTrack and IMemory result = createTrack_l(mStreamType, - cblk->sampleRate, + mSampleRate, mFormat, mReqFrameCount, // so that frame count never goes down mFlags, @@ -1365,12 +1398,12 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart // restore loop: this is not guaranteed to succeed if new frame count is not // compatible with loop length setLoop_l(cblk->loopStart, cblk->loopEnd, cblk->loopCount); + size_t frames = 0; if (!fromStart) { newCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; // Make sure that a client relying on callback events indicating underrun or // the actual amount of audio frames played (e.g SoundPool) receives them. if (mSharedBuffer == 0) { - uint32_t frames = 0; if (user > server) { frames = ((user - server) > mFrameCount) ? mFrameCount : (user - server); @@ -1378,13 +1411,15 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart } // restart playback even if buffer is not completely filled. android_atomic_or(CBLK_FORCEREADY, &newCblk->flags); - // stepUser() clears CBLK_UNDERRUN flag enabling underrun callbacks to - // the client - newCblk->stepUserOut(frames, mFrameCount); } } if (mSharedBuffer != 0) { - newCblk->stepUserOut(mFrameCount, mFrameCount); + frames = mFrameCount; + } + if (frames > 0) { + // stepUser() clears CBLK_UNDERRUN flag enabling underrun callbacks to + // the client + mProxy->stepUser(frames); } if (mActive) { result = mAudioTrack->start(); @@ -1416,7 +1451,6 @@ status_t AudioTrack::dump(int fd, const Vector& args) const char buffer[SIZE]; String8 result; - audio_track_cblk_t* cblk = mCblk; result.append(" AudioTrack::dump\n"); snprintf(buffer, 255, " stream type(%d), left - right volume(%f, %f)\n", mStreamType, mVolume[0], mVolume[1]); @@ -1424,8 +1458,7 @@ status_t AudioTrack::dump(int fd, const Vector& args) const snprintf(buffer, 255, " format(%d), channel count(%d), frame count(%d)\n", mFormat, mChannelCount, mFrameCount); result.append(buffer); - snprintf(buffer, 255, " sample rate(%u), status(%d)\n", - (cblk == 0) ? 
0 : cblk->sampleRate, mStatus); + snprintf(buffer, 255, " sample rate(%u), status(%d)\n", mSampleRate, mStatus); result.append(buffer); snprintf(buffer, 255, " active(%d), latency (%d)\n", mActive, mLatency); result.append(buffer); diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index bee13c8..13d47c9 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -26,7 +26,7 @@ audio_track_cblk_t::audio_track_cblk_t() : lock(Mutex::SHARED), cv(Condition::SHARED), user(0), server(0), userBase(0), serverBase(0), frameCount_(0), loopStart(UINT_MAX), loopEnd(UINT_MAX), loopCount(0), mVolumeLR(0x10001000), - mSendLevel(0), flags(0) + mSampleRate(0), mSendLevel(0), flags(0) { } diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 6d3f0a1..64a9871 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -61,6 +61,7 @@ class AudioMixer; class AudioBuffer; class AudioResampler; class FastMixer; +class ServerProxy; // ---------------------------------------------------------------------------- diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index aaa5333..adec938 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -107,7 +107,6 @@ public: bool isInvalid() const { return mIsInvalid; } virtual bool isTimedTrack() const { return false; } bool isFastTrack() const { return (mFlags & IAudioFlinger::TRACK_FAST) != 0; } - virtual bool isOut() const; protected: @@ -277,5 +276,5 @@ private: AudioBufferProvider::Buffer mOutBuffer; bool mActive; DuplicatingThread* const mSourceThread; // for waitTimeMs() in write() - void* mBuffers; // starting address of buffers in plain memory + AudioTrackClientProxy* mClientProxy; }; // end of OutputTrack diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h index fe681d7..6c0d1d3 100644 --- a/services/audioflinger/RecordTracks.h +++ b/services/audioflinger/RecordTracks.h @@ -45,8 +45,6 @@ public: static void appendDumpHeader(String8& result); void dump(char* buffer, size_t size); - virtual bool isOut() const; - private: friend class AudioFlinger; // for mState diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index d2b2931..82acd3a 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2643,7 +2643,8 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // read original volumes with volume control float typeVolume = mStreamTypes[track->streamType()].volume; float v = masterVolume * typeVolume; - uint32_t vlr = cblk->getVolumeLR(); + ServerProxy *proxy = track->mServerProxy; + uint32_t vlr = proxy->getVolumeLR(); vl = vlr & 0xFFFF; vr = vlr >> 16; // track volumes come from shared memory, so can't be trusted and must be clamped @@ -2661,7 +2662,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // assuming master volume and stream type volume each go up to 1.0, // vl and vr are now in 8.24 format - uint16_t sendLevel = cblk->getSendLevel_U4_12(); + uint16_t sendLevel = proxy->getSendLevel_U4_12(); // send level comes from shared memory and so may be corrupt if (sendLevel > MAX_GAIN_INT) { ALOGV("Track send level out of range: %04X", sendLevel); @@ -2713,11 +2714,19 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac name, 
AudioMixer::TRACK, AudioMixer::CHANNEL_MASK, (void *)track->channelMask()); + // limit track sample rate to 2 x output sample rate, which changes at re-configuration + uint32_t maxSampleRate = mSampleRate * 2; + uint32_t reqSampleRate = track->mServerProxy->getSampleRate(); + if (reqSampleRate == 0) { + reqSampleRate = mSampleRate; + } else if (reqSampleRate > maxSampleRate) { + reqSampleRate = maxSampleRate; + } mAudioMixer->setParameter( name, AudioMixer::RESAMPLE, AudioMixer::SAMPLE_RATE, - (void *)(cblk->sampleRate)); + (void *)reqSampleRate); mAudioMixer->setParameter( name, AudioMixer::TRACK, @@ -2990,10 +2999,6 @@ bool AudioFlinger::MixerThread::checkForNewParameters_l() break; } mTracks[i]->mName = name; - // limit track sample rate to 2 x new output sample rate - if (mTracks[i]->mCblk->sampleRate > 2 * sampleRate()) { - mTracks[i]->mCblk->sampleRate = 2 * sampleRate(); - } } sendIoConfigEvent_l(AudioSystem::OUTPUT_CONFIG_CHANGED); } @@ -3142,7 +3147,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep } else { float typeVolume = mStreamTypes[track->streamType()].volume; float v = mMasterVolume * typeVolume; - uint32_t vlr = cblk->getVolumeLR(); + uint32_t vlr = track->mServerProxy->getVolumeLR(); float v_clamped = v * (vlr & 0xFFFF); if (v_clamped > MAX_GAIN) { v_clamped = MAX_GAIN; diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h index 17de49b..e0bd97a 100644 --- a/services/audioflinger/TrackBase.h +++ b/services/audioflinger/TrackBase.h @@ -44,7 +44,8 @@ public: audio_channel_mask_t channelMask, size_t frameCount, const sp& sharedBuffer, - int sessionId); + int sessionId, + bool isOut); virtual ~TrackBase(); virtual status_t start(AudioSystem::sync_event_t event, @@ -108,7 +109,8 @@ protected: bool step(); // mStepCount is an implicit input void reset(); - virtual bool isOut() const = 0; // true for Track and TimedTrack, false for RecordTrack, + bool isOut() const { return mIsOut; } + // true for Track and TimedTrack, false for RecordTrack, // this could be a track type if needed later const wp mThread; @@ -116,6 +118,7 @@ protected: sp mCblkMemory; audio_track_cblk_t* mCblk; void* mBuffer; // start of track buffer, typically in shared memory + // except for OutputTrack when it is in local memory void* mBufferEnd; // &mBuffer[mFrameCount * frameSize], where frameSize // is based on mChannelCount and 16-bit samples uint32_t mStepCount; // saves AudioBufferProvider::Buffer::frameCount as of @@ -136,4 +139,6 @@ protected: bool mStepServerFailed; const int mSessionId; Vector < sp >mSyncEvents; + const bool mIsOut; + ServerProxy* mServerProxy; }; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 9b611d2..c5f0ed7 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -62,7 +62,8 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( audio_channel_mask_t channelMask, size_t frameCount, const sp& sharedBuffer, - int sessionId) + int sessionId, + bool isOut) : RefBase(), mThread(thread), mClient(client), @@ -79,7 +80,9 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mChannelCount * audio_bytes_per_sample(format) : sizeof(int8_t)), mFrameCount(frameCount), mStepServerFailed(false), - mSessionId(sessionId) + mSessionId(sessionId), + mIsOut(isOut), + mServerProxy(NULL) { // client == 0 implies sharedBuffer == 0 ALOG_ASSERT(!(client == 0 && sharedBuffer != 0)); @@ -105,7 +108,8 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( return; } } else { - mCblk = 
(audio_track_cblk_t *)(new uint8_t[size]); + // this syntax avoids calling the audio_track_cblk_t constructor twice + mCblk = (audio_track_cblk_t *) new uint8_t[size]; // assume mCblk != NULL } @@ -114,7 +118,6 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( new(mCblk) audio_track_cblk_t(); // clear all buffers mCblk->frameCount_ = frameCount; - mCblk->sampleRate = sampleRate; // uncomment the following lines to quickly test 32-bit wraparound // mCblk->user = 0xffff0000; // mCblk->server = 0xffff0000; @@ -130,11 +133,14 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mBuffer = sharedBuffer->pointer(); } mBufferEnd = (uint8_t *)mBuffer + bufferSize; + mServerProxy = new ServerProxy(mCblk, mBuffer, frameCount, mFrameSize, isOut); } } AudioFlinger::ThreadBase::TrackBase::~TrackBase() { + // delete the proxy before deleting the shared memory it refers to, to avoid dangling reference + delete mServerProxy; if (mCblk != NULL) { if (mClient == 0) { delete mCblk; @@ -166,10 +172,7 @@ void AudioFlinger::ThreadBase::TrackBase::releaseBuffer(AudioBufferProvider::Buf } bool AudioFlinger::ThreadBase::TrackBase::step() { - bool result; - audio_track_cblk_t* cblk = this->cblk(); - - result = cblk->stepServer(mStepCount, mFrameCount, isOut()); + bool result = mServerProxy->step(mStepCount); if (!result) { ALOGV("stepServer failed acquiring cblk mutex"); mStepServerFailed = true; @@ -189,7 +192,7 @@ void AudioFlinger::ThreadBase::TrackBase::reset() { } uint32_t AudioFlinger::ThreadBase::TrackBase::sampleRate() const { - return mCblk->sampleRate; + return mServerProxy->getSampleRate(); } void* AudioFlinger::ThreadBase::TrackBase::getBuffer(uint32_t offset, uint32_t frames) const { @@ -310,7 +313,7 @@ AudioFlinger::PlaybackThread::Track::Track( int sessionId, IAudioFlinger::track_flags_t flags) : TrackBase(thread, client, sampleRate, format, channelMask, frameCount, sharedBuffer, - sessionId), + sessionId, true /*isOut*/), mFillingUpStatus(FS_INVALID), // mRetryCount initialized later when needed mSharedBuffer(sharedBuffer), @@ -399,7 +402,7 @@ void AudioFlinger::PlaybackThread::Track::destroy() void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) { - uint32_t vlr = mCblk->getVolumeLR(); + uint32_t vlr = mServerProxy->getVolumeLR(); if (isFastTrack()) { sprintf(buffer, " F %2d", mFastIndex); } else { @@ -468,7 +471,7 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) mFrameCount, stateChar, mFillingUpStatus, - mCblk->sampleRate, + mServerProxy->getSampleRate(), 20.0 * log10((vlr & 0xFFFF) / 4096.0), 20.0 * log10((vlr >> 16) / 4096.0), mCblk->server, @@ -503,7 +506,7 @@ status_t AudioFlinger::PlaybackThread::Track::getNextBuffer( } // FIXME Same as above - framesReady = cblk->framesReadyOut(); + framesReady = mServerProxy->framesReady(); if (CC_LIKELY(framesReady)) { uint32_t s = cblk->server; @@ -538,7 +541,7 @@ getNextBuffer_exit: // the tryLock() could block for up to 1 ms, and a sequence of these could delay fast mixer. // FIXME Replace AudioTrackShared control block implementation by a non-blocking FIFO queue. 
size_t AudioFlinger::PlaybackThread::Track::framesReady() const { - return mCblk->framesReadyOut(); + return mServerProxy->framesReady(); } // Don't call for fast tracks; the framesReady() could result in priority inversion @@ -795,7 +798,7 @@ uint32_t AudioFlinger::PlaybackThread::Track::getVolumeLR() { // called by FastMixer, so not allowed to take any locks, block, or do I/O including logs ALOG_ASSERT(isFastTrack() && (mCblk != NULL)); - uint32_t vlr = mCblk->getVolumeLR(); + uint32_t vlr = mServerProxy->getVolumeLR(); uint32_t vl = vlr & 0xFFFF; uint32_t vr = vlr >> 16; // track volumes come from shared memory, so can't be trusted and must be clamped @@ -830,11 +833,6 @@ status_t AudioFlinger::PlaybackThread::Track::setSyncEvent(const sp& return NO_ERROR; } -bool AudioFlinger::PlaybackThread::Track::isOut() const -{ - return true; -} - void AudioFlinger::PlaybackThread::Track::invalidate() { // FIXME should use proxy @@ -1369,17 +1367,19 @@ AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( size_t frameCount) : Track(playbackThread, NULL, AUDIO_STREAM_CNT, sampleRate, format, channelMask, frameCount, NULL, 0, IAudioFlinger::TRACK_DEFAULT), - mActive(false), mSourceThread(sourceThread), mBuffers(NULL) + mActive(false), mSourceThread(sourceThread), mClientProxy(NULL) { if (mCblk != NULL) { - mBuffers = (char*)mCblk + sizeof(audio_track_cblk_t); mOutBuffer.frameCount = 0; playbackThread->mTracks.add(this); - ALOGV("OutputTrack constructor mCblk %p, mBuffer %p, mBuffers %p, " \ - "mCblk->frameCount %d, mCblk->sampleRate %u, mChannelMask 0x%08x mBufferEnd %p", - mCblk, mBuffer, mBuffers, - mCblk->frameCount, mCblk->sampleRate, mChannelMask, mBufferEnd); + ALOGV("OutputTrack constructor mCblk %p, mBuffer %p, " + "mCblk->frameCount_ %u, mChannelMask 0x%08x mBufferEnd %p", + mCblk, mBuffer, + mCblk->frameCount_, mChannelMask, mBufferEnd); + // since client and server are in the same process, + // the buffer has the same virtual address on both sides + mClientProxy = new AudioTrackClientProxy(mCblk, mBuffer, mFrameCount, mFrameSize); } else { ALOGW("Error creating output track on thread %p", playbackThread); } @@ -1388,6 +1388,8 @@ AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( AudioFlinger::PlaybackThread::OutputTrack::~OutputTrack() { clearBufferQueue(); + delete mClientProxy; + // superclass destructor will now delete the server proxy and shared memory both refer to } status_t AudioFlinger::PlaybackThread::OutputTrack::start(AudioSystem::sync_event_t event, @@ -1475,7 +1477,7 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr uint32_t outFrames = pInBuffer->frameCount > mOutBuffer.frameCount ? 
mOutBuffer.frameCount : pInBuffer->frameCount; memcpy(mOutBuffer.raw, pInBuffer->raw, outFrames * channelCount * sizeof(int16_t)); - mCblk->stepUserOut(outFrames, mFrameCount); + mClientProxy->stepUser(outFrames); pInBuffer->frameCount -= outFrames; pInBuffer->i16 += outFrames * channelCount; mOutBuffer.frameCount -= outFrames; @@ -1538,40 +1540,29 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( AudioBufferProvider::Buffer* buffer, uint32_t waitTimeMs) { - int active; - status_t result; audio_track_cblk_t* cblk = mCblk; uint32_t framesReq = buffer->frameCount; ALOGVV("OutputTrack::obtainBuffer user %d, server %d", cblk->user, cblk->server); buffer->frameCount = 0; - uint32_t framesAvail = cblk->framesAvailableOut(mFrameCount); - - - if (framesAvail == 0) { + size_t framesAvail; + { Mutex::Autolock _l(cblk->lock); - goto start_loop_here; - while (framesAvail == 0) { - active = mActive; - if (CC_UNLIKELY(!active)) { + + // read the server count again + while (!(framesAvail = mClientProxy->framesAvailable_l())) { + if (CC_UNLIKELY(!mActive)) { ALOGV("Not active and NO_MORE_BUFFERS"); return NO_MORE_BUFFERS; } - result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); + status_t result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); if (result != NO_ERROR) { return NO_MORE_BUFFERS; } - // read the server count again - start_loop_here: - framesAvail = cblk->framesAvailableOut_l(mFrameCount); } } -// if (framesAvail < framesReq) { -// return NO_MORE_BUFFERS; -// } - if (framesReq > framesAvail) { framesReq = framesAvail; } @@ -1584,7 +1575,7 @@ status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( } buffer->frameCount = framesReq; - buffer->raw = cblk->buffer(mBuffers, mFrameSize, u); + buffer->raw = mClientProxy->buffer(u); return NO_ERROR; } @@ -1655,7 +1646,7 @@ AudioFlinger::RecordThread::RecordTrack::RecordTrack( size_t frameCount, int sessionId) : TrackBase(thread, client, sampleRate, format, - channelMask, frameCount, 0 /*sharedBuffer*/, sessionId), + channelMask, frameCount, 0 /*sharedBuffer*/, sessionId, false /*isOut*/), mOverflow(false) { ALOGV("RecordTrack constructor, size %d", (int)mBufferEnd - (int)mBuffer); @@ -1684,7 +1675,7 @@ status_t AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvi } // FIXME lock is not actually held, so overrun is possible - framesAvail = cblk->framesAvailableIn_l(mFrameCount); + framesAvail = mServerProxy->framesAvailableIn_l(); if (CC_LIKELY(framesAvail)) { uint32_t s = cblk->server; @@ -1761,27 +1752,21 @@ void AudioFlinger::RecordThread::RecordTrack::destroy() /*static*/ void AudioFlinger::RecordThread::RecordTrack::appendDumpHeader(String8& result) { - result.append(" Clien Fmt Chn mask Session Step S SRate Serv User FrameCount\n"); + result.append(" Clien Fmt Chn mask Session Step S Serv User FrameCount\n"); } void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size) { - snprintf(buffer, size, " %05d %03u 0x%08x %05d %04u %01d %05u %08x %08x %05d\n", + snprintf(buffer, size, " %05d %03u 0x%08x %05d %04u %01d %08x %08x %05d\n", (mClient == 0) ? 
getpid_cached : mClient->pid(), mFormat, mChannelMask, mSessionId, mStepCount, mState, - mCblk->sampleRate, mCblk->server, mCblk->user, mFrameCount); } -bool AudioFlinger::RecordThread::RecordTrack::isOut() const -{ - return false; -} - }; // namespace android -- cgit v1.1 From 308ca621005ab86847b1b1dabaf65a2521844a2a Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Wed, 12 Dec 2012 11:49:23 -0800 Subject: Increase buffer size for video Change-Id: I055e1336954387f7b48aa58d893a3a5fae036ece --- media/libstagefright/FragmentedMP4Extractor.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/FragmentedMP4Extractor.cpp b/media/libstagefright/FragmentedMP4Extractor.cpp index 82712ef..10655b9 100644 --- a/media/libstagefright/FragmentedMP4Extractor.cpp +++ b/media/libstagefright/FragmentedMP4Extractor.cpp @@ -222,8 +222,8 @@ status_t FragmentedMPEG4Source::start(MetaData *params) { mGroup = new MediaBufferGroup; - int32_t max_size = 65536; - // XXX CHECK(mFormat->findInt32(kKeyMaxInputSize, &max_size)); + // for video, make the buffer big enough for an extremely poorly compressed 1080p frame. + int32_t max_size = mIsAudioTrack ? 65536 : 3110400; mGroup->add_buffer(new MediaBuffer(max_size)); -- cgit v1.1 From 8c95fa91fff6e8726df03598d52243f22e5ff8e7 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Thu, 13 Dec 2012 11:10:05 -0800 Subject: Signal that IDR frames are sync frames Change-Id: Iaf77edc0572cae38935fd9d94367adbfcb370985 --- media/libstagefright/FragmentedMP4Extractor.cpp | 4 ++++ media/libstagefright/Utils.cpp | 10 ++++++++++ media/libstagefright/mp4/FragmentedMP4Parser.cpp | 8 ++++++++ 3 files changed, 22 insertions(+) diff --git a/media/libstagefright/FragmentedMP4Extractor.cpp b/media/libstagefright/FragmentedMP4Extractor.cpp index 10655b9..496828d 100644 --- a/media/libstagefright/FragmentedMP4Extractor.cpp +++ b/media/libstagefright/FragmentedMP4Extractor.cpp @@ -278,6 +278,10 @@ status_t FragmentedMPEG4Source::read( sp meta = parseBuffer->meta(); int64_t timeUs; CHECK(meta->findInt64("timeUs", &timeUs)); + int32_t isSync; + if (meta->findInt32("is-sync-frame", &isSync) && isSync != 0) { + buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); + } buffer->meta_data()->setInt64(kKeyTime, timeUs); buffer->set_range(0, parseBuffer->size()); memcpy(buffer->data(), parseBuffer->data(), parseBuffer->size()); diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp index 1a6ff4b..8ed07bf 100644 --- a/media/libstagefright/Utils.cpp +++ b/media/libstagefright/Utils.cpp @@ -78,6 +78,11 @@ status_t convertMetaDataToMessage( msg->setInt64("durationUs", durationUs); } + int32_t isSync; + if (meta->findInt32(kKeyIsSyncFrame, &isSync) && isSync != 0) { + msg->setInt32("is-sync-frame", 1); + } + if (!strncasecmp("video/", mime, 6)) { int32_t width, height; CHECK(meta->findInt32(kKeyWidth, &width)); @@ -370,6 +375,11 @@ void convertMessageToMetaData(const sp &msg, sp &meta) { meta->setInt64(kKeyDuration, durationUs); } + int32_t isSync; + if (msg->findInt32("is-sync-frame", &isSync) && isSync != 0) { + meta->setInt32(kKeyIsSyncFrame, 1); + } + if (mime.startsWith("video/")) { int32_t width; int32_t height; diff --git a/media/libstagefright/mp4/FragmentedMP4Parser.cpp b/media/libstagefright/mp4/FragmentedMP4Parser.cpp index 7aa5be9..6130d72 100644 --- a/media/libstagefright/mp4/FragmentedMP4Parser.cpp +++ b/media/libstagefright/mp4/FragmentedMP4Parser.cpp @@ -18,6 +18,7 @@ #define LOG_TAG "FragmentedMP4Parser" #include +#include 
"include/avc_utils.h" #include "include/ESDS.h" #include "include/FragmentedMP4Parser.h" #include "TrackFragment.h" @@ -961,6 +962,10 @@ status_t FragmentedMP4Parser::makeAccessUnit( sample.mSize); (*accessUnit)->meta()->setInt64("timeUs", presentationTimeUs); + if (IsIDR(*accessUnit)) { + (*accessUnit)->meta()->setInt32("is-sync-frame", 1); + } + return OK; } @@ -1003,6 +1008,9 @@ status_t FragmentedMP4Parser::makeAccessUnit( "timeUs", presentationTimeUs); } } + if (IsIDR(*accessUnit)) { + (*accessUnit)->meta()->setInt32("is-sync-frame", 1); + } return OK; } -- cgit v1.1 From ef3d158d102b64513ebb0707b49eb99566b067a6 Mon Sep 17 00:00:00 2001 From: Greg Hackmann Date: Fri, 14 Dec 2012 13:49:48 -0800 Subject: SurfaceMediaSource: wait on fence from acquired buffers Change-Id: I4ab93a4adeec536648258c70a7d943503d9b10f4 Signed-off-by: Greg Hackmann --- media/libstagefright/SurfaceMediaSource.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp index 3c002fc..0345de6 100644 --- a/media/libstagefright/SurfaceMediaSource.cpp +++ b/media/libstagefright/SurfaceMediaSource.cpp @@ -298,6 +298,10 @@ status_t SurfaceMediaSource::read( MediaBuffer **buffer, // wait for a buffer to be queued mFrameAvailableCondition.wait(mMutex); } else if (err == OK) { + err = item.mFence->waitForever(1000, "SurfaceMediaSource::read"); + if (err) { + ALOGW("read: failed to wait for buffer fence: %d", err); + } // First time seeing the buffer? Added it to the SMS slot if (item.mGraphicBuffer != NULL) { -- cgit v1.1 From c4974312e5a1e2ab94eca56045f991baf1508d73 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 14 Dec 2012 07:13:28 -0800 Subject: Fix build warnings Change-Id: I9e3cfa0c6b3467fe763ce55f759d179f02c5deea --- services/audioflinger/AudioResamplerSinc.cpp | 6 +++--- services/audioflinger/AudioResamplerSinc.h | 4 ++-- services/audioflinger/Threads.cpp | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index 3f22ca6..207f26b 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -53,7 +53,7 @@ namespace android { * tools/resampler_tools * cmd-line: fir -l 7 -s 48000 -c 20478 */ -const int32_t AudioResamplerSinc::mFirCoefsUp[] __attribute__ ((aligned (32))) = { +const uint32_t AudioResamplerSinc::mFirCoefsUp[] __attribute__ ((aligned (32))) = { 0x6d374bc7, 0x111c6ba0, 0xf3240e61, 0x07d14a38, 0xfc509e64, 0x0139cee9, 0xffc8c866, 0xfffcc300, 0x6d35278a, 0x103e8192, 0xf36b9dfd, 0x07bdfaa5, 0xfc5102d0, 0x013d618d, 0xffc663b9, 0xfffd9592, 0x6d2ebafe, 0x0f62811a, 0xf3b3d8ac, 0x07a9f399, 0xfc51d9a6, 0x0140bea5, 0xffc41212, 0xfffe631e, @@ -189,7 +189,7 @@ const int32_t AudioResamplerSinc::mFirCoefsUp[] __attribute__ ((aligned (32))) = * These coefficients are optimized for 48KHz -> 44.1KHz * cmd-line: fir -l 7 -s 48000 -c 17189 */ -const int32_t AudioResamplerSinc::mFirCoefsDown[] __attribute__ ((aligned (32))) = { +const uint32_t AudioResamplerSinc::mFirCoefsDown[] __attribute__ ((aligned (32))) = { 0x5bacb6f4, 0x1ded1a1d, 0xf0398d56, 0x0394f674, 0x0193a5f9, 0xfe66dbeb, 0x00791043, 0xfffe6631, 0x5bab6c81, 0x1d3ddccd, 0xf0421d2c, 0x03af9995, 0x01818dc9, 0xfe6bb63e, 0x0079812a, 0xfffdc37d, 0x5ba78d37, 0x1c8f2cf9, 0xf04beb1d, 0x03c9a04a, 0x016f8aca, 0xfe70a511, 0x0079e34d, 0xfffd2545, @@ -512,7 +512,7 @@ void AudioResamplerSinc::resample(int32_t* out, size_t 
outFrameCount, if (mConstants == &veryHighQualityConstants && readResampleCoefficients) { mFirCoefs = readResampleCoefficients( mInSampleRate <= mSampleRate ); } else { - mFirCoefs = (mInSampleRate <= mSampleRate) ? mFirCoefsUp : mFirCoefsDown; + mFirCoefs = (const int32_t *) ((mInSampleRate <= mSampleRate) ? mFirCoefsUp : mFirCoefsDown); } // select the appropriate resampler diff --git a/services/audioflinger/AudioResamplerSinc.h b/services/audioflinger/AudioResamplerSinc.h index 09c6866..1ea4474 100644 --- a/services/audioflinger/AudioResamplerSinc.h +++ b/services/audioflinger/AudioResamplerSinc.h @@ -70,8 +70,8 @@ private: int32_t mVolumeSIMD[2]; const int32_t * mFirCoefs; - static const int32_t mFirCoefsDown[]; - static const int32_t mFirCoefsUp[]; + static const uint32_t mFirCoefsDown[]; + static const uint32_t mFirCoefsUp[]; // ---------------------------------------------------------------------------- static const int32_t RESAMPLE_FIR_NUM_COEF = 8; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index d2b2931..a50c936 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -4217,7 +4217,7 @@ bool AudioFlinger::RecordThread::checkForNewParameters_l() if (status == BAD_VALUE && reqFormat == mInput->stream->common.get_format(&mInput->stream->common) && reqFormat == AUDIO_FORMAT_PCM_16_BIT && - ((int)mInput->stream->common.get_sample_rate(&mInput->stream->common) + (mInput->stream->common.get_sample_rate(&mInput->stream->common) <= (2 * reqSamplingRate)) && popcount(mInput->stream->common.get_channels(&mInput->stream->common)) <= FCC_2 && -- cgit v1.1 From 89b629b398e87095cf262692f4e476d605fe87ed Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 17 Dec 2012 11:44:20 -0800 Subject: Fix bug with discarded AudioRecord::read count Formerly, if an AudioRecord::read() got a timeout on obtainBuffer() after already successfully transferring some data, then it returned zero. This had the effect of discarding a partial transfer, which resulted in a gap in the audio data delivered to the app. Now if a timeout occurs after a partial transfer, it returns that partial transfer count so that no data is lost. Change-Id: I0d9c2f4e495a400b56ef916a06613ba26537ca97 --- media/libmedia/AudioRecord.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index c2ef68c..9fda0a5 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -644,7 +644,8 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize) break; } if (err == status_t(TIMED_OUT)) { - err = 0; + // return partial transfer count + return read; } return ssize_t(err); } -- cgit v1.1 From 7b670d4a0a4fa560f536f132e0a3fc7247f6724c Mon Sep 17 00:00:00 2001 From: James Dong Date: Thu, 13 Dec 2012 18:58:38 -0800 Subject: Fix memory leakage from MPEG4Writer. o The in-memory cache, mMoovBoxBuffer, holding the content for Moov box may not be freed. 
o Added comment describing how the in-memory cache works o Moved the memory release to a single place to make the code more robust o Avoided allocating the in-memory cache if the file is not intended to be streamable o related-to-bug: 7664029 Change-Id: If04fc6b12daeaaa86710dfb4b4b9c175da6421df --- media/libstagefright/MPEG4Writer.cpp | 90 ++++++++++++++++++++++++++++-------- 1 file changed, 72 insertions(+), 18 deletions(-) diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp index 8b52e15..14986b2 100644 --- a/media/libstagefright/MPEG4Writer.cpp +++ b/media/libstagefright/MPEG4Writer.cpp @@ -575,13 +575,50 @@ status_t MPEG4Writer::start(MetaData *param) { /* * When the requested file size limit is small, the priority * is to meet the file size limit requirement, rather than - * to make the file streamable. + * to make the file streamable. mStreamableFile does not tell + * whether the actual recorded file is streamable or not. */ mStreamableFile = (mMaxFileSizeLimitBytes != 0 && mMaxFileSizeLimitBytes >= kMinStreamableFileSizeInBytes); - mWriteMoovBoxToMemory = mStreamableFile; + /* + * mWriteMoovBoxToMemory is true if the amount of data in moov box is + * smaller than the reserved free space at the beginning of a file, AND + * when the content of moov box is constructed. Note that video/audio + * frame data is always written to the file but not in the memory. + * + * Before stop()/reset() is called, mWriteMoovBoxToMemory is always + * false. When reset() is called at the end of a recording session, + * Moov box needs to be constructed. + * + * 1) Right before a moov box is constructed, mWriteMoovBoxToMemory + * to set to mStreamableFile so that if + * the file is intended to be streamable, it is set to true; + * otherwise, it is set to false. When the value is set to false, + * all the content of the moov box is written immediately to + * the end of the file. When the value is set to true, all the + * content of the moov box is written to an in-memory cache, + * mMoovBoxBuffer, util the following condition happens. Note + * that the size of the in-memory cache is the same as the + * reserved free space at the beginning of the file. + * + * 2) While the data of the moov box is written to an in-memory + * cache, the data size is checked against the reserved space. + * If the data size surpasses the reserved space, subsequent moov + * data could no longer be hold in the in-memory cache. This also + * indicates that the reserved space was too small. At this point, + * _all_ moov data must be written to the end of the file. + * mWriteMoovBoxToMemory must be set to false to direct the write + * to the file. + * + * 3) If the data size in moov box is smaller than the reserved + * space after moov box is completely constructed, the in-memory + * cache copy of the moov box is written to the reserved free + * space. Thus, immediately after the moov is completedly + * constructed, mWriteMoovBoxToMemory is always set to false. 
+ */ + mWriteMoovBoxToMemory = false; mMoovBoxBuffer = NULL; mMoovBoxBufferOffset = 0; @@ -786,15 +823,25 @@ status_t MPEG4Writer::reset() { } lseek64(mFd, mOffset, SEEK_SET); - const off64_t moovOffset = mOffset; - mWriteMoovBoxToMemory = mStreamableFile; - mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize); + // Construct moov box now mMoovBoxBufferOffset = 0; - CHECK(mMoovBoxBuffer != NULL); + mWriteMoovBoxToMemory = mStreamableFile; + if (mWriteMoovBoxToMemory) { + // There is no need to allocate in-memory cache + // for moov box if the file is not streamable. + + mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize); + CHECK(mMoovBoxBuffer != NULL); + } writeMoovBox(maxDurationUs); - mWriteMoovBoxToMemory = false; - if (mStreamableFile) { + // mWriteMoovBoxToMemory could be set to false in + // MPEG4Writer::write() method + if (mWriteMoovBoxToMemory) { + mWriteMoovBoxToMemory = false; + // Content of the moov box is saved in the cache, and the in-memory + // moov box needs to be written to the file in a single shot. + CHECK_LE(mMoovBoxBufferOffset + 8, mEstimatedMoovBoxSize); // Moov box @@ -806,13 +853,15 @@ status_t MPEG4Writer::reset() { lseek64(mFd, mOffset, SEEK_SET); writeInt32(mEstimatedMoovBoxSize - mMoovBoxBufferOffset); write("free", 4); + } else { + ALOGI("The mp4 file will not be streamable."); + } - // Free temp memory + // Free in-memory cache for moov box + if (mMoovBoxBuffer != NULL) { free(mMoovBoxBuffer); mMoovBoxBuffer = NULL; mMoovBoxBufferOffset = 0; - } else { - ALOGI("The mp4 file will not be streamable."); } CHECK(mBoxes.empty()); @@ -994,23 +1043,28 @@ size_t MPEG4Writer::write( const size_t bytes = size * nmemb; if (mWriteMoovBoxToMemory) { - // This happens only when we write the moov box at the end of - // recording, not for each output video/audio frame we receive. + off64_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes; if (moovBoxSize > mEstimatedMoovBoxSize) { + // The reserved moov box at the beginning of the file + // is not big enough. Moov box should be written to + // the end of the file from now on, but not to the + // in-memory cache. + + // We write partial moov box that is in the memory to + // the file first. for (List::iterator it = mBoxes.begin(); it != mBoxes.end(); ++it) { (*it) += mOffset; } lseek64(mFd, mOffset, SEEK_SET); ::write(mFd, mMoovBoxBuffer, mMoovBoxBufferOffset); - ::write(mFd, ptr, size * nmemb); + ::write(mFd, ptr, bytes); mOffset += (bytes + mMoovBoxBufferOffset); - free(mMoovBoxBuffer); - mMoovBoxBuffer = NULL; - mMoovBoxBufferOffset = 0; + + // All subsequent moov box content will be written + // to the end of the file. mWriteMoovBoxToMemory = false; - mStreamableFile = false; } else { memcpy(mMoovBoxBuffer + mMoovBoxBufferOffset, ptr, bytes); mMoovBoxBufferOffset += bytes; -- cgit v1.1 From 8ba01021b573889802e67e029225a96f0dfa471a Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Tue, 18 Dec 2012 09:46:54 -0800 Subject: Rename ISurfaceTexture and SurfaceTexture The C++ class names don't match what the classes do, so rename ISurfaceTexture to IGraphicBufferProducer, and SurfaceTexture to GLConsumer. 
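A minimal sketch of the call pattern this rename leaves in client code (an editor's illustration under assumed header paths, not part of the patch): an IGraphicBufferProducer received over Binder is wrapped in a SurfaceTextureClient to obtain an ANativeWindow, which is what the updated call sites below do.

#include <gui/IGraphicBufferProducer.h>
#include <gui/SurfaceTextureClient.h>

using namespace android;

// Hypothetical helper, named only for illustration: turn a producer handle
// into a window that media code can render into.
static sp<ANativeWindow> windowFromProducer(
        const sp<IGraphicBufferProducer> &bufferProducer) {
    if (bufferProducer == NULL) {
        return NULL;  // no producer was supplied
    }
    // SurfaceTextureClient is the ANativeWindow implementation backed by the
    // (renamed) IGraphicBufferProducer binder interface.
    return new SurfaceTextureClient(bufferProducer);
}
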
Bug 7736700 Change-Id: I64520a55f8c09fe6215382ea361c539a9940cba5 --- camera/Camera.cpp | 12 ++++++------ camera/ICamera.cpp | 11 ++++++----- cmds/stagefright/SimplePlayer.cpp | 6 +++--- cmds/stagefright/SimplePlayer.h | 4 ++-- cmds/stagefright/stagefright.cpp | 2 +- include/camera/Camera.h | 6 +++--- include/camera/ICamera.h | 6 +++--- include/media/IMediaPlayer.h | 4 ++-- include/media/IMediaRecorder.h | 4 ++-- include/media/IRemoteDisplayClient.h | 4 ++-- include/media/MediaPlayerInterface.h | 6 +++--- include/media/MediaRecorderBase.h | 4 ++-- include/media/mediaplayer.h | 4 ++-- include/media/mediarecorder.h | 8 ++++---- include/media/stagefright/MediaCodec.h | 2 +- include/media/stagefright/SurfaceMediaSource.h | 8 ++++---- libvideoeditor/lvpp/NativeWindowRenderer.cpp | 6 +++--- libvideoeditor/lvpp/NativeWindowRenderer.h | 14 +++++++------- libvideoeditor/lvpp/PreviewPlayer.cpp | 8 ++++---- libvideoeditor/lvpp/PreviewPlayer.h | 2 +- libvideoeditor/lvpp/VideoEditorPlayer.cpp | 4 ++-- libvideoeditor/lvpp/VideoEditorPlayer.h | 2 +- media/libmedia/IMediaPlayer.cpp | 14 +++++++------- media/libmedia/IMediaRecorder.cpp | 8 ++++---- media/libmedia/IRemoteDisplayClient.cpp | 10 +++++----- media/libmedia/mediaplayer.cpp | 4 ++-- media/libmedia/mediarecorder.cpp | 8 ++++---- media/libmediaplayerservice/MediaPlayerService.cpp | 16 ++++++++-------- media/libmediaplayerservice/MediaPlayerService.h | 2 +- media/libmediaplayerservice/MediaRecorderClient.cpp | 4 ++-- media/libmediaplayerservice/MediaRecorderClient.h | 4 ++-- media/libmediaplayerservice/MidiFile.h | 2 +- media/libmediaplayerservice/StagefrightPlayer.cpp | 4 ++-- media/libmediaplayerservice/StagefrightPlayer.h | 2 +- media/libmediaplayerservice/StagefrightRecorder.cpp | 2 +- media/libmediaplayerservice/StagefrightRecorder.h | 6 +++--- media/libmediaplayerservice/TestPlayerStub.h | 2 +- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 8 ++++---- media/libmediaplayerservice/nuplayer/NuPlayer.h | 2 +- media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp | 4 ++-- media/libmediaplayerservice/nuplayer/NuPlayerDriver.h | 2 +- media/libstagefright/AwesomePlayer.cpp | 8 ++++---- .../libstagefright/colorconversion/SoftwareRenderer.cpp | 2 +- media/libstagefright/include/AwesomePlayer.h | 4 ++-- media/libstagefright/tests/SurfaceMediaSource_test.cpp | 12 ++++++------ media/libstagefright/wifi-display/sink/RTPSink.cpp | 4 ++-- media/libstagefright/wifi-display/sink/RTPSink.h | 4 ++-- .../libstagefright/wifi-display/sink/TunnelRenderer.cpp | 4 ++-- media/libstagefright/wifi-display/sink/TunnelRenderer.h | 4 ++-- .../libstagefright/wifi-display/sink/WifiDisplaySink.cpp | 4 ++-- media/libstagefright/wifi-display/sink/WifiDisplaySink.h | 4 ++-- .../wifi-display/source/PlaybackSession.cpp | 2 +- .../libstagefright/wifi-display/source/PlaybackSession.h | 4 ++-- .../wifi-display/source/WifiDisplaySource.cpp | 2 +- media/libstagefright/wifi-display/wfd.cpp | 8 ++++---- services/camera/libcameraservice/Camera2Client.cpp | 8 ++++---- services/camera/libcameraservice/Camera2Client.h | 2 +- services/camera/libcameraservice/CameraClient.cpp | 12 ++++++------ services/camera/libcameraservice/CameraClient.h | 4 ++-- services/camera/libcameraservice/CameraService.h | 2 +- 60 files changed, 163 insertions(+), 162 deletions(-) diff --git a/camera/Camera.cpp b/camera/Camera.cpp index d43cb0b..3aaacaf 100644 --- a/camera/Camera.cpp +++ b/camera/Camera.cpp @@ -27,7 +27,7 @@ #include #include -#include +#include #include namespace android { @@ -184,14 +184,14 @@ 
status_t Camera::setPreviewDisplay(const sp& surface) } } -// pass the buffered ISurfaceTexture to the camera service -status_t Camera::setPreviewTexture(const sp& surfaceTexture) +// pass the buffered IGraphicBufferProducer to the camera service +status_t Camera::setPreviewTexture(const sp& bufferProducer) { - ALOGV("setPreviewTexture(%p)", surfaceTexture.get()); + ALOGV("setPreviewTexture(%p)", bufferProducer.get()); sp c = mCamera; if (c == 0) return NO_INIT; - if (surfaceTexture != 0) { - return c->setPreviewTexture(surfaceTexture); + if (bufferProducer != 0) { + return c->setPreviewTexture(bufferProducer); } else { ALOGD("app passed NULL surface"); return c->setPreviewTexture(0); diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp index 8d8408c..5d210e7 100644 --- a/camera/ICamera.cpp +++ b/camera/ICamera.cpp @@ -22,7 +22,7 @@ #include #include #include -#include +#include #include namespace android { @@ -79,13 +79,13 @@ public: return reply.readInt32(); } - // pass the buffered SurfaceTexture to the camera service - status_t setPreviewTexture(const sp& surfaceTexture) + // pass the buffered IGraphicBufferProducer to the camera service + status_t setPreviewTexture(const sp& bufferProducer) { ALOGV("setPreviewTexture"); Parcel data, reply; data.writeInterfaceToken(ICamera::getInterfaceDescriptor()); - sp b(surfaceTexture->asBinder()); + sp b(bufferProducer->asBinder()); data.writeStrongBinder(b); remote()->transact(SET_PREVIEW_TEXTURE, data, &reply); return reply.readInt32(); @@ -292,7 +292,8 @@ status_t BnCamera::onTransact( case SET_PREVIEW_TEXTURE: { ALOGV("SET_PREVIEW_TEXTURE"); CHECK_INTERFACE(ICamera, data, reply); - sp st = interface_cast(data.readStrongBinder()); + sp st = + interface_cast(data.readStrongBinder()); reply->writeInt32(setPreviewTexture(st)); return NO_ERROR; } break; diff --git a/cmds/stagefright/SimplePlayer.cpp b/cmds/stagefright/SimplePlayer.cpp index eb3296e..93de112 100644 --- a/cmds/stagefright/SimplePlayer.cpp +++ b/cmds/stagefright/SimplePlayer.cpp @@ -64,12 +64,12 @@ status_t SimplePlayer::setDataSource(const char *path) { return PostAndAwaitResponse(msg, &response); } -status_t SimplePlayer::setSurface(const sp &surfaceTexture) { +status_t SimplePlayer::setSurface(const sp &bufferProducer) { sp msg = new AMessage(kWhatSetSurface, id()); sp surfaceTextureClient; - if (surfaceTexture != NULL) { - surfaceTextureClient = new SurfaceTextureClient(surfaceTexture); + if (bufferProducer != NULL) { + surfaceTextureClient = new SurfaceTextureClient(bufferProducer); } msg->setObject( diff --git a/cmds/stagefright/SimplePlayer.h b/cmds/stagefright/SimplePlayer.h index 2548252..0a06059 100644 --- a/cmds/stagefright/SimplePlayer.h +++ b/cmds/stagefright/SimplePlayer.h @@ -23,7 +23,7 @@ namespace android { struct ABuffer; struct ALooper; struct AudioTrack; -struct ISurfaceTexture; +struct IGraphicBufferProducer; struct MediaCodec; struct NativeWindowWrapper; struct NuMediaExtractor; @@ -32,7 +32,7 @@ struct SimplePlayer : public AHandler { SimplePlayer(); status_t setDataSource(const char *path); - status_t setSurface(const sp &surfaceTexture); + status_t setSurface(const sp &bufferProducer); status_t prepare(); status_t start(); status_t stop(); diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 148b66e..1002ac4 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -940,7 +940,7 @@ int main(int argc, char **argv) { } else { CHECK(useSurfaceTexAlloc); - sp texture = new SurfaceTexture(0 /* tex */); 
+ sp texture = new GLConsumer(0 /* tex */); gSurface = new SurfaceTextureClient(texture->getBufferQueue()); } diff --git a/include/camera/Camera.h b/include/camera/Camera.h index 234e165..43dae1c 100644 --- a/include/camera/Camera.h +++ b/include/camera/Camera.h @@ -18,7 +18,7 @@ #define ANDROID_HARDWARE_CAMERA_H #include -#include +#include #include #include #include @@ -86,8 +86,8 @@ public: // pass the buffered Surface to the camera service status_t setPreviewDisplay(const sp& surface); - // pass the buffered ISurfaceTexture to the camera service - status_t setPreviewTexture(const sp& surfaceTexture); + // pass the buffered IGraphicBufferProducer to the camera service + status_t setPreviewTexture(const sp& bufferProducer); // start preview mode, must call setPreviewDisplay first status_t startPreview(); diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h index 3d18837..eccaa41 100644 --- a/include/camera/ICamera.h +++ b/include/camera/ICamera.h @@ -27,7 +27,7 @@ namespace android { class ICameraClient; -class ISurfaceTexture; +class IGraphicBufferProducer; class Surface; class ICamera: public IInterface @@ -49,9 +49,9 @@ public: // pass the buffered Surface to the camera service virtual status_t setPreviewDisplay(const sp& surface) = 0; - // pass the buffered ISurfaceTexture to the camera service + // pass the buffered IGraphicBufferProducer to the camera service virtual status_t setPreviewTexture( - const sp& surfaceTexture) = 0; + const sp& bufferProducer) = 0; // set the preview callback flag to affect how the received frames from // preview are handled. diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h index 4ed1863..0cbd269 100644 --- a/include/media/IMediaPlayer.h +++ b/include/media/IMediaPlayer.h @@ -32,7 +32,7 @@ namespace android { class Parcel; class Surface; class IStreamSource; -class ISurfaceTexture; +class IGraphicBufferProducer; class IMediaPlayer: public IInterface { @@ -46,7 +46,7 @@ public: virtual status_t setDataSource(int fd, int64_t offset, int64_t length) = 0; virtual status_t setDataSource(const sp& source) = 0; virtual status_t setVideoSurfaceTexture( - const sp& surfaceTexture) = 0; + const sp& bufferProducer) = 0; virtual status_t prepareAsync() = 0; virtual status_t start() = 0; virtual status_t stop() = 0; diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h index ec84e25..54af0d3 100644 --- a/include/media/IMediaRecorder.h +++ b/include/media/IMediaRecorder.h @@ -26,7 +26,7 @@ class Surface; class ICamera; class ICameraRecordingProxy; class IMediaRecorderClient; -class ISurfaceTexture; +class IGraphicBufferProducer; class IMediaRecorder: public IInterface { @@ -55,7 +55,7 @@ public: virtual status_t init() = 0; virtual status_t close() = 0; virtual status_t release() = 0; - virtual sp querySurfaceMediaSource() = 0; + virtual sp querySurfaceMediaSource() = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/media/IRemoteDisplayClient.h b/include/media/IRemoteDisplayClient.h index 252b401..7b0fa9e 100644 --- a/include/media/IRemoteDisplayClient.h +++ b/include/media/IRemoteDisplayClient.h @@ -26,7 +26,7 @@ namespace android { -class ISurfaceTexture; +class IGraphicBufferProducer; class IRemoteDisplayClient : public IInterface { @@ -48,7 +48,7 @@ public: // Indicates that the remote display has been connected successfully. // Provides a surface texture that the client should use to stream buffers to // the remote display. 
- virtual void onDisplayConnected(const sp& surfaceTexture, + virtual void onDisplayConnected(const sp& bufferProducer, uint32_t width, uint32_t height, uint32_t flags) = 0; // one-way // Indicates that the remote display has been disconnected normally. diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h index b7bee3f..8fc72c3 100644 --- a/include/media/MediaPlayerInterface.h +++ b/include/media/MediaPlayerInterface.h @@ -37,7 +37,7 @@ namespace android { class Parcel; class Surface; -class ISurfaceTexture; +class IGraphicBufferProducer; template class SortedVector; @@ -131,9 +131,9 @@ public: return INVALID_OPERATION; } - // pass the buffered ISurfaceTexture to the media player service + // pass the buffered IGraphicBufferProducer to the media player service virtual status_t setVideoSurfaceTexture( - const sp& surfaceTexture) = 0; + const sp& bufferProducer) = 0; virtual status_t prepare() = 0; virtual status_t prepareAsync() = 0; diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h index ef799f5..803bc64 100644 --- a/include/media/MediaRecorderBase.h +++ b/include/media/MediaRecorderBase.h @@ -26,7 +26,7 @@ namespace android { class ICameraRecordingProxy; class Surface; -class ISurfaceTexture; +class IGraphicBufferProducer; struct MediaRecorderBase { MediaRecorderBase() {} @@ -55,7 +55,7 @@ struct MediaRecorderBase { virtual status_t reset() = 0; virtual status_t getMaxAmplitude(int *max) = 0; virtual status_t dump(int fd, const Vector& args) const = 0; - virtual sp querySurfaceMediaSource() const = 0; + virtual sp querySurfaceMediaSource() const = 0; private: MediaRecorderBase(const MediaRecorderBase &); diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h index d753eba..e5aa033 100644 --- a/include/media/mediaplayer.h +++ b/include/media/mediaplayer.h @@ -33,7 +33,7 @@ class ANativeWindow; namespace android { class Surface; -class ISurfaceTexture; +class IGraphicBufferProducer; enum media_event_type { MEDIA_NOP = 0, // interface test message @@ -199,7 +199,7 @@ public: status_t setDataSource(int fd, int64_t offset, int64_t length); status_t setDataSource(const sp &source); status_t setVideoSurfaceTexture( - const sp& surfaceTexture); + const sp& bufferProducer); status_t setListener(const sp& listener); status_t prepare(); status_t prepareAsync(); diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h index 6d304e0..2882c41 100644 --- a/include/media/mediarecorder.h +++ b/include/media/mediarecorder.h @@ -31,7 +31,7 @@ class Surface; class IMediaRecorder; class ICamera; class ICameraRecordingProxy; -class ISurfaceTexture; +class IGraphicBufferProducer; class SurfaceTextureClient; typedef void (*media_completion_f)(status_t status, void *cookie); @@ -228,7 +228,7 @@ public: status_t close(); status_t release(); void notify(int msg, int ext1, int ext2); - sp querySurfaceMediaSourceFromMediaServer(); + sp querySurfaceMediaSourceFromMediaServer(); private: void doCleanUp(); @@ -237,10 +237,10 @@ private: sp mMediaRecorder; sp mListener; - // Reference toISurfaceTexture + // Reference to IGraphicBufferProducer // for encoding GL Frames. 
That is useful only when the // video source is set to VIDEO_SOURCE_GRALLOC_BUFFER - sp mSurfaceMediaSource; + sp mSurfaceMediaSource; media_recorder_states mCurrentState; bool mIsAudioSourceSet; diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h index b1e57cf..88aabf6 100644 --- a/include/media/stagefright/MediaCodec.h +++ b/include/media/stagefright/MediaCodec.h @@ -18,7 +18,7 @@ #define MEDIA_CODEC_H_ -#include +#include #include #include #include diff --git a/include/media/stagefright/SurfaceMediaSource.h b/include/media/stagefright/SurfaceMediaSource.h index e56527d..609d84f 100644 --- a/include/media/stagefright/SurfaceMediaSource.h +++ b/include/media/stagefright/SurfaceMediaSource.h @@ -17,7 +17,7 @@ #ifndef ANDROID_GUI_SURFACEMEDIASOURCE_H #define ANDROID_GUI_SURFACEMEDIASOURCE_H -#include +#include #include #include @@ -35,7 +35,7 @@ class GraphicBuffer; // ASSUMPTIONS // 1. SurfaceMediaSource is initialized with width*height which // can never change. However, deqeueue buffer does not currently -// enforce this as in BufferQueue, dequeue can be used by SurfaceTexture +// enforce this as in BufferQueue, dequeue can be used by SurfaceTextureClient // which can modify the default width and heght. Also neither the width // nor height can be 0. // 2. setSynchronousMode is never used (basically no one should call @@ -122,7 +122,7 @@ public: protected: // Implementation of the BufferQueue::ConsumerListener interface. These - // calls are used to notify the SurfaceTexture of asynchronous events in the + // calls are used to notify the SurfaceTextureClient of asynchronous events in the // BufferQueue. virtual void onFrameAvailable(); @@ -157,7 +157,7 @@ private: // mCurrentSlot is the buffer slot index of the buffer that is currently // being used by buffer consumer // (e.g. StageFrightRecorder in the case of SurfaceMediaSource or GLTexture - // in the case of SurfaceTexture). + // in the case of SurfaceTextureClient). // It is initialized to INVALID_BUFFER_SLOT, // indicating that no buffer slot is currently bound to the texture. Note, // however, that a value of INVALID_BUFFER_SLOT does not necessarily mean diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.cpp b/libvideoeditor/lvpp/NativeWindowRenderer.cpp index efb45e2..114f0f6 100755 --- a/libvideoeditor/lvpp/NativeWindowRenderer.cpp +++ b/libvideoeditor/lvpp/NativeWindowRenderer.cpp @@ -20,7 +20,7 @@ #include #include #include -#include +#include #include #include #include @@ -315,7 +315,7 @@ NativeWindowRenderer::~NativeWindowRenderer() { } void NativeWindowRenderer::render(RenderInput* input) { - sp ST = input->mST; + sp ST = input->mST; sp STC = input->mSTC; if (input->mIsExternalBuffer) { @@ -568,7 +568,7 @@ void NativeWindowRenderer::destroyRenderInput(RenderInput* input) { RenderInput::RenderInput(NativeWindowRenderer* renderer, GLuint textureId) : mRenderer(renderer) , mTextureId(textureId) { - mST = new SurfaceTexture(mTextureId); + mST = new GLConsumer(mTextureId); mSTC = new SurfaceTextureClient(mST->getBufferQueue()); native_window_connect(mSTC.get(), NATIVE_WINDOW_API_MEDIA); } diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.h b/libvideoeditor/lvpp/NativeWindowRenderer.h index 8fbb4f9..b0623ba 100755 --- a/libvideoeditor/lvpp/NativeWindowRenderer.h +++ b/libvideoeditor/lvpp/NativeWindowRenderer.h @@ -37,15 +37,15 @@ // we only expect that happens briefly when one clip is about to finish // and the next clip is about to start. 
// -// We allocate a SurfaceTexture for each RenderInput and the user can use +// We allocate a SurfaceTextureClient for each RenderInput and the user can use // the getTargetWindow() function to get the corresponding ANativeWindow -// for that SurfaceTexture. The intention is that the user can pass that +// for that SurfaceTextureClient. The intention is that the user can pass that // ANativeWindow to OMXCodec::Create() so the codec can decode directly // to buffers provided by the texture. namespace android { -class SurfaceTexture; +class GLConsumer; class SurfaceTextureClient; class RenderInput; @@ -110,7 +110,7 @@ private: // destination aspect ratio. GLfloat mPositionCoordinates[8]; - // We use a different GL id for each SurfaceTexture. + // We use a different GL id for each SurfaceTextureClient. GLuint mNextTextureId; // Number of existing RenderInputs, just for debugging. @@ -146,7 +146,7 @@ private: class RenderInput { public: - // Returns the ANativeWindow corresponds to the SurfaceTexture. + // Returns the ANativeWindow corresponds to the SurfaceTextureClient. ANativeWindow* getTargetWindow(); // Updates video frame size from the MediaSource's metadata. Specifically @@ -156,7 +156,7 @@ public: // Renders the buffer with the given video effect and rending mode. // The video effets are defined in VideoEditorTools.h // Set isExternalBuffer to true only when the buffer given is not - // provided by the SurfaceTexture. + // provided by the SurfaceTextureClient. void render(MediaBuffer *buffer, uint32_t videoEffect, M4xVSS_MediaRendering renderingMode, bool isExternalBuffer); private: @@ -164,7 +164,7 @@ private: ~RenderInput(); NativeWindowRenderer* mRenderer; GLuint mTextureId; - sp mST; + sp mST; sp mSTC; int mWidth, mHeight; diff --git a/libvideoeditor/lvpp/PreviewPlayer.cpp b/libvideoeditor/lvpp/PreviewPlayer.cpp index 34731d7..754c5a9 100755 --- a/libvideoeditor/lvpp/PreviewPlayer.cpp +++ b/libvideoeditor/lvpp/PreviewPlayer.cpp @@ -31,7 +31,7 @@ #include #include #include -#include +#include #include #include "VideoEditorPreviewController.h" @@ -1775,12 +1775,12 @@ void PreviewPlayer::setSurface(const sp &surface) { setNativeWindow_l(surface); } -void PreviewPlayer::setSurfaceTexture(const sp &surfaceTexture) { +void PreviewPlayer::setSurfaceTexture(const sp &bufferProducer) { Mutex::Autolock autoLock(mLock); mSurface.clear(); - if (surfaceTexture != NULL) { - setNativeWindow_l(new SurfaceTextureClient(surfaceTexture)); + if (bufferProducer != NULL) { + setNativeWindow_l(new SurfaceTextureClient(bufferProducer)); } } diff --git a/libvideoeditor/lvpp/PreviewPlayer.h b/libvideoeditor/lvpp/PreviewPlayer.h index 177853f..5a13b58 100755 --- a/libvideoeditor/lvpp/PreviewPlayer.h +++ b/libvideoeditor/lvpp/PreviewPlayer.h @@ -44,7 +44,7 @@ struct PreviewPlayer { bool isPlaying() const; void setSurface(const sp &surface); - void setSurfaceTexture(const sp &surfaceTexture); + void setSurfaceTexture(const sp &bufferProducer); status_t seekTo(int64_t timeUs); status_t getVideoDimensions(int32_t *width, int32_t *height) const; diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp index a47fc15..91a4415 100755 --- a/libvideoeditor/lvpp/VideoEditorPlayer.cpp +++ b/libvideoeditor/lvpp/VideoEditorPlayer.cpp @@ -81,10 +81,10 @@ status_t VideoEditorPlayer::setVideoSurface(const sp &surface) { return OK; } -status_t VideoEditorPlayer::setVideoSurfaceTexture(const sp &surfaceTexture) { +status_t VideoEditorPlayer::setVideoSurfaceTexture(const sp 
&bufferProducer) { ALOGV("setVideoSurfaceTexture"); - mPlayer->setSurfaceTexture(surfaceTexture); + mPlayer->setSurfaceTexture(bufferProducer); return OK; } diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.h b/libvideoeditor/lvpp/VideoEditorPlayer.h index 2ab4eef..77194ab 100755 --- a/libvideoeditor/lvpp/VideoEditorPlayer.h +++ b/libvideoeditor/lvpp/VideoEditorPlayer.h @@ -99,7 +99,7 @@ public: virtual status_t setDataSource(int fd, int64_t offset, int64_t length); virtual status_t setVideoSurface(const sp &surface); - virtual status_t setVideoSurfaceTexture(const sp &surfaceTexture); + virtual status_t setVideoSurfaceTexture(const sp &bufferProducer); virtual status_t prepare(); virtual status_t prepareAsync(); virtual status_t start(); diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp index cb07766..e79bcd2 100644 --- a/media/libmedia/IMediaPlayer.cpp +++ b/media/libmedia/IMediaPlayer.cpp @@ -24,7 +24,7 @@ #include #include -#include +#include #include namespace android { @@ -113,12 +113,12 @@ public: return reply.readInt32(); } - // pass the buffered ISurfaceTexture to the media player service - status_t setVideoSurfaceTexture(const sp& surfaceTexture) + // pass the buffered IGraphicBufferProducer to the media player service + status_t setVideoSurfaceTexture(const sp& bufferProducer) { Parcel data, reply; data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor()); - sp b(surfaceTexture->asBinder()); + sp b(bufferProducer->asBinder()); data.writeStrongBinder(b); remote()->transact(SET_VIDEO_SURFACETEXTURE, data, &reply); return reply.readInt32(); @@ -383,9 +383,9 @@ status_t BnMediaPlayer::onTransact( } case SET_VIDEO_SURFACETEXTURE: { CHECK_INTERFACE(IMediaPlayer, data, reply); - sp surfaceTexture = - interface_cast(data.readStrongBinder()); - reply->writeInt32(setVideoSurfaceTexture(surfaceTexture)); + sp bufferProducer = + interface_cast(data.readStrongBinder()); + reply->writeInt32(setVideoSurfaceTexture(bufferProducer)); return NO_ERROR; } break; case PREPARE_ASYNC: { diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp index a710fd7..fdbc747 100644 --- a/media/libmedia/IMediaRecorder.cpp +++ b/media/libmedia/IMediaRecorder.cpp @@ -23,7 +23,7 @@ #include #include #include -#include +#include #include @@ -73,7 +73,7 @@ public: return reply.readInt32(); } - sp querySurfaceMediaSource() + sp querySurfaceMediaSource() { ALOGV("Query SurfaceMediaSource"); Parcel data, reply; @@ -83,7 +83,7 @@ public: if (returnedNull) { return NULL; } - return interface_cast(reply.readStrongBinder()); + return interface_cast(reply.readStrongBinder()); } status_t setPreviewSurface(const sp& surface) @@ -444,7 +444,7 @@ status_t BnMediaRecorder::onTransact( CHECK_INTERFACE(IMediaRecorder, data, reply); // call the mediaserver side to create // a surfacemediasource - sp surfaceMediaSource = querySurfaceMediaSource(); + sp surfaceMediaSource = querySurfaceMediaSource(); // The mediaserver might have failed to create a source int returnedNull= (surfaceMediaSource == NULL) ? 
1 : 0 ; reply->writeInt32(returnedNull); diff --git a/media/libmedia/IRemoteDisplayClient.cpp b/media/libmedia/IRemoteDisplayClient.cpp index 4a1b570..5c494b3 100644 --- a/media/libmedia/IRemoteDisplayClient.cpp +++ b/media/libmedia/IRemoteDisplayClient.cpp @@ -18,7 +18,7 @@ #include #include -#include +#include #include namespace android { @@ -37,12 +37,12 @@ public: { } - void onDisplayConnected(const sp& surfaceTexture, + void onDisplayConnected(const sp& bufferProducer, uint32_t width, uint32_t height, uint32_t flags) { Parcel data, reply; data.writeInterfaceToken(IRemoteDisplayClient::getInterfaceDescriptor()); - data.writeStrongBinder(surfaceTexture->asBinder()); + data.writeStrongBinder(bufferProducer->asBinder()); data.writeInt32(width); data.writeInt32(height); data.writeInt32(flags); @@ -75,8 +75,8 @@ status_t BnRemoteDisplayClient::onTransact( switch (code) { case ON_DISPLAY_CONNECTED: { CHECK_INTERFACE(IRemoteDisplayClient, data, reply); - sp surfaceTexture( - interface_cast(data.readStrongBinder())); + sp surfaceTexture( + interface_cast(data.readStrongBinder())); uint32_t width = data.readInt32(); uint32_t height = data.readInt32(); uint32_t flags = data.readInt32(); diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index bbbf4b6..ae527e8 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -221,12 +221,12 @@ status_t MediaPlayer::getMetadata(bool update_only, bool apply_filter, Parcel *m } status_t MediaPlayer::setVideoSurfaceTexture( - const sp& surfaceTexture) + const sp& bufferProducer) { ALOGV("setVideoSurfaceTexture"); Mutex::Autolock _l(mLock); if (mPlayer == 0) return NO_INIT; - return mPlayer->setVideoSurfaceTexture(surfaceTexture); + return mPlayer->setVideoSurfaceTexture(bufferProducer); } // must call with lock held diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp index 9541015..95c7f3e 100644 --- a/media/libmedia/mediarecorder.cpp +++ b/media/libmedia/mediarecorder.cpp @@ -24,7 +24,7 @@ #include #include #include // for MEDIA_ERROR_SERVER_DIED -#include +#include namespace android { @@ -348,9 +348,9 @@ status_t MediaRecorder::setVideoSize(int width, int height) } // Query a SurfaceMediaSurface through the Mediaserver, over the -// binder interface. This is used by the Filter Framework (MeidaEncoder) -// to get an object to hook up to ANativeWindow. -sp MediaRecorder:: +// binder interface. This is used by the Filter Framework (MediaEncoder) +// to get an object to hook up to ANativeWindow. +sp MediaRecorder:: querySurfaceMediaSourceFromMediaServer() { Mutex::Autolock _l(mLock); diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index c3e5c40..4ca0811 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -714,21 +714,21 @@ void MediaPlayerService::Client::disconnectNativeWindow() { } status_t MediaPlayerService::Client::setVideoSurfaceTexture( - const sp& surfaceTexture) + const sp& bufferProducer) { - ALOGV("[%d] setVideoSurfaceTexture(%p)", mConnId, surfaceTexture.get()); + ALOGV("[%d] setVideoSurfaceTexture(%p)", mConnId, bufferProducer.get()); sp p = getPlayer(); if (p == 0) return UNKNOWN_ERROR; - sp binder(surfaceTexture == NULL ? NULL : - surfaceTexture->asBinder()); + sp binder(bufferProducer == NULL ? 
NULL : + bufferProducer->asBinder()); if (mConnectedWindowBinder == binder) { return OK; } sp anw; - if (surfaceTexture != NULL) { - anw = new SurfaceTextureClient(surfaceTexture); + if (bufferProducer != NULL) { + anw = new SurfaceTextureClient(bufferProducer); status_t err = native_window_api_connect(anw.get(), NATIVE_WINDOW_API_MEDIA); @@ -745,10 +745,10 @@ status_t MediaPlayerService::Client::setVideoSurfaceTexture( } } - // Note that we must set the player's new SurfaceTexture before + // Note that we must set the player's new GraphicBufferProducer before // disconnecting the old one. Otherwise queue/dequeue calls could be made // on the disconnected ANW, which may result in errors. - status_t err = p->setVideoSurfaceTexture(surfaceTexture); + status_t err = p->setVideoSurfaceTexture(bufferProducer); disconnectNativeWindow(); diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index fd648df..afb6780 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -307,7 +307,7 @@ private: // IMediaPlayer interface virtual void disconnect(); virtual status_t setVideoSurfaceTexture( - const sp& surfaceTexture); + const sp& bufferProducer); virtual status_t prepareAsync(); virtual status_t start(); virtual status_t stop(); diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp index eadc8ee..c6d8b76 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.cpp +++ b/media/libmediaplayerservice/MediaRecorderClient.cpp @@ -38,7 +38,7 @@ #include "MediaPlayerService.h" #include "StagefrightRecorder.h" -#include +#include namespace android { @@ -56,7 +56,7 @@ static bool checkPermission(const char* permissionString) { } -sp MediaRecorderClient::querySurfaceMediaSource() +sp MediaRecorderClient::querySurfaceMediaSource() { ALOGV("Query SurfaceMediaSource"); Mutex::Autolock lock(mLock); diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h index c9ccf22..5623917 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.h +++ b/media/libmediaplayerservice/MediaRecorderClient.h @@ -25,7 +25,7 @@ namespace android { class MediaRecorderBase; class MediaPlayerService; class ICameraRecordingProxy; -class ISurfaceTexture; +class IGraphicBufferProducer; class MediaRecorderClient : public BnMediaRecorder { @@ -55,7 +55,7 @@ public: virtual status_t close(); virtual status_t release(); virtual status_t dump(int fd, const Vector& args) const; - virtual sp querySurfaceMediaSource(); + virtual sp querySurfaceMediaSource(); private: friend class MediaPlayerService; // for accessing private constructor diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h index f6f8f7b..24d59b4 100644 --- a/media/libmediaplayerservice/MidiFile.h +++ b/media/libmediaplayerservice/MidiFile.h @@ -36,7 +36,7 @@ public: virtual status_t setDataSource(int fd, int64_t offset, int64_t length); virtual status_t setVideoSurfaceTexture( - const sp& surfaceTexture) + const sp& bufferProducer) { return UNKNOWN_ERROR; } virtual status_t prepare(); virtual status_t prepareAsync(); diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp index 619c149..de61d9b 100644 --- a/media/libmediaplayerservice/StagefrightPlayer.cpp +++ b/media/libmediaplayerservice/StagefrightPlayer.cpp @@ -70,10 +70,10 @@ 
status_t StagefrightPlayer::setDataSource(const sp &source) { } status_t StagefrightPlayer::setVideoSurfaceTexture( - const sp &surfaceTexture) { + const sp &bufferProducer) { ALOGV("setVideoSurfaceTexture"); - return mPlayer->setSurfaceTexture(surfaceTexture); + return mPlayer->setSurfaceTexture(bufferProducer); } status_t StagefrightPlayer::prepare() { diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h index e89e18a..600945e 100644 --- a/media/libmediaplayerservice/StagefrightPlayer.h +++ b/media/libmediaplayerservice/StagefrightPlayer.h @@ -41,7 +41,7 @@ public: virtual status_t setDataSource(const sp &source); virtual status_t setVideoSurfaceTexture( - const sp &surfaceTexture); + const sp &bufferProducer); virtual status_t prepare(); virtual status_t prepareAsync(); virtual status_t start(); diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp index 57b0ec2..497dda6 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.cpp +++ b/media/libmediaplayerservice/StagefrightRecorder.cpp @@ -89,7 +89,7 @@ status_t StagefrightRecorder::init() { // The client side of mediaserver asks it to creat a SurfaceMediaSource // and return a interface reference. The client side will use that // while encoding GL Frames -sp StagefrightRecorder::querySurfaceMediaSource() const { +sp StagefrightRecorder::querySurfaceMediaSource() const { ALOGV("Get SurfaceMediaSource"); return mSurfaceMediaSource->getBufferQueue(); } diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h index ec5ce7e..351efd4 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.h +++ b/media/libmediaplayerservice/StagefrightRecorder.h @@ -35,7 +35,7 @@ struct MediaWriter; class MetaData; struct AudioSource; class MediaProfiles; -class ISurfaceTexture; +class IGraphicBufferProducer; class SurfaceMediaSource; struct StagefrightRecorder : public MediaRecorderBase { @@ -65,7 +65,7 @@ struct StagefrightRecorder : public MediaRecorderBase { virtual status_t getMaxAmplitude(int *max); virtual status_t dump(int fd, const Vector& args) const; // Querying a SurfaceMediaSourcer - virtual sp querySurfaceMediaSource() const; + virtual sp querySurfaceMediaSource() const; private: sp mCamera; @@ -116,7 +116,7 @@ private: bool mStarted; // Needed when GLFrames are encoded. - // An pointer + // An pointer // will be sent to the client side using which the // frame buffers will be queued and dequeued sp mSurfaceMediaSource; diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h index 91ffa7d..a3802eb 100644 --- a/media/libmediaplayerservice/TestPlayerStub.h +++ b/media/libmediaplayerservice/TestPlayerStub.h @@ -76,7 +76,7 @@ class TestPlayerStub : public MediaPlayerInterface { // All the methods below wrap the mPlayer instance. 
virtual status_t setVideoSurfaceTexture( - const android::sp& st) { + const android::sp& st) { return mPlayer->setVideoSurfaceTexture(st); } virtual status_t prepare() {return mPlayer->prepare();} diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index 0f30372..517fb34 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -41,7 +41,7 @@ #include #include #include -#include +#include #include "avc_utils.h" @@ -198,16 +198,16 @@ void NuPlayer::setDataSource(int fd, int64_t offset, int64_t length) { } void NuPlayer::setVideoSurfaceTextureAsync( - const sp &surfaceTexture) { + const sp &bufferProducer) { sp msg = new AMessage(kWhatSetVideoNativeWindow, id()); - if (surfaceTexture == NULL) { + if (bufferProducer == NULL) { msg->setObject("native-window", NULL); } else { msg->setObject( "native-window", new NativeWindowWrapper( - new SurfaceTextureClient(surfaceTexture))); + new SurfaceTextureClient(bufferProducer))); } msg->post(); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h index ca87be9..09fc0ba 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h @@ -43,7 +43,7 @@ struct NuPlayer : public AHandler { void setDataSource(int fd, int64_t offset, int64_t length); void setVideoSurfaceTextureAsync( - const sp &surfaceTexture); + const sp &bufferProducer); void setAudioSink(const sp &sink); void start(); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index a485dda..7043404 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -97,7 +97,7 @@ status_t NuPlayerDriver::setDataSource(const sp &source) { } status_t NuPlayerDriver::setVideoSurfaceTexture( - const sp &surfaceTexture) { + const sp &bufferProducer) { Mutex::Autolock autoLock(mLock); if (mResetInProgress) { @@ -106,7 +106,7 @@ status_t NuPlayerDriver::setVideoSurfaceTexture( mSetSurfaceInProgress = true; - mPlayer->setVideoSurfaceTextureAsync(surfaceTexture); + mPlayer->setVideoSurfaceTextureAsync(bufferProducer); while (mSetSurfaceInProgress) { mCondition.wait(mLock); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h index d551bf1..553c406 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h @@ -38,7 +38,7 @@ struct NuPlayerDriver : public MediaPlayerInterface { virtual status_t setDataSource(const sp &source); virtual status_t setVideoSurfaceTexture( - const sp &surfaceTexture); + const sp &bufferProducer); virtual status_t prepare(); virtual status_t prepareAsync(); virtual status_t start(); diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 1e2625a..23ce088 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -48,7 +48,7 @@ #include #include -#include +#include #include #include @@ -1178,12 +1178,12 @@ bool AwesomePlayer::isPlaying() const { return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN); } -status_t AwesomePlayer::setSurfaceTexture(const sp &surfaceTexture) { +status_t AwesomePlayer::setSurfaceTexture(const sp &bufferProducer) { Mutex::Autolock autoLock(mLock); status_t err; - if 
(surfaceTexture != NULL) { - err = setNativeWindow_l(new SurfaceTextureClient(surfaceTexture)); + if (bufferProducer != NULL) { + err = setNativeWindow_l(new SurfaceTextureClient(bufferProducer)); } else { err = setNativeWindow_l(NULL); } diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp index 2704a37..77f21b7 100644 --- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp +++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp @@ -24,7 +24,7 @@ #include #include #include -#include +#include namespace android { diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h index 1422687..2306f31 100644 --- a/media/libstagefright/include/AwesomePlayer.h +++ b/media/libstagefright/include/AwesomePlayer.h @@ -36,7 +36,7 @@ struct MediaBuffer; struct MediaExtractor; struct MediaSource; struct NuCachedSource2; -struct ISurfaceTexture; +struct IGraphicBufferProducer; class DrmManagerClinet; class DecryptHandle; @@ -81,7 +81,7 @@ struct AwesomePlayer { bool isPlaying() const; - status_t setSurfaceTexture(const sp &surfaceTexture); + status_t setSurfaceTexture(const sp &bufferProducer); void setAudioSink(const sp &audioSink); status_t setLooping(bool shouldLoop); diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp index a61d6a2..6a98509 100644 --- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp +++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp @@ -107,7 +107,7 @@ protected: window.get(), NULL); } else { ALOGV("No actual display. Choosing EGLSurface based on SurfaceMediaSource"); - sp sms = (new SurfaceMediaSource( + sp sms = (new SurfaceMediaSource( getSurfaceWidth(), getSurfaceHeight()))->getBufferQueue(); sp stc = new SurfaceTextureClient(sms); sp window = stc; @@ -361,7 +361,7 @@ protected: mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight); // Manual cast is required to avoid constructor ambiguity - mSTC = new SurfaceTextureClient(static_cast >( mSMS->getBufferQueue())); + mSTC = new SurfaceTextureClient(static_cast >( mSMS->getBufferQueue())); mANW = mSTC; } @@ -396,7 +396,7 @@ protected: ALOGV("SMS-GLTest::SetUp()"); android::ProcessState::self()->startThreadPool(); mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight); - mSTC = new SurfaceTextureClient(static_cast >( mSMS->getBufferQueue())); + mSTC = new SurfaceTextureClient(static_cast >( mSMS->getBufferQueue())); mANW = mSTC; // Doing the setup related to the GL Side @@ -482,7 +482,7 @@ sp SurfaceMediaSourceGLTest::setUpMediaRecorder(int fd, int video // query the mediarecorder for a surfacemeidasource and create an egl surface with that void SurfaceMediaSourceGLTest::setUpEGLSurfaceFromMediaRecorder(sp& mr) { - sp iST = mr->querySurfaceMediaSourceFromMediaServer(); + sp iST = mr->querySurfaceMediaSourceFromMediaServer(); mSTC = new SurfaceTextureClient(iST); mANW = mSTC; @@ -749,7 +749,7 @@ TEST_F(SurfaceMediaSourceTest, DISABLED_EncodingFromCpuYV12BufferNpotWriteMediaS mYuvTexHeight, 30); // get the reference to the surfacemediasource living in // mediaserver that is created by stagefrightrecorder - sp iST = mr->querySurfaceMediaSourceFromMediaServer(); + sp iST = mr->querySurfaceMediaSourceFromMediaServer(); mSTC = new SurfaceTextureClient(iST); mANW = mSTC; ASSERT_EQ(NO_ERROR, native_window_api_connect(mANW.get(), NATIVE_WINDOW_API_CPU)); @@ -781,7 +781,7 @@ TEST_F(SurfaceMediaSourceGLTest, 
ChooseAndroidRecordableEGLConfigDummyWriter) { ALOGV("Verify creating a surface w/ right config + dummy writer*********"); mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight); - mSTC = new SurfaceTextureClient(static_cast >( mSMS->getBufferQueue())); + mSTC = new SurfaceTextureClient(static_cast >( mSMS->getBufferQueue())); mANW = mSTC; DummyRecorder writer(mSMS); diff --git a/media/libstagefright/wifi-display/sink/RTPSink.cpp b/media/libstagefright/wifi-display/sink/RTPSink.cpp index 0918034..640e055 100644 --- a/media/libstagefright/wifi-display/sink/RTPSink.cpp +++ b/media/libstagefright/wifi-display/sink/RTPSink.cpp @@ -238,9 +238,9 @@ void RTPSink::Source::addReportBlock( RTPSink::RTPSink( const sp &netSession, - const sp &surfaceTex) + const sp &bufferProducer) : mNetSession(netSession), - mSurfaceTex(surfaceTex), + mSurfaceTex(bufferProducer), mRTPPort(0), mRTPSessionID(0), mRTCPSessionID(0), diff --git a/media/libstagefright/wifi-display/sink/RTPSink.h b/media/libstagefright/wifi-display/sink/RTPSink.h index a1d127d..2183fd6 100644 --- a/media/libstagefright/wifi-display/sink/RTPSink.h +++ b/media/libstagefright/wifi-display/sink/RTPSink.h @@ -35,7 +35,7 @@ struct TunnelRenderer; // the RTCP channel. struct RTPSink : public AHandler { RTPSink(const sp &netSession, - const sp &surfaceTex); + const sp &bufferProducer); // If TCP interleaving is used, no UDP sockets are created, instead // incoming RTP/RTCP packets (arriving on the RTSP control connection) @@ -66,7 +66,7 @@ private: struct StreamSource; sp mNetSession; - sp mSurfaceTex; + sp mSurfaceTex; KeyedVector > mSources; int32_t mRTPPort; diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp index b913124..8ffb877 100644 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp @@ -159,9 +159,9 @@ void TunnelRenderer::StreamSource::doSomeWork() { TunnelRenderer::TunnelRenderer( const sp ¬ifyLost, - const sp &surfaceTex) + const sp &bufferProducer) : mNotifyLost(notifyLost), - mSurfaceTex(surfaceTex), + mSurfaceTex(bufferProducer), mTotalBytesQueued(0ll), mLastDequeuedExtSeqNo(-1), mFirstFailedAttemptUs(-1ll), diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.h b/media/libstagefright/wifi-display/sink/TunnelRenderer.h index c9597e0..52e6e66 100644 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.h +++ b/media/libstagefright/wifi-display/sink/TunnelRenderer.h @@ -36,7 +36,7 @@ struct IStreamListener; struct TunnelRenderer : public AHandler { TunnelRenderer( const sp ¬ifyLost, - const sp &surfaceTex); + const sp &bufferProducer); sp dequeueBuffer(); @@ -55,7 +55,7 @@ private: mutable Mutex mLock; sp mNotifyLost; - sp mSurfaceTex; + sp mSurfaceTex; List > mPackets; int64_t mTotalBytesQueued; diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index c3e0470..0f0caf1 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -31,10 +31,10 @@ namespace android { WifiDisplaySink::WifiDisplaySink( const sp &netSession, - const sp &surfaceTex) + const sp &bufferProducer) : mState(UNDEFINED), mNetSession(netSession), - mSurfaceTex(surfaceTex), + mSurfaceTex(bufferProducer), mSessionID(0), mNextCSeq(1) { } diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h 
b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h index f886ee5..a508839 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h @@ -34,7 +34,7 @@ struct RTPSink; struct WifiDisplaySink : public AHandler { WifiDisplaySink( const sp &netSession, - const sp &surfaceTex = NULL); + const sp &bufferProducer = NULL); void start(const char *sourceHost, int32_t sourcePort); void start(const char *uri); @@ -76,7 +76,7 @@ private: State mState; sp mNetSession; - sp mSurfaceTex; + sp mSurfaceTex; AString mSetupURI; AString mRTSPHost; int32_t mSessionID; diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index 916f797..d6b87a7 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -786,7 +786,7 @@ status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) { return OK; } -sp WifiDisplaySource::PlaybackSession::getSurfaceTexture() { +sp WifiDisplaySource::PlaybackSession::getSurfaceTexture() { return mBufferQueue; } diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h index b9d193b..281548d 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.h +++ b/media/libstagefright/wifi-display/source/PlaybackSession.h @@ -26,7 +26,7 @@ namespace android { struct ABuffer; struct BufferQueue; struct IHDCP; -struct ISurfaceTexture; +struct IGraphicBufferProducer; struct MediaPuller; struct MediaSource; struct TSPacketizer; @@ -56,7 +56,7 @@ struct WifiDisplaySource::PlaybackSession : public AHandler { status_t finishPlay(); status_t pause(); - sp getSurfaceTexture(); + sp getSurfaceTexture(); int32_t width() const; int32_t height() const; diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 08f67f9..9ec1064 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -25,7 +25,7 @@ #include "Sender.h" #include -#include +#include #include #include #include diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index 03a1123..2ec9b4f 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -47,7 +47,7 @@ struct RemoteDisplayClient : public BnRemoteDisplayClient { RemoteDisplayClient(); virtual void onDisplayConnected( - const sp &surfaceTexture, + const sp &bufferProducer, uint32_t width, uint32_t height, uint32_t flags); @@ -67,7 +67,7 @@ private: bool mDone; sp mComposerClient; - sp mSurfaceTexture; + sp mSurfaceTexture; sp mDisplayBinder; DISALLOW_EVIL_CONSTRUCTORS(RemoteDisplayClient); @@ -83,14 +83,14 @@ RemoteDisplayClient::~RemoteDisplayClient() { } void RemoteDisplayClient::onDisplayConnected( - const sp &surfaceTexture, + const sp &bufferProducer, uint32_t width, uint32_t height, uint32_t flags) { ALOGI("onDisplayConnected width=%u, height=%u, flags = 0x%08x", width, height, flags); - mSurfaceTexture = surfaceTexture; + mSurfaceTexture = bufferProducer; mDisplayBinder = mComposerClient->createDisplay( String8("foo"), false /* secure */); diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index 
e804f77..70bf0ac 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -497,7 +497,7 @@ status_t Camera2Client::setPreviewDisplay( } status_t Camera2Client::setPreviewTexture( - const sp& surfaceTexture) { + const sp& bufferProducer) { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); Mutex::Autolock icl(mICameraLock); @@ -506,9 +506,9 @@ status_t Camera2Client::setPreviewTexture( sp binder; sp window; - if (surfaceTexture != 0) { - binder = surfaceTexture->asBinder(); - window = new SurfaceTextureClient(surfaceTexture); + if (bufferProducer != 0) { + binder = bufferProducer->asBinder(); + window = new SurfaceTextureClient(bufferProducer); } return setPreviewWindowL(binder, window); } diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h index 55ead02..4669958 100644 --- a/services/camera/libcameraservice/Camera2Client.h +++ b/services/camera/libcameraservice/Camera2Client.h @@ -49,7 +49,7 @@ public: virtual status_t unlock(); virtual status_t setPreviewDisplay(const sp& surface); virtual status_t setPreviewTexture( - const sp& surfaceTexture); + const sp& bufferProducer); virtual void setPreviewCallbackFlag(int flag); virtual status_t startPreview(); virtual void stopPreview(); diff --git a/services/camera/libcameraservice/CameraClient.cpp b/services/camera/libcameraservice/CameraClient.cpp index 006a9c9..f9cee0d 100644 --- a/services/camera/libcameraservice/CameraClient.cpp +++ b/services/camera/libcameraservice/CameraClient.cpp @@ -307,17 +307,17 @@ status_t CameraClient::setPreviewDisplay(const sp& surface) { return setPreviewWindow(binder, window); } -// set the SurfaceTexture that the preview will use +// set the SurfaceTextureClient that the preview will use status_t CameraClient::setPreviewTexture( - const sp& surfaceTexture) { - LOG1("setPreviewTexture(%p) (pid %d)", surfaceTexture.get(), + const sp& bufferProducer) { + LOG1("setPreviewTexture(%p) (pid %d)", bufferProducer.get(), getCallingPid()); sp binder; sp window; - if (surfaceTexture != 0) { - binder = surfaceTexture->asBinder(); - window = new SurfaceTextureClient(surfaceTexture); + if (bufferProducer != 0) { + binder = bufferProducer->asBinder(); + window = new SurfaceTextureClient(bufferProducer); } return setPreviewWindow(binder, window); } diff --git a/services/camera/libcameraservice/CameraClient.h b/services/camera/libcameraservice/CameraClient.h index 2f31c4e..7da3da7 100644 --- a/services/camera/libcameraservice/CameraClient.h +++ b/services/camera/libcameraservice/CameraClient.h @@ -33,7 +33,7 @@ public: virtual status_t lock(); virtual status_t unlock(); virtual status_t setPreviewDisplay(const sp& surface); - virtual status_t setPreviewTexture(const sp& surfaceTexture); + virtual status_t setPreviewTexture(const sp& bufferProducer); virtual void setPreviewCallbackFlag(int flag); virtual status_t startPreview(); virtual void stopPreview(); @@ -124,7 +124,7 @@ private: // Ensures atomicity among the public methods mutable Mutex mLock; - // This is a binder of Surface or SurfaceTexture. + // This is a binder of Surface or SurfaceTextureClient. 
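[Editor's note] The camera and wifi-display hunks above and below all apply the same pattern: the buffer-producer binder interface (its template argument is elided in the hunks) is wrapped in a SurfaceTextureClient to obtain the ANativeWindow the preview path consumes. A minimal sketch of that wrapping follows; the header paths and the IGraphicBufferProducer type name are assumptions, not quoted from this patch.

// Sketch only, not the exact AOSP code. Assumes the gui headers of this era
// and that the producer interface is IGraphicBufferProducer.
#include <binder/IBinder.h>
#include <gui/IGraphicBufferProducer.h>
#include <gui/SurfaceTextureClient.h>

using namespace android;

static void wrapPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer,
                              sp<IBinder>* outBinder,
                              sp<ANativeWindow>* outWindow) {
    if (bufferProducer != 0) {
        // Keep the binder identity so repeated calls with the same target
        // can be detected and ignored by the caller.
        *outBinder = bufferProducer->asBinder();
        // SurfaceTextureClient adapts the producer end of a BufferQueue to
        // the ANativeWindow interface expected by the preview machinery.
        *outWindow = new SurfaceTextureClient(bufferProducer);
    } else {
        outBinder->clear();
        outWindow->clear();
    }
}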
sp mSurface; sp mPreviewWindow; diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index 4dab340..41365a0 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -81,7 +81,7 @@ public: virtual status_t lock() = 0; virtual status_t unlock() = 0; virtual status_t setPreviewDisplay(const sp& surface) = 0; - virtual status_t setPreviewTexture(const sp& surfaceTexture) = 0; + virtual status_t setPreviewTexture(const sp& bufferProducer)=0; virtual void setPreviewCallbackFlag(int flag) = 0; virtual status_t startPreview() = 0; virtual void stopPreview() = 0; -- cgit v1.1 From 21ad778dcfcddb8f8fd9dc3fe4992fbef246c511 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Tue, 18 Dec 2012 12:28:27 -0800 Subject: Report buffer size even when using hardware buffers This makes it so that the buffers dequeued from a MediaCodec show a non-zero size when there's actually data in them, which allows the caller to distinguish between a valid frame and an empty buffer. Change-Id: I891b2301501e26f0b4e8cf2e24c169e501a6d026 --- media/libstagefright/ACodec.cpp | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 2b20ab0..7920d32 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -612,7 +612,7 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() { sp graphicBuffer(new GraphicBuffer(buf, false)); BufferInfo info; info.mStatus = BufferInfo::OWNED_BY_US; - info.mData = new ABuffer(0); + info.mData = new ABuffer(NULL /* data */, def.nBufferSize /* capacity */); info.mGraphicBuffer = graphicBuffer; mBuffers[kPortIndexOutput].push(info); @@ -2868,15 +2868,14 @@ bool ACodec::BaseState::onOMXFillBufferDone( mCodec->sendFormatChange(); } - if (mCodec->mNativeWindow == NULL) { - info->mData->setRange(rangeOffset, rangeLength); - + info->mData->setRange(rangeOffset, rangeLength); #if 0 + if (mCodec->mNativeWindow == NULL) { if (IsIDR(info->mData)) { ALOGI("IDR frame"); } -#endif } +#endif if (mCodec->mSkipCutBuffer != NULL) { mCodec->mSkipCutBuffer->submit(info->mData); -- cgit v1.1 From 6fc72b01a3b67903b52f1d33b1ad5c960b5365f1 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Mon, 17 Dec 2012 16:35:08 -0800 Subject: Make codecs reconfigurable Change-Id: I3dd46cb4401493becbf6152f4dcd5a8f1e9a0b44 --- media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 5 +++++ media/libstagefright/codecs/aacdec/SoftAAC2.h | 1 + media/libstagefright/codecs/mp3dec/SoftMP3.cpp | 5 +++++ media/libstagefright/codecs/mp3dec/SoftMP3.h | 1 + media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp | 5 +++++ media/libstagefright/codecs/vorbis/dec/SoftVorbis.h | 1 + media/libstagefright/include/SimpleSoftOMXComponent.h | 1 + media/libstagefright/omx/SimpleSoftOMXComponent.cpp | 8 ++++++++ 8 files changed, 27 insertions(+) diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp index d88813e..a8ab2ac 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp @@ -594,6 +594,11 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) { } } +void SoftAAC2::onReset() { + aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); + mIsFirst = true; +} + void SoftAAC2::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { if (portIndex != 1) { return; diff --git 
a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h index 0353196..6957ade 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.h +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h @@ -41,6 +41,7 @@ protected: virtual void onQueueFilled(OMX_U32 portIndex); virtual void onPortFlushCompleted(OMX_U32 portIndex); virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); private: enum { diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp index fb1135c..849be87 100644 --- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp +++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp @@ -343,6 +343,11 @@ void SoftMP3::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { } } +void SoftMP3::onReset() { + pvmp3_InitDecoder(mConfig, mDecoderBuf); + mIsFirst = true; +} + } // namespace android android::SoftOMXComponent *createSoftOMXComponent( diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.h b/media/libstagefright/codecs/mp3dec/SoftMP3.h index 3a05466..4af91ea 100644 --- a/media/libstagefright/codecs/mp3dec/SoftMP3.h +++ b/media/libstagefright/codecs/mp3dec/SoftMP3.h @@ -42,6 +42,7 @@ protected: virtual void onQueueFilled(OMX_U32 portIndex); virtual void onPortFlushCompleted(OMX_U32 portIndex); virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); private: enum { diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp index ac88107..13dfc8c 100644 --- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp +++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp @@ -410,6 +410,11 @@ void SoftVorbis::onPortFlushCompleted(OMX_U32 portIndex) { } } +void SoftVorbis::onReset() { + mNumFramesOutput = 0; + vorbis_dsp_restart(mState); +} + void SoftVorbis::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { if (portIndex != 1) { return; diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h index e252f55..cb628a0 100644 --- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h +++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h @@ -43,6 +43,7 @@ protected: virtual void onQueueFilled(OMX_U32 portIndex); virtual void onPortFlushCompleted(OMX_U32 portIndex); virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); private: enum { diff --git a/media/libstagefright/include/SimpleSoftOMXComponent.h b/media/libstagefright/include/SimpleSoftOMXComponent.h index 50cd275..f8c61eb 100644 --- a/media/libstagefright/include/SimpleSoftOMXComponent.h +++ b/media/libstagefright/include/SimpleSoftOMXComponent.h @@ -71,6 +71,7 @@ protected: virtual void onPortFlushCompleted(OMX_U32 portIndex); virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); PortInfo *editPortInfo(OMX_U32 portIndex); diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp index c79e01f..4999663 100644 --- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp +++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp @@ -450,6 +450,10 @@ void SimpleSoftOMXComponent::onChangeState(OMX_STATETYPE state) { checkTransitions(); } +void SimpleSoftOMXComponent::onReset() { + // no-op +} + void SimpleSoftOMXComponent::onPortEnable(OMX_U32 portIndex, bool enable) { CHECK_LT(portIndex, mPorts.size()); @@ 
-581,6 +585,10 @@ void SimpleSoftOMXComponent::checkTransitions() { if (transitionComplete) { mState = mTargetState; + if (mState == OMX_StateLoaded) { + onReset(); + } + notify(OMX_EventCmdComplete, OMX_CommandStateSet, mState, NULL); } } -- cgit v1.1 From 8d6cc842e8d525405c68e57fdf3bc5da0b4d7e87 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 3 Feb 2012 11:06:53 -0800 Subject: Remove unnecessary parameter Just get the parameter on server side Change-Id: I433a63104dbb257e0d862be2ab61847cb36d1c15 --- cmds/stagefright/stagefright.cpp | 3 +-- cmds/stagefright/stream.cpp | 2 +- include/media/IAudioFlinger.h | 4 +--- include/media/IMediaPlayerService.h | 6 +++--- media/libmedia/AudioEffect.cpp | 2 +- media/libmedia/AudioRecord.cpp | 2 +- media/libmedia/AudioTrack.cpp | 3 +-- media/libmedia/IAudioFlinger.cpp | 16 ++++------------ media/libmedia/IMediaPlayerService.cpp | 18 ++++++------------ media/libmedia/mediametadataretriever.cpp | 2 +- media/libmedia/mediaplayer.cpp | 6 +++--- media/libmedia/mediarecorder.cpp | 2 +- media/libmediaplayerservice/MediaPlayerService.cpp | 9 ++++++--- media/libmediaplayerservice/MediaPlayerService.h | 6 +++--- .../wifi-display/sink/TunnelRenderer.cpp | 2 +- services/audioflinger/AudioFlinger.cpp | 7 ++++--- services/audioflinger/AudioFlinger.h | 4 +--- 17 files changed, 39 insertions(+), 55 deletions(-) diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 148b66e..ad68a21 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -22,7 +22,6 @@ #include #include -#include #include "jpeg.h" #include "SineSource.h" @@ -821,7 +820,7 @@ int main(int argc, char **argv) { CHECK(service.get() != NULL); sp retriever = - service->createMetadataRetriever(getpid()); + service->createMetadataRetriever(); CHECK(retriever != NULL); diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp index 7329dcc..af6afe0 100644 --- a/cmds/stagefright/stream.cpp +++ b/cmds/stagefright/stream.cpp @@ -370,7 +370,7 @@ int main(int argc, char **argv) { } sp player = - service->create(getpid(), client, 0); + service->create(client, 0); if (player != NULL && player->setDataSource(source) == NO_ERROR) { player->setVideoSurfaceTexture(surface->getSurfaceTexture()); diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index 9727143..9c3067e 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -56,7 +56,6 @@ public: * return null if the track cannot be created. 
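[Editor's note] The "Remove unnecessary parameter" change being applied in the IAudioFlinger/IMediaPlayerService hunks around this point follows one rule: stop marshalling the caller's pid across binder and instead ask the binder driver on the server side, which cannot be spoofed by the client. A hedged sketch of the server-side pattern; the surrounding class here is hypothetical, only the IPCThreadState calls are real API.

// Illustrative only: the Bn-side pattern used by the AudioFlinger and
// MediaPlayerService hunks further down. FakeService is a stand-in class.
#include <binder/IPCThreadState.h>
#include <utils/RefBase.h>

namespace android {

struct FakeService : public RefBase {
    void onClientRequest() {
        // Per-transaction caller identity recorded by the binder driver;
        // valid only while this transaction is being handled.
        const pid_t pid = IPCThreadState::self()->getCallingPid();
        const uid_t uid = IPCThreadState::self()->getCallingUid();
        registerClient(pid, uid);
    }
    void registerClient(pid_t /*pid*/, uid_t /*uid*/) { /* bookkeeping */ }
};

}  // namespace android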
*/ virtual sp createTrack( - pid_t pid, audio_stream_type_t streamType, uint32_t sampleRate, audio_format_t format, @@ -70,7 +69,6 @@ public: status_t *status) = 0; virtual sp openRecord( - pid_t pid, audio_io_handle_t input, uint32_t sampleRate, audio_format_t format, @@ -174,7 +172,7 @@ public: virtual status_t getEffectDescriptor(const effect_uuid_t *pEffectUUID, effect_descriptor_t *pDescriptor) const = 0; - virtual sp createEffect(pid_t pid, + virtual sp createEffect( effect_descriptor_t *pDesc, const sp& client, int32_t priority, diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h index 7a89135..44db5bc 100644 --- a/include/media/IMediaPlayerService.h +++ b/include/media/IMediaPlayerService.h @@ -44,9 +44,9 @@ class IMediaPlayerService: public IInterface public: DECLARE_META_INTERFACE(MediaPlayerService); - virtual sp createMediaRecorder(pid_t pid) = 0; - virtual sp createMetadataRetriever(pid_t pid) = 0; - virtual sp create(pid_t pid, const sp& client, int audioSessionId = 0) = 0; + virtual sp createMediaRecorder() = 0; + virtual sp createMetadataRetriever() = 0; + virtual sp create(const sp& client, int audioSessionId = 0) = 0; virtual sp decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) = 0; virtual sp decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) = 0; diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp index 3317d57..8dfffb3 100644 --- a/media/libmedia/AudioEffect.cpp +++ b/media/libmedia/AudioEffect.cpp @@ -127,7 +127,7 @@ status_t AudioEffect::set(const effect_uuid_t *type, mIEffectClient = new EffectClient(this); - iEffect = audioFlinger->createEffect(getpid(), &mDescriptor, + iEffect = audioFlinger->createEffect((effect_descriptor_t *)&mDescriptor, mIEffectClient, priority, io, mSessionId, &mStatus, &mId, &enabled); if (iEffect == 0 || (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS)) { diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index c2ef68c..3db69a4 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -444,7 +444,7 @@ status_t AudioRecord::openRecord_l( // FIXME see similar logic at AudioTrack int originalSessionId = mSessionId; - sp record = audioFlinger->openRecord(getpid(), input, + sp record = audioFlinger->openRecord(input, sampleRate, format, mChannelMask, frameCount, diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 1d87ff8..d0872f0 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -837,8 +837,7 @@ status_t AudioTrack::createTrack_l( } } - sp track = audioFlinger->createTrack(getpid(), - streamType, + sp track = audioFlinger->createTrack(streamType, sampleRate, // AudioFlinger only sees 16-bit PCM format == AUDIO_FORMAT_PCM_8_BIT ? 
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index c5fbbf0..2f18680 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -84,7 +84,6 @@ public: } virtual sp createTrack( - pid_t pid, audio_stream_type_t streamType, uint32_t sampleRate, audio_format_t format, @@ -100,7 +99,6 @@ public: Parcel data, reply; sp track; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeInt32(pid); data.writeInt32((int32_t) streamType); data.writeInt32(sampleRate); data.writeInt32(format); @@ -138,7 +136,6 @@ public: } virtual sp openRecord( - pid_t pid, audio_io_handle_t input, uint32_t sampleRate, audio_format_t format, @@ -152,7 +149,6 @@ public: Parcel data, reply; sp record; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeInt32(pid); data.writeInt32((int32_t) input); data.writeInt32(sampleRate); data.writeInt32(format); @@ -612,7 +608,7 @@ public: return NO_ERROR; } - virtual sp createEffect(pid_t pid, + virtual sp createEffect( effect_descriptor_t *pDesc, const sp& client, int32_t priority, @@ -633,7 +629,6 @@ public: } data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeInt32(pid); data.write(pDesc, sizeof(effect_descriptor_t)); data.writeStrongBinder(client->asBinder()); data.writeInt32(priority); @@ -712,7 +707,6 @@ status_t BnAudioFlinger::onTransact( switch (code) { case CREATE_TRACK: { CHECK_INTERFACE(IAudioFlinger, data, reply); - pid_t pid = data.readInt32(); int streamType = data.readInt32(); uint32_t sampleRate = data.readInt32(); audio_format_t format = (audio_format_t) data.readInt32(); @@ -724,7 +718,7 @@ status_t BnAudioFlinger::onTransact( pid_t tid = (pid_t) data.readInt32(); int sessionId = data.readInt32(); status_t status; - sp track = createTrack(pid, + sp track = createTrack( (audio_stream_type_t) streamType, sampleRate, format, channelMask, frameCount, &flags, buffer, output, tid, &sessionId, &status); reply->writeInt32(flags); @@ -735,7 +729,6 @@ status_t BnAudioFlinger::onTransact( } break; case OPEN_RECORD: { CHECK_INTERFACE(IAudioFlinger, data, reply); - pid_t pid = data.readInt32(); audio_io_handle_t input = (audio_io_handle_t) data.readInt32(); uint32_t sampleRate = data.readInt32(); audio_format_t format = (audio_format_t) data.readInt32(); @@ -745,7 +738,7 @@ status_t BnAudioFlinger::onTransact( pid_t tid = (pid_t) data.readInt32(); int sessionId = data.readInt32(); status_t status; - sp record = openRecord(pid, input, + sp record = openRecord(input, sampleRate, format, channelMask, frameCount, flags, tid, &sessionId, &status); reply->writeInt32(sessionId); reply->writeInt32(status); @@ -1021,7 +1014,6 @@ status_t BnAudioFlinger::onTransact( } case CREATE_EFFECT: { CHECK_INTERFACE(IAudioFlinger, data, reply); - pid_t pid = data.readInt32(); effect_descriptor_t desc; data.read(&desc, sizeof(effect_descriptor_t)); sp client = interface_cast(data.readStrongBinder()); @@ -1032,7 +1024,7 @@ status_t BnAudioFlinger::onTransact( int id; int enabled; - sp effect = createEffect(pid, &desc, client, priority, output, sessionId, + sp effect = createEffect(&desc, client, priority, output, sessionId, &status, &id, &enabled); reply->writeInt32(status); reply->writeInt32(id); diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp index c0a0260..ae76c10 100644 --- a/media/libmedia/IMediaPlayerService.cpp +++ b/media/libmedia/IMediaPlayerService.cpp @@ -56,20 +56,18 @@ public: { } - virtual sp 
createMetadataRetriever(pid_t pid) + virtual sp createMetadataRetriever() { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); - data.writeInt32(pid); remote()->transact(CREATE_METADATA_RETRIEVER, data, &reply); return interface_cast(reply.readStrongBinder()); } virtual sp create( - pid_t pid, const sp& client, int audioSessionId) { + const sp& client, int audioSessionId) { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); - data.writeInt32(pid); data.writeStrongBinder(client->asBinder()); data.writeInt32(audioSessionId); @@ -77,11 +75,10 @@ public: return interface_cast(reply.readStrongBinder()); } - virtual sp createMediaRecorder(pid_t pid) + virtual sp createMediaRecorder() { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); - data.writeInt32(pid); remote()->transact(CREATE_MEDIA_RECORDER, data, &reply); return interface_cast(reply.readStrongBinder()); } @@ -168,11 +165,10 @@ status_t BnMediaPlayerService::onTransact( switch (code) { case CREATE: { CHECK_INTERFACE(IMediaPlayerService, data, reply); - pid_t pid = data.readInt32(); sp client = interface_cast(data.readStrongBinder()); int audioSessionId = data.readInt32(); - sp player = create(pid, client, audioSessionId); + sp player = create(client, audioSessionId); reply->writeStrongBinder(player->asBinder()); return NO_ERROR; } break; @@ -206,15 +202,13 @@ status_t BnMediaPlayerService::onTransact( } break; case CREATE_MEDIA_RECORDER: { CHECK_INTERFACE(IMediaPlayerService, data, reply); - pid_t pid = data.readInt32(); - sp recorder = createMediaRecorder(pid); + sp recorder = createMediaRecorder(); reply->writeStrongBinder(recorder->asBinder()); return NO_ERROR; } break; case CREATE_METADATA_RETRIEVER: { CHECK_INTERFACE(IMediaPlayerService, data, reply); - pid_t pid = data.readInt32(); - sp retriever = createMetadataRetriever(pid); + sp retriever = createMetadataRetriever(); reply->writeStrongBinder(retriever->asBinder()); return NO_ERROR; } break; diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp index b0241aa..110b94c 100644 --- a/media/libmedia/mediametadataretriever.cpp +++ b/media/libmedia/mediametadataretriever.cpp @@ -64,7 +64,7 @@ MediaMetadataRetriever::MediaMetadataRetriever() ALOGE("failed to obtain MediaMetadataRetrieverService"); return; } - sp retriever(service->createMetadataRetriever(getpid())); + sp retriever(service->createMetadataRetriever()); if (retriever == 0) { ALOGE("failed to create IMediaMetadataRetriever object from server"); } diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index bbbf4b6..dbff8dc 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -143,7 +143,7 @@ status_t MediaPlayer::setDataSource( if (url != NULL) { const sp& service(getMediaPlayerService()); if (service != 0) { - sp player(service->create(getpid(), this, mAudioSessionId)); + sp player(service->create(this, mAudioSessionId)); if ((NO_ERROR != doSetRetransmitEndpoint(player)) || (NO_ERROR != player->setDataSource(url, headers))) { player.clear(); @@ -160,7 +160,7 @@ status_t MediaPlayer::setDataSource(int fd, int64_t offset, int64_t length) status_t err = UNKNOWN_ERROR; const sp& service(getMediaPlayerService()); if (service != 0) { - sp player(service->create(getpid(), this, mAudioSessionId)); + sp player(service->create(this, mAudioSessionId)); if ((NO_ERROR != doSetRetransmitEndpoint(player)) || 
(NO_ERROR != player->setDataSource(fd, offset, length))) { player.clear(); @@ -176,7 +176,7 @@ status_t MediaPlayer::setDataSource(const sp &source) status_t err = UNKNOWN_ERROR; const sp& service(getMediaPlayerService()); if (service != 0) { - sp player(service->create(getpid(), this, mAudioSessionId)); + sp player(service->create(this, mAudioSessionId)); if ((NO_ERROR != doSetRetransmitEndpoint(player)) || (NO_ERROR != player->setDataSource(source))) { player.clear(); diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp index 9541015..46a8f54 100644 --- a/media/libmedia/mediarecorder.cpp +++ b/media/libmedia/mediarecorder.cpp @@ -620,7 +620,7 @@ MediaRecorder::MediaRecorder() : mSurfaceMediaSource(NULL) const sp& service(getMediaPlayerService()); if (service != NULL) { - mMediaRecorder = service->createMediaRecorder(getpid()); + mMediaRecorder = service->createMediaRecorder(); } if (mMediaRecorder != NULL) { mCurrentState = MEDIA_RECORDER_IDLE; diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index c3e5c40..c211072 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -224,8 +224,9 @@ MediaPlayerService::~MediaPlayerService() ALOGV("MediaPlayerService destroyed"); } -sp MediaPlayerService::createMediaRecorder(pid_t pid) +sp MediaPlayerService::createMediaRecorder() { + pid_t pid = IPCThreadState::self()->getCallingPid(); sp recorder = new MediaRecorderClient(this, pid); wp w = recorder; Mutex::Autolock lock(mLock); @@ -241,16 +242,18 @@ void MediaPlayerService::removeMediaRecorderClient(wp clien ALOGV("Delete media recorder client"); } -sp MediaPlayerService::createMetadataRetriever(pid_t pid) +sp MediaPlayerService::createMetadataRetriever() { + pid_t pid = IPCThreadState::self()->getCallingPid(); sp retriever = new MetadataRetrieverClient(pid); ALOGV("Create new media retriever from pid %d", pid); return retriever; } -sp MediaPlayerService::create(pid_t pid, const sp& client, +sp MediaPlayerService::create(const sp& client, int audioSessionId) { + pid_t pid = IPCThreadState::self()->getCallingPid(); int32_t connId = android_atomic_inc(&mNextConnId); sp c = new Client( diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index fd648df..a8af66e 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -239,11 +239,11 @@ public: static void instantiate(); // IMediaPlayerService interface - virtual sp createMediaRecorder(pid_t pid); + virtual sp createMediaRecorder(); void removeMediaRecorderClient(wp client); - virtual sp createMetadataRetriever(pid_t pid); + virtual sp createMetadataRetriever(); - virtual sp create(pid_t pid, const sp& client, int audioSessionId); + virtual sp create(const sp& client, int audioSessionId); virtual sp decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat); virtual sp decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat); diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp index b913124..558fd41 100644 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp @@ -370,7 +370,7 @@ void TunnelRenderer::initPlayer() { mPlayerClient = new 
PlayerClient; - mPlayer = service->create(getpid(), mPlayerClient, 0); + mPlayer = service->create(mPlayerClient, 0); CHECK(mPlayer != NULL); CHECK_EQ(mPlayer->setDataSource(mStreamSource), (status_t)OK); diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 514fcb1..5f5b041 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -344,7 +344,6 @@ sp AudioFlinger::registerPid_l(pid_t pid) sp AudioFlinger::createTrack( - pid_t pid, audio_stream_type_t streamType, uint32_t sampleRate, audio_format_t format, @@ -389,6 +388,7 @@ sp AudioFlinger::createTrack( goto Exit; } + pid_t pid = IPCThreadState::self()->getCallingPid(); client = registerPid_l(pid); ALOGV("createTrack() sessionId: %d", (sessionId == NULL) ? -2 : *sessionId); @@ -1120,7 +1120,6 @@ void AudioFlinger::NotificationClient::binderDied(const wp& who) // ---------------------------------------------------------------------------- sp AudioFlinger::openRecord( - pid_t pid, audio_io_handle_t input, uint32_t sampleRate, audio_format_t format, @@ -1154,6 +1153,7 @@ sp AudioFlinger::openRecord( goto Exit; } + pid_t pid = IPCThreadState::self()->getCallingPid(); client = registerPid_l(pid); // If no audio session id is provided, create one here @@ -1877,7 +1877,7 @@ status_t AudioFlinger::getEffectDescriptor(const effect_uuid_t *pUuid, } -sp AudioFlinger::createEffect(pid_t pid, +sp AudioFlinger::createEffect( effect_descriptor_t *pDesc, const sp& effectClient, int32_t priority, @@ -1891,6 +1891,7 @@ sp AudioFlinger::createEffect(pid_t pid, sp handle; effect_descriptor_t desc; + pid_t pid = IPCThreadState::self()->getCallingPid(); ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d", pid, effectClient.get(), priority, sessionId, io); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 6d3f0a1..a28f47e 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -92,7 +92,6 @@ public: // IAudioFlinger interface, in binder opcode order virtual sp createTrack( - pid_t pid, audio_stream_type_t streamType, uint32_t sampleRate, audio_format_t format, @@ -106,7 +105,6 @@ public: status_t *status); virtual sp openRecord( - pid_t pid, audio_io_handle_t input, uint32_t sampleRate, audio_format_t format, @@ -197,7 +195,7 @@ public: virtual status_t getEffectDescriptor(const effect_uuid_t *pUuid, effect_descriptor_t *descriptor) const; - virtual sp createEffect(pid_t pid, + virtual sp createEffect( effect_descriptor_t *pDesc, const sp& effectClient, int32_t priority, -- cgit v1.1 From 4c44e9fed87ff6363393f2559b150291242da247 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Thu, 20 Dec 2012 11:19:49 -0800 Subject: Ensure proper EOS behavior If a buffer is tagged with EOS but has data in it, decode that data instead of ignoring it. 
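[Editor's note] The SoftMPEG4 and SoftAVC hunks that follow implement the EOS rule stated above. A condensed sketch of the intended control flow, using plain structs instead of the real OMX_BUFFERHEADERTYPE plumbing: an input buffer that carries both payload and the EOS flag is decoded first, and EOS is then propagated on the matching output buffer.

// Simplified sketch; types and flag values are stand-ins, the logic mirrors
// the intent of the hunks below.
#include <cstdint>
#include <cstddef>

enum : uint32_t { FLAG_EOS = 1u << 0 };

struct Buf {
    const uint8_t* data;
    size_t         filledLen;
    uint32_t       flags;
};

static bool handleInput(const Buf& in, Buf* out, bool* sawEos) {
    if ((in.flags & FLAG_EOS) && in.filledLen == 0) {
        *sawEos = true;          // pure EOS marker: nothing left to decode
        return false;
    }
    // ... decode in.data / in.filledLen into *out here ...
    if (in.flags & FLAG_EOS) {
        *sawEos = true;          // EOS with payload: decode, then mark EOS
        out->flags |= FLAG_EOS;  // downstream sees the last frame plus EOS
    }
    return true;
}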
Change-Id: Ie41c8485c3ad7fe7d9c64f0752c2e7601d91d602 --- media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp | 8 ++++++-- media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp | 10 +++------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp index d527fde..020cc0a 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp +++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp @@ -326,7 +326,7 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) { OMX_BUFFERHEADERTYPE *outHeader = port->mBuffers.editItemAt(mNumSamplesOutput & 1).mHeader; - if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && inHeader->nFilledLen == 0) { inQueue.erase(inQueue.begin()); inInfo->mOwnedByUs = false; notifyEmptyBufferDone(inHeader); @@ -445,6 +445,11 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) { inHeader->nOffset += bufferSize; inHeader->nFilledLen = 0; + if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + outHeader->nFlags = OMX_BUFFERFLAG_EOS; + } else { + outHeader->nFlags = 0; + } if (inHeader->nFilledLen == 0) { inInfo->mOwnedByUs = false; @@ -458,7 +463,6 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) { outHeader->nOffset = 0; outHeader->nFilledLen = (mWidth * mHeight * 3) / 2; - outHeader->nFlags = 0; List::iterator it = outQueue.begin(); while ((*it)->mHeader != outHeader) { diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp index 6c3f834..6e36651 100644 --- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp +++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp @@ -311,18 +311,14 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) { BufferInfo *inInfo = *inQueue.begin(); OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; ++mPicId; - if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { - inQueue.erase(inQueue.begin()); - inInfo->mOwnedByUs = false; - notifyEmptyBufferDone(inHeader); - mEOSStatus = INPUT_EOS_SEEN; - continue; - } OMX_BUFFERHEADERTYPE *header = new OMX_BUFFERHEADERTYPE; memset(header, 0, sizeof(OMX_BUFFERHEADERTYPE)); header->nTimeStamp = inHeader->nTimeStamp; header->nFlags = inHeader->nFlags; + if (header->nFlags & OMX_BUFFERFLAG_EOS) { + mEOSStatus = INPUT_EOS_SEEN; + } mPicToHeaderMap.add(mPicId, header); inQueue.erase(inQueue.begin()); -- cgit v1.1 From c8e07e483c116ecaca1c9c6991588607f1187b75 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 20 Dec 2012 13:49:34 -0800 Subject: Properly release any MediaBuffer references associated with the encoder input buffers on shutdown. This worked fine before for an orderly shutdown but didn't release all references in case of error. 
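[Editor's note] The Converter hunks that follow factor the cleanup into a ReleaseMediaBufferReference() helper and call it from every teardown path. Condensed from those hunks: each encoder input ABuffer may carry a "mediaBuffer" pointer in its meta(), and that reference has to be dropped on error exits as well as on orderly shutdown, or the MediaBuffer leaks.

// Compact restatement of the helper introduced below; includes and calls are
// the real stagefright foundation API.
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaBuffer.h>

using namespace android;

static void releaseMediaBufferReference(const sp<ABuffer>& accessUnit) {
    void* mbuf;
    if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf) && mbuf != NULL) {
        // Detach the pointer first so a second pass cannot double-release it.
        accessUnit->meta()->setPointer("mediaBuffer", NULL);
        static_cast<MediaBuffer*>(mbuf)->release();
    }
}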
Change-Id: I0ea3eb26da76fbeb33cadf58d237b0c68a86ac4a related-to-bug: 7893090 --- .../wifi-display/source/Converter.cpp | 60 +++++++++++++++------- .../libstagefright/wifi-display/source/Converter.h | 1 + 2 files changed, 42 insertions(+), 19 deletions(-) diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index 7a87444..5628dec 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -67,13 +67,47 @@ Converter::Converter( mInitCheck = initEncoder(); if (mInitCheck != OK) { - if (mEncoder != NULL) { - mEncoder->release(); - mEncoder.clear(); - } + releaseEncoder(); + } +} + +static void ReleaseMediaBufferReference(const sp &accessUnit) { + void *mbuf; + if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf) + && mbuf != NULL) { + ALOGV("releasing mbuf %p", mbuf); + + accessUnit->meta()->setPointer("mediaBuffer", NULL); + + static_cast(mbuf)->release(); + mbuf = NULL; } } +void Converter::releaseEncoder() { + if (mEncoder == NULL) { + return; + } + + mEncoder->release(); + mEncoder.clear(); + + while (!mInputBufferQueue.empty()) { + sp accessUnit = *mInputBufferQueue.begin(); + mInputBufferQueue.erase(mInputBufferQueue.begin()); + + ReleaseMediaBufferReference(accessUnit); + } + + for (size_t i = 0; i < mEncoderInputBuffers.size(); ++i) { + sp accessUnit = mEncoderInputBuffers.itemAt(i); + ReleaseMediaBufferReference(accessUnit); + } + + mEncoderInputBuffers.clear(); + mEncoderOutputBuffers.clear(); +} + Converter::~Converter() { CHECK(mEncoder == NULL); } @@ -274,16 +308,7 @@ void Converter::onMessageReceived(const sp &msg) { sp accessUnit; CHECK(msg->findBuffer("accessUnit", &accessUnit)); - void *mbuf; - if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf) - && mbuf != NULL) { - ALOGV("releasing mbuf %p", mbuf); - - accessUnit->meta()->setPointer("mediaBuffer", NULL); - - static_cast(mbuf)->release(); - mbuf = NULL; - } + ReleaseMediaBufferReference(accessUnit); } break; } @@ -385,12 +410,9 @@ void Converter::onMessageReceived(const sp &msg) { case kWhatShutdown: { - ALOGI("shutting down encoder"); + ALOGI("shutting down %s encoder", mIsVideo ? "video" : "audio"); - if (mEncoder != NULL) { - mEncoder->release(); - mEncoder.clear(); - } + releaseEncoder(); AString mime; CHECK(mInputFormat->findString("mime", &mime)); diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h index 0665eea..3357d61 100644 --- a/media/libstagefright/wifi-display/source/Converter.h +++ b/media/libstagefright/wifi-display/source/Converter.h @@ -101,6 +101,7 @@ private: sp mPartialAudioAU; status_t initEncoder(); + void releaseEncoder(); status_t feedEncoderInputBuffers(); -- cgit v1.1 From a589764c3c0617c7a8996e929ce2d6db1cc01d77 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 4 Jan 2013 16:57:33 -0800 Subject: Fix bug in AudioRecord() constructor It was calling set() with wrong parameter list. This goes back to commit be916aa1267e2e6b1c148f51d11bcbbc79cb864c from 2010. 
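[Editor's note] The one-line fix follows, but it is worth noting why the compiler never complained for two years: the session id was landing in the bool threadCanCallJava slot via implicit conversion, and the trailing defaulted parameter silently absorbed the missing argument. A simplified, self-contained illustration (these are not the real AudioRecord signatures):

// Simplified demo of the implicit-conversion trap fixed below.
#include <iostream>

static void set(int frames, int notificationFrames,
                bool threadCanCallJava = false, int sessionId = 0) {
    std::cout << "frames=" << frames
              << " notif=" << notificationFrames
              << " canCallJava=" << threadCanCallJava
              << " session=" << sessionId << "\n";
}

int main() {
    int sessionId = 7;
    set(480, 240, sessionId);          // BUG: sessionId becomes 'true', session stays 0
    set(480, 240, false, sessionId);   // FIX: pass the boolean explicitly
    return 0;
}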
Change-Id: I2f6917765baf58260bf35e89a2cc59c199734ff6 --- media/libmedia/AudioRecord.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index f6e60fc..4f555c1 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -96,7 +96,7 @@ AudioRecord::AudioRecord( mProxy(NULL) { mStatus = set(inputSource, sampleRate, format, channelMask, - frameCount, cbf, user, notificationFrames, sessionId); + frameCount, cbf, user, notificationFrames, false /*threadCanCallJava*/, sessionId); } AudioRecord::~AudioRecord() -- cgit v1.1 From c695b7c660e1fe62fca35c6b25c1849f3c81a49f Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Fri, 4 Jan 2013 12:05:56 -0800 Subject: Camera2: Fix JPEG quality metadata type, new warnings. - JPEG quality tag is now a uint8_t, not an int. Update parameter code accordingly. - Fix new warnings about narrowing conversions. Bug: 7944244 Change-Id: Ie081c57c9e9323148614b170b132ffb98c0a0b9f --- .../camera/libcameraservice/camera2/Parameters.cpp | 37 ++++++++++++++-------- .../camera/libcameraservice/camera2/Parameters.h | 2 +- 2 files changed, 24 insertions(+), 15 deletions(-) diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index 6ab19b1..859e2e9 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -1207,23 +1207,24 @@ status_t Parameters::set(const String8& paramString) { } // JPEG_THUMBNAIL_QUALITY - validatedParams.jpegThumbQuality = - newParams.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY); - if (validatedParams.jpegThumbQuality < 0 || - validatedParams.jpegThumbQuality > 100) { + int quality = newParams.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY); + // also makes sure quality fits in uint8_t + if (quality < 0 || quality > 100) { ALOGE("%s: Requested JPEG thumbnail quality %d is not supported", - __FUNCTION__, validatedParams.jpegThumbQuality); + __FUNCTION__, quality); return BAD_VALUE; } + validatedParams.jpegThumbQuality = quality; // JPEG_QUALITY - validatedParams.jpegQuality = - newParams.getInt(CameraParameters::KEY_JPEG_QUALITY); - if (validatedParams.jpegQuality < 0 || validatedParams.jpegQuality > 100) { + quality = newParams.getInt(CameraParameters::KEY_JPEG_QUALITY); + // also makes sure quality fits in uint8_t + if (quality < 0 || quality > 100) { ALOGE("%s: Requested JPEG quality %d is not supported", - __FUNCTION__, validatedParams.jpegQuality); + __FUNCTION__, quality); return BAD_VALUE; } + validatedParams.jpegQuality = quality; // ROTATION validatedParams.jpegRotation = @@ -1752,7 +1753,11 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { CropRegion::OUTPUT_PREVIEW | CropRegion::OUTPUT_VIDEO | CropRegion::OUTPUT_PICTURE )); - int32_t reqCropRegion[3] = { crop.left, crop.top, crop.width }; + int32_t reqCropRegion[3] = { + static_cast(crop.left), + static_cast(crop.top), + static_cast(crop.width) + }; res = request->update(ANDROID_SCALER_CROP_REGION, reqCropRegion, 3); if (res != OK) return res; @@ -2362,10 +2367,14 @@ Parameters::CropRegion Parameters::calculateCropRegion( float minOutputWidth, minOutputHeight, minOutputRatio; { float outputSizes[][2] = { - { previewWidth, previewHeight }, - { videoWidth, videoHeight }, - { jpegThumbSize[0], jpegThumbSize[1] }, - { pictureWidth, pictureHeight }, + { static_cast(previewWidth), + static_cast(previewHeight) }, + { 
static_cast(videoWidth), + static_cast(videoHeight) }, + { static_cast(jpegThumbSize[0]), + static_cast(jpegThumbSize[1]) }, + { static_cast(pictureWidth), + static_cast(pictureHeight) }, }; minOutputWidth = outputSizes[0][0]; diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h index 4192e97..9f5f03b 100644 --- a/services/camera/libcameraservice/camera2/Parameters.h +++ b/services/camera/libcameraservice/camera2/Parameters.h @@ -55,7 +55,7 @@ struct Parameters { int pictureWidth, pictureHeight; int32_t jpegThumbSize[2]; - int32_t jpegQuality, jpegThumbQuality; + uint8_t jpegQuality, jpegThumbQuality; int32_t jpegRotation; bool gpsEnabled; -- cgit v1.1 From 9c6745f128648f6e0144b74ee593911a9fa10d51 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 30 Nov 2012 13:35:29 -0800 Subject: Propose new interpretation for setPosition and setLoop Add new API getBufferPosition to return position relative to start of fixed buffer. Change-Id: I7aca8e392d45b988545f07b36b5032691057b03e --- include/media/AudioTrack.h | 44 +++++++++++++++++++++++++++++------------- media/libmedia/AudioTrack.cpp | 45 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 76 insertions(+), 13 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 9d07ed5..4210f49 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -304,15 +304,24 @@ public: /* Enables looping and sets the start and end points of looping. * Only supported for static buffer mode. * + * FIXME The comments below are for the new planned interpretation which is not yet implemented. + * Currently the legacy behavior is still implemented, where loopStart and loopEnd + * are in wrapping (overflow) frame units like the return value of getPosition(). + * The plan is to fix all callers to use the new version at same time implementation changes. + * * Parameters: * - * loopStart: loop start expressed as the number of PCM frames played since AudioTrack start. - * loopEnd: loop end expressed as the number of PCM frames played since AudioTrack start. + * loopStart: loop start in frames relative to start of buffer. + * loopEnd: loop end in frames relative to start of buffer. * loopCount: number of loops to execute. Calling setLoop() with loopCount == 0 cancels any - * pending or active loop. loopCount = -1 means infinite looping. + * pending or active loop. loopCount == -1 means infinite looping. * * For proper operation the following condition must be respected: - * (loopEnd-loopStart) <= framecount() + * loopCount != 0 implies 0 <= loopStart < loopEnd <= frameCount(). + * + * If the loop period (loopEnd - loopStart) is too small for the implementation to support, + * setLoop() will return BAD_VALUE. + * */ status_t setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount); @@ -354,18 +363,19 @@ public: status_t setPositionUpdatePeriod(uint32_t updatePeriod); status_t getPositionUpdatePeriod(uint32_t *updatePeriod) const; - /* Sets playback head position within AudioTrack buffer. The new position is specified - * in number of frames. - * This method must be called with the AudioTrack in paused or stopped state. - * Note that the actual position set is modulo the AudioTrack buffer size in frames. - * Therefore using this method makes sense only when playing a "static" audio buffer - * as opposed to streaming. - * The getPosition() method on the other hand returns the total number of frames played since - * playback start. 
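[Editor's note] A usage sketch for the *proposed* setLoop()/setPosition() interpretation documented above, static buffer mode, offsets relative to the start of the buffer. Per the FIXME comments, the legacy wrapping-position behavior is still what ships at this point in the series; the frame numbers are purely illustrative.

// Assumes the AudioTrack.h of this tree; values are made up.
#include <media/AudioTrack.h>

using namespace android;

static status_t loopMiddleSection(const sp<AudioTrack>& track) {
    // Requires 0 <= loopStart < loopEnd <= frameCount(); loopCount == -1 loops forever.
    status_t err = track->setLoop(1000 /*loopStart*/, 5000 /*loopEnd*/, -1 /*loopCount*/);
    if (err != NO_ERROR) {
        return err;              // e.g. BAD_VALUE if the loop region is too small
    }
    // The new position is likewise relative to the start of the static buffer.
    err = track->setPosition(1000);
    if (err != NO_ERROR) {
        return err;
    }
    track->start();
    return NO_ERROR;
}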
+ /* Sets playback head position. + * Only supported for static buffer mode. + * + * FIXME The comments below are for the new planned interpretation which is not yet implemented. + * Currently the legacy behavior is still implemented, where the new position + * is in wrapping (overflow) frame units like the return value of getPosition(). + * The plan is to fix all callers to use the new version at same time implementation changes. * * Parameters: * - * position: New playback head position within AudioTrack buffer. + * position: New playback head position in frames relative to start of buffer. + * 0 <= position <= frameCount(). Note that end of buffer is permitted, + * but will result in an immediate underrun if started. * * Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful operation @@ -381,6 +391,14 @@ public: */ status_t getPosition(uint32_t *position); +#if 0 + /* For static buffer mode only, this returns the current playback position in frames + * relative to start of buffer. It is analogous to the new API for + * setLoop() and setPosition(). After underrun, the position will be at end of buffer. + */ + status_t getBufferPosition(uint32_t *position); +#endif + /* Forces AudioTrack buffer full condition. When playing a static buffer, this method avoids * rewriting the buffer before restarting playback after a stop. * This method must be called with the AudioTrack in paused or stopped state. diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 1bd839f..2d77581 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -561,6 +561,26 @@ status_t AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCou return INVALID_OPERATION; } + if (loopCount < 0 && loopCount != -1) { + return BAD_VALUE; + } + +#if 0 + // This will be for the new interpretation of loopStart and loopEnd + + if (loopCount != 0) { + if (loopStart >= mFrameCount || loopEnd >= mFrameCount || loopStart >= loopEnd) { + return BAD_VALUE; + } + uint32_t periodFrames = loopEnd - loopStart; + if (periodFrames < PERIOD_FRAMES_MIN) { + return BAD_VALUE; + } + } + + // The remainder of this code still uses the old interpretation +#endif + audio_track_cblk_t* cblk = mCblk; Mutex::Autolock _l(cblk->lock); @@ -656,6 +676,16 @@ status_t AudioTrack::setPosition(uint32_t position) return INVALID_OPERATION; } +#if 0 + // This will be for the new interpretation of position + + if (position >= mFrameCount) { + return BAD_VALUE; + } + + // The remainder of this code still uses the old interpretation +#endif + audio_track_cblk_t* cblk = mCblk; Mutex::Autolock _l(cblk->lock); @@ -680,6 +710,21 @@ status_t AudioTrack::getPosition(uint32_t *position) return NO_ERROR; } +#if 0 +status_t AudioTrack::getBufferPosition(uint32_t *position) +{ + if (mSharedBuffer == 0 || mIsTimed) { + return INVALID_OPERATION; + } + if (position == NULL) { + return BAD_VALUE; + } + *position = 0; + + return NO_ERROR; +} +#endif + status_t AudioTrack::reload() { if (mStatus != NO_ERROR) { -- cgit v1.1 From 70dc7025fac462e502803108e55911e6b3301bfe Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 8 Jan 2013 16:45:38 -0800 Subject: Fix build warnings Change-Id: I79f4d7052a4c0a9fde144063f44a9c8c9dd575d5 --- services/audioflinger/audio-resampler/AudioResamplerCoefficients.cpp | 4 ++-- services/audioflinger/audio-resampler/filter_coefficients.h | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/audioflinger/audio-resampler/AudioResamplerCoefficients.cpp 
b/services/audioflinger/audio-resampler/AudioResamplerCoefficients.cpp index d45d697..7fc03a6 100644 --- a/services/audioflinger/audio-resampler/AudioResamplerCoefficients.cpp +++ b/services/audioflinger/audio-resampler/AudioResamplerCoefficients.cpp @@ -34,9 +34,9 @@ const int32_t* readResamplerCoefficients(bool upSample) { ALOGV("readResamplerCoefficients"); if (upSample) { - return up_sampler_filter_coefficients; + return (const int32_t *) up_sampler_filter_coefficients; } else { - return dn_sampler_filter_coefficients; + return (const int32_t *) dn_sampler_filter_coefficients; } } diff --git a/services/audioflinger/audio-resampler/filter_coefficients.h b/services/audioflinger/audio-resampler/filter_coefficients.h index bf70c63..8b082b3 100644 --- a/services/audioflinger/audio-resampler/filter_coefficients.h +++ b/services/audioflinger/audio-resampler/filter_coefficients.h @@ -18,7 +18,7 @@ namespace android { // cmd-line: fir -l 7 -s 48000 -c 23400 -n 16 -b 9.62 -const int32_t up_sampler_filter_coefficients[] __attribute__ ((aligned (32))) = { +const uint32_t up_sampler_filter_coefficients[] __attribute__ ((aligned (32))) = { 0x7ccccccd, 0x0323eb7f, 0xfd086246, 0x02b2aa5c, 0xfda45e2c, 0x01fa5183, 0xfe694e12, 0x0137e672, 0xff1c87d3, 0x009ce6d8, 0xff9a68b0, 0x003d150d, 0xffde727a, 0x00106595, 0xfff93679, 0x00021fc5, 0x7cc9b757, 0x022ac835, 0xfd7e3a71, 0x026b7da1, 0xfdd2b905, 0x01db7c90, 0xfe7db77c, 0x012aa7bf, 0xff24dc32, 0x0097dfc9, 0xff9d4ae9, 0x003b8742, 0xffdf38e5, 0x00100be5, 0xfff959f5, 0x0002144b, 0x7cc0773c, 0x01354bc1, 0xfdf365e8, 0x0224726d, 0xfe011d2e, 0x01bc908b, 0xfe923a2b, 0x011d528d, 0xff2d426f, 0x0092cbc0, 0xffa035cc, 0x0039f42e, 0xffe00236, 0x000fb0d2, 0xfff97dfa, 0x000208b0, @@ -151,7 +151,7 @@ const int32_t up_sampler_filter_coefficients[] __attribute__ ((aligned (32))) = }; // cmd-line: fir -l 7 -s 44100 -c 19876 -n 16 -b 9.62 -const int32_t dn_sampler_filter_coefficients[] __attribute__ ((aligned (32))) = { +const uint32_t dn_sampler_filter_coefficients[] __attribute__ ((aligned (32))) = { 0x736144b5, 0x0c333a22, 0xf4fca390, 0x09424904, 0xf8c92a41, 0x052ac04c, 0xfca4fc64, 0x01ed8cc7, 0xff119cc0, 0x0053ba6e, 0xfff9a80d, 0xffeaeaab, 0x001690d9, 0xfff11dcd, 0x000715d9, 0xfffdb4b9, 0x735ed3aa, 0x0b433de8, 0xf560f0f3, 0x091282c4, 0xf8dd5ccf, 0x0525cb66, 0xfca23e3d, 0x01f33960, 0xff0bc9c2, 0x00586127, 0xfff68603, 0xffecbad5, 0x0015ab8b, 0xfff17c10, 0x0006f71a, 0xfffdbc2f, 0x735780bb, 0x0a55a98f, 0xf5c5b2a1, 0x08e1ea27, 0xf8f25767, 0x0520366d, 0xfc9ff262, 0x01f89c98, 0xff0620a4, 0x005cf349, 0xfff36c0d, 0xffee8913, 0x0014c5dc, 0xfff1db1a, 0x0006d7d7, 0xfffdc3db, -- cgit v1.1 From 2e4664677d72ce54201d3fd0beb0e10280add93c Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 10 Jan 2013 14:26:24 -0800 Subject: Fix AudioRecord Bug: 7965744 Change-Id: Ic024e7fb32f7459b8093c2cf6cd5752aade21ddb --- media/libmedia/AudioRecord.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 4f555c1..0a2b0b0 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -164,6 +164,7 @@ status_t AudioRecord::set( ALOGE("Invalid format"); return BAD_VALUE; } + mFormat = format; if (!audio_is_input_channel(channelMask)) { return BAD_VALUE; @@ -172,7 +173,7 @@ status_t AudioRecord::set( uint32_t channelCount = popcount(channelMask); mChannelCount = channelCount; - if (audio_is_linear_pcm(mFormat)) { + if (audio_is_linear_pcm(format)) { mFrameSize = channelCount * 
audio_bytes_per_sample(format); } else { mFrameSize = sizeof(uint8_t); @@ -226,7 +227,6 @@ status_t AudioRecord::set( mStatus = NO_ERROR; - mFormat = format; // Update buffer size in case it has been limited by AudioFlinger during track creation mFrameCount = mCblk->frameCount_; -- cgit v1.1 From 38545f51715ec1460559df81eb93e0aa9733a77c Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 10 Jan 2013 14:26:24 -0800 Subject: Fix AudioRecord Bug: 7965744 Change-Id: Ic024e7fb32f7459b8093c2cf6cd5752aade21ddb --- media/libmedia/AudioRecord.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 4f555c1..0a2b0b0 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -164,6 +164,7 @@ status_t AudioRecord::set( ALOGE("Invalid format"); return BAD_VALUE; } + mFormat = format; if (!audio_is_input_channel(channelMask)) { return BAD_VALUE; @@ -172,7 +173,7 @@ status_t AudioRecord::set( uint32_t channelCount = popcount(channelMask); mChannelCount = channelCount; - if (audio_is_linear_pcm(mFormat)) { + if (audio_is_linear_pcm(format)) { mFrameSize = channelCount * audio_bytes_per_sample(format); } else { mFrameSize = sizeof(uint8_t); @@ -226,7 +227,6 @@ status_t AudioRecord::set( mStatus = NO_ERROR; - mFormat = format; // Update buffer size in case it has been limited by AudioFlinger during track creation mFrameCount = mCblk->frameCount_; -- cgit v1.1 From 2ba042ff8a8bb5aa0320580119771e11e64ba2cd Mon Sep 17 00:00:00 2001 From: Insun Kang Date: Tue, 25 Sep 2012 20:22:25 +0900 Subject: Bug fix: set 'und' as default metadata language info for srt. o Previously, it leaves language code empty and it is inconsistent with other code which gives 'und'. As a result, selected SRT track returned empty language info. With this fix, it returns 'und'. Bug: 7227230 TESTED=runtest -d cts-media -c android.media.cts.MediaPlayerTest -m testGetTrackInfo Change-Id: I225848f029637dd782c376e0d770dddd5c849550 --- media/libstagefright/timedtext/TimedTextSRTSource.cpp | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/media/libstagefright/timedtext/TimedTextSRTSource.cpp b/media/libstagefright/timedtext/TimedTextSRTSource.cpp index eac23ba..2ac1e72 100644 --- a/media/libstagefright/timedtext/TimedTextSRTSource.cpp +++ b/media/libstagefright/timedtext/TimedTextSRTSource.cpp @@ -36,6 +36,9 @@ TimedTextSRTSource::TimedTextSRTSource(const sp& dataSource) : mSource(dataSource), mMetaData(new MetaData), mIndex(0) { + // TODO: Need to detect the language, because SRT doesn't give language + // information explicitly. + mMetaData->setCString(kKeyMediaLanguage, "und"); } TimedTextSRTSource::~TimedTextSRTSource() { @@ -46,14 +49,10 @@ status_t TimedTextSRTSource::start() { if (err != OK) { reset(); } - // TODO: Need to detect the language, because SRT doesn't give language - // information explicitly. 
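[Editor's note] The TimedTextSRTSource hunks around this point boil down to one rule: metadata that is constant for the lifetime of the source belongs in the constructor, and reset() should only clear per-session state, so a stop/start cycle can no longer leave the language key empty. A simplified restatement (not the real class, which uses MetaData and kKeyMediaLanguage):

// Hypothetical trimmed-down source illustrating the ownership of the
// language metadata; std types stand in for the Android ones.
#include <string>
#include <vector>

struct SrtSourceSketch {
    SrtSourceSketch() : mLanguage("und"), mIndex(0) {
        // SRT carries no language information, so default to "und"
        // (undetermined) exactly once, up front.
    }
    void reset() {
        mCues.clear();   // per-session parse state only
        mIndex = 0;      // mLanguage is deliberately left untouched
    }
    std::string              mLanguage;
    std::vector<std::string> mCues;
    size_t                   mIndex;
};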
- mMetaData->setCString(kKeyMediaLanguage, ""); return err; } void TimedTextSRTSource::reset() { - mMetaData->clear(); mTextVector.clear(); mIndex = 0; } -- cgit v1.1 From 2592f6e68edbed386e004d5b045233f15e7b9ba1 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Thu, 17 Jan 2013 17:36:00 -0800 Subject: AudioFlinger: fix build with debug log enabled Change-Id: Id397155ec884c41cb84b35462ea09a97a04d3ed2 --- services/audioflinger/Threads.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 783327f..744a7df 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2589,7 +2589,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // the minimum track buffer size is normally twice the number of frames necessary // to fill one buffer and the resampler should not leave more than one buffer worth // of unreleased frames after each pass, but just in case... - ALOG_ASSERT(minFrames <= cblk->frameCount); + ALOG_ASSERT(minFrames <= cblk->frameCount_); } } if ((track->framesReady() >= minFrames) && track->isReady() && -- cgit v1.1 From 2a330d6cbb25f0cdd6208aeee53b4a3b88dae3b0 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 18 Jan 2013 15:17:05 -0800 Subject: Remove obsolete audioflinger reference Change-Id: I9ae754c908f3b0102c3828c71d6f542851a74341 --- media/mediaserver/Android.mk | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk index 5a73cdd..8c3cc5e 100644 --- a/media/mediaserver/Android.mk +++ b/media/mediaserver/Android.mk @@ -11,12 +11,10 @@ LOCAL_SHARED_LIBRARIES := \ libutils \ libbinder -# FIXME The duplicate audioflinger is temporary LOCAL_C_INCLUDES := \ frameworks/av/media/libmediaplayerservice \ frameworks/av/services/audioflinger \ - frameworks/av/services/camera/libcameraservice \ - frameworks/native/services/audioflinger + frameworks/av/services/camera/libcameraservice LOCAL_MODULE:= mediaserver -- cgit v1.1 From 13f7fe763b9ad52fc27f21ed923c46b9555a321f Mon Sep 17 00:00:00 2001 From: Henrik B Andersson Date: Fri, 26 Oct 2012 15:15:15 +0200 Subject: Fix for not ending up in an eternal loop in DrmManager. In the original code a random number is used to get try to find an empty slot in the list of free DRM id's. When you reached the limit of 0xfff id's you ended up in an eternal loop causing ANRs. Updated by James Dong . 
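[Editor's note] The DrmManager hunks that follow replace the unbounded random search with a bounded linear probe over a fixed-size occupancy array: start at a random slot, visit each slot at most once, and return -1 when every id is taken, so the loop can never spin forever. A standalone sketch of that strategy (the 0x1000-flag tagging of native clients is left to the caller, as the real code does):

// Bounded id allocation sketch; guaranteed to terminate after
// kMaxNumUniqueIds probes.
#include <cstdlib>

enum { kMaxNumUniqueIds = 0x1000 };
static bool sUsed[kMaxNumUniqueIds];   // all false initially

static int allocUniqueId() {
    const int start = std::rand() % kMaxNumUniqueIds;
    for (int i = 0; i < kMaxNumUniqueIds; ++i) {
        const int candidate = (start + i) % kMaxNumUniqueIds;
        if (!sUsed[candidate]) {
            sUsed[candidate] = true;
            return candidate;          // caller may OR in a flag bit for
        }                              // native clients, as the real code does
    }
    return -1;                         // exhausted: no id available
}

static void freeUniqueId(int id) {
    if (id >= 0 && id < kMaxNumUniqueIds) {
        sUsed[id] = false;
    }
}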
Change-Id: I70176cc3f770223c4a8060f9739fe2bc03a703d9 --- drm/drmserver/DrmManager.cpp | 56 +++++++++++++------------------- drm/libdrmframework/include/DrmManager.h | 6 +++- 2 files changed, 28 insertions(+), 34 deletions(-) diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp index e7b0e90..bfaf4bc 100644 --- a/drm/drmserver/DrmManager.cpp +++ b/drm/drmserver/DrmManager.cpp @@ -42,7 +42,8 @@ const String8 DrmManager::EMPTY_STRING(""); DrmManager::DrmManager() : mDecryptSessionId(0), mConvertId(0) { - + srand(time(NULL)); + memset(mUniqueIdArray, 0, sizeof(bool) * kMaxNumUniqueIds); } DrmManager::~DrmManager() { @@ -52,48 +53,37 @@ DrmManager::~DrmManager() { int DrmManager::addUniqueId(bool isNative) { Mutex::Autolock _l(mLock); - int temp = 0; - bool foundUniqueId = false; - const int size = mUniqueIdVector.size(); - const int uniqueIdRange = 0xfff; - int maxLoopTimes = (uniqueIdRange - 1) / 2; - srand(time(NULL)); + int uniqueId = -1; + int random = rand(); - while (!foundUniqueId) { - temp = rand() & uniqueIdRange; + for (size_t index = 0; index < kMaxNumUniqueIds; ++index) { + int temp = (random + index) % kMaxNumUniqueIds; + if (!mUniqueIdArray[temp]) { + uniqueId = temp; + mUniqueIdArray[uniqueId] = true; - if (isNative) { - // set a flag to differentiate DrmManagerClient - // created from native side and java side - temp |= 0x1000; - } - - int index = 0; - for (; index < size; ++index) { - if (mUniqueIdVector.itemAt(index) == temp) { - foundUniqueId = false; - break; + if (isNative) { + // set a flag to differentiate DrmManagerClient + // created from native side and java side + uniqueId |= 0x1000; } + break; } - if (index == size) { - foundUniqueId = true; - } - - maxLoopTimes --; - LOG_FATAL_IF(maxLoopTimes <= 0, "cannot find an unique ID for this session"); } - mUniqueIdVector.push(temp); - return temp; + // -1 indicates that no unique id can be allocated. + return uniqueId; } void DrmManager::removeUniqueId(int uniqueId) { Mutex::Autolock _l(mLock); - for (unsigned int i = 0; i < mUniqueIdVector.size(); i++) { - if (uniqueId == mUniqueIdVector.itemAt(i)) { - mUniqueIdVector.removeAt(i); - break; - } + if (uniqueId & 0x1000) { + // clear the flag for the native side. 
+ uniqueId &= ~(0x1000); + } + + if (uniqueId >= 0 && uniqueId < kMaxNumUniqueIds) { + mUniqueIdArray[uniqueId] = false; } } diff --git a/drm/libdrmframework/include/DrmManager.h b/drm/libdrmframework/include/DrmManager.h index 491e8f7..8ab693f 100644 --- a/drm/libdrmframework/include/DrmManager.h +++ b/drm/libdrmframework/include/DrmManager.h @@ -144,7 +144,11 @@ private: bool canHandle(int uniqueId, const String8& path); private: - Vector mUniqueIdVector; + enum { + kMaxNumUniqueIds = 0x1000, + }; + + bool mUniqueIdArray[kMaxNumUniqueIds]; static const String8 EMPTY_STRING; int mDecryptSessionId; -- cgit v1.1 From 0bf43848adb00922122b0a0eed2fa0318bde8317 Mon Sep 17 00:00:00 2001 From: Hung Nguyen Date: Tue, 5 Jun 2012 13:19:53 +0200 Subject: Fixed memory leakage in the DRM framework Change-Id: Ib1276bec6cafb4e94f8f13b52e50e4987765eec4 --- drm/common/IDrmManagerService.cpp | 12 ++++++++---- drm/common/ReadWriteUtils.cpp | 2 +- .../plugins/passthru/src/DrmPassthruPlugIn.cpp | 3 ++- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/drm/common/IDrmManagerService.cpp b/drm/common/IDrmManagerService.cpp index 0282036..91fd91e 100644 --- a/drm/common/IDrmManagerService.cpp +++ b/drm/common/IDrmManagerService.cpp @@ -190,8 +190,9 @@ DrmConstraints* BpDrmManagerService::getConstraints( if (0 < bufferSize) { data = new char[bufferSize]; reply.read(data, bufferSize); + drmConstraints->put(&key, data); + delete[] data; } - drmConstraints->put(&key, data); } } return drmConstraints; @@ -219,8 +220,9 @@ DrmMetadata* BpDrmManagerService::getMetadata(int uniqueId, const String8* path) if (0 < bufferSize) { data = new char[bufferSize]; reply.read(data, bufferSize); + drmMetadata->put(&key, data); + delete[] data; } - drmMetadata->put(&key, data); } } return drmMetadata; @@ -889,9 +891,11 @@ status_t BnDrmManagerService::onTransact( int bufferSize = 0; if (NULL != value) { bufferSize = strlen(value); + reply->writeInt32(bufferSize + 1); + reply->write(value, bufferSize + 1); + } else { + reply->writeInt32(0); } - reply->writeInt32(bufferSize + 1); - reply->write(value, bufferSize + 1); } } delete drmConstraints; drmConstraints = NULL; diff --git a/drm/common/ReadWriteUtils.cpp b/drm/common/ReadWriteUtils.cpp index fd17e98..d696f16 100644 --- a/drm/common/ReadWriteUtils.cpp +++ b/drm/common/ReadWriteUtils.cpp @@ -47,7 +47,7 @@ String8 ReadWriteUtils::readBytes(const String8& filePath) { if (length == read(fd, (void*) bytes, length)) { string.append(bytes, length); } - delete bytes; + delete[] bytes; } fclose(file); } diff --git a/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp b/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp index fa659fd..084e323 100644 --- a/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp +++ b/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp @@ -65,10 +65,11 @@ DrmConstraints* DrmPassthruPlugIn::onGetConstraints( char* charValue = NULL; charValue = new char[value.length() + 1]; strncpy(charValue, value.string(), value.length()); + charValue[value.length()] = '\0'; //Just add dummy available time for verification drmConstraints->put(&(DrmConstraints::LICENSE_AVAILABLE_TIME), charValue); - + delete[] charValue; return drmConstraints; } -- cgit v1.1 From 11d8dfcc063425ae7d59229f54b6752fd8987c10 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 14 Jan 2013 14:53:13 -0800 Subject: Add non-blocking event logger NBLog Change-Id: I6c136cf3d7f46a8af84c69ecfc199dab394c10dc --- include/media/nbaio/NBLog.h | 188 
+++++++++++++++++++ media/libnbaio/Android.mk | 3 + media/libnbaio/NBLog.cpp | 447 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 638 insertions(+) create mode 100644 include/media/nbaio/NBLog.h create mode 100644 media/libnbaio/NBLog.cpp diff --git a/include/media/nbaio/NBLog.h b/include/media/nbaio/NBLog.h new file mode 100644 index 0000000..8fc417f --- /dev/null +++ b/include/media/nbaio/NBLog.h @@ -0,0 +1,188 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// Non-blocking event logger intended for safe communication between processes via shared memory + +#ifndef ANDROID_MEDIA_NBLOG_H +#define ANDROID_MEDIA_NBLOG_H + +#include +#include +#include + +namespace android { + +class NBLog { + +public: + +class Writer; +class Reader; + +private: + +enum Event { + EVENT_RESERVED, + EVENT_STRING, // ASCII string, not NUL-terminated + EVENT_TIMESTAMP, // clock_gettime(CLOCK_MONOTONIC) +}; + +// --------------------------------------------------------------------------- + +// representation of a single log entry in private memory +struct Entry { + Entry(Event event, const void *data, size_t length) + : mEvent(event), mLength(length), mData(data) { } + /*virtual*/ ~Entry() { } + + int readAt(size_t offset) const; + +private: + friend class Writer; + Event mEvent; // event type + size_t mLength; // length of additional data, 0 <= mLength <= 255 + const void *mData; // event type-specific data +}; + +// representation of a single log entry in shared memory +// byte[0] mEvent +// byte[1] mLength +// byte[2] mData[0] +// ... +// byte[2+i] mData[i] +// ... +// byte[2+mLength-1] mData[mLength-1] +// byte[2+mLength] duplicate copy of mLength to permit reverse scan +// byte[3+mLength] start of next log entry + +// located in shared memory +struct Shared { + Shared() : mRear(0) { } + /*virtual*/ ~Shared() { } + + volatile int32_t mRear; // index one byte past the end of most recent Entry + char mBuffer[0]; // circular buffer for entries +}; + +public: + +// --------------------------------------------------------------------------- + +// FIXME Timeline was intended to wrap Writer and Reader, but isn't actually used yet. +// For now it is just a namespace for sharedSize(). +class Timeline : public RefBase { +public: +#if 0 + Timeline(size_t size, void *shared = NULL); + virtual ~Timeline(); +#endif + + static size_t sharedSize(size_t size); + +#if 0 +private: + friend class Writer; + friend class Reader; + + const size_t mSize; // circular buffer size in bytes, must be a power of 2 + bool mOwn; // whether I own the memory at mShared + Shared* const mShared; // pointer to shared memory +#endif +}; + +// --------------------------------------------------------------------------- + +// Writer is thread-safe with respect to Reader, but not with respect to multiple threads +// calling Writer methods. If you need multi-thread safety for writing, use LockedWriter. 
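The entry layout documented above (event byte, length byte, payload, then a duplicate length byte) is what allows a reader to walk the buffer backwards from the rear pointer. A minimal sketch of that encoding against a plain byte vector, leaving out the shared-memory, wraparound, and atomicity concerns handled by Writer and Reader (names are illustrative):

    #include <cstdint>
    #include <vector>

    // Append one entry: event, length, payload bytes, then the length again.
    void appendEntry(std::vector<uint8_t> &buf, uint8_t event,
                     const uint8_t *data, uint8_t length) {
        buf.push_back(event);
        buf.push_back(length);
        buf.insert(buf.end(), data, data + length);
        buf.push_back(length);   // duplicate copy so a reader can scan backwards
    }

    // Given the index one past the end of an entry, return the index of its first byte.
    size_t entryStart(const std::vector<uint8_t> &buf, size_t end) {
        uint8_t length = buf[end - 1];      // trailing copy of the length
        return end - (size_t)length - 3;    // event + length + payload + trailing length
    }

The same "length + 3" arithmetic shows up in Writer::log() and Reader::dump() further down.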
+class Writer : public RefBase { +public: + Writer(); // dummy nop implementation without shared memory + Writer(size_t size, void *shared); + Writer(size_t size, const sp& iMemory); + virtual ~Writer() { } + + virtual void log(const char *string); + virtual void logf(const char *fmt, ...); + virtual void logvf(const char *fmt, va_list ap); + virtual void logTimestamp(); + virtual void logTimestamp(const struct timespec& ts); + + virtual bool isEnabled() const; + + // return value for all of these is the previous isEnabled() + virtual bool setEnabled(bool enabled); // but won't enable if no shared memory + bool enable() { return setEnabled(true); } + bool disable() { return setEnabled(false); } + + sp getIMemory() const { return mIMemory; } + +private: + void log(Event event, const void *data, size_t length); + void log(const Entry *entry, bool trusted = false); + + const size_t mSize; // circular buffer size in bytes, must be a power of 2 + Shared* const mShared; // raw pointer to shared memory + const sp mIMemory; // ref-counted version + int32_t mRear; // my private copy of mShared->mRear + bool mEnabled; // whether to actually log +}; + +// --------------------------------------------------------------------------- + +// Similar to Writer, but safe for multiple threads to call concurrently +class LockedWriter : public Writer { +public: + LockedWriter(); + LockedWriter(size_t size, void *shared); + + virtual void log(const char *string); + virtual void logf(const char *fmt, ...); + virtual void logvf(const char *fmt, va_list ap); + virtual void logTimestamp(); + virtual void logTimestamp(const struct timespec& ts); + + virtual bool isEnabled() const; + virtual bool setEnabled(bool enabled); + +private: + mutable Mutex mLock; +}; + +// --------------------------------------------------------------------------- + +class Reader : public RefBase { +public: + Reader(size_t size, const void *shared); + Reader(size_t size, const sp& iMemory); + virtual ~Reader() { } + + void dump(int fd, size_t indent = 0); + bool isIMemory(const sp& iMemory) const; + +private: + const size_t mSize; // circular buffer size in bytes, must be a power of 2 + const Shared* const mShared; // raw pointer to shared memory + const sp mIMemory; // ref-counted version + int32_t mFront; // index of oldest acknowledged Entry + + static const size_t kSquashTimestamp = 5; // squash this many or more adjacent timestamps +}; + +}; // class NBLog + +} // namespace android + +#endif // ANDROID_MEDIA_NBLOG_H diff --git a/media/libnbaio/Android.mk b/media/libnbaio/Android.mk index 757272f..d372d20 100644 --- a/media/libnbaio/Android.mk +++ b/media/libnbaio/Android.mk @@ -14,6 +14,8 @@ LOCAL_SRC_FILES := \ roundup.c \ SourceAudioBufferProvider.cpp +LOCAL_SRC_FILES += NBLog.cpp + # libsndfile license is incompatible; uncomment to use for local debug only #LOCAL_SRC_FILES += LibsndfileSink.cpp LibsndfileSource.cpp #LOCAL_C_INCLUDES += path/to/libsndfile/src @@ -25,6 +27,7 @@ LOCAL_SRC_FILES := \ LOCAL_MODULE := libnbaio LOCAL_SHARED_LIBRARIES := \ + libbinder \ libcommon_time_client \ libcutils \ libutils diff --git a/media/libnbaio/NBLog.cpp b/media/libnbaio/NBLog.cpp new file mode 100644 index 0000000..045bf64 --- /dev/null +++ b/media/libnbaio/NBLog.cpp @@ -0,0 +1,447 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "NBLog" +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { + +int NBLog::Entry::readAt(size_t offset) const +{ + // FIXME This is too slow, despite the name it is used during writing + if (offset == 0) + return mEvent; + else if (offset == 1) + return mLength; + else if (offset < (size_t) (mLength + 2)) + return ((char *) mData)[offset - 2]; + else if (offset == (size_t) (mLength + 2)) + return mLength; + else + return 0; +} + +// --------------------------------------------------------------------------- + +#if 0 // FIXME see note in NBLog.h +NBLog::Timeline::Timeline(size_t size, void *shared) + : mSize(roundup(size)), mOwn(shared == NULL), + mShared((Shared *) (mOwn ? new char[sharedSize(size)] : shared)) +{ + new (mShared) Shared; +} + +NBLog::Timeline::~Timeline() +{ + mShared->~Shared(); + if (mOwn) { + delete[] (char *) mShared; + } +} +#endif + +/*static*/ +size_t NBLog::Timeline::sharedSize(size_t size) +{ + return sizeof(Shared) + roundup(size); +} + +// --------------------------------------------------------------------------- + +NBLog::Writer::Writer() + : mSize(0), mShared(NULL), mRear(0), mEnabled(false) +{ +} + +NBLog::Writer::Writer(size_t size, void *shared) + : mSize(roundup(size)), mShared((Shared *) shared), mRear(0), mEnabled(mShared != NULL) +{ +} + +NBLog::Writer::Writer(size_t size, const sp& iMemory) + : mSize(roundup(size)), mShared(iMemory != 0 ? (Shared *) iMemory->pointer() : NULL), + mIMemory(iMemory), mRear(0), mEnabled(mShared != NULL) +{ +} + +void NBLog::Writer::log(const char *string) +{ + if (!mEnabled) { + return; + } + size_t length = strlen(string); + if (length > 255) { + length = 255; + } + log(EVENT_STRING, string, length); +} + +void NBLog::Writer::logf(const char *fmt, ...) 
+{ + if (!mEnabled) { + return; + } + va_list ap; + va_start(ap, fmt); + Writer::logvf(fmt, ap); // the Writer:: is needed to avoid virtual dispatch for LockedWriter + va_end(ap); +} + +void NBLog::Writer::logvf(const char *fmt, va_list ap) +{ + if (!mEnabled) { + return; + } + char buffer[256]; + int length = vsnprintf(buffer, sizeof(buffer), fmt, ap); + if (length >= (int) sizeof(buffer)) { + length = sizeof(buffer) - 1; + // NUL termination is not required + // buffer[length] = '\0'; + } + if (length >= 0) { + log(EVENT_STRING, buffer, length); + } +} + +void NBLog::Writer::logTimestamp() +{ + if (!mEnabled) { + return; + } + struct timespec ts; + if (!clock_gettime(CLOCK_MONOTONIC, &ts)) { + log(EVENT_TIMESTAMP, &ts, sizeof(struct timespec)); + } +} + +void NBLog::Writer::logTimestamp(const struct timespec& ts) +{ + if (!mEnabled) { + return; + } + log(EVENT_TIMESTAMP, &ts, sizeof(struct timespec)); +} + +void NBLog::Writer::log(Event event, const void *data, size_t length) +{ + if (!mEnabled) { + return; + } + if (data == NULL || length > 255) { + return; + } + switch (event) { + case EVENT_STRING: + case EVENT_TIMESTAMP: + break; + case EVENT_RESERVED: + default: + return; + } + Entry entry(event, data, length); + log(&entry, true /*trusted*/); +} + +void NBLog::Writer::log(const NBLog::Entry *entry, bool trusted) +{ + if (!mEnabled) { + return; + } + if (!trusted) { + log(entry->mEvent, entry->mData, entry->mLength); + return; + } + size_t rear = mRear & (mSize - 1); + size_t written = mSize - rear; // written = number of bytes that have been written so far + size_t need = entry->mLength + 3; // mEvent, mLength, data[length], mLength + // need = number of bytes remaining to write + if (written > need) { + written = need; + } + size_t i; + // FIXME optimize this using memcpy for the data part of the Entry. + // The Entry could have a method copyTo(ptr, offset, size) to optimize the copy. + for (i = 0; i < written; ++i) { + mShared->mBuffer[rear + i] = entry->readAt(i); + } + if (rear + written == mSize && (need -= written) > 0) { + for (i = 0; i < need; ++i) { + mShared->mBuffer[i] = entry->readAt(written + i); + } + written += need; + } + android_atomic_release_store(mRear += written, &mShared->mRear); +} + +bool NBLog::Writer::isEnabled() const +{ + return mEnabled; +} + +bool NBLog::Writer::setEnabled(bool enabled) +{ + bool old = mEnabled; + mEnabled = enabled && mShared != NULL; + return old; +} + +// --------------------------------------------------------------------------- + +NBLog::LockedWriter::LockedWriter() + : Writer() +{ +} + +NBLog::LockedWriter::LockedWriter(size_t size, void *shared) + : Writer(size, shared) +{ +} + +void NBLog::LockedWriter::log(const char *string) +{ + Mutex::Autolock _l(mLock); + Writer::log(string); +} + +void NBLog::LockedWriter::logf(const char *fmt, ...) 
+{ + // FIXME should not take the lock until after formatting is done + Mutex::Autolock _l(mLock); + va_list ap; + va_start(ap, fmt); + Writer::logvf(fmt, ap); + va_end(ap); +} + +void NBLog::LockedWriter::logvf(const char *fmt, va_list ap) +{ + // FIXME should not take the lock until after formatting is done + Mutex::Autolock _l(mLock); + Writer::logvf(fmt, ap); +} + +void NBLog::LockedWriter::logTimestamp() +{ + // FIXME should not take the lock until after the clock_gettime() syscall + Mutex::Autolock _l(mLock); + Writer::logTimestamp(); +} + +void NBLog::LockedWriter::logTimestamp(const struct timespec& ts) +{ + Mutex::Autolock _l(mLock); + Writer::logTimestamp(ts); +} + +bool NBLog::LockedWriter::isEnabled() const +{ + Mutex::Autolock _l(mLock); + return Writer::isEnabled(); +} + +bool NBLog::LockedWriter::setEnabled(bool enabled) +{ + Mutex::Autolock _l(mLock); + return Writer::setEnabled(enabled); +} + +// --------------------------------------------------------------------------- + +NBLog::Reader::Reader(size_t size, const void *shared) + : mSize(roundup(size)), mShared((const Shared *) shared), mFront(0) +{ +} + +NBLog::Reader::Reader(size_t size, const sp& iMemory) + : mSize(roundup(size)), mShared(iMemory != 0 ? (const Shared *) iMemory->pointer() : NULL), + mIMemory(iMemory), mFront(0) +{ +} + +void NBLog::Reader::dump(int fd, size_t indent) +{ + int32_t rear = android_atomic_acquire_load(&mShared->mRear); + size_t avail = rear - mFront; + if (avail == 0) { + return; + } + size_t lost = 0; + if (avail > mSize) { + lost = avail - mSize; + mFront += lost; + avail = mSize; + } + size_t remaining = avail; // remaining = number of bytes left to read + size_t front = mFront & (mSize - 1); + size_t read = mSize - front; // read = number of bytes that have been read so far + if (read > remaining) { + read = remaining; + } + // make a copy to avoid race condition with writer + uint8_t *copy = new uint8_t[avail]; + // copy first part of circular buffer up until the wraparound point + memcpy(copy, &mShared->mBuffer[front], read); + if (front + read == mSize) { + if ((remaining -= read) > 0) { + // copy second part of circular buffer starting at beginning + memcpy(©[read], mShared->mBuffer, remaining); + read += remaining; + // remaining = 0 but not necessary + } + } + mFront += read; + size_t i = avail; + Event event; + size_t length; + struct timespec ts; + time_t maxSec = -1; + while (i >= 3) { + length = copy[i - 1]; + if (length + 3 > i || copy[i - length - 2] != length) { + break; + } + event = (Event) copy[i - length - 3]; + if (event == EVENT_TIMESTAMP) { + if (length != sizeof(struct timespec)) { + // corrupt + break; + } + memcpy(&ts, ©[i - length - 1], sizeof(struct timespec)); + if (ts.tv_sec > maxSec) { + maxSec = ts.tv_sec; + } + } + i -= length + 3; + } + if (i > 0) { + lost += i; + if (fd >= 0) { + fdprintf(fd, "%*swarning: lost %u bytes worth of events\n", indent, "", lost); + } else { + ALOGI("%*swarning: lost %u bytes worth of events\n", indent, "", lost); + } + } + size_t width = 1; + while (maxSec >= 10) { + ++width; + maxSec /= 10; + } + char prefix[32]; + if (maxSec >= 0) { + snprintf(prefix, sizeof(prefix), "[%*s] ", width + 4, ""); + } else { + prefix[0] = '\0'; + } + while (i < avail) { + event = (Event) copy[i]; + length = copy[i + 1]; + const void *data = ©[i + 2]; + size_t advance = length + 3; + switch (event) { + case EVENT_STRING: + if (fd >= 0) { + fdprintf(fd, "%*s%s%.*s\n", indent, "", prefix, length, (const char *) data); + } else { + ALOGI("%*s%s%.*s", 
indent, "", prefix, length, (const char *) data); + } break; + case EVENT_TIMESTAMP: { + // already checked that length == sizeof(struct timespec); + memcpy(&ts, data, sizeof(struct timespec)); + long prevNsec = ts.tv_nsec; + long deltaMin = LONG_MAX; + long deltaMax = -1; + long deltaTotal = 0; + size_t j = i; + for (;;) { + j += sizeof(struct timespec) + 3; + if (j >= avail || (Event) copy[j] != EVENT_TIMESTAMP) { + break; + } + struct timespec tsNext; + memcpy(&tsNext, ©[j + 2], sizeof(struct timespec)); + if (tsNext.tv_sec != ts.tv_sec) { + break; + } + long delta = tsNext.tv_nsec - prevNsec; + if (delta < 0) { + break; + } + if (delta < deltaMin) { + deltaMin = delta; + } + if (delta > deltaMax) { + deltaMax = delta; + } + deltaTotal += delta; + prevNsec = tsNext.tv_nsec; + } + size_t n = (j - i) / (sizeof(struct timespec) + 3); + if (n >= kSquashTimestamp) { + if (fd >= 0) { + fdprintf(fd, "%*s[%d.%03d to .%.03d by .%.03d to .%.03d]\n", indent, "", + (int) ts.tv_sec, (int) (ts.tv_nsec / 1000000), + (int) ((ts.tv_nsec + deltaTotal) / 1000000), + (int) (deltaMin / 1000000), (int) (deltaMax / 1000000)); + } else { + ALOGI("%*s[%d.%03d to .%.03d by .%.03d to .%.03d]\n", indent, "", + (int) ts.tv_sec, (int) (ts.tv_nsec / 1000000), + (int) ((ts.tv_nsec + deltaTotal) / 1000000), + (int) (deltaMin / 1000000), (int) (deltaMax / 1000000)); + } + i = j; + advance = 0; + break; + } + if (fd >= 0) { + fdprintf(fd, "%*s[%d.%03d]\n", indent, "", (int) ts.tv_sec, + (int) (ts.tv_nsec / 1000000)); + } else { + ALOGI("%*s[%d.%03d]", indent, "", (int) ts.tv_sec, + (int) (ts.tv_nsec / 1000000)); + } + } break; + case EVENT_RESERVED: + default: + if (fd >= 0) { + fdprintf(fd, "%*s%swarning: unknown event %d\n", indent, "", prefix, event); + } else { + ALOGI("%*s%swarning: unknown event %d", indent, "", prefix, event); + } + break; + } + i += advance; + } + // FIXME it would be more efficient to put a char mCopy[256] as a member variable of the dumper + delete[] copy; +} + +bool NBLog::Reader::isIMemory(const sp& iMemory) const +{ + return iMemory.get() == mIMemory.get(); +} + +} // namespace android -- cgit v1.1 From 6f1c1918d0dfece10f728711b055441e4d135c73 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 18 Jan 2013 15:31:41 -0800 Subject: Add media.log service based on NBLog Change-Id: Ie45093df6ac9a739d05c8d408fab52a9a8a27e7f --- include/media/IMediaLogService.h | 45 ++++++++++++++ media/libmedia/Android.mk | 1 + media/libmedia/IMediaLogService.cpp | 94 +++++++++++++++++++++++++++++ media/mediaserver/Android.mk | 5 ++ media/mediaserver/main_mediaserver.cpp | 105 ++++++++++++++++++++++++++++++--- services/medialog/Android.mk | 11 ++++ services/medialog/MediaLogService.cpp | 81 +++++++++++++++++++++++++ services/medialog/MediaLogService.h | 65 ++++++++++++++++++++ 8 files changed, 398 insertions(+), 9 deletions(-) create mode 100644 include/media/IMediaLogService.h create mode 100644 media/libmedia/IMediaLogService.cpp create mode 100644 services/medialog/Android.mk create mode 100644 services/medialog/MediaLogService.cpp create mode 100644 services/medialog/MediaLogService.h diff --git a/include/media/IMediaLogService.h b/include/media/IMediaLogService.h new file mode 100644 index 0000000..1f5777e --- /dev/null +++ b/include/media/IMediaLogService.h @@ -0,0 +1,45 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_IMEDIALOGSERVICE_H +#define ANDROID_IMEDIALOGSERVICE_H + +#include +#include +#include + +namespace android { + +class IMediaLogService: public IInterface +{ +public: + DECLARE_META_INTERFACE(MediaLogService); + + virtual void registerWriter(const sp& shared, size_t size, const char *name) = 0; + virtual void unregisterWriter(const sp& shared) = 0; + +}; + +class BnMediaLogService: public BnInterface +{ +public: + virtual status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, + uint32_t flags = 0); +}; + +} // namespace android + +#endif // ANDROID_IMEDIALOGSERVICE_H diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index a35d562..52fa3e1 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -23,6 +23,7 @@ LOCAL_SRC_FILES:= \ AudioRecord.cpp \ AudioSystem.cpp \ mediaplayer.cpp \ + IMediaLogService.cpp \ IMediaPlayerService.cpp \ IMediaPlayerClient.cpp \ IMediaRecorderClient.cpp \ diff --git a/media/libmedia/IMediaLogService.cpp b/media/libmedia/IMediaLogService.cpp new file mode 100644 index 0000000..33239a7 --- /dev/null +++ b/media/libmedia/IMediaLogService.cpp @@ -0,0 +1,94 @@ +/* +** +** Copyright 2007, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#define LOG_TAG "IMediaLogService" +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include + +namespace android { + +enum { + REGISTER_WRITER = IBinder::FIRST_CALL_TRANSACTION, + UNREGISTER_WRITER, +}; + +class BpMediaLogService : public BpInterface +{ +public: + BpMediaLogService(const sp& impl) + : BpInterface(impl) + { + } + + virtual void registerWriter(const sp& shared, size_t size, const char *name) { + Parcel data, reply; + data.writeInterfaceToken(IMediaLogService::getInterfaceDescriptor()); + data.writeStrongBinder(shared->asBinder()); + data.writeInt32((int32_t) size); + data.writeCString(name); + status_t status = remote()->transact(REGISTER_WRITER, data, &reply); + // FIXME ignores status + } + + virtual void unregisterWriter(const sp& shared) { + Parcel data, reply; + data.writeInterfaceToken(IMediaLogService::getInterfaceDescriptor()); + data.writeStrongBinder(shared->asBinder()); + status_t status = remote()->transact(UNREGISTER_WRITER, data, &reply); + // FIXME ignores status + } + +}; + +IMPLEMENT_META_INTERFACE(MediaLogService, "android.media.IMediaLogService"); + +// ---------------------------------------------------------------------- + +status_t BnMediaLogService::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch (code) { + + case REGISTER_WRITER: { + CHECK_INTERFACE(IMediaLogService, data, reply); + sp shared = interface_cast(data.readStrongBinder()); + size_t size = (size_t) data.readInt32(); + const char *name = data.readCString(); + registerWriter(shared, size, name); + return NO_ERROR; + } + + case UNREGISTER_WRITER: { + CHECK_INTERFACE(IMediaLogService, data, reply); + sp shared = interface_cast(data.readStrongBinder()); + unregisterWriter(shared); + return NO_ERROR; + } + + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk index 8c3cc5e..0a0f4db 100644 --- a/media/mediaserver/Android.mk +++ b/media/mediaserver/Android.mk @@ -7,12 +7,17 @@ LOCAL_SRC_FILES:= \ LOCAL_SHARED_LIBRARIES := \ libaudioflinger \ libcameraservice \ + libmedialogservice \ + libcutils \ + libnbaio \ + libmedia \ libmediaplayerservice \ libutils \ libbinder LOCAL_C_INCLUDES := \ frameworks/av/media/libmediaplayerservice \ + frameworks/av/services/medialog \ frameworks/av/services/audioflinger \ frameworks/av/services/camera/libcameraservice diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp index ddd5b84..0862952 100644 --- a/media/mediaserver/main_mediaserver.cpp +++ b/media/mediaserver/main_mediaserver.cpp @@ -18,14 +18,19 @@ #define LOG_TAG "mediaserver" //#define LOG_NDEBUG 0 +#include +#include +#include #include #include #include +#include #include // from LOCAL_C_INCLUDES #include "AudioFlinger.h" #include "CameraService.h" +#include "MediaLogService.h" #include "MediaPlayerService.h" #include "AudioPolicyService.h" @@ -34,13 +39,95 @@ using namespace android; int main(int argc, char** argv) { signal(SIGPIPE, SIG_IGN); - sp proc(ProcessState::self()); - sp sm = defaultServiceManager(); - ALOGI("ServiceManager: %p", sm.get()); - AudioFlinger::instantiate(); - MediaPlayerService::instantiate(); - CameraService::instantiate(); - AudioPolicyService::instantiate(); - ProcessState::self()->startThreadPool(); - IPCThreadState::self()->joinThreadPool(); + char 
value[PROPERTY_VALUE_MAX]; + bool doLog = (property_get("ro.test_harness", value, "0") > 0) && (atoi(value) == 1); + pid_t childPid; + // FIXME The advantage of making the process containing media.log service the parent process of + // the process that contains all the other real services, is that it allows us to collect more + // detailed information such as signal numbers, stop and continue, resource usage, etc. + // But it is also more complex. Consider replacing this by independent processes, and using + // binder on death notification instead. + if (doLog && (childPid = fork()) != 0) { + // media.log service + //prctl(PR_SET_NAME, (unsigned long) "media.log", 0, 0, 0); + // unfortunately ps ignores PR_SET_NAME for the main thread, so use this ugly hack + strcpy(argv[0], "media.log"); + sp proc(ProcessState::self()); + MediaLogService::instantiate(); + ProcessState::self()->startThreadPool(); + for (;;) { + siginfo_t info; + int ret = waitid(P_PID, childPid, &info, WEXITED | WSTOPPED | WCONTINUED); + if (ret == EINTR) { + continue; + } + if (ret < 0) { + break; + } + char buffer[32]; + const char *code; + switch (info.si_code) { + case CLD_EXITED: + code = "CLD_EXITED"; + break; + case CLD_KILLED: + code = "CLD_KILLED"; + break; + case CLD_DUMPED: + code = "CLD_DUMPED"; + break; + case CLD_STOPPED: + code = "CLD_STOPPED"; + break; + case CLD_TRAPPED: + code = "CLD_TRAPPED"; + break; + case CLD_CONTINUED: + code = "CLD_CONTINUED"; + break; + default: + snprintf(buffer, sizeof(buffer), "unknown (%d)", info.si_code); + code = buffer; + break; + } + struct rusage usage; + getrusage(RUSAGE_CHILDREN, &usage); + ALOG(LOG_ERROR, "media.log", "pid %d status %d code %s user %ld.%03lds sys %ld.%03lds", + info.si_pid, info.si_status, code, + usage.ru_utime.tv_sec, usage.ru_utime.tv_usec / 1000, + usage.ru_stime.tv_sec, usage.ru_stime.tv_usec / 1000); + sp sm = defaultServiceManager(); + sp binder = sm->getService(String16("media.log")); + if (binder != 0) { + Vector args; + binder->dump(-1, args); + } + switch (info.si_code) { + case CLD_EXITED: + case CLD_KILLED: + case CLD_DUMPED: { + ALOG(LOG_INFO, "media.log", "exiting"); + _exit(0); + // not reached + } + default: + break; + } + } + } else { + // all other services + if (doLog) { + prctl(PR_SET_PDEATHSIG, SIGKILL); // if parent media.log dies before me, kill me also + setpgid(0, 0); // but if I die first, don't kill my parent + } + sp proc(ProcessState::self()); + sp sm = defaultServiceManager(); + ALOGI("ServiceManager: %p", sm.get()); + AudioFlinger::instantiate(); + MediaPlayerService::instantiate(); + CameraService::instantiate(); + AudioPolicyService::instantiate(); + ProcessState::self()->startThreadPool(); + IPCThreadState::self()->joinThreadPool(); + } } diff --git a/services/medialog/Android.mk b/services/medialog/Android.mk new file mode 100644 index 0000000..559b1ed --- /dev/null +++ b/services/medialog/Android.mk @@ -0,0 +1,11 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := MediaLogService.cpp + +LOCAL_SHARED_LIBRARIES := libmedia libbinder libutils libnbaio + +LOCAL_MODULE:= libmedialogservice + +include $(BUILD_SHARED_LIBRARY) diff --git a/services/medialog/MediaLogService.cpp b/services/medialog/MediaLogService.cpp new file mode 100644 index 0000000..2332b3e --- /dev/null +++ b/services/medialog/MediaLogService.cpp @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except 
in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "MediaLog" +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include "MediaLogService.h" + +namespace android { + +void MediaLogService::registerWriter(const sp& shared, size_t size, const char *name) +{ + if (IPCThreadState::self()->getCallingUid() != AID_MEDIA || shared == 0 || + size < kMinSize || size > kMaxSize || name == NULL || + shared->size() < NBLog::Timeline::sharedSize(size)) { + return; + } + sp reader(new NBLog::Reader(size, shared)); + NamedReader namedReader(reader, name); + Mutex::Autolock _l(mLock); + mNamedReaders.add(namedReader); +} + +void MediaLogService::unregisterWriter(const sp& shared) +{ + if (IPCThreadState::self()->getCallingUid() != AID_MEDIA || shared == 0) { + return; + } + Mutex::Autolock _l(mLock); + for (size_t i = 0; i < mNamedReaders.size(); ) { + if (mNamedReaders[i].reader()->isIMemory(shared)) { + mNamedReaders.removeAt(i); + } else { + i++; + } + } +} + +status_t MediaLogService::dump(int fd, const Vector& args) +{ + Vector namedReaders; + { + Mutex::Autolock _l(mLock); + namedReaders = mNamedReaders; + } + for (size_t i = 0; i < namedReaders.size(); i++) { + const NamedReader& namedReader = namedReaders[i]; + if (fd >= 0) { + fdprintf(fd, "\n%s:\n", namedReader.name()); + } else { + ALOGI("%s:", namedReader.name()); + } + namedReader.reader()->dump(fd, 0 /*indent*/); + } + return NO_ERROR; +} + +status_t MediaLogService::onTransact(uint32_t code, const Parcel& data, Parcel* reply, + uint32_t flags) +{ + return BnMediaLogService::onTransact(code, data, reply, flags); +} + +} // namespace android diff --git a/services/medialog/MediaLogService.h b/services/medialog/MediaLogService.h new file mode 100644 index 0000000..2d89a41 --- /dev/null +++ b/services/medialog/MediaLogService.h @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
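For context, this is roughly how a writer process is expected to hook into the service above: allocate a shared region of at least Timeline::sharedSize(size) bytes, wrap it in an NBLog::Writer, and hand the IMemory to media.log. The MemoryDealer-based allocation below is an assumption for illustration; any sufficiently large sp<IMemory> would do, and the registration only succeeds for uid AID_MEDIA, as checked in registerWriter().

    #include <binder/IServiceManager.h>
    #include <binder/MemoryDealer.h>
    #include <media/IMediaLogService.h>
    #include <media/nbaio/NBLog.h>
    #include <utils/String16.h>

    using namespace android;

    sp<NBLog::Writer> createRegisteredWriter(size_t size, const char *name) {
        // the shared region holds the control block plus the circular buffer
        const size_t sharedSize = NBLog::Timeline::sharedSize(size);
        sp<MemoryDealer> dealer = new MemoryDealer(sharedSize, name);
        sp<IMemory> shared = dealer->allocate(sharedSize);
        sp<NBLog::Writer> writer = new NBLog::Writer(size, shared);

        sp<IBinder> binder = defaultServiceManager()->getService(String16("media.log"));
        if (binder != 0) {
            interface_cast<IMediaLogService>(binder)->registerWriter(shared, size, name);
        }
        return writer;   // the writer keeps a reference to the shared memory
    }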
+ */ + +#ifndef ANDROID_MEDIA_LOG_SERVICE_H +#define ANDROID_MEDIA_LOG_SERVICE_H + +#include +#include +#include + +namespace android { + +class MediaLogService : public BinderService, public BnMediaLogService +{ + friend class BinderService; // for MediaLogService() +public: + MediaLogService() : BnMediaLogService() { } + virtual ~MediaLogService() { } + virtual void onFirstRef() { } + + static const char* getServiceName() { return "media.log"; } + + static const size_t kMinSize = 0x100; + static const size_t kMaxSize = 0x10000; + virtual void registerWriter(const sp& shared, size_t size, const char *name); + virtual void unregisterWriter(const sp& shared); + + virtual status_t dump(int fd, const Vector& args); + virtual status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, + uint32_t flags); + +private: + Mutex mLock; + class NamedReader { + public: + NamedReader() : mReader(0) { mName[0] = '\0'; } // for Vector + NamedReader(const sp& reader, const char *name) : mReader(reader) + { strlcpy(mName, name, sizeof(mName)); } + ~NamedReader() { } + const sp& reader() const { return mReader; } + const char* name() const { return mName; } + private: + sp mReader; + static const size_t kMaxName = 32; + char mName[kMaxName]; + }; + Vector mNamedReaders; +}; + +} // namespace android + +#endif // ANDROID_MEDIA_LOG_SERVICE_H -- cgit v1.1 From 2d45dbd1910a7e51d383e8583017e6f26cd3498a Mon Sep 17 00:00:00 2001 From: Hung Nguyen Date: Tue, 5 Jun 2012 13:19:53 +0200 Subject: Fixed bugs where the content of buffer holding keys was cleared using a wrong size in forward lock Modified by James Dong Change-Id: Iaacfc79b2b26bdee7046d8555e3b4e0bc224c3c5 --- .../plugins/forward-lock/internal-format/converter/FwdLockConv.c | 6 ++++-- .../plugins/forward-lock/internal-format/decoder/FwdLockFile.c | 7 +++++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/converter/FwdLockConv.c b/drm/libdrmframework/plugins/forward-lock/internal-format/converter/FwdLockConv.c index bb97abc..9d15835 100644 --- a/drm/libdrmframework/plugins/forward-lock/internal-format/converter/FwdLockConv.c +++ b/drm/libdrmframework/plugins/forward-lock/internal-format/converter/FwdLockConv.c @@ -245,7 +245,9 @@ static int FwdLockConv_DeriveKeys(FwdLockConv_Session_t *pSession) { AES_KEY sessionRoundKeys; unsigned char value[KEY_SIZE]; unsigned char key[KEY_SIZE]; - } *pData = malloc(sizeof *pData); + }; + const size_t kSize = sizeof(struct FwdLockConv_DeriveKeys_Data); + struct FwdLockConv_DeriveKeys_Data *pData = malloc(kSize); if (pData == NULL) { status = FwdLockConv_Status_OutOfMemory; } else { @@ -268,7 +270,7 @@ static int FwdLockConv_DeriveKeys(FwdLockConv_Session_t *pSession) { status = FwdLockConv_Status_OK; } } - memset(pData, 0, sizeof pData); // Zero out key data. + memset(pData, 0, kSize); // Zero out key data. 
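The "wrong size" in this commit is the classic sizeof-on-a-pointer mistake: the removed line above clears only sizeof(pData) bytes, the size of the pointer itself (4 or 8 bytes), so most of the derived key material was left behind in the heap block. A tiny illustration of the difference, using a hypothetical struct rather than the real key structure:

    #include <cstdlib>
    #include <cstring>

    struct KeyData {
        unsigned char key[16];
        unsigned char value[16];
    };

    int main() {
        KeyData *p = static_cast<KeyData *>(malloc(sizeof *p));
        memset(p, 0, sizeof p);     // wrong: zeroes only the first 4 or 8 bytes
        memset(p, 0, sizeof *p);    // right: zeroes all 32 bytes, as the patch now does via kSize
        free(p);
        return 0;
    }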
free(pData); } return status; diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/decoder/FwdLockFile.c b/drm/libdrmframework/plugins/forward-lock/internal-format/decoder/FwdLockFile.c index 7ff3c00..43b9e98 100644 --- a/drm/libdrmframework/plugins/forward-lock/internal-format/decoder/FwdLockFile.c +++ b/drm/libdrmframework/plugins/forward-lock/internal-format/decoder/FwdLockFile.c @@ -174,7 +174,10 @@ static int FwdLockFile_DeriveKeys(FwdLockFile_Session_t * pSession) { AES_KEY sessionRoundKeys; unsigned char value[KEY_SIZE]; unsigned char key[KEY_SIZE]; - } *pData = malloc(sizeof *pData); + }; + + const size_t kSize = sizeof(struct FwdLockFile_DeriveKeys_Data); + struct FwdLockFile_DeriveKeys_Data *pData = malloc(kSize); if (pData == NULL) { result = FALSE; } else { @@ -202,7 +205,7 @@ static int FwdLockFile_DeriveKeys(FwdLockFile_Session_t * pSession) { if (!result) { errno = ENOSYS; } - memset(pData, 0, sizeof pData); // Zero out key data. + memset(pData, 0, kSize); // Zero out key data. free(pData); } return result; -- cgit v1.1 From 0f6675d5fdf15d4b8765545fb6a351138acccdf6 Mon Sep 17 00:00:00 2001 From: SeungBeom Kim Date: Wed, 16 Jan 2013 15:34:00 +0900 Subject: ACodec Fix. Bug: 7961269 Change-Id: Ie12530f89dd96dbce82e873de0c2310490390c34 Signed-off-by: SeungBeom Kim --- media/libstagefright/ACodec.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 7920d32..7b27843 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -1421,7 +1421,8 @@ status_t ACodec::setSupportedOutputFormat() { || format.eColorFormat == OMX_COLOR_FormatCbYCrY || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar - || format.eColorFormat == OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka); + || format.eColorFormat == OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka + || format.eColorFormat == OMX_SEC_COLOR_FormatNV12Tiled); return mOMX->setParameter( mNode, OMX_IndexParamVideoPortFormat, -- cgit v1.1 From 558d60e4e2217e02b3e7adf7afaf51d1c6e08f91 Mon Sep 17 00:00:00 2001 From: Henrik B Andersson Date: Sun, 15 Jul 2012 07:46:29 -0700 Subject: Remove unnecessary lib dependencies in the FL makefile Change-Id: Ia1e81a91b4b8dc1bf3558a4aa06e90a955c6fbe6 --- .../plugins/forward-lock/internal-format/converter/Android.mk | 4 ---- .../plugins/forward-lock/internal-format/decoder/Android.mk | 4 ---- 2 files changed, 8 deletions(-) diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/converter/Android.mk b/drm/libdrmframework/plugins/forward-lock/internal-format/converter/Android.mk index 37a3851..8f08c88 100644 --- a/drm/libdrmframework/plugins/forward-lock/internal-format/converter/Android.mk +++ b/drm/libdrmframework/plugins/forward-lock/internal-format/converter/Android.mk @@ -26,10 +26,6 @@ LOCAL_C_INCLUDES := \ LOCAL_SHARED_LIBRARIES := libcrypto -LOCAL_WHOLE_STATIC_LIBRARIES := libfwdlock-common - -LOCAL_STATIC_LIBRARIES := libfwdlock-common - LOCAL_MODULE := libfwdlock-converter LOCAL_MODULE_TAGS := optional diff --git a/drm/libdrmframework/plugins/forward-lock/internal-format/decoder/Android.mk b/drm/libdrmframework/plugins/forward-lock/internal-format/decoder/Android.mk index d9b5cfd..7b493c3 100644 --- a/drm/libdrmframework/plugins/forward-lock/internal-format/decoder/Android.mk +++ b/drm/libdrmframework/plugins/forward-lock/internal-format/decoder/Android.mk @@ 
-26,10 +26,6 @@ LOCAL_C_INCLUDES := \ LOCAL_SHARED_LIBRARIES := libcrypto -LOCAL_WHOLE_STATIC_LIBRARIES := libfwdlock-common - -LOCAL_STATIC_LIBRARIES := libfwdlock-common - LOCAL_MODULE := libfwdlock-decoder LOCAL_MODULE_TAGS := optional -- cgit v1.1 From 94a483bf2bd699275673d9cd57cb125d48572f30 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 29 Jan 2013 09:22:16 -0800 Subject: Squashed commit of the following: commit f2c38e5cf8cee3b597c744c9d6a9c0969ac8599a Author: Andreas Huber Date: Mon Jan 28 16:33:07 2013 -0800 Proper support for video format selection/negotiation. Change-Id: I7db86cef939d63b8064be1c74de9ad78e85d45d9 commit 488023b7bad086692ffe942114fa3cc0e59a16c0 Author: Andreas Huber Date: Mon Jan 28 11:21:23 2013 -0800 Sink now notifies clients once it is disconnected. Change-Id: I2f0a458ef1ec30dda1272ad5a013fee4ee70edc9 commit 783932e40dd904aa531c263ad51280d9ca814dcb Author: Andreas Huber Date: Tue Dec 18 15:03:40 2012 -0800 Alternative DirectRenderer implementation. Change-Id: I307beb913d7a61cb938bcb02696cc2e82d2b8b07 commit 1935cc9a87824aea71fc8ebe2162f62ec634ce5a Author: Andreas Huber Date: Tue Dec 18 10:24:27 2012 -0800 Experimenting with wifi sink timing. Change-Id: I059bae9762cf11777666988a8b4ab2012b5807be commit a859ee1eadd6a1d6a080667917e8b102c3770d61 Author: Andreas Huber Date: Thu Nov 15 11:16:30 2012 -0800 wfd sink update. Change-Id: I026dfc580be92aa40dbbe7c1bc061fadf3b08be8 Change-Id: I191d3d7015869ca99254d813d074328fb5b2f479 --- media/libstagefright/mpeg2ts/ATSParser.cpp | 10 + media/libstagefright/wifi-display/Android.mk | 2 + media/libstagefright/wifi-display/VideoFormats.cpp | 370 ++++++++++++++++++ media/libstagefright/wifi-display/VideoFormats.h | 83 ++++ .../wifi-display/sink/DirectRenderer.cpp | 428 +++++++++++++++++++++ .../wifi-display/sink/DirectRenderer.h | 94 +++++ media/libstagefright/wifi-display/sink/RTPSink.cpp | 17 +- media/libstagefright/wifi-display/sink/RTPSink.h | 15 +- .../wifi-display/sink/TunnelRenderer.cpp | 9 +- .../wifi-display/sink/WifiDisplaySink.cpp | 87 ++++- .../wifi-display/sink/WifiDisplaySink.h | 22 +- .../wifi-display/source/PlaybackSession.cpp | 78 ++-- .../wifi-display/source/PlaybackSession.h | 21 +- .../libstagefright/wifi-display/source/Sender.cpp | 10 +- .../wifi-display/source/WifiDisplaySource.cpp | 217 +++++++---- .../wifi-display/source/WifiDisplaySource.h | 12 +- media/libstagefright/wifi-display/wfd.cpp | 1 + 17 files changed, 1347 insertions(+), 129 deletions(-) create mode 100644 media/libstagefright/wifi-display/VideoFormats.cpp create mode 100644 media/libstagefright/wifi-display/VideoFormats.h create mode 100644 media/libstagefright/wifi-display/sink/DirectRenderer.cpp create mode 100644 media/libstagefright/wifi-display/sink/DirectRenderer.h diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp index 4f6c4b2..a167b5a 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.cpp +++ b/media/libstagefright/mpeg2ts/ATSParser.cpp @@ -534,6 +534,16 @@ status_t ATSParser::Stream::parse( mBuffer->setRange(0, 0); mExpectedContinuityCounter = -1; +#if 0 + // Uncomment this if you'd rather see no corruption whatsoever on + // screen and suspend updates until we come across another IDR frame. 
+ + if (mStreamType == STREAMTYPE_H264) { + ALOGI("clearing video queue"); + mQueue->clear(true /* clearFormat */); + } +#endif + return OK; } diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index 75098f1..5095e82 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -6,6 +6,7 @@ LOCAL_SRC_FILES:= \ ANetworkSession.cpp \ Parameters.cpp \ ParsedMessage.cpp \ + sink/DirectRenderer.cpp \ sink/LinearRegression.cpp \ sink/RTPSink.cpp \ sink/TunnelRenderer.cpp \ @@ -18,6 +19,7 @@ LOCAL_SRC_FILES:= \ source/TSPacketizer.cpp \ source/WifiDisplaySource.cpp \ TimeSeries.cpp \ + VideoFormats.cpp \ LOCAL_C_INCLUDES:= \ $(TOP)/frameworks/av/media/libstagefright \ diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp new file mode 100644 index 0000000..9ad8c3c --- /dev/null +++ b/media/libstagefright/wifi-display/VideoFormats.cpp @@ -0,0 +1,370 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "VideoFormats" +#include + +#include "VideoFormats.h" + +#include + +namespace android { + +VideoFormats::VideoFormats() { + for (size_t i = 0; i < kNumResolutionTypes; ++i) { + mResolutionEnabled[i] = 0; + } + + setNativeResolution(RESOLUTION_CEA, 0); // default to 640x480 p60 +} + +void VideoFormats::setNativeResolution(ResolutionType type, size_t index) { + CHECK_LT(type, kNumResolutionTypes); + CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL)); + + mNativeType = type; + mNativeIndex = index; + + setResolutionEnabled(type, index); +} + +void VideoFormats::getNativeResolution( + ResolutionType *type, size_t *index) const { + *type = mNativeType; + *index = mNativeIndex; +} + +void VideoFormats::disableAll() { + for (size_t i = 0; i < kNumResolutionTypes; ++i) { + mResolutionEnabled[i] = 0; + } +} + +void VideoFormats::enableAll() { + for (size_t i = 0; i < kNumResolutionTypes; ++i) { + mResolutionEnabled[i] = 0xffffffff; + } +} + +void VideoFormats::setResolutionEnabled( + ResolutionType type, size_t index, bool enabled) { + CHECK_LT(type, kNumResolutionTypes); + CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL)); + + if (enabled) { + mResolutionEnabled[type] |= (1ul << index); + } else { + mResolutionEnabled[type] &= ~(1ul << index); + } +} + +bool VideoFormats::isResolutionEnabled( + ResolutionType type, size_t index) const { + CHECK_LT(type, kNumResolutionTypes); + CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL)); + + return mResolutionEnabled[type] & (1ul << index); +} + +// static +bool VideoFormats::GetConfiguration( + ResolutionType type, + size_t index, + size_t *width, size_t *height, size_t *framesPerSecond, + bool *interlaced) { + CHECK_LT(type, kNumResolutionTypes); + + if (index >= 32) { + return false; + } + + static const struct config_t { + size_t width, height, framesPerSecond; 
+ bool interlaced; + } kConfigs[kNumResolutionTypes][32] = { + { + // CEA Resolutions + { 640, 480, 60, false }, + { 720, 480, 60, false }, + { 720, 480, 60, true }, + { 720, 576, 50, false }, + { 720, 576, 50, true }, + { 1280, 720, 30, false }, + { 1280, 720, 60, false }, + { 1920, 1080, 30, false }, + { 1920, 1080, 60, false }, + { 1920, 1080, 60, true }, + { 1280, 720, 25, false }, + { 1280, 720, 50, false }, + { 1920, 1080, 25, false }, + { 1920, 1080, 50, false }, + { 1920, 1080, 50, true }, + { 1280, 720, 24, false }, + { 1920, 1080, 24, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + }, + { + // VESA Resolutions + { 800, 600, 30, false }, + { 800, 600, 60, false }, + { 1024, 768, 30, false }, + { 1024, 768, 60, false }, + { 1152, 864, 30, false }, + { 1152, 864, 60, false }, + { 1280, 768, 30, false }, + { 1280, 768, 60, false }, + { 1280, 800, 30, false }, + { 1280, 800, 60, false }, + { 1360, 768, 30, false }, + { 1360, 768, 60, false }, + { 1366, 768, 30, false }, + { 1366, 768, 60, false }, + { 1280, 1024, 30, false }, + { 1280, 1024, 60, false }, + { 1400, 1050, 30, false }, + { 1400, 1050, 60, false }, + { 1440, 900, 30, false }, + { 1440, 900, 60, false }, + { 1600, 900, 30, false }, + { 1600, 900, 60, false }, + { 1600, 1200, 30, false }, + { 1600, 1200, 60, false }, + { 1680, 1024, 30, false }, + { 1680, 1024, 60, false }, + { 1680, 1050, 30, false }, + { 1680, 1050, 60, false }, + { 1920, 1200, 30, false }, + { 1920, 1200, 60, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + }, + { + // HH Resolutions + { 800, 480, 30, false }, + { 800, 480, 60, false }, + { 854, 480, 30, false }, + { 854, 480, 60, false }, + { 864, 480, 30, false }, + { 864, 480, 60, false }, + { 640, 360, 30, false }, + { 640, 360, 60, false }, + { 960, 540, 30, false }, + { 960, 540, 60, false }, + { 848, 480, 30, false }, + { 848, 480, 60, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + { 0, 0, 0, false }, + } + }; + + const config_t *config = &kConfigs[type][index]; + + if (config->width == 0) { + return false; + } + + if (width) { + *width = config->width; + } + + if (height) { + *height = config->height; + } + + if (framesPerSecond) { + *framesPerSecond = config->framesPerSecond; + } + + if (interlaced) { + *interlaced = config->interlaced; + } + + return true; +} + +bool VideoFormats::parseFormatSpec(const char *spec) { + CHECK_EQ(kNumResolutionTypes, 3); + + unsigned native, dummy; + + if (sscanf( + spec, + "%02x %02x %02x %02x %08X %08X %08X", + &native, + &dummy, + &dummy, + &dummy, + &mResolutionEnabled[0], + &mResolutionEnabled[1], + &mResolutionEnabled[2]) != 7) { + return false; + } + + mNativeIndex = native >> 3; + mNativeType = (ResolutionType)(native & 7); + + if (mNativeType >= kNumResolutionTypes) { + return false; + } + + return GetConfiguration(mNativeType, mNativeIndex, NULL, NULL, NULL, NULL); +} + +AString 
VideoFormats::getFormatSpec() const { + CHECK_EQ(kNumResolutionTypes, 3); + + // wfd_video_formats: + // 1 byte "native" + // 1 byte "preferred-display-mode-supported" 0 or 1 + // one or more avc codec structures + // 1 byte profile + // 1 byte level + // 4 byte CEA mask + // 4 byte VESA mask + // 4 byte HH mask + // 1 byte latency + // 2 byte min-slice-slice + // 2 byte slice-enc-params + // 1 byte framerate-control-support + // max-hres (none or 2 byte) + // max-vres (none or 2 byte) + + return StringPrintf( + "%02x 00 02 02 %08x %08x %08x 00 0000 0000 00 none none", + (mNativeIndex << 3) | mNativeType, + mResolutionEnabled[0], + mResolutionEnabled[1], + mResolutionEnabled[2]); +} + +// static +bool VideoFormats::PickBestFormat( + const VideoFormats &sinkSupported, + const VideoFormats &sourceSupported, + ResolutionType *chosenType, + size_t *chosenIndex) { + ResolutionType nativeType; + size_t nativeIndex; + sinkSupported.getNativeResolution(&nativeType, &nativeIndex); + if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) { + if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) { + ALOGI("Choosing sink's native resolution"); + *chosenType = nativeType; + *chosenIndex = nativeIndex; + return true; + } + } else { + ALOGW("Sink advertised native resolution that it doesn't " + "actually support... ignoring"); + } + + sourceSupported.getNativeResolution(&nativeType, &nativeIndex); + if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) { + if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) { + ALOGI("Choosing source's native resolution"); + *chosenType = nativeType; + *chosenIndex = nativeIndex; + return true; + } + } else { + ALOGW("Source advertised native resolution that it doesn't " + "actually support... ignoring"); + } + + bool first = true; + uint32_t bestScore = 0; + size_t bestType = 0; + size_t bestIndex = 0; + for (size_t i = 0; i < kNumResolutionTypes; ++i) { + for (size_t j = 0; j < 32; ++j) { + size_t width, height, framesPerSecond; + bool interlaced; + if (!GetConfiguration( + (ResolutionType)i, + j, + &width, &height, &framesPerSecond, &interlaced)) { + break; + } + + if (!sinkSupported.isResolutionEnabled((ResolutionType)i, j) + || !sourceSupported.isResolutionEnabled( + (ResolutionType)i, j)) { + continue; + } + + ALOGV("type %u, index %u, %u x %u %c%u supported", + i, j, width, height, interlaced ? 'i' : 'p', framesPerSecond); + + uint32_t score = width * height * framesPerSecond; + if (!interlaced) { + score *= 2; + } + + if (first || score > bestScore) { + bestScore = score; + bestType = i; + bestIndex = j; + + first = false; + } + } + } + + if (first) { + return false; + } + + *chosenType = (ResolutionType)bestType; + *chosenIndex = bestIndex; + + return true; +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/VideoFormats.h b/media/libstagefright/wifi-display/VideoFormats.h new file mode 100644 index 0000000..a84407a --- /dev/null +++ b/media/libstagefright/wifi-display/VideoFormats.h @@ -0,0 +1,83 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_FORMATS_H_ + +#define VIDEO_FORMATS_H_ + +#include + +#include + +namespace android { + +struct AString; + +// This class encapsulates that video resolution capabilities of a wfd source +// or sink as outlined in the wfd specs. Currently three sets of resolutions +// are specified, each of which supports up to 32 resolutions. +// In addition to its capabilities each sink/source also publishes its +// "native" resolution, presumably one that is preferred among all others +// because it wouldn't require any scaling and directly corresponds to the +// display capabilities/pixels. +struct VideoFormats { + VideoFormats(); + + enum ResolutionType { + RESOLUTION_CEA, + RESOLUTION_VESA, + RESOLUTION_HH, + kNumResolutionTypes, + }; + + void setNativeResolution(ResolutionType type, size_t index); + void getNativeResolution(ResolutionType *type, size_t *index) const; + + void disableAll(); + void enableAll(); + + void setResolutionEnabled( + ResolutionType type, size_t index, bool enabled = true); + + bool isResolutionEnabled(ResolutionType type, size_t index) const; + + static bool GetConfiguration( + ResolutionType type, size_t index, + size_t *width, size_t *height, size_t *framesPerSecond, + bool *interlaced); + + bool parseFormatSpec(const char *spec); + AString getFormatSpec() const; + + static bool PickBestFormat( + const VideoFormats &sinkSupported, + const VideoFormats &sourceSupported, + ResolutionType *chosenType, + size_t *chosenIndex); + +private: + ResolutionType mNativeType; + size_t mNativeIndex; + + uint32_t mResolutionEnabled[kNumResolutionTypes]; + + DISALLOW_EVIL_CONSTRUCTORS(VideoFormats); +}; + +} // namespace android + +#endif // VIDEO_FORMATS_H_ + diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp new file mode 100644 index 0000000..8120634 --- /dev/null +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp @@ -0,0 +1,428 @@ +/* + * Copyright 2012, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
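A sketch of how the VideoFormats pieces above are meant to fit together on the source side of the negotiation; the sink's wfd_video_formats string and the chosen native resolution are made-up examples, not values taken from the patch:

    #include <stdio.h>

    #include "VideoFormats.h"   // the header added by this patch

    using namespace android;

    static void negotiate(const char *sinkVideoFormatsSpec) {
        VideoFormats sinkFormats, sourceFormats;

        // capabilities the sink advertised in its wfd_video_formats parameter
        if (!sinkFormats.parseFormatSpec(sinkVideoFormatsSpec)) {
            return;
        }

        // what this source is willing to produce
        sourceFormats.enableAll();
        sourceFormats.setNativeResolution(VideoFormats::RESOLUTION_CEA, 5);  // 1280x720 p30

        VideoFormats::ResolutionType chosenType;
        size_t chosenIndex;
        if (!VideoFormats::PickBestFormat(
                    sinkFormats, sourceFormats, &chosenType, &chosenIndex)) {
            return;
        }

        size_t width, height, fps;
        bool interlaced;
        VideoFormats::GetConfiguration(
                chosenType, chosenIndex, &width, &height, &fps, &interlaced);
        printf("negotiated %zu x %zu %c%zu\n",
               width, height, interlaced ? 'i' : 'p', fps);
    }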
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "DirectRenderer" +#include + +#include "DirectRenderer.h" + +#include "AnotherPacketSource.h" +#include "ATSParser.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { + +// static +const int64_t DirectRenderer::kPacketLostDelayUs = 80000ll; + +// static +const int64_t DirectRenderer::kPacketLateDelayUs = 60000ll; + +DirectRenderer::DirectRenderer( + const sp ¬ifyLost, + const sp &bufferProducer) + : mNotifyLost(notifyLost), + mSurfaceTex(bufferProducer), + mTSParser(new ATSParser(ATSParser::ALIGNED_VIDEO_DATA)), + mVideoDecoderNotificationPending(false), + mAwaitingExtSeqNo(-1), + mRequestedRetransmission(false), + mPacketLostGeneration(0) { +} + +DirectRenderer::~DirectRenderer() { + if (mVideoDecoder != NULL) { + mVideoDecoder->release(); + mVideoDecoder.clear(); + + mVideoDecoderLooper->stop(); + mVideoDecoderLooper.clear(); + } +} + +void DirectRenderer::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatQueueBuffer: + { + sp buffer; + CHECK(msg->findBuffer("buffer", &buffer)); + + onQueueBuffer(buffer); + + dequeueMore(); + break; + } + + case kWhatPacketLate: + case kWhatPacketLost: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mPacketLostGeneration) { + // stale. + break; + } + + if (msg->what() == kWhatPacketLate) { + CHECK(!mRequestedRetransmission); + CHECK_GE(mAwaitingExtSeqNo, 0); + + ALOGV("packet extSeqNo %d is late, requesting retransmission.", + mAwaitingExtSeqNo); + + sp notify = mNotifyLost->dup(); + notify->setInt32("seqNo", (mAwaitingExtSeqNo & 0xffff)); + notify->post(); + + mRequestedRetransmission = true; + break; + } + + ALOGW("lost packet extSeqNo %d", mAwaitingExtSeqNo); + + sp extra; + mTSParser->signalDiscontinuity( + ATSParser::DISCONTINUITY_TIME, extra); + + mAwaitingExtSeqNo = -1; + mRequestedRetransmission = false; + dequeueMore(); + break; + } + + case kWhatVideoDecoderNotify: + { + onVideoDecoderNotify(); + break; + } + + default: + TRESPASS(); + } +} + +void DirectRenderer::onQueueBuffer(const sp &buffer) { + int32_t newExtendedSeqNo = buffer->int32Data(); + + if (mPackets.empty()) { + mPackets.push_back(buffer); + return; + } + + if (mAwaitingExtSeqNo > 0 && newExtendedSeqNo < mAwaitingExtSeqNo) { + // We're no longer interested in these. They're old. + return; + } + + List >::iterator firstIt = mPackets.begin(); + List >::iterator it = --mPackets.end(); + for (;;) { + int32_t extendedSeqNo = (*it)->int32Data(); + + if (extendedSeqNo == newExtendedSeqNo) { + // Duplicate packet. + return; + } + + if (extendedSeqNo < newExtendedSeqNo) { + // Insert new packet after the one at "it". + mPackets.insert(++it, buffer); + return; + } + + if (it == firstIt) { + // Insert new packet before the first existing one. + mPackets.insert(it, buffer); + return; + } + + --it; + } +} + +void DirectRenderer::dequeueMore() { + if (mAwaitingExtSeqNo >= 0) { + // Remove all packets before the one we're looking for, they had + // their chance. 
+ while (!mPackets.empty() + && (*mPackets.begin())->int32Data() < mAwaitingExtSeqNo) { + ALOGV("dropping late packet extSeqNo %d", + (*mPackets.begin())->int32Data()); + + mPackets.erase(mPackets.begin()); + } + } + + bool packetLostScheduled = (mAwaitingExtSeqNo >= 0); + + while (!mPackets.empty()) { + sp buffer = *mPackets.begin(); + int32_t extSeqNo = buffer->int32Data(); + + if (mAwaitingExtSeqNo >= 0 && extSeqNo != mAwaitingExtSeqNo) { + break; + } + + mPackets.erase(mPackets.begin()); + + if (packetLostScheduled) { + packetLostScheduled = false; + cancelPacketLost(); + } + + if (mRequestedRetransmission) { + ALOGV("recovered after requesting retransmission of extSeqNo %d", + mAwaitingExtSeqNo); + } + + CHECK_EQ(buffer->size() % 188, 0u); + + for (size_t offset = 0; offset < buffer->size(); offset += 188) { + status_t err = mTSParser->feedTSPacket( + buffer->data() + offset, 188); + + CHECK_EQ(err, (status_t)OK); + } + + mAwaitingExtSeqNo = extSeqNo + 1; + mRequestedRetransmission = false; + } + + if (!packetLostScheduled && mAwaitingExtSeqNo >= 0) { + schedulePacketLost(); + } + + dequeueAccessUnits(); +} + +void DirectRenderer::dequeueAccessUnits() { + sp audioSource = + static_cast( + mTSParser->getSource(ATSParser::AUDIO).get()); + + if (audioSource != NULL) { + status_t finalResult; + size_t n = 0; + while (audioSource->hasBufferAvailable(&finalResult)) { + sp accessUnit; + status_t err = audioSource->dequeueAccessUnit(&accessUnit); + if (err == OK) { + ++n; + } + } + + if (n > 0) { + ALOGV("dequeued %d audio access units.", n); + } + } + + sp videoSource = + static_cast( + mTSParser->getSource(ATSParser::VIDEO).get()); + + if (videoSource != NULL) { + if (mVideoDecoder == NULL) { + sp meta = videoSource->getFormat(); + if (meta != NULL) { + sp videoFormat; + status_t err = convertMetaDataToMessage(meta, &videoFormat); + CHECK_EQ(err, (status_t)OK); + + AString mime; + CHECK(videoFormat->findString("mime", &mime)); + + mVideoDecoderLooper = new ALooper; + mVideoDecoderLooper->setName("video codec looper"); + + mVideoDecoderLooper->start( + false /* runOnCallingThread */, + false /* canCallJava */, + PRIORITY_DEFAULT); + + mVideoDecoder = MediaCodec::CreateByType( + mVideoDecoderLooper, mime.c_str(), false /* encoder */); + + CHECK(mVideoDecoder != NULL); + + err = mVideoDecoder->configure( + videoFormat, + new SurfaceTextureClient(mSurfaceTex), + NULL /* crypto */, + 0 /* flags */); + + CHECK_EQ(err, (status_t)OK); + + err = mVideoDecoder->start(); + CHECK_EQ(err, (status_t)OK); + + err = mVideoDecoder->getInputBuffers( + &mVideoDecoderInputBuffers); + CHECK_EQ(err, (status_t)OK); + + scheduleVideoDecoderNotification(); + } + } + + status_t finalResult; + size_t n = 0; + while (videoSource->hasBufferAvailable(&finalResult)) { + sp accessUnit; + status_t err = videoSource->dequeueAccessUnit(&accessUnit); + if (err == OK) { + mVideoAccessUnits.push_back(accessUnit); + ++n; + } + } + + if (n > 0) { + ALOGV("dequeued %d video access units.", n); + queueVideoDecoderInputBuffers(); + } + } +} + +void DirectRenderer::schedulePacketLost() { + sp msg; + +#if 1 + msg = new AMessage(kWhatPacketLate, id()); + msg->setInt32("generation", mPacketLostGeneration); + msg->post(kPacketLateDelayUs); +#endif + + msg = new AMessage(kWhatPacketLost, id()); + msg->setInt32("generation", mPacketLostGeneration); + msg->post(kPacketLostDelayUs); +} + +void DirectRenderer::cancelPacketLost() { + ++mPacketLostGeneration; +} + +void DirectRenderer::queueVideoDecoderInputBuffers() { + if (mVideoDecoder == 
NULL) { + return; + } + + bool submittedMore = false; + + while (!mVideoAccessUnits.empty() + && !mVideoDecoderInputBuffersAvailable.empty()) { + size_t index = *mVideoDecoderInputBuffersAvailable.begin(); + + mVideoDecoderInputBuffersAvailable.erase( + mVideoDecoderInputBuffersAvailable.begin()); + + sp srcBuffer = *mVideoAccessUnits.begin(); + mVideoAccessUnits.erase(mVideoAccessUnits.begin()); + + const sp &dstBuffer = + mVideoDecoderInputBuffers.itemAt(index); + + memcpy(dstBuffer->data(), srcBuffer->data(), srcBuffer->size()); + + int64_t timeUs; + CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs)); + + status_t err = mVideoDecoder->queueInputBuffer( + index, + 0 /* offset */, + srcBuffer->size(), + timeUs, + 0 /* flags */); + CHECK_EQ(err, (status_t)OK); + + submittedMore = true; + } + + if (submittedMore) { + scheduleVideoDecoderNotification(); + } +} + +void DirectRenderer::onVideoDecoderNotify() { + mVideoDecoderNotificationPending = false; + + for (;;) { + size_t index; + status_t err = mVideoDecoder->dequeueInputBuffer(&index); + + if (err == OK) { + mVideoDecoderInputBuffersAvailable.push_back(index); + } else if (err == -EAGAIN) { + break; + } else { + TRESPASS(); + } + } + + queueVideoDecoderInputBuffers(); + + for (;;) { + size_t index; + size_t offset; + size_t size; + int64_t timeUs; + uint32_t flags; + status_t err = mVideoDecoder->dequeueOutputBuffer( + &index, + &offset, + &size, + &timeUs, + &flags); + + if (err == OK) { + err = mVideoDecoder->renderOutputBufferAndRelease(index); + CHECK_EQ(err, (status_t)OK); + } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) { + // We don't care. + } else if (err == INFO_FORMAT_CHANGED) { + // We don't care. + } else if (err == -EAGAIN) { + break; + } else { + TRESPASS(); + } + } + + scheduleVideoDecoderNotification(); +} + +void DirectRenderer::scheduleVideoDecoderNotification() { + if (mVideoDecoderNotificationPending) { + return; + } + + sp notify = + new AMessage(kWhatVideoDecoderNotify, id()); + + mVideoDecoder->requestActivityNotification(notify); + mVideoDecoderNotificationPending = true; +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h new file mode 100644 index 0000000..2babcb8 --- /dev/null +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.h @@ -0,0 +1,94 @@ +/* + * Copyright 2012, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef DIRECT_RENDERER_H_ + +#define DIRECT_RENDERER_H_ + +#include + +namespace android { + +struct ABuffer; +struct ATSParser; +struct IGraphicBufferProducer; +struct MediaCodec; + +// An experimental renderer that only supports video and decodes video data +// as soon as it arrives using a MediaCodec instance, rendering it without +// delay. Primarily meant to finetune packet loss discovery and minimize +// latency. 
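The class declared below leans on the two delay constants defined in DirectRenderer.cpp above: roughly 60 ms after a gap in the extended sequence numbers is noticed, a single retransmission request is sent for the missing packet, and at roughly 80 ms the packet is written off and the TS parser is told about the discontinuity. A minimal standalone sketch of that policy (the GapPolicy name and Action enum are illustrative, not AOSP code; the constants mirror kPacketLateDelayUs/kPacketLostDelayUs):

#include <stdint.h>

// Sketch only: the two-stage late/lost policy used by DirectRenderer.
struct GapPolicy {
    static const int64_t kLateUs = 60000;  // kPacketLateDelayUs: ask for one retransmit
    static const int64_t kLostUs = 80000;  // kPacketLostDelayUs: give up, signal discontinuity

    int64_t gapStartUs;            // when the missing extended seqNo was first awaited
    bool requestedRetransmission;  // at most one NACK per gap

    enum Action { WAIT, REQUEST_RETRANSMIT, DECLARE_LOST };

    Action evaluate(int64_t nowUs) {
        int64_t waitedUs = nowUs - gapStartUs;
        if (waitedUs >= kLostUs) {
            return DECLARE_LOST;           // corresponds to kWhatPacketLost
        }
        if (waitedUs >= kLateUs && !requestedRetransmission) {
            requestedRetransmission = true;
            return REQUEST_RETRANSMIT;     // corresponds to kWhatPacketLate
        }
        return WAIT;
    }
};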
+struct DirectRenderer : public AHandler { + DirectRenderer( + const sp ¬ifyLost, + const sp &bufferProducer); + + enum { + kWhatQueueBuffer = 'queB', + }; + +protected: + virtual void onMessageReceived(const sp &msg); + virtual ~DirectRenderer(); + +private: + enum { + kWhatPacketLate, + kWhatPacketLost, + kWhatVideoDecoderNotify, + }; + + static const int64_t kPacketLateDelayUs; + static const int64_t kPacketLostDelayUs; + + sp mNotifyLost; + sp mSurfaceTex; + + // Ordered by extended seq number. + List > mPackets; + + sp mTSParser; + + sp mVideoDecoderLooper; + sp mVideoDecoder; + Vector > mVideoDecoderInputBuffers; + List mVideoDecoderInputBuffersAvailable; + bool mVideoDecoderNotificationPending; + + List > mVideoAccessUnits; + + int32_t mAwaitingExtSeqNo; + bool mRequestedRetransmission; + int32_t mPacketLostGeneration; + + void onQueueBuffer(const sp &buffer); + void onVideoDecoderNotify(); + + void dequeueMore(); + void dequeueAccessUnits(); + + void schedulePacketLost(); + void cancelPacketLost(); + + void queueVideoDecoderInputBuffers(); + void scheduleVideoDecoderNotification(); + + DISALLOW_EVIL_CONSTRUCTORS(DirectRenderer); +}; + +} // namespace android + +#endif // DIRECT_RENDERER_H_ diff --git a/media/libstagefright/wifi-display/sink/RTPSink.cpp b/media/libstagefright/wifi-display/sink/RTPSink.cpp index 640e055..ad75373 100644 --- a/media/libstagefright/wifi-display/sink/RTPSink.cpp +++ b/media/libstagefright/wifi-display/sink/RTPSink.cpp @@ -21,7 +21,14 @@ #include "RTPSink.h" #include "ANetworkSession.h" + +#if USE_TUNNEL_RENDERER #include "TunnelRenderer.h" +#define RENDERER_CLASS TunnelRenderer +#else +#include "DirectRenderer.h" +#define RENDERER_CLASS DirectRenderer +#endif #include #include @@ -238,9 +245,11 @@ void RTPSink::Source::addReportBlock( RTPSink::RTPSink( const sp &netSession, - const sp &bufferProducer) + const sp &bufferProducer, + const sp ¬ify) : mNetSession(netSession), mSurfaceTex(bufferProducer), + mNotify(notify), mRTPPort(0), mRTPSessionID(0), mRTCPSessionID(0), @@ -470,6 +479,7 @@ status_t RTPSink::parseRTP(const sp &buffer) { uint32_t rtpTime = U32_AT(&data[4]); uint16_t seqNo = U16_AT(&data[2]); +#if 0 int64_t arrivalTimeUs; CHECK(buffer->meta()->findInt64("arrivalTimeUs", &arrivalTimeUs)); @@ -500,6 +510,7 @@ status_t RTPSink::parseRTP(const sp &buffer) { ALOGI("packet was %.2f ms late", latenessMs); } } +#endif sp meta = buffer->meta(); meta->setInt32("ssrc", srcId); @@ -515,12 +526,12 @@ status_t RTPSink::parseRTP(const sp &buffer) { sp notifyLost = new AMessage(kWhatPacketLost, id()); notifyLost->setInt32("ssrc", srcId); - mRenderer = new TunnelRenderer(notifyLost, mSurfaceTex); + mRenderer = new RENDERER_CLASS(notifyLost, mSurfaceTex); looper()->registerHandler(mRenderer); } sp queueBufferMsg = - new AMessage(TunnelRenderer::kWhatQueueBuffer, mRenderer->id()); + new AMessage(RENDERER_CLASS::kWhatQueueBuffer, mRenderer->id()); sp source = new Source(seqNo, buffer, queueBufferMsg); mSources.add(srcId, source); diff --git a/media/libstagefright/wifi-display/sink/RTPSink.h b/media/libstagefright/wifi-display/sink/RTPSink.h index 2183fd6..6e40185 100644 --- a/media/libstagefright/wifi-display/sink/RTPSink.h +++ b/media/libstagefright/wifi-display/sink/RTPSink.h @@ -24,18 +24,26 @@ #include +#define USE_TUNNEL_RENDERER 0 + namespace android { struct ABuffer; struct ANetworkSession; + +#if USE_TUNNEL_RENDERER struct TunnelRenderer; +#else +struct DirectRenderer; +#endif // Creates a pair of sockets for RTP/RTCP traffic, instantiates a renderer // 
for incoming transport stream data and occasionally sends statistics over // the RTCP channel. struct RTPSink : public AHandler { RTPSink(const sp &netSession, - const sp &bufferProducer); + const sp &bufferProducer, + const sp ¬ify); // If TCP interleaving is used, no UDP sockets are created, instead // incoming RTP/RTCP packets (arriving on the RTSP control connection) @@ -67,6 +75,7 @@ private: sp mNetSession; sp mSurfaceTex; + sp mNotify; KeyedVector > mSources; int32_t mRTPPort; @@ -78,7 +87,11 @@ private: LinearRegression mRegression; int64_t mMaxDelayMs; +#if USE_TUNNEL_RENDERER sp mRenderer; +#else + sp mRenderer; +#endif status_t parseRTP(const sp &buffer); status_t parseRTCP(const sp &buffer); diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp index f3f4536..04dbd7b 100644 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp @@ -264,16 +264,17 @@ sp TunnelRenderer::dequeueBuffer() { if (mFirstFailedAttemptUs < 0ll) { mFirstFailedAttemptUs = ALooper::GetNowUs(); - ALOGI("failed to get the correct packet the first time."); + ALOGV("failed to get the correct packet the first time."); return NULL; } if (mFirstFailedAttemptUs + 50000ll > ALooper::GetNowUs()) { // We're willing to wait a little while to get the right packet. -#if 0 +#if 1 if (!mRequestedRetransmission) { - ALOGI("requesting retransmission of seqNo %d", + ALOGI("requesting retransmission of extSeqNo %d (seqNo %d)", + mLastDequeuedExtSeqNo + 1, (mLastDequeuedExtSeqNo + 1) & 0xffff); sp notify = mNotifyLost->dup(); @@ -284,7 +285,7 @@ sp TunnelRenderer::dequeueBuffer() { } else #endif { - ALOGI("still waiting for the correct packet to arrive."); + ALOGV("still waiting for the correct packet to arrive."); } return NULL; diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index 0f0caf1..46c40c7 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -31,12 +31,27 @@ namespace android { WifiDisplaySink::WifiDisplaySink( const sp &netSession, - const sp &bufferProducer) + const sp &bufferProducer, + const sp ¬ify) : mState(UNDEFINED), mNetSession(netSession), mSurfaceTex(bufferProducer), + mNotify(notify), mSessionID(0), mNextCSeq(1) { +#if 1 + // We support any and all resolutions, but prefer 720p30 + mSinkSupportedVideoFormats.setNativeResolution( + VideoFormats::RESOLUTION_CEA, 5); // 1280 x 720 p30 + + mSinkSupportedVideoFormats.enableAll(); +#else + // We only support 800 x 600 p60. 
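    // (Illustrative aside, not part of this change: VideoFormats names a
    // resolution as a (type, index) pair, where each of the three tables --
    // CEA, VESA, HH -- holds up to 32 entries backed by one uint32_t enable
    // mask, so (RESOLUTION_VESA, 1) below is the second VESA entry,
    // 800 x 600 p60, and enabling only that entry amounts to
    //     mResolutionEnabled[RESOLUTION_VESA] |= (1u << 1);
    // The "native" choice is packed into a single byte as
    //     (index << 3) | type     // e.g. (5 << 3) | RESOLUTION_CEA == 0x28
    // which is exactly the leading "28" of the hard-coded 720p30
    // wfd_video_formats strings that this patch series replaces.)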
+ mSinkSupportedVideoFormats.disableAll(); + + mSinkSupportedVideoFormats.setNativeResolution( + VideoFormats::RESOLUTION_VESA, 1); // 800 x 600 p60 +#endif } WifiDisplaySink::~WifiDisplaySink() { @@ -123,6 +138,8 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { switch (msg->what()) { case kWhatStart: { + sleep(2); // XXX + int32_t sourcePort; if (msg->findString("setupURI", &mSetupURI)) { @@ -176,7 +193,13 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { mNetSession->destroySession(mSessionID); mSessionID = 0; - looper()->stop(); + if (mNotify == NULL) { + looper()->stop(); + } else { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatDisconnected); + notify->post(); + } } break; } @@ -227,6 +250,18 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { break; } + case kWhatRequestIDRFrame: + { + ALOGI("requesting IDR frame"); + sendIDRFrameRequest(mSessionID); + break; + } + + case kWhatRTPSinkNotify: + { + break; + } + default: TRESPASS(); } @@ -392,6 +427,11 @@ status_t WifiDisplaySink::onReceivePlayResponse( return OK; } +status_t WifiDisplaySink::onReceiveIDRFrameRequestResponse( + int32_t sessionID, const sp &msg) { + return OK; +} + void WifiDisplaySink::onReceiveClientData(const sp &msg) { int32_t sessionID; CHECK(msg->findInt32("sessionID", &sessionID)); @@ -474,11 +514,11 @@ void WifiDisplaySink::onGetParameterRequest( int32_t sessionID, int32_t cseq, const sp &data) { - AString body = - "wfd_video_formats: " - "28 00 02 02 FFFFFFFF 0000000 00000000 00 0000 0000 00 none none\r\n" - "wfd_audio_codecs: AAC 0000000F 00\r\n" - "wfd_client_rtp_ports: RTP/AVP/UDP;unicast 19000 0 mode=play\r\n"; + AString body = "wfd_video_formats: "; + body.append(mSinkSupportedVideoFormats.getFormatSpec()); + body.append( + "\r\nwfd_audio_codecs: AAC 0000000F 00\r\n" + "wfd_client_rtp_ports: RTP/AVP/UDP;unicast 19000 0 mode=play\r\n"); AString response = "RTSP/1.0 200 OK\r\n"; AppendCommonResponse(&response, cseq); @@ -517,7 +557,9 @@ status_t WifiDisplaySink::sendDescribe(int32_t sessionID, const char *uri) { } status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) { - mRTPSink = new RTPSink(mNetSession, mSurfaceTex); + sp notify = new AMessage(kWhatRTPSinkNotify, id()); + + mRTPSink = new RTPSink(mNetSession, mSurfaceTex, notify); looper()->registerHandler(mRTPSink); status_t err = mRTPSink->init(sUseTCPInterleaving); @@ -584,6 +626,35 @@ status_t WifiDisplaySink::sendPlay(int32_t sessionID, const char *uri) { return OK; } +status_t WifiDisplaySink::sendIDRFrameRequest(int32_t sessionID) { + AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n"; + + AppendCommonResponse(&request, mNextCSeq); + + AString content = "wfd_idr_request\r\n"; + + request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str())); + request.append(StringPrintf("Content-Length: %d\r\n", content.size())); + request.append("\r\n"); + request.append(content); + + status_t err = + mNetSession->sendRequest(sessionID, request.c_str(), request.size()); + + if (err != OK) { + return err; + } + + registerResponseHandler( + sessionID, + mNextCSeq, + &WifiDisplaySink::onReceiveIDRFrameRequestResponse); + + ++mNextCSeq; + + return OK; +} + void WifiDisplaySink::onSetParameterRequest( int32_t sessionID, int32_t cseq, diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h index a508839..5f86519 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ 
b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h @@ -20,11 +20,14 @@ #include "ANetworkSession.h" +#include "VideoFormats.h" + #include #include namespace android { +struct AMessage; struct ParsedMessage; struct RTPSink; @@ -32,9 +35,18 @@ struct RTPSink; // Connects to a wifi display source and renders the incoming // transport stream using a MediaPlayer instance. struct WifiDisplaySink : public AHandler { + enum { + kWhatDisconnected, + }; + + // If no notification message is specified (notify == NULL) + // the sink will stop its looper() once the session ends, + // otherwise it will post an appropriate notification but leave + // the looper() running. WifiDisplaySink( const sp &netSession, - const sp &bufferProducer = NULL); + const sp &bufferProducer = NULL, + const sp ¬ify = NULL); void start(const char *sourceHost, int32_t sourcePort); void start(const char *uri); @@ -56,6 +68,8 @@ private: kWhatStart, kWhatRTSPNotify, kWhatStop, + kWhatRequestIDRFrame, + kWhatRTPSinkNotify, }; struct ResponseID { @@ -75,8 +89,10 @@ private: static const bool sUseTCPInterleaving = false; State mState; + VideoFormats mSinkSupportedVideoFormats; sp mNetSession; sp mSurfaceTex; + sp mNotify; AString mSetupURI; AString mRTSPHost; int32_t mSessionID; @@ -93,6 +109,7 @@ private: status_t sendDescribe(int32_t sessionID, const char *uri); status_t sendSetup(int32_t sessionID, const char *uri); status_t sendPlay(int32_t sessionID, const char *uri); + status_t sendIDRFrameRequest(int32_t sessionID); status_t onReceiveM2Response( int32_t sessionID, const sp &msg); @@ -108,6 +125,9 @@ private: status_t onReceivePlayResponse( int32_t sessionID, const sp &msg); + status_t onReceiveIDRFrameRequestResponse( + int32_t sessionID, const sp &msg); + void registerResponseHandler( int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func); diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index d6b87a7..91dc1fa 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -346,8 +346,17 @@ WifiDisplaySource::PlaybackSession::PlaybackSession( status_t WifiDisplaySource::PlaybackSession::init( const char *clientIP, int32_t clientRtp, int32_t clientRtcp, Sender::TransportMode transportMode, - bool usePCMAudio) { - status_t err = setupPacketizer(usePCMAudio); + bool enableAudio, + bool usePCMAudio, + bool enableVideo, + VideoFormats::ResolutionType videoResolutionType, + size_t videoResolutionIndex) { + status_t err = setupPacketizer( + enableAudio, + usePCMAudio, + enableVideo, + videoResolutionType, + videoResolutionIndex); if (err != OK) { return err; @@ -639,13 +648,27 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( } } -status_t WifiDisplaySource::PlaybackSession::setupPacketizer(bool usePCMAudio) { +status_t WifiDisplaySource::PlaybackSession::setupPacketizer( + bool enableAudio, + bool usePCMAudio, + bool enableVideo, + VideoFormats::ResolutionType videoResolutionType, + size_t videoResolutionIndex) { + CHECK(enableAudio || enableVideo); + mPacketizer = new TSPacketizer; - status_t err = addVideoSource(); + if (enableVideo) { + status_t err = addVideoSource( + videoResolutionType, videoResolutionIndex); - if (err != OK) { - return err; + if (err != OK) { + return err; + } + } + + if (!enableAudio) { + return OK; } return addAudioSource(usePCMAudio); @@ -735,27 +758,30 @@ status_t WifiDisplaySource::PlaybackSession::addSource( 
return OK; } -status_t WifiDisplaySource::PlaybackSession::addVideoSource() { - sp source = new SurfaceMediaSource(width(), height()); +status_t WifiDisplaySource::PlaybackSession::addVideoSource( + VideoFormats::ResolutionType videoResolutionType, + size_t videoResolutionIndex) { + size_t width, height, framesPerSecond; + bool interlaced; + CHECK(VideoFormats::GetConfiguration( + videoResolutionType, + videoResolutionIndex, + &width, + &height, + &framesPerSecond, + &interlaced)); + + sp source = new SurfaceMediaSource(width, height); source->setUseAbsoluteTimestamps(); -#if 1 sp videoSource = - new RepeaterSource(source, 30.0 /* rateHz */); -#endif + new RepeaterSource(source, framesPerSecond); -#if 1 size_t numInputBuffers; status_t err = addSource( true /* isVideo */, videoSource, true /* isRepeaterSource */, false /* usePCMAudio */, &numInputBuffers); -#else - size_t numInputBuffers; - status_t err = addSource( - true /* isVideo */, source, false /* isRepeaterSource */, - false /* usePCMAudio */, &numInputBuffers); -#endif if (err != OK) { return err; @@ -790,22 +816,6 @@ sp WifiDisplaySource::PlaybackSession::getSurfaceTexture return mBufferQueue; } -int32_t WifiDisplaySource::PlaybackSession::width() const { -#if USE_1080P - return 1920; -#else - return 1280; -#endif -} - -int32_t WifiDisplaySource::PlaybackSession::height() const { -#if USE_1080P - return 1080; -#else - return 720; -#endif -} - void WifiDisplaySource::PlaybackSession::requestIDRFrame() { for (size_t i = 0; i < mTracks.size(); ++i) { const sp &track = mTracks.valueAt(i); diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h index 281548d..7365c78 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.h +++ b/media/libstagefright/wifi-display/source/PlaybackSession.h @@ -19,6 +19,7 @@ #define PLAYBACK_SESSION_H_ #include "Sender.h" +#include "VideoFormats.h" #include "WifiDisplaySource.h" namespace android { @@ -43,7 +44,11 @@ struct WifiDisplaySource::PlaybackSession : public AHandler { status_t init( const char *clientIP, int32_t clientRtp, int32_t clientRtcp, Sender::TransportMode transportMode, - bool usePCMAudio); + bool enableAudio, + bool usePCMAudio, + bool enableVideo, + VideoFormats::ResolutionType videoResolutionType, + size_t videoResolutionIndex); void destroyAsync(); @@ -57,8 +62,6 @@ struct WifiDisplaySource::PlaybackSession : public AHandler { status_t pause(); sp getSurfaceTexture(); - int32_t width() const; - int32_t height() const; void requestIDRFrame(); @@ -109,7 +112,12 @@ private: bool mAllTracksHavePacketizerIndex; - status_t setupPacketizer(bool usePCMAudio); + status_t setupPacketizer( + bool enableAudio, + bool usePCMAudio, + bool enableVideo, + VideoFormats::ResolutionType videoResolutionType, + size_t videoResolutionIndex); status_t addSource( bool isVideo, @@ -118,7 +126,10 @@ private: bool usePCMAudio, size_t *numInputBuffers); - status_t addVideoSource(); + status_t addVideoSource( + VideoFormats::ResolutionType videoResolutionType, + size_t videoResolutionIndex); + status_t addAudioSource(bool usePCMAudio); ssize_t appendTSData( diff --git a/media/libstagefright/wifi-display/source/Sender.cpp b/media/libstagefright/wifi-display/source/Sender.cpp index 9048691..8b7d93f 100644 --- a/media/libstagefright/wifi-display/source/Sender.cpp +++ b/media/libstagefright/wifi-display/source/Sender.cpp @@ -685,7 +685,15 @@ status_t Sender::parseTSFB( if (!foundSeqNo || blp != 0) { ALOGI("Some sequence 
numbers were no longer available for " - "retransmission"); + "retransmission (seqNo = %d, foundSeqNo = %d, blp = 0x%04x)", + seqNo, foundSeqNo, blp); + + if (!mHistory.empty()) { + int32_t earliest = (*mHistory.begin())->int32Data() & 0xffff; + int32_t latest = (*--mHistory.end())->int32Data() & 0xffff; + + ALOGI("have seq numbers from %d - %d", earliest, latest); + } } } diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 9ec1064..0fed19b 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -58,8 +58,19 @@ WifiDisplaySource::WifiDisplaySource( mIsHDCP2_0(false), mHDCPPort(0), mHDCPInitializationComplete(false), - mSetupTriggerDeferred(false) -{ + mSetupTriggerDeferred(false) { + mSupportedSourceVideoFormats.enableAll(); + + mSupportedSourceVideoFormats.setNativeResolution( + VideoFormats::RESOLUTION_CEA, 5); // 1280x720 p30 + + // Disable resolutions above 1080p since the encoder won't be able to + // handle them. + mSupportedSourceVideoFormats.setResolutionEnabled( + VideoFormats::RESOLUTION_VESA, 28, false); // 1920x1200 p30 + + mSupportedSourceVideoFormats.setResolutionEnabled( + VideoFormats::RESOLUTION_VESA, 29, false); // 1920x1200 p60 } WifiDisplaySource::~WifiDisplaySource() { @@ -375,13 +386,33 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { IRemoteDisplayClient::kDisplayErrorUnknown); } else if (what == PlaybackSession::kWhatSessionEstablished) { if (mClient != NULL) { - mClient->onDisplayConnected( - mClientInfo.mPlaybackSession->getSurfaceTexture(), - mClientInfo.mPlaybackSession->width(), - mClientInfo.mPlaybackSession->height(), - mUsingHDCP - ? IRemoteDisplayClient::kDisplayFlagSecure - : 0); + if (!mSinkSupportsVideo) { + mClient->onDisplayConnected( + NULL, // SurfaceTexture + 0, // width, + 0, // height, + mUsingHDCP + ? IRemoteDisplayClient::kDisplayFlagSecure + : 0); + } else { + size_t width, height; + + CHECK(VideoFormats::GetConfiguration( + mChosenVideoResolutionType, + mChosenVideoResolutionIndex, + &width, + &height, + NULL /* framesPerSecond */, + NULL /* interlaced */)); + + mClient->onDisplayConnected( + mClientInfo.mPlaybackSession->getSurfaceTexture(), + width, + height, + mUsingHDCP + ? 
IRemoteDisplayClient::kDisplayFlagSecure + : 0); + } } if (mState == ABOUT_TO_PLAY) { @@ -564,22 +595,6 @@ status_t WifiDisplaySource::sendM3(int32_t sessionID) { } status_t WifiDisplaySource::sendM4(int32_t sessionID) { - // wfd_video_formats: - // 1 byte "native" - // 1 byte "preferred-display-mode-supported" 0 or 1 - // one or more avc codec structures - // 1 byte profile - // 1 byte level - // 4 byte CEA mask - // 4 byte VESA mask - // 4 byte HH mask - // 1 byte latency - // 2 byte min-slice-slice - // 2 byte slice-enc-params - // 1 byte framerate-control-support - // max-hres (none or 2 byte) - // max-vres (none or 2 byte) - CHECK_EQ(sessionID, mClientSessionID); AString transportString = "UDP"; @@ -591,28 +606,35 @@ status_t WifiDisplaySource::sendM4(int32_t sessionID) { transportString = "TCP"; } - // For 720p60: - // use "30 00 02 02 00000040 00000000 00000000 00 0000 0000 00 none none\r\n" - // For 720p30: - // use "28 00 02 02 00000020 00000000 00000000 00 0000 0000 00 none none\r\n" - // For 720p24: - // use "78 00 02 02 00008000 00000000 00000000 00 0000 0000 00 none none\r\n" - // For 1080p30: - // use "38 00 02 02 00000080 00000000 00000000 00 0000 0000 00 none none\r\n" - AString body = StringPrintf( - "wfd_video_formats: " -#if USE_1080P - "38 00 02 02 00000080 00000000 00000000 00 0000 0000 00 none none\r\n" -#else - "28 00 02 02 00000020 00000000 00000000 00 0000 0000 00 none none\r\n" -#endif - "wfd_audio_codecs: %s\r\n" - "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n" - "wfd_client_rtp_ports: RTP/AVP/%s;unicast %d 0 mode=play\r\n", - (mUsingPCMAudio - ? "LPCM 00000002 00" // 2 ch PCM 48kHz - : "AAC 00000001 00"), // 2 ch AAC 48kHz - mClientInfo.mLocalIP.c_str(), transportString.c_str(), mChosenRTPPort); + AString body; + + if (mSinkSupportsVideo) { + body.append("wfd_video_formats: "); + + VideoFormats chosenVideoFormat; + chosenVideoFormat.disableAll(); + chosenVideoFormat.setNativeResolution( + mChosenVideoResolutionType, mChosenVideoResolutionIndex); + + body.append(chosenVideoFormat.getFormatSpec()); + body.append("\r\n"); + } + + if (mSinkSupportsAudio) { + body.append( + StringPrintf("wfd_audio_codecs: %s\r\n", + (mUsingPCMAudio + ? 
"LPCM 00000002 00" // 2 ch PCM 48kHz + : "AAC 00000001 00"))); // 2 ch AAC 48kHz + } + + body.append( + StringPrintf( + "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n" + "wfd_client_rtp_ports: RTP/AVP/%s;unicast %d 0 mode=play\r\n", + mClientInfo.mLocalIP.c_str(), + transportString.c_str(), + mChosenRTPPort)); AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n"; AppendCommonResponse(&request, mNextCSeq); @@ -789,39 +811,90 @@ status_t WifiDisplaySource::onReceiveM3Response( mChosenRTPPort = port0; + if (!params->findParameter("wfd_video_formats", &value)) { + ALOGE("Sink doesn't report its choice of wfd_video_formats."); + return ERROR_MALFORMED; + } + + mSinkSupportsVideo = false; + + if (!(value == "none")) { + mSinkSupportsVideo = true; + if (!mSupportedSinkVideoFormats.parseFormatSpec(value.c_str())) { + ALOGE("Failed to parse sink provided wfd_video_formats (%s)", + value.c_str()); + + return ERROR_MALFORMED; + } + + if (!VideoFormats::PickBestFormat( + mSupportedSinkVideoFormats, + mSupportedSourceVideoFormats, + &mChosenVideoResolutionType, + &mChosenVideoResolutionIndex)) { + ALOGE("Sink and source share no commonly supported video " + "formats."); + + return ERROR_UNSUPPORTED; + } + + size_t width, height, framesPerSecond; + bool interlaced; + CHECK(VideoFormats::GetConfiguration( + mChosenVideoResolutionType, + mChosenVideoResolutionIndex, + &width, + &height, + &framesPerSecond, + &interlaced)); + + ALOGI("Picked video resolution %u x %u %c%u", + width, height, interlaced ? 'i' : 'p', framesPerSecond); + } else { + ALOGI("Sink doesn't support video at all."); + } + if (!params->findParameter("wfd_audio_codecs", &value)) { ALOGE("Sink doesn't report its choice of wfd_audio_codecs."); return ERROR_MALFORMED; } - if (value == "none") { - ALOGE("Sink doesn't support audio at all."); - return ERROR_UNSUPPORTED; - } + mSinkSupportsAudio = false; - uint32_t modes; - GetAudioModes(value.c_str(), "AAC", &modes); + if (!(value == "none")) { + mSinkSupportsAudio = true; - bool supportsAAC = (modes & 1) != 0; // AAC 2ch 48kHz + uint32_t modes; + GetAudioModes(value.c_str(), "AAC", &modes); - GetAudioModes(value.c_str(), "LPCM", &modes); + bool supportsAAC = (modes & 1) != 0; // AAC 2ch 48kHz - bool supportsPCM = (modes & 2) != 0; // LPCM 2ch 48kHz + GetAudioModes(value.c_str(), "LPCM", &modes); - char val[PROPERTY_VALUE_MAX]; - if (supportsPCM - && property_get("media.wfd.use-pcm-audio", val, NULL) - && (!strcasecmp("true", val) || !strcmp("1", val))) { - ALOGI("Using PCM audio."); - mUsingPCMAudio = true; - } else if (supportsAAC) { - ALOGI("Using AAC audio."); - mUsingPCMAudio = false; - } else if (supportsPCM) { - ALOGI("Using PCM audio."); - mUsingPCMAudio = true; + bool supportsPCM = (modes & 2) != 0; // LPCM 2ch 48kHz + + char val[PROPERTY_VALUE_MAX]; + if (supportsPCM + && property_get("media.wfd.use-pcm-audio", val, NULL) + && (!strcasecmp("true", val) || !strcmp("1", val))) { + ALOGI("Using PCM audio."); + mUsingPCMAudio = true; + } else if (supportsAAC) { + ALOGI("Using AAC audio."); + mUsingPCMAudio = false; + } else if (supportsPCM) { + ALOGI("Using PCM audio."); + mUsingPCMAudio = true; + } else { + ALOGI("Sink doesn't support an audio format we do."); + return ERROR_UNSUPPORTED; + } } else { - ALOGI("Sink doesn't support an audio format we do."); + ALOGI("Sink doesn't support audio at all."); + } + + if (!mSinkSupportsVideo && !mSinkSupportsAudio) { + ALOGE("Sink supports neither video nor audio..."); return ERROR_UNSUPPORTED; } @@ -1160,7 
+1233,11 @@ status_t WifiDisplaySource::onSetupRequest( clientRtp, clientRtcp, transportMode, - mUsingPCMAudio); + mSinkSupportsAudio, + mUsingPCMAudio, + mSinkSupportsVideo, + mChosenVideoResolutionType, + mChosenVideoResolutionIndex); if (err != OK) { looper()->unregisterHandler(playbackSession->id()); diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index 974e070..fec2c6d 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ -19,6 +19,7 @@ #define WIFI_DISPLAY_SOURCE_H_ #include "ANetworkSession.h" +#include "VideoFormats.h" #include @@ -26,8 +27,6 @@ namespace android { -#define USE_1080P 0 - struct IHDCP; struct IRemoteDisplayClient; struct ParsedMessage; @@ -112,6 +111,7 @@ private: kPlaybackSessionTimeoutSecs * 1000000ll; State mState; + VideoFormats mSupportedSourceVideoFormats; sp mNetSession; sp mClient; struct in_addr mInterfaceAddr; @@ -121,6 +121,14 @@ private: int32_t mChosenRTPPort; // extracted from "wfd_client_rtp_ports" + bool mSinkSupportsVideo; + VideoFormats mSupportedSinkVideoFormats; + + VideoFormats::ResolutionType mChosenVideoResolutionType; + size_t mChosenVideoResolutionIndex; + + bool mSinkSupportsAudio; + bool mUsingPCMAudio; int32_t mClientSessionID; diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index 2ec9b4f..be9e35e 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -30,6 +30,7 @@ #include #include #include +#include namespace android { -- cgit v1.1 From a6a88d9c445e261972c2433254e0a996336e78a4 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 30 Jan 2013 10:41:25 -0800 Subject: Plumbing to reflect minor changes in the HDCP module API that allow for support of _decryption_ modules in addition to what we already supported. Change-Id: Ic37b87dc170ba8def3817991d25df798f21e950b --- include/media/IHDCP.h | 24 +++++++++-- include/media/IMediaPlayerService.h | 2 +- media/libmedia/IHDCP.cpp | 49 ++++++++++++++++++++++ media/libmedia/IMediaPlayerService.cpp | 6 ++- media/libmediaplayerservice/HDCP.cpp | 26 ++++++++++-- media/libmediaplayerservice/HDCP.h | 8 +++- media/libmediaplayerservice/MediaPlayerService.cpp | 4 +- media/libmediaplayerservice/MediaPlayerService.h | 2 +- .../wifi-display/source/WifiDisplaySource.cpp | 2 +- 9 files changed, 108 insertions(+), 15 deletions(-) diff --git a/include/media/IHDCP.h b/include/media/IHDCP.h index a0613c7..6d27b18 100644 --- a/include/media/IHDCP.h +++ b/include/media/IHDCP.h @@ -45,18 +45,34 @@ struct IHDCP : public IInterface { // Request to shutdown the active HDCP session. virtual status_t shutdownAsync() = 0; - // Encrypt a data according to the HDCP spec. The data is to be - // encrypted in-place, only size bytes of data should be read/write, - // even if the size is not a multiple of 128 bit (16 bytes). + // ENCRYPTION only: + // Encrypt data according to the HDCP spec. "size" bytes of data are + // available at "inData" (virtual address), "size" may not be a multiple + // of 128 bits (16 bytes). An equal number of encrypted bytes should be + // written to the buffer at "outData" (virtual address). // This operation is to be synchronous, i.e. this call does not return // until outData contains size bytes of encrypted data. 
// streamCTR will be assigned by the caller (to 0 for the first PES stream, // 1 for the second and so on) - // inputCTR will be maintained by the callee for each PES stream. + // inputCTR _will_be_maintained_by_the_callee_ for each PES stream. virtual status_t encrypt( const void *inData, size_t size, uint32_t streamCTR, uint64_t *outInputCTR, void *outData) = 0; + // DECRYPTION only: + // Decrypt data according to the HDCP spec. + // "size" bytes of encrypted data are available at "inData" + // (virtual address), "size" may not be a multiple of 128 bits (16 bytes). + // An equal number of decrypted bytes should be written to the buffer + // at "outData" (virtual address). + // This operation is to be synchronous, i.e. this call does not return + // until outData contains size bytes of decrypted data. + // Both streamCTR and inputCTR will be provided by the caller. + virtual status_t decrypt( + const void *inData, size_t size, + uint32_t streamCTR, uint64_t inputCTR, + void *outData) = 0; + private: DISALLOW_EVIL_CONSTRUCTORS(IHDCP); }; diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h index 44db5bc..b29d3c7 100644 --- a/include/media/IMediaPlayerService.h +++ b/include/media/IMediaPlayerService.h @@ -52,7 +52,7 @@ public: virtual sp decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) = 0; virtual sp getOMX() = 0; virtual sp makeCrypto() = 0; - virtual sp makeHDCP() = 0; + virtual sp makeHDCP(bool createEncryptionModule) = 0; // Connects to a remote display. // 'iface' specifies the address of the local interface on which to listen for diff --git a/media/libmedia/IHDCP.cpp b/media/libmedia/IHDCP.cpp index 493f5a4..f13addc 100644 --- a/media/libmedia/IHDCP.cpp +++ b/media/libmedia/IHDCP.cpp @@ -31,6 +31,7 @@ enum { HDCP_INIT_ASYNC, HDCP_SHUTDOWN_ASYNC, HDCP_ENCRYPT, + HDCP_DECRYPT, }; struct BpHDCPObserver : public BpInterface { @@ -106,6 +107,29 @@ struct BpHDCP : public BpInterface { return err; } + + virtual status_t decrypt( + const void *inData, size_t size, + uint32_t streamCTR, uint64_t inputCTR, + void *outData) { + Parcel data, reply; + data.writeInterfaceToken(IHDCP::getInterfaceDescriptor()); + data.writeInt32(size); + data.write(inData, size); + data.writeInt32(streamCTR); + data.writeInt64(inputCTR); + remote()->transact(HDCP_DECRYPT, data, &reply); + + status_t err = reply.readInt32(); + + if (err != OK) { + return err; + } + + reply.read(outData, size); + + return err; + } }; IMPLEMENT_META_INTERFACE(HDCP, "android.hardware.IHDCP"); @@ -198,6 +222,31 @@ status_t BnHDCP::onTransact( return OK; } + case HDCP_DECRYPT: + { + size_t size = data.readInt32(); + + void *inData = malloc(2 * size); + void *outData = (uint8_t *)inData + size; + + data.read(inData, size); + + uint32_t streamCTR = data.readInt32(); + uint64_t inputCTR = data.readInt64(); + status_t err = decrypt(inData, size, streamCTR, inputCTR, outData); + + reply->writeInt32(err); + + if (err == OK) { + reply->write(outData, size); + } + + free(inData); + inData = outData = NULL; + + return OK; + } + default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp index ae76c10..a95f4c9 100644 --- a/media/libmedia/IMediaPlayerService.cpp +++ b/media/libmedia/IMediaPlayerService.cpp @@ -123,9 +123,10 @@ public: return interface_cast(reply.readStrongBinder()); } - virtual sp makeHDCP() { + virtual sp makeHDCP(bool 
createEncryptionModule) { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); + data.writeInt32(createEncryptionModule); remote()->transact(MAKE_HDCP, data, &reply); return interface_cast(reply.readStrongBinder()); } @@ -226,7 +227,8 @@ status_t BnMediaPlayerService::onTransact( } break; case MAKE_HDCP: { CHECK_INTERFACE(IMediaPlayerService, data, reply); - sp hdcp = makeHDCP(); + bool createEncryptionModule = data.readInt32(); + sp hdcp = makeHDCP(createEncryptionModule); reply->writeStrongBinder(hdcp->asBinder()); return NO_ERROR; } break; diff --git a/media/libmediaplayerservice/HDCP.cpp b/media/libmediaplayerservice/HDCP.cpp index 09b9719..469a02e 100644 --- a/media/libmediaplayerservice/HDCP.cpp +++ b/media/libmediaplayerservice/HDCP.cpp @@ -26,8 +26,9 @@ namespace android { -HDCP::HDCP() - : mLibHandle(NULL), +HDCP::HDCP(bool createEncryptionModule) + : mIsEncryptionModule(createEncryptionModule), + mLibHandle(NULL), mHDCPModule(NULL) { mLibHandle = dlopen("libstagefright_hdcp.so", RTLD_NOW); @@ -40,7 +41,10 @@ HDCP::HDCP() void *, HDCPModule::ObserverFunc); CreateHDCPModuleFunc createHDCPModule = - (CreateHDCPModuleFunc)dlsym(mLibHandle, "createHDCPModule"); + mIsEncryptionModule + ? (CreateHDCPModuleFunc)dlsym(mLibHandle, "createHDCPModule") + : (CreateHDCPModuleFunc)dlsym( + mLibHandle, "createHDCPModuleForDecryption"); if (createHDCPModule == NULL) { ALOGE("Unable to find symbol 'createHDCPModule'."); @@ -101,6 +105,8 @@ status_t HDCP::encrypt( uint64_t *outInputCTR, void *outData) { Mutex::Autolock autoLock(mLock); + CHECK(mIsEncryptionModule); + if (mHDCPModule == NULL) { *outInputCTR = 0; @@ -110,6 +116,20 @@ status_t HDCP::encrypt( return mHDCPModule->encrypt(inData, size, streamCTR, outInputCTR, outData); } +status_t HDCP::decrypt( + const void *inData, size_t size, + uint32_t streamCTR, uint64_t outInputCTR, void *outData) { + Mutex::Autolock autoLock(mLock); + + CHECK(!mIsEncryptionModule); + + if (mHDCPModule == NULL) { + return NO_INIT; + } + + return mHDCPModule->decrypt(inData, size, streamCTR, outInputCTR, outData); +} + // static void HDCP::ObserveWrapper(void *me, int msg, int ext1, int ext2) { static_cast(me)->observe(msg, ext1, ext2); diff --git a/media/libmediaplayerservice/HDCP.h b/media/libmediaplayerservice/HDCP.h index b2fc457..42e6467 100644 --- a/media/libmediaplayerservice/HDCP.h +++ b/media/libmediaplayerservice/HDCP.h @@ -24,7 +24,7 @@ namespace android { struct HDCP : public BnHDCP { - HDCP(); + HDCP(bool createEncryptionModule); virtual ~HDCP(); virtual status_t setObserver(const sp &observer); @@ -35,9 +35,15 @@ struct HDCP : public BnHDCP { const void *inData, size_t size, uint32_t streamCTR, uint64_t *outInputCTR, void *outData); + virtual status_t decrypt( + const void *inData, size_t size, + uint32_t streamCTR, uint64_t outInputCTR, void *outData); + private: Mutex mLock; + bool mIsEncryptionModule; + void *mLibHandle; HDCPModule *mHDCPModule; sp mObserver; diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index 749f48c..f932131 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -285,8 +285,8 @@ sp MediaPlayerService::makeCrypto() { return new Crypto; } -sp MediaPlayerService::makeHDCP() { - return new HDCP; +sp MediaPlayerService::makeHDCP(bool createEncryptionModule) { + return new HDCP(createEncryptionModule); } sp MediaPlayerService::listenForRemoteDisplay( diff 
--git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index d2d8939..2d2a09d 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -249,7 +249,7 @@ public: virtual sp decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat); virtual sp getOMX(); virtual sp makeCrypto(); - virtual sp makeHDCP(); + virtual sp makeHDCP(bool createEncryptionModule); virtual sp listenForRemoteDisplay(const sp& client, const String8& iface); diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 0fed19b..981d5f9 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -1637,7 +1637,7 @@ status_t WifiDisplaySource::makeHDCP() { sp service = interface_cast(binder); CHECK(service != NULL); - mHDCP = service->makeHDCP(); + mHDCP = service->makeHDCP(true /* createEncryptionModule */); if (mHDCP == NULL) { return ERROR_UNSUPPORTED; -- cgit v1.1 From 0fcdb7271e1a25bc501ead6093ab1ae2667fdd47 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 30 Jan 2013 11:35:39 -0800 Subject: Actually display something on screen when instantiating a wfd sink via the wfd commandline tool. Change-Id: I26466efb95a5837a14d77d6581e0777038d31d95 --- .../wifi-display/sink/DirectRenderer.cpp | 3 +- media/libstagefright/wifi-display/wfd.cpp | 38 +++++++++++++++++++++- 2 files changed, 39 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp index 8120634..93430eb 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp @@ -270,7 +270,8 @@ void DirectRenderer::dequeueAccessUnits() { err = mVideoDecoder->configure( videoFormat, - new SurfaceTextureClient(mSurfaceTex), + mSurfaceTex == NULL + ? 
NULL : new SurfaceTextureClient(mSurfaceTex), NULL /* crypto */, 0 /* flags */); diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index be9e35e..21d661e 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -23,6 +23,7 @@ #include #include +#include #include #include #include @@ -31,6 +32,7 @@ #include #include #include +#include namespace android { @@ -282,12 +284,44 @@ int main(int argc, char **argv) { exit(1); } + sp composerClient = new SurfaceComposerClient; + CHECK_EQ(composerClient->initCheck(), (status_t)OK); + + sp display(SurfaceComposerClient::getBuiltInDisplay( + ISurfaceComposer::eDisplayIdMain)); + DisplayInfo info; + SurfaceComposerClient::getDisplayInfo(display, &info); + ssize_t displayWidth = info.w; + ssize_t displayHeight = info.h; + + ALOGV("display is %d x %d\n", displayWidth, displayHeight); + + sp control = + composerClient->createSurface( + String8("A Surface"), + displayWidth, + displayHeight, + PIXEL_FORMAT_RGB_565, + 0); + + CHECK(control != NULL); + CHECK(control->isValid()); + + SurfaceComposerClient::openGlobalTransaction(); + CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK); + CHECK_EQ(control->show(), (status_t)OK); + SurfaceComposerClient::closeGlobalTransaction(); + + sp surface = control->getSurface(); + CHECK(surface != NULL); + sp session = new ANetworkSession; session->start(); sp looper = new ALooper; - sp sink = new WifiDisplaySink(session); + sp sink = new WifiDisplaySink( + session, surface->getSurfaceTexture()); looper->registerHandler(sink); if (connectToPort >= 0) { @@ -298,5 +332,7 @@ int main(int argc, char **argv) { looper->start(true /* runOnCallingThread */); + composerClient->dispose(); + return 0; } -- cgit v1.1 From aa65ddb06862fa542c9ec8d556bd3e01bf4c32b2 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 31 Jan 2013 15:25:07 -0800 Subject: Fix typo and reduce allocation overhead of RTP retransmission requests. 
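For reference, the 16-byte buffer assembled in RTPSink::onPacketLost() is an RTCP transport-layer feedback message (generic NACK: FMT 1, payload type 205, with a length field of 3, i.e. four 32-bit words minus one). A sketch of that layout with illustrative field names, not taken from the patch:

#include <stdint.h>

// Generic NACK as built byte-by-byte in onPacketLost(); all fields go out
// in network (big-endian) byte order on the wire.
struct GenericNack {
    uint8_t  versionAndFmt;  // 0x80 | 1  -> RTP version 2, FMT 1 (generic NACK)
    uint8_t  payloadType;    // 205       -> transport-layer feedback
    uint16_t length;         // 3         -> (16 bytes / 4) - 1
    uint32_t senderSSRC;
    uint32_t mediaSSRC;
    uint16_t pid;            // first missing RTP sequence number
    uint16_t blp;            // bitmask for the 16 sequence numbers after pid
};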
Change-Id: I402a195da5dfeceadb4d073888ee7702c5532dc8 --- media/libstagefright/wifi-display/sink/RTPSink.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/wifi-display/sink/RTPSink.cpp b/media/libstagefright/wifi-display/sink/RTPSink.cpp index ad75373..7f4b66f 100644 --- a/media/libstagefright/wifi-display/sink/RTPSink.cpp +++ b/media/libstagefright/wifi-display/sink/RTPSink.cpp @@ -787,12 +787,12 @@ void RTPSink::onPacketLost(const sp &msg) { int32_t blp = 0; - sp buf = new ABuffer(1500); + sp buf = new ABuffer(16); buf->setRange(0, 0); uint8_t *ptr = buf->data(); ptr[0] = 0x80 | 1; // generic NACK - ptr[1] = 205; // RTPFB + ptr[1] = 205; // TSFB ptr[2] = 0; ptr[3] = 3; ptr[4] = 0xde; // sender SSRC -- cgit v1.1 From 9e58b552f51b00b3b674102876bd6c77ef3da806 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 18 Jan 2013 15:09:48 -0800 Subject: AudioFlinger uses media.log service for logging Change-Id: Ia0f8204334f6b233f644d897762a18c95d936b4b --- services/audioflinger/AudioFlinger.cpp | 47 ++++++++++++++++++++++++++++++++ services/audioflinger/AudioFlinger.h | 9 ++++++ services/audioflinger/FastMixer.cpp | 17 ++++++++++++ services/audioflinger/FastMixerState.cpp | 2 +- services/audioflinger/FastMixerState.h | 2 ++ services/audioflinger/Threads.cpp | 28 +++++++++++++++++++ services/audioflinger/Threads.h | 4 +++ services/audioflinger/Tracks.cpp | 4 +++ 8 files changed, 112 insertions(+), 1 deletion(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 5f5b041..52fa576 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -59,6 +59,8 @@ #include //#include +#include + // ---------------------------------------------------------------------------- // Note: the following macro is used for extremely verbose logging message. 
In @@ -127,6 +129,11 @@ AudioFlinger::AudioFlinger() mMode(AUDIO_MODE_INVALID), mBtNrecIsOff(false) { + char value[PROPERTY_VALUE_MAX]; + bool doLog = (property_get("ro.test_harness", value, "0") > 0) && (atoi(value) == 1); + if (doLog) { + mLogMemoryDealer = new MemoryDealer(kLogMemorySize, "LogWriters"); + } } void AudioFlinger::onFirstRef() @@ -323,6 +330,17 @@ status_t AudioFlinger::dump(int fd, const Vector& args) if (locked) { mLock.unlock(); } + + // append a copy of media.log here by forwarding fd to it, but don't attempt + // to lookup the service if it's not running, as it will block for a second + if (mLogMemoryDealer != 0) { + sp binder = defaultServiceManager()->getService(String16("media.log")); + if (binder != 0) { + fdprintf(fd, "\nmedia.log:\n"); + Vector args; + binder->dump(fd, args); + } + } } return NO_ERROR; } @@ -340,6 +358,35 @@ sp AudioFlinger::registerPid_l(pid_t pid) return client; } +sp AudioFlinger::newWriter_l(size_t size, const char *name) +{ + if (mLogMemoryDealer == 0) { + return new NBLog::Writer(); + } + sp shared = mLogMemoryDealer->allocate(NBLog::Timeline::sharedSize(size)); + sp writer = new NBLog::Writer(size, shared); + sp binder = defaultServiceManager()->getService(String16("media.log")); + if (binder != 0) { + interface_cast(binder)->registerWriter(shared, size, name); + } + return writer; +} + +void AudioFlinger::unregisterWriter(const sp& writer) +{ + sp iMemory(writer->getIMemory()); + if (iMemory == 0) { + return; + } + sp binder = defaultServiceManager()->getService(String16("media.log")); + if (binder != 0) { + interface_cast(binder)->unregisterWriter(iMemory); + // Now the media.log remote reference to IMemory is gone. + // When our last local reference to IMemory also drops to zero, + // the IMemory destructor will deallocate the region from mMemoryDealer. + } +} + // IAudioFlinger interface diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index a7f5b9e..c3f08f6 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -53,6 +53,8 @@ #include +#include + namespace android { class audio_track_cblk_t; @@ -222,6 +224,13 @@ public: // end of IAudioFlinger interface + sp newWriter_l(size_t size, const char *name); + void unregisterWriter(const sp& writer); +private: + static const size_t kLogMemorySize = 10 * 1024; + sp mLogMemoryDealer; // == 0 when NBLog is disabled +public: + class SyncEvent; typedef void (*sync_event_callback_t)(const wp& event) ; diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 9283f53..80e37ca 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -92,6 +92,7 @@ bool FastMixer::threadLoop() struct timespec measuredWarmupTs = {0, 0}; // how long did it take for warmup to complete uint32_t warmupCycles = 0; // counter of number of loop cycles required to warmup NBAIO_Sink* teeSink = NULL; // if non-NULL, then duplicate write() to this non-blocking sink + NBLog::Writer dummyLogWriter, *logWriter = &dummyLogWriter; for (;;) { @@ -119,9 +120,12 @@ bool FastMixer::threadLoop() FastMixerState::Command command = next->mCommand; if (next != current) { + logWriter->log("next != current"); + // As soon as possible of learning of a new dump area, start using it dumpState = next->mDumpState != NULL ? next->mDumpState : &dummyDumpState; teeSink = next->mTeeSink; + logWriter = next->mNBLogWriter != NULL ? 
next->mNBLogWriter : &dummyLogWriter; // We want to always have a valid reference to the previous (non-idle) state. // However, the state queue only guarantees access to current and previous states. @@ -163,6 +167,7 @@ bool FastMixer::threadLoop() ALOG_ASSERT(coldFutexAddr != NULL); int32_t old = android_atomic_dec(coldFutexAddr); if (old <= 0) { + logWriter->log("wait"); __futex_syscall4(coldFutexAddr, FUTEX_WAIT_PRIVATE, old - 1, NULL); } // This may be overly conservative; there could be times that the normal mixer @@ -181,6 +186,7 @@ bool FastMixer::threadLoop() } continue; case FastMixerState::EXIT: + logWriter->log("exit"); delete mixer; delete[] mixBuffer; return false; @@ -258,11 +264,15 @@ bool FastMixer::threadLoop() unsigned currentTrackMask = current->mTrackMask; dumpState->mTrackMask = currentTrackMask; if (current->mFastTracksGen != fastTracksGen) { + logWriter->logf("gen %d", current->mFastTracksGen); ALOG_ASSERT(mixBuffer != NULL); int name; // process removed tracks first to avoid running out of track names unsigned removedTracks = previousTrackMask & ~currentTrackMask; + if (removedTracks) { + logWriter->logf("removed %#x", removedTracks); + } while (removedTracks != 0) { i = __builtin_ctz(removedTracks); removedTracks &= ~(1 << i); @@ -282,6 +292,9 @@ bool FastMixer::threadLoop() // now process added tracks unsigned addedTracks = currentTrackMask & ~previousTrackMask; + if (addedTracks) { + logWriter->logf("added %#x", addedTracks); + } while (addedTracks != 0) { i = __builtin_ctz(addedTracks); addedTracks &= ~(1 << i); @@ -312,6 +325,9 @@ bool FastMixer::threadLoop() // finally process modified tracks; these use the same slot // but may have a different buffer provider or volume provider unsigned modifiedTracks = currentTrackMask & previousTrackMask; + if (modifiedTracks) { + logWriter->logf("modified %#x", modifiedTracks); + } while (modifiedTracks != 0) { i = __builtin_ctz(modifiedTracks); modifiedTracks &= ~(1 << i); @@ -455,6 +471,7 @@ bool FastMixer::threadLoop() struct timespec newTs; int rc = clock_gettime(CLOCK_MONOTONIC, &newTs); if (rc == 0) { + logWriter->logTimestamp(newTs); if (oldTsValid) { time_t sec = newTs.tv_sec - oldTs.tv_sec; long nsec = newTs.tv_nsec - oldTs.tv_nsec; diff --git a/services/audioflinger/FastMixerState.cpp b/services/audioflinger/FastMixerState.cpp index 6305a83..c45c81b 100644 --- a/services/audioflinger/FastMixerState.cpp +++ b/services/audioflinger/FastMixerState.cpp @@ -31,7 +31,7 @@ FastTrack::~FastTrack() FastMixerState::FastMixerState() : mFastTracksGen(0), mTrackMask(0), mOutputSink(NULL), mOutputSinkGen(0), mFrameCount(0), mCommand(INITIAL), mColdFutexAddr(NULL), mColdGen(0), - mDumpState(NULL), mTeeSink(NULL) + mDumpState(NULL), mTeeSink(NULL), mNBLogWriter(NULL) { } diff --git a/services/audioflinger/FastMixerState.h b/services/audioflinger/FastMixerState.h index 6e53f21..f6e7903 100644 --- a/services/audioflinger/FastMixerState.h +++ b/services/audioflinger/FastMixerState.h @@ -20,6 +20,7 @@ #include #include #include +#include namespace android { @@ -77,6 +78,7 @@ struct FastMixerState { // This might be a one-time configuration rather than per-state FastMixerDumpState* mDumpState; // if non-NULL, then update dump state periodically NBAIO_Sink* mTeeSink; // if non-NULL, then duplicate write()s to this non-blocking sink + NBLog::Writer* mNBLogWriter; // non-blocking logger }; // struct FastMixerState } // namespace android diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 
744a7df..6f748d7 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -936,6 +936,7 @@ AudioFlinger::PlaybackThread::PlaybackThread(const sp& audioFlinge mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1) { snprintf(mName, kNameLength, "AudioOut_%X", id); + mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mName); // Assumes constructor is called by AudioFlinger with it's mLock held, but // it would be safer to explicitly pass initial masterVolume/masterMute as @@ -971,6 +972,7 @@ AudioFlinger::PlaybackThread::PlaybackThread(const sp& audioFlinge AudioFlinger::PlaybackThread::~PlaybackThread() { + mAudioFlinger->unregisterWriter(mNBLogWriter); delete [] mMixBuffer; } @@ -1247,6 +1249,7 @@ Exit: if (status) { *status = lStatus; } + mNBLogWriter->logf("createTrack_l"); return track; } @@ -1314,6 +1317,7 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { + mNBLogWriter->logf("addTrack_l mName=%d", track->mName); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1347,6 +1351,7 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { + mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1356,6 +1361,7 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { + mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -1892,6 +1898,11 @@ bool AudioFlinger::PlaybackThread::threadLoop() acquireWakeLock(); + // mNBLogWriter->log can only be called while thread mutex mLock is held. + // So if you need to log when mutex is unlocked, set logString to a non-NULL string, + // and then that string will be logged at the next convenient opportunity. + const char *logString = NULL; + while (!exitPending()) { cpuStats.sample(myName); @@ -1904,6 +1915,12 @@ bool AudioFlinger::PlaybackThread::threadLoop() Mutex::Autolock _l(mLock); + if (logString != NULL) { + mNBLogWriter->logTimestamp(); + mNBLogWriter->log(logString); + logString = NULL; + } + if (checkForNewParameters_l()) { cacheParameters_l(); } @@ -1917,6 +1934,7 @@ bool AudioFlinger::PlaybackThread::threadLoop() threadLoop_standby(); + mNBLogWriter->log("standby"); mStandby = true; } @@ -2012,6 +2030,9 @@ if (mType == MIXER) { // since we can't guarantee the destructors won't acquire that // same lock. This will also mutate and push a new fast mixer state. 
threadLoop_removeTracks(tracksToRemove); + if (tracksToRemove.size() > 0) { + logString = "remove"; + } tracksToRemove.clear(); // FIXME I don't understand the need for this here; @@ -2143,6 +2164,8 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud state->mColdGen++; state->mDumpState = &mFastMixerDumpState; state->mTeeSink = mTeeSink.get(); + mFastMixerNBLogWriter = audioFlinger->newWriter_l(kFastMixerLogSize, "FastMixer"); + state->mNBLogWriter = mFastMixerNBLogWriter.get(); sq->end(); sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED); @@ -2219,6 +2242,7 @@ AudioFlinger::MixerThread::~MixerThread() } #endif } + mAudioFlinger->unregisterWriter(mFastMixerNBLogWriter); delete mAudioMixer; } @@ -2846,6 +2870,7 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); + mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); @@ -3222,6 +3247,9 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep // remove all the tracks that need to be... if (CC_UNLIKELY(trackToRemove != 0)) { tracksToRemove->add(trackToRemove); +#if 0 + mNBLogWriter->logf("prepareTracks_l remove name=%u", trackToRemove->name()); +#endif mActiveTracks.remove(trackToRemove); if (!mEffectChains.isEmpty()) { ALOGV("stopping track on chain %p for session Id: %d", mEffectChains[0].get(), diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 06a1c8c..5fa7eaf 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,6 +315,8 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; + static const size_t kLogSize = 512; + sp mNBLogWriter; }; // --- PlaybackThread --- @@ -544,6 +546,8 @@ private: sp mTeeSink; sp mTeeSource; uint32_t mScreenState; // cached copy of gScreenState + static const size_t kFastMixerLogSize = 8 * 1024; + sp mFastMixerNBLogWriter; public: virtual bool hasFastMixer() const = 0; virtual FastTrackUnderruns getFastTrackUnderruns(size_t fastIndex) const diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index c5f0ed7..315cbbc 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -569,6 +569,7 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); + thread->mNBLogWriter->logf("start mName=%d", mName); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track @@ -611,6 +612,7 @@ void AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); + thread->mNBLogWriter->logf("stop mName=%d", mName); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -647,6 +649,7 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); + thread->mNBLogWriter->logf("pause mName=%d", mName); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -670,6 +673,7 @@ void 
AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); + thread->mNBLogWriter->logf("flush mName=%d", mName); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From 7cc0c29d6a7b76520ec588437ab51d5b8eac9ebc Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 1 Feb 2013 11:43:44 -0800 Subject: Revive the code to support TCP interleaved transport Also support non-interleaved transport now, but the sink gets to decide what mode it wants to run in, _not_ the source. Change-Id: I3b6057f86871084e4decf930bb4a7a2d7517b0f2 --- .../wifi-display/ANetworkSession.cpp | 1 - .../wifi-display/sink/DirectRenderer.cpp | 18 ++- media/libstagefright/wifi-display/sink/RTPSink.cpp | 53 ++++++++- media/libstagefright/wifi-display/sink/RTPSink.h | 10 +- .../wifi-display/sink/WifiDisplaySink.cpp | 124 +++++++++------------ .../wifi-display/sink/WifiDisplaySink.h | 9 +- .../wifi-display/source/PlaybackSession.cpp | 12 ++ .../wifi-display/source/WifiDisplaySource.cpp | 40 +++---- .../wifi-display/source/WifiDisplaySource.h | 1 + 9 files changed, 157 insertions(+), 111 deletions(-) diff --git a/media/libstagefright/wifi-display/ANetworkSession.cpp b/media/libstagefright/wifi-display/ANetworkSession.cpp index 62a6e7f..06f71f4 100644 --- a/media/libstagefright/wifi-display/ANetworkSession.cpp +++ b/media/libstagefright/wifi-display/ANetworkSession.cpp @@ -1091,7 +1091,6 @@ void ANetworkSession::threadLoop() { clientSocket); sp clientSession = - // using socket sd as sessionID new Session( mNextSessionID++, Session::CONNECTED, diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp index 93430eb..23cf6fd 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp @@ -35,11 +35,19 @@ namespace android { +#if 0 // static const int64_t DirectRenderer::kPacketLostDelayUs = 80000ll; // static const int64_t DirectRenderer::kPacketLateDelayUs = 60000ll; +#else +// static +const int64_t DirectRenderer::kPacketLostDelayUs = 1000000ll; + +// static +const int64_t DirectRenderer::kPacketLateDelayUs = -1ll; +#endif DirectRenderer::DirectRenderer( const sp ¬ifyLost, @@ -309,11 +317,11 @@ void DirectRenderer::dequeueAccessUnits() { void DirectRenderer::schedulePacketLost() { sp msg; -#if 1 - msg = new AMessage(kWhatPacketLate, id()); - msg->setInt32("generation", mPacketLostGeneration); - msg->post(kPacketLateDelayUs); -#endif + if (kPacketLateDelayUs > 0ll) { + msg = new AMessage(kWhatPacketLate, id()); + msg->setInt32("generation", mPacketLostGeneration); + msg->post(kPacketLateDelayUs); + } msg = new AMessage(kWhatPacketLost, id()); msg->setInt32("generation", mPacketLostGeneration); diff --git a/media/libstagefright/wifi-display/sink/RTPSink.cpp b/media/libstagefright/wifi-display/sink/RTPSink.cpp index 7f4b66f..be54595 100644 --- a/media/libstagefright/wifi-display/sink/RTPSink.cpp +++ b/media/libstagefright/wifi-display/sink/RTPSink.cpp @@ -253,6 +253,8 @@ RTPSink::RTPSink( mRTPPort(0), mRTPSessionID(0), mRTCPSessionID(0), + mRTPClientSessionID(0), + mRTCPClientSessionID(0), mFirstArrivalTimeUs(-1ll), mNumPacketsReceived(0ll), mRegression(1000), @@ -260,6 +262,14 @@ RTPSink::RTPSink( } RTPSink::~RTPSink() { + if (mRTCPClientSessionID != 0) { + 
mNetSession->destroySession(mRTCPClientSessionID); + } + + if (mRTPClientSessionID != 0) { + mNetSession->destroySession(mRTPClientSessionID); + } + if (mRTCPSessionID != 0) { mNetSession->destroySession(mRTCPSessionID); } @@ -269,8 +279,8 @@ RTPSink::~RTPSink() { } } -status_t RTPSink::init(bool useTCPInterleaving) { - if (useTCPInterleaving) { +status_t RTPSink::init(bool usingTCPTransport, bool usingTCPInterleaving) { + if (usingTCPInterleaving) { return OK; } @@ -280,8 +290,16 @@ status_t RTPSink::init(bool useTCPInterleaving) { sp rtcpNotify = new AMessage(kWhatRTCPNotify, id()); for (clientRtp = 15550;; clientRtp += 2) { int32_t rtpSession; - status_t err = mNetSession->createUDPSession( - clientRtp, rtpNotify, &rtpSession); + status_t err; + struct in_addr ifaceAddr; + if (usingTCPTransport) { + ifaceAddr.s_addr = INADDR_ANY; + err = mNetSession->createTCPDatagramSession( + ifaceAddr, clientRtp, rtpNotify, &rtpSession); + } else { + err = mNetSession->createUDPSession( + clientRtp, rtpNotify, &rtpSession); + } if (err != OK) { ALOGI("failed to create RTP socket on port %d", clientRtp); @@ -289,8 +307,13 @@ status_t RTPSink::init(bool useTCPInterleaving) { } int32_t rtcpSession; - err = mNetSession->createUDPSession( - clientRtp + 1, rtcpNotify, &rtcpSession); + if (usingTCPTransport) { + err = mNetSession->createTCPDatagramSession( + ifaceAddr, clientRtp + 1, rtcpNotify, &rtcpSession); + } else { + err = mNetSession->createUDPSession( + clientRtp + 1, rtcpNotify, &rtcpSession); + } if (err == OK) { mRTPPort = clientRtp; @@ -367,6 +390,24 @@ void RTPSink::onMessageReceived(const sp &msg) { break; } + case ANetworkSession::kWhatClientConnected: + { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + ALOGI("TCP session %d now connected", sessionID); + + int32_t serverPort; + CHECK(msg->findInt32("server-port", &serverPort)); + + if (serverPort == mRTPPort) { + mRTPClientSessionID = sessionID; + } else { + CHECK_EQ(serverPort, mRTPPort + 1); + mRTCPClientSessionID = sessionID; + } + break; + } + default: TRESPASS(); } diff --git a/media/libstagefright/wifi-display/sink/RTPSink.h b/media/libstagefright/wifi-display/sink/RTPSink.h index 6e40185..f9cbce9 100644 --- a/media/libstagefright/wifi-display/sink/RTPSink.h +++ b/media/libstagefright/wifi-display/sink/RTPSink.h @@ -48,7 +48,7 @@ struct RTPSink : public AHandler { // If TCP interleaving is used, no UDP sockets are created, instead // incoming RTP/RTCP packets (arriving on the RTSP control connection) // are manually injected by WifiDisplaySink. - status_t init(bool useTCPInterleaving); + status_t init(bool usingTCPTransport, bool usingTCPInterleaving); status_t connect( const char *host, int32_t remoteRtpPort, int32_t remoteRtcpPort); @@ -79,8 +79,12 @@ private: KeyedVector > mSources; int32_t mRTPPort; - int32_t mRTPSessionID; - int32_t mRTCPSessionID; + + int32_t mRTPSessionID; // in TCP unicast mode these are just server + int32_t mRTCPSessionID; // sockets. No data is transferred through them. 
+ + int32_t mRTPClientSessionID; // in TCP unicast mode + int32_t mRTCPClientSessionID; int64_t mFirstArrivalTimeUs; int64_t mNumPacketsReceived; diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index 46c40c7..55581a6 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -27,6 +27,8 @@ #include #include +#include + namespace android { WifiDisplaySink::WifiDisplaySink( @@ -37,6 +39,8 @@ WifiDisplaySink::WifiDisplaySink( mNetSession(netSession), mSurfaceTex(bufferProducer), mNotify(notify), + mUsingTCPTransport(false), + mUsingTCPInterleaving(false), mSessionID(0), mNextCSeq(1) { #if 1 @@ -141,17 +145,8 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { sleep(2); // XXX int32_t sourcePort; - - if (msg->findString("setupURI", &mSetupURI)) { - AString path, user, pass; - CHECK(ParseURL( - mSetupURI.c_str(), - &mRTSPHost, &sourcePort, &path, &user, &pass) - && user.empty() && pass.empty()); - } else { - CHECK(msg->findString("sourceHost", &mRTSPHost)); - CHECK(msg->findInt32("sourcePort", &sourcePort)); - } + CHECK(msg->findString("sourceHost", &mRTSPHost)); + CHECK(msg->findInt32("sourcePort", &sourcePort)); sp notify = new AMessage(kWhatRTSPNotify, id()); @@ -208,13 +203,6 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { { ALOGI("We're now connected."); mState = CONNECTED; - - if (!mSetupURI.empty()) { - status_t err = - sendDescribe(mSessionID, mSetupURI.c_str()); - - CHECK_EQ(err, (status_t)OK); - } break; } @@ -226,7 +214,7 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { case ANetworkSession::kWhatBinaryData: { - CHECK(sUseTCPInterleaving); + CHECK(mUsingTCPInterleaving); int32_t channel; CHECK(msg->findInt32("channel", &channel)); @@ -312,20 +300,6 @@ status_t WifiDisplaySink::onReceiveM2Response( return OK; } -status_t WifiDisplaySink::onReceiveDescribeResponse( - int32_t sessionID, const sp &msg) { - int32_t statusCode; - if (!msg->getStatusCode(&statusCode)) { - return ERROR_MALFORMED; - } - - if (statusCode != 200) { - return ERROR_UNSUPPORTED; - } - - return sendSetup(sessionID, mSetupURI.c_str()); -} - status_t WifiDisplaySink::onReceiveSetupResponse( int32_t sessionID, const sp &msg) { int32_t statusCode; @@ -365,12 +339,11 @@ status_t WifiDisplaySink::onReceiveSetupResponse( return sendPlay( sessionID, - !mSetupURI.empty() - ? 
mSetupURI.c_str() : "rtsp://x.x.x.x:x/wfd1.0/streamid=0"); + "rtsp://x.x.x.x:x/wfd1.0/streamid=0"); } status_t WifiDisplaySink::configureTransport(const sp &msg) { - if (sUseTCPInterleaving) { + if (mUsingTCPTransport) { return OK; } @@ -514,11 +487,45 @@ void WifiDisplaySink::onGetParameterRequest( int32_t sessionID, int32_t cseq, const sp &data) { - AString body = "wfd_video_formats: "; - body.append(mSinkSupportedVideoFormats.getFormatSpec()); - body.append( - "\r\nwfd_audio_codecs: AAC 0000000F 00\r\n" - "wfd_client_rtp_ports: RTP/AVP/UDP;unicast 19000 0 mode=play\r\n"); + AString body; + + if (mState == CONNECTED) { + mUsingTCPTransport = false; + mUsingTCPInterleaving = false; + + char val[PROPERTY_VALUE_MAX]; + if (property_get("media.wfd-sink.tcp-mode", val, NULL)) { + if (!strcasecmp("true", val) || !strcmp("1", val)) { + ALOGI("Using TCP unicast transport."); + mUsingTCPTransport = true; + mUsingTCPInterleaving = false; + } else if (!strcasecmp("interleaved", val)) { + ALOGI("Using TCP interleaved transport."); + mUsingTCPTransport = true; + mUsingTCPInterleaving = true; + } + } + + body = "wfd_video_formats: "; + body.append(mSinkSupportedVideoFormats.getFormatSpec()); + + body.append( + "\r\nwfd_audio_codecs: AAC 0000000F 00\r\n" + "wfd_client_rtp_ports: RTP/AVP/"); + + if (mUsingTCPTransport) { + body.append("TCP;"); + if (mUsingTCPInterleaving) { + body.append("interleaved"); + } else { + body.append("unicast 19000 0"); + } + } else { + body.append("UDP;unicast 19000 0"); + } + + body.append(" mode=play\r\n"); + } AString response = "RTSP/1.0 200 OK\r\n"; AppendCommonResponse(&response, cseq); @@ -531,38 +538,13 @@ void WifiDisplaySink::onGetParameterRequest( CHECK_EQ(err, (status_t)OK); } -status_t WifiDisplaySink::sendDescribe(int32_t sessionID, const char *uri) { - uri = "rtsp://xwgntvx.is.livestream-api.com/livestreamiphone/wgntv"; - uri = "rtsp://v2.cache6.c.youtube.com/video.3gp?cid=e101d4bf280055f9&fmt=18"; - - AString request = StringPrintf("DESCRIBE %s RTSP/1.0\r\n", uri); - AppendCommonResponse(&request, mNextCSeq); - - request.append("Accept: application/sdp\r\n"); - request.append("\r\n"); - - status_t err = mNetSession->sendRequest( - sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, mNextCSeq, &WifiDisplaySink::onReceiveDescribeResponse); - - ++mNextCSeq; - - return OK; -} - status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) { sp notify = new AMessage(kWhatRTPSinkNotify, id()); mRTPSink = new RTPSink(mNetSession, mSurfaceTex, notify); looper()->registerHandler(mRTPSink); - status_t err = mRTPSink->init(sUseTCPInterleaving); + status_t err = mRTPSink->init(mUsingTCPTransport, mUsingTCPInterleaving); if (err != OK) { looper()->unregisterHandler(mRTPSink->id()); @@ -574,15 +556,17 @@ status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) { AppendCommonResponse(&request, mNextCSeq); - if (sUseTCPInterleaving) { + if (mUsingTCPInterleaving) { request.append("Transport: RTP/AVP/TCP;interleaved=0-1\r\n"); } else { int32_t rtpPort = mRTPSink->getRTPPort(); request.append( StringPrintf( - "Transport: RTP/AVP/UDP;unicast;client_port=%d-%d\r\n", - rtpPort, rtpPort + 1)); + "Transport: RTP/AVP/%s;unicast;client_port=%d-%d\r\n", + mUsingTCPTransport ? 
"TCP" : "UDP", + rtpPort, + rtpPort + 1)); } request.append("\r\n"); diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h index 5f86519..8b5ff6b 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h @@ -86,14 +86,13 @@ private: typedef status_t (WifiDisplaySink::*HandleRTSPResponseFunc)( int32_t sessionID, const sp &msg); - static const bool sUseTCPInterleaving = false; - State mState; VideoFormats mSinkSupportedVideoFormats; sp mNetSession; sp mSurfaceTex; sp mNotify; - AString mSetupURI; + bool mUsingTCPTransport; + bool mUsingTCPInterleaving; AString mRTSPHost; int32_t mSessionID; @@ -106,7 +105,6 @@ private: int32_t mPlaybackSessionTimeoutSecs; status_t sendM2(int32_t sessionID); - status_t sendDescribe(int32_t sessionID, const char *uri); status_t sendSetup(int32_t sessionID, const char *uri); status_t sendPlay(int32_t sessionID, const char *uri); status_t sendIDRFrameRequest(int32_t sessionID); @@ -114,9 +112,6 @@ private: status_t onReceiveM2Response( int32_t sessionID, const sp &msg); - status_t onReceiveDescribeResponse( - int32_t sessionID, const sp &msg); - status_t onReceiveSetupResponse( int32_t sessionID, const sp &msg); diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index 91dc1fa..453cbc5 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -546,6 +546,18 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( onFinishPlay2(); } else if (what == Sender::kWhatSessionDead) { notifySessionDead(); + } else if (what == Sender::kWhatBinaryData) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatBinaryData); + + int32_t channel; + CHECK(msg->findInt32("channel", &channel)); + notify->setInt32("channel", channel); + + sp data; + CHECK(msg->findBuffer("data", &data)); + notify->setBuffer("data", data); + notify->post(); } else { TRESPASS(); } diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 981d5f9..825ebc6 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -597,15 +597,6 @@ status_t WifiDisplaySource::sendM3(int32_t sessionID) { status_t WifiDisplaySource::sendM4(int32_t sessionID) { CHECK_EQ(sessionID, mClientSessionID); - AString transportString = "UDP"; - - char val[PROPERTY_VALUE_MAX]; - if (property_get("media.wfd.enable-tcp", val, NULL) - && (!strcasecmp("true", val) || !strcmp("1", val))) { - ALOGI("Using TCP transport."); - transportString = "TCP"; - } - AString body; if (mSinkSupportsVideo) { @@ -630,11 +621,11 @@ status_t WifiDisplaySource::sendM4(int32_t sessionID) { body.append( StringPrintf( - "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n" - "wfd_client_rtp_ports: RTP/AVP/%s;unicast %d 0 mode=play\r\n", - mClientInfo.mLocalIP.c_str(), - transportString.c_str(), - mChosenRTPPort)); + "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n", + mClientInfo.mLocalIP.c_str())); + + body.append(mWfdClientRtpPorts); + body.append("\r\n"); AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n"; AppendCommonResponse(&request, mNextCSeq); @@ -797,18 +788,29 @@ status_t 
WifiDisplaySource::onReceiveM3Response( return ERROR_MALFORMED; } - unsigned port0, port1; + unsigned port0 = 0, port1 = 0; if (sscanf(value.c_str(), "RTP/AVP/UDP;unicast %u %u mode=play", &port0, - &port1) != 2 - || port0 == 0 || port0 > 65535 || port1 != 0) { - ALOGE("Sink chose its wfd_client_rtp_ports poorly (%s)", + &port1) == 2 + || sscanf(value.c_str(), + "RTP/AVP/TCP;unicast %u %u mode=play", + &port0, + &port1) == 2) { + if (port0 == 0 || port0 > 65535 || port1 != 0) { + ALOGE("Sink chose its wfd_client_rtp_ports poorly (%s)", + value.c_str()); + + return ERROR_MALFORMED; + } + } else if (strcmp(value.c_str(), "RTP/AVP/TCP;interleaved mode=play")) { + ALOGE("Unsupported value for wfd_client_rtp_ports (%s)", value.c_str()); - return ERROR_MALFORMED; + return ERROR_UNSUPPORTED; } + mWfdClientRtpPorts = value; mChosenRTPPort = port0; if (!params->findParameter("wfd_video_formats", &value)) { diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index fec2c6d..724462c 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ -119,6 +119,7 @@ private: uint32_t mStopReplyID; + AString mWfdClientRtpPorts; int32_t mChosenRTPPort; // extracted from "wfd_client_rtp_ports" bool mSinkSupportsVideo; -- cgit v1.1 From 2fa05230219f72118388f3a350b1239db1299647 Mon Sep 17 00:00:00 2001 From: James Dong Date: Wed, 30 Jan 2013 13:31:59 -0800 Subject: Remove a few unused header includes from PlaybackSession.cpp Change-Id: Id3147e2f2d5d75ccc172e67802639cd43ed68870 --- media/libstagefright/wifi-display/source/PlaybackSession.cpp | 4 ---- 1 file changed, 4 deletions(-) diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index 91dc1fa..e542908 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -29,8 +29,6 @@ #include "WifiDisplaySource.h" #include -#include -#include #include #include #include @@ -41,10 +39,8 @@ #include #include #include -#include #include #include -#include #include #include -- cgit v1.1 From d3922f72601d82c6fc067a98916fda0bd1291c5f Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 1 Feb 2013 17:57:04 -0800 Subject: AudioFlinger: fix RecordThread initial device A regression was introduced when the audio device enums where modified for a 32 bit representation: the device passed when constructing a RecordThread was still the concatenation of input device and output device bit fields on one 32 bit value which is not possible anymore. The fix consists in modifying the RecordThread constructor to accept separate values for input and output devices. 
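
To illustrate why the old single-word form no longer works, here is a rough standalone sketch; the mask values are made up for illustration and are not the real audio.h constants. Once input devices carry a direction bit and reuse the same low bits as output devices, OR-ing the two families into one 32-bit value becomes ambiguous, which is why the thread now takes them as two parameters (see the Threads.cpp hunk below).

    // Sketch only: illustrative masks, not the real audio_devices_t values.
    #include <cstdint>
    #include <cstdio>

    typedef uint32_t audio_devices_t;

    static const audio_devices_t kOutSpeaker   = 0x00000002;          // an output device
    static const audio_devices_t kInBit        = 0x80000000;          // direction bit for inputs
    static const audio_devices_t kInBuiltinMic = kInBit | 0x00000004; // inputs reuse the low bits

    int main() {
        // Old scheme: one combined word. With overlapping low bits the receiver
        // cannot tell which low bits are input devices and which are output devices.
        audio_devices_t combined = kOutSpeaker | kInBuiltinMic;
        std::printf("combined  = %#x (ambiguous)\n", combined);

        // New scheme mirrored by this change: keep the two directions separate.
        audio_devices_t outDevice = kOutSpeaker;
        audio_devices_t inDevice  = kInBuiltinMic;
        std::printf("outDevice = %#x, inDevice = %#x\n", outDevice, inDevice);
        return 0;
    }
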
Change-Id: I81fb5f4718428b54251e65d74b86e198ce15193e --- services/audioflinger/AudioFlinger.cpp | 6 +++--- services/audioflinger/Threads.cpp | 5 +++-- services/audioflinger/Threads.h | 3 ++- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 5f5b041..89e9b52 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1600,14 +1600,14 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, // Start record thread // RecorThread require both input and output device indication to forward to audio // pre processing modules - audio_devices_t device = (*pDevices) | primaryOutputDevice_l(); - thread = new RecordThread(this, input, reqSamplingRate, reqChannels, id, - device, teeSink); + primaryOutputDevice_l(), + *pDevices, + teeSink); mRecordThreads.add(id, thread); ALOGV("openInput() created record thread: ID %d thread %p", id, thread); if (pSamplingRate != NULL) *pSamplingRate = reqSamplingRate; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 744a7df..af0dccc 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -3545,9 +3545,10 @@ AudioFlinger::RecordThread::RecordThread(const sp& audioFlinger, uint32_t sampleRate, audio_channel_mask_t channelMask, audio_io_handle_t id, - audio_devices_t device, + audio_devices_t outDevice, + audio_devices_t inDevice, const sp& teeSink) : - ThreadBase(audioFlinger, id, AUDIO_DEVICE_NONE, device, RECORD), + ThreadBase(audioFlinger, id, outDevice, inDevice, RECORD), mInput(input), mResampler(NULL), mRsmpOutBuffer(NULL), mRsmpInBuffer(NULL), // mRsmpInIndex and mInputBytes set by readInputParameters() mReqChannelCount(popcount(channelMask)), diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 06a1c8c..a1abcde 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -698,7 +698,8 @@ public: uint32_t sampleRate, audio_channel_mask_t channelMask, audio_io_handle_t id, - audio_devices_t device, + audio_devices_t outDevice, + audio_devices_t inDevice, const sp& teeSink); virtual ~RecordThread(); -- cgit v1.1 From 685ef09bcaf5de6abf2064d552296f70eaec6761 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 4 Feb 2013 08:15:34 -0800 Subject: Fix crash when destroying a DuplicatingThread Bug: 8121343 Change-Id: I9b1fc56c753e465e8a08916022e52db6c3a36906 --- services/audioflinger/AudioFlinger.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 52fa576..1270825 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -374,6 +374,9 @@ sp AudioFlinger::newWriter_l(size_t size, const char *name) void AudioFlinger::unregisterWriter(const sp& writer) { + if (writer == 0) { + return; + } sp iMemory(writer->getIMemory()); if (iMemory == 0) { return; -- cgit v1.1 From a77c496d4520f25b7b337d32ebd9681d8ea0f7bb Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Tue, 5 Feb 2013 09:03:22 -0800 Subject: Return error code if setting scaling mode fails The return code from native_window_set_scaling_mode() was ignored. Looking at the code review comments that introduced this code, it seems like the intention was to return the error code in that case. 
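
As a standalone sketch of the corrected control flow (the helper below is a stand-in, not the real native_window_set_scaling_mode() call, and the types are simplified): the status is still logged on failure, but it is now returned to the caller instead of being replaced by OK.

    // Sketch only: setScalingModeStandIn() is a placeholder for the real
    // native_window_set_scaling_mode() call; status_t/OK are simplified.
    #include <cerrno>
    #include <cstdio>

    typedef int status_t;
    static const status_t OK = 0;

    static status_t setScalingModeStandIn(int mode) {
        return (mode >= 0) ? OK : -EINVAL;   // pretend negative modes fail
    }

    static status_t setVideoScalingMode(int mode) {
        status_t err = setScalingModeStandIn(mode);
        if (err != OK) {
            std::fprintf(stderr, "Failed to set scaling mode: %d\n", err);
        }
        return err;   // previously the error was logged and then dropped
    }
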
Change-Id: I9592cc378f0a0b960d37178aa0525fc17e8734ba --- media/libstagefright/AwesomePlayer.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 23ce088..0f4d866 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -2511,6 +2511,7 @@ status_t AwesomePlayer::setVideoScalingMode_l(int32_t mode) { if (err != OK) { ALOGW("Failed to set scaling mode: %d", err); } + return err; } return OK; } -- cgit v1.1 From b5f25f005bc1d3ae35f45b58c88345e183dc336d Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 5 Feb 2013 10:14:26 -0800 Subject: Groundwork to support bidirectional, asynchronous communication between NuPlayer and its sources. Change-Id: I1989022d806206b926555add3aa5c1fcf37aa78d --- .../nuplayer/GenericSource.cpp | 8 +++-- .../libmediaplayerservice/nuplayer/GenericSource.h | 5 +++- .../nuplayer/HTTPLiveSource.cpp | 4 ++- .../nuplayer/HTTPLiveSource.h | 1 + media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 35 ++++++++++++++++++---- media/libmediaplayerservice/nuplayer/NuPlayer.h | 1 + .../nuplayer/NuPlayerSource.h | 16 ++++++++-- .../libmediaplayerservice/nuplayer/RTSPSource.cpp | 4 ++- media/libmediaplayerservice/nuplayer/RTSPSource.h | 1 + .../nuplayer/StreamingSource.cpp | 7 +++-- .../nuplayer/StreamingSource.h | 4 ++- .../nuplayer/mp4/MP4Source.cpp | 6 ++-- .../libmediaplayerservice/nuplayer/mp4/MP4Source.h | 2 +- 13 files changed, 75 insertions(+), 19 deletions(-) diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp index f281879..450fae5 100644 --- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp +++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp @@ -32,11 +32,13 @@ namespace android { NuPlayer::GenericSource::GenericSource( + const sp ¬ify, const char *url, const KeyedVector *headers, bool uidValid, uid_t uid) - : mDurationUs(0ll), + : Source(notify), + mDurationUs(0ll), mAudioIsVorbis(false) { DataSource::RegisterDefaultSniffers(); @@ -48,8 +50,10 @@ NuPlayer::GenericSource::GenericSource( } NuPlayer::GenericSource::GenericSource( + const sp ¬ify, int fd, int64_t offset, int64_t length) - : mDurationUs(0ll), + : Source(notify), + mDurationUs(0ll), mAudioIsVorbis(false) { DataSource::RegisterDefaultSniffers(); diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h index e1ce2c1..e59ea3a 100644 --- a/media/libmediaplayerservice/nuplayer/GenericSource.h +++ b/media/libmediaplayerservice/nuplayer/GenericSource.h @@ -32,12 +32,15 @@ struct MediaSource; struct NuPlayer::GenericSource : public NuPlayer::Source { GenericSource( + const sp ¬ify, const char *url, const KeyedVector *headers, bool uidValid = false, uid_t uid = 0); - GenericSource(int fd, int64_t offset, int64_t length); + GenericSource( + const sp ¬ify, + int fd, int64_t offset, int64_t length); virtual void start(); diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp index 5dcca12..d38ee62 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp @@ -34,10 +34,12 @@ namespace android { NuPlayer::HTTPLiveSource::HTTPLiveSource( + const sp ¬ify, const char *url, const KeyedVector *headers, bool uidValid, uid_t uid) - : mURL(url), + : Source(notify), + mURL(url), mUIDValid(uidValid), mUID(uid), 
mFlags(0), diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h index 79f4ab8..4a217af 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h @@ -28,6 +28,7 @@ struct LiveSession; struct NuPlayer::HTTPLiveSource : public NuPlayer::Source { HTTPLiveSource( + const sp ¬ify, const char *url, const KeyedVector *headers, bool uidValid = false, diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index 517fb34..9585aba 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -145,12 +145,14 @@ void NuPlayer::setDriver(const wp &driver) { void NuPlayer::setDataSource(const sp &source) { sp msg = new AMessage(kWhatSetDataSource, id()); + sp notify = new AMessage(kWhatSourceNotify, id()); + char prop[PROPERTY_VALUE_MAX]; if (property_get("media.stagefright.use-mp4source", prop, NULL) && (!strcmp(prop, "1") || !strcasecmp(prop, "true"))) { - msg->setObject("source", new MP4Source(source)); + msg->setObject("source", new MP4Source(notify, source)); } else { - msg->setObject("source", new StreamingSource(source)); + msg->setObject("source", new StreamingSource(notify, source)); } msg->post(); @@ -176,13 +178,15 @@ void NuPlayer::setDataSource( const char *url, const KeyedVector *headers) { sp msg = new AMessage(kWhatSetDataSource, id()); + sp notify = new AMessage(kWhatSourceNotify, id()); + sp source; if (IsHTTPLiveURL(url)) { - source = new HTTPLiveSource(url, headers, mUIDValid, mUID); + source = new HTTPLiveSource(notify, url, headers, mUIDValid, mUID); } else if (!strncasecmp(url, "rtsp://", 7)) { - source = new RTSPSource(url, headers, mUIDValid, mUID); + source = new RTSPSource(notify, url, headers, mUIDValid, mUID); } else { - source = new GenericSource(url, headers, mUIDValid, mUID); + source = new GenericSource(notify, url, headers, mUIDValid, mUID); } msg->setObject("source", source); @@ -192,7 +196,9 @@ void NuPlayer::setDataSource( void NuPlayer::setDataSource(int fd, int64_t offset, int64_t length) { sp msg = new AMessage(kWhatSetDataSource, id()); - sp source = new GenericSource(fd, offset, length); + sp notify = new AMessage(kWhatSourceNotify, id()); + + sp source = new GenericSource(notify, fd, offset, length); msg->setObject("source", source); msg->post(); } @@ -273,6 +279,8 @@ void NuPlayer::onMessageReceived(const sp &msg) { CHECK(msg->findObject("source", &obj)); mSource = static_cast(obj.get()); + + looper()->registerHandler(mSource); break; } @@ -714,6 +722,12 @@ void NuPlayer::onMessageReceived(const sp &msg) { break; } + case kWhatSourceNotify: + { + TRESPASS(); // TBD + break; + } + default: TRESPASS(); break; @@ -1169,6 +1183,9 @@ void NuPlayer::performReset() { if (mSource != NULL) { mSource->stop(); + + looper()->unregisterHandler(mSource->id()); + mSource.clear(); } @@ -1210,4 +1227,10 @@ void NuPlayer::performSetSurface(const sp &wrapper) { } } +//////////////////////////////////////////////////////////////////////////////// + +void NuPlayer::Source::onMessageReceived(const sp &msg) { + TRESPASS(); +} + } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h index 09fc0ba..0ff6089 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h @@ -95,6 +95,7 @@ private: kWhatPause 
= 'paus', kWhatResume = 'rsme', kWhatPollDuration = 'polD', + kWhatSourceNotify = 'srcN', }; wp mDriver; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h index a635340..a3201cf 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h @@ -20,17 +20,23 @@ #include "NuPlayer.h" +#include + namespace android { struct ABuffer; -struct NuPlayer::Source : public RefBase { +struct NuPlayer::Source : public AHandler { enum Flags { FLAG_SEEKABLE = 1, FLAG_DYNAMIC_DURATION = 2, }; - Source() {} + // The provides message is used to notify the player about various + // events. + Source(const sp ¬ify) + : mNotify(notify) { + } virtual void start() = 0; virtual void stop() {} @@ -57,9 +63,15 @@ struct NuPlayer::Source : public RefBase { protected: virtual ~Source() {} + virtual void onMessageReceived(const sp &msg); + virtual sp getFormatMeta(bool audio) { return NULL; } + sp dupNotify() const { return mNotify->dup(); } + private: + sp mNotify; + DISALLOW_EVIL_CONSTRUCTORS(Source); }; diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp index afaa5db..e402115 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp @@ -29,11 +29,13 @@ namespace android { NuPlayer::RTSPSource::RTSPSource( + const sp ¬ify, const char *url, const KeyedVector *headers, bool uidValid, uid_t uid) - : mURL(url), + : Source(notify), + mURL(url), mUIDValid(uidValid), mUID(uid), mFlags(0), diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h index 779d791..033b3e8 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.h +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h @@ -32,6 +32,7 @@ struct MyHandler; struct NuPlayer::RTSPSource : public NuPlayer::Source { RTSPSource( + const sp ¬ify, const char *url, const KeyedVector *headers, bool uidValid = false, diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp index 7159404..9b04833 100644 --- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp +++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp @@ -32,8 +32,11 @@ namespace android { -NuPlayer::StreamingSource::StreamingSource(const sp &source) - : mSource(source), +NuPlayer::StreamingSource::StreamingSource( + const sp ¬ify, + const sp &source) + : Source(notify), + mSource(source), mFinalResult(OK) { } diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h b/media/libmediaplayerservice/nuplayer/StreamingSource.h index a27b58a..dc616f7 100644 --- a/media/libmediaplayerservice/nuplayer/StreamingSource.h +++ b/media/libmediaplayerservice/nuplayer/StreamingSource.h @@ -27,7 +27,9 @@ struct ABuffer; struct ATSParser; struct NuPlayer::StreamingSource : public NuPlayer::Source { - StreamingSource(const sp &source); + StreamingSource( + const sp ¬ify, + const sp &source); virtual void start(); diff --git a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp index a62d5a2..d659b73 100644 --- a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp +++ b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp @@ -104,8 +104,10 @@ private: DISALLOW_EVIL_CONSTRUCTORS(StreamSource); }; -MP4Source::MP4Source(const sp &source) - : 
mSource(source), +MP4Source::MP4Source( + const sp ¬ify, const sp &source) + : Source(notify), + mSource(source), mLooper(new ALooper), mParser(new FragmentedMP4Parser), mEOS(false) { diff --git a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h index abca236..b16a111 100644 --- a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h +++ b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h @@ -24,7 +24,7 @@ namespace android { struct FragmentedMP4Parser; struct MP4Source : public NuPlayer::Source { - MP4Source(const sp &source); + MP4Source(const sp ¬ify, const sp &source); virtual void start(); -- cgit v1.1 From 7a33b7740412accf6a1cc912686c8d0acfb2a883 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Oscar=20Rydh=C3=A9?= Date: Mon, 20 Feb 2012 10:15:48 +0100 Subject: Added HTTP support for SDP files. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Added support for playing SDP files from http links. Previously, SDP files only worked when started from rtsp links (rtsp://a.b.c/abc.sdp), but they are just as common in http links. patch provided by "Oscar Rydhé " Change-Id: Ic73af3a9a002009dbe8b04c267a4621bf7fe2f46 --- media/libmediaplayerservice/MediaPlayerFactory.cpp | 4 + media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 6 + .../libmediaplayerservice/nuplayer/RTSPSource.cpp | 74 +++++++++- media/libmediaplayerservice/nuplayer/RTSPSource.h | 7 +- media/libstagefright/include/SDPLoader.h | 70 ++++++++++ media/libstagefright/rtsp/Android.mk | 1 + media/libstagefright/rtsp/MyHandler.h | 53 +++++++ media/libstagefright/rtsp/SDPLoader.cpp | 154 +++++++++++++++++++++ 8 files changed, 362 insertions(+), 7 deletions(-) create mode 100644 media/libstagefright/include/SDPLoader.h create mode 100644 media/libstagefright/rtsp/SDPLoader.cpp diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp index 3f69c11..bb441cc 100644 --- a/media/libmediaplayerservice/MediaPlayerFactory.cpp +++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp @@ -215,6 +215,10 @@ class NuPlayerFactory : public MediaPlayerFactory::IFactory { if (strstr(url,"m3u8")) { return kOurScore; } + + if ((len >= 4 && !strcasecmp(".sdp", &url[len - 4])) || strstr(url, ".sdp?")) { + return kOurScore; + } } if (!strncasecmp("rtsp://", url, 7)) { diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index 9585aba..0736fbe 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -177,6 +177,7 @@ static bool IsHTTPLiveURL(const char *url) { void NuPlayer::setDataSource( const char *url, const KeyedVector *headers) { sp msg = new AMessage(kWhatSetDataSource, id()); + size_t len = strlen(url); sp notify = new AMessage(kWhatSourceNotify, id()); @@ -185,6 +186,11 @@ void NuPlayer::setDataSource( source = new HTTPLiveSource(notify, url, headers, mUIDValid, mUID); } else if (!strncasecmp(url, "rtsp://", 7)) { source = new RTSPSource(notify, url, headers, mUIDValid, mUID); + } else if ((!strncasecmp(url, "http://", 7) + || !strncasecmp(url, "https://", 8)) + && ((len >= 4 && !strcasecmp(".sdp", &url[len - 4])) + || strstr(url, ".sdp?"))) { + source = new RTSPSource(notify, url, headers, mUIDValid, mUID, true); } else { source = new GenericSource(notify, url, headers, mUIDValid, mUID); } diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp 
b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp index e402115..3035589 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp @@ -22,6 +22,7 @@ #include "AnotherPacketSource.h" #include "MyHandler.h" +#include "SDPLoader.h" #include #include @@ -33,12 +34,14 @@ NuPlayer::RTSPSource::RTSPSource( const char *url, const KeyedVector *headers, bool uidValid, - uid_t uid) + uid_t uid, + bool isSDP) : Source(notify), mURL(url), mUIDValid(uidValid), mUID(uid), mFlags(0), + mIsSDP(isSDP), mState(DISCONNECTED), mFinalResult(OK), mDisconnectReplyID(0), @@ -73,16 +76,25 @@ void NuPlayer::RTSPSource::start() { } CHECK(mHandler == NULL); + CHECK(mSDPLoader == NULL); sp notify = new AMessage(kWhatNotify, mReflector->id()); - mHandler = new MyHandler(mURL.c_str(), notify, mUIDValid, mUID); - mLooper->registerHandler(mHandler); - CHECK_EQ(mState, (int)DISCONNECTED); mState = CONNECTING; - mHandler->connect(); + if (mIsSDP) { + mSDPLoader = new SDPLoader(notify, + (mFlags & kFlagIncognito) ? SDPLoader::kFlagIncognito : 0, + mUIDValid, mUID); + + mSDPLoader->load(mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders); + } else { + mHandler = new MyHandler(mURL.c_str(), notify, mUIDValid, mUID); + mLooper->registerHandler(mHandler); + + mHandler->connect(); + } } void NuPlayer::RTSPSource::stop() { @@ -408,6 +420,12 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp &msg) { break; } + case SDPLoader::kWhatSDPLoaded: + { + onSDPLoaded(msg); + break; + } + default: TRESPASS(); } @@ -461,6 +479,46 @@ void NuPlayer::RTSPSource::onConnected() { mState = CONNECTED; } +void NuPlayer::RTSPSource::onSDPLoaded(const sp &msg) { + status_t err; + CHECK(msg->findInt32("result", &err)); + + mSDPLoader.clear(); + + if (mDisconnectReplyID != 0) { + err = UNKNOWN_ERROR; + } + + if (err == OK) { + sp desc; + sp obj; + CHECK(msg->findObject("description", &obj)); + desc = static_cast(obj.get()); + + AString rtspUri; + if (!desc->findAttribute(0, "a=control", &rtspUri)) { + ALOGE("Unable to find url in SDP"); + err = UNKNOWN_ERROR; + } else { + sp notify = new AMessage(kWhatNotify, mReflector->id()); + + mHandler = new MyHandler(rtspUri.c_str(), notify, mUIDValid, mUID); + mLooper->registerHandler(mHandler); + + mHandler->loadSDP(desc); + } + } + + if (err != OK) { + mState = DISCONNECTED; + mFinalResult = err; + + if (mDisconnectReplyID != 0) { + finishDisconnectIfPossible(); + } + } +} + void NuPlayer::RTSPSource::onDisconnected(const sp &msg) { status_t err; CHECK(msg->findInt32("result", &err)); @@ -479,7 +537,11 @@ void NuPlayer::RTSPSource::onDisconnected(const sp &msg) { void NuPlayer::RTSPSource::finishDisconnectIfPossible() { if (mState != DISCONNECTED) { - mHandler->disconnect(); + if (mHandler != NULL) { + mHandler->disconnect(); + } else if (mSDPLoader != NULL) { + mSDPLoader->cancel(); + } return; } diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h index 033b3e8..b2a7dae 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.h +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h @@ -29,6 +29,7 @@ namespace android { struct ALooper; struct AnotherPacketSource; struct MyHandler; +struct SDPLoader; struct NuPlayer::RTSPSource : public NuPlayer::Source { RTSPSource( @@ -36,7 +37,8 @@ struct NuPlayer::RTSPSource : public NuPlayer::Source { const char *url, const KeyedVector *headers, bool uidValid = false, - uid_t uid = 0); + uid_t uid = 0, + bool isSDP = 
false); virtual void start(); virtual void stop(); @@ -90,6 +92,7 @@ private: bool mUIDValid; uid_t mUID; uint32_t mFlags; + bool mIsSDP; State mState; status_t mFinalResult; uint32_t mDisconnectReplyID; @@ -98,6 +101,7 @@ private: sp mLooper; sp > mReflector; sp mHandler; + sp mSDPLoader; Vector mTracks; sp mAudioTrack; @@ -110,6 +114,7 @@ private: sp getSource(bool audio); void onConnected(); + void onSDPLoaded(const sp &msg); void onDisconnected(const sp &msg); void finishDisconnectIfPossible(); diff --git a/media/libstagefright/include/SDPLoader.h b/media/libstagefright/include/SDPLoader.h new file mode 100644 index 0000000..ca59dc0 --- /dev/null +++ b/media/libstagefright/include/SDPLoader.h @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDP_LOADER_H_ + +#define SDP_LOADER_H_ + +#include +#include +#include + +namespace android { + +struct HTTPBase; + +struct SDPLoader : public AHandler { + enum Flags { + // Don't log any URLs. + kFlagIncognito = 1, + }; + enum { + kWhatSDPLoaded = 'sdpl' + }; + SDPLoader(const sp ¬ify, uint32_t flags = 0, bool uidValid = false, uid_t uid = 0); + + void load(const char* url, const KeyedVector *headers); + + void cancel(); + +protected: + virtual ~SDPLoader() {} + + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatLoad = 'load', + }; + + void onLoad(const sp &msg); + + sp mNotify; + const char* mUrl; + uint32_t mFlags; + bool mUIDValid; + uid_t mUID; + sp mNetLooper; + bool mCancelled; + + sp mHTTPDataSource; + + DISALLOW_EVIL_CONSTRUCTORS(SDPLoader); +}; + +} // namespace android + +#endif // SDP_LOADER_H_ diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk index 49e2daf..9e2724d 100644 --- a/media/libstagefright/rtsp/Android.mk +++ b/media/libstagefright/rtsp/Android.mk @@ -17,6 +17,7 @@ LOCAL_SRC_FILES:= \ ARTPWriter.cpp \ ARTSPConnection.cpp \ ASessionDescription.cpp \ + SDPLoader.cpp \ LOCAL_C_INCLUDES:= \ $(TOP)/frameworks/av/media/libstagefright/include \ diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index 96c7683..b7183b1 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -173,6 +173,18 @@ struct MyHandler : public AHandler { mConn->connect(mOriginalSessionURL.c_str(), reply); } + void loadSDP(const sp& desc) { + looper()->registerHandler(mConn); + (1 ? 
mNetLooper : looper())->registerHandler(mRTPConn); + + sp notify = new AMessage('biny', id()); + mConn->observeBinaryData(notify); + + sp reply = new AMessage('sdpl', id()); + reply->setObject("description", desc); + mConn->connect(mOriginalSessionURL.c_str(), reply); + } + void disconnect() { (new AMessage('abor', id()))->post(); } @@ -486,6 +498,47 @@ struct MyHandler : public AHandler { break; } + case 'sdpl': + { + int32_t result; + CHECK(msg->findInt32("result", &result)); + + ALOGI("SDP connection request completed with result %d (%s)", + result, strerror(-result)); + + if (result == OK) { + sp obj; + CHECK(msg->findObject("description", &obj)); + mSessionDesc = + static_cast(obj.get()); + + if (!mSessionDesc->isValid()) { + ALOGE("Failed to parse session description."); + result = ERROR_MALFORMED; + } else { + mBaseURL = mSessionURL; + + if (mSessionDesc->countTracks() < 2) { + // There's no actual tracks in this session. + // The first "track" is merely session meta + // data. + + ALOGW("Session doesn't contain any playable " + "tracks. Aborting."); + result = ERROR_UNSUPPORTED; + } else { + setupTrack(1); + } + } + } + + if (result != OK) { + sp reply = new AMessage('disc', id()); + mConn->disconnect(reply); + } + break; + } + case 'setu': { size_t index; diff --git a/media/libstagefright/rtsp/SDPLoader.cpp b/media/libstagefright/rtsp/SDPLoader.cpp new file mode 100644 index 0000000..ed3fa7e --- /dev/null +++ b/media/libstagefright/rtsp/SDPLoader.cpp @@ -0,0 +1,154 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SDPLoader" +#include + +#include "SDPLoader.h" + +#include "ASessionDescription.h" +#include "HTTPBase.h" + +#include +#include + +#define DEFAULT_SDP_SIZE 100000 + +namespace android { + +SDPLoader::SDPLoader(const sp ¬ify, uint32_t flags, bool uidValid, uid_t uid) + : mNotify(notify), + mFlags(flags), + mUIDValid(uidValid), + mUID(uid), + mNetLooper(new ALooper), + mCancelled(false), + mHTTPDataSource( + HTTPBase::Create( + (mFlags & kFlagIncognito) + ? 
HTTPBase::kFlagIncognito + : 0)) { + if (mUIDValid) { + mHTTPDataSource->setUID(mUID); + } + + mNetLooper->setName("sdp net"); + mNetLooper->start(false /* runOnCallingThread */, + false /* canCallJava */, + PRIORITY_HIGHEST); +} + +void SDPLoader::load(const char *url, const KeyedVector *headers) { + mNetLooper->registerHandler(this); + + sp msg = new AMessage(kWhatLoad, id()); + msg->setString("url", url); + + if (headers != NULL) { + msg->setPointer( + "headers", + new KeyedVector(*headers)); + } + + msg->post(); +} + +void SDPLoader::cancel() { + mCancelled = true; + sp HTTPDataSource = mHTTPDataSource; + HTTPDataSource->disconnect(); +} + +void SDPLoader::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatLoad: + onLoad(msg); + break; + + default: + TRESPASS(); + break; + } +} + +void SDPLoader::onLoad(const sp &msg) { + status_t err = OK; + sp desc = NULL; + AString url; + CHECK(msg->findString("url", &url)); + + KeyedVector *headers = NULL; + msg->findPointer("headers", (void **)&headers); + + if (!(mFlags & kFlagIncognito)) { + ALOGI("onLoad '%s'", url.c_str()); + } else { + ALOGI("onLoad "); + } + + if (!mCancelled) { + err = mHTTPDataSource->connect(url.c_str(), headers); + + if (err != OK) { + ALOGE("connect() returned %d", err); + } + } + + if (headers != NULL) { + delete headers; + headers = NULL; + } + + off64_t sdpSize; + if (err == OK && !mCancelled) { + err = mHTTPDataSource->getSize(&sdpSize); + + if (err != OK) { + //We did not get the size of the sdp file, default to a large value + sdpSize = DEFAULT_SDP_SIZE; + err = OK; + } + } + + sp buffer = new ABuffer(sdpSize); + + if (err == OK && !mCancelled) { + ssize_t readSize = mHTTPDataSource->readAt(0, buffer->data(), sdpSize); + + if (readSize < 0) { + ALOGE("Failed to read SDP, error code = %ld", readSize); + err = UNKNOWN_ERROR; + } else { + desc = new ASessionDescription; + + if (desc == NULL || !desc->setTo(buffer->data(), (size_t)readSize)) { + err = UNKNOWN_ERROR; + ALOGE("Failed to parse SDP"); + } + } + } + + mHTTPDataSource.clear(); + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatSDPLoaded); + notify->setInt32("result", err); + notify->setObject("description", desc); + notify->post(); +} + +} // namespace android -- cgit v1.1 From 84ca0414fedea2dfe51607b422f6227e1c4f0d7f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Roger=20J=C3=B6nsson?= Date: Thu, 17 Jan 2013 13:22:31 +0100 Subject: Detect live streams The information is used to decide on visibility of pause button and to handle the duration clock correctly. 
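
The heuristic added below can be summarized with a standalone sketch (the struct is a stand-in for the parsed session description, not the real ASessionDescription API): a session is treated as live when the SDP announces it explicitly via an "a=LiveStream" attribute, or when no duration can be derived from it. mSeekable then becomes the negation of this, which is what drives the pause button visibility and the duration clock mentioned above.

    // Sketch only: SdpInfo stands in for the parsed session description.
    struct SdpInfo {
        bool     hasLiveStreamAttr;   // an "a=LiveStream" attribute was present
        unsigned liveStreamValue;     // its leading integer value
        bool     hasDuration;         // a duration could be derived from the SDP
    };

    static bool isLiveStreamSketch(const SdpInfo &sdp) {
        if (sdp.hasLiveStreamAttr) {
            return sdp.liveStreamValue == 1;   // explicit announcement wins
        }
        return !sdp.hasDuration;               // no duration reported => assume live
    }
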
Change-Id: I286ac992fd171c7fc313e429326d38b6fc80e3fb --- media/libstagefright/rtsp/MyHandler.h | 44 +++++++++++++++++++++++++++++++---- 1 file changed, 39 insertions(+), 5 deletions(-) diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index b7183b1..cdd00e4 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -133,7 +133,7 @@ struct MyHandler : public AHandler { mTryFakeRTCP(false), mReceivedFirstRTCPPacket(false), mReceivedFirstRTPPacket(false), - mSeekable(false), + mSeekable(true), mKeepAliveTimeoutUs(kDefaultKeepAliveTimeoutUs), mKeepAliveGeneration(0) { mNetLooper->setName("rtsp net"); @@ -360,6 +360,39 @@ struct MyHandler : public AHandler { return true; } + static bool isLiveStream(const sp &desc) { + AString attrLiveStream; + if (desc->findAttribute(0, "a=LiveStream", &attrLiveStream)) { + ssize_t semicolonPos = attrLiveStream.find(";", 2); + + const char* liveStreamValue; + if (semicolonPos < 0) { + liveStreamValue = attrLiveStream.c_str(); + } else { + AString valString; + valString.setTo(attrLiveStream, + semicolonPos + 1, + attrLiveStream.size() - semicolonPos - 1); + liveStreamValue = valString.c_str(); + } + + uint32_t value = strtoul(liveStreamValue, NULL, 10); + if (value == 1) { + ALOGV("found live stream"); + return true; + } + } else { + // It is a live stream if no duration is returned + int64_t durationUs; + if (!desc->getDurationUs(&durationUs)) { + ALOGV("No duration found, assume live stream"); + return true; + } + } + + return false; + } + virtual void onMessageReceived(const sp &msg) { switch (msg->what()) { case 'conn': @@ -457,6 +490,8 @@ struct MyHandler : public AHandler { } } + mSeekable = !isLiveStream(mSessionDesc); + if (!mBaseURL.startsWith("rtsp://")) { // Some misbehaving servers specify a relative // URL in one of the locations above, combine @@ -518,6 +553,8 @@ struct MyHandler : public AHandler { } else { mBaseURL = mSessionURL; + mSeekable = !isLiveStream(mSessionDesc); + if (mSessionDesc->countTracks() < 2) { // There's no actual tracks in this session. // The first "track" is merely session meta @@ -783,7 +820,7 @@ struct MyHandler : public AHandler { mNumAccessUnitsReceived = 0; mReceivedFirstRTCPPacket = false; mReceivedFirstRTPPacket = false; - mSeekable = false; + mSeekable = true; sp reply = new AMessage('tear', id()); @@ -1143,7 +1180,6 @@ struct MyHandler : public AHandler { } void parsePlayResponse(const sp &response) { - mSeekable = false; if (mTracks.size() == 0) { ALOGV("parsePlayResponse: late packets ignored."); return; @@ -1218,8 +1254,6 @@ struct MyHandler : public AHandler { ++n; } - - mSeekable = true; } sp getTrackFormat(size_t index, int32_t *timeScale) { -- cgit v1.1 From 198a893671ce80d951625fe328a92073306660d0 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 5 Feb 2013 13:16:39 -0800 Subject: Fix a typo in MediaPlayerFactory that would prevent us from opting in to nuplayer for general media playback. 
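
Spelled out, the "typo": in the scoring macro, bestScore is a float confidence value while ret is the player_type that gets returned, so assigning the default player type to bestScore left ret untouched and the fallback never took effect. The tail of the macro after the fix, paraphrased from the hunk below:

    if (0.0 == bestScore) {
        ret = getDefaultPlayerType();   // fall back via ret, not the score variable
    }
    return ret;
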
Change-Id: I050f5178aadbb0b8bf422861ef885745c0b9006a --- media/libmediaplayerservice/MediaPlayerFactory.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp index bb441cc..1fb8b1a 100644 --- a/media/libmediaplayerservice/MediaPlayerFactory.cpp +++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp @@ -100,7 +100,7 @@ void MediaPlayerFactory::unregisterFactory(player_type type) { } \ \ if (0.0 == bestScore) { \ - bestScore = getDefaultPlayerType(); \ + ret = getDefaultPlayerType(); \ } \ \ return ret; -- cgit v1.1 From 9575c96b6e418914e2ffc6741ecc8d71e3968dbe Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 5 Feb 2013 13:59:56 -0800 Subject: Support for a "preparation" state that can take care of lengthy operations in NuPlayer and its sources. Sources also can publish their flags now and the mediaplayer UI will be able to pick up on these. Change-Id: I4f2b7e5d105dcb4b6c9132cd0e8799efa0c6a14b --- .../nuplayer/GenericSource.cpp | 24 +- .../libmediaplayerservice/nuplayer/GenericSource.h | 4 +- .../nuplayer/HTTPLiveSource.cpp | 35 +-- .../nuplayer/HTTPLiveSource.h | 3 +- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 97 +++++++- media/libmediaplayerservice/nuplayer/NuPlayer.h | 12 +- .../nuplayer/NuPlayerDriver.cpp | 247 ++++++++++++++++----- .../nuplayer/NuPlayerDriver.h | 30 ++- .../nuplayer/NuPlayerSource.h | 22 +- .../libmediaplayerservice/nuplayer/RTSPSource.cpp | 22 +- media/libmediaplayerservice/nuplayer/RTSPSource.h | 3 +- .../nuplayer/StreamingSource.cpp | 10 +- .../nuplayer/StreamingSource.h | 3 +- .../nuplayer/mp4/MP4Source.cpp | 10 +- .../libmediaplayerservice/nuplayer/mp4/MP4Source.h | 3 +- 15 files changed, 406 insertions(+), 119 deletions(-) diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp index 450fae5..b04e7a6 100644 --- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp +++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp @@ -106,6 +106,26 @@ void NuPlayer::GenericSource::initFromDataSource( NuPlayer::GenericSource::~GenericSource() { } +void NuPlayer::GenericSource::prepareAsync() { + if (mVideoTrack.mSource != NULL) { + sp meta = mVideoTrack.mSource->getFormat(); + + int32_t width, height; + CHECK(meta->findInt32(kKeyWidth, &width)); + CHECK(meta->findInt32(kKeyHeight, &height)); + + notifyVideoSizeChanged(width, height); + } + + notifyFlagsChanged( + FLAG_CAN_PAUSE + | FLAG_CAN_SEEK_BACKWARD + | FLAG_CAN_SEEK_FORWARD + | FLAG_CAN_SEEK); + + notifyPrepared(); +} + void NuPlayer::GenericSource::start() { ALOGI("start"); @@ -262,8 +282,4 @@ void NuPlayer::GenericSource::readBuffer( } } -uint32_t NuPlayer::GenericSource::flags() const { - return FLAG_SEEKABLE; -} - } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h index e59ea3a..2da680c 100644 --- a/media/libmediaplayerservice/nuplayer/GenericSource.h +++ b/media/libmediaplayerservice/nuplayer/GenericSource.h @@ -42,6 +42,8 @@ struct NuPlayer::GenericSource : public NuPlayer::Source { const sp ¬ify, int fd, int64_t offset, int64_t length); + virtual void prepareAsync(); + virtual void start(); virtual status_t feedMoreTSData(); @@ -51,8 +53,6 @@ struct NuPlayer::GenericSource : public NuPlayer::Source { virtual status_t getDuration(int64_t *durationUs); virtual status_t seekTo(int64_t seekTimeUs); - virtual 
uint32_t flags() const; - protected: virtual ~GenericSource(); diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp index d38ee62..ae67906 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp @@ -66,7 +66,7 @@ NuPlayer::HTTPLiveSource::~HTTPLiveSource() { } } -void NuPlayer::HTTPLiveSource::start() { +void NuPlayer::HTTPLiveSource::prepareAsync() { mLiveLooper = new ALooper; mLiveLooper->setName("http live"); mLiveLooper->start(); @@ -81,6 +81,26 @@ void NuPlayer::HTTPLiveSource::start() { mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders); mTSParser = new ATSParser; + + notifyVideoSizeChanged(0, 0); + + uint32_t flags = FLAG_CAN_PAUSE; + if (mLiveSession->isSeekable()) { + flags |= FLAG_CAN_SEEK; + flags |= FLAG_CAN_SEEK_BACKWARD; + flags |= FLAG_CAN_SEEK_FORWARD; + } + + if (mLiveSession->hasDynamicDuration()) { + flags |= FLAG_DYNAMIC_DURATION; + } + + notifyFlagsChanged(flags); + + notifyPrepared(); +} + +void NuPlayer::HTTPLiveSource::start() { } sp NuPlayer::HTTPLiveSource::getFormatMeta(bool audio) { @@ -194,18 +214,5 @@ status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) { return OK; } -uint32_t NuPlayer::HTTPLiveSource::flags() const { - uint32_t flags = 0; - if (mLiveSession->isSeekable()) { - flags |= FLAG_SEEKABLE; - } - - if (mLiveSession->hasDynamicDuration()) { - flags |= FLAG_DYNAMIC_DURATION; - } - - return flags; -} - } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h index 4a217af..269f3c0 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h @@ -34,6 +34,7 @@ struct NuPlayer::HTTPLiveSource : public NuPlayer::Source { bool uidValid = false, uid_t uid = 0); + virtual void prepareAsync(); virtual void start(); virtual status_t feedMoreTSData(); @@ -43,8 +44,6 @@ struct NuPlayer::HTTPLiveSource : public NuPlayer::Source { virtual status_t getDuration(int64_t *durationUs); virtual status_t seekTo(int64_t seekTimeUs); - virtual uint32_t flags() const; - protected: virtual ~HTTPLiveSource(); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index 0736fbe..78b94ba 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -112,6 +112,7 @@ private: NuPlayer::NuPlayer() : mUIDValid(false), + mSourceFlags(0), mVideoIsAVC(false), mAudioEOS(false), mVideoEOS(false), @@ -142,7 +143,7 @@ void NuPlayer::setDriver(const wp &driver) { mDriver = driver; } -void NuPlayer::setDataSource(const sp &source) { +void NuPlayer::setDataSourceAsync(const sp &source) { sp msg = new AMessage(kWhatSetDataSource, id()); sp notify = new AMessage(kWhatSourceNotify, id()); @@ -174,7 +175,7 @@ static bool IsHTTPLiveURL(const char *url) { return false; } -void NuPlayer::setDataSource( +void NuPlayer::setDataSourceAsync( const char *url, const KeyedVector *headers) { sp msg = new AMessage(kWhatSetDataSource, id()); size_t len = strlen(url); @@ -199,7 +200,7 @@ void NuPlayer::setDataSource( msg->post(); } -void NuPlayer::setDataSource(int fd, int64_t offset, int64_t length) { +void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) { sp msg = new AMessage(kWhatSetDataSource, id()); sp notify = new AMessage(kWhatSourceNotify, 
id()); @@ -209,6 +210,10 @@ void NuPlayer::setDataSource(int fd, int64_t offset, int64_t length) { msg->post(); } +void NuPlayer::prepareAsync() { + (new AMessage(kWhatPrepare, id()))->post(); +} + void NuPlayer::setVideoSurfaceTextureAsync( const sp &bufferProducer) { sp msg = new AMessage(kWhatSetVideoNativeWindow, id()); @@ -287,6 +292,18 @@ void NuPlayer::onMessageReceived(const sp &msg) { mSource = static_cast(obj.get()); looper()->registerHandler(mSource); + + CHECK(mDriver != NULL); + sp driver = mDriver.promote(); + if (driver != NULL) { + driver->notifySetDataSourceCompleted(OK); + } + break; + } + + case kWhatPrepare: + { + mSource->prepareAsync(); break; } @@ -403,9 +420,7 @@ void NuPlayer::onMessageReceived(const sp &msg) { && (mAudioDecoder != NULL || mVideoDecoder != NULL)) { // This is the first time we've found anything playable. - uint32_t flags = mSource->flags(); - - if (flags & Source::FLAG_DYNAMIC_DURATION) { + if (mSourceFlags & Source::FLAG_DYNAMIC_DURATION) { schedulePollDuration(); } } @@ -730,7 +745,7 @@ void NuPlayer::onMessageReceived(const sp &msg) { case kWhatSourceNotify: { - TRESPASS(); // TBD + onSourceNotify(msg); break; } @@ -1233,8 +1248,76 @@ void NuPlayer::performSetSurface(const sp &wrapper) { } } +void NuPlayer::onSourceNotify(const sp &msg) { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case Source::kWhatPrepared: + { + sp driver = mDriver.promote(); + if (driver != NULL) { + driver->notifyPrepareCompleted(OK); + } + break; + } + + case Source::kWhatFlagsChanged: + { + uint32_t flags; + CHECK(msg->findInt32("flags", (int32_t *)&flags)); + + if ((mSourceFlags & Source::FLAG_DYNAMIC_DURATION) + && (!(flags & Source::FLAG_DYNAMIC_DURATION))) { + cancelPollDuration(); + } else if (!(mSourceFlags & Source::FLAG_DYNAMIC_DURATION) + && (flags & Source::FLAG_DYNAMIC_DURATION) + && (mAudioDecoder != NULL || mVideoDecoder != NULL)) { + schedulePollDuration(); + } + + mSourceFlags = flags; + break; + } + + case Source::kWhatVideoSizeChanged: + { + int32_t width, height; + CHECK(msg->findInt32("width", &width)); + CHECK(msg->findInt32("height", &height)); + + notifyListener(MEDIA_SET_VIDEO_SIZE, width, height); + break; + } + + default: + TRESPASS(); + } +} + //////////////////////////////////////////////////////////////////////////////// +void NuPlayer::Source::notifyFlagsChanged(uint32_t flags) { + sp notify = dupNotify(); + notify->setInt32("what", kWhatFlagsChanged); + notify->setInt32("flags", flags); + notify->post(); +} + +void NuPlayer::Source::notifyVideoSizeChanged(int32_t width, int32_t height) { + sp notify = dupNotify(); + notify->setInt32("what", kWhatVideoSizeChanged); + notify->setInt32("width", width); + notify->setInt32("height", height); + notify->post(); +} + +void NuPlayer::Source::notifyPrepared() { + sp notify = dupNotify(); + notify->setInt32("what", kWhatPrepared); + notify->post(); +} + void NuPlayer::Source::onMessageReceived(const sp &msg) { TRESPASS(); } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h index 0ff6089..50d0462 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h @@ -35,12 +35,14 @@ struct NuPlayer : public AHandler { void setDriver(const wp &driver); - void setDataSource(const sp &source); + void setDataSourceAsync(const sp &source); - void setDataSource( + void setDataSourceAsync( const char *url, const KeyedVector *headers); - void setDataSource(int fd, int64_t 
offset, int64_t length); + void setDataSourceAsync(int fd, int64_t offset, int64_t length); + + void prepareAsync(); void setVideoSurfaceTextureAsync( const sp &bufferProducer); @@ -82,6 +84,7 @@ private: enum { kWhatSetDataSource = '=DaS', + kWhatPrepare = 'prep', kWhatSetVideoNativeWindow = '=NaW', kWhatSetAudioSink = '=AuS', kWhatMoreDataQueued = 'more', @@ -102,6 +105,7 @@ private: bool mUIDValid; uid_t mUID; sp mSource; + uint32_t mSourceFlags; sp mNativeWindow; sp mAudioSink; sp mVideoDecoder; @@ -173,6 +177,8 @@ private: void performScanSources(); void performSetSurface(const sp &wrapper); + void onSourceNotify(const sp &msg); + DISALLOW_EVIL_CONSTRUCTORS(NuPlayer); }; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index 7043404..ab7b4e8 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -21,21 +21,24 @@ #include "NuPlayerDriver.h" #include "NuPlayer.h" +#include "NuPlayerSource.h" #include #include +#include namespace android { NuPlayerDriver::NuPlayerDriver() - : mResetInProgress(false), + : mState(STATE_IDLE), + mAsyncResult(UNKNOWN_ERROR), mSetSurfaceInProgress(false), mDurationUs(-1), mPositionUs(-1), mNumFramesTotal(0), mNumFramesDropped(0), mLooper(new ALooper), - mState(UNINITIALIZED), + mPlayerFlags(0), mAtEOS(false), mStartupSeekTimeUs(-1) { mLooper->setName("NuPlayerDriver Looper"); @@ -67,43 +70,76 @@ status_t NuPlayerDriver::setUID(uid_t uid) { status_t NuPlayerDriver::setDataSource( const char *url, const KeyedVector *headers) { - CHECK_EQ((int)mState, (int)UNINITIALIZED); + Mutex::Autolock autoLock(mLock); - mPlayer->setDataSource(url, headers); + if (mState != STATE_IDLE) { + return INVALID_OPERATION; + } - mState = STOPPED; + mState = STATE_SET_DATASOURCE_PENDING; - return OK; + mPlayer->setDataSourceAsync(url, headers); + + while (mState == STATE_SET_DATASOURCE_PENDING) { + mCondition.wait(mLock); + } + + return mAsyncResult; } status_t NuPlayerDriver::setDataSource(int fd, int64_t offset, int64_t length) { - CHECK_EQ((int)mState, (int)UNINITIALIZED); + Mutex::Autolock autoLock(mLock); - mPlayer->setDataSource(fd, offset, length); + if (mState != STATE_IDLE) { + return INVALID_OPERATION; + } - mState = STOPPED; + mState = STATE_SET_DATASOURCE_PENDING; - return OK; + mPlayer->setDataSourceAsync(fd, offset, length); + + while (mState == STATE_SET_DATASOURCE_PENDING) { + mCondition.wait(mLock); + } + + return mAsyncResult; } status_t NuPlayerDriver::setDataSource(const sp &source) { - CHECK_EQ((int)mState, (int)UNINITIALIZED); + Mutex::Autolock autoLock(mLock); - mPlayer->setDataSource(source); + if (mState != STATE_IDLE) { + return INVALID_OPERATION; + } - mState = STOPPED; + mState = STATE_SET_DATASOURCE_PENDING; - return OK; + mPlayer->setDataSourceAsync(source); + + while (mState == STATE_SET_DATASOURCE_PENDING) { + mCondition.wait(mLock); + } + + return mAsyncResult; } status_t NuPlayerDriver::setVideoSurfaceTexture( const sp &bufferProducer) { Mutex::Autolock autoLock(mLock); - if (mResetInProgress) { + if (mSetSurfaceInProgress) { return INVALID_OPERATION; } + switch (mState) { + case STATE_SET_DATASOURCE_PENDING: + case STATE_RESET_IN_PROGRESS: + return INVALID_OPERATION; + + default: + break; + } + mSetSurfaceInProgress = true; mPlayer->setVideoSurfaceTextureAsync(bufferProducer); @@ -116,23 +152,55 @@ status_t NuPlayerDriver::setVideoSurfaceTexture( } status_t NuPlayerDriver::prepare() { - 
sendEvent(MEDIA_SET_VIDEO_SIZE, 0, 0); - return OK; + Mutex::Autolock autoLock(mLock); + return prepare_l(); } -status_t NuPlayerDriver::prepareAsync() { - status_t err = prepare(); +status_t NuPlayerDriver::prepare_l() { + switch (mState) { + case STATE_UNPREPARED: + mState = STATE_PREPARING; + mPlayer->prepareAsync(); + while (mState == STATE_PREPARING) { + mCondition.wait(mLock); + } + return (mState == STATE_PREPARED) ? OK : UNKNOWN_ERROR; + default: + return INVALID_OPERATION; + }; +} - notifyListener(MEDIA_PREPARED); +status_t NuPlayerDriver::prepareAsync() { + Mutex::Autolock autoLock(mLock); - return err; + switch (mState) { + case STATE_UNPREPARED: + mState = STATE_PREPARING; + mPlayer->prepareAsync(); + return OK; + default: + return INVALID_OPERATION; + }; } status_t NuPlayerDriver::start() { + Mutex::Autolock autoLock(mLock); + switch (mState) { - case UNINITIALIZED: - return INVALID_OPERATION; - case STOPPED: + case STATE_UNPREPARED: + { + status_t err = prepare_l(); + + if (err != OK) { + return err; + } + + CHECK_EQ(mState, STATE_PREPARED); + + // fall through + } + + case STATE_PREPARED: { mAtEOS = false; mPlayer->start(); @@ -146,21 +214,23 @@ status_t NuPlayerDriver::start() { mStartupSeekTimeUs = -1; } - break; } - case PLAYING: - return OK; - default: - { - CHECK_EQ((int)mState, (int)PAUSED); + case STATE_RUNNING: + break; + + case STATE_PAUSED: + { mPlayer->resume(); break; } + + default: + return INVALID_OPERATION; } - mState = PLAYING; + mState = STATE_RUNNING; return OK; } @@ -170,43 +240,44 @@ status_t NuPlayerDriver::stop() { } status_t NuPlayerDriver::pause() { + Mutex::Autolock autoLock(mLock); + switch (mState) { - case UNINITIALIZED: - return INVALID_OPERATION; - case STOPPED: + case STATE_PAUSED: + case STATE_PREPARED: return OK; - case PLAYING: + + case STATE_RUNNING: mPlayer->pause(); break; + default: - { - CHECK_EQ((int)mState, (int)PAUSED); - return OK; - } + return INVALID_OPERATION; } - mState = PAUSED; + mState = STATE_PAUSED; return OK; } bool NuPlayerDriver::isPlaying() { - return mState == PLAYING && !mAtEOS; + return mState == STATE_RUNNING && !mAtEOS; } status_t NuPlayerDriver::seekTo(int msec) { + Mutex::Autolock autoLock(mLock); + int64_t seekTimeUs = msec * 1000ll; switch (mState) { - case UNINITIALIZED: - return INVALID_OPERATION; - case STOPPED: + case STATE_PREPARED: { mStartupSeekTimeUs = seekTimeUs; break; } - case PLAYING: - case PAUSED: + + case STATE_RUNNING: + case STATE_PAUSED: { mAtEOS = false; mPlayer->seekToAsync(seekTimeUs); @@ -214,8 +285,7 @@ status_t NuPlayerDriver::seekTo(int msec) { } default: - TRESPASS(); - break; + return INVALID_OPERATION; } return OK; @@ -247,17 +317,28 @@ status_t NuPlayerDriver::getDuration(int *msec) { status_t NuPlayerDriver::reset() { Mutex::Autolock autoLock(mLock); - mResetInProgress = true; + switch (mState) { + case STATE_IDLE: + return OK; + + case STATE_SET_DATASOURCE_PENDING: + case STATE_RESET_IN_PROGRESS: + return INVALID_OPERATION; + + default: + break; + } + + mState = STATE_RESET_IN_PROGRESS; mPlayer->resetAsync(); - while (mResetInProgress) { + while (mState == STATE_RESET_IN_PROGRESS) { mCondition.wait(mLock); } mDurationUs = -1; mPositionUs = -1; - mState = UNINITIALIZED; mStartupSeekTimeUs = -1; return OK; @@ -311,20 +392,45 @@ status_t NuPlayerDriver::getParameter(int key, Parcel *reply) { status_t NuPlayerDriver::getMetadata( const media::Metadata::Filter& ids, Parcel *records) { - return INVALID_OPERATION; + Mutex::Autolock autoLock(mLock); + + using media::Metadata; + + Metadata 
meta(records); + + meta.appendBool( + Metadata::kPauseAvailable, + mPlayerFlags & NuPlayer::Source::FLAG_CAN_PAUSE); + + meta.appendBool( + Metadata::kSeekBackwardAvailable, + mPlayerFlags & NuPlayer::Source::FLAG_CAN_SEEK_BACKWARD); + + meta.appendBool( + Metadata::kSeekForwardAvailable, + mPlayerFlags & NuPlayer::Source::FLAG_CAN_SEEK_FORWARD); + + meta.appendBool( + Metadata::kSeekAvailable, + mPlayerFlags & NuPlayer::Source::FLAG_CAN_SEEK); + + return OK; } void NuPlayerDriver::notifyResetComplete() { Mutex::Autolock autoLock(mLock); - CHECK(mResetInProgress); - mResetInProgress = false; + + CHECK_EQ(mState, STATE_RESET_IN_PROGRESS); + mState = STATE_IDLE; mCondition.broadcast(); } void NuPlayerDriver::notifySetSurfaceComplete() { Mutex::Autolock autoLock(mLock); + CHECK(mSetSurfaceInProgress); mSetSurfaceInProgress = false; + mCondition.broadcast(); } @@ -376,4 +482,37 @@ void NuPlayerDriver::notifyListener(int msg, int ext1, int ext2) { sendEvent(msg, ext1, ext2); } +void NuPlayerDriver::notifySetDataSourceCompleted(status_t err) { + Mutex::Autolock autoLock(mLock); + + CHECK_EQ(mState, STATE_SET_DATASOURCE_PENDING); + + mAsyncResult = err; + mState = (err == OK) ? STATE_UNPREPARED : STATE_IDLE; + mCondition.broadcast(); +} + +void NuPlayerDriver::notifyPrepareCompleted(status_t err) { + Mutex::Autolock autoLock(mLock); + + CHECK_EQ(mState, STATE_PREPARING); + + mAsyncResult = err; + + if (err == OK) { + notifyListener(MEDIA_PREPARED); + mState = STATE_PREPARED; + } else { + mState = STATE_UNPREPARED; + } + + mCondition.broadcast(); +} + +void NuPlayerDriver::notifyFlagsChanged(uint32_t flags) { + Mutex::Autolock autoLock(mLock); + + mPlayerFlags = flags; +} + } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h index 553c406..49b8ed2 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h @@ -61,6 +61,8 @@ struct NuPlayerDriver : public MediaPlayerInterface { virtual status_t dump(int fd, const Vector &args) const; + void notifySetDataSourceCompleted(status_t err); + void notifyPrepareCompleted(status_t err); void notifyResetComplete(); void notifySetSurfaceComplete(); void notifyDuration(int64_t durationUs); @@ -68,17 +70,32 @@ struct NuPlayerDriver : public MediaPlayerInterface { void notifySeekComplete(); void notifyFrameStats(int64_t numFramesTotal, int64_t numFramesDropped); void notifyListener(int msg, int ext1 = 0, int ext2 = 0); + void notifyFlagsChanged(uint32_t flags); protected: virtual ~NuPlayerDriver(); private: + enum State { + STATE_IDLE, + STATE_SET_DATASOURCE_PENDING, + STATE_UNPREPARED, + STATE_PREPARING, + STATE_PREPARED, + STATE_RUNNING, + STATE_PAUSED, + STATE_RESET_IN_PROGRESS, + }; + mutable Mutex mLock; Condition mCondition; + State mState; + + status_t mAsyncResult; + // The following are protected through "mLock" // >>> - bool mResetInProgress; bool mSetSurfaceInProgress; int64_t mDurationUs; int64_t mPositionUs; @@ -88,19 +105,14 @@ private: sp mLooper; sp mPlayer; + uint32_t mPlayerFlags; - enum State { - UNINITIALIZED, - STOPPED, - PLAYING, - PAUSED - }; - - State mState; bool mAtEOS; int64_t mStartupSeekTimeUs; + status_t prepare_l(); + DISALLOW_EVIL_CONSTRUCTORS(NuPlayerDriver); }; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h index a3201cf..53c7c12 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h 
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h @@ -25,11 +25,21 @@ namespace android { struct ABuffer; +struct MetaData; struct NuPlayer::Source : public AHandler { enum Flags { - FLAG_SEEKABLE = 1, - FLAG_DYNAMIC_DURATION = 2, + FLAG_CAN_PAUSE = 1, + FLAG_CAN_SEEK_BACKWARD = 2, // the "10 sec back button" + FLAG_CAN_SEEK_FORWARD = 4, // the "10 sec forward button" + FLAG_CAN_SEEK = 8, // the "seek bar" + FLAG_DYNAMIC_DURATION = 16, + }; + + enum { + kWhatPrepared, + kWhatFlagsChanged, + kWhatVideoSizeChanged, }; // The provides message is used to notify the player about various @@ -38,6 +48,8 @@ struct NuPlayer::Source : public AHandler { : mNotify(notify) { } + virtual void prepareAsync() = 0; + virtual void start() = 0; virtual void stop() {} @@ -58,8 +70,6 @@ struct NuPlayer::Source : public AHandler { return INVALID_OPERATION; } - virtual uint32_t flags() const = 0; - protected: virtual ~Source() {} @@ -69,6 +79,10 @@ protected: sp dupNotify() const { return mNotify->dup(); } + void notifyFlagsChanged(uint32_t flags); + void notifyVideoSizeChanged(int32_t width, int32_t height); + void notifyPrepared(); + private: sp mNotify; diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp index 3035589..e4d72d9 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp @@ -65,7 +65,7 @@ NuPlayer::RTSPSource::~RTSPSource() { mLooper->stop(); } -void NuPlayer::RTSPSource::start() { +void NuPlayer::RTSPSource::prepareAsync() { if (mLooper == NULL) { mLooper = new ALooper; mLooper->setName("rtsp"); @@ -88,13 +88,27 @@ void NuPlayer::RTSPSource::start() { (mFlags & kFlagIncognito) ? SDPLoader::kFlagIncognito : 0, mUIDValid, mUID); - mSDPLoader->load(mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders); + mSDPLoader->load( + mURL.c_str(), mExtraHeaders.isEmpty() ? 
NULL : &mExtraHeaders); } else { mHandler = new MyHandler(mURL.c_str(), notify, mUIDValid, mUID); mLooper->registerHandler(mHandler); mHandler->connect(); } + + notifyVideoSizeChanged(0, 0); + + notifyFlagsChanged( + FLAG_CAN_PAUSE + | FLAG_CAN_SEEK_BACKWARD + | FLAG_CAN_SEEK_FORWARD + | FLAG_CAN_SEEK); + + notifyPrepared(); +} + +void NuPlayer::RTSPSource::start() { } void NuPlayer::RTSPSource::stop() { @@ -225,10 +239,6 @@ void NuPlayer::RTSPSource::performSeek(int64_t seekTimeUs) { mHandler->seek(seekTimeUs); } -uint32_t NuPlayer::RTSPSource::flags() const { - return FLAG_SEEKABLE; -} - void NuPlayer::RTSPSource::onMessageReceived(const sp &msg) { if (msg->what() == kWhatDisconnect) { uint32_t replyID; diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h index b2a7dae..cbb6f90 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.h +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h @@ -40,6 +40,7 @@ struct NuPlayer::RTSPSource : public NuPlayer::Source { uid_t uid = 0, bool isSDP = false); + virtual void prepareAsync(); virtual void start(); virtual void stop(); @@ -50,8 +51,6 @@ struct NuPlayer::RTSPSource : public NuPlayer::Source { virtual status_t getDuration(int64_t *durationUs); virtual status_t seekTo(int64_t seekTimeUs); - virtual uint32_t flags() const; - void onMessageReceived(const sp &msg); protected: diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp index 9b04833..df03f86 100644 --- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp +++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp @@ -43,6 +43,12 @@ NuPlayer::StreamingSource::StreamingSource( NuPlayer::StreamingSource::~StreamingSource() { } +void NuPlayer::StreamingSource::prepareAsync() { + notifyVideoSizeChanged(0, 0); + notifyFlagsChanged(0); + notifyPrepared(); +} + void NuPlayer::StreamingSource::start() { mStreamListener = new NuPlayerStreamListener(mSource, 0); @@ -176,9 +182,5 @@ status_t NuPlayer::StreamingSource::dequeueAccessUnit( return err; } -uint32_t NuPlayer::StreamingSource::flags() const { - return 0; -} - } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h b/media/libmediaplayerservice/nuplayer/StreamingSource.h index dc616f7..80b061c 100644 --- a/media/libmediaplayerservice/nuplayer/StreamingSource.h +++ b/media/libmediaplayerservice/nuplayer/StreamingSource.h @@ -31,14 +31,13 @@ struct NuPlayer::StreamingSource : public NuPlayer::Source { const sp ¬ify, const sp &source); + virtual void prepareAsync(); virtual void start(); virtual status_t feedMoreTSData(); virtual status_t dequeueAccessUnit(bool audio, sp *accessUnit); - virtual uint32_t flags() const; - protected: virtual ~StreamingSource(); diff --git a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp index d659b73..d31d947 100644 --- a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp +++ b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp @@ -117,6 +117,12 @@ MP4Source::MP4Source( MP4Source::~MP4Source() { } +void MP4Source::prepareAsync() { + notifyVideoSizeChanged(0, 0); + notifyFlagsChanged(0); + notifyPrepared(); +} + void MP4Source::start() { mLooper->start(false /* runOnCallingThread */); mParser->start(new StreamSource(mSource)); @@ -135,8 +141,4 @@ status_t MP4Source::dequeueAccessUnit( return mParser->dequeueAccessUnit(audio, accessUnit); } -uint32_t 
MP4Source::flags() const { - return 0; -} - } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h index b16a111..a6ef622 100644 --- a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h +++ b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h @@ -26,6 +26,7 @@ struct FragmentedMP4Parser; struct MP4Source : public NuPlayer::Source { MP4Source(const sp ¬ify, const sp &source); + virtual void prepareAsync(); virtual void start(); virtual status_t feedMoreTSData(); @@ -35,8 +36,6 @@ struct MP4Source : public NuPlayer::Source { virtual status_t dequeueAccessUnit( bool audio, sp *accessUnit); - virtual uint32_t flags() const; - protected: virtual ~MP4Source(); -- cgit v1.1 From ec0c597cabf169ca646bcea5faac1bd81ed4484d Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 5 Feb 2013 14:47:13 -0800 Subject: RTSP now properly publishes its "seekable" flags after connection has successfully completed and only then signals that preparation is complete. Change-Id: I1a60f718e673fe1462c69369c40eafbed6a14326 --- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 8 +++-- .../nuplayer/NuPlayerDriver.cpp | 14 +++++++- .../nuplayer/NuPlayerDriver.h | 1 + .../nuplayer/NuPlayerSource.h | 2 +- .../libmediaplayerservice/nuplayer/RTSPSource.cpp | 41 ++++++++++++++++------ media/libstagefright/rtsp/MyHandler.h | 4 +++ 6 files changed, 56 insertions(+), 14 deletions(-) diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index 78b94ba..bcefe63 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -1255,9 +1255,12 @@ void NuPlayer::onSourceNotify(const sp &msg) { switch (what) { case Source::kWhatPrepared: { + int32_t err; + CHECK(msg->findInt32("err", &err)); + sp driver = mDriver.promote(); if (driver != NULL) { - driver->notifyPrepareCompleted(OK); + driver->notifyPrepareCompleted(err); } break; } @@ -1312,9 +1315,10 @@ void NuPlayer::Source::notifyVideoSizeChanged(int32_t width, int32_t height) { notify->post(); } -void NuPlayer::Source::notifyPrepared() { +void NuPlayer::Source::notifyPrepared(status_t err) { sp notify = dupNotify(); notify->setInt32("what", kWhatPrepared); + notify->setInt32("err", err); notify->post(); } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index ab7b4e8..3c63e80 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -31,6 +31,7 @@ namespace android { NuPlayerDriver::NuPlayerDriver() : mState(STATE_IDLE), + mIsAsyncPrepare(false), mAsyncResult(UNKNOWN_ERROR), mSetSurfaceInProgress(false), mDurationUs(-1), @@ -160,6 +161,11 @@ status_t NuPlayerDriver::prepare_l() { switch (mState) { case STATE_UNPREPARED: mState = STATE_PREPARING; + + // Make sure we're not posting any notifications, success or + // failure information is only communicated through our result + // code. 
+ mIsAsyncPrepare = false; mPlayer->prepareAsync(); while (mState == STATE_PREPARING) { mCondition.wait(mLock); @@ -176,6 +182,7 @@ status_t NuPlayerDriver::prepareAsync() { switch (mState) { case STATE_UNPREPARED: mState = STATE_PREPARING; + mIsAsyncPrepare = true; mPlayer->prepareAsync(); return OK; default: @@ -500,9 +507,14 @@ void NuPlayerDriver::notifyPrepareCompleted(status_t err) { mAsyncResult = err; if (err == OK) { - notifyListener(MEDIA_PREPARED); + if (mIsAsyncPrepare) { + notifyListener(MEDIA_PREPARED); + } mState = STATE_PREPARED; } else { + if (mIsAsyncPrepare) { + notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err); + } mState = STATE_UNPREPARED; } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h index 49b8ed2..5df0cfb 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h @@ -92,6 +92,7 @@ private: State mState; + bool mIsAsyncPrepare; status_t mAsyncResult; // The following are protected through "mLock" diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h index 53c7c12..f5d4c38 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h @@ -81,7 +81,7 @@ protected: void notifyFlagsChanged(uint32_t flags); void notifyVideoSizeChanged(int32_t width, int32_t height); - void notifyPrepared(); + void notifyPrepared(status_t err = OK); private: sp mNotify; diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp index e4d72d9..d787647 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp @@ -96,16 +96,6 @@ void NuPlayer::RTSPSource::prepareAsync() { mHandler->connect(); } - - notifyVideoSizeChanged(0, 0); - - notifyFlagsChanged( - FLAG_CAN_PAUSE - | FLAG_CAN_SEEK_BACKWARD - | FLAG_CAN_SEEK_FORWARD - | FLAG_CAN_SEEK); - - notifyPrepared(); } void NuPlayer::RTSPSource::start() { @@ -270,12 +260,31 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp &msg) { switch (what) { case MyHandler::kWhatConnected: + { onConnected(); + + notifyVideoSizeChanged(0, 0); + + uint32_t flags = 0; + + if (mHandler->isSeekable()) { + flags = FLAG_CAN_PAUSE | FLAG_CAN_SEEK; + + // Seeking 10secs forward or backward is a very expensive + // operation for rtsp, so let's not enable that. + // The user can always use the seek bar. + } + + notifyFlagsChanged(flags); + notifyPrepared(); break; + } case MyHandler::kWhatDisconnected: + { onDisconnected(msg); break; + } case MyHandler::kWhatSeekDone: { @@ -520,6 +529,12 @@ void NuPlayer::RTSPSource::onSDPLoaded(const sp &msg) { } if (err != OK) { + if (mState == CONNECTING) { + // We're still in the preparation phase, signal that it + // failed. + notifyPrepared(err); + } + mState = DISCONNECTED; mFinalResult = err; @@ -537,6 +552,12 @@ void NuPlayer::RTSPSource::onDisconnected(const sp &msg) { mLooper->unregisterHandler(mHandler->id()); mHandler.clear(); + if (mState == CONNECTING) { + // We're still in the preparation phase, signal that it + // failed. 
+ notifyPrepared(err); + } + mState = DISCONNECTED; mFinalResult = err; diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index cdd00e4..aa64060 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -195,6 +195,10 @@ struct MyHandler : public AHandler { msg->post(); } + bool isSeekable() const { + return mSeekable; + } + static void addRR(const sp &buf) { uint8_t *ptr = buf->data() + buf->size(); ptr[0] = 0x80 | 0; -- cgit v1.1 From 0df36ec3303c2c6bf9b42c07945ac8bd234153f3 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 6 Feb 2013 10:44:39 -0800 Subject: HLS now properly publishes its "seekable" flags after connection has successfully completed and a sufficient amount of data fetched, and only then signals that preparation is completed. Change-Id: I7684a14238b826909f518f2af506966e522dfcfc --- cmds/stagefright/stagefright.cpp | 3 +- .../nuplayer/HTTPLiveSource.cpp | 74 +++++++++++++++++----- .../nuplayer/HTTPLiveSource.h | 8 +++ media/libstagefright/foundation/ALooperRoster.cpp | 3 +- media/libstagefright/httplive/LiveSession.cpp | 53 +++++++++++++--- media/libstagefright/include/LiveSession.h | 15 ++++- 6 files changed, 127 insertions(+), 29 deletions(-) diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 2b935ed..2aae64d 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -30,6 +30,7 @@ #include #include #include +#include #include "include/LiveSession.h" #include "include/NuCachedSource2.h" #include @@ -1004,7 +1005,7 @@ int main(int argc, char **argv) { looper = new ALooper; looper->start(); } - liveSession = new LiveSession; + liveSession = new LiveSession(NULL /* notify */); looper->registerHandler(liveSession); liveSession->connect(uri.string()); diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp index ae67906..655ee55 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp @@ -71,7 +71,10 @@ void NuPlayer::HTTPLiveSource::prepareAsync() { mLiveLooper->setName("http live"); mLiveLooper->start(); + sp notify = new AMessage(kWhatSessionNotify, id()); + mLiveSession = new LiveSession( + notify, (mFlags & kFlagIncognito) ? LiveSession::kFlagIncognito : 0, mUIDValid, mUID); @@ -81,23 +84,6 @@ void NuPlayer::HTTPLiveSource::prepareAsync() { mURL.c_str(), mExtraHeaders.isEmpty() ? 
NULL : &mExtraHeaders); mTSParser = new ATSParser; - - notifyVideoSizeChanged(0, 0); - - uint32_t flags = FLAG_CAN_PAUSE; - if (mLiveSession->isSeekable()) { - flags |= FLAG_CAN_SEEK; - flags |= FLAG_CAN_SEEK_BACKWARD; - flags |= FLAG_CAN_SEEK_FORWARD; - } - - if (mLiveSession->hasDynamicDuration()) { - flags |= FLAG_DYNAMIC_DURATION; - } - - notifyFlagsChanged(flags); - - notifyPrepared(); } void NuPlayer::HTTPLiveSource::start() { @@ -214,5 +200,59 @@ status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) { return OK; } +void NuPlayer::HTTPLiveSource::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatSessionNotify: + { + onSessionNotify(msg); + break; + } + + default: + Source::onMessageReceived(msg); + break; + } +} + +void NuPlayer::HTTPLiveSource::onSessionNotify(const sp &msg) { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case LiveSession::kWhatPrepared: + { + notifyVideoSizeChanged(0, 0); + + uint32_t flags = FLAG_CAN_PAUSE; + if (mLiveSession->isSeekable()) { + flags |= FLAG_CAN_SEEK; + flags |= FLAG_CAN_SEEK_BACKWARD; + flags |= FLAG_CAN_SEEK_FORWARD; + } + + if (mLiveSession->hasDynamicDuration()) { + flags |= FLAG_DYNAMIC_DURATION; + } + + notifyFlagsChanged(flags); + + notifyPrepared(); + break; + } + + case LiveSession::kWhatPreparationFailed: + { + status_t err; + CHECK(msg->findInt32("err", &err)); + + notifyPrepared(err); + break; + } + + default: + TRESPASS(); + } +} + } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h index 269f3c0..067d1da 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h @@ -49,12 +49,18 @@ protected: virtual sp getFormatMeta(bool audio); + virtual void onMessageReceived(const sp &msg); + private: enum Flags { // Don't log any URLs. kFlagIncognito = 1, }; + enum { + kWhatSessionNotify, + }; + AString mURL; KeyedVector mExtraHeaders; bool mUIDValid; @@ -66,6 +72,8 @@ private: sp mLiveSession; sp mTSParser; + void onSessionNotify(const sp &msg); + DISALLOW_EVIL_CONSTRUCTORS(HTTPLiveSource); }; diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp index dff931d..ad10d2b 100644 --- a/media/libstagefright/foundation/ALooperRoster.cpp +++ b/media/libstagefright/foundation/ALooperRoster.cpp @@ -82,7 +82,8 @@ status_t ALooperRoster::postMessage_l( ssize_t index = mHandlers.indexOfKey(msg->target()); if (index < 0) { - ALOGW("failed to post message. Target handler not registered."); + ALOGW("failed to post message '%s'. 
Target handler not registered.", + msg->debugString().c_str()); return -ENOENT; } diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp index 733753b..962b01c 100644 --- a/media/libstagefright/httplive/LiveSession.cpp +++ b/media/libstagefright/httplive/LiveSession.cpp @@ -40,10 +40,13 @@ namespace android { -LiveSession::LiveSession(uint32_t flags, bool uidValid, uid_t uid) - : mFlags(flags), +LiveSession::LiveSession( + const sp ¬ify, uint32_t flags, bool uidValid, uid_t uid) + : mNotify(notify), + mFlags(flags), mUIDValid(uidValid), mUID(uid), + mInPreparationPhase(true), mDataSource(new LiveDataSource), mHTTPDataSource( HTTPBase::Create( @@ -179,7 +182,7 @@ void LiveSession::onConnect(const sp &msg) { if (playlist == NULL) { ALOGE("unable to fetch master playlist '%s'.", url.c_str()); - mDataSource->queueEOS(ERROR_IO); + signalEOS(ERROR_IO); return; } @@ -207,7 +210,7 @@ void LiveSession::onConnect(const sp &msg) { void LiveSession::onDisconnect() { ALOGI("onDisconnect"); - mDataSource->queueEOS(ERROR_END_OF_STREAM); + signalEOS(ERROR_END_OF_STREAM); Mutex::Autolock autoLock(mLock); mDisconnectPending = false; @@ -561,7 +564,8 @@ rinse_repeat: // unchanged from the last time we tried. } else { ALOGE("failed to load playlist at url '%s'", url.c_str()); - mDataSource->queueEOS(ERROR_IO); + signalEOS(ERROR_IO); + return; } } else { @@ -704,7 +708,7 @@ rinse_repeat: mSeqNumber, firstSeqNumberInPlaylist, firstSeqNumberInPlaylist + mPlaylist->size() - 1); - mDataSource->queueEOS(ERROR_END_OF_STREAM); + signalEOS(ERROR_END_OF_STREAM); return; } } @@ -737,7 +741,7 @@ rinse_repeat: status_t err = fetchFile(uri.c_str(), &buffer, range_offset, range_length); if (err != OK) { ALOGE("failed to fetch .ts segment at url '%s'", uri.c_str()); - mDataSource->queueEOS(err); + signalEOS(err); return; } @@ -748,7 +752,7 @@ rinse_repeat: if (err != OK) { ALOGE("decryptBuffer failed w/ error %d", err); - mDataSource->queueEOS(err); + signalEOS(err); return; } @@ -760,7 +764,7 @@ rinse_repeat: mBandwidthItems.removeAt(bandwidthIndex); if (mBandwidthItems.isEmpty()) { - mDataSource->queueEOS(ERROR_UNSUPPORTED); + signalEOS(ERROR_UNSUPPORTED); return; } @@ -824,11 +828,42 @@ rinse_repeat: postMonitorQueue(); } +void LiveSession::signalEOS(status_t err) { + if (mInPreparationPhase && mNotify != NULL) { + sp notify = mNotify->dup(); + + notify->setInt32( + "what", + err == ERROR_END_OF_STREAM + ? kWhatPrepared : kWhatPreparationFailed); + + if (err != ERROR_END_OF_STREAM) { + notify->setInt32("err", err); + } + + notify->post(); + + mInPreparationPhase = false; + } + + mDataSource->queueEOS(err); +} + void LiveSession::onMonitorQueue() { if (mSeekTimeUs >= 0 || mDataSource->countQueuedBuffers() < kMaxNumQueuedFragments) { onDownloadNext(); } else { + if (mInPreparationPhase) { + if (mNotify != NULL) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatPrepared); + notify->post(); + } + + mInPreparationPhase = false; + } + postMonitorQueue(1000000ll); } } diff --git a/media/libstagefright/include/LiveSession.h b/media/libstagefright/include/LiveSession.h index f329cc9..db44a33 100644 --- a/media/libstagefright/include/LiveSession.h +++ b/media/libstagefright/include/LiveSession.h @@ -35,7 +35,9 @@ struct LiveSession : public AHandler { // Don't log any URLs. 
kFlagIncognito = 1, }; - LiveSession(uint32_t flags = 0, bool uidValid = false, uid_t uid = 0); + LiveSession( + const sp ¬ify, + uint32_t flags = 0, bool uidValid = false, uid_t uid = 0); sp getDataSource(); @@ -53,6 +55,12 @@ struct LiveSession : public AHandler { bool isSeekable() const; bool hasDynamicDuration() const; + // Posted notification's "what" field will carry one of the following: + enum { + kWhatPrepared, + kWhatPreparationFailed, + }; + protected: virtual ~LiveSession(); @@ -76,10 +84,13 @@ private: unsigned long mBandwidth; }; + sp mNotify; uint32_t mFlags; bool mUIDValid; uid_t mUID; + bool mInPreparationPhase; + sp mDataSource; sp mHTTPDataSource; @@ -144,6 +155,8 @@ private: // This is computed by summing the durations of all segments before it. int64_t getSegmentStartTimeUs(int32_t seqNumber) const; + void signalEOS(status_t err); + DISALLOW_EVIL_CONSTRUCTORS(LiveSession); }; -- cgit v1.1 From b50e83eca302a12f0fced6e7bab1b8617d63deaa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Roger=20J=C3=B6nsson?= Date: Mon, 21 Jan 2013 16:26:41 +0100 Subject: RTSP buffering improvements Added buffering start and end notifications for RTSP. MEDIA_INFO_BUFFERING_START is sent when buffering is started and MEDIA_INFO_BUFFERING_END is sent when the buffer has filled up. This patch also adds RTSP end of stream handling. EOS is signalled when BYE is received OR when detecting end of stream even if no actual EOS is received. Change-Id: I5cccb6845060ae6afd66d9f735b89da81476cd13 --- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 12 ++++ .../nuplayer/NuPlayerRenderer.cpp | 6 ++ .../nuplayer/NuPlayerSource.h | 2 + .../libmediaplayerservice/nuplayer/RTSPSource.cpp | 80 ++++++++++++++++++++-- media/libmediaplayerservice/nuplayer/RTSPSource.h | 7 +- .../libstagefright/mpeg2ts/AnotherPacketSource.cpp | 20 +++++- media/libstagefright/mpeg2ts/AnotherPacketSource.h | 3 + media/libstagefright/rtsp/MyHandler.h | 4 +- 8 files changed, 121 insertions(+), 13 deletions(-) diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index bcefe63..ee25cc6 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -1293,6 +1293,18 @@ void NuPlayer::onSourceNotify(const sp &msg) { break; } + case Source::kWhatBufferingStart: + { + notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_START, 0); + break; + } + + case Source::kWhatBufferingEnd: + { + notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_END, 0); + break; + } + default: TRESPASS(); } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp index 8a75f83..1ba76a5 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp @@ -512,9 +512,15 @@ void NuPlayer::Renderer::onQueueEOS(const sp &msg) { entry.mFinalResult = finalResult; if (audio) { + if (mAudioQueue.empty() && mSyncQueues) { + syncQueuesDone(); + } mAudioQueue.push_back(entry); postDrainAudioQueue(); } else { + if (mVideoQueue.empty() && mSyncQueues) { + syncQueuesDone(); + } mVideoQueue.push_back(entry); postDrainVideoQueue(); } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h index f5d4c38..df84123 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h @@ -40,6 +40,8 @@ struct 
NuPlayer::Source : public AHandler { kWhatPrepared, kWhatFlagsChanged, kWhatVideoSizeChanged, + kWhatBufferingStart, + kWhatBufferingEnd, }; // The provides message is used to notify the player about various diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp index d787647..b70d550 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp @@ -29,6 +29,8 @@ namespace android { +const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs + NuPlayer::RTSPSource::RTSPSource( const sp ¬ify, const char *url, @@ -45,8 +47,10 @@ NuPlayer::RTSPSource::RTSPSource( mState(DISCONNECTED), mFinalResult(OK), mDisconnectReplyID(0), - mStartingUp(true), - mSeekGeneration(0) { + mBuffering(true), + mSeekGeneration(0), + mEOSTimeoutAudio(0), + mEOSTimeoutVideo(0) { if (headers) { mExtraHeaders = *headers; @@ -96,6 +100,10 @@ void NuPlayer::RTSPSource::prepareAsync() { mHandler->connect(); } + + sp notifyStart = dupNotify(); + notifyStart->setInt32("what", kWhatBufferingStart); + notifyStart->post(); } void NuPlayer::RTSPSource::start() { @@ -131,6 +139,13 @@ bool NuPlayer::RTSPSource::haveSufficientDataOnAllTracks() { static const int64_t kMinDurationUs = 2000000ll; + int64_t mediaDurationUs = 0; + getDuration(&mediaDurationUs); + if ((mAudioTrack != NULL && mAudioTrack->isFinished(mediaDurationUs)) + || (mVideoTrack != NULL && mVideoTrack->isFinished(mediaDurationUs))) { + return true; + } + status_t err; int64_t durationUs; if (mAudioTrack != NULL @@ -156,12 +171,16 @@ bool NuPlayer::RTSPSource::haveSufficientDataOnAllTracks() { status_t NuPlayer::RTSPSource::dequeueAccessUnit( bool audio, sp *accessUnit) { - if (mStartingUp) { + if (mBuffering) { if (!haveSufficientDataOnAllTracks()) { return -EWOULDBLOCK; } - mStartingUp = false; + mBuffering = false; + + sp notify = dupNotify(); + notify->setInt32("what", kWhatBufferingEnd); + notify->post(); } sp source = getSource(audio); @@ -172,9 +191,51 @@ status_t NuPlayer::RTSPSource::dequeueAccessUnit( status_t finalResult; if (!source->hasBufferAvailable(&finalResult)) { - return finalResult == OK ? -EWOULDBLOCK : finalResult; + if (finalResult == OK) { + int64_t mediaDurationUs = 0; + getDuration(&mediaDurationUs); + sp otherSource = getSource(!audio); + status_t otherFinalResult; + + // If other source already signaled EOS, this source should also signal EOS + if (otherSource != NULL && + !otherSource->hasBufferAvailable(&otherFinalResult) && + otherFinalResult == ERROR_END_OF_STREAM) { + source->signalEOS(ERROR_END_OF_STREAM); + return ERROR_END_OF_STREAM; + } + + // If this source has detected near end, give it some time to retrieve more + // data before signaling EOS + if (source->isFinished(mediaDurationUs)) { + int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo; + if (eosTimeout == 0) { + setEOSTimeout(audio, ALooper::GetNowUs()); + } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) { + setEOSTimeout(audio, 0); + source->signalEOS(ERROR_END_OF_STREAM); + return ERROR_END_OF_STREAM; + } + return -EWOULDBLOCK; + } + + if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) { + // We should not enter buffering mode + // if any of the sources already have detected EOS. 
+ mBuffering = true; + + sp notify = dupNotify(); + notify->setInt32("what", kWhatBufferingStart); + notify->post(); + } + + return -EWOULDBLOCK; + } + return finalResult; } + setEOSTimeout(audio, 0); + return source->dequeueAccessUnit(accessUnit); } @@ -189,6 +250,14 @@ sp NuPlayer::RTSPSource::getSource(bool audio) { return audio ? mAudioTrack : mVideoTrack; } +void NuPlayer::RTSPSource::setEOSTimeout(bool audio, int64_t timeout) { + if (audio) { + mEOSTimeoutAudio = timeout; + } else { + mEOSTimeoutVideo = timeout; + } +} + status_t NuPlayer::RTSPSource::getDuration(int64_t *durationUs) { *durationUs = 0ll; @@ -289,7 +358,6 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp &msg) { case MyHandler::kWhatSeekDone: { mState = CONNECTED; - mStartingUp = true; break; } diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h index cbb6f90..8451b9e 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.h +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h @@ -95,7 +95,7 @@ private: State mState; status_t mFinalResult; uint32_t mDisconnectReplyID; - bool mStartingUp; + bool mBuffering; sp mLooper; sp > mReflector; @@ -110,6 +110,9 @@ private: int32_t mSeekGeneration; + int64_t mEOSTimeoutAudio; + int64_t mEOSTimeoutVideo; + sp getSource(bool audio); void onConnected(); @@ -121,6 +124,8 @@ private: bool haveSufficientDataOnAllTracks(); + void setEOSTimeout(bool audio, int64_t timeout); + DISALLOW_EVIL_CONSTRUCTORS(RTSPSource); }; diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp index a605a05..3de3a61 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp @@ -28,9 +28,12 @@ namespace android { +const int64_t kNearEOSMarkUs = 2000000ll; // 2 secs + AnotherPacketSource::AnotherPacketSource(const sp &meta) : mIsAudio(false), mFormat(meta), + mLastQueuedTimeUs(0), mEOSResult(OK) { const char *mime; CHECK(meta->findCString(kKeyMIMEType, &mime)); @@ -141,9 +144,8 @@ void AnotherPacketSource::queueAccessUnit(const sp &buffer) { return; } - int64_t timeUs; - CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); - ALOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", timeUs, timeUs / 1E6); + CHECK(buffer->meta()->findInt64("timeUs", &mLastQueuedTimeUs)); + ALOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6); Mutex::Autolock autoLock(mLock); mBuffers.push_back(buffer); @@ -171,6 +173,7 @@ void AnotherPacketSource::queueDiscontinuity( } mEOSResult = OK; + mLastQueuedTimeUs = 0; sp buffer = new ABuffer(0); buffer->meta()->setInt32("discontinuity", static_cast(type)); @@ -247,4 +250,15 @@ status_t AnotherPacketSource::nextBufferTime(int64_t *timeUs) { return OK; } +bool AnotherPacketSource::isFinished(int64_t duration) const { + if (duration > 0) { + int64_t diff = duration - mLastQueuedTimeUs; + if (diff < kNearEOSMarkUs && diff > -kNearEOSMarkUs) { + ALOGV("Detecting EOS due to near end"); + return true; + } + } + return (mEOSResult != OK); +} + } // namespace android diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h index d685b98..1db4068 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h @@ -58,6 +58,8 @@ struct AnotherPacketSource : public MediaSource { status_t dequeueAccessUnit(sp *buffer); + bool 
isFinished(int64_t duration) const; + protected: virtual ~AnotherPacketSource(); @@ -67,6 +69,7 @@ private: bool mIsAudio; sp mFormat; + int64_t mLastQueuedTimeUs; List > mBuffers; status_t mEOSResult; diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index aa64060..cfbf501 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -945,9 +945,7 @@ struct MyHandler : public AHandler { int32_t eos; if (msg->findInt32("eos", &eos)) { ALOGI("received BYE on track index %d", trackIndex); -#if 0 - track->mPacketSource->signalEOS(ERROR_END_OF_STREAM); -#endif + postQueueEOS(trackIndex, ERROR_END_OF_STREAM); return; } -- cgit v1.1 From fba60daf77cc74a13ae3bf4b0e9925dd2ee4470c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Roger=20J=C3=B6nsson?= Date: Mon, 21 Jan 2013 17:15:45 +0100 Subject: Enable pause/resume for RTSP streaming When a stream is paused, RTSP Pause is also sent to the server. Otherwise the buffering might continue until the memory runs out. When the stream is resumed, RTSP Play will be sent in order to resume the buffering. Change-Id: I5dc1761140827c532451638c3fd3f34271e5b9ab --- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 2 + .../nuplayer/NuPlayerSource.h | 2 + .../libmediaplayerservice/nuplayer/RTSPSource.cpp | 19 ++++ media/libmediaplayerservice/nuplayer/RTSPSource.h | 2 + media/libstagefright/rtsp/MyHandler.h | 126 ++++++++++++++++++++- 5 files changed, 147 insertions(+), 4 deletions(-) diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index ee25cc6..30eb4b9 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -732,6 +732,7 @@ void NuPlayer::onMessageReceived(const sp &msg) { case kWhatPause: { CHECK(mRenderer != NULL); + mSource->pause(); mRenderer->pause(); break; } @@ -739,6 +740,7 @@ void NuPlayer::onMessageReceived(const sp &msg) { case kWhatResume: { CHECK(mRenderer != NULL); + mSource->resume(); mRenderer->resume(); break; } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h index df84123..8622abe 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h @@ -54,6 +54,8 @@ struct NuPlayer::Source : public AHandler { virtual void start() = 0; virtual void stop() {} + virtual void pause() {} + virtual void resume() {} // Returns OK iff more data was available, // an error or ERROR_END_OF_STREAM if not. 
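On the wire, the pause and resume handling added in this patch maps to plain RTSP PAUSE and PLAY requests on the session, as the MyHandler.h hunks further down construct them. The exchange looks roughly like the following; the URL and session ID are placeholders, and any additional headers such as CSeq are assumed to be supplied by the connection layer and are not shown:

PAUSE rtsp://server.example.com/stream RTSP/1.0
Session: 6f2d9a41

and later, on resume:

PLAY rtsp://server.example.com/stream RTSP/1.0
Session: 6f2d9a41

Pausing on the server side is what stops it from pushing further data, so the client's queues stop growing while playback is paused.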
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp index b70d550..a5ff0ca 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp @@ -119,6 +119,25 @@ void NuPlayer::RTSPSource::stop() { msg->postAndAwaitResponse(&dummy); } +void NuPlayer::RTSPSource::pause() { + int64_t mediaDurationUs = 0; + getDuration(&mediaDurationUs); + for (size_t index = 0; index < mTracks.size(); index++) { + TrackInfo *info = &mTracks.editItemAt(index); + sp source = info->mSource; + + // Check if EOS or ERROR is received + if (source != NULL && source->isFinished(mediaDurationUs)) { + return; + } + } + mHandler->pause(); +} + +void NuPlayer::RTSPSource::resume() { + mHandler->resume(); +} + status_t NuPlayer::RTSPSource::feedMoreTSData() { return mFinalResult; } diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h index 8451b9e..8cf34a0 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.h +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h @@ -43,6 +43,8 @@ struct NuPlayer::RTSPSource : public NuPlayer::Source { virtual void prepareAsync(); virtual void start(); virtual void stop(); + virtual void pause(); + virtual void resume(); virtual status_t feedMoreTSData(); diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index cfbf501..4e5e2fa 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -135,7 +135,8 @@ struct MyHandler : public AHandler { mReceivedFirstRTPPacket(false), mSeekable(true), mKeepAliveTimeoutUs(kDefaultKeepAliveTimeoutUs), - mKeepAliveGeneration(0) { + mKeepAliveGeneration(0), + mPausing(false) { mNetLooper->setName("rtsp net"); mNetLooper->start(false /* runOnCallingThread */, false /* canCallJava */, @@ -199,6 +200,16 @@ struct MyHandler : public AHandler { return mSeekable; } + void pause() { + sp msg = new AMessage('paus', id()); + msg->post(); + } + + void resume() { + sp msg = new AMessage('resu', id()); + msg->post(); + } + static void addRR(const sp &buf) { uint8_t *ptr = buf->data() + buf->size(); ptr[0] = 0x80 | 0; @@ -824,6 +835,7 @@ struct MyHandler : public AHandler { mNumAccessUnitsReceived = 0; mReceivedFirstRTCPPacket = false; mReceivedFirstRTPPacket = false; + mPausing = false; mSeekable = true; sp reply = new AMessage('tear', id()); @@ -973,6 +985,100 @@ struct MyHandler : public AHandler { break; } + case 'paus': + { + if (!mSeekable) { + ALOGW("This is a live stream, ignoring pause request."); + break; + } + mCheckPending = true; + ++mCheckGeneration; + mPausing = true; + + AString request = "PAUSE "; + request.append(mSessionURL); + request.append(" RTSP/1.0\r\n"); + + request.append("Session: "); + request.append(mSessionID); + request.append("\r\n"); + + request.append("\r\n"); + + sp reply = new AMessage('pau2', id()); + mConn->sendRequest(request.c_str(), reply); + break; + } + + case 'pau2': + { + int32_t result; + CHECK(msg->findInt32("result", &result)); + + ALOGI("PAUSE completed with result %d (%s)", + result, strerror(-result)); + break; + } + + case 'resu': + { + if (mPausing && mSeekPending) { + // If seeking, Play will be sent from see1 instead + break; + } + + if (!mPausing) { + // Dont send PLAY if we have not paused + break; + } + AString request = "PLAY "; + request.append(mSessionURL); + request.append(" RTSP/1.0\r\n"); + + request.append("Session: "); + 
request.append(mSessionID); + request.append("\r\n"); + + request.append("\r\n"); + + sp reply = new AMessage('res2', id()); + mConn->sendRequest(request.c_str(), reply); + break; + } + + case 'res2': + { + int32_t result; + CHECK(msg->findInt32("result", &result)); + + ALOGI("PLAY completed with result %d (%s)", + result, strerror(-result)); + + mCheckPending = false; + postAccessUnitTimeoutCheck(); + + if (result == OK) { + sp obj; + CHECK(msg->findObject("response", &obj)); + sp response = + static_cast(obj.get()); + + if (response->mStatusCode != 200) { + result = UNKNOWN_ERROR; + } else { + parsePlayResponse(response); + } + } + + if (result != OK) { + ALOGE("resume failed, aborting."); + (new AMessage('abor', id()))->post(); + } + + mPausing = false; + break; + } + case 'seek': { if (!mSeekable) { @@ -994,6 +1100,15 @@ struct MyHandler : public AHandler { mCheckPending = true; ++mCheckGeneration; + sp reply = new AMessage('see1', id()); + reply->setInt64("time", timeUs); + + if (mPausing) { + // PAUSE already sent + ALOGI("Pause already sent"); + reply->post(); + break; + } AString request = "PAUSE "; request.append(mSessionURL); request.append(" RTSP/1.0\r\n"); @@ -1004,8 +1119,6 @@ struct MyHandler : public AHandler { request.append("\r\n"); - sp reply = new AMessage('see1', id()); - reply->setInt64("time", timeUs); mConn->sendRequest(request.c_str(), reply); break; } @@ -1049,7 +1162,10 @@ struct MyHandler : public AHandler { case 'see2': { - CHECK(mSeekPending); + if (mTracks.size() == 0) { + // We have already hit abor, break + break; + } int32_t result; CHECK(msg->findInt32("result", &result)); @@ -1085,6 +1201,7 @@ struct MyHandler : public AHandler { (new AMessage('abor', id()))->post(); } + mPausing = false; mSeekPending = false; sp msg = mNotify->dup(); @@ -1327,6 +1444,7 @@ private: bool mSeekable; int64_t mKeepAliveTimeoutUs; int32_t mKeepAliveGeneration; + bool mPausing; Vector mTracks; -- cgit v1.1 From 599b9655ddf95cdf6cb99970ce03c632bb2a576b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=C3=A5ns=20Zigher?= Date: Wed, 23 Jan 2013 14:48:57 +0100 Subject: RTSP: Parse session level control attribute from SDP If a=control: is present at session-level in the SDP response, RFC2326:C.1.1 defines the URL to be used for aggregate commands. This includes PLAY and PAUSE but not TEARDOWN. Change-Id: Iaa1dc2271d00df39dc83477a99fda6fbeb73c5b4 --- media/libstagefright/rtsp/MyHandler.h | 36 ++++++++++++++++++++++++++++++----- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index 4e5e2fa..2dde422 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -186,6 +186,27 @@ struct MyHandler : public AHandler { mConn->connect(mOriginalSessionURL.c_str(), reply); } + AString getControlURL(sp desc) { + AString sessionLevelControlURL; + if (mSessionDesc->findAttribute( + 0, + "a=control", + &sessionLevelControlURL)) { + if (sessionLevelControlURL.compare("*") == 0) { + return mBaseURL; + } else { + AString controlURL; + CHECK(MakeURL( + mBaseURL.c_str(), + sessionLevelControlURL.c_str(), + &controlURL)); + return controlURL; + } + } else { + return mSessionURL; + } + } + void disconnect() { (new AMessage('abor', id()))->post(); } @@ -526,6 +547,8 @@ struct MyHandler : public AHandler { mBaseURL = tmp; } + mControlURL = getControlURL(mSessionDesc); + if (mSessionDesc->countTracks() < 2) { // There's no actual tracks in this session. 
// The first "track" is merely session meta @@ -570,6 +593,8 @@ struct MyHandler : public AHandler { mSeekable = !isLiveStream(mSessionDesc); + mControlURL = getControlURL(mSessionDesc); + if (mSessionDesc->countTracks() < 2) { // There's no actual tracks in this session. // The first "track" is merely session meta @@ -708,7 +733,7 @@ struct MyHandler : public AHandler { postKeepAlive(); AString request = "PLAY "; - request.append(mSessionURL); + request.append(mControlURL); request.append(" RTSP/1.0\r\n"); request.append("Session: "); @@ -996,7 +1021,7 @@ struct MyHandler : public AHandler { mPausing = true; AString request = "PAUSE "; - request.append(mSessionURL); + request.append(mControlURL); request.append(" RTSP/1.0\r\n"); request.append("Session: "); @@ -1032,7 +1057,7 @@ struct MyHandler : public AHandler { break; } AString request = "PLAY "; - request.append(mSessionURL); + request.append(mControlURL); request.append(" RTSP/1.0\r\n"); request.append("Session: "); @@ -1110,7 +1135,7 @@ struct MyHandler : public AHandler { break; } AString request = "PAUSE "; - request.append(mSessionURL); + request.append(mControlURL); request.append(" RTSP/1.0\r\n"); request.append("Session: "); @@ -1142,7 +1167,7 @@ struct MyHandler : public AHandler { CHECK(msg->findInt64("time", &timeUs)); AString request = "PLAY "; - request.append(mSessionURL); + request.append(mControlURL); request.append(" RTSP/1.0\r\n"); request.append("Session: "); @@ -1424,6 +1449,7 @@ private: AString mSessionURL; AString mSessionHost; AString mBaseURL; + AString mControlURL; AString mSessionID; bool mSetupTracksSuccessful; bool mSeekPending; -- cgit v1.1 From ba021d15cf7bc964bc813688e33d34845bfd89ea Mon Sep 17 00:00:00 2001 From: joakim johansson Date: Wed, 23 Jan 2013 17:18:56 +0100 Subject: EOS fixes for RTSP streams The fix takes care of several near end of stream use cases: seek, pause and fake timestamps. 
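
In outline, the EOS handling added below works like this (a condensed, illustrative sketch only -- tracks, drainQueuedPackets and the field names are placeholders rather than the actual MyHandler members; the real logic is in the diff that follows):

    // If a BYE arrives before RTCP sender reports have established a time
    // base, remember the EOS and fall back to fake timestamps instead of
    // discarding the packets that were already received.
    void onByeReceived(size_t trackIndex) {
        if (!allTracksHaveTime && dataReceivedOnAllChannels()) {
            tracks[trackIndex].eosReceived = true;   // defer the EOS
            fakeTimestamps();                        // synthesize a time base
        } else {
            postQueueEOS(trackIndex, ERROR_END_OF_STREAM);
        }
    }

    // Once a time base exists (real or faked), flush the buffered packets
    // first, and only then deliver any EOS that was deferred above.
    void onTimeEstablished() {
        for (size_t i = 0; i < tracks.size(); ++i) {
            drainQueuedPackets(i);                   // placeholder helper
            if (tracks[i].eosReceived) {
                postQueueEOS(i, ERROR_END_OF_STREAM);
                tracks[i].eosReceived = false;
            }
        }
    }
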
Change-Id: I5f5fa881b1f619dfd5e1afd2af957082345c59eb --- media/libstagefright/rtsp/MyHandler.h | 88 ++++++++++++++++++++++++++++++++++- 1 file changed, 86 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index 2dde422..8f86f3b 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -129,6 +129,7 @@ struct MyHandler : public AHandler { mNumAccessUnitsReceived(0), mCheckPending(false), mCheckGeneration(0), + mCheckTimeoutGeneration(0), mTryTCPInterleaving(false), mTryFakeRTCP(false), mReceivedFirstRTCPPacket(false), @@ -771,6 +772,8 @@ struct MyHandler : public AHandler { parsePlayResponse(response); sp timeout = new AMessage('tiou', id()); + mCheckTimeoutGeneration++; + timeout->setInt32("tioucheck", mCheckTimeoutGeneration); timeout->post(kStartupTimeoutUs); } } @@ -982,7 +985,16 @@ struct MyHandler : public AHandler { int32_t eos; if (msg->findInt32("eos", &eos)) { ALOGI("received BYE on track index %d", trackIndex); - postQueueEOS(trackIndex, ERROR_END_OF_STREAM); + if (!mAllTracksHaveTime && dataReceivedOnAllChannels()) { + ALOGI("No time established => fake existing data"); + + track->mEOSReceived = true; + mTryFakeRTCP = true; + mReceivedFirstRTCPPacket = true; + fakeTimestamps(); + } else { + postQueueEOS(trackIndex, ERROR_END_OF_STREAM); + } return; } @@ -1039,6 +1051,7 @@ struct MyHandler : public AHandler { { int32_t result; CHECK(msg->findInt32("result", &result)); + mCheckTimeoutGeneration++; ALOGI("PAUSE completed with result %d (%s)", result, strerror(-result)); @@ -1092,6 +1105,13 @@ struct MyHandler : public AHandler { result = UNKNOWN_ERROR; } else { parsePlayResponse(response); + + // Post new timeout in order to make sure to use + // fake timestamps if no new Sender Reports arrive + sp timeout = new AMessage('tiou', id()); + mCheckTimeoutGeneration++; + timeout->setInt32("tioucheck", mCheckTimeoutGeneration); + timeout->post(kStartupTimeoutUs); } } @@ -1155,6 +1175,7 @@ struct MyHandler : public AHandler { TrackInfo *info = &mTracks.editItemAt(i); postQueueSeekDiscontinuity(i); + info->mEOSReceived = false; info->mRTPAnchor = 0; info->mNTPAnchorUs = -1; @@ -1163,6 +1184,13 @@ struct MyHandler : public AHandler { mAllTracksHaveTime = false; mNTPAnchorUs = -1; + // Start new timeoutgeneration to avoid getting timeout + // before PLAY response arrive + sp timeout = new AMessage('tiou', id()); + mCheckTimeoutGeneration++; + timeout->setInt32("tioucheck", mCheckTimeoutGeneration); + timeout->post(kStartupTimeoutUs); + int64_t timeUs; CHECK(msg->findInt64("time", &timeUs)); @@ -1212,6 +1240,13 @@ struct MyHandler : public AHandler { } else { parsePlayResponse(response); + // Post new timeout in order to make sure to use + // fake timestamps if no new Sender Reports arrive + sp timeout = new AMessage('tiou', id()); + mCheckTimeoutGeneration++; + timeout->setInt32("tioucheck", mCheckTimeoutGeneration); + timeout->post(kStartupTimeoutUs); + ssize_t i = response->mHeaders.indexOfKey("rtp-info"); CHECK_GE(i, 0); @@ -1249,8 +1284,17 @@ struct MyHandler : public AHandler { case 'tiou': { + int32_t timeoutGenerationCheck; + CHECK(msg->findInt32("tioucheck", &timeoutGenerationCheck)); + if (timeoutGenerationCheck != mCheckTimeoutGeneration) { + // This is an outdated message. Ignore. + // This typically happens if a lot of seeks are + // performed, since new timeout messages now are + // posted at seek as well. 
+ break; + } if (!mReceivedFirstRTCPPacket) { - if (mReceivedFirstRTPPacket && !mTryFakeRTCP) { + if (dataReceivedOnAllChannels() && !mTryFakeRTCP) { ALOGW("We received RTP packets but no RTCP packets, " "using fake timestamps."); @@ -1427,6 +1471,7 @@ private: uint32_t mRTPAnchor; int64_t mNTPAnchorUs; int32_t mTimeScale; + bool mEOSReceived; uint32_t mNormalPlayTimeRTP; int64_t mNormalPlayTimeUs; @@ -1463,6 +1508,7 @@ private: int64_t mNumAccessUnitsReceived; bool mCheckPending; int32_t mCheckGeneration; + int32_t mCheckTimeoutGeneration; bool mTryTCPInterleaving; bool mTryFakeRTCP; bool mReceivedFirstRTCPPacket; @@ -1517,6 +1563,7 @@ private: formatDesc.c_str(), ×cale, &numChannels); info->mTimeScale = timescale; + info->mEOSReceived = false; ALOGV("track #%d URL=%s", mTracks.size(), trackURL.c_str()); @@ -1609,6 +1656,17 @@ private: } } + bool dataReceivedOnAllChannels() { + TrackInfo *track; + for (size_t i = 0; i < mTracks.size(); ++i) { + track = &mTracks.editItemAt(i); + if (track->mPackets.empty()) { + return false; + } + } + return true; + } + void onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) { ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = 0x%016llx", trackIndex, rtpTime, ntpTime); @@ -1639,6 +1697,27 @@ private: ALOGI("Time now established for all tracks."); } } + if (mAllTracksHaveTime && dataReceivedOnAllChannels()) { + // Time is now established, lets start timestamping immediately + for (size_t i = 0; i < mTracks.size(); ++i) { + TrackInfo *trackInfo = &mTracks.editItemAt(i); + while (!trackInfo->mPackets.empty()) { + sp accessUnit = *trackInfo->mPackets.begin(); + trackInfo->mPackets.erase(trackInfo->mPackets.begin()); + + if (addMediaTimestamp(i, trackInfo, accessUnit)) { + postQueueAccessUnit(i, accessUnit); + } + } + } + for (size_t i = 0; i < mTracks.size(); ++i) { + TrackInfo *trackInfo = &mTracks.editItemAt(i); + if (trackInfo->mEOSReceived) { + postQueueEOS(i, ERROR_END_OF_STREAM); + trackInfo->mEOSReceived = false; + } + } + } } void onAccessUnitComplete( @@ -1683,6 +1762,11 @@ private: if (addMediaTimestamp(trackIndex, track, accessUnit)) { postQueueAccessUnit(trackIndex, accessUnit); } + + if (track->mEOSReceived) { + postQueueEOS(trackIndex, ERROR_END_OF_STREAM); + track->mEOSReceived = false; + } } bool addMediaTimestamp( -- cgit v1.1 From a0dd006834f4a424b67773ab6724e961a61de923 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Roger=20J=C3=B6nsson?= Date: Wed, 23 Jan 2013 18:18:08 +0100 Subject: Avoid rebuffering after RTSP pause If pausing an RTSP stream, an RTSP Pause request is sent and then if the stream is immediately resumed again, an RTSP Play request will be sent to the server. But the new data after the pause will not be buffered until Sender Reports have arrived again on both channels. Meanwhile the player will resume playback and continue consuming the already existing buffer. This means that there is a risk that the buffer is emptied while waiting for sender reports. This commit simply adds a delay before the RTSP pause request is sent, allowing some additional RTSP buffering that might be needed when the stream is resumed again. Also, if the stream is resumed again before the RTSP pause request is sent, there is no need for any RTSP pause request, hence it is omitted. 
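
The cancellation relies on the usual AMessage generation-counter pattern; roughly (a simplified sketch built from the same AMessage calls the change uses, with member names shortened):

    static const int64_t kPauseDelayUs = 3000000ll;   // ~3 s of extra buffering

    void pause() {
        sp<AMessage> msg = new AMessage('paus', id());
        msg->setInt32("pausecheck", ++mPauseGeneration);
        msg->post(kPauseDelayUs);                     // deferred, not sent immediately
    }

    void resume() {
        ++mPauseGeneration;                           // invalidates a pending 'paus'
        (new AMessage('resu', id()))->post();
    }

    // in onMessageReceived(), case 'paus':
    int32_t generation;
    CHECK(msg->findInt32("pausecheck", &generation));
    if (generation != mPauseGeneration) {
        break;                                        // outdated: the user resumed in time
    }
    // ...otherwise build and send the RTSP PAUSE request as before

Seek bumps the same counter, so a seek issued during the delay likewise suppresses the now-pointless PAUSE.
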
Change-Id: I928c8bfb5e99a6a146dcda4e51e528973ecbe065 --- media/libstagefright/rtsp/MyHandler.h | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index 8f86f3b..5d760d3 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -52,6 +52,8 @@ static int64_t kStartupTimeoutUs = 10000000ll; static int64_t kDefaultKeepAliveTimeoutUs = 60000000ll; +static int64_t kPauseDelayUs = 3000000ll; + namespace android { static void MakeUserAgentString(AString *s) { @@ -137,7 +139,8 @@ struct MyHandler : public AHandler { mSeekable(true), mKeepAliveTimeoutUs(kDefaultKeepAliveTimeoutUs), mKeepAliveGeneration(0), - mPausing(false) { + mPausing(false), + mPauseGeneration(0) { mNetLooper->setName("rtsp net"); mNetLooper->start(false /* runOnCallingThread */, false /* canCallJava */, @@ -215,6 +218,7 @@ struct MyHandler : public AHandler { void seek(int64_t timeUs) { sp msg = new AMessage('seek', id()); msg->setInt64("time", timeUs); + mPauseGeneration++; msg->post(); } @@ -224,11 +228,14 @@ struct MyHandler : public AHandler { void pause() { sp msg = new AMessage('paus', id()); - msg->post(); + mPauseGeneration++; + msg->setInt32("pausecheck", mPauseGeneration); + msg->post(kPauseDelayUs); } void resume() { sp msg = new AMessage('resu', id()); + mPauseGeneration++; msg->post(); } @@ -1024,6 +1031,13 @@ struct MyHandler : public AHandler { case 'paus': { + int32_t generation; + CHECK(msg->findInt32("pausecheck", &generation)); + if (generation != mPauseGeneration) { + ALOGV("Ignoring outdated pause message."); + break; + } + if (!mSeekable) { ALOGW("This is a live stream, ignoring pause request."); break; @@ -1517,6 +1531,7 @@ private: int64_t mKeepAliveTimeoutUs; int32_t mKeepAliveGeneration; bool mPausing; + int32_t mPauseGeneration; Vector mTracks; -- cgit v1.1 From 6c6bb9873f55853fe74d8f45ad3ae116636d8be7 Mon Sep 17 00:00:00 2001 From: Kunter Gultekin Date: Fri, 1 Feb 2013 17:01:15 +0200 Subject: Adds VPX encoding support for stagefright. Only following encoder settings are available - target bitrate - rate control (constant / variable) - frame rate - token partitioning - error resilience - reconstruction & loop filters Only following color formats are recognized - YUV420Planar - YUV420SemiPlanar - AndroidOpaque Following settings are not configurable by the client - encoding deadline is realtime - the algorithm interface for encoder is vp8 - fractional bits of frame rate is discarded - timebase is fixed to 1/1000000 Requires libvpx to be built with encoder support enabled. Requires openmax 1.1.2 extension headers. Relevant tests exist in cts repo. 
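
Stripped to its essentials, the encode path wrapped by the new component drives libvpx as sketched here (illustrative only, error handling omitted; width, height, bitrateBps, frameTimeUs, frameDurationUs and yuv420PlanarData stand in for the component's state):

    vpx_codec_ctx_t ctx;
    vpx_codec_enc_cfg_t cfg;
    vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &cfg, 0);
    cfg.g_w = width;
    cfg.g_h = height;
    cfg.g_timebase.num = 1;                    // timebase fixed to 1/1000000 s
    cfg.g_timebase.den = 1000000;
    cfg.rc_target_bitrate = bitrateBps / 1000; // libvpx expects kbps
    cfg.rc_end_usage = VPX_VBR;                // or VPX_CBR for constant bitrate
    vpx_codec_enc_init(&ctx, vpx_codec_vp8_cx(), &cfg, 0);
    vpx_codec_control(&ctx, VP8E_SET_TOKEN_PARTITIONS, 0);  // token partitioning

    vpx_image_t img;
    vpx_img_wrap(&img, VPX_IMG_FMT_I420, width, height, 1, yuv420PlanarData);
    vpx_codec_encode(&ctx, &img, frameTimeUs, frameDurationUs,
                     0 /* flags */, VPX_DL_REALTIME);

    vpx_codec_iter_t iter = NULL;
    const vpx_codec_cx_pkt_t *pkt;
    while ((pkt = vpx_codec_get_cx_data(&ctx, &iter)) != NULL) {
        if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
            // pkt->data.frame.buf / pkt->data.frame.sz hold one compressed frame
        }
    }
    vpx_codec_destroy(&ctx);

Semi-planar and opaque input is first converted to planar I420 before being wrapped, which is what the component's conversion buffer is for.
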
Change-Id: If759edb8db36acbd24dcb53d159a54e942766020 Signed-off-by: Kunter Gultekin --- media/libstagefright/codecs/on2/enc/Android.mk | 24 + .../codecs/on2/enc/MODULE_LICENSE_APACHE2 | 0 media/libstagefright/codecs/on2/enc/NOTICE | 190 ++++++ .../codecs/on2/enc/SoftVPXEncoder.cpp | 685 +++++++++++++++++++++ .../libstagefright/codecs/on2/enc/SoftVPXEncoder.h | 203 ++++++ 5 files changed, 1102 insertions(+) create mode 100644 media/libstagefright/codecs/on2/enc/Android.mk create mode 100644 media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 create mode 100644 media/libstagefright/codecs/on2/enc/NOTICE create mode 100644 media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp create mode 100644 media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h diff --git a/media/libstagefright/codecs/on2/enc/Android.mk b/media/libstagefright/codecs/on2/enc/Android.mk new file mode 100644 index 0000000..5d3317c --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/Android.mk @@ -0,0 +1,24 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + SoftVPXEncoder.cpp + +LOCAL_C_INCLUDES := \ + $(TOP)/external/libvpx/libvpx \ + $(TOP)/external/openssl/include \ + $(TOP)/external/libvpx/libvpx/vpx_codec \ + $(TOP)/external/libvpx/libvpx/vpx_ports \ + frameworks/av/media/libstagefright/include \ + frameworks/native/include/media/openmax \ + +LOCAL_STATIC_LIBRARIES := \ + libvpx + +LOCAL_SHARED_LIBRARIES := \ + libstagefright libstagefright_omx libstagefright_foundation libutils \ + +LOCAL_MODULE := libstagefright_soft_vpxenc +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000..e69de29 diff --git a/media/libstagefright/codecs/on2/enc/NOTICE b/media/libstagefright/codecs/on2/enc/NOTICE new file mode 100644 index 0000000..faed58a --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2013, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp new file mode 100644 index 0000000..cc38dc3 --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp @@ -0,0 +1,685 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// #define LOG_NDEBUG 0 +#define LOG_TAG "SoftVPXEncoder" +#include "SoftVPXEncoder.h" + +#include + +#include +#include + +namespace android { + + +template +static void InitOMXParams(T *params) { + params->nSize = sizeof(T); + // OMX IL 1.1.2 + params->nVersion.s.nVersionMajor = 1; + params->nVersion.s.nVersionMinor = 1; + params->nVersion.s.nRevision = 2; + params->nVersion.s.nStep = 0; +} + + +static int GetCPUCoreCount() { + int cpuCoreCount = 1; +#if defined(_SC_NPROCESSORS_ONLN) + cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); +#else + // _SC_NPROC_ONLN must be defined... 
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN); +#endif + CHECK_GE(cpuCoreCount, 1); + return cpuCoreCount; +} + + +// This color conversion utility is copied from SoftMPEG4Encoder.cpp +inline static void ConvertSemiPlanarToPlanar(uint8_t *inyuv, + uint8_t* outyuv, + int32_t width, + int32_t height) { + int32_t outYsize = width * height; + uint32_t *outy = (uint32_t *) outyuv; + uint16_t *outcb = (uint16_t *) (outyuv + outYsize); + uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2)); + + /* Y copying */ + memcpy(outy, inyuv, outYsize); + + /* U & V copying */ + uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize); + for (int32_t i = height >> 1; i > 0; --i) { + for (int32_t j = width >> 2; j > 0; --j) { + uint32_t temp = *inyuv_4++; + uint32_t tempU = temp & 0xFF; + tempU = tempU | ((temp >> 8) & 0xFF00); + + uint32_t tempV = (temp >> 8) & 0xFF; + tempV = tempV | ((temp >> 16) & 0xFF00); + + // Flip U and V + *outcb++ = tempV; + *outcr++ = tempU; + } + } +} + + +SoftVPXEncoder::SoftVPXEncoder(const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component) + : SimpleSoftOMXComponent(name, callbacks, appData, component), + mCodecContext(NULL), + mCodecConfiguration(NULL), + mCodecInterface(NULL), + mWidth(176), + mHeight(144), + mBitrate(192000), // in bps + mBitrateControlMode(VPX_VBR), // variable bitrate + mFrameDurationUs(33333), // Defaults to 30 fps + mDCTPartitions(0), + mErrorResilience(OMX_FALSE), + mColorFormat(OMX_COLOR_FormatYUV420Planar), + mLevel(OMX_VIDEO_VP8Level_Version0), + mConversionBuffer(NULL) { + + initPorts(); +} + + +SoftVPXEncoder::~SoftVPXEncoder() { + releaseEncoder(); +} + + +void SoftVPXEncoder::initPorts() { + OMX_PARAM_PORTDEFINITIONTYPE inputPort; + OMX_PARAM_PORTDEFINITIONTYPE outputPort; + + InitOMXParams(&inputPort); + InitOMXParams(&outputPort); + + inputPort.nBufferCountMin = kNumBuffers; + inputPort.nBufferCountActual = inputPort.nBufferCountMin; + inputPort.bEnabled = OMX_TRUE; + inputPort.bPopulated = OMX_FALSE; + inputPort.eDomain = OMX_PortDomainVideo; + inputPort.bBuffersContiguous = OMX_FALSE; + inputPort.format.video.pNativeRender = NULL; + inputPort.format.video.nFrameWidth = mWidth; + inputPort.format.video.nFrameHeight = mHeight; + inputPort.format.video.nStride = inputPort.format.video.nFrameWidth; + inputPort.format.video.nSliceHeight = inputPort.format.video.nFrameHeight; + inputPort.format.video.nBitrate = 0; + // frameRate is reciprocal of frameDuration, which is + // in microseconds. It is also in Q16 format. 
+ inputPort.format.video.xFramerate = (1000000/mFrameDurationUs) << 16; + inputPort.format.video.bFlagErrorConcealment = OMX_FALSE; + inputPort.nPortIndex = kInputPortIndex; + inputPort.eDir = OMX_DirInput; + inputPort.nBufferAlignment = kInputBufferAlignment; + inputPort.format.video.cMIMEType = + const_cast(MEDIA_MIMETYPE_VIDEO_RAW); + inputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused; + inputPort.format.video.eColorFormat = mColorFormat; + inputPort.format.video.pNativeWindow = NULL; + inputPort.nBufferSize = + (inputPort.format.video.nStride * + inputPort.format.video.nSliceHeight * 3) / 2; + + addPort(inputPort); + + outputPort.nBufferCountMin = kNumBuffers; + outputPort.nBufferCountActual = outputPort.nBufferCountMin; + outputPort.bEnabled = OMX_TRUE; + outputPort.bPopulated = OMX_FALSE; + outputPort.eDomain = OMX_PortDomainVideo; + outputPort.bBuffersContiguous = OMX_FALSE; + outputPort.format.video.pNativeRender = NULL; + outputPort.format.video.nFrameWidth = mWidth; + outputPort.format.video.nFrameHeight = mHeight; + outputPort.format.video.nStride = outputPort.format.video.nFrameWidth; + outputPort.format.video.nSliceHeight = outputPort.format.video.nFrameHeight; + outputPort.format.video.nBitrate = mBitrate; + outputPort.format.video.xFramerate = 0; + outputPort.format.video.bFlagErrorConcealment = OMX_FALSE; + outputPort.nPortIndex = kOutputPortIndex; + outputPort.eDir = OMX_DirOutput; + outputPort.nBufferAlignment = kOutputBufferAlignment; + outputPort.format.video.cMIMEType = + const_cast(MEDIA_MIMETYPE_VIDEO_VPX); + outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVPX; + outputPort.format.video.eColorFormat = OMX_COLOR_FormatUnused; + outputPort.format.video.pNativeWindow = NULL; + outputPort.nBufferSize = 256 * 1024; // arbitrary + + addPort(outputPort); +} + + +status_t SoftVPXEncoder::initEncoder() { + vpx_codec_err_t codec_return; + + mCodecContext = new vpx_codec_ctx_t; + mCodecConfiguration = new vpx_codec_enc_cfg_t; + mCodecInterface = vpx_codec_vp8_cx(); + + if (mCodecInterface == NULL) { + return UNKNOWN_ERROR; + } + + codec_return = vpx_codec_enc_config_default(mCodecInterface, + mCodecConfiguration, + 0); // Codec specific flags + + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error populating default configuration for vpx encoder."); + return UNKNOWN_ERROR; + } + + mCodecConfiguration->g_w = mWidth; + mCodecConfiguration->g_h = mHeight; + mCodecConfiguration->g_threads = GetCPUCoreCount(); + mCodecConfiguration->g_error_resilient = mErrorResilience; + + switch (mLevel) { + case OMX_VIDEO_VP8Level_Version0: + mCodecConfiguration->g_profile = 0; + break; + + case OMX_VIDEO_VP8Level_Version1: + mCodecConfiguration->g_profile = 1; + break; + + case OMX_VIDEO_VP8Level_Version2: + mCodecConfiguration->g_profile = 2; + break; + + case OMX_VIDEO_VP8Level_Version3: + mCodecConfiguration->g_profile = 3; + break; + + default: + mCodecConfiguration->g_profile = 0; + } + + // OMX timebase unit is microsecond + // g_timebase is in seconds (i.e. 
1/1000000 seconds) + mCodecConfiguration->g_timebase.num = 1; + mCodecConfiguration->g_timebase.den = 1000000; + // rc_target_bitrate is in kbps, mBitrate in bps + mCodecConfiguration->rc_target_bitrate = mBitrate/1000; + mCodecConfiguration->rc_end_usage = mBitrateControlMode; + + codec_return = vpx_codec_enc_init(mCodecContext, + mCodecInterface, + mCodecConfiguration, + 0); // flags + + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error initializing vpx encoder"); + return UNKNOWN_ERROR; + } + + codec_return = vpx_codec_control(mCodecContext, + VP8E_SET_TOKEN_PARTITIONS, + mDCTPartitions); + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error setting dct partitions for vpx encoder."); + return UNKNOWN_ERROR; + } + + if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + if (mConversionBuffer == NULL) { + mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2); + if (mConversionBuffer == NULL) { + ALOGE("Allocating conversion buffer failed."); + return UNKNOWN_ERROR; + } + } + } + return OK; +} + + +status_t SoftVPXEncoder::releaseEncoder() { + if (mCodecContext != NULL) { + vpx_codec_destroy(mCodecContext); + delete mCodecContext; + mCodecContext = NULL; + } + + if (mCodecConfiguration != NULL) { + delete mCodecConfiguration; + mCodecConfiguration = NULL; + } + + if (mConversionBuffer != NULL) { + delete mConversionBuffer; + mConversionBuffer = NULL; + } + + // this one is not allocated by us + mCodecInterface = NULL; + + return OK; +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index, + OMX_PTR param) { + // can include extension index OMX_INDEXEXTTYPE + const int32_t indexFull = index; + + switch (indexFull) { + case OMX_IndexParamVideoPortFormat: { + OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams = + (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param; + + if (formatParams->nPortIndex == kInputPortIndex) { + if (formatParams->nIndex >= kNumberOfSupportedColorFormats) { + return OMX_ErrorNoMore; + } + + // Color formats, in order of preference + if (formatParams->nIndex == 0) { + formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar; + } else if (formatParams->nIndex == 1) { + formatParams->eColorFormat = + OMX_COLOR_FormatYUV420SemiPlanar; + } else { + formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque; + } + + formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused; + // Converting from microseconds + // Also converting to Q16 format + formatParams->xFramerate = (1000000/mFrameDurationUs) << 16; + return OMX_ErrorNone; + } else if (formatParams->nPortIndex == kOutputPortIndex) { + formatParams->eCompressionFormat = OMX_VIDEO_CodingVPX; + formatParams->eColorFormat = OMX_COLOR_FormatUnused; + formatParams->xFramerate = 0; + return OMX_ErrorNone; + } else { + return OMX_ErrorBadPortIndex; + } + } + + case OMX_IndexParamVideoBitrate: { + OMX_VIDEO_PARAM_BITRATETYPE *bitrate = + (OMX_VIDEO_PARAM_BITRATETYPE *)param; + + if (bitrate->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + bitrate->nTargetBitrate = mBitrate; + + if (mBitrateControlMode == VPX_VBR) { + bitrate->eControlRate = OMX_Video_ControlRateVariable; + } else if (mBitrateControlMode == VPX_CBR) { + bitrate->eControlRate = OMX_Video_ControlRateConstant; + } else { + return OMX_ErrorUnsupportedSetting; + } + return OMX_ErrorNone; + } + + // VP8 specific parameters that use extension headers + case OMX_IndexParamVideoVp8: { + OMX_VIDEO_PARAM_VP8TYPE *vp8Params = + (OMX_VIDEO_PARAM_VP8TYPE *)param; + + if (vp8Params->nPortIndex != kOutputPortIndex) { + return 
OMX_ErrorUnsupportedIndex; + } + + vp8Params->eProfile = OMX_VIDEO_VP8ProfileMain; + vp8Params->eLevel = mLevel; + vp8Params->nDCTPartitions = mDCTPartitions; + vp8Params->bErrorResilientMode = mErrorResilience; + return OMX_ErrorNone; + } + + case OMX_IndexParamVideoProfileLevelQuerySupported: { + OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel = + (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param; + + if (profileAndLevel->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + switch (profileAndLevel->nProfileIndex) { + case 0: + profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version0; + break; + + case 1: + profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version1; + break; + + case 2: + profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version2; + break; + + case 3: + profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version3; + break; + + default: + return OMX_ErrorNoMore; + } + + profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain; + return OMX_ErrorNone; + } + + case OMX_IndexParamVideoProfileLevelCurrent: { + OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel = + (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param; + + if (profileAndLevel->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + profileAndLevel->eLevel = mLevel; + profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain; + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::internalGetParameter(index, param); + } +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index, + const OMX_PTR param) { + // can include extension index OMX_INDEXEXTTYPE + const int32_t indexFull = index; + + switch (indexFull) { + case OMX_IndexParamStandardComponentRole: + return internalSetRoleParams( + (const OMX_PARAM_COMPONENTROLETYPE *)param); + + case OMX_IndexParamVideoBitrate: + return internalSetBitrateParams( + (const OMX_VIDEO_PARAM_BITRATETYPE *)param); + + case OMX_IndexParamPortDefinition: + return internalSetPortParams( + (const OMX_PARAM_PORTDEFINITIONTYPE *)param); + + case OMX_IndexParamVideoPortFormat: + return internalSetFormatParams( + (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param); + + case OMX_IndexParamVideoVp8: + return internalSetVp8Params( + (const OMX_VIDEO_PARAM_VP8TYPE *)param); + + case OMX_IndexParamVideoProfileLevelCurrent: + return internalSetProfileLevel( + (const OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param); + + default: + return SimpleSoftOMXComponent::internalSetParameter(index, param); + } +} + +OMX_ERRORTYPE SoftVPXEncoder::internalSetProfileLevel( + const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel) { + if (profileAndLevel->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + if (profileAndLevel->eProfile != OMX_VIDEO_VP8ProfileMain) { + return OMX_ErrorBadParameter; + } + + if (profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version0 || + profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version1 || + profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version2 || + profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version3) { + mLevel = (OMX_VIDEO_VP8LEVELTYPE)profileAndLevel->eLevel; + } else { + return OMX_ErrorBadParameter; + } + + return OMX_ErrorNone; +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params( + const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) { + if (vp8Params->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + if (vp8Params->eProfile != OMX_VIDEO_VP8ProfileMain) { + return OMX_ErrorBadParameter; + } + + if (vp8Params->eLevel == OMX_VIDEO_VP8Level_Version0 || + vp8Params->eLevel == 
OMX_VIDEO_VP8Level_Version1 || + vp8Params->eLevel == OMX_VIDEO_VP8Level_Version2 || + vp8Params->eLevel == OMX_VIDEO_VP8Level_Version3) { + mLevel = vp8Params->eLevel; + } else { + return OMX_ErrorBadParameter; + } + + if (vp8Params->nDCTPartitions <= kMaxDCTPartitions) { + mDCTPartitions = vp8Params->nDCTPartitions; + } else { + return OMX_ErrorBadParameter; + } + + mErrorResilience = vp8Params->bErrorResilientMode; + return OMX_ErrorNone; +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams( + const OMX_VIDEO_PARAM_PORTFORMATTYPE* format) { + if (format->nPortIndex == kInputPortIndex) { + if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar || + format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || + format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) { + mColorFormat = format->eColorFormat; + return OMX_ErrorNone; + } else { + ALOGE("Unsupported color format %i", format->eColorFormat); + return OMX_ErrorUnsupportedSetting; + } + } else if (format->nPortIndex == kOutputPortIndex) { + if (format->eCompressionFormat == OMX_VIDEO_CodingVPX) { + return OMX_ErrorNone; + } else { + return OMX_ErrorUnsupportedSetting; + } + } else { + return OMX_ErrorBadPortIndex; + } +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetRoleParams( + const OMX_PARAM_COMPONENTROLETYPE* role) { + const char* roleText = (const char*)role->cRole; + const size_t roleTextMaxSize = OMX_MAX_STRINGNAME_SIZE - 1; + + if (strncmp(roleText, "video_encoder.vpx", roleTextMaxSize)) { + ALOGE("Unsupported component role"); + return OMX_ErrorBadParameter; + } + + return OMX_ErrorNone; +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams( + const OMX_PARAM_PORTDEFINITIONTYPE* port) { + if (port->nPortIndex == kInputPortIndex) { + mWidth = port->format.video.nFrameWidth; + mHeight = port->format.video.nFrameHeight; + + // xFramerate comes in Q16 format, in frames per second unit + const uint32_t framerate = port->format.video.xFramerate >> 16; + // frame duration is in microseconds + mFrameDurationUs = (1000000/framerate); + + if (port->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar || + port->format.video.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || + port->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) { + mColorFormat = port->format.video.eColorFormat; + } else { + return OMX_ErrorUnsupportedSetting; + } + + return OMX_ErrorNone; + } else if (port->nPortIndex == kOutputPortIndex) { + mBitrate = port->format.video.nBitrate; + return OMX_ErrorNone; + } else { + return OMX_ErrorBadPortIndex; + } +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams( + const OMX_VIDEO_PARAM_BITRATETYPE* bitrate) { + if (bitrate->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + mBitrate = bitrate->nTargetBitrate; + + if (bitrate->eControlRate == OMX_Video_ControlRateVariable) { + mBitrateControlMode = VPX_VBR; + } else if (bitrate->eControlRate == OMX_Video_ControlRateConstant) { + mBitrateControlMode = VPX_CBR; + } else { + return OMX_ErrorUnsupportedSetting; + } + + return OMX_ErrorNone; +} + + +void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { + // Initialize encoder if not already + if (mCodecContext == NULL) { + if (OK != initEncoder()) { + ALOGE("Failed to initialize encoder"); + notify(OMX_EventError, + OMX_ErrorUndefined, + 0, // Extra notification data + NULL); // Notification data pointer + return; + } + } + + vpx_codec_err_t codec_return; + List &inputBufferInfoQueue = getPortQueue(kInputPortIndex); + List &outputBufferInfoQueue = 
getPortQueue(kOutputPortIndex); + + while (!inputBufferInfoQueue.empty() && !outputBufferInfoQueue.empty()) { + BufferInfo *inputBufferInfo = *inputBufferInfoQueue.begin(); + OMX_BUFFERHEADERTYPE *inputBufferHeader = inputBufferInfo->mHeader; + + BufferInfo *outputBufferInfo = *outputBufferInfoQueue.begin(); + OMX_BUFFERHEADERTYPE *outputBufferHeader = outputBufferInfo->mHeader; + + if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) { + inputBufferInfoQueue.erase(inputBufferInfoQueue.begin()); + inputBufferInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inputBufferHeader); + + outputBufferHeader->nFilledLen = 0; + outputBufferHeader->nFlags = OMX_BUFFERFLAG_EOS; + + outputBufferInfoQueue.erase(outputBufferInfoQueue.begin()); + outputBufferInfo->mOwnedByUs = false; + notifyFillBufferDone(outputBufferHeader); + return; + } + + uint8_t* source = inputBufferHeader->pBuffer + inputBufferHeader->nOffset; + + // NOTE: As much as nothing is known about color format + // when it is denoted as AndroidOpaque, it is at least + // assumed to be planar. + if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + ConvertSemiPlanarToPlanar(source, mConversionBuffer, mWidth, mHeight); + source = mConversionBuffer; + } + vpx_image_t raw_frame; + vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, mWidth, mHeight, + kInputBufferAlignment, source); + codec_return = vpx_codec_encode(mCodecContext, + &raw_frame, + inputBufferHeader->nTimeStamp, // in timebase units + mFrameDurationUs, // frame duration in timebase units + 0, // frame flags + VPX_DL_REALTIME); // encoding deadline + if (codec_return != VPX_CODEC_OK) { + ALOGE("vpx encoder failed to encode frame"); + notify(OMX_EventError, + OMX_ErrorUndefined, + 0, // Extra notification data + NULL); // Notification data pointer + return; + } + + vpx_codec_iter_t encoded_packet_iterator = NULL; + const vpx_codec_cx_pkt_t* encoded_packet; + + while (encoded_packet = vpx_codec_get_cx_data(mCodecContext, &encoded_packet_iterator)) { + if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) { + outputBufferHeader->nTimeStamp = encoded_packet->data.frame.pts; + outputBufferHeader->nFlags = 0; + outputBufferHeader->nOffset = 0; + outputBufferHeader->nFilledLen = encoded_packet->data.frame.sz; + memcpy(outputBufferHeader->pBuffer, + encoded_packet->data.frame.buf, + encoded_packet->data.frame.sz); + outputBufferInfo->mOwnedByUs = false; + outputBufferInfoQueue.erase(outputBufferInfoQueue.begin()); + notifyFillBufferDone(outputBufferHeader); + } + } + + inputBufferInfo->mOwnedByUs = false; + inputBufferInfoQueue.erase(inputBufferInfoQueue.begin()); + notifyEmptyBufferDone(inputBufferHeader); + } +} +} // namespace android + + +android::SoftOMXComponent *createSoftOMXComponent( + const char *name, const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, OMX_COMPONENTTYPE **component) { + return new android::SoftVPXEncoder(name, callbacks, appData, component); +} diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h new file mode 100644 index 0000000..3bc05c0 --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h @@ -0,0 +1,203 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SOFT_VPX_ENCODER_H_ + +#define SOFT_VPX_ENCODER_H_ + +#include "SimpleSoftOMXComponent.h" + +#include +#include + +#include "vpx/vpx_encoder.h" +#include "vpx/vpx_codec.h" +#include "vpx/vp8cx.h" + +namespace android { + +// Exposes a vpx encoder as an OMX Component +// +// Boilerplate for callback bindings are taken care +// by the base class SimpleSoftOMXComponent and its +// parent SoftOMXComponent. +// +// Only following encoder settings are available +// - target bitrate +// - rate control (constant / variable) +// - frame rate +// - error resilience +// - token partitioning +// - reconstruction & loop filters (g_profile) +// +// Only following color formats are recognized +// - YUV420Planar +// - YUV420SemiPlanar +// - AndroidOpaque +// +// Following settings are not configurable by the client +// - encoding deadline is realtime +// - multithreaded encoding utilizes a number of threads equal +// to online cpu's available +// - the algorithm interface for encoder is vp8 +// - fractional bits of frame rate is discarded +// - OMX timestamps are in microseconds, therefore +// encoder timebase is fixed to 1/1000000 + +class SoftVPXEncoder : public SimpleSoftOMXComponent { + public: + SoftVPXEncoder(const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component); + + protected: + virtual ~SoftVPXEncoder(); + + // Returns current values for requested OMX + // parameters + virtual OMX_ERRORTYPE internalGetParameter( + OMX_INDEXTYPE index, OMX_PTR param); + + // Validates, extracts and stores relevant OMX + // parameters + virtual OMX_ERRORTYPE internalSetParameter( + OMX_INDEXTYPE index, const OMX_PTR param); + + // OMX callback when buffers available + // Note that both an input and output buffer + // is expected to be available to carry out + // encoding of the frame + virtual void onQueueFilled(OMX_U32 portIndex); + + private: + // number of buffers allocated per port + static const uint32_t kNumBuffers = 4; + + // OMX port indexes that refer to input and + // output ports respectively + static const uint32_t kInputPortIndex = 0; + static const uint32_t kOutputPortIndex = 1; + + // Byte-alignment required for buffers + static const uint32_t kInputBufferAlignment = 1; + static const uint32_t kOutputBufferAlignment = 2; + + // Max value supported for DCT partitions + static const uint32_t kMaxDCTPartitions = 3; + + // Number of supported input color formats + static const uint32_t kNumberOfSupportedColorFormats = 3; + + // vpx specific opaque data structure that + // stores encoder state + vpx_codec_ctx_t* mCodecContext; + + // vpx specific data structure that + // stores encoder configuration + vpx_codec_enc_cfg_t* mCodecConfiguration; + + // vpx specific read-only data structure + // that specifies algorithm interface (e.g. vp8) + vpx_codec_iface_t* mCodecInterface; + + // Width of the input frames + int32_t mWidth; + + // Height of the input frames + int32_t mHeight; + + // Target bitrate set for the encoder, in bits per second. 
+ int32_t mBitrate; + + // Bitrate control mode, either constant or variable + vpx_rc_mode mBitrateControlMode; + + // Frame duration is the reciprocal of framerate, denoted + // in microseconds + uint64_t mFrameDurationUs; + + // vp8 specific configuration parameter + // that enables token partitioning of + // the stream into substreams + int32_t mDCTPartitions; + + // Parameter that denotes whether error resilience + // is enabled in encoder + OMX_BOOL mErrorResilience; + + // Color format for the input port + OMX_COLOR_FORMATTYPE mColorFormat; + + // Encoder profile corresponding to OMX level parameter + // + // The inconsistency in the naming is caused by + // OMX spec referring vpx profiles (g_profile) + // as "levels" whereas using the name "profile" for + // something else. + OMX_VIDEO_VP8LEVELTYPE mLevel; + + // Conversion buffer is needed to convert semi + // planar yuv420 to planar format + // It is only allocated if input format is + // indeed YUV420SemiPlanar. + uint8_t* mConversionBuffer; + + // Initializes input and output OMX ports with sensible + // default values. + void initPorts(); + + // Initializes vpx encoder with available settings. + status_t initEncoder(); + + // Releases vpx encoder instance, with it's associated + // data structures. + // + // Unless called earlier, this is handled by the + // dtor. + status_t releaseEncoder(); + + // Handles port changes with respect to color formats + OMX_ERRORTYPE internalSetFormatParams( + const OMX_VIDEO_PARAM_PORTFORMATTYPE* format); + + // Verifies the component role tried to be set to this OMX component is + // strictly video_encoder.vpx + OMX_ERRORTYPE internalSetRoleParams( + const OMX_PARAM_COMPONENTROLETYPE* role); + + // Updates bitrate to reflect port settings. + OMX_ERRORTYPE internalSetBitrateParams( + const OMX_VIDEO_PARAM_BITRATETYPE* bitrate); + + // Handles port definition changes. + OMX_ERRORTYPE internalSetPortParams( + const OMX_PARAM_PORTDEFINITIONTYPE* port); + + // Handles vp8 specific parameters. + OMX_ERRORTYPE internalSetVp8Params( + const OMX_VIDEO_PARAM_VP8TYPE* vp8Params); + + // Updates encoder profile + OMX_ERRORTYPE internalSetProfileLevel( + const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel); + + DISALLOW_EVIL_CONSTRUCTORS(SoftVPXEncoder); +}; + +} // namespace android + +#endif // SOFT_VPX_ENCODER_H_ -- cgit v1.1 From a2eb22c1de262aa3fa7c356537ac2fe165afdf3d Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 7 Feb 2013 10:56:14 -0800 Subject: Allow for dynamic reconfiguration of the video bitrate used to encode video while running as a wfd source. 
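
Seen from the wifi-display source, the new hook is exercised roughly as in this sketch (it mirrors the Converter::doMoreWork() change in the diff below; "media.wfd.video-bitrate" and the "videoBitrate" key are the ones the patch introduces):

    // Poll the property and, when it changes, push the new target bitrate
    // into the running encoder without tearing it down.
    int32_t videoBitrate = getBitrate("media.wfd.video-bitrate", 5000000);
    if (videoBitrate != mPrevVideoBitrate) {
        sp<AMessage> params = new AMessage;
        params->setInt32("videoBitrate", videoBitrate);
        mEncoder->setParameters(params);   // MediaCodec -> ACodec -> OMX setConfig
        mPrevVideoBitrate = videoBitrate;
    }

ACodec translates the "videoBitrate" entry into an OMX_IndexConfigVideoBitrate setConfig() call on the output port, so the encoder picks up the new rate mid-stream.
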
Change-Id: I44f7b2350c88fc5807047c61bfe594ef8fa79275 --- include/media/stagefright/ACodec.h | 4 ++ include/media/stagefright/MediaCodec.h | 5 +++ media/libstagefright/ACodec.cpp | 48 ++++++++++++++++++++++ media/libstagefright/MediaCodec.cpp | 31 ++++++++++++++ .../wifi-display/source/Converter.cpp | 14 +++++++ .../libstagefright/wifi-display/source/Converter.h | 2 + 6 files changed, 104 insertions(+) diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h index df1c46b..317b6f0 100644 --- a/include/media/stagefright/ACodec.h +++ b/include/media/stagefright/ACodec.h @@ -54,6 +54,8 @@ struct ACodec : public AHierarchicalStateMachine { void signalResume(); void initiateShutdown(bool keepComponentAllocated = false); + void signalSetParameters(const sp &msg); + void initiateAllocateComponent(const sp &msg); void initiateConfigureComponent(const sp &msg); void initiateStart(); @@ -105,6 +107,7 @@ private: kWhatConfigureComponent = 'conf', kWhatStart = 'star', kWhatRequestIDRFrame = 'ridr', + kWhatSetParameters = 'setP', }; enum { @@ -270,6 +273,7 @@ private: status_t internalError = UNKNOWN_ERROR); status_t requestIDRFrame(); + status_t setParameters(const sp ¶ms); DISALLOW_EVIL_CONSTRUCTORS(ACodec); }; diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h index 88aabf6..3f0d3b3 100644 --- a/include/media/stagefright/MediaCodec.h +++ b/include/media/stagefright/MediaCodec.h @@ -115,6 +115,8 @@ struct MediaCodec : public AHandler { status_t getName(AString *componentName) const; + status_t setParameters(const sp ¶ms); + protected: virtual ~MediaCodec(); virtual void onMessageReceived(const sp &msg); @@ -157,6 +159,7 @@ private: kWhatRequestIDRFrame = 'ridr', kWhatRequestActivityNotification = 'racN', kWhatGetName = 'getN', + kWhatSetParameters = 'setP', }; enum { @@ -230,6 +233,8 @@ private: void postActivityNotificationIfPossible(); + status_t onSetParameters(const sp ¶ms); + DISALLOW_EVIL_CONSTRUCTORS(MediaCodec); }; diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 7b27843..a6cc4eb 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -374,6 +374,12 @@ void ACodec::initiateSetup(const sp &msg) { msg->post(); } +void ACodec::signalSetParameters(const sp ¶ms) { + sp msg = new AMessage(kWhatSetParameters, id()); + msg->setMessage("params", params); + msg->post(); +} + void ACodec::initiateAllocateComponent(const sp &msg) { msg->setWhat(kWhatAllocateComponent); msg->setTarget(id()); @@ -3550,6 +3556,23 @@ bool ACodec::ExecutingState::onMessageReceived(const sp &msg) { break; } + case kWhatSetParameters: + { + sp params; + CHECK(msg->findMessage("params", ¶ms)); + + status_t err = mCodec->setParameters(params); + + sp reply; + if (msg->findMessage("reply", &reply)) { + reply->setInt32("err", err); + reply->post(); + } + + handled = true; + break; + } + default: handled = BaseState::onMessageReceived(msg); break; @@ -3558,6 +3581,31 @@ bool ACodec::ExecutingState::onMessageReceived(const sp &msg) { return handled; } +status_t ACodec::setParameters(const sp ¶ms) { + int32_t videoBitrate; + if (params->findInt32("videoBitrate", &videoBitrate)) { + OMX_VIDEO_CONFIG_BITRATETYPE configParams; + InitOMXParams(&configParams); + configParams.nPortIndex = kPortIndexOutput; + configParams.nEncodeBitrate = videoBitrate; + + status_t err = mOMX->setConfig( + mNode, + OMX_IndexConfigVideoBitrate, + &configParams, + sizeof(configParams)); + + if (err != OK) { + 
ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d", + videoBitrate, err); + + return err; + } + } + + return OK; +} + bool ACodec::ExecutingState::onOMXEvent( OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { switch (event) { diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index cb8a651..77aceb7 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -1203,6 +1203,23 @@ void MediaCodec::onMessageReceived(const sp &msg) { break; } + case kWhatSetParameters: + { + uint32_t replyID; + CHECK(msg->senderAwaitsResponse(&replyID)); + + sp params; + CHECK(msg->findMessage("params", ¶ms)); + + status_t err = onSetParameters(params); + + sp response = new AMessage; + response->setInt32("err", err); + + response->postReply(replyID); + break; + } + default: TRESPASS(); } @@ -1556,4 +1573,18 @@ void MediaCodec::postActivityNotificationIfPossible() { } } +status_t MediaCodec::setParameters(const sp ¶ms) { + sp msg = new AMessage(kWhatSetParameters, id()); + msg->setMessage("params", params); + + sp response; + return PostAndAwaitResponse(msg, &response); +} + +status_t MediaCodec::onSetParameters(const sp ¶ms) { + mCodec->signalSetParameters(params); + + return OK; +} + } // namespace android diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index 5628dec..376b0df 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -54,6 +54,7 @@ Converter::Converter( ,mFirstSilentFrameUs(-1ll) ,mInSilentMode(false) #endif + ,mPrevVideoBitrate(-1) { AString mime; CHECK(mInputFormat->findString("mime", &mime)); @@ -185,6 +186,7 @@ status_t Converter::initEncoder() { int32_t audioBitrate = getBitrate("media.wfd.audio-bitrate", 128000); int32_t videoBitrate = getBitrate("media.wfd.video-bitrate", 5000000); + mPrevVideoBitrate = videoBitrate; ALOGI("using audio bitrate of %d bps, video bitrate of %d bps", audioBitrate, videoBitrate); @@ -606,6 +608,18 @@ status_t Converter::feedEncoderInputBuffers() { } status_t Converter::doMoreWork() { + if (mIsVideo) { + int32_t videoBitrate = getBitrate("media.wfd.video-bitrate", 5000000); + if (videoBitrate != mPrevVideoBitrate) { + sp params = new AMessage; + + params->setInt32("videoBitrate", videoBitrate); + mEncoder->setParameters(params); + + mPrevVideoBitrate = videoBitrate; + } + } + status_t err; for (;;) { diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h index 3357d61..57802bd 100644 --- a/media/libstagefright/wifi-display/source/Converter.h +++ b/media/libstagefright/wifi-display/source/Converter.h @@ -100,6 +100,8 @@ private: sp mPartialAudioAU; + int32_t mPrevVideoBitrate; + status_t initEncoder(); void releaseEncoder(); -- cgit v1.1 From 26b0a9d007e77e088af9ff3810734728f0558e85 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 7 Feb 2013 11:38:08 -0800 Subject: A few more patches to fix wfd tcp unicast transport. 
Change-Id: Ie2f1b1e56c487ac4c3ef19d9e79022a35084e042 --- media/libstagefright/wifi-display/sink/DirectRenderer.cpp | 2 +- media/libstagefright/wifi-display/sink/RTPSink.cpp | 14 +++++++++++++- media/libstagefright/wifi-display/sink/RTPSink.h | 3 +++ 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp index 23cf6fd..d7f169f 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp @@ -35,7 +35,7 @@ namespace android { -#if 0 +#if 1 // static const int64_t DirectRenderer::kPacketLostDelayUs = 80000ll; diff --git a/media/libstagefright/wifi-display/sink/RTPSink.cpp b/media/libstagefright/wifi-display/sink/RTPSink.cpp index be54595..3c90a1e 100644 --- a/media/libstagefright/wifi-display/sink/RTPSink.cpp +++ b/media/libstagefright/wifi-display/sink/RTPSink.cpp @@ -250,6 +250,8 @@ RTPSink::RTPSink( : mNetSession(netSession), mSurfaceTex(bufferProducer), mNotify(notify), + mUsingTCPTransport(false), + mUsingTCPInterleaving(false), mRTPPort(0), mRTPSessionID(0), mRTCPSessionID(0), @@ -280,6 +282,9 @@ RTPSink::~RTPSink() { } status_t RTPSink::init(bool usingTCPTransport, bool usingTCPInterleaving) { + mUsingTCPTransport = usingTCPTransport; + mUsingTCPInterleaving = usingTCPInterleaving; + if (usingTCPInterleaving) { return OK; } @@ -717,7 +722,9 @@ status_t RTPSink::connect( mRTCPSessionID, buf->data(), buf->size()); #endif - scheduleSendRR(); + if (!mUsingTCPTransport) { + scheduleSendRR(); + } return OK; } @@ -820,6 +827,11 @@ void RTPSink::onSendRR() { } void RTPSink::onPacketLost(const sp &msg) { + if (mUsingTCPTransport) { + ALOGW("huh? lost a packet even though using reliable transport?"); + return; + } + uint32_t srcId; CHECK(msg->findInt32("ssrc", (int32_t *)&srcId)); diff --git a/media/libstagefright/wifi-display/sink/RTPSink.h b/media/libstagefright/wifi-display/sink/RTPSink.h index f9cbce9..4706c6d 100644 --- a/media/libstagefright/wifi-display/sink/RTPSink.h +++ b/media/libstagefright/wifi-display/sink/RTPSink.h @@ -78,6 +78,9 @@ private: sp mNotify; KeyedVector > mSources; + bool mUsingTCPTransport; + bool mUsingTCPInterleaving; + int32_t mRTPPort; int32_t mRTPSessionID; // in TCP unicast mode these are just server -- cgit v1.1 From 0a694951c00f2135c8968fd2205f71899997a8ad Mon Sep 17 00:00:00 2001 From: Mike Lockwoood Date: Fri, 8 Feb 2013 13:25:01 -0800 Subject: MTP: Write initial data to correct file offset in SendPartialObject Change-Id: I84288aeda3e65e6e6487f11d32a72910cd16cff2 --- media/mtp/MtpServer.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp index 662a93d..8568dfc 100644 --- a/media/mtp/MtpServer.cpp +++ b/media/mtp/MtpServer.cpp @@ -1118,7 +1118,7 @@ MtpResponseCode MtpServer::doSendPartialObject() { int initialData = ret - MTP_CONTAINER_HEADER_SIZE; if (initialData > 0) { - ret = write(edit->mFD, mData.getData(), initialData); + ret = pwrite(edit->mFD, mData.getData(), initialData, offset); offset += initialData; length -= initialData; } -- cgit v1.1 From 513b8b238caa52f8ddf5c85109dbf362c515185f Mon Sep 17 00:00:00 2001 From: James Dong Date: Fri, 8 Feb 2013 18:16:02 -0800 Subject: Revert "Adds VPX encoding support for stagefright." This lib was not part of the build, but it is built anyway for userdebug image (not for eng though). let me revert it for now... 
This reverts commit 6c6bb9873f55853fe74d8f45ad3ae116636d8be7. --- media/libstagefright/codecs/on2/enc/Android.mk | 24 - .../codecs/on2/enc/MODULE_LICENSE_APACHE2 | 0 media/libstagefright/codecs/on2/enc/NOTICE | 190 ------ .../codecs/on2/enc/SoftVPXEncoder.cpp | 685 --------------------- .../libstagefright/codecs/on2/enc/SoftVPXEncoder.h | 203 ------ 5 files changed, 1102 deletions(-) delete mode 100644 media/libstagefright/codecs/on2/enc/Android.mk delete mode 100644 media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 delete mode 100644 media/libstagefright/codecs/on2/enc/NOTICE delete mode 100644 media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp delete mode 100644 media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h diff --git a/media/libstagefright/codecs/on2/enc/Android.mk b/media/libstagefright/codecs/on2/enc/Android.mk deleted file mode 100644 index 5d3317c..0000000 --- a/media/libstagefright/codecs/on2/enc/Android.mk +++ /dev/null @@ -1,24 +0,0 @@ -LOCAL_PATH := $(call my-dir) -include $(CLEAR_VARS) - -LOCAL_SRC_FILES := \ - SoftVPXEncoder.cpp - -LOCAL_C_INCLUDES := \ - $(TOP)/external/libvpx/libvpx \ - $(TOP)/external/openssl/include \ - $(TOP)/external/libvpx/libvpx/vpx_codec \ - $(TOP)/external/libvpx/libvpx/vpx_ports \ - frameworks/av/media/libstagefright/include \ - frameworks/native/include/media/openmax \ - -LOCAL_STATIC_LIBRARIES := \ - libvpx - -LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils \ - -LOCAL_MODULE := libstagefright_soft_vpxenc -LOCAL_MODULE_TAGS := optional - -include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 deleted file mode 100644 index e69de29..0000000 diff --git a/media/libstagefright/codecs/on2/enc/NOTICE b/media/libstagefright/codecs/on2/enc/NOTICE deleted file mode 100644 index faed58a..0000000 --- a/media/libstagefright/codecs/on2/enc/NOTICE +++ /dev/null @@ -1,190 +0,0 @@ - - Copyright (c) 2005-2013, The Android Open Source Project - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp deleted file mode 100644 index cc38dc3..0000000 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp +++ /dev/null @@ -1,685 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// #define LOG_NDEBUG 0 -#define LOG_TAG "SoftVPXEncoder" -#include "SoftVPXEncoder.h" - -#include - -#include -#include - -namespace android { - - -template -static void InitOMXParams(T *params) { - params->nSize = sizeof(T); - // OMX IL 1.1.2 - params->nVersion.s.nVersionMajor = 1; - params->nVersion.s.nVersionMinor = 1; - params->nVersion.s.nRevision = 2; - params->nVersion.s.nStep = 0; -} - - -static int GetCPUCoreCount() { - int cpuCoreCount = 1; -#if defined(_SC_NPROCESSORS_ONLN) - cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); -#else - // _SC_NPROC_ONLN must be defined... 
- cpuCoreCount = sysconf(_SC_NPROC_ONLN); -#endif - CHECK_GE(cpuCoreCount, 1); - return cpuCoreCount; -} - - -// This color conversion utility is copied from SoftMPEG4Encoder.cpp -inline static void ConvertSemiPlanarToPlanar(uint8_t *inyuv, - uint8_t* outyuv, - int32_t width, - int32_t height) { - int32_t outYsize = width * height; - uint32_t *outy = (uint32_t *) outyuv; - uint16_t *outcb = (uint16_t *) (outyuv + outYsize); - uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2)); - - /* Y copying */ - memcpy(outy, inyuv, outYsize); - - /* U & V copying */ - uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize); - for (int32_t i = height >> 1; i > 0; --i) { - for (int32_t j = width >> 2; j > 0; --j) { - uint32_t temp = *inyuv_4++; - uint32_t tempU = temp & 0xFF; - tempU = tempU | ((temp >> 8) & 0xFF00); - - uint32_t tempV = (temp >> 8) & 0xFF; - tempV = tempV | ((temp >> 16) & 0xFF00); - - // Flip U and V - *outcb++ = tempV; - *outcr++ = tempU; - } - } -} - - -SoftVPXEncoder::SoftVPXEncoder(const char *name, - const OMX_CALLBACKTYPE *callbacks, - OMX_PTR appData, - OMX_COMPONENTTYPE **component) - : SimpleSoftOMXComponent(name, callbacks, appData, component), - mCodecContext(NULL), - mCodecConfiguration(NULL), - mCodecInterface(NULL), - mWidth(176), - mHeight(144), - mBitrate(192000), // in bps - mBitrateControlMode(VPX_VBR), // variable bitrate - mFrameDurationUs(33333), // Defaults to 30 fps - mDCTPartitions(0), - mErrorResilience(OMX_FALSE), - mColorFormat(OMX_COLOR_FormatYUV420Planar), - mLevel(OMX_VIDEO_VP8Level_Version0), - mConversionBuffer(NULL) { - - initPorts(); -} - - -SoftVPXEncoder::~SoftVPXEncoder() { - releaseEncoder(); -} - - -void SoftVPXEncoder::initPorts() { - OMX_PARAM_PORTDEFINITIONTYPE inputPort; - OMX_PARAM_PORTDEFINITIONTYPE outputPort; - - InitOMXParams(&inputPort); - InitOMXParams(&outputPort); - - inputPort.nBufferCountMin = kNumBuffers; - inputPort.nBufferCountActual = inputPort.nBufferCountMin; - inputPort.bEnabled = OMX_TRUE; - inputPort.bPopulated = OMX_FALSE; - inputPort.eDomain = OMX_PortDomainVideo; - inputPort.bBuffersContiguous = OMX_FALSE; - inputPort.format.video.pNativeRender = NULL; - inputPort.format.video.nFrameWidth = mWidth; - inputPort.format.video.nFrameHeight = mHeight; - inputPort.format.video.nStride = inputPort.format.video.nFrameWidth; - inputPort.format.video.nSliceHeight = inputPort.format.video.nFrameHeight; - inputPort.format.video.nBitrate = 0; - // frameRate is reciprocal of frameDuration, which is - // in microseconds. It is also in Q16 format. 
- inputPort.format.video.xFramerate = (1000000/mFrameDurationUs) << 16; - inputPort.format.video.bFlagErrorConcealment = OMX_FALSE; - inputPort.nPortIndex = kInputPortIndex; - inputPort.eDir = OMX_DirInput; - inputPort.nBufferAlignment = kInputBufferAlignment; - inputPort.format.video.cMIMEType = - const_cast(MEDIA_MIMETYPE_VIDEO_RAW); - inputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused; - inputPort.format.video.eColorFormat = mColorFormat; - inputPort.format.video.pNativeWindow = NULL; - inputPort.nBufferSize = - (inputPort.format.video.nStride * - inputPort.format.video.nSliceHeight * 3) / 2; - - addPort(inputPort); - - outputPort.nBufferCountMin = kNumBuffers; - outputPort.nBufferCountActual = outputPort.nBufferCountMin; - outputPort.bEnabled = OMX_TRUE; - outputPort.bPopulated = OMX_FALSE; - outputPort.eDomain = OMX_PortDomainVideo; - outputPort.bBuffersContiguous = OMX_FALSE; - outputPort.format.video.pNativeRender = NULL; - outputPort.format.video.nFrameWidth = mWidth; - outputPort.format.video.nFrameHeight = mHeight; - outputPort.format.video.nStride = outputPort.format.video.nFrameWidth; - outputPort.format.video.nSliceHeight = outputPort.format.video.nFrameHeight; - outputPort.format.video.nBitrate = mBitrate; - outputPort.format.video.xFramerate = 0; - outputPort.format.video.bFlagErrorConcealment = OMX_FALSE; - outputPort.nPortIndex = kOutputPortIndex; - outputPort.eDir = OMX_DirOutput; - outputPort.nBufferAlignment = kOutputBufferAlignment; - outputPort.format.video.cMIMEType = - const_cast(MEDIA_MIMETYPE_VIDEO_VPX); - outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVPX; - outputPort.format.video.eColorFormat = OMX_COLOR_FormatUnused; - outputPort.format.video.pNativeWindow = NULL; - outputPort.nBufferSize = 256 * 1024; // arbitrary - - addPort(outputPort); -} - - -status_t SoftVPXEncoder::initEncoder() { - vpx_codec_err_t codec_return; - - mCodecContext = new vpx_codec_ctx_t; - mCodecConfiguration = new vpx_codec_enc_cfg_t; - mCodecInterface = vpx_codec_vp8_cx(); - - if (mCodecInterface == NULL) { - return UNKNOWN_ERROR; - } - - codec_return = vpx_codec_enc_config_default(mCodecInterface, - mCodecConfiguration, - 0); // Codec specific flags - - if (codec_return != VPX_CODEC_OK) { - ALOGE("Error populating default configuration for vpx encoder."); - return UNKNOWN_ERROR; - } - - mCodecConfiguration->g_w = mWidth; - mCodecConfiguration->g_h = mHeight; - mCodecConfiguration->g_threads = GetCPUCoreCount(); - mCodecConfiguration->g_error_resilient = mErrorResilience; - - switch (mLevel) { - case OMX_VIDEO_VP8Level_Version0: - mCodecConfiguration->g_profile = 0; - break; - - case OMX_VIDEO_VP8Level_Version1: - mCodecConfiguration->g_profile = 1; - break; - - case OMX_VIDEO_VP8Level_Version2: - mCodecConfiguration->g_profile = 2; - break; - - case OMX_VIDEO_VP8Level_Version3: - mCodecConfiguration->g_profile = 3; - break; - - default: - mCodecConfiguration->g_profile = 0; - } - - // OMX timebase unit is microsecond - // g_timebase is in seconds (i.e. 
1/1000000 seconds) - mCodecConfiguration->g_timebase.num = 1; - mCodecConfiguration->g_timebase.den = 1000000; - // rc_target_bitrate is in kbps, mBitrate in bps - mCodecConfiguration->rc_target_bitrate = mBitrate/1000; - mCodecConfiguration->rc_end_usage = mBitrateControlMode; - - codec_return = vpx_codec_enc_init(mCodecContext, - mCodecInterface, - mCodecConfiguration, - 0); // flags - - if (codec_return != VPX_CODEC_OK) { - ALOGE("Error initializing vpx encoder"); - return UNKNOWN_ERROR; - } - - codec_return = vpx_codec_control(mCodecContext, - VP8E_SET_TOKEN_PARTITIONS, - mDCTPartitions); - if (codec_return != VPX_CODEC_OK) { - ALOGE("Error setting dct partitions for vpx encoder."); - return UNKNOWN_ERROR; - } - - if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { - if (mConversionBuffer == NULL) { - mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2); - if (mConversionBuffer == NULL) { - ALOGE("Allocating conversion buffer failed."); - return UNKNOWN_ERROR; - } - } - } - return OK; -} - - -status_t SoftVPXEncoder::releaseEncoder() { - if (mCodecContext != NULL) { - vpx_codec_destroy(mCodecContext); - delete mCodecContext; - mCodecContext = NULL; - } - - if (mCodecConfiguration != NULL) { - delete mCodecConfiguration; - mCodecConfiguration = NULL; - } - - if (mConversionBuffer != NULL) { - delete mConversionBuffer; - mConversionBuffer = NULL; - } - - // this one is not allocated by us - mCodecInterface = NULL; - - return OK; -} - - -OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index, - OMX_PTR param) { - // can include extension index OMX_INDEXEXTTYPE - const int32_t indexFull = index; - - switch (indexFull) { - case OMX_IndexParamVideoPortFormat: { - OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams = - (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param; - - if (formatParams->nPortIndex == kInputPortIndex) { - if (formatParams->nIndex >= kNumberOfSupportedColorFormats) { - return OMX_ErrorNoMore; - } - - // Color formats, in order of preference - if (formatParams->nIndex == 0) { - formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar; - } else if (formatParams->nIndex == 1) { - formatParams->eColorFormat = - OMX_COLOR_FormatYUV420SemiPlanar; - } else { - formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque; - } - - formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused; - // Converting from microseconds - // Also converting to Q16 format - formatParams->xFramerate = (1000000/mFrameDurationUs) << 16; - return OMX_ErrorNone; - } else if (formatParams->nPortIndex == kOutputPortIndex) { - formatParams->eCompressionFormat = OMX_VIDEO_CodingVPX; - formatParams->eColorFormat = OMX_COLOR_FormatUnused; - formatParams->xFramerate = 0; - return OMX_ErrorNone; - } else { - return OMX_ErrorBadPortIndex; - } - } - - case OMX_IndexParamVideoBitrate: { - OMX_VIDEO_PARAM_BITRATETYPE *bitrate = - (OMX_VIDEO_PARAM_BITRATETYPE *)param; - - if (bitrate->nPortIndex != kOutputPortIndex) { - return OMX_ErrorUnsupportedIndex; - } - - bitrate->nTargetBitrate = mBitrate; - - if (mBitrateControlMode == VPX_VBR) { - bitrate->eControlRate = OMX_Video_ControlRateVariable; - } else if (mBitrateControlMode == VPX_CBR) { - bitrate->eControlRate = OMX_Video_ControlRateConstant; - } else { - return OMX_ErrorUnsupportedSetting; - } - return OMX_ErrorNone; - } - - // VP8 specific parameters that use extension headers - case OMX_IndexParamVideoVp8: { - OMX_VIDEO_PARAM_VP8TYPE *vp8Params = - (OMX_VIDEO_PARAM_VP8TYPE *)param; - - if (vp8Params->nPortIndex != kOutputPortIndex) { - return 
OMX_ErrorUnsupportedIndex; - } - - vp8Params->eProfile = OMX_VIDEO_VP8ProfileMain; - vp8Params->eLevel = mLevel; - vp8Params->nDCTPartitions = mDCTPartitions; - vp8Params->bErrorResilientMode = mErrorResilience; - return OMX_ErrorNone; - } - - case OMX_IndexParamVideoProfileLevelQuerySupported: { - OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel = - (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param; - - if (profileAndLevel->nPortIndex != kOutputPortIndex) { - return OMX_ErrorUnsupportedIndex; - } - - switch (profileAndLevel->nProfileIndex) { - case 0: - profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version0; - break; - - case 1: - profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version1; - break; - - case 2: - profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version2; - break; - - case 3: - profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version3; - break; - - default: - return OMX_ErrorNoMore; - } - - profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain; - return OMX_ErrorNone; - } - - case OMX_IndexParamVideoProfileLevelCurrent: { - OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel = - (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param; - - if (profileAndLevel->nPortIndex != kOutputPortIndex) { - return OMX_ErrorUnsupportedIndex; - } - - profileAndLevel->eLevel = mLevel; - profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain; - return OMX_ErrorNone; - } - - default: - return SimpleSoftOMXComponent::internalGetParameter(index, param); - } -} - - -OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index, - const OMX_PTR param) { - // can include extension index OMX_INDEXEXTTYPE - const int32_t indexFull = index; - - switch (indexFull) { - case OMX_IndexParamStandardComponentRole: - return internalSetRoleParams( - (const OMX_PARAM_COMPONENTROLETYPE *)param); - - case OMX_IndexParamVideoBitrate: - return internalSetBitrateParams( - (const OMX_VIDEO_PARAM_BITRATETYPE *)param); - - case OMX_IndexParamPortDefinition: - return internalSetPortParams( - (const OMX_PARAM_PORTDEFINITIONTYPE *)param); - - case OMX_IndexParamVideoPortFormat: - return internalSetFormatParams( - (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param); - - case OMX_IndexParamVideoVp8: - return internalSetVp8Params( - (const OMX_VIDEO_PARAM_VP8TYPE *)param); - - case OMX_IndexParamVideoProfileLevelCurrent: - return internalSetProfileLevel( - (const OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param); - - default: - return SimpleSoftOMXComponent::internalSetParameter(index, param); - } -} - -OMX_ERRORTYPE SoftVPXEncoder::internalSetProfileLevel( - const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel) { - if (profileAndLevel->nPortIndex != kOutputPortIndex) { - return OMX_ErrorUnsupportedIndex; - } - - if (profileAndLevel->eProfile != OMX_VIDEO_VP8ProfileMain) { - return OMX_ErrorBadParameter; - } - - if (profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version0 || - profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version1 || - profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version2 || - profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version3) { - mLevel = (OMX_VIDEO_VP8LEVELTYPE)profileAndLevel->eLevel; - } else { - return OMX_ErrorBadParameter; - } - - return OMX_ErrorNone; -} - - -OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params( - const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) { - if (vp8Params->nPortIndex != kOutputPortIndex) { - return OMX_ErrorUnsupportedIndex; - } - - if (vp8Params->eProfile != OMX_VIDEO_VP8ProfileMain) { - return OMX_ErrorBadParameter; - } - - if (vp8Params->eLevel == OMX_VIDEO_VP8Level_Version0 || - vp8Params->eLevel == 
OMX_VIDEO_VP8Level_Version1 || - vp8Params->eLevel == OMX_VIDEO_VP8Level_Version2 || - vp8Params->eLevel == OMX_VIDEO_VP8Level_Version3) { - mLevel = vp8Params->eLevel; - } else { - return OMX_ErrorBadParameter; - } - - if (vp8Params->nDCTPartitions <= kMaxDCTPartitions) { - mDCTPartitions = vp8Params->nDCTPartitions; - } else { - return OMX_ErrorBadParameter; - } - - mErrorResilience = vp8Params->bErrorResilientMode; - return OMX_ErrorNone; -} - - -OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams( - const OMX_VIDEO_PARAM_PORTFORMATTYPE* format) { - if (format->nPortIndex == kInputPortIndex) { - if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar || - format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || - format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) { - mColorFormat = format->eColorFormat; - return OMX_ErrorNone; - } else { - ALOGE("Unsupported color format %i", format->eColorFormat); - return OMX_ErrorUnsupportedSetting; - } - } else if (format->nPortIndex == kOutputPortIndex) { - if (format->eCompressionFormat == OMX_VIDEO_CodingVPX) { - return OMX_ErrorNone; - } else { - return OMX_ErrorUnsupportedSetting; - } - } else { - return OMX_ErrorBadPortIndex; - } -} - - -OMX_ERRORTYPE SoftVPXEncoder::internalSetRoleParams( - const OMX_PARAM_COMPONENTROLETYPE* role) { - const char* roleText = (const char*)role->cRole; - const size_t roleTextMaxSize = OMX_MAX_STRINGNAME_SIZE - 1; - - if (strncmp(roleText, "video_encoder.vpx", roleTextMaxSize)) { - ALOGE("Unsupported component role"); - return OMX_ErrorBadParameter; - } - - return OMX_ErrorNone; -} - - -OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams( - const OMX_PARAM_PORTDEFINITIONTYPE* port) { - if (port->nPortIndex == kInputPortIndex) { - mWidth = port->format.video.nFrameWidth; - mHeight = port->format.video.nFrameHeight; - - // xFramerate comes in Q16 format, in frames per second unit - const uint32_t framerate = port->format.video.xFramerate >> 16; - // frame duration is in microseconds - mFrameDurationUs = (1000000/framerate); - - if (port->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar || - port->format.video.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || - port->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) { - mColorFormat = port->format.video.eColorFormat; - } else { - return OMX_ErrorUnsupportedSetting; - } - - return OMX_ErrorNone; - } else if (port->nPortIndex == kOutputPortIndex) { - mBitrate = port->format.video.nBitrate; - return OMX_ErrorNone; - } else { - return OMX_ErrorBadPortIndex; - } -} - - -OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams( - const OMX_VIDEO_PARAM_BITRATETYPE* bitrate) { - if (bitrate->nPortIndex != kOutputPortIndex) { - return OMX_ErrorUnsupportedIndex; - } - - mBitrate = bitrate->nTargetBitrate; - - if (bitrate->eControlRate == OMX_Video_ControlRateVariable) { - mBitrateControlMode = VPX_VBR; - } else if (bitrate->eControlRate == OMX_Video_ControlRateConstant) { - mBitrateControlMode = VPX_CBR; - } else { - return OMX_ErrorUnsupportedSetting; - } - - return OMX_ErrorNone; -} - - -void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { - // Initialize encoder if not already - if (mCodecContext == NULL) { - if (OK != initEncoder()) { - ALOGE("Failed to initialize encoder"); - notify(OMX_EventError, - OMX_ErrorUndefined, - 0, // Extra notification data - NULL); // Notification data pointer - return; - } - } - - vpx_codec_err_t codec_return; - List &inputBufferInfoQueue = getPortQueue(kInputPortIndex); - List &outputBufferInfoQueue = 
getPortQueue(kOutputPortIndex); - - while (!inputBufferInfoQueue.empty() && !outputBufferInfoQueue.empty()) { - BufferInfo *inputBufferInfo = *inputBufferInfoQueue.begin(); - OMX_BUFFERHEADERTYPE *inputBufferHeader = inputBufferInfo->mHeader; - - BufferInfo *outputBufferInfo = *outputBufferInfoQueue.begin(); - OMX_BUFFERHEADERTYPE *outputBufferHeader = outputBufferInfo->mHeader; - - if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) { - inputBufferInfoQueue.erase(inputBufferInfoQueue.begin()); - inputBufferInfo->mOwnedByUs = false; - notifyEmptyBufferDone(inputBufferHeader); - - outputBufferHeader->nFilledLen = 0; - outputBufferHeader->nFlags = OMX_BUFFERFLAG_EOS; - - outputBufferInfoQueue.erase(outputBufferInfoQueue.begin()); - outputBufferInfo->mOwnedByUs = false; - notifyFillBufferDone(outputBufferHeader); - return; - } - - uint8_t* source = inputBufferHeader->pBuffer + inputBufferHeader->nOffset; - - // NOTE: As much as nothing is known about color format - // when it is denoted as AndroidOpaque, it is at least - // assumed to be planar. - if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { - ConvertSemiPlanarToPlanar(source, mConversionBuffer, mWidth, mHeight); - source = mConversionBuffer; - } - vpx_image_t raw_frame; - vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, mWidth, mHeight, - kInputBufferAlignment, source); - codec_return = vpx_codec_encode(mCodecContext, - &raw_frame, - inputBufferHeader->nTimeStamp, // in timebase units - mFrameDurationUs, // frame duration in timebase units - 0, // frame flags - VPX_DL_REALTIME); // encoding deadline - if (codec_return != VPX_CODEC_OK) { - ALOGE("vpx encoder failed to encode frame"); - notify(OMX_EventError, - OMX_ErrorUndefined, - 0, // Extra notification data - NULL); // Notification data pointer - return; - } - - vpx_codec_iter_t encoded_packet_iterator = NULL; - const vpx_codec_cx_pkt_t* encoded_packet; - - while (encoded_packet = vpx_codec_get_cx_data(mCodecContext, &encoded_packet_iterator)) { - if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) { - outputBufferHeader->nTimeStamp = encoded_packet->data.frame.pts; - outputBufferHeader->nFlags = 0; - outputBufferHeader->nOffset = 0; - outputBufferHeader->nFilledLen = encoded_packet->data.frame.sz; - memcpy(outputBufferHeader->pBuffer, - encoded_packet->data.frame.buf, - encoded_packet->data.frame.sz); - outputBufferInfo->mOwnedByUs = false; - outputBufferInfoQueue.erase(outputBufferInfoQueue.begin()); - notifyFillBufferDone(outputBufferHeader); - } - } - - inputBufferInfo->mOwnedByUs = false; - inputBufferInfoQueue.erase(inputBufferInfoQueue.begin()); - notifyEmptyBufferDone(inputBufferHeader); - } -} -} // namespace android - - -android::SoftOMXComponent *createSoftOMXComponent( - const char *name, const OMX_CALLBACKTYPE *callbacks, - OMX_PTR appData, OMX_COMPONENTTYPE **component) { - return new android::SoftVPXEncoder(name, callbacks, appData, component); -} diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h deleted file mode 100644 index 3bc05c0..0000000 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h +++ /dev/null @@ -1,203 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef SOFT_VPX_ENCODER_H_ - -#define SOFT_VPX_ENCODER_H_ - -#include "SimpleSoftOMXComponent.h" - -#include -#include - -#include "vpx/vpx_encoder.h" -#include "vpx/vpx_codec.h" -#include "vpx/vp8cx.h" - -namespace android { - -// Exposes a vpx encoder as an OMX Component -// -// Boilerplate for callback bindings are taken care -// by the base class SimpleSoftOMXComponent and its -// parent SoftOMXComponent. -// -// Only following encoder settings are available -// - target bitrate -// - rate control (constant / variable) -// - frame rate -// - error resilience -// - token partitioning -// - reconstruction & loop filters (g_profile) -// -// Only following color formats are recognized -// - YUV420Planar -// - YUV420SemiPlanar -// - AndroidOpaque -// -// Following settings are not configurable by the client -// - encoding deadline is realtime -// - multithreaded encoding utilizes a number of threads equal -// to online cpu's available -// - the algorithm interface for encoder is vp8 -// - fractional bits of frame rate is discarded -// - OMX timestamps are in microseconds, therefore -// encoder timebase is fixed to 1/1000000 - -class SoftVPXEncoder : public SimpleSoftOMXComponent { - public: - SoftVPXEncoder(const char *name, - const OMX_CALLBACKTYPE *callbacks, - OMX_PTR appData, - OMX_COMPONENTTYPE **component); - - protected: - virtual ~SoftVPXEncoder(); - - // Returns current values for requested OMX - // parameters - virtual OMX_ERRORTYPE internalGetParameter( - OMX_INDEXTYPE index, OMX_PTR param); - - // Validates, extracts and stores relevant OMX - // parameters - virtual OMX_ERRORTYPE internalSetParameter( - OMX_INDEXTYPE index, const OMX_PTR param); - - // OMX callback when buffers available - // Note that both an input and output buffer - // is expected to be available to carry out - // encoding of the frame - virtual void onQueueFilled(OMX_U32 portIndex); - - private: - // number of buffers allocated per port - static const uint32_t kNumBuffers = 4; - - // OMX port indexes that refer to input and - // output ports respectively - static const uint32_t kInputPortIndex = 0; - static const uint32_t kOutputPortIndex = 1; - - // Byte-alignment required for buffers - static const uint32_t kInputBufferAlignment = 1; - static const uint32_t kOutputBufferAlignment = 2; - - // Max value supported for DCT partitions - static const uint32_t kMaxDCTPartitions = 3; - - // Number of supported input color formats - static const uint32_t kNumberOfSupportedColorFormats = 3; - - // vpx specific opaque data structure that - // stores encoder state - vpx_codec_ctx_t* mCodecContext; - - // vpx specific data structure that - // stores encoder configuration - vpx_codec_enc_cfg_t* mCodecConfiguration; - - // vpx specific read-only data structure - // that specifies algorithm interface (e.g. vp8) - vpx_codec_iface_t* mCodecInterface; - - // Width of the input frames - int32_t mWidth; - - // Height of the input frames - int32_t mHeight; - - // Target bitrate set for the encoder, in bits per second. 
- int32_t mBitrate; - - // Bitrate control mode, either constant or variable - vpx_rc_mode mBitrateControlMode; - - // Frame duration is the reciprocal of framerate, denoted - // in microseconds - uint64_t mFrameDurationUs; - - // vp8 specific configuration parameter - // that enables token partitioning of - // the stream into substreams - int32_t mDCTPartitions; - - // Parameter that denotes whether error resilience - // is enabled in encoder - OMX_BOOL mErrorResilience; - - // Color format for the input port - OMX_COLOR_FORMATTYPE mColorFormat; - - // Encoder profile corresponding to OMX level parameter - // - // The inconsistency in the naming is caused by - // OMX spec referring vpx profiles (g_profile) - // as "levels" whereas using the name "profile" for - // something else. - OMX_VIDEO_VP8LEVELTYPE mLevel; - - // Conversion buffer is needed to convert semi - // planar yuv420 to planar format - // It is only allocated if input format is - // indeed YUV420SemiPlanar. - uint8_t* mConversionBuffer; - - // Initializes input and output OMX ports with sensible - // default values. - void initPorts(); - - // Initializes vpx encoder with available settings. - status_t initEncoder(); - - // Releases vpx encoder instance, with it's associated - // data structures. - // - // Unless called earlier, this is handled by the - // dtor. - status_t releaseEncoder(); - - // Handles port changes with respect to color formats - OMX_ERRORTYPE internalSetFormatParams( - const OMX_VIDEO_PARAM_PORTFORMATTYPE* format); - - // Verifies the component role tried to be set to this OMX component is - // strictly video_encoder.vpx - OMX_ERRORTYPE internalSetRoleParams( - const OMX_PARAM_COMPONENTROLETYPE* role); - - // Updates bitrate to reflect port settings. - OMX_ERRORTYPE internalSetBitrateParams( - const OMX_VIDEO_PARAM_BITRATETYPE* bitrate); - - // Handles port definition changes. - OMX_ERRORTYPE internalSetPortParams( - const OMX_PARAM_PORTDEFINITIONTYPE* port); - - // Handles vp8 specific parameters. 
- OMX_ERRORTYPE internalSetVp8Params( - const OMX_VIDEO_PARAM_VP8TYPE* vp8Params); - - // Updates encoder profile - OMX_ERRORTYPE internalSetProfileLevel( - const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel); - - DISALLOW_EVIL_CONSTRUCTORS(SoftVPXEncoder); -}; - -} // namespace android - -#endif // SOFT_VPX_ENCODER_H_ -- cgit v1.1 From 272ab546940054ad7991bef4b3a36f15175721cd Mon Sep 17 00:00:00 2001 From: Jean-Michel Trivi Date: Mon, 4 Feb 2013 16:26:02 -0800 Subject: Add support for querying if a stream is active remotely Bug 7485803 Change-Id: I0744374f130fd2dd0714102354cffed2fa915361 --- include/media/AudioSystem.h | 7 ++++++- include/media/IAudioPolicyService.h | 2 ++ media/libmedia/AudioSystem.cpp | 10 ++++++++++ media/libmedia/IAudioPolicyService.cpp | 21 ++++++++++++++++++++- services/audioflinger/AudioPolicyService.cpp | 9 +++++++++ services/audioflinger/AudioPolicyService.h | 1 + 6 files changed, 48 insertions(+), 2 deletions(-) diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index 126ef12..b11c812 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -67,9 +67,14 @@ public: // set audio mode in audio hardware static status_t setMode(audio_mode_t mode); - // returns true in *state if tracks are active on the specified stream or has been active + // returns true in *state if tracks are active on the specified stream or have been active // in the past inPastMs milliseconds static status_t isStreamActive(audio_stream_type_t stream, bool *state, uint32_t inPastMs = 0); + // returns true in *state if tracks are active for what qualifies as remote playback + // on the specified stream or have been active in the past inPastMs milliseconds. Remote + // playback isn't mutually exclusive with local playback. 
+ static status_t isStreamActiveRemotely(audio_stream_type_t stream, bool *state, + uint32_t inPastMs = 0); // returns true in *state if a recorder is currently recording with the specified source static status_t isSourceActive(audio_source_t source, bool *state); diff --git a/include/media/IAudioPolicyService.h b/include/media/IAudioPolicyService.h index f5b0604..b5ad4ef 100644 --- a/include/media/IAudioPolicyService.h +++ b/include/media/IAudioPolicyService.h @@ -89,6 +89,8 @@ public: virtual status_t unregisterEffect(int id) = 0; virtual status_t setEffectEnabled(int id, bool enabled) = 0; virtual bool isStreamActive(audio_stream_type_t stream, uint32_t inPastMs = 0) const = 0; + virtual bool isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs = 0) + const = 0; virtual bool isSourceActive(audio_source_t source) const = 0; virtual status_t queryDefaultPreProcessing(int audioSession, effect_descriptor_t *descriptors, diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 028e4a3..693df60 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -731,6 +731,16 @@ status_t AudioSystem::isStreamActive(audio_stream_type_t stream, bool* state, ui return NO_ERROR; } +status_t AudioSystem::isStreamActiveRemotely(audio_stream_type_t stream, bool* state, + uint32_t inPastMs) +{ + const sp& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + if (state == NULL) return BAD_VALUE; + *state = aps->isStreamActiveRemotely(stream, inPastMs); + return NO_ERROR; +} + status_t AudioSystem::isSourceActive(audio_source_t stream, bool* state) { const sp& aps = AudioSystem::get_audio_policy_service(); diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp index 769deae..386c351 100644 --- a/media/libmedia/IAudioPolicyService.cpp +++ b/media/libmedia/IAudioPolicyService.cpp @@ -55,7 +55,8 @@ enum { IS_SOURCE_ACTIVE, GET_DEVICES_FOR_STREAM, QUERY_DEFAULT_PRE_PROCESSING, - SET_EFFECT_ENABLED + SET_EFFECT_ENABLED, + IS_STREAM_ACTIVE_REMOTELY }; class BpAudioPolicyService : public BpInterface @@ -330,6 +331,16 @@ public: return reply.readInt32(); } + virtual bool isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs) const + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32((int32_t) stream); + data.writeInt32(inPastMs); + remote()->transact(IS_STREAM_ACTIVE_REMOTELY, data, &reply); + return reply.readInt32(); + } + virtual bool isSourceActive(audio_source_t source) const { Parcel data, reply; @@ -605,6 +616,14 @@ status_t BnAudioPolicyService::onTransact( return NO_ERROR; } break; + case IS_STREAM_ACTIVE_REMOTELY: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + audio_stream_type_t stream = (audio_stream_type_t) data.readInt32(); + uint32_t inPastMs = (uint32_t)data.readInt32(); + reply->writeInt32( isStreamActiveRemotely((audio_stream_type_t) stream, inPastMs) ); + return NO_ERROR; + } break; + case IS_SOURCE_ACTIVE: { CHECK_INTERFACE(IAudioPolicyService, data, reply); audio_source_t source = (audio_source_t) data.readInt32(); diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp index b86d3ae..4256fc4 100644 --- a/services/audioflinger/AudioPolicyService.cpp +++ b/services/audioflinger/AudioPolicyService.cpp @@ -484,6 +484,15 @@ bool AudioPolicyService::isStreamActive(audio_stream_type_t stream, uint32_t inP return 
mpAudioPolicy->is_stream_active(mpAudioPolicy, stream, inPastMs); } +bool AudioPolicyService::isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs) const +{ + if (mpAudioPolicy == NULL) { + return 0; + } + Mutex::Autolock _l(mLock); + return mpAudioPolicy->is_stream_active_remotely(mpAudioPolicy, stream, inPastMs); +} + bool AudioPolicyService::isSourceActive(audio_source_t source) const { if (mpAudioPolicy == NULL) { diff --git a/services/audioflinger/AudioPolicyService.h b/services/audioflinger/AudioPolicyService.h index 92653c1..35cf368 100644 --- a/services/audioflinger/AudioPolicyService.h +++ b/services/audioflinger/AudioPolicyService.h @@ -104,6 +104,7 @@ public: virtual status_t unregisterEffect(int id); virtual status_t setEffectEnabled(int id, bool enabled); virtual bool isStreamActive(audio_stream_type_t stream, uint32_t inPastMs = 0) const; + virtual bool isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs = 0) const; virtual bool isSourceActive(audio_source_t source) const; virtual status_t queryDefaultPreProcessing(int audioSession, -- cgit v1.1 From 4f1732b8068970b368a89271158ca29daf25650e Mon Sep 17 00:00:00 2001 From: ztenghui Date: Mon, 4 Feb 2013 15:59:38 -0800 Subject: Add the native MediaMuxer support. MediaAdapter: a helper class to convert the push model to pull model. MediaMuxer: the real muxer. bug:7991013 Change-Id: If3b79551bc6332bc81f5c2740885e579a5c4abf9 --- include/media/stagefright/MediaAdapter.h | 76 ++++++++++++++++ include/media/stagefright/MediaMuxer.h | 109 +++++++++++++++++++++++ media/libstagefright/Android.mk | 2 + media/libstagefright/MediaAdapter.cpp | 126 +++++++++++++++++++++++++++ media/libstagefright/MediaMuxer.cpp | 145 +++++++++++++++++++++++++++++++ 5 files changed, 458 insertions(+) create mode 100644 include/media/stagefright/MediaAdapter.h create mode 100644 include/media/stagefright/MediaMuxer.h create mode 100644 media/libstagefright/MediaAdapter.cpp create mode 100644 media/libstagefright/MediaMuxer.cpp diff --git a/include/media/stagefright/MediaAdapter.h b/include/media/stagefright/MediaAdapter.h new file mode 100644 index 0000000..369fce6 --- /dev/null +++ b/include/media/stagefright/MediaAdapter.h @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MEDIA_ADAPTER_H +#define MEDIA_ADAPTER_H + +#include +#include +#include +#include +#include + +namespace android { + +// Convert the MediaMuxer's push model into MPEG4Writer's pull model. +// Used only by the MediaMuxer for now. +struct MediaAdapter : public MediaSource, public MediaBufferObserver { +public: + // MetaData is used to set the format and returned at getFormat. 
+ MediaAdapter(const sp &meta); + virtual ~MediaAdapter(); + ///////////////////////////////////////////////// + // Inherited functions from MediaSource + ///////////////////////////////////////////////// + + virtual status_t start(MetaData *params = NULL); + virtual status_t stop(); + virtual sp getFormat(); + virtual status_t read( + MediaBuffer **buffer, const ReadOptions *options = NULL); + + ///////////////////////////////////////////////// + // Inherited functions from MediaBufferObserver + ///////////////////////////////////////////////// + + virtual void signalBufferReturned(MediaBuffer *buffer); + + ///////////////////////////////////////////////// + // Non-inherited functions: + ///////////////////////////////////////////////// + + // pushBuffer() will wait for the read() finish, and read() will have a + // deep copy, such that after pushBuffer return, the buffer can be re-used. + status_t pushBuffer(MediaBuffer *buffer); + +private: + Mutex mAdapterLock; + // Make sure the read() wait for the incoming buffer. + Condition mBufferReadCond; + // Make sure the pushBuffer() wait for the current buffer consumed. + Condition mBufferReturnedCond; + + MediaBuffer *mCurrentMediaBuffer; + + bool mStarted; + sp mOutputFormat; + + DISALLOW_EVIL_CONSTRUCTORS(MediaAdapter); +}; + +} // namespace android + +#endif // MEDIA_ADAPTER_H diff --git a/include/media/stagefright/MediaMuxer.h b/include/media/stagefright/MediaMuxer.h new file mode 100644 index 0000000..27a141e --- /dev/null +++ b/include/media/stagefright/MediaMuxer.h @@ -0,0 +1,109 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MEDIA_MUXER_H_ +#define MEDIA_MUXER_H_ + +#include +#include +#include +#include + +namespace android { + +struct ABuffer; +struct AMessage; +struct MediaAdapter; +struct MediaBuffer; +struct MediaSource; +struct MetaData; +struct MPEG4Writer; + +// MediaMuxer is used to mux multiple tracks into a video. Currently, we only +// support a mp4 file as the output. +// The expected calling order of the functions is: +// Constructor -> addTrack+ -> start -> writeSampleData+ -> stop +// If muxing operation need to be cancelled, the app is responsible for +// deleting the output file after stop. +struct MediaMuxer : public RefBase { +public: + // Construct the muxer with the output file path. + MediaMuxer(const char* pathOut); + // Construct the muxer with the file descriptor. Note that the MediaMuxer + // will close this file at stop(). + MediaMuxer(int fd); + + virtual ~MediaMuxer(); + + /** + * Add a track with its format information. This should be + * called before start(). + * @param format the track's format. + * @return the track's index or negative number if error. + */ + ssize_t addTrack(const sp &format); + + /** + * Start muxing. Make sure all the tracks have been added before + * calling this. + */ + status_t start(); + + /** + * Stop muxing. + * This method is a blocking call. 
Depending on how + * much data is bufferred internally, the time needed for stopping + * the muxer may be time consuming. UI thread is + * not recommended for launching this call. + */ + status_t stop(); + + /** + * Send a sample buffer for muxing. + * The buffer can be reused once this method returns. Typically, + * this function won't be blocked for very long, and thus there + * is no need to use a separate thread calling this method to + * push a buffer. + * @param buffer the incoming sample buffer. + * @param trackIndex the buffer's track index number. + * @param timeUs the buffer's time stamp. + * @param flags the only supported flag for now is + * MediaCodec::BUFFER_FLAG_SYNCFRAME. + * @return OK if no error. + */ + status_t writeSampleData(const sp &buffer, size_t trackIndex, + int64_t timeUs, uint32_t flags) ; + +private: + sp mWriter; + Vector< sp > mTrackList; // Each track has its MediaAdapter. + + Mutex mMuxerLock; + + enum State { + INITED, + STARTED, + STOPPED + }; + State mState; + + DISALLOW_EVIL_CONSTRUCTORS(MediaMuxer); +}; + +} // namespace android + +#endif // MEDIA_MUXER_H_ + diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index 85662db..6934e59 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -26,12 +26,14 @@ LOCAL_SRC_FILES:= \ MPEG2TSWriter.cpp \ MPEG4Extractor.cpp \ MPEG4Writer.cpp \ + MediaAdapter.cpp \ MediaBuffer.cpp \ MediaBufferGroup.cpp \ MediaCodec.cpp \ MediaCodecList.cpp \ MediaDefs.cpp \ MediaExtractor.cpp \ + MediaMuxer.cpp \ MediaSource.cpp \ MetaData.cpp \ NuCachedSource2.cpp \ diff --git a/media/libstagefright/MediaAdapter.cpp b/media/libstagefright/MediaAdapter.cpp new file mode 100644 index 0000000..2484212 --- /dev/null +++ b/media/libstagefright/MediaAdapter.cpp @@ -0,0 +1,126 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaAdapter" +#include + +#include +#include +#include + +namespace android { + +MediaAdapter::MediaAdapter(const sp &meta) + : mCurrentMediaBuffer(NULL), + mStarted(false), + mOutputFormat(meta) { +} + +MediaAdapter::~MediaAdapter() { + Mutex::Autolock autoLock(mAdapterLock); + mOutputFormat.clear(); + CHECK(mCurrentMediaBuffer == NULL); +} + +status_t MediaAdapter::start(MetaData *params) { + Mutex::Autolock autoLock(mAdapterLock); + if (!mStarted) { + mStarted = true; + } + return OK; +} + +status_t MediaAdapter::stop() { + Mutex::Autolock autoLock(mAdapterLock); + if (mStarted) { + mStarted = false; + // If stop() happens immediately after a pushBuffer(), we should + // clean up the mCurrentMediaBuffer + if (mCurrentMediaBuffer != NULL) { + mCurrentMediaBuffer->release(); + mCurrentMediaBuffer = NULL; + } + // While read() is still waiting, we should signal it to finish. 
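+        // (Editorial note, inferred from read() below: once signalled, the
+        // waiting read() wakes up, observes mStarted == false, and returns
+        // ERROR_END_OF_STREAM to whoever is blocked in read().)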
+ mBufferReadCond.signal(); + } + return OK; +} + +sp MediaAdapter::getFormat() { + Mutex::Autolock autoLock(mAdapterLock); + return mOutputFormat; +} + +void MediaAdapter::signalBufferReturned(MediaBuffer *buffer) { + Mutex::Autolock autoLock(mAdapterLock); + CHECK(buffer != NULL); + buffer->setObserver(0); + buffer->release(); + ALOGV("buffer returned %p", buffer); + mBufferReturnedCond.signal(); +} + +status_t MediaAdapter::read( + MediaBuffer **buffer, const ReadOptions *options) { + Mutex::Autolock autoLock(mAdapterLock); + if (!mStarted) { + ALOGV("Read before even started!"); + return ERROR_END_OF_STREAM; + } + + while (mCurrentMediaBuffer == NULL && mStarted) { + ALOGV("waiting @ read()"); + mBufferReadCond.wait(mAdapterLock); + } + + if (!mStarted) { + ALOGV("read interrupted after stop"); + CHECK(mCurrentMediaBuffer == NULL); + return ERROR_END_OF_STREAM; + } + + CHECK(mCurrentMediaBuffer != NULL); + + *buffer = mCurrentMediaBuffer; + mCurrentMediaBuffer = NULL; + (*buffer)->setObserver(this); + + return OK; +} + +status_t MediaAdapter::pushBuffer(MediaBuffer *buffer) { + if (buffer == NULL) { + ALOGE("pushBuffer get an NULL buffer"); + return -EINVAL; + } + + Mutex::Autolock autoLock(mAdapterLock); + if (!mStarted) { + ALOGE("pushBuffer called before start"); + return INVALID_OPERATION; + } + mCurrentMediaBuffer = buffer; + mBufferReadCond.signal(); + + ALOGV("wait for the buffer returned @ pushBuffer! %p", buffer); + mBufferReturnedCond.wait(mAdapterLock); + + return OK; +} + +} // namespace android + diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp new file mode 100644 index 0000000..30bed90 --- /dev/null +++ b/media/libstagefright/MediaMuxer.cpp @@ -0,0 +1,145 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaMuxer" +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { + +MediaMuxer::MediaMuxer(const char* pathOut) + : mState(INITED) { + mWriter = new MPEG4Writer(pathOut); +} + +MediaMuxer::MediaMuxer(int fd) + : mState(INITED) { + mWriter = new MPEG4Writer(fd); +} + +MediaMuxer::~MediaMuxer() { + Mutex::Autolock autoLock(mMuxerLock); + + // Clean up all the internal resources. 
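+    // (Editorial note: clearing mWriter and mTrackList here only drops the
+    // muxer's references; per the class comment in MediaMuxer.h, a cancelled
+    // muxing session still leaves the app responsible for deleting the
+    // partially written output file after stop().)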
+ mWriter.clear(); + mTrackList.clear(); +} + +ssize_t MediaMuxer::addTrack(const sp &format) { + Mutex::Autolock autoLock(mMuxerLock); + + if (format.get() == NULL) { + ALOGE("addTrack() get a null format"); + return -EINVAL; + } + + if (mState != INITED) { + ALOGE("addTrack() must be called after constructor and before start()."); + return INVALID_OPERATION; + } + + sp meta = new MetaData; + convertMessageToMetaData(format, meta); + + sp newTrack = new MediaAdapter(meta); + return mTrackList.add(newTrack); +} + +status_t MediaMuxer::start() { + Mutex::Autolock autoLock(mMuxerLock); + + if (mState == INITED) { + mState = STARTED; + for (size_t i = 0 ; i < mTrackList.size(); i++) { + mWriter->addSource(mTrackList[i]); + } + return mWriter->start(); + } else { + ALOGE("start() is called in invalid state %d", mState); + return INVALID_OPERATION; + } +} + +status_t MediaMuxer::stop() { + Mutex::Autolock autoLock(mMuxerLock); + + if (mState == STARTED) { + mState = STOPPED; + for (size_t i = 0; i < mTrackList.size(); i++) { + mTrackList[i]->stop(); + } + return mWriter->stop(); + } else { + ALOGE("stop() is called in invalid state %d", mState); + return INVALID_OPERATION; + } +} + +status_t MediaMuxer::writeSampleData(const sp &buffer, size_t trackIndex, + int64_t timeUs, uint32_t flags) { + Mutex::Autolock autoLock(mMuxerLock); + + sp currentTrack = mTrackList[trackIndex]; + + if (buffer.get() == NULL) { + ALOGE("WriteSampleData() get an NULL buffer."); + return -EINVAL; + } + + if (mState != STARTED) { + ALOGE("WriteSampleData() is called in invalid state %d", mState); + return INVALID_OPERATION; + } + + if (trackIndex >= mTrackList.size()) { + ALOGE("WriteSampleData() get an invalid index %d", trackIndex); + return -EINVAL; + } + + MediaBuffer* mediaBuffer = new MediaBuffer(buffer); + + mediaBuffer->add_ref(); // Released in MediaAdapter::signalBufferReturned(). + mediaBuffer->set_range(buffer->offset(), buffer->size()); + + sp metaData = mediaBuffer->meta_data(); + metaData->setInt64(kKeyTime, timeUs); + // Just set the kKeyDecodingTime as the presentation time for now. + metaData->setInt64(kKeyDecodingTime, timeUs); + + if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) { + metaData->setInt32(kKeyIsSyncFrame, true); + } + + // This pushBuffer will wait until the mediaBuffer is consumed. + return currentTrack->pushBuffer(mediaBuffer); +} + +} // namespace android -- cgit v1.1 From 3051df27261e9952c0e642dec548515250e85f6a Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 12 Feb 2013 12:12:42 -0800 Subject: Temporary additional logging to investigate bug The bug appears related to continuing to use an invalid buffer provider in fast mixer after track destruction, so focus the added logs in that area. Also includes a bug fix: was calling log in an unsafe place near Threads.cpp AudioFlinger::PlaybackThread::createTrack_l line 1250. 
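For context, the unsafe call was an NBLog write made after the scope that holds
ThreadBase::mLock, apparently because the NBLog writer is meant to be used with
the thread lock held (cf. the FIXME added in the Track destructor below). The
Threads.cpp hunk in this patch moves the write inside the locked scope; a
minimal sketch of the resulting pattern, reusing the names from the patch and
simplified rather than quoted verbatim:

    {   // scope for mLock
        Mutex::Autolock _l(mLock);
        // Log while the thread lock is held, together with the fast flag and
        // the calling pid, instead of after the Exit: label as before.
        mNBLogWriter->logf("createTrack_l isFast=%d caller=%d",
                (*flags & IAudioFlinger::TRACK_FAST) != 0,
                IPCThreadState::self()->getCallingPid());
        // ... track allocation continues under the same lock ...
    }
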
Details: - include caller pid or client pid where appropriate - increase log buffer size Bug: 6490974 Change-Id: I4c030f171343fe4b483eae0ddea4427118d8d4b1 --- services/audioflinger/AudioFlinger.h | 2 +- services/audioflinger/FastMixer.cpp | 16 ++++++++++++++++ services/audioflinger/Threads.cpp | 18 +++++++++++++----- services/audioflinger/Threads.h | 2 +- services/audioflinger/Tracks.cpp | 14 ++++++++++---- 5 files changed, 41 insertions(+), 11 deletions(-) diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index c3f08f6..593f131 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -227,7 +227,7 @@ public: sp newWriter_l(size_t size, const char *name); void unregisterWriter(const sp& writer); private: - static const size_t kLogMemorySize = 10 * 1024; + static const size_t kLogMemorySize = 20 * 1024; sp mLogMemoryDealer; // == 0 when NBLog is disabled public: diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 80e37ca..467da39 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -301,6 +301,11 @@ bool FastMixer::threadLoop() const FastTrack* fastTrack = ¤t->mFastTracks[i]; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("added invalid %#x", i); + } if (mixer != NULL) { // calling getTrackName with default channel mask and a random invalid // sessionId (no effects here) @@ -335,6 +340,11 @@ bool FastMixer::threadLoop() if (fastTrack->mGeneration != generations[i]) { AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("modified invalid %#x", i); + } if (mixer != NULL) { name = fastTrackNames[i]; ALOG_ASSERT(name >= 0); @@ -423,6 +433,12 @@ bool FastMixer::threadLoop() } ftDump->mUnderruns = underruns; ftDump->mFramesReady = framesReady; + AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("mixing invalid %#x", i); + } } int64_t pts; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index ba848d7..8cc5f6c 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1196,6 +1196,8 @@ sp AudioFlinger::PlaybackThread::createTrac { // scope for mLock Mutex::Autolock _l(mLock); + mNBLogWriter->logf("createTrack_l isFast=%d caller=%d", + (*flags & IAudioFlinger::TRACK_FAST) != 0, IPCThreadState::self()->getCallingPid()); // all tracks in same audio session must share the same routing strategy otherwise // conflicts will happen when tracks are moved from one output to another by audio policy @@ -1249,7 +1251,6 @@ Exit: if (status) { *status = lStatus; } - mNBLogWriter->logf("createTrack_l"); return track; } @@ -1317,7 +1318,8 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { - mNBLogWriter->logf("addTrack_l mName=%d", track->mName); + 
mNBLogWriter->logf("addTrack_l mName=%d mFastIndex=%d caller=%d", track->mName, + track->mFastIndex, IPCThreadState::self()->getCallingPid()); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1351,7 +1353,9 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("destroyTrack_l mName=%d mFastIndex=%d mClientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? track->mClient->pid() : -1); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1361,7 +1365,9 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { - mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("removeTrack_l mName=%d mFastIndex=%d clientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? track->mClient->pid() : -1); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -2870,7 +2876,9 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("prepareTracks_l remove name=%u mFastIndex=%d", track->name(), + track->mFastIndex); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index fa1e336..8497788 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,7 +315,7 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; - static const size_t kLogSize = 512; + static const size_t kLogSize = 4096; sp mNBLogWriter; }; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 315cbbc..f679751 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -351,6 +351,7 @@ AudioFlinger::PlaybackThread::Track::Track( // Read the initial underruns because this field is never cleared by the fast mixer mObservedUnderruns = thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); + thread->mNBLogWriter->logf("new Track mName=%d mFastIndex=%d", mName, mFastIndex); } } ALOGV("Track constructor name %d, calling pid %d", mName, @@ -360,6 +361,7 @@ AudioFlinger::PlaybackThread::Track::Track( AudioFlinger::PlaybackThread::Track::~Track() { ALOGV("PlaybackThread::Track destructor"); + // FIXME not sure if safe to log here, would need a lock on thread to do it } void AudioFlinger::PlaybackThread::Track::destroy() @@ -569,7 +571,8 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("start mName=%d", mName); + thread->mNBLogWriter->logf("start mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; // here the track could be either new, or 
restarted // in both cases "unstop" the track @@ -612,7 +615,8 @@ void AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("stop mName=%d", mName); + thread->mNBLogWriter->logf("stop mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -649,7 +653,8 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("pause mName=%d", mName); + thread->mNBLogWriter->logf("pause mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -673,7 +678,8 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("flush mName=%d", mName); + thread->mNBLogWriter->logf("flush mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From ecd9389c8712aedeb2a79823ea0e4fb842684269 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 12 Feb 2013 20:43:58 +0000 Subject: Revert "Temporary additional logging to investigate bug" This reverts commit 3051df27261e9952c0e642dec548515250e85f6a Change-Id: I8bf5c3e91b65bd20de26f480c367c2854b62373c --- services/audioflinger/AudioFlinger.h | 2 +- services/audioflinger/FastMixer.cpp | 16 ---------------- services/audioflinger/Threads.cpp | 18 +++++------------- services/audioflinger/Threads.h | 2 +- services/audioflinger/Tracks.cpp | 14 ++++---------- 5 files changed, 11 insertions(+), 41 deletions(-) diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 593f131..c3f08f6 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -227,7 +227,7 @@ public: sp newWriter_l(size_t size, const char *name); void unregisterWriter(const sp& writer); private: - static const size_t kLogMemorySize = 20 * 1024; + static const size_t kLogMemorySize = 10 * 1024; sp mLogMemoryDealer; // == 0 when NBLog is disabled public: diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 467da39..80e37ca 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -301,11 +301,6 @@ bool FastMixer::threadLoop() const FastTrack* fastTrack = ¤t->mFastTracks[i]; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); - if (bufferProvider == NULL || - bufferProvider->getValid() != AudioBufferProvider::kValid) { - logWriter->logTimestamp(); - logWriter->logf("added invalid %#x", i); - } if (mixer != NULL) { // calling getTrackName with default channel mask and a random invalid // sessionId (no effects here) @@ -340,11 +335,6 @@ bool FastMixer::threadLoop() if (fastTrack->mGeneration != generations[i]) { AudioBufferProvider *bufferProvider = 
fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL); - if (bufferProvider == NULL || - bufferProvider->getValid() != AudioBufferProvider::kValid) { - logWriter->logTimestamp(); - logWriter->logf("modified invalid %#x", i); - } if (mixer != NULL) { name = fastTrackNames[i]; ALOG_ASSERT(name >= 0); @@ -433,12 +423,6 @@ bool FastMixer::threadLoop() } ftDump->mUnderruns = underruns; ftDump->mFramesReady = framesReady; - AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; - if (bufferProvider == NULL || - bufferProvider->getValid() != AudioBufferProvider::kValid) { - logWriter->logTimestamp(); - logWriter->logf("mixing invalid %#x", i); - } } int64_t pts; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 8cc5f6c..ba848d7 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1196,8 +1196,6 @@ sp AudioFlinger::PlaybackThread::createTrac { // scope for mLock Mutex::Autolock _l(mLock); - mNBLogWriter->logf("createTrack_l isFast=%d caller=%d", - (*flags & IAudioFlinger::TRACK_FAST) != 0, IPCThreadState::self()->getCallingPid()); // all tracks in same audio session must share the same routing strategy otherwise // conflicts will happen when tracks are moved from one output to another by audio policy @@ -1251,6 +1249,7 @@ Exit: if (status) { *status = lStatus; } + mNBLogWriter->logf("createTrack_l"); return track; } @@ -1318,8 +1317,7 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { - mNBLogWriter->logf("addTrack_l mName=%d mFastIndex=%d caller=%d", track->mName, - track->mFastIndex, IPCThreadState::self()->getCallingPid()); + mNBLogWriter->logf("addTrack_l mName=%d", track->mName); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1353,9 +1351,7 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("destroyTrack_l mName=%d mFastIndex=%d mClientPid=%d", track->mName, - track->mFastIndex, track->mClient != 0 ? track->mClient->pid() : -1); + mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1365,9 +1361,7 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("removeTrack_l mName=%d mFastIndex=%d clientPid=%d", track->mName, - track->mFastIndex, track->mClient != 0 ? 
track->mClient->pid() : -1); + mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -2876,9 +2870,7 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("prepareTracks_l remove name=%u mFastIndex=%d", track->name(), - track->mFastIndex); + mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 8497788..fa1e336 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,7 +315,7 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; - static const size_t kLogSize = 4096; + static const size_t kLogSize = 512; sp mNBLogWriter; }; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index f679751..315cbbc 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -351,7 +351,6 @@ AudioFlinger::PlaybackThread::Track::Track( // Read the initial underruns because this field is never cleared by the fast mixer mObservedUnderruns = thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); - thread->mNBLogWriter->logf("new Track mName=%d mFastIndex=%d", mName, mFastIndex); } } ALOGV("Track constructor name %d, calling pid %d", mName, @@ -361,7 +360,6 @@ AudioFlinger::PlaybackThread::Track::Track( AudioFlinger::PlaybackThread::Track::~Track() { ALOGV("PlaybackThread::Track destructor"); - // FIXME not sure if safe to log here, would need a lock on thread to do it } void AudioFlinger::PlaybackThread::Track::destroy() @@ -571,8 +569,7 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("start mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("start mName=%d", mName); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track @@ -615,8 +612,7 @@ void AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("stop mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("stop mName=%d", mName); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -653,8 +649,7 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("pause mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("pause mName=%d", mName); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -678,8 +673,7 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = 
mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("flush mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("flush mName=%d", mName); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From 4a808b46ddcbe7c26ec195a29d46241b45c0cc98 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 12 Feb 2013 12:12:42 -0800 Subject: Temporary additional logging to investigate bug The bug appears related to continuing to use an invalid buffer provider in fast mixer after track destruction, so focus the added logs in that area. Also includes a bug fix: was calling log in an unsafe place near Threads.cpp AudioFlinger::PlaybackThread::createTrack_l line 1250. Details: - include caller pid or client pid where appropriate - increase log buffer size Bug: 6490974 Change-Id: I4c030f171343fe4b483eae0ddea4427118d8d4b1 --- services/audioflinger/AudioFlinger.h | 2 +- services/audioflinger/FastMixer.cpp | 16 ++++++++++++++++ services/audioflinger/Threads.cpp | 18 +++++++++++++----- services/audioflinger/Threads.h | 2 +- services/audioflinger/Tracks.cpp | 14 ++++++++++---- 5 files changed, 41 insertions(+), 11 deletions(-) diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index c3f08f6..593f131 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -227,7 +227,7 @@ public: sp newWriter_l(size_t size, const char *name); void unregisterWriter(const sp& writer); private: - static const size_t kLogMemorySize = 10 * 1024; + static const size_t kLogMemorySize = 20 * 1024; sp mLogMemoryDealer; // == 0 when NBLog is disabled public: diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 80e37ca..467da39 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -301,6 +301,11 @@ bool FastMixer::threadLoop() const FastTrack* fastTrack = ¤t->mFastTracks[i]; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("added invalid %#x", i); + } if (mixer != NULL) { // calling getTrackName with default channel mask and a random invalid // sessionId (no effects here) @@ -335,6 +340,11 @@ bool FastMixer::threadLoop() if (fastTrack->mGeneration != generations[i]) { AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("modified invalid %#x", i); + } if (mixer != NULL) { name = fastTrackNames[i]; ALOG_ASSERT(name >= 0); @@ -423,6 +433,12 @@ bool FastMixer::threadLoop() } ftDump->mUnderruns = underruns; ftDump->mFramesReady = framesReady; + AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("mixing invalid %#x", i); + } } int64_t pts; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index ba848d7..8cc5f6c 100644 --- a/services/audioflinger/Threads.cpp 
+++ b/services/audioflinger/Threads.cpp @@ -1196,6 +1196,8 @@ sp AudioFlinger::PlaybackThread::createTrac { // scope for mLock Mutex::Autolock _l(mLock); + mNBLogWriter->logf("createTrack_l isFast=%d caller=%d", + (*flags & IAudioFlinger::TRACK_FAST) != 0, IPCThreadState::self()->getCallingPid()); // all tracks in same audio session must share the same routing strategy otherwise // conflicts will happen when tracks are moved from one output to another by audio policy @@ -1249,7 +1251,6 @@ Exit: if (status) { *status = lStatus; } - mNBLogWriter->logf("createTrack_l"); return track; } @@ -1317,7 +1318,8 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { - mNBLogWriter->logf("addTrack_l mName=%d", track->mName); + mNBLogWriter->logf("addTrack_l mName=%d mFastIndex=%d caller=%d", track->mName, + track->mFastIndex, IPCThreadState::self()->getCallingPid()); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1351,7 +1353,9 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("destroyTrack_l mName=%d mFastIndex=%d mClientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? track->mClient->pid() : -1); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1361,7 +1365,9 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { - mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("removeTrack_l mName=%d mFastIndex=%d clientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? 
track->mClient->pid() : -1); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -2870,7 +2876,9 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("prepareTracks_l remove name=%u mFastIndex=%d", track->name(), + track->mFastIndex); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index fa1e336..8497788 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,7 +315,7 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; - static const size_t kLogSize = 512; + static const size_t kLogSize = 4096; sp mNBLogWriter; }; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 315cbbc..f679751 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -351,6 +351,7 @@ AudioFlinger::PlaybackThread::Track::Track( // Read the initial underruns because this field is never cleared by the fast mixer mObservedUnderruns = thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); + thread->mNBLogWriter->logf("new Track mName=%d mFastIndex=%d", mName, mFastIndex); } } ALOGV("Track constructor name %d, calling pid %d", mName, @@ -360,6 +361,7 @@ AudioFlinger::PlaybackThread::Track::Track( AudioFlinger::PlaybackThread::Track::~Track() { ALOGV("PlaybackThread::Track destructor"); + // FIXME not sure if safe to log here, would need a lock on thread to do it } void AudioFlinger::PlaybackThread::Track::destroy() @@ -569,7 +571,8 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("start mName=%d", mName); + thread->mNBLogWriter->logf("start mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track @@ -612,7 +615,8 @@ void AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("stop mName=%d", mName); + thread->mNBLogWriter->logf("stop mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -649,7 +653,8 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("pause mName=%d", mName); + thread->mNBLogWriter->logf("pause mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -673,7 +678,8 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock 
_l(thread->mLock); - thread->mNBLogWriter->logf("flush mName=%d", mName); + thread->mNBLogWriter->logf("flush mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From b4698f79230bbee15936641d951d49655f9e6da5 Mon Sep 17 00:00:00 2001 From: Kunter Gultekin Date: Fri, 1 Feb 2013 17:01:15 +0200 Subject: Adds VPX encoding support for stagefright. Only following encoder settings are available - target bitrate - rate control (constant / variable) - frame rate - token partitioning - error resilience - reconstruction & loop filters Only following color formats are recognized - YUV420Planar - YUV420SemiPlanar - AndroidOpaque Following settings are not configurable by the client - encoding deadline is realtime - the algorithm interface for encoder is vp8 - fractional bits of frame rate is discarded - timebase is fixed to 1/1000000 Requires libvpx to be built with encoder support enabled. Requires openmax 1.1.2 extension headers. Relevant tests exist in cts repo. Change-Id: I650f1aca83e7dc93f79d7e6cba7ac24f26e66d40 Signed-off-by: Kunter Gultekin --- media/libstagefright/codecs/on2/enc/Android.mk | 24 + .../codecs/on2/enc/MODULE_LICENSE_APACHE2 | 0 media/libstagefright/codecs/on2/enc/NOTICE | 190 ++++++ .../codecs/on2/enc/SoftVPXEncoder.cpp | 685 +++++++++++++++++++++ .../libstagefright/codecs/on2/enc/SoftVPXEncoder.h | 203 ++++++ 5 files changed, 1102 insertions(+) create mode 100644 media/libstagefright/codecs/on2/enc/Android.mk create mode 100644 media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 create mode 100644 media/libstagefright/codecs/on2/enc/NOTICE create mode 100644 media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp create mode 100644 media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h diff --git a/media/libstagefright/codecs/on2/enc/Android.mk b/media/libstagefright/codecs/on2/enc/Android.mk new file mode 100644 index 0000000..5d3317c --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/Android.mk @@ -0,0 +1,24 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + SoftVPXEncoder.cpp + +LOCAL_C_INCLUDES := \ + $(TOP)/external/libvpx/libvpx \ + $(TOP)/external/openssl/include \ + $(TOP)/external/libvpx/libvpx/vpx_codec \ + $(TOP)/external/libvpx/libvpx/vpx_ports \ + frameworks/av/media/libstagefright/include \ + frameworks/native/include/media/openmax \ + +LOCAL_STATIC_LIBRARIES := \ + libvpx + +LOCAL_SHARED_LIBRARIES := \ + libstagefright libstagefright_omx libstagefright_foundation libutils \ + +LOCAL_MODULE := libstagefright_soft_vpxenc +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000..e69de29 diff --git a/media/libstagefright/codecs/on2/enc/NOTICE b/media/libstagefright/codecs/on2/enc/NOTICE new file mode 100644 index 0000000..faed58a --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2013, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp new file mode 100644 index 0000000..cc38dc3 --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp @@ -0,0 +1,685 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// #define LOG_NDEBUG 0 +#define LOG_TAG "SoftVPXEncoder" +#include "SoftVPXEncoder.h" + +#include + +#include +#include + +namespace android { + + +template +static void InitOMXParams(T *params) { + params->nSize = sizeof(T); + // OMX IL 1.1.2 + params->nVersion.s.nVersionMajor = 1; + params->nVersion.s.nVersionMinor = 1; + params->nVersion.s.nRevision = 2; + params->nVersion.s.nStep = 0; +} + + +static int GetCPUCoreCount() { + int cpuCoreCount = 1; +#if defined(_SC_NPROCESSORS_ONLN) + cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); +#else + // _SC_NPROC_ONLN must be defined... + cpuCoreCount = sysconf(_SC_NPROC_ONLN); +#endif + CHECK_GE(cpuCoreCount, 1); + return cpuCoreCount; +} + + +// This color conversion utility is copied from SoftMPEG4Encoder.cpp +inline static void ConvertSemiPlanarToPlanar(uint8_t *inyuv, + uint8_t* outyuv, + int32_t width, + int32_t height) { + int32_t outYsize = width * height; + uint32_t *outy = (uint32_t *) outyuv; + uint16_t *outcb = (uint16_t *) (outyuv + outYsize); + uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2)); + + /* Y copying */ + memcpy(outy, inyuv, outYsize); + + /* U & V copying */ + uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize); + for (int32_t i = height >> 1; i > 0; --i) { + for (int32_t j = width >> 2; j > 0; --j) { + uint32_t temp = *inyuv_4++; + uint32_t tempU = temp & 0xFF; + tempU = tempU | ((temp >> 8) & 0xFF00); + + uint32_t tempV = (temp >> 8) & 0xFF; + tempV = tempV | ((temp >> 16) & 0xFF00); + + // Flip U and V + *outcb++ = tempV; + *outcr++ = tempU; + } + } +} + + +SoftVPXEncoder::SoftVPXEncoder(const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component) + : SimpleSoftOMXComponent(name, callbacks, appData, component), + mCodecContext(NULL), + mCodecConfiguration(NULL), + mCodecInterface(NULL), + mWidth(176), + mHeight(144), + mBitrate(192000), // in bps + mBitrateControlMode(VPX_VBR), // variable bitrate + mFrameDurationUs(33333), // Defaults to 30 fps + mDCTPartitions(0), + mErrorResilience(OMX_FALSE), + mColorFormat(OMX_COLOR_FormatYUV420Planar), + mLevel(OMX_VIDEO_VP8Level_Version0), + mConversionBuffer(NULL) { + + initPorts(); +} + + +SoftVPXEncoder::~SoftVPXEncoder() { + releaseEncoder(); +} + + +void SoftVPXEncoder::initPorts() { + OMX_PARAM_PORTDEFINITIONTYPE inputPort; + OMX_PARAM_PORTDEFINITIONTYPE outputPort; + + InitOMXParams(&inputPort); + InitOMXParams(&outputPort); + + inputPort.nBufferCountMin = kNumBuffers; + inputPort.nBufferCountActual = inputPort.nBufferCountMin; + inputPort.bEnabled = OMX_TRUE; + inputPort.bPopulated = OMX_FALSE; + 
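+    // (Editorial note: the input-port nBufferSize computed at the end of this
+    // block, nStride * nSliceHeight * 3 / 2, is exactly one YUV420 frame --
+    // a full-resolution luma plane plus two quarter-resolution chroma planes.)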
inputPort.eDomain = OMX_PortDomainVideo; + inputPort.bBuffersContiguous = OMX_FALSE; + inputPort.format.video.pNativeRender = NULL; + inputPort.format.video.nFrameWidth = mWidth; + inputPort.format.video.nFrameHeight = mHeight; + inputPort.format.video.nStride = inputPort.format.video.nFrameWidth; + inputPort.format.video.nSliceHeight = inputPort.format.video.nFrameHeight; + inputPort.format.video.nBitrate = 0; + // frameRate is reciprocal of frameDuration, which is + // in microseconds. It is also in Q16 format. + inputPort.format.video.xFramerate = (1000000/mFrameDurationUs) << 16; + inputPort.format.video.bFlagErrorConcealment = OMX_FALSE; + inputPort.nPortIndex = kInputPortIndex; + inputPort.eDir = OMX_DirInput; + inputPort.nBufferAlignment = kInputBufferAlignment; + inputPort.format.video.cMIMEType = + const_cast(MEDIA_MIMETYPE_VIDEO_RAW); + inputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused; + inputPort.format.video.eColorFormat = mColorFormat; + inputPort.format.video.pNativeWindow = NULL; + inputPort.nBufferSize = + (inputPort.format.video.nStride * + inputPort.format.video.nSliceHeight * 3) / 2; + + addPort(inputPort); + + outputPort.nBufferCountMin = kNumBuffers; + outputPort.nBufferCountActual = outputPort.nBufferCountMin; + outputPort.bEnabled = OMX_TRUE; + outputPort.bPopulated = OMX_FALSE; + outputPort.eDomain = OMX_PortDomainVideo; + outputPort.bBuffersContiguous = OMX_FALSE; + outputPort.format.video.pNativeRender = NULL; + outputPort.format.video.nFrameWidth = mWidth; + outputPort.format.video.nFrameHeight = mHeight; + outputPort.format.video.nStride = outputPort.format.video.nFrameWidth; + outputPort.format.video.nSliceHeight = outputPort.format.video.nFrameHeight; + outputPort.format.video.nBitrate = mBitrate; + outputPort.format.video.xFramerate = 0; + outputPort.format.video.bFlagErrorConcealment = OMX_FALSE; + outputPort.nPortIndex = kOutputPortIndex; + outputPort.eDir = OMX_DirOutput; + outputPort.nBufferAlignment = kOutputBufferAlignment; + outputPort.format.video.cMIMEType = + const_cast(MEDIA_MIMETYPE_VIDEO_VPX); + outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVPX; + outputPort.format.video.eColorFormat = OMX_COLOR_FormatUnused; + outputPort.format.video.pNativeWindow = NULL; + outputPort.nBufferSize = 256 * 1024; // arbitrary + + addPort(outputPort); +} + + +status_t SoftVPXEncoder::initEncoder() { + vpx_codec_err_t codec_return; + + mCodecContext = new vpx_codec_ctx_t; + mCodecConfiguration = new vpx_codec_enc_cfg_t; + mCodecInterface = vpx_codec_vp8_cx(); + + if (mCodecInterface == NULL) { + return UNKNOWN_ERROR; + } + + codec_return = vpx_codec_enc_config_default(mCodecInterface, + mCodecConfiguration, + 0); // Codec specific flags + + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error populating default configuration for vpx encoder."); + return UNKNOWN_ERROR; + } + + mCodecConfiguration->g_w = mWidth; + mCodecConfiguration->g_h = mHeight; + mCodecConfiguration->g_threads = GetCPUCoreCount(); + mCodecConfiguration->g_error_resilient = mErrorResilience; + + switch (mLevel) { + case OMX_VIDEO_VP8Level_Version0: + mCodecConfiguration->g_profile = 0; + break; + + case OMX_VIDEO_VP8Level_Version1: + mCodecConfiguration->g_profile = 1; + break; + + case OMX_VIDEO_VP8Level_Version2: + mCodecConfiguration->g_profile = 2; + break; + + case OMX_VIDEO_VP8Level_Version3: + mCodecConfiguration->g_profile = 3; + break; + + default: + mCodecConfiguration->g_profile = 0; + } + + // OMX timebase unit is microsecond + // g_timebase is in seconds 
(i.e. 1/1000000 seconds) + mCodecConfiguration->g_timebase.num = 1; + mCodecConfiguration->g_timebase.den = 1000000; + // rc_target_bitrate is in kbps, mBitrate in bps + mCodecConfiguration->rc_target_bitrate = mBitrate/1000; + mCodecConfiguration->rc_end_usage = mBitrateControlMode; + + codec_return = vpx_codec_enc_init(mCodecContext, + mCodecInterface, + mCodecConfiguration, + 0); // flags + + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error initializing vpx encoder"); + return UNKNOWN_ERROR; + } + + codec_return = vpx_codec_control(mCodecContext, + VP8E_SET_TOKEN_PARTITIONS, + mDCTPartitions); + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error setting dct partitions for vpx encoder."); + return UNKNOWN_ERROR; + } + + if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + if (mConversionBuffer == NULL) { + mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2); + if (mConversionBuffer == NULL) { + ALOGE("Allocating conversion buffer failed."); + return UNKNOWN_ERROR; + } + } + } + return OK; +} + + +status_t SoftVPXEncoder::releaseEncoder() { + if (mCodecContext != NULL) { + vpx_codec_destroy(mCodecContext); + delete mCodecContext; + mCodecContext = NULL; + } + + if (mCodecConfiguration != NULL) { + delete mCodecConfiguration; + mCodecConfiguration = NULL; + } + + if (mConversionBuffer != NULL) { + delete mConversionBuffer; + mConversionBuffer = NULL; + } + + // this one is not allocated by us + mCodecInterface = NULL; + + return OK; +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index, + OMX_PTR param) { + // can include extension index OMX_INDEXEXTTYPE + const int32_t indexFull = index; + + switch (indexFull) { + case OMX_IndexParamVideoPortFormat: { + OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams = + (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param; + + if (formatParams->nPortIndex == kInputPortIndex) { + if (formatParams->nIndex >= kNumberOfSupportedColorFormats) { + return OMX_ErrorNoMore; + } + + // Color formats, in order of preference + if (formatParams->nIndex == 0) { + formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar; + } else if (formatParams->nIndex == 1) { + formatParams->eColorFormat = + OMX_COLOR_FormatYUV420SemiPlanar; + } else { + formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque; + } + + formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused; + // Converting from microseconds + // Also converting to Q16 format + formatParams->xFramerate = (1000000/mFrameDurationUs) << 16; + return OMX_ErrorNone; + } else if (formatParams->nPortIndex == kOutputPortIndex) { + formatParams->eCompressionFormat = OMX_VIDEO_CodingVPX; + formatParams->eColorFormat = OMX_COLOR_FormatUnused; + formatParams->xFramerate = 0; + return OMX_ErrorNone; + } else { + return OMX_ErrorBadPortIndex; + } + } + + case OMX_IndexParamVideoBitrate: { + OMX_VIDEO_PARAM_BITRATETYPE *bitrate = + (OMX_VIDEO_PARAM_BITRATETYPE *)param; + + if (bitrate->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + bitrate->nTargetBitrate = mBitrate; + + if (mBitrateControlMode == VPX_VBR) { + bitrate->eControlRate = OMX_Video_ControlRateVariable; + } else if (mBitrateControlMode == VPX_CBR) { + bitrate->eControlRate = OMX_Video_ControlRateConstant; + } else { + return OMX_ErrorUnsupportedSetting; + } + return OMX_ErrorNone; + } + + // VP8 specific parameters that use extension headers + case OMX_IndexParamVideoVp8: { + OMX_VIDEO_PARAM_VP8TYPE *vp8Params = + (OMX_VIDEO_PARAM_VP8TYPE *)param; + + if (vp8Params->nPortIndex != kOutputPortIndex) { + 
return OMX_ErrorUnsupportedIndex; + } + + vp8Params->eProfile = OMX_VIDEO_VP8ProfileMain; + vp8Params->eLevel = mLevel; + vp8Params->nDCTPartitions = mDCTPartitions; + vp8Params->bErrorResilientMode = mErrorResilience; + return OMX_ErrorNone; + } + + case OMX_IndexParamVideoProfileLevelQuerySupported: { + OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel = + (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param; + + if (profileAndLevel->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + switch (profileAndLevel->nProfileIndex) { + case 0: + profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version0; + break; + + case 1: + profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version1; + break; + + case 2: + profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version2; + break; + + case 3: + profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version3; + break; + + default: + return OMX_ErrorNoMore; + } + + profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain; + return OMX_ErrorNone; + } + + case OMX_IndexParamVideoProfileLevelCurrent: { + OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel = + (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param; + + if (profileAndLevel->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + profileAndLevel->eLevel = mLevel; + profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain; + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::internalGetParameter(index, param); + } +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index, + const OMX_PTR param) { + // can include extension index OMX_INDEXEXTTYPE + const int32_t indexFull = index; + + switch (indexFull) { + case OMX_IndexParamStandardComponentRole: + return internalSetRoleParams( + (const OMX_PARAM_COMPONENTROLETYPE *)param); + + case OMX_IndexParamVideoBitrate: + return internalSetBitrateParams( + (const OMX_VIDEO_PARAM_BITRATETYPE *)param); + + case OMX_IndexParamPortDefinition: + return internalSetPortParams( + (const OMX_PARAM_PORTDEFINITIONTYPE *)param); + + case OMX_IndexParamVideoPortFormat: + return internalSetFormatParams( + (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param); + + case OMX_IndexParamVideoVp8: + return internalSetVp8Params( + (const OMX_VIDEO_PARAM_VP8TYPE *)param); + + case OMX_IndexParamVideoProfileLevelCurrent: + return internalSetProfileLevel( + (const OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param); + + default: + return SimpleSoftOMXComponent::internalSetParameter(index, param); + } +} + +OMX_ERRORTYPE SoftVPXEncoder::internalSetProfileLevel( + const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel) { + if (profileAndLevel->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + if (profileAndLevel->eProfile != OMX_VIDEO_VP8ProfileMain) { + return OMX_ErrorBadParameter; + } + + if (profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version0 || + profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version1 || + profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version2 || + profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version3) { + mLevel = (OMX_VIDEO_VP8LEVELTYPE)profileAndLevel->eLevel; + } else { + return OMX_ErrorBadParameter; + } + + return OMX_ErrorNone; +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params( + const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) { + if (vp8Params->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + if (vp8Params->eProfile != OMX_VIDEO_VP8ProfileMain) { + return OMX_ErrorBadParameter; + } + + if (vp8Params->eLevel == OMX_VIDEO_VP8Level_Version0 || + vp8Params->eLevel == 
OMX_VIDEO_VP8Level_Version1 || + vp8Params->eLevel == OMX_VIDEO_VP8Level_Version2 || + vp8Params->eLevel == OMX_VIDEO_VP8Level_Version3) { + mLevel = vp8Params->eLevel; + } else { + return OMX_ErrorBadParameter; + } + + if (vp8Params->nDCTPartitions <= kMaxDCTPartitions) { + mDCTPartitions = vp8Params->nDCTPartitions; + } else { + return OMX_ErrorBadParameter; + } + + mErrorResilience = vp8Params->bErrorResilientMode; + return OMX_ErrorNone; +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams( + const OMX_VIDEO_PARAM_PORTFORMATTYPE* format) { + if (format->nPortIndex == kInputPortIndex) { + if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar || + format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || + format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) { + mColorFormat = format->eColorFormat; + return OMX_ErrorNone; + } else { + ALOGE("Unsupported color format %i", format->eColorFormat); + return OMX_ErrorUnsupportedSetting; + } + } else if (format->nPortIndex == kOutputPortIndex) { + if (format->eCompressionFormat == OMX_VIDEO_CodingVPX) { + return OMX_ErrorNone; + } else { + return OMX_ErrorUnsupportedSetting; + } + } else { + return OMX_ErrorBadPortIndex; + } +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetRoleParams( + const OMX_PARAM_COMPONENTROLETYPE* role) { + const char* roleText = (const char*)role->cRole; + const size_t roleTextMaxSize = OMX_MAX_STRINGNAME_SIZE - 1; + + if (strncmp(roleText, "video_encoder.vpx", roleTextMaxSize)) { + ALOGE("Unsupported component role"); + return OMX_ErrorBadParameter; + } + + return OMX_ErrorNone; +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams( + const OMX_PARAM_PORTDEFINITIONTYPE* port) { + if (port->nPortIndex == kInputPortIndex) { + mWidth = port->format.video.nFrameWidth; + mHeight = port->format.video.nFrameHeight; + + // xFramerate comes in Q16 format, in frames per second unit + const uint32_t framerate = port->format.video.xFramerate >> 16; + // frame duration is in microseconds + mFrameDurationUs = (1000000/framerate); + + if (port->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar || + port->format.video.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || + port->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) { + mColorFormat = port->format.video.eColorFormat; + } else { + return OMX_ErrorUnsupportedSetting; + } + + return OMX_ErrorNone; + } else if (port->nPortIndex == kOutputPortIndex) { + mBitrate = port->format.video.nBitrate; + return OMX_ErrorNone; + } else { + return OMX_ErrorBadPortIndex; + } +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams( + const OMX_VIDEO_PARAM_BITRATETYPE* bitrate) { + if (bitrate->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + mBitrate = bitrate->nTargetBitrate; + + if (bitrate->eControlRate == OMX_Video_ControlRateVariable) { + mBitrateControlMode = VPX_VBR; + } else if (bitrate->eControlRate == OMX_Video_ControlRateConstant) { + mBitrateControlMode = VPX_CBR; + } else { + return OMX_ErrorUnsupportedSetting; + } + + return OMX_ErrorNone; +} + + +void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { + // Initialize encoder if not already + if (mCodecContext == NULL) { + if (OK != initEncoder()) { + ALOGE("Failed to initialize encoder"); + notify(OMX_EventError, + OMX_ErrorUndefined, + 0, // Extra notification data + NULL); // Notification data pointer + return; + } + } + + vpx_codec_err_t codec_return; + List &inputBufferInfoQueue = getPortQueue(kInputPortIndex); + List &outputBufferInfoQueue = 
getPortQueue(kOutputPortIndex); + + while (!inputBufferInfoQueue.empty() && !outputBufferInfoQueue.empty()) { + BufferInfo *inputBufferInfo = *inputBufferInfoQueue.begin(); + OMX_BUFFERHEADERTYPE *inputBufferHeader = inputBufferInfo->mHeader; + + BufferInfo *outputBufferInfo = *outputBufferInfoQueue.begin(); + OMX_BUFFERHEADERTYPE *outputBufferHeader = outputBufferInfo->mHeader; + + if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) { + inputBufferInfoQueue.erase(inputBufferInfoQueue.begin()); + inputBufferInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inputBufferHeader); + + outputBufferHeader->nFilledLen = 0; + outputBufferHeader->nFlags = OMX_BUFFERFLAG_EOS; + + outputBufferInfoQueue.erase(outputBufferInfoQueue.begin()); + outputBufferInfo->mOwnedByUs = false; + notifyFillBufferDone(outputBufferHeader); + return; + } + + uint8_t* source = inputBufferHeader->pBuffer + inputBufferHeader->nOffset; + + // NOTE: As much as nothing is known about color format + // when it is denoted as AndroidOpaque, it is at least + // assumed to be planar. + if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + ConvertSemiPlanarToPlanar(source, mConversionBuffer, mWidth, mHeight); + source = mConversionBuffer; + } + vpx_image_t raw_frame; + vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, mWidth, mHeight, + kInputBufferAlignment, source); + codec_return = vpx_codec_encode(mCodecContext, + &raw_frame, + inputBufferHeader->nTimeStamp, // in timebase units + mFrameDurationUs, // frame duration in timebase units + 0, // frame flags + VPX_DL_REALTIME); // encoding deadline + if (codec_return != VPX_CODEC_OK) { + ALOGE("vpx encoder failed to encode frame"); + notify(OMX_EventError, + OMX_ErrorUndefined, + 0, // Extra notification data + NULL); // Notification data pointer + return; + } + + vpx_codec_iter_t encoded_packet_iterator = NULL; + const vpx_codec_cx_pkt_t* encoded_packet; + + while (encoded_packet = vpx_codec_get_cx_data(mCodecContext, &encoded_packet_iterator)) { + if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) { + outputBufferHeader->nTimeStamp = encoded_packet->data.frame.pts; + outputBufferHeader->nFlags = 0; + outputBufferHeader->nOffset = 0; + outputBufferHeader->nFilledLen = encoded_packet->data.frame.sz; + memcpy(outputBufferHeader->pBuffer, + encoded_packet->data.frame.buf, + encoded_packet->data.frame.sz); + outputBufferInfo->mOwnedByUs = false; + outputBufferInfoQueue.erase(outputBufferInfoQueue.begin()); + notifyFillBufferDone(outputBufferHeader); + } + } + + inputBufferInfo->mOwnedByUs = false; + inputBufferInfoQueue.erase(inputBufferInfoQueue.begin()); + notifyEmptyBufferDone(inputBufferHeader); + } +} +} // namespace android + + +android::SoftOMXComponent *createSoftOMXComponent( + const char *name, const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, OMX_COMPONENTTYPE **component) { + return new android::SoftVPXEncoder(name, callbacks, appData, component); +} diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h new file mode 100644 index 0000000..3bc05c0 --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h @@ -0,0 +1,203 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SOFT_VPX_ENCODER_H_ + +#define SOFT_VPX_ENCODER_H_ + +#include "SimpleSoftOMXComponent.h" + +#include +#include + +#include "vpx/vpx_encoder.h" +#include "vpx/vpx_codec.h" +#include "vpx/vp8cx.h" + +namespace android { + +// Exposes a vpx encoder as an OMX Component +// +// Boilerplate for callback bindings are taken care +// by the base class SimpleSoftOMXComponent and its +// parent SoftOMXComponent. +// +// Only following encoder settings are available +// - target bitrate +// - rate control (constant / variable) +// - frame rate +// - error resilience +// - token partitioning +// - reconstruction & loop filters (g_profile) +// +// Only following color formats are recognized +// - YUV420Planar +// - YUV420SemiPlanar +// - AndroidOpaque +// +// Following settings are not configurable by the client +// - encoding deadline is realtime +// - multithreaded encoding utilizes a number of threads equal +// to online cpu's available +// - the algorithm interface for encoder is vp8 +// - fractional bits of frame rate is discarded +// - OMX timestamps are in microseconds, therefore +// encoder timebase is fixed to 1/1000000 + +class SoftVPXEncoder : public SimpleSoftOMXComponent { + public: + SoftVPXEncoder(const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component); + + protected: + virtual ~SoftVPXEncoder(); + + // Returns current values for requested OMX + // parameters + virtual OMX_ERRORTYPE internalGetParameter( + OMX_INDEXTYPE index, OMX_PTR param); + + // Validates, extracts and stores relevant OMX + // parameters + virtual OMX_ERRORTYPE internalSetParameter( + OMX_INDEXTYPE index, const OMX_PTR param); + + // OMX callback when buffers available + // Note that both an input and output buffer + // is expected to be available to carry out + // encoding of the frame + virtual void onQueueFilled(OMX_U32 portIndex); + + private: + // number of buffers allocated per port + static const uint32_t kNumBuffers = 4; + + // OMX port indexes that refer to input and + // output ports respectively + static const uint32_t kInputPortIndex = 0; + static const uint32_t kOutputPortIndex = 1; + + // Byte-alignment required for buffers + static const uint32_t kInputBufferAlignment = 1; + static const uint32_t kOutputBufferAlignment = 2; + + // Max value supported for DCT partitions + static const uint32_t kMaxDCTPartitions = 3; + + // Number of supported input color formats + static const uint32_t kNumberOfSupportedColorFormats = 3; + + // vpx specific opaque data structure that + // stores encoder state + vpx_codec_ctx_t* mCodecContext; + + // vpx specific data structure that + // stores encoder configuration + vpx_codec_enc_cfg_t* mCodecConfiguration; + + // vpx specific read-only data structure + // that specifies algorithm interface (e.g. vp8) + vpx_codec_iface_t* mCodecInterface; + + // Width of the input frames + int32_t mWidth; + + // Height of the input frames + int32_t mHeight; + + // Target bitrate set for the encoder, in bits per second. 
+ int32_t mBitrate; + + // Bitrate control mode, either constant or variable + vpx_rc_mode mBitrateControlMode; + + // Frame duration is the reciprocal of framerate, denoted + // in microseconds + uint64_t mFrameDurationUs; + + // vp8 specific configuration parameter + // that enables token partitioning of + // the stream into substreams + int32_t mDCTPartitions; + + // Parameter that denotes whether error resilience + // is enabled in encoder + OMX_BOOL mErrorResilience; + + // Color format for the input port + OMX_COLOR_FORMATTYPE mColorFormat; + + // Encoder profile corresponding to OMX level parameter + // + // The inconsistency in the naming is caused by + // OMX spec referring vpx profiles (g_profile) + // as "levels" whereas using the name "profile" for + // something else. + OMX_VIDEO_VP8LEVELTYPE mLevel; + + // Conversion buffer is needed to convert semi + // planar yuv420 to planar format + // It is only allocated if input format is + // indeed YUV420SemiPlanar. + uint8_t* mConversionBuffer; + + // Initializes input and output OMX ports with sensible + // default values. + void initPorts(); + + // Initializes vpx encoder with available settings. + status_t initEncoder(); + + // Releases vpx encoder instance, with it's associated + // data structures. + // + // Unless called earlier, this is handled by the + // dtor. + status_t releaseEncoder(); + + // Handles port changes with respect to color formats + OMX_ERRORTYPE internalSetFormatParams( + const OMX_VIDEO_PARAM_PORTFORMATTYPE* format); + + // Verifies the component role tried to be set to this OMX component is + // strictly video_encoder.vpx + OMX_ERRORTYPE internalSetRoleParams( + const OMX_PARAM_COMPONENTROLETYPE* role); + + // Updates bitrate to reflect port settings. + OMX_ERRORTYPE internalSetBitrateParams( + const OMX_VIDEO_PARAM_BITRATETYPE* bitrate); + + // Handles port definition changes. + OMX_ERRORTYPE internalSetPortParams( + const OMX_PARAM_PORTDEFINITIONTYPE* port); + + // Handles vp8 specific parameters. + OMX_ERRORTYPE internalSetVp8Params( + const OMX_VIDEO_PARAM_VP8TYPE* vp8Params); + + // Updates encoder profile + OMX_ERRORTYPE internalSetProfileLevel( + const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel); + + DISALLOW_EVIL_CONSTRUCTORS(SoftVPXEncoder); +}; + +} // namespace android + +#endif // SOFT_VPX_ENCODER_H_ -- cgit v1.1 From f98ab74cdc06c1978762cb99d7b28061bc7d1044 Mon Sep 17 00:00:00 2001 From: James Dong Date: Tue, 12 Feb 2013 10:36:01 -0800 Subject: Add OMX.google.vpx.encoder Change-Id: I843e6e542533884a94e105e1cb56f16f0440af61 --- media/libstagefright/omx/SoftOMXPlugin.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp index 6e1c04d..b3fe98e 100644 --- a/media/libstagefright/omx/SoftOMXPlugin.cpp +++ b/media/libstagefright/omx/SoftOMXPlugin.cpp @@ -51,6 +51,7 @@ static const struct { { "OMX.google.mp3.decoder", "mp3dec", "audio_decoder.mp3" }, { "OMX.google.vorbis.decoder", "vorbisdec", "audio_decoder.vorbis" }, { "OMX.google.vpx.decoder", "vpxdec", "video_decoder.vpx" }, + { "OMX.google.vpx.encoder", "vpxenc", "video_encoder.vpx" }, { "OMX.google.raw.decoder", "rawdec", "audio_decoder.raw" }, { "OMX.google.flac.encoder", "flacenc", "audio_encoder.flac" }, { "OMX.google.gsm.decoder", "gsmdec", "audio_decoder.gsm" }, -- cgit v1.1 From 6df48bfe8cccdfec58f5f94be3cf3a2c64697e56 Mon Sep 17 00:00:00 2001 From: ztenghui Date: Thu, 7 Feb 2013 15:12:10 -0800 Subject: Add a test utility for MediaMuxer. 
This test/utility copy samples from one video and mux into another video. It support trimming, cutting audio or video track. It can run simply as command line like: adb shell muxer -a -v -s 1000 -e 8000 "/sdcard/DCIM/Camera/VID_*.mp4" bug:7991013 Change-Id: I8a2eeff3cabd001b6b2a7062d991dd076edbf22e --- cmds/stagefright/Android.mk | 23 ++++ cmds/stagefright/muxer.cpp | 295 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 318 insertions(+) create mode 100644 cmds/stagefright/muxer.cpp diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk index a59186a..d583e65 100644 --- a/cmds/stagefright/Android.mk +++ b/cmds/stagefright/Android.mk @@ -165,3 +165,26 @@ LOCAL_MODULE:= codec include $(BUILD_EXECUTABLE) +################################################################################ + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + muxer.cpp \ + +LOCAL_SHARED_LIBRARIES := \ + libstagefright liblog libutils libbinder libstagefright_foundation \ + libmedia libgui libcutils libui libc + +LOCAL_C_INCLUDES:= \ + frameworks/av/media/libstagefright \ + $(TOP)/frameworks/native/include/media/openmax + +LOCAL_CFLAGS += -Wno-multichar + +LOCAL_MODULE_TAGS := debug + +LOCAL_MODULE:= muxer + +include $(BUILD_EXECUTABLE) + diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp new file mode 100644 index 0000000..1b127c7 --- /dev/null +++ b/cmds/stagefright/muxer.cpp @@ -0,0 +1,295 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "muxer" +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +static void usage(const char *me) { + fprintf(stderr, "usage: %s [-a] [-v] [-s ]" + " [-e ] [-o ]" + " \n", me); + fprintf(stderr, " -h help\n"); + fprintf(stderr, " -a use audio\n"); + fprintf(stderr, " -v use video\n"); + fprintf(stderr, " -s Time in milli-seconds when the trim should start\n"); + fprintf(stderr, " -e Time in milli-seconds when the trim should end\n"); + fprintf(stderr, " -o output file name. Default is /sdcard/muxeroutput.mp4\n"); + + exit(1); +} + +using namespace android; + +static int muxing( + const android::sp &looper, + const char *path, + bool useAudio, + bool useVideo, + const char *outputFileName, + bool enableTrim, + int trimStartTimeMs, + int trimEndTimeMs) { + sp extractor = new NuMediaExtractor; + if (extractor->setDataSource(path) != OK) { + fprintf(stderr, "unable to instantiate extractor. %s\n", path); + return 1; + } + + if (outputFileName == NULL) { + outputFileName = "/sdcard/muxeroutput.mp4"; + } + + ALOGV("input file %s, output file %s", path, outputFileName); + ALOGV("useAudio %d, useVideo %d", useAudio, useVideo); + + sp muxer = new MediaMuxer(outputFileName); + + size_t trackCount = extractor->countTracks(); + // Map the extractor's track index to the muxer's track index. 
+ KeyedVector trackIndexMap; + size_t bufferSize = 1 * 1024 * 1024; // default buffer size is 1MB. + + bool haveAudio = false; + bool haveVideo = false; + + int64_t trimStartTimeUs = trimStartTimeMs * 1000; + int64_t trimEndTimeUs = trimEndTimeMs * 1000; + bool trimStarted = false; + int64_t trimOffsetTimeUs = 0; + + for (size_t i = 0; i < trackCount; ++i) { + sp format; + status_t err = extractor->getTrackFormat(i, &format); + CHECK_EQ(err, (status_t)OK); + ALOGV("extractor getTrackFormat: %s", format->debugString().c_str()); + + AString mime; + CHECK(format->findString("mime", &mime)); + + bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6); + bool isVideo = !strncasecmp(mime.c_str(), "video/", 6); + + if (useAudio && !haveAudio && isAudio) { + haveAudio = true; + } else if (useVideo && !haveVideo && isVideo) { + haveVideo = true; + } else { + continue; + } + + if (isVideo) { + int width , height; + CHECK(format->findInt32("width", &width)); + CHECK(format->findInt32("height", &height)); + bufferSize = width * height * 4; // Assuming it is maximally 4BPP + } + + int64_t duration; + CHECK(format->findInt64("durationUs", &duration)); + + // Since we got the duration now, correct the start time. + if (enableTrim) { + if (trimStartTimeUs > duration) { + fprintf(stderr, "Warning: trimStartTimeUs > duration," + " reset to 0\n"); + trimStartTimeUs = 0; + } + } + + ALOGV("selecting track %d", i); + + err = extractor->selectTrack(i); + CHECK_EQ(err, (status_t)OK); + + ssize_t newTrackIndex = muxer->addTrack(format); + CHECK_GE(newTrackIndex, 0); + trackIndexMap.add(i, newTrackIndex); + } + + int64_t muxerStartTimeUs = ALooper::GetNowUs(); + + bool sawInputEOS = false; + + size_t trackIndex = -1; + sp newBuffer = new ABuffer(bufferSize); + + muxer->start(); + + while (!sawInputEOS) { + status_t err = extractor->getSampleTrackIndex(&trackIndex); + if (err != OK) { + ALOGV("saw input eos, err %d", err); + sawInputEOS = true; + break; + } else { + err = extractor->readSampleData(newBuffer); + CHECK_EQ(err, (status_t)OK); + + int64_t timeUs; + err = extractor->getSampleTime(&timeUs); + CHECK_EQ(err, (status_t)OK); + + sp meta; + err = extractor->getSampleMeta(&meta); + CHECK_EQ(err, (status_t)OK); + + uint32_t sampleFlags = 0; + int32_t val; + if (meta->findInt32(kKeyIsSyncFrame, &val) && val != 0) { + // We only support BUFFER_FLAG_SYNCFRAME in the flag for now. + sampleFlags |= MediaCodec::BUFFER_FLAG_SYNCFRAME; + + // We turn on trimming at the sync frame. + if (enableTrim && timeUs > trimStartTimeUs && + timeUs <= trimEndTimeUs) { + if (trimStarted == false) { + trimOffsetTimeUs = timeUs; + } + trimStarted = true; + } + } + // Trim can end at any non-sync frame. + if (enableTrim && timeUs > trimEndTimeUs) { + trimStarted = false; + } + + if (!enableTrim || (enableTrim && trimStarted)) { + err = muxer->writeSampleData(newBuffer, + trackIndexMap.valueFor(trackIndex), + timeUs - trimOffsetTimeUs, sampleFlags); + } + + extractor->advance(); + } + } + + muxer->stop(); + newBuffer.clear(); + trackIndexMap.clear(); + + int64_t elapsedTimeUs = ALooper::GetNowUs() - muxerStartTimeUs; + fprintf(stderr, "SUCCESS: muxer generate the video in %lld ms\n", + elapsedTimeUs / 1000); + + return 0; +} + +int main(int argc, char **argv) { + const char *me = argv[0]; + + bool useAudio = false; + bool useVideo = false; + char *outputFileName = NULL; + int trimStartTimeMs = -1; + int trimEndTimeMs = -1; + // When trimStartTimeMs and trimEndTimeMs seems valid, we turn this switch + // to true. 
+ bool enableTrim = false; + + int res; + while ((res = getopt(argc, argv, "h?avo:s:e:")) >= 0) { + switch (res) { + case 'a': + { + useAudio = true; + break; + } + + case 'v': + { + useVideo = true; + break; + } + + case 'o': + { + outputFileName = optarg; + break; + } + + case 's': + { + trimStartTimeMs = atoi(optarg); + break; + } + + case 'e': + { + trimEndTimeMs = atoi(optarg); + break; + } + + case '?': + case 'h': + default: + { + usage(me); + } + } + } + + argc -= optind; + argv += optind; + + if (argc != 1) { + usage(me); + } + + if (trimStartTimeMs < 0 || trimEndTimeMs < 0) { + // If no input on either 's' or 'e', or they are obviously wrong input, + // then turn off trimming. + ALOGV("Trimming is disabled, copying the whole length video."); + enableTrim = false; + } else if (trimStartTimeMs > trimEndTimeMs) { + fprintf(stderr, "ERROR: start time is bigger\n"); + return 1; + } else { + enableTrim = true; + } + + if (!useAudio && !useVideo) { + fprintf(stderr, "ERROR: Missing both -a and -v, no track to mux then.\n"); + return 1; + } + ProcessState::self()->startThreadPool(); + + // Make sure setDataSource() works. + DataSource::RegisterDefaultSniffers(); + + sp looper = new ALooper; + looper->start(); + + int result = muxing(looper, argv[0], useAudio, useVideo, outputFileName, + enableTrim, trimStartTimeMs, trimEndTimeMs); + + looper->stop(); + + return result; +} -- cgit v1.1 From 0ddd56316262ac74a95e9edb595697c163136d6d Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 13 Feb 2013 14:46:45 -0800 Subject: Temporary additional logging to investigate bug The bug appears related to continuing to use an invalid buffer provider in fast mixer after track destruction, so focus the added logs in that area. Also includes a bug fix: was calling log in an unsafe place near Threads.cpp AudioFlinger::PlaybackThread::createTrack_l line 1250. Details: - include caller pid or client pid where appropriate - increase log buffer size - log mFastIndex when AudioMixer sees an invalid bufferProvider. - log both potentially modified and actually modified tracks in FastMixer. - fix benign bug where sq->end() was called more than once. - log StateQueue push() call and return. 
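
The buffer-provider checks listed in the details above all follow one pattern; a minimal sketch is shown below, assuming the FastMixer locals logWriter, fastTrack and i and the AudioBufferProvider::getValid()/kValid debug hook that the hunks further down rely on. It is not a verbatim excerpt of any single hunk.

    AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider;
    if (bufferProvider == NULL ||
            bufferProvider->getValid() != AudioBufferProvider::kValid) {
        // Record a wall-clock marker plus the offending fast track slot in the
        // NBLog stream; the same check is applied when tracks are added,
        // modified, or mixed.
        logWriter->logTimestamp();
        logWriter->logf("added invalid %#x", i);
    }
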
Bug: 6490974 Change-Id: Iee7c8f40e20b6000cd8286c0ec6a14fff4a37af1 --- services/audioflinger/AudioFlinger.h | 2 +- services/audioflinger/AudioMixer.cpp | 8 ++++++-- services/audioflinger/AudioMixer.h | 4 +++- services/audioflinger/FastMixer.cpp | 30 ++++++++++++++++++++++++++++-- services/audioflinger/Threads.cpp | 28 ++++++++++++++++++++++------ services/audioflinger/Threads.h | 2 +- services/audioflinger/Tracks.cpp | 14 ++++++++++---- 7 files changed, 71 insertions(+), 17 deletions(-) diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index c3f08f6..593f131 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -227,7 +227,7 @@ public: sp newWriter_l(size_t size, const char *name); void unregisterWriter(const sp& writer); private: - static const size_t kLogMemorySize = 10 * 1024; + static const size_t kLogMemorySize = 20 * 1024; sp mLogMemoryDealer; // == 0 when NBLog is disabled public: diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 08325ad..30d645c 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -209,6 +209,7 @@ int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) t->mainBuffer = NULL; t->auxBuffer = NULL; t->downmixerBufferProvider = NULL; + t->fastIndex = -1; status_t status = initTrackDownmix(&mState.tracks[n], n, channelMask); if (status == OK) { @@ -455,6 +456,9 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) // for a specific track? or per mixer? /* case DOWNMIX_TYPE: break */ + case FAST_INDEX: + track.fastIndex = valueInt; + break; default: LOG_FATAL("bad param"); } @@ -1111,8 +1115,8 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) t.buffer.frameCount = state->frameCount; int valid = t.bufferProvider->getValid(); if (valid != AudioBufferProvider::kValid) { - ALOGE("invalid bufferProvider=%p name=%d frameCount=%d valid=%#x enabledTracks=%#x", - t.bufferProvider, i, t.buffer.frameCount, valid, enabledTracks); + ALOGE("invalid bufferProvider=%p name=%d fastIndex=%d frameCount=%d valid=%#x enabledTracks=%#x", + t.bufferProvider, i, t.fastIndex, t.buffer.frameCount, valid, enabledTracks); // expect to crash } t.bufferProvider->getNextBuffer(&t.buffer, pts); diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index fd21fda..f757351 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -76,6 +76,7 @@ public: MAIN_BUFFER = 0x4002, AUX_BUFFER = 0x4003, DOWNMIX_TYPE = 0X4004, + FAST_INDEX = 0x4005, // for debugging only // for target RESAMPLE SAMPLE_RATE = 0x4100, // Configure sample rate conversion on this track name; // parameter 'value' is the new sample rate in Hz. 
@@ -200,7 +201,8 @@ private: int32_t sessionId; - int32_t padding[2]; + int32_t fastIndex; + int32_t padding[1]; // 16-byte boundary diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 80e37ca..b64c8fb 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -120,6 +120,7 @@ bool FastMixer::threadLoop() FastMixerState::Command command = next->mCommand; if (next != current) { + logWriter->logTimestamp(); logWriter->log("next != current"); // As soon as possible of learning of a new dump area, start using it @@ -301,12 +302,19 @@ bool FastMixer::threadLoop() const FastTrack* fastTrack = ¤t->mFastTracks[i]; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("added invalid %#x", i); + } if (mixer != NULL) { // calling getTrackName with default channel mask and a random invalid // sessionId (no effects here) name = mixer->getTrackName(AUDIO_CHANNEL_OUT_STEREO, -555); ALOG_ASSERT(name >= 0); fastTrackNames[i] = name; + mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, + (void *) i); mixer->setBufferProvider(name, bufferProvider); mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, (void *) mixBuffer); @@ -322,22 +330,31 @@ bool FastMixer::threadLoop() generations[i] = fastTrack->mGeneration; } - // finally process modified tracks; these use the same slot + // finally process (potentially) modified tracks; these use the same slot // but may have a different buffer provider or volume provider unsigned modifiedTracks = currentTrackMask & previousTrackMask; if (modifiedTracks) { - logWriter->logf("modified %#x", modifiedTracks); + logWriter->logf("pot. mod. %#x", modifiedTracks); } + unsigned actuallyModifiedTracks = 0; while (modifiedTracks != 0) { i = __builtin_ctz(modifiedTracks); modifiedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; if (fastTrack->mGeneration != generations[i]) { + actuallyModifiedTracks |= 1 << i; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("modified invalid %#x", i); + } if (mixer != NULL) { name = fastTrackNames[i]; ALOG_ASSERT(name >= 0); + mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, + (void *) i); mixer->setBufferProvider(name, bufferProvider); if (fastTrack->mVolumeProvider == NULL) { mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, @@ -360,6 +377,9 @@ bool FastMixer::threadLoop() generations[i] = fastTrack->mGeneration; } } + if (actuallyModifiedTracks) { + logWriter->logf("act. mod. 
%#x", actuallyModifiedTracks); + } fastTracksGen = current->mFastTracksGen; @@ -423,6 +443,12 @@ bool FastMixer::threadLoop() } ftDump->mUnderruns = underruns; ftDump->mFramesReady = framesReady; + AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("mixing invalid %#x", i); + } } int64_t pts; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index ba848d7..554532d 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1196,6 +1196,8 @@ sp AudioFlinger::PlaybackThread::createTrac { // scope for mLock Mutex::Autolock _l(mLock); + mNBLogWriter->logf("createTrack_l isFast=%d caller=%d", + (*flags & IAudioFlinger::TRACK_FAST) != 0, IPCThreadState::self()->getCallingPid()); // all tracks in same audio session must share the same routing strategy otherwise // conflicts will happen when tracks are moved from one output to another by audio policy @@ -1249,7 +1251,6 @@ Exit: if (status) { *status = lStatus; } - mNBLogWriter->logf("createTrack_l"); return track; } @@ -1317,7 +1318,8 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { - mNBLogWriter->logf("addTrack_l mName=%d", track->mName); + mNBLogWriter->logf("addTrack_l mName=%d mFastIndex=%d caller=%d", track->mName, + track->mFastIndex, IPCThreadState::self()->getCallingPid()); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1351,7 +1353,9 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("destroyTrack_l mName=%d mFastIndex=%d mClientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? track->mClient->pid() : -1); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1361,7 +1365,9 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { - mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("removeTrack_l mName=%d mFastIndex=%d clientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? 
track->mClient->pid() : -1); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -2839,11 +2845,19 @@ track_is_ready: ; block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; pauseAudioWatchdog = true; } - sq->end(); } if (sq != NULL) { + unsigned trackMask = state->mTrackMask; sq->end(didModify); + if (didModify) { + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("push trackMask=%#x block=%d", trackMask, block); + } sq->push(block); + if (didModify) { + mNBLogWriter->logTimestamp(); + mNBLogWriter->log("pushed"); + } } #ifdef AUDIO_WATCHDOG if (pauseAudioWatchdog && mAudioWatchdog != 0) { @@ -2870,7 +2884,9 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("prepareTracks_l remove name=%u mFastIndex=%d", track->name(), + track->mFastIndex); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index fa1e336..8497788 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,7 +315,7 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; - static const size_t kLogSize = 512; + static const size_t kLogSize = 4096; sp mNBLogWriter; }; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 315cbbc..f679751 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -351,6 +351,7 @@ AudioFlinger::PlaybackThread::Track::Track( // Read the initial underruns because this field is never cleared by the fast mixer mObservedUnderruns = thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); + thread->mNBLogWriter->logf("new Track mName=%d mFastIndex=%d", mName, mFastIndex); } } ALOGV("Track constructor name %d, calling pid %d", mName, @@ -360,6 +361,7 @@ AudioFlinger::PlaybackThread::Track::Track( AudioFlinger::PlaybackThread::Track::~Track() { ALOGV("PlaybackThread::Track destructor"); + // FIXME not sure if safe to log here, would need a lock on thread to do it } void AudioFlinger::PlaybackThread::Track::destroy() @@ -569,7 +571,8 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("start mName=%d", mName); + thread->mNBLogWriter->logf("start mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track @@ -612,7 +615,8 @@ void AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("stop mName=%d", mName); + thread->mNBLogWriter->logf("stop mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -649,7 +653,8 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 
0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("pause mName=%d", mName); + thread->mNBLogWriter->logf("pause mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -673,7 +678,8 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("flush mName=%d", mName); + thread->mNBLogWriter->logf("flush mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From 51eb3965caa8ba135bcdd8ffb7a2024a042ecdc0 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 14 Feb 2013 00:02:36 +0000 Subject: Revert "Temporary additional logging to investigate bug" This reverts commit 0ddd56316262ac74a95e9edb595697c163136d6d Change-Id: I180a928af6f5a38d15a5efe44cd1fe927b5d961c --- services/audioflinger/AudioFlinger.h | 2 +- services/audioflinger/AudioMixer.cpp | 8 ++------ services/audioflinger/AudioMixer.h | 4 +--- services/audioflinger/FastMixer.cpp | 30 ++---------------------------- services/audioflinger/Threads.cpp | 28 ++++++---------------------- services/audioflinger/Threads.h | 2 +- services/audioflinger/Tracks.cpp | 14 ++++---------- 7 files changed, 17 insertions(+), 71 deletions(-) diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 593f131..c3f08f6 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -227,7 +227,7 @@ public: sp newWriter_l(size_t size, const char *name); void unregisterWriter(const sp& writer); private: - static const size_t kLogMemorySize = 20 * 1024; + static const size_t kLogMemorySize = 10 * 1024; sp mLogMemoryDealer; // == 0 when NBLog is disabled public: diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 30d645c..08325ad 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -209,7 +209,6 @@ int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) t->mainBuffer = NULL; t->auxBuffer = NULL; t->downmixerBufferProvider = NULL; - t->fastIndex = -1; status_t status = initTrackDownmix(&mState.tracks[n], n, channelMask); if (status == OK) { @@ -456,9 +455,6 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) // for a specific track? or per mixer? 
/* case DOWNMIX_TYPE: break */ - case FAST_INDEX: - track.fastIndex = valueInt; - break; default: LOG_FATAL("bad param"); } @@ -1115,8 +1111,8 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) t.buffer.frameCount = state->frameCount; int valid = t.bufferProvider->getValid(); if (valid != AudioBufferProvider::kValid) { - ALOGE("invalid bufferProvider=%p name=%d fastIndex=%d frameCount=%d valid=%#x enabledTracks=%#x", - t.bufferProvider, i, t.fastIndex, t.buffer.frameCount, valid, enabledTracks); + ALOGE("invalid bufferProvider=%p name=%d frameCount=%d valid=%#x enabledTracks=%#x", + t.bufferProvider, i, t.buffer.frameCount, valid, enabledTracks); // expect to crash } t.bufferProvider->getNextBuffer(&t.buffer, pts); diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index f757351..fd21fda 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -76,7 +76,6 @@ public: MAIN_BUFFER = 0x4002, AUX_BUFFER = 0x4003, DOWNMIX_TYPE = 0X4004, - FAST_INDEX = 0x4005, // for debugging only // for target RESAMPLE SAMPLE_RATE = 0x4100, // Configure sample rate conversion on this track name; // parameter 'value' is the new sample rate in Hz. @@ -201,8 +200,7 @@ private: int32_t sessionId; - int32_t fastIndex; - int32_t padding[1]; + int32_t padding[2]; // 16-byte boundary diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index b64c8fb..80e37ca 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -120,7 +120,6 @@ bool FastMixer::threadLoop() FastMixerState::Command command = next->mCommand; if (next != current) { - logWriter->logTimestamp(); logWriter->log("next != current"); // As soon as possible of learning of a new dump area, start using it @@ -302,19 +301,12 @@ bool FastMixer::threadLoop() const FastTrack* fastTrack = ¤t->mFastTracks[i]; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); - if (bufferProvider == NULL || - bufferProvider->getValid() != AudioBufferProvider::kValid) { - logWriter->logTimestamp(); - logWriter->logf("added invalid %#x", i); - } if (mixer != NULL) { // calling getTrackName with default channel mask and a random invalid // sessionId (no effects here) name = mixer->getTrackName(AUDIO_CHANNEL_OUT_STEREO, -555); ALOG_ASSERT(name >= 0); fastTrackNames[i] = name; - mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, - (void *) i); mixer->setBufferProvider(name, bufferProvider); mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, (void *) mixBuffer); @@ -330,31 +322,22 @@ bool FastMixer::threadLoop() generations[i] = fastTrack->mGeneration; } - // finally process (potentially) modified tracks; these use the same slot + // finally process modified tracks; these use the same slot // but may have a different buffer provider or volume provider unsigned modifiedTracks = currentTrackMask & previousTrackMask; if (modifiedTracks) { - logWriter->logf("pot. mod. 
%#x", modifiedTracks); + logWriter->logf("modified %#x", modifiedTracks); } - unsigned actuallyModifiedTracks = 0; while (modifiedTracks != 0) { i = __builtin_ctz(modifiedTracks); modifiedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; if (fastTrack->mGeneration != generations[i]) { - actuallyModifiedTracks |= 1 << i; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL); - if (bufferProvider == NULL || - bufferProvider->getValid() != AudioBufferProvider::kValid) { - logWriter->logTimestamp(); - logWriter->logf("modified invalid %#x", i); - } if (mixer != NULL) { name = fastTrackNames[i]; ALOG_ASSERT(name >= 0); - mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, - (void *) i); mixer->setBufferProvider(name, bufferProvider); if (fastTrack->mVolumeProvider == NULL) { mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, @@ -377,9 +360,6 @@ bool FastMixer::threadLoop() generations[i] = fastTrack->mGeneration; } } - if (actuallyModifiedTracks) { - logWriter->logf("act. mod. %#x", actuallyModifiedTracks); - } fastTracksGen = current->mFastTracksGen; @@ -443,12 +423,6 @@ bool FastMixer::threadLoop() } ftDump->mUnderruns = underruns; ftDump->mFramesReady = framesReady; - AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; - if (bufferProvider == NULL || - bufferProvider->getValid() != AudioBufferProvider::kValid) { - logWriter->logTimestamp(); - logWriter->logf("mixing invalid %#x", i); - } } int64_t pts; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 554532d..ba848d7 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1196,8 +1196,6 @@ sp AudioFlinger::PlaybackThread::createTrac { // scope for mLock Mutex::Autolock _l(mLock); - mNBLogWriter->logf("createTrack_l isFast=%d caller=%d", - (*flags & IAudioFlinger::TRACK_FAST) != 0, IPCThreadState::self()->getCallingPid()); // all tracks in same audio session must share the same routing strategy otherwise // conflicts will happen when tracks are moved from one output to another by audio policy @@ -1251,6 +1249,7 @@ Exit: if (status) { *status = lStatus; } + mNBLogWriter->logf("createTrack_l"); return track; } @@ -1318,8 +1317,7 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { - mNBLogWriter->logf("addTrack_l mName=%d mFastIndex=%d caller=%d", track->mName, - track->mFastIndex, IPCThreadState::self()->getCallingPid()); + mNBLogWriter->logf("addTrack_l mName=%d", track->mName); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1353,9 +1351,7 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("destroyTrack_l mName=%d mFastIndex=%d mClientPid=%d", track->mName, - track->mFastIndex, track->mClient != 0 ? 
track->mClient->pid() : -1); + mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1365,9 +1361,7 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("removeTrack_l mName=%d mFastIndex=%d clientPid=%d", track->mName, - track->mFastIndex, track->mClient != 0 ? track->mClient->pid() : -1); + mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -2845,19 +2839,11 @@ track_is_ready: ; block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; pauseAudioWatchdog = true; } + sq->end(); } if (sq != NULL) { - unsigned trackMask = state->mTrackMask; sq->end(didModify); - if (didModify) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("push trackMask=%#x block=%d", trackMask, block); - } sq->push(block); - if (didModify) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->log("pushed"); - } } #ifdef AUDIO_WATCHDOG if (pauseAudioWatchdog && mAudioWatchdog != 0) { @@ -2884,9 +2870,7 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("prepareTracks_l remove name=%u mFastIndex=%d", track->name(), - track->mFastIndex); + mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 8497788..fa1e336 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,7 +315,7 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; - static const size_t kLogSize = 4096; + static const size_t kLogSize = 512; sp mNBLogWriter; }; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index f679751..315cbbc 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -351,7 +351,6 @@ AudioFlinger::PlaybackThread::Track::Track( // Read the initial underruns because this field is never cleared by the fast mixer mObservedUnderruns = thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); - thread->mNBLogWriter->logf("new Track mName=%d mFastIndex=%d", mName, mFastIndex); } } ALOGV("Track constructor name %d, calling pid %d", mName, @@ -361,7 +360,6 @@ AudioFlinger::PlaybackThread::Track::Track( AudioFlinger::PlaybackThread::Track::~Track() { ALOGV("PlaybackThread::Track destructor"); - // FIXME not sure if safe to log here, would need a lock on thread to do it } void AudioFlinger::PlaybackThread::Track::destroy() @@ -571,8 +569,7 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("start mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("start mName=%d", mName); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track @@ -615,8 +612,7 @@ void 
AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("stop mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("stop mName=%d", mName); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -653,8 +649,7 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("pause mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("pause mName=%d", mName); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -678,8 +673,7 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("flush mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("flush mName=%d", mName); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From 9036ba85ef53cef442b1284a2447dd1b620d6b95 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 13 Feb 2013 14:46:45 -0800 Subject: Temporary additional logging to investigate bug The bug appears related to continuing to use an invalid buffer provider in fast mixer after track destruction, so focus the added logs in that area. Also includes a bug fix: was calling log in an unsafe place near Threads.cpp AudioFlinger::PlaybackThread::createTrack_l line 1250. Details: - include caller pid or client pid where appropriate - increase log buffer size - log mFastIndex when AudioMixer sees an invalid bufferProvider. - log both potentially modified and actually modified tracks in FastMixer. - fix benign bug where sq->end() was called more than once. - log StateQueue push() call and return. 
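
The sq->end() fix and the push logging mentioned above combine as in the sketch below, based on the Threads.cpp hunk that follows; sq, state, didModify, block and mNBLogWriter are the prepareTracks_l locals and thread members used there.

    if (sq != NULL) {
        unsigned trackMask = state->mTrackMask;
        sq->end(didModify);          // end() is now called exactly once
        if (didModify) {
            mNBLogWriter->logTimestamp();
            mNBLogWriter->logf("push trackMask=%#x block=%d", trackMask, block);
        }
        sq->push(block);             // hand the modified state to the fast mixer
        if (didModify) {
            mNBLogWriter->logTimestamp();
            mNBLogWriter->log("pushed");
        }
    }
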
Bug: 6490974 Change-Id: Iee7c8f40e20b6000cd8286c0ec6a14fff4a37af1 --- services/audioflinger/AudioFlinger.h | 2 +- services/audioflinger/AudioMixer.cpp | 8 ++++++-- services/audioflinger/AudioMixer.h | 4 +++- services/audioflinger/FastMixer.cpp | 30 ++++++++++++++++++++++++++++-- services/audioflinger/Threads.cpp | 28 ++++++++++++++++++++++------ services/audioflinger/Threads.h | 2 +- services/audioflinger/Tracks.cpp | 14 ++++++++++---- 7 files changed, 71 insertions(+), 17 deletions(-) diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index c3f08f6..593f131 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -227,7 +227,7 @@ public: sp newWriter_l(size_t size, const char *name); void unregisterWriter(const sp& writer); private: - static const size_t kLogMemorySize = 10 * 1024; + static const size_t kLogMemorySize = 20 * 1024; sp mLogMemoryDealer; // == 0 when NBLog is disabled public: diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 08325ad..30d645c 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -209,6 +209,7 @@ int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) t->mainBuffer = NULL; t->auxBuffer = NULL; t->downmixerBufferProvider = NULL; + t->fastIndex = -1; status_t status = initTrackDownmix(&mState.tracks[n], n, channelMask); if (status == OK) { @@ -455,6 +456,9 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) // for a specific track? or per mixer? /* case DOWNMIX_TYPE: break */ + case FAST_INDEX: + track.fastIndex = valueInt; + break; default: LOG_FATAL("bad param"); } @@ -1111,8 +1115,8 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) t.buffer.frameCount = state->frameCount; int valid = t.bufferProvider->getValid(); if (valid != AudioBufferProvider::kValid) { - ALOGE("invalid bufferProvider=%p name=%d frameCount=%d valid=%#x enabledTracks=%#x", - t.bufferProvider, i, t.buffer.frameCount, valid, enabledTracks); + ALOGE("invalid bufferProvider=%p name=%d fastIndex=%d frameCount=%d valid=%#x enabledTracks=%#x", + t.bufferProvider, i, t.fastIndex, t.buffer.frameCount, valid, enabledTracks); // expect to crash } t.bufferProvider->getNextBuffer(&t.buffer, pts); diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index fd21fda..f757351 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -76,6 +76,7 @@ public: MAIN_BUFFER = 0x4002, AUX_BUFFER = 0x4003, DOWNMIX_TYPE = 0X4004, + FAST_INDEX = 0x4005, // for debugging only // for target RESAMPLE SAMPLE_RATE = 0x4100, // Configure sample rate conversion on this track name; // parameter 'value' is the new sample rate in Hz. 
@@ -200,7 +201,8 @@ private: int32_t sessionId; - int32_t padding[2]; + int32_t fastIndex; + int32_t padding[1]; // 16-byte boundary diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 80e37ca..b64c8fb 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -120,6 +120,7 @@ bool FastMixer::threadLoop() FastMixerState::Command command = next->mCommand; if (next != current) { + logWriter->logTimestamp(); logWriter->log("next != current"); // As soon as possible of learning of a new dump area, start using it @@ -301,12 +302,19 @@ bool FastMixer::threadLoop() const FastTrack* fastTrack = ¤t->mFastTracks[i]; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("added invalid %#x", i); + } if (mixer != NULL) { // calling getTrackName with default channel mask and a random invalid // sessionId (no effects here) name = mixer->getTrackName(AUDIO_CHANNEL_OUT_STEREO, -555); ALOG_ASSERT(name >= 0); fastTrackNames[i] = name; + mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, + (void *) i); mixer->setBufferProvider(name, bufferProvider); mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, (void *) mixBuffer); @@ -322,22 +330,31 @@ bool FastMixer::threadLoop() generations[i] = fastTrack->mGeneration; } - // finally process modified tracks; these use the same slot + // finally process (potentially) modified tracks; these use the same slot // but may have a different buffer provider or volume provider unsigned modifiedTracks = currentTrackMask & previousTrackMask; if (modifiedTracks) { - logWriter->logf("modified %#x", modifiedTracks); + logWriter->logf("pot. mod. %#x", modifiedTracks); } + unsigned actuallyModifiedTracks = 0; while (modifiedTracks != 0) { i = __builtin_ctz(modifiedTracks); modifiedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; if (fastTrack->mGeneration != generations[i]) { + actuallyModifiedTracks |= 1 << i; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("modified invalid %#x", i); + } if (mixer != NULL) { name = fastTrackNames[i]; ALOG_ASSERT(name >= 0); + mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, + (void *) i); mixer->setBufferProvider(name, bufferProvider); if (fastTrack->mVolumeProvider == NULL) { mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, @@ -360,6 +377,9 @@ bool FastMixer::threadLoop() generations[i] = fastTrack->mGeneration; } } + if (actuallyModifiedTracks) { + logWriter->logf("act. mod. 
%#x", actuallyModifiedTracks); + } fastTracksGen = current->mFastTracksGen; @@ -423,6 +443,12 @@ bool FastMixer::threadLoop() } ftDump->mUnderruns = underruns; ftDump->mFramesReady = framesReady; + AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("mixing invalid %#x", i); + } } int64_t pts; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index ba848d7..554532d 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1196,6 +1196,8 @@ sp AudioFlinger::PlaybackThread::createTrac { // scope for mLock Mutex::Autolock _l(mLock); + mNBLogWriter->logf("createTrack_l isFast=%d caller=%d", + (*flags & IAudioFlinger::TRACK_FAST) != 0, IPCThreadState::self()->getCallingPid()); // all tracks in same audio session must share the same routing strategy otherwise // conflicts will happen when tracks are moved from one output to another by audio policy @@ -1249,7 +1251,6 @@ Exit: if (status) { *status = lStatus; } - mNBLogWriter->logf("createTrack_l"); return track; } @@ -1317,7 +1318,8 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { - mNBLogWriter->logf("addTrack_l mName=%d", track->mName); + mNBLogWriter->logf("addTrack_l mName=%d mFastIndex=%d caller=%d", track->mName, + track->mFastIndex, IPCThreadState::self()->getCallingPid()); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1351,7 +1353,9 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("destroyTrack_l mName=%d mFastIndex=%d mClientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? track->mClient->pid() : -1); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1361,7 +1365,9 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { - mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("removeTrack_l mName=%d mFastIndex=%d clientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? 
track->mClient->pid() : -1); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -2839,11 +2845,19 @@ track_is_ready: ; block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; pauseAudioWatchdog = true; } - sq->end(); } if (sq != NULL) { + unsigned trackMask = state->mTrackMask; sq->end(didModify); + if (didModify) { + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("push trackMask=%#x block=%d", trackMask, block); + } sq->push(block); + if (didModify) { + mNBLogWriter->logTimestamp(); + mNBLogWriter->log("pushed"); + } } #ifdef AUDIO_WATCHDOG if (pauseAudioWatchdog && mAudioWatchdog != 0) { @@ -2870,7 +2884,9 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("prepareTracks_l remove name=%u mFastIndex=%d", track->name(), + track->mFastIndex); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index fa1e336..8497788 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,7 +315,7 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; - static const size_t kLogSize = 512; + static const size_t kLogSize = 4096; sp mNBLogWriter; }; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 315cbbc..f679751 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -351,6 +351,7 @@ AudioFlinger::PlaybackThread::Track::Track( // Read the initial underruns because this field is never cleared by the fast mixer mObservedUnderruns = thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); + thread->mNBLogWriter->logf("new Track mName=%d mFastIndex=%d", mName, mFastIndex); } } ALOGV("Track constructor name %d, calling pid %d", mName, @@ -360,6 +361,7 @@ AudioFlinger::PlaybackThread::Track::Track( AudioFlinger::PlaybackThread::Track::~Track() { ALOGV("PlaybackThread::Track destructor"); + // FIXME not sure if safe to log here, would need a lock on thread to do it } void AudioFlinger::PlaybackThread::Track::destroy() @@ -569,7 +571,8 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("start mName=%d", mName); + thread->mNBLogWriter->logf("start mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track @@ -612,7 +615,8 @@ void AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("stop mName=%d", mName); + thread->mNBLogWriter->logf("stop mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -649,7 +653,8 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 
0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("pause mName=%d", mName); + thread->mNBLogWriter->logf("pause mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -673,7 +678,8 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("flush mName=%d", mName); + thread->mNBLogWriter->logf("flush mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From 639482c24c911b125398b31883ba6d55faebe28b Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 13 Feb 2013 14:46:45 -0800 Subject: Temporary additional logging to investigate bug The bug appears related to continuing to use an invalid buffer provider in fast mixer after track destruction, so focus the added logs in that area. Also includes a bug fix: was calling log in an unsafe place near Threads.cpp AudioFlinger::PlaybackThread::createTrack_l line 1250. Details: - include caller pid or client pid where appropriate - increase log buffer size - log mFastIndex when AudioMixer sees an invalid bufferProvider. - log both potentially modified and actually modified tracks in FastMixer. - fix benign bug where sq->end() was called more than once. - log StateQueue push() call and return. - increase StateQueue size from 4 to 8 entries - log mixer->enable(), bufferProvider, and currentTrackMask - log buffer provider addresses - increase fast mixer log buffer again - check logf format vs. argument list compatibility - add logging to AudioMixer - add checking of magic field in AudioMixer to detect overwrites - add bool AudioMixer::enabled() Bug: 6490974 Change-Id: I1f3f18aa62d9fbd35bc32285b669f5ba40efe28e --- include/media/nbaio/NBLog.h | 4 ++-- services/audioflinger/AudioFlinger.h | 2 +- services/audioflinger/AudioMixer.cpp | 46 +++++++++++++++++++++++++++++++++--- services/audioflinger/AudioMixer.h | 22 +++++++++++++++-- services/audioflinger/FastMixer.cpp | 45 +++++++++++++++++++++++++++++++++-- services/audioflinger/StateQueue.h | 2 +- services/audioflinger/Threads.cpp | 30 ++++++++++++++++++----- services/audioflinger/Threads.h | 4 ++-- services/audioflinger/Tracks.cpp | 14 +++++++---- 9 files changed, 146 insertions(+), 23 deletions(-) diff --git a/include/media/nbaio/NBLog.h b/include/media/nbaio/NBLog.h index 8fc417f..107ba66 100644 --- a/include/media/nbaio/NBLog.h +++ b/include/media/nbaio/NBLog.h @@ -115,7 +115,7 @@ public: virtual ~Writer() { } virtual void log(const char *string); - virtual void logf(const char *fmt, ...); + virtual void logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3))); virtual void logvf(const char *fmt, va_list ap); virtual void logTimestamp(); virtual void logTimestamp(const struct timespec& ts); @@ -149,7 +149,7 @@ public: LockedWriter(size_t size, void *shared); virtual void log(const char *string); - virtual void logf(const char *fmt, ...); + virtual void logf(const char *fmt, ...) 
__attribute__ ((format (printf, 2, 3))); virtual void logvf(const char *fmt, va_list ap); virtual void logTimestamp(); virtual void logTimestamp(const struct timespec& ts); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index c3f08f6..e8852fb 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -227,7 +227,7 @@ public: sp newWriter_l(size_t size, const char *name); void unregisterWriter(const sp& writer); private: - static const size_t kLogMemorySize = 10 * 1024; + static const size_t kLogMemorySize = 30 * 1024; sp mLogMemoryDealer; // == 0 when NBLog is disabled public: diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 08325ad..529d1b5 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -98,7 +98,7 @@ effect_descriptor_t AudioMixer::dwnmFxDesc; AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTracks) : mTrackNames(0), mConfiguredNames((maxNumTracks >= 32 ? 0 : 1 << maxNumTracks) - 1), - mSampleRate(sampleRate) + mSampleRate(sampleRate), mLog(&mDummyLog) { // AudioMixer is not yet capable of multi-channel beyond stereo COMPILE_TIME_ASSERT_FUNCTION_SCOPE(2 == MAX_NUM_CHANNELS); @@ -122,6 +122,7 @@ AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTr mState.hook = process__nop; mState.outputTemp = NULL; mState.resampleTemp = NULL; + mState.mLog = &mDummyLog; // mState.reserved // FIXME Most of the following initialization is probably redundant since @@ -131,6 +132,7 @@ AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTr for (unsigned i=0 ; i < MAX_NUM_TRACKS ; i++) { t->resampler = NULL; t->downmixerBufferProvider = NULL; + t->magic = track_t::kMagic; t++; } @@ -169,6 +171,12 @@ AudioMixer::~AudioMixer() delete [] mState.resampleTemp; } +void AudioMixer::setLog(NBLog::Writer *log) +{ + mLog = log; + mState.mLog = log; +} + int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) { uint32_t names = (~mTrackNames) & mConfiguredNames; @@ -209,9 +217,12 @@ int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) t->mainBuffer = NULL; t->auxBuffer = NULL; t->downmixerBufferProvider = NULL; + t->fastIndex = -1; + // t->magic unchanged status_t status = initTrackDownmix(&mState.tracks[n], n, channelMask); if (status == OK) { + mLog->logf("getTrackName %d", n); return TRACK0 + n; } ALOGE("AudioMixer::getTrackName(0x%x) failed, error preparing track for downmix", @@ -366,9 +377,11 @@ void AudioMixer::deleteTrackName(int name) { ALOGV("AudioMixer::deleteTrackName(%d)", name); name -= TRACK0; + mLog->logf("deleteTrackName %d", name); ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); ALOGV("deleteTrackName(%d)", name); track_t& track(mState.tracks[ name ]); + track.checkMagic(); if (track.enabled) { track.enabled = false; invalidateState(1<logf("enable %d", name); track.enabled = true; ALOGV("enable(%d)", name); invalidateState(1 << name); @@ -400,19 +415,32 @@ void AudioMixer::disable(int name) name -= TRACK0; ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); track_t& track = mState.tracks[name]; + track.checkMagic(); if (track.enabled) { + mLog->logf("disable %d", name); track.enabled = false; ALOGV("disable(%d)", name); invalidateState(1 << name); } } +bool AudioMixer::enabled(int name) +{ + name -= TRACK0; + ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad 
track name %d", name); + track_t& track = mState.tracks[name]; + track.checkMagic(); + + return track.enabled; +} + void AudioMixer::setParameter(int name, int target, int param, void *value) { name -= TRACK0; ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); track_t& track = mState.tracks[name]; + track.checkMagic(); int valueInt = (int)value; int32_t *valueBuf = (int32_t *)value; @@ -455,6 +483,9 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) // for a specific track? or per mixer? /* case DOWNMIX_TYPE: break */ + case FAST_INDEX: + track.fastIndex = valueInt; + break; default: LOG_FATAL("bad param"); } @@ -540,6 +571,7 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) bool AudioMixer::track_t::setResampler(uint32_t value, uint32_t devSampleRate) { + checkMagic(); if (value != devSampleRate || resampler != NULL) { if (sampleRate != value) { sampleRate = value; @@ -572,6 +604,7 @@ bool AudioMixer::track_t::setResampler(uint32_t value, uint32_t devSampleRate) inline void AudioMixer::track_t::adjustVolumeRamp(bool aux) { + checkMagic(); for (uint32_t i=0 ; i0) && (((prevVolume[i]+volumeInc[i])>>16) >= volume[i])) || ((volumeInc[i]<0) && (((prevVolume[i]+volumeInc[i])>>16) <= volume[i]))) { @@ -600,8 +633,10 @@ size_t AudioMixer::getUnreleasedFrames(int name) const void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider) { name -= TRACK0; + mLog->logf("bp %d-%p", name, bufferProvider); ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); + mState.tracks[name].checkMagic(); if (mState.tracks[name].downmixerBufferProvider != NULL) { // update required? if (mState.tracks[name].downmixerBufferProvider->mTrackBufferProvider != bufferProvider) { @@ -623,6 +658,9 @@ void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider void AudioMixer::process(int64_t pts) { + if (mState.needsChanged) { + mLog->logf("process needs=%#x", mState.needsChanged); + } mState.hook(&mState, pts); } @@ -647,6 +685,7 @@ void AudioMixer::process__validate(state_t* state, int64_t pts) } state->enabledTracks &= ~disabled; state->enabledTracks |= enabled; + state->mLog->logf("process_validate ena=%#x", state->enabledTracks); // compute everything we need... 
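The magic field stamped in the constructor and verified by checkMagic() in the hot paths above is a canary check: if some other code overwrites a track_t, the corruption is reported close to where it is first observed instead of surfacing as an unexplained crash later. A self-contained sketch of the idea, using simplified types rather than the real track_t:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Canary-style overwrite detection, loosely modelled on track_t::checkMagic().
    struct Track {
        static constexpr uint32_t kMagic = 0x54637281;  // constant taken from the patch
        int32_t  fastIndex = -1;
        uint32_t magic     = kMagic;                    // stamped at construction

        void checkMagic(const char* where) const {
            if (magic != kMagic) {
                std::fprintf(stderr, "%s: magic=%#x fastIndex=%d -- struct overwritten?\n",
                             where, (unsigned)magic, fastIndex);
            }
        }
    };

    int main() {
        Track t;
        t.checkMagic("before");              // silent: canary intact
        std::memset(&t, 0xEE, sizeof(t));    // simulate a stray write over the struct
        t.checkMagic("after");               // now reports the corruption
        return 0;
    }

The check is one load and one compare, which is why the patch can afford to run it from setParameter(), setResampler() and adjustVolumeRamp().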
int countActiveTracks = 0; @@ -1103,6 +1142,7 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) // acquire each track's buffer uint32_t enabledTracks = state->enabledTracks; + state->mLog->logf("process_gNR ena=%#x", enabledTracks); uint32_t e0 = enabledTracks; while (e0) { const int i = 31 - __builtin_clz(e0); @@ -1111,8 +1151,8 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) t.buffer.frameCount = state->frameCount; int valid = t.bufferProvider->getValid(); if (valid != AudioBufferProvider::kValid) { - ALOGE("invalid bufferProvider=%p name=%d frameCount=%d valid=%#x enabledTracks=%#x", - t.bufferProvider, i, t.buffer.frameCount, valid, enabledTracks); + ALOGE("invalid bufferProvider=%p name=%d fastIndex=%d frameCount=%d valid=%#x enabledTracks=%#x", + t.bufferProvider, i, t.fastIndex, t.buffer.frameCount, valid, enabledTracks); // expect to crash } t.bufferProvider->getNextBuffer(&t.buffer, pts); diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index fd21fda..f0ccd8e 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -28,6 +28,7 @@ #include #include +#include namespace android { @@ -76,6 +77,7 @@ public: MAIN_BUFFER = 0x4002, AUX_BUFFER = 0x4003, DOWNMIX_TYPE = 0X4004, + FAST_INDEX = 0x4005, // for debugging only // for target RESAMPLE SAMPLE_RATE = 0x4100, // Configure sample rate conversion on this track name; // parameter 'value' is the new sample rate in Hz. @@ -106,6 +108,7 @@ public: // Enable or disable an allocated track by name void enable(int name); void disable(int name); + bool enabled(int name); void setParameter(int name, int target, int param, void *value); @@ -200,7 +203,10 @@ private: int32_t sessionId; - int32_t padding[2]; + int32_t fastIndex; + int32_t magic; + static const int kMagic = 0x54637281; + //int32_t padding[1]; // 16-byte boundary @@ -210,6 +216,12 @@ private: void adjustVolumeRamp(bool aux); size_t getUnreleasedFrames() const { return resampler != NULL ? resampler->getUnreleasedFrames() : 0; }; + void checkMagic() { + if (magic != kMagic) { + ALOGE("magic=%#x fastIndex=%d", magic, fastIndex); + } + } + }; // pad to 32-bytes to fill cache line @@ -220,7 +232,8 @@ private: void (*hook)(state_t* state, int64_t pts); // one of process__*, never NULL int32_t *outputTemp; int32_t *resampleTemp; - int32_t reserved[2]; + NBLog::Writer* mLog; + int32_t reserved[1]; // FIXME allocate dynamically to save some memory when maxNumTracks < MAX_NUM_TRACKS track_t tracks[MAX_NUM_TRACKS]; __attribute__((aligned(32))); }; @@ -247,6 +260,11 @@ private: const uint32_t mSampleRate; + NBLog::Writer* mLog; + NBLog::Writer mDummyLog; +public: + void setLog(NBLog::Writer* log); +private: state_t mState __attribute__((aligned(32))); // effect descriptor for the downmixer used by the mixer diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 80e37ca..5811771 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -120,12 +120,16 @@ bool FastMixer::threadLoop() FastMixerState::Command command = next->mCommand; if (next != current) { + logWriter->logTimestamp(); logWriter->log("next != current"); // As soon as possible of learning of a new dump area, start using it dumpState = next->mDumpState != NULL ? next->mDumpState : &dummyDumpState; teeSink = next->mTeeSink; logWriter = next->mNBLogWriter != NULL ? 
next->mNBLogWriter : &dummyLogWriter; + if (mixer != NULL) { + mixer->setLog(logWriter); + } // We want to always have a valid reference to the previous (non-idle) state. // However, the state queue only guarantees access to current and previous states. @@ -300,13 +304,21 @@ bool FastMixer::threadLoop() addedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + logWriter->logf("bp %d i=%d %p", __LINE__, i, bufferProvider); ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("added invalid %#x", i); + } if (mixer != NULL) { // calling getTrackName with default channel mask and a random invalid // sessionId (no effects here) name = mixer->getTrackName(AUDIO_CHANNEL_OUT_STEREO, -555); ALOG_ASSERT(name >= 0); fastTrackNames[i] = name; + mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, + (void *) i); mixer->setBufferProvider(name, bufferProvider); mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, (void *) mixBuffer); @@ -317,27 +329,40 @@ bool FastMixer::threadLoop() } mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK, (void *) fastTrack->mChannelMask); + if (!mixer->enabled(name)) { + logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); + } mixer->enable(name); } generations[i] = fastTrack->mGeneration; } - // finally process modified tracks; these use the same slot + // finally process (potentially) modified tracks; these use the same slot // but may have a different buffer provider or volume provider unsigned modifiedTracks = currentTrackMask & previousTrackMask; if (modifiedTracks) { - logWriter->logf("modified %#x", modifiedTracks); + logWriter->logf("pot. mod. %#x", modifiedTracks); } + unsigned actuallyModifiedTracks = 0; while (modifiedTracks != 0) { i = __builtin_ctz(modifiedTracks); modifiedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; if (fastTrack->mGeneration != generations[i]) { + actuallyModifiedTracks |= 1 << i; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + logWriter->logf("bp %d i=%d %p", __LINE__, i, bufferProvider); ALOG_ASSERT(bufferProvider != NULL); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("modified invalid %#x", i); + } if (mixer != NULL) { name = fastTrackNames[i]; ALOG_ASSERT(name >= 0); + mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, + (void *) i); mixer->setBufferProvider(name, bufferProvider); if (fastTrack->mVolumeProvider == NULL) { mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, @@ -360,6 +385,9 @@ bool FastMixer::threadLoop() generations[i] = fastTrack->mGeneration; } } + if (actuallyModifiedTracks) { + logWriter->logf("act. mod. 
%#x", actuallyModifiedTracks); + } fastTracksGen = current->mFastTracksGen; @@ -377,6 +405,7 @@ bool FastMixer::threadLoop() ALOG_ASSERT(mixBuffer != NULL); // for each track, update volume and check for underrun unsigned currentTrackMask = current->mTrackMask; + logWriter->logf("ctm %#x", currentTrackMask); while (currentTrackMask != 0) { i = __builtin_ctz(currentTrackMask); currentTrackMask &= ~(1 << i); @@ -414,15 +443,27 @@ bool FastMixer::threadLoop() // allow mixing partial buffer underruns.mBitFields.mPartial++; underruns.mBitFields.mMostRecent = UNDERRUN_PARTIAL; + if (!mixer->enabled(name)) { + logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); + } mixer->enable(name); } } else { underruns.mBitFields.mFull++; underruns.mBitFields.mMostRecent = UNDERRUN_FULL; + if (!mixer->enabled(name)) { + logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); + } mixer->enable(name); } ftDump->mUnderruns = underruns; ftDump->mFramesReady = framesReady; + AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("mixing invalid %#x", i); + } } int64_t pts; diff --git a/services/audioflinger/StateQueue.h b/services/audioflinger/StateQueue.h index e33b3c6..313330f 100644 --- a/services/audioflinger/StateQueue.h +++ b/services/audioflinger/StateQueue.h @@ -174,7 +174,7 @@ public: #endif private: - static const unsigned kN = 4; // values < 4 are not supported by this code + static const unsigned kN = 8; // values < 4 are not supported by this code T mStates[kN]; // written by mutator, read by observer // "volatile" is meaningless with SMP, but here it indicates that we're using atomic ops diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index ba848d7..a19c5f4 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1196,6 +1196,8 @@ sp AudioFlinger::PlaybackThread::createTrac { // scope for mLock Mutex::Autolock _l(mLock); + mNBLogWriter->logf("createTrack_l isFast=%d caller=%d", + (*flags & IAudioFlinger::TRACK_FAST) != 0, IPCThreadState::self()->getCallingPid()); // all tracks in same audio session must share the same routing strategy otherwise // conflicts will happen when tracks are moved from one output to another by audio policy @@ -1249,7 +1251,6 @@ Exit: if (status) { *status = lStatus; } - mNBLogWriter->logf("createTrack_l"); return track; } @@ -1317,7 +1318,8 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { - mNBLogWriter->logf("addTrack_l mName=%d", track->mName); + mNBLogWriter->logf("addTrack_l mName=%d mFastIndex=%d caller=%d", track->mName, + track->mFastIndex, IPCThreadState::self()->getCallingPid()); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1351,7 +1353,9 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("destroyTrack_l mName=%d mFastIndex=%d mClientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? 
track->mClient->pid() : -1); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1361,7 +1365,9 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { - mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("removeTrack_l mName=%d mFastIndex=%d clientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? track->mClient->pid() : -1); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -2149,6 +2155,7 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud FastTrack *fastTrack = &state->mFastTracks[0]; // wrap the source side of the MonoPipe to make it an AudioBufferProvider fastTrack->mBufferProvider = new SourceAudioBufferProvider(new MonoPipeReader(monoPipe)); + mNBLogWriter->logf("ft0 bp %p", fastTrack->mBufferProvider); fastTrack->mVolumeProvider = NULL; fastTrack->mGeneration++; state->mFastTracksGen++; @@ -2553,6 +2560,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // was it previously inactive? if (!(state->mTrackMask & (1 << j))) { ExtendedAudioBufferProvider *eabp = track; + mNBLogWriter->logf("j=%d bp %p", j, eabp); VolumeProvider *vp = track; fastTrack->mBufferProvider = eabp; fastTrack->mVolumeProvider = vp; @@ -2839,11 +2847,19 @@ track_is_ready: ; block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; pauseAudioWatchdog = true; } - sq->end(); } if (sq != NULL) { + unsigned trackMask = state->mTrackMask; sq->end(didModify); + if (didModify) { + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("push trackMask=%#x block=%d", trackMask, block); + } sq->push(block); + if (didModify) { + mNBLogWriter->logTimestamp(); + mNBLogWriter->log("pushed"); + } } #ifdef AUDIO_WATCHDOG if (pauseAudioWatchdog && mAudioWatchdog != 0) { @@ -2870,7 +2886,9 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("prepareTracks_l remove name=%u mFastIndex=%d", track->name(), + track->mFastIndex); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index fa1e336..3cd5c2c 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,7 +315,7 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; - static const size_t kLogSize = 512; + static const size_t kLogSize = 4096; sp mNBLogWriter; }; @@ -546,7 +546,7 @@ private: sp mTeeSink; sp mTeeSource; uint32_t mScreenState; // cached copy of gScreenState - static const size_t kFastMixerLogSize = 8 * 1024; + static const size_t kFastMixerLogSize = 16 * 1024; sp mFastMixerNBLogWriter; public: virtual bool hasFastMixer() const = 0; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 315cbbc..f679751 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -351,6 +351,7 @@ AudioFlinger::PlaybackThread::Track::Track( // Read the initial underruns because this field is never cleared by the fast mixer mObservedUnderruns = 
thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); + thread->mNBLogWriter->logf("new Track mName=%d mFastIndex=%d", mName, mFastIndex); } } ALOGV("Track constructor name %d, calling pid %d", mName, @@ -360,6 +361,7 @@ AudioFlinger::PlaybackThread::Track::Track( AudioFlinger::PlaybackThread::Track::~Track() { ALOGV("PlaybackThread::Track destructor"); + // FIXME not sure if safe to log here, would need a lock on thread to do it } void AudioFlinger::PlaybackThread::Track::destroy() @@ -569,7 +571,8 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("start mName=%d", mName); + thread->mNBLogWriter->logf("start mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track @@ -612,7 +615,8 @@ void AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("stop mName=%d", mName); + thread->mNBLogWriter->logf("stop mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -649,7 +653,8 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("pause mName=%d", mName); + thread->mNBLogWriter->logf("pause mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -673,7 +678,8 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("flush mName=%d", mName); + thread->mNBLogWriter->logf("flush mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From e186b51e0a9834b287d7a509e960eaf1b688db75 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 14 Feb 2013 23:57:02 +0000 Subject: Revert "Temporary additional logging to investigate bug" This reverts commit 639482c24c911b125398b31883ba6d55faebe28b Change-Id: I11f2829072ab11e18b0663024f27bf31192f1d39 --- include/media/nbaio/NBLog.h | 4 ++-- services/audioflinger/AudioFlinger.h | 2 +- services/audioflinger/AudioMixer.cpp | 46 +++--------------------------------- services/audioflinger/AudioMixer.h | 22 ++--------------- services/audioflinger/FastMixer.cpp | 45 ++--------------------------------- services/audioflinger/StateQueue.h | 2 +- services/audioflinger/Threads.cpp | 30 +++++------------------ services/audioflinger/Threads.h | 4 ++-- services/audioflinger/Tracks.cpp | 14 ++++------- 9 files changed, 23 insertions(+), 146 deletions(-) diff --git a/include/media/nbaio/NBLog.h b/include/media/nbaio/NBLog.h index 107ba66..8fc417f 100644 --- a/include/media/nbaio/NBLog.h +++ b/include/media/nbaio/NBLog.h @@ 
-115,7 +115,7 @@ public: virtual ~Writer() { } virtual void log(const char *string); - virtual void logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3))); + virtual void logf(const char *fmt, ...); virtual void logvf(const char *fmt, va_list ap); virtual void logTimestamp(); virtual void logTimestamp(const struct timespec& ts); @@ -149,7 +149,7 @@ public: LockedWriter(size_t size, void *shared); virtual void log(const char *string); - virtual void logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3))); + virtual void logf(const char *fmt, ...); virtual void logvf(const char *fmt, va_list ap); virtual void logTimestamp(); virtual void logTimestamp(const struct timespec& ts); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index e8852fb..c3f08f6 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -227,7 +227,7 @@ public: sp newWriter_l(size_t size, const char *name); void unregisterWriter(const sp& writer); private: - static const size_t kLogMemorySize = 30 * 1024; + static const size_t kLogMemorySize = 10 * 1024; sp mLogMemoryDealer; // == 0 when NBLog is disabled public: diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 529d1b5..08325ad 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -98,7 +98,7 @@ effect_descriptor_t AudioMixer::dwnmFxDesc; AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTracks) : mTrackNames(0), mConfiguredNames((maxNumTracks >= 32 ? 0 : 1 << maxNumTracks) - 1), - mSampleRate(sampleRate), mLog(&mDummyLog) + mSampleRate(sampleRate) { // AudioMixer is not yet capable of multi-channel beyond stereo COMPILE_TIME_ASSERT_FUNCTION_SCOPE(2 == MAX_NUM_CHANNELS); @@ -122,7 +122,6 @@ AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTr mState.hook = process__nop; mState.outputTemp = NULL; mState.resampleTemp = NULL; - mState.mLog = &mDummyLog; // mState.reserved // FIXME Most of the following initialization is probably redundant since @@ -132,7 +131,6 @@ AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTr for (unsigned i=0 ; i < MAX_NUM_TRACKS ; i++) { t->resampler = NULL; t->downmixerBufferProvider = NULL; - t->magic = track_t::kMagic; t++; } @@ -171,12 +169,6 @@ AudioMixer::~AudioMixer() delete [] mState.resampleTemp; } -void AudioMixer::setLog(NBLog::Writer *log) -{ - mLog = log; - mState.mLog = log; -} - int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) { uint32_t names = (~mTrackNames) & mConfiguredNames; @@ -217,12 +209,9 @@ int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) t->mainBuffer = NULL; t->auxBuffer = NULL; t->downmixerBufferProvider = NULL; - t->fastIndex = -1; - // t->magic unchanged status_t status = initTrackDownmix(&mState.tracks[n], n, channelMask); if (status == OK) { - mLog->logf("getTrackName %d", n); return TRACK0 + n; } ALOGE("AudioMixer::getTrackName(0x%x) failed, error preparing track for downmix", @@ -377,11 +366,9 @@ void AudioMixer::deleteTrackName(int name) { ALOGV("AudioMixer::deleteTrackName(%d)", name); name -= TRACK0; - mLog->logf("deleteTrackName %d", name); ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); ALOGV("deleteTrackName(%d)", name); track_t& track(mState.tracks[ name ]); - track.checkMagic(); if (track.enabled) { track.enabled = false; invalidateState(1<logf("enable %d", 
name); track.enabled = true; ALOGV("enable(%d)", name); invalidateState(1 << name); @@ -415,32 +400,19 @@ void AudioMixer::disable(int name) name -= TRACK0; ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); track_t& track = mState.tracks[name]; - track.checkMagic(); if (track.enabled) { - mLog->logf("disable %d", name); track.enabled = false; ALOGV("disable(%d)", name); invalidateState(1 << name); } } -bool AudioMixer::enabled(int name) -{ - name -= TRACK0; - ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); - track_t& track = mState.tracks[name]; - track.checkMagic(); - - return track.enabled; -} - void AudioMixer::setParameter(int name, int target, int param, void *value) { name -= TRACK0; ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); track_t& track = mState.tracks[name]; - track.checkMagic(); int valueInt = (int)value; int32_t *valueBuf = (int32_t *)value; @@ -483,9 +455,6 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) // for a specific track? or per mixer? /* case DOWNMIX_TYPE: break */ - case FAST_INDEX: - track.fastIndex = valueInt; - break; default: LOG_FATAL("bad param"); } @@ -571,7 +540,6 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) bool AudioMixer::track_t::setResampler(uint32_t value, uint32_t devSampleRate) { - checkMagic(); if (value != devSampleRate || resampler != NULL) { if (sampleRate != value) { sampleRate = value; @@ -604,7 +572,6 @@ bool AudioMixer::track_t::setResampler(uint32_t value, uint32_t devSampleRate) inline void AudioMixer::track_t::adjustVolumeRamp(bool aux) { - checkMagic(); for (uint32_t i=0 ; i0) && (((prevVolume[i]+volumeInc[i])>>16) >= volume[i])) || ((volumeInc[i]<0) && (((prevVolume[i]+volumeInc[i])>>16) <= volume[i]))) { @@ -633,10 +600,8 @@ size_t AudioMixer::getUnreleasedFrames(int name) const void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider) { name -= TRACK0; - mLog->logf("bp %d-%p", name, bufferProvider); ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); - mState.tracks[name].checkMagic(); if (mState.tracks[name].downmixerBufferProvider != NULL) { // update required? if (mState.tracks[name].downmixerBufferProvider->mTrackBufferProvider != bufferProvider) { @@ -658,9 +623,6 @@ void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider void AudioMixer::process(int64_t pts) { - if (mState.needsChanged) { - mLog->logf("process needs=%#x", mState.needsChanged); - } mState.hook(&mState, pts); } @@ -685,7 +647,6 @@ void AudioMixer::process__validate(state_t* state, int64_t pts) } state->enabledTracks &= ~disabled; state->enabledTracks |= enabled; - state->mLog->logf("process_validate ena=%#x", state->enabledTracks); // compute everything we need... 
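Both the FastMixer loops and the mixer's generic process hook in these hunks walk track sets as 32-bit masks: __builtin_ctz() peels off the lowest set bit (addedTracks, modifiedTracks, currentTrackMask), while process__genericNoResampling takes the highest with 31 - __builtin_clz(). A standalone sketch of the idiom; it assumes the GCC/Clang builtins, as the AOSP build does:

    #include <cstdio>

    // Visit every set bit of a track mask, lowest slot first, the way
    // FastMixer iterates addedTracks / modifiedTracks above.
    static void forEachTrackLowFirst(unsigned mask) {
        while (mask != 0) {
            int i = __builtin_ctz(mask);   // index of the lowest set bit (mask != 0 here)
            mask &= ~(1u << i);            // clear it and continue
            std::printf("slot %d\n", i);
        }
    }

    int main() {
        // e.g. slots 0, 2 and 5 are active
        forEachTrackLowFirst((1u << 0) | (1u << 2) | (1u << 5));
        // highest-first variant, as in process__genericNoResampling:
        //     int i = 31 - __builtin_clz(mask);
        return 0;
    }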
int countActiveTracks = 0; @@ -1142,7 +1103,6 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) // acquire each track's buffer uint32_t enabledTracks = state->enabledTracks; - state->mLog->logf("process_gNR ena=%#x", enabledTracks); uint32_t e0 = enabledTracks; while (e0) { const int i = 31 - __builtin_clz(e0); @@ -1151,8 +1111,8 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) t.buffer.frameCount = state->frameCount; int valid = t.bufferProvider->getValid(); if (valid != AudioBufferProvider::kValid) { - ALOGE("invalid bufferProvider=%p name=%d fastIndex=%d frameCount=%d valid=%#x enabledTracks=%#x", - t.bufferProvider, i, t.fastIndex, t.buffer.frameCount, valid, enabledTracks); + ALOGE("invalid bufferProvider=%p name=%d frameCount=%d valid=%#x enabledTracks=%#x", + t.bufferProvider, i, t.buffer.frameCount, valid, enabledTracks); // expect to crash } t.bufferProvider->getNextBuffer(&t.buffer, pts); diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index f0ccd8e..fd21fda 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -28,7 +28,6 @@ #include #include -#include namespace android { @@ -77,7 +76,6 @@ public: MAIN_BUFFER = 0x4002, AUX_BUFFER = 0x4003, DOWNMIX_TYPE = 0X4004, - FAST_INDEX = 0x4005, // for debugging only // for target RESAMPLE SAMPLE_RATE = 0x4100, // Configure sample rate conversion on this track name; // parameter 'value' is the new sample rate in Hz. @@ -108,7 +106,6 @@ public: // Enable or disable an allocated track by name void enable(int name); void disable(int name); - bool enabled(int name); void setParameter(int name, int target, int param, void *value); @@ -203,10 +200,7 @@ private: int32_t sessionId; - int32_t fastIndex; - int32_t magic; - static const int kMagic = 0x54637281; - //int32_t padding[1]; + int32_t padding[2]; // 16-byte boundary @@ -216,12 +210,6 @@ private: void adjustVolumeRamp(bool aux); size_t getUnreleasedFrames() const { return resampler != NULL ? resampler->getUnreleasedFrames() : 0; }; - void checkMagic() { - if (magic != kMagic) { - ALOGE("magic=%#x fastIndex=%d", magic, fastIndex); - } - } - }; // pad to 32-bytes to fill cache line @@ -232,8 +220,7 @@ private: void (*hook)(state_t* state, int64_t pts); // one of process__*, never NULL int32_t *outputTemp; int32_t *resampleTemp; - NBLog::Writer* mLog; - int32_t reserved[1]; + int32_t reserved[2]; // FIXME allocate dynamically to save some memory when maxNumTracks < MAX_NUM_TRACKS track_t tracks[MAX_NUM_TRACKS]; __attribute__((aligned(32))); }; @@ -260,11 +247,6 @@ private: const uint32_t mSampleRate; - NBLog::Writer* mLog; - NBLog::Writer mDummyLog; -public: - void setLog(NBLog::Writer* log); -private: state_t mState __attribute__((aligned(32))); // effect descriptor for the downmixer used by the mixer diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 5811771..80e37ca 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -120,16 +120,12 @@ bool FastMixer::threadLoop() FastMixerState::Command command = next->mCommand; if (next != current) { - logWriter->logTimestamp(); logWriter->log("next != current"); // As soon as possible of learning of a new dump area, start using it dumpState = next->mDumpState != NULL ? next->mDumpState : &dummyDumpState; teeSink = next->mTeeSink; logWriter = next->mNBLogWriter != NULL ? 
next->mNBLogWriter : &dummyLogWriter; - if (mixer != NULL) { - mixer->setLog(logWriter); - } // We want to always have a valid reference to the previous (non-idle) state. // However, the state queue only guarantees access to current and previous states. @@ -304,21 +300,13 @@ bool FastMixer::threadLoop() addedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; - logWriter->logf("bp %d i=%d %p", __LINE__, i, bufferProvider); ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); - if (bufferProvider == NULL || - bufferProvider->getValid() != AudioBufferProvider::kValid) { - logWriter->logTimestamp(); - logWriter->logf("added invalid %#x", i); - } if (mixer != NULL) { // calling getTrackName with default channel mask and a random invalid // sessionId (no effects here) name = mixer->getTrackName(AUDIO_CHANNEL_OUT_STEREO, -555); ALOG_ASSERT(name >= 0); fastTrackNames[i] = name; - mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, - (void *) i); mixer->setBufferProvider(name, bufferProvider); mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, (void *) mixBuffer); @@ -329,40 +317,27 @@ bool FastMixer::threadLoop() } mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK, (void *) fastTrack->mChannelMask); - if (!mixer->enabled(name)) { - logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); - } mixer->enable(name); } generations[i] = fastTrack->mGeneration; } - // finally process (potentially) modified tracks; these use the same slot + // finally process modified tracks; these use the same slot // but may have a different buffer provider or volume provider unsigned modifiedTracks = currentTrackMask & previousTrackMask; if (modifiedTracks) { - logWriter->logf("pot. mod. %#x", modifiedTracks); + logWriter->logf("modified %#x", modifiedTracks); } - unsigned actuallyModifiedTracks = 0; while (modifiedTracks != 0) { i = __builtin_ctz(modifiedTracks); modifiedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; if (fastTrack->mGeneration != generations[i]) { - actuallyModifiedTracks |= 1 << i; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; - logWriter->logf("bp %d i=%d %p", __LINE__, i, bufferProvider); ALOG_ASSERT(bufferProvider != NULL); - if (bufferProvider == NULL || - bufferProvider->getValid() != AudioBufferProvider::kValid) { - logWriter->logTimestamp(); - logWriter->logf("modified invalid %#x", i); - } if (mixer != NULL) { name = fastTrackNames[i]; ALOG_ASSERT(name >= 0); - mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, - (void *) i); mixer->setBufferProvider(name, bufferProvider); if (fastTrack->mVolumeProvider == NULL) { mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, @@ -385,9 +360,6 @@ bool FastMixer::threadLoop() generations[i] = fastTrack->mGeneration; } } - if (actuallyModifiedTracks) { - logWriter->logf("act. mod. 
%#x", actuallyModifiedTracks); - } fastTracksGen = current->mFastTracksGen; @@ -405,7 +377,6 @@ bool FastMixer::threadLoop() ALOG_ASSERT(mixBuffer != NULL); // for each track, update volume and check for underrun unsigned currentTrackMask = current->mTrackMask; - logWriter->logf("ctm %#x", currentTrackMask); while (currentTrackMask != 0) { i = __builtin_ctz(currentTrackMask); currentTrackMask &= ~(1 << i); @@ -443,27 +414,15 @@ bool FastMixer::threadLoop() // allow mixing partial buffer underruns.mBitFields.mPartial++; underruns.mBitFields.mMostRecent = UNDERRUN_PARTIAL; - if (!mixer->enabled(name)) { - logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); - } mixer->enable(name); } } else { underruns.mBitFields.mFull++; underruns.mBitFields.mMostRecent = UNDERRUN_FULL; - if (!mixer->enabled(name)) { - logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); - } mixer->enable(name); } ftDump->mUnderruns = underruns; ftDump->mFramesReady = framesReady; - AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; - if (bufferProvider == NULL || - bufferProvider->getValid() != AudioBufferProvider::kValid) { - logWriter->logTimestamp(); - logWriter->logf("mixing invalid %#x", i); - } } int64_t pts; diff --git a/services/audioflinger/StateQueue.h b/services/audioflinger/StateQueue.h index 313330f..e33b3c6 100644 --- a/services/audioflinger/StateQueue.h +++ b/services/audioflinger/StateQueue.h @@ -174,7 +174,7 @@ public: #endif private: - static const unsigned kN = 8; // values < 4 are not supported by this code + static const unsigned kN = 4; // values < 4 are not supported by this code T mStates[kN]; // written by mutator, read by observer // "volatile" is meaningless with SMP, but here it indicates that we're using atomic ops diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index a19c5f4..ba848d7 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1196,8 +1196,6 @@ sp AudioFlinger::PlaybackThread::createTrac { // scope for mLock Mutex::Autolock _l(mLock); - mNBLogWriter->logf("createTrack_l isFast=%d caller=%d", - (*flags & IAudioFlinger::TRACK_FAST) != 0, IPCThreadState::self()->getCallingPid()); // all tracks in same audio session must share the same routing strategy otherwise // conflicts will happen when tracks are moved from one output to another by audio policy @@ -1251,6 +1249,7 @@ Exit: if (status) { *status = lStatus; } + mNBLogWriter->logf("createTrack_l"); return track; } @@ -1318,8 +1317,7 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { - mNBLogWriter->logf("addTrack_l mName=%d mFastIndex=%d caller=%d", track->mName, - track->mFastIndex, IPCThreadState::self()->getCallingPid()); + mNBLogWriter->logf("addTrack_l mName=%d", track->mName); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1353,9 +1351,7 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("destroyTrack_l mName=%d mFastIndex=%d mClientPid=%d", track->mName, - track->mFastIndex, track->mClient != 0 ? 
track->mClient->pid() : -1); + mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1365,9 +1361,7 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("removeTrack_l mName=%d mFastIndex=%d clientPid=%d", track->mName, - track->mFastIndex, track->mClient != 0 ? track->mClient->pid() : -1); + mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -2155,7 +2149,6 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud FastTrack *fastTrack = &state->mFastTracks[0]; // wrap the source side of the MonoPipe to make it an AudioBufferProvider fastTrack->mBufferProvider = new SourceAudioBufferProvider(new MonoPipeReader(monoPipe)); - mNBLogWriter->logf("ft0 bp %p", fastTrack->mBufferProvider); fastTrack->mVolumeProvider = NULL; fastTrack->mGeneration++; state->mFastTracksGen++; @@ -2560,7 +2553,6 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // was it previously inactive? if (!(state->mTrackMask & (1 << j))) { ExtendedAudioBufferProvider *eabp = track; - mNBLogWriter->logf("j=%d bp %p", j, eabp); VolumeProvider *vp = track; fastTrack->mBufferProvider = eabp; fastTrack->mVolumeProvider = vp; @@ -2847,19 +2839,11 @@ track_is_ready: ; block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; pauseAudioWatchdog = true; } + sq->end(); } if (sq != NULL) { - unsigned trackMask = state->mTrackMask; sq->end(didModify); - if (didModify) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("push trackMask=%#x block=%d", trackMask, block); - } sq->push(block); - if (didModify) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->log("pushed"); - } } #ifdef AUDIO_WATCHDOG if (pauseAudioWatchdog && mAudioWatchdog != 0) { @@ -2886,9 +2870,7 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("prepareTracks_l remove name=%u mFastIndex=%d", track->name(), - track->mFastIndex); + mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 3cd5c2c..fa1e336 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,7 +315,7 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; - static const size_t kLogSize = 4096; + static const size_t kLogSize = 512; sp mNBLogWriter; }; @@ -546,7 +546,7 @@ private: sp mTeeSink; sp mTeeSource; uint32_t mScreenState; // cached copy of gScreenState - static const size_t kFastMixerLogSize = 16 * 1024; + static const size_t kFastMixerLogSize = 8 * 1024; sp mFastMixerNBLogWriter; public: virtual bool hasFastMixer() const = 0; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index f679751..315cbbc 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -351,7 +351,6 @@ AudioFlinger::PlaybackThread::Track::Track( // Read the initial underruns because this field is 
never cleared by the fast mixer mObservedUnderruns = thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); - thread->mNBLogWriter->logf("new Track mName=%d mFastIndex=%d", mName, mFastIndex); } } ALOGV("Track constructor name %d, calling pid %d", mName, @@ -361,7 +360,6 @@ AudioFlinger::PlaybackThread::Track::Track( AudioFlinger::PlaybackThread::Track::~Track() { ALOGV("PlaybackThread::Track destructor"); - // FIXME not sure if safe to log here, would need a lock on thread to do it } void AudioFlinger::PlaybackThread::Track::destroy() @@ -571,8 +569,7 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("start mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("start mName=%d", mName); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track @@ -615,8 +612,7 @@ void AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("stop mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("stop mName=%d", mName); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -653,8 +649,7 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("pause mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("pause mName=%d", mName); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -678,8 +673,7 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("flush mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("flush mName=%d", mName); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From 08725b7c1f376ff93bdaf374dadaf7bf477fcb13 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 13 Feb 2013 14:46:45 -0800 Subject: Temporary additional logging to investigate bug The bug appears related to continuing to use an invalid buffer provider in fast mixer after track destruction, so focus the added logs in that area. Also includes a bug fix: was calling log in an unsafe place near Threads.cpp AudioFlinger::PlaybackThread::createTrack_l line 1250. Details: - include caller pid or client pid where appropriate - increase log buffer size - log mFastIndex when AudioMixer sees an invalid bufferProvider. - log both potentially modified and actually modified tracks in FastMixer. - fix benign bug where sq->end() was called more than once. - log StateQueue push() call and return. 
- increase StateQueue size from 4 to 8 entries - log mixer->enable(), bufferProvider, and currentTrackMask - log buffer provider addresses - increase fast mixer log buffer again - check logf format vs. argument list compatibility - add logging to AudioMixer - add checking of magic field in AudioMixer to detect overwrites - add bool AudioMixer::enabled() Bug: 6490974 Change-Id: I1f3f18aa62d9fbd35bc32285b669f5ba40efe28e --- include/media/nbaio/NBLog.h | 4 ++-- services/audioflinger/AudioFlinger.h | 2 +- services/audioflinger/AudioMixer.cpp | 46 +++++++++++++++++++++++++++++++++--- services/audioflinger/AudioMixer.h | 22 +++++++++++++++-- services/audioflinger/FastMixer.cpp | 45 +++++++++++++++++++++++++++++++++-- services/audioflinger/StateQueue.h | 2 +- services/audioflinger/Threads.cpp | 30 ++++++++++++++++++----- services/audioflinger/Threads.h | 4 ++-- services/audioflinger/Tracks.cpp | 14 +++++++---- 9 files changed, 146 insertions(+), 23 deletions(-) diff --git a/include/media/nbaio/NBLog.h b/include/media/nbaio/NBLog.h index 8fc417f..107ba66 100644 --- a/include/media/nbaio/NBLog.h +++ b/include/media/nbaio/NBLog.h @@ -115,7 +115,7 @@ public: virtual ~Writer() { } virtual void log(const char *string); - virtual void logf(const char *fmt, ...); + virtual void logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3))); virtual void logvf(const char *fmt, va_list ap); virtual void logTimestamp(); virtual void logTimestamp(const struct timespec& ts); @@ -149,7 +149,7 @@ public: LockedWriter(size_t size, void *shared); virtual void log(const char *string); - virtual void logf(const char *fmt, ...); + virtual void logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3))); virtual void logvf(const char *fmt, va_list ap); virtual void logTimestamp(); virtual void logTimestamp(const struct timespec& ts); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index c3f08f6..e8852fb 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -227,7 +227,7 @@ public: sp newWriter_l(size_t size, const char *name); void unregisterWriter(const sp& writer); private: - static const size_t kLogMemorySize = 10 * 1024; + static const size_t kLogMemorySize = 30 * 1024; sp mLogMemoryDealer; // == 0 when NBLog is disabled public: diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 08325ad..529d1b5 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -98,7 +98,7 @@ effect_descriptor_t AudioMixer::dwnmFxDesc; AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTracks) : mTrackNames(0), mConfiguredNames((maxNumTracks >= 32 ? 
0 : 1 << maxNumTracks) - 1), - mSampleRate(sampleRate) + mSampleRate(sampleRate), mLog(&mDummyLog) { // AudioMixer is not yet capable of multi-channel beyond stereo COMPILE_TIME_ASSERT_FUNCTION_SCOPE(2 == MAX_NUM_CHANNELS); @@ -122,6 +122,7 @@ AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTr mState.hook = process__nop; mState.outputTemp = NULL; mState.resampleTemp = NULL; + mState.mLog = &mDummyLog; // mState.reserved // FIXME Most of the following initialization is probably redundant since @@ -131,6 +132,7 @@ AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTr for (unsigned i=0 ; i < MAX_NUM_TRACKS ; i++) { t->resampler = NULL; t->downmixerBufferProvider = NULL; + t->magic = track_t::kMagic; t++; } @@ -169,6 +171,12 @@ AudioMixer::~AudioMixer() delete [] mState.resampleTemp; } +void AudioMixer::setLog(NBLog::Writer *log) +{ + mLog = log; + mState.mLog = log; +} + int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) { uint32_t names = (~mTrackNames) & mConfiguredNames; @@ -209,9 +217,12 @@ int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) t->mainBuffer = NULL; t->auxBuffer = NULL; t->downmixerBufferProvider = NULL; + t->fastIndex = -1; + // t->magic unchanged status_t status = initTrackDownmix(&mState.tracks[n], n, channelMask); if (status == OK) { + mLog->logf("getTrackName %d", n); return TRACK0 + n; } ALOGE("AudioMixer::getTrackName(0x%x) failed, error preparing track for downmix", @@ -366,9 +377,11 @@ void AudioMixer::deleteTrackName(int name) { ALOGV("AudioMixer::deleteTrackName(%d)", name); name -= TRACK0; + mLog->logf("deleteTrackName %d", name); ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); ALOGV("deleteTrackName(%d)", name); track_t& track(mState.tracks[ name ]); + track.checkMagic(); if (track.enabled) { track.enabled = false; invalidateState(1<logf("enable %d", name); track.enabled = true; ALOGV("enable(%d)", name); invalidateState(1 << name); @@ -400,19 +415,32 @@ void AudioMixer::disable(int name) name -= TRACK0; ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); track_t& track = mState.tracks[name]; + track.checkMagic(); if (track.enabled) { + mLog->logf("disable %d", name); track.enabled = false; ALOGV("disable(%d)", name); invalidateState(1 << name); } } +bool AudioMixer::enabled(int name) +{ + name -= TRACK0; + ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); + track_t& track = mState.tracks[name]; + track.checkMagic(); + + return track.enabled; +} + void AudioMixer::setParameter(int name, int target, int param, void *value) { name -= TRACK0; ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); track_t& track = mState.tracks[name]; + track.checkMagic(); int valueInt = (int)value; int32_t *valueBuf = (int32_t *)value; @@ -455,6 +483,9 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) // for a specific track? or per mixer? 
/* case DOWNMIX_TYPE: break */ + case FAST_INDEX: + track.fastIndex = valueInt; + break; default: LOG_FATAL("bad param"); } @@ -540,6 +571,7 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) bool AudioMixer::track_t::setResampler(uint32_t value, uint32_t devSampleRate) { + checkMagic(); if (value != devSampleRate || resampler != NULL) { if (sampleRate != value) { sampleRate = value; @@ -572,6 +604,7 @@ bool AudioMixer::track_t::setResampler(uint32_t value, uint32_t devSampleRate) inline void AudioMixer::track_t::adjustVolumeRamp(bool aux) { + checkMagic(); for (uint32_t i=0 ; i0) && (((prevVolume[i]+volumeInc[i])>>16) >= volume[i])) || ((volumeInc[i]<0) && (((prevVolume[i]+volumeInc[i])>>16) <= volume[i]))) { @@ -600,8 +633,10 @@ size_t AudioMixer::getUnreleasedFrames(int name) const void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider) { name -= TRACK0; + mLog->logf("bp %d-%p", name, bufferProvider); ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); + mState.tracks[name].checkMagic(); if (mState.tracks[name].downmixerBufferProvider != NULL) { // update required? if (mState.tracks[name].downmixerBufferProvider->mTrackBufferProvider != bufferProvider) { @@ -623,6 +658,9 @@ void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider void AudioMixer::process(int64_t pts) { + if (mState.needsChanged) { + mLog->logf("process needs=%#x", mState.needsChanged); + } mState.hook(&mState, pts); } @@ -647,6 +685,7 @@ void AudioMixer::process__validate(state_t* state, int64_t pts) } state->enabledTracks &= ~disabled; state->enabledTracks |= enabled; + state->mLog->logf("process_validate ena=%#x", state->enabledTracks); // compute everything we need... int countActiveTracks = 0; @@ -1103,6 +1142,7 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) // acquire each track's buffer uint32_t enabledTracks = state->enabledTracks; + state->mLog->logf("process_gNR ena=%#x", enabledTracks); uint32_t e0 = enabledTracks; while (e0) { const int i = 31 - __builtin_clz(e0); @@ -1111,8 +1151,8 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) t.buffer.frameCount = state->frameCount; int valid = t.bufferProvider->getValid(); if (valid != AudioBufferProvider::kValid) { - ALOGE("invalid bufferProvider=%p name=%d frameCount=%d valid=%#x enabledTracks=%#x", - t.bufferProvider, i, t.buffer.frameCount, valid, enabledTracks); + ALOGE("invalid bufferProvider=%p name=%d fastIndex=%d frameCount=%d valid=%#x enabledTracks=%#x", + t.bufferProvider, i, t.fastIndex, t.buffer.frameCount, valid, enabledTracks); // expect to crash } t.bufferProvider->getNextBuffer(&t.buffer, pts); diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index fd21fda..f0ccd8e 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -28,6 +28,7 @@ #include #include +#include namespace android { @@ -76,6 +77,7 @@ public: MAIN_BUFFER = 0x4002, AUX_BUFFER = 0x4003, DOWNMIX_TYPE = 0X4004, + FAST_INDEX = 0x4005, // for debugging only // for target RESAMPLE SAMPLE_RATE = 0x4100, // Configure sample rate conversion on this track name; // parameter 'value' is the new sample rate in Hz. 
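One of the goals listed in this patch's description, "check logf format vs. argument list compatibility", is carried by the __attribute__((format(printf, 2, 3))) annotation added to NBLog::Writer::logf() and LockedWriter::logf() earlier in this diff: it lets GCC/Clang type-check the variadic arguments against the format string at compile time. A minimal standalone sketch of the mechanism, assuming a GCC/Clang toolchain; ToyWriter and its logf() are invented for illustration and are not NBLog:

    #include <cstdarg>
    #include <cstdio>

    // Toy logger, not NBLog. The attribute tells the compiler that the second
    // argument (counting the implicit 'this' as argument 1) is a printf-style
    // format string and that the checked variadic arguments start at position 3.
    struct ToyWriter {
        void logf(const char *fmt, ...) __attribute__((format(printf, 2, 3)));
    };

    void ToyWriter::logf(const char *fmt, ...)
    {
        va_list ap;
        va_start(ap, fmt);
        std::vfprintf(stderr, fmt, ap);
        va_end(ap);
    }

    int main()
    {
        ToyWriter w;
        w.logf("track name=%d\n", 7);       // OK
        // w.logf("track name=%d\n", "x");  // would draw a -Wformat warning
        return 0;
    }

Because logf() is a non-static member function, the implicit this pointer occupies position 1, which is why the annotation names positions 2 and 3 rather than 1 and 2.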
@@ -106,6 +108,7 @@ public: // Enable or disable an allocated track by name void enable(int name); void disable(int name); + bool enabled(int name); void setParameter(int name, int target, int param, void *value); @@ -200,7 +203,10 @@ private: int32_t sessionId; - int32_t padding[2]; + int32_t fastIndex; + int32_t magic; + static const int kMagic = 0x54637281; + //int32_t padding[1]; // 16-byte boundary @@ -210,6 +216,12 @@ private: void adjustVolumeRamp(bool aux); size_t getUnreleasedFrames() const { return resampler != NULL ? resampler->getUnreleasedFrames() : 0; }; + void checkMagic() { + if (magic != kMagic) { + ALOGE("magic=%#x fastIndex=%d", magic, fastIndex); + } + } + }; // pad to 32-bytes to fill cache line @@ -220,7 +232,8 @@ private: void (*hook)(state_t* state, int64_t pts); // one of process__*, never NULL int32_t *outputTemp; int32_t *resampleTemp; - int32_t reserved[2]; + NBLog::Writer* mLog; + int32_t reserved[1]; // FIXME allocate dynamically to save some memory when maxNumTracks < MAX_NUM_TRACKS track_t tracks[MAX_NUM_TRACKS]; __attribute__((aligned(32))); }; @@ -247,6 +260,11 @@ private: const uint32_t mSampleRate; + NBLog::Writer* mLog; + NBLog::Writer mDummyLog; +public: + void setLog(NBLog::Writer* log); +private: state_t mState __attribute__((aligned(32))); // effect descriptor for the downmixer used by the mixer diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 80e37ca..5811771 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -120,12 +120,16 @@ bool FastMixer::threadLoop() FastMixerState::Command command = next->mCommand; if (next != current) { + logWriter->logTimestamp(); logWriter->log("next != current"); // As soon as possible of learning of a new dump area, start using it dumpState = next->mDumpState != NULL ? next->mDumpState : &dummyDumpState; teeSink = next->mTeeSink; logWriter = next->mNBLogWriter != NULL ? next->mNBLogWriter : &dummyLogWriter; + if (mixer != NULL) { + mixer->setLog(logWriter); + } // We want to always have a valid reference to the previous (non-idle) state. // However, the state queue only guarantees access to current and previous states. 
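The magic/checkMagic() pair added to AudioMixer::track_t above acts as a canary against stray memory overwrites: a known constant is stored in every track record and re-validated on the hot paths (enable(), disable(), setParameter(), setBufferProvider(), the ramp and resampler helpers), so corruption of mState.tracks is reported close to where it happened, together with the fastIndex of the affected track. Below is a standalone toy version of the pattern; Record and its members are invented for illustration, only the constant value is taken from this patch:

    #include <cstdint>
    #include <cstdio>

    // Each record carries a known constant; hot paths re-check it so a stray
    // memset/memcpy over the array is caught near the point of corruption.
    struct Record {
        int32_t value;
        int32_t magic;
        static const int32_t kMagic = 0x54637281;

        void checkMagic(const char *where) const {
            if (magic != kMagic) {
                std::fprintf(stderr, "corrupted Record at %s: magic=%#x\n",
                             where, (unsigned)magic);
            }
        }
    };

    int main()
    {
        Record r = { 42, Record::kMagic };
        r.checkMagic("after init");         // silent: the record is intact

        r.magic = 0;                        // simulate a stray write
        r.checkMagic("after stray write");  // reports the corruption
        return 0;
    }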
@@ -300,13 +304,21 @@ bool FastMixer::threadLoop() addedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + logWriter->logf("bp %d i=%d %p", __LINE__, i, bufferProvider); ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("added invalid %#x", i); + } if (mixer != NULL) { // calling getTrackName with default channel mask and a random invalid // sessionId (no effects here) name = mixer->getTrackName(AUDIO_CHANNEL_OUT_STEREO, -555); ALOG_ASSERT(name >= 0); fastTrackNames[i] = name; + mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, + (void *) i); mixer->setBufferProvider(name, bufferProvider); mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, (void *) mixBuffer); @@ -317,27 +329,40 @@ bool FastMixer::threadLoop() } mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK, (void *) fastTrack->mChannelMask); + if (!mixer->enabled(name)) { + logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); + } mixer->enable(name); } generations[i] = fastTrack->mGeneration; } - // finally process modified tracks; these use the same slot + // finally process (potentially) modified tracks; these use the same slot // but may have a different buffer provider or volume provider unsigned modifiedTracks = currentTrackMask & previousTrackMask; if (modifiedTracks) { - logWriter->logf("modified %#x", modifiedTracks); + logWriter->logf("pot. mod. %#x", modifiedTracks); } + unsigned actuallyModifiedTracks = 0; while (modifiedTracks != 0) { i = __builtin_ctz(modifiedTracks); modifiedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; if (fastTrack->mGeneration != generations[i]) { + actuallyModifiedTracks |= 1 << i; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + logWriter->logf("bp %d i=%d %p", __LINE__, i, bufferProvider); ALOG_ASSERT(bufferProvider != NULL); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("modified invalid %#x", i); + } if (mixer != NULL) { name = fastTrackNames[i]; ALOG_ASSERT(name >= 0); + mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, + (void *) i); mixer->setBufferProvider(name, bufferProvider); if (fastTrack->mVolumeProvider == NULL) { mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, @@ -360,6 +385,9 @@ bool FastMixer::threadLoop() generations[i] = fastTrack->mGeneration; } } + if (actuallyModifiedTracks) { + logWriter->logf("act. mod. 
%#x", actuallyModifiedTracks); + } fastTracksGen = current->mFastTracksGen; @@ -377,6 +405,7 @@ bool FastMixer::threadLoop() ALOG_ASSERT(mixBuffer != NULL); // for each track, update volume and check for underrun unsigned currentTrackMask = current->mTrackMask; + logWriter->logf("ctm %#x", currentTrackMask); while (currentTrackMask != 0) { i = __builtin_ctz(currentTrackMask); currentTrackMask &= ~(1 << i); @@ -414,15 +443,27 @@ bool FastMixer::threadLoop() // allow mixing partial buffer underruns.mBitFields.mPartial++; underruns.mBitFields.mMostRecent = UNDERRUN_PARTIAL; + if (!mixer->enabled(name)) { + logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); + } mixer->enable(name); } } else { underruns.mBitFields.mFull++; underruns.mBitFields.mMostRecent = UNDERRUN_FULL; + if (!mixer->enabled(name)) { + logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); + } mixer->enable(name); } ftDump->mUnderruns = underruns; ftDump->mFramesReady = framesReady; + AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("mixing invalid %#x", i); + } } int64_t pts; diff --git a/services/audioflinger/StateQueue.h b/services/audioflinger/StateQueue.h index e33b3c6..313330f 100644 --- a/services/audioflinger/StateQueue.h +++ b/services/audioflinger/StateQueue.h @@ -174,7 +174,7 @@ public: #endif private: - static const unsigned kN = 4; // values < 4 are not supported by this code + static const unsigned kN = 8; // values < 4 are not supported by this code T mStates[kN]; // written by mutator, read by observer // "volatile" is meaningless with SMP, but here it indicates that we're using atomic ops diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index ba848d7..a19c5f4 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1196,6 +1196,8 @@ sp AudioFlinger::PlaybackThread::createTrac { // scope for mLock Mutex::Autolock _l(mLock); + mNBLogWriter->logf("createTrack_l isFast=%d caller=%d", + (*flags & IAudioFlinger::TRACK_FAST) != 0, IPCThreadState::self()->getCallingPid()); // all tracks in same audio session must share the same routing strategy otherwise // conflicts will happen when tracks are moved from one output to another by audio policy @@ -1249,7 +1251,6 @@ Exit: if (status) { *status = lStatus; } - mNBLogWriter->logf("createTrack_l"); return track; } @@ -1317,7 +1318,8 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { - mNBLogWriter->logf("addTrack_l mName=%d", track->mName); + mNBLogWriter->logf("addTrack_l mName=%d mFastIndex=%d caller=%d", track->mName, + track->mFastIndex, IPCThreadState::self()->getCallingPid()); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1351,7 +1353,9 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("destroyTrack_l mName=%d mFastIndex=%d mClientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? 
track->mClient->pid() : -1); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1361,7 +1365,9 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { - mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("removeTrack_l mName=%d mFastIndex=%d clientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? track->mClient->pid() : -1); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -2149,6 +2155,7 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud FastTrack *fastTrack = &state->mFastTracks[0]; // wrap the source side of the MonoPipe to make it an AudioBufferProvider fastTrack->mBufferProvider = new SourceAudioBufferProvider(new MonoPipeReader(monoPipe)); + mNBLogWriter->logf("ft0 bp %p", fastTrack->mBufferProvider); fastTrack->mVolumeProvider = NULL; fastTrack->mGeneration++; state->mFastTracksGen++; @@ -2553,6 +2560,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // was it previously inactive? if (!(state->mTrackMask & (1 << j))) { ExtendedAudioBufferProvider *eabp = track; + mNBLogWriter->logf("j=%d bp %p", j, eabp); VolumeProvider *vp = track; fastTrack->mBufferProvider = eabp; fastTrack->mVolumeProvider = vp; @@ -2839,11 +2847,19 @@ track_is_ready: ; block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; pauseAudioWatchdog = true; } - sq->end(); } if (sq != NULL) { + unsigned trackMask = state->mTrackMask; sq->end(didModify); + if (didModify) { + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("push trackMask=%#x block=%d", trackMask, block); + } sq->push(block); + if (didModify) { + mNBLogWriter->logTimestamp(); + mNBLogWriter->log("pushed"); + } } #ifdef AUDIO_WATCHDOG if (pauseAudioWatchdog && mAudioWatchdog != 0) { @@ -2870,7 +2886,9 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("prepareTracks_l remove name=%u mFastIndex=%d", track->name(), + track->mFastIndex); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index fa1e336..3cd5c2c 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,7 +315,7 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; - static const size_t kLogSize = 512; + static const size_t kLogSize = 4096; sp mNBLogWriter; }; @@ -546,7 +546,7 @@ private: sp mTeeSink; sp mTeeSource; uint32_t mScreenState; // cached copy of gScreenState - static const size_t kFastMixerLogSize = 8 * 1024; + static const size_t kFastMixerLogSize = 16 * 1024; sp mFastMixerNBLogWriter; public: virtual bool hasFastMixer() const = 0; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 315cbbc..f679751 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -351,6 +351,7 @@ AudioFlinger::PlaybackThread::Track::Track( // Read the initial underruns because this field is never cleared by the fast mixer mObservedUnderruns = 
thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); + thread->mNBLogWriter->logf("new Track mName=%d mFastIndex=%d", mName, mFastIndex); } } ALOGV("Track constructor name %d, calling pid %d", mName, @@ -360,6 +361,7 @@ AudioFlinger::PlaybackThread::Track::Track( AudioFlinger::PlaybackThread::Track::~Track() { ALOGV("PlaybackThread::Track destructor"); + // FIXME not sure if safe to log here, would need a lock on thread to do it } void AudioFlinger::PlaybackThread::Track::destroy() @@ -569,7 +571,8 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("start mName=%d", mName); + thread->mNBLogWriter->logf("start mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track @@ -612,7 +615,8 @@ void AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("stop mName=%d", mName); + thread->mNBLogWriter->logf("stop mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -649,7 +653,8 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("pause mName=%d", mName); + thread->mNBLogWriter->logf("pause mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -673,7 +678,8 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("flush mName=%d", mName); + thread->mNBLogWriter->logf("flush mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From 05f625c46b992ab66b8d1527a366fe2746b4e3c7 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Wed, 13 Feb 2013 09:27:28 -0800 Subject: Add support for fragmented mp4 to MPEG4Extractor This makes FragmentedMP4Extractor obsolete. It will be removed in a separate change. 
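Much of the change is MPEG4Extractor itself now walking the 'sidx' (segment index) box and the moof/traf/trun fragment boxes. As background on the sidx layout the new parseSegmentIndex() reads, here is a standalone sketch of a version-0 sidx payload walk, following ISO/IEC 14496-12; parseSidxV0, SidxRef and the synthetic payload are invented for illustration and are not the extractor's code:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // All ISO BMFF box fields are big-endian.
    static uint32_t readU32(const uint8_t *p) {
        return (uint32_t(p[0]) << 24) | (uint32_t(p[1]) << 16) |
               (uint32_t(p[2]) << 8)  |  uint32_t(p[3]);
    }
    static uint16_t readU16(const uint8_t *p) {
        return uint16_t((p[0] << 8) | p[1]);
    }

    struct SidxRef {
        uint32_t referencedSize;  // bytes covered by this reference
        uint64_t durationUs;      // subsegment duration in microseconds
    };

    // Parse the payload of a version-0 'sidx' box (everything after the 8-byte
    // box header): version/flags, reference_ID, timescale,
    // earliest_presentation_time, first_offset, reserved, reference_count,
    // then one 12-byte entry per reference.
    static bool parseSidxV0(const uint8_t *data, size_t size, std::vector<SidxRef> *out) {
        if (size < 24) return false;
        if ((readU32(data) >> 24) != 0) return false;      // only version 0 here
        uint32_t timescale = readU32(data + 8);            // data+4 is reference_ID
        if (timescale == 0) return false;
        uint16_t referenceCount = readU16(data + 22);      // data+20..21 are reserved
        if (size < 24 + size_t(referenceCount) * 12) return false;
        const uint8_t *p = data + 24;
        for (uint16_t i = 0; i < referenceCount; ++i, p += 12) {
            uint32_t sizeWord = readU32(p);                // top bit = reference_type
            uint32_t duration = readU32(p + 4);            // in 'timescale' units
            out->push_back({ sizeWord & 0x7fffffff,
                             uint64_t(duration) * 1000000ull / timescale });
        }
        return true;
    }

    int main() {
        // Synthetic payload: one 500000-byte, 2-second reference at timescale 1000.
        static const uint8_t payload[] = {
            0,0,0,0,              // version = 0, flags = 0
            0,0,0,1,              // reference_ID
            0,0,0x03,0xe8,        // timescale = 1000
            0,0,0,0,              // earliest_presentation_time
            0,0,0,0,              // first_offset
            0,0, 0,1,             // reserved, reference_count = 1
            0x00,0x07,0xa1,0x20,  // reference_type(0) | referenced_size = 500000
            0,0,0x07,0xd0,        // subsegment_duration = 2000
            0x90,0,0,0,           // starts_with_SAP = 1, SAP_type = 1
        };
        std::vector<SidxRef> refs;
        if (parseSidxV0(payload, sizeof(payload), &refs)) {
            for (const SidxRef &r : refs) {
                std::printf("size=%u bytes duration=%llu us\n",
                            r.referencedSize, (unsigned long long)r.durationUs);
            }
        }
        return 0;
    }

MPEG4Extractor::parseSegmentIndex() performs the same walk through the DataSource::getUInt32()/getUInt64() helpers added by this patch, converting each entry's duration to microseconds so that fragmentedRead() can map a seek time to a byte offset within the fragmented file.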
Change-Id: Ida74c07ccf84983e20a1320ee24ffc7a5c083859 --- include/media/stagefright/DataSource.h | 2 + media/libstagefright/DataSource.cpp | 26 + media/libstagefright/MPEG4Extractor.cpp | 882 +++++++++++++++++++++++++- media/libstagefright/include/MPEG4Extractor.h | 18 + 4 files changed, 899 insertions(+), 29 deletions(-) diff --git a/include/media/stagefright/DataSource.h b/include/media/stagefright/DataSource.h index 00d583e..b0c1b34 100644 --- a/include/media/stagefright/DataSource.h +++ b/include/media/stagefright/DataSource.h @@ -54,6 +54,8 @@ public: // Convenience methods: bool getUInt16(off64_t offset, uint16_t *x); + bool getUInt32(off64_t offset, uint32_t *x); + bool getUInt64(off64_t offset, uint64_t *x); // May return ERROR_UNSUPPORTED. virtual status_t getSize(off64_t *size); diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp index 9d0eea2..bcf333e 100644 --- a/media/libstagefright/DataSource.cpp +++ b/media/libstagefright/DataSource.cpp @@ -59,6 +59,32 @@ bool DataSource::getUInt16(off64_t offset, uint16_t *x) { return true; } +bool DataSource::getUInt32(off64_t offset, uint32_t *x) { + *x = 0; + + uint32_t tmp; + if (readAt(offset, &tmp, 4) != 4) { + return false; + } + + *x = ntohl(tmp); + + return true; +} + +bool DataSource::getUInt64(off64_t offset, uint64_t *x) { + *x = 0; + + uint64_t tmp; + if (readAt(offset, &tmp, 8) != 8) { + return false; + } + + *x = ntoh64(tmp); + + return true; +} + status_t DataSource::getSize(off64_t *size) { *size = 0; diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index 1a62f9d..b2e60be 100644 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -22,8 +22,6 @@ #include "include/SampleTable.h" #include "include/ESDS.h" -#include - #include #include #include @@ -33,13 +31,11 @@ #include #include #include -#include #include #include #include #include #include -#include #include namespace android { @@ -50,15 +46,17 @@ public: MPEG4Source(const sp &format, const sp &dataSource, int32_t timeScale, - const sp &sampleTable); + const sp &sampleTable, + Vector &sidx, + off64_t firstMoofOffset); virtual status_t start(MetaData *params = NULL); virtual status_t stop(); virtual sp getFormat(); - virtual status_t read( - MediaBuffer **buffer, const ReadOptions *options = NULL); + virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL); + virtual status_t fragmentedRead(MediaBuffer **buffer, const ReadOptions *options = NULL); protected: virtual ~MPEG4Source(); @@ -71,6 +69,14 @@ private: int32_t mTimescale; sp mSampleTable; uint32_t mCurrentSampleIndex; + uint32_t mCurrentFragmentIndex; + Vector &mSegments; + off64_t mFirstMoofOffset; + off64_t mCurrentMoofOffset; + off64_t mNextMoofOffset; + uint32_t mCurrentTime; + int32_t mLastParsedTrackId; + int32_t mTrackId; bool mIsAVC; size_t mNALLengthSize; @@ -86,6 +92,38 @@ private: uint8_t *mSrcBuffer; size_t parseNALSize(const uint8_t *data) const; + status_t parseChunk(off64_t *offset); + status_t parseTrackFragmentHeader(off64_t offset, off64_t size); + status_t parseTrackFragmentRun(off64_t offset, off64_t size); + + struct TrackFragmentHeaderInfo { + enum Flags { + kBaseDataOffsetPresent = 0x01, + kSampleDescriptionIndexPresent = 0x02, + kDefaultSampleDurationPresent = 0x08, + kDefaultSampleSizePresent = 0x10, + kDefaultSampleFlagsPresent = 0x20, + kDurationIsEmpty = 0x10000, + }; + + uint32_t mTrackID; + uint32_t mFlags; + uint64_t mBaseDataOffset; + uint32_t 
mSampleDescriptionIndex; + uint32_t mDefaultSampleDuration; + uint32_t mDefaultSampleSize; + uint32_t mDefaultSampleFlags; + + uint64_t mDataOffset; + }; + TrackFragmentHeaderInfo mTrackFragmentHeaderInfo; + + struct Sample { + off64_t offset; + size_t size; + uint32_t duration; + }; + Vector mCurrentSamples; MPEG4Source(const MPEG4Source &); MPEG4Source &operator=(const MPEG4Source &); @@ -265,7 +303,9 @@ static const char *FourCC2MIME(uint32_t fourcc) { } MPEG4Extractor::MPEG4Extractor(const sp &source) - : mDataSource(source), + : mSidxDuration(0), + mMoofOffset(0), + mDataSource(source), mInitCheck(NO_INIT), mHasVideo(false), mFirstTrack(NULL), @@ -295,6 +335,12 @@ MPEG4Extractor::~MPEG4Extractor() { mFirstSINF = NULL; } +uint32_t MPEG4Extractor::flags() const { + return CAN_PAUSE | + ((mMoofOffset == 0 || mSidxEntries.size() != 0) ? + (CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_SEEK) : 0); +} + sp MPEG4Extractor::getMetaData() { status_t err; if ((err = readMetaData()) != OK) { @@ -348,15 +394,24 @@ sp MPEG4Extractor::getTrackMetaData( const char *mime; CHECK(track->meta->findCString(kKeyMIMEType, &mime)); if (!strncasecmp("video/", mime, 6)) { - uint32_t sampleIndex; - uint32_t sampleTime; - if (track->sampleTable->findThumbnailSample(&sampleIndex) == OK - && track->sampleTable->getMetaDataForSample( - sampleIndex, NULL /* offset */, NULL /* size */, - &sampleTime) == OK) { - track->meta->setInt64( - kKeyThumbnailTime, - ((int64_t)sampleTime * 1000000) / track->timescale); + if (mMoofOffset > 0) { + int64_t duration; + if (track->meta->findInt64(kKeyDuration, &duration)) { + // nothing fancy, just pick a frame near 1/4th of the duration + track->meta->setInt64( + kKeyThumbnailTime, duration / 4); + } + } else { + uint32_t sampleIndex; + uint32_t sampleTime; + if (track->sampleTable->findThumbnailSample(&sampleIndex) == OK + && track->sampleTable->getMetaDataForSample( + sampleIndex, NULL /* offset */, NULL /* size */, + &sampleTime) == OK) { + track->meta->setInt64( + kKeyThumbnailTime, + ((int64_t)sampleTime * 1000000) / track->timescale); + } } } } @@ -371,7 +426,25 @@ status_t MPEG4Extractor::readMetaData() { off64_t offset = 0; status_t err; - while ((err = parseChunk(&offset, 0)) == OK) { + while (true) { + err = parseChunk(&offset, 0); + if (err == OK) { + continue; + } + + uint32_t hdr[2]; + if (mDataSource->readAt(offset, hdr, 8) < 8) { + break; + } + uint32_t chunk_type = ntohl(hdr[1]); + if (chunk_type == FOURCC('s', 'i', 'd', 'x')) { + // parse the sidx box too + continue; + } else if (chunk_type == FOURCC('m', 'o', 'o', 'f')) { + // store the offset of the first segment + mMoofOffset = offset; + } + break; } if (mInitCheck == OK) { @@ -630,7 +703,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { char chunk[5]; MakeFourCCString(chunk_type, chunk); - ALOGV("chunk: %s @ %lld", chunk, *offset); + ALOGV("chunk: %s @ %lld, %d", chunk, *offset, depth); #if 0 static const char kWhitespace[] = " "; @@ -816,7 +889,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->timescale = ntohl(timescale); - int64_t duration; + int64_t duration = 0; if (version == 1) { if (mDataSource->readAt( timescale_offset + 4, &duration, sizeof(duration)) @@ -825,13 +898,16 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { } duration = ntoh64(duration); } else { - int32_t duration32; + uint32_t duration32; if (mDataSource->readAt( timescale_offset + 4, &duration32, sizeof(duration32)) < (ssize_t)sizeof(duration32)) { return ERROR_IO; } 
- duration = ntohl(duration32); + // ffmpeg sets duration to -1, which is incorrect. + if (duration32 != 0xffffffff) { + duration = ntohl(duration32); + } } mLastTrack->meta->setInt64( kKeyDuration, (duration * 1000000) / mLastTrack->timescale); @@ -1075,11 +1151,23 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { return err; } - // Assume that a given buffer only contains at most 10 fragments, - // each fragment originally prefixed with a 2 byte length will - // have a 4 byte header (0x00 0x00 0x00 0x01) after conversion, - // and thus will grow by 2 bytes per fragment. - mLastTrack->meta->setInt32(kKeyMaxInputSize, max_size + 10 * 2); + if (max_size != 0) { + // Assume that a given buffer only contains at most 10 chunks, + // each chunk originally prefixed with a 2 byte length will + // have a 4 byte header (0x00 0x00 0x00 0x01) after conversion, + // and thus will grow by 2 bytes per chunk. + mLastTrack->meta->setInt32(kKeyMaxInputSize, max_size + 10 * 2); + } else { + // No size was specified. Pick a conservatively large size. + int32_t width, height; + if (mLastTrack->meta->findInt32(kKeyWidth, &width) && + mLastTrack->meta->findInt32(kKeyHeight, &height)) { + mLastTrack->meta->setInt32(kKeyMaxInputSize, width * height * 3 / 2); + } else { + ALOGE("No width or height, assuming worst case 1080p"); + mLastTrack->meta->setInt32(kKeyMaxInputSize, 3110400); + } + } *offset += chunk_size; // Calculate average frame rate. @@ -1448,6 +1536,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { break; } + case FOURCC('s', 'i', 'd', 'x'): + { + parseSegmentIndex(data_offset, chunk_data_size); + *offset += chunk_size; + return UNKNOWN_ERROR; // stop parsing after sidx + } + default: { *offset += chunk_size; @@ -1458,6 +1553,125 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { return OK; } +status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) { + ALOGV("MPEG4Extractor::parseSegmentIndex"); + + if (size < 12) { + return -EINVAL; + } + + uint32_t flags; + if (!mDataSource->getUInt32(offset, &flags)) { + return ERROR_MALFORMED; + } + + uint32_t version = flags >> 24; + flags &= 0xffffff; + + ALOGV("sidx version %d", version); + + uint32_t referenceId; + if (!mDataSource->getUInt32(offset + 4, &referenceId)) { + return ERROR_MALFORMED; + } + + uint32_t timeScale; + if (!mDataSource->getUInt32(offset + 8, &timeScale)) { + return ERROR_MALFORMED; + } + ALOGV("sidx refid/timescale: %d/%d", referenceId, timeScale); + + uint64_t earliestPresentationTime; + uint64_t firstOffset; + + offset += 12; + size -= 12; + + if (version == 0) { + if (size < 8) { + return -EINVAL; + } + uint32_t tmp; + if (!mDataSource->getUInt32(offset, &tmp)) { + return ERROR_MALFORMED; + } + earliestPresentationTime = tmp; + if (!mDataSource->getUInt32(offset + 4, &tmp)) { + return ERROR_MALFORMED; + } + firstOffset = tmp; + offset += 8; + size -= 8; + } else { + if (size < 16) { + return -EINVAL; + } + if (!mDataSource->getUInt64(offset, &earliestPresentationTime)) { + return ERROR_MALFORMED; + } + if (!mDataSource->getUInt64(offset + 8, &firstOffset)) { + return ERROR_MALFORMED; + } + offset += 16; + size -= 16; + } + ALOGV("sidx pres/off: %Ld/%Ld", earliestPresentationTime, firstOffset); + + if (size < 4) { + return -EINVAL; + } + + uint16_t referenceCount; + if (!mDataSource->getUInt16(offset + 2, &referenceCount)) { + return ERROR_MALFORMED; + } + offset += 4; + size -= 4; + ALOGV("refcount: %d", referenceCount); + + if (size < referenceCount * 12) { + 
return -EINVAL; + } + + uint64_t total_duration = 0; + for (unsigned int i = 0; i < referenceCount; i++) { + uint32_t d1, d2, d3; + + if (!mDataSource->getUInt32(offset, &d1) || // size + !mDataSource->getUInt32(offset + 4, &d2) || // duration + !mDataSource->getUInt32(offset + 8, &d3)) { // flags + return ERROR_MALFORMED; + } + + if (d1 & 0x80000000) { + ALOGW("sub-sidx boxes not supported yet"); + } + bool sap = d3 & 0x80000000; + bool saptype = d3 >> 28; + if (!sap || saptype > 2) { + ALOGW("not a stream access point, or unsupported type"); + } + total_duration += d2; + offset += 12; + ALOGV(" item %d, %08x %08x %08x", i, d1, d2, d3); + SidxEntry se; + se.mSize = d1 & 0x7fffffff; + se.mDurationUs = 1000000LL * d2 / timeScale; + mSidxEntries.add(se); + } + + mSidxDuration = total_duration * 1000000 / timeScale; + ALOGV("duration: %lld", mSidxDuration); + + int64_t metaDuration; + if (!mLastTrack->meta->findInt64(kKeyDuration, &metaDuration) || metaDuration == 0) { + mLastTrack->meta->setInt64(kKeyDuration, mSidxDuration); + } + return OK; +} + + + status_t MPEG4Extractor::parseTrackHeader( off64_t data_offset, off64_t data_size) { if (data_size < 4) { @@ -1755,7 +1969,8 @@ sp MPEG4Extractor::getTrack(size_t index) { } return new MPEG4Source( - track->meta, mDataSource, track->timescale, track->sampleTable); + track->meta, mDataSource, track->timescale, track->sampleTable, + mSidxEntries, mMoofOffset); } // static @@ -1898,12 +2113,19 @@ MPEG4Source::MPEG4Source( const sp &format, const sp &dataSource, int32_t timeScale, - const sp &sampleTable) + const sp &sampleTable, + Vector &sidx, + off64_t firstMoofOffset) : mFormat(format), mDataSource(dataSource), mTimescale(timeScale), mSampleTable(sampleTable), mCurrentSampleIndex(0), + mCurrentFragmentIndex(0), + mSegments(sidx), + mFirstMoofOffset(firstMoofOffset), + mCurrentMoofOffset(firstMoofOffset), + mCurrentTime(0), mIsAVC(false), mNALLengthSize(0), mStarted(false), @@ -1931,6 +2153,13 @@ MPEG4Source::MPEG4Source( // The number of bytes used to encode the length of a NAL unit. mNALLengthSize = 1 + (ptr[4] & 3); } + + CHECK(format->findInt32(kKeyTrackID, &mTrackId)); + + if (mFirstMoofOffset != 0) { + off64_t offset = mFirstMoofOffset; + parseChunk(&offset); + } } MPEG4Source::~MPEG4Source() { @@ -1988,6 +2217,344 @@ status_t MPEG4Source::stop() { return OK; } +status_t MPEG4Source::parseChunk(off64_t *offset) { + uint32_t hdr[2]; + if (mDataSource->readAt(*offset, hdr, 8) < 8) { + return ERROR_IO; + } + uint64_t chunk_size = ntohl(hdr[0]); + uint32_t chunk_type = ntohl(hdr[1]); + off64_t data_offset = *offset + 8; + + if (chunk_size == 1) { + if (mDataSource->readAt(*offset + 8, &chunk_size, 8) < 8) { + return ERROR_IO; + } + chunk_size = ntoh64(chunk_size); + data_offset += 8; + + if (chunk_size < 16) { + // The smallest valid chunk is 16 bytes long in this case. + return ERROR_MALFORMED; + } + } else if (chunk_size < 8) { + // The smallest valid chunk is 8 bytes long. 
+ return ERROR_MALFORMED; + } + + char chunk[5]; + MakeFourCCString(chunk_type, chunk); + ALOGV("MPEG4Source chunk %s @ %llx", chunk, *offset); + + off64_t chunk_data_size = *offset + chunk_size - data_offset; + + switch(chunk_type) { + + case FOURCC('t', 'r', 'a', 'f'): + case FOURCC('m', 'o', 'o', 'f'): { + off64_t stop_offset = *offset + chunk_size; + *offset = data_offset; + while (*offset < stop_offset) { + status_t err = parseChunk(offset); + if (err != OK) { + return err; + } + } + if (chunk_type == FOURCC('m', 'o', 'o', 'f')) { + // *offset points to then mdat box following this moof + parseChunk(offset); // doesn't actually parse it, just updates offset + mNextMoofOffset = *offset; + } + break; + } + + case FOURCC('t', 'f', 'h', 'd'): { + status_t err; + if ((err = parseTrackFragmentHeader(data_offset, chunk_data_size)) != OK) { + return err; + } + *offset += chunk_size; + break; + } + + case FOURCC('t', 'r', 'u', 'n'): { + status_t err; + if (mLastParsedTrackId == mTrackId) { + if ((err = parseTrackFragmentRun(data_offset, chunk_data_size)) != OK) { + return err; + } + } + + *offset += chunk_size; + break; + } + + default: { + *offset += chunk_size; + break; + } + } + return OK; +} + +status_t MPEG4Source::parseTrackFragmentHeader(off64_t offset, off64_t size) { + + if (size < 8) { + return -EINVAL; + } + + uint32_t flags; + if (!mDataSource->getUInt32(offset, &flags)) { + return ERROR_MALFORMED; + } + + if (flags & 0xff000000) { + return -EINVAL; + } + + if (!mDataSource->getUInt32(offset + 4, (uint32_t*)&mLastParsedTrackId)) { + return ERROR_MALFORMED; + } + + if (mLastParsedTrackId != mTrackId) { + // this is not the right track, skip it + return OK; + } + + mTrackFragmentHeaderInfo.mFlags = flags; + mTrackFragmentHeaderInfo.mTrackID = mLastParsedTrackId; + offset += 8; + size -= 8; + + ALOGV("fragment header: %08x %08x", flags, mTrackFragmentHeaderInfo.mTrackID); + + if (flags & TrackFragmentHeaderInfo::kBaseDataOffsetPresent) { + if (size < 8) { + return -EINVAL; + } + + if (!mDataSource->getUInt64(offset, &mTrackFragmentHeaderInfo.mBaseDataOffset)) { + return ERROR_MALFORMED; + } + offset += 8; + size -= 8; + } + + if (flags & TrackFragmentHeaderInfo::kSampleDescriptionIndexPresent) { + if (size < 4) { + return -EINVAL; + } + + if (!mDataSource->getUInt32(offset, &mTrackFragmentHeaderInfo.mSampleDescriptionIndex)) { + return ERROR_MALFORMED; + } + offset += 4; + size -= 4; + } + + if (flags & TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) { + if (size < 4) { + return -EINVAL; + } + + if (!mDataSource->getUInt32(offset, &mTrackFragmentHeaderInfo.mDefaultSampleDuration)) { + return ERROR_MALFORMED; + } + offset += 4; + size -= 4; + } + + if (flags & TrackFragmentHeaderInfo::kDefaultSampleSizePresent) { + if (size < 4) { + return -EINVAL; + } + + if (!mDataSource->getUInt32(offset, &mTrackFragmentHeaderInfo.mDefaultSampleSize)) { + return ERROR_MALFORMED; + } + offset += 4; + size -= 4; + } + + if (flags & TrackFragmentHeaderInfo::kDefaultSampleFlagsPresent) { + if (size < 4) { + return -EINVAL; + } + + if (!mDataSource->getUInt32(offset, &mTrackFragmentHeaderInfo.mDefaultSampleFlags)) { + return ERROR_MALFORMED; + } + offset += 4; + size -= 4; + } + + if (!(flags & TrackFragmentHeaderInfo::kBaseDataOffsetPresent)) { + mTrackFragmentHeaderInfo.mBaseDataOffset = mCurrentMoofOffset; + } + + mTrackFragmentHeaderInfo.mDataOffset = 0; + return OK; +} + +status_t MPEG4Source::parseTrackFragmentRun(off64_t offset, off64_t size) { + + 
ALOGV("MPEG4Extractor::parseTrackFragmentRun"); + if (size < 8) { + return -EINVAL; + } + + enum { + kDataOffsetPresent = 0x01, + kFirstSampleFlagsPresent = 0x04, + kSampleDurationPresent = 0x100, + kSampleSizePresent = 0x200, + kSampleFlagsPresent = 0x400, + kSampleCompositionTimeOffsetPresent = 0x800, + }; + + uint32_t flags; + if (!mDataSource->getUInt32(offset, &flags)) { + return ERROR_MALFORMED; + } + ALOGV("fragment run flags: %08x", flags); + + if (flags & 0xff000000) { + return -EINVAL; + } + + if ((flags & kFirstSampleFlagsPresent) && (flags & kSampleFlagsPresent)) { + // These two shall not be used together. + return -EINVAL; + } + + uint32_t sampleCount; + if (!mDataSource->getUInt32(offset + 4, &sampleCount)) { + return ERROR_MALFORMED; + } + offset += 8; + size -= 8; + + uint64_t dataOffset = mTrackFragmentHeaderInfo.mDataOffset; + + uint32_t firstSampleFlags = 0; + + if (flags & kDataOffsetPresent) { + if (size < 4) { + return -EINVAL; + } + + int32_t dataOffsetDelta; + if (!mDataSource->getUInt32(offset, (uint32_t*)&dataOffsetDelta)) { + return ERROR_MALFORMED; + } + + dataOffset = mTrackFragmentHeaderInfo.mBaseDataOffset + dataOffsetDelta; + + offset += 4; + size -= 4; + } + + if (flags & kFirstSampleFlagsPresent) { + if (size < 4) { + return -EINVAL; + } + + if (!mDataSource->getUInt32(offset, &firstSampleFlags)) { + return ERROR_MALFORMED; + } + offset += 4; + size -= 4; + } + + uint32_t sampleDuration = 0, sampleSize = 0, sampleFlags = 0, + sampleCtsOffset = 0; + + size_t bytesPerSample = 0; + if (flags & kSampleDurationPresent) { + bytesPerSample += 4; + } else if (mTrackFragmentHeaderInfo.mFlags + & TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) { + sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration; + } else { + sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration; + } + + if (flags & kSampleSizePresent) { + bytesPerSample += 4; + } else if (mTrackFragmentHeaderInfo.mFlags + & TrackFragmentHeaderInfo::kDefaultSampleSizePresent) { + sampleSize = mTrackFragmentHeaderInfo.mDefaultSampleSize; + } else { + sampleSize = mTrackFragmentHeaderInfo.mDefaultSampleSize; + } + + if (flags & kSampleFlagsPresent) { + bytesPerSample += 4; + } else if (mTrackFragmentHeaderInfo.mFlags + & TrackFragmentHeaderInfo::kDefaultSampleFlagsPresent) { + sampleFlags = mTrackFragmentHeaderInfo.mDefaultSampleFlags; + } else { + sampleFlags = mTrackFragmentHeaderInfo.mDefaultSampleFlags; + } + + if (flags & kSampleCompositionTimeOffsetPresent) { + bytesPerSample += 4; + } else { + sampleCtsOffset = 0; + } + + if (size < sampleCount * bytesPerSample) { + return -EINVAL; + } + + Sample tmp; + for (uint32_t i = 0; i < sampleCount; ++i) { + if (flags & kSampleDurationPresent) { + if (!mDataSource->getUInt32(offset, &sampleDuration)) { + return ERROR_MALFORMED; + } + offset += 4; + } + + if (flags & kSampleSizePresent) { + if (!mDataSource->getUInt32(offset, &sampleSize)) { + return ERROR_MALFORMED; + } + offset += 4; + } + + if (flags & kSampleFlagsPresent) { + if (!mDataSource->getUInt32(offset, &sampleFlags)) { + return ERROR_MALFORMED; + } + offset += 4; + } + + if (flags & kSampleCompositionTimeOffsetPresent) { + if (!mDataSource->getUInt32(offset, &sampleCtsOffset)) { + return ERROR_MALFORMED; + } + offset += 4; + } + + ALOGV("adding sample at offset 0x%08llx, size %u, duration %u, " + " flags 0x%08x", + dataOffset, sampleSize, sampleDuration, + (flags & kFirstSampleFlagsPresent) && i == 0 + ? 
firstSampleFlags : sampleFlags); + tmp.offset = dataOffset; + tmp.size = sampleSize; + tmp.duration = sampleDuration; + mCurrentSamples.add(tmp); + + dataOffset += sampleSize; + } + + mTrackFragmentHeaderInfo.mDataOffset = dataOffset; + + return OK; +} + sp MPEG4Source::getFormat() { Mutex::Autolock autoLock(mLock); @@ -2019,6 +2586,10 @@ status_t MPEG4Source::read( CHECK(mStarted); + if (mFirstMoofOffset > 0) { + return fragmentedRead(out, options); + } + *out = NULL; int64_t targetSampleTimeUs = -1; @@ -2076,6 +2647,7 @@ status_t MPEG4Source::read( // we had seeked to the end of stream, ending normally. err = ERROR_END_OF_STREAM; } + ALOGV("end of stream"); return err; } @@ -2286,6 +2858,255 @@ status_t MPEG4Source::read( } } +status_t MPEG4Source::fragmentedRead( + MediaBuffer **out, const ReadOptions *options) { + + ALOGV("MPEG4Source::fragmentedRead"); + + CHECK(mStarted); + + *out = NULL; + + int64_t targetSampleTimeUs = -1; + + int64_t seekTimeUs; + ReadOptions::SeekMode mode; + if (options && options->getSeekTo(&seekTimeUs, &mode)) { + + int numSidxEntries = mSegments.size(); + if (numSidxEntries != 0) { + int64_t totalTime = 0; + off64_t totalOffset = mFirstMoofOffset; + for (int i = 0; i < numSidxEntries; i++) { + const SidxEntry *se = &mSegments[i]; + if (totalTime + se->mDurationUs > seekTimeUs) { + // The requested time is somewhere in this segment + if ((mode == ReadOptions::SEEK_NEXT_SYNC) || + (mode == ReadOptions::SEEK_CLOSEST_SYNC && + (seekTimeUs - totalTime) > (totalTime + se->mDurationUs - seekTimeUs))) { + // requested next sync, or closest sync and it was closer to the end of + // this segment + totalTime += se->mDurationUs; + totalOffset += se->mSize; + } + break; + } + totalTime += se->mDurationUs; + totalOffset += se->mSize; + } + mCurrentMoofOffset = totalOffset; + mCurrentSamples.clear(); + mCurrentSampleIndex = 0; + parseChunk(&totalOffset); + mCurrentTime = totalTime * mTimescale / 1000000ll; + } + + if (mBuffer != NULL) { + mBuffer->release(); + mBuffer = NULL; + } + + // fall through + } + + off64_t offset = 0; + size_t size; + uint32_t cts = 0; + bool isSyncSample = false; + bool newBuffer = false; + if (mBuffer == NULL) { + newBuffer = true; + + if (mCurrentSampleIndex >= mCurrentSamples.size()) { + // move to next fragment + Sample lastSample = mCurrentSamples[mCurrentSamples.size() - 1]; + off64_t nextMoof = mNextMoofOffset; // lastSample.offset + lastSample.size; + mCurrentMoofOffset = nextMoof; + mCurrentSamples.clear(); + mCurrentSampleIndex = 0; + parseChunk(&nextMoof); + if (mCurrentSampleIndex >= mCurrentSamples.size()) { + return ERROR_END_OF_STREAM; + } + } + + const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex]; + offset = smpl->offset; + size = smpl->size; + cts = mCurrentTime; + mCurrentTime += smpl->duration; + isSyncSample = (mCurrentSampleIndex == 0); // XXX + + status_t err = mGroup->acquire_buffer(&mBuffer); + + if (err != OK) { + CHECK(mBuffer == NULL); + ALOGV("acquire_buffer returned %d", err); + return err; + } + } + + if (!mIsAVC || mWantsNALFragments) { + if (newBuffer) { + ssize_t num_bytes_read = + mDataSource->readAt(offset, (uint8_t *)mBuffer->data(), size); + + if (num_bytes_read < (ssize_t)size) { + mBuffer->release(); + mBuffer = NULL; + + ALOGV("i/o error"); + return ERROR_IO; + } + + CHECK(mBuffer != NULL); + mBuffer->set_range(0, size); + mBuffer->meta_data()->clear(); + mBuffer->meta_data()->setInt64( + kKeyTime, ((int64_t)cts * 1000000) / mTimescale); + + if (targetSampleTimeUs >= 0) { + 
mBuffer->meta_data()->setInt64( + kKeyTargetTime, targetSampleTimeUs); + } + + if (isSyncSample) { + mBuffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); + } + + ++mCurrentSampleIndex; + } + + if (!mIsAVC) { + *out = mBuffer; + mBuffer = NULL; + + return OK; + } + + // Each NAL unit is split up into its constituent fragments and + // each one of them returned in its own buffer. + + CHECK(mBuffer->range_length() >= mNALLengthSize); + + const uint8_t *src = + (const uint8_t *)mBuffer->data() + mBuffer->range_offset(); + + size_t nal_size = parseNALSize(src); + if (mBuffer->range_length() < mNALLengthSize + nal_size) { + ALOGE("incomplete NAL unit."); + + mBuffer->release(); + mBuffer = NULL; + + return ERROR_MALFORMED; + } + + MediaBuffer *clone = mBuffer->clone(); + CHECK(clone != NULL); + clone->set_range(mBuffer->range_offset() + mNALLengthSize, nal_size); + + CHECK(mBuffer != NULL); + mBuffer->set_range( + mBuffer->range_offset() + mNALLengthSize + nal_size, + mBuffer->range_length() - mNALLengthSize - nal_size); + + if (mBuffer->range_length() == 0) { + mBuffer->release(); + mBuffer = NULL; + } + + *out = clone; + + return OK; + } else { + ALOGV("whole NAL"); + // Whole NAL units are returned but each fragment is prefixed by + // the start code (0x00 00 00 01). + ssize_t num_bytes_read = 0; + int32_t drm = 0; + bool usesDRM = (mFormat->findInt32(kKeyIsDRM, &drm) && drm != 0); + if (usesDRM) { + num_bytes_read = + mDataSource->readAt(offset, (uint8_t*)mBuffer->data(), size); + } else { + num_bytes_read = mDataSource->readAt(offset, mSrcBuffer, size); + } + + if (num_bytes_read < (ssize_t)size) { + mBuffer->release(); + mBuffer = NULL; + + ALOGV("i/o error"); + return ERROR_IO; + } + + if (usesDRM) { + CHECK(mBuffer != NULL); + mBuffer->set_range(0, size); + + } else { + uint8_t *dstData = (uint8_t *)mBuffer->data(); + size_t srcOffset = 0; + size_t dstOffset = 0; + + while (srcOffset < size) { + bool isMalFormed = (srcOffset + mNALLengthSize > size); + size_t nalLength = 0; + if (!isMalFormed) { + nalLength = parseNALSize(&mSrcBuffer[srcOffset]); + srcOffset += mNALLengthSize; + isMalFormed = srcOffset + nalLength > size; + } + + if (isMalFormed) { + ALOGE("Video is malformed"); + mBuffer->release(); + mBuffer = NULL; + return ERROR_MALFORMED; + } + + if (nalLength == 0) { + continue; + } + + CHECK(dstOffset + 4 <= mBuffer->size()); + + dstData[dstOffset++] = 0; + dstData[dstOffset++] = 0; + dstData[dstOffset++] = 0; + dstData[dstOffset++] = 1; + memcpy(&dstData[dstOffset], &mSrcBuffer[srcOffset], nalLength); + srcOffset += nalLength; + dstOffset += nalLength; + } + CHECK_EQ(srcOffset, size); + CHECK(mBuffer != NULL); + mBuffer->set_range(0, dstOffset); + } + + mBuffer->meta_data()->clear(); + mBuffer->meta_data()->setInt64( + kKeyTime, ((int64_t)cts * 1000000) / mTimescale); + + if (targetSampleTimeUs >= 0) { + mBuffer->meta_data()->setInt64( + kKeyTargetTime, targetSampleTimeUs); + } + + if (isSyncSample) { + mBuffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); + } + + ++mCurrentSampleIndex; + + *out = mBuffer; + mBuffer = NULL; + + return OK; + } +} + MPEG4Extractor::Track *MPEG4Extractor::findTrackByMimePrefix( const char *mimePrefix) { for (Track *track = mFirstTrack; track != NULL; track = track->next) { @@ -2398,6 +3219,9 @@ static bool BetterSniffMPEG4( off64_t chunkDataSize = offset + chunkSize - chunkDataOffset; + char chunkstring[5]; + MakeFourCCString(chunkType, chunkstring); + ALOGV("saw chunk type %s, size %lld @ %lld", chunkstring, chunkSize, offset); switch (chunkType) { 
case FOURCC('f', 't', 'y', 'p'): { diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h index 5c549e0..c68623a 100644 --- a/media/libstagefright/include/MPEG4Extractor.h +++ b/media/libstagefright/include/MPEG4Extractor.h @@ -18,7 +18,12 @@ #define MPEG4_EXTRACTOR_H_ +#include + +#include #include +#include +#include #include #include @@ -29,6 +34,11 @@ class DataSource; class SampleTable; class String8; +struct SidxEntry { + size_t mSize; + uint32_t mDurationUs; +}; + class MPEG4Extractor : public MediaExtractor { public: // Extractor assumes ownership of "source". @@ -39,6 +49,7 @@ public: virtual sp getTrackMetaData(size_t index, uint32_t flags); virtual sp getMetaData(); + virtual uint32_t flags() const; // for DRM virtual char* getDrmTrackInfo(size_t trackID, int *len); @@ -47,6 +58,7 @@ protected: virtual ~MPEG4Extractor(); private: + struct Track { Track *next; sp meta; @@ -56,6 +68,10 @@ private: bool skipTrack; }; + Vector mSidxEntries; + uint64_t mSidxDuration; + off64_t mMoofOffset; + sp mDataSource; status_t mInitCheck; bool mHasVideo; @@ -93,6 +109,8 @@ private: status_t parseTrackHeader(off64_t data_offset, off64_t data_size); + status_t parseSegmentIndex(off64_t data_offset, size_t data_size); + Track *findTrackByMimePrefix(const char *mimePrefix); MPEG4Extractor(const MPEG4Extractor &); -- cgit v1.1 From ab89ac209fd1c3b0a2227168a48d7f3ae9bc43f3 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Fri, 15 Feb 2013 08:26:24 -0800 Subject: Remove FragmentedMP4Extractor MPEG4Extractor now supports fragmented mp4 files. Change-Id: I5659a51f4e5e4407a12535e69238fe3abffda7dc --- media/libstagefright/Android.mk | 1 - media/libstagefright/DataSource.cpp | 2 - media/libstagefright/FragmentedMP4Extractor.cpp | 464 --------------------- media/libstagefright/MediaExtractor.cpp | 8 +- .../include/FragmentedMP4Extractor.h | 70 ---- 5 files changed, 1 insertion(+), 544 deletions(-) delete mode 100644 media/libstagefright/FragmentedMP4Extractor.cpp delete mode 100644 media/libstagefright/include/FragmentedMP4Extractor.h diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index 6934e59..acc3abf 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -19,7 +19,6 @@ LOCAL_SRC_FILES:= \ ESDS.cpp \ FileSource.cpp \ FLACExtractor.cpp \ - FragmentedMP4Extractor.cpp \ HTTPBase.cpp \ JPEGSource.cpp \ MP3Extractor.cpp \ diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp index bcf333e..19b38ee 100644 --- a/media/libstagefright/DataSource.cpp +++ b/media/libstagefright/DataSource.cpp @@ -23,7 +23,6 @@ #include "include/AACExtractor.h" #include "include/DRMExtractor.h" #include "include/FLACExtractor.h" -#include "include/FragmentedMP4Extractor.h" #include "include/HTTPBase.h" #include "include/MP3Extractor.h" #include "include/MPEG2PSExtractor.h" @@ -137,7 +136,6 @@ void DataSource::RegisterSniffer(SnifferFunc func) { // static void DataSource::RegisterDefaultSniffers() { RegisterSniffer(SniffMPEG4); - RegisterSniffer(SniffFragmentedMP4); RegisterSniffer(SniffMatroska); RegisterSniffer(SniffOgg); RegisterSniffer(SniffWAV); diff --git a/media/libstagefright/FragmentedMP4Extractor.cpp b/media/libstagefright/FragmentedMP4Extractor.cpp deleted file mode 100644 index 496828d..0000000 --- a/media/libstagefright/FragmentedMP4Extractor.cpp +++ /dev/null @@ -1,464 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache 
License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "FragmentedMP4Extractor" -#include - -#include "include/FragmentedMP4Extractor.h" -#include "include/SampleTable.h" -#include "include/ESDS.h" - -#include - -#include -#include -#include -#include - -#include // for property_get - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace android { - -class FragmentedMPEG4Source : public MediaSource { -public: - // Caller retains ownership of the Parser - FragmentedMPEG4Source(bool audio, - const sp &format, - const sp &parser, - const sp &extractor); - - virtual status_t start(MetaData *params = NULL); - virtual status_t stop(); - - virtual sp getFormat(); - - virtual status_t read( - MediaBuffer **buffer, const ReadOptions *options = NULL); - -protected: - virtual ~FragmentedMPEG4Source(); - -private: - Mutex mLock; - - sp mFormat; - sp mParser; - sp mExtractor; - bool mIsAudioTrack; - uint32_t mCurrentSampleIndex; - - bool mIsAVC; - size_t mNALLengthSize; - - bool mStarted; - - MediaBufferGroup *mGroup; - - bool mWantsNALFragments; - - uint8_t *mSrcBuffer; - - FragmentedMPEG4Source(const FragmentedMPEG4Source &); - FragmentedMPEG4Source &operator=(const FragmentedMPEG4Source &); -}; - - -FragmentedMP4Extractor::FragmentedMP4Extractor(const sp &source) - : mLooper(new ALooper), - mParser(new FragmentedMP4Parser()), - mDataSource(source), - mInitCheck(NO_INIT), - mFileMetaData(new MetaData) { - ALOGV("FragmentedMP4Extractor"); - mLooper->registerHandler(mParser); - mLooper->start(false /* runOnCallingThread */); - mParser->start(mDataSource); - - bool hasVideo = mParser->getFormat(false /* audio */, true /* synchronous */) != NULL; - bool hasAudio = mParser->getFormat(true /* audio */, true /* synchronous */) != NULL; - - ALOGV("number of tracks: %d", countTracks()); - - if (hasVideo) { - mFileMetaData->setCString( - kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_MPEG4); - } else if (hasAudio) { - mFileMetaData->setCString(kKeyMIMEType, "audio/mp4"); - } else { - ALOGE("no audio and no video, no idea what file type this is"); - } - // tracks are numbered such that video track is first, audio track is second - if (hasAudio && hasVideo) { - mTrackCount = 2; - mAudioTrackIndex = 1; - } else if (hasAudio) { - mTrackCount = 1; - mAudioTrackIndex = 0; - } else if (hasVideo) { - mTrackCount = 1; - mAudioTrackIndex = -1; - } else { - mTrackCount = 0; - mAudioTrackIndex = -1; - } -} - -FragmentedMP4Extractor::~FragmentedMP4Extractor() { - ALOGV("~FragmentedMP4Extractor"); - mLooper->stop(); -} - -uint32_t FragmentedMP4Extractor::flags() const { - return CAN_PAUSE | - (mParser->isSeekable() ? 
(CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_SEEK) : 0); -} - -sp FragmentedMP4Extractor::getMetaData() { - return mFileMetaData; -} - -size_t FragmentedMP4Extractor::countTracks() { - return mTrackCount; -} - - -sp FragmentedMP4Extractor::getTrackMetaData( - size_t index, uint32_t flags) { - if (index >= countTracks()) { - return NULL; - } - - sp msg = mParser->getFormat(index == mAudioTrackIndex, true /* synchronous */); - - if (msg == NULL) { - ALOGV("got null format for track %d", index); - return NULL; - } - - sp meta = new MetaData(); - convertMessageToMetaData(msg, meta); - return meta; -} - -static void MakeFourCCString(uint32_t x, char *s) { - s[0] = x >> 24; - s[1] = (x >> 16) & 0xff; - s[2] = (x >> 8) & 0xff; - s[3] = x & 0xff; - s[4] = '\0'; -} - -sp FragmentedMP4Extractor::getTrack(size_t index) { - if (index >= countTracks()) { - return NULL; - } - return new FragmentedMPEG4Source(index == mAudioTrackIndex, getTrackMetaData(index, 0), mParser, this); -} - - -//////////////////////////////////////////////////////////////////////////////// - -FragmentedMPEG4Source::FragmentedMPEG4Source( - bool audio, - const sp &format, - const sp &parser, - const sp &extractor) - : mFormat(format), - mParser(parser), - mExtractor(extractor), - mIsAudioTrack(audio), - mStarted(false), - mGroup(NULL), - mWantsNALFragments(false), - mSrcBuffer(NULL) { -} - -FragmentedMPEG4Source::~FragmentedMPEG4Source() { - if (mStarted) { - stop(); - } -} - -status_t FragmentedMPEG4Source::start(MetaData *params) { - Mutex::Autolock autoLock(mLock); - - CHECK(!mStarted); - - int32_t val; - if (params && params->findInt32(kKeyWantsNALFragments, &val) - && val != 0) { - mWantsNALFragments = true; - } else { - mWantsNALFragments = false; - } - ALOGV("caller wants NAL fragments: %s", mWantsNALFragments ? "yes" : "no"); - - mGroup = new MediaBufferGroup; - - // for video, make the buffer big enough for an extremely poorly compressed 1080p frame. - int32_t max_size = mIsAudioTrack ? 
65536 : 3110400; - - mGroup->add_buffer(new MediaBuffer(max_size)); - - mSrcBuffer = new uint8_t[max_size]; - - mStarted = true; - - return OK; -} - -status_t FragmentedMPEG4Source::stop() { - Mutex::Autolock autoLock(mLock); - - CHECK(mStarted); - - delete[] mSrcBuffer; - mSrcBuffer = NULL; - - delete mGroup; - mGroup = NULL; - - mStarted = false; - mCurrentSampleIndex = 0; - - return OK; -} - -sp FragmentedMPEG4Source::getFormat() { - Mutex::Autolock autoLock(mLock); - - return mFormat; -} - - -status_t FragmentedMPEG4Source::read( - MediaBuffer **out, const ReadOptions *options) { - int64_t seekTimeUs; - ReadOptions::SeekMode mode; - if (options && options->getSeekTo(&seekTimeUs, &mode)) { - mParser->seekTo(mIsAudioTrack, seekTimeUs); - } - MediaBuffer *buffer = NULL; - mGroup->acquire_buffer(&buffer); - sp parseBuffer; - - status_t ret = mParser->dequeueAccessUnit(mIsAudioTrack, &parseBuffer, true /* synchronous */); - if (ret != OK) { - buffer->release(); - ALOGV("returning %d", ret); - return ret; - } - sp meta = parseBuffer->meta(); - int64_t timeUs; - CHECK(meta->findInt64("timeUs", &timeUs)); - int32_t isSync; - if (meta->findInt32("is-sync-frame", &isSync) && isSync != 0) { - buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); - } - buffer->meta_data()->setInt64(kKeyTime, timeUs); - buffer->set_range(0, parseBuffer->size()); - memcpy(buffer->data(), parseBuffer->data(), parseBuffer->size()); - *out = buffer; - return OK; -} - - -static bool isCompatibleBrand(uint32_t fourcc) { - static const uint32_t kCompatibleBrands[] = { - FOURCC('i', 's', 'o', 'm'), - FOURCC('i', 's', 'o', '2'), - FOURCC('a', 'v', 'c', '1'), - FOURCC('3', 'g', 'p', '4'), - FOURCC('m', 'p', '4', '1'), - FOURCC('m', 'p', '4', '2'), - - // Won't promise that the following file types can be played. - // Just give these file types a chance. - FOURCC('q', 't', ' ', ' '), // Apple's QuickTime - FOURCC('M', 'S', 'N', 'V'), // Sony's PSP - - FOURCC('3', 'g', '2', 'a'), // 3GPP2 - FOURCC('3', 'g', '2', 'b'), - }; - - for (size_t i = 0; - i < sizeof(kCompatibleBrands) / sizeof(kCompatibleBrands[0]); - ++i) { - if (kCompatibleBrands[i] == fourcc) { - return true; - } - } - - return false; -} - -// Attempt to actually parse the 'ftyp' atom and determine if a suitable -// compatible brand is present. -// Also try to identify where this file's metadata ends -// (end of the 'moov' atom) and report it to the caller as part of -// the metadata. -static bool Sniff( - const sp &source, String8 *mimeType, float *confidence, - sp *meta) { - // We scan up to 128k bytes to identify this file as an MP4. - static const off64_t kMaxScanOffset = 128ll * 1024ll; - - off64_t offset = 0ll; - bool foundGoodFileType = false; - bool isFragmented = false; - off64_t moovAtomEndOffset = -1ll; - bool done = false; - - while (!done && offset < kMaxScanOffset) { - uint32_t hdr[2]; - if (source->readAt(offset, hdr, 8) < 8) { - return false; - } - - uint64_t chunkSize = ntohl(hdr[0]); - uint32_t chunkType = ntohl(hdr[1]); - off64_t chunkDataOffset = offset + 8; - - if (chunkSize == 1) { - if (source->readAt(offset + 8, &chunkSize, 8) < 8) { - return false; - } - - chunkSize = ntoh64(chunkSize); - chunkDataOffset += 8; - - if (chunkSize < 16) { - // The smallest valid chunk is 16 bytes long in this case. - return false; - } - } else if (chunkSize < 8) { - // The smallest valid chunk is 8 bytes long. 
- return false; - } - - off64_t chunkDataSize = offset + chunkSize - chunkDataOffset; - - char chunkstring[5]; - MakeFourCCString(chunkType, chunkstring); - ALOGV("saw chunk type %s, size %lld @ %lld", chunkstring, chunkSize, offset); - switch (chunkType) { - case FOURCC('f', 't', 'y', 'p'): - { - if (chunkDataSize < 8) { - return false; - } - - uint32_t numCompatibleBrands = (chunkDataSize - 8) / 4; - for (size_t i = 0; i < numCompatibleBrands + 2; ++i) { - if (i == 1) { - // Skip this index, it refers to the minorVersion, - // not a brand. - continue; - } - - uint32_t brand; - if (source->readAt( - chunkDataOffset + 4 * i, &brand, 4) < 4) { - return false; - } - - brand = ntohl(brand); - char brandstring[5]; - MakeFourCCString(brand, brandstring); - ALOGV("Brand: %s", brandstring); - - if (isCompatibleBrand(brand)) { - foundGoodFileType = true; - break; - } - } - - if (!foundGoodFileType) { - return false; - } - - break; - } - - case FOURCC('m', 'o', 'o', 'v'): - { - moovAtomEndOffset = offset + chunkSize; - break; - } - - case FOURCC('m', 'o', 'o', 'f'): - { - // this is kind of broken, since we might not actually find a - // moof box in the first 128k. - isFragmented = true; - done = true; - break; - } - - default: - break; - } - - offset += chunkSize; - } - - if (!foundGoodFileType || !isFragmented) { - return false; - } - - *mimeType = MEDIA_MIMETYPE_CONTAINER_MPEG4; - *confidence = 0.5f; // slightly more than MPEG4Extractor - - if (moovAtomEndOffset >= 0) { - *meta = new AMessage; - (*meta)->setInt64("meta-data-size", moovAtomEndOffset); - (*meta)->setInt32("fragmented", 1); // tell MediaExtractor what to instantiate - - ALOGV("found metadata size: %lld", moovAtomEndOffset); - } - - return true; -} - -// used by DataSource::RegisterDefaultSniffers -bool SniffFragmentedMP4( - const sp &source, String8 *mimeType, float *confidence, - sp *meta) { - ALOGV("SniffFragmentedMP4"); - char prop[PROPERTY_VALUE_MAX]; - if (property_get("media.stagefright.use-fragmp4", prop, NULL) - && (!strcmp(prop, "1") || !strcasecmp(prop, "true"))) { - return Sniff(source, mimeType, confidence, meta); - } - - return false; -} - -} // namespace android diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp index b18c916..9ab6611 100644 --- a/media/libstagefright/MediaExtractor.cpp +++ b/media/libstagefright/MediaExtractor.cpp @@ -21,7 +21,6 @@ #include "include/AMRExtractor.h" #include "include/MP3Extractor.h" #include "include/MPEG4Extractor.h" -#include "include/FragmentedMP4Extractor.h" #include "include/WAVExtractor.h" #include "include/OggExtractor.h" #include "include/MPEG2PSExtractor.h" @@ -94,12 +93,7 @@ sp MediaExtractor::Create( MediaExtractor *ret = NULL; if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG4) || !strcasecmp(mime, "audio/mp4")) { - int fragmented = 0; - if (meta != NULL && meta->findInt32("fragmented", &fragmented) && fragmented) { - ret = new FragmentedMP4Extractor(source); - } else { - ret = new MPEG4Extractor(source); - } + ret = new MPEG4Extractor(source); } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { ret = new MP3Extractor(source, meta); } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB) diff --git a/media/libstagefright/include/FragmentedMP4Extractor.h b/media/libstagefright/include/FragmentedMP4Extractor.h deleted file mode 100644 index 763cd3a..0000000 --- a/media/libstagefright/include/FragmentedMP4Extractor.h +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed 
under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef FRAGMENTED_MP4_EXTRACTOR_H_ - -#define FRAGMENTED_MP4_EXTRACTOR_H_ - -#include "include/FragmentedMP4Parser.h" - -#include -#include -#include - -namespace android { - -struct AMessage; -class DataSource; -class SampleTable; -class String8; - -class FragmentedMP4Extractor : public MediaExtractor { -public: - // Extractor assumes ownership of "source". - FragmentedMP4Extractor(const sp &source); - - virtual size_t countTracks(); - virtual sp getTrack(size_t index); - virtual sp getTrackMetaData(size_t index, uint32_t flags); - virtual sp getMetaData(); - virtual uint32_t flags() const; - -protected: - virtual ~FragmentedMP4Extractor(); - -private: - sp mLooper; - sp mParser; - sp mDataSource; - status_t mInitCheck; - size_t mAudioTrackIndex; - size_t mTrackCount; - - sp mFileMetaData; - - Vector mPath; - - FragmentedMP4Extractor(const FragmentedMP4Extractor &); - FragmentedMP4Extractor &operator=(const FragmentedMP4Extractor &); -}; - -bool SniffFragmentedMP4( - const sp &source, String8 *mimeType, float *confidence, - sp *); - -} // namespace android - -#endif // MPEG4_EXTRACTOR_H_ -- cgit v1.1 From 1a2952aee048ca7b1765e2bc09ebe9aeddaeafa3 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Thu, 14 Feb 2013 17:11:27 -0800 Subject: Refactoring: Rename SurfaceTextureClient to Surface Change-Id: I4e8a8b20914cb64edc37abe68233fbc9f2b5d830 --- cmds/stagefright/SimplePlayer.cpp | 8 ++++---- cmds/stagefright/codec.cpp | 3 ++- cmds/stagefright/stagefright.cpp | 4 ++-- cmds/stagefright/stream.cpp | 3 ++- include/media/mediarecorder.h | 2 +- include/media/stagefright/MediaCodec.h | 8 ++++---- include/media/stagefright/NativeWindowWrapper.h | 10 +++++----- include/media/stagefright/SurfaceMediaSource.h | 6 +++--- libvideoeditor/lvpp/NativeWindowRenderer.cpp | 6 +++--- libvideoeditor/lvpp/NativeWindowRenderer.h | 14 +++++++------- libvideoeditor/lvpp/PreviewPlayer.cpp | 4 ++-- media/libmedia/mediaplayer.cpp | 2 +- media/libmediaplayerservice/MediaPlayerService.cpp | 4 ++-- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 2 +- media/libstagefright/AwesomePlayer.cpp | 4 ++-- media/libstagefright/MediaCodec.cpp | 6 +++--- media/libstagefright/tests/SurfaceMediaSource_test.cpp | 18 +++++++++--------- .../wifi-display/sink/DirectRenderer.cpp | 3 ++- .../wifi-display/sink/TunnelRenderer.cpp | 2 +- media/libstagefright/wifi-display/source/Converter.cpp | 2 +- media/libstagefright/wifi-display/wfd.cpp | 2 +- services/camera/libcameraservice/Camera2Client.cpp | 5 ++--- services/camera/libcameraservice/CameraClient.cpp | 5 ++--- services/camera/libcameraservice/CameraClient.h | 2 +- services/camera/libcameraservice/CameraService.cpp | 1 - .../libcameraservice/camera2/CallbackProcessor.cpp | 4 ++-- .../camera/libcameraservice/camera2/JpegProcessor.cpp | 4 ++-- .../libcameraservice/camera2/StreamingProcessor.cpp | 4 ++-- .../camera/libcameraservice/camera2/ZslProcessor.cpp | 4 ++-- 29 files changed, 71 insertions(+), 71 deletions(-) diff --git 
a/cmds/stagefright/SimplePlayer.cpp b/cmds/stagefright/SimplePlayer.cpp index 93de112..5d2d721 100644 --- a/cmds/stagefright/SimplePlayer.cpp +++ b/cmds/stagefright/SimplePlayer.cpp @@ -20,7 +20,7 @@ #include "SimplePlayer.h" -#include +#include #include #include #include @@ -67,13 +67,13 @@ status_t SimplePlayer::setDataSource(const char *path) { status_t SimplePlayer::setSurface(const sp &bufferProducer) { sp msg = new AMessage(kWhatSetSurface, id()); - sp surfaceTextureClient; + sp surface; if (bufferProducer != NULL) { - surfaceTextureClient = new SurfaceTextureClient(bufferProducer); + surface = new Surface(bufferProducer); } msg->setObject( - "native-window", new NativeWindowWrapper(surfaceTextureClient)); + "native-window", new NativeWindowWrapper(surface)); sp response; return PostAndAwaitResponse(msg, &response); diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp index 723a6e5..fdfefdf 100644 --- a/cmds/stagefright/codec.cpp +++ b/cmds/stagefright/codec.cpp @@ -36,6 +36,7 @@ #include #include #include +#include #include static void usage(const char *me) { @@ -413,7 +414,7 @@ int main(int argc, char **argv) { looper->registerHandler(player); player->setDataSource(argv[0]); - player->setSurface(surface->getSurfaceTexture()); + player->setSurface(surface->getIGraphicBufferProducer()); player->start(); sleep(60); player->stop(); diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 2aae64d..5bdbfbb 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -53,7 +53,7 @@ #include -#include +#include #include using namespace android; @@ -941,7 +941,7 @@ int main(int argc, char **argv) { CHECK(useSurfaceTexAlloc); sp texture = new GLConsumer(0 /* tex */); - gSurface = new SurfaceTextureClient(texture->getBufferQueue()); + gSurface = new Surface(texture->getBufferQueue()); } CHECK_EQ((status_t)OK, diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp index af6afe0..d49ab4a 100644 --- a/cmds/stagefright/stream.cpp +++ b/cmds/stagefright/stream.cpp @@ -35,6 +35,7 @@ #include #include #include +#include #include #include @@ -373,7 +374,7 @@ int main(int argc, char **argv) { service->create(client, 0); if (player != NULL && player->setDataSource(source) == NO_ERROR) { - player->setVideoSurfaceTexture(surface->getSurfaceTexture()); + player->setVideoSurfaceTexture(surface->getISurfaceTexture()); player->start(); client->waitForEOS(); diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h index 2882c41..da6b507 100644 --- a/include/media/mediarecorder.h +++ b/include/media/mediarecorder.h @@ -32,7 +32,7 @@ class IMediaRecorder; class ICamera; class ICameraRecordingProxy; class IGraphicBufferProducer; -class SurfaceTextureClient; +class Surface; typedef void (*media_completion_f)(status_t status, void *cookie); diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h index 3f0d3b3..1002663 100644 --- a/include/media/stagefright/MediaCodec.h +++ b/include/media/stagefright/MediaCodec.h @@ -31,7 +31,7 @@ struct AMessage; struct AString; struct ICrypto; struct SoftwareRenderer; -struct SurfaceTextureClient; +struct Surface; struct MediaCodec : public AHandler { enum ConfigureFlags { @@ -52,7 +52,7 @@ struct MediaCodec : public AHandler { status_t configure( const sp &format, - const sp &nativeWindow, + const sp &nativeWindow, const sp &crypto, uint32_t flags); @@ -187,7 +187,7 @@ private: AString mComponentName; uint32_t mReplyID; uint32_t mFlags; 
- sp mNativeWindow; + sp mNativeWindow; SoftwareRenderer *mSoftRenderer; sp mOutputFormat; @@ -229,7 +229,7 @@ private: status_t queueCSDInputBuffer(size_t bufferIndex); status_t setNativeWindow( - const sp &surfaceTextureClient); + const sp &surface); void postActivityNotificationIfPossible(); diff --git a/include/media/stagefright/NativeWindowWrapper.h b/include/media/stagefright/NativeWindowWrapper.h index 97cc0ce..cfeec22 100644 --- a/include/media/stagefright/NativeWindowWrapper.h +++ b/include/media/stagefright/NativeWindowWrapper.h @@ -18,29 +18,29 @@ #define NATIVE_WINDOW_WRAPPER_H_ -#include +#include namespace android { -// SurfaceTextureClient derives from ANativeWindow which derives from multiple +// Surface derives from ANativeWindow which derives from multiple // base classes, in order to carry it in AMessages, we'll temporarily wrap it // into a NativeWindowWrapper. struct NativeWindowWrapper : RefBase { NativeWindowWrapper( - const sp &surfaceTextureClient) : + const sp &surfaceTextureClient) : mSurfaceTextureClient(surfaceTextureClient) { } sp getNativeWindow() const { return mSurfaceTextureClient; } - sp getSurfaceTextureClient() const { + sp getSurfaceTextureClient() const { return mSurfaceTextureClient; } private: - const sp mSurfaceTextureClient; + const sp mSurfaceTextureClient; DISALLOW_EVIL_CONSTRUCTORS(NativeWindowWrapper); }; diff --git a/include/media/stagefright/SurfaceMediaSource.h b/include/media/stagefright/SurfaceMediaSource.h index 609d84f..5f21da9 100644 --- a/include/media/stagefright/SurfaceMediaSource.h +++ b/include/media/stagefright/SurfaceMediaSource.h @@ -35,7 +35,7 @@ class GraphicBuffer; // ASSUMPTIONS // 1. SurfaceMediaSource is initialized with width*height which // can never change. However, deqeueue buffer does not currently -// enforce this as in BufferQueue, dequeue can be used by SurfaceTextureClient +// enforce this as in BufferQueue, dequeue can be used by Surface // which can modify the default width and heght. Also neither the width // nor height can be 0. // 2. setSynchronousMode is never used (basically no one should call @@ -122,7 +122,7 @@ public: protected: // Implementation of the BufferQueue::ConsumerListener interface. These - // calls are used to notify the SurfaceTextureClient of asynchronous events in the + // calls are used to notify the Surface of asynchronous events in the // BufferQueue. virtual void onFrameAvailable(); @@ -157,7 +157,7 @@ private: // mCurrentSlot is the buffer slot index of the buffer that is currently // being used by buffer consumer // (e.g. StageFrightRecorder in the case of SurfaceMediaSource or GLTexture - // in the case of SurfaceTextureClient). + // in the case of Surface). // It is initialized to INVALID_BUFFER_SLOT, // indicating that no buffer slot is currently bound to the texture. 
Note, // however, that a value of INVALID_BUFFER_SLOT does not necessarily mean diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.cpp b/libvideoeditor/lvpp/NativeWindowRenderer.cpp index 114f0f6..702900b 100755 --- a/libvideoeditor/lvpp/NativeWindowRenderer.cpp +++ b/libvideoeditor/lvpp/NativeWindowRenderer.cpp @@ -21,7 +21,7 @@ #include #include #include -#include +#include #include #include #include @@ -316,7 +316,7 @@ NativeWindowRenderer::~NativeWindowRenderer() { void NativeWindowRenderer::render(RenderInput* input) { sp ST = input->mST; - sp STC = input->mSTC; + sp STC = input->mSTC; if (input->mIsExternalBuffer) { queueExternalBuffer(STC.get(), input->mBuffer, @@ -569,7 +569,7 @@ RenderInput::RenderInput(NativeWindowRenderer* renderer, GLuint textureId) : mRenderer(renderer) , mTextureId(textureId) { mST = new GLConsumer(mTextureId); - mSTC = new SurfaceTextureClient(mST->getBufferQueue()); + mSTC = new Surface(mST->getBufferQueue()); native_window_connect(mSTC.get(), NATIVE_WINDOW_API_MEDIA); } diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.h b/libvideoeditor/lvpp/NativeWindowRenderer.h index b0623ba..26b4cba 100755 --- a/libvideoeditor/lvpp/NativeWindowRenderer.h +++ b/libvideoeditor/lvpp/NativeWindowRenderer.h @@ -37,16 +37,16 @@ // we only expect that happens briefly when one clip is about to finish // and the next clip is about to start. // -// We allocate a SurfaceTextureClient for each RenderInput and the user can use +// We allocate a Surface for each RenderInput and the user can use // the getTargetWindow() function to get the corresponding ANativeWindow -// for that SurfaceTextureClient. The intention is that the user can pass that +// for that Surface. The intention is that the user can pass that // ANativeWindow to OMXCodec::Create() so the codec can decode directly // to buffers provided by the texture. namespace android { class GLConsumer; -class SurfaceTextureClient; +class Surface; class RenderInput; class NativeWindowRenderer { @@ -110,7 +110,7 @@ private: // destination aspect ratio. GLfloat mPositionCoordinates[8]; - // We use a different GL id for each SurfaceTextureClient. + // We use a different GL id for each Surface. GLuint mNextTextureId; // Number of existing RenderInputs, just for debugging. @@ -146,7 +146,7 @@ private: class RenderInput { public: - // Returns the ANativeWindow corresponds to the SurfaceTextureClient. + // Returns the ANativeWindow corresponds to the Surface. ANativeWindow* getTargetWindow(); // Updates video frame size from the MediaSource's metadata. Specifically @@ -156,7 +156,7 @@ public: // Renders the buffer with the given video effect and rending mode. // The video effets are defined in VideoEditorTools.h // Set isExternalBuffer to true only when the buffer given is not - // provided by the SurfaceTextureClient. + // provided by the Surface. 
void render(MediaBuffer *buffer, uint32_t videoEffect, M4xVSS_MediaRendering renderingMode, bool isExternalBuffer); private: @@ -165,7 +165,7 @@ private: NativeWindowRenderer* mRenderer; GLuint mTextureId; sp mST; - sp mSTC; + sp mSTC; int mWidth, mHeight; // These are only valid during render() calls diff --git a/libvideoeditor/lvpp/PreviewPlayer.cpp b/libvideoeditor/lvpp/PreviewPlayer.cpp index 754c5a9..2bd9f84 100755 --- a/libvideoeditor/lvpp/PreviewPlayer.cpp +++ b/libvideoeditor/lvpp/PreviewPlayer.cpp @@ -32,7 +32,7 @@ #include #include #include -#include +#include #include "VideoEditorPreviewController.h" #include "DummyAudioSource.h" @@ -1780,7 +1780,7 @@ void PreviewPlayer::setSurfaceTexture(const sp &bufferPr mSurface.clear(); if (bufferProducer != NULL) { - setNativeWindow_l(new SurfaceTextureClient(bufferProducer)); + setNativeWindow_l(new Surface(bufferProducer)); } } diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index 14602bf..3defec3 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -27,7 +27,7 @@ #include #include -#include +#include #include #include diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index f932131..16f1317 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -38,7 +38,7 @@ #include #include #include -#include +#include #include // for status_t #include #include @@ -731,7 +731,7 @@ status_t MediaPlayerService::Client::setVideoSurfaceTexture( sp anw; if (bufferProducer != NULL) { - anw = new SurfaceTextureClient(bufferProducer); + anw = new Surface(bufferProducer); status_t err = native_window_api_connect(anw.get(), NATIVE_WINDOW_API_MEDIA); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index 30eb4b9..2ba6c22 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -224,7 +224,7 @@ void NuPlayer::setVideoSurfaceTextureAsync( msg->setObject( "native-window", new NativeWindowWrapper( - new SurfaceTextureClient(bufferProducer))); + new Surface(bufferProducer))); } msg->post(); diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 0f4d866..bd28118 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -49,7 +49,7 @@ #include #include -#include +#include #include @@ -1183,7 +1183,7 @@ status_t AwesomePlayer::setSurfaceTexture(const sp &buff status_t err; if (bufferProducer != NULL) { - err = setNativeWindow_l(new SurfaceTextureClient(bufferProducer)); + err = setNativeWindow_l(new Surface(bufferProducer)); } else { err = setNativeWindow_l(NULL); } diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index 77aceb7..83be0fd 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -22,7 +22,7 @@ #include "include/SoftwareRenderer.h" -#include +#include #include #include #include @@ -132,7 +132,7 @@ status_t MediaCodec::init(const char *name, bool nameIsType, bool encoder) { status_t MediaCodec::configure( const sp &format, - const sp &nativeWindow, + const sp &nativeWindow, const sp &crypto, uint32_t flags) { sp msg = new AMessage(kWhatConfigure, id()); @@ -1526,7 +1526,7 @@ ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) { } status_t MediaCodec::setNativeWindow( - const 
sp &surfaceTextureClient) { + const sp &surfaceTextureClient) { status_t err; if (mNativeWindow != NULL) { diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp index 6a98509..a5459fe 100644 --- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp +++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp @@ -27,7 +27,7 @@ #include #include -#include +#include #include #include #include @@ -109,7 +109,7 @@ protected: ALOGV("No actual display. Choosing EGLSurface based on SurfaceMediaSource"); sp sms = (new SurfaceMediaSource( getSurfaceWidth(), getSurfaceHeight()))->getBufferQueue(); - sp stc = new SurfaceTextureClient(sms); + sp stc = new Surface(sms); sp window = stc; mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig, @@ -361,7 +361,7 @@ protected: mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight); // Manual cast is required to avoid constructor ambiguity - mSTC = new SurfaceTextureClient(static_cast >( mSMS->getBufferQueue())); + mSTC = new Surface(static_cast >( mSMS->getBufferQueue())); mANW = mSTC; } @@ -375,7 +375,7 @@ protected: const int mYuvTexHeight; sp mSMS; - sp mSTC; + sp mSTC; sp mANW; }; @@ -396,7 +396,7 @@ protected: ALOGV("SMS-GLTest::SetUp()"); android::ProcessState::self()->startThreadPool(); mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight); - mSTC = new SurfaceTextureClient(static_cast >( mSMS->getBufferQueue())); + mSTC = new Surface(static_cast >( mSMS->getBufferQueue())); mANW = mSTC; // Doing the setup related to the GL Side @@ -416,7 +416,7 @@ protected: const int mYuvTexHeight; sp mSMS; - sp mSTC; + sp mSTC; sp mANW; }; @@ -483,7 +483,7 @@ sp SurfaceMediaSourceGLTest::setUpMediaRecorder(int fd, int video // query the mediarecorder for a surfacemeidasource and create an egl surface with that void SurfaceMediaSourceGLTest::setUpEGLSurfaceFromMediaRecorder(sp& mr) { sp iST = mr->querySurfaceMediaSourceFromMediaServer(); - mSTC = new SurfaceTextureClient(iST); + mSTC = new Surface(iST); mANW = mSTC; if (mEglSurface != EGL_NO_SURFACE) { @@ -750,7 +750,7 @@ TEST_F(SurfaceMediaSourceTest, DISABLED_EncodingFromCpuYV12BufferNpotWriteMediaS // get the reference to the surfacemediasource living in // mediaserver that is created by stagefrightrecorder sp iST = mr->querySurfaceMediaSourceFromMediaServer(); - mSTC = new SurfaceTextureClient(iST); + mSTC = new Surface(iST); mANW = mSTC; ASSERT_EQ(NO_ERROR, native_window_api_connect(mANW.get(), NATIVE_WINDOW_API_CPU)); ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(), @@ -781,7 +781,7 @@ TEST_F(SurfaceMediaSourceGLTest, ChooseAndroidRecordableEGLConfigDummyWriter) { ALOGV("Verify creating a surface w/ right config + dummy writer*********"); mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight); - mSTC = new SurfaceTextureClient(static_cast >( mSMS->getBufferQueue())); + mSTC = new Surface(static_cast >( mSMS->getBufferQueue())); mANW = mSTC; DummyRecorder writer(mSMS); diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp index d7f169f..70369bb 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp @@ -24,6 +24,7 @@ #include "ATSParser.h" #include +#include #include #include #include @@ -279,7 +280,7 @@ void DirectRenderer::dequeueAccessUnits() { err = mVideoDecoder->configure( videoFormat, mSurfaceTex == NULL - ? 
NULL : new SurfaceTextureClient(mSurfaceTex), + ? NULL : new Surface(mSurfaceTex), NULL /* crypto */, 0 /* flags */); diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp index 04dbd7b..75f9d73 100644 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp @@ -376,7 +376,7 @@ void TunnelRenderer::initPlayer() { CHECK_EQ(mPlayer->setDataSource(mStreamSource), (status_t)OK); mPlayer->setVideoSurfaceTexture( - mSurfaceTex != NULL ? mSurfaceTex : mSurface->getSurfaceTexture()); + mSurfaceTex != NULL ? mSurfaceTex : mSurface->getIGraphicBufferProducer()); mPlayer->start(); } diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index 376b0df..2861aa9 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -23,7 +23,7 @@ #include "MediaPuller.h" #include -#include +#include #include #include #include diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index 21d661e..3f4216a 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -321,7 +321,7 @@ int main(int argc, char **argv) { sp looper = new ALooper; sp sink = new WifiDisplaySink( - session, surface->getSurfaceTexture()); + session, surface->getIGraphicBufferProducer()); looper->registerHandler(sink); if (connectToPort >= 0) { diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index 70bf0ac..b9feaf8 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -22,7 +22,6 @@ #include #include -#include #include #include "camera2/Parameters.h" #include "Camera2Client.h" @@ -489,7 +488,7 @@ status_t Camera2Client::setPreviewDisplay( sp binder; sp window; if (surface != 0) { - binder = surface->asBinder(); + binder = surface->getISurfaceTexture()->asBinder(); window = surface; } @@ -508,7 +507,7 @@ status_t Camera2Client::setPreviewTexture( sp window; if (bufferProducer != 0) { binder = bufferProducer->asBinder(); - window = new SurfaceTextureClient(bufferProducer); + window = new Surface(bufferProducer); } return setPreviewWindowL(binder, window); } diff --git a/services/camera/libcameraservice/CameraClient.cpp b/services/camera/libcameraservice/CameraClient.cpp index f9cee0d..5f03a1c 100644 --- a/services/camera/libcameraservice/CameraClient.cpp +++ b/services/camera/libcameraservice/CameraClient.cpp @@ -18,7 +18,6 @@ //#define LOG_NDEBUG 0 #include -#include #include #include "CameraClient.h" @@ -302,7 +301,7 @@ status_t CameraClient::setPreviewWindow(const sp& binder, status_t CameraClient::setPreviewDisplay(const sp& surface) { LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid()); - sp binder(surface != 0 ? surface->asBinder() : 0); + sp binder(surface != 0 ? 
surface->getISurfaceTexture()->asBinder() : 0); sp window(surface); return setPreviewWindow(binder, window); } @@ -317,7 +316,7 @@ status_t CameraClient::setPreviewTexture( sp window; if (bufferProducer != 0) { binder = bufferProducer->asBinder(); - window = new SurfaceTextureClient(bufferProducer); + window = new Surface(bufferProducer); } return setPreviewWindow(binder, window); } diff --git a/services/camera/libcameraservice/CameraClient.h b/services/camera/libcameraservice/CameraClient.h index 7da3da7..74829ce 100644 --- a/services/camera/libcameraservice/CameraClient.h +++ b/services/camera/libcameraservice/CameraClient.h @@ -124,7 +124,7 @@ private: // Ensures atomicity among the public methods mutable Mutex mLock; - // This is a binder of Surface or SurfaceTextureClient. + // This is a binder of Surface or Surface. sp mSurface; sp mPreviewWindow; diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 124d24d..717e159 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -28,7 +28,6 @@ #include #include #include -#include #include #include #include diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp index 307cfab..c4055e0 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp @@ -22,7 +22,7 @@ #include #include "CallbackProcessor.h" -#include +#include #include "../Camera2Device.h" #include "../Camera2Client.h" @@ -65,7 +65,7 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { mCallbackConsumer = new CpuConsumer(kCallbackHeapCount); mCallbackConsumer->setFrameAvailableListener(this); mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer")); - mCallbackWindow = new SurfaceTextureClient( + mCallbackWindow = new Surface( mCallbackConsumer->getProducerInterface()); } diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.cpp b/services/camera/libcameraservice/camera2/JpegProcessor.cpp index 6280f83..1ec5694 100644 --- a/services/camera/libcameraservice/camera2/JpegProcessor.cpp +++ b/services/camera/libcameraservice/camera2/JpegProcessor.cpp @@ -26,7 +26,7 @@ #include #include "JpegProcessor.h" -#include +#include #include "../Camera2Device.h" #include "../Camera2Client.h" @@ -82,7 +82,7 @@ status_t JpegProcessor::updateStream(const Parameters ¶ms) { mCaptureConsumer = new CpuConsumer(1); mCaptureConsumer->setFrameAvailableListener(this); mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer")); - mCaptureWindow = new SurfaceTextureClient( + mCaptureWindow = new Surface( mCaptureConsumer->getProducerInterface()); // Create memory for API consumption mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0, diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp index 6ea27b2..a0d1093 100644 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp @@ -20,7 +20,7 @@ #include #include -#include +#include #include #include "StreamingProcessor.h" @@ -284,7 +284,7 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters ¶ms) { true); mRecordingConsumer->setFrameAvailableListener(this); 
mRecordingConsumer->setName(String8("Camera2-RecordingConsumer")); - mRecordingWindow = new SurfaceTextureClient( + mRecordingWindow = new Surface( mRecordingConsumer->getProducerInterface()); // Allocate memory later, since we don't know buffer size until receipt } diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp index 9584028..900c099 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp @@ -29,7 +29,7 @@ #include #include "ZslProcessor.h" -#include +#include #include "../Camera2Device.h" #include "../Camera2Client.h" @@ -124,7 +124,7 @@ status_t ZslProcessor::updateStream(const Parameters ¶ms) { true); mZslConsumer->setFrameAvailableListener(this); mZslConsumer->setName(String8("Camera2Client::ZslConsumer")); - mZslWindow = new SurfaceTextureClient( + mZslWindow = new Surface( mZslConsumer->getProducerInterface()); } -- cgit v1.1 From 32584a7d672864b20ab8b83a3cb23c1858e908b7 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 13 Feb 2013 14:46:45 -0800 Subject: Temporary additional logging to investigate bug The bug appears related to continuing to use an invalid buffer provider in fast mixer after track destruction, so focus the added logs in that area. Also includes a bug fix: was calling log in an unsafe place near Threads.cpp AudioFlinger::PlaybackThread::createTrack_l line 1250. Details: - include caller pid or client pid where appropriate - increase log buffer size - log mFastIndex when AudioMixer sees an invalid bufferProvider. - log both potentially modified and actually modified tracks in FastMixer. - fix benign bug where sq->end() was called more than once. - log StateQueue push() call and return. - increase StateQueue size from 4 to 8 entries - log mixer->enable(), bufferProvider, and currentTrackMask - log buffer provider addresses - increase fast mixer log buffer again - check logf format vs. argument list compatibility - add logging to AudioMixer - add checking of magic field in AudioMixer to detect overwrites - add bool AudioMixer::enabled() - increase log buffer sizes yet again - enable assertion checking without ALOGV - improve a few log messages - check for corruption in more places - log in all the process hooks - add new mixer APIs so we can check for corruption of mixer state - fix a build warning Bug: 6490974 Change-Id: Ib0c4a73dcf606ef9bd898313b3b40ef61ab42f51 --- include/media/nbaio/NBLog.h | 4 +- services/audioflinger/AudioFlinger.h | 2 +- services/audioflinger/AudioMixer.cpp | 72 ++++++++++++++++++++++++++-- services/audioflinger/AudioMixer.h | 25 +++++++++- services/audioflinger/FastMixer.cpp | 93 ++++++++++++++++++++++++++++++++++-- services/audioflinger/StateQueue.h | 2 +- services/audioflinger/Threads.cpp | 30 +++++++++--- services/audioflinger/Threads.h | 4 +- services/audioflinger/Tracks.cpp | 14 ++++-- 9 files changed, 220 insertions(+), 26 deletions(-) diff --git a/include/media/nbaio/NBLog.h b/include/media/nbaio/NBLog.h index 8fc417f..107ba66 100644 --- a/include/media/nbaio/NBLog.h +++ b/include/media/nbaio/NBLog.h @@ -115,7 +115,7 @@ public: virtual ~Writer() { } virtual void log(const char *string); - virtual void logf(const char *fmt, ...); + virtual void logf(const char *fmt, ...) 
__attribute__ ((format (printf, 2, 3))); virtual void logvf(const char *fmt, va_list ap); virtual void logTimestamp(); virtual void logTimestamp(const struct timespec& ts); @@ -149,7 +149,7 @@ public: LockedWriter(size_t size, void *shared); virtual void log(const char *string); - virtual void logf(const char *fmt, ...); + virtual void logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3))); virtual void logvf(const char *fmt, va_list ap); virtual void logTimestamp(); virtual void logTimestamp(const struct timespec& ts); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index c3f08f6..a25fb80 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -227,7 +227,7 @@ public: sp newWriter_l(size_t size, const char *name); void unregisterWriter(const sp& writer); private: - static const size_t kLogMemorySize = 10 * 1024; + static const size_t kLogMemorySize = 50 * 1024; sp mLogMemoryDealer; // == 0 when NBLog is disabled public: diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 08325ad..2d7894d 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -16,7 +16,7 @@ */ #define LOG_TAG "AudioMixer" -//#define LOG_NDEBUG 0 +#define LOG_NDEBUG 0 #include #include @@ -25,6 +25,8 @@ #include #include +#undef ALOGV +#define ALOGV(a...) do { } while (0) #include #include @@ -98,7 +100,7 @@ effect_descriptor_t AudioMixer::dwnmFxDesc; AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTracks) : mTrackNames(0), mConfiguredNames((maxNumTracks >= 32 ? 0 : 1 << maxNumTracks) - 1), - mSampleRate(sampleRate) + mSampleRate(sampleRate), mLog(&mDummyLog) { // AudioMixer is not yet capable of multi-channel beyond stereo COMPILE_TIME_ASSERT_FUNCTION_SCOPE(2 == MAX_NUM_CHANNELS); @@ -122,6 +124,7 @@ AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTr mState.hook = process__nop; mState.outputTemp = NULL; mState.resampleTemp = NULL; + mState.mLog = &mDummyLog; // mState.reserved // FIXME Most of the following initialization is probably redundant since @@ -131,6 +134,7 @@ AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTr for (unsigned i=0 ; i < MAX_NUM_TRACKS ; i++) { t->resampler = NULL; t->downmixerBufferProvider = NULL; + t->magic = track_t::kMagic; t++; } @@ -169,6 +173,12 @@ AudioMixer::~AudioMixer() delete [] mState.resampleTemp; } +void AudioMixer::setLog(NBLog::Writer *log) +{ + mLog = log; + mState.mLog = log; +} + int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) { uint32_t names = (~mTrackNames) & mConfiguredNames; @@ -209,9 +219,12 @@ int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) t->mainBuffer = NULL; t->auxBuffer = NULL; t->downmixerBufferProvider = NULL; + t->fastIndex = -1; + // t->magic unchanged status_t status = initTrackDownmix(&mState.tracks[n], n, channelMask); if (status == OK) { + mLog->logf("getTrackName %d", n); return TRACK0 + n; } ALOGE("AudioMixer::getTrackName(0x%x) failed, error preparing track for downmix", @@ -366,9 +379,11 @@ void AudioMixer::deleteTrackName(int name) { ALOGV("AudioMixer::deleteTrackName(%d)", name); name -= TRACK0; + mLog->logf("deleteTrackName %d", name); ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); ALOGV("deleteTrackName(%d)", name); track_t& track(mState.tracks[ name ]); + track.checkMagic(); if (track.enabled) { 
track.enabled = false; invalidateState(1<logf("enable %d", name); track.enabled = true; ALOGV("enable(%d)", name); invalidateState(1 << name); @@ -400,19 +417,36 @@ void AudioMixer::disable(int name) name -= TRACK0; ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); track_t& track = mState.tracks[name]; + track.checkMagic(); if (track.enabled) { + mLog->logf("disable %d", name); track.enabled = false; ALOGV("disable(%d)", name); invalidateState(1 << name); } } +bool AudioMixer::enabled(int name) +{ + name -= TRACK0; + ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); + track_t& track = mState.tracks[name]; + track.checkMagic(); +#if 0 + // can't do this because mState.enabledTracks is updated lazily + ALOG_ASSERT(track.enabled == ((mState.enabledTracks & (1 << name)) != 0)); +#endif + + return track.enabled; +} + void AudioMixer::setParameter(int name, int target, int param, void *value) { name -= TRACK0; ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); track_t& track = mState.tracks[name]; + track.checkMagic(); int valueInt = (int)value; int32_t *valueBuf = (int32_t *)value; @@ -455,6 +489,9 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) // for a specific track? or per mixer? /* case DOWNMIX_TYPE: break */ + case FAST_INDEX: + track.fastIndex = valueInt; + break; default: LOG_FATAL("bad param"); } @@ -540,6 +577,7 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) bool AudioMixer::track_t::setResampler(uint32_t value, uint32_t devSampleRate) { + checkMagic(); if (value != devSampleRate || resampler != NULL) { if (sampleRate != value) { sampleRate = value; @@ -572,6 +610,7 @@ bool AudioMixer::track_t::setResampler(uint32_t value, uint32_t devSampleRate) inline void AudioMixer::track_t::adjustVolumeRamp(bool aux) { + checkMagic(); for (uint32_t i=0 ; i0) && (((prevVolume[i]+volumeInc[i])>>16) >= volume[i])) || ((volumeInc[i]<0) && (((prevVolume[i]+volumeInc[i])>>16) <= volume[i]))) { @@ -600,8 +639,10 @@ size_t AudioMixer::getUnreleasedFrames(int name) const void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider) { name -= TRACK0; + mLog->logf("set bp %d=%p", name, bufferProvider); ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); + mState.tracks[name].checkMagic(); if (mState.tracks[name].downmixerBufferProvider != NULL) { // update required? if (mState.tracks[name].downmixerBufferProvider->mTrackBufferProvider != bufferProvider) { @@ -619,10 +660,27 @@ void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider } } +AudioBufferProvider* AudioMixer::getBufferProvider(int name) +{ + name -= TRACK0; + ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); + mState.tracks[name].checkMagic(); + return mState.tracks[name].bufferProvider; +} +int AudioMixer::getFastIndex(int name) +{ + name -= TRACK0; + ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); + mState.tracks[name].checkMagic(); + return mState.tracks[name].fastIndex; +} void AudioMixer::process(int64_t pts) { + if (mState.needsChanged) { + mLog->logf("process needs=%#x", mState.needsChanged); + } mState.hook(&mState, pts); } @@ -647,6 +705,7 @@ void AudioMixer::process__validate(state_t* state, int64_t pts) } state->enabledTracks &= ~disabled; state->enabledTracks |= enabled; + state->mLog->logf("process_validate ena=%#x", state->enabledTracks); // compute everything we need... 
int countActiveTracks = 0; @@ -1058,6 +1117,7 @@ void AudioMixer::track__16BitsMono(track_t* t, int32_t* out, size_t frameCount, void AudioMixer::process__nop(state_t* state, int64_t pts) { uint32_t e0 = state->enabledTracks; + state->mLog->logf("process_nop ena=%#x", e0); size_t bufSize = state->frameCount * sizeof(int16_t) * MAX_NUM_CHANNELS; while (e0) { // process by group of tracks with same output buffer to @@ -1103,6 +1163,7 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) // acquire each track's buffer uint32_t enabledTracks = state->enabledTracks; + state->mLog->logf("process_gNR ena=%#x", enabledTracks); uint32_t e0 = enabledTracks; while (e0) { const int i = 31 - __builtin_clz(e0); @@ -1111,8 +1172,8 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) t.buffer.frameCount = state->frameCount; int valid = t.bufferProvider->getValid(); if (valid != AudioBufferProvider::kValid) { - ALOGE("invalid bufferProvider=%p name=%d frameCount=%d valid=%#x enabledTracks=%#x", - t.bufferProvider, i, t.buffer.frameCount, valid, enabledTracks); + ALOGE("invalid bufferProvider=%p name=%d fastIndex=%d frameCount=%d valid=%#x enabledTracks=%#x", + t.bufferProvider, i, t.fastIndex, t.buffer.frameCount, valid, enabledTracks); // expect to crash } t.bufferProvider->getNextBuffer(&t.buffer, pts); @@ -1211,6 +1272,7 @@ void AudioMixer::process__genericResampling(state_t* state, int64_t pts) size_t numFrames = state->frameCount; uint32_t e0 = state->enabledTracks; + state->mLog->logf("process_gR ena=%#x", e0); while (e0) { // process by group of tracks with same output buffer // to optimize cache use @@ -1275,6 +1337,7 @@ void AudioMixer::process__genericResampling(state_t* state, int64_t pts) void AudioMixer::process__OneTrack16BitsStereoNoResampling(state_t* state, int64_t pts) { + state->mLog->logf("process_1TSNR ena=%#x", state->enabledTracks); // This method is only called when state->enabledTracks has exactly // one bit set. The asserts below would verify this, but are commented out // since the whole point of this method is to optimize performance. @@ -1344,6 +1407,7 @@ void AudioMixer::process__TwoTracks16BitsStereoNoResampling(state_t* state, { int i; uint32_t en = state->enabledTracks; + state->mLog->logf("process_2TSNR ena=%#x", en); i = 31 - __builtin_clz(en); const track_t& t0 = state->tracks[i]; diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index fd21fda..2d00bf5 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -28,6 +28,7 @@ #include #include +#include namespace android { @@ -76,6 +77,7 @@ public: MAIN_BUFFER = 0x4002, AUX_BUFFER = 0x4003, DOWNMIX_TYPE = 0X4004, + FAST_INDEX = 0x4005, // for debugging only // for target RESAMPLE SAMPLE_RATE = 0x4100, // Configure sample rate conversion on this track name; // parameter 'value' is the new sample rate in Hz. 
@@ -106,13 +108,17 @@ public: // Enable or disable an allocated track by name void enable(int name); void disable(int name); + bool enabled(int name); void setParameter(int name, int target, int param, void *value); void setBufferProvider(int name, AudioBufferProvider* bufferProvider); + AudioBufferProvider* getBufferProvider(int name); void process(int64_t pts); uint32_t trackNames() const { return mTrackNames; } + uint32_t enabledTrackNames() const { return mState.enabledTracks; } + int getFastIndex(int name); size_t getUnreleasedFrames(int name) const; @@ -200,7 +206,10 @@ private: int32_t sessionId; - int32_t padding[2]; + int32_t fastIndex; + int32_t magic; + static const int kMagic = 0x54637281; + //int32_t padding[1]; // 16-byte boundary @@ -210,6 +219,12 @@ private: void adjustVolumeRamp(bool aux); size_t getUnreleasedFrames() const { return resampler != NULL ? resampler->getUnreleasedFrames() : 0; }; + void checkMagic() { + if (magic != kMagic) { + ALOGE("magic=%#x fastIndex=%d", magic, fastIndex); + } + } + }; // pad to 32-bytes to fill cache line @@ -220,7 +235,8 @@ private: void (*hook)(state_t* state, int64_t pts); // one of process__*, never NULL int32_t *outputTemp; int32_t *resampleTemp; - int32_t reserved[2]; + NBLog::Writer* mLog; + int32_t reserved[1]; // FIXME allocate dynamically to save some memory when maxNumTracks < MAX_NUM_TRACKS track_t tracks[MAX_NUM_TRACKS]; __attribute__((aligned(32))); }; @@ -247,6 +263,11 @@ private: const uint32_t mSampleRate; + NBLog::Writer* mLog; + NBLog::Writer mDummyLog; +public: + void setLog(NBLog::Writer* log); +private: state_t mState __attribute__((aligned(32))); // effect descriptor for the downmixer used by the mixer diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 80e37ca..75c3c41 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -21,13 +21,15 @@ // #define LOG_TAG "FastMixer" -//#define LOG_NDEBUG 0 +#define LOG_NDEBUG 0 #define ATRACE_TAG ATRACE_TAG_AUDIO #include #include #include +#undef ALOGV +#define ALOGV(a...) do { } while (0) #include #include #ifdef FAST_MIXER_STATISTICS @@ -93,6 +95,8 @@ bool FastMixer::threadLoop() uint32_t warmupCycles = 0; // counter of number of loop cycles required to warmup NBAIO_Sink* teeSink = NULL; // if non-NULL, then duplicate write() to this non-blocking sink NBLog::Writer dummyLogWriter, *logWriter = &dummyLogWriter; + bool myEnabled[FastMixerState::kMaxFastTracks]; + memset(myEnabled, 0, sizeof(myEnabled)); for (;;) { @@ -120,12 +124,16 @@ bool FastMixer::threadLoop() FastMixerState::Command command = next->mCommand; if (next != current) { + logWriter->logTimestamp(); logWriter->log("next != current"); // As soon as possible of learning of a new dump area, start using it dumpState = next->mDumpState != NULL ? next->mDumpState : &dummyDumpState; teeSink = next->mTeeSink; logWriter = next->mNBLogWriter != NULL ? next->mNBLogWriter : &dummyLogWriter; + if (mixer != NULL) { + mixer->setLog(logWriter); + } // We want to always have a valid reference to the previous (non-idle) state. // However, the state queue only guarantees access to current and previous states. 
@@ -300,13 +308,21 @@ bool FastMixer::threadLoop() addedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + logWriter->logf("bp %d i=%d %p", __LINE__, i, bufferProvider); ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("added invalid %#x", i); + } if (mixer != NULL) { // calling getTrackName with default channel mask and a random invalid // sessionId (no effects here) name = mixer->getTrackName(AUDIO_CHANNEL_OUT_STEREO, -555); ALOG_ASSERT(name >= 0); fastTrackNames[i] = name; + mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, + (void *) i); mixer->setBufferProvider(name, bufferProvider); mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, (void *) mixBuffer); @@ -317,27 +333,41 @@ bool FastMixer::threadLoop() } mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK, (void *) fastTrack->mChannelMask); + if (!mixer->enabled(name)) { + logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); + } mixer->enable(name); + myEnabled[i] = true; } generations[i] = fastTrack->mGeneration; } - // finally process modified tracks; these use the same slot + // finally process (potentially) modified tracks; these use the same slot // but may have a different buffer provider or volume provider unsigned modifiedTracks = currentTrackMask & previousTrackMask; if (modifiedTracks) { - logWriter->logf("modified %#x", modifiedTracks); + logWriter->logf("pot. mod. %#x", modifiedTracks); } + unsigned actuallyModifiedTracks = 0; while (modifiedTracks != 0) { i = __builtin_ctz(modifiedTracks); modifiedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; if (fastTrack->mGeneration != generations[i]) { + actuallyModifiedTracks |= 1 << i; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + logWriter->logf("bp %d i=%d %p", __LINE__, i, bufferProvider); ALOG_ASSERT(bufferProvider != NULL); + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("modified invalid %#x", i); + } if (mixer != NULL) { name = fastTrackNames[i]; ALOG_ASSERT(name >= 0); + mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, + (void *) i); mixer->setBufferProvider(name, bufferProvider); if (fastTrack->mVolumeProvider == NULL) { mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, @@ -360,6 +390,9 @@ bool FastMixer::threadLoop() generations[i] = fastTrack->mGeneration; } } + if (actuallyModifiedTracks) { + logWriter->logf("act. mod. 
%#x", actuallyModifiedTracks); + } fastTracksGen = current->mFastTracksGen; @@ -377,6 +410,7 @@ bool FastMixer::threadLoop() ALOG_ASSERT(mixBuffer != NULL); // for each track, update volume and check for underrun unsigned currentTrackMask = current->mTrackMask; + logWriter->logf("ctm %#x", currentTrackMask); while (currentTrackMask != 0) { i = __builtin_ctz(currentTrackMask); currentTrackMask &= ~(1 << i); @@ -410,25 +444,76 @@ bool FastMixer::threadLoop() underruns.mBitFields.mEmpty++; underruns.mBitFields.mMostRecent = UNDERRUN_EMPTY; mixer->disable(name); + myEnabled[i] = false; } else { // allow mixing partial buffer underruns.mBitFields.mPartial++; underruns.mBitFields.mMostRecent = UNDERRUN_PARTIAL; + if (!mixer->enabled(name)) { + logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); + } mixer->enable(name); + myEnabled[i] = true; } } else { underruns.mBitFields.mFull++; underruns.mBitFields.mMostRecent = UNDERRUN_FULL; + if (!mixer->enabled(name)) { + logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); + } mixer->enable(name); + myEnabled[i] = true; } ftDump->mUnderruns = underruns; ftDump->mFramesReady = framesReady; + AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; + if (bufferProvider == NULL || + bufferProvider->getValid() != AudioBufferProvider::kValid) { + logWriter->logTimestamp(); + logWriter->logf("mixing invalid %#x", i); + } } int64_t pts; if (outputSink == NULL || (OK != outputSink->getNextWriteTimestamp(&pts))) pts = AudioBufferProvider::kInvalidPTS; + // validate that mixer state is correct + currentTrackMask = current->mTrackMask; + unsigned expectedMixerEnabled = 0; + while (currentTrackMask != 0) { + i = __builtin_ctz(currentTrackMask); + currentTrackMask &= ~(1 << i); + const FastTrack* fastTrack = ¤t->mFastTracks[i]; + int name = fastTrackNames[i]; + ALOG_ASSERT(name >= 0); + bool isEnabled = mixer->enabled(name); + if (isEnabled != myEnabled[i]) { + logWriter->logTimestamp(); + logWriter->logf("? i=%d name=%d mixena=%d ftena=%d", i, name, isEnabled, + myEnabled[i]); + } + if (myEnabled[i]) { + expectedMixerEnabled |= 1 << name; + } + AudioBufferProvider *abp = mixer->getBufferProvider(name); + if (abp != fastTrack->mBufferProvider) { + logWriter->logTimestamp(); + logWriter->logf("? i=%d name=%d mixbp=%p ftbp=%p", i, name, abp, + fastTrack->mBufferProvider); + } + int fastIndex = mixer->getFastIndex(name); + if (fastIndex != (int) i) { + logWriter->logTimestamp(); + logWriter->logf("? i=%d name=%d fastIndex=%d", i, name, fastIndex); + } + } + unsigned mixerEnabled = mixer->enabledTrackNames(); + if (mixerEnabled != expectedMixerEnabled) { + logWriter->logTimestamp(); + logWriter->logf("? 
mixena=%#x expected=%#x", mixerEnabled, expectedMixerEnabled); + } + // process() is CPU-bound mixer->process(pts); mixBufferState = MIXED; @@ -453,7 +538,7 @@ bool FastMixer::threadLoop() ATRACE_END(); dumpState->mWriteSequence++; if (framesWritten >= 0) { - ALOG_ASSERT(framesWritten <= frameCount); + ALOG_ASSERT((size_t) framesWritten <= frameCount); dumpState->mFramesWritten += framesWritten; //if ((size_t) framesWritten == frameCount) { // didFullWrite = true; diff --git a/services/audioflinger/StateQueue.h b/services/audioflinger/StateQueue.h index e33b3c6..313330f 100644 --- a/services/audioflinger/StateQueue.h +++ b/services/audioflinger/StateQueue.h @@ -174,7 +174,7 @@ public: #endif private: - static const unsigned kN = 4; // values < 4 are not supported by this code + static const unsigned kN = 8; // values < 4 are not supported by this code T mStates[kN]; // written by mutator, read by observer // "volatile" is meaningless with SMP, but here it indicates that we're using atomic ops diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index ba848d7..58e3cbe 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1196,6 +1196,8 @@ sp AudioFlinger::PlaybackThread::createTrac { // scope for mLock Mutex::Autolock _l(mLock); + mNBLogWriter->logf("createTrack_l isFast=%d caller=%d", + (*flags & IAudioFlinger::TRACK_FAST) != 0, IPCThreadState::self()->getCallingPid()); // all tracks in same audio session must share the same routing strategy otherwise // conflicts will happen when tracks are moved from one output to another by audio policy @@ -1249,7 +1251,6 @@ Exit: if (status) { *status = lStatus; } - mNBLogWriter->logf("createTrack_l"); return track; } @@ -1317,7 +1318,8 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { - mNBLogWriter->logf("addTrack_l mName=%d", track->mName); + mNBLogWriter->logf("addTrack_l mName=%d mFastIndex=%d caller=%d", track->mName, + track->mFastIndex, IPCThreadState::self()->getCallingPid()); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1351,7 +1353,9 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("destroyTrack_l mName=%d mFastIndex=%d mClientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? track->mClient->pid() : -1); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1361,7 +1365,9 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { - mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("removeTrack_l mName=%d mFastIndex=%d clientPid=%d", track->mName, + track->mFastIndex, track->mClient != 0 ? 
track->mClient->pid() : -1); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -2149,6 +2155,7 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud FastTrack *fastTrack = &state->mFastTracks[0]; // wrap the source side of the MonoPipe to make it an AudioBufferProvider fastTrack->mBufferProvider = new SourceAudioBufferProvider(new MonoPipeReader(monoPipe)); + mNBLogWriter->logf("fastTrack0 bp=%p", fastTrack->mBufferProvider); fastTrack->mVolumeProvider = NULL; fastTrack->mGeneration++; state->mFastTracksGen++; @@ -2553,6 +2560,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // was it previously inactive? if (!(state->mTrackMask & (1 << j))) { ExtendedAudioBufferProvider *eabp = track; + mNBLogWriter->logf("fastTrack j=%d bp=%p", j, eabp); VolumeProvider *vp = track; fastTrack->mBufferProvider = eabp; fastTrack->mVolumeProvider = vp; @@ -2839,11 +2847,19 @@ track_is_ready: ; block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; pauseAudioWatchdog = true; } - sq->end(); } if (sq != NULL) { + unsigned trackMask = state->mTrackMask; sq->end(didModify); + if (didModify) { + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("push trackMask=%#x block=%d", trackMask, block); + } sq->push(block); + if (didModify) { + mNBLogWriter->logTimestamp(); + mNBLogWriter->log("pushed"); + } } #ifdef AUDIO_WATCHDOG if (pauseAudioWatchdog && mAudioWatchdog != 0) { @@ -2870,7 +2886,9 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); + mNBLogWriter->logTimestamp(); + mNBLogWriter->logf("prepareTracks_l remove name=%u mFastIndex=%d", track->name(), + track->mFastIndex); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index fa1e336..8e6b69c 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,7 +315,7 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; - static const size_t kLogSize = 512; + static const size_t kLogSize = 8 * 1024; sp mNBLogWriter; }; @@ -546,7 +546,7 @@ private: sp mTeeSink; sp mTeeSource; uint32_t mScreenState; // cached copy of gScreenState - static const size_t kFastMixerLogSize = 8 * 1024; + static const size_t kFastMixerLogSize = 32 * 1024; sp mFastMixerNBLogWriter; public: virtual bool hasFastMixer() const = 0; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 315cbbc..f679751 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -351,6 +351,7 @@ AudioFlinger::PlaybackThread::Track::Track( // Read the initial underruns because this field is never cleared by the fast mixer mObservedUnderruns = thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); + thread->mNBLogWriter->logf("new Track mName=%d mFastIndex=%d", mName, mFastIndex); } } ALOGV("Track constructor name %d, calling pid %d", mName, @@ -360,6 +361,7 @@ AudioFlinger::PlaybackThread::Track::Track( AudioFlinger::PlaybackThread::Track::~Track() { ALOGV("PlaybackThread::Track destructor"); + // FIXME not sure if safe to log here, would need a lock on thread to do it } void AudioFlinger::PlaybackThread::Track::destroy() @@ -569,7 
+571,8 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("start mName=%d", mName); + thread->mNBLogWriter->logf("start mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track @@ -612,7 +615,8 @@ void AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("stop mName=%d", mName); + thread->mNBLogWriter->logf("stop mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -649,7 +653,8 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("pause mName=%d", mName); + thread->mNBLogWriter->logf("pause mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -673,7 +678,8 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("flush mName=%d", mName); + thread->mNBLogWriter->logf("flush mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, + IPCThreadState::self()->getCallingPid()); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From 7f5d335f7b4caecd0dfb8f1085f352f1d2da5d2e Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 15 Feb 2013 23:55:04 +0000 Subject: Revert "Temporary additional logging to investigate bug" This reverts commit 32584a7d672864b20ab8b83a3cb23c1858e908b7 Change-Id: I9dc680578b955b1af462eeb7a49d61a0d45eb81b --- include/media/nbaio/NBLog.h | 4 +- services/audioflinger/AudioFlinger.h | 2 +- services/audioflinger/AudioMixer.cpp | 72 ++-------------------------- services/audioflinger/AudioMixer.h | 25 +--------- services/audioflinger/FastMixer.cpp | 93 ++---------------------------------- services/audioflinger/StateQueue.h | 2 +- services/audioflinger/Threads.cpp | 30 +++--------- services/audioflinger/Threads.h | 4 +- services/audioflinger/Tracks.cpp | 14 ++---- 9 files changed, 26 insertions(+), 220 deletions(-) diff --git a/include/media/nbaio/NBLog.h b/include/media/nbaio/NBLog.h index 107ba66..8fc417f 100644 --- a/include/media/nbaio/NBLog.h +++ b/include/media/nbaio/NBLog.h @@ -115,7 +115,7 @@ public: virtual ~Writer() { } virtual void log(const char *string); - virtual void logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3))); + virtual void logf(const char *fmt, ...); virtual void logvf(const char *fmt, va_list ap); virtual void logTimestamp(); virtual void logTimestamp(const struct timespec& ts); @@ -149,7 +149,7 @@ public: LockedWriter(size_t size, void *shared); virtual void log(const char *string); - virtual void logf(const char *fmt, ...) 
__attribute__ ((format (printf, 2, 3))); + virtual void logf(const char *fmt, ...); virtual void logvf(const char *fmt, va_list ap); virtual void logTimestamp(); virtual void logTimestamp(const struct timespec& ts); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index a25fb80..c3f08f6 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -227,7 +227,7 @@ public: sp newWriter_l(size_t size, const char *name); void unregisterWriter(const sp& writer); private: - static const size_t kLogMemorySize = 50 * 1024; + static const size_t kLogMemorySize = 10 * 1024; sp mLogMemoryDealer; // == 0 when NBLog is disabled public: diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 2d7894d..08325ad 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -16,7 +16,7 @@ */ #define LOG_TAG "AudioMixer" -#define LOG_NDEBUG 0 +//#define LOG_NDEBUG 0 #include #include @@ -25,8 +25,6 @@ #include #include -#undef ALOGV -#define ALOGV(a...) do { } while (0) #include #include @@ -100,7 +98,7 @@ effect_descriptor_t AudioMixer::dwnmFxDesc; AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTracks) : mTrackNames(0), mConfiguredNames((maxNumTracks >= 32 ? 0 : 1 << maxNumTracks) - 1), - mSampleRate(sampleRate), mLog(&mDummyLog) + mSampleRate(sampleRate) { // AudioMixer is not yet capable of multi-channel beyond stereo COMPILE_TIME_ASSERT_FUNCTION_SCOPE(2 == MAX_NUM_CHANNELS); @@ -124,7 +122,6 @@ AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTr mState.hook = process__nop; mState.outputTemp = NULL; mState.resampleTemp = NULL; - mState.mLog = &mDummyLog; // mState.reserved // FIXME Most of the following initialization is probably redundant since @@ -134,7 +131,6 @@ AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTr for (unsigned i=0 ; i < MAX_NUM_TRACKS ; i++) { t->resampler = NULL; t->downmixerBufferProvider = NULL; - t->magic = track_t::kMagic; t++; } @@ -173,12 +169,6 @@ AudioMixer::~AudioMixer() delete [] mState.resampleTemp; } -void AudioMixer::setLog(NBLog::Writer *log) -{ - mLog = log; - mState.mLog = log; -} - int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) { uint32_t names = (~mTrackNames) & mConfiguredNames; @@ -219,12 +209,9 @@ int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) t->mainBuffer = NULL; t->auxBuffer = NULL; t->downmixerBufferProvider = NULL; - t->fastIndex = -1; - // t->magic unchanged status_t status = initTrackDownmix(&mState.tracks[n], n, channelMask); if (status == OK) { - mLog->logf("getTrackName %d", n); return TRACK0 + n; } ALOGE("AudioMixer::getTrackName(0x%x) failed, error preparing track for downmix", @@ -379,11 +366,9 @@ void AudioMixer::deleteTrackName(int name) { ALOGV("AudioMixer::deleteTrackName(%d)", name); name -= TRACK0; - mLog->logf("deleteTrackName %d", name); ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); ALOGV("deleteTrackName(%d)", name); track_t& track(mState.tracks[ name ]); - track.checkMagic(); if (track.enabled) { track.enabled = false; invalidateState(1<logf("enable %d", name); track.enabled = true; ALOGV("enable(%d)", name); invalidateState(1 << name); @@ -417,36 +400,19 @@ void AudioMixer::disable(int name) name -= TRACK0; ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); track_t& track = mState.tracks[name]; - 
track.checkMagic(); if (track.enabled) { - mLog->logf("disable %d", name); track.enabled = false; ALOGV("disable(%d)", name); invalidateState(1 << name); } } -bool AudioMixer::enabled(int name) -{ - name -= TRACK0; - ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); - track_t& track = mState.tracks[name]; - track.checkMagic(); -#if 0 - // can't do this because mState.enabledTracks is updated lazily - ALOG_ASSERT(track.enabled == ((mState.enabledTracks & (1 << name)) != 0)); -#endif - - return track.enabled; -} - void AudioMixer::setParameter(int name, int target, int param, void *value) { name -= TRACK0; ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); track_t& track = mState.tracks[name]; - track.checkMagic(); int valueInt = (int)value; int32_t *valueBuf = (int32_t *)value; @@ -489,9 +455,6 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) // for a specific track? or per mixer? /* case DOWNMIX_TYPE: break */ - case FAST_INDEX: - track.fastIndex = valueInt; - break; default: LOG_FATAL("bad param"); } @@ -577,7 +540,6 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) bool AudioMixer::track_t::setResampler(uint32_t value, uint32_t devSampleRate) { - checkMagic(); if (value != devSampleRate || resampler != NULL) { if (sampleRate != value) { sampleRate = value; @@ -610,7 +572,6 @@ bool AudioMixer::track_t::setResampler(uint32_t value, uint32_t devSampleRate) inline void AudioMixer::track_t::adjustVolumeRamp(bool aux) { - checkMagic(); for (uint32_t i=0 ; i0) && (((prevVolume[i]+volumeInc[i])>>16) >= volume[i])) || ((volumeInc[i]<0) && (((prevVolume[i]+volumeInc[i])>>16) <= volume[i]))) { @@ -639,10 +600,8 @@ size_t AudioMixer::getUnreleasedFrames(int name) const void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider) { name -= TRACK0; - mLog->logf("set bp %d=%p", name, bufferProvider); ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); - mState.tracks[name].checkMagic(); if (mState.tracks[name].downmixerBufferProvider != NULL) { // update required? if (mState.tracks[name].downmixerBufferProvider->mTrackBufferProvider != bufferProvider) { @@ -660,27 +619,10 @@ void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider } } -AudioBufferProvider* AudioMixer::getBufferProvider(int name) -{ - name -= TRACK0; - ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); - mState.tracks[name].checkMagic(); - return mState.tracks[name].bufferProvider; -} -int AudioMixer::getFastIndex(int name) -{ - name -= TRACK0; - ALOG_ASSERT(uint32_t(name) < MAX_NUM_TRACKS, "bad track name %d", name); - mState.tracks[name].checkMagic(); - return mState.tracks[name].fastIndex; -} void AudioMixer::process(int64_t pts) { - if (mState.needsChanged) { - mLog->logf("process needs=%#x", mState.needsChanged); - } mState.hook(&mState, pts); } @@ -705,7 +647,6 @@ void AudioMixer::process__validate(state_t* state, int64_t pts) } state->enabledTracks &= ~disabled; state->enabledTracks |= enabled; - state->mLog->logf("process_validate ena=%#x", state->enabledTracks); // compute everything we need... 
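// Illustrative sketch, not part of the patch: the mixer code in these hunks walks bitmasks of
// enabled track names with __builtin_clz/__builtin_ctz. This is the idiom in isolation, as
// plain, self-contained C++ (none of it is AudioFlinger code):
#include <cstdint>
#include <cstdio>

static void forEachSetBit(uint32_t mask)
{
    while (mask != 0) {
        int i = 31 - __builtin_clz(mask);   // index of the highest set bit, as in process__nop()
        mask &= ~(1u << i);                 // clear that bit so the loop terminates
        std::printf("track %d\n", i);       // the real code mixes or validates track i here
    }
}
// forEachSetBit(0x15) visits 4, 2, 0; the __builtin_ctz form used elsewhere visits 0, 2, 4.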
int countActiveTracks = 0; @@ -1117,7 +1058,6 @@ void AudioMixer::track__16BitsMono(track_t* t, int32_t* out, size_t frameCount, void AudioMixer::process__nop(state_t* state, int64_t pts) { uint32_t e0 = state->enabledTracks; - state->mLog->logf("process_nop ena=%#x", e0); size_t bufSize = state->frameCount * sizeof(int16_t) * MAX_NUM_CHANNELS; while (e0) { // process by group of tracks with same output buffer to @@ -1163,7 +1103,6 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) // acquire each track's buffer uint32_t enabledTracks = state->enabledTracks; - state->mLog->logf("process_gNR ena=%#x", enabledTracks); uint32_t e0 = enabledTracks; while (e0) { const int i = 31 - __builtin_clz(e0); @@ -1172,8 +1111,8 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) t.buffer.frameCount = state->frameCount; int valid = t.bufferProvider->getValid(); if (valid != AudioBufferProvider::kValid) { - ALOGE("invalid bufferProvider=%p name=%d fastIndex=%d frameCount=%d valid=%#x enabledTracks=%#x", - t.bufferProvider, i, t.fastIndex, t.buffer.frameCount, valid, enabledTracks); + ALOGE("invalid bufferProvider=%p name=%d frameCount=%d valid=%#x enabledTracks=%#x", + t.bufferProvider, i, t.buffer.frameCount, valid, enabledTracks); // expect to crash } t.bufferProvider->getNextBuffer(&t.buffer, pts); @@ -1272,7 +1211,6 @@ void AudioMixer::process__genericResampling(state_t* state, int64_t pts) size_t numFrames = state->frameCount; uint32_t e0 = state->enabledTracks; - state->mLog->logf("process_gR ena=%#x", e0); while (e0) { // process by group of tracks with same output buffer // to optimize cache use @@ -1337,7 +1275,6 @@ void AudioMixer::process__genericResampling(state_t* state, int64_t pts) void AudioMixer::process__OneTrack16BitsStereoNoResampling(state_t* state, int64_t pts) { - state->mLog->logf("process_1TSNR ena=%#x", state->enabledTracks); // This method is only called when state->enabledTracks has exactly // one bit set. The asserts below would verify this, but are commented out // since the whole point of this method is to optimize performance. @@ -1407,7 +1344,6 @@ void AudioMixer::process__TwoTracks16BitsStereoNoResampling(state_t* state, { int i; uint32_t en = state->enabledTracks; - state->mLog->logf("process_2TSNR ena=%#x", en); i = 31 - __builtin_clz(en); const track_t& t0 = state->tracks[i]; diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index 2d00bf5..fd21fda 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -28,7 +28,6 @@ #include #include -#include namespace android { @@ -77,7 +76,6 @@ public: MAIN_BUFFER = 0x4002, AUX_BUFFER = 0x4003, DOWNMIX_TYPE = 0X4004, - FAST_INDEX = 0x4005, // for debugging only // for target RESAMPLE SAMPLE_RATE = 0x4100, // Configure sample rate conversion on this track name; // parameter 'value' is the new sample rate in Hz. 
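// Illustrative sketch, not part of the patch: the calling sequence this track-name API implies,
// pieced together from the call sites visible in this patch series (FastMixer, MixerThread).
// The include path, the two-argument constructor (maxNumTracks left at its default), the 48 kHz
// output rate, the 44100 Hz track rate, and the session id are assumptions for the example only.
#include "AudioMixer.h"     // services/audioflinger

static void sketchMixOneTrack(android::AudioBufferProvider *bp, int16_t *mixBuffer,
                              size_t frameCount, int sessionId)
{
    using namespace android;
    AudioMixer mixer(frameCount, 48000);                      // output sample rate (example)
    int name = mixer.getTrackName(AUDIO_CHANNEL_OUT_STEREO, sessionId);
    if (name < 0) {
        return;                                               // -1 means no free track name
    }
    mixer.setBufferProvider(name, bp);
    mixer.setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, (void *) mixBuffer);
    mixer.setParameter(name, AudioMixer::RESAMPLE, AudioMixer::SAMPLE_RATE, (void *) 44100);
    mixer.enable(name);
    mixer.process(AudioBufferProvider::kInvalidPTS);          // no write timestamp available
    mixer.deleteTrackName(name);
}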
@@ -108,17 +106,13 @@ public: // Enable or disable an allocated track by name void enable(int name); void disable(int name); - bool enabled(int name); void setParameter(int name, int target, int param, void *value); void setBufferProvider(int name, AudioBufferProvider* bufferProvider); - AudioBufferProvider* getBufferProvider(int name); void process(int64_t pts); uint32_t trackNames() const { return mTrackNames; } - uint32_t enabledTrackNames() const { return mState.enabledTracks; } - int getFastIndex(int name); size_t getUnreleasedFrames(int name) const; @@ -206,10 +200,7 @@ private: int32_t sessionId; - int32_t fastIndex; - int32_t magic; - static const int kMagic = 0x54637281; - //int32_t padding[1]; + int32_t padding[2]; // 16-byte boundary @@ -219,12 +210,6 @@ private: void adjustVolumeRamp(bool aux); size_t getUnreleasedFrames() const { return resampler != NULL ? resampler->getUnreleasedFrames() : 0; }; - void checkMagic() { - if (magic != kMagic) { - ALOGE("magic=%#x fastIndex=%d", magic, fastIndex); - } - } - }; // pad to 32-bytes to fill cache line @@ -235,8 +220,7 @@ private: void (*hook)(state_t* state, int64_t pts); // one of process__*, never NULL int32_t *outputTemp; int32_t *resampleTemp; - NBLog::Writer* mLog; - int32_t reserved[1]; + int32_t reserved[2]; // FIXME allocate dynamically to save some memory when maxNumTracks < MAX_NUM_TRACKS track_t tracks[MAX_NUM_TRACKS]; __attribute__((aligned(32))); }; @@ -263,11 +247,6 @@ private: const uint32_t mSampleRate; - NBLog::Writer* mLog; - NBLog::Writer mDummyLog; -public: - void setLog(NBLog::Writer* log); -private: state_t mState __attribute__((aligned(32))); // effect descriptor for the downmixer used by the mixer diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 75c3c41..80e37ca 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -21,15 +21,13 @@ // #define LOG_TAG "FastMixer" -#define LOG_NDEBUG 0 +//#define LOG_NDEBUG 0 #define ATRACE_TAG ATRACE_TAG_AUDIO #include #include #include -#undef ALOGV -#define ALOGV(a...) do { } while (0) #include #include #ifdef FAST_MIXER_STATISTICS @@ -95,8 +93,6 @@ bool FastMixer::threadLoop() uint32_t warmupCycles = 0; // counter of number of loop cycles required to warmup NBAIO_Sink* teeSink = NULL; // if non-NULL, then duplicate write() to this non-blocking sink NBLog::Writer dummyLogWriter, *logWriter = &dummyLogWriter; - bool myEnabled[FastMixerState::kMaxFastTracks]; - memset(myEnabled, 0, sizeof(myEnabled)); for (;;) { @@ -124,16 +120,12 @@ bool FastMixer::threadLoop() FastMixerState::Command command = next->mCommand; if (next != current) { - logWriter->logTimestamp(); logWriter->log("next != current"); // As soon as possible of learning of a new dump area, start using it dumpState = next->mDumpState != NULL ? next->mDumpState : &dummyDumpState; teeSink = next->mTeeSink; logWriter = next->mNBLogWriter != NULL ? next->mNBLogWriter : &dummyLogWriter; - if (mixer != NULL) { - mixer->setLog(logWriter); - } // We want to always have a valid reference to the previous (non-idle) state. // However, the state queue only guarantees access to current and previous states. 
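// Illustrative sketch, not part of the patch: the single-mutator / single-observer protocol
// around the state queue that the comment above relies on. Only end() and push() appear in
// this patch series; begin() on the mutator side and poll() on the observer side are assumed
// from StateQueue.h and should be verified against that header.
//
//     // Mutator side (MixerThread::prepareTracks_l):
//     FastMixerState *state = sq->begin();      // writable slot out of the kN-entry array
//     // ... edit state->mFastTracks[], bump the generation counters ...
//     sq->end(didModify);                       // keep or discard the edits
//     sq->push(block);                          // e.g. FastMixerStateQueue::BLOCK_UNTIL_ACKED
//
//     // Observer side (FastMixer::threadLoop):
//     const FastMixerState *next = sq->poll();  // non-blocking read of the newest state
//     // Only the current and previous snapshots are guaranteed to stay valid, which is why
//     // the loop above copies what it needs instead of keeping older pointers around.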
@@ -308,21 +300,13 @@ bool FastMixer::threadLoop() addedTracks &= ~(1 << i); const FastTrack* fastTrack = &current->mFastTracks[i]; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; - logWriter->logf("bp %d i=%d %p", __LINE__, i, bufferProvider); ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); - if (bufferProvider == NULL || - bufferProvider->getValid() != AudioBufferProvider::kValid) { - logWriter->logTimestamp(); - logWriter->logf("added invalid %#x", i); - } if (mixer != NULL) { // calling getTrackName with default channel mask and a random invalid // sessionId (no effects here) name = mixer->getTrackName(AUDIO_CHANNEL_OUT_STEREO, -555); ALOG_ASSERT(name >= 0); fastTrackNames[i] = name; - mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, - (void *) i); mixer->setBufferProvider(name, bufferProvider); mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, (void *) mixBuffer); @@ -333,41 +317,27 @@ } mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK, (void *) fastTrack->mChannelMask); - if (!mixer->enabled(name)) { - logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); - } mixer->enable(name); - myEnabled[i] = true; } generations[i] = fastTrack->mGeneration; } - // finally process (potentially) modified tracks; these use the same slot + // finally process modified tracks; these use the same slot // but may have a different buffer provider or volume provider unsigned modifiedTracks = currentTrackMask & previousTrackMask; if (modifiedTracks) { - logWriter->logf("pot. mod. %#x", modifiedTracks); + logWriter->logf("modified %#x", modifiedTracks); } - unsigned actuallyModifiedTracks = 0; while (modifiedTracks != 0) { i = __builtin_ctz(modifiedTracks); modifiedTracks &= ~(1 << i); const FastTrack* fastTrack = &current->mFastTracks[i]; if (fastTrack->mGeneration != generations[i]) { - actuallyModifiedTracks |= 1 << i; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; - logWriter->logf("bp %d i=%d %p", __LINE__, i, bufferProvider); ALOG_ASSERT(bufferProvider != NULL); - if (bufferProvider == NULL || - bufferProvider->getValid() != AudioBufferProvider::kValid) { - logWriter->logTimestamp(); - logWriter->logf("modified invalid %#x", i); - } if (mixer != NULL) { name = fastTrackNames[i]; ALOG_ASSERT(name >= 0); - mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FAST_INDEX, - (void *) i); mixer->setBufferProvider(name, bufferProvider); if (fastTrack->mVolumeProvider == NULL) { mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, @@ -390,9 +360,6 @@ bool FastMixer::threadLoop() generations[i] = fastTrack->mGeneration; } } - if (actuallyModifiedTracks) { - logWriter->logf("act. mod. 
%#x", actuallyModifiedTracks); - } fastTracksGen = current->mFastTracksGen; @@ -410,7 +377,6 @@ bool FastMixer::threadLoop() ALOG_ASSERT(mixBuffer != NULL); // for each track, update volume and check for underrun unsigned currentTrackMask = current->mTrackMask; - logWriter->logf("ctm %#x", currentTrackMask); while (currentTrackMask != 0) { i = __builtin_ctz(currentTrackMask); currentTrackMask &= ~(1 << i); @@ -444,76 +410,25 @@ bool FastMixer::threadLoop() underruns.mBitFields.mEmpty++; underruns.mBitFields.mMostRecent = UNDERRUN_EMPTY; mixer->disable(name); - myEnabled[i] = false; } else { // allow mixing partial buffer underruns.mBitFields.mPartial++; underruns.mBitFields.mMostRecent = UNDERRUN_PARTIAL; - if (!mixer->enabled(name)) { - logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); - } mixer->enable(name); - myEnabled[i] = true; } } else { underruns.mBitFields.mFull++; underruns.mBitFields.mMostRecent = UNDERRUN_FULL; - if (!mixer->enabled(name)) { - logWriter->logf("enable %d i=%d name=%d", __LINE__, i, name); - } mixer->enable(name); - myEnabled[i] = true; } ftDump->mUnderruns = underruns; ftDump->mFramesReady = framesReady; - AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; - if (bufferProvider == NULL || - bufferProvider->getValid() != AudioBufferProvider::kValid) { - logWriter->logTimestamp(); - logWriter->logf("mixing invalid %#x", i); - } } int64_t pts; if (outputSink == NULL || (OK != outputSink->getNextWriteTimestamp(&pts))) pts = AudioBufferProvider::kInvalidPTS; - // validate that mixer state is correct - currentTrackMask = current->mTrackMask; - unsigned expectedMixerEnabled = 0; - while (currentTrackMask != 0) { - i = __builtin_ctz(currentTrackMask); - currentTrackMask &= ~(1 << i); - const FastTrack* fastTrack = &current->mFastTracks[i]; - int name = fastTrackNames[i]; - ALOG_ASSERT(name >= 0); - bool isEnabled = mixer->enabled(name); - if (isEnabled != myEnabled[i]) { - logWriter->logTimestamp(); - logWriter->logf("? i=%d name=%d mixena=%d ftena=%d", i, name, isEnabled, - myEnabled[i]); - } - if (myEnabled[i]) { - expectedMixerEnabled |= 1 << name; - } - AudioBufferProvider *abp = mixer->getBufferProvider(name); - if (abp != fastTrack->mBufferProvider) { - logWriter->logTimestamp(); - logWriter->logf("? i=%d name=%d mixbp=%p ftbp=%p", i, name, abp, - fastTrack->mBufferProvider); - } - int fastIndex = mixer->getFastIndex(name); - if (fastIndex != (int) i) { - logWriter->logTimestamp(); - logWriter->logf("? i=%d name=%d fastIndex=%d", i, name, fastIndex); - } - } - unsigned mixerEnabled = mixer->enabledTrackNames(); - if (mixerEnabled != expectedMixerEnabled) { - logWriter->logTimestamp(); - logWriter->logf("? 
mixena=%#x expected=%#x", mixerEnabled, expectedMixerEnabled); - } - // process() is CPU-bound mixer->process(pts); mixBufferState = MIXED; @@ -538,7 +453,7 @@ bool FastMixer::threadLoop() ATRACE_END(); dumpState->mWriteSequence++; if (framesWritten >= 0) { - ALOG_ASSERT((size_t) framesWritten <= frameCount); + ALOG_ASSERT(framesWritten <= frameCount); dumpState->mFramesWritten += framesWritten; //if ((size_t) framesWritten == frameCount) { // didFullWrite = true; diff --git a/services/audioflinger/StateQueue.h b/services/audioflinger/StateQueue.h index 313330f..e33b3c6 100644 --- a/services/audioflinger/StateQueue.h +++ b/services/audioflinger/StateQueue.h @@ -174,7 +174,7 @@ public: #endif private: - static const unsigned kN = 8; // values < 4 are not supported by this code + static const unsigned kN = 4; // values < 4 are not supported by this code T mStates[kN]; // written by mutator, read by observer // "volatile" is meaningless with SMP, but here it indicates that we're using atomic ops diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 58e3cbe..ba848d7 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1196,8 +1196,6 @@ sp AudioFlinger::PlaybackThread::createTrac { // scope for mLock Mutex::Autolock _l(mLock); - mNBLogWriter->logf("createTrack_l isFast=%d caller=%d", - (*flags & IAudioFlinger::TRACK_FAST) != 0, IPCThreadState::self()->getCallingPid()); // all tracks in same audio session must share the same routing strategy otherwise // conflicts will happen when tracks are moved from one output to another by audio policy @@ -1251,6 +1249,7 @@ Exit: if (status) { *status = lStatus; } + mNBLogWriter->logf("createTrack_l"); return track; } @@ -1318,8 +1317,7 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { - mNBLogWriter->logf("addTrack_l mName=%d mFastIndex=%d caller=%d", track->mName, - track->mFastIndex, IPCThreadState::self()->getCallingPid()); + mNBLogWriter->logf("addTrack_l mName=%d", track->mName); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1353,9 +1351,7 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("destroyTrack_l mName=%d mFastIndex=%d mClientPid=%d", track->mName, - track->mFastIndex, track->mClient != 0 ? track->mClient->pid() : -1); + mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1365,9 +1361,7 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("removeTrack_l mName=%d mFastIndex=%d clientPid=%d", track->mName, - track->mFastIndex, track->mClient != 0 ? 
track->mClient->pid() : -1); + mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -2155,7 +2149,6 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud FastTrack *fastTrack = &state->mFastTracks[0]; // wrap the source side of the MonoPipe to make it an AudioBufferProvider fastTrack->mBufferProvider = new SourceAudioBufferProvider(new MonoPipeReader(monoPipe)); - mNBLogWriter->logf("fastTrack0 bp=%p", fastTrack->mBufferProvider); fastTrack->mVolumeProvider = NULL; fastTrack->mGeneration++; state->mFastTracksGen++; @@ -2560,7 +2553,6 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // was it previously inactive? if (!(state->mTrackMask & (1 << j))) { ExtendedAudioBufferProvider *eabp = track; - mNBLogWriter->logf("fastTrack j=%d bp=%p", j, eabp); VolumeProvider *vp = track; fastTrack->mBufferProvider = eabp; fastTrack->mVolumeProvider = vp; @@ -2847,19 +2839,11 @@ track_is_ready: ; block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; pauseAudioWatchdog = true; } + sq->end(); } if (sq != NULL) { - unsigned trackMask = state->mTrackMask; sq->end(didModify); - if (didModify) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("push trackMask=%#x block=%d", trackMask, block); - } sq->push(block); - if (didModify) { - mNBLogWriter->logTimestamp(); - mNBLogWriter->log("pushed"); - } } #ifdef AUDIO_WATCHDOG if (pauseAudioWatchdog && mAudioWatchdog != 0) { @@ -2886,9 +2870,7 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mNBLogWriter->logTimestamp(); - mNBLogWriter->logf("prepareTracks_l remove name=%u mFastIndex=%d", track->name(), - track->mFastIndex); + mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 8e6b69c..fa1e336 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,7 +315,7 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; - static const size_t kLogSize = 8 * 1024; + static const size_t kLogSize = 512; sp mNBLogWriter; }; @@ -546,7 +546,7 @@ private: sp mTeeSink; sp mTeeSource; uint32_t mScreenState; // cached copy of gScreenState - static const size_t kFastMixerLogSize = 32 * 1024; + static const size_t kFastMixerLogSize = 8 * 1024; sp mFastMixerNBLogWriter; public: virtual bool hasFastMixer() const = 0; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index f679751..315cbbc 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -351,7 +351,6 @@ AudioFlinger::PlaybackThread::Track::Track( // Read the initial underruns because this field is never cleared by the fast mixer mObservedUnderruns = thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); - thread->mNBLogWriter->logf("new Track mName=%d mFastIndex=%d", mName, mFastIndex); } } ALOGV("Track constructor name %d, calling pid %d", mName, @@ -361,7 +360,6 @@ AudioFlinger::PlaybackThread::Track::Track( AudioFlinger::PlaybackThread::Track::~Track() { ALOGV("PlaybackThread::Track destructor"); - // FIXME not sure if safe to log here, would need a lock on thread to do it } void 
AudioFlinger::PlaybackThread::Track::destroy() @@ -571,8 +569,7 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("start mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("start mName=%d", mName); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track @@ -615,8 +612,7 @@ void AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("stop mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("stop mName=%d", mName); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -653,8 +649,7 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("pause mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("pause mName=%d", mName); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -678,8 +673,7 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("flush mName=%d mFastIndex=%d caller=%d", mName, mFastIndex, - IPCThreadState::self()->getCallingPid()); + thread->mNBLogWriter->logf("flush mName=%d", mName); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From fc900c922e219771d0cd3c075720d90026e1ca4a Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 18 Feb 2013 12:47:49 -0800 Subject: A reference cannot be re-bound Bug: 8213067 Bug: 6490974 Change-Id: If546215641fbc9a0fb1e8af8b85cc01f0a4b26ab --- services/audioflinger/AudioMixer.cpp | 44 ++++++++++++++++++++---------------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 08325ad..2a8a955 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -1064,33 +1064,37 @@ void AudioMixer::process__nop(state_t* state, int64_t pts) // avoid multiple memset() on same buffer uint32_t e1 = e0, e2 = e0; int i = 31 - __builtin_clz(e1); - track_t& t1 = state->tracks[i]; - e2 &= ~(1<tracks[i]; e2 &= ~(1<tracks[i]; - if (CC_UNLIKELY(t2.mainBuffer != t1.mainBuffer)) { - e1 &= ~(1<tracks[i]; + if (CC_UNLIKELY(t2.mainBuffer != t1.mainBuffer)) { + e1 &= ~(1<tracks[i]; - size_t outFrames = state->frameCount; - while (outFrames) { - t1.buffer.frameCount = outFrames; - int64_t outputPTS = calculateOutputPTS( - t1, pts, state->frameCount - outFrames); - t1.bufferProvider->getNextBuffer(&t1.buffer, outputPTS); - if (t1.buffer.raw == NULL) break; - outFrames -= t1.buffer.frameCount; - t1.bufferProvider->releaseBuffer(&t1.buffer); + { + track_t& t3 = state->tracks[i]; + size_t outFrames = state->frameCount; + while (outFrames) { + t3.buffer.frameCount = 
outFrames; + int64_t outputPTS = calculateOutputPTS( + t3, pts, state->frameCount - outFrames); + t3.bufferProvider->getNextBuffer(&t3.buffer, outputPTS); + if (t3.buffer.raw == NULL) break; + outFrames -= t3.buffer.frameCount; + t3.bufferProvider->releaseBuffer(&t3.buffer); + } } } } -- cgit v1.1 From fc7e676d61fbf53b50b0501002e9d6cd36332913 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 18 Feb 2013 12:47:49 -0800 Subject: A reference cannot be re-bound Bug: 8213067 Bug: 6490974 Change-Id: If546215641fbc9a0fb1e8af8b85cc01f0a4b26ab --- services/audioflinger/AudioMixer.cpp | 44 ++++++++++++++++++++---------------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 08325ad..2a8a955 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -1064,33 +1064,37 @@ void AudioMixer::process__nop(state_t* state, int64_t pts) // avoid multiple memset() on same buffer uint32_t e1 = e0, e2 = e0; int i = 31 - __builtin_clz(e1); - track_t& t1 = state->tracks[i]; - e2 &= ~(1<tracks[i]; e2 &= ~(1<tracks[i]; - if (CC_UNLIKELY(t2.mainBuffer != t1.mainBuffer)) { - e1 &= ~(1<tracks[i]; + if (CC_UNLIKELY(t2.mainBuffer != t1.mainBuffer)) { + e1 &= ~(1<tracks[i]; - size_t outFrames = state->frameCount; - while (outFrames) { - t1.buffer.frameCount = outFrames; - int64_t outputPTS = calculateOutputPTS( - t1, pts, state->frameCount - outFrames); - t1.bufferProvider->getNextBuffer(&t1.buffer, outputPTS); - if (t1.buffer.raw == NULL) break; - outFrames -= t1.buffer.frameCount; - t1.bufferProvider->releaseBuffer(&t1.buffer); + { + track_t& t3 = state->tracks[i]; + size_t outFrames = state->frameCount; + while (outFrames) { + t3.buffer.frameCount = outFrames; + int64_t outputPTS = calculateOutputPTS( + t3, pts, state->frameCount - outFrames); + t3.bufferProvider->getNextBuffer(&t3.buffer, outputPTS); + if (t3.buffer.raw == NULL) break; + outFrames -= t3.buffer.frameCount; + t3.bufferProvider->releaseBuffer(&t3.buffer); + } } } } -- cgit v1.1 From 9e1cdea45d40fa8cf95dd080943053856009054f Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Tue, 19 Feb 2013 18:25:33 -0800 Subject: replace getISurfaceTexture by getIGraphicBufferProducer Change-Id: I723a3863558126f2635a544a39bb3ec88d499c60 --- cmds/stagefright/stream.cpp | 2 +- services/camera/libcameraservice/Camera2Client.cpp | 2 +- services/camera/libcameraservice/CameraClient.cpp | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp index d49ab4a..dba67a9 100644 --- a/cmds/stagefright/stream.cpp +++ b/cmds/stagefright/stream.cpp @@ -374,7 +374,7 @@ int main(int argc, char **argv) { service->create(client, 0); if (player != NULL && player->setDataSource(source) == NO_ERROR) { - player->setVideoSurfaceTexture(surface->getISurfaceTexture()); + player->setVideoSurfaceTexture(surface->getIGraphicBufferProducer()); player->start(); client->waitForEOS(); diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index b9feaf8..e8b3b7f 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -488,7 +488,7 @@ status_t Camera2Client::setPreviewDisplay( sp binder; sp window; if (surface != 0) { - binder = surface->getISurfaceTexture()->asBinder(); + binder = surface->getIGraphicBufferProducer()->asBinder(); window = surface; } diff --git 
a/services/camera/libcameraservice/CameraClient.cpp b/services/camera/libcameraservice/CameraClient.cpp index 5f03a1c..f67c9f4 100644 --- a/services/camera/libcameraservice/CameraClient.cpp +++ b/services/camera/libcameraservice/CameraClient.cpp @@ -301,7 +301,7 @@ status_t CameraClient::setPreviewWindow(const sp& binder, status_t CameraClient::setPreviewDisplay(const sp& surface) { LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid()); - sp binder(surface != 0 ? surface->getISurfaceTexture()->asBinder() : 0); + sp binder(surface != 0 ? surface->getIGraphicBufferProducer()->asBinder() : 0); sp window(surface); return setPreviewWindow(binder, window); } -- cgit v1.1 From c0dd54f1a77fb94ae69cc3ac5944d718bb6caa28 Mon Sep 17 00:00:00 2001 From: Alex Ray Date: Wed, 20 Feb 2013 13:39:37 -0800 Subject: CameraService: Log camera module name Explicitly logs camera module loaded. This is useful on systems with multiple camera modules installed. Also this make the camera service match audioflinger, which logs hardware module names as well. Change-Id: I280bf12df2d517f936916b8ba3cb662e671d0216 --- services/camera/libcameraservice/CameraService.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 717e159..31e20c5 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -87,6 +87,7 @@ void CameraService::onFirstRef() mNumberOfCameras = 0; } else { + ALOGI("Loaded \"%s\" camera module", mModule->common.name); mNumberOfCameras = mModule->get_number_of_cameras(); if (mNumberOfCameras > MAX_CAMERAS) { ALOGE("Number of cameras(%d) > MAX_CAMERAS(%d).", -- cgit v1.1 From feb2179f15bde8241814c8c35f8dace13a923ee7 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 20 Feb 2013 16:47:28 -0800 Subject: Permit conditional registry of mediaserver extensions Change-Id: I94dc0d038e702dfe1779a50a1de0fae9bad15057 --- media/mediaserver/Android.mk | 12 ++++++++++++ media/mediaserver/RegisterExtensions.h | 22 ++++++++++++++++++++++ media/mediaserver/main_mediaserver.cpp | 2 ++ media/mediaserver/register.cpp | 21 +++++++++++++++++++++ 4 files changed, 57 insertions(+) create mode 100644 media/mediaserver/RegisterExtensions.h create mode 100644 media/mediaserver/register.cpp diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk index 0a0f4db..a485646 100644 --- a/media/mediaserver/Android.mk +++ b/media/mediaserver/Android.mk @@ -1,4 +1,13 @@ LOCAL_PATH:= $(call my-dir) + +ifneq ($(BOARD_USE_CUSTOM_MEDIASERVEREXTENSIONS),true) +include $(CLEAR_VARS) +LOCAL_SRC_FILES := register.cpp +LOCAL_MODULE := libregistermsext +LOCAL_MODULE_TAGS := optional +include $(BUILD_STATIC_LIBRARY) +endif + include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ @@ -15,6 +24,9 @@ LOCAL_SHARED_LIBRARIES := \ libutils \ libbinder +LOCAL_STATIC_LIBRARIES := \ + libregistermsext + LOCAL_C_INCLUDES := \ frameworks/av/media/libmediaplayerservice \ frameworks/av/services/medialog \ diff --git a/media/mediaserver/RegisterExtensions.h b/media/mediaserver/RegisterExtensions.h new file mode 100644 index 0000000..9a8c03c --- /dev/null +++ b/media/mediaserver/RegisterExtensions.h @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef REGISTER_EXTENSIONS_H +#define REGISTER_EXTENSIONS_H + +extern void registerExtensions(); + +#endif // REGISTER_EXTENSIONS_H diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp index 0862952..d5207d5 100644 --- a/media/mediaserver/main_mediaserver.cpp +++ b/media/mediaserver/main_mediaserver.cpp @@ -26,6 +26,7 @@ #include #include #include +#include "RegisterExtensions.h" // from LOCAL_C_INCLUDES #include "AudioFlinger.h" @@ -127,6 +128,7 @@ int main(int argc, char** argv) MediaPlayerService::instantiate(); CameraService::instantiate(); AudioPolicyService::instantiate(); + registerExtensions(); ProcessState::self()->startThreadPool(); IPCThreadState::self()->joinThreadPool(); } diff --git a/media/mediaserver/register.cpp b/media/mediaserver/register.cpp new file mode 100644 index 0000000..4ffb2ba --- /dev/null +++ b/media/mediaserver/register.cpp @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "RegisterExtensions.h" + +void registerExtensions() +{ +} -- cgit v1.1 From 7efa520c76e6a1f6b3146404cc6aca5a8353583a Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 13 Feb 2013 15:53:56 -0800 Subject: Camera: Move CameraMetadata.h from service to client library Change-Id: I940ce86f318f37ae5b73f912a6e589415150125f --- camera/Android.mk | 7 +- camera/CameraMetadata.cpp | 298 +++++++++++++++++++++ include/camera/CameraMetadata.h | 171 ++++++++++++ services/camera/libcameraservice/Android.mk | 1 - services/camera/libcameraservice/Camera2Client.h | 1 - services/camera/libcameraservice/Camera2Device.cpp | 2 +- services/camera/libcameraservice/Camera2Device.h | 4 +- .../camera/libcameraservice/camera2/BurstCapture.h | 2 +- .../libcameraservice/camera2/CallbackProcessor.h | 2 +- .../libcameraservice/camera2/CameraMetadata.cpp | 296 -------------------- .../libcameraservice/camera2/CameraMetadata.h | 173 ------------ .../libcameraservice/camera2/CaptureSequencer.h | 2 +- .../libcameraservice/camera2/FrameProcessor.h | 2 +- .../libcameraservice/camera2/JpegProcessor.h | 2 +- .../camera/libcameraservice/camera2/Parameters.h | 3 +- .../libcameraservice/camera2/StreamingProcessor.h | 2 +- .../camera/libcameraservice/camera2/ZslProcessor.h | 2 +- 17 files changed, 485 insertions(+), 485 deletions(-) create mode 100644 camera/CameraMetadata.cpp create mode 100644 include/camera/CameraMetadata.h delete mode 100644 services/camera/libcameraservice/camera2/CameraMetadata.cpp delete mode 100644 services/camera/libcameraservice/camera2/CameraMetadata.h diff --git a/camera/Android.mk b/camera/Android.mk index 7286f92..b66105b 100644 --- a/camera/Android.mk +++ b/camera/Android.mk @@ -3,6 +3,7 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ Camera.cpp \ + CameraMetadata.cpp \ CameraParameters.cpp \ ICamera.cpp \ ICameraClient.cpp \ @@ -16,7 +17,11 @@ LOCAL_SHARED_LIBRARIES := \ libbinder \ libhardware \ libui \ - libgui + libgui \ + libcamera_metadata \ + +LOCAL_C_INCLUDES += \ + system/media/camera/include LOCAL_MODULE:= libcamera_client diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp new file mode 100644 index 0000000..0bfa420 --- /dev/null +++ b/camera/CameraMetadata.cpp @@ -0,0 +1,298 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera2-Metadata" +#include +#include + +#include + +namespace android { + +CameraMetadata::CameraMetadata() : + mBuffer(NULL) { +} + +CameraMetadata::CameraMetadata(size_t entryCapacity, size_t dataCapacity) +{ + mBuffer = allocate_camera_metadata(entryCapacity, dataCapacity); +} + +CameraMetadata::CameraMetadata(const CameraMetadata &other) { + mBuffer = clone_camera_metadata(other.mBuffer); +} + +CameraMetadata::CameraMetadata(camera_metadata_t *buffer) : mBuffer(NULL) { + acquire(buffer); +} + +CameraMetadata &CameraMetadata::operator=(const CameraMetadata &other) { + return operator=(other.mBuffer); +} + +CameraMetadata &CameraMetadata::operator=(const camera_metadata_t *buffer) { + if (CC_LIKELY(buffer != mBuffer)) { + camera_metadata_t *newBuffer = clone_camera_metadata(buffer); + clear(); + mBuffer = newBuffer; + } + return *this; +} + +CameraMetadata::~CameraMetadata() { + clear(); +} + +camera_metadata_t* CameraMetadata::release() { + camera_metadata_t *released = mBuffer; + mBuffer = NULL; + return released; +} + +void CameraMetadata::clear() { + if (mBuffer) { + free_camera_metadata(mBuffer); + mBuffer = NULL; + } +} + +void CameraMetadata::acquire(camera_metadata_t *buffer) { + clear(); + mBuffer = buffer; +} + +void CameraMetadata::acquire(CameraMetadata &other) { + acquire(other.release()); +} + +status_t CameraMetadata::append(const CameraMetadata &other) { + return append_camera_metadata(mBuffer, other.mBuffer); +} + +size_t CameraMetadata::entryCount() const { + return (mBuffer == NULL) ? 0 : + get_camera_metadata_entry_count(mBuffer); +} + +bool CameraMetadata::isEmpty() const { + return entryCount() == 0; +} + +status_t CameraMetadata::sort() { + return sort_camera_metadata(mBuffer); +} + +status_t CameraMetadata::checkType(uint32_t tag, uint8_t expectedType) { + int tagType = get_camera_metadata_tag_type(tag); + if ( CC_UNLIKELY(tagType == -1)) { + ALOGE("Update metadata entry: Unknown tag %d", tag); + return INVALID_OPERATION; + } + if ( CC_UNLIKELY(tagType != expectedType) ) { + ALOGE("Mismatched tag type when updating entry %s (%d) of type %s; " + "got type %s data instead ", + get_camera_metadata_tag_name(tag), tag, + camera_metadata_type_names[tagType], + camera_metadata_type_names[expectedType]); + return INVALID_OPERATION; + } + return OK; +} + +status_t CameraMetadata::update(uint32_t tag, + const int32_t *data, size_t data_count) { + status_t res; + if ( (res = checkType(tag, TYPE_INT32)) != OK) { + return res; + } + return update(tag, (const void*)data, data_count); +} + +status_t CameraMetadata::update(uint32_t tag, + const uint8_t *data, size_t data_count) { + status_t res; + if ( (res = checkType(tag, TYPE_BYTE)) != OK) { + return res; + } + return update(tag, (const void*)data, data_count); +} + +status_t CameraMetadata::update(uint32_t tag, + const float *data, size_t data_count) { + status_t res; + if ( (res = checkType(tag, TYPE_FLOAT)) != OK) { + return res; + } + return update(tag, (const void*)data, data_count); +} + +status_t CameraMetadata::update(uint32_t tag, + const int64_t *data, size_t data_count) { + status_t res; + if ( (res = checkType(tag, TYPE_INT64)) != OK) { + return res; + } + return update(tag, (const void*)data, data_count); +} + +status_t CameraMetadata::update(uint32_t tag, + const double *data, size_t data_count) { + status_t res; + if ( (res = checkType(tag, TYPE_DOUBLE)) != OK) { + return res; + } + return update(tag, (const void*)data, data_count); +} + +status_t CameraMetadata::update(uint32_t tag, + 
const camera_metadata_rational_t *data, size_t data_count) { + status_t res; + if ( (res = checkType(tag, TYPE_RATIONAL)) != OK) { + return res; + } + return update(tag, (const void*)data, data_count); +} + +status_t CameraMetadata::update(uint32_t tag, + const String8 &string) { + status_t res; + if ( (res = checkType(tag, TYPE_BYTE)) != OK) { + return res; + } + return update(tag, (const void*)string.string(), string.size()); +} + +status_t CameraMetadata::update(uint32_t tag, const void *data, + size_t data_count) { + status_t res; + int type = get_camera_metadata_tag_type(tag); + if (type == -1) { + ALOGE("%s: Tag %d not found", __FUNCTION__, tag); + return BAD_VALUE; + } + size_t data_size = calculate_camera_metadata_entry_data_size(type, + data_count); + + res = resizeIfNeeded(1, data_size); + + if (res == OK) { + camera_metadata_entry_t entry; + res = find_camera_metadata_entry(mBuffer, tag, &entry); + if (res == NAME_NOT_FOUND) { + res = add_camera_metadata_entry(mBuffer, + tag, data, data_count); + } else if (res == OK) { + res = update_camera_metadata_entry(mBuffer, + entry.index, data, data_count, NULL); + } + } + + if (res != OK) { + ALOGE("%s: Unable to update metadata entry %s.%s (%x): %s (%d)", + __FUNCTION__, get_camera_metadata_section_name(tag), + get_camera_metadata_tag_name(tag), tag, strerror(-res), res); + } + return res; +} + +camera_metadata_entry_t CameraMetadata::find(uint32_t tag) { + status_t res; + camera_metadata_entry entry; + res = find_camera_metadata_entry(mBuffer, tag, &entry); + if (CC_UNLIKELY( res != OK )) { + entry.count = 0; + entry.data.u8 = NULL; + } + return entry; +} + +camera_metadata_ro_entry_t CameraMetadata::find(uint32_t tag) const { + status_t res; + camera_metadata_ro_entry entry; + res = find_camera_metadata_ro_entry(mBuffer, tag, &entry); + if (CC_UNLIKELY( res != OK )) { + entry.count = 0; + entry.data.u8 = NULL; + } + return entry; +} + +status_t CameraMetadata::erase(uint32_t tag) { + camera_metadata_entry_t entry; + status_t res; + res = find_camera_metadata_entry(mBuffer, tag, &entry); + if (res == NAME_NOT_FOUND) { + return OK; + } else if (res != OK) { + ALOGE("%s: Error looking for entry %s.%s (%x): %s %d", + __FUNCTION__, + get_camera_metadata_section_name(tag), + get_camera_metadata_tag_name(tag), tag, strerror(-res), res); + return res; + } + res = delete_camera_metadata_entry(mBuffer, entry.index); + if (res != OK) { + ALOGE("%s: Error deleting entry %s.%s (%x): %s %d", + __FUNCTION__, + get_camera_metadata_section_name(tag), + get_camera_metadata_tag_name(tag), tag, strerror(-res), res); + } + return res; +} + +void CameraMetadata::dump(int fd, int verbosity, int indentation) const { + dump_indented_camera_metadata(mBuffer, fd, verbosity, indentation); +} + +status_t CameraMetadata::resizeIfNeeded(size_t extraEntries, size_t extraData) { + if (mBuffer == NULL) { + mBuffer = allocate_camera_metadata(extraEntries * 2, extraData * 2); + if (mBuffer == NULL) { + ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__); + return NO_MEMORY; + } + } else { + size_t currentEntryCount = get_camera_metadata_entry_count(mBuffer); + size_t currentEntryCap = get_camera_metadata_entry_capacity(mBuffer); + size_t newEntryCount = currentEntryCount + + extraEntries; + newEntryCount = (newEntryCount > currentEntryCap) ? 
+ newEntryCount * 2 : currentEntryCap; + + size_t currentDataCount = get_camera_metadata_data_count(mBuffer); + size_t currentDataCap = get_camera_metadata_data_capacity(mBuffer); + size_t newDataCount = currentDataCount + + extraData; + newDataCount = (newDataCount > currentDataCap) ? + newDataCount * 2 : currentDataCap; + + if (newEntryCount > currentEntryCap || + newDataCount > currentDataCap) { + camera_metadata_t *oldBuffer = mBuffer; + mBuffer = allocate_camera_metadata(newEntryCount, + newDataCount); + if (mBuffer == NULL) { + ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__); + return NO_MEMORY; + } + append_camera_metadata(mBuffer, oldBuffer); + free_camera_metadata(oldBuffer); + } + } + return OK; +} + +}; // namespace android diff --git a/include/camera/CameraMetadata.h b/include/camera/CameraMetadata.h new file mode 100644 index 0000000..cf599e1 --- /dev/null +++ b/include/camera/CameraMetadata.h @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_CLIENT_CAMERA2_CAMERAMETADATA_CPP +#define ANDROID_CLIENT_CAMERA2_CAMERAMETADATA_CPP + +#include "system/camera_metadata.h" +#include +#include + +namespace android { + +/** + * A convenience wrapper around the C-based camera_metadata_t library. + */ +class CameraMetadata { + public: + /** Creates an empty object; best used when expecting to acquire contents + * from elsewhere */ + CameraMetadata(); + /** Creates an object with space for entryCapacity entries, with + * dataCapacity extra storage */ + CameraMetadata(size_t entryCapacity, size_t dataCapacity = 10); + + ~CameraMetadata(); + + /** Takes ownership of passed-in buffer */ + CameraMetadata(camera_metadata_t *buffer); + /** Clones the metadata */ + CameraMetadata(const CameraMetadata &other); + + /** + * Assignment clones metadata buffer. + */ + CameraMetadata &operator=(const CameraMetadata &other); + CameraMetadata &operator=(const camera_metadata_t *buffer); + + /** + * Release a raw metadata buffer to the caller. After this call, + * CameraMetadata no longer references the buffer, and the caller takes + * responsibility for freeing the raw metadata buffer (using + * free_camera_metadata()), or for handing it to another CameraMetadata + * instance. + */ + camera_metadata_t* release(); + + /** + * Clear the metadata buffer and free all storage used by it + */ + void clear(); + + /** + * Acquire a raw metadata buffer from the caller. After this call, + * the caller no longer owns the raw buffer, and must not free or manipulate it. + * If CameraMetadata already contains metadata, it is freed. + */ + void acquire(camera_metadata_t* buffer); + + /** + * Acquires raw buffer from other CameraMetadata object. After the call, the argument + * object no longer has any metadata. + */ + void acquire(CameraMetadata &other); + + /** + * Append metadata from another CameraMetadata object. 
+ */ + status_t append(const CameraMetadata &other); + + /** + * Number of metadata entries. + */ + size_t entryCount() const; + + /** + * Is the buffer empty (no entires) + */ + bool isEmpty() const; + + /** + * Sort metadata buffer for faster find + */ + status_t sort(); + + /** + * Update metadata entry. Will create entry if it doesn't exist already, and + * will reallocate the buffer if insufficient space exists. Overloaded for + * the various types of valid data. + */ + status_t update(uint32_t tag, + const uint8_t *data, size_t data_count); + status_t update(uint32_t tag, + const int32_t *data, size_t data_count); + status_t update(uint32_t tag, + const float *data, size_t data_count); + status_t update(uint32_t tag, + const int64_t *data, size_t data_count); + status_t update(uint32_t tag, + const double *data, size_t data_count); + status_t update(uint32_t tag, + const camera_metadata_rational_t *data, size_t data_count); + status_t update(uint32_t tag, + const String8 &string); + + template + status_t update(uint32_t tag, Vector data) { + return update(tag, data.array(), data.size()); + } + + /** + * Get metadata entry by tag id + */ + camera_metadata_entry find(uint32_t tag); + + /** + * Get metadata entry by tag id, with no editing + */ + camera_metadata_ro_entry find(uint32_t tag) const; + + /** + * Delete metadata entry by tag + */ + status_t erase(uint32_t tag); + + /** + * Dump contents into FD for debugging. The verbosity levels are + * 0: Tag entry information only, no data values + * 1: Level 0 plus at most 16 data values per entry + * 2: All information + * + * The indentation parameter sets the number of spaces to add to the start + * each line of output. + */ + void dump(int fd, int verbosity = 1, int indentation = 0) const; + + private: + camera_metadata_t *mBuffer; + + /** + * Check if tag has a given type + */ + status_t checkType(uint32_t tag, uint8_t expectedType); + + /** + * Base update entry method + */ + status_t update(uint32_t tag, const void *data, size_t data_count); + + /** + * Resize metadata buffer if needed by reallocating it and copying it over. 
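The typed update() overloads above all funnel into the private void* update() declared further down, after a checkType() pass. A rough usage sketch (ANDROID_SENSOR_SENSITIVITY is only an example of an int32 tag, not something this patch touches):

    CameraMetadata request(10 /*entryCapacity*/, 100 /*dataCapacity*/);

    int32_t iso = 400;
    request.update(ANDROID_SENSOR_SENSITIVITY, &iso, 1);       // creates or overwrites the entry

    camera_metadata_entry e = request.find(ANDROID_SENSOR_SENSITIVITY);
    if (e.count == 1) {
        int32_t readBack = e.data.i32[0];                      // 400
        (void) readBack;
    }

    request.erase(ANDROID_SENSOR_SENSITIVITY);                 // erasing a missing tag later still returns OK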
+ */ + status_t resizeIfNeeded(size_t extraEntries, size_t extraData); + +}; + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index 5245983..b6ebd02 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -11,7 +11,6 @@ LOCAL_SRC_FILES:= \ CameraClient.cpp \ Camera2Client.cpp \ Camera2Device.cpp \ - camera2/CameraMetadata.cpp \ camera2/Parameters.cpp \ camera2/FrameProcessor.cpp \ camera2/StreamingProcessor.cpp \ diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h index 4669958..a4d4478 100644 --- a/services/camera/libcameraservice/Camera2Client.h +++ b/services/camera/libcameraservice/Camera2Client.h @@ -157,7 +157,6 @@ private: mutable Mutex mICameraLock; typedef camera2::Parameters Parameters; - typedef camera2::CameraMetadata CameraMetadata; status_t setPreviewWindowL(const sp& binder, sp window); diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/Camera2Device.cpp index 5bfa085..921c8fc 100644 --- a/services/camera/libcameraservice/Camera2Device.cpp +++ b/services/camera/libcameraservice/Camera2Device.cpp @@ -202,7 +202,7 @@ status_t Camera2Device::dump(int fd, const Vector& args) { return res; } -const camera2::CameraMetadata& Camera2Device::info() const { +const CameraMetadata& Camera2Device::info() const { ALOGVV("%s: E", __FUNCTION__); return mDeviceInfo; diff --git a/services/camera/libcameraservice/Camera2Device.h b/services/camera/libcameraservice/Camera2Device.h index 41df2e4..86ff80f 100644 --- a/services/camera/libcameraservice/Camera2Device.h +++ b/services/camera/libcameraservice/Camera2Device.h @@ -27,14 +27,12 @@ #include #include "hardware/camera2.h" -#include "camera2/CameraMetadata.h" +#include "camera/CameraMetadata.h" namespace android { class Camera2Device : public virtual RefBase { public: - typedef camera2::CameraMetadata CameraMetadata; - Camera2Device(int id); ~Camera2Device(); diff --git a/services/camera/libcameraservice/camera2/BurstCapture.h b/services/camera/libcameraservice/camera2/BurstCapture.h index dfc45eb..a2cc893 100644 --- a/services/camera/libcameraservice/camera2/BurstCapture.h +++ b/services/camera/libcameraservice/camera2/BurstCapture.h @@ -17,7 +17,7 @@ #ifndef ANDROID_SERVERS_CAMERA_BURST_CAPTURE_H #define ANDROID_SERVERS_CAMERA_BURST_CAPTURE_H -#include "camera2/CameraMetadata.h" +#include "camera/CameraMetadata.h" #include #include #include diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.h b/services/camera/libcameraservice/camera2/CallbackProcessor.h index c2a1372..e68bb75 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.h +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.h @@ -24,7 +24,7 @@ #include #include #include "Parameters.h" -#include "CameraMetadata.h" +#include "camera/CameraMetadata.h" #include "Camera2Heap.h" namespace android { diff --git a/services/camera/libcameraservice/camera2/CameraMetadata.cpp b/services/camera/libcameraservice/camera2/CameraMetadata.cpp deleted file mode 100644 index 835587d..0000000 --- a/services/camera/libcameraservice/camera2/CameraMetadata.cpp +++ /dev/null @@ -1,296 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "Camera2-Metadata" -#include -#include - -#include "CameraMetadata.h" - -namespace android { - -namespace camera2 { -CameraMetadata::CameraMetadata() : - mBuffer(NULL) { -} - -CameraMetadata::CameraMetadata(size_t entryCapacity, size_t dataCapacity) -{ - mBuffer = allocate_camera_metadata(entryCapacity, dataCapacity); -} - -CameraMetadata::CameraMetadata(const CameraMetadata &other) { - mBuffer = clone_camera_metadata(other.mBuffer); -} - -CameraMetadata &CameraMetadata::operator=(const CameraMetadata &other) { - return operator=(other.mBuffer); -} - -CameraMetadata &CameraMetadata::operator=(const camera_metadata_t *buffer) { - if (CC_LIKELY(buffer != mBuffer)) { - camera_metadata_t *newBuffer = clone_camera_metadata(buffer); - clear(); - mBuffer = newBuffer; - } - return *this; -} - -CameraMetadata::~CameraMetadata() { - clear(); -} - -camera_metadata_t* CameraMetadata::release() { - camera_metadata_t *released = mBuffer; - mBuffer = NULL; - return released; -} - -void CameraMetadata::clear() { - if (mBuffer) { - free_camera_metadata(mBuffer); - mBuffer = NULL; - } -} - -void CameraMetadata::acquire(camera_metadata_t *buffer) { - clear(); - mBuffer = buffer; -} - -void CameraMetadata::acquire(CameraMetadata &other) { - acquire(other.release()); -} - -status_t CameraMetadata::append(const CameraMetadata &other) { - return append_camera_metadata(mBuffer, other.mBuffer); -} - -size_t CameraMetadata::entryCount() const { - return (mBuffer == NULL) ? 
0 : - get_camera_metadata_entry_count(mBuffer); -} - -bool CameraMetadata::isEmpty() const { - return entryCount() == 0; -} - -status_t CameraMetadata::sort() { - return sort_camera_metadata(mBuffer); -} - -status_t CameraMetadata::checkType(uint32_t tag, uint8_t expectedType) { - int tagType = get_camera_metadata_tag_type(tag); - if ( CC_UNLIKELY(tagType == -1)) { - ALOGE("Update metadata entry: Unknown tag %d", tag); - return INVALID_OPERATION; - } - if ( CC_UNLIKELY(tagType != expectedType) ) { - ALOGE("Mismatched tag type when updating entry %s (%d) of type %s; " - "got type %s data instead ", - get_camera_metadata_tag_name(tag), tag, - camera_metadata_type_names[tagType], - camera_metadata_type_names[expectedType]); - return INVALID_OPERATION; - } - return OK; -} - -status_t CameraMetadata::update(uint32_t tag, - const int32_t *data, size_t data_count) { - status_t res; - if ( (res = checkType(tag, TYPE_INT32)) != OK) { - return res; - } - return update(tag, (const void*)data, data_count); -} - -status_t CameraMetadata::update(uint32_t tag, - const uint8_t *data, size_t data_count) { - status_t res; - if ( (res = checkType(tag, TYPE_BYTE)) != OK) { - return res; - } - return update(tag, (const void*)data, data_count); -} - -status_t CameraMetadata::update(uint32_t tag, - const float *data, size_t data_count) { - status_t res; - if ( (res = checkType(tag, TYPE_FLOAT)) != OK) { - return res; - } - return update(tag, (const void*)data, data_count); -} - -status_t CameraMetadata::update(uint32_t tag, - const int64_t *data, size_t data_count) { - status_t res; - if ( (res = checkType(tag, TYPE_INT64)) != OK) { - return res; - } - return update(tag, (const void*)data, data_count); -} - -status_t CameraMetadata::update(uint32_t tag, - const double *data, size_t data_count) { - status_t res; - if ( (res = checkType(tag, TYPE_DOUBLE)) != OK) { - return res; - } - return update(tag, (const void*)data, data_count); -} - -status_t CameraMetadata::update(uint32_t tag, - const camera_metadata_rational_t *data, size_t data_count) { - status_t res; - if ( (res = checkType(tag, TYPE_RATIONAL)) != OK) { - return res; - } - return update(tag, (const void*)data, data_count); -} - -status_t CameraMetadata::update(uint32_t tag, - const String8 &string) { - status_t res; - if ( (res = checkType(tag, TYPE_BYTE)) != OK) { - return res; - } - return update(tag, (const void*)string.string(), string.size()); -} - -status_t CameraMetadata::update(uint32_t tag, const void *data, - size_t data_count) { - status_t res; - int type = get_camera_metadata_tag_type(tag); - if (type == -1) { - ALOGE("%s: Tag %d not found", __FUNCTION__, tag); - return BAD_VALUE; - } - size_t data_size = calculate_camera_metadata_entry_data_size(type, - data_count); - - res = resizeIfNeeded(1, data_size); - - if (res == OK) { - camera_metadata_entry_t entry; - res = find_camera_metadata_entry(mBuffer, tag, &entry); - if (res == NAME_NOT_FOUND) { - res = add_camera_metadata_entry(mBuffer, - tag, data, data_count); - } else if (res == OK) { - res = update_camera_metadata_entry(mBuffer, - entry.index, data, data_count, NULL); - } - } - - if (res != OK) { - ALOGE("%s: Unable to update metadata entry %s.%s (%x): %s (%d)", - __FUNCTION__, get_camera_metadata_section_name(tag), - get_camera_metadata_tag_name(tag), tag, strerror(-res), res); - } - return res; -} - -camera_metadata_entry_t CameraMetadata::find(uint32_t tag) { - status_t res; - camera_metadata_entry entry; - res = find_camera_metadata_entry(mBuffer, tag, &entry); - if (CC_UNLIKELY( 
res != OK )) { - entry.count = 0; - entry.data.u8 = NULL; - } - return entry; -} - -camera_metadata_ro_entry_t CameraMetadata::find(uint32_t tag) const { - status_t res; - camera_metadata_ro_entry entry; - res = find_camera_metadata_ro_entry(mBuffer, tag, &entry); - if (CC_UNLIKELY( res != OK )) { - entry.count = 0; - entry.data.u8 = NULL; - } - return entry; -} - -status_t CameraMetadata::erase(uint32_t tag) { - camera_metadata_entry_t entry; - status_t res; - res = find_camera_metadata_entry(mBuffer, tag, &entry); - if (res == NAME_NOT_FOUND) { - return OK; - } else if (res != OK) { - ALOGE("%s: Error looking for entry %s.%s (%x): %s %d", - __FUNCTION__, - get_camera_metadata_section_name(tag), - get_camera_metadata_tag_name(tag), tag, strerror(-res), res); - return res; - } - res = delete_camera_metadata_entry(mBuffer, entry.index); - if (res != OK) { - ALOGE("%s: Error deleting entry %s.%s (%x): %s %d", - __FUNCTION__, - get_camera_metadata_section_name(tag), - get_camera_metadata_tag_name(tag), tag, strerror(-res), res); - } - return res; -} - -void CameraMetadata::dump(int fd, int verbosity, int indentation) const { - dump_indented_camera_metadata(mBuffer, fd, verbosity, indentation); -} - -status_t CameraMetadata::resizeIfNeeded(size_t extraEntries, size_t extraData) { - if (mBuffer == NULL) { - mBuffer = allocate_camera_metadata(extraEntries * 2, extraData * 2); - if (mBuffer == NULL) { - ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__); - return NO_MEMORY; - } - } else { - size_t currentEntryCount = get_camera_metadata_entry_count(mBuffer); - size_t currentEntryCap = get_camera_metadata_entry_capacity(mBuffer); - size_t newEntryCount = currentEntryCount + - extraEntries; - newEntryCount = (newEntryCount > currentEntryCap) ? - newEntryCount * 2 : currentEntryCap; - - size_t currentDataCount = get_camera_metadata_data_count(mBuffer); - size_t currentDataCap = get_camera_metadata_data_capacity(mBuffer); - size_t newDataCount = currentDataCount + - extraData; - newDataCount = (newDataCount > currentDataCap) ? - newDataCount * 2 : currentDataCap; - - if (newEntryCount > currentEntryCap || - newDataCount > currentDataCap) { - camera_metadata_t *oldBuffer = mBuffer; - mBuffer = allocate_camera_metadata(newEntryCount, - newDataCount); - if (mBuffer == NULL) { - ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__); - return NO_MEMORY; - } - append_camera_metadata(mBuffer, oldBuffer); - free_camera_metadata(oldBuffer); - } - } - return OK; -} - -}; // namespace camera2 -}; // namespace android diff --git a/services/camera/libcameraservice/camera2/CameraMetadata.h b/services/camera/libcameraservice/camera2/CameraMetadata.h deleted file mode 100644 index aee6cd7..0000000 --- a/services/camera/libcameraservice/camera2/CameraMetadata.h +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2METADATA_CPP -#define ANDROID_SERVERS_CAMERA_CAMERA2METADATA_CPP - -#include "system/camera_metadata.h" -#include -#include - -namespace android { -namespace camera2 { - -/** - * A convenience wrapper around the C-based camera_metadata_t library. - */ -class CameraMetadata { - public: - /** Creates an empty object; best used when expecting to acquire contents - * from elsewhere */ - CameraMetadata(); - /** Creates an object with space for entryCapacity entries, with - * dataCapacity extra storage */ - CameraMetadata(size_t entryCapacity, size_t dataCapacity = 10); - - ~CameraMetadata(); - - /** Takes ownership of passed-in buffer */ - CameraMetadata(camera_metadata_t *buffer); - /** Clones the metadata */ - CameraMetadata(const CameraMetadata &other); - - /** - * Assignment clones metadata buffer. - */ - CameraMetadata &operator=(const CameraMetadata &other); - CameraMetadata &operator=(const camera_metadata_t *buffer); - - /** - * Release a raw metadata buffer to the caller. After this call, - * CameraMetadata no longer references the buffer, and the caller takes - * responsibility for freeing the raw metadata buffer (using - * free_camera_metadata()), or for handing it to another CameraMetadata - * instance. - */ - camera_metadata_t* release(); - - /** - * Clear the metadata buffer and free all storage used by it - */ - void clear(); - - /** - * Acquire a raw metadata buffer from the caller. After this call, - * the caller no longer owns the raw buffer, and must not free or manipulate it. - * If CameraMetadata already contains metadata, it is freed. - */ - void acquire(camera_metadata_t* buffer); - - /** - * Acquires raw buffer from other CameraMetadata object. After the call, the argument - * object no longer has any metadata. - */ - void acquire(CameraMetadata &other); - - /** - * Append metadata from another CameraMetadata object. - */ - status_t append(const CameraMetadata &other); - - /** - * Number of metadata entries. - */ - size_t entryCount() const; - - /** - * Is the buffer empty (no entires) - */ - bool isEmpty() const; - - /** - * Sort metadata buffer for faster find - */ - status_t sort(); - - /** - * Update metadata entry. Will create entry if it doesn't exist already, and - * will reallocate the buffer if insufficient space exists. Overloaded for - * the various types of valid data. - */ - status_t update(uint32_t tag, - const uint8_t *data, size_t data_count); - status_t update(uint32_t tag, - const int32_t *data, size_t data_count); - status_t update(uint32_t tag, - const float *data, size_t data_count); - status_t update(uint32_t tag, - const int64_t *data, size_t data_count); - status_t update(uint32_t tag, - const double *data, size_t data_count); - status_t update(uint32_t tag, - const camera_metadata_rational_t *data, size_t data_count); - status_t update(uint32_t tag, - const String8 &string); - - template - status_t update(uint32_t tag, Vector data) { - return update(tag, data.array(), data.size()); - } - - /** - * Get metadata entry by tag id - */ - camera_metadata_entry find(uint32_t tag); - - /** - * Get metadata entry by tag id, with no editing - */ - camera_metadata_ro_entry find(uint32_t tag) const; - - /** - * Delete metadata entry by tag - */ - status_t erase(uint32_t tag); - - /** - * Dump contents into FD for debugging. 
The verbosity levels are - * 0: Tag entry information only, no data values - * 1: Level 0 plus at most 16 data values per entry - * 2: All information - * - * The indentation parameter sets the number of spaces to add to the start - * each line of output. - */ - void dump(int fd, int verbosity = 1, int indentation = 0) const; - - private: - camera_metadata_t *mBuffer; - - /** - * Check if tag has a given type - */ - status_t checkType(uint32_t tag, uint8_t expectedType); - - /** - * Base update entry method - */ - status_t update(uint32_t tag, const void *data, size_t data_count); - - /** - * Resize metadata buffer if needed by reallocating it and copying it over. - */ - status_t resizeIfNeeded(size_t extraEntries, size_t extraData); - -}; - -}; // namespace camera2 -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.h b/services/camera/libcameraservice/camera2/CaptureSequencer.h index c42df05..7db8007 100644 --- a/services/camera/libcameraservice/camera2/CaptureSequencer.h +++ b/services/camera/libcameraservice/camera2/CaptureSequencer.h @@ -23,7 +23,7 @@ #include #include #include -#include "CameraMetadata.h" +#include "camera/CameraMetadata.h" #include "Parameters.h" #include "FrameProcessor.h" diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.h b/services/camera/libcameraservice/camera2/FrameProcessor.h index 3bd4e25..66e3cda 100644 --- a/services/camera/libcameraservice/camera2/FrameProcessor.h +++ b/services/camera/libcameraservice/camera2/FrameProcessor.h @@ -22,7 +22,7 @@ #include #include #include -#include "CameraMetadata.h" +#include "camera/CameraMetadata.h" struct camera_frame_metadata; diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.h b/services/camera/libcameraservice/camera2/JpegProcessor.h index 836bd02..2283f28 100644 --- a/services/camera/libcameraservice/camera2/JpegProcessor.h +++ b/services/camera/libcameraservice/camera2/JpegProcessor.h @@ -24,7 +24,7 @@ #include #include #include "Parameters.h" -#include "CameraMetadata.h" +#include "camera/CameraMetadata.h" namespace android { diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h index 9f5f03b..fe3ec1d 100644 --- a/services/camera/libcameraservice/camera2/Parameters.h +++ b/services/camera/libcameraservice/camera2/Parameters.h @@ -25,8 +25,7 @@ #include #include #include - -#include "CameraMetadata.h" +#include namespace android { namespace camera2 { diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.h b/services/camera/libcameraservice/camera2/StreamingProcessor.h index 96b100f..e5732ad 100644 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.h +++ b/services/camera/libcameraservice/camera2/StreamingProcessor.h @@ -22,7 +22,7 @@ #include #include "Parameters.h" -#include "CameraMetadata.h" +#include "camera/CameraMetadata.h" namespace android { diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/camera2/ZslProcessor.h index c80e7f4..ec16eef 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor.h +++ b/services/camera/libcameraservice/camera2/ZslProcessor.h @@ -25,7 +25,7 @@ #include #include "Parameters.h" #include "FrameProcessor.h" -#include "CameraMetadata.h" +#include "camera/CameraMetadata.h" #include "Camera2Heap.h" #include "../Camera2Device.h" -- cgit v1.1 From fc42642ab78da5fe25bcbea7a568bf880268a9dc Mon Sep 17 00:00:00 2001 From: Igor Murashkin 
Date: Wed, 13 Feb 2013 18:23:39 -0800 Subject: Camera: Add exists function to CameraMetadata Change-Id: I081f61417fca7c8408e9e767129647ddce5364ab --- camera/CameraMetadata.cpp | 5 +++++ include/camera/CameraMetadata.h | 6 ++++++ 2 files changed, 11 insertions(+) diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp index 0bfa420..fdd0610 100644 --- a/camera/CameraMetadata.cpp +++ b/camera/CameraMetadata.cpp @@ -208,6 +208,11 @@ status_t CameraMetadata::update(uint32_t tag, const void *data, return res; } +bool CameraMetadata::exists(uint32_t tag) const { + camera_metadata_ro_entry entry; + return find_camera_metadata_ro_entry(mBuffer, tag, &entry) == 0; +} + camera_metadata_entry_t CameraMetadata::find(uint32_t tag) { status_t res; camera_metadata_entry entry; diff --git a/include/camera/CameraMetadata.h b/include/camera/CameraMetadata.h index cf599e1..4289126 100644 --- a/include/camera/CameraMetadata.h +++ b/include/camera/CameraMetadata.h @@ -121,6 +121,12 @@ class CameraMetadata { } /** + * Check if a metadata entry exists for a given tag id + * + */ + bool exists(uint32_t tag) const; + + /** * Get metadata entry by tag id */ camera_metadata_entry find(uint32_t tag); -- cgit v1.1 From 1d88023e1de6b9f370eb4be944dd9c4480d01f11 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 20 Feb 2013 16:50:13 -0800 Subject: Camera: Add unit test infrastructure for libcameraclient Change-Id: I957538663ae8332d26f3640c63a75efc79f4ac5c --- camera/Android.mk | 7 +++-- camera/tests/Android.mk | 36 ++++++++++++++++++++++ camera/tests/main.cpp | 27 ++++++++++++++++ include/camera/Camera.h | 2 +- services/camera/libcameraservice/CameraService.cpp | 1 + 5 files changed, 70 insertions(+), 3 deletions(-) create mode 100644 camera/tests/Android.mk create mode 100644 camera/tests/main.cpp diff --git a/camera/Android.mk b/camera/Android.mk index b66105b..a17ad1a 100644 --- a/camera/Android.mk +++ b/camera/Android.mk @@ -1,6 +1,9 @@ -LOCAL_PATH:= $(call my-dir) +CAMERA_CLIENT_LOCAL_PATH:= $(call my-dir) +include $(call all-subdir-makefiles) include $(CLEAR_VARS) +LOCAL_PATH := $(CAMERA_CLIENT_LOCAL_PATH) + LOCAL_SRC_FILES:= \ Camera.cpp \ CameraMetadata.cpp \ @@ -21,7 +24,7 @@ LOCAL_SHARED_LIBRARIES := \ libcamera_metadata \ LOCAL_C_INCLUDES += \ - system/media/camera/include + system/media/camera/include \ LOCAL_MODULE:= libcamera_client diff --git a/camera/tests/Android.mk b/camera/tests/Android.mk new file mode 100644 index 0000000..586e814 --- /dev/null +++ b/camera/tests/Android.mk @@ -0,0 +1,36 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + main.cpp \ + +LOCAL_SHARED_LIBRARIES := \ + libutils \ + libcutils \ + libstlport \ + libcamera_metadata \ + libcamera_client \ + libgui \ + libsync \ + libui \ + libdl + +LOCAL_STATIC_LIBRARIES := \ + libgtest + +LOCAL_C_INCLUDES += \ + bionic \ + bionic/libstdc++/include \ + external/gtest/include \ + external/stlport/stlport \ + system/media/camera/include \ + frameworks/av/services/camera/libcameraservice \ + frameworks/av/include/camera \ + frameworks/native/include \ + +LOCAL_CFLAGS += -Wall -Wextra + +LOCAL_MODULE:= camera_client_test +LOCAL_MODULE_TAGS := tests + +include $(BUILD_EXECUTABLE) diff --git a/camera/tests/main.cpp b/camera/tests/main.cpp new file mode 100644 index 0000000..8c8c515 --- /dev/null +++ b/camera/tests/main.cpp @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file 
except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + + +int main(int argc, char **argv) { + + ::testing::InitGoogleTest(&argc, argv); + + int ret = RUN_ALL_TESTS(); + + return ret; +} diff --git a/include/camera/Camera.h b/include/camera/Camera.h index 43dae1c..8b87de6 100644 --- a/include/camera/Camera.h +++ b/include/camera/Camera.h @@ -159,7 +159,7 @@ public: sp mCamera; }; -private: +protected: Camera(); Camera(const Camera&); Camera& operator=(const Camera); diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 31e20c5..b1c594a 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -166,6 +166,7 @@ sp CameraService::connect( callingPid); return client; } else { + // TODOSC: need to support 1 regular client, multiple shared clients here ALOGW("CameraService::connect X (pid %d) rejected (existing client).", callingPid); return NULL; -- cgit v1.1 From 634a51509ee50475f3e9f8ccf897e90fc72ded31 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 20 Feb 2013 17:15:11 -0800 Subject: Camera: Add ProCamera private binder interface for an API2-light functionality Change-Id: I2af7a807c99df75ea659e6e6acc9c4fca6a56274 --- camera/Android.mk | 5 +- camera/Camera.cpp | 3 +- camera/ICameraService.cpp | 18 ++ camera/IProCameraCallbacks.cpp | 144 ++++++++++ camera/IProCameraUser.cpp | 264 ++++++++++++++++++ camera/ProCamera.cpp | 230 ++++++++++++++++ camera/tests/Android.mk | 1 + camera/tests/ProCameraTests.cpp | 68 +++++ include/camera/ICameraService.h | 8 +- include/camera/IProCameraCallbacks.h | 57 ++++ include/camera/IProCameraUser.h | 82 ++++++ include/camera/ProCamera.h | 167 ++++++++++++ services/camera/libcameraservice/CameraService.cpp | 296 +++++++++++++++++++-- services/camera/libcameraservice/CameraService.h | 147 ++++++++-- 14 files changed, 1434 insertions(+), 56 deletions(-) create mode 100644 camera/IProCameraCallbacks.cpp create mode 100644 camera/IProCameraUser.cpp create mode 100644 camera/ProCamera.cpp create mode 100644 camera/tests/ProCameraTests.cpp create mode 100644 include/camera/IProCameraCallbacks.h create mode 100644 include/camera/IProCameraUser.h create mode 100644 include/camera/ProCamera.h diff --git a/camera/Android.mk b/camera/Android.mk index a17ad1a..3e7e5a5 100644 --- a/camera/Android.mk +++ b/camera/Android.mk @@ -12,7 +12,10 @@ LOCAL_SRC_FILES:= \ ICameraClient.cpp \ ICameraService.cpp \ ICameraRecordingProxy.cpp \ - ICameraRecordingProxyListener.cpp + ICameraRecordingProxyListener.cpp \ + IProCameraUser.cpp \ + IProCameraCallbacks.cpp \ + ProCamera.cpp \ LOCAL_SHARED_LIBRARIES := \ libcutils \ diff --git a/camera/Camera.cpp b/camera/Camera.cpp index 3aaacaf..be395ba 100644 --- a/camera/Camera.cpp +++ b/camera/Camera.cpp @@ -120,9 +120,10 @@ sp Camera::connect(int cameraId) { ALOGV("connect"); sp c = new Camera(); + sp cl = c; const sp& cs = getCameraService(); if (cs != 0) { - c->mCamera = cs->connect(c, cameraId); + c->mCamera = cs->connect(cl, cameraId); } if (c->mCamera != 0) { 
c->mCamera->asBinder()->linkToDeath(c); diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp index f2d367e..8237c66 100644 --- a/camera/ICameraService.cpp +++ b/camera/ICameraService.cpp @@ -65,6 +65,17 @@ public: remote()->transact(BnCameraService::CONNECT, data, &reply); return interface_cast(reply.readStrongBinder()); } + + // connect to camera service (pro client) + virtual sp connect(const sp& cameraCb, int cameraId) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); + data.writeStrongBinder(cameraCb->asBinder()); + data.writeInt32(cameraId); + remote()->transact(BnCameraService::CONNECT_PRO, data, &reply); + return interface_cast(reply.readStrongBinder()); + } }; IMPLEMENT_META_INTERFACE(CameraService, "android.hardware.ICameraService"); @@ -97,6 +108,13 @@ status_t BnCameraService::onTransact( reply->writeStrongBinder(camera->asBinder()); return NO_ERROR; } break; + case CONNECT_PRO: { + CHECK_INTERFACE(ICameraService, data, reply); + sp cameraClient = interface_cast(data.readStrongBinder()); + sp camera = connect(cameraClient, data.readInt32()); + reply->writeStrongBinder(camera->asBinder()); + return NO_ERROR; + } break; default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/camera/IProCameraCallbacks.cpp b/camera/IProCameraCallbacks.cpp new file mode 100644 index 0000000..c2ad74f --- /dev/null +++ b/camera/IProCameraCallbacks.cpp @@ -0,0 +1,144 @@ +/* +** +** Copyright 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IProCameraCallbacks" +#include +#include +#include + +#include +#include +#include +#include + +#include + +namespace android { + +enum { + NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION, + DATA_CALLBACK, + DATA_CALLBACK_TIMESTAMP, +}; + +class BpProCameraCallbacks: public BpInterface +{ +public: + BpProCameraCallbacks(const sp& impl) + : BpInterface(impl) + { + } + + // generic callback from camera service to app + void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) + { + ALOGV("notifyCallback"); + Parcel data, reply; + data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor()); + data.writeInt32(msgType); + data.writeInt32(ext1); + data.writeInt32(ext2); + remote()->transact(NOTIFY_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY); + } + + // generic data callback from camera service to app with image data + void dataCallback(int32_t msgType, const sp& imageData, + camera_frame_metadata_t *metadata) + { + ALOGV("dataCallback"); + Parcel data, reply; + data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor()); + data.writeInt32(msgType); + data.writeStrongBinder(imageData->asBinder()); + if (metadata) { + data.writeInt32(metadata->number_of_faces); + data.write(metadata->faces, + sizeof(camera_face_t) * metadata->number_of_faces); + } + remote()->transact(DATA_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY); + } + + // generic data callback from camera service to app with image data + void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, + const sp& imageData) + { + ALOGV("dataCallback"); + Parcel data, reply; + data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor()); + data.writeInt64(timestamp); + data.writeInt32(msgType); + data.writeStrongBinder(imageData->asBinder()); + remote()->transact(DATA_CALLBACK_TIMESTAMP, data, &reply, + IBinder::FLAG_ONEWAY); + } +}; + +IMPLEMENT_META_INTERFACE(ProCameraCallbacks, + "android.hardware.IProCameraCallbacks"); + +// ---------------------------------------------------------------------- + +status_t BnProCameraCallbacks::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch(code) { + case NOTIFY_CALLBACK: { + ALOGV("NOTIFY_CALLBACK"); + CHECK_INTERFACE(IProCameraCallbacks, data, reply); + int32_t msgType = data.readInt32(); + int32_t ext1 = data.readInt32(); + int32_t ext2 = data.readInt32(); + notifyCallback(msgType, ext1, ext2); + return NO_ERROR; + } break; + case DATA_CALLBACK: { + ALOGV("DATA_CALLBACK"); + CHECK_INTERFACE(IProCameraCallbacks, data, reply); + int32_t msgType = data.readInt32(); + sp imageData = interface_cast( + data.readStrongBinder()); + camera_frame_metadata_t *metadata = NULL; + if (data.dataAvail() > 0) { + metadata = new camera_frame_metadata_t; + metadata->number_of_faces = data.readInt32(); + metadata->faces = (camera_face_t *) data.readInplace( + sizeof(camera_face_t) * metadata->number_of_faces); + } + dataCallback(msgType, imageData, metadata); + if (metadata) delete metadata; + return NO_ERROR; + } break; + case DATA_CALLBACK_TIMESTAMP: { + ALOGV("DATA_CALLBACK_TIMESTAMP"); + CHECK_INTERFACE(IProCameraCallbacks, data, reply); + nsecs_t timestamp = data.readInt64(); + int32_t msgType = data.readInt32(); + sp imageData = interface_cast( + data.readStrongBinder()); + dataCallbackTimestamp(timestamp, msgType, imageData); + return NO_ERROR; + } break; + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// 
---------------------------------------------------------------------------- + +}; // namespace android + diff --git a/camera/IProCameraUser.cpp b/camera/IProCameraUser.cpp new file mode 100644 index 0000000..76c2dcd --- /dev/null +++ b/camera/IProCameraUser.cpp @@ -0,0 +1,264 @@ +/* +** +** Copyright 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IProCameraUser" +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { + +typedef Parcel::WritableBlob WritableBlob; +typedef Parcel::ReadableBlob ReadableBlob; + +enum { + DISCONNECT = IBinder::FIRST_CALL_TRANSACTION, + CONNECT, + EXCLUSIVE_TRY_LOCK, + EXCLUSIVE_LOCK, + EXCLUSIVE_UNLOCK, + HAS_EXCLUSIVE_LOCK, + SUBMIT_REQUEST, + CANCEL_REQUEST, + REQUEST_STREAM, + CANCEL_STREAM, +}; + +class BpProCameraUser: public BpInterface +{ +public: + BpProCameraUser(const sp& impl) + : BpInterface(impl) + { + } + + // disconnect from camera service + void disconnect() + { + ALOGV("disconnect"); + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + remote()->transact(DISCONNECT, data, &reply); + } + + virtual status_t connect(const sp& cameraClient) + { + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + data.writeStrongBinder(cameraClient->asBinder()); + remote()->transact(CONNECT, data, &reply); + return reply.readInt32(); + } + + /* Shared ProCameraUser */ + + virtual status_t exclusiveTryLock() + { + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + remote()->transact(EXCLUSIVE_TRY_LOCK, data, &reply); + return reply.readInt32(); + } + virtual status_t exclusiveLock() + { + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + remote()->transact(EXCLUSIVE_LOCK, data, &reply); + return reply.readInt32(); + } + + virtual status_t exclusiveUnlock() + { + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + remote()->transact(EXCLUSIVE_UNLOCK, data, &reply); + return reply.readInt32(); + } + + virtual bool hasExclusiveLock() + { + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + remote()->transact(HAS_EXCLUSIVE_LOCK, data, &reply); + return !!reply.readInt32(); + } + + virtual int submitRequest(camera_metadata_t* metadata, bool streaming) + { + + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + + // arg0 = metadataSize (int32) + size_t metadataSize = get_camera_metadata_compact_size(metadata); + data.writeInt32(static_cast(metadataSize)); + + // arg1 = metadata (blob) + WritableBlob blob; + { + data.writeBlob(metadataSize, &blob); + copy_camera_metadata(blob.data(), metadataSize, metadata); + } + blob.release(); + + // arg2 = streaming (bool) + data.writeInt32(streaming); + + remote()->transact(SUBMIT_REQUEST, data, &reply); + return 
reply.readInt32(); + } + + virtual status_t cancelRequest(int requestId) + { + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + data.writeInt32(requestId); + + remote()->transact(CANCEL_REQUEST, data, &reply); + return reply.readInt32(); + } + + virtual status_t requestStream(int streamId) + { + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + data.writeInt32(streamId); + + remote()->transact(REQUEST_STREAM, data, &reply); + return reply.readInt32(); + } + virtual status_t cancelStream(int streamId) + { + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + data.writeInt32(streamId); + + remote()->transact(CANCEL_STREAM, data, &reply); + return reply.readInt32(); + } + +}; + +IMPLEMENT_META_INTERFACE(ProCameraUser, "android.hardware.IProCameraUser"); + +// ---------------------------------------------------------------------- + +status_t BnProCameraUser::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch(code) { + case DISCONNECT: { + ALOGV("DISCONNECT"); + CHECK_INTERFACE(IProCameraUser, data, reply); + disconnect(); + return NO_ERROR; + } break; + case CONNECT: { + CHECK_INTERFACE(IProCameraUser, data, reply); + sp cameraClient = + interface_cast(data.readStrongBinder()); + reply->writeInt32(connect(cameraClient)); + return NO_ERROR; + } break; + + /* Shared ProCameraUser */ + case EXCLUSIVE_TRY_LOCK: { + CHECK_INTERFACE(IProCameraUser, data, reply); + reply->writeInt32(exclusiveTryLock()); + return NO_ERROR; + } break; + case EXCLUSIVE_LOCK: { + CHECK_INTERFACE(IProCameraUser, data, reply); + reply->writeInt32(exclusiveLock()); + return NO_ERROR; + } break; + case EXCLUSIVE_UNLOCK: { + CHECK_INTERFACE(IProCameraUser, data, reply); + reply->writeInt32(exclusiveUnlock()); + return NO_ERROR; + } break; + case HAS_EXCLUSIVE_LOCK: { + CHECK_INTERFACE(IProCameraUser, data, reply); + reply->writeInt32(hasExclusiveLock()); + return NO_ERROR; + } break; + case SUBMIT_REQUEST: { + CHECK_INTERFACE(IProCameraUser, data, reply); + camera_metadata_t* metadata; + + // arg0 = metadataSize (int32) + size_t metadataSize = static_cast(data.readInt32()); + + // NOTE: this doesn't make sense to me. shouldnt the blob + // know how big it is? why do we have to specify the size + // to Parcel::readBlob ? 
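On the NOTE just above: the blob is not self-describing on the receive side — readBlob() is called with the byte count rather than discovering it from the parcel — so the proxy writes the compact size as its own int32 ahead of the blob, and the stub reads that int32 back before mapping the blob. A condensed pairing of the two ends, using the same calls as the code above:

    // proxy (write) side
    size_t metadataSize = get_camera_metadata_compact_size(metadata);
    data.writeInt32(static_cast<int32_t>(metadataSize));   // length travels separately
    Parcel::WritableBlob wblob;
    data.writeBlob(metadataSize, &wblob);
    copy_camera_metadata(wblob.data(), metadataSize, metadata);
    wblob.release();

    // stub (read) side
    size_t incomingSize = (size_t) data.readInt32();        // must be read back first...
    Parcel::ReadableBlob rblob;
    data.readBlob(incomingSize, &rblob);                    // ...because readBlob() needs that length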
+ + ReadableBlob blob; + // arg1 = metadata (blob) + { + data.readBlob(metadataSize, &blob); + const camera_metadata_t* tmp = + reinterpret_cast(blob.data()); + size_t entry_capacity = get_camera_metadata_entry_capacity(tmp); + size_t data_capacity = get_camera_metadata_data_capacity(tmp); + + metadata = allocate_camera_metadata(entry_capacity, + data_capacity); + copy_camera_metadata(metadata, metadataSize, tmp); + } + blob.release(); + + // arg2 = streaming (bool) + bool streaming = data.readInt32(); + + // return code: requestId (int32) + reply->writeInt32(submitRequest(metadata, streaming)); + + return NO_ERROR; + } break; + case CANCEL_REQUEST: { + CHECK_INTERFACE(IProCameraUser, data, reply); + int requestId = data.readInt32(); + reply->writeInt32(cancelRequest(requestId)); + return NO_ERROR; + } break; + case REQUEST_STREAM: { + CHECK_INTERFACE(IProCameraUser, data, reply); + int streamId = data.readInt32(); + reply->writeInt32(requestStream(streamId)); + return NO_ERROR; + } break; + case CANCEL_STREAM: { + CHECK_INTERFACE(IProCameraUser, data, reply); + int streamId = data.readInt32(); + reply->writeInt32(cancelStream(streamId)); + return NO_ERROR; + } break; + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp new file mode 100644 index 0000000..134a4a3 --- /dev/null +++ b/camera/ProCamera.cpp @@ -0,0 +1,230 @@ +/* +** +** Copyright (C) 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "ProCamera" +#include +#include +#include + +#include +#include +#include + +#include +#include +#include +#include + +#include +#include + +namespace android { + +// client singleton for camera service binder interface +Mutex ProCamera::mLock; +sp ProCamera::mCameraService; +sp ProCamera::mDeathNotifier; + +// establish binder interface to camera service +const sp& ProCamera::getCameraService() +{ + Mutex::Autolock _l(mLock); + if (mCameraService.get() == 0) { + sp sm = defaultServiceManager(); + sp binder; + do { + binder = sm->getService(String16("media.camera")); + if (binder != 0) + break; + ALOGW("CameraService not published, waiting..."); + usleep(500000); // 0.5 s + } while(true); + if (mDeathNotifier == NULL) { + mDeathNotifier = new DeathNotifier(); + } + binder->linkToDeath(mDeathNotifier); + mCameraService = interface_cast(binder); + } + ALOGE_IF(mCameraService==0, "no CameraService!?"); + return mCameraService; +} + +sp ProCamera::connect(int cameraId) +{ + ALOGV("connect"); + sp c = new ProCamera(); + sp cl = c; + const sp& cs = getCameraService(); + if (cs != 0) { + c->mCamera = cs->connect(cl, cameraId); + } + if (c->mCamera != 0) { + c->mCamera->asBinder()->linkToDeath(c); + c->mStatus = NO_ERROR; + } else { + c.clear(); + } + return c; +} + +void ProCamera::disconnect() +{ + ALOGV("disconnect"); + if (mCamera != 0) { + mCamera->disconnect(); + mCamera->asBinder()->unlinkToDeath(this); + mCamera = 0; + } +} + +ProCamera::ProCamera() +{ +} + +ProCamera::~ProCamera() +{ + +} + +sp ProCamera::remote() +{ + return mCamera; +} + +void ProCamera::binderDied(const wp& who) { + ALOGW("IProCameraUser died"); + notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_SERVER_DIED, 0); +} + +void ProCamera::DeathNotifier::binderDied(const wp& who) { + ALOGV("binderDied"); + Mutex::Autolock _l(ProCamera::mLock); + ProCamera::mCameraService.clear(); + ALOGW("Camera service died!"); +} + + +// callback from camera service +void ProCamera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) +{ + sp listener; + { + Mutex::Autolock _l(mLock); + listener = mListener; + } + if (listener != NULL) { + listener->notify(msgType, ext1, ext2); + } +} + +// callback from camera service when frame or image is ready +void ProCamera::dataCallback(int32_t msgType, const sp& dataPtr, + camera_frame_metadata_t *metadata) +{ + sp listener; + { + Mutex::Autolock _l(mLock); + listener = mListener; + } + if (listener != NULL) { + listener->postData(msgType, dataPtr, metadata); + } +} + +// callback from camera service when timestamped frame is ready +void ProCamera::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, + const sp& dataPtr) +{ + sp listener; + { + Mutex::Autolock _l(mLock); + listener = mListener; + } + if (listener != NULL) { + listener->postDataTimestamp(timestamp, msgType, dataPtr); + } else { + ALOGW("No listener was set. Drop a recording frame."); + } +} + +/* IProCameraUser's implementation */ + +status_t ProCamera::exclusiveTryLock() +{ + sp c = mCamera; + if (c == 0) return NO_INIT; + + return c->exclusiveTryLock(); +} +status_t ProCamera::exclusiveLock() +{ + sp c = mCamera; + if (c == 0) return NO_INIT; + + return c->exclusiveLock(); +} +status_t ProCamera::exclusiveUnlock() +{ + sp c = mCamera; + if (c == 0) return NO_INIT; + + return c->exclusiveUnlock(); +} +bool ProCamera::hasExclusiveLock() +{ + sp c = mCamera; + if (c == 0) return NO_INIT; + + return c->hasExclusiveLock(); +} + +// Note that the callee gets a copy of the metadata. 
+int ProCamera::submitRequest(const struct camera_metadata* metadata, + bool streaming) +{ + sp c = mCamera; + if (c == 0) return NO_INIT; + + return c->submitRequest(const_cast(metadata), + streaming); +} + +status_t ProCamera::cancelRequest(int requestId) +{ + sp c = mCamera; + if (c == 0) return NO_INIT; + + return c->cancelRequest(requestId); +} + +status_t ProCamera::requestStream(int streamId) +{ + sp c = mCamera; + if (c == 0) return NO_INIT; + + return c->requestStream(streamId); +} +status_t ProCamera::cancelStream(int streamId) +{ + sp c = mCamera; + if (c == 0) return NO_INIT; + + return c->cancelStream(streamId); +} + +}; // namespace android diff --git a/camera/tests/Android.mk b/camera/tests/Android.mk index 586e814..5d386c4 100644 --- a/camera/tests/Android.mk +++ b/camera/tests/Android.mk @@ -3,6 +3,7 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ main.cpp \ + ProCameraTests.cpp \ LOCAL_SHARED_LIBRARIES := \ libutils \ diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp new file mode 100644 index 0000000..4de9c10 --- /dev/null +++ b/camera/tests/ProCameraTests.cpp @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#include "Camera.h" +#include "ProCamera.h" + +namespace android { +namespace camera2 { +namespace tests { +namespace client { + +#define CAMERA_ID 0 +#define TEST_DEBUGGING 0 + +#if TEST_DEBUGGING +#define dout std::cerr +#else +#define dout if (0) std::cerr +#endif + +class ProCameraTest : public ::testing::Test { + + virtual void SetUp() { + mCamera = ProCamera::connect(CAMERA_ID); + ASSERT_NE((void*)NULL, mCamera.get()); + } + + virtual void TearDown() { + ASSERT_NE((void*)NULL, mCamera.get()); + mCamera->disconnect(); + } + +protected: + sp mCamera; +}; + +TEST_F(ProCameraTest, Locking) { + + if (HasFatalFailure()) { + return; + } + + status_t res = mCamera->exclusiveTryLock(); + + EXPECT_EQ(OK, res); +} + +} +} +} +} + diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h index 7d70c1e..11d7b65 100644 --- a/include/camera/ICameraService.h +++ b/include/camera/ICameraService.h @@ -23,6 +23,7 @@ #include #include +#include namespace android { @@ -32,7 +33,8 @@ public: enum { GET_NUMBER_OF_CAMERAS = IBinder::FIRST_CALL_TRANSACTION, GET_CAMERA_INFO, - CONNECT + CONNECT, + CONNECT_PRO }; public: @@ -43,6 +45,10 @@ public: struct CameraInfo* cameraInfo) = 0; virtual sp connect(const sp& cameraClient, int cameraId) = 0; + + virtual sp + connect(const sp& cameraCb, + int cameraId) = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/camera/IProCameraCallbacks.h b/include/camera/IProCameraCallbacks.h new file mode 100644 index 0000000..ac1d5eb --- /dev/null +++ b/include/camera/IProCameraCallbacks.h @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you 
may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_IPROCAMERA_CALLBACKS_H +#define ANDROID_HARDWARE_IPROCAMERA_CALLBACKS_H + +#include +#include +#include +#include +#include +#include + +namespace android { + +class IProCameraCallbacks: public IInterface +{ +public: + DECLARE_META_INTERFACE(ProCameraCallbacks); + + virtual void notifyCallback(int32_t msgType, int32_t ext1, + int32_t ext2) = 0; + virtual void dataCallback(int32_t msgType, + const sp& data, + camera_frame_metadata_t *metadata) = 0; + virtual void dataCallbackTimestamp(nsecs_t timestamp, + int32_t msgType, + const sp& data) = 0; +}; + +// ---------------------------------------------------------------------------- + +class BnProCameraCallbacks: public BnInterface +{ +public: + virtual status_t onTransact( uint32_t code, + const Parcel& data, + Parcel* reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif diff --git a/include/camera/IProCameraUser.h b/include/camera/IProCameraUser.h new file mode 100644 index 0000000..6170410 --- /dev/null +++ b/include/camera/IProCameraUser.h @@ -0,0 +1,82 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_IPROCAMERAUSER_H +#define ANDROID_HARDWARE_IPROCAMERAUSER_H + +#include +#include +#include +#include +#include +#include + +struct camera_metadata; + +namespace android { + +class IProCameraUserClient; +class IGraphicBufferProducer; +class Surface; + +class IProCameraUser: public IInterface +{ +public: + DECLARE_META_INTERFACE(ProCameraUser); + + virtual void disconnect() = 0; + + // connect to the service, given a callbacks listener + virtual status_t connect(const sp& callbacks) + = 0; + + /** + * Locking + **/ + virtual status_t exclusiveTryLock() = 0; + virtual status_t exclusiveLock() = 0; + virtual status_t exclusiveUnlock() = 0; + + virtual bool hasExclusiveLock() = 0; + + /** + * Request Handling + **/ + + // Note that the callee gets a copy of the metadata. 
+ virtual int submitRequest(struct camera_metadata* metadata, + bool streaming = false) = 0; + virtual status_t cancelRequest(int requestId) = 0; + + virtual status_t requestStream(int streamId) = 0; + virtual status_t cancelStream(int streamId) = 0; + +}; + +// ---------------------------------------------------------------------------- + +class BnProCameraUser: public BnInterface +{ +public: + virtual status_t onTransact( uint32_t code, + const Parcel& data, + Parcel* reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h new file mode 100644 index 0000000..ba5fdc0 --- /dev/null +++ b/include/camera/ProCamera.h @@ -0,0 +1,167 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_PRO_CAMERA_H +#define ANDROID_HARDWARE_PRO_CAMERA_H + +#include +#include +#include +#include +#include +#include + +struct camera_metadata; + +namespace android { + +// ref-counted object for callbacks +class ProCameraListener : public CameraListener +{ +public: + // Lock has been acquired. Write operations now available. + virtual void onLockAcquired() = 0; + // Lock has been released with exclusiveUnlock, or has been stolen by + // another client. + virtual void onLockReleased() = 0; + + // Lock free. + virtual void onTriggerNotify(int32_t msgType, int32_t ext1, int32_t ext2) + = 0; +}; + +class ProCamera : public BnProCameraCallbacks, public IBinder::DeathRecipient +{ +public: + /** + * Connect a shared camera. By default access is restricted to read only + * (Lock free) operations. To be able to submit custom requests a lock needs + * to be acquired with exclusive[Try]Lock. + */ + static sp connect(int cameraId); + virtual void disconnect(); + virtual ~ProCamera(); + + void setListener(const sp& listener); + + /** + * Exclusive Locks: + * - We may request exclusive access to a camera if no other + * clients are using the camera. This works as a traditional + * client, writing/reading any camera state. + * - An application opening the camera (a regular 'Camera') will + * always steal away the exclusive lock from a ProCamera, + * this will call onLockReleased. + * - onLockAcquired will be called again once it is possible + * to again exclusively lock the camera. + * + */ + + /** + * All exclusiveLock/unlock functions are asynchronous. The remote endpoint + * shall not block while waiting to acquire the lock. Instead the lock + * notifications will come in asynchronously on the listener. + */ + + /** + * Attempt to acquire the lock instantly (non-blocking) + * - If this succeeds, you do not need to wait for onLockAcquired + * but the event will still be fired + * + * Returns -EBUSY if already locked. 0 on success. + */ + status_t exclusiveTryLock(); + // always returns 0. wait for onLockAcquired before lock is acquired. + status_t exclusiveLock(); + // release a lock if we have one, or cancel the lock request. 
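Taken together with the locking notes above, the intended calling pattern is roughly the following (myListener and request are placeholders for a ProCameraListener implementation and a filled-in camera_metadata_t*; error handling trimmed):

    sp<ProCamera> cam = ProCamera::connect(0 /*cameraId*/);
    if (cam == NULL) return;                       // HALv1 device, camera disabled, or service gone
    cam->setListener(myListener);

    if (cam->exclusiveTryLock() == OK) {           // non-blocking; onLockAcquired still fires
        int requestId = cam->submitRequest(request, true /*streaming*/);
        // ... results arrive through the listener until cancelRequest(requestId) ...
        cam->exclusiveUnlock();                    // releasing the lock also flushes the request queue
    }
    cam->disconnect();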
+ status_t exclusiveUnlock(); + + // exclusive lock = do whatever we want. no lock = read only. + bool hasExclusiveLock(); + + /** + * < 0 error, >= 0 the request ID. streaming to have the request repeat + * until cancelled. + * The request queue is flushed when a lock is released or stolen + * if not locked will return PERMISSION_DENIED + */ + int submitRequest(const struct camera_metadata* metadata, + bool streaming = false); + // if not locked will return PERMISSION_DENIED, BAD_VALUE if requestId bad + status_t cancelRequest(int requestId); + + /** + * Ask for a stream to be enabled. + * Lock free. Service maintains counter of streams. + */ + status_t requestStream(int streamId); + /** + * Ask for a stream to be disabled. + * Lock free. Service maintains counter of streams. + * Errors: BAD_VALUE if unknown stream ID. + */ + status_t cancelStream(int streamId); + + sp remote(); + +protected: + //////////////////////////////////////////////////////// + // IProCameraCallbacks implementation + //////////////////////////////////////////////////////// + virtual void notifyCallback(int32_t msgType, int32_t ext, + int32_t ext2); + virtual void dataCallback(int32_t msgType, + const sp& dataPtr, + camera_frame_metadata_t *metadata); + virtual void dataCallbackTimestamp(nsecs_t timestamp, + int32_t msgType, + const sp& dataPtr); + + class DeathNotifier: public IBinder::DeathRecipient + { + public: + DeathNotifier() { + } + + virtual void binderDied(const wp& who); + }; + +private: + ProCamera(); + + virtual void binderDied(const wp& who); + + // helper function to obtain camera service handle + static const sp& getCameraService(); + + static sp mDeathNotifier; + + sp mCamera; + status_t mStatus; + + sp mListener; + + friend class DeathNotifier; + + static Mutex mLock; + static sp mCameraService; + + +}; + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index b1c594a..4941965 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -79,6 +79,8 @@ CameraService::CameraService() void CameraService::onFirstRef() { + LOG1("CameraService::onFirstRef"); + BnCameraService::onFirstRef(); if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, @@ -131,6 +133,26 @@ status_t CameraService::getCameraInfo(int cameraId, return rc; } +int CameraService::getDeviceVersion(int cameraId, int* facing) { + struct camera_info info; + if (mModule->get_camera_info(cameraId, &info) != OK) { + return -1; + } + + int deviceVersion; + if (mModule->common.module_api_version >= CAMERA_MODULE_API_VERSION_2_0) { + deviceVersion = info.device_version; + } else { + deviceVersion = CAMERA_DEVICE_API_VERSION_1_0; + } + + if (facing) { + *facing = info.facing; + } + + return deviceVersion; +} + sp CameraService::connect( const sp& cameraClient, int cameraId) { int callingPid = getCallingPid(); @@ -175,34 +197,96 @@ sp CameraService::connect( mClient[cameraId].clear(); } + /* + mBusy is set to false as the last step of the Client destructor, + after which it is guaranteed that the Client destructor has finished ( + including any inherited destructors) + + We only need this for a Client subclasses since we don't allow + multiple Clents to be opened concurrently, but multiple BasicClient + would be fine + */ if (mBusy[cameraId]) { ALOGW("CameraService::connect X (pid %d) rejected" " (camera %d is still busy).", callingPid, cameraId); return NULL; } - struct camera_info info; - if 
(mModule->get_camera_info(cameraId, &info) != OK) { + int facing = -1; + int deviceVersion = getDeviceVersion(cameraId, &facing); + + switch(deviceVersion) { + case CAMERA_DEVICE_API_VERSION_1_0: + client = new CameraClient(this, cameraClient, cameraId, + facing, callingPid, getpid()); + break; + case CAMERA_DEVICE_API_VERSION_2_0: + case CAMERA_DEVICE_API_VERSION_2_1: + client = new Camera2Client(this, cameraClient, cameraId, + facing, callingPid, getpid()); + break; + case -1: ALOGE("Invalid camera id %d", cameraId); return NULL; + default: + ALOGE("Unknown camera device HAL version: %d", deviceVersion); + return NULL; } - int deviceVersion; - if (mModule->common.module_api_version == CAMERA_MODULE_API_VERSION_2_0) { - deviceVersion = info.device_version; - } else { - deviceVersion = CAMERA_DEVICE_API_VERSION_1_0; + if (client->initialize(mModule) != OK) { + return NULL; + } + + cameraClient->asBinder()->linkToDeath(this); + + mClient[cameraId] = client; + LOG1("CameraService::connect X (id %d, this pid is %d)", cameraId, getpid()); + return client; +} + +sp CameraService::connect( + const sp& cameraCb, + int cameraId) +{ + int callingPid = getCallingPid(); + + LOG1("CameraService::connectPro E (pid %d, id %d)", callingPid, cameraId); + + if (!mModule) { + ALOGE("Camera HAL module not loaded"); + return NULL; + } + + sp client; + if (cameraId < 0 || cameraId >= mNumberOfCameras) { + ALOGE("CameraService::connectPro X (pid %d) rejected (invalid cameraId %d).", + callingPid, cameraId); + return NULL; } + char value[PROPERTY_VALUE_MAX]; + property_get("sys.secpolicy.camera.disabled", value, "0"); + if (strcmp(value, "1") == 0) { + // Camera is disabled by DevicePolicyManager. + ALOGI("Camera is disabled. connect X (pid %d) rejected", callingPid); + return NULL; + } + + int facing = -1; + int deviceVersion = getDeviceVersion(cameraId, &facing); + switch(deviceVersion) { case CAMERA_DEVICE_API_VERSION_1_0: - client = new CameraClient(this, cameraClient, cameraId, - info.facing, callingPid, getpid()); + ALOGE("Camera id %d uses HALv1, doesn't support ProCamera", cameraId); + return NULL; break; case CAMERA_DEVICE_API_VERSION_2_0: - client = new Camera2Client(this, cameraClient, cameraId, - info.facing, callingPid, getpid()); + client = new ProClient(this, cameraCb, cameraId, + facing, callingPid, getpid()); break; + case -1: + ALOGE("Invalid camera id %d", cameraId); + return NULL; default: ALOGE("Unknown camera device HAL version: %d", deviceVersion); return NULL; @@ -212,23 +296,27 @@ sp CameraService::connect( return NULL; } - cameraClient->asBinder()->linkToDeath(this); + mProClientList[cameraId].push(client); + + cameraCb->asBinder()->linkToDeath(this); - mClient[cameraId] = client; LOG1("CameraService::connect X (id %d, this pid is %d)", cameraId, getpid()); return client; + + + return NULL; } -void CameraService::removeClient(const sp& cameraClient) { +void CameraService::removeClientByRemote(const wp& remoteBinder) { int callingPid = getCallingPid(); - LOG1("CameraService::removeClient E (pid %d)", callingPid); + LOG1("CameraService::removeClientByRemote E (pid %d)", callingPid); // Declare this before the lock to make absolutely sure the // destructor won't be called with the lock held. Mutex::Autolock lock(mServiceLock); int outIndex; - sp client = findClientUnsafe(cameraClient->asBinder(), outIndex); + sp client = findClientUnsafe(remoteBinder, outIndex); if (client != 0) { // Found our camera, clear and leave. 
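// A standalone sketch of the dispatch pattern introduced above: connect() asks
// getDeviceVersion() once, then picks a client implementation per HAL device
// version. The names below (HalVersion, clientClassFor) are illustrative
// stand-ins, not identifiers from the patch.

#include <cstdio>

enum HalVersion {
    HAL_V1_0 = 0x100,   // stands in for CAMERA_DEVICE_API_VERSION_1_0
    HAL_V2_0 = 0x200,   // stands in for CAMERA_DEVICE_API_VERSION_2_0
    HAL_V2_1 = 0x201,   // stands in for CAMERA_DEVICE_API_VERSION_2_1
    HAL_BAD  = -1       // getDeviceVersion() reports -1 for an unknown camera id
};

// Maps a device version to the client class the service would instantiate.
static const char* clientClassFor(int deviceVersion) {
    switch (deviceVersion) {
    case HAL_V1_0: return "CameraClient";   // legacy HAL1 path
    case HAL_V2_0:
    case HAL_V2_1: return "Camera2Client";  // HAL2 path (ProClient on the pro API)
    case HAL_BAD:  return NULL;             // invalid camera id
    default:       return NULL;             // unknown HAL version, refuse to connect
    }
}

int main() {
    std::printf("0x200 -> %s\n", clientClassFor(HAL_V2_0));
    return 0;
}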
@@ -236,9 +324,50 @@ void CameraService::removeClient(const sp& cameraClient) { mClient[outIndex].clear(); client->unlinkToDeath(this); + } else { + + sp clientPro = findProClientUnsafe(remoteBinder); + + if (clientPro != NULL) { + // Found our camera, clear and leave. + LOG1("removeClient: clear pro %p", clientPro.get()); + + clientPro->getRemoteCallback()->asBinder()->unlinkToDeath(this); + } + } + + LOG1("CameraService::removeClientByRemote X (pid %d)", callingPid); +} + +sp CameraService::findProClientUnsafe( + const wp& cameraCallbacksRemote) +{ + sp clientPro; + + for (int i = 0; i < mNumberOfCameras; ++i) { + Vector removeIdx; + + for (size_t j = 0; j < mProClientList[i].size(); ++j) { + wp cl = mProClientList[i][j]; + + sp clStrong = cl.promote(); + if (clStrong != NULL && clStrong->getRemote() == cameraCallbacksRemote) { + clientPro = clStrong; + break; + } else if (clStrong == NULL) { + // mark to clean up dead ptr + removeIdx.push(j); + } + } + + // remove stale ptrs (in reverse so the indices dont change) + for (ssize_t j = (ssize_t)removeIdx.size() - 1; j >= 0; --j) { + mProClientList[i].removeAt(removeIdx[j]); + } + } - LOG1("CameraService::removeClient X (pid %d)", callingPid); + return clientPro; } sp CameraService::findClientUnsafe( @@ -252,7 +381,7 @@ sp CameraService::findClientUnsafe( if (mClient[i] == 0) continue; // Promote mClient. It can fail if we are called from this path: - // Client::~Client() -> disconnect() -> removeClient(). + // Client::~Client() -> disconnect() -> removeClientByRemote(). client = mClient[i].promote(); // Clean up stale client entry @@ -282,12 +411,12 @@ Mutex* CameraService::getClientLockById(int cameraId) { return &mClientLock[cameraId]; } -sp CameraService::getClientByRemote( +sp CameraService::getClientByRemote( const wp& cameraClient) { // Declare this before the lock to make absolutely sure the // destructor won't be called with the lock held. - sp client; + sp client; Mutex::Autolock lock(mServiceLock); @@ -302,6 +431,7 @@ status_t CameraService::onTransact( // Permission checks switch (code) { case BnCameraService::CONNECT: + case BnCameraService::CONNECT_PRO: const int pid = getCallingPid(); const int self_pid = getpid(); if (pid != self_pid) { @@ -390,17 +520,15 @@ void CameraService::playSound(sound_kind kind) { CameraService::Client::Client(const sp& cameraService, const sp& cameraClient, - int cameraId, int cameraFacing, int clientPid, int servicePid) { + int cameraId, int cameraFacing, int clientPid, int servicePid) : + CameraService::BasicClient(cameraService, cameraClient->asBinder(), + cameraId, cameraFacing, + clientPid, servicePid) +{ int callingPid = getCallingPid(); LOG1("Client::Client E (pid %d, id %d)", callingPid, cameraId); - mCameraService = cameraService; mCameraClient = cameraClient; - mCameraId = cameraId; - mCameraFacing = cameraFacing; - mClientPid = clientPid; - mServicePid = servicePid; - mDestructionStarted = false; cameraService->setCameraBusy(cameraId); cameraService->loadSound(); @@ -409,12 +537,37 @@ CameraService::Client::Client(const sp& cameraService, // tear down the client CameraService::Client::~Client() { + mDestructionStarted = true; + mCameraService->releaseSound(); // unconditionally disconnect. 
function is idempotent Client::disconnect(); } +CameraService::BasicClient::BasicClient(const sp& cameraService, + const sp& remoteCallback, + int cameraId, int cameraFacing, + int clientPid, int servicePid) +{ + mCameraService = cameraService; + mRemoteCallback = remoteCallback; + mCameraId = cameraId; + mCameraFacing = cameraFacing; + mClientPid = clientPid; + mServicePid = servicePid; + + mDestructionStarted = false; +} + +CameraService::BasicClient::~BasicClient() { + mDestructionStarted = true; +} + +void CameraService::BasicClient::disconnect() { + mCameraService->removeClientByRemote(mRemoteCallback); +} + // ---------------------------------------------------------------------------- Mutex* CameraService::Client::getClientLockFromCookie(void* user) { @@ -439,11 +592,96 @@ CameraService::Client* CameraService::Client::getClientFromCookie(void* user) { // NOTE: function is idempotent void CameraService::Client::disconnect() { - mCameraService->removeClient(mCameraClient); + BasicClient::disconnect(); mCameraService->setCameraFree(mCameraId); } // ---------------------------------------------------------------------------- +// IProCamera +// ---------------------------------------------------------------------------- + +CameraService::ProClient::ProClient(const sp& cameraService, + const sp& remoteCallback, + int cameraId, + int cameraFacing, + int clientPid, + int servicePid) + : CameraService::BasicClient(cameraService, remoteCallback->asBinder(), + cameraId, cameraFacing, + clientPid, servicePid) +{ + mRemoteCallback = remoteCallback; +} + +CameraService::ProClient::~ProClient() { + mDestructionStarted = true; + + ProClient::disconnect(); +} + +status_t CameraService::ProClient::connect(const sp& callbacks) { + ALOGE("%s: not implemented yet", __FUNCTION__); + + return INVALID_OPERATION; +} + +void CameraService::ProClient::disconnect() { + BasicClient::disconnect(); +} + +status_t CameraService::ProClient::initialize(camera_module_t* module) +{ + ALOGW("%s: not implemented yet", __FUNCTION__); + return OK; +} + +status_t CameraService::ProClient::exclusiveTryLock() { + ALOGE("%s: not implemented yet", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t CameraService::ProClient::exclusiveLock() { + ALOGE("%s: not implemented yet", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t CameraService::ProClient::exclusiveUnlock() { + ALOGE("%s: not implemented yet", __FUNCTION__); + return INVALID_OPERATION; +} + +bool CameraService::ProClient::hasExclusiveLock() { + ALOGE("%s: not implemented yet", __FUNCTION__); + return false; +} + +status_t CameraService::ProClient::submitRequest(camera_metadata_t* request, bool streaming) { + ALOGE("%s: not implemented yet", __FUNCTION__); + + free_camera_metadata(request); + + return INVALID_OPERATION; +} + +status_t CameraService::ProClient::cancelRequest(int requestId) { + ALOGE("%s: not implemented yet", __FUNCTION__); + + return INVALID_OPERATION; +} + +status_t CameraService::ProClient::requestStream(int streamId) { + ALOGE("%s: not implemented yet", __FUNCTION__); + + return INVALID_OPERATION; +} + +status_t CameraService::ProClient::cancelStream(int streamId) { + ALOGE("%s: not implemented yet", __FUNCTION__); + + return INVALID_OPERATION; +} + +// ---------------------------------------------------------------------------- static const int kDumpLockRetries = 50; static const int kDumpLockSleep = 60000; @@ -569,7 +807,7 @@ status_t CameraService::dump(int fd, const Vector& args) { ALOGV("java clients' binder died"); - sp 
cameraClient = getClientByRemote(who); + sp cameraClient = getClientByRemote(who); if (cameraClient == 0) { ALOGV("java clients' binder death already cleaned up (normal case)"); diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index 41365a0..9e0f62a 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -18,6 +18,7 @@ #ifndef ANDROID_SERVERS_CAMERA_CAMERASERVICE_H #define ANDROID_SERVERS_CAMERA_CAMERASERVICE_H +#include #include #include #include @@ -40,27 +41,32 @@ class CameraService : friend class BinderService; public: class Client; + class BasicClient; + + // Implementation of BinderService static char const* getServiceName() { return "media.camera"; } CameraService(); virtual ~CameraService(); + ///////////////////////////////////////////////////////////////////// + // ICameraService virtual int32_t getNumberOfCameras(); virtual status_t getCameraInfo(int cameraId, struct CameraInfo* cameraInfo); virtual sp connect(const sp& cameraClient, int cameraId); - virtual void removeClient(const sp& cameraClient); - // returns plain pointer of client. Note that mClientLock should be acquired to - // prevent the client from destruction. The result can be NULL. - virtual Client* getClientByIdUnsafe(int cameraId); - virtual Mutex* getClientLockById(int cameraId); - - virtual sp getClientByRemote(const wp& cameraClient); + virtual sp + connect(const sp& cameraCb, int cameraId); - virtual status_t dump(int fd, const Vector& args); + // Extra permissions checks virtual status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags); - virtual void onFirstRef(); + + virtual status_t dump(int fd, const Vector& args); + + ///////////////////////////////////////////////////////////////////// + // Client functionality + virtual void removeClientByRemote(const wp& remoteBinder); enum sound_kind { SOUND_SHUTTER = 0, @@ -72,7 +78,53 @@ public: void playSound(sound_kind kind); void releaseSound(); - class Client : public BnCamera + + ///////////////////////////////////////////////////////////////////// + // CameraClient functionality + + // returns plain pointer of client. Note that mClientLock should be acquired to + // prevent the client from destruction. The result can be NULL. + virtual Client* getClientByIdUnsafe(int cameraId); + virtual Mutex* getClientLockById(int cameraId); + + class BasicClient : public virtual RefBase { + public: + virtual status_t initialize(camera_module_t *module) = 0; + + virtual void disconnect() = 0; + + wp getRemote() { + return mRemoteCallback; + } + + protected: + BasicClient(const sp& cameraService, + const sp& remoteCallback, + int cameraId, + int cameraFacing, + int clientPid, + int servicePid); + + virtual ~BasicClient(); + + // the instance is in the middle of destruction. When this is set, + // the instance should not be accessed from callback. + // CameraService's mClientLock should be acquired to access this. + // - subclasses should set this to true in their destructors. + bool mDestructionStarted; + + // these are initialized in the constructor. 
+ sp mCameraService; // immutable after constructor + int mCameraId; // immutable after constructor + int mCameraFacing; // immutable after constructor + pid_t mClientPid; + pid_t mServicePid; // immutable after constructor + + // - The app-side Binder interface to receive callbacks from us + wp mRemoteCallback; // immutable after constructor + }; + + class Client : public BnCamera, public BasicClient { public: // ICamera interface (see ICamera for details) @@ -112,38 +164,82 @@ public: return mCameraClient; } - virtual status_t initialize(camera_module_t *module) = 0; - - virtual status_t dump(int fd, const Vector& args) = 0; - protected: static Mutex* getClientLockFromCookie(void* user); // convert client from cookie. Client lock should be acquired before getting Client. static Client* getClientFromCookie(void* user); - // the instance is in the middle of destruction. When this is set, - // the instance should not be accessed from callback. - // CameraService's mClientLock should be acquired to access this. - bool mDestructionStarted; + // Initialized in constructor - // these are initialized in the constructor. - sp mCameraService; // immutable after constructor + // - The app-side Binder interface to receive callbacks from us sp mCameraClient; - int mCameraId; // immutable after constructor - int mCameraFacing; // immutable after constructor - pid_t mClientPid; - pid_t mServicePid; // immutable after constructor + }; + + class ProClient : public BnProCameraUser, public BasicClient { + public: + ProClient(const sp& cameraService, + const sp& remoteCallback, + int cameraId, + int cameraFacing, + int clientPid, + int servicePid); + + virtual ~ProClient(); + + const sp& getRemoteCallback() { + return mRemoteCallback; + } + + // BasicClient implementation + virtual status_t initialize(camera_module_t *module); + + /*** + IProCamera implementation + ***/ + + + virtual status_t connect( + const sp& callbacks); + virtual void disconnect(); + + virtual status_t exclusiveTryLock(); + virtual status_t exclusiveLock(); + virtual status_t exclusiveUnlock(); + + virtual bool hasExclusiveLock(); + + // Note that the callee gets a copy of the metadata. + virtual int submitRequest(camera_metadata_t* metadata, + bool streaming = false); + virtual status_t cancelRequest(int requestId); + + virtual status_t requestStream(int streamId); + virtual status_t cancelStream(int streamId); + + protected: + sp mRemoteCallback; }; private: + + // Delay-load the Camera HAL module + virtual void onFirstRef(); + + virtual sp getClientByRemote(const wp& cameraClient); + Mutex mServiceLock; wp mClient[MAX_CAMERAS]; // protected by mServiceLock Mutex mClientLock[MAX_CAMERAS]; // prevent Client destruction inside callbacks int mNumberOfCameras; + typedef wp weak_pro_client_ptr; + Vector mProClientList[MAX_CAMERAS]; + // needs to be called with mServiceLock held sp findClientUnsafe(const wp& cameraClient, int& outIndex); + sp findProClientUnsafe( + const wp& cameraCallbacksRemote); // atomics to record whether the hardware is allocated to some client. 
volatile int32_t mBusy[MAX_CAMERAS]; @@ -161,6 +257,9 @@ private: // IBinder::DeathRecipient implementation virtual void binderDied(const wp &who); + + // Helpers + int getDeviceVersion(int cameraId, int* facing); }; } // namespace android -- cgit v1.1 From 5376573eff55f370f041889618c9a7a9e1894615 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 20 Feb 2013 17:41:57 -0800 Subject: Camera: ProClient add asynchronous locks and such Change-Id: I551e5e5e76d9be733fab5224beaa7309268c0f38 --- camera/IProCameraCallbacks.cpp | 19 ++++ camera/ProCamera.cpp | 34 +++++++ camera/tests/Android.mk | 3 +- camera/tests/ProCameraTests.cpp | 169 ++++++++++++++++++++++++++++++++++- include/camera/IProCameraCallbacks.h | 8 ++ include/camera/ProCamera.h | 7 +- 6 files changed, 234 insertions(+), 6 deletions(-) diff --git a/camera/IProCameraCallbacks.cpp b/camera/IProCameraCallbacks.cpp index c2ad74f..756fba2 100644 --- a/camera/IProCameraCallbacks.cpp +++ b/camera/IProCameraCallbacks.cpp @@ -34,6 +34,7 @@ enum { NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION, DATA_CALLBACK, DATA_CALLBACK_TIMESTAMP, + LOCK_STATUS_CHANGED, }; class BpProCameraCallbacks: public BpInterface @@ -86,6 +87,15 @@ public: remote()->transact(DATA_CALLBACK_TIMESTAMP, data, &reply, IBinder::FLAG_ONEWAY); } + + void onLockStatusChanged(LockStatus newLockStatus) { + ALOGV("onLockStatusChanged"); + Parcel data, reply; + data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor()); + data.writeInt32(newLockStatus); + remote()->transact(LOCK_STATUS_CHANGED, data, &reply, + IBinder::FLAG_ONEWAY); + } }; IMPLEMENT_META_INTERFACE(ProCameraCallbacks, @@ -96,6 +106,7 @@ IMPLEMENT_META_INTERFACE(ProCameraCallbacks, status_t BnProCameraCallbacks::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { + ALOGV("onTransact - code = %d", code); switch(code) { case NOTIFY_CALLBACK: { ALOGV("NOTIFY_CALLBACK"); @@ -133,6 +144,14 @@ status_t BnProCameraCallbacks::onTransact( dataCallbackTimestamp(timestamp, msgType, imageData); return NO_ERROR; } break; + case LOCK_STATUS_CHANGED: { + ALOGV("LOCK_STATUS_CHANGED"); + CHECK_INTERFACE(IProCameraCallbacks, data, reply); + LockStatus newLockStatus + = static_cast(data.readInt32()); + onLockStatusChanged(newLockStatus); + return NO_ERROR; + } break; default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 134a4a3..8164188 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -118,6 +118,12 @@ void ProCamera::DeathNotifier::binderDied(const wp& who) { ALOGW("Camera service died!"); } +void ProCamera::setListener(const sp& listener) +{ + Mutex::Autolock _l(mLock); + mListener = listener; +} + // callback from camera service void ProCamera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) @@ -164,6 +170,34 @@ void ProCamera::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, /* IProCameraUser's implementation */ +void ProCamera::onLockStatusChanged( + IProCameraCallbacks::LockStatus newLockStatus) +{ + ALOGV("%s: newLockStatus = %d", __FUNCTION__, newLockStatus); + + sp listener; + { + Mutex::Autolock _l(mLock); + listener = mListener; + } + if (listener != NULL) { + switch (newLockStatus) { + case IProCameraCallbacks::LOCK_ACQUIRED: + listener->onLockAcquired(); + break; + case IProCameraCallbacks::LOCK_RELEASED: + listener->onLockReleased(); + break; + case IProCameraCallbacks::LOCK_STOLEN: + listener->onLockStolen(); + break; + default: + ALOGE("%s: Unknown lock status: 
%d", + __FUNCTION__, newLockStatus); + } + } +} + status_t ProCamera::exclusiveTryLock() { sp c = mCamera; diff --git a/camera/tests/Android.mk b/camera/tests/Android.mk index 5d386c4..e455943 100644 --- a/camera/tests/Android.mk +++ b/camera/tests/Android.mk @@ -14,7 +14,8 @@ LOCAL_SHARED_LIBRARIES := \ libgui \ libsync \ libui \ - libdl + libdl \ + libbinder LOCAL_STATIC_LIBRARIES := \ libgtest diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index 4de9c10..ca9e224 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -17,8 +17,14 @@ #include #include +#include +#include + #include "Camera.h" #include "ProCamera.h" +#include +#include +#include namespace android { namespace camera2 { @@ -28,17 +34,159 @@ namespace client { #define CAMERA_ID 0 #define TEST_DEBUGGING 0 +#define TEST_LISTENER_TIMEOUT 2000000000 // 2 second listener timeout + #if TEST_DEBUGGING #define dout std::cerr #else #define dout if (0) std::cerr #endif +enum LockEvent { + UNKNOWN, + ACQUIRED, + RELEASED, + STOLEN +}; + +typedef Vector EventList; + +class ProCameraTestThread : public Thread +{ +public: + ProCameraTestThread() { + } + + virtual bool threadLoop() { + mProc = ProcessState::self(); + mProc->startThreadPool(); + + IPCThreadState *ptr = IPCThreadState::self(); + + dout << "will join thread pool" << std::endl; + ptr->joinThreadPool(); + dout << "joined thread pool (done)" << std::endl; + + return false; + } + + sp mProc; +}; + +class ProCameraTestListener : public ProCameraListener { + +public: + status_t WaitForEvent() { + Mutex::Autolock cal(mConditionMutex); + + { + Mutex::Autolock al(mListenerMutex); + + if (mLockEventList.size() > 0) { + return OK; + } + } + + return mListenerCondition.waitRelative(mConditionMutex, + TEST_LISTENER_TIMEOUT); + } + + /* Read events into out. Existing queue is flushed */ + void ReadEvents(EventList& out) { + Mutex::Autolock al(mListenerMutex); + + for (size_t i = 0; i < mLockEventList.size(); ++i) { + out.push(mLockEventList[i]); + } + + mLockEventList.clear(); + } + + /** + * Dequeue 1 event from the event queue. + * Returns UNKNOWN if queue is empty + */ + LockEvent ReadEvent() { + Mutex::Autolock al(mListenerMutex); + + if (mLockEventList.size() == 0) { + return UNKNOWN; + } + + LockEvent ev = mLockEventList[0]; + mLockEventList.removeAt(0); + + return ev; + } + +private: + void QueueEvent(LockEvent ev) { + { + Mutex::Autolock al(mListenerMutex); + mLockEventList.push(ev); + } + + + mListenerCondition.broadcast(); + } + +protected: + + ////////////////////////////////////////////////// + ///////// ProCameraListener ////////////////////// + ////////////////////////////////////////////////// + + + // Lock has been acquired. Write operations now available. + virtual void onLockAcquired() { + QueueEvent(ACQUIRED); + } + // Lock has been released with exclusiveUnlock + virtual void onLockReleased() { + QueueEvent(RELEASED); + } + + // Lock has been stolen by another client. + virtual void onLockStolen() { + QueueEvent(STOLEN); + } + + // Lock free. 
+ virtual void onTriggerNotify(int32_t ext1, int32_t ext2, int32_t ext3) { + + dout << "Trigger notify: " << ext1 << " " << ext2 + << " " << ext3 << std::endl; + } + + // TODO: remove + + virtual void notify(int32_t , int32_t , int32_t ) {} + virtual void postData(int32_t , const sp& , + camera_frame_metadata_t *) {} + virtual void postDataTimestamp(nsecs_t , int32_t , const sp& ) {} + + + Vector mLockEventList; + Mutex mListenerMutex; + Mutex mConditionMutex; + Condition mListenerCondition; +}; + class ProCameraTest : public ::testing::Test { +public: + ProCameraTest() { + } + virtual void SetUp() { + mTestThread = new ProCameraTestThread(); + mTestThread->run("ProCameraTestThread"); + mCamera = ProCamera::connect(CAMERA_ID); ASSERT_NE((void*)NULL, mCamera.get()); + + mListener = new ProCameraTestListener(); + mCamera->setListener(mListener); } virtual void TearDown() { @@ -48,17 +196,32 @@ class ProCameraTest : public ::testing::Test { protected: sp mCamera; + sp mListener; + + sp mTestThread; + }; -TEST_F(ProCameraTest, Locking) { +TEST_F(ProCameraTest, LockingImmediate) { if (HasFatalFailure()) { return; } - status_t res = mCamera->exclusiveTryLock(); - EXPECT_EQ(OK, res); + EXPECT_FALSE(mCamera->hasExclusiveLock()); + EXPECT_EQ(OK, mCamera->exclusiveTryLock()); + + EXPECT_EQ(OK, mListener->WaitForEvent()); + EXPECT_EQ(ACQUIRED, mListener->ReadEvent()); + + EXPECT_TRUE(mCamera->hasExclusiveLock()); + EXPECT_EQ(OK, mCamera->exclusiveUnlock()); + + EXPECT_EQ(OK, mListener->WaitForEvent()); + EXPECT_EQ(RELEASED, mListener->ReadEvent()); + + EXPECT_FALSE(mCamera->hasExclusiveLock()); } } diff --git a/include/camera/IProCameraCallbacks.h b/include/camera/IProCameraCallbacks.h index ac1d5eb..e5be099 100644 --- a/include/camera/IProCameraCallbacks.h +++ b/include/camera/IProCameraCallbacks.h @@ -39,6 +39,14 @@ public: virtual void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp& data) = 0; + + enum LockStatus { + LOCK_ACQUIRED, + LOCK_RELEASED, + LOCK_STOLEN, + }; + + virtual void onLockStatusChanged(LockStatus newLockStatus) = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index ba5fdc0..2dd01e3 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -34,9 +34,10 @@ class ProCameraListener : public CameraListener public: // Lock has been acquired. Write operations now available. virtual void onLockAcquired() = 0; - // Lock has been released with exclusiveUnlock, or has been stolen by - // another client. + // Lock has been released with exclusiveUnlock. virtual void onLockReleased() = 0; + // Lock has been stolen by another client. + virtual void onLockStolen() = 0; // Lock free. 
virtual void onTriggerNotify(int32_t msgType, int32_t ext1, int32_t ext2) @@ -129,6 +130,8 @@ protected: virtual void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp& dataPtr); + virtual void onLockStatusChanged( + IProCameraCallbacks::LockStatus newLockStatus); class DeathNotifier: public IBinder::DeathRecipient { -- cgit v1.1 From 39f79f77a435c2f769477caeb071e2f9f6e78742 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 30 Jan 2013 10:14:24 -0800 Subject: Camera: ProCameraTests - add asynchronous locking unit test Change-Id: Ib79eb84046c9ed898bfb086a6600265fc351924c --- camera/tests/ProCameraTests.cpp | 46 ++++++++++++++++++++++++++++++++++++----- 1 file changed, 41 insertions(+), 5 deletions(-) diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index ca9e224..adc3c75 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -42,6 +42,11 @@ namespace client { #define dout if (0) std::cerr #endif +#define EXPECT_OK(x) EXPECT_EQ(OK, (x)) +#define ASSERT_OK(x) ASSERT_EQ(OK, (x)) + +class ProCameraTest; + enum LockEvent { UNKNOWN, ACQUIRED, @@ -63,9 +68,7 @@ public: IPCThreadState *ptr = IPCThreadState::self(); - dout << "will join thread pool" << std::endl; ptr->joinThreadPool(); - dout << "joined thread pool (done)" << std::endl; return false; } @@ -178,10 +181,13 @@ public: ProCameraTest() { } - virtual void SetUp() { + static void SetUpTestCase() { + // Binder Thread Pool Initialization mTestThread = new ProCameraTestThread(); mTestThread->run("ProCameraTestThread"); + } + virtual void SetUp() { mCamera = ProCamera::connect(CAMERA_ID); ASSERT_NE((void*)NULL, mCamera.get()); @@ -198,19 +204,49 @@ protected: sp mCamera; sp mListener; - sp mTestThread; + static sp mTestThread; }; +sp ProCameraTest::mTestThread; + +// test around exclusiveTryLock (immediate locking) TEST_F(ProCameraTest, LockingImmediate) { if (HasFatalFailure()) { return; } - EXPECT_FALSE(mCamera->hasExclusiveLock()); EXPECT_EQ(OK, mCamera->exclusiveTryLock()); + // at this point we definitely have the lock + + EXPECT_EQ(OK, mListener->WaitForEvent()); + EXPECT_EQ(ACQUIRED, mListener->ReadEvent()); + + EXPECT_TRUE(mCamera->hasExclusiveLock()); + EXPECT_EQ(OK, mCamera->exclusiveUnlock()); + + EXPECT_EQ(OK, mListener->WaitForEvent()); + EXPECT_EQ(RELEASED, mListener->ReadEvent()); + + EXPECT_FALSE(mCamera->hasExclusiveLock()); +} + +// test around exclusiveLock (locking at some future point in time) +TEST_F(ProCameraTest, LockingAsynchronous) { + + if (HasFatalFailure()) { + return; + } + + // TODO: Add another procamera that has a lock here. 
+ // then we can be test that the lock wont immediately be acquired + + EXPECT_FALSE(mCamera->hasExclusiveLock()); + EXPECT_EQ(OK, mCamera->exclusiveLock()); + // at this point we may or may not have the lock + // we cant be sure until we get an ACQUIRED event EXPECT_EQ(OK, mListener->WaitForEvent()); EXPECT_EQ(ACQUIRED, mListener->ReadEvent()); -- cgit v1.1 From 68506fd58d26748617babe94d5648503cb3690bb Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 20 Feb 2013 17:57:31 -0800 Subject: Camera: ProCamera - add createStream stub and unit test for it Change-Id: Ic05130e63f4f2c0c3278ba348b192992169f105f --- camera/ProCamera.cpp | 68 ++++++++++++++++++++++++ camera/tests/ProCameraTests.cpp | 112 +++++++++++++++++++++++++++++++++++++++- include/camera/ProCamera.h | 41 +++++++++++++++ 3 files changed, 220 insertions(+), 1 deletion(-) diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 8164188..26e4de9 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -261,4 +261,72 @@ status_t ProCamera::cancelStream(int streamId) return c->cancelStream(streamId); } +status_t ProCamera::createStream(int width, int height, int format, + const sp& window, + /*out*/ + int* streamId) +{ + *streamId = -1; + + ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height, + format); + + if (window == 0) { + return BAD_VALUE; + } + + // TODO: actually implement this in IProCamera + return INVALID_OPERATION; +} + +status_t ProCamera::createStream(int width, int height, int format, + const sp& bufferProducer, + /*out*/ + int* streamId) { + + ALOGV("%s: createStreamT %dx%d (fmt=0x%x)", __FUNCTION__, width, height, + format); + + sp binder; + sp window; + + if (bufferProducer != 0) { + binder = bufferProducer->asBinder(); + window = new Surface(bufferProducer); + + status_t stat = createStream(width, height, format, window, streamId); + + ALOGV("%s: createStreamT END (%d), StreamID = %d", __FUNCTION__, stat, + *streamId); + } + else { + *streamId = -1; + return BAD_VALUE; + } + + return BAD_VALUE; +} + +int ProCamera::getNumberOfCameras() { + ALOGE("%s: not implemented yet", __FUNCTION__); + return 1; +} + +camera_metadata* ProCamera::getCameraInfo(int cameraId) { + ALOGE("%s: not implemented yet", __FUNCTION__); + + ALOGV("%s: cameraId = %d", __FUNCTION__, cameraId); + return NULL; +} + +status_t ProCamera::createDefaultRequest(int templateId, + camera_metadata** request) const { + ALOGE("%s: not implemented yet", __FUNCTION__); + + ALOGV("%s: templateId = %d", __FUNCTION__, templateId); + + *request = NULL; + return INVALID_OPERATION; +} + }; // namespace android diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index adc3c75..d632b7e 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -26,6 +26,12 @@ #include #include +#include +#include + +#include +#include // for CAMERA2_TEMPLATE_PREVIEW only + namespace android { namespace camera2 { namespace tests { @@ -34,7 +40,8 @@ namespace client { #define CAMERA_ID 0 #define TEST_DEBUGGING 0 -#define TEST_LISTENER_TIMEOUT 2000000000 // 2 second listener timeout +#define TEST_LISTENER_TIMEOUT 1000000000 // 1 second listener timeout +#define TEST_FORMAT HAL_PIXEL_FORMAT_RGBA_8888 //TODO: YUY2 instead #if TEST_DEBUGGING #define dout std::cerr @@ -206,6 +213,40 @@ protected: static sp mTestThread; + int mDisplaySecs; + sp mComposerClient; + sp mSurfaceControl; + + int getSurfaceWidth() { + return 512; + } + int getSurfaceHeight() { + return 512; + } + + void 
createOnScreenSurface(sp& surface) { + mComposerClient = new SurfaceComposerClient; + ASSERT_EQ(NO_ERROR, mComposerClient->initCheck()); + + mSurfaceControl = mComposerClient->createSurface( + String8("ProCameraTest StreamingImage Surface"), + getSurfaceWidth(), getSurfaceHeight(), + PIXEL_FORMAT_RGB_888, 0); + + ASSERT_TRUE(mSurfaceControl != NULL); + ASSERT_TRUE(mSurfaceControl->isValid()); + + SurfaceComposerClient::openGlobalTransaction(); + ASSERT_EQ(NO_ERROR, mSurfaceControl->setLayer(0x7FFFFFFF)); + ASSERT_EQ(NO_ERROR, mSurfaceControl->show()); + SurfaceComposerClient::closeGlobalTransaction(); + + sp window = mSurfaceControl->getSurface(); + surface = mSurfaceControl->getSurface(); + + ASSERT_NE((void*)NULL, surface.get()); + } + }; sp ProCameraTest::mTestThread; @@ -260,6 +301,75 @@ TEST_F(ProCameraTest, LockingAsynchronous) { EXPECT_FALSE(mCamera->hasExclusiveLock()); } +// Stream directly to the screen. +TEST_F(ProCameraTest, StreamingImage) { + if (HasFatalFailure()) { + return; + } + char* displaySecsEnv = getenv("TEST_DISPLAY_SECS"); + if (displaySecsEnv != NULL) { + mDisplaySecs = atoi(displaySecsEnv); + if (mDisplaySecs < 0) { + mDisplaySecs = 0; + } + } else { + mDisplaySecs = 0; + } + + sp surface; + sp window; + if (mDisplaySecs > 0) { + createOnScreenSurface(/*out*/surface); + window = surface; + } + int streamId = -1; + EXPECT_OK(mCamera->createStream(/*width*/640, /*height*/480, TEST_FORMAT, + window, &streamId)); + EXPECT_NE(-1, streamId); + + EXPECT_OK(mCamera->exclusiveTryLock()); + /* iterate in a loop submitting requests every frame. + * what kind of requests doesnt really matter, just whatever. + */ + + // it would probably be better to use CameraMetadata from camera service. + camera_metadata_t *request = NULL; + EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, + /*out*/&request)); + EXPECT_NE((void*)NULL, request); + + /* FIXME: dont need this later, at which point the above should become an + ASSERT_NE*/ + if(request == NULL) request = allocate_camera_metadata(10, 100); + + // set the output streams to just this stream ID + + // wow what a verbose API. + // i would give a loaf of bread for + // metadata->updateOrInsert(keys.request.output.streams, streamId); + camera_metadata_entry_t entry; + uint32_t tag = static_cast(ANDROID_REQUEST_OUTPUT_STREAMS); + int find = find_camera_metadata_entry(request, tag, &entry); + if (find == -ENOENT) { + ASSERT_OK(add_camera_metadata_entry(request, tag, &streamId, + /*data_count*/1)); + } else { + ASSERT_OK(update_camera_metadata_entry(request, entry.index, &streamId, + /*data_count*/1, &entry)); + } + + EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true)); + + sleep(mDisplaySecs); + //should the window be empty until the buffer is flipped? + // that would certainly make sense + + + free_camera_metadata(request); + EXPECT_OK(mCamera->cancelStream(streamId)); + EXPECT_OK(mCamera->exclusiveUnlock()); +} + } } } diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index 2dd01e3..7191b07 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -114,8 +114,49 @@ public: * Lock free. Service maintains counter of streams. * Errors: BAD_VALUE if unknown stream ID. */ +// TODO: remove requestStream, its useless. + +// TODO: rename cancelStream to deleteStream +// can probably do it with a grep/sed + + /** + * Ask for a stream to be disabled. + * Lock free. Service maintains counter of streams. + * Errors: BAD_VALUE if unknown stream ID. 
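// The request-building block in the StreamingImage test above wishes for an
// "updateOrInsert" helper; the find/add/update calls it makes can be folded into
// one function. A sketch only: updateOrInsertInt32 is a hypothetical helper, and
// the 1000/10000 capacities are simply the values the test happens to use.

#include <errno.h>
#include <system/camera_metadata.h>

// Sets 'tag' to a single int32 value, growing the buffer if it is out of space.
// Returns 0 on success, a negative errno-style code otherwise.
static int updateOrInsertInt32(camera_metadata_t** req, uint32_t tag, int32_t value) {
    camera_metadata_entry_t entry;
    int found = find_camera_metadata_entry(*req, tag, &entry);

    if (found == 0) {
        // Entry already present: overwrite it in place.
        return update_camera_metadata_entry(*req, entry.index, &value,
                                            /*data_count*/1, &entry);
    }
    if (found != -ENOENT) {
        return found;   // unexpected error from the lookup
    }

    // Not present yet: append, growing the buffer if the add fails for capacity.
    if (add_camera_metadata_entry(*req, tag, &value, /*data_count*/1) == 0) {
        return 0;
    }
    camera_metadata_t* larger = allocate_camera_metadata(/*entry_capacity*/1000,
                                                         /*data_capacity*/10000);
    if (larger == NULL) return -ENOMEM;
    if (append_camera_metadata(larger, *req) != 0) {
        free_camera_metadata(larger);
        return -EINVAL;
    }
    free_camera_metadata(*req);
    *req = larger;
    return add_camera_metadata_entry(*req, tag, &value, /*data_count*/1);
}

// Usage, mirroring the test:
//   updateOrInsertInt32(&request, ANDROID_REQUEST_OUTPUT_STREAMS, streamId);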
+ */ status_t cancelStream(int streamId); + /** + * Create a new HW stream, whose sink will be the window. + * Lock free. Service maintains counter of streams. + * Errors: -EBUSY if too many streams created + */ + status_t createStream(int width, int height, int format, + const sp& window, + /*out*/ + int* streamId); + + /** + * Create a new HW stream, whose sink will be the SurfaceTexture. + * Lock free. Service maintains counter of streams. + * Errors: -EBUSY if too many streams created + */ + status_t createStream(int width, int height, int format, + const sp& bufferProducer, + /*out*/ + int* streamId); + + // Create a request object from a template. + status_t createDefaultRequest(int templateId, + /*out*/ + camera_metadata** request) const; + + // Get number of cameras + static int getNumberOfCameras(); + + // Get static camera metadata + static camera_metadata* getCameraInfo(int cameraId); + sp remote(); protected: -- cgit v1.1 From 985fd30a10f6fec4293f071fd258c4726cff5a3d Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 20 Feb 2013 18:24:43 -0800 Subject: Camera: ProCamera2Client - add createStream (service is stubbed) and unit test Change-Id: I1ae7ba9b24f5883c214c19a7ed0eaf0c802d69c1 --- camera/IProCameraUser.cpp | 178 +++++++-- camera/ProCamera.cpp | 25 +- camera/tests/ProCameraTests.cpp | 20 +- include/camera/IProCameraUser.h | 10 + include/camera/ProCamera.h | 2 +- services/camera/libcameraservice/Android.mk | 1 + services/camera/libcameraservice/CameraService.cpp | 4 +- .../camera/libcameraservice/ProCamera2Client.cpp | 396 +++++++++++++++++++++ .../camera/libcameraservice/ProCamera2Client.h | 156 ++++++++ 9 files changed, 736 insertions(+), 56 deletions(-) create mode 100644 services/camera/libcameraservice/ProCamera2Client.cpp create mode 100644 services/camera/libcameraservice/ProCamera2Client.h diff --git a/camera/IProCameraUser.cpp b/camera/IProCameraUser.cpp index 76c2dcd..cd7bf5c 100644 --- a/camera/IProCameraUser.cpp +++ b/camera/IProCameraUser.cpp @@ -42,8 +42,78 @@ enum { CANCEL_REQUEST, REQUEST_STREAM, CANCEL_STREAM, + CREATE_STREAM, + CREATE_DEFAULT_REQUEST, }; +/** + * Caller becomes the owner of the new metadata + * 'const Parcel' doesnt prevent us from calling the read functions. + * which is interesting since it changes the internal state + */ +void readMetadata(const Parcel& data, camera_metadata_t** out) { + camera_metadata_t* metadata; + + // arg0 = metadataSize (int32) + size_t metadataSize = static_cast(data.readInt32()); + + if (metadataSize == 0) { + if (out) { + *out = NULL; + } + return; + } + + // NOTE: this doesn't make sense to me. shouldnt the blob + // know how big it is? why do we have to specify the size + // to Parcel::readBlob ? 
+ + ReadableBlob blob; + // arg1 = metadata (blob) + { + data.readBlob(metadataSize, &blob); + const camera_metadata_t* tmp = + reinterpret_cast(blob.data()); + size_t entry_capacity = get_camera_metadata_entry_capacity(tmp); + size_t data_capacity = get_camera_metadata_data_capacity(tmp); + + metadata = allocate_camera_metadata(entry_capacity, data_capacity); + copy_camera_metadata(metadata, metadataSize, tmp); + } + blob.release(); + + if (out) { + *out = metadata; + } else { + free_camera_metadata(metadata); + } +} + +/** + * Caller retains ownership of metadata + * - Write 2 (int32 + blob) args in the current position + */ +void writeMetadata(Parcel& data, camera_metadata_t* metadata) { + // arg0 = metadataSize (int32) + size_t metadataSize; + + if (metadata == NULL) { + data.writeInt32(0); + return; + } + + metadataSize = get_camera_metadata_compact_size(metadata); + data.writeInt32(static_cast(metadataSize)); + + // arg1 = metadata (blob) + WritableBlob blob; + { + data.writeBlob(metadataSize, &blob); + copy_camera_metadata(blob.data(), metadataSize, metadata); + } + blob.release(); +} + class BpProCameraUser: public BpInterface { public: @@ -109,17 +179,8 @@ public: Parcel data, reply; data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - // arg0 = metadataSize (int32) - size_t metadataSize = get_camera_metadata_compact_size(metadata); - data.writeInt32(static_cast(metadataSize)); - - // arg1 = metadata (blob) - WritableBlob blob; - { - data.writeBlob(metadataSize, &blob); - copy_camera_metadata(blob.data(), metadataSize, metadata); - } - blob.release(); + // arg0+arg1 + writeMetadata(data, metadata); // arg2 = streaming (bool) data.writeInt32(streaming); @@ -157,6 +218,44 @@ public: return reply.readInt32(); } + virtual status_t createStream(int width, int height, int format, + const sp& surface, + /*out*/ + int* streamId) + { + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + data.writeInt32(width); + data.writeInt32(height); + data.writeInt32(format); + + Surface::writeToParcel(surface, &data); + remote()->transact(CREATE_STREAM, data, &reply); + + int sId = reply.readInt32(); + if (streamId) { + *streamId = sId; + } + return reply.readInt32(); + } + + // Create a request object from a template. + virtual status_t createDefaultRequest(int templateId, + /*out*/ + camera_metadata** request) + { + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + data.writeInt32(templateId); + remote()->transact(CREATE_DEFAULT_REQUEST, data, &reply); + readMetadata(reply, /*out*/request); + return reply.readInt32(); + } + + +private: + + }; IMPLEMENT_META_INTERFACE(ProCameraUser, "android.hardware.IProCameraUser"); @@ -205,28 +304,7 @@ status_t BnProCameraUser::onTransact( case SUBMIT_REQUEST: { CHECK_INTERFACE(IProCameraUser, data, reply); camera_metadata_t* metadata; - - // arg0 = metadataSize (int32) - size_t metadataSize = static_cast(data.readInt32()); - - // NOTE: this doesn't make sense to me. shouldnt the blob - // know how big it is? why do we have to specify the size - // to Parcel::readBlob ? 
- - ReadableBlob blob; - // arg1 = metadata (blob) - { - data.readBlob(metadataSize, &blob); - const camera_metadata_t* tmp = - reinterpret_cast(blob.data()); - size_t entry_capacity = get_camera_metadata_entry_capacity(tmp); - size_t data_capacity = get_camera_metadata_data_capacity(tmp); - - metadata = allocate_camera_metadata(entry_capacity, - data_capacity); - copy_camera_metadata(metadata, metadataSize, tmp); - } - blob.release(); + readMetadata(data, /*out*/&metadata); // arg2 = streaming (bool) bool streaming = data.readInt32(); @@ -254,6 +332,40 @@ status_t BnProCameraUser::onTransact( reply->writeInt32(cancelStream(streamId)); return NO_ERROR; } break; + case CREATE_STREAM: { + CHECK_INTERFACE(IProCameraUser, data, reply); + int width, height, format; + + width = data.readInt32(); + height = data.readInt32(); + format = data.readInt32(); + + sp surface = Surface::readFromParcel(data); + + int streamId = -1; + status_t ret; + ret = createStream(width, height, format, surface, &streamId); + + reply->writeInt32(streamId); + reply->writeInt32(ret); + + return NO_ERROR; + } break; + + case CREATE_DEFAULT_REQUEST: { + CHECK_INTERFACE(IProCameraUser, data, reply); + + int templateId = data.readInt32(); + + camera_metadata_t* request = NULL; + status_t ret; + ret = createDefaultRequest(templateId, &request); + + writeMetadata(*reply, request); + reply->writeInt32(ret); + + return NO_ERROR; + } break; default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 26e4de9..92ec9d6 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -262,7 +262,7 @@ status_t ProCamera::cancelStream(int streamId) } status_t ProCamera::createStream(int width, int height, int format, - const sp& window, + const sp& surface, /*out*/ int* streamId) { @@ -271,12 +271,14 @@ status_t ProCamera::createStream(int width, int height, int format, ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height, format); - if (window == 0) { + if (surface == 0) { return BAD_VALUE; } - // TODO: actually implement this in IProCamera - return INVALID_OPERATION; + sp c = mCamera; + if (c == 0) return NO_INIT; + + return c->createStream(width, height, format, surface, streamId); } status_t ProCamera::createStream(int width, int height, int format, @@ -288,13 +290,10 @@ status_t ProCamera::createStream(int width, int height, int format, format); sp binder; - sp window; + status_t stat = INVALID_OPERATION; if (bufferProducer != 0) { binder = bufferProducer->asBinder(); - window = new Surface(bufferProducer); - - status_t stat = createStream(width, height, format, window, streamId); ALOGV("%s: createStreamT END (%d), StreamID = %d", __FUNCTION__, stat, *streamId); @@ -304,7 +303,7 @@ status_t ProCamera::createStream(int width, int height, int format, return BAD_VALUE; } - return BAD_VALUE; + return stat; } int ProCamera::getNumberOfCameras() { @@ -321,12 +320,12 @@ camera_metadata* ProCamera::getCameraInfo(int cameraId) { status_t ProCamera::createDefaultRequest(int templateId, camera_metadata** request) const { - ALOGE("%s: not implemented yet", __FUNCTION__); - ALOGV("%s: templateId = %d", __FUNCTION__, templateId); - *request = NULL; - return INVALID_OPERATION; + sp c = mCamera; + if (c == 0) return NO_INIT; + + return c->createDefaultRequest(templateId, request); } }; // namespace android diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index d632b7e..230e160 100644 --- a/camera/tests/ProCameraTests.cpp +++ 
b/camera/tests/ProCameraTests.cpp @@ -317,14 +317,12 @@ TEST_F(ProCameraTest, StreamingImage) { } sp surface; - sp window; if (mDisplaySecs > 0) { createOnScreenSurface(/*out*/surface); - window = surface; } int streamId = -1; EXPECT_OK(mCamera->createStream(/*width*/640, /*height*/480, TEST_FORMAT, - window, &streamId)); + surface, &streamId)); EXPECT_NE(-1, streamId); EXPECT_OK(mCamera->exclusiveTryLock()); @@ -351,8 +349,16 @@ TEST_F(ProCameraTest, StreamingImage) { uint32_t tag = static_cast(ANDROID_REQUEST_OUTPUT_STREAMS); int find = find_camera_metadata_entry(request, tag, &entry); if (find == -ENOENT) { - ASSERT_OK(add_camera_metadata_entry(request, tag, &streamId, - /*data_count*/1)); + if (add_camera_metadata_entry(request, tag, &streamId, /*data_count*/1) + != OK) { + camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000); + ASSERT_OK(append_camera_metadata(tmp, request)); + free_camera_metadata(request); + request = tmp; + + ASSERT_OK(add_camera_metadata_entry(request, tag, &streamId, + /*data_count*/1)); + } } else { ASSERT_OK(update_camera_metadata_entry(request, entry.index, &streamId, /*data_count*/1, &entry)); @@ -360,10 +366,8 @@ TEST_F(ProCameraTest, StreamingImage) { EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true)); + dout << "will sleep now for " << mDisplaySecs << std::endl; sleep(mDisplaySecs); - //should the window be empty until the buffer is flipped? - // that would certainly make sense - free_camera_metadata(request); EXPECT_OK(mCamera->cancelStream(streamId)); diff --git a/include/camera/IProCameraUser.h b/include/camera/IProCameraUser.h index 6170410..3ef4676 100644 --- a/include/camera/IProCameraUser.h +++ b/include/camera/IProCameraUser.h @@ -63,6 +63,16 @@ public: virtual status_t requestStream(int streamId) = 0; virtual status_t cancelStream(int streamId) = 0; + virtual status_t createStream(int width, int height, int format, + const sp& surface, + /*out*/ + int* streamId) = 0; + + // Create a request object from a template. 
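// Taken together with the StreamingImage test above, the client-side flow that
// the new createStream/createDefaultRequest entry points aim at looks roughly
// like this. A sketch only: it assumes an Android tree with the headers from this
// patch series, assumes the surface parameter is an android::Surface, and elides
// all error handling.

#include <camera/ProCamera.h>
#include <hardware/camera2.h>          // for CAMERA2_TEMPLATE_PREVIEW
#include <system/camera_metadata.h>
#include <system/graphics.h>           // for HAL_PIXEL_FORMAT_RGBA_8888

static void streamPreview(const android::sp<android::Surface>& surface) {
    using namespace android;

    sp<ProCamera> cam = ProCamera::connect(/*cameraId*/0);
    if (cam == NULL) return;

    int streamId = -1;
    cam->createStream(/*width*/640, /*height*/480,
                      HAL_PIXEL_FORMAT_RGBA_8888, surface, &streamId);

    cam->exclusiveTryLock();                          // take the write lock

    camera_metadata_t* request = NULL;
    cam->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, &request);
    // ...point ANDROID_REQUEST_OUTPUT_STREAMS at streamId, as the test does...

    cam->submitRequest(request, /*streaming*/true);   // repeats until cancelled
    free_camera_metadata(request);

    cam->cancelStream(streamId);
    cam->exclusiveUnlock();
}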
+ virtual status_t createDefaultRequest(int templateId, + /*out*/ + camera_metadata** request) + = 0; }; diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index 7191b07..9b763a3 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -132,7 +132,7 @@ public: * Errors: -EBUSY if too many streams created */ status_t createStream(int width, int height, int format, - const sp& window, + const sp& surface, /*out*/ int* streamId); diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index b6ebd02..c7a8e4a 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -10,6 +10,7 @@ LOCAL_SRC_FILES:= \ CameraService.cpp \ CameraClient.cpp \ Camera2Client.cpp \ + ProCamera2Client.cpp \ Camera2Device.cpp \ camera2/Parameters.cpp \ camera2/FrameProcessor.cpp \ diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 4941965..eb8bc05 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -39,6 +39,7 @@ #include "CameraService.h" #include "CameraClient.h" #include "Camera2Client.h" +#include "ProCamera2Client.h" namespace android { @@ -281,7 +282,8 @@ sp CameraService::connect( return NULL; break; case CAMERA_DEVICE_API_VERSION_2_0: - client = new ProClient(this, cameraCb, cameraId, + case CAMERA_DEVICE_API_VERSION_2_1: + client = new ProCamera2Client(this, cameraCb, cameraId, facing, callingPid, getpid()); break; case -1: diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp new file mode 100644 index 0000000..d6389a1 --- /dev/null +++ b/services/camera/libcameraservice/ProCamera2Client.cpp @@ -0,0 +1,396 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "ProCamera2Client" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include + +#include +#include +#include +#include "camera2/Parameters.h" +#include "ProCamera2Client.h" + +namespace android { +using namespace camera2; + +static int getCallingPid() { + return IPCThreadState::self()->getCallingPid(); +} + +static int getCallingUid() { + return IPCThreadState::self()->getCallingUid(); +} + +// Interface used by CameraService + +ProCamera2Client::ProCamera2Client(const sp& cameraService, + const sp& remoteCallback, + int cameraId, + int cameraFacing, + int clientPid, + int servicePid): + ProClient(cameraService, remoteCallback, + cameraId, cameraFacing, clientPid, servicePid), + mSharedCameraCallbacks(remoteCallback) +{ + ATRACE_CALL(); + ALOGI("ProCamera %d: Opened", cameraId); + + mDevice = new Camera2Device(cameraId); + + mExclusiveLock = false; +} + +status_t ProCamera2Client::checkPid(const char* checkLocation) const { + int callingPid = getCallingPid(); + if (callingPid == mClientPid) return NO_ERROR; + + ALOGE("%s: attempt to use a locked camera from a different process" + " (old pid %d, new pid %d)", checkLocation, mClientPid, callingPid); + return PERMISSION_DENIED; +} + +status_t ProCamera2Client::initialize(camera_module_t *module) +{ + ATRACE_CALL(); + ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId); + status_t res; + + res = mDevice->initialize(module); + if (res != OK) { + ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return NO_INIT; + } + + res = mDevice->setNotifyCallback(this); + + return OK; +} + +ProCamera2Client::~ProCamera2Client() { + ATRACE_CALL(); + + mDestructionStarted = true; + + disconnect(); + + ALOGI("ProCamera %d: Closed", mCameraId); +} + +status_t ProCamera2Client::exclusiveTryLock() { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mIProCameraUserLock); + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + + if (!mExclusiveLock) { + mExclusiveLock = true; + + if (mRemoteCallback != NULL) { + mRemoteCallback->onLockStatusChanged( + IProCameraCallbacks::LOCK_ACQUIRED); + } + + ALOGV("%s: exclusive lock acquired", __FUNCTION__); + + return OK; + } + + // TODO: have a PERMISSION_DENIED case for when someone else owns the lock + + // don't allow recursive locking + ALOGW("%s: exclusive lock already exists - recursive locking is not" + "allowed", __FUNCTION__); + + return ALREADY_EXISTS; +} + +status_t ProCamera2Client::exclusiveLock() { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mIProCameraUserLock); + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + + /** + * TODO: this should asynchronously 'wait' until the lock becomes available + * if another client already has an exclusive lock. 
+ * + * once we have proper sharing support this will need to do + * more than just return immediately + */ + if (!mExclusiveLock) { + mExclusiveLock = true; + + if (mRemoteCallback != NULL) { + mRemoteCallback->onLockStatusChanged(IProCameraCallbacks::LOCK_ACQUIRED); + } + + ALOGV("%s: exclusive lock acquired", __FUNCTION__); + + return OK; + } + + // don't allow recursive locking + ALOGW("%s: exclusive lock already exists - recursive locking is not allowed" + , __FUNCTION__); + return ALREADY_EXISTS; +} + +status_t ProCamera2Client::exclusiveUnlock() { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mIProCameraUserLock); + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + + // don't allow unlocking if we have no lock + if (!mExclusiveLock) { + ALOGW("%s: cannot unlock, no lock was held in the first place", + __FUNCTION__); + return BAD_VALUE; + } + + mExclusiveLock = false; + if (mRemoteCallback != NULL ) { + mRemoteCallback->onLockStatusChanged( + IProCameraCallbacks::LOCK_RELEASED); + } + ALOGV("%s: exclusive lock released", __FUNCTION__); + + return OK; +} + +bool ProCamera2Client::hasExclusiveLock() { + return mExclusiveLock; +} + +status_t ProCamera2Client::submitRequest(camera_metadata_t* request, + bool streaming) { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mIProCameraUserLock); + if (!mExclusiveLock) { + return PERMISSION_DENIED; + } + + ALOGE("%s: not fully implemented yet", __FUNCTION__); + free_camera_metadata(request); + return OK; +} + +status_t ProCamera2Client::cancelRequest(int requestId) { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mIProCameraUserLock); + if (!mExclusiveLock) { + return PERMISSION_DENIED; + } + + ALOGE("%s: not fully implemented yet", __FUNCTION__); + return OK; +} + +status_t ProCamera2Client::requestStream(int streamId) { + ALOGE("%s: not implemented yet", __FUNCTION__); + + return INVALID_OPERATION; +} + +status_t ProCamera2Client::cancelStream(int streamId) { + ALOGE("%s: not implemented yet", __FUNCTION__); + + return INVALID_OPERATION; +} + +status_t ProCamera2Client::createStream(int width, int height, int format, + const sp& surface, + /*out*/ + int* streamId) { + ALOGE("%s: not implemented yet", __FUNCTION__); + + return INVALID_OPERATION; +} + +status_t ProCamera2Client::createDefaultRequest(int templateId, + /*out*/ + camera_metadata** request) { + ALOGE("%s: not implemented yet", __FUNCTION__); + + return INVALID_OPERATION; +} + + + + + +status_t ProCamera2Client::dump(int fd, const Vector& args) { + String8 result; + result.appendFormat("ProCamera2Client[%d] (%p) PID: %d, dump:\n", + mCameraId, + getRemoteCallback()->asBinder().get(), + mClientPid); + result.append(" State: "); + + // TODO: print dynamic/request section from most recent requests + +#define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break; + + result = " Device dump:\n"; + write(fd, result.string(), result.size()); + + status_t res = mDevice->dump(fd, args); + if (res != OK) { + result = String8::format(" Error dumping device: %s (%d)", + strerror(-res), res); + write(fd, result.string(), result.size()); + } + +#undef CASE_APPEND_ENUM + return NO_ERROR; +} + +// IProCameraUser interface + +void ProCamera2Client::disconnect() { + ATRACE_CALL(); + Mutex::Autolock icl(mIProCameraUserLock); + status_t res; + + // Allow both client and the media server to disconnect at all times + int callingPid = getCallingPid(); + if (callingPid != mClientPid && callingPid != mServicePid) return; + + if 
(mDevice == 0) return; + + ALOGV("Camera %d: Shutting down", mCameraId); + ALOGV("Camera %d: Waiting for threads", mCameraId); + ALOGV("Camera %d: Disconnecting device", mCameraId); + + mDevice->disconnect(); + + mDevice.clear(); + + ProClient::disconnect(); +} + +status_t ProCamera2Client::connect(const sp& client) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mIProCameraUserLock); + + if (mClientPid != 0 && getCallingPid() != mClientPid) { + ALOGE("%s: Camera %d: Connection attempt from pid %d; " + "current locked to pid %d", __FUNCTION__, + mCameraId, getCallingPid(), mClientPid); + return BAD_VALUE; + } + + mClientPid = getCallingPid(); + + mRemoteCallback = client; + mSharedCameraCallbacks = client; + + return OK; +} + +/** Device-related methods */ + +void ProCamera2Client::notifyError(int errorCode, int arg1, int arg2) { + ALOGE("Error condition %d reported by HAL, arguments %d, %d", errorCode, + arg1, arg2); +} + +void ProCamera2Client::notifyShutter(int frameNumber, nsecs_t timestamp) { + ALOGV("%s: Shutter notification for frame %d at time %lld", __FUNCTION__, + frameNumber, timestamp); +} + +void ProCamera2Client::notifyAutoFocus(uint8_t newState, int triggerId) { + ALOGV("%s: Autofocus state now %d, last trigger %d", + __FUNCTION__, newState, triggerId); + + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE, + 1, 0); + } + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, + 1, 0); + } +} + +void ProCamera2Client::notifyAutoExposure(uint8_t newState, int triggerId) { + ALOGV("%s: Autoexposure state now %d, last trigger %d", + __FUNCTION__, newState, triggerId); +} + +void ProCamera2Client::notifyAutoWhitebalance(uint8_t newState, int triggerId) { + ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", + __FUNCTION__, newState, triggerId); +} + +int ProCamera2Client::getCameraId() const { + return mCameraId; +} + +const sp& ProCamera2Client::getCameraDevice() { + return mDevice; +} + +const sp& ProCamera2Client::getCameraService() { + return mCameraService; +} + +ProCamera2Client::SharedCameraCallbacks::Lock::Lock( + SharedCameraCallbacks &client): + mRemoteCallback(client.mRemoteCallback), + mSharedClient(client) { + mSharedClient.mRemoteCallbackLock.lock(); +} + +ProCamera2Client::SharedCameraCallbacks::Lock::~Lock() { + mSharedClient.mRemoteCallbackLock.unlock(); +} + +ProCamera2Client::SharedCameraCallbacks::SharedCameraCallbacks + (const sp&client): + mRemoteCallback(client) { +} + +ProCamera2Client::SharedCameraCallbacks& + ProCamera2Client::SharedCameraCallbacks::operator=( + const sp&client) { + Mutex::Autolock l(mRemoteCallbackLock); + mRemoteCallback = client; + return *this; +} + +void ProCamera2Client::SharedCameraCallbacks::clear() { + Mutex::Autolock l(mRemoteCallbackLock); + mRemoteCallback.clear(); +} + +} // namespace android diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/ProCamera2Client.h new file mode 100644 index 0000000..8f76819 --- /dev/null +++ b/services/camera/libcameraservice/ProCamera2Client.h @@ -0,0 +1,156 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_PROCAMERA2CLIENT_H
+#define ANDROID_SERVERS_CAMERA_PROCAMERA2CLIENT_H
+
+#include "Camera2Device.h"
+#include "CameraService.h"
+
+namespace android {
+
+class IMemory;
+/**
+ * Implements the binder IProCameraUser API,
+ * meant for HAL2-level private API access.
+ */
+class ProCamera2Client :
+        public CameraService::ProClient,
+        public Camera2Device::NotificationListener
+{
+public:
+    /**
+     * IProCameraUser interface (see IProCameraUser for details)
+     */
+    virtual status_t connect(const sp<IProCameraCallbacks>& callbacks);
+    virtual void disconnect();
+
+    virtual status_t exclusiveTryLock();
+    virtual status_t exclusiveLock();
+    virtual status_t exclusiveUnlock();
+
+    virtual bool hasExclusiveLock();
+
+    // Note that the callee gets a copy of the metadata.
+    virtual int submitRequest(camera_metadata_t* metadata,
+                              bool streaming = false);
+    virtual status_t cancelRequest(int requestId);
+
+    virtual status_t requestStream(int streamId);
+    virtual status_t cancelStream(int streamId);
+
+    virtual status_t createStream(int width, int height, int format,
+                                  const sp<Surface>& surface,
+                                  /*out*/
+                                  int* streamId);
+
+    // Create a request object from a template.
+    virtual status_t createDefaultRequest(int templateId,
+                                          /*out*/
+                                          camera_metadata** request);
+
+
+    /**
+     * Interface used by CameraService
+     */
+
+    ProCamera2Client(const sp<CameraService>& cameraService,
+                     const sp<IProCameraCallbacks>& remoteCallback,
+                     int cameraId,
+                     int cameraFacing,
+                     int clientPid,
+                     int servicePid);
+    virtual ~ProCamera2Client();
+
+    status_t initialize(camera_module_t *module);
+
+    virtual status_t dump(int fd, const Vector<String16>& args);
+
+    /**
+     * Interface used by Camera2Device
+     */
+
+    virtual void notifyError(int errorCode, int arg1, int arg2);
+    virtual void notifyShutter(int frameNumber, nsecs_t timestamp);
+    virtual void notifyAutoFocus(uint8_t newState, int triggerId);
+    virtual void notifyAutoExposure(uint8_t newState, int triggerId);
+    virtual void notifyAutoWhitebalance(uint8_t newState, int triggerId);
+
+
+    int getCameraId() const;
+    const sp<Camera2Device>& getCameraDevice();
+    const sp<CameraService>& getCameraService();
+
+    /**
+     * Interface used by independent components of ProCamera2Client.
+     */
+
+    // Simple class to ensure that access to IProCameraCallbacks is serialized
+    // by requiring mRemoteCallbackLock to be locked before access to
+    // mCameraClient is possible.
+    class SharedCameraCallbacks {
+      public:
+        class Lock {
+          public:
+            Lock(SharedCameraCallbacks &client);
+            ~Lock();
+            sp<IProCameraCallbacks> &mRemoteCallback;
+          private:
+            SharedCameraCallbacks &mSharedClient;
+        };
+        SharedCameraCallbacks(const sp<IProCameraCallbacks>& client);
+        SharedCameraCallbacks& operator=(const sp<IProCameraCallbacks>& client);
+        void clear();
+      private:
+        sp<IProCameraCallbacks> mRemoteCallback;
+        mutable Mutex mRemoteCallbackLock;
+    } mSharedCameraCallbacks;
+
+private:
+    /** IProCameraUser interface-related private members */
+
+    // Mutex that must be locked by methods implementing the IProCameraUser
+    // interface. Ensures serialization between incoming IProCameraUser calls.
+ // All methods below that append 'L' to the name assume that + // mIProCameraUserLock is locked when they're called + mutable Mutex mIProCameraUserLock; + + // Used with stream IDs + static const int NO_STREAM = -1; + + /* Preview/Recording related members */ + + sp mPreviewSurface; + + /** Preview callback related members */ + /** Camera2Device instance wrapping HAL2 entry */ + + sp mDevice; + + /** Utility members */ + + // Verify that caller is the owner of the camera + status_t checkPid(const char *checkLocation) const; + + // Whether or not we have an exclusive lock on the device + // - if no we can't modify the request queue. + // note that creating/deleting streams we own is still OK + bool mExclusiveLock; +}; + +}; // namespace android + +#endif -- cgit v1.1 From 3261fd3f1d8f798fab2f1b3efaa92d5a35cd42e7 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 20 Feb 2013 19:02:36 -0800 Subject: Camera: ProCamera2Client implement createStream,createDefaultRequest The StreamingImage unit test should now work when TEST_DISPLAY_SECS is set. Change-Id: I9cf3ad5bd5a76096ab21bd39e631d636c4ff3d3f --- .../camera/libcameraservice/ProCamera2Client.cpp | 68 +++++++++++++++++----- .../camera/libcameraservice/ProCamera2Client.h | 1 + 2 files changed, 54 insertions(+), 15 deletions(-) diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp index d6389a1..5ebe713 100644 --- a/services/camera/libcameraservice/ProCamera2Client.cpp +++ b/services/camera/libcameraservice/ProCamera2Client.cpp @@ -194,8 +194,15 @@ status_t ProCamera2Client::submitRequest(camera_metadata_t* request, return PERMISSION_DENIED; } - ALOGE("%s: not fully implemented yet", __FUNCTION__); - free_camera_metadata(request); + CameraMetadata metadata(request); + + if (streaming) { + return mDevice->setStreamingRequest(metadata); + } else { + return mDevice->capture(metadata); + } + + // unreachable. thx gcc for a useless warning return OK; } @@ -209,7 +216,7 @@ status_t ProCamera2Client::cancelRequest(int requestId) { } ALOGE("%s: not fully implemented yet", __FUNCTION__); - return OK; + return INVALID_OPERATION; } status_t ProCamera2Client::requestStream(int streamId) { @@ -219,31 +226,62 @@ status_t ProCamera2Client::requestStream(int streamId) { } status_t ProCamera2Client::cancelStream(int streamId) { - ALOGE("%s: not implemented yet", __FUNCTION__); + ATRACE_CALL(); + ALOGV("%s (streamId = 0x%x)", __FUNCTION__, streamId); - return INVALID_OPERATION; + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mIProCameraUserLock); + + return mDevice->deleteStream(streamId); } status_t ProCamera2Client::createStream(int width, int height, int format, - const sp& surface, - /*out*/ - int* streamId) { - ALOGE("%s: not implemented yet", __FUNCTION__); + const sp& surface, + /*out*/ + int* streamId) +{ + if (streamId) { + *streamId = -1; + } - return INVALID_OPERATION; + ATRACE_CALL(); + ALOGV("%s (w = %d, h = %d, f = 0x%x)", __FUNCTION__, width, height, format); + + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mIProCameraUserLock); + + return mDevice->createStream(surface, width, height, format, /*size*/1, streamId); } +// Create a request object from a template. 
+// -- Caller owns the newly allocated metadata status_t ProCamera2Client::createDefaultRequest(int templateId, - /*out*/ - camera_metadata** request) { - ALOGE("%s: not implemented yet", __FUNCTION__); + /*out*/ + camera_metadata** request) +{ + ATRACE_CALL(); + ALOGV("%s (templateId = 0x%x)", __FUNCTION__, templateId); - return INVALID_OPERATION; -} + if (request) { + *request = NULL; + } + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + Mutex::Autolock icl(mIProCameraUserLock); + CameraMetadata metadata; + if ( (res = mDevice->createDefaultRequest(templateId, &metadata) ) == OK) { + *request = metadata.release(); + } + return res; +} status_t ProCamera2Client::dump(int fd, const Vector& args) { String8 result; diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/ProCamera2Client.h index 8f76819..ed42e22 100644 --- a/services/camera/libcameraservice/ProCamera2Client.h +++ b/services/camera/libcameraservice/ProCamera2Client.h @@ -58,6 +58,7 @@ public: int* streamId); // Create a request object from a template. + // -- Caller owns the newly allocated metadata virtual status_t createDefaultRequest(int templateId, /*out*/ camera_metadata** request); -- cgit v1.1 From 76f8b43909817179b317880202360863b8f976d0 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 20 Feb 2013 19:15:15 -0800 Subject: Camera: Change ProCamera to take IGraphicBufferProducer Change-Id: Iec62eead6d179aa5486f7719143340976bb76e7d --- camera/IProCameraUser.cpp | 11 +++++++---- camera/ProCamera.cpp | 9 +++++---- include/camera/IProCameraUser.h | 9 +++++---- services/camera/libcameraservice/ProCamera2Client.cpp | 12 ++++++++++-- services/camera/libcameraservice/ProCamera2Client.h | 6 +++--- 5 files changed, 30 insertions(+), 17 deletions(-) diff --git a/camera/IProCameraUser.cpp b/camera/IProCameraUser.cpp index cd7bf5c..e60cfe5 100644 --- a/camera/IProCameraUser.cpp +++ b/camera/IProCameraUser.cpp @@ -219,7 +219,7 @@ public: } virtual status_t createStream(int width, int height, int format, - const sp& surface, + const sp& bufferProducer, /*out*/ int* streamId) { @@ -229,7 +229,9 @@ public: data.writeInt32(height); data.writeInt32(format); - Surface::writeToParcel(surface, &data); + sp b(bufferProducer->asBinder()); + data.writeStrongBinder(b); + remote()->transact(CREATE_STREAM, data, &reply); int sId = reply.readInt32(); @@ -340,11 +342,12 @@ status_t BnProCameraUser::onTransact( height = data.readInt32(); format = data.readInt32(); - sp surface = Surface::readFromParcel(data); + sp bp = + interface_cast(data.readStrongBinder()); int streamId = -1; status_t ret; - ret = createStream(width, height, format, surface, &streamId); + ret = createStream(width, height, format, bp, &streamId); reply->writeInt32(streamId); reply->writeInt32(ret); diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 92ec9d6..8fd08f4 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -278,7 +278,8 @@ status_t ProCamera::createStream(int width, int height, int format, sp c = mCamera; if (c == 0) return NO_INIT; - return c->createStream(width, height, format, surface, streamId); + return c->createStream(width, height, format, surface->getIGraphicBufferProducer(), + streamId); } status_t ProCamera::createStream(int width, int height, int format, @@ -293,10 +294,10 @@ status_t ProCamera::createStream(int width, int height, int format, status_t stat = INVALID_OPERATION; if (bufferProducer != 0) { - binder = bufferProducer->asBinder(); + sp c = mCamera; + 
if (c == 0) return NO_INIT; - ALOGV("%s: createStreamT END (%d), StreamID = %d", __FUNCTION__, stat, - *streamId); + return c->createStream(width, height, format, bufferProducer, streamId); } else { *streamId = -1; diff --git a/include/camera/IProCameraUser.h b/include/camera/IProCameraUser.h index 3ef4676..a141769 100644 --- a/include/camera/IProCameraUser.h +++ b/include/camera/IProCameraUser.h @@ -63,10 +63,11 @@ public: virtual status_t requestStream(int streamId) = 0; virtual status_t cancelStream(int streamId) = 0; - virtual status_t createStream(int width, int height, int format, - const sp& surface, - /*out*/ - int* streamId) = 0; + virtual status_t createStream( + int width, int height, int format, + const sp& bufferProducer, + /*out*/ + int* streamId) = 0; // Create a request object from a template. virtual status_t createDefaultRequest(int templateId, diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp index 5ebe713..aa02f10 100644 --- a/services/camera/libcameraservice/ProCamera2Client.cpp +++ b/services/camera/libcameraservice/ProCamera2Client.cpp @@ -238,7 +238,7 @@ status_t ProCamera2Client::cancelStream(int streamId) { } status_t ProCamera2Client::createStream(int width, int height, int format, - const sp& surface, + const sp& bufferProducer, /*out*/ int* streamId) { @@ -254,7 +254,15 @@ status_t ProCamera2Client::createStream(int width, int height, int format, Mutex::Autolock icl(mIProCameraUserLock); - return mDevice->createStream(surface, width, height, format, /*size*/1, streamId); + sp binder; + sp window; + if (bufferProducer != 0) { + binder = bufferProducer->asBinder(); + window = new Surface(bufferProducer); + } + + return mDevice->createStream(window, width, height, format, /*size*/1, + streamId); } // Create a request object from a template. diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/ProCamera2Client.h index ed42e22..b72fd63 100644 --- a/services/camera/libcameraservice/ProCamera2Client.h +++ b/services/camera/libcameraservice/ProCamera2Client.h @@ -53,9 +53,9 @@ public: virtual status_t cancelStream(int streamId); virtual status_t createStream(int width, int height, int format, - const sp& surface, - /*out*/ - int* streamId); + const sp& bufferProducer, + /*out*/ + int* streamId); // Create a request object from a template. 
// -- Caller owns the newly allocated metadata -- cgit v1.1 From 5835cc46a2f06dbfa5fbdab70e091896ef2fb438 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 20 Feb 2013 19:29:53 -0800 Subject: Camera: ProCamera - add createStreamCpu and unit test Change-Id: I468172dbfdd78510b273bf9d119c950cbeda7ea3 --- camera/ProCamera.cpp | 111 ++++++++++--- camera/tests/ProCameraTests.cpp | 175 +++++++++++++++++---- include/camera/ProCamera.h | 86 ++++++++-- .../camera/libcameraservice/ProCamera2Client.cpp | 7 + 4 files changed, 316 insertions(+), 63 deletions(-) diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 8fd08f4..5ee0e4d 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -246,19 +246,16 @@ status_t ProCamera::cancelRequest(int requestId) return c->cancelRequest(requestId); } -status_t ProCamera::requestStream(int streamId) +status_t ProCamera::deleteStream(int streamId) { sp c = mCamera; if (c == 0) return NO_INIT; - return c->requestStream(streamId); -} -status_t ProCamera::cancelStream(int streamId) -{ - sp c = mCamera; - if (c == 0) return NO_INIT; + status_t s = c->cancelStream(streamId); + + mStreams.removeItem(streamId); - return c->cancelStream(streamId); + return s; } status_t ProCamera::createStream(int width, int height, int format, @@ -275,38 +272,76 @@ status_t ProCamera::createStream(int width, int height, int format, return BAD_VALUE; } - sp c = mCamera; - if (c == 0) return NO_INIT; - - return c->createStream(width, height, format, surface->getIGraphicBufferProducer(), - streamId); + return createStream(width, height, format, surface->getIGraphicBufferProducer(), + streamId); } status_t ProCamera::createStream(int width, int height, int format, const sp& bufferProducer, /*out*/ int* streamId) { + *streamId = -1; ALOGV("%s: createStreamT %dx%d (fmt=0x%x)", __FUNCTION__, width, height, format); - sp binder; - status_t stat = INVALID_OPERATION; + if (bufferProducer == 0) { + return BAD_VALUE; + } - if (bufferProducer != 0) { - sp c = mCamera; - if (c == 0) return NO_INIT; + sp c = mCamera; + status_t stat = c->createStream(width, height, format, bufferProducer, + streamId); - return c->createStream(width, height, format, bufferProducer, streamId); - } - else { - *streamId = -1; - return BAD_VALUE; + if (stat == OK) { + StreamInfo s(*streamId); + + mStreams.add(*streamId, s); } return stat; } +status_t ProCamera::createStreamCpu(int width, int height, int format, + int heapCount, + /*out*/ + int* streamId) +{ + ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height, + format); + + sp c = mCamera; + if (c == 0) return NO_INIT; + + sp cc = new CpuConsumer(heapCount); + cc->setName(String8("ProCamera::mCpuConsumer")); + + sp stc = new Surface( + cc->getProducerInterface()); + + status_t s = createStream(width, height, format, stc->getIGraphicBufferProducer(), + streamId); + + if (s != OK) { + ALOGE("%s: Failure to create stream %dx%d (fmt=0x%x)", __FUNCTION__, + width, height, format); + return s; + } + + sp frameAvailableListener = + new ProFrameListener(this, *streamId); + + getStreamInfo(*streamId).cpuStream = true; + getStreamInfo(*streamId).cpuConsumer = cc; + getStreamInfo(*streamId).stc = stc; + // for lifetime management + getStreamInfo(*streamId).frameAvailableListener = frameAvailableListener; + + cc->setFrameAvailableListener(frameAvailableListener); + + return s; +} + int ProCamera::getNumberOfCameras() { ALOGE("%s: not implemented yet", __FUNCTION__); return 1; @@ -329,4 +364,34 @@ status_t ProCamera::createDefaultRequest(int 
templateId, return c->createDefaultRequest(templateId, request); } +void ProCamera::onFrameAvailable(int streamId) { + ALOGV("%s: streamId = %d", __FUNCTION__, streamId); + + sp listener = mListener; + if (listener.get() != NULL) { + StreamInfo& stream = getStreamInfo(streamId); + + CpuConsumer::LockedBuffer buf; + + status_t stat = stream.cpuConsumer->lockNextBuffer(&buf); + if (stat != OK) { + ALOGE("%s: Failed to lock buffer, error code = %d", __FUNCTION__, + stat); + return; + } + + listener->onBufferReceived(streamId, buf); + stat = stream.cpuConsumer->unlockBuffer(buf); + + if (stat != OK) { + ALOGE("%s: Failed to unlock buffer, error code = %d", __FUNCTION__, + stat); + } + } +} + +ProCamera::StreamInfo& ProCamera::getStreamInfo(int streamId) { + return mStreams.editValueFor(streamId); +} + }; // namespace android diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index 230e160..f0a36e8 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -41,7 +41,12 @@ namespace client { #define TEST_DEBUGGING 0 #define TEST_LISTENER_TIMEOUT 1000000000 // 1 second listener timeout -#define TEST_FORMAT HAL_PIXEL_FORMAT_RGBA_8888 //TODO: YUY2 instead +#define TEST_FORMAT HAL_PIXEL_FORMAT_Y16 //TODO: YUY2 instead + +#define TEST_FORMAT_DEPTH HAL_PIXEL_FORMAT_Y16 + +#define TEST_CPU_FRAME_COUNT 2 +#define TEST_CPU_HEAP_COUNT 5 #if TEST_DEBUGGING #define dout std::cerr @@ -54,14 +59,15 @@ namespace client { class ProCameraTest; -enum LockEvent { +enum ProEvent { UNKNOWN, ACQUIRED, RELEASED, - STOLEN + STOLEN, + BUFFER_RECEIVED, }; -typedef Vector EventList; +typedef Vector EventList; class ProCameraTestThread : public Thread { @@ -92,7 +98,7 @@ public: { Mutex::Autolock al(mListenerMutex); - if (mLockEventList.size() > 0) { + if (mProEventList.size() > 0) { return OK; } } @@ -105,35 +111,35 @@ public: void ReadEvents(EventList& out) { Mutex::Autolock al(mListenerMutex); - for (size_t i = 0; i < mLockEventList.size(); ++i) { - out.push(mLockEventList[i]); + for (size_t i = 0; i < mProEventList.size(); ++i) { + out.push(mProEventList[i]); } - mLockEventList.clear(); + mProEventList.clear(); } /** * Dequeue 1 event from the event queue. 
* Returns UNKNOWN if queue is empty */ - LockEvent ReadEvent() { + ProEvent ReadEvent() { Mutex::Autolock al(mListenerMutex); - if (mLockEventList.size() == 0) { + if (mProEventList.size() == 0) { return UNKNOWN; } - LockEvent ev = mLockEventList[0]; - mLockEventList.removeAt(0); + ProEvent ev = mProEventList[0]; + mProEventList.removeAt(0); return ev; } private: - void QueueEvent(LockEvent ev) { + void QueueEvent(ProEvent ev) { { Mutex::Autolock al(mListenerMutex); - mLockEventList.push(ev); + mProEventList.push(ev); } @@ -168,6 +174,20 @@ protected: << " " << ext3 << std::endl; } + virtual void onBufferReceived(int streamId, + const CpuConsumer::LockedBuffer& buf) { + + dout << "Buffer received on streamId = " << streamId << + ", dataPtr = " << (void*)buf.data << std::endl; + + QueueEvent(BUFFER_RECEIVED); + + } + virtual void onRequestReceived( + camera_metadata* request) { + free_camera_metadata(request); + } + // TODO: remove virtual void notify(int32_t , int32_t , int32_t ) {} @@ -176,7 +196,7 @@ protected: virtual void postDataTimestamp(nsecs_t , int32_t , const sp& ) {} - Vector mLockEventList; + Vector mProEventList; Mutex mListenerMutex; Mutex mConditionMutex; Condition mListenerCondition; @@ -217,6 +237,9 @@ protected: sp mComposerClient; sp mSurfaceControl; + sp mDepthComposerClient; + sp mDepthSurfaceControl; + int getSurfaceWidth() { return 512; } @@ -233,6 +256,8 @@ protected: getSurfaceWidth(), getSurfaceHeight(), PIXEL_FORMAT_RGB_888, 0); + mSurfaceControl->setPosition(640, 0); + ASSERT_TRUE(mSurfaceControl != NULL); ASSERT_TRUE(mSurfaceControl->isValid()); @@ -247,6 +272,31 @@ protected: ASSERT_NE((void*)NULL, surface.get()); } + void createDepthOnScreenSurface(sp& surface) { + mDepthComposerClient = new SurfaceComposerClient; + ASSERT_EQ(NO_ERROR, mDepthComposerClient->initCheck()); + + mDepthSurfaceControl = mDepthComposerClient->createSurface( + String8("ProCameraTest StreamingImage Surface"), + getSurfaceWidth(), getSurfaceHeight(), + PIXEL_FORMAT_RGB_888, 0); + + mDepthSurfaceControl->setPosition(640, 0); + + ASSERT_TRUE(mDepthSurfaceControl != NULL); + ASSERT_TRUE(mDepthSurfaceControl->isValid()); + + SurfaceComposerClient::openGlobalTransaction(); + ASSERT_EQ(NO_ERROR, mDepthSurfaceControl->setLayer(0x7FFFFFFF)); + ASSERT_EQ(NO_ERROR, mDepthSurfaceControl->show()); + SurfaceComposerClient::closeGlobalTransaction(); + + sp window = mDepthSurfaceControl->getSurface(); + surface = mDepthSurfaceControl->getSurface(); + + ASSERT_NE((void*)NULL, surface.get()); + } + }; sp ProCameraTest::mTestThread; @@ -316,14 +366,15 @@ TEST_F(ProCameraTest, StreamingImage) { mDisplaySecs = 0; } - sp surface; + sp depthSurface; if (mDisplaySecs > 0) { - createOnScreenSurface(/*out*/surface); + createDepthOnScreenSurface(/*out*/depthSurface); } - int streamId = -1; - EXPECT_OK(mCamera->createStream(/*width*/640, /*height*/480, TEST_FORMAT, - surface, &streamId)); - EXPECT_NE(-1, streamId); + + int depthStreamId = -1; + EXPECT_OK(mCamera->createStream(/*width*/320, /*height*/240, + TEST_FORMAT_DEPTH, depthSurface, &depthStreamId)); + EXPECT_NE(-1, depthStreamId); EXPECT_OK(mCamera->exclusiveTryLock()); /* iterate in a loop submitting requests every frame. @@ -345,23 +396,26 @@ TEST_F(ProCameraTest, StreamingImage) { // wow what a verbose API. 
// i would give a loaf of bread for // metadata->updateOrInsert(keys.request.output.streams, streamId); + uint8_t allStreams[] = { depthStreamId }; + size_t streamCount = sizeof(allStreams) / sizeof(allStreams[0]); + camera_metadata_entry_t entry; uint32_t tag = static_cast(ANDROID_REQUEST_OUTPUT_STREAMS); int find = find_camera_metadata_entry(request, tag, &entry); if (find == -ENOENT) { - if (add_camera_metadata_entry(request, tag, &streamId, /*data_count*/1) - != OK) { + if (add_camera_metadata_entry(request, tag, &allStreams, + /*data_count*/streamCount) != OK) { camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000); ASSERT_OK(append_camera_metadata(tmp, request)); free_camera_metadata(request); request = tmp; - ASSERT_OK(add_camera_metadata_entry(request, tag, &streamId, - /*data_count*/1)); + ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams, + /*data_count*/streamCount)); } } else { - ASSERT_OK(update_camera_metadata_entry(request, entry.index, &streamId, - /*data_count*/1, &entry)); + ASSERT_OK(update_camera_metadata_entry(request, entry.index, + &allStreams, /*data_count*/streamCount, &entry)); } EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true)); @@ -370,7 +424,72 @@ TEST_F(ProCameraTest, StreamingImage) { sleep(mDisplaySecs); free_camera_metadata(request); - EXPECT_OK(mCamera->cancelStream(streamId)); + + for (int i = 0; i < streamCount; ++i) { + EXPECT_OK(mCamera->deleteStream(allStreams[i])); + } + EXPECT_OK(mCamera->exclusiveUnlock()); +} + +TEST_F(ProCameraTest, CpuConsumer) { + if (HasFatalFailure()) { + return; + } + int streamId = -1; + EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240, + TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &streamId)); + EXPECT_NE(-1, streamId); + + EXPECT_OK(mCamera->exclusiveTryLock()); + EXPECT_EQ(OK, mListener->WaitForEvent()); + EXPECT_EQ(ACQUIRED, mListener->ReadEvent()); + /* iterate in a loop submitting requests every frame. + * what kind of requests doesnt really matter, just whatever. + */ + + // it would probably be better to use CameraMetadata from camera service. 
+ camera_metadata_t *request = NULL; + EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, + /*out*/&request)); + EXPECT_NE((void*)NULL, request); + + /*FIXME: dont need this later, at which point the above should become an + ASSERT_NE*/ + if(request == NULL) request = allocate_camera_metadata(10, 100); + + // set the output streams to just this stream ID + + uint8_t allStreams[] = { streamId }; + camera_metadata_entry_t entry; + uint32_t tag = static_cast(ANDROID_REQUEST_OUTPUT_STREAMS); + int find = find_camera_metadata_entry(request, tag, &entry); + if (find == -ENOENT) { + if (add_camera_metadata_entry(request, tag, &allStreams, + /*data_count*/1) != OK) { + camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000); + ASSERT_OK(append_camera_metadata(tmp, request)); + free_camera_metadata(request); + request = tmp; + + ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams, + /*data_count*/1)); + } + } else { + ASSERT_OK(update_camera_metadata_entry(request, entry.index, + &allStreams, /*data_count*/1, &entry)); + } + + EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true)); + + // Consume a couple of frames + for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { + EXPECT_EQ(OK, mListener->WaitForEvent()); + EXPECT_EQ(BUFFER_RECEIVED, mListener->ReadEvent()); + } + + // Done: clean up + free_camera_metadata(request); + EXPECT_OK(mCamera->deleteStream(streamId)); EXPECT_OK(mCamera->exclusiveUnlock()); } diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index 9b763a3..4dda533 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -18,17 +18,20 @@ #define ANDROID_HARDWARE_PRO_CAMERA_H #include +#include #include #include #include #include #include +#include struct camera_metadata; namespace android { -// ref-counted object for callbacks +// All callbacks on this class are concurrent +// (they come from separate threads) class ProCameraListener : public CameraListener { public: @@ -42,6 +45,21 @@ public: // Lock free. virtual void onTriggerNotify(int32_t msgType, int32_t ext1, int32_t ext2) = 0; + + // OnBufferReceived and OnRequestReceived can come in with any order, + // use android.sensor.timestamp and LockedBuffer.timestamp to correlate them + + // TODO: implement in IProCameraCallbacks, ProCamera2Client + + // A new frame buffer has been received for this stream. + // -- This callback only fires for createStreamCpu streams + // -- The buffer must not be accessed after this function call completes + virtual void onBufferReceived(int streamId, + const CpuConsumer::LockedBuffer& buf) = 0; + // A new metadata buffer has been received. + // -- Ownership of request passes on to the callee, + // free with free_camera_metadata. + virtual void onRequestReceived(camera_metadata* request) = 0; }; class ProCamera : public BnProCameraCallbacks, public IBinder::DeathRecipient @@ -109,22 +127,15 @@ public: * Lock free. Service maintains counter of streams. */ status_t requestStream(int streamId); - /** - * Ask for a stream to be disabled. - * Lock free. Service maintains counter of streams. - * Errors: BAD_VALUE if unknown stream ID. - */ // TODO: remove requestStream, its useless. -// TODO: rename cancelStream to deleteStream -// can probably do it with a grep/sed - /** - * Ask for a stream to be disabled. - * Lock free. Service maintains counter of streams. + * Delete a stream. + * Lock free. * Errors: BAD_VALUE if unknown stream ID. 
+ * PERMISSION_DENIED if the stream wasn't yours */ - status_t cancelStream(int streamId); + status_t deleteStream(int streamId); /** * Create a new HW stream, whose sink will be the window. @@ -145,6 +156,10 @@ public: const sp& bufferProducer, /*out*/ int* streamId); + status_t createStreamCpu(int width, int height, int format, + int heapCount, + /*out*/ + int* streamId); // Create a request object from a template. status_t createDefaultRequest(int templateId, @@ -203,6 +218,53 @@ private: static Mutex mLock; static sp mCameraService; + class ProFrameListener : public CpuConsumer::FrameAvailableListener { + public: + ProFrameListener(wp camera, int streamID) { + mCamera = camera; + mStreamId = streamID; + } + + protected: + virtual void onFrameAvailable() { + sp c = mCamera.promote(); + if (c.get() != NULL) { + c->onFrameAvailable(mStreamId); + } + } + + private: + wp mCamera; + int mStreamId; + }; + friend class ProFrameListener; + + struct StreamInfo + { + StreamInfo(int streamId) { + this->streamID = streamId; + cpuStream = false; + } + + StreamInfo() { + streamID = -1; + cpuStream = false; + } + + int streamID; + bool cpuStream; + sp cpuConsumer; + sp frameAvailableListener; + sp stc; + }; + + KeyedVector mStreams; + + + void onFrameAvailable(int streamId); + + StreamInfo& getStreamInfo(int streamId); + }; diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp index aa02f10..f850034 100644 --- a/services/camera/libcameraservice/ProCamera2Client.cpp +++ b/services/camera/libcameraservice/ProCamera2Client.cpp @@ -234,6 +234,13 @@ status_t ProCamera2Client::cancelStream(int streamId) { Mutex::Autolock icl(mIProCameraUserLock); + mDevice->clearStreamingRequest(); + + status_t code; + if ((code = mDevice->waitUntilDrained()) != OK) { + ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__, code); + } + return mDevice->deleteStream(streamId); } -- cgit v1.1 From eb72e1796b3af548e87891a6d2b73b0567807f25 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 21 Feb 2013 11:43:14 -0800 Subject: Camera: ProCameraTests - add CpuConsumer tests for dual streams Change-Id: Ia010450dc79ac2841645b8c2e1553c9ec74ee5e3 --- camera/tests/ProCameraTests.cpp | 168 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 166 insertions(+), 2 deletions(-) diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index f0a36e8..dafa995 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -43,6 +43,7 @@ namespace client { #define TEST_LISTENER_TIMEOUT 1000000000 // 1 second listener timeout #define TEST_FORMAT HAL_PIXEL_FORMAT_Y16 //TODO: YUY2 instead +#define TEST_FORMAT_MAIN HAL_PIXEL_FORMAT_Y8 #define TEST_FORMAT_DEPTH HAL_PIXEL_FORMAT_Y16 #define TEST_CPU_FRAME_COUNT 2 @@ -352,7 +353,7 @@ TEST_F(ProCameraTest, LockingAsynchronous) { } // Stream directly to the screen. -TEST_F(ProCameraTest, StreamingImage) { +TEST_F(ProCameraTest, StreamingImageSingle) { if (HasFatalFailure()) { return; } @@ -431,7 +432,94 @@ TEST_F(ProCameraTest, StreamingImage) { EXPECT_OK(mCamera->exclusiveUnlock()); } -TEST_F(ProCameraTest, CpuConsumer) { +// Stream directly to the screen. 
+TEST_F(ProCameraTest, StreamingImageDual) { + if (HasFatalFailure()) { + return; + } + char* displaySecsEnv = getenv("TEST_DISPLAY_SECS"); + if (displaySecsEnv != NULL) { + mDisplaySecs = atoi(displaySecsEnv); + if (mDisplaySecs < 0) { + mDisplaySecs = 0; + } + } else { + mDisplaySecs = 0; + } + + sp surface; + sp depthSurface; + if (mDisplaySecs > 0) { + createOnScreenSurface(/*out*/surface); + createDepthOnScreenSurface(/*out*/depthSurface); + } + + int streamId = -1; + EXPECT_OK(mCamera->createStream(/*width*/1280, /*height*/960, + TEST_FORMAT_MAIN, surface, &streamId)); + EXPECT_NE(-1, streamId); + + int depthStreamId = -1; + EXPECT_OK(mCamera->createStream(/*width*/320, /*height*/240, + TEST_FORMAT_DEPTH, depthSurface, &depthStreamId)); + EXPECT_NE(-1, depthStreamId); + + EXPECT_OK(mCamera->exclusiveTryLock()); + /* + */ + /* iterate in a loop submitting requests every frame. + * what kind of requests doesnt really matter, just whatever. + */ + + // it would probably be better to use CameraMetadata from camera service. + camera_metadata_t *request = NULL; + EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, + /*out*/&request)); + EXPECT_NE((void*)NULL, request); + + /*FIXME: dont need this later, at which point the above should become an + ASSERT_NE*/ + if(request == NULL) request = allocate_camera_metadata(10, 100); + + // set the output streams to just this stream ID + + // wow what a verbose API. + uint8_t allStreams[] = { streamId, depthStreamId }; + // IMPORTANT. bad things will happen if its not a uint8. + size_t streamCount = sizeof(allStreams) / sizeof(allStreams[0]); + camera_metadata_entry_t entry; + uint32_t tag = static_cast(ANDROID_REQUEST_OUTPUT_STREAMS); + int find = find_camera_metadata_entry(request, tag, &entry); + if (find == -ENOENT) { + if (add_camera_metadata_entry(request, tag, &allStreams, + /*data_count*/streamCount) != OK) { + camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000); + ASSERT_OK(append_camera_metadata(tmp, request)); + free_camera_metadata(request); + request = tmp; + + ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams, + /*data_count*/streamCount)); + } + } else { + ASSERT_OK(update_camera_metadata_entry(request, entry.index, + &allStreams, /*data_count*/streamCount, &entry)); + } + + EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true)); + + dout << "will sleep now for " << mDisplaySecs << std::endl; + sleep(mDisplaySecs); + + free_camera_metadata(request); + + for (int i = 0; i < streamCount; ++i) { + EXPECT_OK(mCamera->deleteStream(allStreams[i])); + } + EXPECT_OK(mCamera->exclusiveUnlock()); +} + +TEST_F(ProCameraTest, CpuConsumerSingle) { if (HasFatalFailure()) { return; } @@ -493,6 +581,82 @@ TEST_F(ProCameraTest, CpuConsumer) { EXPECT_OK(mCamera->exclusiveUnlock()); } +TEST_F(ProCameraTest, CpuConsumerDual) { + if (HasFatalFailure()) { + return; + } + int streamId = -1; + EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, + TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &streamId)); + EXPECT_NE(-1, streamId); + + int depthStreamId = -1; + EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240, + TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &depthStreamId)); + EXPECT_NE(-1, depthStreamId); + + EXPECT_OK(mCamera->exclusiveTryLock()); + EXPECT_EQ(OK, mListener->WaitForEvent()); + EXPECT_EQ(ACQUIRED, mListener->ReadEvent()); + /* + */ + /* iterate in a loop submitting requests every frame. + * what kind of requests doesnt really matter, just whatever. 
+ */ + + // it would probably be better to use CameraMetadata from camera service. + camera_metadata_t *request = NULL; + EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, + /*out*/&request)); + EXPECT_NE((void*)NULL, request); + + if(request == NULL) request = allocate_camera_metadata(10, 100); + + // set the output streams to just this stream ID + + // wow what a verbose API. + uint8_t allStreams[] = { streamId, depthStreamId }; + size_t streamCount = 2; + camera_metadata_entry_t entry; + uint32_t tag = static_cast(ANDROID_REQUEST_OUTPUT_STREAMS); + int find = find_camera_metadata_entry(request, tag, &entry); + if (find == -ENOENT) { + if (add_camera_metadata_entry(request, tag, &allStreams, + /*data_count*/streamCount) != OK) { + camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000); + ASSERT_OK(append_camera_metadata(tmp, request)); + free_camera_metadata(request); + request = tmp; + + ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams, + /*data_count*/streamCount)); + } + } else { + ASSERT_OK(update_camera_metadata_entry(request, entry.index, + &allStreams, /*data_count*/streamCount, &entry)); + } + + EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true)); + + // Consume a couple of frames + for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { + // stream id 1 + EXPECT_EQ(OK, mListener->WaitForEvent()); + EXPECT_EQ(BUFFER_RECEIVED, mListener->ReadEvent()); + + // stream id 2 + EXPECT_EQ(OK, mListener->WaitForEvent()); + EXPECT_EQ(BUFFER_RECEIVED, mListener->ReadEvent()); + + //TODO: events should be a struct with some data like the stream id + } + + // Done: clean up + free_camera_metadata(request); + EXPECT_OK(mCamera->deleteStream(streamId)); + EXPECT_OK(mCamera->exclusiveUnlock()); +} + } } } -- cgit v1.1 From a91537e268f2b35f9f0dfdc0c4f84655c93285ae Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 21 Feb 2013 12:02:29 -0800 Subject: Camera: ProCamera - implement onResultReceived callback for metadata callbacks Change-Id: I46775402b007244bc383d6343a620eebbd492aad --- camera/IProCameraCallbacks.cpp | 25 +++ camera/ProCamera.cpp | 18 +++ camera/tests/ProCameraTests.cpp | 128 ++++++++++++++-- include/camera/IProCameraCallbacks.h | 7 + include/camera/ProCamera.h | 17 ++- services/camera/libcameraservice/Android.mk | 3 +- .../camera/libcameraservice/ProCamera2Client.cpp | 35 +++++ .../camera/libcameraservice/ProCamera2Client.h | 12 +- .../libcameraservice/camera2/ProFrameProcessor.cpp | 168 +++++++++++++++++++++ .../libcameraservice/camera2/ProFrameProcessor.h | 81 ++++++++++ 10 files changed, 474 insertions(+), 20 deletions(-) create mode 100644 services/camera/libcameraservice/camera2/ProFrameProcessor.cpp create mode 100644 services/camera/libcameraservice/camera2/ProFrameProcessor.h diff --git a/camera/IProCameraCallbacks.cpp b/camera/IProCameraCallbacks.cpp index 756fba2..6cd36bf 100644 --- a/camera/IProCameraCallbacks.cpp +++ b/camera/IProCameraCallbacks.cpp @@ -28,6 +28,8 @@ #include +#include + namespace android { enum { @@ -35,8 +37,12 @@ enum { DATA_CALLBACK, DATA_CALLBACK_TIMESTAMP, LOCK_STATUS_CHANGED, + RESULT_RECEIVED, }; +void readMetadata(const Parcel& data, camera_metadata_t** out); +void writeMetadata(Parcel& data, camera_metadata_t* metadata); + class BpProCameraCallbacks: public BpInterface { public: @@ -96,6 +102,15 @@ public: remote()->transact(LOCK_STATUS_CHANGED, data, &reply, IBinder::FLAG_ONEWAY); } + + void onResultReceived(int32_t frameId, camera_metadata* result) { + ALOGV("onResultReceived"); + Parcel data, 
reply; + data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor()); + data.writeInt32(frameId); + writeMetadata(data, result); + remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY); + } }; IMPLEMENT_META_INTERFACE(ProCameraCallbacks, @@ -152,6 +167,16 @@ status_t BnProCameraCallbacks::onTransact( onLockStatusChanged(newLockStatus); return NO_ERROR; } break; + case RESULT_RECEIVED: { + ALOGV("RESULT_RECEIVED"); + CHECK_INTERFACE(IProCameraCallbacks, data, reply); + int32_t frameId = data.readInt32(); + camera_metadata_t *result = NULL; + readMetadata(data, &result); + onResultReceived(frameId, result); + return NO_ERROR; + break; + } default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 5ee0e4d..142c03b 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -33,6 +33,8 @@ #include #include +#include + namespace android { // client singleton for camera service binder interface @@ -198,6 +200,22 @@ void ProCamera::onLockStatusChanged( } } +void ProCamera::onResultReceived(int32_t frameId, camera_metadata* result) { + ALOGV("%s: frameId = %d, result = %p", __FUNCTION__, frameId, result); + + sp listener; + { + Mutex::Autolock _l(mLock); + listener = mListener; + } + if (listener != NULL) { + listener->onResultReceived(frameId, result); + } else { + free_camera_metadata(result); + } + +} + status_t ProCamera::exclusiveTryLock() { sp c = mCamera; diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index dafa995..021fbae 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -66,8 +66,13 @@ enum ProEvent { RELEASED, STOLEN, BUFFER_RECEIVED, + RESULT_RECEIVED, }; +inline int ProEvent_Mask(ProEvent e) { + return (1 << static_cast(e)); +} + typedef Vector EventList; class ProCameraTestThread : public Thread @@ -93,6 +98,12 @@ public: class ProCameraTestListener : public ProCameraListener { public: + static const int EVENT_MASK_ALL = 0xFFFFFFFF; + + ProCameraTestListener() { + mEventMask = EVENT_MASK_ALL; + } + status_t WaitForEvent() { Mutex::Autolock cal(mConditionMutex); @@ -136,15 +147,26 @@ public: return ev; } + void SetEventMask(int eventMask) { + Mutex::Autolock al(mListenerMutex); + mEventMask = eventMask; + } + private: void QueueEvent(ProEvent ev) { + bool eventAdded = false; { Mutex::Autolock al(mListenerMutex); - mProEventList.push(ev); - } + if (ProEvent_Mask(ev) & mEventMask) { + mProEventList.push(ev); + eventAdded = true; + } + } - mListenerCondition.broadcast(); + if (eventAdded) { + mListenerCondition.broadcast(); + } } protected: @@ -184,8 +206,11 @@ protected: QueueEvent(BUFFER_RECEIVED); } - virtual void onRequestReceived( - camera_metadata* request) { + virtual void onResultReceived(int32_t frameId, + camera_metadata* request) { + dout << "Result received frameId = " << frameId + << ", requestPtr = " << (void*)request << std::endl; + QueueEvent(RESULT_RECEIVED); free_camera_metadata(request); } @@ -201,6 +226,7 @@ protected: Mutex mListenerMutex; Mutex mConditionMutex; Condition mListenerCondition; + int mEventMask; }; class ProCameraTest : public ::testing::Test { @@ -309,6 +335,10 @@ TEST_F(ProCameraTest, LockingImmediate) { return; } + mListener->SetEventMask(ProEvent_Mask(ACQUIRED) | + ProEvent_Mask(STOLEN) | + ProEvent_Mask(RELEASED)); + EXPECT_FALSE(mCamera->hasExclusiveLock()); EXPECT_EQ(OK, mCamera->exclusiveTryLock()); // at this point we definitely have the lock @@ -332,13 +362,17 @@ 
TEST_F(ProCameraTest, LockingAsynchronous) { return; } + + mListener->SetEventMask(ProEvent_Mask(ACQUIRED) | + ProEvent_Mask(STOLEN) | + ProEvent_Mask(RELEASED)); + // TODO: Add another procamera that has a lock here. // then we can be test that the lock wont immediately be acquired EXPECT_FALSE(mCamera->hasExclusiveLock()); - EXPECT_EQ(OK, mCamera->exclusiveLock()); - // at this point we may or may not have the lock - // we cant be sure until we get an ACQUIRED event + EXPECT_EQ(OK, mCamera->exclusiveTryLock()); + // at this point we definitely have the lock EXPECT_EQ(OK, mListener->WaitForEvent()); EXPECT_EQ(ACQUIRED, mListener->ReadEvent()); @@ -353,7 +387,7 @@ TEST_F(ProCameraTest, LockingAsynchronous) { } // Stream directly to the screen. -TEST_F(ProCameraTest, StreamingImageSingle) { +TEST_F(ProCameraTest, DISABLED_StreamingImageSingle) { if (HasFatalFailure()) { return; } @@ -433,7 +467,7 @@ TEST_F(ProCameraTest, StreamingImageSingle) { } // Stream directly to the screen. -TEST_F(ProCameraTest, StreamingImageDual) { +TEST_F(ProCameraTest, DISABLED_StreamingImageDual) { if (HasFatalFailure()) { return; } @@ -523,6 +557,9 @@ TEST_F(ProCameraTest, CpuConsumerSingle) { if (HasFatalFailure()) { return; } + + mListener->SetEventMask(ProEvent_Mask(BUFFER_RECEIVED)); + int streamId = -1; EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240, TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &streamId)); @@ -585,6 +622,9 @@ TEST_F(ProCameraTest, CpuConsumerDual) { if (HasFatalFailure()) { return; } + + mListener->SetEventMask(ProEvent_Mask(BUFFER_RECEIVED)); + int streamId = -1; EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &streamId)); @@ -596,8 +636,6 @@ TEST_F(ProCameraTest, CpuConsumerDual) { EXPECT_NE(-1, depthStreamId); EXPECT_OK(mCamera->exclusiveTryLock()); - EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(ACQUIRED, mListener->ReadEvent()); /* */ /* iterate in a loop submitting requests every frame. @@ -657,6 +695,72 @@ TEST_F(ProCameraTest, CpuConsumerDual) { EXPECT_OK(mCamera->exclusiveUnlock()); } +TEST_F(ProCameraTest, ResultReceiver) { + if (HasFatalFailure()) { + return; + } + + mListener->SetEventMask(ProEvent_Mask(RESULT_RECEIVED)); + //FIXME: if this is run right after the previous test we get BUFFER_RECEIVED + // need to filter out events at read time + + int streamId = -1; + EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, + TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &streamId)); + EXPECT_NE(-1, streamId); + + EXPECT_OK(mCamera->exclusiveTryLock()); + /* + */ + /* iterate in a loop submitting requests every frame. + * what kind of requests doesnt really matter, just whatever. 
+ */ + + camera_metadata_t *request = NULL; + EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, + /*out*/&request)); + EXPECT_NE((void*)NULL, request); + + /*FIXME*/ + if(request == NULL) request = allocate_camera_metadata(10, 100); + + // set the output streams to just this stream ID + + uint8_t allStreams[] = { streamId }; + size_t streamCount = 1; + camera_metadata_entry_t entry; + uint32_t tag = static_cast(ANDROID_REQUEST_OUTPUT_STREAMS); + int find = find_camera_metadata_entry(request, tag, &entry); + if (find == -ENOENT) { + if (add_camera_metadata_entry(request, tag, &allStreams, + /*data_count*/streamCount) != OK) { + camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000); + ASSERT_OK(append_camera_metadata(tmp, request)); + free_camera_metadata(request); + request = tmp; + + ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams, + /*data_count*/streamCount)); + } + } else { + ASSERT_OK(update_camera_metadata_entry(request, entry.index, + &allStreams, /*data_count*/streamCount, &entry)); + } + + EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true)); + + // Consume a couple of results + for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { + EXPECT_EQ(OK, mListener->WaitForEvent()); + EXPECT_EQ(RESULT_RECEIVED, mListener->ReadEvent()); + } + + // Done: clean up + free_camera_metadata(request); + EXPECT_OK(mCamera->deleteStream(streamId)); + EXPECT_OK(mCamera->exclusiveUnlock()); +} + } } } diff --git a/include/camera/IProCameraCallbacks.h b/include/camera/IProCameraCallbacks.h index e5be099..fc24026 100644 --- a/include/camera/IProCameraCallbacks.h +++ b/include/camera/IProCameraCallbacks.h @@ -24,6 +24,8 @@ #include #include +struct camera_metadata; + namespace android { class IProCameraCallbacks: public IInterface @@ -47,6 +49,11 @@ public: }; virtual void onLockStatusChanged(LockStatus newLockStatus) = 0; + + /** Missing by design: implementation is client-side in ProCamera.cpp **/ + // virtual void onBufferReceived(int streamId, + // const CpuConsumer::LockedBufer& buf); + virtual void onResultReceived(int32_t frameId, camera_metadata* result) = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index 4dda533..7cd9138 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -49,17 +49,19 @@ public: // OnBufferReceived and OnRequestReceived can come in with any order, // use android.sensor.timestamp and LockedBuffer.timestamp to correlate them - // TODO: implement in IProCameraCallbacks, ProCamera2Client - // A new frame buffer has been received for this stream. // -- This callback only fires for createStreamCpu streams + // -- Use buf.timestamp to correlate with metadata's + // android.sensor.timestamp // -- The buffer must not be accessed after this function call completes virtual void onBufferReceived(int streamId, const CpuConsumer::LockedBuffer& buf) = 0; - // A new metadata buffer has been received. - // -- Ownership of request passes on to the callee, - // free with free_camera_metadata. - virtual void onRequestReceived(camera_metadata* request) = 0; + /** + * A new metadata buffer has been received. + * -- Ownership of request passes on to the callee, free with + * free_camera_metadata. 
+ */ + virtual void onResultReceived(int32_t frameId, camera_metadata* result) = 0; }; class ProCamera : public BnProCameraCallbacks, public IBinder::DeathRecipient @@ -189,6 +191,9 @@ protected: virtual void onLockStatusChanged( IProCameraCallbacks::LockStatus newLockStatus); + virtual void onResultReceived(int32_t frameId, + camera_metadata* result); + class DeathNotifier: public IBinder::DeathRecipient { public: diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index c7a8e4a..f76c861 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -20,7 +20,8 @@ LOCAL_SRC_FILES:= \ camera2/ZslProcessor.cpp \ camera2/BurstCapture.cpp \ camera2/JpegCompressor.cpp \ - camera2/CaptureSequencer.cpp + camera2/CaptureSequencer.cpp \ + camera2/ProFrameProcessor.cpp \ LOCAL_SHARED_LIBRARIES:= \ libui \ diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp index f850034..7611796 100644 --- a/services/camera/libcameraservice/ProCamera2Client.cpp +++ b/services/camera/libcameraservice/ProCamera2Client.cpp @@ -26,6 +26,7 @@ #include #include "camera2/Parameters.h" #include "ProCamera2Client.h" +#include "camera2/ProFrameProcessor.h" namespace android { using namespace camera2; @@ -82,6 +83,16 @@ status_t ProCamera2Client::initialize(camera_module_t *module) res = mDevice->setNotifyCallback(this); + String8 threadName; + mFrameProcessor = new ProFrameProcessor(this); + threadName = String8::format("PC2-%d-FrameProc", + mCameraId); + mFrameProcessor->run(threadName.string()); + + mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID, + FRAME_PROCESSOR_LISTENER_MAX_ID, + /*listener*/this); + return OK; } @@ -307,6 +318,7 @@ status_t ProCamera2Client::dump(int fd, const Vector& args) { result.append(" State: "); // TODO: print dynamic/request section from most recent requests + mFrameProcessor->dump(fd, args); #define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break; @@ -338,7 +350,12 @@ void ProCamera2Client::disconnect() { if (mDevice == 0) return; ALOGV("Camera %d: Shutting down", mCameraId); + mFrameProcessor->removeListener(FRAME_PROCESSOR_LISTENER_MIN_ID, + FRAME_PROCESSOR_LISTENER_MAX_ID, + /*listener*/this); + mFrameProcessor->requestExit(); ALOGV("Camera %d: Waiting for threads", mCameraId); + mFrameProcessor->join(); ALOGV("Camera %d: Disconnecting device", mCameraId); mDevice->disconnect(); @@ -446,4 +463,22 @@ void ProCamera2Client::SharedCameraCallbacks::clear() { mRemoteCallback.clear(); } +void ProCamera2Client::onFrameAvailable(int32_t frameId, + const CameraMetadata& frame) { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mIProCameraUserLock); + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + + if (mRemoteCallback != NULL) { + CameraMetadata tmp(frame); + camera_metadata_t* meta = tmp.release(); + ALOGV("%s: meta = %p ", __FUNCTION__, meta); + mRemoteCallback->onResultReceived(frameId, meta); + tmp.acquire(meta); + } + +} + } // namespace android diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/ProCamera2Client.h index b72fd63..dfea1e1 100644 --- a/services/camera/libcameraservice/ProCamera2Client.h +++ b/services/camera/libcameraservice/ProCamera2Client.h @@ -19,6 +19,7 @@ #include "Camera2Device.h" #include "CameraService.h" +#include "camera2/ProFrameProcessor.h" namespace android { @@ -29,7 +30,8 @@ class IMemory; */ 
class ProCamera2Client : public CameraService::ProClient, - public Camera2Device::NotificationListener + public Camera2Device::NotificationListener, + public camera2::ProFrameProcessor::FilteredListener { public: /** @@ -120,6 +122,10 @@ public: mutable Mutex mRemoteCallbackLock; } mSharedCameraCallbacks; +protected: + /** FilteredListener implementation **/ + virtual void onFrameAvailable(int32_t frameId, const CameraMetadata& frame); + private: /** IProCameraUser interface-related private members */ @@ -137,6 +143,10 @@ private: sp mPreviewSurface; /** Preview callback related members */ + sp mFrameProcessor; + static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0; + static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL; + /** Camera2Device instance wrapping HAL2 entry */ sp mDevice; diff --git a/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp b/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp new file mode 100644 index 0000000..8d4933c --- /dev/null +++ b/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp @@ -0,0 +1,168 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera2-ProFrameProcessor" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include + +#include "ProFrameProcessor.h" +#include "../Camera2Device.h" +#include "../ProCamera2Client.h" + +namespace android { +namespace camera2 { + +ProFrameProcessor::ProFrameProcessor(wp client): + Thread(false), mClient(client) { +} + +ProFrameProcessor::~ProFrameProcessor() { + ALOGV("%s: Exit", __FUNCTION__); +} + +status_t ProFrameProcessor::registerListener(int32_t minId, + int32_t maxId, wp listener) { + Mutex::Autolock l(mInputMutex); + ALOGV("%s: Registering listener for frame id range %d - %d", + __FUNCTION__, minId, maxId); + RangeListener rListener = { minId, maxId, listener }; + mRangeListeners.push_back(rListener); + return OK; +} + +status_t ProFrameProcessor::removeListener(int32_t minId, + int32_t maxId, wp listener) { + Mutex::Autolock l(mInputMutex); + List::iterator item = mRangeListeners.begin(); + while (item != mRangeListeners.end()) { + if (item->minId == minId && + item->maxId == maxId && + item->listener == listener) { + item = mRangeListeners.erase(item); + } else { + item++; + } + } + return OK; +} + +void ProFrameProcessor::dump(int fd, const Vector& args) { + String8 result(" Latest received frame:\n"); + write(fd, result.string(), result.size()); + mLastFrame.dump(fd, 2, 6); +} + +bool ProFrameProcessor::threadLoop() { + status_t res; + + sp device; + { + sp client = mClient.promote(); + if (client == 0) return false; + device = client->getCameraDevice(); + if (device == 0) return false; + } + + res = device->waitForNextFrame(kWaitDuration); + if (res == OK) { + sp client = mClient.promote(); + if (client == 0) return false; + processNewFrames(client); + } else if (res != TIMED_OUT) { + ALOGE("ProCamera2Client::ProFrameProcessor: Error 
waiting for new " + "frames: %s (%d)", strerror(-res), res); + } + + return true; +} + +void ProFrameProcessor::processNewFrames(sp &client) { + status_t res; + ATRACE_CALL(); + CameraMetadata frame; + while ( (res = client->getCameraDevice()->getNextFrame(&frame)) == OK) { + camera_metadata_entry_t entry; + + entry = frame.find(ANDROID_REQUEST_FRAME_COUNT); + if (entry.count == 0) { + ALOGE("%s: Camera %d: Error reading frame number", + __FUNCTION__, client->getCameraId()); + break; + } + ATRACE_INT("cam2_frame", entry.data.i32[0]); + + res = processListeners(frame, client); + if (res != OK) break; + + if (!frame.isEmpty()) { + mLastFrame.acquire(frame); + } + } + if (res != NOT_ENOUGH_DATA) { + ALOGE("%s: Camera %d: Error getting next frame: %s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return; + } + + return; +} + +status_t ProFrameProcessor::processListeners(const CameraMetadata &frame, + sp &client) { + status_t res; + ATRACE_CALL(); + camera_metadata_ro_entry_t entry; + + entry = frame.find(ANDROID_REQUEST_ID); + if (entry.count == 0) { + ALOGE("%s: Camera %d: Error reading frame id", + __FUNCTION__, client->getCameraId()); + return BAD_VALUE; + } + int32_t frameId = entry.data.i32[0]; + + List > listeners; + { + Mutex::Autolock l(mInputMutex); + + List::iterator item = mRangeListeners.begin(); + while (item != mRangeListeners.end()) { + if (frameId >= item->minId && + frameId < item->maxId) { + sp listener = item->listener.promote(); + if (listener == 0) { + item = mRangeListeners.erase(item); + continue; + } else { + listeners.push_back(listener); + } + } + item++; + } + } + ALOGV("Got %d range listeners out of %d", listeners.size(), mRangeListeners.size()); + List >::iterator item = listeners.begin(); + for (; item != listeners.end(); item++) { + (*item)->onFrameAvailable(frameId, frame); + } + return OK; +} + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/camera2/ProFrameProcessor.h b/services/camera/libcameraservice/camera2/ProFrameProcessor.h new file mode 100644 index 0000000..e4094a6 --- /dev/null +++ b/services/camera/libcameraservice/camera2/ProFrameProcessor.h @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_PROFRAMEPROCESSOR_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_PROFRAMEPROCESSOR_H + +#include +#include +#include +#include +#include +#include + +struct camera_frame_metadata; + +namespace android { + +class ProCamera2Client; + +namespace camera2 { + +/* Output frame metadata processing thread. This thread waits for new + * frames from the device, and analyzes them as necessary. 
+ */ +class ProFrameProcessor: public Thread { + public: + ProFrameProcessor(wp client); + ~ProFrameProcessor(); + + struct FilteredListener: virtual public RefBase { + virtual void onFrameAvailable(int32_t frameId, + const CameraMetadata &frame) = 0; + }; + + // Register a listener for a range of IDs [minId, maxId). Multiple listeners + // can be listening to the same range + status_t registerListener(int32_t minId, int32_t maxId, wp listener); + status_t removeListener(int32_t minId, int32_t maxId, wp listener); + + void dump(int fd, const Vector& args); + private: + static const nsecs_t kWaitDuration = 10000000; // 10 ms + wp mClient; + + virtual bool threadLoop(); + + Mutex mInputMutex; + + struct RangeListener { + int32_t minId; + int32_t maxId; + wp listener; + }; + List mRangeListeners; + + void processNewFrames(sp &client); + + status_t processListeners(const CameraMetadata &frame, + sp &client); + + CameraMetadata mLastFrame; +}; + + +}; //namespace camera2 +}; //namespace android + +#endif -- cgit v1.1 From 7b33a74bbc514b99c16be7fff9a34e892bc19264 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 21 Feb 2013 13:49:26 -0800 Subject: ProCamera: Add getInfo for getting static metadata (and a test) Change-Id: I3b87dfbc621d98bf41f37a892a1012baf85b5166 --- camera/IProCameraUser.cpp | 30 ++++++++++++++++++++++ camera/ProCamera.cpp | 23 +++++++++++++---- camera/tests/ProCameraTests.cpp | 30 ++++++++++++++++++++++ include/camera/IProCameraUser.h | 5 ++++ include/camera/ProCamera.h | 2 +- .../camera/libcameraservice/ProCamera2Client.cpp | 14 ++++++++++ .../camera/libcameraservice/ProCamera2Client.h | 5 ++++ 7 files changed, 103 insertions(+), 6 deletions(-) diff --git a/camera/IProCameraUser.cpp b/camera/IProCameraUser.cpp index e60cfe5..c9d98aa 100644 --- a/camera/IProCameraUser.cpp +++ b/camera/IProCameraUser.cpp @@ -44,6 +44,7 @@ enum { CANCEL_STREAM, CREATE_STREAM, CREATE_DEFAULT_REQUEST, + GET_CAMERA_INFO, }; /** @@ -255,6 +256,17 @@ public: } + virtual status_t getCameraInfo(int cameraId, camera_metadata** info) + { + Parcel data, reply; + data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); + data.writeInt32(cameraId); + remote()->transact(GET_CAMERA_INFO, data, &reply); + readMetadata(reply, /*out*/info); + return reply.readInt32(); + } + + private: @@ -367,6 +379,24 @@ status_t BnProCameraUser::onTransact( writeMetadata(*reply, request); reply->writeInt32(ret); + free_camera_metadata(request); + + return NO_ERROR; + } break; + case GET_CAMERA_INFO: { + CHECK_INTERFACE(IProCameraUser, data, reply); + + int cameraId = data.readInt32(); + + camera_metadata_t* info = NULL; + status_t ret; + ret = getCameraInfo(cameraId, &info); + + writeMetadata(*reply, info); + reply->writeInt32(ret); + + free_camera_metadata(info); + return NO_ERROR; } break; default: diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 142c03b..c95c4e0 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -361,15 +361,28 @@ status_t ProCamera::createStreamCpu(int width, int height, int format, } int ProCamera::getNumberOfCameras() { - ALOGE("%s: not implemented yet", __FUNCTION__); - return 1; + const sp cs = getCameraService(); + + if (!cs.get()) { + return DEAD_OBJECT; + } + return cs->getNumberOfCameras(); } camera_metadata* ProCamera::getCameraInfo(int cameraId) { - ALOGE("%s: not implemented yet", __FUNCTION__); - ALOGV("%s: cameraId = %d", __FUNCTION__, cameraId); - return NULL; + + sp c = mCamera; + if (c == 0) return NULL; + + camera_metadata* ptr = NULL; + status_t 
status = c->getCameraInfo(cameraId, &ptr); + + if (status != OK) { + ALOGE("%s: Failed to get camera info, error = %d", __FUNCTION__, status); + } + + return ptr; } status_t ProCamera::createDefaultRequest(int templateId, diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index 021fbae..69b7f3c 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -324,10 +324,40 @@ protected: ASSERT_NE((void*)NULL, surface.get()); } + template + static bool FindItem(T needle, T* array, size_t count) { + for (int i = 0; i < count; ++i) { + if (array[i] == needle) { + return true; + } + } + return false; + } + }; sp ProCameraTest::mTestThread; +TEST_F(ProCameraTest, AvailableFormats) { + if (HasFatalFailure()) { + return; + } + + camera_metadata_t* info = mCamera->getCameraInfo(CAMERA_ID); + ASSERT_NE((void*)NULL, info); + + camera_metadata_entry_t entry; + uint32_t tag = static_cast(ANDROID_SCALER_AVAILABLE_FORMATS); + EXPECT_EQ(OK, find_camera_metadata_entry(info, tag, &entry)); + + EXPECT_TRUE(FindItem(HAL_PIXEL_FORMAT_YV12, + entry.data.i32, entry.count)); + EXPECT_TRUE(FindItem(HAL_PIXEL_FORMAT_YCrCb_420_SP, + entry.data.i32, entry.count)); + + free_camera_metadata(info); +} + // test around exclusiveTryLock (immediate locking) TEST_F(ProCameraTest, LockingImmediate) { diff --git a/include/camera/IProCameraUser.h b/include/camera/IProCameraUser.h index a141769..7bddb0c 100644 --- a/include/camera/IProCameraUser.h +++ b/include/camera/IProCameraUser.h @@ -75,6 +75,11 @@ public: camera_metadata** request) = 0; + // Get static camera metadata + virtual status_t getCameraInfo(int cameraId, + /*out*/ + camera_metadata** info) = 0; + }; // ---------------------------------------------------------------------------- diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index 7cd9138..11904f9 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -172,7 +172,7 @@ public: static int getNumberOfCameras(); // Get static camera metadata - static camera_metadata* getCameraInfo(int cameraId); + camera_metadata* getCameraInfo(int cameraId); sp remote(); diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp index 7611796..c264e2a 100644 --- a/services/camera/libcameraservice/ProCamera2Client.cpp +++ b/services/camera/libcameraservice/ProCamera2Client.cpp @@ -309,6 +309,20 @@ status_t ProCamera2Client::createDefaultRequest(int templateId, return res; } +status_t ProCamera2Client::getCameraInfo(int cameraId, + /*out*/ + camera_metadata** info) +{ + if (cameraId != mCameraId) { + return INVALID_OPERATION; + } + + CameraMetadata deviceInfo = mDevice->info(); + *info = deviceInfo.release(); + + return OK; +} + status_t ProCamera2Client::dump(int fd, const Vector& args) { String8 result; result.appendFormat("ProCamera2Client[%d] (%p) PID: %d, dump:\n", diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/ProCamera2Client.h index dfea1e1..cd0a2ae 100644 --- a/services/camera/libcameraservice/ProCamera2Client.h +++ b/services/camera/libcameraservice/ProCamera2Client.h @@ -65,6 +65,11 @@ public: /*out*/ camera_metadata** request); + // Get the static metadata for the camera + // -- Caller owns the newly allocated metadata + virtual status_t getCameraInfo(int cameraId, + /*out*/ + camera_metadata** info); /** * Interface used by CameraService -- cgit v1.1 From dcb07d51e307019731147751946774f45321edfb Mon Sep 17 00:00:00 
2001 From: Igor Murashkin Date: Thu, 21 Feb 2013 14:18:10 -0800 Subject: Camera: ProCameraTests: clean up a bit, still needs more cleanup Change-Id: Ia7e18c81002c8d98176f21553cf13cbd13cfdf5d --- camera/tests/ProCameraTests.cpp | 195 ++++++++++++++++++++++++---------------- 1 file changed, 116 insertions(+), 79 deletions(-) diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index 69b7f3c..f1dd48c 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -31,6 +31,7 @@ #include #include // for CAMERA2_TEMPLATE_PREVIEW only +#include namespace android { namespace camera2 { @@ -46,6 +47,11 @@ namespace client { #define TEST_FORMAT_MAIN HAL_PIXEL_FORMAT_Y8 #define TEST_FORMAT_DEPTH HAL_PIXEL_FORMAT_Y16 +// defaults for display "test" +#define TEST_DISPLAY_FORMAT HAL_PIXEL_FORMAT_Y16 +#define TEST_DISPLAY_WIDTH 1280 +#define TEST_DISPLAY_HEIGHT 960 + #define TEST_CPU_FRAME_COUNT 2 #define TEST_CPU_HEAP_COUNT 5 @@ -233,6 +239,42 @@ class ProCameraTest : public ::testing::Test { public: ProCameraTest() { + char* displaySecsEnv = getenv("TEST_DISPLAY_SECS"); + if (displaySecsEnv != NULL) { + mDisplaySecs = atoi(displaySecsEnv); + if (mDisplaySecs < 0) { + mDisplaySecs = 0; + } + } else { + mDisplaySecs = 0; + } + + char* displayFmtEnv = getenv("TEST_DISPLAY_FORMAT"); + if (displayFmtEnv != NULL) { + mDisplayFmt = FormatFromString(displayFmtEnv); + } else { + mDisplayFmt = TEST_DISPLAY_FORMAT; + } + + char* displayWidthEnv = getenv("TEST_DISPLAY_WIDTH"); + if (displayWidthEnv != NULL) { + mDisplayW = atoi(displayWidthEnv); + if (mDisplayW < 0) { + mDisplayW = 0; + } + } else { + mDisplayW = TEST_DISPLAY_WIDTH; + } + + char* displayHeightEnv = getenv("TEST_DISPLAY_HEIGHT"); + if (displayHeightEnv != NULL) { + mDisplayH = atoi(displayHeightEnv); + if (mDisplayH < 0) { + mDisplayH = 0; + } + } else { + mDisplayH = TEST_DISPLAY_HEIGHT; + } } static void SetUpTestCase() { @@ -261,6 +303,10 @@ protected: static sp mTestThread; int mDisplaySecs; + int mDisplayFmt; + int mDisplayW; + int mDisplayH; + sp mComposerClient; sp mSurfaceControl; @@ -283,7 +329,7 @@ protected: getSurfaceWidth(), getSurfaceHeight(), PIXEL_FORMAT_RGB_888, 0); - mSurfaceControl->setPosition(640, 0); + mSurfaceControl->setPosition(0, 0); ASSERT_TRUE(mSurfaceControl != NULL); ASSERT_TRUE(mSurfaceControl->isValid()); @@ -325,7 +371,11 @@ protected: } template - static bool FindItem(T needle, T* array, size_t count) { + static bool ExistsItem(T needle, T* array, size_t count) { + if (!array) { + return false; + } + for (int i = 0; i < count; ++i) { if (array[i] == needle) { return true; @@ -334,6 +384,53 @@ protected: return false; } + + static int FormatFromString(const char* str) { + std::string s(str); + +#define CMP_STR(x, y) \ + if (s == #x) return HAL_PIXEL_FORMAT_ ## y; +#define CMP_STR_SAME(x) CMP_STR(x, x) + + CMP_STR_SAME( Y16); + CMP_STR_SAME( Y8); + CMP_STR_SAME( YV12); + CMP_STR(NV16, YCbCr_422_SP); + CMP_STR(NV21, YCrCb_420_SP); + CMP_STR(YUY2, YCbCr_422_I); + CMP_STR(RAW, RAW_SENSOR); + CMP_STR(RGBA, RGBA_8888); + + std::cerr << "Unknown format string " << str << std::endl; + return -1; + + } + + /** + * Creating a streaming request for these output streams from a template, + * and submit it + */ + void createSubmitRequestForStreams(uint8_t* streamIds, size_t count) { + + ASSERT_NE((void*)NULL, streamIds); + ASSERT_LT(0, count); + + camera_metadata_t *requestTmp = NULL; + EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, + /*out*/&requestTmp)); + 
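+ // createDefaultRequest() returns a raw camera_metadata_t*; the CameraMetadata
+ // wrapper below takes ownership of it, and release()/acquire() hand the raw
+ // pointer to submitRequest() and then reclaim it so it is freed when the
+ // wrapper goes out of scope.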
ASSERT_NE((void*)NULL, requestTmp); + CameraMetadata request(requestTmp); + + // set the output streams. default is empty + + uint32_t tag = static_cast(ANDROID_REQUEST_OUTPUT_STREAMS); + request.update(tag, streamIds, count); + + requestTmp = request.release(); + EXPECT_OK(mCamera->submitRequest(requestTmp, /*streaming*/true)); + request.acquire(requestTmp); + } + }; sp ProCameraTest::mTestThread; @@ -343,19 +440,17 @@ TEST_F(ProCameraTest, AvailableFormats) { return; } - camera_metadata_t* info = mCamera->getCameraInfo(CAMERA_ID); - ASSERT_NE((void*)NULL, info); + CameraMetadata staticInfo = mCamera->getCameraInfo(CAMERA_ID); + ASSERT_FALSE(staticInfo.isEmpty()); - camera_metadata_entry_t entry; uint32_t tag = static_cast(ANDROID_SCALER_AVAILABLE_FORMATS); - EXPECT_EQ(OK, find_camera_metadata_entry(info, tag, &entry)); + EXPECT_TRUE(staticInfo.exists(tag)); + camera_metadata_entry_t entry = staticInfo.find(tag); - EXPECT_TRUE(FindItem(HAL_PIXEL_FORMAT_YV12, + EXPECT_TRUE(ExistsItem(HAL_PIXEL_FORMAT_YV12, entry.data.i32, entry.count)); - EXPECT_TRUE(FindItem(HAL_PIXEL_FORMAT_YCrCb_420_SP, + EXPECT_TRUE(ExistsItem(HAL_PIXEL_FORMAT_YCrCb_420_SP, entry.data.i32, entry.count)); - - free_camera_metadata(info); } // test around exclusiveTryLock (immediate locking) @@ -421,78 +516,30 @@ TEST_F(ProCameraTest, DISABLED_StreamingImageSingle) { if (HasFatalFailure()) { return; } - char* displaySecsEnv = getenv("TEST_DISPLAY_SECS"); - if (displaySecsEnv != NULL) { - mDisplaySecs = atoi(displaySecsEnv); - if (mDisplaySecs < 0) { - mDisplaySecs = 0; - } - } else { - mDisplaySecs = 0; - } - sp depthSurface; + sp surface; if (mDisplaySecs > 0) { - createDepthOnScreenSurface(/*out*/depthSurface); + createOnScreenSurface(/*out*/surface); + } + else { + dout << "Skipping, will not render to screen" << std::endl; + return; } int depthStreamId = -1; - EXPECT_OK(mCamera->createStream(/*width*/320, /*height*/240, - TEST_FORMAT_DEPTH, depthSurface, &depthStreamId)); + EXPECT_OK(mCamera->createStream(mDisplayW, mDisplayH, mDisplayFmt, surface, + &depthStreamId)); EXPECT_NE(-1, depthStreamId); EXPECT_OK(mCamera->exclusiveTryLock()); - /* iterate in a loop submitting requests every frame. - * what kind of requests doesnt really matter, just whatever. - */ - - // it would probably be better to use CameraMetadata from camera service. - camera_metadata_t *request = NULL; - EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, - /*out*/&request)); - EXPECT_NE((void*)NULL, request); - - /* FIXME: dont need this later, at which point the above should become an - ASSERT_NE*/ - if(request == NULL) request = allocate_camera_metadata(10, 100); - // set the output streams to just this stream ID - - // wow what a verbose API. 
- // i would give a loaf of bread for - // metadata->updateOrInsert(keys.request.output.streams, streamId); - uint8_t allStreams[] = { depthStreamId }; - size_t streamCount = sizeof(allStreams) / sizeof(allStreams[0]); - - camera_metadata_entry_t entry; - uint32_t tag = static_cast(ANDROID_REQUEST_OUTPUT_STREAMS); - int find = find_camera_metadata_entry(request, tag, &entry); - if (find == -ENOENT) { - if (add_camera_metadata_entry(request, tag, &allStreams, - /*data_count*/streamCount) != OK) { - camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000); - ASSERT_OK(append_camera_metadata(tmp, request)); - free_camera_metadata(request); - request = tmp; - - ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams, - /*data_count*/streamCount)); - } - } else { - ASSERT_OK(update_camera_metadata_entry(request, entry.index, - &allStreams, /*data_count*/streamCount, &entry)); - } - - EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true)); + uint8_t streams[] = { depthStreamId }; + ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1)); dout << "will sleep now for " << mDisplaySecs << std::endl; sleep(mDisplaySecs); - free_camera_metadata(request); - - for (int i = 0; i < streamCount; ++i) { - EXPECT_OK(mCamera->deleteStream(allStreams[i])); - } + EXPECT_OK(mCamera->deleteStream(depthStreamId)); EXPECT_OK(mCamera->exclusiveUnlock()); } @@ -501,16 +548,6 @@ TEST_F(ProCameraTest, DISABLED_StreamingImageDual) { if (HasFatalFailure()) { return; } - char* displaySecsEnv = getenv("TEST_DISPLAY_SECS"); - if (displaySecsEnv != NULL) { - mDisplaySecs = atoi(displaySecsEnv); - if (mDisplaySecs < 0) { - mDisplaySecs = 0; - } - } else { - mDisplaySecs = 0; - } - sp surface; sp depthSurface; if (mDisplaySecs > 0) { -- cgit v1.1 From a140a6efea1db7837984b3578755cfa4eaa8d92d Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 21 Feb 2013 14:45:03 -0800 Subject: ProCamera: add waitForFrameBuffer/waitForFrameResult blocking calls Change-Id: I851d41aeecaa15245d5b9d622132e8706d6e292c --- camera/ProCamera.cpp | 110 ++++++++++++++++++++++--- camera/tests/ProCameraTests.cpp | 172 +++++++++++++++++++++++++++++++++++++--- include/camera/ProCamera.h | 45 ++++++++++- 3 files changed, 304 insertions(+), 23 deletions(-) diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index c95c4e0..d4a9556 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -86,12 +86,13 @@ sp ProCamera::connect(int cameraId) void ProCamera::disconnect() { - ALOGV("disconnect"); + ALOGV("%s: disconnect", __FUNCTION__); if (mCamera != 0) { mCamera->disconnect(); mCamera->asBinder()->unlinkToDeath(this); mCamera = 0; } + ALOGV("%s: disconnect (done)", __FUNCTION__); } ProCamera::ProCamera() @@ -208,6 +209,19 @@ void ProCamera::onResultReceived(int32_t frameId, camera_metadata* result) { Mutex::Autolock _l(mLock); listener = mListener; } + + CameraMetadata tmp(result); + + // Unblock waitForFrame(id) callers + { + Mutex::Autolock al(mWaitMutex); + mMetadataReady = true; + mLatestMetadata = tmp; + mWaitCondition.broadcast(); + } + + result = tmp.release(); + if (listener != NULL) { listener->onResultReceived(frameId, result); } else { @@ -323,11 +337,14 @@ status_t ProCamera::createStream(int width, int height, int format, status_t ProCamera::createStreamCpu(int width, int height, int format, int heapCount, /*out*/ + sp* cpuConsumer, int* streamId) { ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height, format); + *cpuConsumer = NULL; + sp c = mCamera; if (c == 0) return 
NO_INIT; @@ -357,6 +374,8 @@ status_t ProCamera::createStreamCpu(int width, int height, int format, cc->setFrameAvailableListener(frameAvailableListener); + *cpuConsumer = cc; + return s; } @@ -399,26 +418,91 @@ void ProCamera::onFrameAvailable(int streamId) { ALOGV("%s: streamId = %d", __FUNCTION__, streamId); sp listener = mListener; - if (listener.get() != NULL) { - StreamInfo& stream = getStreamInfo(streamId); + StreamInfo& stream = getStreamInfo(streamId); - CpuConsumer::LockedBuffer buf; + CpuConsumer::LockedBuffer buf; - status_t stat = stream.cpuConsumer->lockNextBuffer(&buf); - if (stat != OK) { - ALOGE("%s: Failed to lock buffer, error code = %d", __FUNCTION__, - stat); + if (listener.get() != NULL) { + if (listener->useOnFrameAvailable()) { + listener->onFrameAvailable(streamId, stream.cpuConsumer); return; } + } + + // Unblock waitForFrame(id) callers + { + Mutex::Autolock al(mWaitMutex); + getStreamInfo(streamId).frameReady = true; + mWaitCondition.broadcast(); + } +} + +status_t ProCamera::waitForFrameBuffer(int streamId) { + status_t stat = BAD_VALUE; + Mutex::Autolock al(mWaitMutex); - listener->onBufferReceived(streamId, buf); - stat = stream.cpuConsumer->unlockBuffer(buf); + StreamInfo& si = getStreamInfo(streamId); - if (stat != OK) { - ALOGE("%s: Failed to unlock buffer, error code = %d", __FUNCTION__, - stat); + if (si.frameReady) { + si.frameReady = false; + return OK; + } else { + while (true) { + stat = mWaitCondition.waitRelative(mWaitMutex, + mWaitTimeout); + if (stat != OK) { + ALOGE("%s: Error while waiting for frame buffer: %d", + __FUNCTION__, stat); + return stat; + } + + if (si.frameReady) { + si.frameReady = false; + return OK; + } + // else it was some other stream that got unblocked } } + + return stat; +} + +status_t ProCamera::waitForFrameMetadata() { + status_t stat = BAD_VALUE; + Mutex::Autolock al(mWaitMutex); + + if (mMetadataReady) { + return OK; + } else { + while (true) { + stat = mWaitCondition.waitRelative(mWaitMutex, + mWaitTimeout); + + if (stat != OK) { + ALOGE("%s: Error while waiting for metadata: %d", + __FUNCTION__, stat); + return stat; + } + + if (mMetadataReady) { + mMetadataReady = false; + return OK; + } + // else it was some other stream or metadata + } + } + + return stat; +} + +CameraMetadata ProCamera::consumeFrameMetadata() { + Mutex::Autolock al(mWaitMutex); + + // Destructive: Subsequent calls return empty metadatas + CameraMetadata tmp = mLatestMetadata; + mLatestMetadata.release(); + + return tmp; } ProCamera::StreamInfo& ProCamera::getStreamInfo(int streamId) { diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index f1dd48c..33c9179 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -207,7 +207,8 @@ protected: const CpuConsumer::LockedBuffer& buf) { dout << "Buffer received on streamId = " << streamId << - ", dataPtr = " << (void*)buf.data << std::endl; + ", dataPtr = " << (void*)buf.data << + ", timestamp = " << buf.timestamp << std::endl; QueueEvent(BUFFER_RECEIVED); @@ -376,7 +377,7 @@ protected: return false; } - for (int i = 0; i < count; ++i) { + for (size_t i = 0; i < count; ++i) { if (array[i] == needle) { return true; } @@ -410,10 +411,10 @@ protected: * Creating a streaming request for these output streams from a template, * and submit it */ - void createSubmitRequestForStreams(uint8_t* streamIds, size_t count) { + void createSubmitRequestForStreams(uint8_t* streamIds, size_t count, int requestCount=-1) { ASSERT_NE((void*)NULL, streamIds); - 
ASSERT_LT(0, count); + ASSERT_LT(0u, count); camera_metadata_t *requestTmp = NULL; EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, @@ -427,7 +428,15 @@ protected: request.update(tag, streamIds, count); requestTmp = request.release(); - EXPECT_OK(mCamera->submitRequest(requestTmp, /*streaming*/true)); + + if (requestCount < 0) { + EXPECT_OK(mCamera->submitRequest(requestTmp, /*streaming*/true)); + } else { + for (int i = 0; i < requestCount; ++i) { + EXPECT_OK(mCamera->submitRequest(requestTmp, + /*streaming*/false)); + } + } request.acquire(requestTmp); } @@ -628,8 +637,9 @@ TEST_F(ProCameraTest, CpuConsumerSingle) { mListener->SetEventMask(ProEvent_Mask(BUFFER_RECEIVED)); int streamId = -1; + sp consumer; EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240, - TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &streamId)); + TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); EXPECT_NE(-1, streamId); EXPECT_OK(mCamera->exclusiveTryLock()); @@ -693,13 +703,14 @@ TEST_F(ProCameraTest, CpuConsumerDual) { mListener->SetEventMask(ProEvent_Mask(BUFFER_RECEIVED)); int streamId = -1; + sp consumer; EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, - TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &streamId)); + TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); EXPECT_NE(-1, streamId); int depthStreamId = -1; EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240, - TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &depthStreamId)); + TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &consumer, &depthStreamId)); EXPECT_NE(-1, depthStreamId); EXPECT_OK(mCamera->exclusiveTryLock()); @@ -772,8 +783,9 @@ TEST_F(ProCameraTest, ResultReceiver) { // need to filter out events at read time int streamId = -1; + sp consumer; EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, - TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &streamId)); + TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); EXPECT_NE(-1, streamId); EXPECT_OK(mCamera->exclusiveTryLock()); @@ -828,6 +840,148 @@ TEST_F(ProCameraTest, ResultReceiver) { EXPECT_OK(mCamera->exclusiveUnlock()); } +TEST_F(ProCameraTest, WaitForResult) { + if (HasFatalFailure()) { + return; + } + + int streamId = -1; + sp consumer; + EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, + TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); + EXPECT_NE(-1, streamId); + + EXPECT_OK(mCamera->exclusiveTryLock()); + + uint8_t streams[] = { streamId }; + ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1)); + + // Consume a couple of results + for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { + EXPECT_OK(mCamera->waitForFrameMetadata()); + CameraMetadata meta = mCamera->consumeFrameMetadata(); + EXPECT_FALSE(meta.isEmpty()); + } + + // Done: clean up + consumer->abandon(); // since we didn't consume any of the buffers + EXPECT_OK(mCamera->deleteStream(streamId)); + EXPECT_OK(mCamera->exclusiveUnlock()); +} + +TEST_F(ProCameraTest, WaitForSingleStreamBuffer) { + if (HasFatalFailure()) { + return; + } + + int streamId = -1; + sp consumer; + EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, + TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); + EXPECT_NE(-1, streamId); + + EXPECT_OK(mCamera->exclusiveTryLock()); + + uint8_t streams[] = { streamId }; + ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1, + /*requests*/TEST_CPU_FRAME_COUNT)); + + // Consume a couple of results + for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { + 
EXPECT_OK(mCamera->waitForFrameBuffer(streamId)); + + CpuConsumer::LockedBuffer buf; + EXPECT_OK(consumer->lockNextBuffer(&buf)); + + dout << "Buffer synchronously received on streamId = " << streamId << + ", dataPtr = " << (void*)buf.data << + ", timestamp = " << buf.timestamp << std::endl; + + EXPECT_OK(consumer->unlockBuffer(buf)); + } + + // Done: clean up + EXPECT_OK(mCamera->deleteStream(streamId)); + EXPECT_OK(mCamera->exclusiveUnlock()); +} + +TEST_F(ProCameraTest, WaitForDualStreamBuffer) { + if (HasFatalFailure()) { + return; + } + + const int REQUEST_COUNT = TEST_CPU_FRAME_COUNT * 10; + + // 15 fps + int streamId = -1; + sp consumer; + EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, + TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); + EXPECT_NE(-1, streamId); + + // 30 fps + int depthStreamId = -1; + sp depthConsumer; + EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240, + TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &depthConsumer, &depthStreamId)); + EXPECT_NE(-1, depthStreamId); + + EXPECT_OK(mCamera->exclusiveTryLock()); + + uint8_t streams[] = { streamId, depthStreamId }; + ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/2, + /*requests*/REQUEST_COUNT)); + + // Consume two frames simultaneously. Unsynchronized by timestamps. + for (int i = 0; i < REQUEST_COUNT; ++i) { + + // Get the metadata + EXPECT_OK(mCamera->waitForFrameMetadata()); + CameraMetadata meta = mCamera->consumeFrameMetadata(); + EXPECT_FALSE(meta.isEmpty()); + + // Get the buffers + + EXPECT_OK(mCamera->waitForFrameBuffer(depthStreamId)); + + /** + * Guaranteed to be able to consume the depth frame, + * since we waited on it. + */ + CpuConsumer::LockedBuffer depthBuffer; + EXPECT_OK(depthConsumer->lockNextBuffer(&depthBuffer)); + + dout << "Depth Buffer synchronously received on streamId = " << + streamId << + ", dataPtr = " << (void*)depthBuffer.data << + ", timestamp = " << depthBuffer.timestamp << std::endl; + + EXPECT_OK(depthConsumer->unlockBuffer(depthBuffer)); + + + /** Consume Greyscale frames if there are any. + * There may not be since it runs at half FPS */ + CpuConsumer::LockedBuffer greyBuffer; + while (consumer->lockNextBuffer(&greyBuffer) == OK) { + + dout << "GRAY Buffer synchronously received on streamId = " << + streamId << + ", dataPtr = " << (void*)greyBuffer.data << + ", timestamp = " << greyBuffer.timestamp << std::endl; + + EXPECT_OK(consumer->unlockBuffer(greyBuffer)); + } + } + + // Done: clean up + EXPECT_OK(mCamera->deleteStream(streamId)); + EXPECT_OK(mCamera->exclusiveUnlock()); +} + + + + + } } } diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index 11904f9..f813c1c 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -24,8 +24,12 @@ #include #include #include +#include #include +#include +#include + struct camera_metadata; namespace android { @@ -62,6 +66,20 @@ public: * free_camera_metadata. */ virtual void onResultReceived(int32_t frameId, camera_metadata* result) = 0; + + + // A new frame buffer has been received for this stream. 
+ // -- This callback only fires for createStreamCpu streams + // -- Use buf.timestamp to correlate with metadata's android.sensor.timestamp + // -- The buffer should be accessed with CpuConsumer::lockNextBuffer + // and CpuConsumer::unlockBuffer + virtual void onFrameAvailable(int streamId, + const sp& cpuConsumer) { + } + + virtual bool useOnFrameAvailable() { + return false; + } }; class ProCamera : public BnProCameraCallbacks, public IBinder::DeathRecipient @@ -161,6 +179,7 @@ public: status_t createStreamCpu(int width, int height, int format, int heapCount, /*out*/ + sp* cpuConsumer, int* streamId); // Create a request object from a template. @@ -174,6 +193,24 @@ public: // Get static camera metadata camera_metadata* getCameraInfo(int cameraId); + // Blocks until a frame is available (CPU streams only) + // - Obtain the frame data by calling CpuConsumer::lockNextBuffer + // - Release the frame data after use with CpuConsumer::unlockBuffer + // Error codes: + // -ETIMEDOUT if it took too long to get a frame + status_t waitForFrameBuffer(int streamId); + + // Blocks until a metadata result is available + // - Obtain the metadata by calling consumeFrameMetadata() + // Error codes: + // -ETIMEDOUT if it took too long to get a frame + status_t waitForFrameMetadata(); + + // Get the latest metadata. This is destructive. + // - Calling this repeatedly will produce empty metadata objects. + // - Use waitForFrameMetadata to sync until new data is available. + CameraMetadata consumeFrameMetadata(); + sp remote(); protected: @@ -249,6 +286,7 @@ private: StreamInfo(int streamId) { this->streamID = streamId; cpuStream = false; + frameReady = false; } StreamInfo() { @@ -261,10 +299,15 @@ private: sp cpuConsumer; sp frameAvailableListener; sp stc; + bool frameReady; }; + Condition mWaitCondition; + Mutex mWaitMutex; + static const nsecs_t mWaitTimeout = 1000000000; // 1sec KeyedVector mStreams; - + bool mMetadataReady; + CameraMetadata mLatestMetadata; void onFrameAvailable(int streamId); -- cgit v1.1 From 4bc4a3845e456fd464556d79d20650a107e873e5 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 20 Feb 2013 13:36:17 -0800 Subject: ProCamera: Fix waitForFrameBuffer not handling multiple outstanding frames If the CpuConsumer triggered multiple onFrameAvailable callbacks in between a single waitForFrameBuffer call, the old code would only handle 1 callback. This meant on two subsequent waitForFrameBuffer calls the second would always timeout when two buffers were already available to be unlocked. 
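As an illustration of the new contract, a minimal consumer loop could look like the sketch below (modeled on the WaitForSingleStreamBufferAndDropFrames test added here; consumer and streamId are assumed to come from an earlier createStreamCpu() call):

    int numFrames = mCamera->waitForFrameBuffer(streamId);
    if (numFrames > 0) {
        // Optionally discard all but the newest pending buffer.
        mCamera->dropFrameBuffer(streamId, numFrames - 1);

        CpuConsumer::LockedBuffer buf;
        if (consumer->lockNextBuffer(&buf) == OK) {
            // ... use buf.data / buf.timestamp ...
            consumer->unlockBuffer(buf);
        }
    } else {
        // A negative return value is an error code (for example -ETIMEDOUT).
    }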
Bug: 8238112 Change-Id: Ibefca35005ac5c408e5ada97ec4a4344a9e3e497 --- camera/ProCamera.cpp | 41 +++++++++++++++++++++++------ camera/tests/ProCameraTests.cpp | 57 +++++++++++++++++++++++++++++++++++++++-- include/camera/ProCamera.h | 17 +++++++++--- 3 files changed, 102 insertions(+), 13 deletions(-) diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index d4a9556..7c66d62 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -432,20 +432,21 @@ void ProCamera::onFrameAvailable(int streamId) { // Unblock waitForFrame(id) callers { Mutex::Autolock al(mWaitMutex); - getStreamInfo(streamId).frameReady = true; + getStreamInfo(streamId).frameReady++; mWaitCondition.broadcast(); } } -status_t ProCamera::waitForFrameBuffer(int streamId) { +int ProCamera::waitForFrameBuffer(int streamId) { status_t stat = BAD_VALUE; Mutex::Autolock al(mWaitMutex); StreamInfo& si = getStreamInfo(streamId); - if (si.frameReady) { - si.frameReady = false; - return OK; + if (si.frameReady > 0) { + int numFrames = si.frameReady; + si.frameReady = 0; + return numFrames; } else { while (true) { stat = mWaitCondition.waitRelative(mWaitMutex, @@ -456,9 +457,10 @@ status_t ProCamera::waitForFrameBuffer(int streamId) { return stat; } - if (si.frameReady) { - si.frameReady = false; - return OK; + if (si.frameReady > 0) { + int numFrames = si.frameReady; + si.frameReady = 0; + return numFrames; } // else it was some other stream that got unblocked } @@ -467,6 +469,29 @@ status_t ProCamera::waitForFrameBuffer(int streamId) { return stat; } +int ProCamera::dropFrameBuffer(int streamId, int count) { + StreamInfo& si = getStreamInfo(streamId); + + if (!si.cpuStream) { + return BAD_VALUE; + } else if (count < 0) { + return BAD_VALUE; + } + + int numDropped = 0; + for (int i = 0; i < count; ++i) { + CpuConsumer::LockedBuffer buffer; + if (si.cpuConsumer->lockNextBuffer(&buffer) != OK) { + break; + } + + si.cpuConsumer->unlockBuffer(buffer); + numDropped++; + } + + return numDropped; +} + status_t ProCamera::waitForFrameMetadata() { status_t stat = BAD_VALUE; Mutex::Autolock al(mWaitMutex); diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index 33c9179..f93e5cd 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -55,6 +55,8 @@ namespace client { #define TEST_CPU_FRAME_COUNT 2 #define TEST_CPU_HEAP_COUNT 5 +#define TEST_FRAME_PROCESSING_DELAY_US 200000 // 200 ms + #if TEST_DEBUGGING #define dout std::cerr #else @@ -888,7 +890,7 @@ TEST_F(ProCameraTest, WaitForSingleStreamBuffer) { // Consume a couple of results for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { - EXPECT_OK(mCamera->waitForFrameBuffer(streamId)); + EXPECT_EQ(1, mCamera->waitForFrameBuffer(streamId)); CpuConsumer::LockedBuffer buf; EXPECT_OK(consumer->lockNextBuffer(&buf)); @@ -942,7 +944,7 @@ TEST_F(ProCameraTest, WaitForDualStreamBuffer) { // Get the buffers - EXPECT_OK(mCamera->waitForFrameBuffer(depthStreamId)); + EXPECT_EQ(1, mCamera->waitForFrameBuffer(depthStreamId)); /** * Guaranteed to be able to consume the depth frame, @@ -978,7 +980,58 @@ TEST_F(ProCameraTest, WaitForDualStreamBuffer) { EXPECT_OK(mCamera->exclusiveUnlock()); } +TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFrames) { + if (HasFatalFailure()) { + return; + } + + const int NUM_REQUESTS = 20 * TEST_CPU_FRAME_COUNT; + + int streamId = -1; + sp consumer; + EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, + TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); + EXPECT_NE(-1, streamId); + + 
EXPECT_OK(mCamera->exclusiveTryLock()); + + uint8_t streams[] = { streamId }; + ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1, + /*requests*/NUM_REQUESTS)); + + // Consume a couple of results + for (int i = 0; i < NUM_REQUESTS; ++i) { + // Process at 10fps, stream is at 15fps. + // This means we will definitely fill up the buffer queue with + // extra buffers and need to drop them. + usleep(TEST_FRAME_PROCESSING_DELAY_US); + + int numFrames; + EXPECT_TRUE((numFrames = mCamera->waitForFrameBuffer(streamId)) > 0); + + // Drop all but the newest framebuffer + EXPECT_EQ(numFrames-1, mCamera->dropFrameBuffer(streamId, numFrames-1)); + + dout << "Dropped " << (numFrames - 1) << " frames" << std::endl; + + // Skip the counter ahead, don't try to consume these frames again + i += numFrames-1; + + // "Consume" the buffer + CpuConsumer::LockedBuffer buf; + EXPECT_OK(consumer->lockNextBuffer(&buf)); + + dout << "Buffer synchronously received on streamId = " << streamId << + ", dataPtr = " << (void*)buf.data << + ", timestamp = " << buf.timestamp << std::endl; + + EXPECT_OK(consumer->unlockBuffer(buf)); + } + // Done: clean up + EXPECT_OK(mCamera->deleteStream(streamId)); + EXPECT_OK(mCamera->exclusiveUnlock()); +} diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index f813c1c..cd2772c 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -196,9 +196,12 @@ public: // Blocks until a frame is available (CPU streams only) // - Obtain the frame data by calling CpuConsumer::lockNextBuffer // - Release the frame data after use with CpuConsumer::unlockBuffer + // Return value: + // - >0 - number of frames available to be locked + // - <0 - error (refer to error codes) // Error codes: // -ETIMEDOUT if it took too long to get a frame - status_t waitForFrameBuffer(int streamId); + int waitForFrameBuffer(int streamId); // Blocks until a metadata result is available // - Obtain the metadata by calling consumeFrameMetadata() @@ -211,6 +214,14 @@ public: // - Use waitForFrameMetadata to sync until new data is available. CameraMetadata consumeFrameMetadata(); + // Convenience method to drop frame buffers (CPU streams only) + // Return values: + // >=0 - number of frames dropped (up to count) + // <0 - error code + // Error codes: + // BAD_VALUE - invalid streamId or count passed + int dropFrameBuffer(int streamId, int count); + sp remote(); protected: @@ -286,7 +297,7 @@ private: StreamInfo(int streamId) { this->streamID = streamId; cpuStream = false; - frameReady = false; + frameReady = 0; } StreamInfo() { @@ -299,7 +310,7 @@ private: sp cpuConsumer; sp frameAvailableListener; sp stc; - bool frameReady; + int frameReady; }; Condition mWaitCondition; -- cgit v1.1 From da6ef1320d0161b1640dc84d7a9c5a25860c3619 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 10 Jan 2013 12:31:01 -0800 Subject: Update tee sink Implement rotation to reduce long-term storage use. Implement optional per-track tee. Dynamically enable at runtime based on property, instead of at compile-time. Dynamic frame count not yet implemented. 
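The tees are only created on ro.debuggable builds, and the af.tee property selects which ones: 1 enables the input tee, 2 the output tee, 4 the per-track tee (so 7 enables all three). As a rough sizing sketch of the compile-time defaults declared in AudioFlinger.h, assuming 44.1 kHz stereo 16-bit PCM as in the header comment (kFrames and kBytes are local names used only for this illustration):

    static const size_t kFrames = 0x200000;                      // 2,097,152 frames
    static const size_t kBytes  = kFrames * 2 * sizeof(int16_t); // 8 MiB per pipe
    // kFrames / 44100 is roughly 47.5 seconds of audio for the input/output tees;
    // the per-track default of 0x1000 frames (4096) is only about 93 ms per track.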
Bug: 8223560 Change-Id: I3706443c6ec0cb0c6656dc288715a02ad5fea63a --- services/audioflinger/Android.mk | 8 --- services/audioflinger/AudioFlinger.cpp | 111 ++++++++++++++++++++++++++++++--- services/audioflinger/AudioFlinger.h | 17 +++++ services/audioflinger/Threads.cpp | 26 ++++---- services/audioflinger/TrackBase.h | 3 + services/audioflinger/Tracks.cpp | 31 ++++++++- 6 files changed, 164 insertions(+), 32 deletions(-) diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index 6d42143..0855db6 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -66,14 +66,6 @@ LOCAL_CFLAGS += -DSTATE_QUEUE_INSTANTIATIONS='"StateQueueInstantiations.cpp"' LOCAL_CFLAGS += -UFAST_TRACKS_AT_NON_NATIVE_SAMPLE_RATE -# uncomment for dumpsys to write most recent audio output to .wav file -# 47.5 seconds at 44.1 kHz, 8 megabytes -# LOCAL_CFLAGS += -DTEE_SINK_FRAMES=0x200000 - -# uncomment for dumpsys to write most recent audio input to .wav file -# 47.5 seconds at 44.1 kHz, 8 megabytes -# LOCAL_CFLAGS += -DTEE_SINK_INPUT_FRAMES=0x200000 - # uncomment to enable the audio watchdog # LOCAL_SRC_FILES += AudioWatchdog.cpp # LOCAL_CFLAGS += -DAUDIO_WATCHDOG diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 47c2772..e0ab8cd 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -19,6 +19,7 @@ #define LOG_TAG "AudioFlinger" //#define LOG_NDEBUG 0 +#include #include #include #include @@ -61,6 +62,9 @@ #include +#include +#include + // ---------------------------------------------------------------------------- // Note: the following macro is used for extremely verbose logging message. In @@ -86,6 +90,14 @@ nsecs_t AudioFlinger::mStandbyTimeInNsecs = kDefaultStandbyTimeInNsecs; uint32_t AudioFlinger::mScreenState; +bool AudioFlinger::mTeeSinkInputEnabled = false; +bool AudioFlinger::mTeeSinkOutputEnabled = false; +bool AudioFlinger::mTeeSinkTrackEnabled = false; + +size_t AudioFlinger::mTeeSinkInputFrames = kTeeSinkInputFramesDefault; +size_t AudioFlinger::mTeeSinkOutputFrames = kTeeSinkOutputFramesDefault; +size_t AudioFlinger::mTeeSinkTrackFrames = kTeeSinkTrackFramesDefault; + // ---------------------------------------------------------------------------- static int load_audio_interface(const char *if_name, audio_hw_device_t **dev) @@ -134,6 +146,19 @@ AudioFlinger::AudioFlinger() if (doLog) { mLogMemoryDealer = new MemoryDealer(kLogMemorySize, "LogWriters"); } + (void) property_get("ro.debuggable", value, "0"); + int debuggable = atoi(value); + int teeEnabled = 0; + if (debuggable) { + (void) property_get("af.tee", value, "0"); + teeEnabled = atoi(value); + } + if (teeEnabled & 1) + mTeeSinkInputEnabled = true; + if (teeEnabled & 2) + mTeeSinkOutputEnabled = true; + if (teeEnabled & 4) + mTeeSinkTrackEnabled = true; } void AudioFlinger::onFirstRef() @@ -1602,7 +1627,6 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, // Try to re-use most recently used Pipe to archive a copy of input for dumpsys, // or (re-)create if current Pipe is idle and does not match the new format sp teeSink; -#ifdef TEE_SINK_INPUT_FRAMES enum { TEE_SINK_NO, // don't copy input TEE_SINK_NEW, // copy input using a new pipe @@ -1610,7 +1634,9 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, } kind; NBAIO_Format format = Format_from_SR_C(inStream->common.get_sample_rate(&inStream->common), 
popcount(inStream->common.get_channels(&inStream->common))); - if (format == Format_Invalid) { + if (!mTeeSinkInputEnabled) { + kind = TEE_SINK_NO; + } else if (format == Format_Invalid) { kind = TEE_SINK_NO; } else if (mRecordTeeSink == 0) { kind = TEE_SINK_NEW; @@ -1623,7 +1649,7 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, } switch (kind) { case TEE_SINK_NEW: { - Pipe *pipe = new Pipe(TEE_SINK_INPUT_FRAMES, format); + Pipe *pipe = new Pipe(mTeeSinkInputFrames, format); size_t numCounterOffers = 0; const NBAIO_Format offers[1] = {format}; ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers); @@ -1644,7 +1670,7 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, default: break; } -#endif + AudioStreamIn *input = new AudioStreamIn(inHwDev, inStream); // Start record thread @@ -2199,19 +2225,80 @@ status_t AudioFlinger::moveEffectChain_l(int sessionId, return NO_ERROR; } +struct Entry { +#define MAX_NAME 32 // %Y%m%d%H%M%S_%d.wav + char mName[MAX_NAME]; +}; + +int comparEntry(const void *p1, const void *p2) +{ + return strcmp(((const Entry *) p1)->mName, ((const Entry *) p2)->mName); +} + void AudioFlinger::dumpTee(int fd, const sp& source, audio_io_handle_t id) { NBAIO_Source *teeSource = source.get(); if (teeSource != NULL) { + // .wav rotation + // There is a benign race condition if 2 threads call this simultaneously. + // They would both traverse the directory, but the result would simply be + // failures at unlink() which are ignored. It's also unlikely since + // normally dumpsys is only done by bugreport or from the command line. + char teePath[32+256]; + strcpy(teePath, "/data/misc/media"); + size_t teePathLen = strlen(teePath); + DIR *dir = opendir(teePath); + teePath[teePathLen++] = '/'; + if (dir != NULL) { +#define MAX_SORT 20 // number of entries to sort +#define MAX_KEEP 10 // number of entries to keep + struct Entry entries[MAX_SORT]; + size_t entryCount = 0; + while (entryCount < MAX_SORT) { + struct dirent de; + struct dirent *result = NULL; + int rc = readdir_r(dir, &de, &result); + if (rc != 0) { + ALOGW("readdir_r failed %d", rc); + break; + } + if (result == NULL) { + break; + } + if (result != &de) { + ALOGW("readdir_r returned unexpected result %p != %p", result, &de); + break; + } + // ignore non .wav file entries + size_t nameLen = strlen(de.d_name); + if (nameLen <= 4 || nameLen >= MAX_NAME || + strcmp(&de.d_name[nameLen - 4], ".wav")) { + continue; + } + strcpy(entries[entryCount++].mName, de.d_name); + } + (void) closedir(dir); + if (entryCount > MAX_KEEP) { + qsort(entries, entryCount, sizeof(Entry), comparEntry); + for (size_t i = 0; i < entryCount - MAX_KEEP; ++i) { + strcpy(&teePath[teePathLen], entries[i].mName); + (void) unlink(teePath); + } + } + } else { + if (fd >= 0) { + fdprintf(fd, "unable to rotate tees in %s: %s\n", teePath, strerror(errno)); + } + } char teeTime[16]; struct timeval tv; gettimeofday(&tv, NULL); struct tm tm; localtime_r(&tv.tv_sec, &tm); - strftime(teeTime, sizeof(teeTime), "%T", &tm); - char teePath[64]; - sprintf(teePath, "/data/misc/media/%s_%d.wav", teeTime, id); - int teeFd = open(teePath, O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR); + strftime(teeTime, sizeof(teeTime), "%Y%m%d%H%M%S", &tm); + snprintf(&teePath[teePathLen], sizeof(teePath) - teePathLen, "%s_%d.wav", teeTime, id); + // if 2 dumpsys are done within 1 second, and rotation didn't work, then discard 2nd + int teeFd = open(teePath, O_WRONLY | O_CREAT | O_EXCL | O_NOFOLLOW, S_IRUSR | S_IWUSR); if (teeFd >= 
0) { char wavHeader[44]; memcpy(wavHeader, @@ -2253,9 +2340,13 @@ void AudioFlinger::dumpTee(int fd, const sp& source, audio_io_hand temp = total * channelCount * sizeof(short); write(teeFd, &temp, sizeof(temp)); close(teeFd); - fdprintf(fd, "FastMixer tee copied to %s\n", teePath); + if (fd >= 0) { + fdprintf(fd, "tee copied to %s\n", teePath); + } } else { - fdprintf(fd, "FastMixer unable to create tee %s: \n", strerror(errno)); + if (fd >= 0) { + fdprintf(fd, "unable to create tee %s: %s\n", teePath, strerror(errno)); + } } } } diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index c3f08f6..44bd260 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -593,7 +593,24 @@ private: sp mRecordTeeSource; public: + // tee sink, if enabled by property, allows dumpsys to write most recent audio to .wav file static void dumpTee(int fd, const sp& source, audio_io_handle_t id = 0); + + // whether tee sink is enabled by property + static bool mTeeSinkInputEnabled; + static bool mTeeSinkOutputEnabled; + static bool mTeeSinkTrackEnabled; + + // runtime configured size of each tee sink pipe, in frames + static size_t mTeeSinkInputFrames; + static size_t mTeeSinkOutputFrames; + static size_t mTeeSinkTrackFrames; + + // compile-time default size of tee sink pipes, in frames + // 0x200000 stereo 16-bit PCM frames = 47.5 seconds at 44.1 kHz, 8 megabytes + static const size_t kTeeSinkInputFramesDefault = 0x200000; + static const size_t kTeeSinkOutputFramesDefault = 0x200000; + static const size_t kTeeSinkTrackFramesDefault = 0x1000; }; #undef INCLUDING_FROM_AUDIOFLINGER_H diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index ba848d7..1209ea6 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2124,19 +2124,19 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud (monoPipe->maxFrames() * 7) / 8 : mNormalFrameCount * 2); mPipeSink = monoPipe; -#ifdef TEE_SINK_FRAMES - // create a Pipe to archive a copy of FastMixer's output for dumpsys - Pipe *teeSink = new Pipe(TEE_SINK_FRAMES, format); - numCounterOffers = 0; - index = teeSink->negotiate(offers, 1, NULL, numCounterOffers); - ALOG_ASSERT(index == 0); - mTeeSink = teeSink; - PipeReader *teeSource = new PipeReader(*teeSink); - numCounterOffers = 0; - index = teeSource->negotiate(offers, 1, NULL, numCounterOffers); - ALOG_ASSERT(index == 0); - mTeeSource = teeSource; -#endif + if (mTeeSinkOutputEnabled) { + // create a Pipe to archive a copy of FastMixer's output for dumpsys + Pipe *teeSink = new Pipe(mTeeSinkOutputFrames, format); + numCounterOffers = 0; + index = teeSink->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + mTeeSink = teeSink; + PipeReader *teeSource = new PipeReader(*teeSink); + numCounterOffers = 0; + index = teeSource->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + mTeeSource = teeSource; + } // create fast mixer and configure it initially with just one fast track for our submix mFastMixer = new FastMixer(); diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h index e0bd97a..fecbfda 100644 --- a/services/audioflinger/TrackBase.h +++ b/services/audioflinger/TrackBase.h @@ -141,4 +141,7 @@ protected: Vector < sp >mSyncEvents; const bool mIsOut; ServerProxy* mServerProxy; + const int mId; + sp mTeeSink; + sp mTeeSource; }; diff --git a/services/audioflinger/Tracks.cpp 
b/services/audioflinger/Tracks.cpp index 315cbbc..724ce38 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -32,6 +32,9 @@ #include "AudioFlinger.h" #include "ServiceUtilities.h" +#include +#include + // ---------------------------------------------------------------------------- // Note: the following macro is used for extremely verbose logging message. In @@ -53,6 +56,8 @@ namespace android { // TrackBase // ---------------------------------------------------------------------------- +static volatile int32_t nextTrackId = 55; + // TrackBase constructor must be called with AudioFlinger::mLock held AudioFlinger::ThreadBase::TrackBase::TrackBase( ThreadBase *thread, @@ -82,7 +87,8 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mStepServerFailed(false), mSessionId(sessionId), mIsOut(isOut), - mServerProxy(NULL) + mServerProxy(NULL), + mId(android_atomic_inc(&nextTrackId)) { // client == 0 implies sharedBuffer == 0 ALOG_ASSERT(!(client == 0 && sharedBuffer != 0)); @@ -134,11 +140,30 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( } mBufferEnd = (uint8_t *)mBuffer + bufferSize; mServerProxy = new ServerProxy(mCblk, mBuffer, frameCount, mFrameSize, isOut); + + if (mTeeSinkTrackEnabled) { + NBAIO_Format pipeFormat = Format_from_SR_C(mSampleRate, mChannelCount); + if (pipeFormat != Format_Invalid) { + Pipe *pipe = new Pipe(mTeeSinkTrackFrames, pipeFormat); + size_t numCounterOffers = 0; + const NBAIO_Format offers[1] = {pipeFormat}; + ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + PipeReader *pipeReader = new PipeReader(*pipe); + numCounterOffers = 0; + index = pipeReader->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + mTeeSink = pipe; + mTeeSource = pipeReader; + } + } + } } AudioFlinger::ThreadBase::TrackBase::~TrackBase() { + dumpTee(-1, mTeeSource, mId); // delete the proxy before deleting the shared memory it refers to, to avoid dangling reference delete mServerProxy; if (mCblk != NULL) { @@ -164,6 +189,10 @@ AudioFlinger::ThreadBase::TrackBase::~TrackBase() // This implementation of releaseBuffer() is used by Track and RecordTrack, but not TimedTrack void AudioFlinger::ThreadBase::TrackBase::releaseBuffer(AudioBufferProvider::Buffer* buffer) { + if (mTeeSink != 0) { + (void) mTeeSink->write(buffer->raw, buffer->frameCount); + } + buffer->raw = NULL; mStepCount = buffer->frameCount; // FIXME See note at getNextBuffer() -- cgit v1.1 From b2066f3a91718df6190532f7f795d572cf80ffbd Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 10 Jan 2013 12:31:01 -0800 Subject: Update tee sink Implement rotation to reduce long-term storage use. Implement optional per-track tee. Dynamically enable at runtime based on property, instead of at compile-time. Dynamic frame count not yet implemented. 
Bug: 8223560 Change-Id: I3706443c6ec0cb0c6656dc288715a02ad5fea63a --- services/audioflinger/Android.mk | 8 --- services/audioflinger/AudioFlinger.cpp | 111 ++++++++++++++++++++++++++++++--- services/audioflinger/AudioFlinger.h | 17 +++++ services/audioflinger/Threads.cpp | 26 ++++---- services/audioflinger/TrackBase.h | 3 + services/audioflinger/Tracks.cpp | 31 ++++++++- 6 files changed, 164 insertions(+), 32 deletions(-) diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index 6d42143..0855db6 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -66,14 +66,6 @@ LOCAL_CFLAGS += -DSTATE_QUEUE_INSTANTIATIONS='"StateQueueInstantiations.cpp"' LOCAL_CFLAGS += -UFAST_TRACKS_AT_NON_NATIVE_SAMPLE_RATE -# uncomment for dumpsys to write most recent audio output to .wav file -# 47.5 seconds at 44.1 kHz, 8 megabytes -# LOCAL_CFLAGS += -DTEE_SINK_FRAMES=0x200000 - -# uncomment for dumpsys to write most recent audio input to .wav file -# 47.5 seconds at 44.1 kHz, 8 megabytes -# LOCAL_CFLAGS += -DTEE_SINK_INPUT_FRAMES=0x200000 - # uncomment to enable the audio watchdog # LOCAL_SRC_FILES += AudioWatchdog.cpp # LOCAL_CFLAGS += -DAUDIO_WATCHDOG diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 47c2772..e0ab8cd 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -19,6 +19,7 @@ #define LOG_TAG "AudioFlinger" //#define LOG_NDEBUG 0 +#include #include #include #include @@ -61,6 +62,9 @@ #include +#include +#include + // ---------------------------------------------------------------------------- // Note: the following macro is used for extremely verbose logging message. In @@ -86,6 +90,14 @@ nsecs_t AudioFlinger::mStandbyTimeInNsecs = kDefaultStandbyTimeInNsecs; uint32_t AudioFlinger::mScreenState; +bool AudioFlinger::mTeeSinkInputEnabled = false; +bool AudioFlinger::mTeeSinkOutputEnabled = false; +bool AudioFlinger::mTeeSinkTrackEnabled = false; + +size_t AudioFlinger::mTeeSinkInputFrames = kTeeSinkInputFramesDefault; +size_t AudioFlinger::mTeeSinkOutputFrames = kTeeSinkOutputFramesDefault; +size_t AudioFlinger::mTeeSinkTrackFrames = kTeeSinkTrackFramesDefault; + // ---------------------------------------------------------------------------- static int load_audio_interface(const char *if_name, audio_hw_device_t **dev) @@ -134,6 +146,19 @@ AudioFlinger::AudioFlinger() if (doLog) { mLogMemoryDealer = new MemoryDealer(kLogMemorySize, "LogWriters"); } + (void) property_get("ro.debuggable", value, "0"); + int debuggable = atoi(value); + int teeEnabled = 0; + if (debuggable) { + (void) property_get("af.tee", value, "0"); + teeEnabled = atoi(value); + } + if (teeEnabled & 1) + mTeeSinkInputEnabled = true; + if (teeEnabled & 2) + mTeeSinkOutputEnabled = true; + if (teeEnabled & 4) + mTeeSinkTrackEnabled = true; } void AudioFlinger::onFirstRef() @@ -1602,7 +1627,6 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, // Try to re-use most recently used Pipe to archive a copy of input for dumpsys, // or (re-)create if current Pipe is idle and does not match the new format sp teeSink; -#ifdef TEE_SINK_INPUT_FRAMES enum { TEE_SINK_NO, // don't copy input TEE_SINK_NEW, // copy input using a new pipe @@ -1610,7 +1634,9 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, } kind; NBAIO_Format format = Format_from_SR_C(inStream->common.get_sample_rate(&inStream->common), 
popcount(inStream->common.get_channels(&inStream->common))); - if (format == Format_Invalid) { + if (!mTeeSinkInputEnabled) { + kind = TEE_SINK_NO; + } else if (format == Format_Invalid) { kind = TEE_SINK_NO; } else if (mRecordTeeSink == 0) { kind = TEE_SINK_NEW; @@ -1623,7 +1649,7 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, } switch (kind) { case TEE_SINK_NEW: { - Pipe *pipe = new Pipe(TEE_SINK_INPUT_FRAMES, format); + Pipe *pipe = new Pipe(mTeeSinkInputFrames, format); size_t numCounterOffers = 0; const NBAIO_Format offers[1] = {format}; ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers); @@ -1644,7 +1670,7 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, default: break; } -#endif + AudioStreamIn *input = new AudioStreamIn(inHwDev, inStream); // Start record thread @@ -2199,19 +2225,80 @@ status_t AudioFlinger::moveEffectChain_l(int sessionId, return NO_ERROR; } +struct Entry { +#define MAX_NAME 32 // %Y%m%d%H%M%S_%d.wav + char mName[MAX_NAME]; +}; + +int comparEntry(const void *p1, const void *p2) +{ + return strcmp(((const Entry *) p1)->mName, ((const Entry *) p2)->mName); +} + void AudioFlinger::dumpTee(int fd, const sp& source, audio_io_handle_t id) { NBAIO_Source *teeSource = source.get(); if (teeSource != NULL) { + // .wav rotation + // There is a benign race condition if 2 threads call this simultaneously. + // They would both traverse the directory, but the result would simply be + // failures at unlink() which are ignored. It's also unlikely since + // normally dumpsys is only done by bugreport or from the command line. + char teePath[32+256]; + strcpy(teePath, "/data/misc/media"); + size_t teePathLen = strlen(teePath); + DIR *dir = opendir(teePath); + teePath[teePathLen++] = '/'; + if (dir != NULL) { +#define MAX_SORT 20 // number of entries to sort +#define MAX_KEEP 10 // number of entries to keep + struct Entry entries[MAX_SORT]; + size_t entryCount = 0; + while (entryCount < MAX_SORT) { + struct dirent de; + struct dirent *result = NULL; + int rc = readdir_r(dir, &de, &result); + if (rc != 0) { + ALOGW("readdir_r failed %d", rc); + break; + } + if (result == NULL) { + break; + } + if (result != &de) { + ALOGW("readdir_r returned unexpected result %p != %p", result, &de); + break; + } + // ignore non .wav file entries + size_t nameLen = strlen(de.d_name); + if (nameLen <= 4 || nameLen >= MAX_NAME || + strcmp(&de.d_name[nameLen - 4], ".wav")) { + continue; + } + strcpy(entries[entryCount++].mName, de.d_name); + } + (void) closedir(dir); + if (entryCount > MAX_KEEP) { + qsort(entries, entryCount, sizeof(Entry), comparEntry); + for (size_t i = 0; i < entryCount - MAX_KEEP; ++i) { + strcpy(&teePath[teePathLen], entries[i].mName); + (void) unlink(teePath); + } + } + } else { + if (fd >= 0) { + fdprintf(fd, "unable to rotate tees in %s: %s\n", teePath, strerror(errno)); + } + } char teeTime[16]; struct timeval tv; gettimeofday(&tv, NULL); struct tm tm; localtime_r(&tv.tv_sec, &tm); - strftime(teeTime, sizeof(teeTime), "%T", &tm); - char teePath[64]; - sprintf(teePath, "/data/misc/media/%s_%d.wav", teeTime, id); - int teeFd = open(teePath, O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR); + strftime(teeTime, sizeof(teeTime), "%Y%m%d%H%M%S", &tm); + snprintf(&teePath[teePathLen], sizeof(teePath) - teePathLen, "%s_%d.wav", teeTime, id); + // if 2 dumpsys are done within 1 second, and rotation didn't work, then discard 2nd + int teeFd = open(teePath, O_WRONLY | O_CREAT | O_EXCL | O_NOFOLLOW, S_IRUSR | S_IWUSR); if (teeFd >= 
0) { char wavHeader[44]; memcpy(wavHeader, @@ -2253,9 +2340,13 @@ void AudioFlinger::dumpTee(int fd, const sp& source, audio_io_hand temp = total * channelCount * sizeof(short); write(teeFd, &temp, sizeof(temp)); close(teeFd); - fdprintf(fd, "FastMixer tee copied to %s\n", teePath); + if (fd >= 0) { + fdprintf(fd, "tee copied to %s\n", teePath); + } } else { - fdprintf(fd, "FastMixer unable to create tee %s: \n", strerror(errno)); + if (fd >= 0) { + fdprintf(fd, "unable to create tee %s: %s\n", teePath, strerror(errno)); + } } } } diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index c3f08f6..44bd260 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -593,7 +593,24 @@ private: sp mRecordTeeSource; public: + // tee sink, if enabled by property, allows dumpsys to write most recent audio to .wav file static void dumpTee(int fd, const sp& source, audio_io_handle_t id = 0); + + // whether tee sink is enabled by property + static bool mTeeSinkInputEnabled; + static bool mTeeSinkOutputEnabled; + static bool mTeeSinkTrackEnabled; + + // runtime configured size of each tee sink pipe, in frames + static size_t mTeeSinkInputFrames; + static size_t mTeeSinkOutputFrames; + static size_t mTeeSinkTrackFrames; + + // compile-time default size of tee sink pipes, in frames + // 0x200000 stereo 16-bit PCM frames = 47.5 seconds at 44.1 kHz, 8 megabytes + static const size_t kTeeSinkInputFramesDefault = 0x200000; + static const size_t kTeeSinkOutputFramesDefault = 0x200000; + static const size_t kTeeSinkTrackFramesDefault = 0x1000; }; #undef INCLUDING_FROM_AUDIOFLINGER_H diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index ba848d7..1209ea6 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2124,19 +2124,19 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud (monoPipe->maxFrames() * 7) / 8 : mNormalFrameCount * 2); mPipeSink = monoPipe; -#ifdef TEE_SINK_FRAMES - // create a Pipe to archive a copy of FastMixer's output for dumpsys - Pipe *teeSink = new Pipe(TEE_SINK_FRAMES, format); - numCounterOffers = 0; - index = teeSink->negotiate(offers, 1, NULL, numCounterOffers); - ALOG_ASSERT(index == 0); - mTeeSink = teeSink; - PipeReader *teeSource = new PipeReader(*teeSink); - numCounterOffers = 0; - index = teeSource->negotiate(offers, 1, NULL, numCounterOffers); - ALOG_ASSERT(index == 0); - mTeeSource = teeSource; -#endif + if (mTeeSinkOutputEnabled) { + // create a Pipe to archive a copy of FastMixer's output for dumpsys + Pipe *teeSink = new Pipe(mTeeSinkOutputFrames, format); + numCounterOffers = 0; + index = teeSink->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + mTeeSink = teeSink; + PipeReader *teeSource = new PipeReader(*teeSink); + numCounterOffers = 0; + index = teeSource->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + mTeeSource = teeSource; + } // create fast mixer and configure it initially with just one fast track for our submix mFastMixer = new FastMixer(); diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h index e0bd97a..fecbfda 100644 --- a/services/audioflinger/TrackBase.h +++ b/services/audioflinger/TrackBase.h @@ -141,4 +141,7 @@ protected: Vector < sp >mSyncEvents; const bool mIsOut; ServerProxy* mServerProxy; + const int mId; + sp mTeeSink; + sp mTeeSource; }; diff --git a/services/audioflinger/Tracks.cpp 
b/services/audioflinger/Tracks.cpp index 315cbbc..724ce38 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -32,6 +32,9 @@ #include "AudioFlinger.h" #include "ServiceUtilities.h" +#include +#include + // ---------------------------------------------------------------------------- // Note: the following macro is used for extremely verbose logging message. In @@ -53,6 +56,8 @@ namespace android { // TrackBase // ---------------------------------------------------------------------------- +static volatile int32_t nextTrackId = 55; + // TrackBase constructor must be called with AudioFlinger::mLock held AudioFlinger::ThreadBase::TrackBase::TrackBase( ThreadBase *thread, @@ -82,7 +87,8 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mStepServerFailed(false), mSessionId(sessionId), mIsOut(isOut), - mServerProxy(NULL) + mServerProxy(NULL), + mId(android_atomic_inc(&nextTrackId)) { // client == 0 implies sharedBuffer == 0 ALOG_ASSERT(!(client == 0 && sharedBuffer != 0)); @@ -134,11 +140,30 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( } mBufferEnd = (uint8_t *)mBuffer + bufferSize; mServerProxy = new ServerProxy(mCblk, mBuffer, frameCount, mFrameSize, isOut); + + if (mTeeSinkTrackEnabled) { + NBAIO_Format pipeFormat = Format_from_SR_C(mSampleRate, mChannelCount); + if (pipeFormat != Format_Invalid) { + Pipe *pipe = new Pipe(mTeeSinkTrackFrames, pipeFormat); + size_t numCounterOffers = 0; + const NBAIO_Format offers[1] = {pipeFormat}; + ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + PipeReader *pipeReader = new PipeReader(*pipe); + numCounterOffers = 0; + index = pipeReader->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + mTeeSink = pipe; + mTeeSource = pipeReader; + } + } + } } AudioFlinger::ThreadBase::TrackBase::~TrackBase() { + dumpTee(-1, mTeeSource, mId); // delete the proxy before deleting the shared memory it refers to, to avoid dangling reference delete mServerProxy; if (mCblk != NULL) { @@ -164,6 +189,10 @@ AudioFlinger::ThreadBase::TrackBase::~TrackBase() // This implementation of releaseBuffer() is used by Track and RecordTrack, but not TimedTrack void AudioFlinger::ThreadBase::TrackBase::releaseBuffer(AudioBufferProvider::Buffer* buffer) { + if (mTeeSink != 0) { + (void) mTeeSink->write(buffer->raw, buffer->frameCount); + } + buffer->raw = NULL; mStepCount = buffer->frameCount; // FIXME See note at getNextBuffer() -- cgit v1.1 From 2b56065a51c49a6235ac974c033c5751e8055869 Mon Sep 17 00:00:00 2001 From: Insun Kang Date: Fri, 23 Nov 2012 19:00:07 +0900 Subject: Handles duplicated NAL start code to fix crash on HLS streams. Some YouTube live streams are encoded with duplicated NAL start codes, for instance 00 00 01 00 00 00 01 .... Previously, a zero NAL size caused a crash via the CHECK_GT(nalSize, 0u) macro. With this patch, a duplicated NAL start code is simply ignored. TESTED=Played problematic YouTube Live streams.
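As context for the one-line fix below, here is a stand-alone sketch (illustrative only, not part of this patch; the helper name and types are hypothetical, not Stagefright APIs) of an Annex-B scanner that simply drops the zero-length NAL unit produced by a duplicated start code instead of asserting on it, which is the same policy the ESQueue change below adopts:

    // Sketch: tolerate duplicated start codes such as 00 00 01 00 00 00 01 by
    // skipping the resulting zero-length NAL unit rather than asserting.
    #include <stddef.h>
    #include <stdint.h>
    #include <utility>
    #include <vector>

    // Returns (offset, length) pairs of NAL payloads found in an Annex-B buffer.
    static std::vector<std::pair<size_t, size_t> > scanNalUnits(
            const uint8_t *data, size_t size) {
        std::vector<std::pair<size_t, size_t> > nals;
        const size_t kNone = (size_t)-1;
        size_t nalStart = kNone;   // offset of the NAL currently being collected
        size_t i = 0;
        while (i + 2 < size) {
            if (data[i] == 0x00 && data[i + 1] == 0x00 && data[i + 2] == 0x01) {
                if (nalStart != kNone) {
                    size_t end = i;
                    // Trailing zeros belong to a 4-byte start code, not the NAL.
                    while (end > nalStart && data[end - 1] == 0x00) { --end; }
                    if (end > nalStart) {
                        nals.push_back(std::make_pair(nalStart, end - nalStart));
                    }   // else: duplicated start code, zero-length NAL -> ignore
                }
                i += 3;             // skip the 00 00 01 start code
                nalStart = i;
            } else {
                ++i;
            }
        }
        if (nalStart != kNone && nalStart < size) {
            nals.push_back(std::make_pair(nalStart, size - nalStart));
        }
        return nals;
    }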
Change-Id: I1d76f111a34bd29cb09b037eb1b0626fe5f5b140 --- media/libstagefright/mpeg2ts/ESQueue.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp index 82fb637..9499712 100644 --- a/media/libstagefright/mpeg2ts/ESQueue.cpp +++ b/media/libstagefright/mpeg2ts/ESQueue.cpp @@ -536,7 +536,7 @@ sp ElementaryStreamQueue::dequeueAccessUnitH264() { size_t nalSize; bool foundSlice = false; while ((err = getNextNALUnit(&data, &size, &nalStart, &nalSize)) == OK) { - CHECK_GT(nalSize, 0u); + if (nalSize == 0) continue; unsigned nalType = nalStart[0] & 0x1f; bool flush = false; -- cgit v1.1 From ceb388d6c03c38b96dc41c0ea4804b749aa077c4 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Tue, 19 Feb 2013 10:40:14 -0800 Subject: CameraService and Stagefright: Support AppOps Camera: - Signal to AppOpsService when camera usage starts and stops - Listen to permissions revocations and act on them - Currently just kill camera connection when permissions lost Stagefright: - Pass on client name, UID to camera as needed Bug: 8181262 Change-Id: I9e33c9d05e9daa77dbb2d795045d08eb887ec8f0 --- camera/Camera.cpp | 6 +- camera/ICameraService.cpp | 14 +- cmds/stagefright/record.cpp | 3 +- include/camera/Camera.h | 10 +- include/camera/ICameraService.h | 20 ++- include/media/IMediaRecorder.h | 1 + include/media/MediaRecorderBase.h | 1 + include/media/mediarecorder.h | 1 + include/media/stagefright/CameraSource.h | 28 +++- include/media/stagefright/CameraSourceTimeLapse.h | 5 + media/libmedia/IMediaRecorder.cpp | 19 ++- media/libmedia/mediarecorder.cpp | 21 +++ .../libmediaplayerservice/MediaRecorderClient.cpp | 12 +- media/libmediaplayerservice/MediaRecorderClient.h | 1 + .../libmediaplayerservice/StagefrightRecorder.cpp | 13 +- media/libmediaplayerservice/StagefrightRecorder.h | 3 + media/libstagefright/CameraSource.cpp | 27 +++- media/libstagefright/CameraSourceTimeLapse.cpp | 8 +- services/camera/libcameraservice/Camera2Client.cpp | 15 +- services/camera/libcameraservice/Camera2Client.h | 2 + services/camera/libcameraservice/CameraClient.cpp | 18 ++- services/camera/libcameraservice/CameraClient.h | 2 + services/camera/libcameraservice/CameraService.cpp | 171 +++++++++++++++++---- services/camera/libcameraservice/CameraService.h | 57 ++++++- .../camera/libcameraservice/ProCamera2Client.cpp | 6 +- .../camera/libcameraservice/ProCamera2Client.h | 2 + 26 files changed, 388 insertions(+), 78 deletions(-) diff --git a/camera/Camera.cpp b/camera/Camera.cpp index be395ba..d8dc2a5 100644 --- a/camera/Camera.cpp +++ b/camera/Camera.cpp @@ -19,6 +19,7 @@ #define LOG_TAG "Camera" #include #include +#include #include #include #include @@ -116,14 +117,15 @@ status_t Camera::getCameraInfo(int cameraId, return cs->getCameraInfo(cameraId, cameraInfo); } -sp Camera::connect(int cameraId) +sp Camera::connect(int cameraId, const String16& clientPackageName, + int clientUid) { ALOGV("connect"); sp c = new Camera(); sp cl = c; const sp& cs = getCameraService(); if (cs != 0) { - c->mCamera = cs->connect(cl, cameraId); + c->mCamera = cs->connect(cl, cameraId, clientPackageName, clientUid); } if (c->mCamera != 0) { c->mCamera->asBinder()->linkToDeath(c); diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp index 8237c66..fdf20ff 100644 --- a/camera/ICameraService.cpp +++ b/camera/ICameraService.cpp @@ -56,12 +56,15 @@ public: } // connect to camera service - virtual sp connect(const sp& cameraClient, int cameraId) + virtual 
sp connect(const sp& cameraClient, int cameraId, + const String16 &clientPackageName, int clientUid) { Parcel data, reply; data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); data.writeStrongBinder(cameraClient->asBinder()); data.writeInt32(cameraId); + data.writeString16(clientPackageName); + data.writeInt32(clientUid); remote()->transact(BnCameraService::CONNECT, data, &reply); return interface_cast(reply.readStrongBinder()); } @@ -103,8 +106,13 @@ status_t BnCameraService::onTransact( } break; case CONNECT: { CHECK_INTERFACE(ICameraService, data, reply); - sp cameraClient = interface_cast(data.readStrongBinder()); - sp camera = connect(cameraClient, data.readInt32()); + sp cameraClient = + interface_cast(data.readStrongBinder()); + int32_t cameraId = data.readInt32(); + const String16 clientName = data.readString16(); + int32_t clientUid = data.readInt32(); + sp camera = connect(cameraClient, cameraId, + clientName, clientUid); reply->writeStrongBinder(camera->asBinder()); return NO_ERROR; } break; diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp index 45c3f7b..b7a40c2 100644 --- a/cmds/stagefright/record.cpp +++ b/cmds/stagefright/record.cpp @@ -264,7 +264,8 @@ int main(int argc, char **argv) { #endif #if 0 - CameraSource *source = CameraSource::Create(); + CameraSource *source = CameraSource::Create( + String16(argv[0], strlen(argv[0]))); source->start(); printf("source = %p\n", source); diff --git a/include/camera/Camera.h b/include/camera/Camera.h index 8b87de6..be2b7f4 100644 --- a/include/camera/Camera.h +++ b/include/camera/Camera.h @@ -53,6 +53,7 @@ class ICamera; class Surface; class Mutex; class String8; +class String16; // ref-counted object for callbacks class CameraListener: virtual public RefBase @@ -67,12 +68,19 @@ public: class Camera : public BnCameraClient, public IBinder::DeathRecipient { public: + enum { + USE_CALLING_UID = -1 + }; + // construct a camera client from an existing remote static sp create(const sp& camera); static int32_t getNumberOfCameras(); static status_t getCameraInfo(int cameraId, struct CameraInfo* cameraInfo); - static sp connect(int cameraId); + static sp connect(int cameraId, + const String16& clientPackageName, + int clientUid); + virtual ~Camera(); void init(); diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h index 11d7b65..aa64243 100644 --- a/include/camera/ICameraService.h +++ b/include/camera/ICameraService.h @@ -37,18 +37,28 @@ public: CONNECT_PRO }; + enum { + USE_CALLING_UID = -1 + }; + public: DECLARE_META_INTERFACE(CameraService); virtual int32_t getNumberOfCameras() = 0; virtual status_t getCameraInfo(int cameraId, struct CameraInfo* cameraInfo) = 0; - virtual sp connect(const sp& cameraClient, - int cameraId) = 0; + /** + * clientPackageName and clientUid are used for permissions checking. if + * clientUid == USE_CALLING_UID, then the calling UID is used instead. Only + * trusted callers can set a clientUid other than USE_CALLING_UID. 
+ */ + virtual sp connect(const sp& cameraClient, + int cameraId, + const String16& clientPackageName, + int clientUid) = 0; - virtual sp - connect(const sp& cameraCb, - int cameraId) = 0; + virtual sp connect(const sp& cameraCb, + int cameraId) = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h index 54af0d3..8d7f11d 100644 --- a/include/media/IMediaRecorder.h +++ b/include/media/IMediaRecorder.h @@ -47,6 +47,7 @@ public: virtual status_t setVideoFrameRate(int frames_per_second) = 0; virtual status_t setParameters(const String8& params) = 0; virtual status_t setListener(const sp& listener) = 0; + virtual status_t setClientName(const String16& clientName) = 0; virtual status_t prepare() = 0; virtual status_t getMaxAmplitude(int* max) = 0; virtual status_t start() = 0; diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h index 803bc64..8dd40d2 100644 --- a/include/media/MediaRecorderBase.h +++ b/include/media/MediaRecorderBase.h @@ -48,6 +48,7 @@ struct MediaRecorderBase { virtual status_t setOutputFileAuxiliary(int fd) {return INVALID_OPERATION;} virtual status_t setParameters(const String8& params) = 0; virtual status_t setListener(const sp& listener) = 0; + virtual status_t setClientName(const String16& clientName) = 0; virtual status_t prepare() = 0; virtual status_t start() = 0; virtual status_t stop() = 0; diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h index da6b507..3b33479 100644 --- a/include/media/mediarecorder.h +++ b/include/media/mediarecorder.h @@ -219,6 +219,7 @@ public: status_t setVideoFrameRate(int frames_per_second); status_t setParameters(const String8& params); status_t setListener(const sp& listener); + status_t setClientName(const String16& clientName); status_t prepare(); status_t getMaxAmplitude(int* max); status_t start(); diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h index 6d6b8a9..cf38b14 100644 --- a/include/media/stagefright/CameraSource.h +++ b/include/media/stagefright/CameraSource.h @@ -25,6 +25,7 @@ #include #include #include +#include namespace android { @@ -39,9 +40,11 @@ public: * settings (such as video size, frame rate, color format, etc) * from the default camera. * + * @param clientName The package/process name of the client application. + * This is used for permissions checking. * @return NULL on error. */ - static CameraSource *Create(); + static CameraSource *Create(const String16 &clientName); /** * Factory method to create a new CameraSource. @@ -52,7 +55,11 @@ public: * * @param cameraId the id of the camera that the source will connect * to if camera is NULL; otherwise ignored. - * + * @param clientName the package/process name of the camera-using + * application if camera is NULL; otherwise ignored. Used for + * permissions checking. + * @param clientUid the UID of the camera-using application if camera is + * NULL; otherwise ignored. Used for permissions checking. 
* @param videoSize the dimension (in pixels) of the video frame * @param frameRate the target frames per second * @param surface the preview surface for display where preview @@ -71,6 +78,8 @@ public: static CameraSource *CreateFromCamera(const sp &camera, const sp &proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t frameRate, const sp& surface, @@ -158,7 +167,7 @@ protected: int64_t mTimeBetweenFrameCaptureUs; CameraSource(const sp& camera, const sp& proxy, - int32_t cameraId, + int32_t cameraId, const String16& clientName, uid_t clientUid, Size videoSize, int32_t frameRate, const sp& surface, bool storeMetaDataInVideoBuffers); @@ -198,17 +207,20 @@ private: status_t init(const sp& camera, const sp& proxy, - int32_t cameraId, Size videoSize, int32_t frameRate, - bool storeMetaDataInVideoBuffers); + int32_t cameraId, const String16& clientName, uid_t clientUid, + Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers); status_t initWithCameraAccess( const sp& camera, const sp& proxy, - int32_t cameraId, Size videoSize, int32_t frameRate, - bool storeMetaDataInVideoBuffers); + int32_t cameraId, const String16& clientName, uid_t clientUid, + Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers); status_t isCameraAvailable(const sp& camera, const sp& proxy, - int32_t cameraId); + int32_t cameraId, + const String16& clientName, + uid_t clientUid); + status_t isCameraColorFormatSupported(const CameraParameters& params); status_t configureCamera(CameraParameters* params, int32_t width, int32_t height, diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h index 0936da2..774772b 100644 --- a/include/media/stagefright/CameraSourceTimeLapse.h +++ b/include/media/stagefright/CameraSourceTimeLapse.h @@ -22,6 +22,7 @@ #include #include +#include namespace android { @@ -35,6 +36,8 @@ public: const sp &camera, const sp &proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t videoFrameRate, const sp& surface, @@ -108,6 +111,8 @@ private: const sp &camera, const sp &proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t videoFrameRate, const sp& surface, diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp index fdbc747..c935d97 100644 --- a/media/libmedia/IMediaRecorder.cpp +++ b/media/libmedia/IMediaRecorder.cpp @@ -51,7 +51,8 @@ enum { SET_PARAMETERS, SET_PREVIEW_SURFACE, SET_CAMERA, - SET_LISTENER + SET_LISTENER, + SET_CLIENT_NAME }; class BpMediaRecorder: public BpInterface @@ -217,6 +218,16 @@ public: return reply.readInt32(); } + status_t setClientName(const String16& clientName) + { + ALOGV("setClientName(%s)", String8(clientName).string()); + Parcel data, reply; + data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor()); + data.writeString16(clientName); + remote()->transact(SET_CLIENT_NAME, data, &reply); + return reply.readInt32(); + } + status_t prepare() { ALOGV("prepare"); @@ -423,6 +434,12 @@ status_t BnMediaRecorder::onTransact( reply->writeInt32(setListener(listener)); return NO_ERROR; } break; + case SET_CLIENT_NAME: { + ALOGV("SET_CLIENT_NAME"); + CHECK_INTERFACE(IMediaRecorder, data, reply); + reply->writeInt32(setClientName(data.readString16())); + return NO_ERROR; + } case SET_PREVIEW_SURFACE: { ALOGV("SET_PREVIEW_SURFACE"); CHECK_INTERFACE(IMediaRecorder, data, reply); diff --git a/media/libmedia/mediarecorder.cpp 
b/media/libmedia/mediarecorder.cpp index 660b1b2..3ac98cc 100644 --- a/media/libmedia/mediarecorder.cpp +++ b/media/libmedia/mediarecorder.cpp @@ -656,6 +656,27 @@ status_t MediaRecorder::setListener(const sp& listener) return NO_ERROR; } +status_t MediaRecorder::setClientName(const String16& clientName) +{ + ALOGV("setClientName"); + if (mMediaRecorder == NULL) { + ALOGE("media recorder is not initialized yet"); + return INVALID_OPERATION; + } + bool isInvalidState = (mCurrentState & + (MEDIA_RECORDER_PREPARED | + MEDIA_RECORDER_RECORDING | + MEDIA_RECORDER_ERROR)); + if (isInvalidState) { + ALOGE("setClientName is called in an invalid state: %d", mCurrentState); + return INVALID_OPERATION; + } + + mMediaRecorder->setClientName(clientName); + + return NO_ERROR; +} + void MediaRecorder::notify(int msg, int ext1, int ext2) { ALOGV("message received msg=%d, ext1=%d, ext2=%d", msg, ext1, ext2); diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp index c6d8b76..a52b238 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.cpp +++ b/media/libmediaplayerservice/MediaRecorderClient.cpp @@ -99,7 +99,7 @@ status_t MediaRecorderClient::setVideoSource(int vs) return PERMISSION_DENIED; } Mutex::Autolock lock(mLock); - if (mRecorder == NULL) { + if (mRecorder == NULL) { ALOGE("recorder is not initialized"); return NO_INIT; } @@ -325,6 +325,16 @@ status_t MediaRecorderClient::setListener(const sp& listen return mRecorder->setListener(listener); } +status_t MediaRecorderClient::setClientName(const String16& clientName) { + ALOGV("setClientName(%s)", String8(clientName).string()); + Mutex::Autolock lock(mLock); + if (mRecorder == NULL) { + ALOGE("recorder is not initialized"); + return NO_INIT; + } + return mRecorder->setClientName(clientName); +} + status_t MediaRecorderClient::dump(int fd, const Vector& args) const { if (mRecorder != NULL) { return mRecorder->dump(fd, args); diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h index 5623917..bd0eaf1 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.h +++ b/media/libmediaplayerservice/MediaRecorderClient.h @@ -46,6 +46,7 @@ public: virtual status_t setParameters(const String8& params); virtual status_t setListener( const sp& listener); + virtual status_t setClientName(const String16& clientName); virtual status_t prepare(); virtual status_t getMaxAmplitude(int* max); virtual status_t start(); diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp index 497dda6..f570856 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.cpp +++ b/media/libmediaplayerservice/StagefrightRecorder.cpp @@ -730,6 +730,12 @@ status_t StagefrightRecorder::setListener(const sp &listen return OK; } +status_t StagefrightRecorder::setClientName(const String16& clientName) { + mClientName = clientName; + + return OK; +} + status_t StagefrightRecorder::prepare() { return OK; } @@ -737,6 +743,8 @@ status_t StagefrightRecorder::prepare() { status_t StagefrightRecorder::start() { CHECK_GE(mOutputFd, 0); + // Get UID here for permission checking + mClientUid = IPCThreadState::self()->getCallingUid(); if (mWriter != NULL) { ALOGE("File writer is not avaialble"); return UNKNOWN_ERROR; @@ -1312,13 +1320,14 @@ status_t StagefrightRecorder::setupCameraSource( } mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera( - mCamera, mCameraProxy, mCameraId, + mCamera, 
mCameraProxy, mCameraId, mClientName, mClientUid, videoSize, mFrameRate, mPreviewSurface, mTimeBetweenTimeLapseFrameCaptureUs); *cameraSource = mCameraSourceTimeLapse; } else { *cameraSource = CameraSource::CreateFromCamera( - mCamera, mCameraProxy, mCameraId, videoSize, mFrameRate, + mCamera, mCameraProxy, mCameraId, mClientName, mClientUid, + videoSize, mFrameRate, mPreviewSurface, true /*storeMetaDataInVideoBuffers*/); } mCamera.clear(); diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h index 351efd4..fbe6fa6 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.h +++ b/media/libmediaplayerservice/StagefrightRecorder.h @@ -56,6 +56,7 @@ struct StagefrightRecorder : public MediaRecorderBase { virtual status_t setOutputFile(int fd, int64_t offset, int64_t length); virtual status_t setParameters(const String8& params); virtual status_t setListener(const sp& listener); + virtual status_t setClientName(const String16& clientName); virtual status_t prepare(); virtual status_t start(); virtual status_t pause(); @@ -72,6 +73,8 @@ private: sp mCameraProxy; sp mPreviewSurface; sp mListener; + String16 mClientName; + uid_t mClientUid; sp mWriter; int mOutputFd; sp mAudioSourceNode; diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp index efd7af7..f8557d0 100644 --- a/media/libstagefright/CameraSource.cpp +++ b/media/libstagefright/CameraSource.cpp @@ -121,13 +121,14 @@ static int32_t getColorFormat(const char* colorFormat) { CHECK(!"Unknown color format"); } -CameraSource *CameraSource::Create() { +CameraSource *CameraSource::Create(const String16 &clientName) { Size size; size.width = -1; size.height = -1; sp camera; - return new CameraSource(camera, NULL, 0, size, -1, NULL, false); + return new CameraSource(camera, NULL, 0, clientName, -1, + size, -1, NULL, false); } // static @@ -135,14 +136,16 @@ CameraSource *CameraSource::CreateFromCamera( const sp& camera, const sp& proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t frameRate, const sp& surface, bool storeMetaDataInVideoBuffers) { CameraSource *source = new CameraSource(camera, proxy, cameraId, - videoSize, frameRate, surface, - storeMetaDataInVideoBuffers); + clientName, clientUid, videoSize, frameRate, surface, + storeMetaDataInVideoBuffers); return source; } @@ -150,6 +153,8 @@ CameraSource::CameraSource( const sp& camera, const sp& proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t frameRate, const sp& surface, @@ -173,6 +178,7 @@ CameraSource::CameraSource( mVideoSize.height = -1; mInitCheck = init(camera, proxy, cameraId, + clientName, clientUid, videoSize, frameRate, storeMetaDataInVideoBuffers); if (mInitCheck != OK) releaseCamera(); @@ -184,10 +190,10 @@ status_t CameraSource::initCheck() const { status_t CameraSource::isCameraAvailable( const sp& camera, const sp& proxy, - int32_t cameraId) { + int32_t cameraId, const String16& clientName, uid_t clientUid) { if (camera == 0) { - mCamera = Camera::connect(cameraId); + mCamera = Camera::connect(cameraId, clientName, clientUid); if (mCamera == 0) return -EBUSY; mCameraFlags &= ~FLAGS_HOT_CAMERA; } else { @@ -469,6 +475,8 @@ status_t CameraSource::init( const sp& camera, const sp& proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers) { @@ -476,7 +484,7 @@ status_t CameraSource::init( 
ALOGV("init"); status_t err = OK; int64_t token = IPCThreadState::self()->clearCallingIdentity(); - err = initWithCameraAccess(camera, proxy, cameraId, + err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, videoSize, frameRate, storeMetaDataInVideoBuffers); IPCThreadState::self()->restoreCallingIdentity(token); @@ -487,13 +495,16 @@ status_t CameraSource::initWithCameraAccess( const sp& camera, const sp& proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers) { ALOGV("initWithCameraAccess"); status_t err = OK; - if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) { + if ((err = isCameraAvailable(camera, proxy, cameraId, + clientName, clientUid)) != OK) { ALOGE("Camera connection could not be established."); return err; } diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp index 26ce7ae..2ed2223 100644 --- a/media/libstagefright/CameraSourceTimeLapse.cpp +++ b/media/libstagefright/CameraSourceTimeLapse.cpp @@ -36,6 +36,8 @@ CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera( const sp &camera, const sp &proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t videoFrameRate, const sp& surface, @@ -43,6 +45,7 @@ CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera( CameraSourceTimeLapse *source = new CameraSourceTimeLapse(camera, proxy, cameraId, + clientName, clientUid, videoSize, videoFrameRate, surface, timeBetweenFrameCaptureUs); @@ -59,11 +62,14 @@ CameraSourceTimeLapse::CameraSourceTimeLapse( const sp& camera, const sp& proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t videoFrameRate, const sp& surface, int64_t timeBetweenFrameCaptureUs) - : CameraSource(camera, proxy, cameraId, videoSize, videoFrameRate, surface, true), + : CameraSource(camera, proxy, cameraId, clientName, clientUid, + videoSize, videoFrameRate, surface, true), mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate), mLastTimeLapseFrameRealTimestampUs(0), mSkipCurrentFrame(false) { diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index e8b3b7f..38d6949 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -40,12 +40,14 @@ static int getCallingPid() { Camera2Client::Camera2Client(const sp& cameraService, const sp& cameraClient, + const String16& clientPackageName, int cameraId, int cameraFacing, int clientPid, + uid_t clientUid, int servicePid): - Client(cameraService, cameraClient, - cameraId, cameraFacing, clientPid, servicePid), + Client(cameraService, cameraClient, clientPackageName, + cameraId, cameraFacing, clientPid, clientUid, servicePid), mSharedCameraClient(cameraClient), mParameters(cameraId, cameraFacing) { @@ -73,6 +75,12 @@ status_t Camera2Client::initialize(camera_module_t *module) ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId); status_t res; + // Verify ops permissions + res = startCameraOps(); + if (res != OK) { + return res; + } + res = mDevice->initialize(module); if (res != OK) { ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", @@ -741,8 +749,7 @@ void Camera2Client::stopPreviewL() { switch (state) { case Parameters::DISCONNECTED: - ALOGE("%s: Camera %d: Call before initialized", - __FUNCTION__, mCameraId); + // Nothing to do. 
break; case Parameters::STOPPED: case Parameters::VIDEO_SNAPSHOT: diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h index a4d4478..173b65e 100644 --- a/services/camera/libcameraservice/Camera2Client.h +++ b/services/camera/libcameraservice/Camera2Client.h @@ -72,9 +72,11 @@ public: Camera2Client(const sp& cameraService, const sp& cameraClient, + const String16& clientPackageName, int cameraId, int cameraFacing, int clientPid, + uid_t clientUid, int servicePid); virtual ~Camera2Client(); diff --git a/services/camera/libcameraservice/CameraClient.cpp b/services/camera/libcameraservice/CameraClient.cpp index f67c9f4..90f8f40 100644 --- a/services/camera/libcameraservice/CameraClient.cpp +++ b/services/camera/libcameraservice/CameraClient.cpp @@ -35,9 +35,12 @@ static int getCallingPid() { CameraClient::CameraClient(const sp& cameraService, const sp& cameraClient, - int cameraId, int cameraFacing, int clientPid, int servicePid): - Client(cameraService, cameraClient, - cameraId, cameraFacing, clientPid, servicePid) + const String16& clientPackageName, + int cameraId, int cameraFacing, + int clientPid, int clientUid, + int servicePid): + Client(cameraService, cameraClient, clientPackageName, + cameraId, cameraFacing, clientPid, clientUid, servicePid) { int callingPid = getCallingPid(); LOG1("CameraClient::CameraClient E (pid %d, id %d)", callingPid, cameraId); @@ -57,10 +60,17 @@ CameraClient::CameraClient(const sp& cameraService, status_t CameraClient::initialize(camera_module_t *module) { int callingPid = getCallingPid(); + status_t res; + LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId); + // Verify ops permissions + res = startCameraOps(); + if (res != OK) { + return res; + } + char camera_device_name[10]; - status_t res; snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId); mHardware = new CameraHardwareInterface(camera_device_name); diff --git a/services/camera/libcameraservice/CameraClient.h b/services/camera/libcameraservice/CameraClient.h index 74829ce..00dc90c 100644 --- a/services/camera/libcameraservice/CameraClient.h +++ b/services/camera/libcameraservice/CameraClient.h @@ -53,9 +53,11 @@ public: // Interface used by CameraService CameraClient(const sp& cameraService, const sp& cameraClient, + const String16& clientPackageName, int cameraId, int cameraFacing, int clientPid, + int clientUid, int servicePid); ~CameraClient(); diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index eb8bc05..ec1c3f0 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -22,6 +22,7 @@ #include #include +#include #include #include #include @@ -72,7 +73,7 @@ static int getCallingUid() { static CameraService *gCameraService; CameraService::CameraService() -:mSoundRef(0), mModule(0) + :mSoundRef(0), mModule(0) { ALOGI("CameraService started (pid=%d)", getpid()); gCameraService = this; @@ -155,10 +156,27 @@ int CameraService::getDeviceVersion(int cameraId, int* facing) { } sp CameraService::connect( - const sp& cameraClient, int cameraId) { + const sp& cameraClient, + int cameraId, + const String16& clientPackageName, + int clientUid) { + + String8 clientName8(clientPackageName); int callingPid = getCallingPid(); - LOG1("CameraService::connect E (pid %d, id %d)", callingPid, cameraId); + LOG1("CameraService::connect E (pid %d \"%s\", id %d)", callingPid, + 
clientName8.string(), cameraId); + + if (clientUid == USE_CALLING_UID) { + clientUid = getCallingUid(); + } else { + // We only trust our own process to forward client UIDs + if (callingPid != getpid()) { + ALOGE("CameraService::connect X (pid %d) rejected (don't trust clientUid)", + callingPid); + return NULL; + } + } if (!mModule) { ALOGE("Camera HAL module not loaded"); @@ -208,8 +226,10 @@ sp CameraService::connect( would be fine */ if (mBusy[cameraId]) { - ALOGW("CameraService::connect X (pid %d) rejected" - " (camera %d is still busy).", callingPid, cameraId); + + ALOGW("CameraService::connect X (pid %d, \"%s\") rejected" + " (camera %d is still busy).", callingPid, + clientName8.string(), cameraId); return NULL; } @@ -218,13 +238,15 @@ sp CameraService::connect( switch(deviceVersion) { case CAMERA_DEVICE_API_VERSION_1_0: - client = new CameraClient(this, cameraClient, cameraId, - facing, callingPid, getpid()); + client = new CameraClient(this, cameraClient, + clientPackageName, cameraId, + facing, callingPid, clientUid, getpid()); break; case CAMERA_DEVICE_API_VERSION_2_0: case CAMERA_DEVICE_API_VERSION_2_1: - client = new Camera2Client(this, cameraClient, cameraId, - facing, callingPid, getpid()); + client = new Camera2Client(this, cameraClient, + clientPackageName, cameraId, + facing, callingPid, clientUid, getpid()); break; case -1: ALOGE("Invalid camera id %d", cameraId); @@ -283,8 +305,8 @@ sp CameraService::connect( break; case CAMERA_DEVICE_API_VERSION_2_0: case CAMERA_DEVICE_API_VERSION_2_1: - client = new ProCamera2Client(this, cameraCb, cameraId, - facing, callingPid, getpid()); + client = new ProCamera2Client(this, cameraCb, String16(), + cameraId, facing, callingPid, USE_CALLING_UID, getpid()); break; case -1: ALOGE("Invalid camera id %d", cameraId); @@ -302,7 +324,8 @@ sp CameraService::connect( cameraCb->asBinder()->linkToDeath(this); - LOG1("CameraService::connect X (id %d, this pid is %d)", cameraId, getpid()); + LOG1("CameraService::connectPro X (id %d, this pid is %d)", cameraId, + getpid()); return client; @@ -522,10 +545,15 @@ void CameraService::playSound(sound_kind kind) { CameraService::Client::Client(const sp& cameraService, const sp& cameraClient, - int cameraId, int cameraFacing, int clientPid, int servicePid) : + const String16& clientPackageName, + int cameraId, int cameraFacing, + int clientPid, uid_t clientUid, + int servicePid) : CameraService::BasicClient(cameraService, cameraClient->asBinder(), - cameraId, cameraFacing, - clientPid, servicePid) + clientPackageName, + cameraId, cameraFacing, + clientPid, clientUid, + servicePid) { int callingPid = getCallingPid(); LOG1("Client::Client E (pid %d, id %d)", callingPid, cameraId); @@ -534,6 +562,7 @@ CameraService::Client::Client(const sp& cameraService, cameraService->setCameraBusy(cameraId); cameraService->loadSound(); + LOG1("Client::Client X (pid %d, id %d)", callingPid, cameraId); } @@ -542,23 +571,27 @@ CameraService::Client::~Client() { mDestructionStarted = true; mCameraService->releaseSound(); - + finishCameraOps(); // unconditionally disconnect. 
function is idempotent Client::disconnect(); } CameraService::BasicClient::BasicClient(const sp& cameraService, - const sp& remoteCallback, - int cameraId, int cameraFacing, - int clientPid, int servicePid) + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, int cameraFacing, + int clientPid, uid_t clientUid, + int servicePid): + mClientPackageName(clientPackageName) { mCameraService = cameraService; mRemoteCallback = remoteCallback; mCameraId = cameraId; mCameraFacing = cameraFacing; mClientPid = clientPid; + mClientUid = clientUid; mServicePid = servicePid; - + mOpsActive = false; mDestructionStarted = false; } @@ -570,6 +603,66 @@ void CameraService::BasicClient::disconnect() { mCameraService->removeClientByRemote(mRemoteCallback); } +status_t CameraService::BasicClient::startCameraOps() { + int32_t res; + + mOpsCallback = new OpsCallback(this); + + mAppOpsManager.startWatchingMode(AppOpsManager::OP_CAMERA, + mClientPackageName, mOpsCallback); + res = mAppOpsManager.startOp(AppOpsManager::OP_CAMERA, + mClientUid, mClientPackageName); + + if (res != AppOpsManager::MODE_ALLOWED) { + ALOGI("Camera %d: Access for \"%s\" has been revoked", + mCameraId, String8(mClientPackageName).string()); + return PERMISSION_DENIED; + } + mOpsActive = true; + return OK; +} + +status_t CameraService::BasicClient::finishCameraOps() { + if (mOpsActive) { + mAppOpsManager.finishOp(AppOpsManager::OP_CAMERA, mClientUid, + mClientPackageName); + mOpsActive = false; + } + mAppOpsManager.stopWatchingMode(mOpsCallback); + mOpsCallback.clear(); + + return OK; +} + +void CameraService::BasicClient::opChanged(int32_t op, const String16& packageName) { + String8 name(packageName); + String8 myName(mClientPackageName); + + if (op != AppOpsManager::OP_CAMERA) { + ALOGW("Unexpected app ops notification received: %d", op); + return; + } + + int32_t res; + res = mAppOpsManager.checkOp(AppOpsManager::OP_CAMERA, + mClientUid, mClientPackageName); + ALOGV("checkOp returns: %d, %s ", res, + res == AppOpsManager::MODE_ALLOWED ? "ALLOWED" : + res == AppOpsManager::MODE_IGNORED ? "IGNORED" : + res == AppOpsManager::MODE_ERRORED ? "ERRORED" : + "UNKNOWN"); + + if (res != AppOpsManager::MODE_ALLOWED) { + ALOGI("Camera %d: Access for \"%s\" revoked", mCameraId, + myName.string()); + // Reset the client PID to allow server-initiated disconnect, + // and to prevent further calls by client. 
+ mClientPid = getCallingPid(); + notifyError(); + disconnect(); + } +} + // ---------------------------------------------------------------------------- Mutex* CameraService::Client::getClientLockFromCookie(void* user) { @@ -592,25 +685,43 @@ CameraService::Client* CameraService::Client::getClientFromCookie(void* user) { return client; } +void CameraService::Client::notifyError() { + mCameraClient->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0); +} + // NOTE: function is idempotent void CameraService::Client::disconnect() { BasicClient::disconnect(); mCameraService->setCameraFree(mCameraId); } +CameraService::Client::OpsCallback::OpsCallback(wp client): + mClient(client) { +} + +void CameraService::Client::OpsCallback::opChanged(int32_t op, + const String16& packageName) { + sp client = mClient.promote(); + if (client != NULL) { + client->opChanged(op, packageName); + } +} + // ---------------------------------------------------------------------------- // IProCamera // ---------------------------------------------------------------------------- CameraService::ProClient::ProClient(const sp& cameraService, - const sp& remoteCallback, - int cameraId, - int cameraFacing, - int clientPid, - int servicePid) - : CameraService::BasicClient(cameraService, remoteCallback->asBinder(), - cameraId, cameraFacing, - clientPid, servicePid) + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid) + : CameraService::BasicClient(cameraService, remoteCallback->asBinder(), + clientPackageName, cameraId, cameraFacing, + clientPid, clientUid, servicePid) { mRemoteCallback = remoteCallback; } @@ -683,6 +794,10 @@ status_t CameraService::ProClient::cancelStream(int streamId) { return INVALID_OPERATION; } +void CameraService::ProClient::notifyError() { + ALOGE("%s: not implemented yet", __FUNCTION__); +} + // ---------------------------------------------------------------------------- static const int kDumpLockRetries = 50; diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index 9e0f62a..b017505 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -19,7 +19,9 @@ #define ANDROID_SERVERS_CAMERA_CAMERASERVICE_H #include +#include #include +#include #include #include @@ -54,9 +56,11 @@ public: virtual int32_t getNumberOfCameras(); virtual status_t getCameraInfo(int cameraId, struct CameraInfo* cameraInfo); - virtual sp connect(const sp& cameraClient, int cameraId); - virtual sp - connect(const sp& cameraCb, int cameraId); + + virtual sp connect(const sp& cameraClient, int cameraId, + const String16& clientPackageName, int clientUid); + virtual sp connect(const sp& cameraCb, + int cameraId); // Extra permissions checks virtual status_t onTransact(uint32_t code, const Parcel& data, @@ -100,9 +104,11 @@ public: protected: BasicClient(const sp& cameraService, const sp& remoteCallback, + const String16& clientPackageName, int cameraId, int cameraFacing, int clientPid, + uid_t clientUid, int servicePid); virtual ~BasicClient(); @@ -117,12 +123,41 @@ public: sp mCameraService; // immutable after constructor int mCameraId; // immutable after constructor int mCameraFacing; // immutable after constructor + const String16 mClientPackageName; pid_t mClientPid; + uid_t mClientUid; // immutable after constructor pid_t mServicePid; // immutable after constructor // - The app-side Binder interface 
to receive callbacks from us wp mRemoteCallback; // immutable after constructor - }; + + // permissions management + status_t startCameraOps(); + status_t finishCameraOps(); + + // Notify client about a fatal error + virtual void notifyError() = 0; + private: + AppOpsManager mAppOpsManager; + + class OpsCallback : public BnAppOpsCallback { + public: + OpsCallback(wp client); + virtual void opChanged(int32_t op, const String16& packageName); + + private: + wp mClient; + + }; // class OpsCallback + + sp mOpsCallback; + // Track whether startCameraOps was called successfully, to avoid + // finishing what we didn't start. + bool mOpsActive; + + // IAppOpsCallback interface, indirected through opListener + virtual void opChanged(int32_t op, const String16& packageName); + }; // class BasicClient class Client : public BnCamera, public BasicClient { @@ -153,9 +188,11 @@ public: // Interface used by CameraService Client(const sp& cameraService, const sp& cameraClient, + const String16& clientPackageName, int cameraId, int cameraFacing, int clientPid, + uid_t clientUid, int servicePid); ~Client(); @@ -169,19 +206,24 @@ public: // convert client from cookie. Client lock should be acquired before getting Client. static Client* getClientFromCookie(void* user); + virtual void notifyError(); + // Initialized in constructor // - The app-side Binder interface to receive callbacks from us sp mCameraClient; - }; + + }; // class Client class ProClient : public BnProCameraUser, public BasicClient { public: ProClient(const sp& cameraService, const sp& remoteCallback, + const String16& clientPackageName, int cameraId, int cameraFacing, int clientPid, + uid_t clientUid, int servicePid); virtual ~ProClient(); @@ -217,9 +259,10 @@ public: virtual status_t cancelStream(int streamId); protected: - sp mRemoteCallback; + virtual void notifyError(); - }; + sp mRemoteCallback; + }; // class ProClient private: diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp index c264e2a..eda3012 100644 --- a/services/camera/libcameraservice/ProCamera2Client.cpp +++ b/services/camera/libcameraservice/ProCamera2Client.cpp @@ -43,12 +43,14 @@ static int getCallingUid() { ProCamera2Client::ProCamera2Client(const sp& cameraService, const sp& remoteCallback, + const String16& clientPackageName, int cameraId, int cameraFacing, int clientPid, + uid_t clientUid, int servicePid): - ProClient(cameraService, remoteCallback, - cameraId, cameraFacing, clientPid, servicePid), + ProClient(cameraService, remoteCallback, clientPackageName, + cameraId, cameraFacing, clientPid, clientUid, servicePid), mSharedCameraCallbacks(remoteCallback) { ATRACE_CALL(); diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/ProCamera2Client.h index cd0a2ae..9f514f4 100644 --- a/services/camera/libcameraservice/ProCamera2Client.h +++ b/services/camera/libcameraservice/ProCamera2Client.h @@ -77,9 +77,11 @@ public: ProCamera2Client(const sp& cameraService, const sp& remoteCallback, + const String16& clientPackageName, int cameraId, int cameraFacing, int clientPid, + uid_t clientUid, int servicePid); virtual ~ProCamera2Client(); -- cgit v1.1 From 46909e7eb074ce1b95b8a411eb71154f53f84f77 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 26 Feb 2013 09:20:22 -0800 Subject: Remove tee sink debugging at compile time Bug: 8223560 Change-Id: Iddbfb06c45d43d9f20bb428215dd4094931e19a7 --- services/audioflinger/Android.mk | 3 +++ 
services/audioflinger/AudioFlinger.cpp | 17 +++++++++++++++-- services/audioflinger/AudioFlinger.h | 6 ++++++ services/audioflinger/Threads.cpp | 24 +++++++++++++++++++----- services/audioflinger/Threads.h | 9 +++++++-- services/audioflinger/Tracks.cpp | 34 ++++++++++++++++++++-------------- 6 files changed, 70 insertions(+), 23 deletions(-) diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index 0855db6..7daef99 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -66,6 +66,9 @@ LOCAL_CFLAGS += -DSTATE_QUEUE_INSTANTIATIONS='"StateQueueInstantiations.cpp"' LOCAL_CFLAGS += -UFAST_TRACKS_AT_NON_NATIVE_SAMPLE_RATE +# uncomment to allow tee sink debugging to be enabled by property +# LOCAL_CFLAGS += -DTEE_SINK + # uncomment to enable the audio watchdog # LOCAL_SRC_FILES += AudioWatchdog.cpp # LOCAL_CFLAGS += -DAUDIO_WATCHDOG diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index e0ab8cd..e81267f 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -90,6 +90,7 @@ nsecs_t AudioFlinger::mStandbyTimeInNsecs = kDefaultStandbyTimeInNsecs; uint32_t AudioFlinger::mScreenState; +#ifdef TEE_SINK bool AudioFlinger::mTeeSinkInputEnabled = false; bool AudioFlinger::mTeeSinkOutputEnabled = false; bool AudioFlinger::mTeeSinkTrackEnabled = false; @@ -97,6 +98,7 @@ bool AudioFlinger::mTeeSinkTrackEnabled = false; size_t AudioFlinger::mTeeSinkInputFrames = kTeeSinkInputFramesDefault; size_t AudioFlinger::mTeeSinkOutputFrames = kTeeSinkOutputFramesDefault; size_t AudioFlinger::mTeeSinkTrackFrames = kTeeSinkTrackFramesDefault; +#endif // ---------------------------------------------------------------------------- @@ -146,6 +148,7 @@ AudioFlinger::AudioFlinger() if (doLog) { mLogMemoryDealer = new MemoryDealer(kLogMemorySize, "LogWriters"); } +#ifdef TEE_SINK (void) property_get("ro.debuggable", value, "0"); int debuggable = atoi(value); int teeEnabled = 0; @@ -159,6 +162,7 @@ AudioFlinger::AudioFlinger() mTeeSinkOutputEnabled = true; if (teeEnabled & 4) mTeeSinkTrackEnabled = true; +#endif } void AudioFlinger::onFirstRef() @@ -347,10 +351,12 @@ status_t AudioFlinger::dump(int fd, const Vector& args) dev->dump(dev, fd); } +#ifdef TEE_SINK // dump the serially shared record tee sink if (mRecordTeeSource != 0) { dumpTee(fd, mRecordTeeSource); } +#endif if (locked) { mLock.unlock(); @@ -1624,6 +1630,7 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, if (status == NO_ERROR && inStream != NULL) { +#ifdef TEE_SINK // Try to re-use most recently used Pipe to archive a copy of input for dumpsys, // or (re-)create if current Pipe is idle and does not match the new format sp teeSink; @@ -1670,6 +1677,7 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, default: break; } +#endif AudioStreamIn *input = new AudioStreamIn(inHwDev, inStream); @@ -1682,8 +1690,11 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, reqChannels, id, primaryOutputDevice_l(), - *pDevices, - teeSink); + *pDevices +#ifdef TEE_SINK + , teeSink +#endif + ); mRecordThreads.add(id, thread); ALOGV("openInput() created record thread: ID %d thread %p", id, thread); if (pSamplingRate != NULL) *pSamplingRate = reqSamplingRate; @@ -2235,6 +2246,7 @@ int comparEntry(const void *p1, const void *p2) return strcmp(((const Entry *) p1)->mName, ((const Entry *) p2)->mName); } +#ifdef TEE_SINK void AudioFlinger::dumpTee(int fd, const sp& 
source, audio_io_handle_t id) { NBAIO_Source *teeSource = source.get(); @@ -2350,6 +2362,7 @@ void AudioFlinger::dumpTee(int fd, const sp& source, audio_io_hand } } } +#endif // ---------------------------------------------------------------------------- diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 44bd260..d0ef922 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -588,11 +588,15 @@ private: status_t closeOutput_nonvirtual(audio_io_handle_t output); status_t closeInput_nonvirtual(audio_io_handle_t input); +#ifdef TEE_SINK // all record threads serially share a common tee sink, which is re-created on format change sp mRecordTeeSink; sp mRecordTeeSource; +#endif public: + +#ifdef TEE_SINK // tee sink, if enabled by property, allows dumpsys to write most recent audio to .wav file static void dumpTee(int fd, const sp& source, audio_io_handle_t id = 0); @@ -611,6 +615,8 @@ public: static const size_t kTeeSinkInputFramesDefault = 0x200000; static const size_t kTeeSinkOutputFramesDefault = 0x200000; static const size_t kTeeSinkTrackFramesDefault = 0x1000; +#endif + }; #undef INCLUDING_FROM_AUDIOFLINGER_H diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 1209ea6..267241f 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2124,6 +2124,7 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud (monoPipe->maxFrames() * 7) / 8 : mNormalFrameCount * 2); mPipeSink = monoPipe; +#ifdef TEE_SINK if (mTeeSinkOutputEnabled) { // create a Pipe to archive a copy of FastMixer's output for dumpsys Pipe *teeSink = new Pipe(mTeeSinkOutputFrames, format); @@ -2137,6 +2138,7 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud ALOG_ASSERT(index == 0); mTeeSource = teeSource; } +#endif // create fast mixer and configure it initially with just one fast track for our submix mFastMixer = new FastMixer(); @@ -2163,7 +2165,9 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud state->mColdFutexAddr = &mFastMixerFutex; state->mColdGen++; state->mDumpState = &mFastMixerDumpState; +#ifdef TEE_SINK state->mTeeSink = mTeeSink.get(); +#endif mFastMixerNBLogWriter = audioFlinger->newWriter_l(kFastMixerLogSize, "FastMixer"); state->mNBLogWriter = mFastMixerNBLogWriter.get(); sq->end(); @@ -3076,8 +3080,10 @@ void AudioFlinger::MixerThread::dumpInternals(int fd, const Vector& ar mutatorCopy.dump(fd); #endif +#ifdef TEE_SINK // Write the tee output to a .wav file dumpTee(fd, mTeeSource, mId); +#endif #ifdef AUDIO_WATCHDOG if (mAudioWatchdog != 0) { @@ -3574,16 +3580,21 @@ AudioFlinger::RecordThread::RecordThread(const sp& audioFlinger, audio_channel_mask_t channelMask, audio_io_handle_t id, audio_devices_t outDevice, - audio_devices_t inDevice, - const sp& teeSink) : + audio_devices_t inDevice +#ifdef TEE_SINK + , const sp& teeSink +#endif + ) : ThreadBase(audioFlinger, id, outDevice, inDevice, RECORD), mInput(input), mResampler(NULL), mRsmpOutBuffer(NULL), mRsmpInBuffer(NULL), // mRsmpInIndex and mInputBytes set by readInputParameters() mReqChannelCount(popcount(channelMask)), - mReqSampleRate(sampleRate), + mReqSampleRate(sampleRate) // mBytesRead is only meaningful while active, and so is cleared in start() // (but might be better to also clear here for dump?) 
- mTeeSink(teeSink) +#ifdef TEE_SINK + , mTeeSink(teeSink) +#endif { snprintf(mName, kNameLength, "AudioIn_%X", id); @@ -3740,10 +3751,13 @@ bool AudioFlinger::RecordThread::threadLoop() mRsmpInIndex = mFrameCount; framesOut = 0; buffer.frameCount = 0; - } else if (mTeeSink != 0) { + } +#ifdef TEE_SINK + else if (mTeeSink != 0) { (void) mTeeSink->write(readInto, mBytesRead >> Format_frameBitShift(mTeeSink->format())); } +#endif } } } else { diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index fa1e336..caac882 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -542,9 +542,11 @@ private: sp mPipeSink; // The current sink for the normal mixer to write it's (sub)mix, mOutputSink or mPipeSink sp mNormalSink; +#ifdef TEE_SINK // For dumpsys sp mTeeSink; sp mTeeSource; +#endif uint32_t mScreenState; // cached copy of gScreenState static const size_t kFastMixerLogSize = 8 * 1024; sp mFastMixerNBLogWriter; @@ -703,8 +705,11 @@ public: audio_channel_mask_t channelMask, audio_io_handle_t id, audio_devices_t outDevice, - audio_devices_t inDevice, - const sp& teeSink); + audio_devices_t inDevice +#ifdef TEE_SINK + , const sp& teeSink +#endif + ); virtual ~RecordThread(); // no addTrack_l ? diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 724ce38..dad9114 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -141,29 +141,33 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mBufferEnd = (uint8_t *)mBuffer + bufferSize; mServerProxy = new ServerProxy(mCblk, mBuffer, frameCount, mFrameSize, isOut); +#ifdef TEE_SINK if (mTeeSinkTrackEnabled) { - NBAIO_Format pipeFormat = Format_from_SR_C(mSampleRate, mChannelCount); - if (pipeFormat != Format_Invalid) { - Pipe *pipe = new Pipe(mTeeSinkTrackFrames, pipeFormat); - size_t numCounterOffers = 0; - const NBAIO_Format offers[1] = {pipeFormat}; - ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers); - ALOG_ASSERT(index == 0); - PipeReader *pipeReader = new PipeReader(*pipe); - numCounterOffers = 0; - index = pipeReader->negotiate(offers, 1, NULL, numCounterOffers); - ALOG_ASSERT(index == 0); - mTeeSink = pipe; - mTeeSource = pipeReader; - } + NBAIO_Format pipeFormat = Format_from_SR_C(mSampleRate, mChannelCount); + if (pipeFormat != Format_Invalid) { + Pipe *pipe = new Pipe(mTeeSinkTrackFrames, pipeFormat); + size_t numCounterOffers = 0; + const NBAIO_Format offers[1] = {pipeFormat}; + ssize_t index = pipe->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + PipeReader *pipeReader = new PipeReader(*pipe); + numCounterOffers = 0; + index = pipeReader->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + mTeeSink = pipe; + mTeeSource = pipeReader; + } } +#endif } } AudioFlinger::ThreadBase::TrackBase::~TrackBase() { +#ifdef TEE_SINK dumpTee(-1, mTeeSource, mId); +#endif // delete the proxy before deleting the shared memory it refers to, to avoid dangling reference delete mServerProxy; if (mCblk != NULL) { @@ -189,9 +193,11 @@ AudioFlinger::ThreadBase::TrackBase::~TrackBase() // This implementation of releaseBuffer() is used by Track and RecordTrack, but not TimedTrack void AudioFlinger::ThreadBase::TrackBase::releaseBuffer(AudioBufferProvider::Buffer* buffer) { +#ifdef TEE_SINK if (mTeeSink != 0) { (void) mTeeSink->write(buffer->raw, buffer->frameCount); } +#endif buffer->raw = NULL; mStepCount = buffer->frameCount; -- cgit v1.1 From 
c073ba525404f3416c2824c435d3d926a9892f1b Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 26 Feb 2013 14:32:34 -0800 Subject: camera_client: refactor Camera/ProCamera commonalities into BasicCamera Change-Id: Ie10a4094522d49683657665fe94ab0b7ccd280e9 --- camera/Android.mk | 1 + camera/Camera.cpp | 126 +---------- camera/CameraBase.cpp | 237 +++++++++++++++++++++ camera/ICameraService.cpp | 15 +- camera/ProCamera.cpp | 153 +++---------- include/camera/Camera.h | 81 ++----- include/camera/CameraBase.h | 117 ++++++++++ include/camera/ICameraService.h | 13 +- include/camera/ProCamera.h | 65 +++--- services/camera/libcameraservice/CameraService.cpp | 6 +- services/camera/libcameraservice/CameraService.h | 7 +- 11 files changed, 464 insertions(+), 357 deletions(-) create mode 100644 camera/CameraBase.cpp create mode 100644 include/camera/CameraBase.h diff --git a/camera/Android.mk b/camera/Android.mk index 3e7e5a5..3f30079 100644 --- a/camera/Android.mk +++ b/camera/Android.mk @@ -16,6 +16,7 @@ LOCAL_SRC_FILES:= \ IProCameraUser.cpp \ IProCameraCallbacks.cpp \ ProCamera.cpp \ + CameraBase.cpp \ LOCAL_SHARED_LIBRARIES := \ libcutils \ diff --git a/camera/Camera.cpp b/camera/Camera.cpp index d8dc2a5..f417c90 100644 --- a/camera/Camera.cpp +++ b/camera/Camera.cpp @@ -27,46 +27,16 @@ #include #include #include +#include #include #include namespace android { -// client singleton for camera service binder interface -Mutex Camera::mLock; -sp Camera::mCameraService; -sp Camera::mDeathNotifier; - -// establish binder interface to camera service -const sp& Camera::getCameraService() +Camera::Camera(int cameraId) + : CameraBase(cameraId) { - Mutex::Autolock _l(mLock); - if (mCameraService.get() == 0) { - sp sm = defaultServiceManager(); - sp binder; - do { - binder = sm->getService(String16("media.camera")); - if (binder != 0) - break; - ALOGW("CameraService not published, waiting..."); - usleep(500000); // 0.5 s - } while(true); - if (mDeathNotifier == NULL) { - mDeathNotifier = new DeathNotifier(); - } - binder->linkToDeath(mDeathNotifier); - mCameraService = interface_cast(binder); - } - ALOGE_IF(mCameraService==0, "no CameraService!?"); - return mCameraService; -} - -// --------------------------------------------------------------------------- - -Camera::Camera() -{ - init(); } // construct a camera client from an existing camera remote @@ -78,7 +48,7 @@ sp Camera::create(const sp& camera) return 0; } - sp c = new Camera(); + sp c = new Camera(-1); if (camera->connect(c) == NO_ERROR) { c->mStatus = NO_ERROR; c->mCamera = camera; @@ -88,11 +58,6 @@ sp Camera::create(const sp& camera) return 0; } -void Camera::init() -{ - mStatus = UNKNOWN_ERROR; -} - Camera::~Camera() { // We don't need to call disconnect() here because if the CameraService @@ -103,47 +68,10 @@ Camera::~Camera() // deadlock if we call any method of ICamera here. 
} -int32_t Camera::getNumberOfCameras() -{ - const sp& cs = getCameraService(); - if (cs == 0) return 0; - return cs->getNumberOfCameras(); -} - -status_t Camera::getCameraInfo(int cameraId, - struct CameraInfo* cameraInfo) { - const sp& cs = getCameraService(); - if (cs == 0) return UNKNOWN_ERROR; - return cs->getCameraInfo(cameraId, cameraInfo); -} - sp Camera::connect(int cameraId, const String16& clientPackageName, int clientUid) { - ALOGV("connect"); - sp c = new Camera(); - sp cl = c; - const sp& cs = getCameraService(); - if (cs != 0) { - c->mCamera = cs->connect(cl, cameraId, clientPackageName, clientUid); - } - if (c->mCamera != 0) { - c->mCamera->asBinder()->linkToDeath(c); - c->mStatus = NO_ERROR; - } else { - c.clear(); - } - return c; -} - -void Camera::disconnect() -{ - ALOGV("disconnect"); - if (mCamera != 0) { - mCamera->disconnect(); - mCamera->asBinder()->unlinkToDeath(this); - mCamera = 0; - } + return CameraBaseT::connect(cameraId, clientPackageName, clientUid); } status_t Camera::reconnect() @@ -154,11 +82,6 @@ status_t Camera::reconnect() return c->connect(this); } -sp Camera::remote() -{ - return mCamera; -} - status_t Camera::lock() { sp c = mCamera; @@ -353,28 +276,14 @@ void Camera::setPreviewCallbackFlags(int flag) // callback from camera service void Camera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) { - sp listener; - { - Mutex::Autolock _l(mLock); - listener = mListener; - } - if (listener != NULL) { - listener->notify(msgType, ext1, ext2); - } + return CameraBaseT::notifyCallback(msgType, ext1, ext2); } // callback from camera service when frame or image is ready void Camera::dataCallback(int32_t msgType, const sp& dataPtr, camera_frame_metadata_t *metadata) { - sp listener; - { - Mutex::Autolock _l(mLock); - listener = mListener; - } - if (listener != NULL) { - listener->postData(msgType, dataPtr, metadata); - } + return CameraBaseT::dataCallback(msgType, dataPtr, metadata); } // callback from camera service when timestamped frame is ready @@ -393,31 +302,12 @@ void Camera::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp< return; } - sp listener; - { - Mutex::Autolock _l(mLock); - listener = mListener; - } - if (listener != NULL) { - listener->postDataTimestamp(timestamp, msgType, dataPtr); - } else { + if (!CameraBaseT::dataCallbackTimestamp(timestamp, msgType, dataPtr)) { ALOGW("No listener was set. Drop a recording frame."); releaseRecordingFrame(dataPtr); } } -void Camera::binderDied(const wp& who) { - ALOGW("ICamera died"); - notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_SERVER_DIED, 0); -} - -void Camera::DeathNotifier::binderDied(const wp& who) { - ALOGV("binderDied"); - Mutex::Autolock _l(Camera::mLock); - Camera::mCameraService.clear(); - ALOGW("Camera server died!"); -} - sp Camera::getRecordingProxy() { ALOGV("getProxy"); return new RecordingProxy(this); diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp new file mode 100644 index 0000000..9b0e6bf --- /dev/null +++ b/camera/CameraBase.cpp @@ -0,0 +1,237 @@ +/* +** +** Copyright (C) 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "CameraBase" +#include +#include +#include + +#include +#include +#include + +#include +#include + +// needed to instantiate +#include +#include + +#include + +namespace android { + +namespace { + sp gCameraService; + const int kCameraServicePollDelay = 500000; // 0.5s + const char* kCameraServiceName = "media.camera"; + + Mutex gLock; + + class DeathNotifier : public IBinder::DeathRecipient + { + public: + DeathNotifier() { + } + + virtual void binderDied(const wp& who) { + ALOGV("binderDied"); + Mutex::Autolock _l(gLock); + gCameraService.clear(); + ALOGW("Camera service died!"); + } + }; + + sp gDeathNotifier; +}; // namespace anonymous + +/////////////////////////////////////////////////////////// +// CameraBase definition +/////////////////////////////////////////////////////////// + +// establish binder interface to camera service +template +const sp& CameraBase::getCameraService() +{ + Mutex::Autolock _l(gLock); + if (gCameraService.get() == 0) { + sp sm = defaultServiceManager(); + sp binder; + do { + binder = sm->getService(String16(kCameraServiceName)); + if (binder != 0) { + break; + } + ALOGW("CameraService not published, waiting..."); + usleep(kCameraServicePollDelay); + } while(true); + if (gDeathNotifier == NULL) { + gDeathNotifier = new DeathNotifier(); + } + binder->linkToDeath(gDeathNotifier); + gCameraService = interface_cast(binder); + } + ALOGE_IF(gCameraService == 0, "no CameraService!?"); + return gCameraService; +} + +template +sp CameraBase::connect(int cameraId, + const String16& clientPackageName, + int clientUid) +{ + ALOGV("%s: connect", __FUNCTION__); + sp c = new TCam(cameraId); + sp cl = c; + const sp& cs = getCameraService(); + if (cs != 0) { + c->mCamera = cs->connect(cl, cameraId, clientPackageName, clientUid); + } + if (c->mCamera != 0) { + c->mCamera->asBinder()->linkToDeath(c); + c->mStatus = NO_ERROR; + } else { + c.clear(); + } + return c; +} + +template +void CameraBase::disconnect() +{ + ALOGV("%s: disconnect", __FUNCTION__); + if (mCamera != 0) { + mCamera->disconnect(); + mCamera->asBinder()->unlinkToDeath(this); + mCamera = 0; + } + ALOGV("%s: disconnect (done)", __FUNCTION__); +} + +template +CameraBase::CameraBase(int cameraId) : + mStatus(UNKNOWN_ERROR), + mCameraId(cameraId) +{ +} + +template +CameraBase::~CameraBase() +{ +} + +template +sp CameraBase::remote() +{ + return mCamera; +} + +template +status_t CameraBase::getStatus() +{ + return mStatus; +} + +template +void CameraBase::binderDied(const wp& who) { + ALOGW("mediaserver's remote binder Camera object died"); + notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_SERVER_DIED, /*ext2*/0); +} + +template +void CameraBase::setListener(const sp& listener) +{ + Mutex::Autolock _l(mLock); + mListener = listener; +} + +// callback from camera service +template +void CameraBase::notifyCallback(int32_t msgType, + int32_t ext1, + int32_t ext2) +{ + sp listener; + { + Mutex::Autolock _l(mLock); + listener = mListener; + } + if (listener != NULL) { + listener->notify(msgType, ext1, ext2); + } +} + +// callback from camera service when 
frame or image is ready +template +void CameraBase::dataCallback(int32_t msgType, + const sp& dataPtr, + camera_frame_metadata *metadata) +{ + sp listener; + { + Mutex::Autolock _l(mLock); + listener = mListener; + } + if (listener != NULL) { + listener->postData(msgType, dataPtr, metadata); + } +} + +// callback from camera service when timestamped frame is ready +template +bool CameraBase::dataCallbackTimestamp(nsecs_t timestamp, + int32_t msgType, + const sp& dataPtr) +{ + sp listener; + { + Mutex::Autolock _l(mLock); + listener = mListener; + } + if (listener != NULL) { + listener->postDataTimestamp(timestamp, msgType, dataPtr); + return true; + } + + return false; +} + +template +int CameraBase::getNumberOfCameras() { + const sp cs = getCameraService(); + + if (!cs.get()) { + // as required by the public Java APIs + return 0; + } + return cs->getNumberOfCameras(); +} + +// this can be in BaseCamera but it should be an instance method +template +status_t CameraBase::getCameraInfo(int cameraId, + struct CameraInfo* cameraInfo) { + const sp& cs = getCameraService(); + if (cs == 0) return UNKNOWN_ERROR; + return cs->getCameraInfo(cameraId, cameraInfo); +} + +template class CameraBase; +template class CameraBase; + +} // namespace android diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp index fdf20ff..b54d63f 100644 --- a/camera/ICameraService.cpp +++ b/camera/ICameraService.cpp @@ -23,6 +23,10 @@ #include #include +#include +#include +#include +#include namespace android { @@ -70,12 +74,15 @@ public: } // connect to camera service (pro client) - virtual sp connect(const sp& cameraCb, int cameraId) + virtual sp connect(const sp& cameraCb, int cameraId, + const String16 &clientPackageName, int clientUid) { Parcel data, reply; data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); data.writeStrongBinder(cameraCb->asBinder()); data.writeInt32(cameraId); + data.writeString16(clientPackageName); + data.writeInt32(clientUid); remote()->transact(BnCameraService::CONNECT_PRO, data, &reply); return interface_cast(reply.readStrongBinder()); } @@ -119,7 +126,11 @@ status_t BnCameraService::onTransact( case CONNECT_PRO: { CHECK_INTERFACE(ICameraService, data, reply); sp cameraClient = interface_cast(data.readStrongBinder()); - sp camera = connect(cameraClient, data.readInt32()); + int32_t cameraId = data.readInt32(); + const String16 clientName = data.readString16(); + int32_t clientUid = data.readInt32(); + sp camera = connect(cameraClient, cameraId, + clientName, clientUid); reply->writeStrongBinder(camera->asBinder()); return NO_ERROR; } break; diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 7c66d62..13ba07c 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -31,71 +31,19 @@ #include #include -#include #include namespace android { -// client singleton for camera service binder interface -Mutex ProCamera::mLock; -sp ProCamera::mCameraService; -sp ProCamera::mDeathNotifier; - -// establish binder interface to camera service -const sp& ProCamera::getCameraService() -{ - Mutex::Autolock _l(mLock); - if (mCameraService.get() == 0) { - sp sm = defaultServiceManager(); - sp binder; - do { - binder = sm->getService(String16("media.camera")); - if (binder != 0) - break; - ALOGW("CameraService not published, waiting..."); - usleep(500000); // 0.5 s - } while(true); - if (mDeathNotifier == NULL) { - mDeathNotifier = new DeathNotifier(); - } - binder->linkToDeath(mDeathNotifier); - mCameraService = interface_cast(binder); - } - 
ALOGE_IF(mCameraService==0, "no CameraService!?"); - return mCameraService; -} - sp ProCamera::connect(int cameraId) { - ALOGV("connect"); - sp c = new ProCamera(); - sp cl = c; - const sp& cs = getCameraService(); - if (cs != 0) { - c->mCamera = cs->connect(cl, cameraId); - } - if (c->mCamera != 0) { - c->mCamera->asBinder()->linkToDeath(c); - c->mStatus = NO_ERROR; - } else { - c.clear(); - } - return c; -} - -void ProCamera::disconnect() -{ - ALOGV("%s: disconnect", __FUNCTION__); - if (mCamera != 0) { - mCamera->disconnect(); - mCamera->asBinder()->unlinkToDeath(this); - mCamera = 0; - } - ALOGV("%s: disconnect (done)", __FUNCTION__); + return CameraBaseT::connect(cameraId, String16(), + ICameraService::USE_CALLING_UID); } -ProCamera::ProCamera() +ProCamera::ProCamera(int cameraId) + : CameraBase(cameraId) { } @@ -104,74 +52,28 @@ ProCamera::~ProCamera() } -sp ProCamera::remote() -{ - return mCamera; -} - -void ProCamera::binderDied(const wp& who) { - ALOGW("IProCameraUser died"); - notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_SERVER_DIED, 0); -} - -void ProCamera::DeathNotifier::binderDied(const wp& who) { - ALOGV("binderDied"); - Mutex::Autolock _l(ProCamera::mLock); - ProCamera::mCameraService.clear(); - ALOGW("Camera service died!"); -} - -void ProCamera::setListener(const sp& listener) -{ - Mutex::Autolock _l(mLock); - mListener = listener; -} - +/* IProCameraUser's implementation */ // callback from camera service void ProCamera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) { - sp listener; - { - Mutex::Autolock _l(mLock); - listener = mListener; - } - if (listener != NULL) { - listener->notify(msgType, ext1, ext2); - } + return CameraBaseT::notifyCallback(msgType, ext1, ext2); } // callback from camera service when frame or image is ready void ProCamera::dataCallback(int32_t msgType, const sp& dataPtr, camera_frame_metadata_t *metadata) { - sp listener; - { - Mutex::Autolock _l(mLock); - listener = mListener; - } - if (listener != NULL) { - listener->postData(msgType, dataPtr, metadata); - } + return CameraBaseT::dataCallback(msgType, dataPtr, metadata); } // callback from camera service when timestamped frame is ready void ProCamera::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, - const sp& dataPtr) + const sp& dataPtr) { - sp listener; - { - Mutex::Autolock _l(mLock); - listener = mListener; - } - if (listener != NULL) { - listener->postDataTimestamp(timestamp, msgType, dataPtr); - } else { - ALOGW("No listener was set. 
Drop a recording frame."); - } + CameraBaseT::dataCallbackTimestamp(timestamp, msgType, dataPtr); } -/* IProCameraUser's implementation */ void ProCamera::onLockStatusChanged( IProCameraCallbacks::LockStatus newLockStatus) @@ -291,9 +193,9 @@ status_t ProCamera::deleteStream(int streamId) } status_t ProCamera::createStream(int width, int height, int format, - const sp& surface, - /*out*/ - int* streamId) + const sp& surface, + /*out*/ + int* streamId) { *streamId = -1; @@ -304,14 +206,15 @@ status_t ProCamera::createStream(int width, int height, int format, return BAD_VALUE; } - return createStream(width, height, format, surface->getIGraphicBufferProducer(), + return createStream(width, height, format, + surface->getIGraphicBufferProducer(), streamId); } status_t ProCamera::createStream(int width, int height, int format, - const sp& bufferProducer, - /*out*/ - int* streamId) { + const sp& bufferProducer, + /*out*/ + int* streamId) { *streamId = -1; ALOGV("%s: createStreamT %dx%d (fmt=0x%x)", __FUNCTION__, width, height, @@ -335,10 +238,10 @@ status_t ProCamera::createStream(int width, int height, int format, } status_t ProCamera::createStreamCpu(int width, int height, int format, - int heapCount, - /*out*/ - sp* cpuConsumer, - int* streamId) + int heapCount, + /*out*/ + sp* cpuConsumer, + int* streamId) { ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height, format); @@ -354,8 +257,9 @@ status_t ProCamera::createStreamCpu(int width, int height, int format, sp stc = new Surface( cc->getProducerInterface()); - status_t s = createStream(width, height, format, stc->getIGraphicBufferProducer(), - streamId); + status_t s = createStream(width, height, format, + stc->getIGraphicBufferProducer(), + streamId); if (s != OK) { ALOGE("%s: Failure to create stream %dx%d (fmt=0x%x)", __FUNCTION__, @@ -379,15 +283,6 @@ status_t ProCamera::createStreamCpu(int width, int height, int format, return s; } -int ProCamera::getNumberOfCameras() { - const sp cs = getCameraService(); - - if (!cs.get()) { - return DEAD_OBJECT; - } - return cs->getNumberOfCameras(); -} - camera_metadata* ProCamera::getCameraInfo(int cameraId) { ALOGV("%s: cameraId = %d", __FUNCTION__, cameraId); diff --git a/include/camera/Camera.h b/include/camera/Camera.h index be2b7f4..71c66ce 100644 --- a/include/camera/Camera.h +++ b/include/camera/Camera.h @@ -23,35 +23,13 @@ #include #include #include +#include +#include +#include namespace android { -struct CameraInfo { - /** - * The direction that the camera faces to. It should be CAMERA_FACING_BACK - * or CAMERA_FACING_FRONT. - */ - int facing; - - /** - * The orientation of the camera image. The value is the angle that the - * camera image needs to be rotated clockwise so it shows correctly on the - * display in its natural orientation. It should be 0, 90, 180, or 270. - * - * For example, suppose a device has a naturally tall screen. The - * back-facing camera sensor is mounted in landscape. You are looking at - * the screen. If the top side of the camera sensor is aligned with the - * right edge of the screen in natural orientation, the value should be - * 90. If the top side of a front-facing camera sensor is aligned with the - * right of the screen, the value should be 270. 
- */ - int orientation; -}; - -class ICameraService; -class ICamera; class Surface; -class Mutex; class String8; class String16; @@ -65,32 +43,37 @@ public: virtual void postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp& dataPtr) = 0; }; -class Camera : public BnCameraClient, public IBinder::DeathRecipient +class Camera; + +template <> +struct CameraTraits +{ + typedef CameraListener TCamListener; + typedef ICamera TCamUser; + typedef ICameraClient TCamCallbacks; +}; + +class Camera : + public CameraBase, + public BnCameraClient { public: enum { - USE_CALLING_UID = -1 + USE_CALLING_UID = ICameraService::USE_CALLING_UID }; // construct a camera client from an existing remote static sp create(const sp& camera); - static int32_t getNumberOfCameras(); - static status_t getCameraInfo(int cameraId, - struct CameraInfo* cameraInfo); static sp connect(int cameraId, const String16& clientPackageName, int clientUid); virtual ~Camera(); - void init(); status_t reconnect(); - void disconnect(); status_t lock(); status_t unlock(); - status_t getStatus() { return mStatus; } - // pass the buffered Surface to the camera service status_t setPreviewDisplay(const sp& surface); @@ -151,8 +134,6 @@ public: camera_frame_metadata_t *metadata); virtual void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp& dataPtr); - sp remote(); - class RecordingProxy : public BnCameraRecordingProxy { public: @@ -168,35 +149,13 @@ public: }; protected: - Camera(); + Camera(int cameraId); Camera(const Camera&); Camera& operator=(const Camera); - virtual void binderDied(const wp& who); - - class DeathNotifier: public IBinder::DeathRecipient - { - public: - DeathNotifier() { - } - - virtual void binderDied(const wp& who); - }; - - static sp mDeathNotifier; - - // helper function to obtain camera service handle - static const sp& getCameraService(); - - sp mCamera; - status_t mStatus; - - sp mListener; - sp mRecordingProxyListener; - friend class DeathNotifier; + sp mRecordingProxyListener; - static Mutex mLock; - static sp mCameraService; + friend class CameraBase; }; }; // namespace android diff --git a/include/camera/CameraBase.h b/include/camera/CameraBase.h new file mode 100644 index 0000000..fed28ea --- /dev/null +++ b/include/camera/CameraBase.h @@ -0,0 +1,117 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_BASE_H +#define ANDROID_HARDWARE_CAMERA_BASE_H + +#include +#include + +struct camera_frame_metadata; + +namespace android { + +struct CameraInfo { + /** + * The direction that the camera faces to. It should be CAMERA_FACING_BACK + * or CAMERA_FACING_FRONT. + */ + int facing; + + /** + * The orientation of the camera image. The value is the angle that the + * camera image needs to be rotated clockwise so it shows correctly on the + * display in its natural orientation. It should be 0, 90, 180, or 270. + * + * For example, suppose a device has a naturally tall screen. 
The + * back-facing camera sensor is mounted in landscape. You are looking at + * the screen. If the top side of the camera sensor is aligned with the + * right edge of the screen in natural orientation, the value should be + * 90. If the top side of a front-facing camera sensor is aligned with the + * right of the screen, the value should be 270. + */ + int orientation; +}; + +template +struct CameraTraits { +}; + +template > +class CameraBase : public IBinder::DeathRecipient +{ +public: + typedef typename TCamTraits::TCamListener TCamListener; + typedef typename TCamTraits::TCamUser TCamUser; + typedef typename TCamTraits::TCamCallbacks TCamCallbacks; + + static sp connect(int cameraId, + const String16& clientPackageName, + int clientUid); + virtual void disconnect(); + + void setListener(const sp& listener); + + static int getNumberOfCameras(); + + static status_t getCameraInfo(int cameraId, + /*out*/ + struct CameraInfo* cameraInfo); + + sp remote(); + + // Status is set to 'UNKNOWN_ERROR' after successful (re)connection + status_t getStatus(); + +protected: + CameraBase(int cameraId); + virtual ~CameraBase(); + + //////////////////////////////////////////////////////// + // TCamCallbacks implementation + //////////////////////////////////////////////////////// + virtual void notifyCallback(int32_t msgType, int32_t ext, + int32_t ext2); + virtual void dataCallback(int32_t msgType, + const sp& dataPtr, + camera_frame_metadata *metadata); + bool dataCallbackTimestamp(nsecs_t timestamp, + int32_t msgType, + const sp& dataPtr); + + //////////////////////////////////////////////////////// + // Common instance variables + //////////////////////////////////////////////////////// + Mutex mLock; + + virtual void binderDied(const wp& who); + + // helper function to obtain camera service handle + static const sp& getCameraService(); + + sp mCamera; + status_t mStatus; + + sp mListener; + + const int mCameraId; + + typedef CameraBase CameraBaseT; +}; + +}; // namespace android + +#endif diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h index aa64243..ef2b685 100644 --- a/include/camera/ICameraService.h +++ b/include/camera/ICameraService.h @@ -21,12 +21,13 @@ #include #include -#include -#include -#include - namespace android { +class ICamera; +class ICameraClient; +class IProCameraUser; +class IProCameraCallbacks; + class ICameraService : public IInterface { public: @@ -58,7 +59,9 @@ public: int clientUid) = 0; virtual sp connect(const sp& cameraCb, - int cameraId) = 0; + int cameraId, + const String16& clientPackageName, + int clientUid) = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index cd2772c..b228145 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -27,9 +27,13 @@ #include #include +#include + #include #include +#include + struct camera_metadata; namespace android { @@ -53,6 +57,8 @@ public: // OnBufferReceived and OnRequestReceived can come in with any order, // use android.sensor.timestamp and LockedBuffer.timestamp to correlate them + // TODO: remove onBufferReceived + // A new frame buffer has been received for this stream. 
// -- This callback only fires for createStreamCpu streams // -- Use buf.timestamp to correlate with metadata's @@ -67,22 +73,36 @@ public: */ virtual void onResultReceived(int32_t frameId, camera_metadata* result) = 0; + // TODO: make onFrameAvailable pure virtual // A new frame buffer has been received for this stream. // -- This callback only fires for createStreamCpu streams // -- Use buf.timestamp to correlate with metadata's android.sensor.timestamp // -- The buffer should be accessed with CpuConsumer::lockNextBuffer // and CpuConsumer::unlockBuffer - virtual void onFrameAvailable(int streamId, - const sp& cpuConsumer) { + virtual void onFrameAvailable(int /*streamId*/, + const sp& /*cpuConsumer*/) { } + // TODO: Remove useOnFrameAvailable virtual bool useOnFrameAvailable() { return false; } }; -class ProCamera : public BnProCameraCallbacks, public IBinder::DeathRecipient +class ProCamera; + +template <> +struct CameraTraits +{ + typedef ProCameraListener TCamListener; + typedef IProCameraUser TCamUser; + typedef IProCameraCallbacks TCamCallbacks; +}; + +class ProCamera : + public CameraBase, + public BnProCameraCallbacks { public: /** @@ -91,11 +111,8 @@ public: * to be acquired with exclusive[Try]Lock. */ static sp connect(int cameraId); - virtual void disconnect(); virtual ~ProCamera(); - void setListener(const sp& listener); - /** * Exclusive Locks: * - We may request exclusive access to a camera if no other @@ -187,9 +204,6 @@ public: /*out*/ camera_metadata** request) const; - // Get number of cameras - static int getNumberOfCameras(); - // Get static camera metadata camera_metadata* getCameraInfo(int cameraId); @@ -222,8 +236,6 @@ public: // BAD_VALUE - invalid streamId or count passed int dropFrameBuffer(int streamId, int count); - sp remote(); - protected: //////////////////////////////////////////////////////// // IProCameraCallbacks implementation @@ -241,35 +253,8 @@ protected: virtual void onResultReceived(int32_t frameId, camera_metadata* result); - - class DeathNotifier: public IBinder::DeathRecipient - { - public: - DeathNotifier() { - } - - virtual void binderDied(const wp& who); - }; - private: - ProCamera(); - - virtual void binderDied(const wp& who); - - // helper function to obtain camera service handle - static const sp& getCameraService(); - - static sp mDeathNotifier; - - sp mCamera; - status_t mStatus; - - sp mListener; - - friend class DeathNotifier; - - static Mutex mLock; - static sp mCameraService; + ProCamera(int cameraId); class ProFrameListener : public CpuConsumer::FrameAvailableListener { public: @@ -324,7 +309,7 @@ private: StreamInfo& getStreamInfo(int streamId); - + friend class CameraBase; }; }; // namespace android diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index ec1c3f0..1a78b53 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -269,10 +269,14 @@ sp CameraService::connect( sp CameraService::connect( const sp& cameraCb, - int cameraId) + int cameraId, + const String16& clientPackageName, + int clientUid) { int callingPid = getCallingPid(); + // TODO: use clientPackageName and clientUid with appOpsMangr + LOG1("CameraService::connectPro E (pid %d, id %d)", callingPid, cameraId); if (!mModule) { diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index b017505..d93aa73 100644 --- a/services/camera/libcameraservice/CameraService.h +++ 
b/services/camera/libcameraservice/CameraService.h @@ -25,6 +25,11 @@ #include #include +#include +#include +#include +#include + /* This needs to be increased if we can have more cameras */ #define MAX_CAMERAS 2 @@ -60,7 +65,7 @@ public: virtual sp connect(const sp& cameraClient, int cameraId, const String16& clientPackageName, int clientUid); virtual sp connect(const sp& cameraCb, - int cameraId); + int cameraId, const String16& clientPackageName, int clientUid); // Extra permissions checks virtual status_t onTransact(uint32_t code, const Parcel& data, -- cgit v1.1 From 409e3749a5627f1b360feb1479fcd341067a90b8 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 27 Feb 2013 09:39:39 -0800 Subject: Revert "Logging to investigate a crash" This reverts commit f0ff908da019a44115109f1b4d1b6864b35a8a29. --- include/media/AudioBufferProvider.h | 12 +----------- services/audioflinger/AudioMixer.cpp | 6 ------ 2 files changed, 1 insertion(+), 17 deletions(-) diff --git a/include/media/AudioBufferProvider.h b/include/media/AudioBufferProvider.h index 865ed7e..43e4de7 100644 --- a/include/media/AudioBufferProvider.h +++ b/include/media/AudioBufferProvider.h @@ -36,11 +36,8 @@ public: size_t frameCount; }; -protected: - AudioBufferProvider() : mValid(kValid) { } - virtual ~AudioBufferProvider() { mValid = kDead; } + virtual ~AudioBufferProvider() {} -public: // value representing an invalid presentation timestamp static const int64_t kInvalidPTS = 0x7FFFFFFFFFFFFFFFLL; // is too painful @@ -50,13 +47,6 @@ public: virtual status_t getNextBuffer(Buffer* buffer, int64_t pts = kInvalidPTS) = 0; virtual void releaseBuffer(Buffer* buffer) = 0; - - int getValid() const { return mValid; } - static const int kValid = 'GOOD'; - static const int kDead = 'DEAD'; - -private: - int mValid; }; // ---------------------------------------------------------------------------- diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 2a8a955..17b6a8a 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -1113,12 +1113,6 @@ void AudioMixer::process__genericNoResampling(state_t* state, int64_t pts) e0 &= ~(1<tracks[i]; t.buffer.frameCount = state->frameCount; - int valid = t.bufferProvider->getValid(); - if (valid != AudioBufferProvider::kValid) { - ALOGE("invalid bufferProvider=%p name=%d frameCount=%d valid=%#x enabledTracks=%#x", - t.bufferProvider, i, t.buffer.frameCount, valid, enabledTracks); - // expect to crash - } t.bufferProvider->getNextBuffer(&t.buffer, pts); t.frameCount = t.buffer.frameCount; t.in = t.buffer.raw; -- cgit v1.1 From b99c5b8eebb35133a08c46b015624bd4c4a6c477 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Wed, 6 Feb 2013 17:20:07 -0800 Subject: Camera: Start of support for version 3.0 of camera device HAL - Refactor common CameraDevice interface out of Camera2Device - Initial skeleton only for Camera3Device Change-Id: I097cc76e2ad102a51712ac114235163245f5482c --- services/camera/libcameraservice/Android.mk | 2 + services/camera/libcameraservice/Camera2Client.cpp | 28 +- services/camera/libcameraservice/Camera2Client.h | 20 +- services/camera/libcameraservice/Camera2Device.cpp | 65 ++-- services/camera/libcameraservice/Camera2Device.h | 199 ++--------- services/camera/libcameraservice/Camera3Device.cpp | 375 +++++++++++++++++++++ services/camera/libcameraservice/Camera3Device.h | 133 ++++++++ services/camera/libcameraservice/CameraClient.h | 5 + .../camera/libcameraservice/CameraDeviceBase.cpp | 30 ++ 
.../camera/libcameraservice/CameraDeviceBase.h | 204 +++++++++++ .../libcameraservice/CameraHardwareInterface.h | 19 +- services/camera/libcameraservice/CameraService.cpp | 4 +- .../libcameraservice/camera2/CallbackProcessor.cpp | 6 +- .../libcameraservice/camera2/FrameProcessor.cpp | 4 +- .../libcameraservice/camera2/JpegProcessor.cpp | 6 +- .../camera2/StreamingProcessor.cpp | 12 +- .../libcameraservice/camera2/ZslProcessor.cpp | 12 +- .../camera/libcameraservice/camera2/ZslProcessor.h | 4 +- 18 files changed, 881 insertions(+), 247 deletions(-) create mode 100644 services/camera/libcameraservice/Camera3Device.cpp create mode 100644 services/camera/libcameraservice/Camera3Device.h create mode 100644 services/camera/libcameraservice/CameraDeviceBase.cpp create mode 100644 services/camera/libcameraservice/CameraDeviceBase.h diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index f76c861..d6ad889 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -11,7 +11,9 @@ LOCAL_SRC_FILES:= \ CameraClient.cpp \ Camera2Client.cpp \ ProCamera2Client.cpp \ + CameraDeviceBase.cpp \ Camera2Device.cpp \ + Camera3Device.cpp \ camera2/Parameters.cpp \ camera2/FrameProcessor.cpp \ camera2/StreamingProcessor.cpp \ diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index 38d6949..8295905 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -25,6 +25,8 @@ #include #include "camera2/Parameters.h" #include "Camera2Client.h" +#include "Camera2Device.h" +#include "Camera3Device.h" #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__); #define ALOG2(...) 
ALOGD_IF(gLogLevel >= 2, __VA_ARGS__); @@ -45,7 +47,8 @@ Camera2Client::Camera2Client(const sp& cameraService, int cameraFacing, int clientPid, uid_t clientUid, - int servicePid): + int servicePid, + int deviceVersion): Client(cameraService, cameraClient, clientPackageName, cameraId, cameraFacing, clientPid, clientUid, servicePid), mSharedCameraClient(cameraClient), @@ -54,7 +57,20 @@ Camera2Client::Camera2Client(const sp& cameraService, ATRACE_CALL(); ALOGI("Camera %d: Opened", cameraId); - mDevice = new Camera2Device(cameraId); + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_2_0: + mDevice = new Camera2Device(cameraId); + break; + case CAMERA_DEVICE_API_VERSION_3_0: + mDevice = new Camera3Device(cameraId); + break; + default: + ALOGE("Camera %d: Unknown HAL device version %d", + cameraId, deviceVersion); + mDevice = NULL; + break; + } + SharedParameters::Lock l(mParameters); l.mParameters.state = Parameters::DISCONNECTED; @@ -81,6 +97,12 @@ status_t Camera2Client::initialize(camera_module_t *module) return res; } + if (mDevice == NULL) { + ALOGE("%s: Camera %d: No device connected", + __FUNCTION__, mCameraId); + return NO_INIT; + } + res = mDevice->initialize(module); if (res != OK) { ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", @@ -1465,7 +1487,7 @@ int Camera2Client::getCameraId() const { return mCameraId; } -const sp& Camera2Client::getCameraDevice() { +const sp& Camera2Client::getCameraDevice() { return mDevice; } diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h index 173b65e..80b88f4 100644 --- a/services/camera/libcameraservice/Camera2Client.h +++ b/services/camera/libcameraservice/Camera2Client.h @@ -17,7 +17,7 @@ #ifndef ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_H #define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_H -#include "Camera2Device.h" +#include "CameraDeviceBase.h" #include "CameraService.h" #include "camera2/Parameters.h" #include "camera2/FrameProcessor.h" @@ -31,12 +31,12 @@ namespace android { class IMemory; /** - * Implements the android.hardware.camera API on top of - * camera device HAL version 2. + * Interface between android.hardware.Camera API and Camera HAL device for versions + * CAMERA_DEVICE_API_VERSION_2_0 and 3_0. 
*/ class Camera2Client : public CameraService::Client, - public Camera2Device::NotificationListener + public CameraDeviceBase::NotificationListener { public: /** @@ -77,7 +77,9 @@ public: int cameraFacing, int clientPid, uid_t clientUid, - int servicePid); + int servicePid, + int deviceVersion); + virtual ~Camera2Client(); status_t initialize(camera_module_t *module); @@ -85,7 +87,7 @@ public: virtual status_t dump(int fd, const Vector& args); /** - * Interface used by Camera2Device + * Interface used by CameraDeviceBase */ virtual void notifyError(int errorCode, int arg1, int arg2); @@ -99,7 +101,7 @@ public: */ int getCameraId() const; - const sp& getCameraDevice(); + const sp& getCameraDevice(); const sp& getCameraService(); camera2::SharedParameters& getParameters(); @@ -211,9 +213,9 @@ private: bool mAfInMotion; - /** Camera2Device instance wrapping HAL2 entry */ + /** CameraDevice instance, wraps HAL camera device */ - sp mDevice; + sp mDevice; /** Utility members */ diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/Camera2Device.cpp index 921c8fc..81e58ca 100644 --- a/services/camera/libcameraservice/Camera2Device.cpp +++ b/services/camera/libcameraservice/Camera2Device.cpp @@ -34,7 +34,7 @@ namespace android { Camera2Device::Camera2Device(int id): mId(id), - mDevice(NULL) + mHal2Device(NULL) { ATRACE_CALL(); ALOGV("%s: Created device for camera %d", __FUNCTION__, id); @@ -51,7 +51,7 @@ status_t Camera2Device::initialize(camera_module_t *module) { ATRACE_CALL(); ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId); - if (mDevice != NULL) { + if (mHal2Device != NULL) { ALOGE("%s: Already initialized!", __FUNCTION__); return INVALID_OPERATION; } @@ -131,7 +131,7 @@ status_t Camera2Device::initialize(camera_module_t *module) } mDeviceInfo = info.static_camera_characteristics; - mDevice = device; + mHal2Device = device; return OK; } @@ -139,23 +139,23 @@ status_t Camera2Device::initialize(camera_module_t *module) status_t Camera2Device::disconnect() { ATRACE_CALL(); status_t res = OK; - if (mDevice) { + if (mHal2Device) { ALOGV("%s: Closing device for camera %d", __FUNCTION__, mId); - int inProgressCount = mDevice->ops->get_in_progress_count(mDevice); + int inProgressCount = mHal2Device->ops->get_in_progress_count(mHal2Device); if (inProgressCount > 0) { ALOGW("%s: Closing camera device %d with %d requests in flight!", __FUNCTION__, mId, inProgressCount); } mReprocessStreams.clear(); mStreams.clear(); - res = mDevice->common.close(&mDevice->common); + res = mHal2Device->common.close(&mHal2Device->common); if (res != OK) { ALOGE("%s: Could not close camera %d: %s (%d)", __FUNCTION__, mId, strerror(-res), res); } - mDevice = NULL; + mHal2Device = NULL; ALOGV("%s: Shutdown complete", __FUNCTION__); } return res; @@ -197,7 +197,7 @@ status_t Camera2Device::dump(int fd, const Vector& args) { write(fd, result.string(), result.size()); status_t res; - res = mDevice->ops->dump(mDevice, fd); + res = mHal2Device->ops->dump(mHal2Device, fd); return res; } @@ -240,7 +240,7 @@ status_t Camera2Device::createStream(sp consumer, status_t res; ALOGV("%s: E", __FUNCTION__); - sp stream = new StreamAdapter(mDevice); + sp stream = new StreamAdapter(mHal2Device); res = stream->connectToDevice(consumer, width, height, format, size); if (res != OK) { @@ -276,7 +276,7 @@ status_t Camera2Device::createReprocessStreamFromStream(int outputId, int *id) { return BAD_VALUE; } - sp stream = new ReprocessStreamAdapter(mDevice); + sp stream = new 
ReprocessStreamAdapter(mHal2Device); res = stream->connectToDevice((*streamI)); if (res != OK) { @@ -401,8 +401,8 @@ status_t Camera2Device::createDefaultRequest(int templateId, status_t err; ALOGV("%s: E", __FUNCTION__); camera_metadata_t *rawRequest; - err = mDevice->ops->construct_default_request( - mDevice, templateId, &rawRequest); + err = mHal2Device->ops->construct_default_request( + mHal2Device, templateId, &rawRequest); request->acquire(rawRequest); return err; } @@ -417,12 +417,12 @@ status_t Camera2Device::waitUntilDrained() { // TODO: Set up notifications from HAL, instead of sleeping here uint32_t totalTime = 0; - while (mDevice->ops->get_in_progress_count(mDevice) > 0) { + while (mHal2Device->ops->get_in_progress_count(mHal2Device) > 0) { usleep(kSleepTime); totalTime += kSleepTime; if (totalTime > kMaxSleepTime) { ALOGE("%s: Waited %d us, %d requests still in flight", __FUNCTION__, - mDevice->ops->get_in_progress_count(mDevice), totalTime); + mHal2Device->ops->get_in_progress_count(mHal2Device), totalTime); return TIMED_OUT; } } @@ -433,7 +433,7 @@ status_t Camera2Device::waitUntilDrained() { status_t Camera2Device::setNotifyCallback(NotificationListener *listener) { ATRACE_CALL(); status_t res; - res = mDevice->ops->set_notify_callback(mDevice, notificationCallback, + res = mHal2Device->ops->set_notify_callback(mHal2Device, notificationCallback, reinterpret_cast(listener) ); if (res != OK) { ALOGE("%s: Unable to set notification callback!", __FUNCTION__); @@ -497,7 +497,7 @@ status_t Camera2Device::triggerAutofocus(uint32_t id) { ATRACE_CALL(); status_t res; ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id); - res = mDevice->ops->trigger_action(mDevice, + res = mHal2Device->ops->trigger_action(mHal2Device, CAMERA2_TRIGGER_AUTOFOCUS, id, 0); if (res != OK) { ALOGE("%s: Error triggering autofocus (id %d)", @@ -510,7 +510,7 @@ status_t Camera2Device::triggerCancelAutofocus(uint32_t id) { ATRACE_CALL(); status_t res; ALOGV("%s: Canceling autofocus, id %d", __FUNCTION__, id); - res = mDevice->ops->trigger_action(mDevice, + res = mHal2Device->ops->trigger_action(mHal2Device, CAMERA2_TRIGGER_CANCEL_AUTOFOCUS, id, 0); if (res != OK) { ALOGE("%s: Error canceling autofocus (id %d)", @@ -523,7 +523,7 @@ status_t Camera2Device::triggerPrecaptureMetering(uint32_t id) { ATRACE_CALL(); status_t res; ALOGV("%s: Triggering precapture metering, id %d", __FUNCTION__, id); - res = mDevice->ops->trigger_action(mDevice, + res = mHal2Device->ops->trigger_action(mHal2Device, CAMERA2_TRIGGER_PRECAPTURE_METERING, id, 0); if (res != OK) { ALOGE("%s: Error triggering precapture metering (id %d)", @@ -560,18 +560,11 @@ status_t Camera2Device::pushReprocessBuffer(int reprocessStreamId, } /** - * Camera2Device::NotificationListener - */ - -Camera2Device::NotificationListener::~NotificationListener() { -} - -/** * Camera2Device::MetadataQueue */ Camera2Device::MetadataQueue::MetadataQueue(): - mDevice(NULL), + mHal2Device(NULL), mFrameCount(0), mLatestRequestId(0), mCount(0), @@ -602,7 +595,7 @@ status_t Camera2Device::MetadataQueue::setConsumerDevice(camera2_device_t *d) { res = d->ops->set_request_queue_src_ops(d, this); if (res != OK) return res; - mDevice = d; + mHal2Device = d; return OK; } @@ -835,12 +828,12 @@ status_t Camera2Device::MetadataQueue::signalConsumerLocked() { ATRACE_CALL(); status_t res = OK; notEmpty.signal(); - if (mSignalConsumer && mDevice != NULL) { + if (mSignalConsumer && mHal2Device != NULL) { mSignalConsumer = false; mMutex.unlock(); ALOGV("%s: Signaling consumer", 
__FUNCTION__); - res = mDevice->ops->notify_request_queue_not_empty(mDevice); + res = mHal2Device->ops->notify_request_queue_not_empty(mHal2Device); mMutex.lock(); } return res; @@ -939,7 +932,7 @@ int Camera2Device::MetadataQueue::producer_enqueue( Camera2Device::StreamAdapter::StreamAdapter(camera2_device_t *d): mState(RELEASED), - mDevice(d), + mHal2Device(d), mId(-1), mWidth(0), mHeight(0), mFormat(0), mSize(0), mUsage(0), mMaxProducerBuffers(0), mMaxConsumerBuffers(0), @@ -990,7 +983,7 @@ status_t Camera2Device::StreamAdapter::connectToDevice( uint32_t formatActual; uint32_t usage; uint32_t maxBuffers = 2; - res = mDevice->ops->allocate_stream(mDevice, + res = mHal2Device->ops->allocate_stream(mHal2Device, mWidth, mHeight, mFormatRequested, getStreamOps(), &id, &formatActual, &usage, &maxBuffers); if (res != OK) { @@ -1106,7 +1099,7 @@ status_t Camera2Device::StreamAdapter::connectToDevice( } ALOGV("%s: Registering %d buffers with camera HAL", __FUNCTION__, mTotalBuffers); - res = mDevice->ops->register_stream_buffers(mDevice, + res = mHal2Device->ops->register_stream_buffers(mHal2Device, mId, mTotalBuffers, buffers); @@ -1138,7 +1131,7 @@ status_t Camera2Device::StreamAdapter::release() { status_t res; ALOGV("%s: Releasing stream %d", __FUNCTION__, mId); if (mState >= ALLOCATED) { - res = mDevice->ops->release_stream(mDevice, mId); + res = mHal2Device->ops->release_stream(mHal2Device, mId); if (res != OK) { ALOGE("%s: Unable to release stream %d", __FUNCTION__, mId); @@ -1319,7 +1312,7 @@ int Camera2Device::StreamAdapter::set_crop(const camera2_stream_ops_t* w, Camera2Device::ReprocessStreamAdapter::ReprocessStreamAdapter(camera2_device_t *d): mState(RELEASED), - mDevice(d), + mHal2Device(d), mId(-1), mWidth(0), mHeight(0), mFormat(0), mActiveBuffers(0), @@ -1361,7 +1354,7 @@ status_t Camera2Device::ReprocessStreamAdapter::connectToDevice( // Allocate device-side stream interface uint32_t id; - res = mDevice->ops->allocate_reprocess_stream_from_stream(mDevice, + res = mHal2Device->ops->allocate_reprocess_stream_from_stream(mHal2Device, outputStream->getId(), getStreamOps(), &id); if (res != OK) { @@ -1385,7 +1378,7 @@ status_t Camera2Device::ReprocessStreamAdapter::release() { status_t res; ALOGV("%s: Releasing stream %d", __FUNCTION__, mId); if (mState >= ACTIVE) { - res = mDevice->ops->release_reprocess_stream(mDevice, mId); + res = mHal2Device->ops->release_reprocess_stream(mHal2Device, mId); if (res != OK) { ALOGE("%s: Unable to release stream %d", __FUNCTION__, mId); diff --git a/services/camera/libcameraservice/Camera2Device.h b/services/camera/libcameraservice/Camera2Device.h index 86ff80f..1adb7a9 100644 --- a/services/camera/libcameraservice/Camera2Device.h +++ b/services/camera/libcameraservice/Camera2Device.h @@ -21,184 +21,53 @@ #include #include #include -#include -#include -#include -#include -#include "hardware/camera2.h" -#include "camera/CameraMetadata.h" +#include "CameraDeviceBase.h" namespace android { -class Camera2Device : public virtual RefBase { +/** + * CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_2_0 + */ +class Camera2Device: public CameraDeviceBase { public: Camera2Device(int id); - ~Camera2Device(); - - status_t initialize(camera_module_t *module); - status_t disconnect(); - - status_t dump(int fd, const Vector& args); - - /** - * The device's static characteristics metadata buffer - */ - const CameraMetadata& info() const; - - /** - * Submit request for capture. The Camera2Device takes ownership of the - * passed-in buffer. 
- */ - status_t capture(CameraMetadata &request); - - /** - * Submit request for streaming. The Camera2Device makes a copy of the - * passed-in buffer and the caller retains ownership. - */ - status_t setStreamingRequest(const CameraMetadata &request); - - /** - * Clear the streaming request slot. - */ - status_t clearStreamingRequest(); - - /** - * Wait until a request with the given ID has been dequeued by the - * HAL. Returns TIMED_OUT if the timeout duration is reached. Returns - * immediately if the latest request received by the HAL has this id. - */ - status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout); + virtual ~Camera2Device(); /** - * Create an output stream of the requested size and format. - * - * If format is CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, then the HAL device selects - * an appropriate format; it can be queried with getStreamInfo. - * - * If format is HAL_PIXEL_FORMAT_COMPRESSED, the size parameter must be - * equal to the size in bytes of the buffers to allocate for the stream. For - * other formats, the size parameter is ignored. + * CameraDevice interface */ - status_t createStream(sp consumer, + virtual status_t initialize(camera_module_t *module); + virtual status_t disconnect(); + virtual status_t dump(int fd, const Vector& args); + virtual const CameraMetadata& info() const; + virtual status_t capture(CameraMetadata &request); + virtual status_t setStreamingRequest(const CameraMetadata &request); + virtual status_t clearStreamingRequest(); + virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout); + virtual status_t createStream(sp consumer, uint32_t width, uint32_t height, int format, size_t size, int *id); - - /** - * Create an input reprocess stream that uses buffers from an existing - * output stream. - */ - status_t createReprocessStreamFromStream(int outputId, int *id); - - /** - * Get information about a given stream. - */ - status_t getStreamInfo(int id, + virtual status_t createReprocessStreamFromStream(int outputId, int *id); + virtual status_t getStreamInfo(int id, uint32_t *width, uint32_t *height, uint32_t *format); - - /** - * Set stream gralloc buffer transform - */ - status_t setStreamTransform(int id, int transform); - - /** - * Delete stream. Must not be called if there are requests in flight which - * reference that stream. - */ - status_t deleteStream(int id); - - /** - * Delete reprocess stream. Must not be called if there are requests in - * flight which reference that stream. - */ - status_t deleteReprocessStream(int id); - - /** - * Create a metadata buffer with fields that the HAL device believes are - * best for the given use case - */ - status_t createDefaultRequest(int templateId, CameraMetadata *request); - - /** - * Wait until all requests have been processed. Returns INVALID_OPERATION if - * the streaming slot is not empty, or TIMED_OUT if the requests haven't - * finished processing in 10 seconds. 
- */ - status_t waitUntilDrained(); - - /** - * Abstract class for HAL notification listeners - */ - class NotificationListener { - public: - // Refer to the Camera2 HAL definition for notification definitions - virtual void notifyError(int errorCode, int arg1, int arg2) = 0; - virtual void notifyShutter(int frameNumber, nsecs_t timestamp) = 0; - virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0; - virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0; - virtual void notifyAutoWhitebalance(uint8_t newState, int triggerId) = 0; - protected: - virtual ~NotificationListener(); - }; - - /** - * Connect HAL notifications to a listener. Overwrites previous - * listener. Set to NULL to stop receiving notifications. - */ - status_t setNotifyCallback(NotificationListener *listener); - - /** - * Wait for a new frame to be produced, with timeout in nanoseconds. - * Returns TIMED_OUT when no frame produced within the specified duration - */ - status_t waitForNextFrame(nsecs_t timeout); - - /** - * Get next metadata frame from the frame queue. Returns NULL if the queue - * is empty; caller takes ownership of the metadata buffer. - */ - status_t getNextFrame(CameraMetadata *frame); - - /** - * Trigger auto-focus. The latest ID used in a trigger autofocus or cancel - * autofocus call will be returned by the HAL in all subsequent AF - * notifications. - */ - status_t triggerAutofocus(uint32_t id); - - /** - * Cancel auto-focus. The latest ID used in a trigger autofocus/cancel - * autofocus call will be returned by the HAL in all subsequent AF - * notifications. - */ - status_t triggerCancelAutofocus(uint32_t id); - - /** - * Trigger pre-capture metering. The latest ID used in a trigger pre-capture - * call will be returned by the HAL in all subsequent AE and AWB - * notifications. 
- */ - status_t triggerPrecaptureMetering(uint32_t id); - - /** - * Abstract interface for clients that want to listen to reprocess buffer - * release events - */ - struct BufferReleasedListener: public virtual RefBase { - virtual void onBufferReleased(buffer_handle_t *handle) = 0; - }; - - /** - * Push a buffer to be reprocessed into a reprocessing stream, and - * provide a listener to call once the buffer is returned by the HAL - */ - status_t pushReprocessBuffer(int reprocessStreamId, + virtual status_t setStreamTransform(int id, int transform); + virtual status_t deleteStream(int id); + virtual status_t deleteReprocessStream(int id); + virtual status_t createDefaultRequest(int templateId, CameraMetadata *request); + virtual status_t waitUntilDrained(); + virtual status_t setNotifyCallback(NotificationListener *listener); + virtual status_t waitForNextFrame(nsecs_t timeout); + virtual status_t getNextFrame(CameraMetadata *frame); + virtual status_t triggerAutofocus(uint32_t id); + virtual status_t triggerCancelAutofocus(uint32_t id); + virtual status_t triggerPrecaptureMetering(uint32_t id); + virtual status_t pushReprocessBuffer(int reprocessStreamId, buffer_handle_t *buffer, wp listener); - private: const int mId; - camera2_device_t *mDevice; + camera2_device_t *mHal2Device; CameraMetadata mDeviceInfo; vendor_tag_query_ops_t *mVendorTagOps; @@ -249,7 +118,7 @@ class Camera2Device : public virtual RefBase { status_t freeBuffers(List::iterator start, List::iterator end); - camera2_device_t *mDevice; + camera2_device_t *mHal2Device; Mutex mMutex; Condition notEmpty; @@ -341,7 +210,7 @@ class Camera2Device : public virtual RefBase { } mState; sp mConsumerInterface; - camera2_device_t *mDevice; + camera2_device_t *mHal2Device; uint32_t mId; uint32_t mWidth; @@ -435,7 +304,7 @@ class Camera2Device : public virtual RefBase { List mInFlightQueue; - camera2_device_t *mDevice; + camera2_device_t *mHal2Device; uint32_t mId; uint32_t mWidth; diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp new file mode 100644 index 0000000..2a1be09 --- /dev/null +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -0,0 +1,375 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera3-Device" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 +//#define LOG_NNDEBUG 0 // Per-frame verbose logging + +#ifdef LOG_NNDEBUG +#define ALOGVV(...) ALOGV(__VA_ARGS__) +#else +#define ALOGVV(...) 
((void)0) +#endif + +#include +#include +#include +#include "Camera3Device.h" + +namespace android { + + +Camera3Device::Camera3Device(int id): + mId(id), + mHal3Device(NULL) +{ + ATRACE_CALL(); + camera3_callback_ops::notify = &sNotify; + camera3_callback_ops::process_capture_result = &sProcessCaptureResult; + ALOGV("%s: Created device for camera %d", __FUNCTION__, id); +} + +Camera3Device::~Camera3Device() +{ + ATRACE_CALL(); + ALOGV("%s: Tearing down for camera id %d", __FUNCTION__, mId); + disconnect(); +} + +status_t Camera3Device::initialize(camera_module_t *module) +{ + ATRACE_CALL(); + ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId); + if (mHal3Device != NULL) { + ALOGE("%s: Already initialized!", __FUNCTION__); + return INVALID_OPERATION; + } + + /** Open HAL device */ + + status_t res; + String8 deviceName = String8::format("%d", mId); + + camera3_device_t *device; + + res = module->common.methods->open(&module->common, deviceName.string(), + reinterpret_cast(&device)); + + if (res != OK) { + ALOGE("%s: Could not open camera %d: %s (%d)", __FUNCTION__, + mId, strerror(-res), res); + return res; + } + + /** Cross-check device version */ + + if (device->common.version != CAMERA_DEVICE_API_VERSION_3_0) { + ALOGE("%s: Could not open camera %d: " + "Camera device is not version %x, reports %x instead", + __FUNCTION__, mId, CAMERA_DEVICE_API_VERSION_3_0, + device->common.version); + device->common.close(&device->common); + return BAD_VALUE; + } + + camera_info info; + res = module->get_camera_info(mId, &info); + if (res != OK) return res; + + if (info.device_version != device->common.version) { + ALOGE("%s: HAL reporting mismatched camera_info version (%x)" + " and device version (%x).", __FUNCTION__, + device->common.version, info.device_version); + device->common.close(&device->common); + return BAD_VALUE; + } + + /** Initialize device with callback functions */ + + res = device->ops->initialize(device, this); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to initialize HAL device: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + device->common.close(&device->common); + return BAD_VALUE; + } + + /** Get vendor metadata tags */ + + mVendorTagOps.get_camera_vendor_section_name = NULL; + + device->ops->get_metadata_vendor_tag_ops(device, &mVendorTagOps); + + if (mVendorTagOps.get_camera_vendor_section_name != NULL) { + res = set_camera_metadata_vendor_tag_ops(&mVendorTagOps); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to set tag ops: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + device->common.close(&device->common); + return res; + } + } + + /** Start up request queue thread */ + + requestThread = new RequestThread(this); + res = requestThread->run(String8::format("C3Dev-%d-ReqQueue", mId).string()); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to start request queue thread: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + device->common.close(&device->common); + return res; + } + + /** Everything is good to go */ + + mDeviceInfo = info.static_camera_characteristics; + mHal3Device = device; + + return OK; +} + +status_t Camera3Device::disconnect() { + ATRACE_CALL(); + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::dump(int fd, const Vector &args) { + ATRACE_CALL(); + (void)args; + + mHal3Device->ops->dump(mHal3Device, fd); + + return OK; +} + +const CameraMetadata& Camera3Device::info() const { + ALOGVV("%s: E", __FUNCTION__); + + return mDeviceInfo; +} + +status_t 
Camera3Device::capture(CameraMetadata &request) { + ATRACE_CALL(); + (void)request; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + + +status_t Camera3Device::setStreamingRequest(const CameraMetadata &request) { + ATRACE_CALL(); + (void)request; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::clearStreamingRequest() { + ATRACE_CALL(); + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) { + ATRACE_CALL(); + (void)requestId; (void)timeout; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::createStream(sp consumer, + uint32_t width, uint32_t height, int format, size_t size, int *id) { + ATRACE_CALL(); + (void)consumer; (void)width; (void)height; (void)format; + (void)size; (void)id; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::createReprocessStreamFromStream(int outputId, int *id) { + ATRACE_CALL(); + (void)outputId; (void)id; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + + +status_t Camera3Device::getStreamInfo(int id, + uint32_t *width, uint32_t *height, uint32_t *format) { + ATRACE_CALL(); + (void)id; (void)width; (void)height; (void)format; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::setStreamTransform(int id, + int transform) { + ATRACE_CALL(); + (void)id; (void)transform; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::deleteStream(int id) { + ATRACE_CALL(); + (void)id; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::deleteReprocessStream(int id) { + ATRACE_CALL(); + (void)id; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + + +status_t Camera3Device::createDefaultRequest(int templateId, + CameraMetadata *request) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + + const camera_metadata_t *rawRequest; + rawRequest = mHal3Device->ops->construct_default_request_settings( + mHal3Device, templateId); + if (rawRequest == NULL) return DEAD_OBJECT; + *request = rawRequest; + + return OK; +} + +status_t Camera3Device::waitUntilDrained() { + ATRACE_CALL(); + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::setNotifyCallback(NotificationListener *listener) { + ATRACE_CALL(); + (void)listener; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::waitForNextFrame(nsecs_t timeout) { + (void)timeout; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::getNextFrame(CameraMetadata *frame) { + ATRACE_CALL(); + (void)frame; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::triggerAutofocus(uint32_t id) { + ATRACE_CALL(); + (void)id; + + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3Device::triggerCancelAutofocus(uint32_t id) { + ATRACE_CALL(); + (void)id; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; + +} + +status_t Camera3Device::triggerPrecaptureMetering(uint32_t id) { + ATRACE_CALL(); + (void)id; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; + +} + 
+status_t Camera3Device::pushReprocessBuffer(int reprocessStreamId, + buffer_handle_t *buffer, wp listener) { + ATRACE_CALL(); + (void)reprocessStreamId; (void)buffer; (void)listener; + + ALOGE("%s: Unimplemented", __FUNCTION__); + return INVALID_OPERATION; +} + +Camera3Device::RequestThread::RequestThread(wp parent) : + Thread(false), + mParent(parent) { +} + +bool Camera3Device::RequestThread::threadLoop() { + ALOGE("%s: Unimplemented", __FUNCTION__); + + return false; +} + +void Camera3Device::processCaptureResult(const camera3_capture_result *result) { + (void)result; + + ALOGE("%s: Unimplemented", __FUNCTION__); +} + +void Camera3Device::notify(const camera3_notify_msg *msg) { + (void)msg; + + ALOGE("%s: Unimplemented", __FUNCTION__); +} + +/** + * Static callback forwarding methods from HAL to instance + */ + +void Camera3Device::sProcessCaptureResult(const camera3_callback_ops *cb, + const camera3_capture_result *result) { + Camera3Device *d = + const_cast(static_cast(cb)); + d->processCaptureResult(result); +} + +void Camera3Device::sNotify(const camera3_callback_ops *cb, + const camera3_notify_msg *msg) { + Camera3Device *d = + const_cast(static_cast(cb)); + d->notify(msg); +} + +}; // namespace android diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/Camera3Device.h new file mode 100644 index 0000000..2bc7cf0 --- /dev/null +++ b/services/camera/libcameraservice/Camera3Device.h @@ -0,0 +1,133 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA3DEVICE_H +#define ANDROID_SERVERS_CAMERA_CAMERA3DEVICE_H + +#include +#include +#include +#include +#include + +#include "CameraDeviceBase.h" + +#include "hardware/camera3.h" + +/** + * Function pointer types with C calling convention to + * use for HAL callback functions. 
+ */ +extern "C" { + typedef void (callbacks_process_capture_result_t)( + const struct camera3_callback_ops *, + const camera3_capture_result_t *); + + typedef void (callbacks_notify_t)( + const struct camera3_callback_ops *, + const camera3_notify_msg_t *); +} + +namespace android { + +/** + * CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_3_0 + */ +class Camera3Device : + public CameraDeviceBase, + private camera3_callback_ops { + public: + Camera3Device(int id); + + virtual ~Camera3Device(); + + /** + * CameraDevice interface + */ + virtual status_t initialize(camera_module_t *module); + virtual status_t disconnect(); + virtual status_t dump(int fd, const Vector &args); + virtual const CameraMetadata& info() const; + virtual status_t capture(CameraMetadata &request); + virtual status_t setStreamingRequest(const CameraMetadata &request); + virtual status_t clearStreamingRequest(); + virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout); + virtual status_t createStream(sp consumer, + uint32_t width, uint32_t height, int format, size_t size, + int *id); + virtual status_t createReprocessStreamFromStream(int outputId, int *id); + virtual status_t getStreamInfo(int id, + uint32_t *width, uint32_t *height, uint32_t *format); + virtual status_t setStreamTransform(int id, int transform); + virtual status_t deleteStream(int id); + virtual status_t deleteReprocessStream(int id); + virtual status_t createDefaultRequest(int templateId, CameraMetadata *request); + virtual status_t waitUntilDrained(); + virtual status_t setNotifyCallback(NotificationListener *listener); + virtual status_t waitForNextFrame(nsecs_t timeout); + virtual status_t getNextFrame(CameraMetadata *frame); + virtual status_t triggerAutofocus(uint32_t id); + virtual status_t triggerCancelAutofocus(uint32_t id); + virtual status_t triggerPrecaptureMetering(uint32_t id); + virtual status_t pushReprocessBuffer(int reprocessStreamId, + buffer_handle_t *buffer, wp listener); + + private: + const int mId; + camera3_device_t *mHal3Device; + + CameraMetadata mDeviceInfo; + vendor_tag_query_ops_t mVendorTagOps; + + /** + * Thread for managing capture request submission to HAL device. + */ + class RequestThread: public Thread { + + public: + + RequestThread(wp parent); + + protected: + + virtual bool threadLoop(); + + private: + + wp mParent; + + }; + sp requestThread; + + /** + * Callback functions from HAL device + */ + void processCaptureResult(const camera3_capture_result *result); + + void notify(const camera3_notify_msg *msg); + + /** + * Static callback forwarding methods from HAL to instance + */ + static callbacks_process_capture_result_t sProcessCaptureResult; + + static callbacks_notify_t sNotify; + +}; // class Camera3Device + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/CameraClient.h b/services/camera/libcameraservice/CameraClient.h index 00dc90c..7f0cb29 100644 --- a/services/camera/libcameraservice/CameraClient.h +++ b/services/camera/libcameraservice/CameraClient.h @@ -24,6 +24,11 @@ namespace android { class MemoryHeapBase; class CameraHardwareInterface; +/** + * Interface between android.hardware.Camera API and Camera HAL device for version + * CAMERA_DEVICE_API_VERSION_1_0. 
+ */ + class CameraClient : public CameraService::Client { public: diff --git a/services/camera/libcameraservice/CameraDeviceBase.cpp b/services/camera/libcameraservice/CameraDeviceBase.cpp new file mode 100644 index 0000000..6c4e87f --- /dev/null +++ b/services/camera/libcameraservice/CameraDeviceBase.cpp @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "CameraDeviceBase.h" + +namespace android { + +/** + * Base class destructors + */ +CameraDeviceBase::~CameraDeviceBase() { +} + +CameraDeviceBase::NotificationListener::~NotificationListener() { +} + +} // namespace android diff --git a/services/camera/libcameraservice/CameraDeviceBase.h b/services/camera/libcameraservice/CameraDeviceBase.h new file mode 100644 index 0000000..8252af7 --- /dev/null +++ b/services/camera/libcameraservice/CameraDeviceBase.h @@ -0,0 +1,204 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERADEVICEBASE_H +#define ANDROID_SERVERS_CAMERA_CAMERADEVICEBASE_H + +#include +#include +#include +#include +#include + +#include "hardware/camera2.h" +#include "camera/CameraMetadata.h" + +namespace android { + +/** + * Base interface for version >= 2 camera device classes, which interface to + * camera HAL device versions >= 2. + */ +class CameraDeviceBase : public virtual RefBase { + public: + virtual ~CameraDeviceBase(); + + virtual status_t initialize(camera_module_t *module) = 0; + virtual status_t disconnect() = 0; + + virtual status_t dump(int fd, const Vector& args) = 0; + + /** + * The device's static characteristics metadata buffer + */ + virtual const CameraMetadata& info() const = 0; + + /** + * Submit request for capture. The CameraDevice takes ownership of the + * passed-in buffer. + */ + virtual status_t capture(CameraMetadata &request) = 0; + + /** + * Submit request for streaming. The CameraDevice makes a copy of the + * passed-in buffer and the caller retains ownership. + */ + virtual status_t setStreamingRequest(const CameraMetadata &request) = 0; + + /** + * Clear the streaming request slot. + */ + virtual status_t clearStreamingRequest() = 0; + + /** + * Wait until a request with the given ID has been dequeued by the + * HAL. Returns TIMED_OUT if the timeout duration is reached. Returns + * immediately if the latest request received by the HAL has this id. 
+ */ + virtual status_t waitUntilRequestReceived(int32_t requestId, + nsecs_t timeout) = 0; + + /** + * Create an output stream of the requested size and format. + * + * If format is CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, then the HAL device selects + * an appropriate format; it can be queried with getStreamInfo. + * + * If format is HAL_PIXEL_FORMAT_COMPRESSED, the size parameter must be + * equal to the size in bytes of the buffers to allocate for the stream. For + * other formats, the size parameter is ignored. + */ + virtual status_t createStream(sp consumer, + uint32_t width, uint32_t height, int format, size_t size, + int *id) = 0; + + /** + * Create an input reprocess stream that uses buffers from an existing + * output stream. + */ + virtual status_t createReprocessStreamFromStream(int outputId, int *id) = 0; + + /** + * Get information about a given stream. + */ + virtual status_t getStreamInfo(int id, + uint32_t *width, uint32_t *height, uint32_t *format) = 0; + + /** + * Set stream gralloc buffer transform + */ + virtual status_t setStreamTransform(int id, int transform) = 0; + + /** + * Delete stream. Must not be called if there are requests in flight which + * reference that stream. + */ + virtual status_t deleteStream(int id) = 0; + + /** + * Delete reprocess stream. Must not be called if there are requests in + * flight which reference that stream. + */ + virtual status_t deleteReprocessStream(int id) = 0; + + /** + * Create a metadata buffer with fields that the HAL device believes are + * best for the given use case + */ + virtual status_t createDefaultRequest(int templateId, + CameraMetadata *request) = 0; + + /** + * Wait until all requests have been processed. Returns INVALID_OPERATION if + * the streaming slot is not empty, or TIMED_OUT if the requests haven't + * finished processing in 10 seconds. + */ + virtual status_t waitUntilDrained() = 0; + + /** + * Abstract class for HAL notification listeners + */ + class NotificationListener { + public: + // Refer to the Camera2 HAL definition for notification definitions + virtual void notifyError(int errorCode, int arg1, int arg2) = 0; + virtual void notifyShutter(int frameNumber, nsecs_t timestamp) = 0; + virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0; + virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0; + virtual void notifyAutoWhitebalance(uint8_t newState, + int triggerId) = 0; + protected: + virtual ~NotificationListener(); + }; + + /** + * Connect HAL notifications to a listener. Overwrites previous + * listener. Set to NULL to stop receiving notifications. + */ + virtual status_t setNotifyCallback(NotificationListener *listener) = 0; + + /** + * Wait for a new frame to be produced, with timeout in nanoseconds. + * Returns TIMED_OUT when no frame produced within the specified duration + */ + virtual status_t waitForNextFrame(nsecs_t timeout) = 0; + + /** + * Get next metadata frame from the frame queue. Returns NULL if the queue + * is empty; caller takes ownership of the metadata buffer. + */ + virtual status_t getNextFrame(CameraMetadata *frame) = 0; + + /** + * Trigger auto-focus. The latest ID used in a trigger autofocus or cancel + * autofocus call will be returned by the HAL in all subsequent AF + * notifications. + */ + virtual status_t triggerAutofocus(uint32_t id) = 0; + + /** + * Cancel auto-focus. The latest ID used in a trigger autofocus/cancel + * autofocus call will be returned by the HAL in all subsequent AF + * notifications. 
+ */ + virtual status_t triggerCancelAutofocus(uint32_t id) = 0; + + /** + * Trigger pre-capture metering. The latest ID used in a trigger pre-capture + * call will be returned by the HAL in all subsequent AE and AWB + * notifications. + */ + virtual status_t triggerPrecaptureMetering(uint32_t id) = 0; + + /** + * Abstract interface for clients that want to listen to reprocess buffer + * release events + */ + struct BufferReleasedListener : public virtual RefBase { + virtual void onBufferReleased(buffer_handle_t *handle) = 0; + }; + + /** + * Push a buffer to be reprocessed into a reprocessing stream, and + * provide a listener to call once the buffer is returned by the HAL + */ + virtual status_t pushReprocessBuffer(int reprocessStreamId, + buffer_handle_t *buffer, wp listener) = 0; +}; + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/CameraHardwareInterface.h b/services/camera/libcameraservice/CameraHardwareInterface.h index 167b37c..87b2807 100644 --- a/services/camera/libcameraservice/CameraHardwareInterface.h +++ b/services/camera/libcameraservice/CameraHardwareInterface.h @@ -47,7 +47,8 @@ typedef void (*data_callback_timestamp)(nsecs_t timestamp, /** * CameraHardwareInterface.h defines the interface to the * camera hardware abstraction layer, used for setting and getting - * parameters, live previewing, and taking pictures. + * parameters, live previewing, and taking pictures. It is used for + * HAL devices with version CAMERA_DEVICE_API_VERSION_1_0 only. * * It is a referenced counted interface with RefBase as its base class. * CameraService calls openCameraHardware() to retrieve a strong pointer to the @@ -56,24 +57,18 @@ typedef void (*data_callback_timestamp)(nsecs_t timestamp, * * -# After CameraService calls openCameraHardware(), getParameters() and * setParameters() are used to initialize the camera instance. - * CameraService calls getPreviewHeap() to establish access to the - * preview heap so it can be registered with SurfaceFlinger for - * efficient display updating while in preview mode. - * -# startPreview() is called. The camera instance then periodically - * sends the message CAMERA_MSG_PREVIEW_FRAME (if enabled) each time - * a new preview frame is available. If data callback code needs to use - * this memory after returning, it must copy the data. + * -# startPreview() is called. * - * Prior to taking a picture, CameraService calls autofocus(). When auto + * Prior to taking a picture, CameraService often calls autofocus(). When auto * focusing has completed, the camera instance sends a CAMERA_MSG_FOCUS notification, * which informs the application whether focusing was successful. The camera instance * only sends this message once and it is up to the application to call autoFocus() * again if refocusing is desired. * * CameraService calls takePicture() to request the camera instance take a - * picture. At this point, if a shutter, postview, raw, and/or compressed callback - * is desired, the corresponding message must be enabled. As with CAMERA_MSG_PREVIEW_FRAME, - * any memory provided in a data callback must be copied if it's needed after returning. + * picture. At this point, if a shutter, postview, raw, and/or compressed + * callback is desired, the corresponding message must be enabled. Any memory + * provided in a data callback must be copied if it's needed after returning. 
*/ class CameraHardwareInterface : public virtual RefBase { diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 1a78b53..d7c8807 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -244,9 +244,11 @@ sp CameraService::connect( break; case CAMERA_DEVICE_API_VERSION_2_0: case CAMERA_DEVICE_API_VERSION_2_1: + case CAMERA_DEVICE_API_VERSION_3_0: client = new Camera2Client(this, cameraClient, clientPackageName, cameraId, - facing, callingPid, clientUid, getpid()); + facing, callingPid, clientUid, getpid(), + deviceVersion); break; case -1: ALOGE("Invalid camera id %d", cameraId); diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp index c4055e0..9a14758 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp @@ -23,7 +23,7 @@ #include "CallbackProcessor.h" #include -#include "../Camera2Device.h" +#include "../CameraDeviceBase.h" #include "../Camera2Client.h" @@ -58,7 +58,7 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { sp client = mClient.promote(); if (client == 0) return OK; - sp device = client->getCameraDevice(); + sp device = client->getCameraDevice(); if (mCallbackConsumer == 0) { // Create CPU buffer queue endpoint @@ -125,7 +125,7 @@ status_t CallbackProcessor::deleteStream() { if (mCallbackStreamId != NO_STREAM) { sp client = mClient.promote(); if (client == 0) return OK; - sp device = client->getCameraDevice(); + sp device = client->getCameraDevice(); device->deleteStream(mCallbackStreamId); diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp index 8ee5de7..3129a0b 100644 --- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp +++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp @@ -22,7 +22,7 @@ #include #include "FrameProcessor.h" -#include "../Camera2Device.h" +#include "../CameraDeviceBase.h" #include "../Camera2Client.h" namespace android { @@ -71,7 +71,7 @@ void FrameProcessor::dump(int fd, const Vector& /*args*/) { bool FrameProcessor::threadLoop() { status_t res; - sp device; + sp device; { sp client = mClient.promote(); if (client == 0) return false; diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.cpp b/services/camera/libcameraservice/camera2/JpegProcessor.cpp index 1ec5694..286fac4 100644 --- a/services/camera/libcameraservice/camera2/JpegProcessor.cpp +++ b/services/camera/libcameraservice/camera2/JpegProcessor.cpp @@ -27,7 +27,7 @@ #include "JpegProcessor.h" #include -#include "../Camera2Device.h" +#include "../CameraDeviceBase.h" #include "../Camera2Client.h" @@ -66,7 +66,7 @@ status_t JpegProcessor::updateStream(const Parameters ¶ms) { sp client = mClient.promote(); if (client == 0) return OK; - sp device = client->getCameraDevice(); + sp device = client->getCameraDevice(); // Find out buffer size for JPEG camera_metadata_ro_entry_t maxJpegSize = @@ -145,7 +145,7 @@ status_t JpegProcessor::deleteStream() { if (mCaptureStreamId != NO_STREAM) { sp client = mClient.promote(); if (client == 0) return OK; - sp device = client->getCameraDevice(); + sp device = client->getCameraDevice(); device->deleteStream(mCaptureStreamId); diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp 
b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp index a0d1093..6a4b95d 100644 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp @@ -26,7 +26,7 @@ #include "StreamingProcessor.h" #include "Camera2Heap.h" #include "../Camera2Client.h" -#include "../Camera2Device.h" +#include "../CameraDeviceBase.h" namespace android { namespace camera2 { @@ -110,7 +110,7 @@ status_t StreamingProcessor::updatePreviewStream(const Parameters ¶ms) { status_t res; sp client = mClient.promote(); if (client == 0) return INVALID_OPERATION; - sp device = client->getCameraDevice(); + sp device = client->getCameraDevice(); if (mPreviewStreamId != NO_STREAM) { // Check if stream parameters have to change @@ -176,7 +176,7 @@ status_t StreamingProcessor::deletePreviewStream() { if (mPreviewStreamId != NO_STREAM) { sp client = mClient.promote(); if (client == 0) return INVALID_OPERATION; - sp device = client->getCameraDevice(); + sp device = client->getCameraDevice(); ALOGV("%s: for cameraId %d on streamId %d", __FUNCTION__, client->getCameraId(), mPreviewStreamId); @@ -272,7 +272,7 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters ¶ms) { sp client = mClient.promote(); if (client == 0) return INVALID_OPERATION; - sp device = client->getCameraDevice(); + sp device = client->getCameraDevice(); if (mRecordingConsumer == 0) { // Create CPU buffer queue endpoint. We need one more buffer here so that we can @@ -339,7 +339,7 @@ status_t StreamingProcessor::deleteRecordingStream() { if (mRecordingStreamId != NO_STREAM) { sp client = mClient.promote(); if (client == 0) return INVALID_OPERATION; - sp device = client->getCameraDevice(); + sp device = client->getCameraDevice(); res = device->waitUntilDrained(); if (res != OK) { @@ -415,7 +415,7 @@ status_t StreamingProcessor::stopStream() { sp client = mClient.promote(); if (client == 0) return INVALID_OPERATION; - sp device = client->getCameraDevice(); + sp device = client->getCameraDevice(); res = device->clearStreamingRequest(); if (res != OK) { diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp index 900c099..769d9bd 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp @@ -30,7 +30,7 @@ #include "ZslProcessor.h" #include -#include "../Camera2Device.h" +#include "../CameraDeviceBase.h" #include "../Camera2Client.h" @@ -114,7 +114,7 @@ status_t ZslProcessor::updateStream(const Parameters ¶ms) { sp client = mClient.promote(); if (client == 0) return OK; - sp device = client->getCameraDevice(); + sp device = client->getCameraDevice(); if (mZslConsumer == 0) { // Create CPU buffer queue endpoint @@ -202,7 +202,7 @@ status_t ZslProcessor::deleteStream() { if (mZslStreamId != NO_STREAM) { sp client = mClient.promote(); if (client == 0) return OK; - sp device = client->getCameraDevice(); + sp device = client->getCameraDevice(); res = device->deleteReprocessStream(mZslReprocessStreamId); if (res != OK) { @@ -289,10 +289,12 @@ status_t ZslProcessor::pushToReprocess(int32_t requestId) { uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS; res = request.update(ANDROID_REQUEST_TYPE, &requestType, 1); - uint8_t inputStreams[1] = { mZslReprocessStreamId }; + uint8_t inputStreams[1] = + { static_cast(mZslReprocessStreamId) }; if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS, inputStreams, 1); - 
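The ZslProcessor change here casts the int-typed stream ids down to uint8_t explicitly before storing them in the metadata arrays: brace-initializing a uint8_t element from a non-constant int is a narrowing conversion that C++11 list-initialization diagnoses. A small standalone illustration, with made-up names:

    #include <cstdint>
    #include <cstdio>

    int main() {
        int streamId = 3;   // stream ids are plain ints in the client code
        // uint8_t inputStreams[1] = { streamId };   // narrowing: rejected/warned under C++11 list-init
        uint8_t inputStreams[1] = { static_cast<uint8_t>(streamId) };  // explicit, compiles cleanly
        std::printf("stream entry = %u\n", inputStreams[0]);
        return 0;
    }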
uint8_t outputStreams[1] = { client->getCaptureStreamId() }; + uint8_t outputStreams[1] = + { static_cast(client->getCaptureStreamId()) }; if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS, outputStreams, 1); res = request.update(ANDROID_REQUEST_ID, diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/camera2/ZslProcessor.h index ec16eef..b2cf5b1 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor.h +++ b/services/camera/libcameraservice/camera2/ZslProcessor.h @@ -27,7 +27,7 @@ #include "FrameProcessor.h" #include "camera/CameraMetadata.h" #include "Camera2Heap.h" -#include "../Camera2Device.h" +#include "../CameraDeviceBase.h" namespace android { @@ -44,7 +44,7 @@ class ZslProcessor: virtual public Thread, virtual public BufferItemConsumer::FrameAvailableListener, virtual public FrameProcessor::FilteredListener, - virtual public Camera2Device::BufferReleasedListener { + virtual public CameraDeviceBase::BufferReleasedListener { public: ZslProcessor(wp client, wp sequencer); ~ZslProcessor(); -- cgit v1.1 From 3fa4891f54bbfdbe8ee652930d22c96639964eff Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 28 Feb 2013 10:41:51 -0800 Subject: ProCameraTest: Drop frames test move processing delay between lock/unlock * This makes it more likely to find races with the buffer being unlocked by itself Change-Id: I1fa7dbfb73f60664a02a35678b58bcc9b06893a2 --- camera/tests/ProCameraTests.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index f93e5cd..39456af 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -1001,11 +1001,6 @@ TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFrames) { // Consume a couple of results for (int i = 0; i < NUM_REQUESTS; ++i) { - // Process at 10fps, stream is at 15fps. - // This means we will definitely fill up the buffer queue with - // extra buffers and need to drop them. - usleep(TEST_FRAME_PROCESSING_DELAY_US); - int numFrames; EXPECT_TRUE((numFrames = mCamera->waitForFrameBuffer(streamId)) > 0); @@ -1025,6 +1020,11 @@ TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFrames) { ", dataPtr = " << (void*)buf.data << ", timestamp = " << buf.timestamp << std::endl; + // Process at 10fps, stream is at 15fps. + // This means we will definitely fill up the buffer queue with + // extra buffers and need to drop them. 
+ usleep(TEST_FRAME_PROCESSING_DELAY_US); + EXPECT_OK(consumer->unlockBuffer(buf)); } -- cgit v1.1 From bfc9915f482520eb9676c6d2dbf7f1ac078d937d Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 27 Feb 2013 12:55:20 -0800 Subject: Camera: Drop ProCamera connections when a Camera connection happens * Also adds an ICameraServiceListener with available/not available statuses Bug: 8291653 Change-Id: I24680f1a2dc109510caf451cf7c7bd180b670d84 --- camera/Android.mk | 1 + camera/CameraBase.cpp | 16 +++ camera/ICameraService.cpp | 33 +++++ camera/ICameraServiceListener.cpp | 86 ++++++++++++ camera/tests/ProCameraTests.cpp | 150 +++++++++++++++++++-- include/camera/CameraBase.h | 6 + include/camera/ICameraService.h | 18 ++- include/camera/ICameraServiceListener.h | 64 +++++++++ include/camera/ProCamera.h | 3 + services/camera/libcameraservice/CameraService.cpp | 126 ++++++++++++++++- services/camera/libcameraservice/CameraService.h | 30 ++++- .../camera/libcameraservice/ProCamera2Client.cpp | 68 +++++++++- .../camera/libcameraservice/ProCamera2Client.h | 5 + 13 files changed, 587 insertions(+), 19 deletions(-) create mode 100644 camera/ICameraServiceListener.cpp create mode 100644 include/camera/ICameraServiceListener.h diff --git a/camera/Android.mk b/camera/Android.mk index 3f30079..e33fb50 100644 --- a/camera/Android.mk +++ b/camera/Android.mk @@ -11,6 +11,7 @@ LOCAL_SRC_FILES:= \ ICamera.cpp \ ICameraClient.cpp \ ICameraService.cpp \ + ICameraServiceListener.cpp \ ICameraRecordingProxy.cpp \ ICameraRecordingProxyListener.cpp \ IProCameraUser.cpp \ diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp index 9b0e6bf..29096da 100644 --- a/camera/CameraBase.cpp +++ b/camera/CameraBase.cpp @@ -231,6 +231,22 @@ status_t CameraBase::getCameraInfo(int cameraId, return cs->getCameraInfo(cameraId, cameraInfo); } +template +status_t CameraBase::addServiceListener( + const sp& listener) { + const sp& cs = getCameraService(); + if (cs == 0) return UNKNOWN_ERROR; + return cs->addListener(listener); +} + +template +status_t CameraBase::removeServiceListener( + const sp& listener) { + const sp& cs = getCameraService(); + if (cs == 0) return UNKNOWN_ERROR; + return cs->removeListener(listener); +} + template class CameraBase; template class CameraBase; diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp index b54d63f..134f7f0 100644 --- a/camera/ICameraService.cpp +++ b/camera/ICameraService.cpp @@ -23,6 +23,7 @@ #include #include +#include #include #include #include @@ -86,6 +87,24 @@ public: remote()->transact(BnCameraService::CONNECT_PRO, data, &reply); return interface_cast(reply.readStrongBinder()); } + + virtual status_t addListener(const sp& listener) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); + data.writeStrongBinder(listener->asBinder()); + remote()->transact(BnCameraService::ADD_LISTENER, data, &reply); + return reply.readInt32(); + } + + virtual status_t removeListener(const sp& listener) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); + data.writeStrongBinder(listener->asBinder()); + remote()->transact(BnCameraService::REMOVE_LISTENER, data, &reply); + return reply.readInt32(); + } }; IMPLEMENT_META_INTERFACE(CameraService, "android.hardware.ICameraService"); @@ -134,6 +153,20 @@ status_t BnCameraService::onTransact( reply->writeStrongBinder(camera->asBinder()); return NO_ERROR; } break; + case ADD_LISTENER: { + CHECK_INTERFACE(ICameraService, data, reply); + sp listener = + 
interface_cast(data.readStrongBinder()); + reply->writeInt32(addListener(listener)); + return NO_ERROR; + } break; + case REMOVE_LISTENER: { + CHECK_INTERFACE(ICameraService, data, reply); + sp listener = + interface_cast(data.readStrongBinder()); + reply->writeInt32(removeListener(listener)); + return NO_ERROR; + } break; default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/camera/ICameraServiceListener.cpp b/camera/ICameraServiceListener.cpp new file mode 100644 index 0000000..640ee35 --- /dev/null +++ b/camera/ICameraServiceListener.cpp @@ -0,0 +1,86 @@ +/* +** +** Copyright 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#include +#include + +#include +#include +#include + +#include + +namespace android { + +namespace { + enum { + STATUS_CHANGED = IBinder::FIRST_CALL_TRANSACTION, + }; +}; // namespace anonymous + +class BpCameraServiceListener: public BpInterface +{ + +public: + BpCameraServiceListener(const sp& impl) + : BpInterface(impl) + { + } + + virtual void onStatusChanged(Status status, int32_t cameraId) + { + Parcel data, reply; + data.writeInterfaceToken( + ICameraServiceListener::getInterfaceDescriptor()); + + data.writeInt32(static_cast(status)); + data.writeInt32(cameraId); + + remote()->transact(STATUS_CHANGED, + data, + &reply, + IBinder::FLAG_ONEWAY); + } +}; + +IMPLEMENT_META_INTERFACE(CameraServiceListener, + "android.hardware.ICameraServiceListener"); + +// ---------------------------------------------------------------------- + +status_t BnCameraServiceListener::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch(code) { + case STATUS_CHANGED: { + CHECK_INTERFACE(ICameraServiceListener, data, reply); + + Status status = static_cast(data.readInt32()); + int32_t cameraId = data.readInt32(); + + onStatusChanged(status, cameraId); + + return NO_ERROR; + } break; + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index 39456af..c61e71a 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -33,6 +33,8 @@ #include // for CAMERA2_TEMPLATE_PREVIEW only #include +#include + namespace android { namespace camera2 { namespace tests { @@ -48,9 +50,9 @@ namespace client { #define TEST_FORMAT_DEPTH HAL_PIXEL_FORMAT_Y16 // defaults for display "test" -#define TEST_DISPLAY_FORMAT HAL_PIXEL_FORMAT_Y16 -#define TEST_DISPLAY_WIDTH 1280 -#define TEST_DISPLAY_HEIGHT 960 +#define TEST_DISPLAY_FORMAT HAL_PIXEL_FORMAT_Y8 +#define TEST_DISPLAY_WIDTH 320 +#define TEST_DISPLAY_HEIGHT 240 #define TEST_CPU_FRAME_COUNT 2 #define TEST_CPU_HEAP_COUNT 5 @@ -68,6 +70,52 @@ namespace client { class ProCameraTest; +struct ServiceListener : public BnCameraServiceListener { + + ServiceListener() : + mLatestStatus(STATUS_UNKNOWN), + 
mPrevStatus(STATUS_UNKNOWN) + { + } + + void onStatusChanged(Status status, int32_t cameraId) { + dout << "On status changed: 0x" << std::hex + << status << " cameraId " << cameraId + << std::endl; + + Mutex::Autolock al(mMutex); + + mLatestStatus = status; + mCondition.broadcast(); + } + + status_t waitForStatusChange(Status& newStatus) { + Mutex::Autolock al(mMutex); + + if (mLatestStatus != mPrevStatus) { + newStatus = mLatestStatus; + mPrevStatus = mLatestStatus; + return OK; + } + + status_t stat = mCondition.waitRelative(mMutex, + TEST_LISTENER_TIMEOUT); + + if (stat == OK) { + newStatus = mLatestStatus; + mPrevStatus = mLatestStatus; + } + + return stat; + } + + Condition mCondition; + Mutex mMutex; + + Status mLatestStatus; + Status mPrevStatus; +}; + enum ProEvent { UNKNOWN, ACQUIRED, @@ -441,7 +489,6 @@ protected: } request.acquire(requestTmp); } - }; sp ProCameraTest::mTestThread; @@ -538,18 +585,52 @@ TEST_F(ProCameraTest, DISABLED_StreamingImageSingle) { } int depthStreamId = -1; - EXPECT_OK(mCamera->createStream(mDisplayW, mDisplayH, mDisplayFmt, surface, - &depthStreamId)); - EXPECT_NE(-1, depthStreamId); - EXPECT_OK(mCamera->exclusiveTryLock()); + sp listener = new ServiceListener(); + EXPECT_OK(ProCamera::addServiceListener(listener)); - uint8_t streams[] = { depthStreamId }; - ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1)); + ServiceListener::Status currentStatus = ServiceListener::STATUS_AVAILABLE; - dout << "will sleep now for " << mDisplaySecs << std::endl; - sleep(mDisplaySecs); + dout << "Will now stream and resume infinitely..." << std::endl; + while (true) { + + if (currentStatus == ServiceListener::STATUS_AVAILABLE) { + + EXPECT_OK(mCamera->createStream(mDisplayW, mDisplayH, mDisplayFmt, + surface, + &depthStreamId)); + EXPECT_NE(-1, depthStreamId); + EXPECT_OK(mCamera->exclusiveTryLock()); + + uint8_t streams[] = { depthStreamId }; + ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams( + streams, + /*count*/1)); + } + + ServiceListener::Status stat = ServiceListener::STATUS_UNKNOWN; + + // TODO: maybe check for getch every once in a while? 
+ while (listener->waitForStatusChange(/*out*/stat) != OK); + + if (currentStatus != stat) { + if (stat == ServiceListener::STATUS_AVAILABLE) { + dout << "Reconnecting to camera" << std::endl; + mCamera = ProCamera::connect(CAMERA_ID); + } else if (stat == ServiceListener::STATUS_NOT_AVAILABLE) { + dout << "Disconnecting from camera" << std::endl; + mCamera->disconnect(); + } else { + dout << "Unknown status change " + << std::hex << stat << std::endl; + } + + currentStatus = stat; + } + } + + EXPECT_OK(ProCamera::removeServiceListener(listener)); EXPECT_OK(mCamera->deleteStream(depthStreamId)); EXPECT_OK(mCamera->exclusiveUnlock()); } @@ -1035,6 +1116,51 @@ TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFrames) { +//TODO: refactor into separate file +TEST_F(ProCameraTest, ServiceListenersSubscribe) { + + ASSERT_EQ(4u, sizeof(ServiceListener::Status)); + + sp listener = new ServiceListener(); + + EXPECT_EQ(BAD_VALUE, ProCamera::removeServiceListener(listener)); + EXPECT_OK(ProCamera::addServiceListener(listener)); + + EXPECT_EQ(ALREADY_EXISTS, ProCamera::addServiceListener(listener)); + EXPECT_OK(ProCamera::removeServiceListener(listener)); + + EXPECT_EQ(BAD_VALUE, ProCamera::removeServiceListener(listener)); +} + +//TODO: refactor into separate file +TEST_F(ProCameraTest, ServiceListenersFunctional) { + + sp listener = new ServiceListener(); + + EXPECT_OK(ProCamera::addServiceListener(listener)); + + sp cam = Camera::connect(CAMERA_ID, + /*clientPackageName*/String16(), + -1); + EXPECT_NE((void*)NULL, cam.get()); + + ServiceListener::Status stat = ServiceListener::STATUS_UNKNOWN; + EXPECT_OK(listener->waitForStatusChange(/*out*/stat)); + + EXPECT_EQ(ServiceListener::STATUS_NOT_AVAILABLE, stat); + + if (cam.get()) { + cam->disconnect(); + } + + EXPECT_OK(listener->waitForStatusChange(/*out*/stat)); + EXPECT_EQ(ServiceListener::STATUS_AVAILABLE, stat); + + EXPECT_OK(ProCamera::removeServiceListener(listener)); +} + + + } } } diff --git a/include/camera/CameraBase.h b/include/camera/CameraBase.h index fed28ea..2735a86 100644 --- a/include/camera/CameraBase.h +++ b/include/camera/CameraBase.h @@ -71,6 +71,12 @@ public: /*out*/ struct CameraInfo* cameraInfo); + static status_t addServiceListener( + const sp& listener); + + static status_t removeServiceListener( + const sp& listener); + sp remote(); // Status is set to 'UNKNOWN_ERROR' after successful (re)connection diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h index ef2b685..aaf6eb3 100644 --- a/include/camera/ICameraService.h +++ b/include/camera/ICameraService.h @@ -27,6 +27,7 @@ class ICamera; class ICameraClient; class IProCameraUser; class IProCameraCallbacks; +class ICameraServiceListener; class ICameraService : public IInterface { @@ -35,7 +36,9 @@ public: GET_NUMBER_OF_CAMERAS = IBinder::FIRST_CALL_TRANSACTION, GET_CAMERA_INFO, CONNECT, - CONNECT_PRO + CONNECT_PRO, + ADD_LISTENER, + REMOVE_LISTENER, }; enum { @@ -45,9 +48,18 @@ public: public: DECLARE_META_INTERFACE(CameraService); - virtual int32_t getNumberOfCameras() = 0; - virtual status_t getCameraInfo(int cameraId, + virtual int32_t getNumberOfCameras() = 0; + virtual status_t getCameraInfo(int cameraId, struct CameraInfo* cameraInfo) = 0; + + // Returns 'OK' if operation succeeded + // - Errors: ALREADY_EXISTS if the listener was already added + virtual status_t addListener(const sp& listener) + = 0; + // Returns 'OK' if operation succeeded + // - Errors: BAD_VALUE if specified listener was not in the listener list + virtual status_t 
removeListener(const sp& listener) + = 0; /** * clientPackageName and clientUid are used for permissions checking. if * clientUid == USE_CALLING_UID, then the calling UID is used instead. Only diff --git a/include/camera/ICameraServiceListener.h b/include/camera/ICameraServiceListener.h new file mode 100644 index 0000000..207116a --- /dev/null +++ b/include/camera/ICameraServiceListener.h @@ -0,0 +1,64 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_ICAMERASERVICE_LISTENER_H +#define ANDROID_HARDWARE_ICAMERASERVICE_LISTENER_H + +#include +#include +#include +#include + +namespace android { + +class ICameraServiceListener : public IInterface +{ +public: + + enum Status { + // Device physically unplugged + STATUS_PRESENT = CAMERA_DEVICE_STATUS_PRESENT, + // Device physically re-plugged + STATUS_NOT_PRESENT = CAMERA_DEVICE_STATUS_NOT_PRESENT, + + // Camera can be used exclusively + STATUS_AVAILABLE = 0x80000000, + // Camera is in use by another app and cannot be used exclusively + STATUS_NOT_AVAILABLE, + + // Use to initialize variables only + STATUS_UNKNOWN = 0xFFFFFFFF, + }; + + DECLARE_META_INTERFACE(CameraServiceListener); + + virtual void onStatusChanged(Status status, int32_t cameraId) = 0; +}; + +// ---------------------------------------------------------------------------- + +class BnCameraServiceListener : public BnInterface +{ +public: + virtual status_t onTransact( uint32_t code, + const Parcel& data, + Parcel* reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index b228145..e8dcdef 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -169,6 +169,9 @@ public: /** * Delete a stream. * Lock free. + * + * NOTE: As a side effect this cancels ALL streaming requests. + * * Errors: BAD_VALUE if unknown stream ID. 
* PERMISSION_DENIED if the stream wasn't yours */ diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index d7c8807..8c4f619 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -77,6 +77,10 @@ CameraService::CameraService() { ALOGI("CameraService started (pid=%d)", getpid()); gCameraService = this; + + for (size_t i = 0; i < MAX_CAMERAS; ++i) { + mStatusList[i] = ICameraServiceListener::STATUS_AVAILABLE; + } } void CameraService::onFirstRef() @@ -155,6 +159,23 @@ int CameraService::getDeviceVersion(int cameraId, int* facing) { return deviceVersion; } +bool CameraService::isValidCameraId(int cameraId) { + int facing; + int deviceVersion = getDeviceVersion(cameraId, &facing); + + switch(deviceVersion) { + case CAMERA_DEVICE_API_VERSION_1_0: + case CAMERA_DEVICE_API_VERSION_2_0: + case CAMERA_DEVICE_API_VERSION_2_1: + case CAMERA_DEVICE_API_VERSION_3_0: + return true; + default: + return false; + } + + return false; +} + sp CameraService::connect( const sp& cameraClient, int cameraId, @@ -236,6 +257,10 @@ sp CameraService::connect( int facing = -1; int deviceVersion = getDeviceVersion(cameraId, &facing); + if (isValidCameraId(cameraId)) { + updateStatus(ICameraServiceListener::STATUS_NOT_AVAILABLE, cameraId); + } + switch(deviceVersion) { case CAMERA_DEVICE_API_VERSION_1_0: client = new CameraClient(this, cameraClient, @@ -259,6 +284,9 @@ sp CameraService::connect( } if (client->initialize(mModule) != OK) { + // this is probably not recoverable.. but maybe the client can try again + updateStatus(ICameraServiceListener::STATUS_AVAILABLE, cameraId); + return NULL; } @@ -266,6 +294,7 @@ sp CameraService::connect( mClient[cameraId] = client; LOG1("CameraService::connect X (id %d, this pid is %d)", cameraId, getpid()); + return client; } @@ -275,6 +304,7 @@ sp CameraService::connect( const String16& clientPackageName, int clientUid) { + String8 clientName8(clientPackageName); int callingPid = getCallingPid(); // TODO: use clientPackageName and clientUid with appOpsMangr @@ -301,6 +331,15 @@ sp CameraService::connect( return NULL; } + // TODO: allow concurrent connections with a ProCamera + if (mBusy[cameraId]) { + + ALOGW("CameraService::connectPro X (pid %d, \"%s\") rejected" + " (camera %d is still busy).", callingPid, + clientName8.string(), cameraId); + return NULL; + } + int facing = -1; int deviceVersion = getDeviceVersion(cameraId, &facing); @@ -333,9 +372,45 @@ sp CameraService::connect( LOG1("CameraService::connectPro X (id %d, this pid is %d)", cameraId, getpid()); return client; +} +status_t CameraService::addListener( + const sp& listener) { + ALOGV("%s: Add listener %p", __FUNCTION__, listener.get()); - return NULL; + Mutex::Autolock lock(mServiceLock); + + Vector >::iterator it, end; + for (it = mListenerList.begin(); it != mListenerList.end(); ++it) { + if ((*it)->asBinder() == listener->asBinder()) { + ALOGW("%s: Tried to add listener %p which was already subscribed", + __FUNCTION__, listener.get()); + return ALREADY_EXISTS; + } + } + + mListenerList.push_back(listener); + + return OK; +} +status_t CameraService::removeListener( + const sp& listener) { + ALOGV("%s: Remove listener %p", __FUNCTION__, listener.get()); + + Mutex::Autolock lock(mServiceLock); + + Vector >::iterator it; + for (it = mListenerList.begin(); it != mListenerList.end(); ++it) { + if ((*it)->asBinder() == listener->asBinder()) { + mListenerList.erase(it); + return OK; + } + } 
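addListener()/removeListener() keep a flat listener list keyed by binder identity, returning ALREADY_EXISTS for a duplicate registration and BAD_VALUE for a listener that was never added. Below is a simplified sketch of that bookkeeping using shared_ptr identity and stand-in status values in place of the Binder and utils/Errors types; it mirrors the ordering the ServiceListenersSubscribe test expects.

    #include <algorithm>
    #include <cstdio>
    #include <memory>
    #include <mutex>
    #include <vector>

    struct Listener { virtual ~Listener() = default; };

    enum Status { OK = 0, ALREADY_EXISTS = -17, BAD_VALUE = -22 };  // illustrative values

    class ListenerRegistry {
      public:
        Status add(const std::shared_ptr<Listener>& l) {
            std::lock_guard<std::mutex> lock(mMutex);
            if (std::find(mListeners.begin(), mListeners.end(), l) != mListeners.end())
                return ALREADY_EXISTS;          // same identity registered twice
            mListeners.push_back(l);
            return OK;
        }
        Status remove(const std::shared_ptr<Listener>& l) {
            std::lock_guard<std::mutex> lock(mMutex);
            auto it = std::find(mListeners.begin(), mListeners.end(), l);
            if (it == mListeners.end())
                return BAD_VALUE;               // was never (or no longer) registered
            mListeners.erase(it);
            return OK;
        }
      private:
        std::mutex mMutex;
        std::vector<std::shared_ptr<Listener>> mListeners;
    };

    int main() {
        ListenerRegistry reg;
        auto l = std::make_shared<Listener>();
        std::printf("remove before add -> %d\n", reg.remove(l));  // BAD_VALUE
        std::printf("first add         -> %d\n", reg.add(l));     // OK
        std::printf("duplicate add     -> %d\n", reg.add(l));     // ALREADY_EXISTS
        std::printf("remove            -> %d\n", reg.remove(l));  // OK
        return 0;
    }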
+ + ALOGW("%s: Tried to remove a listener %p which was not subscribed", + __FUNCTION__, listener.get()); + + return BAD_VALUE; } void CameraService::removeClientByRemote(const wp& remoteBinder) { @@ -699,6 +774,8 @@ void CameraService::Client::notifyError() { void CameraService::Client::disconnect() { BasicClient::disconnect(); mCameraService->setCameraFree(mCameraId); + mCameraService->updateStatus(ICameraServiceListener::STATUS_AVAILABLE, + mCameraId); } CameraService::Client::OpsCallback::OpsCallback(wp client): @@ -774,6 +851,10 @@ bool CameraService::ProClient::hasExclusiveLock() { return false; } +void CameraService::ProClient::onExclusiveLockStolen() { + ALOGE("%s: not implemented yet", __FUNCTION__); +} + status_t CameraService::ProClient::submitRequest(camera_metadata_t* request, bool streaming) { ALOGE("%s: not implemented yet", __FUNCTION__); @@ -944,4 +1025,47 @@ status_t CameraService::dump(int fd, const Vector& args) { } +void CameraService::updateStatus(ICameraServiceListener::Status status, + int32_t cameraId) { + // do not lock mServiceLock here or can get into a deadlock from + // connect() -> ProClient::disconnect -> updateStatus + Mutex::Autolock lock(mStatusMutex); + updateStatusUnsafe(status, cameraId); +} + +void CameraService::updateStatusUnsafe(ICameraServiceListener::Status status, + int32_t cameraId) { + + ICameraServiceListener::Status oldStatus = mStatusList[cameraId]; + + mStatusList[cameraId] = status; + + if (oldStatus != status) { + ALOGV("%s: Status has changed for camera ID %d from 0x%x to 0x%x", + __FUNCTION__, cameraId, (uint32_t)oldStatus, (uint32_t)status); + + /** + * ProClients lose their exclusive lock. + * - Done before the CameraClient can initialize the HAL device, + * since we want to be able to close it before they get to initialize + */ + if (status == ICameraServiceListener::STATUS_NOT_AVAILABLE) { + Vector > proClients(mProClientList[cameraId]); + Vector >::const_iterator it; + + for (it = proClients.begin(); it != proClients.end(); ++it) { + sp proCl = it->promote(); + if (proCl.get() != NULL) { + proCl->onExclusiveLockStolen(); + } + } + } + + Vector >::const_iterator it; + for (it = mListenerList.begin(); it != mListenerList.end(); ++it) { + (*it)->onStatusChanged(status, cameraId); + } + } +} + }; // namespace android diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index d93aa73..8acc63f 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -30,6 +30,8 @@ #include #include +#include + /* This needs to be increased if we can have more cameras */ #define MAX_CAMERAS 2 @@ -67,6 +69,10 @@ public: virtual sp connect(const sp& cameraCb, int cameraId, const String16& clientPackageName, int clientUid); + virtual status_t addListener(const sp& listener); + virtual status_t removeListener( + const sp& listener); + // Extra permissions checks virtual status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags); @@ -263,6 +269,9 @@ public: virtual status_t requestStream(int streamId); virtual status_t cancelStream(int streamId); + // Callbacks from camera service + virtual void onExclusiveLockStolen(); + protected: virtual void notifyError(); @@ -303,11 +312,30 @@ private: camera_module_t *mModule; + Vector > + mListenerList; + + // guard only mStatusList and the broadcasting of ICameraServiceListener + Mutex mStatusMutex; + ICameraServiceListener::Status + mStatusList[MAX_CAMERAS]; + + // 
Broadcast the new status if it changed (locks the service mutex) + void updateStatus( + ICameraServiceListener::Status status, + int32_t cameraId); + // Call this one when the service mutex is already held (idempotent) + void updateStatusUnsafe( + ICameraServiceListener::Status status, + int32_t cameraId); + // IBinder::DeathRecipient implementation - virtual void binderDied(const wp &who); + virtual void binderDied(const wp &who); // Helpers int getDeviceVersion(int cameraId, int* facing); + + bool isValidCameraId(int cameraId); }; } // namespace android diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp index eda3012..6fed8b4 100644 --- a/services/camera/libcameraservice/ProCamera2Client.cpp +++ b/services/camera/libcameraservice/ProCamera2Client.cpp @@ -115,6 +115,8 @@ status_t ProCamera2Client::exclusiveTryLock() { Mutex::Autolock icl(mIProCameraUserLock); SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + if (!mDevice.get()) return PERMISSION_DENIED; + if (!mExclusiveLock) { mExclusiveLock = true; @@ -144,6 +146,8 @@ status_t ProCamera2Client::exclusiveLock() { Mutex::Autolock icl(mIProCameraUserLock); SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + if (!mDevice.get()) return PERMISSION_DENIED; + /** * TODO: this should asynchronously 'wait' until the lock becomes available * if another client already has an exclusive lock. @@ -197,12 +201,33 @@ bool ProCamera2Client::hasExclusiveLock() { return mExclusiveLock; } +void ProCamera2Client::onExclusiveLockStolen() { + ALOGV("%s: ProClient lost exclusivity (id %d)", + __FUNCTION__, mCameraId); + + Mutex::Autolock icl(mIProCameraUserLock); + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + + if (mExclusiveLock && mRemoteCallback.get() != NULL) { + mRemoteCallback->onLockStatusChanged( + IProCameraCallbacks::LOCK_STOLEN); + } + + mExclusiveLock = false; + + //TODO: we should not need to detach the device, merely reset it. 
+ detachDevice(); +} + status_t ProCamera2Client::submitRequest(camera_metadata_t* request, bool streaming) { ATRACE_CALL(); ALOGV("%s", __FUNCTION__); Mutex::Autolock icl(mIProCameraUserLock); + + if (!mDevice.get()) return DEAD_OBJECT; + if (!mExclusiveLock) { return PERMISSION_DENIED; } @@ -224,6 +249,9 @@ status_t ProCamera2Client::cancelRequest(int requestId) { ALOGV("%s", __FUNCTION__); Mutex::Autolock icl(mIProCameraUserLock); + + if (!mDevice.get()) return DEAD_OBJECT; + if (!mExclusiveLock) { return PERMISSION_DENIED; } @@ -247,6 +275,7 @@ status_t ProCamera2Client::cancelStream(int streamId) { Mutex::Autolock icl(mIProCameraUserLock); + if (!mDevice.get()) return DEAD_OBJECT; mDevice->clearStreamingRequest(); status_t code; @@ -274,6 +303,8 @@ status_t ProCamera2Client::createStream(int width, int height, int format, Mutex::Autolock icl(mIProCameraUserLock); + if (!mDevice.get()) return DEAD_OBJECT; + sp binder; sp window; if (bufferProducer != 0) { @@ -303,6 +334,8 @@ status_t ProCamera2Client::createDefaultRequest(int templateId, Mutex::Autolock icl(mIProCameraUserLock); + if (!mDevice.get()) return DEAD_OBJECT; + CameraMetadata metadata; if ( (res = mDevice->createDefaultRequest(templateId, &metadata) ) == OK) { *request = metadata.release(); @@ -319,6 +352,10 @@ status_t ProCamera2Client::getCameraInfo(int cameraId, return INVALID_OPERATION; } + Mutex::Autolock icl(mIProCameraUserLock); + + if (!mDevice.get()) return DEAD_OBJECT; + CameraMetadata deviceInfo = mDevice->info(); *info = deviceInfo.release(); @@ -341,6 +378,12 @@ status_t ProCamera2Client::dump(int fd, const Vector& args) { result = " Device dump:\n"; write(fd, result.string(), result.size()); + if (!mDevice.get()) { + result = " *** Device is detached\n"; + write(fd, result.string(), result.size()); + return NO_ERROR; + } + status_t res = mDevice->dump(fd, args); if (res != OK) { result = String8::format(" Error dumping device: %s (%d)", @@ -363,9 +406,19 @@ void ProCamera2Client::disconnect() { int callingPid = getCallingPid(); if (callingPid != mClientPid && callingPid != mServicePid) return; + ALOGV("Camera %d: Shutting down", mCameraId); + + detachDevice(); + ProClient::disconnect(); + + ALOGV("Camera %d: Shut down complete complete", mCameraId); +} + +void ProCamera2Client::detachDevice() { if (mDevice == 0) return; - ALOGV("Camera %d: Shutting down", mCameraId); + ALOGV("Camera %d: Stopping processors", mCameraId); + mFrameProcessor->removeListener(FRAME_PROCESSOR_LISTENER_MIN_ID, FRAME_PROCESSOR_LISTENER_MAX_ID, /*listener*/this); @@ -374,11 +427,22 @@ void ProCamera2Client::disconnect() { mFrameProcessor->join(); ALOGV("Camera %d: Disconnecting device", mCameraId); + // WORKAROUND: HAL refuses to disconnect while there's streams in flight + { + mDevice->clearStreamingRequest(); + + status_t code; + if ((code = mDevice->waitUntilDrained()) != OK) { + ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__, + code); + } + } + mDevice->disconnect(); mDevice.clear(); - ProClient::disconnect(); + ALOGV("Camera %d: Detach complete", mCameraId); } status_t ProCamera2Client::connect(const sp& client) { diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/ProCamera2Client.h index 9f514f4..ff6f4e2 100644 --- a/services/camera/libcameraservice/ProCamera2Client.h +++ b/services/camera/libcameraservice/ProCamera2Client.h @@ -104,6 +104,9 @@ public: const sp& getCameraDevice(); const sp& getCameraService(); + // Callbacks from camera service + virtual void 
onExclusiveLockStolen(); + /** * Interface used by independent components of ProCamera2Client. */ @@ -167,6 +170,8 @@ private: // - if no we can't modify the request queue. // note that creating/deleting streams we own is still OK bool mExclusiveLock; + + void detachDevice(); }; }; // namespace android -- cgit v1.1 From ba5ca4ee770fa0fe9e14990fd13b23f1010f5c98 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 28 Feb 2013 11:21:00 -0800 Subject: ProCamera: Add CpuConsumer asynchronous mode support Bug: 8290146 Bug: 8291751 Change-Id: I25423a2b8a70ac7169911b1c7b482aa17190fe0f --- camera/ProCamera.cpp | 21 +++++++++++++++- camera/tests/ProCameraTests.cpp | 56 +++++++++++++++++++++++++++++++++++++++-- include/camera/ProCamera.h | 7 ++++++ 3 files changed, 81 insertions(+), 3 deletions(-) diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 13ba07c..3cfabf6 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -241,6 +241,17 @@ status_t ProCamera::createStreamCpu(int width, int height, int format, int heapCount, /*out*/ sp* cpuConsumer, + int* streamId) { + return createStreamCpu(width, height, format, heapCount, + /*synchronousMode*/true, + cpuConsumer, streamId); +} + +status_t ProCamera::createStreamCpu(int width, int height, int format, + int heapCount, + bool synchronousMode, + /*out*/ + sp* cpuConsumer, int* streamId) { ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height, @@ -251,7 +262,7 @@ status_t ProCamera::createStreamCpu(int width, int height, int format, sp c = mCamera; if (c == 0) return NO_INIT; - sp cc = new CpuConsumer(heapCount); + sp cc = new CpuConsumer(heapCount, synchronousMode); cc->setName(String8("ProCamera::mCpuConsumer")); sp stc = new Surface( @@ -272,6 +283,7 @@ status_t ProCamera::createStreamCpu(int width, int height, int format, getStreamInfo(*streamId).cpuStream = true; getStreamInfo(*streamId).cpuConsumer = cc; + getStreamInfo(*streamId).synchronousMode = synchronousMode; getStreamInfo(*streamId).stc = stc; // for lifetime management getStreamInfo(*streamId).frameAvailableListener = frameAvailableListener; @@ -373,6 +385,13 @@ int ProCamera::dropFrameBuffer(int streamId, int count) { return BAD_VALUE; } + if (!si.synchronousMode) { + ALOGW("%s: No need to drop frames on asynchronous streams," + " as asynchronous mode only keeps 1 latest frame around.", + __FUNCTION__); + return BAD_VALUE; + } + int numDropped = 0; for (int i = 0; i < count; ++i) { CpuConsumer::LockedBuffer buffer; diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index c61e71a..1a8564e 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -1061,7 +1061,7 @@ TEST_F(ProCameraTest, WaitForDualStreamBuffer) { EXPECT_OK(mCamera->exclusiveUnlock()); } -TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFrames) { +TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesSync) { if (HasFatalFailure()) { return; } @@ -1071,7 +1071,8 @@ TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFrames) { int streamId = -1; sp consumer; EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, - TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); + TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, + /*synchronousMode*/true, &consumer, &streamId)); EXPECT_NE(-1, streamId); EXPECT_OK(mCamera->exclusiveTryLock()); @@ -1114,6 +1115,57 @@ TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFrames) { EXPECT_OK(mCamera->exclusiveUnlock()); } +TEST_F(ProCameraTest, 
WaitForSingleStreamBufferAndDropFramesAsync) { + if (HasFatalFailure()) { + return; + } + + const int NUM_REQUESTS = 20 * TEST_CPU_FRAME_COUNT; + + int streamId = -1; + sp consumer; + EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, + TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, + /*synchronousMode*/false, &consumer, &streamId)); + EXPECT_NE(-1, streamId); + + EXPECT_OK(mCamera->exclusiveTryLock()); + + uint8_t streams[] = { streamId }; + ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1, + /*requests*/NUM_REQUESTS)); + + // Consume a couple of results + for (int i = 0; i < NUM_REQUESTS; ++i) { + int numFrames; + EXPECT_TRUE((numFrames = mCamera->waitForFrameBuffer(streamId)) > 0); + + dout << "Dropped " << (numFrames - 1) << " frames" << std::endl; + + // Skip the counter ahead, don't try to consume these frames again + i += numFrames-1; + + // "Consume" the buffer + CpuConsumer::LockedBuffer buf; + EXPECT_OK(consumer->lockNextBuffer(&buf)); + + dout << "Buffer asynchronously received on streamId = " << streamId << + ", dataPtr = " << (void*)buf.data << + ", timestamp = " << buf.timestamp << std::endl; + + // Process at 10fps, stream is at 15fps. + // This means we will definitely fill up the buffer queue with + // extra buffers and need to drop them. + usleep(TEST_FRAME_PROCESSING_DELAY_US); + + EXPECT_OK(consumer->unlockBuffer(buf)); + } + + // Done: clean up + EXPECT_OK(mCamera->deleteStream(streamId)); + EXPECT_OK(mCamera->exclusiveUnlock()); +} + //TODO: refactor into separate file diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index e8dcdef..5d6cfaa 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -201,6 +201,12 @@ public: /*out*/ sp* cpuConsumer, int* streamId); + status_t createStreamCpu(int width, int height, int format, + int heapCount, + bool synchronousMode, + /*out*/ + sp* cpuConsumer, + int* streamId); // Create a request object from a template. status_t createDefaultRequest(int templateId, @@ -296,6 +302,7 @@ private: int streamID; bool cpuStream; sp cpuConsumer; + bool synchronousMode; sp frameAvailableListener; sp stc; int frameReady; -- cgit v1.1 From ab7d72f0804fbb7e91ad9d2a16f826d97e20e5d0 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 27 Feb 2013 09:05:28 -0800 Subject: media.log cleanup Remove almost all of the specific logs, but leave the media.log logging infrastructure in place for the next time we need it. Re-apply a few good changes that were reverted earlier: - check logf format vs. argument list compatibility - distinguish potentially modified and actually modified tracks in FastMixer - fix benign bug where sq->end() was called more than once - fix a build warning Bug: 6490974 Change-Id: I02d3e83646c738acaebb415bd0d6b548638b4ef5 --- include/media/nbaio/NBLog.h | 4 ++-- services/audioflinger/AudioMixer.cpp | 7 ++++++- services/audioflinger/AudioMixer.h | 8 +++++++- services/audioflinger/FastMixer.cpp | 24 +++++++----------------- services/audioflinger/Threads.cpp | 13 ------------- services/audioflinger/Threads.h | 4 ++-- services/audioflinger/Tracks.cpp | 4 ---- 7 files changed, 24 insertions(+), 40 deletions(-) diff --git a/include/media/nbaio/NBLog.h b/include/media/nbaio/NBLog.h index 8fc417f..107ba66 100644 --- a/include/media/nbaio/NBLog.h +++ b/include/media/nbaio/NBLog.h @@ -115,7 +115,7 @@ public: virtual ~Writer() { } virtual void log(const char *string); - virtual void logf(const char *fmt, ...); + virtual void logf(const char *fmt, ...) 
__attribute__ ((format (printf, 2, 3))); virtual void logvf(const char *fmt, va_list ap); virtual void logTimestamp(); virtual void logTimestamp(const struct timespec& ts); @@ -149,7 +149,7 @@ public: LockedWriter(size_t size, void *shared); virtual void log(const char *string); - virtual void logf(const char *fmt, ...); + virtual void logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3))); virtual void logvf(const char *fmt, va_list ap); virtual void logTimestamp(); virtual void logTimestamp(const struct timespec& ts); diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index 17b6a8a..7d38f80 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -122,6 +122,7 @@ AudioMixer::AudioMixer(size_t frameCount, uint32_t sampleRate, uint32_t maxNumTr mState.hook = process__nop; mState.outputTemp = NULL; mState.resampleTemp = NULL; + mState.mLog = &mDummyLog; // mState.reserved // FIXME Most of the following initialization is probably redundant since @@ -169,6 +170,11 @@ AudioMixer::~AudioMixer() delete [] mState.resampleTemp; } +void AudioMixer::setLog(NBLog::Writer *log) +{ + mState.mLog = log; +} + int AudioMixer::getTrackName(audio_channel_mask_t channelMask, int sessionId) { uint32_t names = (~mTrackNames) & mConfiguredNames; @@ -620,7 +626,6 @@ void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider } - void AudioMixer::process(int64_t pts) { mState.hook(&mState, pts); diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index fd21fda..43aeb86 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -28,6 +28,7 @@ #include #include +#include namespace android { @@ -220,7 +221,8 @@ private: void (*hook)(state_t* state, int64_t pts); // one of process__*, never NULL int32_t *outputTemp; int32_t *resampleTemp; - int32_t reserved[2]; + NBLog::Writer* mLog; + int32_t reserved[1]; // FIXME allocate dynamically to save some memory when maxNumTracks < MAX_NUM_TRACKS track_t tracks[MAX_NUM_TRACKS]; __attribute__((aligned(32))); }; @@ -247,6 +249,10 @@ private: const uint32_t mSampleRate; + NBLog::Writer mDummyLog; +public: + void setLog(NBLog::Writer* log); +private: state_t mState __attribute__((aligned(32))); // effect descriptor for the downmixer used by the mixer diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 80e37ca..2832e96 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -120,12 +120,13 @@ bool FastMixer::threadLoop() FastMixerState::Command command = next->mCommand; if (next != current) { - logWriter->log("next != current"); - // As soon as possible of learning of a new dump area, start using it dumpState = next->mDumpState != NULL ? next->mDumpState : &dummyDumpState; teeSink = next->mTeeSink; logWriter = next->mNBLogWriter != NULL ? next->mNBLogWriter : &dummyLogWriter; + if (mixer != NULL) { + mixer->setLog(logWriter); + } // We want to always have a valid reference to the previous (non-idle) state. // However, the state queue only guarantees access to current and previous states. 
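// Editorial aside, not part of this patch: a minimal, standalone sketch of the
// compile-time checking that the format attribute added to NBLog::Writer::logf()
// above enables. The Logger type here is a hypothetical stand-in; for a
// non-static member function, argument 1 is the implicit 'this', so the format
// string is argument 2 and the variadic arguments start at position 3.
#include <cstdarg>
#include <cstdio>

struct Logger {
    void logf(const char *fmt, ...) __attribute__((format(printf, 2, 3)));
};

void Logger::logf(const char *fmt, ...) {
    va_list ap;
    va_start(ap, fmt);
    vfprintf(stderr, fmt, ap);
    va_end(ap);
}

int main() {
    Logger log;
    log.logf("gen %d", 3);          // OK
    // log.logf("gen %d", "three"); // rejected by -Wformat at compile time
    return 0;
}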
@@ -167,7 +168,6 @@ bool FastMixer::threadLoop() ALOG_ASSERT(coldFutexAddr != NULL); int32_t old = android_atomic_dec(coldFutexAddr); if (old <= 0) { - logWriter->log("wait"); __futex_syscall4(coldFutexAddr, FUTEX_WAIT_PRIVATE, old - 1, NULL); } // This may be overly conservative; there could be times that the normal mixer @@ -186,7 +186,6 @@ bool FastMixer::threadLoop() } continue; case FastMixerState::EXIT: - logWriter->log("exit"); delete mixer; delete[] mixBuffer; return false; @@ -264,15 +263,11 @@ bool FastMixer::threadLoop() unsigned currentTrackMask = current->mTrackMask; dumpState->mTrackMask = currentTrackMask; if (current->mFastTracksGen != fastTracksGen) { - logWriter->logf("gen %d", current->mFastTracksGen); ALOG_ASSERT(mixBuffer != NULL); int name; // process removed tracks first to avoid running out of track names unsigned removedTracks = previousTrackMask & ~currentTrackMask; - if (removedTracks) { - logWriter->logf("removed %#x", removedTracks); - } while (removedTracks != 0) { i = __builtin_ctz(removedTracks); removedTracks &= ~(1 << i); @@ -292,9 +287,6 @@ bool FastMixer::threadLoop() // now process added tracks unsigned addedTracks = currentTrackMask & ~previousTrackMask; - if (addedTracks) { - logWriter->logf("added %#x", addedTracks); - } while (addedTracks != 0) { i = __builtin_ctz(addedTracks); addedTracks &= ~(1 << i); @@ -322,17 +314,15 @@ bool FastMixer::threadLoop() generations[i] = fastTrack->mGeneration; } - // finally process modified tracks; these use the same slot + // finally process (potentially) modified tracks; these use the same slot // but may have a different buffer provider or volume provider unsigned modifiedTracks = currentTrackMask & previousTrackMask; - if (modifiedTracks) { - logWriter->logf("modified %#x", modifiedTracks); - } while (modifiedTracks != 0) { i = __builtin_ctz(modifiedTracks); modifiedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; if (fastTrack->mGeneration != generations[i]) { + // this track was actually modified AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL); if (mixer != NULL) { @@ -453,7 +443,7 @@ bool FastMixer::threadLoop() ATRACE_END(); dumpState->mWriteSequence++; if (framesWritten >= 0) { - ALOG_ASSERT(framesWritten <= frameCount); + ALOG_ASSERT((size_t) framesWritten <= frameCount); dumpState->mFramesWritten += framesWritten; //if ((size_t) framesWritten == frameCount) { // didFullWrite = true; @@ -471,7 +461,7 @@ bool FastMixer::threadLoop() struct timespec newTs; int rc = clock_gettime(CLOCK_MONOTONIC, &newTs); if (rc == 0) { - logWriter->logTimestamp(newTs); + //logWriter->logTimestamp(newTs); if (oldTsValid) { time_t sec = newTs.tv_sec - oldTs.tv_sec; long nsec = newTs.tv_nsec - oldTs.tv_nsec; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 1209ea6..b80ad25 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1249,7 +1249,6 @@ Exit: if (status) { *status = lStatus; } - mNBLogWriter->logf("createTrack_l"); return track; } @@ -1317,7 +1316,6 @@ float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) con // addTrack_l() must be called with ThreadBase::mLock held status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) { - mNBLogWriter->logf("addTrack_l mName=%d", track->mName); status_t status = ALREADY_EXISTS; // set retry count for buffer fill @@ -1351,7 +1349,6 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) 
// destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - mNBLogWriter->logf("destroyTrack_l mName=%d", track->mName); track->mState = TrackBase::TERMINATED; // active tracks are removed by threadLoop() if (mActiveTracks.indexOf(track) < 0) { @@ -1361,7 +1358,6 @@ void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) { - mNBLogWriter->logf("removeTrack_l mName=%d", track->mName); track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); mTracks.remove(track); deleteTrackName_l(track->name()); @@ -1934,7 +1930,6 @@ bool AudioFlinger::PlaybackThread::threadLoop() threadLoop_standby(); - mNBLogWriter->log("standby"); mStandby = true; } @@ -2030,9 +2025,6 @@ if (mType == MIXER) { // since we can't guarantee the destructors won't acquire that // same lock. This will also mutate and push a new fast mixer state. threadLoop_removeTracks(tracksToRemove); - if (tracksToRemove.size() > 0) { - logString = "remove"; - } tracksToRemove.clear(); // FIXME I don't understand the need for this here; @@ -2839,7 +2831,6 @@ track_is_ready: ; block = FastMixerStateQueue::BLOCK_UNTIL_ACKED; pauseAudioWatchdog = true; } - sq->end(); } if (sq != NULL) { sq->end(didModify); @@ -2870,7 +2861,6 @@ track_is_ready: ; if (CC_UNLIKELY(count)) { for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mNBLogWriter->logf("prepareTracks_l remove name=%u", track->name()); mActiveTracks.remove(track); if (track->mainBuffer() != mMixBuffer) { chain = getEffectChain_l(track->sessionId()); @@ -3247,9 +3237,6 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep // remove all the tracks that need to be... 
if (CC_UNLIKELY(trackToRemove != 0)) { tracksToRemove->add(trackToRemove); -#if 0 - mNBLogWriter->logf("prepareTracks_l remove name=%u", trackToRemove->name()); -#endif mActiveTracks.remove(trackToRemove); if (!mEffectChains.isEmpty()) { ALOGV("stopping track on chain %p for session Id: %d", mEffectChains[0].get(), diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index fa1e336..4595b6c 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -315,7 +315,7 @@ protected: // keyed by session ID, the second by type UUID timeLow field KeyedVector< int, KeyedVector< int, sp > > mSuspendedSessions; - static const size_t kLogSize = 512; + static const size_t kLogSize = 4 * 1024; sp mNBLogWriter; }; @@ -546,7 +546,7 @@ private: sp mTeeSink; sp mTeeSource; uint32_t mScreenState; // cached copy of gScreenState - static const size_t kFastMixerLogSize = 8 * 1024; + static const size_t kFastMixerLogSize = 4 * 1024; sp mFastMixerNBLogWriter; public: virtual bool hasFastMixer() const = 0; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 724ce38..30fe1f2 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -598,7 +598,6 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("start mName=%d", mName); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track @@ -641,7 +640,6 @@ void AudioFlinger::PlaybackThread::Track::stop() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("stop mName=%d", mName); track_state state = mState; if (state == RESUMING || state == ACTIVE || state == PAUSING || state == PAUSED) { // If the track is not active (PAUSED and buffers full), flush buffers @@ -678,7 +676,6 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("pause mName=%d", mName); if (mState == ACTIVE || mState == RESUMING) { mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); @@ -702,7 +699,6 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - thread->mNBLogWriter->logf("flush mName=%d", mName); if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { return; -- cgit v1.1 From c9b2e20f7c9a71e07ef398152709c76079decbcd Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 26 Feb 2013 11:32:32 -0800 Subject: Miscellaneous cleanup Abbreviation framesReady to fRdy for new systrace. Put inline const on one line. Use local copy of mState in state. Improve logging. Line length 100. 
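As a worked, editorial illustration of the "framesReady to fRdy" item above (not part of
this commit): FastMixer appends the track index to the shortened systrace label as a
single character, '0'..'9' for indexes below ten and 'A' onward above that, which keeps
the per-track counter name short. A standalone sketch of that mapping:

#include <cstdio>
#include <cstring>

int main() {
    for (int i = 0; i < 16; ++i) {
        char traceName[16];
        strcpy(traceName, "fRdy");
        traceName[4] = i + (i < 10 ? '0' : 'A' - 10);  // track index as one character
        traceName[5] = '\0';
        printf("track %2d -> %s\n", i, traceName);     // fRdy0 ... fRdy9, fRdyA ...
    }
    return 0;
}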
Change-Id: I8201c3ce0e53fd464fd33d02544e52c342d40b68 --- include/media/AudioTrack.h | 2 +- media/libmedia/AudioRecord.cpp | 6 +++--- services/audioflinger/AudioFlinger.cpp | 4 ++-- services/audioflinger/FastMixer.cpp | 6 +++--- services/audioflinger/PlaybackTracks.h | 12 +++--------- services/audioflinger/Threads.cpp | 5 +++-- services/audioflinger/TrackBase.h | 4 +--- services/audioflinger/Tracks.cpp | 2 +- 8 files changed, 17 insertions(+), 24 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 9d07ed5..db5a7ab 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -226,7 +226,7 @@ public: * This includes the latency due to AudioTrack buffer size, AudioMixer (if any) * and audio hardware driver. */ - uint32_t latency() const { return mLatency; } + uint32_t latency() const { return mLatency; } /* getters, see constructors and set() */ diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 0a2b0b0..40ff1bf 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -47,9 +47,9 @@ status_t AudioRecord::getMinFrameCount( *frameCount = 0; size_t size = 0; - if (AudioSystem::getInputBufferSize(sampleRate, format, channelMask, &size) - != NO_ERROR) { - ALOGE("AudioSystem could not query the input buffer size."); + status_t status = AudioSystem::getInputBufferSize(sampleRate, format, channelMask, &size); + if (status != NO_ERROR) { + ALOGE("AudioSystem could not query the input buffer size; status %d", status); return NO_INIT; } diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index e81267f..b3de526 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -464,7 +464,7 @@ sp AudioFlinger::createTrack( PlaybackThread *thread = checkPlaybackThread_l(output); PlaybackThread *effectThread = NULL; if (thread == NULL) { - ALOGE("unknown output thread"); + ALOGE("no playback thread found for output handle %d", output); lStatus = BAD_VALUE; goto Exit; } @@ -589,7 +589,7 @@ uint32_t AudioFlinger::latency(audio_io_handle_t output) const Mutex::Autolock _l(mLock); PlaybackThread *thread = checkPlaybackThread_l(output); if (thread == NULL) { - ALOGW("latency() unknown thread %d", output); + ALOGW("latency(): no playback thread found for output handle %d", output); return 0; } return thread->latency(); diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 2832e96..24a6dfe 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -388,9 +388,9 @@ bool FastMixer::threadLoop() if (ATRACE_ENABLED()) { // I wish we had formatted trace names char traceName[16]; - strcpy(traceName, "framesReady"); - traceName[11] = i + (i < 10 ? '0' : 'A' - 10); - traceName[12] = '\0'; + strcpy(traceName, "fRdy"); + traceName[4] = i + (i < 10 ? 
'0' : 'A' - 10); + traceName[5] = '\0'; ATRACE_INT(traceName, framesReady); } FastTrackDump *ftDump = &dumpState->mTracks[i]; diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index adec938..a749d7a 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -77,15 +77,9 @@ protected: virtual size_t framesReady() const; - bool isPausing() const { - return mState == PAUSING; - } - bool isPaused() const { - return mState == PAUSED; - } - bool isResuming() const { - return mState == RESUMING; - } + bool isPausing() const { return mState == PAUSING; } + bool isPaused() const { return mState == PAUSED; } + bool isResuming() const { return mState == RESUMING; } bool isReady() const; void setPaused() { mState = PAUSED; } void reset(); diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index ec8ffa0..9d98f0b 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2795,7 +2795,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // No buffers for this track. Give it a few chances to // fill a buffer, then remove it from active list. if (--(track->mRetryCount) <= 0) { - ALOGV("BUFFER TIMEOUT: remove(%d) from active list on thread %p", name, this); + ALOGI("BUFFER TIMEOUT: remove(%d) from active list on thread %p", name, this); tracksToRemove->add(track); // indicate to client process that the track was disabled because of underrun; // it will then automatically call start() when data is available @@ -3725,7 +3725,8 @@ bool AudioFlinger::RecordThread::threadLoop() readInto = mRsmpInBuffer; mRsmpInIndex = 0; } - mBytesRead = mInput->stream->read(mInput->stream, readInto, mInputBytes); + mBytesRead = mInput->stream->read(mInput->stream, readInto, + mInputBytes); if (mBytesRead <= 0) { if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE)) { diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h index fecbfda..fac7071 100644 --- a/services/audioflinger/TrackBase.h +++ b/services/audioflinger/TrackBase.h @@ -68,9 +68,7 @@ protected: // but putting it in TrackBase avoids the complexity of virtual inheritance virtual size_t framesReady() const { return SIZE_MAX; } - audio_format_t format() const { - return mFormat; - } + audio_format_t format() const { return mFormat; } uint32_t channelCount() const { return mChannelCount; } diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index b816338..a6ab4f8 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -607,7 +607,7 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track - if (mState == PAUSED) { + if (state == PAUSED) { mState = TrackBase::RESUMING; ALOGV("PAUSED => RESUMING (%d) on thread %p", mName, this); } else { -- cgit v1.1 From 8602e5501a653a00e2f82d6fd3f7558382fea1dd Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 4 Mar 2013 13:05:22 -0800 Subject: Modified udptest to sync time across devices. 
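An editorial sketch (not part of this commit) of the exchange the reworked udptest
performs: the client stamps T1 when it sends, the server stamps T2 on receipt and T3 on
its reply, and the client stamps T4 on arrival. Assuming the one-way delay is the same in
both directions, the round-trip delay and the clock offset follow from the formulas the
patch uses; the timestamps below are made-up values in microseconds.

#include <cstdint>
#include <cstdio>

struct TimeInfo {
    int64_t mT1;  // client timestamp at send
    int64_t mT2;  // server timestamp at receive
    int64_t mT3;  // server timestamp at send
    int64_t mT4;  // client timestamp at receive
};

int main() {
    TimeInfo ti = { 1000, 4500, 4600, 1700 };  // illustrative values only

    // One-way delay, assumed symmetric.
    int64_t delay  = (ti.mT2 - ti.mT1 + ti.mT4 - ti.mT3) / 2;   // = 300 us
    // Offset of the server clock relative to the client clock.
    int64_t offset = (ti.mT2 - ti.mT1 - ti.mT4 + ti.mT3) / 2;   // = 3200 us

    printf("delay = %lld us, offset = %lld us\n",
           (long long)delay, (long long)offset);
    return 0;
}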
Change-Id: Ib055cd8ab5931395907d017331e27f5d781d7019 --- media/libstagefright/wifi-display/udptest.cpp | 144 ++++++++++++++++---------- 1 file changed, 92 insertions(+), 52 deletions(-) diff --git a/media/libstagefright/wifi-display/udptest.cpp b/media/libstagefright/wifi-display/udptest.cpp index 1cd82c3..86437e0 100644 --- a/media/libstagefright/wifi-display/udptest.cpp +++ b/media/libstagefright/wifi-display/udptest.cpp @@ -47,8 +47,18 @@ private: kWhatStartClient, kWhatUDPNotify, kWhatSendPacket, + kWhatTimedOut, }; + struct TimeInfo { + int64_t mT1; // client timestamp at send + int64_t mT2; // server timestamp at receive + int64_t mT3; // server timestamp at send + int64_t mT4; // client timestamp at receive + }; + + static const int64_t kTimeoutDelayUs = 1000000ll; + sp mNetSession; bool mIsServer; @@ -57,9 +67,16 @@ private: uint32_t mSeqNo; double mTotalTimeUs; int32_t mCount; + int64_t mSumOffsets; + + int64_t mPendingT1; + int32_t mTimeoutGeneration; void postSendPacket(int64_t delayUs = 0ll); + void postTimeout(); + void cancelTimeout(); + DISALLOW_EVIL_CONSTRUCTORS(TestHandler); }; @@ -70,7 +87,10 @@ TestHandler::TestHandler(const sp &netSession) mUDPSession(0), mSeqNo(0), mTotalTimeUs(0.0), - mCount(0) { + mCount(0), + mSumOffsets(0ll), + mPendingT1(0ll), + mTimeoutGeneration(0) { } TestHandler::~TestHandler() { @@ -131,30 +151,31 @@ void TestHandler::onMessageReceived(const sp &msg) { case kWhatSendPacket: { - char buffer[12]; - memset(buffer, 0, sizeof(buffer)); - - buffer[0] = mSeqNo >> 24; - buffer[1] = (mSeqNo >> 16) & 0xff; - buffer[2] = (mSeqNo >> 8) & 0xff; - buffer[3] = mSeqNo & 0xff; - ++mSeqNo; - - int64_t nowUs = ALooper::GetNowUs(); - buffer[4] = nowUs >> 56; - buffer[5] = (nowUs >> 48) & 0xff; - buffer[6] = (nowUs >> 40) & 0xff; - buffer[7] = (nowUs >> 32) & 0xff; - buffer[8] = (nowUs >> 24) & 0xff; - buffer[9] = (nowUs >> 16) & 0xff; - buffer[10] = (nowUs >> 8) & 0xff; - buffer[11] = nowUs & 0xff; + TimeInfo ti; + memset(&ti, 0, sizeof(ti)); + + ti.mT1 = ALooper::GetNowUs(); CHECK_EQ((status_t)OK, mNetSession->sendRequest( - mUDPSession, buffer, sizeof(buffer))); + mUDPSession, &ti, sizeof(ti))); + + mPendingT1 = ti.mT1; + postTimeout(); + break; + } + + case kWhatTimedOut: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mTimeoutGeneration) { + break; + } - postSendPacket(20000ll); + ALOGI("timed out, sending another request"); + postSendPacket(); break; } @@ -182,6 +203,9 @@ void TestHandler::onMessageReceived(const sp &msg) { strerror(-err)); mNetSession->destroySession(sessionID); + + cancelTimeout(); + looper()->stop(); break; } @@ -190,8 +214,16 @@ void TestHandler::onMessageReceived(const sp &msg) { int32_t sessionID; CHECK(msg->findInt32("sessionID", &sessionID)); - sp data; - CHECK(msg->findBuffer("data", &data)); + sp packet; + CHECK(msg->findBuffer("data", &packet)); + + int64_t arrivalTimeUs; + CHECK(packet->meta()->findInt64( + "arrivalTimeUs", &arrivalTimeUs)); + + CHECK_EQ(packet->size(), sizeof(TimeInfo)); + + TimeInfo *ti = (TimeInfo *)packet->data(); if (mIsServer) { if (!mConnected) { @@ -208,43 +240,41 @@ void TestHandler::onMessageReceived(const sp &msg) { mConnected = true; } - int64_t nowUs = ALooper::GetNowUs(); - - sp buffer = new ABuffer(data->size() + 8); - memcpy(buffer->data(), data->data(), data->size()); - - uint8_t *ptr = buffer->data() + data->size(); - - *ptr++ = nowUs >> 56; - *ptr++ = (nowUs >> 48) & 0xff; - *ptr++ = (nowUs >> 40) & 0xff; - *ptr++ = (nowUs >> 32) & 0xff; - 
*ptr++ = (nowUs >> 24) & 0xff; - *ptr++ = (nowUs >> 16) & 0xff; - *ptr++ = (nowUs >> 8) & 0xff; - *ptr++ = nowUs & 0xff; + ti->mT2 = arrivalTimeUs; + ti->mT3 = ALooper::GetNowUs(); CHECK_EQ((status_t)OK, mNetSession->sendRequest( - mUDPSession, buffer->data(), buffer->size())); + mUDPSession, ti, sizeof(*ti))); } else { - CHECK_EQ(data->size(), 20u); + if (ti->mT1 != mPendingT1) { + break; + } + + cancelTimeout(); + mPendingT1 = 0; - uint32_t seqNo = U32_AT(data->data()); - int64_t t1 = U64_AT(data->data() + 4); - int64_t t2 = U64_AT(data->data() + 12); + ti->mT4 = arrivalTimeUs; - int64_t t3; - CHECK(data->meta()->findInt64("arrivalTimeUs", &t3)); + // One way delay for a packet to travel from client + // to server or back (assumed to be the same either way). + int64_t delay = + (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2; -#if 0 - printf("roundtrip seqNo %u, time = %lld us\n", - seqNo, t3 - t1); -#else - mTotalTimeUs += t3 - t1; + // Offset between the client clock (T1, T4) and the + // server clock (T2, T3) timestamps. + int64_t offset = + (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2; + + mSumOffsets += offset; ++mCount; - printf("avg. roundtrip time %.2f us\n", mTotalTimeUs / mCount); -#endif + + printf("delay = %lld us,\toffset %lld us\n", + delay, + offset); + fflush(stdout); + + postSendPacket(1000000ll / 30); } break; } @@ -265,6 +295,16 @@ void TestHandler::postSendPacket(int64_t delayUs) { (new AMessage(kWhatSendPacket, id()))->post(delayUs); } +void TestHandler::postTimeout() { + sp msg = new AMessage(kWhatTimedOut, id()); + msg->setInt32("generation", mTimeoutGeneration); + msg->post(kTimeoutDelayUs); +} + +void TestHandler::cancelTimeout() { + ++mTimeoutGeneration; +} + } // namespace android static void usage(const char *me) { -- cgit v1.1 From 7cd58537932ef6f481f68be0b9c597a89cebdfec Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Tue, 19 Feb 2013 07:28:30 -0800 Subject: Implement Surface input to MediaCodec. Also, renamed a CHECK_INTERFACE macro that was clashing with the Binder version. 
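A rough, editorial usage sketch of the Surface-input path this change adds to MediaCodec;
it is not taken from the patch, and the encoder format keys and values shown are
illustrative only. createInputSurface() is only valid after configure() and before
start(), and signalEndOfInputStream() takes the place of queueing an input buffer flagged
with EOS:

#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <gui/IGraphicBufferProducer.h>

using namespace android;

static sp<IGraphicBufferProducer> startEncoderWithSurfaceInput() {
    sp<ALooper> looper = new ALooper;
    looper->setName("encoder_looper");
    looper->start();

    sp<MediaCodec> encoder = MediaCodec::CreateByType(
            looper, "video/avc", true /* encoder */);

    sp<AMessage> format = new AMessage;
    format->setString("mime", "video/avc");
    format->setInt32("width", 1280);
    format->setInt32("height", 720);
    format->setInt32("bitrate", 6000000);
    format->setInt32("frame-rate", 30);
    format->setInt32("i-frame-interval", 1);
    // color-format would be the "Android opaque" format once defined (see the
    // TODO in MediaCodec::createInputSurface()).

    encoder->configure(format, NULL /* nativeWindow */, NULL /* crypto */,
            MediaCodec::CONFIGURE_FLAG_ENCODE);

    sp<IGraphicBufferProducer> bufferProducer;
    encoder->createInputSurface(&bufferProducer);  // configured, not yet started
    encoder->start();

    // A producer (for example the camera or a GL context) now queues frames
    // onto bufferProducer; when it is done:
    // encoder->signalEndOfInputStream();

    return bufferProducer;
}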
Bug 7991062 Change-Id: If5e6ed0a06d9f67975497676e4b05abe3aa3d6c0 --- include/media/IOMX.h | 7 + include/media/stagefright/ACodec.h | 6 + include/media/stagefright/BufferProducerWrapper.h | 46 +++ include/media/stagefright/MediaCodec.h | 6 + media/libmedia/IOMX.cpp | 113 +++++- media/libstagefright/ACodec.cpp | 72 ++++ media/libstagefright/MediaCodec.cpp | 101 +++++ media/libstagefright/OMXClient.cpp | 18 + media/libstagefright/include/OMX.h | 6 + media/libstagefright/include/OMXNodeInstance.h | 24 ++ media/libstagefright/omx/Android.mk | 2 + media/libstagefright/omx/GraphicBufferSource.cpp | 441 ++++++++++++++++++++++ media/libstagefright/omx/GraphicBufferSource.h | 176 +++++++++ media/libstagefright/omx/OMX.cpp | 14 + media/libstagefright/omx/OMXNodeInstance.cpp | 136 +++++++ 15 files changed, 1148 insertions(+), 20 deletions(-) create mode 100644 include/media/stagefright/BufferProducerWrapper.h create mode 100644 media/libstagefright/omx/GraphicBufferSource.cpp create mode 100644 media/libstagefright/omx/GraphicBufferSource.h diff --git a/include/media/IOMX.h b/include/media/IOMX.h index be1b2fc..0b1d1e4 100644 --- a/include/media/IOMX.h +++ b/include/media/IOMX.h @@ -19,6 +19,7 @@ #define ANDROID_IOMX_H_ #include +#include #include #include #include @@ -96,6 +97,12 @@ public: node_id node, OMX_U32 port_index, const sp &graphicBuffer, buffer_id *buffer) = 0; + virtual status_t createInputSurface( + node_id node, OMX_U32 port_index, + sp *bufferProducer) = 0; + + virtual status_t signalEndOfInputStream(node_id node) = 0; + // This API clearly only makes sense if the caller lives in the // same process as the callee, i.e. is the media_server, as the // returned "buffer_data" pointer is just that, a pointer into local diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h index 317b6f0..097ec5f 100644 --- a/include/media/stagefright/ACodec.h +++ b/include/media/stagefright/ACodec.h @@ -43,6 +43,8 @@ struct ACodec : public AHierarchicalStateMachine { kWhatError = 'erro', kWhatComponentAllocated = 'cAll', kWhatComponentConfigured = 'cCon', + kWhatInputSurfaceCreated = 'isfc', + kWhatSignaledInputEOS = 'seos', kWhatBuffersAllocated = 'allc', }; @@ -55,9 +57,11 @@ struct ACodec : public AHierarchicalStateMachine { void initiateShutdown(bool keepComponentAllocated = false); void signalSetParameters(const sp &msg); + void signalEndOfInputStream(); void initiateAllocateComponent(const sp &msg); void initiateConfigureComponent(const sp &msg); + void initiateCreateInputSurface(); void initiateStart(); void signalRequestIDRFrame(); @@ -105,6 +109,8 @@ private: kWhatDrainDeferredMessages = 'drai', kWhatAllocateComponent = 'allo', kWhatConfigureComponent = 'conf', + kWhatCreateInputSurface = 'cisf', + kWhatSignalEndOfInputStream = 'eois', kWhatStart = 'star', kWhatRequestIDRFrame = 'ridr', kWhatSetParameters = 'setP', diff --git a/include/media/stagefright/BufferProducerWrapper.h b/include/media/stagefright/BufferProducerWrapper.h new file mode 100644 index 0000000..d8acf30 --- /dev/null +++ b/include/media/stagefright/BufferProducerWrapper.h @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef BUFFER_PRODUCER_WRAPPER_H_ + +#define BUFFER_PRODUCER_WRAPPER_H_ + +#include + +namespace android { + +// Can't use static_cast to cast a RefBase back to an IGraphicBufferProducer, +// because IGBP's parent (IInterface) uses virtual inheritance. This class +// wraps IGBP while we pass it through AMessage. + +struct BufferProducerWrapper : RefBase { + BufferProducerWrapper( + const sp& bufferProducer) : + mBufferProducer(bufferProducer) { } + + sp getBufferProducer() const { + return mBufferProducer; + } + +private: + const sp mBufferProducer; + + DISALLOW_EVIL_CONSTRUCTORS(BufferProducerWrapper); +}; + +} // namespace android + +#endif // BUFFER_PRODUCER_WRAPPER_H_ diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h index 1002663..ef695a7 100644 --- a/include/media/stagefright/MediaCodec.h +++ b/include/media/stagefright/MediaCodec.h @@ -56,6 +56,8 @@ struct MediaCodec : public AHandler { const sp &crypto, uint32_t flags); + status_t createInputSurface(sp* bufferProducer); + status_t start(); // Returns to a state in which the component remains allocated but @@ -101,6 +103,8 @@ struct MediaCodec : public AHandler { status_t renderOutputBufferAndRelease(size_t index); status_t releaseOutputBuffer(size_t index); + status_t signalEndOfInputStream(); + status_t getOutputFormat(sp *format) const; status_t getInputBuffers(Vector > *buffers) const; @@ -143,6 +147,7 @@ private: enum { kWhatInit = 'init', kWhatConfigure = 'conf', + kWhatCreateInputSurface = 'cisf', kWhatStart = 'strt', kWhatStop = 'stop', kWhatRelease = 'rele', @@ -150,6 +155,7 @@ private: kWhatQueueInputBuffer = 'queI', kWhatDequeueOutputBuffer = 'deqO', kWhatReleaseOutputBuffer = 'relO', + kWhatSignalEndOfInputStream = 'eois', kWhatGetBuffers = 'getB', kWhatFlush = 'flus', kWhatGetOutputFormat = 'getO', diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp index 48e427a..d6cd43a 100644 --- a/media/libmedia/IOMX.cpp +++ b/media/libmedia/IOMX.cpp @@ -40,6 +40,8 @@ enum { ENABLE_GRAPHIC_BUFFERS, USE_BUFFER, USE_GRAPHIC_BUFFER, + CREATE_INPUT_SURFACE, + SIGNAL_END_OF_INPUT_STREAM, STORE_META_DATA_IN_BUFFERS, ALLOC_BUFFER, ALLOC_BUFFER_WITH_BACKUP, @@ -280,6 +282,45 @@ public: return err; } + virtual status_t createInputSurface( + node_id node, OMX_U32 port_index, + sp *bufferProducer) { + Parcel data, reply; + status_t err; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(port_index); + err = remote()->transact(CREATE_INPUT_SURFACE, data, &reply); + if (err != OK) { + ALOGW("binder transaction failed: %d", err); + return err; + } + + err = reply.readInt32(); + if (err != OK) { + return err; + } + + *bufferProducer = IGraphicBufferProducer::asInterface( + reply.readStrongBinder()); + + return err; + } + + virtual status_t signalEndOfInputStream(node_id node) { + Parcel data, reply; + status_t err; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + err = remote()->transact(SIGNAL_END_OF_INPUT_STREAM, data, &reply); + if (err != OK) { + 
ALOGW("binder transaction failed: %d", err); + return err; + } + + return reply.readInt32(); + } + virtual status_t storeMetaDataInBuffers( node_id node, OMX_U32 port_index, OMX_BOOL enable) { Parcel data, reply; @@ -404,7 +445,7 @@ IMPLEMENT_META_INTERFACE(OMX, "android.hardware.IOMX"); //////////////////////////////////////////////////////////////////////////////// -#define CHECK_INTERFACE(interface, data, reply) \ +#define CHECK_OMX_INTERFACE(interface, data, reply) \ do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \ ALOGW("Call incorrectly routed to " #interface); \ return PERMISSION_DENIED; \ @@ -415,7 +456,7 @@ status_t BnOMX::onTransact( switch (code) { case LIVES_LOCALLY: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void *)data.readIntPtr(); pid_t pid = (pid_t)data.readInt32(); reply->writeInt32(livesLocally(node, pid)); @@ -425,7 +466,7 @@ status_t BnOMX::onTransact( case LIST_NODES: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); List list; listNodes(&list); @@ -448,7 +489,7 @@ status_t BnOMX::onTransact( case ALLOCATE_NODE: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); const char *name = data.readCString(); @@ -468,7 +509,7 @@ status_t BnOMX::onTransact( case FREE_NODE: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); @@ -479,7 +520,7 @@ status_t BnOMX::onTransact( case SEND_COMMAND: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); @@ -497,7 +538,7 @@ status_t BnOMX::onTransact( case GET_CONFIG: case SET_CONFIG: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_INDEXTYPE index = static_cast(data.readInt32()); @@ -539,7 +580,7 @@ status_t BnOMX::onTransact( case GET_STATE: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_STATETYPE state = OMX_StateInvalid; @@ -553,7 +594,7 @@ status_t BnOMX::onTransact( case ENABLE_GRAPHIC_BUFFERS: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -567,7 +608,7 @@ status_t BnOMX::onTransact( case GET_GRAPHIC_BUFFER_USAGE: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -582,7 +623,7 @@ status_t BnOMX::onTransact( case USE_BUFFER: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -602,7 +643,7 @@ status_t BnOMX::onTransact( case USE_GRAPHIC_BUFFER: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -621,9 +662,41 @@ status_t BnOMX::onTransact( return NO_ERROR; } + case CREATE_INPUT_SURFACE: + { + CHECK_OMX_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + OMX_U32 port_index = data.readInt32(); + + sp bufferProducer; + status_t err = createInputSurface(node, port_index, + &bufferProducer); + + reply->writeInt32(err); + + if (err == OK) { + reply->writeStrongBinder(bufferProducer->asBinder()); + } + + return NO_ERROR; + } + + 
case SIGNAL_END_OF_INPUT_STREAM: + { + CHECK_OMX_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + + status_t err = signalEndOfInputStream(node); + reply->writeInt32(err); + + return NO_ERROR; + } + case STORE_META_DATA_IN_BUFFERS: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -637,7 +710,7 @@ status_t BnOMX::onTransact( case ALLOC_BUFFER: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -659,7 +732,7 @@ status_t BnOMX::onTransact( case ALLOC_BUFFER_WITH_BACKUP: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -681,7 +754,7 @@ status_t BnOMX::onTransact( case FREE_BUFFER: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -693,7 +766,7 @@ status_t BnOMX::onTransact( case FILL_BUFFER: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); buffer_id buffer = (void*)data.readIntPtr(); @@ -704,7 +777,7 @@ status_t BnOMX::onTransact( case EMPTY_BUFFER: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); buffer_id buffer = (void*)data.readIntPtr(); @@ -723,7 +796,7 @@ status_t BnOMX::onTransact( case GET_EXTENSION_INDEX: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); const char *parameter_name = data.readCString(); @@ -769,7 +842,7 @@ status_t BnOMXObserver::onTransact( switch (code) { case OBSERVER_ON_MSG: { - CHECK_INTERFACE(IOMXObserver, data, reply); + CHECK_OMX_INTERFACE(IOMXObserver, data, reply); omx_message msg; data.read(&msg, sizeof(msg)); diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index a6cc4eb..59fc45e 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -26,6 +26,7 @@ #include #include +#include #include #include #include @@ -192,6 +193,7 @@ private: friend struct ACodec::UninitializedState; bool onConfigureComponent(const sp &msg); + void onCreateInputSurface(const sp &msg); void onStart(); void onShutdown(bool keepComponentAllocated); @@ -239,6 +241,9 @@ struct ACodec::ExecutingState : public ACodec::BaseState { // to fill with data. void resume(); + // Send EOS on input stream. + void onSignalEndOfInputStream(); + // Returns true iff input and output buffers are in play. 
bool active() const { return mActive; } @@ -392,6 +397,14 @@ void ACodec::initiateConfigureComponent(const sp &msg) { msg->post(); } +void ACodec::initiateCreateInputSurface() { + (new AMessage(kWhatCreateInputSurface, id()))->post(); +} + +void ACodec::signalEndOfInputStream() { + (new AMessage(kWhatSignalEndOfInputStream, id()))->post(); +} + void ACodec::initiateStart() { (new AMessage(kWhatStart, id()))->post(); } @@ -2469,6 +2482,14 @@ bool ACodec::BaseState::onMessageReceived(const sp &msg) { return onOMXMessage(msg); } + case ACodec::kWhatCreateInputSurface: + case ACodec::kWhatSignalEndOfInputStream: + { + ALOGE("Message 0x%x was not handled", msg->what()); + mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); + return true; + } + default: return false; } @@ -3232,6 +3253,13 @@ bool ACodec::LoadedState::onMessageReceived(const sp &msg) { break; } + case ACodec::kWhatCreateInputSurface: + { + onCreateInputSurface(msg); + handled = true; + break; + } + case ACodec::kWhatStart: { onStart(); @@ -3310,6 +3338,32 @@ bool ACodec::LoadedState::onConfigureComponent( return true; } +void ACodec::LoadedState::onCreateInputSurface( + const sp &msg) { + ALOGV("onCreateInputSurface"); + + sp notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatInputSurfaceCreated); + + sp bufferProducer; + status_t err; + + err = mCodec->mOMX->createInputSurface(mCodec->mNode, kPortIndexInput, + &bufferProducer); + if (err == OK) { + notify->setObject("input-surface", + new BufferProducerWrapper(bufferProducer)); + } else { + // Can't use mCodec->signalError() here -- MediaCodec won't forward + // the error through because it's in the "configured" state. We + // send a kWhatInputSurfaceCreated with an error value instead. + ALOGE("[%s] onCreateInputSurface returning error %d", + mCodec->mComponentName.c_str(), err); + notify->setInt32("err", err); + } + notify->post(); +} + void ACodec::LoadedState::onStart() { ALOGV("onStart"); @@ -3484,6 +3538,17 @@ void ACodec::ExecutingState::resume() { mActive = true; } +void ACodec::ExecutingState::onSignalEndOfInputStream() { + sp notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatSignaledInputEOS); + + status_t err = mCodec->mOMX->signalEndOfInputStream(mCodec->mNode); + if (err != OK) { + notify->setInt32("err", err); + } + notify->post(); +} + void ACodec::ExecutingState::stateEntered() { ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); @@ -3573,6 +3638,13 @@ bool ACodec::ExecutingState::onMessageReceived(const sp &msg) { break; } + case ACodec::kWhatSignalEndOfInputStream: + { + onSignalEndOfInputStream(); + handled = true; + break; + } + default: handled = BaseState::onMessageReceived(msg); break; diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index 83be0fd..79ea04c 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -30,6 +30,7 @@ #include #include #include +#include #include #include #include @@ -62,6 +63,7 @@ MediaCodec::MediaCodec(const sp &looper) : mState(UNINITIALIZED), mLooper(looper), mCodec(new ACodec), + mReplyID(0), mFlags(0), mSoftRenderer(NULL), mDequeueInputTimeoutGeneration(0), @@ -154,6 +156,28 @@ status_t MediaCodec::configure( return PostAndAwaitResponse(msg, &response); } +status_t MediaCodec::createInputSurface( + sp* bufferProducer) { + sp msg = new AMessage(kWhatCreateInputSurface, id()); + + // TODO(fadden): require MediaFormat colorFormat == AndroidOpaque + + sp response; + status_t err = 
PostAndAwaitResponse(msg, &response); + if (err == NO_ERROR) { + // unwrap the sp + sp obj; + bool found = response->findObject("input-surface", &obj); + CHECK(found); + sp wrapper( + static_cast(obj.get())); + *bufferProducer = wrapper->getBufferProducer(); + } else { + ALOGW("createInputSurface failed, err=%d", err); + } + return err; +} + status_t MediaCodec::start() { sp msg = new AMessage(kWhatStart, id()); @@ -232,6 +256,8 @@ status_t MediaCodec::queueSecureInputBuffer( } status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) { + // TODO(fadden): fail if an input Surface has been configured + sp msg = new AMessage(kWhatDequeueInputBuffer, id()); msg->setInt64("timeoutUs", timeoutUs); @@ -288,6 +314,13 @@ status_t MediaCodec::releaseOutputBuffer(size_t index) { return PostAndAwaitResponse(msg, &response); } +status_t MediaCodec::signalEndOfInputStream() { + sp msg = new AMessage(kWhatSignalEndOfInputStream, id()); + + sp response; + return PostAndAwaitResponse(msg, &response); +} + status_t MediaCodec::getOutputFormat(sp *format) const { sp msg = new AMessage(kWhatGetOutputFormat, id()); @@ -575,6 +608,36 @@ void MediaCodec::onMessageReceived(const sp &msg) { break; } + case ACodec::kWhatInputSurfaceCreated: + { + // response to ACodec::kWhatCreateInputSurface + status_t err = NO_ERROR; + sp response = new AMessage(); + if (!msg->findInt32("err", &err)) { + sp obj; + msg->findObject("input-surface", &obj); + CHECK(obj != NULL); + response->setObject("input-surface", obj); + } else { + response->setInt32("err", err); + } + response->postReply(mReplyID); + break; + } + + case ACodec::kWhatSignaledInputEOS: + { + // response to ACodec::kWhatSignalEndOfInputStream + sp response = new AMessage(); + status_t err; + if (msg->findInt32("err", &err)) { + response->setInt32("err", err); + } + response->postReply(mReplyID); + break; + } + + case ACodec::kWhatBuffersAllocated: { int32_t portIndex; @@ -881,6 +944,25 @@ void MediaCodec::onMessageReceived(const sp &msg) { break; } + case kWhatCreateInputSurface: + { + uint32_t replyID; + CHECK(msg->senderAwaitsResponse(&replyID)); + + // Must be configured, but can't have been started yet. 
+ if (mState != CONFIGURED) { + sp response = new AMessage; + response->setInt32("err", INVALID_OPERATION); + + response->postReply(replyID); + break; + } + + mReplyID = replyID; + mCodec->initiateCreateInputSurface(); + break; + } + case kWhatStart: { uint32_t replyID; @@ -947,6 +1029,7 @@ void MediaCodec::onMessageReceived(const sp &msg) { case kWhatDequeueInputBuffer: { + // TODO(fadden): make this fail if we're using an input Surface uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); @@ -1093,6 +1176,24 @@ void MediaCodec::onMessageReceived(const sp &msg) { break; } + case kWhatSignalEndOfInputStream: + { + uint32_t replyID; + CHECK(msg->senderAwaitsResponse(&replyID)); + + if (mState != STARTED || (mFlags & kFlagStickyError)) { + sp response = new AMessage; + response->setInt32("err", INVALID_OPERATION); + + response->postReply(replyID); + break; + } + + mReplyID = replyID; + mCodec->signalEndOfInputStream(); + break; + } + case kWhatGetBuffers: { uint32_t replyID; diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp index 7cdb793..ff72e0e 100644 --- a/media/libstagefright/OMXClient.cpp +++ b/media/libstagefright/OMXClient.cpp @@ -83,6 +83,12 @@ struct MuxOMX : public IOMX { node_id node, OMX_U32 port_index, const sp &graphicBuffer, buffer_id *buffer); + virtual status_t createInputSurface( + node_id node, OMX_U32 port_index, + sp *bufferProducer); + + virtual status_t signalEndOfInputStream(node_id node); + virtual status_t allocateBuffer( node_id node, OMX_U32 port_index, size_t size, buffer_id *buffer, void **buffer_data); @@ -274,6 +280,18 @@ status_t MuxOMX::useGraphicBuffer( node, port_index, graphicBuffer, buffer); } +status_t MuxOMX::createInputSurface( + node_id node, OMX_U32 port_index, + sp *bufferProducer) { + status_t err = getOMX(node)->createInputSurface( + node, port_index, bufferProducer); + return err; +} + +status_t MuxOMX::signalEndOfInputStream(node_id node) { + return getOMX(node)->signalEndOfInputStream(node); +} + status_t MuxOMX::allocateBuffer( node_id node, OMX_U32 port_index, size_t size, buffer_id *buffer, void **buffer_data) { diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h index 2c87b34..24b8d98 100644 --- a/media/libstagefright/include/OMX.h +++ b/media/libstagefright/include/OMX.h @@ -79,6 +79,12 @@ public: node_id node, OMX_U32 port_index, const sp &graphicBuffer, buffer_id *buffer); + virtual status_t createInputSurface( + node_id node, OMX_U32 port_index, + sp *bufferProducer); + + virtual status_t signalEndOfInputStream(node_id node); + virtual status_t allocateBuffer( node_id node, OMX_U32 port_index, size_t size, buffer_id *buffer, void **buffer_data); diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h index 47ca579..67aba6b 100644 --- a/media/libstagefright/include/OMXNodeInstance.h +++ b/media/libstagefright/include/OMXNodeInstance.h @@ -27,6 +27,7 @@ namespace android { class IOMXObserver; struct OMXMaster; +struct GraphicBufferSource; struct OMXNodeInstance { OMXNodeInstance( @@ -65,6 +66,11 @@ struct OMXNodeInstance { OMX_U32 portIndex, const sp &graphicBuffer, OMX::buffer_id *buffer); + status_t createInputSurface( + OMX_U32 portIndex, sp *bufferProducer); + + status_t signalEndOfInputStream(); + status_t allocateBuffer( OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer, void **buffer_data); @@ -82,12 +88,18 @@ struct OMXNodeInstance { OMX_U32 rangeOffset, OMX_U32 rangeLength, OMX_U32 flags, 
OMX_TICKS timestamp); + status_t emptyDirectBuffer( + OMX_BUFFERHEADERTYPE *header, + OMX_U32 rangeOffset, OMX_U32 rangeLength, + OMX_U32 flags, OMX_TICKS timestamp); + status_t getExtensionIndex( const char *parameterName, OMX_INDEXTYPE *index); void onMessage(const omx_message &msg); void onObserverDied(OMXMaster *master); void onGetHandleFailed(); + void onEvent(OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2); static OMX_CALLBACKTYPE kCallbacks; @@ -100,6 +112,13 @@ private: sp mObserver; bool mDying; + // Lock only covers mGraphicBufferSource. We can't always use mLock + // because of rare instances where we'd end up locking it recursively. + Mutex mGraphicBufferSourceLock; + // Access this through getGraphicBufferSource(). + sp mGraphicBufferSource; + + struct ActiveBuffer { OMX_U32 mPortIndex; OMX::buffer_id mID; @@ -132,6 +151,11 @@ private: OMX_IN OMX_PTR pAppData, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer); + status_t storeMetaDataInBuffers_l(OMX_U32 portIndex, OMX_BOOL enable); + + sp getGraphicBufferSource(); + void setGraphicBufferSource(const sp& bufferSource); + OMXNodeInstance(const OMXNodeInstance &); OMXNodeInstance &operator=(const OMXNodeInstance &); }; diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk index d7fbbbe..9129f08 100644 --- a/media/libstagefright/omx/Android.mk +++ b/media/libstagefright/omx/Android.mk @@ -2,6 +2,7 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ + GraphicBufferSource.cpp \ OMX.cpp \ OMXMaster.cpp \ OMXNodeInstance.cpp \ @@ -19,6 +20,7 @@ LOCAL_SHARED_LIBRARIES := \ libmedia \ libutils \ libui \ + libgui \ libcutils \ libstagefright_foundation \ libdl diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp new file mode 100644 index 0000000..f207954 --- /dev/null +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -0,0 +1,441 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "GraphicBufferSource" +#include + +#include + +#include +#include + +#include +#include + +namespace android { + +static const bool EXTRA_CHECK = true; + + +GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, + uint32_t bufferWidth, uint32_t bufferHeight) : + mInitCheck(UNKNOWN_ERROR), + mNodeInstance(nodeInstance), + mExecuting(false), + mNumFramesAvailable(0), + mEndOfStream(false), + mEndOfStreamSent(false) { + + ALOGV("GraphicBufferSource w=%u h=%u", bufferWidth, bufferHeight); + + if (bufferWidth == 0 || bufferHeight == 0) { + ALOGE("Invalid dimensions %dx%d", bufferWidth, bufferHeight); + mInitCheck = BAD_VALUE; + return; + } + + mBufferQueue = new BufferQueue(true); + mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight); + mBufferQueue->setSynchronousMode(true); + mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER | + GRALLOC_USAGE_HW_TEXTURE); + + // Note that we can't create an sp<...>(this) in a ctor that will not keep a + // reference once the ctor ends, as that would cause the refcount of 'this' + // dropping to 0 at the end of the ctor. Since all we need is a wp<...> + // that's what we create. + wp listener; + listener = static_cast(this); + + sp proxy; + proxy = new BufferQueue::ProxyConsumerListener(listener); + + status_t err = mBufferQueue->consumerConnect(proxy); + if (err != NO_ERROR) { + ALOGE("Error connecting to BufferQueue: %s (%d)", + strerror(-err), err); + return; + } + + mInitCheck = OK; +} + +GraphicBufferSource::~GraphicBufferSource() { + ALOGV("~GraphicBufferSource"); + status_t err = mBufferQueue->consumerDisconnect(); + if (err != NO_ERROR) { + ALOGW("consumerDisconnect failed: %d", err); + } +} + +void GraphicBufferSource::omxExecuting() { + Mutex::Autolock autoLock(mMutex); + ALOGV("--> executing; avail=%d, codec vec size=%zd", + mNumFramesAvailable, mCodecBuffers.size()); + CHECK(!mExecuting); + mExecuting = true; + + // Start by loading up as many buffers as possible. We want to do this, + // rather than just submit the first buffer, to avoid a degenerate case: + // if all BQ buffers arrive before we start executing, and we only submit + // one here, the other BQ buffers will just sit until we get notified + // that the codec buffer has been released. We'd then acquire and + // submit a single additional buffer, repeatedly, never using more than + // one codec buffer simultaneously. (We could instead try to submit + // all BQ buffers whenever any codec buffer is freed, but if we get the + // initial conditions right that will never be useful.) + while (mNumFramesAvailable && isCodecBufferAvailable_l()) { + fillCodecBuffer_l(); + } + + ALOGV("done loading initial frames, avail=%d", mNumFramesAvailable); + + // If EOS has already been signaled, and there are no more frames to + // submit, try to send EOS now as well. + if (mEndOfStream && mNumFramesAvailable == 0) { + submitEndOfInputStream_l(); + } +} + +void GraphicBufferSource::omxIdling(){ + Mutex::Autolock autoLock(mMutex); + ALOGV("--> idling"); + if (!mExecuting) { + // Transition from "loading" to "idling". Nothing to do. + return; + } + + ALOGV("Dropped down to idle, avail=%d eos=%d eosSent=%d", + mNumFramesAvailable, mEndOfStream, mEndOfStreamSent); + + // Codec is no longer executing. Discard all codec-related state. 
+ mCodecBuffers.clear(); + // TODO: scan mCodecBuffers to verify that all mGraphicBuffer entries + // are null; complain if not + + mExecuting = false; +} + +void GraphicBufferSource::addCodecBuffer(OMX_BUFFERHEADERTYPE* header) { + Mutex::Autolock autoLock(mMutex); + + if (mExecuting) { + // This should never happen -- buffers can only be allocated when + // transitioning from "loaded" to "idle". + ALOGE("addCodecBuffer: buffer added while executing"); + return; + } + + ALOGV("addCodecBuffer h=%p size=%lu p=%p", + header, header->nAllocLen, header->pBuffer); + CodecBuffer codecBuffer; + codecBuffer.mHeader = header; + mCodecBuffers.add(codecBuffer); +} + +void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { + Mutex::Autolock autoLock(mMutex); + + CHECK(mExecuting); // could this happen if app stop()s early? + + int cbi = findMatchingCodecBuffer_l(header); + if (cbi < 0) { + // This should never happen. + ALOGE("codecBufferEmptied: buffer not recognized (h=%p)", header); + return; + } + + ALOGV("codecBufferEmptied h=%p size=%lu filled=%lu p=%p", + header, header->nAllocLen, header->nFilledLen, + header->pBuffer); + CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi)); + + // header->nFilledLen may not be the original value, so we can't compare + // that to zero to see of this was the EOS buffer. Instead we just + // see if the GraphicBuffer reference was null, which should only ever + // happen for EOS. + if (codecBuffer.mGraphicBuffer == NULL) { + CHECK(mEndOfStream); + // No GraphicBuffer to deal with, no additional input or output is + // expected, so just return. + return; + } + + if (EXTRA_CHECK) { + // Pull the graphic buffer handle back out of the buffer, and confirm + // that it matches expectations. + OMX_U8* data = header->pBuffer; + buffer_handle_t bufferHandle; + memcpy(&bufferHandle, data + 4, sizeof(buffer_handle_t)); + if (bufferHandle != codecBuffer.mGraphicBuffer->handle) { + // should never happen + ALOGE("codecBufferEmptied: buffer's handle is %p, expected %p", + bufferHandle, codecBuffer.mGraphicBuffer->handle); + CHECK(!"codecBufferEmptied: mismatched buffer"); + } + } + + // Find matching entry in our cached copy of the BufferQueue slots. + // If we find a match, release that slot. If we don't, the BufferQueue + // has dropped that GraphicBuffer, and there's nothing for us to release. + // + // (We could store "id" in CodecBuffer and avoid the slot search.) + int id; + for (id = 0; id < BufferQueue::NUM_BUFFER_SLOTS; id++) { + if (mBufferSlot[id] == NULL) { + continue; + } + + if (mBufferSlot[id]->handle == codecBuffer.mGraphicBuffer->handle) { + ALOGV("cbi %d matches bq slot %d, handle=%p", + cbi, id, mBufferSlot[id]->handle); + + mBufferQueue->releaseBuffer(id, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, + Fence::NO_FENCE); + break; + } + } + if (id == BufferQueue::NUM_BUFFER_SLOTS) { + ALOGV("codecBufferEmptied: no match for emptied buffer in cbi %d", + cbi); + } + + // Mark the codec buffer as available by clearing the GraphicBuffer ref. + codecBuffer.mGraphicBuffer = NULL; + + if (mNumFramesAvailable) { + // Fill this codec buffer. + CHECK(!mEndOfStream); + ALOGV("buffer freed, %d frames avail", mNumFramesAvailable); + fillCodecBuffer_l(); + } else if (mEndOfStream) { + // No frames available, but EOS is pending, so use this buffer to + // send that. 
+ ALOGV("buffer freed, EOS pending"); + submitEndOfInputStream_l(); + } + return; +} + +status_t GraphicBufferSource::fillCodecBuffer_l() { + CHECK(mExecuting && mNumFramesAvailable > 0); + int cbi = findAvailableCodecBuffer_l(); + if (cbi < 0) { + // No buffers available, bail. + ALOGV("fillCodecBuffer_l: no codec buffers, avail now %d", + mNumFramesAvailable); + } else { + ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%d", + mNumFramesAvailable); + BufferQueue::BufferItem item; + status_t err = mBufferQueue->acquireBuffer(&item); + if (err == BufferQueue::NO_BUFFER_AVAILABLE) { + // shouldn't happen + ALOGW("fillCodecBuffer_l: frame was not available"); + return err; + } else if (err != OK) { + // now what? fake end-of-stream? + ALOGW("fillCodecBuffer_l: acquireBuffer returned err=%d", err); + return err; + } + + mNumFramesAvailable--; + + // Wait for it to become available. + err = item.mFence->waitForever(1000, + "GraphicBufferSource::fillCodecBuffer_l"); + if (err != OK) { + ALOGW("failed to wait for buffer fence: %d", err); + // keep going + } + + // If this is the first time we're seeing this buffer, add it to our + // slot table. + if (item.mGraphicBuffer != NULL) { + ALOGV("fillCodecBuffer_l: setting mBufferSlot %d", item.mBuf); + mBufferSlot[item.mBuf] = item.mGraphicBuffer; + } + + err = submitBuffer_l(mBufferSlot[item.mBuf], item.mTimestamp, cbi); + if (err != OK) { + ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf); + mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY, + EGL_NO_SYNC_KHR, Fence::NO_FENCE); + } else { + ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi); + } + } + + return OK; +} + +void GraphicBufferSource::signalEndOfInputStream() { + Mutex::Autolock autoLock(mMutex); + ALOGV("signalEndOfInputStream: exec=%d avail=%d", + mExecuting, mNumFramesAvailable); + + // Set the end-of-stream flag. If no frames are pending from the + // BufferQueue, and a codec buffer is available, and we're executing, + // we initiate the EOS from here. Otherwise, we'll let + // codecBufferEmptied() (or omxExecuting) do it. + // + // Note: if there are no pending frames and all codec buffers are + // available, we *must* submit the EOS from here or we'll just + // stall since no future events are expected. 
+ mEndOfStream = true; + + if (mExecuting && mNumFramesAvailable == 0) { + submitEndOfInputStream_l(); + } +} + +status_t GraphicBufferSource::submitBuffer_l(sp& graphicBuffer, + int64_t timestamp, int cbi) { + ALOGV("submitBuffer_l cbi=%d", cbi); + CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi)); + codecBuffer.mGraphicBuffer = graphicBuffer; + + OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader; + CHECK(header->nAllocLen >= 4 + sizeof(buffer_handle_t)); + OMX_U8* data = header->pBuffer; + const OMX_U32 type = kMetadataBufferTypeGrallocSource; + buffer_handle_t handle = codecBuffer.mGraphicBuffer->handle; + memcpy(data, &type, 4); + memcpy(data + 4, &handle, sizeof(buffer_handle_t)); + + status_t err = mNodeInstance->emptyDirectBuffer(header, 0, + 4 + sizeof(buffer_handle_t), OMX_BUFFERFLAG_ENDOFFRAME, + timestamp); + if (err != OK) { + ALOGW("WARNING: emptyDirectBuffer failed: 0x%x", err); + codecBuffer.mGraphicBuffer = NULL; + return err; + } + + ALOGV("emptyDirectBuffer succeeded, h=%p p=%p bufhandle=%p", + header, header->pBuffer, handle); + return OK; +} + +void GraphicBufferSource::submitEndOfInputStream_l() { + CHECK(mEndOfStream); + if (mEndOfStreamSent) { + ALOGV("EOS already sent"); + return; + } + + int cbi = findAvailableCodecBuffer_l(); + if (cbi < 0) { + ALOGV("submitEndOfInputStream_l: no codec buffers available"); + return; + } + + // We reject any additional incoming graphic buffers, so there's no need + // to stick a placeholder into codecBuffer.mGraphicBuffer to mark it as + // in-use. + CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi)); + + OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader; + if (EXTRA_CHECK) { + // Guard against implementations that don't check nFilledLen. + size_t fillLen = 4 + sizeof(buffer_handle_t); + CHECK(header->nAllocLen >= fillLen); + OMX_U8* data = header->pBuffer; + memset(data, 0xcd, fillLen); + } + + uint64_t timestamp = 0; // does this matter? + + status_t err = mNodeInstance->emptyDirectBuffer(header, /*offset*/ 0, + /*length*/ 0, OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_EOS, + timestamp); + if (err != OK) { + ALOGW("emptyDirectBuffer EOS failed: 0x%x", err); + } else { + ALOGV("submitEndOfInputStream_l: buffer submitted, header=%p cbi=%d", + header, cbi); + } +} + +int GraphicBufferSource::findAvailableCodecBuffer_l() { + CHECK(mCodecBuffers.size() > 0); + + for (int i = (int)mCodecBuffers.size() - 1; i>= 0; --i) { + if (mCodecBuffers[i].mGraphicBuffer == NULL) { + return i; + } + } + return -1; +} + +int GraphicBufferSource::findMatchingCodecBuffer_l( + const OMX_BUFFERHEADERTYPE* header) { + for (int i = (int)mCodecBuffers.size() - 1; i>= 0; --i) { + if (mCodecBuffers[i].mHeader == header) { + return i; + } + } + return -1; +} + +// BufferQueue::ConsumerListener callback +void GraphicBufferSource::onFrameAvailable() { + Mutex::Autolock autoLock(mMutex); + + ALOGV("onFrameAvailable exec=%d avail=%d", mExecuting, mNumFramesAvailable); + + if (mEndOfStream) { + // This should only be possible if a new buffer was queued after + // EOS was signaled, i.e. the app is misbehaving. 
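+ // Even though the frame is ignored, acquire and immediately release it
+ // below so the BufferQueue slot is returned to the producer instead of
+ // being left queued indefinitely.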
+ ALOGW("onFrameAvailable: EOS is set, ignoring frame"); + + BufferQueue::BufferItem item; + status_t err = mBufferQueue->acquireBuffer(&item); + if (err == OK) { + mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY, + EGL_NO_SYNC_KHR, item.mFence); + } + return; + } + + mNumFramesAvailable++; + + if (mExecuting) { + fillCodecBuffer_l(); + } +} + +// BufferQueue::ConsumerListener callback +void GraphicBufferSource::onBuffersReleased() { + Mutex::Autolock lock(mMutex); + + uint32_t slotMask; + if (mBufferQueue->getReleasedBuffers(&slotMask) != NO_ERROR) { + ALOGW("onBuffersReleased: unable to get released buffer set"); + slotMask = 0xffffffff; + } + + ALOGV("onBuffersReleased: 0x%08x", slotMask); + + for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) { + if ((slotMask & 0x01) != 0) { + mBufferSlot[i] = NULL; + } + slotMask >>= 1; + } +} + +} // namespace android diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h new file mode 100644 index 0000000..6d49f96 --- /dev/null +++ b/media/libstagefright/omx/GraphicBufferSource.h @@ -0,0 +1,176 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GRAPHIC_BUFFER_SOURCE_H_ + +#define GRAPHIC_BUFFER_SOURCE_H_ + +#include +#include +#include + +#include +#include "../include/OMXNodeInstance.h" +#include + +namespace android { + +/* + * This class is used to feed OMX codecs from a Surface via BufferQueue. + * + * Instances of the class don't run on a dedicated thread. Instead, + * various events trigger data movement: + * + * - Availability of a new frame of data from the BufferQueue (notified + * via the onFrameAvailable callback). + * - The return of a codec buffer (via OnEmptyBufferDone). + * - Application signaling end-of-stream. + * - Transition to or from "executing" state. + * + * Frames of data (and, perhaps, the end-of-stream indication) can arrive + * before the codec is in the "executing" state, so we need to queue + * things up until we're ready to go. + */ +class GraphicBufferSource : public BufferQueue::ConsumerListener { +public: + GraphicBufferSource(OMXNodeInstance* nodeInstance, + uint32_t bufferWidth, uint32_t bufferHeight); + virtual ~GraphicBufferSource(); + + // We can't throw an exception if the constructor fails, so we just set + // this and require that the caller test the value. + status_t initCheck() const { + return mInitCheck; + } + + // Returns the handle to the producer side of the BufferQueue. Buffers + // queued on this will be received by GraphicBufferSource. + sp getIGraphicBufferProducer() const { + return mBufferQueue; + } + + // This is called when OMX transitions to OMX_StateExecuting, which means + // we can start handing it buffers. If we already have buffers of data + // sitting in the BufferQueue, this will send them to the codec. + void omxExecuting(); + + // This is called when OMX transitions to OMX_StateIdle. 
If we were + // previously executing, this means we're about to be shut down. (We + // also enter Idle on the way up.) + void omxIdling(); + + // A "codec buffer", i.e. a buffer that can be used to pass data into + // the encoder, has been allocated. (This call does not call back into + // OMXNodeInstance.) + void addCodecBuffer(OMX_BUFFERHEADERTYPE* header); + + // Called from OnEmptyBufferDone. If we have a BQ buffer available, + // fill it with a new frame of data; otherwise, just mark it as available. + void codecBufferEmptied(OMX_BUFFERHEADERTYPE* header); + + // This is called after the last input frame has been submitted. We + // need to submit an empty buffer with the EOS flag set. If we don't + // have a codec buffer ready, we just set the mEndOfStream flag. + void signalEndOfInputStream(); + +protected: + // BufferQueue::ConsumerListener interface, called when a new frame of + // data is available. If we're executing and a codec buffer is + // available, we acquire the buffer, copy the GraphicBuffer reference + // into the codec buffer, and call Empty[This]Buffer. If we're not yet + // executing or there's no codec buffer available, we just increment + // mNumFramesAvailable and return. + virtual void onFrameAvailable(); + + // BufferQueue::ConsumerListener interface, called when the client has + // released one or more GraphicBuffers. We clear out the appropriate + // set of mBufferSlot entries. + virtual void onBuffersReleased(); + +private: + // Keep track of codec input buffers. They may either be available + // (mGraphicBuffer == NULL) or in use by the codec. + struct CodecBuffer { + OMX_BUFFERHEADERTYPE* mHeader; + sp mGraphicBuffer; + }; + + // Returns the index of an available codec buffer. If none are + // available, returns -1. Mutex must be held by caller. + int findAvailableCodecBuffer_l(); + + // Returns true if a codec buffer is available. + bool isCodecBufferAvailable_l() { + return findAvailableCodecBuffer_l() >= 0; + } + + // Finds the mCodecBuffers entry that matches. Returns -1 if not found. + int findMatchingCodecBuffer_l(const OMX_BUFFERHEADERTYPE* header); + + // Fills a codec buffer with a frame from the BufferQueue. This must + // only be called when we know that a frame of data is ready (i.e. we're + // in the onFrameAvailable callback, or if we're in codecBufferEmptied + // and mNumFramesAvailable is nonzero). Returns without doing anything if + // we don't have a codec buffer available. + status_t fillCodecBuffer_l(); + + // Marks the mCodecBuffers entry as in-use, copies the GraphicBuffer + // reference into the codec buffer, and submits the data to the codec. + status_t submitBuffer_l(sp& graphicBuffer, + int64_t timestamp, int cbi); + + // Submits an empty buffer, with the EOS flag set. Returns without + // doing anything if we don't have a codec buffer available. + void submitEndOfInputStream_l(); + + // Lock, covers all member variables. + mutable Mutex mMutex; + + // Used to report constructor failure. + status_t mInitCheck; + + // Pointer back to the object that contains us. We send buffers here. + OMXNodeInstance* mNodeInstance; + + // Set by omxExecuting() / omxIdling(). + bool mExecuting; + + // We consume graphic buffers from this. + sp mBufferQueue; + + // Number of frames pending in BufferQueue that haven't yet been + // forwarded to the codec. + size_t mNumFramesAvailable; + + // Set to true if we want to send end-of-stream after we run out of + // frames in BufferQueue. 
+ bool mEndOfStream; + bool mEndOfStreamSent; + + // Cache of GraphicBuffers from the buffer queue. When the codec + // is done processing a GraphicBuffer, we can use this to map back + // to a slot number. + sp mBufferSlot[BufferQueue::NUM_BUFFER_SLOTS]; + + // Tracks codec buffers. + Vector mCodecBuffers; + + DISALLOW_EVIL_CONSTRUCTORS(GraphicBufferSource); +}; + +} // namespace android + +#endif // GRAPHIC_BUFFER_SOURCE_H_ diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp index 29bc733..3987ead 100644 --- a/media/libstagefright/omx/OMX.cpp +++ b/media/libstagefright/omx/OMX.cpp @@ -345,6 +345,17 @@ status_t OMX::useGraphicBuffer( port_index, graphicBuffer, buffer); } +status_t OMX::createInputSurface( + node_id node, OMX_U32 port_index, + sp *bufferProducer) { + return findInstance(node)->createInputSurface( + port_index, bufferProducer); +} + +status_t OMX::signalEndOfInputStream(node_id node) { + return findInstance(node)->signalEndOfInputStream(); +} + status_t OMX::allocateBuffer( node_id node, OMX_U32 port_index, size_t size, buffer_id *buffer, void **buffer_data) { @@ -393,6 +404,9 @@ OMX_ERRORTYPE OMX::OnEvent( OMX_IN OMX_PTR pEventData) { ALOGV("OnEvent(%d, %ld, %ld)", eEvent, nData1, nData2); + // Forward to OMXNodeInstance. + findInstance(node)->onEvent(eEvent, nData1, nData2); + omx_message msg; msg.type = omx_message::EVENT; msg.node = node; diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index bff3def..6c2c33b 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -20,14 +20,18 @@ #include "../include/OMXNodeInstance.h" #include "OMXMaster.h" +#include "GraphicBufferSource.h" #include #include +#include #include #include #include +static const OMX_U32 kPortIndexInput = 0; + namespace android { struct BufferMeta { @@ -100,6 +104,17 @@ void OMXNodeInstance::setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle) { mHandle = handle; } +sp OMXNodeInstance::getGraphicBufferSource() { + Mutex::Autolock autoLock(mGraphicBufferSourceLock); + return mGraphicBufferSource; +} + +void OMXNodeInstance::setGraphicBufferSource( + const sp& bufferSource) { + Mutex::Autolock autoLock(mGraphicBufferSourceLock); + mGraphicBufferSource = bufferSource; +} + OMX *OMXNodeInstance::owner() { return mOwner; } @@ -354,7 +369,12 @@ status_t OMXNodeInstance::storeMetaDataInBuffers( OMX_U32 portIndex, OMX_BOOL enable) { Mutex::Autolock autolock(mLock); + return storeMetaDataInBuffers_l(portIndex, enable); +} +status_t OMXNodeInstance::storeMetaDataInBuffers_l( + OMX_U32 portIndex, + OMX_BOOL enable) { OMX_INDEXTYPE index; OMX_STRING name = const_cast( "OMX.google.android.index.storeMetaDataInBuffers"); @@ -411,6 +431,11 @@ status_t OMXNodeInstance::useBuffer( addActiveBuffer(portIndex, *buffer); + sp bufferSource(getGraphicBufferSource()); + if (bufferSource != NULL && portIndex == kPortIndexInput) { + bufferSource->addCodecBuffer(header); + } + return OK; } @@ -530,6 +555,60 @@ status_t OMXNodeInstance::useGraphicBuffer( return OK; } +status_t OMXNodeInstance::createInputSurface( + OMX_U32 portIndex, sp *bufferProducer) { + Mutex::Autolock autolock(mLock); + status_t err; + + const sp& surfaceCheck = getGraphicBufferSource(); + if (surfaceCheck != NULL) { + return ALREADY_EXISTS; + } + + // Input buffers will hold meta-data (gralloc references). 
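+ // When filled by GraphicBufferSource::submitBuffer_l(), each input buffer
+ // is expected to carry:
+ //   bytes [0..3]   OMX_U32 kMetadataBufferTypeGrallocSource
+ //   bytes [4.. ]   the buffer_handle_t of the GraphicBuffer to encode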
+ err = storeMetaDataInBuffers_l(portIndex, OMX_TRUE); + if (err != OK) { + return err; + } + + // Retrieve the width and height of the graphic buffer, set when the + // codec was configured. + OMX_PARAM_PORTDEFINITIONTYPE def; + def.nSize = sizeof(def); + def.nVersion.s.nVersionMajor = 1; + def.nVersion.s.nVersionMinor = 0; + def.nVersion.s.nRevision = 0; + def.nVersion.s.nStep = 0; + def.nPortIndex = portIndex; + OMX_ERRORTYPE oerr = OMX_GetParameter( + mHandle, OMX_IndexParamPortDefinition, &def); + CHECK(oerr == OMX_ErrorNone); + + GraphicBufferSource* bufferSource = new GraphicBufferSource( + this, def.format.video.nFrameWidth, def.format.video.nFrameHeight); + if ((err = bufferSource->initCheck()) != OK) { + delete bufferSource; + return err; + } + setGraphicBufferSource(bufferSource); + + *bufferProducer = bufferSource->getIGraphicBufferProducer(); + return OK; +} + +status_t OMXNodeInstance::signalEndOfInputStream() { + // For non-Surface input, the MediaCodec should convert the call to a + // pair of requests (dequeue input buffer, queue input buffer with EOS + // flag set). Seems easier than doing the equivalent from here. + sp bufferSource(getGraphicBufferSource()); + if (bufferSource == NULL) { + ALOGW("signalEndOfInputStream should only be used with Surface input"); + return INVALID_OPERATION; + }; + bufferSource->signalEndOfInputStream(); + return OK; +} + status_t OMXNodeInstance::allocateBuffer( OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer, void **buffer_data) { @@ -560,6 +639,11 @@ status_t OMXNodeInstance::allocateBuffer( addActiveBuffer(portIndex, *buffer); + sp bufferSource(getGraphicBufferSource()); + if (bufferSource != NULL && portIndex == kPortIndexInput) { + bufferSource->addCodecBuffer(header); + } + return OK; } @@ -592,6 +676,11 @@ status_t OMXNodeInstance::allocateBufferWithBackup( addActiveBuffer(portIndex, *buffer); + sp bufferSource(getGraphicBufferSource()); + if (bufferSource != NULL && portIndex == kPortIndexInput) { + bufferSource->addCodecBuffer(header); + } + return OK; } @@ -646,6 +735,26 @@ status_t OMXNodeInstance::emptyBuffer( return StatusFromOMXError(err); } +// like emptyBuffer, but the data is already in header->pBuffer +status_t OMXNodeInstance::emptyDirectBuffer( + OMX_BUFFERHEADERTYPE *header, + OMX_U32 rangeOffset, OMX_U32 rangeLength, + OMX_U32 flags, OMX_TICKS timestamp) { + Mutex::Autolock autoLock(mLock); + + header->nFilledLen = rangeLength; + header->nOffset = rangeOffset; + header->nFlags = flags; + header->nTimeStamp = timestamp; + + OMX_ERRORTYPE err = OMX_EmptyThisBuffer(mHandle, header); + if (err != OMX_ErrorNone) { + ALOGW("emptyDirectBuffer failed, OMX err=0x%x", err); + } + + return StatusFromOMXError(err); +} + status_t OMXNodeInstance::getExtensionIndex( const char *parameterName, OMX_INDEXTYPE *index) { Mutex::Autolock autoLock(mLock); @@ -682,6 +791,22 @@ void OMXNodeInstance::onGetHandleFailed() { delete this; } +// OMXNodeInstance::OnEvent calls OMX::OnEvent, which then calls here. +// Don't try to acquire mLock here -- in rare circumstances this will hang. 
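+// (For example, a component may deliver this callback from inside an OMX
+// call that was itself made while holding mLock, e.g. emptyDirectBuffer(),
+// so taking mLock again here could deadlock. getGraphicBufferSource() only
+// needs mGraphicBufferSourceLock, which is all this path requires.)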
+void OMXNodeInstance::onEvent( + OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2) { + const sp& bufferSource(getGraphicBufferSource()); + + if (bufferSource != NULL && event == OMX_EventCmdComplete && + arg1 == OMX_CommandStateSet) { + if (arg2 == OMX_StateExecuting) { + bufferSource->omxExecuting(); + } else if (arg2 == OMX_StateIdle) { + bufferSource->omxIdling(); + } + } +} + // static OMX_ERRORTYPE OMXNodeInstance::OnEvent( OMX_IN OMX_HANDLETYPE hComponent, @@ -707,6 +832,17 @@ OMX_ERRORTYPE OMXNodeInstance::OnEmptyBufferDone( if (instance->mDying) { return OMX_ErrorNone; } + const sp& bufferSource( + instance->getGraphicBufferSource()); + if (bufferSource != NULL) { + bufferSource->codecBufferEmptied(pBuffer); + + // This is one of the buffers used exclusively by GraphicBufferSource. + // Don't dispatch a message back to ACodec, since it doesn't + // know that anyone asked to have the buffer emptied and will + // be very confused. + return OMX_ErrorNone; + } return instance->owner()->OnEmptyBufferDone(instance->nodeID(), pBuffer); } -- cgit v1.1 From 5c4cc0d99d3b1cb35c5d7c237272ee53142745fb Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 26 Nov 2012 10:40:24 -0800 Subject: Add template class SingleStateQueue Change-Id: If7e2bc9b2a216524ee9cbb68682e2634933b4973 --- include/media/SingleStateQueue.h | 97 ++++++++++++++++++++ include/private/media/StaticAudioTrackState.h | 35 +++++++ media/libmedia/Android.mk | 7 ++ media/libmedia/SingleStateQueue.cpp | 107 ++++++++++++++++++++++ media/libmedia/SingleStateQueueInstantiations.cpp | 26 ++++++ services/audioflinger/Android.mk | 1 - 6 files changed, 272 insertions(+), 1 deletion(-) create mode 100644 include/media/SingleStateQueue.h create mode 100644 include/private/media/StaticAudioTrackState.h create mode 100644 media/libmedia/SingleStateQueue.cpp create mode 100644 media/libmedia/SingleStateQueueInstantiations.cpp diff --git a/include/media/SingleStateQueue.h b/include/media/SingleStateQueue.h new file mode 100644 index 0000000..04c5fd0 --- /dev/null +++ b/include/media/SingleStateQueue.h @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SINGLE_STATE_QUEUE_H +#define SINGLE_STATE_QUEUE_H + +// Non-blocking single element state queue, or +// Non-blocking single-reader / single-writer multi-word atomic load / store + +#include + +namespace android { + +template class SingleStateQueue { + +public: + + class Mutator; + class Observer; + + struct Shared { + // needs to be part of a union so don't define constructor or destructor + + friend class Mutator; + friend class Observer; + +private: + void init() { mAck = 0; mSequence = 0; } + + volatile int32_t mAck; +#if 0 + int mPad[7]; + // cache line boundary +#endif + volatile int32_t mSequence; + T mValue; + }; + + class Mutator { + public: + Mutator(Shared *shared); + /*virtual*/ ~Mutator() { } + + // push new value onto state queue, overwriting previous value; + // returns a sequence number which can be used with ack() + int32_t push(const T& value); + + // return true if most recent push has been observed + bool ack(); + + // return true if a push with specified sequence number or later has been observed + bool ack(int32_t sequence); + + private: + int32_t mSequence; + Shared * const mShared; + }; + + class Observer { + public: + Observer(Shared *shared); + /*virtual*/ ~Observer() { } + + // return true if value has changed + bool poll(T& value); + + private: + int32_t mSequence; + int mSeed; // for PRNG + Shared * const mShared; + }; + +#if 0 + SingleStateQueue(void /*Shared*/ *shared); + /*virtual*/ ~SingleStateQueue() { } + + static size_t size() { return sizeof(Shared); } +#endif + +}; + +} // namespace android + +#endif // SINGLE_STATE_QUEUE_H diff --git a/include/private/media/StaticAudioTrackState.h b/include/private/media/StaticAudioTrackState.h new file mode 100644 index 0000000..46a5946 --- /dev/null +++ b/include/private/media/StaticAudioTrackState.h @@ -0,0 +1,35 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef STATIC_AUDIO_TRACK_STATE_H +#define STATIC_AUDIO_TRACK_STATE_H + +namespace android { + +// Represents a single state of an AudioTrack that was created in static mode (shared memory buffer +// supplied by the client). This state needs to be communicated from the client to server. As this +// state is too large to be updated atomically without a mutex, and mutexes aren't allowed here, the +// state is wrapped by a SingleStateQueue. 
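+// A minimal usage sketch (illustrative only -- the object names and the
+// direct stack placement of Shared, which in practice lives in shared
+// memory, are not part of this change):
+//
+//   SingleStateQueue<StaticAudioTrackState>::Shared shared;
+//   SingleStateQueue<StaticAudioTrackState>::Observer observer(&shared); // server; initializes Shared
+//   SingleStateQueue<StaticAudioTrackState>::Mutator mutator(&shared);   // client
+//
+//   StaticAudioTrackState s;
+//   s.mLoopStart = 0; s.mLoopEnd = 480; s.mLoopCount = -1;
+//   int32_t seq = mutator.push(s);     // non-blocking; overwrites any unread value
+//
+//   StaticAudioTrackState latest;
+//   if (observer.poll(latest)) { /* apply the new loop points */ }
+//   bool seen = mutator.ack(seq);      // true once the observer has polled that push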
+struct StaticAudioTrackState { + // do not define constructors, destructors, or virtual methods + size_t mLoopStart; + size_t mLoopEnd; + int mLoopCount; +}; + +} // namespace android + +#endif // STATIC_AUDIO_TRACK_STATE_H diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index 52fa3e1..6b48991 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -53,6 +53,13 @@ LOCAL_SRC_FILES:= \ SoundPool.cpp \ SoundPoolThread.cpp +LOCAL_SRC_FILES += ../libnbaio/roundup.c + +# for +LOCAL_CFLAGS += -DANDROID_SMP=$(if $(findstring true,$(TARGET_CPU_SMP)),1,0) +LOCAL_SRC_FILES += SingleStateQueue.cpp +LOCAL_CFLAGS += -DSINGLE_STATE_QUEUE_INSTANTIATIONS='"SingleStateQueueInstantiations.cpp"' + LOCAL_SHARED_LIBRARIES := \ libui libcutils libutils libbinder libsonivox libicuuc libexpat \ libcamera_client libstagefright_foundation \ diff --git a/media/libmedia/SingleStateQueue.cpp b/media/libmedia/SingleStateQueue.cpp new file mode 100644 index 0000000..3503baa --- /dev/null +++ b/media/libmedia/SingleStateQueue.cpp @@ -0,0 +1,107 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include // for android_memory_barrier() +#include + +namespace android { + +template SingleStateQueue::Mutator::Mutator(Shared *shared) + : mSequence(0), mShared((Shared *) shared) +{ + // exactly one of Mutator and Observer must initialize, currently it is Observer + //shared->init(); +} + +template int32_t SingleStateQueue::Mutator::push(const T& value) +{ + Shared *shared = mShared; + int32_t sequence = mSequence; + sequence++; + android_atomic_acquire_store(sequence, &shared->mSequence); + shared->mValue = value; + sequence++; + android_atomic_release_store(sequence, &shared->mSequence); + mSequence = sequence; + // consider signalling a futex here, if we know that observer is waiting + return sequence; +} + +template bool SingleStateQueue::Mutator::ack() +{ + return mShared->mAck - mSequence == 0; +} + +template bool SingleStateQueue::Mutator::ack(int32_t sequence) +{ + // this relies on 2's complement rollover to detect an ancient sequence number + return mShared->mAck - sequence >= 0; +} + +template SingleStateQueue::Observer::Observer(Shared *shared) + : mSequence(0), mSeed(1), mShared((Shared *) shared) +{ + // exactly one of Mutator and Observer must initialize, currently it is Observer + shared->init(); +} + +template bool SingleStateQueue::Observer::poll(T& value) +{ + Shared *shared = mShared; + int32_t before = shared->mSequence; + if (before == mSequence) { + return false; + } + for (int tries = 0; ; ) { + const int MAX_TRIES = 5; + if (before & 1) { + if (++tries >= MAX_TRIES) { + return false; + } + before = shared->mSequence; + } else { + android_memory_barrier(); + T temp = shared->mValue; + int32_t after = android_atomic_release_load(&shared->mSequence); + if (after == before) { + value = temp; + shared->mAck = before; + mSequence = before; + return true; + } + if (++tries >= 
MAX_TRIES) { + return false; + } + before = after; + } + } +} + +#if 0 +template SingleStateQueue::SingleStateQueue(void /*Shared*/ *shared) +{ + ((Shared *) shared)->init(); +} +#endif + +} // namespace android + +// hack for gcc +#ifdef SINGLE_STATE_QUEUE_INSTANTIATIONS +#include SINGLE_STATE_QUEUE_INSTANTIATIONS +#endif diff --git a/media/libmedia/SingleStateQueueInstantiations.cpp b/media/libmedia/SingleStateQueueInstantiations.cpp new file mode 100644 index 0000000..2afebe9 --- /dev/null +++ b/media/libmedia/SingleStateQueueInstantiations.cpp @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +// FIXME hack for gcc + +namespace android { + +template class SingleStateQueue; // typedef StaticAudioTrackSingleStateQueue + +} diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index 7daef99..7806f48 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -104,5 +104,4 @@ LOCAL_MODULE_TAGS := optional include $(BUILD_EXECUTABLE) - include $(call all-makefiles-under,$(LOCAL_PATH)) -- cgit v1.1 From a556c4822fc205db0d27834ba5b637c351d73ffa Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 5 Mar 2013 10:56:27 -0800 Subject: Squashed commit of the following: commit e5919b1f57ea61fa1d380dfdb4e3e832ce73d79d Author: Andreas Huber Date: Wed Feb 27 16:38:48 2013 -0800 Configure TCP datagram sockets to be TCP_NODELAY. Change-Id: Ia724a81e6e27dccd00ac84603e712d69ca77a0cd commit 1b52b393183db8a6dc000a7c31baac544ccfc50c Author: Andreas Huber Date: Wed Feb 27 14:26:01 2013 -0800 Send IDR frame requests on packet loss. Change-Id: I53b7fb85cbd6923491113b93ec3e2175726d654a commit 68d76b4b3a0181b30abc57cd2915273210530a6d Author: Andreas Huber Date: Tue Feb 26 15:12:34 2013 -0800 Revive TunnelRenderer Change-Id: I8c5a9d982793b1c5b841c828227b354f1dab618c commit 3df28a8e9d8bcdc1430016bb088d097eca653b56 Author: Andreas Huber Date: Tue Feb 26 13:53:14 2013 -0800 Disable suspension of video updates. Change-Id: I7e3a16b8d7dd7a55d9f962a2236388931f664106 commit 2ec7a79de019a26ec415016c1478afd762f069cd Author: Andreas Huber Date: Tue Feb 26 08:54:40 2013 -0800 Adds an SNTP client to wfd. Change-Id: Icd7d6104e951e1443e4c1b81ccf6b3731d79d3ec commit c81c3bb5725bb4079a4d7fb02151ad0bb540632f Author: Andreas Huber Date: Mon Feb 25 10:00:58 2013 -0800 Squashed commit of the following: commit b83a4ec96659ef6f6b7c2090fdd866abe3ab78ba Author: Andreas Huber Date: Mon Feb 25 09:28:11 2013 -0800 Some reorganization of the rtp code, renamed StreamHub -> MediaSender Change-Id: I8cf67444960e60426bf74880af1acce41e8b2fef commit 7769cbd739f2a67c58e0c6a7b1a21a12210c7c4d Author: Andreas Huber Date: Fri Feb 22 16:12:18 2013 -0800 Choose a smaller MTU to avoid fragmented IPv4 packets, fix AVC assembler. 
Change-Id: I274b3cc1483c4e9f4d146dbf9f3d9f7557ef7ef9 commit 1f687ee80a88b56d614c2cf408ff729114ff86a0 Author: Andreas Huber Date: Fri Feb 22 11:38:31 2013 -0800 better reporting. Change-Id: I67f0bb51f106ea77f5cc75938b053c8e8e8f688e commit 7950c1cd59213eb5f281fcde44a772ecffae473d Author: Andreas Huber Date: Fri Feb 22 09:07:41 2013 -0800 stuff Change-Id: Ib99416366d3eec6e6ad69b4d791a8a9408410f3b commit 33c09045b0f86fcaa4619cbd679b47a074f71231 Author: Andreas Huber Date: Thu Feb 21 15:54:01 2013 -0800 Render frames according to their timestamps. Change-Id: I8143a95cffe775799d6a4bb093558bd7abb1f063 commit d8b6daae2160bf1c016d7c6251256b46bb89db42 Author: Andreas Huber Date: Thu Feb 21 15:01:27 2013 -0800 Better packet-lost logic. Change-Id: I611eee5a42bd089638cf45b0e16f628ff2a955ab commit 782c6b15717e2d062d96665a089d06c0577733d0 Author: Andreas Huber Date: Wed Feb 20 15:06:47 2013 -0800 Add a dedicated looper for the MediaReceiver Change-Id: I3b79cad367fb69c9a160a8d009af8c5f5142b98e commit 4c7b8b10861674b773270103bcabd1a99486a691 Author: Andreas Huber Date: Wed Feb 20 14:30:28 2013 -0800 Tweaks to RTPSender and RTPReceiver Change-Id: Ib535552f289a26cfead6df8c63e4c63d3987d4e9 commit 39226b28177a816cda5c67b321745d396b18277d Author: Andreas Huber Date: Tue Feb 19 08:48:25 2013 -0800 Playing around with non muxed delivery Change-Id: I845375f6938d04bc30502840c2ceb7688dc9b237 commit c16d21de75d8ecdbcd9abce14934afe484970061 Author: Andreas Huber Date: Wed Feb 13 14:43:35 2013 -0800 A more solid base for RTP communication. Change-Id: I52033eeb0feba0ff029d61553a821c82f2fa1c3f Change-Id: I57e3bcfc1c59a012b15aaaa42ed81f09c34c26bb Change-Id: I4b09db4a44d0eeded7a1658f6dc6c97d4b8be720 --- .../wifi-display/ANetworkSession.cpp | 12 + media/libstagefright/wifi-display/Android.mk | 32 +- .../libstagefright/wifi-display/MediaReceiver.cpp | 311 +++++++ media/libstagefright/wifi-display/MediaReceiver.h | 108 +++ media/libstagefright/wifi-display/MediaSender.cpp | 443 ++++++++++ media/libstagefright/wifi-display/MediaSender.h | 126 +++ media/libstagefright/wifi-display/SNTPClient.cpp | 174 ++++ media/libstagefright/wifi-display/SNTPClient.h | 62 ++ media/libstagefright/wifi-display/TimeSeries.cpp | 67 -- media/libstagefright/wifi-display/TimeSeries.h | 46 -- .../wifi-display/rtp/RTPAssembler.cpp | 324 ++++++++ .../libstagefright/wifi-display/rtp/RTPAssembler.h | 92 +++ media/libstagefright/wifi-display/rtp/RTPBase.h | 49 ++ .../wifi-display/rtp/RTPReceiver.cpp | 899 +++++++++++++++++++++ .../libstagefright/wifi-display/rtp/RTPReceiver.h | 110 +++ .../libstagefright/wifi-display/rtp/RTPSender.cpp | 701 ++++++++++++++++ media/libstagefright/wifi-display/rtp/RTPSender.h | 112 +++ media/libstagefright/wifi-display/rtptest.cpp | 382 +++++++++ .../wifi-display/sink/DirectRenderer.cpp | 359 +++----- .../wifi-display/sink/DirectRenderer.h | 43 +- .../wifi-display/sink/LinearRegression.cpp | 110 --- .../wifi-display/sink/LinearRegression.h | 52 -- media/libstagefright/wifi-display/sink/RTPSink.cpp | 870 -------------------- media/libstagefright/wifi-display/sink/RTPSink.h | 118 --- .../wifi-display/sink/TunnelRenderer.cpp | 188 +---- .../wifi-display/sink/TunnelRenderer.h | 20 +- .../wifi-display/sink/WifiDisplaySink.cpp | 169 +++- .../wifi-display/sink/WifiDisplaySink.h | 23 +- .../wifi-display/source/PlaybackSession.cpp | 423 ++-------- .../wifi-display/source/PlaybackSession.h | 38 +- .../wifi-display/source/RepeaterSource.h | 2 +- .../libstagefright/wifi-display/source/Sender.cpp | 878 -------------------- 
media/libstagefright/wifi-display/source/Sender.h | 169 ---- .../wifi-display/source/TSPacketizer.cpp | 26 +- .../wifi-display/source/TSPacketizer.h | 2 + .../wifi-display/source/WifiDisplaySource.cpp | 12 +- 36 files changed, 4346 insertions(+), 3206 deletions(-) create mode 100644 media/libstagefright/wifi-display/MediaReceiver.cpp create mode 100644 media/libstagefright/wifi-display/MediaReceiver.h create mode 100644 media/libstagefright/wifi-display/MediaSender.cpp create mode 100644 media/libstagefright/wifi-display/MediaSender.h create mode 100644 media/libstagefright/wifi-display/SNTPClient.cpp create mode 100644 media/libstagefright/wifi-display/SNTPClient.h delete mode 100644 media/libstagefright/wifi-display/TimeSeries.cpp delete mode 100644 media/libstagefright/wifi-display/TimeSeries.h create mode 100644 media/libstagefright/wifi-display/rtp/RTPAssembler.cpp create mode 100644 media/libstagefright/wifi-display/rtp/RTPAssembler.h create mode 100644 media/libstagefright/wifi-display/rtp/RTPBase.h create mode 100644 media/libstagefright/wifi-display/rtp/RTPReceiver.cpp create mode 100644 media/libstagefright/wifi-display/rtp/RTPReceiver.h create mode 100644 media/libstagefright/wifi-display/rtp/RTPSender.cpp create mode 100644 media/libstagefright/wifi-display/rtp/RTPSender.h create mode 100644 media/libstagefright/wifi-display/rtptest.cpp delete mode 100644 media/libstagefright/wifi-display/sink/LinearRegression.cpp delete mode 100644 media/libstagefright/wifi-display/sink/LinearRegression.h delete mode 100644 media/libstagefright/wifi-display/sink/RTPSink.cpp delete mode 100644 media/libstagefright/wifi-display/sink/RTPSink.h delete mode 100644 media/libstagefright/wifi-display/source/Sender.cpp delete mode 100644 media/libstagefright/wifi-display/source/Sender.h diff --git a/media/libstagefright/wifi-display/ANetworkSession.cpp b/media/libstagefright/wifi-display/ANetworkSession.cpp index 06f71f4..cb6011c 100644 --- a/media/libstagefright/wifi-display/ANetworkSession.cpp +++ b/media/libstagefright/wifi-display/ANetworkSession.cpp @@ -23,6 +23,7 @@ #include #include +#include #include #include #include @@ -314,6 +315,9 @@ status_t ANetworkSession::Session::readMore() { sp packet = new ABuffer(packetSize); memcpy(packet->data(), mInBuffer.c_str() + 2, packetSize); + int64_t nowUs = ALooper::GetNowUs(); + packet->meta()->setInt64("arrivalTimeUs", nowUs); + sp notify = mNotify->dup(); notify->setInt32("sessionID", mSessionID); notify->setInt32("reason", kWhatDatagram); @@ -770,6 +774,14 @@ status_t ANetworkSession::createClientOrServer( err = -errno; goto bail2; } + } else if (mode == kModeCreateTCPDatagramSessionActive) { + int flag = 1; + res = setsockopt(s, IPPROTO_TCP, TCP_NODELAY, &flag, sizeof(flag)); + + if (res < 0) { + err = -errno; + goto bail2; + } } err = MakeSocketNonBlocking(s); diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index 5095e82..19f560c 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -4,21 +4,23 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ ANetworkSession.cpp \ + MediaReceiver.cpp \ + MediaSender.cpp \ Parameters.cpp \ ParsedMessage.cpp \ + rtp/RTPAssembler.cpp \ + rtp/RTPReceiver.cpp \ + rtp/RTPSender.cpp \ sink/DirectRenderer.cpp \ - sink/LinearRegression.cpp \ - sink/RTPSink.cpp \ sink/TunnelRenderer.cpp \ sink/WifiDisplaySink.cpp \ + SNTPClient.cpp \ source/Converter.cpp \ source/MediaPuller.cpp \ source/PlaybackSession.cpp \ 
source/RepeaterSource.cpp \ - source/Sender.cpp \ source/TSPacketizer.cpp \ source/WifiDisplaySource.cpp \ - TimeSeries.cpp \ VideoFormats.cpp \ LOCAL_C_INCLUDES:= \ @@ -85,3 +87,25 @@ LOCAL_MODULE:= udptest LOCAL_MODULE_TAGS := debug include $(BUILD_EXECUTABLE) + +################################################################################ + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + rtptest.cpp \ + +LOCAL_SHARED_LIBRARIES:= \ + libbinder \ + libgui \ + libmedia \ + libstagefright \ + libstagefright_foundation \ + libstagefright_wfd \ + libutils \ + +LOCAL_MODULE:= rtptest + +LOCAL_MODULE_TAGS := debug + +include $(BUILD_EXECUTABLE) diff --git a/media/libstagefright/wifi-display/MediaReceiver.cpp b/media/libstagefright/wifi-display/MediaReceiver.cpp new file mode 100644 index 0000000..3c92d41 --- /dev/null +++ b/media/libstagefright/wifi-display/MediaReceiver.cpp @@ -0,0 +1,311 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaReceiver" +#include + +#include "MediaReceiver.h" + +#include "ANetworkSession.h" +#include "AnotherPacketSource.h" +#include "rtp/RTPReceiver.h" + +#include +#include +#include +#include +#include + +namespace android { + +MediaReceiver::MediaReceiver( + const sp &netSession, + const sp ¬ify) + : mNetSession(netSession), + mNotify(notify), + mMode(MODE_UNDEFINED), + mGeneration(0), + mInitStatus(OK), + mInitDoneCount(0) { +} + +MediaReceiver::~MediaReceiver() { +} + +ssize_t MediaReceiver::addTrack( + RTPReceiver::TransportMode transportMode, + int32_t *localRTPPort) { + if (mMode != MODE_UNDEFINED) { + return INVALID_OPERATION; + } + + size_t trackIndex = mTrackInfos.size(); + + TrackInfo info; + + sp notify = new AMessage(kWhatReceiverNotify, id()); + notify->setInt32("generation", mGeneration); + notify->setSize("trackIndex", trackIndex); + + info.mReceiver = new RTPReceiver(mNetSession, notify); + looper()->registerHandler(info.mReceiver); + + info.mReceiver->registerPacketType( + 33, RTPReceiver::PACKETIZATION_TRANSPORT_STREAM); + + info.mReceiver->registerPacketType( + 96, RTPReceiver::PACKETIZATION_AAC); + + info.mReceiver->registerPacketType( + 97, RTPReceiver::PACKETIZATION_H264); + + status_t err = info.mReceiver->initAsync(transportMode, localRTPPort); + + if (err != OK) { + looper()->unregisterHandler(info.mReceiver->id()); + info.mReceiver.clear(); + + return err; + } + + mTrackInfos.push_back(info); + + return trackIndex; +} + +status_t MediaReceiver::connectTrack( + size_t trackIndex, + const char *remoteHost, + int32_t remoteRTPPort, + int32_t remoteRTCPPort) { + if (trackIndex >= mTrackInfos.size()) { + return -ERANGE; + } + + TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); + return info->mReceiver->connect(remoteHost, remoteRTPPort, remoteRTCPPort); +} + +status_t MediaReceiver::initAsync(Mode mode) { + if ((mode == MODE_TRANSPORT_STREAM || mode == MODE_TRANSPORT_STREAM_RAW) + && mTrackInfos.size() > 1) { + return 
INVALID_OPERATION; + } + + sp msg = new AMessage(kWhatInit, id()); + msg->setInt32("mode", mode); + msg->post(); + + return OK; +} + +void MediaReceiver::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatInit: + { + int32_t mode; + CHECK(msg->findInt32("mode", &mode)); + + CHECK_EQ(mMode, MODE_UNDEFINED); + mMode = (Mode)mode; + + if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) { + notifyInitDone(mInitStatus); + } + + mTSParser = new ATSParser(ATSParser::ALIGNED_VIDEO_DATA); + mFormatKnownMask = 0; + break; + } + + case kWhatReceiverNotify: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + if (generation != mGeneration) { + break; + } + + onReceiverNotify(msg); + break; + } + + default: + TRESPASS(); + } +} + +void MediaReceiver::onReceiverNotify(const sp &msg) { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case RTPReceiver::kWhatInitDone: + { + ++mInitDoneCount; + + int32_t err; + CHECK(msg->findInt32("err", &err)); + + if (err != OK) { + mInitStatus = err; + ++mGeneration; + } + + if (mMode != MODE_UNDEFINED) { + if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) { + notifyInitDone(mInitStatus); + } + } + break; + } + + case RTPReceiver::kWhatError: + { + int32_t err; + CHECK(msg->findInt32("err", &err)); + + notifyError(err); + break; + } + + case RTPReceiver::kWhatAccessUnit: + { + size_t trackIndex; + CHECK(msg->findSize("trackIndex", &trackIndex)); + + sp accessUnit; + CHECK(msg->findBuffer("accessUnit", &accessUnit)); + + int32_t followsDiscontinuity; + if (!msg->findInt32( + "followsDiscontinuity", &followsDiscontinuity)) { + followsDiscontinuity = 0; + } + + if (mMode == MODE_TRANSPORT_STREAM) { + if (followsDiscontinuity) { + mTSParser->signalDiscontinuity( + ATSParser::DISCONTINUITY_TIME, NULL /* extra */); + } + + for (size_t offset = 0; + offset < accessUnit->size(); offset += 188) { + status_t err = mTSParser->feedTSPacket( + accessUnit->data() + offset, 188); + + if (err != OK) { + notifyError(err); + break; + } + } + + drainPackets(0 /* trackIndex */, ATSParser::VIDEO); + drainPackets(1 /* trackIndex */, ATSParser::AUDIO); + } else { + postAccessUnit(trackIndex, accessUnit, NULL); + } + break; + } + + case RTPReceiver::kWhatPacketLost: + { + notifyPacketLost(); + break; + } + + default: + TRESPASS(); + } +} + +void MediaReceiver::drainPackets( + size_t trackIndex, ATSParser::SourceType type) { + sp source = + static_cast( + mTSParser->getSource(type).get()); + + if (source == NULL) { + return; + } + + sp format; + if (!(mFormatKnownMask & (1ul << trackIndex))) { + sp meta = source->getFormat(); + CHECK(meta != NULL); + + CHECK_EQ((status_t)OK, convertMetaDataToMessage(meta, &format)); + + mFormatKnownMask |= 1ul << trackIndex; + } + + status_t finalResult; + while (source->hasBufferAvailable(&finalResult)) { + sp accessUnit; + status_t err = source->dequeueAccessUnit(&accessUnit); + if (err == OK) { + postAccessUnit(trackIndex, accessUnit, format); + format.clear(); + } else if (err != INFO_DISCONTINUITY) { + notifyError(err); + } + } + + if (finalResult != OK) { + notifyError(finalResult); + } +} + +void MediaReceiver::notifyInitDone(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatInitDone); + notify->setInt32("err", err); + notify->post(); +} + +void MediaReceiver::notifyError(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); +} + +void 
MediaReceiver::notifyPacketLost() { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatPacketLost); + notify->post(); +} + +void MediaReceiver::postAccessUnit( + size_t trackIndex, + const sp &accessUnit, + const sp &format) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatAccessUnit); + notify->setSize("trackIndex", trackIndex); + notify->setBuffer("accessUnit", accessUnit); + + if (format != NULL) { + notify->setMessage("format", format); + } + + notify->post(); +} + +} // namespace android + + diff --git a/media/libstagefright/wifi-display/MediaReceiver.h b/media/libstagefright/wifi-display/MediaReceiver.h new file mode 100644 index 0000000..7adc3c4 --- /dev/null +++ b/media/libstagefright/wifi-display/MediaReceiver.h @@ -0,0 +1,108 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "ATSParser.h" +#include "rtp/RTPReceiver.h" + +namespace android { + +struct ABuffer; +struct ANetworkSession; +struct AMessage; +struct ATSParser; + +// This class facilitates receiving of media data for one or more tracks +// over RTP. Either a 1:1 track to RTP channel mapping is used or a single +// RTP channel provides the data for a transport stream that is consequently +// demuxed and its track's data provided to the observer. 
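+// A rough usage sketch for the transport stream case (looper and notify
+// setup abbreviated; the UDP transport enumerator name is an assumption):
+//
+//   sp<MediaReceiver> receiver = new MediaReceiver(netSession, notify);
+//   looper->registerHandler(receiver);
+//
+//   int32_t localRTPPort;
+//   ssize_t track = receiver->addTrack(RTPReceiver::TRANSPORT_UDP, &localRTPPort);
+//   receiver->connectTrack(track, remoteHost, remoteRTPPort, remoteRTCPPort);
+//   receiver->initAsync(MediaReceiver::MODE_TRANSPORT_STREAM);
+//
+//   // 'notify' then receives kWhatInitDone, kWhatAccessUnit (carrying
+//   // "trackIndex", "accessUnit" and optionally "format"), kWhatPacketLost
+//   // and kWhatError.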
+struct MediaReceiver : public AHandler { + enum { + kWhatInitDone, + kWhatError, + kWhatAccessUnit, + kWhatPacketLost, + }; + + MediaReceiver( + const sp &netSession, + const sp ¬ify); + + ssize_t addTrack( + RTPReceiver::TransportMode transportMode, + int32_t *localRTPPort); + + status_t connectTrack( + size_t trackIndex, + const char *remoteHost, + int32_t remoteRTPPort, + int32_t remoteRTCPPort); + + enum Mode { + MODE_UNDEFINED, + MODE_TRANSPORT_STREAM, + MODE_TRANSPORT_STREAM_RAW, + MODE_ELEMENTARY_STREAMS, + }; + status_t initAsync(Mode mode); + +protected: + virtual void onMessageReceived(const sp &msg); + virtual ~MediaReceiver(); + +private: + enum { + kWhatInit, + kWhatReceiverNotify, + }; + + struct TrackInfo { + sp mReceiver; + }; + + sp mNetSession; + sp mNotify; + + Mode mMode; + int32_t mGeneration; + + Vector mTrackInfos; + + status_t mInitStatus; + size_t mInitDoneCount; + + sp mTSParser; + uint32_t mFormatKnownMask; + + void onReceiverNotify(const sp &msg); + + void drainPackets(size_t trackIndex, ATSParser::SourceType type); + + void notifyInitDone(status_t err); + void notifyError(status_t err); + void notifyPacketLost(); + + void postAccessUnit( + size_t trackIndex, + const sp &accessUnit, + const sp &format); + + DISALLOW_EVIL_CONSTRUCTORS(MediaReceiver); +}; + +} // namespace android + diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp new file mode 100644 index 0000000..900aa82 --- /dev/null +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -0,0 +1,443 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaSender" +#include + +#include "MediaSender.h" + +#include "ANetworkSession.h" +#include "rtp/RTPSender.h" +#include "source/TSPacketizer.h" + +#include "include/avc_utils.h" + +#include +#include +#include +#include + +namespace android { + +MediaSender::MediaSender( + const sp &netSession, + const sp ¬ify) + : mNetSession(netSession), + mNotify(notify), + mMode(MODE_UNDEFINED), + mGeneration(0), + mPrevTimeUs(-1ll), + mInitDoneCount(0) { +} + +MediaSender::~MediaSender() { +} + +status_t MediaSender::setHDCP(const sp &hdcp) { + if (mMode != MODE_UNDEFINED) { + return INVALID_OPERATION; + } + + mHDCP = hdcp; + + return OK; +} + +ssize_t MediaSender::addTrack(const sp &format, uint32_t flags) { + if (mMode != MODE_UNDEFINED) { + return INVALID_OPERATION; + } + + TrackInfo info; + info.mFormat = format; + info.mFlags = flags; + info.mPacketizerTrackIndex = -1; + + AString mime; + CHECK(format->findString("mime", &mime)); + info.mIsAudio = !strncasecmp("audio/", mime.c_str(), 6); + + size_t index = mTrackInfos.size(); + mTrackInfos.push_back(info); + + return index; +} + +status_t MediaSender::initAsync( + ssize_t trackIndex, + RTPSender::TransportMode transportMode, + const char *remoteHost, + int32_t remoteRTPPort, + int32_t remoteRTCPPort, + int32_t *localRTPPort) { + if (trackIndex < 0) { + if (mMode != MODE_UNDEFINED) { + return INVALID_OPERATION; + } + + mTSPacketizer = new TSPacketizer; + + status_t err = OK; + for (size_t i = 0; i < mTrackInfos.size(); ++i) { + TrackInfo *info = &mTrackInfos.editItemAt(i); + + sp trackFormat = info->mFormat; + if (mHDCP != NULL && !info->mIsAudio) { + // HDCP2.0 _and_ HDCP 2.1 specs say to set the version + // inside the HDCP descriptor to 0x20!!! + trackFormat->setInt32("hdcp-version", 0x20); + } + + ssize_t packetizerTrackIndex = + mTSPacketizer->addTrack(trackFormat); + + if (packetizerTrackIndex < 0) { + err = packetizerTrackIndex; + break; + } + + info->mPacketizerTrackIndex = packetizerTrackIndex; + } + + if (err == OK) { + sp notify = new AMessage(kWhatSenderNotify, id()); + notify->setInt32("generation", mGeneration); + mTSSender = new RTPSender(mNetSession, notify); + looper()->registerHandler(mTSSender); + + err = mTSSender->initAsync( + transportMode, + remoteHost, + remoteRTPPort, + remoteRTCPPort, + localRTPPort); + + if (err != OK) { + looper()->unregisterHandler(mTSSender->id()); + mTSSender.clear(); + } + } + + if (err != OK) { + for (size_t i = 0; i < mTrackInfos.size(); ++i) { + TrackInfo *info = &mTrackInfos.editItemAt(i); + info->mPacketizerTrackIndex = -1; + } + + mTSPacketizer.clear(); + return err; + } + + mMode = MODE_TRANSPORT_STREAM; + mInitDoneCount = 1; + + return OK; + } + + if (mMode == MODE_TRANSPORT_STREAM) { + return INVALID_OPERATION; + } + + if ((size_t)trackIndex >= mTrackInfos.size()) { + return -ERANGE; + } + + TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); + + if (info->mSender != NULL) { + return INVALID_OPERATION; + } + + sp notify = new AMessage(kWhatSenderNotify, id()); + notify->setInt32("generation", mGeneration); + notify->setSize("trackIndex", trackIndex); + + info->mSender = new RTPSender(mNetSession, notify); + looper()->registerHandler(info->mSender); + + status_t err = info->mSender->initAsync( + transportMode, + remoteHost, + remoteRTPPort, + remoteRTCPPort, + localRTPPort); + + if (err != OK) { + looper()->unregisterHandler(info->mSender->id()); + info->mSender.clear(); + + return err; + } + + if (mMode == MODE_UNDEFINED) { + 
mInitDoneCount = mTrackInfos.size(); + } + + mMode = MODE_ELEMENTARY_STREAMS; + + return OK; +} + +status_t MediaSender::queueAccessUnit( + size_t trackIndex, const sp &accessUnit) { + if (mMode == MODE_UNDEFINED) { + return INVALID_OPERATION; + } + + if (trackIndex >= mTrackInfos.size()) { + return -ERANGE; + } + + if (mMode == MODE_TRANSPORT_STREAM) { + TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); + info->mAccessUnits.push_back(accessUnit); + + mTSPacketizer->extractCSDIfNecessary(info->mPacketizerTrackIndex); + + for (;;) { + ssize_t minTrackIndex = -1; + int64_t minTimeUs = -1ll; + + for (size_t i = 0; i < mTrackInfos.size(); ++i) { + const TrackInfo &info = mTrackInfos.itemAt(i); + + if (info.mAccessUnits.empty()) { + minTrackIndex = -1; + minTimeUs = -1ll; + break; + } + + int64_t timeUs; + const sp &accessUnit = *info.mAccessUnits.begin(); + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + + if (minTrackIndex < 0 || timeUs < minTimeUs) { + minTrackIndex = i; + minTimeUs = timeUs; + } + } + + if (minTrackIndex < 0) { + return OK; + } + + TrackInfo *info = &mTrackInfos.editItemAt(minTrackIndex); + sp accessUnit = *info->mAccessUnits.begin(); + info->mAccessUnits.erase(info->mAccessUnits.begin()); + + sp tsPackets; + status_t err = packetizeAccessUnit( + minTrackIndex, accessUnit, &tsPackets); + + if (err == OK) { + err = mTSSender->queueBuffer( + tsPackets, + 33 /* packetType */, + RTPSender::PACKETIZATION_TRANSPORT_STREAM); + } + + if (err != OK) { + return err; + } + } + } + + TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); + + return info->mSender->queueBuffer( + accessUnit, + info->mIsAudio ? 96 : 97 /* packetType */, + info->mIsAudio + ? RTPSender::PACKETIZATION_AAC : RTPSender::PACKETIZATION_H264); +} + +void MediaSender::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatSenderNotify: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + if (generation != mGeneration) { + break; + } + + onSenderNotify(msg); + break; + } + + default: + TRESPASS(); + } +} + +void MediaSender::onSenderNotify(const sp &msg) { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case RTPSender::kWhatInitDone: + { + --mInitDoneCount; + + int32_t err; + CHECK(msg->findInt32("err", &err)); + + if (err != OK) { + notifyInitDone(err); + ++mGeneration; + break; + } + + if (mInitDoneCount == 0) { + notifyInitDone(OK); + } + break; + } + + case RTPSender::kWhatError: + { + int32_t err; + CHECK(msg->findInt32("err", &err)); + + notifyError(err); + break; + } + + default: + TRESPASS(); + } +} + +void MediaSender::notifyInitDone(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatInitDone); + notify->setInt32("err", err); + notify->post(); +} + +void MediaSender::notifyError(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); +} + +status_t MediaSender::packetizeAccessUnit( + size_t trackIndex, + sp accessUnit, + sp *tsPackets) { + const TrackInfo &info = mTrackInfos.itemAt(trackIndex); + + uint32_t flags = 0; + + bool isHDCPEncrypted = false; + uint64_t inputCTR; + uint8_t HDCP_private_data[16]; + + bool manuallyPrependSPSPPS = + !info.mIsAudio + && (info.mFlags & FLAG_MANUALLY_PREPEND_SPS_PPS) + && IsIDR(accessUnit); + + if (mHDCP != NULL && !info.mIsAudio) { + isHDCPEncrypted = true; + + if (manuallyPrependSPSPPS) { + accessUnit = mTSPacketizer->prependCSD( + info.mPacketizerTrackIndex, accessUnit); + 
} + + status_t err = mHDCP->encrypt( + accessUnit->data(), accessUnit->size(), + trackIndex /* streamCTR */, + &inputCTR, + accessUnit->data()); + + if (err != OK) { + ALOGE("Failed to HDCP-encrypt media data (err %d)", + err); + + return err; + } + + HDCP_private_data[0] = 0x00; + + HDCP_private_data[1] = + (((trackIndex >> 30) & 3) << 1) | 1; + + HDCP_private_data[2] = (trackIndex >> 22) & 0xff; + + HDCP_private_data[3] = + (((trackIndex >> 15) & 0x7f) << 1) | 1; + + HDCP_private_data[4] = (trackIndex >> 7) & 0xff; + + HDCP_private_data[5] = + ((trackIndex & 0x7f) << 1) | 1; + + HDCP_private_data[6] = 0x00; + + HDCP_private_data[7] = + (((inputCTR >> 60) & 0x0f) << 1) | 1; + + HDCP_private_data[8] = (inputCTR >> 52) & 0xff; + + HDCP_private_data[9] = + (((inputCTR >> 45) & 0x7f) << 1) | 1; + + HDCP_private_data[10] = (inputCTR >> 37) & 0xff; + + HDCP_private_data[11] = + (((inputCTR >> 30) & 0x7f) << 1) | 1; + + HDCP_private_data[12] = (inputCTR >> 22) & 0xff; + + HDCP_private_data[13] = + (((inputCTR >> 15) & 0x7f) << 1) | 1; + + HDCP_private_data[14] = (inputCTR >> 7) & 0xff; + + HDCP_private_data[15] = + ((inputCTR & 0x7f) << 1) | 1; + + flags |= TSPacketizer::IS_ENCRYPTED; + } else if (manuallyPrependSPSPPS) { + flags |= TSPacketizer::PREPEND_SPS_PPS_TO_IDR_FRAMES; + } + + int64_t timeUs = ALooper::GetNowUs(); + if (mPrevTimeUs < 0ll || mPrevTimeUs + 100000ll <= timeUs) { + flags |= TSPacketizer::EMIT_PCR; + flags |= TSPacketizer::EMIT_PAT_AND_PMT; + + mPrevTimeUs = timeUs; + } + + mTSPacketizer->packetize( + info.mPacketizerTrackIndex, + accessUnit, + tsPackets, + flags, + !isHDCPEncrypted ? NULL : HDCP_private_data, + !isHDCPEncrypted ? 0 : sizeof(HDCP_private_data), + info.mIsAudio ? 2 : 0 /* numStuffingBytes */); + + return OK; +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/MediaSender.h b/media/libstagefright/wifi-display/MediaSender.h new file mode 100644 index 0000000..834780a --- /dev/null +++ b/media/libstagefright/wifi-display/MediaSender.h @@ -0,0 +1,126 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MEDIA_SENDER_H_ + +#define MEDIA_SENDER_H_ + +#include "rtp/RTPSender.h" + +#include +#include +#include +#include + +namespace android { + +struct ABuffer; +struct ANetworkSession; +struct AMessage; +struct IHDCP; +struct TSPacketizer; + +// This class facilitates sending of data from one or more media tracks +// through one or more RTP channels, either providing a 1:1 mapping from +// track to RTP channel or muxing all tracks into a single RTP channel and +// using transport stream encapsulation. +// Optionally the (video) data is encrypted using the provided hdcp object. 
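+// A rough usage sketch for transport stream muxing (error handling omitted;
+// the UDP transport enumerator name is an assumption):
+//
+//   sp<MediaSender> sender = new MediaSender(netSession, notify);
+//   looper->registerHandler(sender);
+//
+//   ssize_t videoTrack = sender->addTrack(
+//           videoFormat, MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS);
+//   ssize_t audioTrack = sender->addTrack(audioFormat, 0 /* flags */);
+//
+//   int32_t localRTPPort;
+//   sender->initAsync(
+//           -1 /* trackIndex: mux all tracks into one transport stream */,
+//           RTPSender::TRANSPORT_UDP,
+//           remoteHost, remoteRTPPort, remoteRTCPPort, &localRTPPort);
+//
+//   // after kWhatInitDone arrives via 'notify':
+//   sender->queueAccessUnit(videoTrack, accessUnit);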
+struct MediaSender : public AHandler { + enum { + kWhatInitDone, + kWhatError, + }; + + MediaSender( + const sp &netSession, + const sp ¬ify); + + status_t setHDCP(const sp &hdcp); + + enum FlagBits { + FLAG_MANUALLY_PREPEND_SPS_PPS = 1, + }; + ssize_t addTrack(const sp &format, uint32_t flags); + + // If trackIndex == -1, initialize for transport stream muxing. + status_t initAsync( + ssize_t trackIndex, + RTPSender::TransportMode transportMode, + const char *remoteHost, + int32_t remoteRTPPort, + int32_t remoteRTCPPort, + int32_t *localRTPPort); + + status_t queueAccessUnit( + size_t trackIndex, const sp &accessUnit); + +protected: + virtual void onMessageReceived(const sp &msg); + virtual ~MediaSender(); + +private: + enum { + kWhatSenderNotify, + }; + + enum Mode { + MODE_UNDEFINED, + MODE_TRANSPORT_STREAM, + MODE_ELEMENTARY_STREAMS, + }; + + struct TrackInfo { + sp mFormat; + uint32_t mFlags; + sp mSender; + List > mAccessUnits; + ssize_t mPacketizerTrackIndex; + bool mIsAudio; + }; + + sp mNetSession; + sp mNotify; + + sp mHDCP; + + Mode mMode; + int32_t mGeneration; + + Vector mTrackInfos; + + sp mTSPacketizer; + sp mTSSender; + int64_t mPrevTimeUs; + + size_t mInitDoneCount; + + void onSenderNotify(const sp &msg); + + void notifyInitDone(status_t err); + void notifyError(status_t err); + + status_t packetizeAccessUnit( + size_t trackIndex, + sp accessUnit, + sp *tsPackets); + + DISALLOW_EVIL_CONSTRUCTORS(MediaSender); +}; + +} // namespace android + +#endif // MEDIA_SENDER_H_ + diff --git a/media/libstagefright/wifi-display/SNTPClient.cpp b/media/libstagefright/wifi-display/SNTPClient.cpp new file mode 100644 index 0000000..5c0af6a --- /dev/null +++ b/media/libstagefright/wifi-display/SNTPClient.cpp @@ -0,0 +1,174 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "SNTPClient.h" + +#include +#include + +#include +#include +#include +#include +#include + +namespace android { + +SNTPClient::SNTPClient() { +} + +status_t SNTPClient::requestTime(const char *host) { + struct hostent *ent; + int64_t requestTimeNTP, requestTimeUs; + ssize_t n; + int64_t responseTimeUs, responseTimeNTP; + int64_t originateTimeNTP, receiveTimeNTP, transmitTimeNTP; + int64_t roundTripTimeNTP, clockOffsetNTP; + + status_t err = UNKNOWN_ERROR; + + int s = socket(AF_INET, SOCK_DGRAM, 0); + + if (s < 0) { + err = -errno; + + goto bail; + } + + ent = gethostbyname(host); + + if (ent == NULL) { + err = -ENOENT; + goto bail2; + } + + struct sockaddr_in hostAddr; + memset(hostAddr.sin_zero, 0, sizeof(hostAddr.sin_zero)); + hostAddr.sin_family = AF_INET; + hostAddr.sin_port = htons(kNTPPort); + hostAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr; + + uint8_t packet[kNTPPacketSize]; + memset(packet, 0, sizeof(packet)); + + packet[0] = kNTPModeClient | (kNTPVersion << 3); + + requestTimeNTP = getNowNTP(); + requestTimeUs = ALooper::GetNowUs(); + writeTimeStamp(&packet[kNTPTransmitTimeOffset], requestTimeNTP); + + n = sendto( + s, packet, sizeof(packet), 0, + (const struct sockaddr *)&hostAddr, sizeof(hostAddr)); + + if (n < 0) { + err = -errno; + goto bail2; + } + + memset(packet, 0, sizeof(packet)); + + do { + n = recv(s, packet, sizeof(packet), 0); + } while (n < 0 && errno == EINTR); + + if (n < 0) { + err = -errno; + goto bail2; + } + + responseTimeUs = ALooper::GetNowUs(); + + responseTimeNTP = requestTimeNTP + makeNTP(responseTimeUs - requestTimeUs); + + originateTimeNTP = readTimeStamp(&packet[kNTPOriginateTimeOffset]); + receiveTimeNTP = readTimeStamp(&packet[kNTPReceiveTimeOffset]); + transmitTimeNTP = readTimeStamp(&packet[kNTPTransmitTimeOffset]); + + roundTripTimeNTP = + makeNTP(responseTimeUs - requestTimeUs) + - (transmitTimeNTP - receiveTimeNTP); + + clockOffsetNTP = + ((receiveTimeNTP - originateTimeNTP) + + (transmitTimeNTP - responseTimeNTP)) / 2; + + mTimeReferenceNTP = responseTimeNTP + clockOffsetNTP; + mTimeReferenceUs = responseTimeUs; + mRoundTripTimeNTP = roundTripTimeNTP; + + err = OK; + +bail2: + close(s); + s = -1; + +bail: + return err; +} + +int64_t SNTPClient::adjustTimeUs(int64_t timeUs) const { + uint64_t nowNTP = + mTimeReferenceNTP + makeNTP(timeUs - mTimeReferenceUs); + + int64_t nowUs = + (nowNTP >> 32) * 1000000ll + + ((nowNTP & 0xffffffff) * 1000000ll) / (1ll << 32); + + nowUs -= ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll; + + return nowUs; +} + +// static +void SNTPClient::writeTimeStamp(uint8_t *dst, uint64_t ntpTime) { + *dst++ = (ntpTime >> 56) & 0xff; + *dst++ = (ntpTime >> 48) & 0xff; + *dst++ = (ntpTime >> 40) & 0xff; + *dst++ = (ntpTime >> 32) & 0xff; + *dst++ = (ntpTime >> 24) & 0xff; + *dst++ = (ntpTime >> 16) & 0xff; + *dst++ = (ntpTime >> 8) & 0xff; + *dst++ = ntpTime & 0xff; +} + +// static +uint64_t SNTPClient::readTimeStamp(const uint8_t *dst) { + return U64_AT(dst); +} + +// static +uint64_t SNTPClient::getNowNTP() { + struct timeval tv; + gettimeofday(&tv, NULL /* time zone */); + + uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec; + + nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll; + + return makeNTP(nowUs); +} + +// static +uint64_t SNTPClient::makeNTP(uint64_t deltaUs) { + uint64_t hi = deltaUs / 1000000ll; + uint64_t lo = ((1ll << 32) * (deltaUs % 1000000ll)) / 1000000ll; + + return (hi << 32) | lo; +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/SNTPClient.h 
b/media/libstagefright/wifi-display/SNTPClient.h new file mode 100644 index 0000000..967d1fc --- /dev/null +++ b/media/libstagefright/wifi-display/SNTPClient.h @@ -0,0 +1,62 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SNTP_CLIENT_H_ + +#define SNTP_CLIENT_H_ + +#include +#include + +namespace android { + +// Implementation of the SNTP (Simple Network Time Protocol) +struct SNTPClient { + SNTPClient(); + + status_t requestTime(const char *host); + + // given a time obtained from ALooper::GetNowUs() + // return the number of us elapsed since Jan 1 1970 00:00:00 (UTC). + int64_t adjustTimeUs(int64_t timeUs) const; + +private: + enum { + kNTPPort = 123, + kNTPPacketSize = 48, + kNTPModeClient = 3, + kNTPVersion = 3, + kNTPTransmitTimeOffset = 40, + kNTPOriginateTimeOffset = 24, + kNTPReceiveTimeOffset = 32, + }; + + uint64_t mTimeReferenceNTP; + int64_t mTimeReferenceUs; + int64_t mRoundTripTimeNTP; + + static void writeTimeStamp(uint8_t *dst, uint64_t ntpTime); + static uint64_t readTimeStamp(const uint8_t *dst); + + static uint64_t getNowNTP(); + static uint64_t makeNTP(uint64_t deltaUs); + + DISALLOW_EVIL_CONSTRUCTORS(SNTPClient); +}; + +} // namespace android + +#endif // SNTP_CLIENT_H_ diff --git a/media/libstagefright/wifi-display/TimeSeries.cpp b/media/libstagefright/wifi-display/TimeSeries.cpp deleted file mode 100644 index d882d98..0000000 --- a/media/libstagefright/wifi-display/TimeSeries.cpp +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
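A note on the arithmetic in SNTPClient above: an NTP timestamp is a 64-bit fixed-point value, the upper 32 bits counting seconds since Jan 1 1900 and the lower 32 bits a fraction of a second in units of 1/2^32, which is why the code adds (70 * 365 + 17) days of microseconds (the 70 years from 1900 to 1970 contain 17 leap days) when moving between the Unix and NTP epochs. A standalone restatement of the conversions, useful for checking the math in isolation (names are illustrative, not from the patch):

    #include <stdint.h>
    #include <stdio.h>

    // Microseconds between the NTP epoch (1900-01-01) and the Unix epoch
    // (1970-01-01): 70 years, 17 of them leap years.
    static const uint64_t kNTPToUnixEpochUs =
            ((70ull * 365 + 17) * 24) * 60 * 60 * 1000000ull;

    // 32.32 fixed point: upper word is whole seconds, lower word is the
    // fraction in units of 1/2^32 s.
    static uint64_t makeNTP(uint64_t deltaUs) {
        uint64_t hi = deltaUs / 1000000ull;
        uint64_t lo = ((1ull << 32) * (deltaUs % 1000000ull)) / 1000000ull;
        return (hi << 32) | lo;
    }

    static uint64_t ntpToUs(uint64_t ntp) {
        return (ntp >> 32) * 1000000ull
                + ((ntp & 0xffffffff) * 1000000ull) / (1ull << 32);
    }

    int main() {
        uint64_t unixUs = 1357000000000000ull;   // some instant early in 2013
        uint64_t ntp = makeNTP(unixUs + kNTPToUnixEpochUs);
        uint64_t back = ntpToUs(ntp) - kNTPToUnixEpochUs;
        // The fraction is truncated to 1/2^32 s, so the round trip may be
        // off by at most one microsecond.
        printf("unixUs=%llu back=%llu\n",
               (unsigned long long)unixUs, (unsigned long long)back);
        return 0;
    }
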
- */ - -#include "TimeSeries.h" - -#include -#include - -namespace android { - -TimeSeries::TimeSeries() - : mCount(0), - mSum(0.0) { -} - -void TimeSeries::add(double val) { - if (mCount < kHistorySize) { - mValues[mCount++] = val; - mSum += val; - } else { - mSum -= mValues[0]; - memmove(&mValues[0], &mValues[1], (kHistorySize - 1) * sizeof(double)); - mValues[kHistorySize - 1] = val; - mSum += val; - } -} - -double TimeSeries::mean() const { - if (mCount < 1) { - return 0.0; - } - - return mSum / mCount; -} - -double TimeSeries::sdev() const { - if (mCount < 1) { - return 0.0; - } - - double m = mean(); - - double sum = 0.0; - for (size_t i = 0; i < mCount; ++i) { - double tmp = mValues[i] - m; - tmp *= tmp; - - sum += tmp; - } - - return sqrt(sum / mCount); -} - -} // namespace android diff --git a/media/libstagefright/wifi-display/TimeSeries.h b/media/libstagefright/wifi-display/TimeSeries.h deleted file mode 100644 index c818d51..0000000 --- a/media/libstagefright/wifi-display/TimeSeries.h +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef TIME_SERIES_H_ - -#define TIME_SERIES_H_ - -#include - -namespace android { - -struct TimeSeries { - TimeSeries(); - - void add(double val); - - double mean() const; - double sdev() const; - -private: - enum { - kHistorySize = 20 - }; - double mValues[kHistorySize]; - - size_t mCount; - double mSum; -}; - -} // namespace android - -#endif // TIME_SERIES_H_ - diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp new file mode 100644 index 0000000..d0ab60d --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp @@ -0,0 +1,324 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "RTPAssembler" +#include + +#include "RTPAssembler.h" + +#include +#include +#include +#include +#include + +namespace android { + +RTPReceiver::Assembler::Assembler(const sp ¬ify) + : mNotify(notify) { +} + +void RTPReceiver::Assembler::postAccessUnit( + const sp &accessUnit, bool followsDiscontinuity) { + sp notify = mNotify->dup(); + notify->setInt32("what", RTPReceiver::kWhatAccessUnit); + notify->setBuffer("accessUnit", accessUnit); + notify->setInt32("followsDiscontinuity", followsDiscontinuity); + notify->post(); +} + +//////////////////////////////////////////////////////////////////////////////// + +RTPReceiver::TSAssembler::TSAssembler(const sp ¬ify) + : Assembler(notify), + mSawDiscontinuity(false) { +} + +void RTPReceiver::TSAssembler::signalDiscontinuity() { + mSawDiscontinuity = true; +} + +status_t RTPReceiver::TSAssembler::processPacket(const sp &packet) { + postAccessUnit(packet, mSawDiscontinuity); + + if (mSawDiscontinuity) { + mSawDiscontinuity = false; + } + + return OK; +} + +//////////////////////////////////////////////////////////////////////////////// + +RTPReceiver::H264Assembler::H264Assembler(const sp ¬ify) + : Assembler(notify), + mState(0), + mIndicator(0), + mNALType(0), + mAccessUnitRTPTime(0) { +} + +void RTPReceiver::H264Assembler::signalDiscontinuity() { + reset(); +} + +status_t RTPReceiver::H264Assembler::processPacket(const sp &packet) { + status_t err = internalProcessPacket(packet); + + if (err != OK) { + reset(); + } + + return err; +} + +status_t RTPReceiver::H264Assembler::internalProcessPacket( + const sp &packet) { + const uint8_t *data = packet->data(); + size_t size = packet->size(); + + switch (mState) { + case 0: + { + if (size < 1 || (data[0] & 0x80)) { + ALOGV("Malformed H264 RTP packet (empty or F-bit set)"); + return ERROR_MALFORMED; + } + + unsigned nalType = data[0] & 0x1f; + if (nalType >= 1 && nalType <= 23) { + addSingleNALUnit(packet); + ALOGV("added single NAL packet"); + } else if (nalType == 28) { + // FU-A + unsigned indicator = data[0]; + CHECK((indicator & 0x1f) == 28); + + if (size < 2) { + ALOGV("Malformed H264 FU-A packet (single byte)"); + return ERROR_MALFORMED; + } + + if (!(data[1] & 0x80)) { + ALOGV("Malformed H264 FU-A packet (no start bit)"); + return ERROR_MALFORMED; + } + + mIndicator = data[0]; + mNALType = data[1] & 0x1f; + uint32_t nri = (data[0] >> 5) & 3; + + clearAccumulator(); + + uint8_t byte = mNALType | (nri << 5); + appendToAccumulator(&byte, 1); + appendToAccumulator(data + 2, size - 2); + + int32_t rtpTime; + CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); + mAccumulator->meta()->setInt32("rtp-time", rtpTime); + + if (data[1] & 0x40) { + // Huh? End bit also set on the first buffer. 
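The two prefix bytes tested in this branch follow the FU-A layout from RFC 3984: the FU indicator carries the F and NRI bits plus type 28, and the FU header carries the start (0x80) and end (0x40) flags plus the original NAL unit type. A fragment with both flags set is a complete NAL unit, which is why the code right after this finishes the unit immediately. A small helper making those bit tests explicit (illustrative only, not part of the patch):

    #include <stdint.h>

    struct FUAInfo {
        bool start, end;
        uint8_t reconstructedNALHeader;  // byte the assembler prepends
    };

    // Parses the FU indicator and FU header bytes of an FU-A payload.
    static FUAInfo parseFUAPrefix(uint8_t indicator, uint8_t fuHeader) {
        FUAInfo info;
        info.start = (fuHeader & 0x80) != 0;
        info.end = (fuHeader & 0x40) != 0;
        uint8_t nri = (indicator >> 5) & 3;
        info.reconstructedNALHeader = (uint8_t)((nri << 5) | (fuHeader & 0x1f));
        return info;
    }
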
+ addSingleNALUnit(mAccumulator); + clearAccumulator(); + + ALOGV("added FU-A"); + break; + } + + mState = 1; + } else if (nalType == 24) { + // STAP-A + + status_t err = addSingleTimeAggregationPacket(packet); + if (err != OK) { + return err; + } + } else { + ALOGV("Malformed H264 packet (unknown type %d)", nalType); + return ERROR_UNSUPPORTED; + } + break; + } + + case 1: + { + if (size < 2 + || data[0] != mIndicator + || (data[1] & 0x1f) != mNALType + || (data[1] & 0x80)) { + ALOGV("Malformed H264 FU-A packet (indicator, " + "type or start bit mismatch)"); + + return ERROR_MALFORMED; + } + + appendToAccumulator(data + 2, size - 2); + + if (data[1] & 0x40) { + addSingleNALUnit(mAccumulator); + + clearAccumulator(); + mState = 0; + + ALOGV("added FU-A"); + } + break; + } + + default: + TRESPASS(); + } + + int32_t marker; + CHECK(packet->meta()->findInt32("M", &marker)); + + if (marker) { + flushAccessUnit(); + } + + return OK; +} + +void RTPReceiver::H264Assembler::reset() { + mNALUnits.clear(); + + clearAccumulator(); + mState = 0; +} + +void RTPReceiver::H264Assembler::clearAccumulator() { + if (mAccumulator != NULL) { + // XXX Too expensive. + mAccumulator.clear(); + } +} + +void RTPReceiver::H264Assembler::appendToAccumulator( + const void *data, size_t size) { + if (mAccumulator == NULL) { + mAccumulator = new ABuffer(size); + memcpy(mAccumulator->data(), data, size); + return; + } + + if (mAccumulator->size() + size > mAccumulator->capacity()) { + sp buf = new ABuffer(mAccumulator->size() + size); + memcpy(buf->data(), mAccumulator->data(), mAccumulator->size()); + buf->setRange(0, mAccumulator->size()); + + int32_t rtpTime; + if (mAccumulator->meta()->findInt32("rtp-time", &rtpTime)) { + buf->meta()->setInt32("rtp-time", rtpTime); + } + + mAccumulator = buf; + } + + memcpy(mAccumulator->data() + mAccumulator->size(), data, size); + mAccumulator->setRange(0, mAccumulator->size() + size); +} + +void RTPReceiver::H264Assembler::addSingleNALUnit(const sp &packet) { + if (mNALUnits.empty()) { + int32_t rtpTime; + CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); + + mAccessUnitRTPTime = rtpTime; + } + + mNALUnits.push_back(packet); +} + +void RTPReceiver::H264Assembler::flushAccessUnit() { + if (mNALUnits.empty()) { + return; + } + + size_t totalSize = 0; + for (List >::iterator it = mNALUnits.begin(); + it != mNALUnits.end(); ++it) { + totalSize += 4 + (*it)->size(); + } + + sp accessUnit = new ABuffer(totalSize); + size_t offset = 0; + for (List >::iterator it = mNALUnits.begin(); + it != mNALUnits.end(); ++it) { + const sp nalUnit = *it; + + memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4); + + memcpy(accessUnit->data() + offset + 4, + nalUnit->data(), + nalUnit->size()); + + offset += 4 + nalUnit->size(); + } + + mNALUnits.clear(); + + accessUnit->meta()->setInt64("timeUs", mAccessUnitRTPTime * 100ll / 9ll); + postAccessUnit(accessUnit, false /* followsDiscontinuity */); +} + +status_t RTPReceiver::H264Assembler::addSingleTimeAggregationPacket( + const sp &packet) { + const uint8_t *data = packet->data(); + size_t size = packet->size(); + + if (size < 3) { + ALOGV("Malformed H264 STAP-A packet (too small)"); + return ERROR_MALFORMED; + } + + int32_t rtpTime; + CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); + + ++data; + --size; + while (size >= 2) { + size_t nalSize = (data[0] << 8) | data[1]; + + if (size < nalSize + 2) { + ALOGV("Malformed H264 STAP-A packet (incomplete NAL unit)"); + return ERROR_MALFORMED; + } + + sp unit = new ABuffer(nalSize); + 
memcpy(unit->data(), &data[2], nalSize); + + unit->meta()->setInt32("rtp-time", rtpTime); + + addSingleNALUnit(unit); + + data += 2 + nalSize; + size -= 2 + nalSize; + } + + if (size != 0) { + ALOGV("Unexpected padding at end of STAP-A packet."); + } + + ALOGV("added STAP-A"); + + return OK; +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.h b/media/libstagefright/wifi-display/rtp/RTPAssembler.h new file mode 100644 index 0000000..e456d32 --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPAssembler.h @@ -0,0 +1,92 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef RTP_ASSEMBLER_H_ + +#define RTP_ASSEMBLER_H_ + +#include "RTPReceiver.h" + +namespace android { + +// A helper class to reassemble the payload of RTP packets into access +// units depending on the packetization scheme. +struct RTPReceiver::Assembler : public RefBase { + Assembler(const sp ¬ify); + + virtual void signalDiscontinuity() = 0; + virtual status_t processPacket(const sp &packet) = 0; + +protected: + virtual ~Assembler() {} + + void postAccessUnit( + const sp &accessUnit, bool followsDiscontinuity); + +private: + sp mNotify; + + DISALLOW_EVIL_CONSTRUCTORS(Assembler); +}; + +struct RTPReceiver::TSAssembler : public RTPReceiver::Assembler { + TSAssembler(const sp ¬ify); + + virtual void signalDiscontinuity(); + virtual status_t processPacket(const sp &packet); + +private: + bool mSawDiscontinuity; + + DISALLOW_EVIL_CONSTRUCTORS(TSAssembler); +}; + +struct RTPReceiver::H264Assembler : public RTPReceiver::Assembler { + H264Assembler(const sp ¬ify); + + virtual void signalDiscontinuity(); + virtual status_t processPacket(const sp &packet); + +private: + int32_t mState; + + uint8_t mIndicator; + uint8_t mNALType; + + sp mAccumulator; + + List > mNALUnits; + int32_t mAccessUnitRTPTime; + + status_t internalProcessPacket(const sp &packet); + + void addSingleNALUnit(const sp &packet); + status_t addSingleTimeAggregationPacket(const sp &packet); + + void flushAccessUnit(); + + void clearAccumulator(); + void appendToAccumulator(const void *data, size_t size); + + void reset(); + + DISALLOW_EVIL_CONSTRUCTORS(H264Assembler); +}; + +} // namespace android + +#endif // RTP_ASSEMBLER_H_ + diff --git a/media/libstagefright/wifi-display/rtp/RTPBase.h b/media/libstagefright/wifi-display/rtp/RTPBase.h new file mode 100644 index 0000000..6507a6f --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPBase.h @@ -0,0 +1,49 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
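A note on the timestamps produced by the assemblers above: RTP timestamps for these payloads run on a 90 kHz media clock, so one tick is 100/9 microseconds, which is where the timeUs = rtpTime * 100 / 9 expression comes from. Restated as standalone helpers (names are illustrative):

    #include <stdint.h>

    static int64_t rtpTicksToUs(uint32_t rtpTime) {
        return (int64_t)rtpTime * 100ll / 9ll;    // 90 kHz ticks -> microseconds
    }

    static uint32_t usToRtpTicks(int64_t timeUs) {
        return (uint32_t)(timeUs * 9ll / 100ll);  // microseconds -> 90 kHz ticks
    }
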
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef RTP_BASE_H_ + +#define RTP_BASE_H_ + +namespace android { + +struct RTPBase { + enum PacketizationMode { + PACKETIZATION_TRANSPORT_STREAM, + PACKETIZATION_H264, + PACKETIZATION_AAC, + }; + + enum TransportMode { + TRANSPORT_UNDEFINED, + TRANSPORT_UDP, + TRANSPORT_TCP, + TRANSPORT_TCP_INTERLEAVED, + }; + + enum { + // Really UDP _payload_ size + kMaxUDPPacketSize = 1472, // 1472 good, 1473 bad on Android@Home + }; + + static int32_t PickRandomRTPPort(); +}; + +} // namespace android + +#endif // RTP_BASE_H_ + + diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp new file mode 100644 index 0000000..29482af --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp @@ -0,0 +1,899 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "RTPReceiver" +#include + +#include "RTPAssembler.h" +#include "RTPReceiver.h" + +#include "ANetworkSession.h" + +#include +#include +#include +#include +#include +#include + +namespace android { + +//////////////////////////////////////////////////////////////////////////////// + +struct RTPReceiver::Source : public RefBase { + Source(RTPReceiver *receiver, uint32_t ssrc); + + void onPacketReceived(uint16_t seq, const sp &buffer); + + void addReportBlock(uint32_t ssrc, const sp &buf); + +protected: + virtual ~Source(); + +private: + static const uint32_t kMinSequential = 2; + static const uint32_t kMaxDropout = 3000; + static const uint32_t kMaxMisorder = 100; + static const uint32_t kRTPSeqMod = 1u << 16; + static const int64_t kReportIntervalUs = 10000000ll; + + RTPReceiver *mReceiver; + uint32_t mSSRC; + bool mFirst; + uint16_t mMaxSeq; + uint32_t mCycles; + uint32_t mBaseSeq; + uint32_t mReceived; + uint32_t mExpectedPrior; + uint32_t mReceivedPrior; + + int64_t mFirstArrivalTimeUs; + int64_t mFirstRTPTimeUs; + + // Ordered by extended seq number. 
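The packet list declared next is kept ordered by "extended" sequence numbers: the 16-bit RTP sequence number widened by a count of how many times it has wrapped, as maintained further down in onPacketReceived (which mirrors the usual RFC 3550 bookkeeping). A condensed sketch of the in-order case only; the dropout and misorder branches are omitted and the type name is illustrative:

    #include <stdint.h>

    static const uint32_t kRTPSeqMod = 1u << 16;

    struct SeqTracker {
        uint32_t cycles;   // multiples of 65536, bumped on wrap-around
        uint16_t maxSeq;   // highest sequence number seen so far

        // Returns the extended sequence number for an in-order packet.
        uint32_t extend(uint16_t seq) {
            uint16_t udelta = (uint16_t)(seq - maxSeq);
            if (udelta < 3000 /* kMaxDropout */) {
                if (seq < maxSeq) {
                    cycles += kRTPSeqMod;   // sequence number wrapped
                }
                maxSeq = seq;
            }
            return cycles | seq;
        }
    };
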
+ List > mPackets; + + int32_t mAwaitingExtSeqNo; + bool mRequestedRetransmission; + + int32_t mActivePacketType; + sp mActiveAssembler; + + int64_t mNextReportTimeUs; + + int32_t mNumDeclaredLost; + int32_t mNumDeclaredLostPrior; + + void queuePacket(const sp &packet); + void dequeueMore(); + + sp getNextPacket(); + void resync(); + + DISALLOW_EVIL_CONSTRUCTORS(Source); +}; + +//////////////////////////////////////////////////////////////////////////////// + +RTPReceiver::Source::Source(RTPReceiver *receiver, uint32_t ssrc) + : mReceiver(receiver), + mSSRC(ssrc), + mFirst(true), + mMaxSeq(0), + mCycles(0), + mBaseSeq(0), + mReceived(0), + mExpectedPrior(0), + mReceivedPrior(0), + mFirstArrivalTimeUs(-1ll), + mFirstRTPTimeUs(-1ll), + mAwaitingExtSeqNo(-1), + mRequestedRetransmission(false), + mActivePacketType(-1), + mNextReportTimeUs(-1ll), + mNumDeclaredLost(0), + mNumDeclaredLostPrior(0) { +} + +RTPReceiver::Source::~Source() { +} + +void RTPReceiver::Source::onPacketReceived( + uint16_t seq, const sp &buffer) { + if (mFirst) { + buffer->setInt32Data(mCycles | seq); + queuePacket(buffer); + + mFirst = false; + mBaseSeq = seq; + mMaxSeq = seq; + ++mReceived; + return; + } + + uint16_t udelta = seq - mMaxSeq; + + if (udelta < kMaxDropout) { + // In order, with permissible gap. + + if (seq < mMaxSeq) { + // Sequence number wrapped - count another 64K cycle + mCycles += kRTPSeqMod; + } + + mMaxSeq = seq; + + ++mReceived; + } else if (udelta <= kRTPSeqMod - kMaxMisorder) { + // The sequence number made a very large jump + return; + } else { + // Duplicate or reordered packet. + } + + buffer->setInt32Data(mCycles | seq); + queuePacket(buffer); +} + +void RTPReceiver::Source::queuePacket(const sp &packet) { + int32_t newExtendedSeqNo = packet->int32Data(); + + if (mFirstArrivalTimeUs < 0ll) { + mFirstArrivalTimeUs = ALooper::GetNowUs(); + + uint32_t rtpTime; + CHECK(packet->meta()->findInt32("rtp-time", (int32_t *)&rtpTime)); + + mFirstRTPTimeUs = (rtpTime * 100ll) / 9ll; + } + + if (mAwaitingExtSeqNo >= 0 && newExtendedSeqNo < mAwaitingExtSeqNo) { + // We're no longer interested in these. They're old. + ALOGV("dropping stale extSeqNo %d", newExtendedSeqNo); + return; + } + + if (mPackets.empty()) { + mPackets.push_back(packet); + dequeueMore(); + return; + } + + List >::iterator firstIt = mPackets.begin(); + List >::iterator it = --mPackets.end(); + for (;;) { + int32_t extendedSeqNo = (*it)->int32Data(); + + if (extendedSeqNo == newExtendedSeqNo) { + // Duplicate packet. + return; + } + + if (extendedSeqNo < newExtendedSeqNo) { + // Insert new packet after the one at "it". + mPackets.insert(++it, packet); + break; + } + + if (it == firstIt) { + // Insert new packet before the first existing one. 
+ mPackets.insert(it, packet); + break; + } + + --it; + } + + dequeueMore(); +} + +void RTPReceiver::Source::dequeueMore() { + int64_t nowUs = ALooper::GetNowUs(); + if (mNextReportTimeUs < 0ll || nowUs >= mNextReportTimeUs) { + if (mNextReportTimeUs >= 0ll) { + uint32_t expected = (mMaxSeq | mCycles) - mBaseSeq + 1; + + uint32_t expectedInterval = expected - mExpectedPrior; + mExpectedPrior = expected; + + uint32_t receivedInterval = mReceived - mReceivedPrior; + mReceivedPrior = mReceived; + + int64_t lostInterval = + (int64_t)expectedInterval - (int64_t)receivedInterval; + + int32_t declaredLostInterval = + mNumDeclaredLost - mNumDeclaredLostPrior; + + mNumDeclaredLostPrior = mNumDeclaredLost; + + ALOGI("lost %lld packets (%.2f %%), declared %d lost\n", + lostInterval, + 100.0f * lostInterval / expectedInterval, + declaredLostInterval); + } + + mNextReportTimeUs = nowUs + kReportIntervalUs; + } + + for (;;) { + sp packet = getNextPacket(); + + if (packet == NULL) { + if (mPackets.empty()) { + break; + } + + CHECK_GE(mAwaitingExtSeqNo, 0); + + const sp &firstPacket = *mPackets.begin(); + + uint32_t rtpTime; + CHECK(firstPacket->meta()->findInt32( + "rtp-time", (int32_t *)&rtpTime)); + + + int64_t rtpUs = (rtpTime * 100ll) / 9ll; + + int64_t maxArrivalTimeUs = + mFirstArrivalTimeUs + rtpUs - mFirstRTPTimeUs; + + int64_t nowUs = ALooper::GetNowUs(); + + CHECK_LT(mAwaitingExtSeqNo, firstPacket->int32Data()); + + ALOGV("waiting for %d, comparing against %d, %lld us left", + mAwaitingExtSeqNo, + firstPacket->int32Data(), + maxArrivalTimeUs - nowUs); + + if (maxArrivalTimeUs + kPacketLostAfterUs <= nowUs) { + ALOGV("Lost packet extSeqNo %d %s", + mAwaitingExtSeqNo, + mRequestedRetransmission ? "*" : ""); + + mRequestedRetransmission = false; + if (mActiveAssembler != NULL) { + mActiveAssembler->signalDiscontinuity(); + } + + // resync(); + ++mAwaitingExtSeqNo; + ++mNumDeclaredLost; + + mReceiver->notifyPacketLost(); + continue; + } else if (kRequestRetransmissionAfterUs > 0 + && maxArrivalTimeUs + kRequestRetransmissionAfterUs <= nowUs + && !mRequestedRetransmission + && mAwaitingExtSeqNo >= 0) { + mRequestedRetransmission = true; + mReceiver->requestRetransmission(mSSRC, mAwaitingExtSeqNo); + break; + } else { + break; + } + } + + mRequestedRetransmission = false; + + int32_t packetType; + CHECK(packet->meta()->findInt32("PT", &packetType)); + + if (packetType != mActivePacketType) { + mActiveAssembler = mReceiver->makeAssembler(packetType); + mActivePacketType = packetType; + } + + if (mActiveAssembler == NULL) { + continue; + } + + status_t err = mActiveAssembler->processPacket(packet); + if (err != OK) { + ALOGV("assembler returned error %d", err); + } + } +} + +sp RTPReceiver::Source::getNextPacket() { + if (mPackets.empty()) { + return NULL; + } + + int32_t extSeqNo = (*mPackets.begin())->int32Data(); + + if (mAwaitingExtSeqNo < 0) { + mAwaitingExtSeqNo = extSeqNo; + } else if (extSeqNo != mAwaitingExtSeqNo) { + return NULL; + } + + sp packet = *mPackets.begin(); + mPackets.erase(mPackets.begin()); + + ++mAwaitingExtSeqNo; + + return packet; +} + +void RTPReceiver::Source::resync() { + mAwaitingExtSeqNo = -1; +} + +void RTPReceiver::Source::addReportBlock( + uint32_t ssrc, const sp &buf) { + uint32_t extMaxSeq = mMaxSeq | mCycles; + uint32_t expected = extMaxSeq - mBaseSeq + 1; + + int64_t lost = (int64_t)expected - (int64_t)mReceived; + if (lost > 0x7fffff) { + lost = 0x7fffff; + } else if (lost < -0x800000) { + lost = -0x800000; + } + + uint32_t expectedInterval = expected - 
mExpectedPrior; + mExpectedPrior = expected; + + uint32_t receivedInterval = mReceived - mReceivedPrior; + mReceivedPrior = mReceived; + + int64_t lostInterval = expectedInterval - receivedInterval; + + uint8_t fractionLost; + if (expectedInterval == 0 || lostInterval <=0) { + fractionLost = 0; + } else { + fractionLost = (lostInterval << 8) / expectedInterval; + } + + uint8_t *ptr = buf->data() + buf->size(); + + ptr[0] = ssrc >> 24; + ptr[1] = (ssrc >> 16) & 0xff; + ptr[2] = (ssrc >> 8) & 0xff; + ptr[3] = ssrc & 0xff; + + ptr[4] = fractionLost; + + ptr[5] = (lost >> 16) & 0xff; + ptr[6] = (lost >> 8) & 0xff; + ptr[7] = lost & 0xff; + + ptr[8] = extMaxSeq >> 24; + ptr[9] = (extMaxSeq >> 16) & 0xff; + ptr[10] = (extMaxSeq >> 8) & 0xff; + ptr[11] = extMaxSeq & 0xff; + + // XXX TODO: + + ptr[12] = 0x00; // interarrival jitter + ptr[13] = 0x00; + ptr[14] = 0x00; + ptr[15] = 0x00; + + ptr[16] = 0x00; // last SR + ptr[17] = 0x00; + ptr[18] = 0x00; + ptr[19] = 0x00; + + ptr[20] = 0x00; // delay since last SR + ptr[21] = 0x00; + ptr[22] = 0x00; + ptr[23] = 0x00; +} + +//////////////////////////////////////////////////////////////////////////////// + +RTPReceiver::RTPReceiver( + const sp &netSession, + const sp ¬ify) + : mNetSession(netSession), + mNotify(notify), + mMode(TRANSPORT_UNDEFINED), + mRTPSessionID(0), + mRTCPSessionID(0), + mRTPClientSessionID(0) { +} + +RTPReceiver::~RTPReceiver() { + if (mRTPClientSessionID != 0) { + mNetSession->destroySession(mRTPClientSessionID); + mRTPClientSessionID = 0; + } + + if (mRTCPSessionID != 0) { + mNetSession->destroySession(mRTCPSessionID); + mRTCPSessionID = 0; + } + + if (mRTPSessionID != 0) { + mNetSession->destroySession(mRTPSessionID); + mRTPSessionID = 0; + } +} + +status_t RTPReceiver::initAsync(TransportMode mode, int32_t *outLocalRTPPort) { + if (mMode != TRANSPORT_UNDEFINED || mode == TRANSPORT_UNDEFINED) { + return INVALID_OPERATION; + } + + CHECK_NE(mMode, TRANSPORT_TCP_INTERLEAVED); + + sp rtpNotify = new AMessage(kWhatRTPNotify, id()); + + sp rtcpNotify; + if (mode == TRANSPORT_UDP) { + rtcpNotify = new AMessage(kWhatRTCPNotify, id()); + } + + CHECK_EQ(mRTPSessionID, 0); + CHECK_EQ(mRTCPSessionID, 0); + + int32_t localRTPPort; + + struct in_addr ifaceAddr; + ifaceAddr.s_addr = INADDR_ANY; + + for (;;) { + localRTPPort = PickRandomRTPPort(); + + status_t err; + if (mode == TRANSPORT_UDP) { + err = mNetSession->createUDPSession( + localRTPPort, + rtpNotify, + &mRTPSessionID); + } else { + CHECK_EQ(mode, TRANSPORT_TCP); + err = mNetSession->createTCPDatagramSession( + ifaceAddr, + localRTPPort, + rtpNotify, + &mRTPSessionID); + } + + if (err != OK) { + continue; + } + + if (mode == TRANSPORT_TCP) { + break; + } + + err = mNetSession->createUDPSession( + localRTPPort + 1, + rtcpNotify, + &mRTCPSessionID); + + if (err == OK) { + break; + } + + mNetSession->destroySession(mRTPSessionID); + mRTPSessionID = 0; + } + + mMode = mode; + *outLocalRTPPort = localRTPPort; + + return OK; +} + +status_t RTPReceiver::connect( + const char *remoteHost, int32_t remoteRTPPort, int32_t remoteRTCPPort) { + if (mMode == TRANSPORT_TCP) { + return OK; + } + + status_t err = mNetSession->connectUDPSession( + mRTPSessionID, remoteHost, remoteRTPPort); + + if (err != OK) { + notifyInitDone(err); + return err; + } + + ALOGI("connectUDPSession RTP successful."); + + if (remoteRTCPPort >= 0) { + err = mNetSession->connectUDPSession( + mRTCPSessionID, remoteHost, remoteRTCPPort); + + if (err != OK) { + ALOGI("connect failed w/ err %d", err); + + notifyInitDone(err); + 
return err; + } + + scheduleSendRR(); + } + + notifyInitDone(OK); + + return OK; +} + +void RTPReceiver::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatRTPNotify: + case kWhatRTCPNotify: + onNetNotify(msg->what() == kWhatRTPNotify, msg); + break; + + case kWhatSendRR: + { + onSendRR(); + break; + } + + default: + TRESPASS(); + } +} + +void RTPReceiver::onNetNotify(bool isRTP, const sp &msg) { + int32_t reason; + CHECK(msg->findInt32("reason", &reason)); + + switch (reason) { + case ANetworkSession::kWhatError: + { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + int32_t err; + CHECK(msg->findInt32("err", &err)); + + int32_t errorOccuredDuringSend; + CHECK(msg->findInt32("send", &errorOccuredDuringSend)); + + AString detail; + CHECK(msg->findString("detail", &detail)); + + ALOGE("An error occurred during %s in session %d " + "(%d, '%s' (%s)).", + errorOccuredDuringSend ? "send" : "receive", + sessionID, + err, + detail.c_str(), + strerror(-err)); + + mNetSession->destroySession(sessionID); + + if (sessionID == mRTPSessionID) { + mRTPSessionID = 0; + + if (mMode == TRANSPORT_TCP && mRTPClientSessionID == 0) { + notifyInitDone(err); + break; + } + } else if (sessionID == mRTCPSessionID) { + mRTCPSessionID = 0; + } else if (sessionID == mRTPClientSessionID) { + mRTPClientSessionID = 0; + } + + notifyError(err); + break; + } + + case ANetworkSession::kWhatDatagram: + { + sp data; + CHECK(msg->findBuffer("data", &data)); + + if (isRTP) { + onRTPData(data); + } else { + onRTCPData(data); + } + break; + } + + case ANetworkSession::kWhatClientConnected: + { + CHECK_EQ(mMode, TRANSPORT_TCP); + CHECK(isRTP); + + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + if (mRTPClientSessionID != 0) { + // We only allow a single client connection. + mNetSession->destroySession(sessionID); + sessionID = 0; + break; + } + + mRTPClientSessionID = sessionID; + + notifyInitDone(OK); + break; + } + } +} + +void RTPReceiver::notifyInitDone(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatInitDone); + notify->setInt32("err", err); + notify->post(); +} + +void RTPReceiver::notifyError(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); +} + +void RTPReceiver::notifyPacketLost() { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatPacketLost); + notify->post(); +} + +status_t RTPReceiver::onRTPData(const sp &buffer) { + size_t size = buffer->size(); + if (size < 12) { + // Too short to be a valid RTP header. + return ERROR_MALFORMED; + } + + const uint8_t *data = buffer->data(); + + if ((data[0] >> 6) != 2) { + // Unsupported version. + return ERROR_UNSUPPORTED; + } + + if (data[0] & 0x20) { + // Padding present. + + size_t paddingLength = data[size - 1]; + + if (paddingLength + 12 > size) { + // If we removed this much padding we'd end up with something + // that's too short to be a valid RTP header. + return ERROR_MALFORMED; + } + + size -= paddingLength; + } + + int numCSRCs = data[0] & 0x0f; + + size_t payloadOffset = 12 + 4 * numCSRCs; + + if (size < payloadOffset) { + // Not enough data to fit the basic header and all the CSRC entries. + return ERROR_MALFORMED; + } + + if (data[0] & 0x10) { + // Header eXtension present. + + if (size < payloadOffset + 4) { + // Not enough data to fit the basic header, all CSRC entries + // and the first 4 bytes of the extension header. 
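The header arithmetic used throughout onRTPData, restated as one standalone helper (the name and the 0-as-error convention are illustrative): the fixed RTP header is 12 bytes, each CSRC entry adds 4 bytes, and a header extension adds a 4-byte extension header plus its length counted in 32-bit words.

    #include <stddef.h>
    #include <stdint.h>

    // Returns the payload offset, or 0 if the header does not fit in "size".
    static size_t rtpPayloadOffset(const uint8_t *data, size_t size) {
        if (size < 12) {
            return 0;
        }
        size_t offset = 12 + 4 * (data[0] & 0x0f);   // 12 + 4 * CC
        if (offset > size) {
            return 0;
        }
        if (data[0] & 0x10) {                        // X bit: header extension
            if (size < offset + 4) {
                return 0;
            }
            const uint8_t *ext = &data[offset];
            size_t extWords = (ext[2] << 8) | ext[3];
            offset += 4 + 4 * extWords;
            if (offset > size) {
                return 0;
            }
        }
        return offset;
    }
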
+ + return ERROR_MALFORMED; + } + + const uint8_t *extensionData = &data[payloadOffset]; + + size_t extensionLength = + 4 * (extensionData[2] << 8 | extensionData[3]); + + if (size < payloadOffset + 4 + extensionLength) { + return ERROR_MALFORMED; + } + + payloadOffset += 4 + extensionLength; + } + + uint32_t srcId = U32_AT(&data[8]); + uint32_t rtpTime = U32_AT(&data[4]); + uint16_t seqNo = U16_AT(&data[2]); + + sp meta = buffer->meta(); + meta->setInt32("ssrc", srcId); + meta->setInt32("rtp-time", rtpTime); + meta->setInt32("PT", data[1] & 0x7f); + meta->setInt32("M", data[1] >> 7); + + buffer->setRange(payloadOffset, size - payloadOffset); + + ssize_t index = mSources.indexOfKey(srcId); + sp source; + if (index < 0) { + source = new Source(this, srcId); + mSources.add(srcId, source); + } else { + source = mSources.valueAt(index); + } + + source->onPacketReceived(seqNo, buffer); + + return OK; +} + +status_t RTPReceiver::onRTCPData(const sp &data) { + ALOGI("onRTCPData"); + return OK; +} + +void RTPReceiver::addSDES(const sp &buffer) { + uint8_t *data = buffer->data() + buffer->size(); + data[0] = 0x80 | 1; + data[1] = 202; // SDES + data[4] = kSourceID >> 24; // SSRC + data[5] = (kSourceID >> 16) & 0xff; + data[6] = (kSourceID >> 8) & 0xff; + data[7] = kSourceID & 0xff; + + size_t offset = 8; + + data[offset++] = 1; // CNAME + + AString cname = "stagefright@somewhere"; + data[offset++] = cname.size(); + + memcpy(&data[offset], cname.c_str(), cname.size()); + offset += cname.size(); + + data[offset++] = 6; // TOOL + + AString tool = "stagefright/1.0"; + data[offset++] = tool.size(); + + memcpy(&data[offset], tool.c_str(), tool.size()); + offset += tool.size(); + + data[offset++] = 0; + + if ((offset % 4) > 0) { + size_t count = 4 - (offset % 4); + switch (count) { + case 3: + data[offset++] = 0; + case 2: + data[offset++] = 0; + case 1: + data[offset++] = 0; + } + } + + size_t numWords = (offset / 4) - 1; + data[2] = numWords >> 8; + data[3] = numWords & 0xff; + + buffer->setRange(buffer->offset(), buffer->size() + offset); +} + +void RTPReceiver::scheduleSendRR() { + (new AMessage(kWhatSendRR, id()))->post(5000000ll); +} + +void RTPReceiver::onSendRR() { +#if 0 + sp buf = new ABuffer(kMaxUDPPacketSize); + buf->setRange(0, 0); + + uint8_t *ptr = buf->data(); + ptr[0] = 0x80 | 0; + ptr[1] = 201; // RR + ptr[2] = 0; + ptr[3] = 1; + ptr[4] = kSourceID >> 24; // SSRC + ptr[5] = (kSourceID >> 16) & 0xff; + ptr[6] = (kSourceID >> 8) & 0xff; + ptr[7] = kSourceID & 0xff; + + buf->setRange(0, 8); + + size_t numReportBlocks = 0; + for (size_t i = 0; i < mSources.size(); ++i) { + uint32_t ssrc = mSources.keyAt(i); + sp source = mSources.valueAt(i); + + if (numReportBlocks > 31 || buf->size() + 24 > buf->capacity()) { + // Cannot fit another report block. 
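The size checks in this (currently #if 0'd) report path come down to simple arithmetic: a receiver report is an 8-byte header plus 24 bytes (six 32-bit words) per report block, the 5-bit RC field caps the count at 31, and the RTCP length field counts 32-bit words minus one. Restated (helper names are illustrative):

    #include <stddef.h>
    #include <stdint.h>

    static size_t rrPacketSize(size_t numReportBlocks) {
        return 8 + 24 * numReportBlocks;
    }

    static uint16_t rtcpLengthField(size_t packetSizeBytes) {
        return (uint16_t)(packetSizeBytes / 4 - 1);
    }
    // e.g. 2 report blocks: 8 + 48 = 56 bytes, length field = 56/4 - 1 = 13,
    // matching sizeInWordsMinus1 = 1 + 6 * 2 = 13 in onSendRR.
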
+ break; + } + + source->addReportBlock(ssrc, buf); + ++numReportBlocks; + } + + ptr[0] |= numReportBlocks; // 5 bit + + size_t sizeInWordsMinus1 = 1 + 6 * numReportBlocks; + ptr[2] = sizeInWordsMinus1 >> 8; + ptr[3] = sizeInWordsMinus1 & 0xff; + + buf->setRange(0, (sizeInWordsMinus1 + 1) * 4); + + addSDES(buf); + + mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); +#endif + + scheduleSendRR(); +} + +status_t RTPReceiver::registerPacketType( + uint8_t packetType, PacketizationMode mode) { + mPacketTypes.add(packetType, mode); + + return OK; +} + +sp RTPReceiver::makeAssembler(uint8_t packetType) { + ssize_t index = mPacketTypes.indexOfKey(packetType); + if (index < 0) { + return NULL; + } + + PacketizationMode mode = mPacketTypes.valueAt(index); + + switch (mode) { + case PACKETIZATION_TRANSPORT_STREAM: + return new TSAssembler(mNotify); + + case PACKETIZATION_H264: + return new H264Assembler(mNotify); + + default: + return NULL; + } +} + +void RTPReceiver::requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo) { + int32_t blp = 0; + + sp buf = new ABuffer(16); + buf->setRange(0, 0); + + uint8_t *ptr = buf->data(); + ptr[0] = 0x80 | 1; // generic NACK + ptr[1] = 205; // TSFB + ptr[2] = 0; + ptr[3] = 3; + ptr[8] = (senderSSRC >> 24) & 0xff; + ptr[9] = (senderSSRC >> 16) & 0xff; + ptr[10] = (senderSSRC >> 8) & 0xff; + ptr[11] = (senderSSRC & 0xff); + ptr[8] = (kSourceID >> 24) & 0xff; + ptr[9] = (kSourceID >> 16) & 0xff; + ptr[10] = (kSourceID >> 8) & 0xff; + ptr[11] = (kSourceID & 0xff); + ptr[12] = (extSeqNo >> 8) & 0xff; + ptr[13] = (extSeqNo & 0xff); + ptr[14] = (blp >> 8) & 0xff; + ptr[15] = (blp & 0xff); + + buf->setRange(0, 16); + + mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.h b/media/libstagefright/wifi-display/rtp/RTPReceiver.h new file mode 100644 index 0000000..2ae864a --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.h @@ -0,0 +1,110 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef RTP_RECEIVER_H_ + +#define RTP_RECEIVER_H_ + +#include "RTPBase.h" + +#include + +namespace android { + +struct ABuffer; +struct ANetworkSession; + +// An object of this class facilitates receiving of media data on an RTP +// channel. The channel is established over a UDP or TCP connection depending +// on which "TransportMode" was chosen. 
In addition different RTP packetization +// schemes are supported such as "Transport Stream Packets over RTP", +// or "AVC/H.264 encapsulation as specified in RFC 3984 (non-interleaved mode)" +struct RTPReceiver : public RTPBase, public AHandler { + enum { + kWhatInitDone, + kWhatError, + kWhatAccessUnit, + kWhatPacketLost, + }; + RTPReceiver( + const sp &netSession, + const sp ¬ify); + + status_t registerPacketType( + uint8_t packetType, PacketizationMode mode); + + status_t initAsync(TransportMode mode, int32_t *outLocalRTPPort); + + status_t connect( + const char *remoteHost, + int32_t remoteRTPPort, + int32_t remoteRTCPPort); + +protected: + virtual ~RTPReceiver(); + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatRTPNotify, + kWhatRTCPNotify, + kWhatSendRR, + }; + + enum { + kSourceID = 0xdeadbeef, + kPacketLostAfterUs = 100000, + kRequestRetransmissionAfterUs = -1, + }; + + struct Assembler; + struct H264Assembler; + struct Source; + struct TSAssembler; + + sp mNetSession; + sp mNotify; + TransportMode mMode; + int32_t mRTPSessionID; + int32_t mRTCPSessionID; + + int32_t mRTPClientSessionID; // in TRANSPORT_TCP mode. + + KeyedVector mPacketTypes; + KeyedVector > mSources; + + void onNetNotify(bool isRTP, const sp &msg); + status_t onRTPData(const sp &data); + status_t onRTCPData(const sp &data); + void onSendRR(); + + void scheduleSendRR(); + void addSDES(const sp &buffer); + + void notifyInitDone(status_t err); + void notifyError(status_t err); + void notifyPacketLost(); + + sp makeAssembler(uint8_t packetType); + + void requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo); + + DISALLOW_EVIL_CONSTRUCTORS(RTPReceiver); +}; + +} // namespace android + +#endif // RTP_RECEIVER_H_ diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp new file mode 100644 index 0000000..85c5933 --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -0,0 +1,701 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "RTPSender" +#include + +#include "RTPSender.h" + +#include "ANetworkSession.h" + +#include +#include +#include +#include +#include +#include + +#include "include/avc_utils.h" + +namespace android { + +RTPSender::RTPSender( + const sp &netSession, + const sp ¬ify) + : mNetSession(netSession), + mNotify(notify), + mMode(TRANSPORT_UNDEFINED), + mRTPSessionID(0), + mRTCPSessionID(0), + mRTPConnected(false), + mRTCPConnected(false), + mLastNTPTime(0), + mLastRTPTime(0), + mNumRTPSent(0), + mNumRTPOctetsSent(0), + mNumSRsSent(0), + mRTPSeqNo(0), + mHistorySize(0) { +} + +RTPSender::~RTPSender() { + if (mRTCPSessionID != 0) { + mNetSession->destroySession(mRTCPSessionID); + mRTCPSessionID = 0; + } + + if (mRTPSessionID != 0) { + mNetSession->destroySession(mRTPSessionID); + mRTPSessionID = 0; + } +} + +// static +int32_t RTPBase::PickRandomRTPPort() { + // Pick an even integer in range [1024, 65534) + + static const size_t kRange = (65534 - 1024) / 2; + + return (int32_t)(((float)(kRange + 1) * rand()) / RAND_MAX) * 2 + 1024; +} + +status_t RTPSender::initAsync( + TransportMode mode, + const char *remoteHost, + int32_t remoteRTPPort, + int32_t remoteRTCPPort, + int32_t *outLocalRTPPort) { + if (mMode != TRANSPORT_UNDEFINED || mode == TRANSPORT_UNDEFINED) { + return INVALID_OPERATION; + } + + CHECK_NE(mMode, TRANSPORT_TCP_INTERLEAVED); + + if (mode == TRANSPORT_TCP && remoteRTCPPort >= 0) { + return INVALID_OPERATION; + } + + sp rtpNotify = new AMessage(kWhatRTPNotify, id()); + + sp rtcpNotify; + if (remoteRTCPPort >= 0) { + rtcpNotify = new AMessage(kWhatRTCPNotify, id()); + } + + CHECK_EQ(mRTPSessionID, 0); + CHECK_EQ(mRTCPSessionID, 0); + + int32_t localRTPPort; + + for (;;) { + localRTPPort = PickRandomRTPPort(); + + status_t err; + if (mode == TRANSPORT_UDP) { + err = mNetSession->createUDPSession( + localRTPPort, + remoteHost, + remoteRTPPort, + rtpNotify, + &mRTPSessionID); + } else { + CHECK_EQ(mode, TRANSPORT_TCP); + err = mNetSession->createTCPDatagramSession( + localRTPPort, + remoteHost, + remoteRTPPort, + rtpNotify, + &mRTPSessionID); + } + + if (err != OK) { + continue; + } + + if (remoteRTCPPort < 0) { + break; + } + + if (mode == TRANSPORT_UDP) { + err = mNetSession->createUDPSession( + localRTPPort + 1, + remoteHost, + remoteRTCPPort, + rtcpNotify, + &mRTCPSessionID); + } else { + CHECK_EQ(mode, TRANSPORT_TCP); + err = mNetSession->createTCPDatagramSession( + localRTPPort + 1, + remoteHost, + remoteRTCPPort, + rtcpNotify, + &mRTCPSessionID); + } + + if (err == OK) { + break; + } + + mNetSession->destroySession(mRTPSessionID); + mRTPSessionID = 0; + } + + if (mode == TRANSPORT_UDP) { + mRTPConnected = true; + mRTCPConnected = true; + } + + mMode = mode; + *outLocalRTPPort = localRTPPort; + + if (mMode == TRANSPORT_UDP) { + notifyInitDone(OK); + } + + return OK; +} + +status_t RTPSender::queueBuffer( + const sp &buffer, uint8_t packetType, PacketizationMode mode) { + status_t err; + + switch (mode) { + case PACKETIZATION_TRANSPORT_STREAM: + err = queueTSPackets(buffer, packetType); + break; + + case PACKETIZATION_H264: + err = queueAVCBuffer(buffer, packetType); + break; + + default: + TRESPASS(); + } + + return err; +} + +status_t RTPSender::queueTSPackets( + const sp &tsPackets, uint8_t packetType) { + CHECK_EQ(0, tsPackets->size() % 188); + + const size_t numTSPackets = tsPackets->size() / 188; + + size_t srcOffset = 0; + while (srcOffset < tsPackets->size()) { + sp udpPacket = + new ABuffer(12 + kMaxNumTSPacketsPerRTPPacket * 
188); + + udpPacket->setInt32Data(mRTPSeqNo); + + uint8_t *rtp = udpPacket->data(); + rtp[0] = 0x80; + rtp[1] = packetType; + + rtp[2] = (mRTPSeqNo >> 8) & 0xff; + rtp[3] = mRTPSeqNo & 0xff; + ++mRTPSeqNo; + + int64_t nowUs = ALooper::GetNowUs(); + uint32_t rtpTime = (nowUs * 9) / 100ll; + + rtp[4] = rtpTime >> 24; + rtp[5] = (rtpTime >> 16) & 0xff; + rtp[6] = (rtpTime >> 8) & 0xff; + rtp[7] = rtpTime & 0xff; + + rtp[8] = kSourceID >> 24; + rtp[9] = (kSourceID >> 16) & 0xff; + rtp[10] = (kSourceID >> 8) & 0xff; + rtp[11] = kSourceID & 0xff; + + size_t numTSPackets = (tsPackets->size() - srcOffset) / 188; + if (numTSPackets > kMaxNumTSPacketsPerRTPPacket) { + numTSPackets = kMaxNumTSPacketsPerRTPPacket; + } + + memcpy(&rtp[12], tsPackets->data() + srcOffset, numTSPackets * 188); + + udpPacket->setRange(0, 12 + numTSPackets * 188); + status_t err = sendRTPPacket(udpPacket, true /* storeInHistory */); + + if (err != OK) { + return err; + } + + srcOffset += numTSPackets * 188; + } + + return OK; +} + +status_t RTPSender::queueAVCBuffer( + const sp &accessUnit, uint8_t packetType) { + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + + uint32_t rtpTime = (timeUs * 9 / 100ll); + + List > packets; + + sp out = new ABuffer(kMaxUDPPacketSize); + size_t outBytesUsed = 12; // Placeholder for RTP header. + + const uint8_t *data = accessUnit->data(); + size_t size = accessUnit->size(); + const uint8_t *nalStart; + size_t nalSize; + while (getNextNALUnit( + &data, &size, &nalStart, &nalSize, + true /* startCodeFollows */) == OK) { + size_t bytesNeeded = nalSize + 2; + if (outBytesUsed == 12) { + ++bytesNeeded; + } + + if (outBytesUsed + bytesNeeded > out->capacity()) { + bool emitSingleNALPacket = false; + + if (outBytesUsed == 12 + && outBytesUsed + nalSize <= out->capacity()) { + // We haven't emitted anything into the current packet yet and + // this NAL unit fits into a single-NAL-unit-packet while + // it wouldn't have fit as part of a STAP-A packet. + + memcpy(out->data() + outBytesUsed, nalStart, nalSize); + outBytesUsed += nalSize; + + emitSingleNALPacket = true; + } + + if (outBytesUsed > 12) { + out->setRange(0, outBytesUsed); + packets.push_back(out); + out = new ABuffer(kMaxUDPPacketSize); + outBytesUsed = 12; // Placeholder for RTP header + } + + if (emitSingleNALPacket) { + continue; + } + } + + if (outBytesUsed + bytesNeeded <= out->capacity()) { + uint8_t *dst = out->data() + outBytesUsed; + + if (outBytesUsed == 12) { + *dst++ = 24; // STAP-A header + } + + *dst++ = (nalSize >> 8) & 0xff; + *dst++ = nalSize & 0xff; + memcpy(dst, nalStart, nalSize); + + outBytesUsed += bytesNeeded; + continue; + } + + // This single NAL unit does not fit into a single RTP packet, + // we need to emit an FU-A. 
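Rough arithmetic behind the fragmentation loop that follows: every RTP packet spends 12 bytes on the RTP header and 2 bytes on the FU indicator plus FU header, and the first byte of the NAL unit is not copied because it is re-encoded in those FU bytes. So a NAL unit of nalSize bytes needs ceil((nalSize - 1) / (kMaxUDPPacketSize - 14)) packets. As a sketch (helper name illustrative, nalSize > 1 assumed):

    #include <stddef.h>

    static size_t fuaFragmentCount(size_t nalSize, size_t maxUDPPacketSize) {
        size_t payloadPerFragment = maxUDPPacketSize - 12 - 2;
        size_t body = nalSize - 1;   // NAL header byte moves into the FU bytes
        return (body + payloadPerFragment - 1) / payloadPerFragment;
    }
    // With kMaxUDPPacketSize = 1472, a 20000-byte IDR slice becomes
    // ceil(19999 / 1458) = 14 FU-A packets.
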
+ + CHECK_EQ(outBytesUsed, 12u); + + uint8_t nalType = nalStart[0] & 0x1f; + uint8_t nri = (nalStart[0] >> 5) & 3; + + size_t srcOffset = 1; + while (srcOffset < nalSize) { + size_t copy = out->capacity() - outBytesUsed - 2; + if (copy > nalSize - srcOffset) { + copy = nalSize - srcOffset; + } + + uint8_t *dst = out->data() + outBytesUsed; + dst[0] = (nri << 5) | 28; + + dst[1] = nalType; + + if (srcOffset == 1) { + dst[1] |= 0x80; + } + + if (srcOffset + copy == nalSize) { + dst[1] |= 0x40; + } + + memcpy(&dst[2], nalStart + srcOffset, copy); + srcOffset += copy; + + out->setRange(0, outBytesUsed + copy + 2); + + packets.push_back(out); + out = new ABuffer(kMaxUDPPacketSize); + outBytesUsed = 12; // Placeholder for RTP header + } + } + + if (outBytesUsed > 12) { + out->setRange(0, outBytesUsed); + packets.push_back(out); + } + + while (!packets.empty()) { + sp out = *packets.begin(); + packets.erase(packets.begin()); + + out->setInt32Data(mRTPSeqNo); + + bool last = packets.empty(); + + uint8_t *dst = out->data(); + + dst[0] = 0x80; + + dst[1] = packetType; + if (last) { + dst[1] |= 1 << 7; // M-bit + } + + dst[2] = (mRTPSeqNo >> 8) & 0xff; + dst[3] = mRTPSeqNo & 0xff; + ++mRTPSeqNo; + + dst[4] = rtpTime >> 24; + dst[5] = (rtpTime >> 16) & 0xff; + dst[6] = (rtpTime >> 8) & 0xff; + dst[7] = rtpTime & 0xff; + dst[8] = kSourceID >> 24; + dst[9] = (kSourceID >> 16) & 0xff; + dst[10] = (kSourceID >> 8) & 0xff; + dst[11] = kSourceID & 0xff; + + status_t err = sendRTPPacket(out, true /* storeInHistory */); + + if (err != OK) { + return err; + } + } + + return OK; +} + +status_t RTPSender::sendRTPPacket( + const sp &buffer, bool storeInHistory) { + CHECK(mRTPConnected); + + status_t err = mNetSession->sendRequest( + mRTPSessionID, buffer->data(), buffer->size()); + + if (err != OK) { + return err; + } + + mLastNTPTime = GetNowNTP(); + mLastRTPTime = U32_AT(buffer->data() + 4); + + ++mNumRTPSent; + mNumRTPOctetsSent += buffer->size() - 12; + + if (storeInHistory) { + if (mHistorySize == kMaxHistorySize) { + mHistory.erase(mHistory.begin()); + } else { + ++mHistorySize; + } + mHistory.push_back(buffer); + } + + return OK; +} + +// static +uint64_t RTPSender::GetNowNTP() { + struct timeval tv; + gettimeofday(&tv, NULL /* timezone */); + + uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec; + + nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll; + + uint64_t hi = nowUs / 1000000ll; + uint64_t lo = ((1ll << 32) * (nowUs % 1000000ll)) / 1000000ll; + + return (hi << 32) | lo; +} + +void RTPSender::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatRTPNotify: + case kWhatRTCPNotify: + onNetNotify(msg->what() == kWhatRTPNotify, msg); + break; + + default: + TRESPASS(); + } +} + +void RTPSender::onNetNotify(bool isRTP, const sp &msg) { + int32_t reason; + CHECK(msg->findInt32("reason", &reason)); + + switch (reason) { + case ANetworkSession::kWhatError: + { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + int32_t err; + CHECK(msg->findInt32("err", &err)); + + int32_t errorOccuredDuringSend; + CHECK(msg->findInt32("send", &errorOccuredDuringSend)); + + AString detail; + CHECK(msg->findString("detail", &detail)); + + ALOGE("An error occurred during %s in session %d " + "(%d, '%s' (%s)).", + errorOccuredDuringSend ? 
"send" : "receive", + sessionID, + err, + detail.c_str(), + strerror(-err)); + + mNetSession->destroySession(sessionID); + + if (sessionID == mRTPSessionID) { + mRTPSessionID = 0; + } else if (sessionID == mRTCPSessionID) { + mRTCPSessionID = 0; + } + + if (mMode == TRANSPORT_TCP) { + if (!mRTPConnected + || (mRTCPSessionID > 0 && !mRTCPConnected)) { + notifyInitDone(err); + break; + } + } + + notifyError(err); + break; + } + + case ANetworkSession::kWhatDatagram: + { + sp data; + CHECK(msg->findBuffer("data", &data)); + + if (isRTP) { + ALOGW("Huh? Received data on RTP connection..."); + } else { + onRTCPData(data); + } + break; + } + + case ANetworkSession::kWhatConnected: + { + CHECK_EQ(mMode, TRANSPORT_TCP); + + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + if (isRTP) { + CHECK_EQ(sessionID, mRTPSessionID); + mRTPConnected = true; + } else { + CHECK_EQ(sessionID, mRTCPSessionID); + mRTCPConnected = true; + } + + if (mRTPConnected && (mRTCPSessionID == 0 || mRTCPConnected)) { + notifyInitDone(OK); + } + break; + } + } +} + +status_t RTPSender::onRTCPData(const sp &buffer) { + const uint8_t *data = buffer->data(); + size_t size = buffer->size(); + + while (size > 0) { + if (size < 8) { + // Too short to be a valid RTCP header + return ERROR_MALFORMED; + } + + if ((data[0] >> 6) != 2) { + // Unsupported version. + return ERROR_UNSUPPORTED; + } + + if (data[0] & 0x20) { + // Padding present. + + size_t paddingLength = data[size - 1]; + + if (paddingLength + 12 > size) { + // If we removed this much padding we'd end up with something + // that's too short to be a valid RTP header. + return ERROR_MALFORMED; + } + + size -= paddingLength; + } + + size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4; + + if (size < headerLength) { + // Only received a partial packet? + return ERROR_MALFORMED; + } + + switch (data[1]) { + case 200: + case 201: // RR + parseReceiverReport(data, headerLength); + break; + + case 202: // SDES + case 203: + case 204: // APP + break; + + case 205: // TSFB (transport layer specific feedback) + parseTSFB(data, headerLength); + break; + + case 206: // PSFB (payload specific feedback) + // hexdump(data, headerLength); + break; + + default: + { + ALOGW("Unknown RTCP packet type %u of size %d", + (unsigned)data[1], headerLength); + break; + } + } + + data += headerLength; + size -= headerLength; + } + + return OK; +} + +status_t RTPSender::parseReceiverReport(const uint8_t *data, size_t size) { + // hexdump(data, size); + + float fractionLost = data[12] / 256.0f; + + ALOGI("lost %.2f %% of packets during report interval.", + 100.0f * fractionLost); + + return OK; +} + +status_t RTPSender::parseTSFB(const uint8_t *data, size_t size) { + if ((data[0] & 0x1f) != 1) { + return ERROR_UNSUPPORTED; // We only support NACK for now. 
+ } + + uint32_t srcId = U32_AT(&data[8]); + if (srcId != kSourceID) { + return ERROR_MALFORMED; + } + + for (size_t i = 12; i < size; i += 4) { + uint16_t seqNo = U16_AT(&data[i]); + uint16_t blp = U16_AT(&data[i + 2]); + + List >::iterator it = mHistory.begin(); + bool foundSeqNo = false; + while (it != mHistory.end()) { + const sp &buffer = *it; + + uint16_t bufferSeqNo = buffer->int32Data() & 0xffff; + + bool retransmit = false; + if (bufferSeqNo == seqNo) { + retransmit = true; + } else if (blp != 0) { + for (size_t i = 0; i < 16; ++i) { + if ((blp & (1 << i)) + && (bufferSeqNo == ((seqNo + i + 1) & 0xffff))) { + blp &= ~(1 << i); + retransmit = true; + } + } + } + + if (retransmit) { + ALOGV("retransmitting seqNo %d", bufferSeqNo); + + CHECK_EQ((status_t)OK, + sendRTPPacket(buffer, false /* storeInHistory */)); + + if (bufferSeqNo == seqNo) { + foundSeqNo = true; + } + + if (foundSeqNo && blp == 0) { + break; + } + } + + ++it; + } + + if (!foundSeqNo || blp != 0) { + ALOGI("Some sequence numbers were no longer available for " + "retransmission (seqNo = %d, foundSeqNo = %d, blp = 0x%04x)", + seqNo, foundSeqNo, blp); + + if (!mHistory.empty()) { + int32_t earliest = (*mHistory.begin())->int32Data() & 0xffff; + int32_t latest = (*--mHistory.end())->int32Data() & 0xffff; + + ALOGI("have seq numbers from %d - %d", earliest, latest); + } + } + } + + return OK; +} + +void RTPSender::notifyInitDone(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatInitDone); + notify->setInt32("err", err); + notify->post(); +} + +void RTPSender::notifyError(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h new file mode 100644 index 0000000..2b683a4 --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPSender.h @@ -0,0 +1,112 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef RTP_SENDER_H_ + +#define RTP_SENDER_H_ + +#include "RTPBase.h" + +#include + +namespace android { + +struct ABuffer; +struct ANetworkSession; + +// An object of this class facilitates sending of media data over an RTP +// channel. The channel is established over a UDP or TCP connection depending +// on which "TransportMode" was chosen. 
In addition different RTP packetization +// schemes are supported such as "Transport Stream Packets over RTP", +// or "AVC/H.264 encapsulation as specified in RFC 3984 (non-interleaved mode)" +struct RTPSender : public RTPBase, public AHandler { + enum { + kWhatInitDone, + kWhatError, + }; + RTPSender( + const sp &netSession, + const sp ¬ify); + + status_t initAsync( + TransportMode mode, + const char *remoteHost, + int32_t remoteRTPPort, + int32_t remoteRTCPPort, + int32_t *outLocalRTPPort); + + status_t queueBuffer( + const sp &buffer, + uint8_t packetType, + PacketizationMode mode); + +protected: + virtual ~RTPSender(); + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatRTPNotify, + kWhatRTCPNotify, + }; + + enum { + kMaxNumTSPacketsPerRTPPacket = (kMaxUDPPacketSize - 12) / 188, + kMaxHistorySize = 1024, + kSourceID = 0xdeadbeef, + }; + + sp mNetSession; + sp mNotify; + TransportMode mMode; + int32_t mRTPSessionID; + int32_t mRTCPSessionID; + bool mRTPConnected; + bool mRTCPConnected; + + uint64_t mLastNTPTime; + uint32_t mLastRTPTime; + uint32_t mNumRTPSent; + uint32_t mNumRTPOctetsSent; + uint32_t mNumSRsSent; + + uint32_t mRTPSeqNo; + + List > mHistory; + size_t mHistorySize; + + static uint64_t GetNowNTP(); + + status_t queueTSPackets(const sp &tsPackets, uint8_t packetType); + status_t queueAVCBuffer(const sp &accessUnit, uint8_t packetType); + + status_t sendRTPPacket(const sp &packet, bool storeInHistory); + + void onNetNotify(bool isRTP, const sp &msg); + + status_t onRTCPData(const sp &data); + status_t parseReceiverReport(const uint8_t *data, size_t size); + status_t parseTSFB(const uint8_t *data, size_t size); + + void notifyInitDone(status_t err); + void notifyError(status_t err); + + DISALLOW_EVIL_CONSTRUCTORS(RTPSender); +}; + +} // namespace android + +#endif // RTP_SENDER_H_ diff --git a/media/libstagefright/wifi-display/rtptest.cpp b/media/libstagefright/wifi-display/rtptest.cpp new file mode 100644 index 0000000..607d9d2 --- /dev/null +++ b/media/libstagefright/wifi-display/rtptest.cpp @@ -0,0 +1,382 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NEBUG 0 +#define LOG_TAG "rtptest" +#include + +#include "ANetworkSession.h" +#include "rtp/RTPSender.h" +#include "rtp/RTPReceiver.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { + +struct TestHandler : public AHandler { + TestHandler(const sp &netSession); + + void listen(); + void connect(const char *host, int32_t port); + +protected: + virtual ~TestHandler(); + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatListen, + kWhatConnect, + kWhatReceiverNotify, + kWhatSenderNotify, + kWhatSendMore, + kWhatStop, + }; + + sp mNetSession; + sp mExtractor; + sp mSender; + sp mReceiver; + + size_t mMaxSampleSize; + + int64_t mFirstTimeRealUs; + int64_t mFirstTimeMediaUs; + + status_t readMore(); + + DISALLOW_EVIL_CONSTRUCTORS(TestHandler); +}; + +TestHandler::TestHandler(const sp &netSession) + : mNetSession(netSession), + mMaxSampleSize(1024 * 1024), + mFirstTimeRealUs(-1ll), + mFirstTimeMediaUs(-1ll) { +} + +TestHandler::~TestHandler() { +} + +void TestHandler::listen() { + sp msg = new AMessage(kWhatListen, id()); + msg->post(); +} + +void TestHandler::connect(const char *host, int32_t port) { + sp msg = new AMessage(kWhatConnect, id()); + msg->setString("host", host); + msg->setInt32("port", port); + msg->post(); +} + +void TestHandler::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatListen: + { + sp notify = new AMessage(kWhatReceiverNotify, id()); + mReceiver = new RTPReceiver(mNetSession, notify); + looper()->registerHandler(mReceiver); + + CHECK_EQ((status_t)OK, + mReceiver->registerPacketType( + 33, RTPReceiver::PACKETIZATION_H264)); + + int32_t receiverRTPPort; + CHECK_EQ((status_t)OK, + mReceiver->initAsync( + RTPReceiver::TRANSPORT_UDP, &receiverRTPPort)); + + printf("picked receiverRTPPort %d\n", receiverRTPPort); + +#if 0 + CHECK_EQ((status_t)OK, + mReceiver->connect( + "127.0.0.1", senderRTPPort, senderRTPPort + 1)); +#endif + break; + } + + case kWhatConnect: + { + AString host; + CHECK(msg->findString("host", &host)); + + int32_t receiverRTPPort; + CHECK(msg->findInt32("port", &receiverRTPPort)); + + mExtractor = new NuMediaExtractor; + CHECK_EQ((status_t)OK, + mExtractor->setDataSource( + "/sdcard/Frame Counter HD 30FPS_1080p.mp4")); + + bool haveVideo = false; + for (size_t i = 0; i < mExtractor->countTracks(); ++i) { + sp format; + CHECK_EQ((status_t)OK, mExtractor->getTrackFormat(i, &format)); + + AString mime; + CHECK(format->findString("mime", &mime)); + + if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str())) { + mExtractor->selectTrack(i); + haveVideo = true; + break; + } + } + + CHECK(haveVideo); + + sp notify = new AMessage(kWhatSenderNotify, id()); + mSender = new RTPSender(mNetSession, notify); + looper()->registerHandler(mSender); + + int32_t senderRTPPort; + CHECK_EQ((status_t)OK, + mSender->initAsync( + RTPSender::TRANSPORT_UDP, + host.c_str(), + receiverRTPPort, + receiverRTPPort + 1, + &senderRTPPort)); + + printf("picked senderRTPPort %d\n", senderRTPPort); + break; + } + + case kWhatSenderNotify: + { + ALOGI("kWhatSenderNotify"); + + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case RTPSender::kWhatInitDone: + { + int32_t err; + CHECK(msg->findInt32("err", &err)); + + ALOGI("RTPSender::initAsync completed w/ err %d", err); + + if (err == OK) { + err = readMore(); + + if (err != OK) { + (new AMessage(kWhatStop, id()))->post(); + } + } + break; + } + + case RTPSender::kWhatError: + break; + 
} + break; + } + + case kWhatReceiverNotify: + { + ALOGI("kWhatReceiverNotify"); + + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case RTPReceiver::kWhatInitDone: + { + int32_t err; + CHECK(msg->findInt32("err", &err)); + + ALOGI("RTPReceiver::initAsync completed w/ err %d", err); + break; + } + + case RTPSender::kWhatError: + break; + } + break; + } + + case kWhatSendMore: + { + sp accessUnit; + CHECK(msg->findBuffer("accessUnit", &accessUnit)); + + CHECK_EQ((status_t)OK, + mSender->queueBuffer( + accessUnit, + 33, + RTPSender::PACKETIZATION_H264)); + + status_t err = readMore(); + + if (err != OK) { + (new AMessage(kWhatStop, id()))->post(); + } + break; + } + + case kWhatStop: + { + if (mReceiver != NULL) { + looper()->unregisterHandler(mReceiver->id()); + mReceiver.clear(); + } + + if (mSender != NULL) { + looper()->unregisterHandler(mSender->id()); + mSender.clear(); + } + + mExtractor.clear(); + + looper()->stop(); + break; + } + + default: + TRESPASS(); + } +} + +status_t TestHandler::readMore() { + int64_t timeUs; + status_t err = mExtractor->getSampleTime(&timeUs); + + if (err != OK) { + return err; + } + + sp accessUnit = new ABuffer(mMaxSampleSize); + CHECK_EQ((status_t)OK, mExtractor->readSampleData(accessUnit)); + + accessUnit->meta()->setInt64("timeUs", timeUs); + + CHECK_EQ((status_t)OK, mExtractor->advance()); + + int64_t nowUs = ALooper::GetNowUs(); + int64_t whenUs; + + if (mFirstTimeRealUs < 0ll) { + mFirstTimeRealUs = whenUs = nowUs; + mFirstTimeMediaUs = timeUs; + } else { + whenUs = mFirstTimeRealUs + timeUs - mFirstTimeMediaUs; + } + + sp msg = new AMessage(kWhatSendMore, id()); + msg->setBuffer("accessUnit", accessUnit); + msg->post(whenUs - nowUs); + + return OK; +} + +} // namespace android + +static void usage(const char *me) { + fprintf(stderr, + "usage: %s -c host:port\tconnect to remote host\n" + " -l \tlisten\n", + me); +} + +int main(int argc, char **argv) { + using namespace android; + + // srand(time(NULL)); + + ProcessState::self()->startThreadPool(); + + DataSource::RegisterDefaultSniffers(); + + bool listen = false; + int32_t connectToPort = -1; + AString connectToHost; + + int res; + while ((res = getopt(argc, argv, "hc:l")) >= 0) { + switch (res) { + case 'c': + { + const char *colonPos = strrchr(optarg, ':'); + + if (colonPos == NULL) { + usage(argv[0]); + exit(1); + } + + connectToHost.setTo(optarg, colonPos - optarg); + + char *end; + connectToPort = strtol(colonPos + 1, &end, 10); + + if (*end != '\0' || end == colonPos + 1 + || connectToPort < 1 || connectToPort > 65535) { + fprintf(stderr, "Illegal port specified.\n"); + exit(1); + } + break; + } + + case 'l': + { + listen = true; + break; + } + + case '?': + case 'h': + usage(argv[0]); + exit(1); + } + } + + if (!listen && connectToPort < 0) { + fprintf(stderr, + "You need to select either client or server mode.\n"); + exit(1); + } + + sp netSession = new ANetworkSession; + netSession->start(); + + sp looper = new ALooper; + + sp handler = new TestHandler(netSession); + looper->registerHandler(handler); + + if (listen) { + handler->listen(); + } + + if (connectToPort >= 0) { + handler->connect(connectToHost.c_str(), connectToPort); + } + + looper->start(true /* runOnCallingThread */); + + return 0; +} + diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp index 70369bb..b53252d 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp +++ 
b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp @@ -20,15 +20,13 @@ #include "DirectRenderer.h" -#include "AnotherPacketSource.h" -#include "ATSParser.h" - #include #include #include #include #include #include +#include #include #include #include @@ -36,30 +34,13 @@ namespace android { -#if 1 -// static -const int64_t DirectRenderer::kPacketLostDelayUs = 80000ll; - -// static -const int64_t DirectRenderer::kPacketLateDelayUs = 60000ll; -#else -// static -const int64_t DirectRenderer::kPacketLostDelayUs = 1000000ll; - -// static -const int64_t DirectRenderer::kPacketLateDelayUs = -1ll; -#endif - DirectRenderer::DirectRenderer( - const sp ¬ifyLost, const sp &bufferProducer) - : mNotifyLost(notifyLost), - mSurfaceTex(bufferProducer), - mTSParser(new ATSParser(ATSParser::ALIGNED_VIDEO_DATA)), + : mSurfaceTex(bufferProducer), mVideoDecoderNotificationPending(false), - mAwaitingExtSeqNo(-1), - mRequestedRetransmission(false), - mPacketLostGeneration(0) { + mRenderPending(false), + mFirstRenderTimeUs(-1ll), + mFirstRenderRealUs(-1ll) { } DirectRenderer::~DirectRenderer() { @@ -74,58 +55,15 @@ DirectRenderer::~DirectRenderer() { void DirectRenderer::onMessageReceived(const sp &msg) { switch (msg->what()) { - case kWhatQueueBuffer: - { - sp buffer; - CHECK(msg->findBuffer("buffer", &buffer)); - - onQueueBuffer(buffer); - - dequeueMore(); - break; - } - - case kWhatPacketLate: - case kWhatPacketLost: + case kWhatVideoDecoderNotify: { - int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - - if (generation != mPacketLostGeneration) { - // stale. - break; - } - - if (msg->what() == kWhatPacketLate) { - CHECK(!mRequestedRetransmission); - CHECK_GE(mAwaitingExtSeqNo, 0); - - ALOGV("packet extSeqNo %d is late, requesting retransmission.", - mAwaitingExtSeqNo); - - sp notify = mNotifyLost->dup(); - notify->setInt32("seqNo", (mAwaitingExtSeqNo & 0xffff)); - notify->post(); - - mRequestedRetransmission = true; - break; - } - - ALOGW("lost packet extSeqNo %d", mAwaitingExtSeqNo); - - sp extra; - mTSParser->signalDiscontinuity( - ATSParser::DISCONTINUITY_TIME, extra); - - mAwaitingExtSeqNo = -1; - mRequestedRetransmission = false; - dequeueMore(); + onVideoDecoderNotify(); break; } - case kWhatVideoDecoderNotify: + case kWhatRender: { - onVideoDecoderNotify(); + onRender(); break; } @@ -134,203 +72,67 @@ void DirectRenderer::onMessageReceived(const sp &msg) { } } -void DirectRenderer::onQueueBuffer(const sp &buffer) { - int32_t newExtendedSeqNo = buffer->int32Data(); - - if (mPackets.empty()) { - mPackets.push_back(buffer); - return; - } - - if (mAwaitingExtSeqNo > 0 && newExtendedSeqNo < mAwaitingExtSeqNo) { - // We're no longer interested in these. They're old. +void DirectRenderer::setFormat( + size_t trackIndex, const sp &format) { + if (trackIndex == 1) { + // Ignore audio for now. return; } - List >::iterator firstIt = mPackets.begin(); - List >::iterator it = --mPackets.end(); - for (;;) { - int32_t extendedSeqNo = (*it)->int32Data(); - - if (extendedSeqNo == newExtendedSeqNo) { - // Duplicate packet. - return; - } + CHECK(mVideoDecoder == NULL); - if (extendedSeqNo < newExtendedSeqNo) { - // Insert new packet after the one at "it". - mPackets.insert(++it, buffer); - return; - } - - if (it == firstIt) { - // Insert new packet before the first existing one. 
- mPackets.insert(it, buffer); - return; - } + AString mime; + CHECK(format->findString("mime", &mime)); - --it; - } -} + mVideoDecoderLooper = new ALooper; + mVideoDecoderLooper->setName("video codec looper"); -void DirectRenderer::dequeueMore() { - if (mAwaitingExtSeqNo >= 0) { - // Remove all packets before the one we're looking for, they had - // their chance. - while (!mPackets.empty() - && (*mPackets.begin())->int32Data() < mAwaitingExtSeqNo) { - ALOGV("dropping late packet extSeqNo %d", - (*mPackets.begin())->int32Data()); + mVideoDecoderLooper->start( + false /* runOnCallingThread */, + false /* canCallJava */, + PRIORITY_DEFAULT); - mPackets.erase(mPackets.begin()); - } - } + mVideoDecoder = MediaCodec::CreateByType( + mVideoDecoderLooper, mime.c_str(), false /* encoder */); - bool packetLostScheduled = (mAwaitingExtSeqNo >= 0); + CHECK(mVideoDecoder != NULL); - while (!mPackets.empty()) { - sp buffer = *mPackets.begin(); - int32_t extSeqNo = buffer->int32Data(); + status_t err = mVideoDecoder->configure( + format, + mSurfaceTex == NULL + ? NULL : new Surface(mSurfaceTex), + NULL /* crypto */, + 0 /* flags */); + CHECK_EQ(err, (status_t)OK); - if (mAwaitingExtSeqNo >= 0 && extSeqNo != mAwaitingExtSeqNo) { - break; - } + err = mVideoDecoder->start(); + CHECK_EQ(err, (status_t)OK); - mPackets.erase(mPackets.begin()); + err = mVideoDecoder->getInputBuffers( + &mVideoDecoderInputBuffers); + CHECK_EQ(err, (status_t)OK); - if (packetLostScheduled) { - packetLostScheduled = false; - cancelPacketLost(); - } - - if (mRequestedRetransmission) { - ALOGV("recovered after requesting retransmission of extSeqNo %d", - mAwaitingExtSeqNo); - } - - CHECK_EQ(buffer->size() % 188, 0u); - - for (size_t offset = 0; offset < buffer->size(); offset += 188) { - status_t err = mTSParser->feedTSPacket( - buffer->data() + offset, 188); - - CHECK_EQ(err, (status_t)OK); - } - - mAwaitingExtSeqNo = extSeqNo + 1; - mRequestedRetransmission = false; - } - - if (!packetLostScheduled && mAwaitingExtSeqNo >= 0) { - schedulePacketLost(); - } - - dequeueAccessUnits(); + scheduleVideoDecoderNotification(); } -void DirectRenderer::dequeueAccessUnits() { - sp audioSource = - static_cast( - mTSParser->getSource(ATSParser::AUDIO).get()); - - if (audioSource != NULL) { - status_t finalResult; - size_t n = 0; - while (audioSource->hasBufferAvailable(&finalResult)) { - sp accessUnit; - status_t err = audioSource->dequeueAccessUnit(&accessUnit); - if (err == OK) { - ++n; - } - } - - if (n > 0) { - ALOGV("dequeued %d audio access units.", n); - } - } - - sp videoSource = - static_cast( - mTSParser->getSource(ATSParser::VIDEO).get()); - - if (videoSource != NULL) { - if (mVideoDecoder == NULL) { - sp meta = videoSource->getFormat(); - if (meta != NULL) { - sp videoFormat; - status_t err = convertMetaDataToMessage(meta, &videoFormat); - CHECK_EQ(err, (status_t)OK); - - AString mime; - CHECK(videoFormat->findString("mime", &mime)); - - mVideoDecoderLooper = new ALooper; - mVideoDecoderLooper->setName("video codec looper"); - - mVideoDecoderLooper->start( - false /* runOnCallingThread */, - false /* canCallJava */, - PRIORITY_DEFAULT); - - mVideoDecoder = MediaCodec::CreateByType( - mVideoDecoderLooper, mime.c_str(), false /* encoder */); - - CHECK(mVideoDecoder != NULL); - - err = mVideoDecoder->configure( - videoFormat, - mSurfaceTex == NULL - ? 
NULL : new Surface(mSurfaceTex), - NULL /* crypto */, - 0 /* flags */); - - CHECK_EQ(err, (status_t)OK); - - err = mVideoDecoder->start(); - CHECK_EQ(err, (status_t)OK); - - err = mVideoDecoder->getInputBuffers( - &mVideoDecoderInputBuffers); - CHECK_EQ(err, (status_t)OK); - - scheduleVideoDecoderNotification(); - } - } - - status_t finalResult; - size_t n = 0; - while (videoSource->hasBufferAvailable(&finalResult)) { - sp accessUnit; - status_t err = videoSource->dequeueAccessUnit(&accessUnit); - if (err == OK) { - mVideoAccessUnits.push_back(accessUnit); - ++n; - } - } - - if (n > 0) { - ALOGV("dequeued %d video access units.", n); - queueVideoDecoderInputBuffers(); - } +void DirectRenderer::queueAccessUnit( + size_t trackIndex, const sp &accessUnit) { + if (trackIndex == 1) { + // Ignore audio for now. + return; } -} -void DirectRenderer::schedulePacketLost() { - sp msg; + if (mVideoDecoder == NULL) { + sp format = new AMessage; + format->setString("mime", "video/avc"); + format->setInt32("width", 640); + format->setInt32("height", 360); - if (kPacketLateDelayUs > 0ll) { - msg = new AMessage(kWhatPacketLate, id()); - msg->setInt32("generation", mPacketLostGeneration); - msg->post(kPacketLateDelayUs); + setFormat(0, format); } - msg = new AMessage(kWhatPacketLost, id()); - msg->setInt32("generation", mPacketLostGeneration); - msg->post(kPacketLostDelayUs); -} - -void DirectRenderer::cancelPacketLost() { - ++mPacketLostGeneration; + mVideoAccessUnits.push_back(accessUnit); + queueVideoDecoderInputBuffers(); } void DirectRenderer::queueVideoDecoderInputBuffers() { @@ -406,8 +208,7 @@ void DirectRenderer::onVideoDecoderNotify() { &flags); if (err == OK) { - err = mVideoDecoder->renderOutputBufferAndRelease(index); - CHECK_EQ(err, (status_t)OK); + queueOutputBuffer(index, timeUs); } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) { // We don't care. 
} else if (err == INFO_FORMAT_CHANGED) { @@ -422,6 +223,62 @@ void DirectRenderer::onVideoDecoderNotify() { scheduleVideoDecoderNotification(); } +void DirectRenderer::queueOutputBuffer(size_t index, int64_t timeUs) { +#if 0 + OutputInfo info; + info.mIndex = index; + info.mTimeUs = timeUs; + mOutputBuffers.push_back(info); + + scheduleRenderIfNecessary(); +#else + status_t err = mVideoDecoder->renderOutputBufferAndRelease(index); + CHECK_EQ(err, (status_t)OK); +#endif +} + +void DirectRenderer::scheduleRenderIfNecessary() { + if (mRenderPending || mOutputBuffers.empty()) { + return; + } + + mRenderPending = true; + + int64_t timeUs = (*mOutputBuffers.begin()).mTimeUs; + int64_t nowUs = ALooper::GetNowUs(); + + if (mFirstRenderTimeUs < 0ll) { + mFirstRenderTimeUs = timeUs; + mFirstRenderRealUs = nowUs; + } + + int64_t whenUs = timeUs - mFirstRenderTimeUs + mFirstRenderRealUs; + int64_t delayUs = whenUs - nowUs; + + (new AMessage(kWhatRender, id()))->post(delayUs); +} + +void DirectRenderer::onRender() { + mRenderPending = false; + + int64_t nowUs = ALooper::GetNowUs(); + + while (!mOutputBuffers.empty()) { + const OutputInfo &info = *mOutputBuffers.begin(); + + if (info.mTimeUs > nowUs) { + break; + } + + status_t err = mVideoDecoder->renderOutputBufferAndRelease(info.mIndex); + CHECK_EQ(err, (status_t)OK); + + mOutputBuffers.erase(mOutputBuffers.begin()); + } + + scheduleRenderIfNecessary(); +} + void DirectRenderer::scheduleVideoDecoderNotification() { if (mVideoDecoderNotificationPending) { return; diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h index 2babcb8..7219080 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.h +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.h @@ -23,7 +23,6 @@ namespace android { struct ABuffer; -struct ATSParser; struct IGraphicBufferProducer; struct MediaCodec; @@ -32,13 +31,10 @@ struct MediaCodec; // delay. Primarily meant to finetune packet loss discovery and minimize // latency. struct DirectRenderer : public AHandler { - DirectRenderer( - const sp ¬ifyLost, - const sp &bufferProducer); + DirectRenderer(const sp &bufferProducer); - enum { - kWhatQueueBuffer = 'queB', - }; + void setFormat(size_t trackIndex, const sp &format); + void queueAccessUnit(size_t trackIndex, const sp &accessUnit); protected: virtual void onMessageReceived(const sp &msg); @@ -46,22 +42,17 @@ protected: private: enum { - kWhatPacketLate, - kWhatPacketLost, kWhatVideoDecoderNotify, + kWhatRender, }; - static const int64_t kPacketLateDelayUs; - static const int64_t kPacketLostDelayUs; + struct OutputInfo { + size_t mIndex; + int64_t mTimeUs; + }; - sp mNotifyLost; sp mSurfaceTex; - // Ordered by extended seq number. 
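The render-scheduling helpers added above (currently bypassed by the #if 0 in queueOutputBuffer) anchor the first decoded timestamp to the wall clock and schedule every later buffer at the same media-to-real-time offset (whenUs = timeUs - mFirstRenderTimeUs + mFirstRenderRealUs). The same mapping as a standalone sketch (illustrative names, not from the patch):

#include <stdint.h>

// Maps a decoder timestamp to the wall-clock time at which it should be
// rendered, anchoring the first buffer at "now" (all values in microseconds).
struct RenderClock {
    int64_t firstMediaUs = -1;
    int64_t firstRealUs = -1;

    int64_t renderTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
        if (firstMediaUs < 0) {
            firstMediaUs = mediaTimeUs;
            firstRealUs = nowUs;
        }
        return mediaTimeUs - firstMediaUs + firstRealUs;
    }
};

The delay posted with kWhatRender is this render time minus the current time; anything already due is rendered immediately in onRender().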
- List > mPackets; - - sp mTSParser; - sp mVideoDecoderLooper; sp mVideoDecoder; Vector > mVideoDecoderInputBuffers; @@ -70,21 +61,19 @@ private: List > mVideoAccessUnits; - int32_t mAwaitingExtSeqNo; - bool mRequestedRetransmission; - int32_t mPacketLostGeneration; + List mOutputBuffers; + bool mRenderPending; + int64_t mFirstRenderTimeUs; + int64_t mFirstRenderRealUs; - void onQueueBuffer(const sp &buffer); void onVideoDecoderNotify(); - - void dequeueMore(); - void dequeueAccessUnits(); - - void schedulePacketLost(); - void cancelPacketLost(); + void onRender(); void queueVideoDecoderInputBuffers(); void scheduleVideoDecoderNotification(); + void scheduleRenderIfNecessary(); + + void queueOutputBuffer(size_t index, int64_t timeUs); DISALLOW_EVIL_CONSTRUCTORS(DirectRenderer); }; diff --git a/media/libstagefright/wifi-display/sink/LinearRegression.cpp b/media/libstagefright/wifi-display/sink/LinearRegression.cpp deleted file mode 100644 index 8cfce37..0000000 --- a/media/libstagefright/wifi-display/sink/LinearRegression.cpp +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "LinearRegression" -#include - -#include "LinearRegression.h" - -#include -#include - -namespace android { - -LinearRegression::LinearRegression(size_t historySize) - : mHistorySize(historySize), - mCount(0), - mHistory(new Point[mHistorySize]), - mSumX(0.0), - mSumY(0.0) { -} - -LinearRegression::~LinearRegression() { - delete[] mHistory; - mHistory = NULL; -} - -void LinearRegression::addPoint(float x, float y) { - if (mCount == mHistorySize) { - const Point &oldest = mHistory[0]; - - mSumX -= oldest.mX; - mSumY -= oldest.mY; - - memmove(&mHistory[0], &mHistory[1], (mHistorySize - 1) * sizeof(Point)); - --mCount; - } - - Point *newest = &mHistory[mCount++]; - newest->mX = x; - newest->mY = y; - - mSumX += x; - mSumY += y; -} - -bool LinearRegression::approxLine(float *n1, float *n2, float *b) const { - static const float kEpsilon = 1.0E-4; - - if (mCount < 2) { - return false; - } - - float sumX2 = 0.0f; - float sumY2 = 0.0f; - float sumXY = 0.0f; - - float meanX = mSumX / (float)mCount; - float meanY = mSumY / (float)mCount; - - for (size_t i = 0; i < mCount; ++i) { - const Point &p = mHistory[i]; - - float x = p.mX - meanX; - float y = p.mY - meanY; - - sumX2 += x * x; - sumY2 += y * y; - sumXY += x * y; - } - - float T = sumX2 + sumY2; - float D = sumX2 * sumY2 - sumXY * sumXY; - float root = sqrt(T * T * 0.25 - D); - - float L1 = T * 0.5 - root; - - if (fabs(sumXY) > kEpsilon) { - *n1 = 1.0; - *n2 = (2.0 * L1 - sumX2) / sumXY; - - float mag = sqrt((*n1) * (*n1) + (*n2) * (*n2)); - - *n1 /= mag; - *n2 /= mag; - } else { - *n1 = 0.0; - *n2 = 1.0; - } - - *b = (*n1) * meanX + (*n2) * meanY; - - return true; -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/sink/LinearRegression.h b/media/libstagefright/wifi-display/sink/LinearRegression.h deleted 
file mode 100644 index ca6f5a1..0000000 --- a/media/libstagefright/wifi-display/sink/LinearRegression.h +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef LINEAR_REGRESSION_H_ - -#define LINEAR_REGRESSION_H_ - -#include -#include - -namespace android { - -// Helper class to fit a line to a set of points minimizing the sum of -// squared (orthogonal) distances from line to individual points. -struct LinearRegression { - LinearRegression(size_t historySize); - ~LinearRegression(); - - void addPoint(float x, float y); - - bool approxLine(float *n1, float *n2, float *b) const; - -private: - struct Point { - float mX, mY; - }; - - size_t mHistorySize; - size_t mCount; - Point *mHistory; - - float mSumX, mSumY; - - DISALLOW_EVIL_CONSTRUCTORS(LinearRegression); -}; - -} // namespace android - -#endif // LINEAR_REGRESSION_H_ diff --git a/media/libstagefright/wifi-display/sink/RTPSink.cpp b/media/libstagefright/wifi-display/sink/RTPSink.cpp deleted file mode 100644 index 3c90a1e..0000000 --- a/media/libstagefright/wifi-display/sink/RTPSink.cpp +++ /dev/null @@ -1,870 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "RTPSink" -#include - -#include "RTPSink.h" - -#include "ANetworkSession.h" - -#if USE_TUNNEL_RENDERER -#include "TunnelRenderer.h" -#define RENDERER_CLASS TunnelRenderer -#else -#include "DirectRenderer.h" -#define RENDERER_CLASS DirectRenderer -#endif - -#include -#include -#include -#include -#include -#include - -namespace android { - -struct RTPSink::Source : public RefBase { - Source(uint16_t seq, const sp &buffer, - const sp queueBufferMsg); - - bool updateSeq(uint16_t seq, const sp &buffer); - - void addReportBlock(uint32_t ssrc, const sp &buf); - -protected: - virtual ~Source(); - -private: - static const uint32_t kMinSequential = 2; - static const uint32_t kMaxDropout = 3000; - static const uint32_t kMaxMisorder = 100; - static const uint32_t kRTPSeqMod = 1u << 16; - - sp mQueueBufferMsg; - - uint16_t mMaxSeq; - uint32_t mCycles; - uint32_t mBaseSeq; - uint32_t mBadSeq; - uint32_t mProbation; - uint32_t mReceived; - uint32_t mExpectedPrior; - uint32_t mReceivedPrior; - - void initSeq(uint16_t seq); - void queuePacket(const sp &buffer); - - DISALLOW_EVIL_CONSTRUCTORS(Source); -}; - -//////////////////////////////////////////////////////////////////////////////// - -RTPSink::Source::Source( - uint16_t seq, const sp &buffer, - const sp queueBufferMsg) - : mQueueBufferMsg(queueBufferMsg), - mProbation(kMinSequential) { - initSeq(seq); - mMaxSeq = seq - 1; - - buffer->setInt32Data(mCycles | seq); - queuePacket(buffer); -} - -RTPSink::Source::~Source() { -} - -void RTPSink::Source::initSeq(uint16_t seq) { - mMaxSeq = seq; - mCycles = 0; - mBaseSeq = seq; - mBadSeq = kRTPSeqMod + 1; - mReceived = 0; - mExpectedPrior = 0; - mReceivedPrior = 0; -} - -bool RTPSink::Source::updateSeq(uint16_t seq, const sp &buffer) { - uint16_t udelta = seq - mMaxSeq; - - if (mProbation) { - // Startup phase - - if (seq == mMaxSeq + 1) { - buffer->setInt32Data(mCycles | seq); - queuePacket(buffer); - - --mProbation; - mMaxSeq = seq; - if (mProbation == 0) { - initSeq(seq); - ++mReceived; - - return true; - } - } else { - // Packet out of sequence, restart startup phase - - mProbation = kMinSequential - 1; - mMaxSeq = seq; - -#if 0 - mPackets.clear(); - mTotalBytesQueued = 0; - ALOGI("XXX cleared packets"); -#endif - - buffer->setInt32Data(mCycles | seq); - queuePacket(buffer); - } - - return false; - } - - if (udelta < kMaxDropout) { - // In order, with permissible gap. - - if (seq < mMaxSeq) { - // Sequence number wrapped - count another 64K cycle - mCycles += kRTPSeqMod; - } - - mMaxSeq = seq; - } else if (udelta <= kRTPSeqMod - kMaxMisorder) { - // The sequence number made a very large jump - - if (seq == mBadSeq) { - // Two sequential packets -- assume that the other side - // restarted without telling us so just re-sync - // (i.e. pretend this was the first packet) - - initSeq(seq); - } else { - mBadSeq = (seq + 1) & (kRTPSeqMod - 1); - - return false; - } - } else { - // Duplicate or reordered packet. 
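The updateSeq() logic above is the RFC 3550 Appendix A.1 bookkeeping: 16-bit RTP sequence numbers are extended to 32 bits by accumulating wrap-arounds in mCycles, and the extended value mCycles | seq is what gets stashed via setInt32Data(). A simplified standalone sketch of just the extension step (illustrative names; probation and misorder handling omitted):

#include <stdint.h>

struct SeqState {
    uint16_t maxSeq = 0;   // highest 16-bit sequence number seen so far
    uint32_t cycles = 0;   // completed 16-bit wrap-arounds, pre-shifted by 16
};

// Returns the 32-bit extended sequence number, counting another cycle each
// time the 16-bit counter wraps within the permissible dropout window.
static uint32_t extendSeqNo(SeqState &state, uint16_t seq) {
    const uint16_t kMaxDropout = 3000;
    uint16_t udelta = seq - state.maxSeq;   // modulo-2^16 distance

    if (udelta < kMaxDropout) {
        if (seq < state.maxSeq) {
            state.cycles += 1u << 16;       // sequence number wrapped
        }
        state.maxSeq = seq;
    }
    return state.cycles | seq;
}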
- } - - ++mReceived; - - buffer->setInt32Data(mCycles | seq); - queuePacket(buffer); - - return true; -} - -void RTPSink::Source::queuePacket(const sp &buffer) { - sp msg = mQueueBufferMsg->dup(); - msg->setBuffer("buffer", buffer); - msg->post(); -} - -void RTPSink::Source::addReportBlock( - uint32_t ssrc, const sp &buf) { - uint32_t extMaxSeq = mMaxSeq | mCycles; - uint32_t expected = extMaxSeq - mBaseSeq + 1; - - int64_t lost = (int64_t)expected - (int64_t)mReceived; - if (lost > 0x7fffff) { - lost = 0x7fffff; - } else if (lost < -0x800000) { - lost = -0x800000; - } - - uint32_t expectedInterval = expected - mExpectedPrior; - mExpectedPrior = expected; - - uint32_t receivedInterval = mReceived - mReceivedPrior; - mReceivedPrior = mReceived; - - int64_t lostInterval = expectedInterval - receivedInterval; - - uint8_t fractionLost; - if (expectedInterval == 0 || lostInterval <=0) { - fractionLost = 0; - } else { - fractionLost = (lostInterval << 8) / expectedInterval; - } - - uint8_t *ptr = buf->data() + buf->size(); - - ptr[0] = ssrc >> 24; - ptr[1] = (ssrc >> 16) & 0xff; - ptr[2] = (ssrc >> 8) & 0xff; - ptr[3] = ssrc & 0xff; - - ptr[4] = fractionLost; - - ptr[5] = (lost >> 16) & 0xff; - ptr[6] = (lost >> 8) & 0xff; - ptr[7] = lost & 0xff; - - ptr[8] = extMaxSeq >> 24; - ptr[9] = (extMaxSeq >> 16) & 0xff; - ptr[10] = (extMaxSeq >> 8) & 0xff; - ptr[11] = extMaxSeq & 0xff; - - // XXX TODO: - - ptr[12] = 0x00; // interarrival jitter - ptr[13] = 0x00; - ptr[14] = 0x00; - ptr[15] = 0x00; - - ptr[16] = 0x00; // last SR - ptr[17] = 0x00; - ptr[18] = 0x00; - ptr[19] = 0x00; - - ptr[20] = 0x00; // delay since last SR - ptr[21] = 0x00; - ptr[22] = 0x00; - ptr[23] = 0x00; -} - -//////////////////////////////////////////////////////////////////////////////// - -RTPSink::RTPSink( - const sp &netSession, - const sp &bufferProducer, - const sp ¬ify) - : mNetSession(netSession), - mSurfaceTex(bufferProducer), - mNotify(notify), - mUsingTCPTransport(false), - mUsingTCPInterleaving(false), - mRTPPort(0), - mRTPSessionID(0), - mRTCPSessionID(0), - mRTPClientSessionID(0), - mRTCPClientSessionID(0), - mFirstArrivalTimeUs(-1ll), - mNumPacketsReceived(0ll), - mRegression(1000), - mMaxDelayMs(-1ll) { -} - -RTPSink::~RTPSink() { - if (mRTCPClientSessionID != 0) { - mNetSession->destroySession(mRTCPClientSessionID); - } - - if (mRTPClientSessionID != 0) { - mNetSession->destroySession(mRTPClientSessionID); - } - - if (mRTCPSessionID != 0) { - mNetSession->destroySession(mRTCPSessionID); - } - - if (mRTPSessionID != 0) { - mNetSession->destroySession(mRTPSessionID); - } -} - -status_t RTPSink::init(bool usingTCPTransport, bool usingTCPInterleaving) { - mUsingTCPTransport = usingTCPTransport; - mUsingTCPInterleaving = usingTCPInterleaving; - - if (usingTCPInterleaving) { - return OK; - } - - int clientRtp; - - sp rtpNotify = new AMessage(kWhatRTPNotify, id()); - sp rtcpNotify = new AMessage(kWhatRTCPNotify, id()); - for (clientRtp = 15550;; clientRtp += 2) { - int32_t rtpSession; - status_t err; - struct in_addr ifaceAddr; - if (usingTCPTransport) { - ifaceAddr.s_addr = INADDR_ANY; - err = mNetSession->createTCPDatagramSession( - ifaceAddr, clientRtp, rtpNotify, &rtpSession); - } else { - err = mNetSession->createUDPSession( - clientRtp, rtpNotify, &rtpSession); - } - - if (err != OK) { - ALOGI("failed to create RTP socket on port %d", clientRtp); - continue; - } - - int32_t rtcpSession; - if (usingTCPTransport) { - err = mNetSession->createTCPDatagramSession( - ifaceAddr, clientRtp + 1, rtcpNotify, &rtcpSession); - 
} else { - err = mNetSession->createUDPSession( - clientRtp + 1, rtcpNotify, &rtcpSession); - } - - if (err == OK) { - mRTPPort = clientRtp; - mRTPSessionID = rtpSession; - mRTCPSessionID = rtcpSession; - break; - } - - ALOGI("failed to create RTCP socket on port %d", clientRtp + 1); - mNetSession->destroySession(rtpSession); - } - - if (mRTPPort == 0) { - return UNKNOWN_ERROR; - } - - return OK; -} - -int32_t RTPSink::getRTPPort() const { - return mRTPPort; -} - -void RTPSink::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatRTPNotify: - case kWhatRTCPNotify: - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - ALOGE("An error occurred in session %d (%d, '%s/%s').", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - mNetSession->destroySession(sessionID); - - if (sessionID == mRTPSessionID) { - mRTPSessionID = 0; - } else if (sessionID == mRTCPSessionID) { - mRTCPSessionID = 0; - } - break; - } - - case ANetworkSession::kWhatDatagram: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - sp data; - CHECK(msg->findBuffer("data", &data)); - - status_t err; - if (msg->what() == kWhatRTPNotify) { - err = parseRTP(data); - } else { - err = parseRTCP(data); - } - break; - } - - case ANetworkSession::kWhatClientConnected: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - ALOGI("TCP session %d now connected", sessionID); - - int32_t serverPort; - CHECK(msg->findInt32("server-port", &serverPort)); - - if (serverPort == mRTPPort) { - mRTPClientSessionID = sessionID; - } else { - CHECK_EQ(serverPort, mRTPPort + 1); - mRTCPClientSessionID = sessionID; - } - break; - } - - default: - TRESPASS(); - } - break; - } - - case kWhatSendRR: - { - onSendRR(); - break; - } - - case kWhatPacketLost: - { - onPacketLost(msg); - break; - } - - case kWhatInject: - { - int32_t isRTP; - CHECK(msg->findInt32("isRTP", &isRTP)); - - sp buffer; - CHECK(msg->findBuffer("buffer", &buffer)); - - status_t err; - if (isRTP) { - err = parseRTP(buffer); - } else { - err = parseRTCP(buffer); - } - break; - } - - default: - TRESPASS(); - } -} - -status_t RTPSink::injectPacket(bool isRTP, const sp &buffer) { - sp msg = new AMessage(kWhatInject, id()); - msg->setInt32("isRTP", isRTP); - msg->setBuffer("buffer", buffer); - msg->post(); - - return OK; -} - -status_t RTPSink::parseRTP(const sp &buffer) { - size_t size = buffer->size(); - if (size < 12) { - // Too short to be a valid RTP header. - return ERROR_MALFORMED; - } - - const uint8_t *data = buffer->data(); - - if ((data[0] >> 6) != 2) { - // Unsupported version. - return ERROR_UNSUPPORTED; - } - - if (data[0] & 0x20) { - // Padding present. - - size_t paddingLength = data[size - 1]; - - if (paddingLength + 12 > size) { - // If we removed this much padding we'd end up with something - // that's too short to be a valid RTP header. - return ERROR_MALFORMED; - } - - size -= paddingLength; - } - - int numCSRCs = data[0] & 0x0f; - - size_t payloadOffset = 12 + 4 * numCSRCs; - - if (size < payloadOffset) { - // Not enough data to fit the basic header and all the CSRC entries. - return ERROR_MALFORMED; - } - - if (data[0] & 0x10) { - // Header eXtension present. 
- - if (size < payloadOffset + 4) { - // Not enough data to fit the basic header, all CSRC entries - // and the first 4 bytes of the extension header. - - return ERROR_MALFORMED; - } - - const uint8_t *extensionData = &data[payloadOffset]; - - size_t extensionLength = - 4 * (extensionData[2] << 8 | extensionData[3]); - - if (size < payloadOffset + 4 + extensionLength) { - return ERROR_MALFORMED; - } - - payloadOffset += 4 + extensionLength; - } - - uint32_t srcId = U32_AT(&data[8]); - uint32_t rtpTime = U32_AT(&data[4]); - uint16_t seqNo = U16_AT(&data[2]); - -#if 0 - int64_t arrivalTimeUs; - CHECK(buffer->meta()->findInt64("arrivalTimeUs", &arrivalTimeUs)); - - if (mFirstArrivalTimeUs < 0ll) { - mFirstArrivalTimeUs = arrivalTimeUs; - } - arrivalTimeUs -= mFirstArrivalTimeUs; - - int64_t arrivalTimeMedia = (arrivalTimeUs * 9ll) / 100ll; - - ALOGV("seqNo: %d, SSRC 0x%08x, diff %lld", - seqNo, srcId, rtpTime - arrivalTimeMedia); - - mRegression.addPoint((float)rtpTime, (float)arrivalTimeMedia); - - ++mNumPacketsReceived; - - float n1, n2, b; - if (mRegression.approxLine(&n1, &n2, &b)) { - ALOGV("Line %lld: %.2f %.2f %.2f, slope %.2f", - mNumPacketsReceived, n1, n2, b, -n1 / n2); - - float expectedArrivalTimeMedia = (b - n1 * (float)rtpTime) / n2; - float latenessMs = (arrivalTimeMedia - expectedArrivalTimeMedia) / 90.0; - - if (mMaxDelayMs < 0ll || latenessMs > mMaxDelayMs) { - mMaxDelayMs = latenessMs; - ALOGI("packet was %.2f ms late", latenessMs); - } - } -#endif - - sp meta = buffer->meta(); - meta->setInt32("ssrc", srcId); - meta->setInt32("rtp-time", rtpTime); - meta->setInt32("PT", data[1] & 0x7f); - meta->setInt32("M", data[1] >> 7); - - buffer->setRange(payloadOffset, size - payloadOffset); - - ssize_t index = mSources.indexOfKey(srcId); - if (index < 0) { - if (mRenderer == NULL) { - sp notifyLost = new AMessage(kWhatPacketLost, id()); - notifyLost->setInt32("ssrc", srcId); - - mRenderer = new RENDERER_CLASS(notifyLost, mSurfaceTex); - looper()->registerHandler(mRenderer); - } - - sp queueBufferMsg = - new AMessage(RENDERER_CLASS::kWhatQueueBuffer, mRenderer->id()); - - sp source = new Source(seqNo, buffer, queueBufferMsg); - mSources.add(srcId, source); - } else { - mSources.valueAt(index)->updateSeq(seqNo, buffer); - } - - return OK; -} - -status_t RTPSink::parseRTCP(const sp &buffer) { - const uint8_t *data = buffer->data(); - size_t size = buffer->size(); - - while (size > 0) { - if (size < 8) { - // Too short to be a valid RTCP header - return ERROR_MALFORMED; - } - - if ((data[0] >> 6) != 2) { - // Unsupported version. - return ERROR_UNSUPPORTED; - } - - if (data[0] & 0x20) { - // Padding present. - - size_t paddingLength = data[size - 1]; - - if (paddingLength + 12 > size) { - // If we removed this much padding we'd end up with something - // that's too short to be a valid RTP header. - return ERROR_MALFORMED; - } - - size -= paddingLength; - } - - size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4; - - if (size < headerLength) { - // Only received a partial packet? 
- return ERROR_MALFORMED; - } - - switch (data[1]) { - case 200: - { - parseSR(data, headerLength); - break; - } - - case 201: // RR - case 202: // SDES - case 204: // APP - break; - - case 205: // TSFB (transport layer specific feedback) - case 206: // PSFB (payload specific feedback) - // hexdump(data, headerLength); - break; - - case 203: - { - parseBYE(data, headerLength); - break; - } - - default: - { - ALOGW("Unknown RTCP packet type %u of size %d", - (unsigned)data[1], headerLength); - break; - } - } - - data += headerLength; - size -= headerLength; - } - - return OK; -} - -status_t RTPSink::parseBYE(const uint8_t *data, size_t size) { - size_t SC = data[0] & 0x3f; - - if (SC == 0 || size < (4 + SC * 4)) { - // Packet too short for the minimal BYE header. - return ERROR_MALFORMED; - } - - uint32_t id = U32_AT(&data[4]); - - return OK; -} - -status_t RTPSink::parseSR(const uint8_t *data, size_t size) { - size_t RC = data[0] & 0x1f; - - if (size < (7 + RC * 6) * 4) { - // Packet too short for the minimal SR header. - return ERROR_MALFORMED; - } - - uint32_t id = U32_AT(&data[4]); - uint64_t ntpTime = U64_AT(&data[8]); - uint32_t rtpTime = U32_AT(&data[16]); - - ALOGV("SR: ssrc 0x%08x, ntpTime 0x%016llx, rtpTime 0x%08x", - id, ntpTime, rtpTime); - - return OK; -} - -status_t RTPSink::connect( - const char *host, int32_t remoteRtpPort, int32_t remoteRtcpPort) { - ALOGI("connecting RTP/RTCP sockets to %s:{%d,%d}", - host, remoteRtpPort, remoteRtcpPort); - - status_t err = - mNetSession->connectUDPSession(mRTPSessionID, host, remoteRtpPort); - - if (err != OK) { - return err; - } - - err = mNetSession->connectUDPSession(mRTCPSessionID, host, remoteRtcpPort); - - if (err != OK) { - return err; - } - -#if 0 - sp buf = new ABuffer(1500); - memset(buf->data(), 0, buf->size()); - - mNetSession->sendRequest( - mRTPSessionID, buf->data(), buf->size()); - - mNetSession->sendRequest( - mRTCPSessionID, buf->data(), buf->size()); -#endif - - if (!mUsingTCPTransport) { - scheduleSendRR(); - } - - return OK; -} - -void RTPSink::scheduleSendRR() { - (new AMessage(kWhatSendRR, id()))->post(2000000ll); -} - -void RTPSink::addSDES(const sp &buffer) { - uint8_t *data = buffer->data() + buffer->size(); - data[0] = 0x80 | 1; - data[1] = 202; // SDES - data[4] = 0xde; // SSRC - data[5] = 0xad; - data[6] = 0xbe; - data[7] = 0xef; - - size_t offset = 8; - - data[offset++] = 1; // CNAME - - AString cname = "stagefright@somewhere"; - data[offset++] = cname.size(); - - memcpy(&data[offset], cname.c_str(), cname.size()); - offset += cname.size(); - - data[offset++] = 6; // TOOL - - AString tool = "stagefright/1.0"; - data[offset++] = tool.size(); - - memcpy(&data[offset], tool.c_str(), tool.size()); - offset += tool.size(); - - data[offset++] = 0; - - if ((offset % 4) > 0) { - size_t count = 4 - (offset % 4); - switch (count) { - case 3: - data[offset++] = 0; - case 2: - data[offset++] = 0; - case 1: - data[offset++] = 0; - } - } - - size_t numWords = (offset / 4) - 1; - data[2] = numWords >> 8; - data[3] = numWords & 0xff; - - buffer->setRange(buffer->offset(), buffer->size() + offset); -} - -void RTPSink::onSendRR() { - sp buf = new ABuffer(1500); - buf->setRange(0, 0); - - uint8_t *ptr = buf->data(); - ptr[0] = 0x80 | 0; - ptr[1] = 201; // RR - ptr[2] = 0; - ptr[3] = 1; - ptr[4] = 0xde; // SSRC - ptr[5] = 0xad; - ptr[6] = 0xbe; - ptr[7] = 0xef; - - buf->setRange(0, 8); - - size_t numReportBlocks = 0; - for (size_t i = 0; i < mSources.size(); ++i) { - uint32_t ssrc = mSources.keyAt(i); - sp source = 
mSources.valueAt(i); - - if (numReportBlocks > 31 || buf->size() + 24 > buf->capacity()) { - // Cannot fit another report block. - break; - } - - source->addReportBlock(ssrc, buf); - ++numReportBlocks; - } - - ptr[0] |= numReportBlocks; // 5 bit - - size_t sizeInWordsMinus1 = 1 + 6 * numReportBlocks; - ptr[2] = sizeInWordsMinus1 >> 8; - ptr[3] = sizeInWordsMinus1 & 0xff; - - buf->setRange(0, (sizeInWordsMinus1 + 1) * 4); - - addSDES(buf); - - mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); - - scheduleSendRR(); -} - -void RTPSink::onPacketLost(const sp &msg) { - if (mUsingTCPTransport) { - ALOGW("huh? lost a packet even though using reliable transport?"); - return; - } - - uint32_t srcId; - CHECK(msg->findInt32("ssrc", (int32_t *)&srcId)); - - int32_t seqNo; - CHECK(msg->findInt32("seqNo", &seqNo)); - - int32_t blp = 0; - - sp buf = new ABuffer(16); - buf->setRange(0, 0); - - uint8_t *ptr = buf->data(); - ptr[0] = 0x80 | 1; // generic NACK - ptr[1] = 205; // TSFB - ptr[2] = 0; - ptr[3] = 3; - ptr[4] = 0xde; // sender SSRC - ptr[5] = 0xad; - ptr[6] = 0xbe; - ptr[7] = 0xef; - ptr[8] = (srcId >> 24) & 0xff; - ptr[9] = (srcId >> 16) & 0xff; - ptr[10] = (srcId >> 8) & 0xff; - ptr[11] = (srcId & 0xff); - ptr[12] = (seqNo >> 8) & 0xff; - ptr[13] = (seqNo & 0xff); - ptr[14] = (blp >> 8) & 0xff; - ptr[15] = (blp & 0xff); - - buf->setRange(0, 16); - - mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/sink/RTPSink.h b/media/libstagefright/wifi-display/sink/RTPSink.h deleted file mode 100644 index 4706c6d..0000000 --- a/media/libstagefright/wifi-display/sink/RTPSink.h +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef RTP_SINK_H_ - -#define RTP_SINK_H_ - -#include - -#include "LinearRegression.h" - -#include - -#define USE_TUNNEL_RENDERER 0 - -namespace android { - -struct ABuffer; -struct ANetworkSession; - -#if USE_TUNNEL_RENDERER -struct TunnelRenderer; -#else -struct DirectRenderer; -#endif - -// Creates a pair of sockets for RTP/RTCP traffic, instantiates a renderer -// for incoming transport stream data and occasionally sends statistics over -// the RTCP channel. -struct RTPSink : public AHandler { - RTPSink(const sp &netSession, - const sp &bufferProducer, - const sp ¬ify); - - // If TCP interleaving is used, no UDP sockets are created, instead - // incoming RTP/RTCP packets (arriving on the RTSP control connection) - // are manually injected by WifiDisplaySink. 
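For the interleaved case described above, RTSP carries RTP/RTCP inline on the control connection by prefixing each packet with a '$' byte, a one-byte channel id (0 for RTP, 1 for RTCP here), and a two-byte big-endian payload length (RFC 2326 §10.12). A minimal header check as a standalone sketch (illustrative names, not from the patch):

#include <stdint.h>
#include <stddef.h>

// Parses the 4-byte interleaved-frame header that precedes each RTP/RTCP
// packet on the RTSP control connection: '$', channel id, 16-bit length.
// Returns false if "data" does not start with a complete frame header.
static bool parseInterleavedHeader(
        const uint8_t *data, size_t size,
        uint8_t *channel, uint16_t *payloadLength) {
    if (size < 4 || data[0] != '$') {
        return false;
    }
    *channel = data[1];                        // 0 = RTP, 1 = RTCP in this setup
    *payloadLength = (data[2] << 8) | data[3];
    return true;
}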
- status_t init(bool usingTCPTransport, bool usingTCPInterleaving); - - status_t connect( - const char *host, int32_t remoteRtpPort, int32_t remoteRtcpPort); - - int32_t getRTPPort() const; - - status_t injectPacket(bool isRTP, const sp &buffer); - -protected: - virtual void onMessageReceived(const sp &msg); - virtual ~RTPSink(); - -private: - enum { - kWhatRTPNotify, - kWhatRTCPNotify, - kWhatSendRR, - kWhatPacketLost, - kWhatInject, - }; - - struct Source; - struct StreamSource; - - sp mNetSession; - sp mSurfaceTex; - sp mNotify; - KeyedVector > mSources; - - bool mUsingTCPTransport; - bool mUsingTCPInterleaving; - - int32_t mRTPPort; - - int32_t mRTPSessionID; // in TCP unicast mode these are just server - int32_t mRTCPSessionID; // sockets. No data is transferred through them. - - int32_t mRTPClientSessionID; // in TCP unicast mode - int32_t mRTCPClientSessionID; - - int64_t mFirstArrivalTimeUs; - int64_t mNumPacketsReceived; - LinearRegression mRegression; - int64_t mMaxDelayMs; - -#if USE_TUNNEL_RENDERER - sp mRenderer; -#else - sp mRenderer; -#endif - - status_t parseRTP(const sp &buffer); - status_t parseRTCP(const sp &buffer); - status_t parseBYE(const uint8_t *data, size_t size); - status_t parseSR(const uint8_t *data, size_t size); - - void addSDES(const sp &buffer); - void onSendRR(); - void onPacketLost(const sp &msg); - void scheduleSendRR(); - - DISALLOW_EVIL_CONSTRUCTORS(RTPSink); -}; - -} // namespace android - -#endif // RTP_SINK_H_ diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp index 75f9d73..d9d8a76 100644 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp @@ -158,175 +158,17 @@ void TunnelRenderer::StreamSource::doSomeWork() { //////////////////////////////////////////////////////////////////////////////// TunnelRenderer::TunnelRenderer( - const sp ¬ifyLost, const sp &bufferProducer) - : mNotifyLost(notifyLost), - mSurfaceTex(bufferProducer), - mTotalBytesQueued(0ll), - mLastDequeuedExtSeqNo(-1), - mFirstFailedAttemptUs(-1ll), - mRequestedRetransmission(false) { + : mSurfaceTex(bufferProducer), + mStartup(true) { } TunnelRenderer::~TunnelRenderer() { destroyPlayer(); } -void TunnelRenderer::queueBuffer(const sp &buffer) { - Mutex::Autolock autoLock(mLock); - - mTotalBytesQueued += buffer->size(); - - if (mPackets.empty()) { - mPackets.push_back(buffer); - return; - } - - int32_t newExtendedSeqNo = buffer->int32Data(); - - List >::iterator firstIt = mPackets.begin(); - List >::iterator it = --mPackets.end(); - for (;;) { - int32_t extendedSeqNo = (*it)->int32Data(); - - if (extendedSeqNo == newExtendedSeqNo) { - // Duplicate packet. - return; - } - - if (extendedSeqNo < newExtendedSeqNo) { - // Insert new packet after the one at "it". - mPackets.insert(++it, buffer); - return; - } - - if (it == firstIt) { - // Insert new packet before the first existing one. - mPackets.insert(it, buffer); - return; - } - - --it; - } -} - -sp TunnelRenderer::dequeueBuffer() { - Mutex::Autolock autoLock(mLock); - - sp buffer; - int32_t extSeqNo; - while (!mPackets.empty()) { - buffer = *mPackets.begin(); - extSeqNo = buffer->int32Data(); - - if (mLastDequeuedExtSeqNo < 0 || extSeqNo > mLastDequeuedExtSeqNo) { - break; - } - - // This is a retransmission of a packet we've already returned. 
- - mTotalBytesQueued -= buffer->size(); - buffer.clear(); - extSeqNo = -1; - - mPackets.erase(mPackets.begin()); - } - - if (mPackets.empty()) { - if (mFirstFailedAttemptUs < 0ll) { - mFirstFailedAttemptUs = ALooper::GetNowUs(); - mRequestedRetransmission = false; - } else { - ALOGV("no packets available for %.2f secs", - (ALooper::GetNowUs() - mFirstFailedAttemptUs) / 1E6); - } - - return NULL; - } - - if (mLastDequeuedExtSeqNo < 0 || extSeqNo == mLastDequeuedExtSeqNo + 1) { - if (mRequestedRetransmission) { - ALOGI("Recovered after requesting retransmission of %d", - extSeqNo); - } - - mLastDequeuedExtSeqNo = extSeqNo; - mFirstFailedAttemptUs = -1ll; - mRequestedRetransmission = false; - - mPackets.erase(mPackets.begin()); - - mTotalBytesQueued -= buffer->size(); - - return buffer; - } - - if (mFirstFailedAttemptUs < 0ll) { - mFirstFailedAttemptUs = ALooper::GetNowUs(); - - ALOGV("failed to get the correct packet the first time."); - return NULL; - } - - if (mFirstFailedAttemptUs + 50000ll > ALooper::GetNowUs()) { - // We're willing to wait a little while to get the right packet. - -#if 1 - if (!mRequestedRetransmission) { - ALOGI("requesting retransmission of extSeqNo %d (seqNo %d)", - mLastDequeuedExtSeqNo + 1, - (mLastDequeuedExtSeqNo + 1) & 0xffff); - - sp notify = mNotifyLost->dup(); - notify->setInt32("seqNo", (mLastDequeuedExtSeqNo + 1) & 0xffff); - notify->post(); - - mRequestedRetransmission = true; - } else -#endif - { - ALOGV("still waiting for the correct packet to arrive."); - } - - return NULL; - } - - ALOGI("dropping packet. extSeqNo %d didn't arrive in time", - mLastDequeuedExtSeqNo + 1); - - // Permanent failure, we never received the packet. - mLastDequeuedExtSeqNo = extSeqNo; - mFirstFailedAttemptUs = -1ll; - mRequestedRetransmission = false; - - mTotalBytesQueued -= buffer->size(); - - mPackets.erase(mPackets.begin()); - - return buffer; -} - void TunnelRenderer::onMessageReceived(const sp &msg) { switch (msg->what()) { - case kWhatQueueBuffer: - { - sp buffer; - CHECK(msg->findBuffer("buffer", &buffer)); - - queueBuffer(buffer); - - if (mStreamSource == NULL) { - if (mTotalBytesQueued > 0ll) { - initPlayer(); - } else { - ALOGI("Have %lld bytes queued...", mTotalBytesQueued); - } - } else { - mStreamSource->doSomeWork(); - } - break; - } - default: TRESPASS(); } @@ -396,5 +238,31 @@ void TunnelRenderer::destroyPlayer() { } } +void TunnelRenderer::queueBuffer(const sp &buffer) { + { + Mutex::Autolock autoLock(mLock); + mBuffers.push_back(buffer); + } + + if (mStartup) { + initPlayer(); + mStartup = false; + } + + mStreamSource->doSomeWork(); +} + +sp TunnelRenderer::dequeueBuffer() { + Mutex::Autolock autoLock(mLock); + if (mBuffers.empty()) { + return NULL; + } + + sp buf = *mBuffers.begin(); + mBuffers.erase(mBuffers.begin()); + + return buf; +} + } // namespace android diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.h b/media/libstagefright/wifi-display/sink/TunnelRenderer.h index 52e6e66..8e96665 100644 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.h +++ b/media/libstagefright/wifi-display/sink/TunnelRenderer.h @@ -34,16 +34,11 @@ struct IStreamListener; // and sends the resulting transport stream to a mediaplayer instance // for playback. 
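The reworked TunnelRenderer above replaces the reordering and retransmission machinery with a mutex-protected FIFO: queueBuffer() appends and starts the player on the first buffer, dequeueBuffer() pops or returns NULL. The same pattern as a standalone sketch in standard C++ (illustrative names, not from the patch):

#include <list>
#include <mutex>

// A buffer queue in the spirit of the simplified TunnelRenderer: the network
// thread pushes packets, the stream source pops them and gets false when
// nothing is pending.
template <typename T>
struct PacketQueue {
    void push(const T &item) {
        std::lock_guard<std::mutex> lock(mLock);
        mItems.push_back(item);
    }

    bool pop(T *out) {
        std::lock_guard<std::mutex> lock(mLock);
        if (mItems.empty()) {
            return false;
        }
        *out = mItems.front();
        mItems.pop_front();
        return true;
    }

private:
    std::mutex mLock;
    std::list<T> mItems;
};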
struct TunnelRenderer : public AHandler { - TunnelRenderer( - const sp ¬ifyLost, - const sp &bufferProducer); + TunnelRenderer(const sp &bufferProducer); + void queueBuffer(const sp &buffer); sp dequeueBuffer(); - enum { - kWhatQueueBuffer, - }; - protected: virtual void onMessageReceived(const sp &msg); virtual ~TunnelRenderer(); @@ -54,11 +49,10 @@ private: mutable Mutex mLock; - sp mNotifyLost; sp mSurfaceTex; - List > mPackets; - int64_t mTotalBytesQueued; + bool mStartup; + List > mBuffers; sp mComposerClient; sp mSurfaceControl; @@ -67,15 +61,9 @@ private: sp mPlayer; sp mStreamSource; - int32_t mLastDequeuedExtSeqNo; - int64_t mFirstFailedAttemptUs; - bool mRequestedRetransmission; - void initPlayer(); void destroyPlayer(); - void queueBuffer(const sp &buffer); - DISALLOW_EVIL_CONSTRUCTORS(TunnelRenderer); }; diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index 55581a6..a6f58cd 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -19,8 +19,11 @@ #include #include "WifiDisplaySink.h" + +#include "DirectRenderer.h" +#include "MediaReceiver.h" #include "ParsedMessage.h" -#include "RTPSink.h" +#include "TunnelRenderer.h" #include #include @@ -42,7 +45,8 @@ WifiDisplaySink::WifiDisplaySink( mUsingTCPTransport(false), mUsingTCPInterleaving(false), mSessionID(0), - mNextCSeq(1) { + mNextCSeq(1), + mIDRFrameRequestPending(false) { #if 1 // We support any and all resolutions, but prefer 720p30 mSinkSupportedVideoFormats.setNativeResolution( @@ -50,11 +54,11 @@ WifiDisplaySink::WifiDisplaySink( mSinkSupportedVideoFormats.enableAll(); #else - // We only support 800 x 600 p60. + // We only support 640 x 360 p30. 
mSinkSupportedVideoFormats.disableAll(); mSinkSupportedVideoFormats.setNativeResolution( - VideoFormats::RESOLUTION_VESA, 1); // 800 x 600 p60 + VideoFormats::RESOLUTION_HH, 6); // 640 x 360 p30 #endif } @@ -212,20 +216,6 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { break; } - case ANetworkSession::kWhatBinaryData: - { - CHECK(mUsingTCPInterleaving); - - int32_t channel; - CHECK(msg->findInt32("channel", &channel)); - - sp data; - CHECK(msg->findBuffer("data", &data)); - - mRTPSink->injectPacket(channel == 0 /* isRTP */, data); - break; - } - default: TRESPASS(); } @@ -238,15 +228,80 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { break; } - case kWhatRequestIDRFrame: + case kWhatMediaReceiverNotify: { - ALOGI("requesting IDR frame"); - sendIDRFrameRequest(mSessionID); + onMediaReceiverNotify(msg); break; } - case kWhatRTPSinkNotify: + default: + TRESPASS(); + } +} + +void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case MediaReceiver::kWhatInitDone: { + status_t err; + CHECK(msg->findInt32("err", &err)); + + ALOGI("MediaReceiver initialization completed w/ err %d", err); + break; + } + + case MediaReceiver::kWhatError: + { + status_t err; + CHECK(msg->findInt32("err", &err)); + + ALOGE("MediaReceiver signaled error %d", err); + break; + } + + case MediaReceiver::kWhatAccessUnit: + { + if (mRenderer == NULL) { +#if USE_TUNNEL_RENDERER + mRenderer = new TunnelRenderer(mSurfaceTex); +#else + mRenderer = new DirectRenderer(mSurfaceTex); +#endif + + looper()->registerHandler(mRenderer); + } + + sp accessUnit; + CHECK(msg->findBuffer("accessUnit", &accessUnit)); + +#if USE_TUNNEL_RENDERER + mRenderer->queueBuffer(accessUnit); +#else + size_t trackIndex; + CHECK(msg->findSize("trackIndex", &trackIndex)); + + sp format; + if (msg->findMessage("format", &format)) { + mRenderer->setFormat(trackIndex, format); + } + + mRenderer->queueAccessUnit(trackIndex, accessUnit); +#endif + break; + } + + case MediaReceiver::kWhatPacketLost: + { +#if 0 + if (!mIDRFrameRequestPending) { + ALOGI("requesting IDR frame"); + + sendIDRFrameRequest(mSessionID); + } +#endif break; } @@ -381,7 +436,8 @@ status_t WifiDisplaySink::configureTransport(const sp &msg) { ALOGW("Server picked an odd numbered RTP port."); } - return mRTPSink->connect(sourceHost.c_str(), rtpPort, rtcpPort); + return mMediaReceiver->connectTrack( + 0 /* trackIndex */, sourceHost.c_str(), rtpPort, rtcpPort); } status_t WifiDisplaySink::onReceivePlayResponse( @@ -402,6 +458,9 @@ status_t WifiDisplaySink::onReceivePlayResponse( status_t WifiDisplaySink::onReceiveIDRFrameRequestResponse( int32_t sessionID, const sp &msg) { + CHECK(mIDRFrameRequestPending); + mIDRFrameRequestPending = false; + return OK; } @@ -539,16 +598,48 @@ void WifiDisplaySink::onGetParameterRequest( } status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) { - sp notify = new AMessage(kWhatRTPSinkNotify, id()); + sp notify = new AMessage(kWhatMediaReceiverNotify, id()); + + mMediaReceiverLooper = new ALooper; + mMediaReceiverLooper->setName("media_receiver"); + + mMediaReceiverLooper->start( + false /* runOnCallingThread */, + false /* canCallJava */, + PRIORITY_AUDIO); + + mMediaReceiver = new MediaReceiver(mNetSession, notify); + mMediaReceiverLooper->registerHandler(mMediaReceiver); - mRTPSink = new RTPSink(mNetSession, mSurfaceTex, notify); - looper()->registerHandler(mRTPSink); + RTPReceiver::TransportMode mode = RTPReceiver::TRANSPORT_UDP; + 
if (mUsingTCPTransport) { + if (mUsingTCPInterleaving) { + mode = RTPReceiver::TRANSPORT_TCP_INTERLEAVED; + } else { + mode = RTPReceiver::TRANSPORT_TCP; + } + } - status_t err = mRTPSink->init(mUsingTCPTransport, mUsingTCPInterleaving); + int32_t localRTPPort; + status_t err = mMediaReceiver->addTrack(mode, &localRTPPort); + + if (err == OK) { + err = mMediaReceiver->initAsync( +#if USE_TUNNEL_RENDERER + MediaReceiver::MODE_TRANSPORT_STREAM_RAW +#else + MediaReceiver::MODE_TRANSPORT_STREAM +#endif + ); + } if (err != OK) { - looper()->unregisterHandler(mRTPSink->id()); - mRTPSink.clear(); + mMediaReceiverLooper->unregisterHandler(mMediaReceiver->id()); + mMediaReceiver.clear(); + + mMediaReceiverLooper->stop(); + mMediaReceiverLooper.clear(); + return err; } @@ -556,17 +647,19 @@ status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) { AppendCommonResponse(&request, mNextCSeq); - if (mUsingTCPInterleaving) { + if (mode == RTPReceiver::TRANSPORT_TCP_INTERLEAVED) { request.append("Transport: RTP/AVP/TCP;interleaved=0-1\r\n"); + } else if (mode == RTPReceiver::TRANSPORT_TCP) { + request.append( + StringPrintf( + "Transport: RTP/AVP/TCP;unicast;client_port=%d\r\n", + localRTPPort)); } else { - int32_t rtpPort = mRTPSink->getRTPPort(); - request.append( StringPrintf( - "Transport: RTP/AVP/%s;unicast;client_port=%d-%d\r\n", - mUsingTCPTransport ? "TCP" : "UDP", - rtpPort, - rtpPort + 1)); + "Transport: RTP/AVP/UDP;unicast;client_port=%d-%d\r\n", + localRTPPort, + localRTPPort + 1)); } request.append("\r\n"); @@ -611,6 +704,8 @@ status_t WifiDisplaySink::sendPlay(int32_t sessionID, const char *uri) { } status_t WifiDisplaySink::sendIDRFrameRequest(int32_t sessionID) { + CHECK(!mIDRFrameRequestPending); + AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n"; AppendCommonResponse(&request, mNextCSeq); @@ -636,6 +731,8 @@ status_t WifiDisplaySink::sendIDRFrameRequest(int32_t sessionID) { ++mNextCSeq; + mIDRFrameRequestPending = true; + return OK; } diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h index 8b5ff6b..01af58b 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h @@ -28,8 +28,12 @@ namespace android { struct AMessage; +struct DirectRenderer; +struct MediaReceiver; struct ParsedMessage; -struct RTPSink; +struct TunnelRenderer; + +#define USE_TUNNEL_RENDERER 0 // Represents the RTSP client acting as a wifi display sink. 
// Connects to a wifi display source and renders the incoming @@ -68,8 +72,7 @@ private: kWhatStart, kWhatRTSPNotify, kWhatStop, - kWhatRequestIDRFrame, - kWhatRTPSinkNotify, + kWhatMediaReceiverNotify, }; struct ResponseID { @@ -100,10 +103,20 @@ private: KeyedVector mResponseHandlers; - sp mRTPSink; + sp mMediaReceiverLooper; + sp mMediaReceiver; + +#if USE_TUNNEL_RENDERER + sp mRenderer; +#else + sp mRenderer; +#endif + AString mPlaybackSessionID; int32_t mPlaybackSessionTimeoutSecs; + bool mIDRFrameRequestPending; + status_t sendM2(int32_t sessionID); status_t sendSetup(int32_t sessionID, const char *uri); status_t sendPlay(int32_t sessionID, const char *uri); @@ -143,6 +156,8 @@ private: int32_t cseq, const sp &data); + void onMediaReceiverNotify(const sp &msg); + void sendErrorResponse( int32_t sessionID, const char *errorDetail, diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index ede4e60..ea195b3 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -23,8 +23,6 @@ #include "Converter.h" #include "MediaPuller.h" #include "RepeaterSource.h" -#include "Sender.h" -#include "TSPacketizer.h" #include "include/avc_utils.h" #include "WifiDisplaySource.h" @@ -65,9 +63,9 @@ struct WifiDisplaySource::PlaybackSession::Track : public AHandler { bool isAudio() const; const sp &converter() const; - ssize_t packetizerTrackIndex() const; - void setPacketizerTrackIndex(size_t index); + ssize_t mediaSenderTrackIndex() const; + void setMediaSenderTrackIndex(size_t index); status_t start(); void stopAsync(); @@ -107,7 +105,7 @@ private: sp mMediaPuller; sp mConverter; bool mStarted; - ssize_t mPacketizerTrackIndex; + ssize_t mMediaSenderTrackIndex; bool mIsAudio; List > mQueuedAccessUnits; sp mRepeaterSource; @@ -131,7 +129,6 @@ WifiDisplaySource::PlaybackSession::Track::Track( mMediaPuller(mediaPuller), mConverter(converter), mStarted(false), - mPacketizerTrackIndex(-1), mIsAudio(IsAudioFormat(mConverter->getOutputFormat())), mLastOutputBufferQueuedTimeUs(-1ll) { } @@ -161,13 +158,14 @@ const sp &WifiDisplaySource::PlaybackSession::Track::converter() cons return mConverter; } -ssize_t WifiDisplaySource::PlaybackSession::Track::packetizerTrackIndex() const { - return mPacketizerTrackIndex; +ssize_t WifiDisplaySource::PlaybackSession::Track::mediaSenderTrackIndex() const { + CHECK_GE(mMediaSenderTrackIndex, 0); + return mMediaSenderTrackIndex; } -void WifiDisplaySource::PlaybackSession::Track::setPacketizerTrackIndex(size_t index) { - CHECK_LT(mPacketizerTrackIndex, 0); - mPacketizerTrackIndex = index; +void WifiDisplaySource::PlaybackSession::Track::setMediaSenderTrackIndex( + size_t index) { + mMediaSenderTrackIndex = index; } status_t WifiDisplaySource::PlaybackSession::Track::start() { @@ -331,22 +329,28 @@ WifiDisplaySource::PlaybackSession::PlaybackSession( mNotify(notify), mInterfaceAddr(interfaceAddr), mHDCP(hdcp), + mLocalRTPPort(-1), mWeAreDead(false), mPaused(false), mLastLifesignUs(), mVideoTrackIndex(-1), - mPrevTimeUs(-1ll), - mAllTracksHavePacketizerIndex(false) { + mPrevTimeUs(-1ll) { } status_t WifiDisplaySource::PlaybackSession::init( const char *clientIP, int32_t clientRtp, int32_t clientRtcp, - Sender::TransportMode transportMode, + RTPSender::TransportMode transportMode, bool enableAudio, bool usePCMAudio, bool enableVideo, VideoFormats::ResolutionType videoResolutionType, size_t videoResolutionIndex) { + 
sp notify = new AMessage(kWhatMediaSenderNotify, id()); + mMediaSender = new MediaSender(mNetSession, notify); + looper()->registerHandler(mMediaSender); + + mMediaSender->setHDCP(mHDCP); + status_t err = setupPacketizer( enableAudio, usePCMAudio, @@ -354,26 +358,22 @@ status_t WifiDisplaySource::PlaybackSession::init( videoResolutionType, videoResolutionIndex); - if (err != OK) { - return err; + if (err == OK) { + err = mMediaSender->initAsync( + -1 /* trackIndex */, + transportMode, + clientIP, + clientRtp, + clientRtcp, + &mLocalRTPPort); } - sp notify = new AMessage(kWhatSenderNotify, id()); - mSender = new Sender(mNetSession, notify); - - mSenderLooper = new ALooper; - mSenderLooper->setName("sender_looper"); - - mSenderLooper->start( - false /* runOnCallingThread */, - false /* canCallJava */, - PRIORITY_AUDIO); - - mSenderLooper->registerHandler(mSender); + if (err != OK) { + mLocalRTPPort = -1; - err = mSender->init(clientIP, clientRtp, clientRtcp, transportMode); + looper()->unregisterHandler(mMediaSender->id()); + mMediaSender.clear(); - if (err != OK) { return err; } @@ -386,7 +386,7 @@ WifiDisplaySource::PlaybackSession::~PlaybackSession() { } int32_t WifiDisplaySource::PlaybackSession::getRTPPort() const { - return mSender->getRTPPort(); + return mLocalRTPPort; } int64_t WifiDisplaySource::PlaybackSession::getLastLifesignUs() const { @@ -406,18 +406,10 @@ status_t WifiDisplaySource::PlaybackSession::play() { } status_t WifiDisplaySource::PlaybackSession::finishPlay() { - // XXX Give the dongle a second to bind its sockets. - (new AMessage(kWhatFinishPlay, id()))->post(1000000ll); return OK; } -status_t WifiDisplaySource::PlaybackSession::onFinishPlay() { - return mSender->finishInit(); -} - -status_t WifiDisplaySource::PlaybackSession::onFinishPlay2() { - mSender->scheduleSendSR(); - +status_t WifiDisplaySource::PlaybackSession::onMediaSenderInitialized() { for (size_t i = 0; i < mTracks.size(); ++i) { CHECK_EQ((status_t)OK, mTracks.editValueAt(i)->start()); } @@ -464,44 +456,18 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( CHECK(msg->findSize("trackIndex", &trackIndex)); if (what == Converter::kWhatAccessUnit) { - const sp &track = mTracks.valueFor(trackIndex); - - ssize_t packetizerTrackIndex = track->packetizerTrackIndex(); - - if (packetizerTrackIndex < 0) { - sp trackFormat = track->getFormat()->dup(); - if (mHDCP != NULL && !track->isAudio()) { - // HDCP2.0 _and_ HDCP 2.1 specs say to set the version - // inside the HDCP descriptor to 0x20!!! 
- trackFormat->setInt32("hdcp-version", 0x20); - } - packetizerTrackIndex = mPacketizer->addTrack(trackFormat); - - CHECK_GE(packetizerTrackIndex, 0); - - track->setPacketizerTrackIndex(packetizerTrackIndex); - - if (allTracksHavePacketizerIndex()) { - status_t err = packetizeQueuedAccessUnits(); - - if (err != OK) { - notifySessionDead(); - break; - } - } - } - sp accessUnit; CHECK(msg->findBuffer("accessUnit", &accessUnit)); - if (!allTracksHavePacketizerIndex()) { - track->queueAccessUnit(accessUnit); - break; - } + const sp &track = mTracks.valueFor(trackIndex); - track->queueOutputBuffer(accessUnit); + status_t err = mMediaSender->queueAccessUnit( + track->mediaSenderTrackIndex(), + accessUnit); - drainAccessUnits(); + if (err != OK) { + notifySessionDead(); + } break; } else if (what == Converter::kWhatEOS) { CHECK_EQ(what, Converter::kWhatEOS); @@ -533,37 +499,25 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( break; } - case kWhatSenderNotify: + case kWhatMediaSenderNotify: { int32_t what; CHECK(msg->findInt32("what", &what)); - if (what == Sender::kWhatInitDone) { - onFinishPlay2(); - } else if (what == Sender::kWhatSessionDead) { - notifySessionDead(); - } else if (what == Sender::kWhatBinaryData) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatBinaryData); - - int32_t channel; - CHECK(msg->findInt32("channel", &channel)); - notify->setInt32("channel", channel); + if (what == MediaSender::kWhatInitDone) { + status_t err; + CHECK(msg->findInt32("err", &err)); - sp data; - CHECK(msg->findBuffer("data", &data)); - notify->setBuffer("data", data); - notify->post(); + if (err == OK) { + onMediaSenderInitialized(); + } else { + notifySessionDead(); + } + } else if (what == MediaSender::kWhatError) { + notifySessionDead(); } else { TRESPASS(); } - - break; - } - - case kWhatFinishPlay: - { - onFinishPlay(); break; } @@ -588,11 +542,8 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( break; } - mSenderLooper->unregisterHandler(mSender->id()); - mSender.clear(); - mSenderLooper.clear(); - - mPacketizer.clear(); + looper()->unregisterHandler(mMediaSender->id()); + mMediaSender.clear(); sp notify = mNotify->dup(); notify->setInt32("what", kWhatSessionDestroyed); @@ -601,28 +552,6 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( break; } - case kWhatPacketize: - { - size_t trackIndex; - CHECK(msg->findSize("trackIndex", &trackIndex)); - - sp accessUnit; - CHECK(msg->findBuffer("accessUnit", &accessUnit)); - -#if 0 - if ((ssize_t)trackIndex == mVideoTrackIndex) { - int64_t nowUs = ALooper::GetNowUs(); - static int64_t prevNowUs = 0ll; - - ALOGI("sending AU, dNowUs=%lld us", nowUs - prevNowUs); - - prevNowUs = nowUs; - } -#endif - - break; - } - case kWhatPause: { if (mPaused) { @@ -664,8 +593,6 @@ status_t WifiDisplaySource::PlaybackSession::setupPacketizer( size_t videoResolutionIndex) { CHECK(enableAudio || enableVideo); - mPacketizer = new TSPacketizer; - if (enableVideo) { status_t err = addVideoSource( videoResolutionType, videoResolutionIndex); @@ -763,6 +690,17 @@ status_t WifiDisplaySource::PlaybackSession::addSource( mVideoTrackIndex = trackIndex; } + uint32_t flags = 0; + if (converter->needToManuallyPrependSPSPPS()) { + flags |= MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS; + } + + ssize_t mediaSenderTrackIndex = + mMediaSender->addTrack(converter->getOutputFormat(), flags); + CHECK_GE(mediaSenderTrackIndex, 0); + + track->setMediaSenderTrackIndex(mediaSenderTrackIndex); + return OK; } @@ -832,168 +770,6 @@ void 
WifiDisplaySource::PlaybackSession::requestIDRFrame() { } } -bool WifiDisplaySource::PlaybackSession::allTracksHavePacketizerIndex() { - if (mAllTracksHavePacketizerIndex) { - return true; - } - - for (size_t i = 0; i < mTracks.size(); ++i) { - if (mTracks.valueAt(i)->packetizerTrackIndex() < 0) { - return false; - } - } - - mAllTracksHavePacketizerIndex = true; - - return true; -} - -status_t WifiDisplaySource::PlaybackSession::packetizeAccessUnit( - size_t trackIndex, sp accessUnit, - sp *packets) { - const sp &track = mTracks.valueFor(trackIndex); - - uint32_t flags = 0; - - bool isHDCPEncrypted = false; - uint64_t inputCTR; - uint8_t HDCP_private_data[16]; - - bool manuallyPrependSPSPPS = - !track->isAudio() - && track->converter()->needToManuallyPrependSPSPPS() - && IsIDR(accessUnit); - - if (mHDCP != NULL && !track->isAudio()) { - isHDCPEncrypted = true; - - if (manuallyPrependSPSPPS) { - accessUnit = mPacketizer->prependCSD( - track->packetizerTrackIndex(), accessUnit); - } - - status_t err = mHDCP->encrypt( - accessUnit->data(), accessUnit->size(), - trackIndex /* streamCTR */, - &inputCTR, - accessUnit->data()); - - if (err != OK) { - ALOGE("Failed to HDCP-encrypt media data (err %d)", - err); - - return err; - } - - HDCP_private_data[0] = 0x00; - - HDCP_private_data[1] = - (((trackIndex >> 30) & 3) << 1) | 1; - - HDCP_private_data[2] = (trackIndex >> 22) & 0xff; - - HDCP_private_data[3] = - (((trackIndex >> 15) & 0x7f) << 1) | 1; - - HDCP_private_data[4] = (trackIndex >> 7) & 0xff; - - HDCP_private_data[5] = - ((trackIndex & 0x7f) << 1) | 1; - - HDCP_private_data[6] = 0x00; - - HDCP_private_data[7] = - (((inputCTR >> 60) & 0x0f) << 1) | 1; - - HDCP_private_data[8] = (inputCTR >> 52) & 0xff; - - HDCP_private_data[9] = - (((inputCTR >> 45) & 0x7f) << 1) | 1; - - HDCP_private_data[10] = (inputCTR >> 37) & 0xff; - - HDCP_private_data[11] = - (((inputCTR >> 30) & 0x7f) << 1) | 1; - - HDCP_private_data[12] = (inputCTR >> 22) & 0xff; - - HDCP_private_data[13] = - (((inputCTR >> 15) & 0x7f) << 1) | 1; - - HDCP_private_data[14] = (inputCTR >> 7) & 0xff; - - HDCP_private_data[15] = - ((inputCTR & 0x7f) << 1) | 1; - -#if 0 - ALOGI("HDCP_private_data:"); - hexdump(HDCP_private_data, sizeof(HDCP_private_data)); - - ABitReader br(HDCP_private_data, sizeof(HDCP_private_data)); - CHECK_EQ(br.getBits(13), 0); - CHECK_EQ(br.getBits(2), (trackIndex >> 30) & 3); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(15), (trackIndex >> 15) & 0x7fff); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(15), trackIndex & 0x7fff); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(11), 0); - CHECK_EQ(br.getBits(4), (inputCTR >> 60) & 0xf); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(15), (inputCTR >> 45) & 0x7fff); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(15), (inputCTR >> 30) & 0x7fff); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(15), (inputCTR >> 15) & 0x7fff); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(15), inputCTR & 0x7fff); - CHECK_EQ(br.getBits(1), 1u); -#endif - - flags |= TSPacketizer::IS_ENCRYPTED; - } else if (manuallyPrependSPSPPS) { - flags |= TSPacketizer::PREPEND_SPS_PPS_TO_IDR_FRAMES; - } - - int64_t timeUs = ALooper::GetNowUs(); - if (mPrevTimeUs < 0ll || mPrevTimeUs + 100000ll <= timeUs) { - flags |= TSPacketizer::EMIT_PCR; - flags |= TSPacketizer::EMIT_PAT_AND_PMT; - - mPrevTimeUs = timeUs; - } - - mPacketizer->packetize( - track->packetizerTrackIndex(), accessUnit, packets, flags, - !isHDCPEncrypted ? 
NULL : HDCP_private_data, - !isHDCPEncrypted ? 0 : sizeof(HDCP_private_data), - track->isAudio() ? 2 : 0 /* numStuffingBytes */); - - return OK; -} - -status_t WifiDisplaySource::PlaybackSession::packetizeQueuedAccessUnits() { - for (;;) { - bool gotMoreData = false; - for (size_t i = 0; i < mTracks.size(); ++i) { - size_t trackIndex = mTracks.keyAt(i); - const sp &track = mTracks.valueAt(i); - - sp accessUnit = track->dequeueAccessUnit(); - if (accessUnit != NULL) { - track->queueOutputBuffer(accessUnit); - gotMoreData = true; - } - } - - if (!gotMoreData) { - break; - } - } - - return OK; -} - void WifiDisplaySource::PlaybackSession::notifySessionDead() { // Inform WifiDisplaySource of our premature death (wish). sp notify = mNotify->dup(); @@ -1003,78 +779,5 @@ void WifiDisplaySource::PlaybackSession::notifySessionDead() { mWeAreDead = true; } -void WifiDisplaySource::PlaybackSession::drainAccessUnits() { - ALOGV("audio/video has %d/%d buffers ready.", - mTracks.valueFor(1)->countQueuedOutputBuffers(), - mTracks.valueFor(0)->countQueuedOutputBuffers()); - - while (drainAccessUnit()) { - } -} - -bool WifiDisplaySource::PlaybackSession::drainAccessUnit() { - ssize_t minTrackIndex = -1; - int64_t minTimeUs = -1ll; - - for (size_t i = 0; i < mTracks.size(); ++i) { - const sp &track = mTracks.valueAt(i); - - int64_t timeUs; - if (track->hasOutputBuffer(&timeUs)) { - if (minTrackIndex < 0 || timeUs < minTimeUs) { - minTrackIndex = mTracks.keyAt(i); - minTimeUs = timeUs; - } - } -#if SUSPEND_VIDEO_IF_IDLE - else if (!track->isSuspended()) { - // We still consider this track "live", so it should keep - // delivering output data whose time stamps we'll have to - // consider for proper interleaving. - return false; - } -#else - else { - // We need access units available on all tracks to be able to - // dequeue the earliest one. - return false; - } -#endif - } - - if (minTrackIndex < 0) { - return false; - } - - const sp &track = mTracks.valueFor(minTrackIndex); - sp accessUnit = track->dequeueOutputBuffer(); - - sp packets; - status_t err = packetizeAccessUnit(minTrackIndex, accessUnit, &packets); - - if (err != OK) { - notifySessionDead(); - return false; - } - - if ((ssize_t)minTrackIndex == mVideoTrackIndex) { - packets->meta()->setInt32("isVideo", 1); - } - mSender->queuePackets(minTimeUs, packets); - -#if 0 - if (minTrackIndex == mVideoTrackIndex) { - int64_t nowUs = ALooper::GetNowUs(); - - // Latency from "data acquired" to "ready to send if we wanted to". - ALOGI("[%s] latencyUs = %lld ms", - minTrackIndex == mVideoTrackIndex ? "video" : "audio", - (nowUs - minTimeUs) / 1000ll); - } -#endif - - return true; -} - } // namespace android diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h index 7365c78..cd6da85 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.h +++ b/media/libstagefright/wifi-display/source/PlaybackSession.h @@ -18,7 +18,7 @@ #define PLAYBACK_SESSION_H_ -#include "Sender.h" +#include "MediaSender.h" #include "VideoFormats.h" #include "WifiDisplaySource.h" @@ -30,7 +30,7 @@ struct IHDCP; struct IGraphicBufferProducer; struct MediaPuller; struct MediaSource; -struct TSPacketizer; +struct MediaSender; // Encapsulates the state of an RTP/RTCP session in the context of wifi // display. 
@@ -43,7 +43,7 @@ struct WifiDisplaySource::PlaybackSession : public AHandler { status_t init( const char *clientIP, int32_t clientRtp, int32_t clientRtcp, - Sender::TransportMode transportMode, + RTPSender::TransportMode transportMode, bool enableAudio, bool usePCMAudio, bool enableVideo, @@ -83,26 +83,25 @@ private: kWhatMediaPullerNotify, kWhatConverterNotify, kWhatTrackNotify, - kWhatSenderNotify, kWhatUpdateSurface, - kWhatFinishPlay, - kWhatPacketize, kWhatPause, kWhatResume, + kWhatMediaSenderNotify, }; sp mNetSession; - sp mSender; - sp mSenderLooper; sp mNotify; in_addr mInterfaceAddr; sp mHDCP; + + sp mMediaSender; + int32_t mLocalRTPPort; + bool mWeAreDead; bool mPaused; int64_t mLastLifesignUs; - sp mPacketizer; sp mBufferQueue; KeyedVector > mTracks; @@ -110,8 +109,6 @@ private: int64_t mPrevTimeUs; - bool mAllTracksHavePacketizerIndex; - status_t setupPacketizer( bool enableAudio, bool usePCMAudio, @@ -132,27 +129,10 @@ private: status_t addAudioSource(bool usePCMAudio); - ssize_t appendTSData( - const void *data, size_t size, bool timeDiscontinuity, bool flush); - - status_t onFinishPlay(); - status_t onFinishPlay2(); - - bool allTracksHavePacketizerIndex(); - - status_t packetizeAccessUnit( - size_t trackIndex, sp accessUnit, - sp *packets); - - status_t packetizeQueuedAccessUnits(); + status_t onMediaSenderInitialized(); void notifySessionDead(); - void drainAccessUnits(); - - // Returns true iff an access unit was successfully drained. - bool drainAccessUnit(); - DISALLOW_EVIL_CONSTRUCTORS(PlaybackSession); }; diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.h b/media/libstagefright/wifi-display/source/RepeaterSource.h index a13973c..146af32 100644 --- a/media/libstagefright/wifi-display/source/RepeaterSource.h +++ b/media/libstagefright/wifi-display/source/RepeaterSource.h @@ -6,7 +6,7 @@ #include #include -#define SUSPEND_VIDEO_IF_IDLE 1 +#define SUSPEND_VIDEO_IF_IDLE 0 namespace android { diff --git a/media/libstagefright/wifi-display/source/Sender.cpp b/media/libstagefright/wifi-display/source/Sender.cpp deleted file mode 100644 index 8b7d93f..0000000 --- a/media/libstagefright/wifi-display/source/Sender.cpp +++ /dev/null @@ -1,878 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "Sender" -#include - -#include "Sender.h" - -#include "ANetworkSession.h" -#include "TimeSeries.h" - -#include -#include -#include -#include -#include -#include - -namespace android { - -static size_t kMaxRTPPacketSize = 1500; -static size_t kMaxNumTSPacketsPerRTPPacket = (kMaxRTPPacketSize - 12) / 188; - -Sender::Sender( - const sp &netSession, - const sp ¬ify) - : mNetSession(netSession), - mNotify(notify), - mTransportMode(TRANSPORT_UDP), - mRTPChannel(0), - mRTCPChannel(0), - mRTPPort(0), - mRTPSessionID(0), - mRTCPSessionID(0), -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - mRTPRetransmissionSessionID(0), - mRTCPRetransmissionSessionID(0), -#endif - mClientRTPPort(0), - mClientRTCPPort(0), - mRTPConnected(false), - mRTCPConnected(false), - mFirstOutputBufferReadyTimeUs(-1ll), - mFirstOutputBufferSentTimeUs(-1ll), - mRTPSeqNo(0), -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - mRTPRetransmissionSeqNo(0), -#endif - mLastNTPTime(0), - mLastRTPTime(0), - mNumRTPSent(0), - mNumRTPOctetsSent(0), - mNumSRsSent(0), - mSendSRPending(false) -#if ENABLE_RETRANSMISSION - ,mHistoryLength(0) -#endif -#if TRACK_BANDWIDTH - ,mFirstPacketTimeUs(-1ll) - ,mTotalBytesSent(0ll) -#endif -#if LOG_TRANSPORT_STREAM - ,mLogFile(NULL) -#endif -{ -#if LOG_TRANSPORT_STREAM - mLogFile = fopen("/system/etc/log.ts", "wb"); -#endif -} - -Sender::~Sender() { -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - if (mRTCPRetransmissionSessionID != 0) { - mNetSession->destroySession(mRTCPRetransmissionSessionID); - } - - if (mRTPRetransmissionSessionID != 0) { - mNetSession->destroySession(mRTPRetransmissionSessionID); - } -#endif - - if (mRTCPSessionID != 0) { - mNetSession->destroySession(mRTCPSessionID); - } - - if (mRTPSessionID != 0) { - mNetSession->destroySession(mRTPSessionID); - } - -#if LOG_TRANSPORT_STREAM - if (mLogFile != NULL) { - fclose(mLogFile); - mLogFile = NULL; - } -#endif -} - -status_t Sender::init( - const char *clientIP, int32_t clientRtp, int32_t clientRtcp, - TransportMode transportMode) { - mClientIP = clientIP; - mTransportMode = transportMode; - - if (transportMode == TRANSPORT_TCP_INTERLEAVED) { - mRTPChannel = clientRtp; - mRTCPChannel = clientRtcp; - mRTPPort = 0; - mRTPSessionID = 0; - mRTCPSessionID = 0; - return OK; - } - - mRTPChannel = 0; - mRTCPChannel = 0; - - if (mTransportMode == TRANSPORT_TCP) { - // XXX This is wrong, we need to allocate sockets here, we only - // need to do this because the dongles are not establishing their - // end until after PLAY instead of before SETUP. 
- mRTPPort = 20000; - mRTPSessionID = 0; - mRTCPSessionID = 0; - mClientRTPPort = clientRtp; - mClientRTCPPort = clientRtcp; - return OK; - } - - int serverRtp; - - sp rtpNotify = new AMessage(kWhatRTPNotify, id()); - sp rtcpNotify = new AMessage(kWhatRTCPNotify, id()); - -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - sp rtpRetransmissionNotify = - new AMessage(kWhatRTPRetransmissionNotify, id()); - - sp rtcpRetransmissionNotify = - new AMessage(kWhatRTCPRetransmissionNotify, id()); -#endif - - status_t err; - for (serverRtp = 15550;; serverRtp += 2) { - int32_t rtpSession; - if (mTransportMode == TRANSPORT_UDP) { - err = mNetSession->createUDPSession( - serverRtp, clientIP, clientRtp, - rtpNotify, &rtpSession); - } else { - err = mNetSession->createTCPDatagramSession( - serverRtp, clientIP, clientRtp, - rtpNotify, &rtpSession); - } - - if (err != OK) { - ALOGI("failed to create RTP socket on port %d", serverRtp); - continue; - } - - int32_t rtcpSession = 0; - - if (clientRtcp >= 0) { - if (mTransportMode == TRANSPORT_UDP) { - err = mNetSession->createUDPSession( - serverRtp + 1, clientIP, clientRtcp, - rtcpNotify, &rtcpSession); - } else { - err = mNetSession->createTCPDatagramSession( - serverRtp + 1, clientIP, clientRtcp, - rtcpNotify, &rtcpSession); - } - - if (err != OK) { - ALOGI("failed to create RTCP socket on port %d", serverRtp + 1); - - mNetSession->destroySession(rtpSession); - continue; - } - } - -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - if (mTransportMode == TRANSPORT_UDP) { - int32_t rtpRetransmissionSession; - - err = mNetSession->createUDPSession( - serverRtp + kRetransmissionPortOffset, - clientIP, - clientRtp + kRetransmissionPortOffset, - rtpRetransmissionNotify, - &rtpRetransmissionSession); - - if (err != OK) { - mNetSession->destroySession(rtcpSession); - mNetSession->destroySession(rtpSession); - continue; - } - - CHECK_GE(clientRtcp, 0); - - int32_t rtcpRetransmissionSession; - err = mNetSession->createUDPSession( - serverRtp + 1 + kRetransmissionPortOffset, - clientIP, - clientRtp + 1 + kRetransmissionPortOffset, - rtcpRetransmissionNotify, - &rtcpRetransmissionSession); - - if (err != OK) { - mNetSession->destroySession(rtpRetransmissionSession); - mNetSession->destroySession(rtcpSession); - mNetSession->destroySession(rtpSession); - continue; - } - - mRTPRetransmissionSessionID = rtpRetransmissionSession; - mRTCPRetransmissionSessionID = rtcpRetransmissionSession; - - ALOGI("rtpRetransmissionSessionID = %d, " - "rtcpRetransmissionSessionID = %d", - rtpRetransmissionSession, rtcpRetransmissionSession); - } -#endif - - mRTPPort = serverRtp; - mRTPSessionID = rtpSession; - mRTCPSessionID = rtcpSession; - - ALOGI("rtpSessionID = %d, rtcpSessionID = %d", rtpSession, rtcpSession); - break; - } - - if (mRTPPort == 0) { - return UNKNOWN_ERROR; - } - - return OK; -} - -status_t Sender::finishInit() { - if (mTransportMode != TRANSPORT_TCP) { - notifyInitDone(); - return OK; - } - - sp rtpNotify = new AMessage(kWhatRTPNotify, id()); - - status_t err = mNetSession->createTCPDatagramSession( - mRTPPort, mClientIP.c_str(), mClientRTPPort, - rtpNotify, &mRTPSessionID); - - if (err != OK) { - return err; - } - - if (mClientRTCPPort >= 0) { - sp rtcpNotify = new AMessage(kWhatRTCPNotify, id()); - - err = mNetSession->createTCPDatagramSession( - mRTPPort + 1, mClientIP.c_str(), mClientRTCPPort, - rtcpNotify, &mRTCPSessionID); - - if (err != OK) { - return err; - } - } - - return OK; -} - -int32_t Sender::getRTPPort() const { - 
return mRTPPort; -} - -void Sender::queuePackets( - int64_t timeUs, const sp &tsPackets) { - const size_t numTSPackets = tsPackets->size() / 188; - - const size_t numRTPPackets = - (numTSPackets + kMaxNumTSPacketsPerRTPPacket - 1) - / kMaxNumTSPacketsPerRTPPacket; - - sp udpPackets = new ABuffer( - numRTPPackets * (12 + kMaxNumTSPacketsPerRTPPacket * 188)); - - udpPackets->meta()->setInt64("timeUs", timeUs); - - size_t dstOffset = 0; - for (size_t i = 0; i < numTSPackets; ++i) { - if ((i % kMaxNumTSPacketsPerRTPPacket) == 0) { - static const bool kMarkerBit = false; - - uint8_t *rtp = udpPackets->data() + dstOffset; - rtp[0] = 0x80; - rtp[1] = 33 | (kMarkerBit ? (1 << 7) : 0); // M-bit - rtp[2] = (mRTPSeqNo >> 8) & 0xff; - rtp[3] = mRTPSeqNo & 0xff; - rtp[4] = 0x00; // rtp time to be filled in later. - rtp[5] = 0x00; - rtp[6] = 0x00; - rtp[7] = 0x00; - rtp[8] = kSourceID >> 24; - rtp[9] = (kSourceID >> 16) & 0xff; - rtp[10] = (kSourceID >> 8) & 0xff; - rtp[11] = kSourceID & 0xff; - - ++mRTPSeqNo; - - dstOffset += 12; - } - - memcpy(udpPackets->data() + dstOffset, - tsPackets->data() + 188 * i, - 188); - - dstOffset += 188; - } - - udpPackets->setRange(0, dstOffset); - - sp msg = new AMessage(kWhatDrainQueue, id()); - msg->setBuffer("udpPackets", udpPackets); - msg->post(); - -#if LOG_TRANSPORT_STREAM - if (mLogFile != NULL) { - fwrite(tsPackets->data(), 1, tsPackets->size(), mLogFile); - } -#endif -} - -void Sender::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatRTPNotify: - case kWhatRTCPNotify: -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - case kWhatRTPRetransmissionNotify: - case kWhatRTCPRetransmissionNotify: -#endif - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - int32_t errorOccuredDuringSend; - CHECK(msg->findInt32("send", &errorOccuredDuringSend)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - if ((msg->what() == kWhatRTPNotify -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - || msg->what() == kWhatRTPRetransmissionNotify -#endif - ) && !errorOccuredDuringSend) { - // This is ok, we don't expect to receive anything on - // the RTP socket. - break; - } - - ALOGE("An error occurred during %s in session %d " - "(%d, '%s' (%s)).", - errorOccuredDuringSend ? 
"send" : "receive", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - mNetSession->destroySession(sessionID); - - if (sessionID == mRTPSessionID) { - mRTPSessionID = 0; - } else if (sessionID == mRTCPSessionID) { - mRTCPSessionID = 0; - } -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - else if (sessionID == mRTPRetransmissionSessionID) { - mRTPRetransmissionSessionID = 0; - } else if (sessionID == mRTCPRetransmissionSessionID) { - mRTCPRetransmissionSessionID = 0; - } -#endif - - notifySessionDead(); - break; - } - - case ANetworkSession::kWhatDatagram: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - sp data; - CHECK(msg->findBuffer("data", &data)); - - status_t err; - if (msg->what() == kWhatRTCPNotify -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - || msg->what() == kWhatRTCPRetransmissionNotify -#endif - ) - { - err = parseRTCP(data); - } - break; - } - - case ANetworkSession::kWhatConnected: - { - CHECK_EQ(mTransportMode, TRANSPORT_TCP); - - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - if (sessionID == mRTPSessionID) { - CHECK(!mRTPConnected); - mRTPConnected = true; - ALOGI("RTP Session now connected."); - } else if (sessionID == mRTCPSessionID) { - CHECK(!mRTCPConnected); - mRTCPConnected = true; - ALOGI("RTCP Session now connected."); - } else { - TRESPASS(); - } - - if (mRTPConnected - && (mClientRTCPPort < 0 || mRTCPConnected)) { - notifyInitDone(); - } - break; - } - - default: - TRESPASS(); - } - break; - } - - case kWhatDrainQueue: - { - sp udpPackets; - CHECK(msg->findBuffer("udpPackets", &udpPackets)); - - onDrainQueue(udpPackets); - break; - } - - case kWhatSendSR: - { - mSendSRPending = false; - - if (mRTCPSessionID == 0) { - break; - } - - onSendSR(); - - scheduleSendSR(); - break; - } - } -} - -void Sender::scheduleSendSR() { - if (mSendSRPending || mRTCPSessionID == 0) { - return; - } - - mSendSRPending = true; - (new AMessage(kWhatSendSR, id()))->post(kSendSRIntervalUs); -} - -void Sender::addSR(const sp &buffer) { - uint8_t *data = buffer->data() + buffer->size(); - - // TODO: Use macros/utility functions to clean up all the bitshifts below. 
- - data[0] = 0x80 | 0; - data[1] = 200; // SR - data[2] = 0; - data[3] = 6; - data[4] = kSourceID >> 24; - data[5] = (kSourceID >> 16) & 0xff; - data[6] = (kSourceID >> 8) & 0xff; - data[7] = kSourceID & 0xff; - - data[8] = mLastNTPTime >> (64 - 8); - data[9] = (mLastNTPTime >> (64 - 16)) & 0xff; - data[10] = (mLastNTPTime >> (64 - 24)) & 0xff; - data[11] = (mLastNTPTime >> 32) & 0xff; - data[12] = (mLastNTPTime >> 24) & 0xff; - data[13] = (mLastNTPTime >> 16) & 0xff; - data[14] = (mLastNTPTime >> 8) & 0xff; - data[15] = mLastNTPTime & 0xff; - - data[16] = (mLastRTPTime >> 24) & 0xff; - data[17] = (mLastRTPTime >> 16) & 0xff; - data[18] = (mLastRTPTime >> 8) & 0xff; - data[19] = mLastRTPTime & 0xff; - - data[20] = mNumRTPSent >> 24; - data[21] = (mNumRTPSent >> 16) & 0xff; - data[22] = (mNumRTPSent >> 8) & 0xff; - data[23] = mNumRTPSent & 0xff; - - data[24] = mNumRTPOctetsSent >> 24; - data[25] = (mNumRTPOctetsSent >> 16) & 0xff; - data[26] = (mNumRTPOctetsSent >> 8) & 0xff; - data[27] = mNumRTPOctetsSent & 0xff; - - buffer->setRange(buffer->offset(), buffer->size() + 28); -} - -void Sender::addSDES(const sp &buffer) { - uint8_t *data = buffer->data() + buffer->size(); - data[0] = 0x80 | 1; - data[1] = 202; // SDES - data[4] = kSourceID >> 24; - data[5] = (kSourceID >> 16) & 0xff; - data[6] = (kSourceID >> 8) & 0xff; - data[7] = kSourceID & 0xff; - - size_t offset = 8; - - data[offset++] = 1; // CNAME - - static const char *kCNAME = "someone@somewhere"; - data[offset++] = strlen(kCNAME); - - memcpy(&data[offset], kCNAME, strlen(kCNAME)); - offset += strlen(kCNAME); - - data[offset++] = 7; // NOTE - - static const char *kNOTE = "Hell's frozen over."; - data[offset++] = strlen(kNOTE); - - memcpy(&data[offset], kNOTE, strlen(kNOTE)); - offset += strlen(kNOTE); - - data[offset++] = 0; - - if ((offset % 4) > 0) { - size_t count = 4 - (offset % 4); - switch (count) { - case 3: - data[offset++] = 0; - case 2: - data[offset++] = 0; - case 1: - data[offset++] = 0; - } - } - - size_t numWords = (offset / 4) - 1; - data[2] = numWords >> 8; - data[3] = numWords & 0xff; - - buffer->setRange(buffer->offset(), buffer->size() + offset); -} - -// static -uint64_t Sender::GetNowNTP() { - uint64_t nowUs = ALooper::GetNowUs(); - - nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll; - - uint64_t hi = nowUs / 1000000ll; - uint64_t lo = ((1ll << 32) * (nowUs % 1000000ll)) / 1000000ll; - - return (hi << 32) | lo; -} - -void Sender::onSendSR() { - sp buffer = new ABuffer(1500); - buffer->setRange(0, 0); - - addSR(buffer); - addSDES(buffer); - - if (mTransportMode == TRANSPORT_TCP_INTERLEAVED) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatBinaryData); - notify->setInt32("channel", mRTCPChannel); - notify->setBuffer("data", buffer); - notify->post(); - } else { - sendPacket(mRTCPSessionID, buffer->data(), buffer->size()); - } - - ++mNumSRsSent; -} - -#if ENABLE_RETRANSMISSION -status_t Sender::parseTSFB( - const uint8_t *data, size_t size) { - if ((data[0] & 0x1f) != 1) { - return ERROR_UNSUPPORTED; // We only support NACK for now. 
- } - - uint32_t srcId = U32_AT(&data[8]); - if (srcId != kSourceID) { - return ERROR_MALFORMED; - } - - for (size_t i = 12; i < size; i += 4) { - uint16_t seqNo = U16_AT(&data[i]); - uint16_t blp = U16_AT(&data[i + 2]); - - List >::iterator it = mHistory.begin(); - bool foundSeqNo = false; - while (it != mHistory.end()) { - const sp &buffer = *it; - - uint16_t bufferSeqNo = buffer->int32Data() & 0xffff; - - bool retransmit = false; - if (bufferSeqNo == seqNo) { - retransmit = true; - } else if (blp != 0) { - for (size_t i = 0; i < 16; ++i) { - if ((blp & (1 << i)) - && (bufferSeqNo == ((seqNo + i + 1) & 0xffff))) { - blp &= ~(1 << i); - retransmit = true; - } - } - } - - if (retransmit) { - ALOGI("retransmitting seqNo %d", bufferSeqNo); - -#if RETRANSMISSION_ACCORDING_TO_RFC_XXXX - sp retransRTP = new ABuffer(2 + buffer->size()); - uint8_t *rtp = retransRTP->data(); - memcpy(rtp, buffer->data(), 12); - rtp[2] = (mRTPRetransmissionSeqNo >> 8) & 0xff; - rtp[3] = mRTPRetransmissionSeqNo & 0xff; - rtp[12] = (bufferSeqNo >> 8) & 0xff; - rtp[13] = bufferSeqNo & 0xff; - memcpy(&rtp[14], buffer->data() + 12, buffer->size() - 12); - - ++mRTPRetransmissionSeqNo; - - sendPacket( - mRTPRetransmissionSessionID, - retransRTP->data(), retransRTP->size()); -#else - sendPacket( - mRTPSessionID, buffer->data(), buffer->size()); -#endif - - if (bufferSeqNo == seqNo) { - foundSeqNo = true; - } - - if (foundSeqNo && blp == 0) { - break; - } - } - - ++it; - } - - if (!foundSeqNo || blp != 0) { - ALOGI("Some sequence numbers were no longer available for " - "retransmission (seqNo = %d, foundSeqNo = %d, blp = 0x%04x)", - seqNo, foundSeqNo, blp); - - if (!mHistory.empty()) { - int32_t earliest = (*mHistory.begin())->int32Data() & 0xffff; - int32_t latest = (*--mHistory.end())->int32Data() & 0xffff; - - ALOGI("have seq numbers from %d - %d", earliest, latest); - } - } - } - - return OK; -} -#endif - -status_t Sender::parseRTCP( - const sp &buffer) { - const uint8_t *data = buffer->data(); - size_t size = buffer->size(); - - while (size > 0) { - if (size < 8) { - // Too short to be a valid RTCP header - return ERROR_MALFORMED; - } - - if ((data[0] >> 6) != 2) { - // Unsupported version. - return ERROR_UNSUPPORTED; - } - - if (data[0] & 0x20) { - // Padding present. - - size_t paddingLength = data[size - 1]; - - if (paddingLength + 12 > size) { - // If we removed this much padding we'd end up with something - // that's too short to be a valid RTP header. - return ERROR_MALFORMED; - } - - size -= paddingLength; - } - - size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4; - - if (size < headerLength) { - // Only received a partial packet? 
- return ERROR_MALFORMED; - } - - switch (data[1]) { - case 200: - case 201: // RR - case 202: // SDES - case 203: - case 204: // APP - break; - -#if ENABLE_RETRANSMISSION - case 205: // TSFB (transport layer specific feedback) - parseTSFB(data, headerLength); - break; -#endif - - case 206: // PSFB (payload specific feedback) - hexdump(data, headerLength); - break; - - default: - { - ALOGW("Unknown RTCP packet type %u of size %d", - (unsigned)data[1], headerLength); - break; - } - } - - data += headerLength; - size -= headerLength; - } - - return OK; -} - -status_t Sender::sendPacket( - int32_t sessionID, const void *data, size_t size) { - return mNetSession->sendRequest(sessionID, data, size); -} - -void Sender::notifyInitDone() { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatInitDone); - notify->post(); -} - -void Sender::notifySessionDead() { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatSessionDead); - notify->post(); -} - -void Sender::onDrainQueue(const sp &udpPackets) { - static const size_t kFullRTPPacketSize = - 12 + 188 * kMaxNumTSPacketsPerRTPPacket; - - size_t srcOffset = 0; - while (srcOffset < udpPackets->size()) { - uint8_t *rtp = udpPackets->data() + srcOffset; - - size_t rtpPacketSize = udpPackets->size() - srcOffset; - if (rtpPacketSize > kFullRTPPacketSize) { - rtpPacketSize = kFullRTPPacketSize; - } - - int64_t nowUs = ALooper::GetNowUs(); - mLastNTPTime = GetNowNTP(); - - // 90kHz time scale - uint32_t rtpTime = (nowUs * 9ll) / 100ll; - - rtp[4] = rtpTime >> 24; - rtp[5] = (rtpTime >> 16) & 0xff; - rtp[6] = (rtpTime >> 8) & 0xff; - rtp[7] = rtpTime & 0xff; - - ++mNumRTPSent; - mNumRTPOctetsSent += rtpPacketSize - 12; - - mLastRTPTime = rtpTime; - - if (mTransportMode == TRANSPORT_TCP_INTERLEAVED) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatBinaryData); - - sp data = new ABuffer(rtpPacketSize); - memcpy(data->data(), rtp, rtpPacketSize); - - notify->setInt32("channel", mRTPChannel); - notify->setBuffer("data", data); - notify->post(); - } else { - sendPacket(mRTPSessionID, rtp, rtpPacketSize); - -#if TRACK_BANDWIDTH - mTotalBytesSent += rtpPacketSize->size(); - int64_t delayUs = ALooper::GetNowUs() - mFirstPacketTimeUs; - - if (delayUs > 0ll) { - ALOGI("approx. net bandwidth used: %.2f Mbit/sec", - mTotalBytesSent * 8.0 / delayUs); - } -#endif - } - -#if ENABLE_RETRANSMISSION - addToHistory(rtp, rtpPacketSize); -#endif - - srcOffset += rtpPacketSize; - } - -#if 0 - int64_t timeUs; - CHECK(udpPackets->meta()->findInt64("timeUs", &timeUs)); - - ALOGI("dTimeUs = %lld us", ALooper::GetNowUs() - timeUs); -#endif -} - -#if ENABLE_RETRANSMISSION -void Sender::addToHistory(const uint8_t *rtp, size_t rtpPacketSize) { - sp packet = new ABuffer(rtpPacketSize); - memcpy(packet->data(), rtp, rtpPacketSize); - - unsigned rtpSeqNo = U16_AT(&rtp[2]); - packet->setInt32Data(rtpSeqNo); - - mHistory.push_back(packet); - ++mHistoryLength; - - if (mHistoryLength > kMaxHistoryLength) { - mHistory.erase(mHistory.begin()); - --mHistoryLength; - } -} -#endif - -} // namespace android - diff --git a/media/libstagefright/wifi-display/source/Sender.h b/media/libstagefright/wifi-display/source/Sender.h deleted file mode 100644 index 66951f7..0000000 --- a/media/libstagefright/wifi-display/source/Sender.h +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef SENDER_H_ - -#define SENDER_H_ - -#include - -namespace android { - -#define LOG_TRANSPORT_STREAM 0 -#define TRACK_BANDWIDTH 0 - -#define ENABLE_RETRANSMISSION 1 - -// If retransmission is enabled the following define determines what -// kind we support, if RETRANSMISSION_ACCORDING_TO_RFC_XXXX is 0 -// we'll send NACKs on the original RTCP channel and retransmit packets -// on the original RTP channel, otherwise a separate channel pair is used -// for this purpose. -#define RETRANSMISSION_ACCORDING_TO_RFC_XXXX 0 - -struct ABuffer; -struct ANetworkSession; - -struct Sender : public AHandler { - Sender(const sp &netSession, const sp ¬ify); - - enum { - kWhatInitDone, - kWhatSessionDead, - kWhatBinaryData, - }; - - enum TransportMode { - TRANSPORT_UDP, - TRANSPORT_TCP_INTERLEAVED, - TRANSPORT_TCP, - }; - status_t init( - const char *clientIP, int32_t clientRtp, int32_t clientRtcp, - TransportMode transportMode); - - status_t finishInit(); - - int32_t getRTPPort() const; - - void queuePackets(int64_t timeUs, const sp &tsPackets); - void scheduleSendSR(); - -protected: - virtual ~Sender(); - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatDrainQueue, - kWhatSendSR, - kWhatRTPNotify, - kWhatRTCPNotify, -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - kWhatRTPRetransmissionNotify, - kWhatRTCPRetransmissionNotify, -#endif - }; - - static const int64_t kSendSRIntervalUs = 10000000ll; - - static const uint32_t kSourceID = 0xdeadbeef; - static const size_t kMaxHistoryLength = 128; - -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - static const size_t kRetransmissionPortOffset = 120; -#endif - - sp mNetSession; - sp mNotify; - - TransportMode mTransportMode; - AString mClientIP; - - // in TCP mode - int32_t mRTPChannel; - int32_t mRTCPChannel; - - // in UDP mode - int32_t mRTPPort; - int32_t mRTPSessionID; - int32_t mRTCPSessionID; - -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - int32_t mRTPRetransmissionSessionID; - int32_t mRTCPRetransmissionSessionID; -#endif - - int32_t mClientRTPPort; - int32_t mClientRTCPPort; - bool mRTPConnected; - bool mRTCPConnected; - - int64_t mFirstOutputBufferReadyTimeUs; - int64_t mFirstOutputBufferSentTimeUs; - - uint32_t mRTPSeqNo; -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - uint32_t mRTPRetransmissionSeqNo; -#endif - - uint64_t mLastNTPTime; - uint32_t mLastRTPTime; - uint32_t mNumRTPSent; - uint32_t mNumRTPOctetsSent; - uint32_t mNumSRsSent; - - bool mSendSRPending; - -#if ENABLE_RETRANSMISSION - List > mHistory; - size_t mHistoryLength; -#endif - -#if TRACK_BANDWIDTH - int64_t mFirstPacketTimeUs; - uint64_t mTotalBytesSent; -#endif - -#if LOG_TRANSPORT_STREAM - FILE *mLogFile; -#endif - - void onSendSR(); - void addSR(const sp &buffer); - void addSDES(const sp &buffer); - static uint64_t GetNowNTP(); - -#if ENABLE_RETRANSMISSION - status_t parseTSFB(const uint8_t *data, size_t size); - void addToHistory(const uint8_t *rtp, size_t rtpPacketSize); -#endif - - status_t parseRTCP(const sp &buffer); - - status_t 
sendPacket(int32_t sessionID, const void *data, size_t size); - - void notifyInitDone(); - void notifySessionDead(); - - void onDrainQueue(const sp &udpPackets); - - DISALLOW_EVIL_CONSTRUCTORS(Sender); -}; - -} // namespace android - -#endif // SENDER_H_ diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp index ef57a4d..8420529 100644 --- a/media/libstagefright/wifi-display/source/TSPacketizer.cpp +++ b/media/libstagefright/wifi-display/source/TSPacketizer.cpp @@ -58,6 +58,7 @@ struct TSPacketizer::Track : public RefBase { sp descriptorAt(size_t index) const; void finalize(); + void extractCSDIfNecessary(); protected: virtual ~Track(); @@ -77,6 +78,7 @@ private: bool mAudioLacksATDSHeaders; bool mFinalized; + bool mExtractedCSD; DISALLOW_EVIL_CONSTRUCTORS(Track); }; @@ -90,14 +92,21 @@ TSPacketizer::Track::Track( mStreamID(streamID), mContinuityCounter(0), mAudioLacksATDSHeaders(false), - mFinalized(false) { + mFinalized(false), + mExtractedCSD(false) { CHECK(format->findString("mime", &mMIME)); +} + +void TSPacketizer::Track::extractCSDIfNecessary() { + if (mExtractedCSD) { + return; + } if (!strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_VIDEO_AVC) || !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) { for (size_t i = 0;; ++i) { sp csd; - if (!format->findBuffer(StringPrintf("csd-%d", i).c_str(), &csd)) { + if (!mFormat->findBuffer(StringPrintf("csd-%d", i).c_str(), &csd)) { break; } @@ -111,6 +120,8 @@ TSPacketizer::Track::Track( } } } + + mExtractedCSD = true; } TSPacketizer::Track::~Track() { @@ -407,6 +418,17 @@ ssize_t TSPacketizer::addTrack(const sp &format) { return mTracks.add(track); } +status_t TSPacketizer::extractCSDIfNecessary(size_t trackIndex) { + if (trackIndex >= mTracks.size()) { + return -ERANGE; + } + + const sp &track = mTracks.itemAt(trackIndex); + track->extractCSDIfNecessary(); + + return OK; +} + status_t TSPacketizer::packetize( size_t trackIndex, const sp &_accessUnit, diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.h b/media/libstagefright/wifi-display/source/TSPacketizer.h index a37917d..5d1d70e 100644 --- a/media/libstagefright/wifi-display/source/TSPacketizer.h +++ b/media/libstagefright/wifi-display/source/TSPacketizer.h @@ -50,6 +50,8 @@ struct TSPacketizer : public RefBase { const uint8_t *PES_private_data, size_t PES_private_data_len, size_t numStuffingBytes = 0); + status_t extractCSDIfNecessary(size_t trackIndex); + // XXX to be removed once encoder config option takes care of this for // encrypted mode. 
sp prependCSD( diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 825ebc6..07eb237 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -22,7 +22,7 @@ #include "PlaybackSession.h" #include "Parameters.h" #include "ParsedMessage.h" -#include "Sender.h" +#include "rtp/RTPSender.h" #include #include @@ -1140,7 +1140,7 @@ status_t WifiDisplaySource::onSetupRequest( return ERROR_MALFORMED; } - Sender::TransportMode transportMode = Sender::TRANSPORT_UDP; + RTPSender::TransportMode transportMode = RTPSender::TRANSPORT_UDP; int clientRtp, clientRtcp; if (transport.startsWith("RTP/AVP/TCP;")) { @@ -1149,7 +1149,7 @@ status_t WifiDisplaySource::onSetupRequest( transport.c_str(), "interleaved", &interleaved) && sscanf(interleaved.c_str(), "%d-%d", &clientRtp, &clientRtcp) == 2) { - transportMode = Sender::TRANSPORT_TCP_INTERLEAVED; + transportMode = RTPSender::TRANSPORT_TCP_INTERLEAVED; } else { bool badRequest = false; @@ -1171,7 +1171,7 @@ status_t WifiDisplaySource::onSetupRequest( return ERROR_MALFORMED; } - transportMode = Sender::TRANSPORT_TCP; + transportMode = RTPSender::TRANSPORT_TCP; } } else if (transport.startsWith("RTP/AVP;unicast;") || transport.startsWith("RTP/AVP/UDP;unicast;")) { @@ -1263,7 +1263,7 @@ status_t WifiDisplaySource::onSetupRequest( AString response = "RTSP/1.0 200 OK\r\n"; AppendCommonResponse(&response, cseq, playbackSessionID); - if (transportMode == Sender::TRANSPORT_TCP_INTERLEAVED) { + if (transportMode == RTPSender::TRANSPORT_TCP_INTERLEAVED) { response.append( StringPrintf( "Transport: RTP/AVP/TCP;interleaved=%d-%d;", @@ -1272,7 +1272,7 @@ status_t WifiDisplaySource::onSetupRequest( int32_t serverRtp = playbackSession->getRTPPort(); AString transportString = "UDP"; - if (transportMode == Sender::TRANSPORT_TCP) { + if (transportMode == RTPSender::TRANSPORT_TCP) { transportString = "TCP"; } -- cgit v1.1 From 6507d14c6d10f93d390de62b9eed267f9b544985 Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Tue, 5 Mar 2013 14:31:02 -0800 Subject: Correct MediaCodec + Surface behavior Assorted tweaks: - Allow signalEndOfInputStream() before ACodec is in Executing state (added message to two more states). - Return an error if signalEndOfInputStream() is called a second time on the same stream. - Require AndroidOpaque color format in createInputSurface(). - Disallow dequeueInputBuffer() after an input surface has been created (boolean flag in MediaCodec tracks it). - Discard input surface when encoder is re-configure()ed (drop OMXNodeInstance's ref when we go back to Loaded). 
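The tweaks above translate into the following encoder-side usage pattern. This is an
illustrative sketch only, not part of the patch: "codec" is assumed to be a video
encoder already configured with CONFIGURE_FLAG_ENCODE and the AndroidOpaque color
format, and the sp<> template argument of createInputSurface() is assumed to be
IGraphicBufferProducer (the angle-bracketed types were stripped from this diff).

    sp<IGraphicBufferProducer> bufferProducer;
    status_t err = codec->createInputSurface(&bufferProducer);
    CHECK_EQ(err, (status_t)OK);   // rejected unless the input port uses AndroidOpaque

    CHECK_EQ(codec->start(), (status_t)OK);

    size_t index;
    err = codec->dequeueInputBuffer(&index, 0ll);
    // With an input surface attached this now returns INVALID_OPERATION;
    // frames are fed through bufferProducer instead of input buffers.

    CHECK_EQ(codec->signalEndOfInputStream(), (status_t)OK);
    err = codec->signalEndOfInputStream();
    // A second EOS on the same stream now also returns INVALID_OPERATION.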
Bug 7991062 Change-Id: Iff30f3036e14eb5a2f6536910dcf11aba33031ee --- include/media/stagefright/ACodec.h | 3 ++ include/media/stagefright/MediaCodec.h | 2 ++ media/libstagefright/ACodec.cpp | 39 +++++++++++++++--------- media/libstagefright/MediaCodec.cpp | 20 ++++++++---- media/libstagefright/omx/GraphicBufferSource.cpp | 25 +++++++++------ media/libstagefright/omx/GraphicBufferSource.h | 9 +++--- media/libstagefright/omx/OMXNodeInstance.cpp | 17 ++++++++--- 7 files changed, 74 insertions(+), 41 deletions(-) diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h index 097ec5f..96baf34 100644 --- a/include/media/stagefright/ACodec.h +++ b/include/media/stagefright/ACodec.h @@ -281,6 +281,9 @@ private: status_t requestIDRFrame(); status_t setParameters(const sp ¶ms); + // Send EOS on input stream. + void onSignalEndOfInputStream(); + DISALLOW_EVIL_CONSTRUCTORS(ACodec); }; diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h index ef695a7..35f46dc 100644 --- a/include/media/stagefright/MediaCodec.h +++ b/include/media/stagefright/MediaCodec.h @@ -212,6 +212,8 @@ private: sp mActivityNotify; + bool mHaveInputSurface; + MediaCodec(const sp &looper); static status_t PostAndAwaitResponse( diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 59fc45e..1a2eeb1 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -241,9 +241,6 @@ struct ACodec::ExecutingState : public ACodec::BaseState { // to fill with data. void resume(); - // Send EOS on input stream. - void onSignalEndOfInputStream(); - // Returns true iff input and output buffers are in play. bool active() const { return mActive; } @@ -3413,6 +3410,12 @@ bool ACodec::LoadedToIdleState::onMessageReceived(const sp &msg) { return true; } + case kWhatSignalEndOfInputStream: + { + mCodec->onSignalEndOfInputStream(); + return true; + } + default: return BaseState::onMessageReceived(msg); } @@ -3458,6 +3461,12 @@ bool ACodec::IdleToExecutingState::onMessageReceived(const sp &msg) { return true; } + case kWhatSignalEndOfInputStream: + { + mCodec->onSignalEndOfInputStream(); + return true; + } + default: return BaseState::onMessageReceived(msg); } @@ -3538,17 +3547,6 @@ void ACodec::ExecutingState::resume() { mActive = true; } -void ACodec::ExecutingState::onSignalEndOfInputStream() { - sp notify = mCodec->mNotify->dup(); - notify->setInt32("what", ACodec::kWhatSignaledInputEOS); - - status_t err = mCodec->mOMX->signalEndOfInputStream(mCodec->mNode); - if (err != OK) { - notify->setInt32("err", err); - } - notify->post(); -} - void ACodec::ExecutingState::stateEntered() { ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); @@ -3640,7 +3638,7 @@ bool ACodec::ExecutingState::onMessageReceived(const sp &msg) { case ACodec::kWhatSignalEndOfInputStream: { - onSignalEndOfInputStream(); + mCodec->onSignalEndOfInputStream(); handled = true; break; } @@ -3678,6 +3676,17 @@ status_t ACodec::setParameters(const sp ¶ms) { return OK; } +void ACodec::onSignalEndOfInputStream() { + sp notify = mNotify->dup(); + notify->setInt32("what", ACodec::kWhatSignaledInputEOS); + + status_t err = mOMX->signalEndOfInputStream(mNode); + if (err != OK) { + notify->setInt32("err", err); + } + notify->post(); +} + bool ACodec::ExecutingState::onOMXEvent( OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { switch (event) { diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index 79ea04c..0d89c0f 100644 --- 
a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -69,7 +69,8 @@ MediaCodec::MediaCodec(const sp &looper) mDequeueInputTimeoutGeneration(0), mDequeueInputReplyID(0), mDequeueOutputTimeoutGeneration(0), - mDequeueOutputReplyID(0) { + mDequeueOutputReplyID(0), + mHaveInputSurface(false) { } MediaCodec::~MediaCodec() { @@ -160,8 +161,6 @@ status_t MediaCodec::createInputSurface( sp* bufferProducer) { sp msg = new AMessage(kWhatCreateInputSurface, id()); - // TODO(fadden): require MediaFormat colorFormat == AndroidOpaque - sp response; status_t err = PostAndAwaitResponse(msg, &response); if (err == NO_ERROR) { @@ -256,8 +255,6 @@ status_t MediaCodec::queueSecureInputBuffer( } status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) { - // TODO(fadden): fail if an input Surface has been configured - sp msg = new AMessage(kWhatDequeueInputBuffer, id()); msg->setInt64("timeoutUs", timeoutUs); @@ -604,6 +601,9 @@ void MediaCodec::onMessageReceived(const sp &msg) { CHECK_EQ(mState, CONFIGURING); setState(CONFIGURED); + // reset input surface flag + mHaveInputSurface = false; + (new AMessage)->postReply(mReplyID); break; } @@ -618,6 +618,7 @@ void MediaCodec::onMessageReceived(const sp &msg) { msg->findObject("input-surface", &obj); CHECK(obj != NULL); response->setObject("input-surface", obj); + mHaveInputSurface = true; } else { response->setInt32("err", err); } @@ -1029,10 +1030,17 @@ void MediaCodec::onMessageReceived(const sp &msg) { case kWhatDequeueInputBuffer: { - // TODO(fadden): make this fail if we're using an input Surface uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); + if (mHaveInputSurface) { + ALOGE("dequeueInputBuffer can't be used with input surface"); + sp response = new AMessage; + response->setInt32("err", INVALID_OPERATION); + response->postReply(replyID); + break; + } + if (handleDequeueInputBuffer(replyID, true /* new request */)) { break; } diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp index f207954..211e1d1 100644 --- a/media/libstagefright/omx/GraphicBufferSource.cpp +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -15,6 +15,7 @@ */ #define LOG_TAG "GraphicBufferSource" +//#define LOG_NDEBUG 0 #include #include @@ -110,15 +111,12 @@ void GraphicBufferSource::omxExecuting() { } } -void GraphicBufferSource::omxIdling(){ +void GraphicBufferSource::omxLoaded(){ Mutex::Autolock autoLock(mMutex); - ALOGV("--> idling"); - if (!mExecuting) { - // Transition from "loading" to "idling". Nothing to do. - return; - } + ALOGV("--> loaded"); + CHECK(mExecuting); - ALOGV("Dropped down to idle, avail=%d eos=%d eosSent=%d", + ALOGV("Dropped down to loaded, avail=%d eos=%d eosSent=%d", mNumFramesAvailable, mEndOfStream, mEndOfStreamSent); // Codec is no longer executing. Discard all codec-related state. @@ -282,10 +280,15 @@ status_t GraphicBufferSource::fillCodecBuffer_l() { return OK; } -void GraphicBufferSource::signalEndOfInputStream() { +status_t GraphicBufferSource::signalEndOfInputStream() { Mutex::Autolock autoLock(mMutex); - ALOGV("signalEndOfInputStream: exec=%d avail=%d", - mExecuting, mNumFramesAvailable); + ALOGV("signalEndOfInputStream: exec=%d avail=%d eos=%d", + mExecuting, mNumFramesAvailable, mEndOfStream); + + if (mEndOfStream) { + ALOGE("EOS was already signaled"); + return INVALID_OPERATION; + } // Set the end-of-stream flag. 
If no frames are pending from the // BufferQueue, and a codec buffer is available, and we're executing, @@ -300,6 +303,8 @@ void GraphicBufferSource::signalEndOfInputStream() { if (mExecuting && mNumFramesAvailable == 0) { submitEndOfInputStream_l(); } + + return OK; } status_t GraphicBufferSource::submitBuffer_l(sp& graphicBuffer, diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h index 6d49f96..6a34bc5 100644 --- a/media/libstagefright/omx/GraphicBufferSource.h +++ b/media/libstagefright/omx/GraphicBufferSource.h @@ -67,10 +67,9 @@ public: // sitting in the BufferQueue, this will send them to the codec. void omxExecuting(); - // This is called when OMX transitions to OMX_StateIdle. If we were - // previously executing, this means we're about to be shut down. (We - // also enter Idle on the way up.) - void omxIdling(); + // This is called when OMX transitions to OMX_StateLoaded, indicating that + // we are shutting down. + void omxLoaded(); // A "codec buffer", i.e. a buffer that can be used to pass data into // the encoder, has been allocated. (This call does not call back into @@ -84,7 +83,7 @@ public: // This is called after the last input frame has been submitted. We // need to submit an empty buffer with the EOS flag set. If we don't // have a codec buffer ready, we just set the mEndOfStream flag. - void signalEndOfInputStream(); + status_t signalEndOfInputStream(); protected: // BufferQueue::ConsumerListener interface, called when a new frame of diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index 6c2c33b..f3d8d14 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -584,6 +584,11 @@ status_t OMXNodeInstance::createInputSurface( mHandle, OMX_IndexParamPortDefinition, &def); CHECK(oerr == OMX_ErrorNone); + if (def.format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque) { + ALOGE("createInputSurface requires AndroidOpaque color format"); + return INVALID_OPERATION; + } + GraphicBufferSource* bufferSource = new GraphicBufferSource( this, def.format.video.nFrameWidth, def.format.video.nFrameHeight); if ((err = bufferSource->initCheck()) != OK) { @@ -602,11 +607,10 @@ status_t OMXNodeInstance::signalEndOfInputStream() { // flag set). Seems easier than doing the equivalent from here. sp bufferSource(getGraphicBufferSource()); if (bufferSource == NULL) { - ALOGW("signalEndOfInputStream should only be used with Surface input"); + ALOGW("signalEndOfInputStream can only be used with Surface input"); return INVALID_OPERATION; }; - bufferSource->signalEndOfInputStream(); - return OK; + return bufferSource->signalEndOfInputStream(); } status_t OMXNodeInstance::allocateBuffer( @@ -801,8 +805,11 @@ void OMXNodeInstance::onEvent( arg1 == OMX_CommandStateSet) { if (arg2 == OMX_StateExecuting) { bufferSource->omxExecuting(); - } else if (arg2 == OMX_StateIdle) { - bufferSource->omxIdling(); + } else if (arg2 == OMX_StateLoaded) { + // Must be shutting down -- won't have a GraphicBufferSource + // on the way up. 
+ bufferSource->omxLoaded(); + setGraphicBufferSource(NULL); } } } -- cgit v1.1 From 910403e064d37bd7f135f7de0cd3c64c5fa417c8 Mon Sep 17 00:00:00 2001 From: Alex Ray Date: Wed, 6 Mar 2013 23:28:51 -0800 Subject: Camera: Log warning on out-of-range face detect scores Bug: 7368364 Change-Id: I7b1e995dd15e87c816b89ccf9968a222fd66d140 --- services/camera/libcameraservice/camera2/FrameProcessor.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp index 3129a0b..1f2659c 100644 --- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp +++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp @@ -247,6 +247,10 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, metadata.number_of_faces--; continue; } + if (faceScores[i] > 100) { + ALOGW("%s: Face index %d with out of range score %d", + __FUNCTION__, i, faceScores[i]); + } camera_face_t face; -- cgit v1.1 From bfd79f2a8e795f304062e22756c72d995af7a0e6 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 7 Mar 2013 10:33:20 -0800 Subject: The transport stream HDCP descriptor belongs in the program_info section instead of being included in the per-stream descriptors Change-Id: If5251c0c02456646e2fdbb5e62acf66c356cf13e --- media/libstagefright/wifi-display/MediaSender.cpp | 28 ++++++---- media/libstagefright/wifi-display/MediaSender.h | 2 + .../wifi-display/source/TSPacketizer.cpp | 61 +++++++++++++++------- .../wifi-display/source/TSPacketizer.h | 9 +++- 4 files changed, 70 insertions(+), 30 deletions(-) diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp index 900aa82..105c642 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -41,10 +41,16 @@ MediaSender::MediaSender( mMode(MODE_UNDEFINED), mGeneration(0), mPrevTimeUs(-1ll), - mInitDoneCount(0) { + mInitDoneCount(0), + mLogFile(NULL) { + // mLogFile = fopen("/data/misc/log.ts", "wb"); } MediaSender::~MediaSender() { + if (mLogFile != NULL) { + fclose(mLogFile); + mLogFile = NULL; + } } status_t MediaSender::setHDCP(const sp &hdcp) { @@ -89,21 +95,19 @@ status_t MediaSender::initAsync( return INVALID_OPERATION; } - mTSPacketizer = new TSPacketizer; + uint32_t flags = 0; + if (mHDCP != NULL) { + // XXX Determine proper HDCP version. + flags |= TSPacketizer::EMIT_HDCP20_DESCRIPTOR; + } + mTSPacketizer = new TSPacketizer(flags); status_t err = OK; for (size_t i = 0; i < mTrackInfos.size(); ++i) { TrackInfo *info = &mTrackInfos.editItemAt(i); - sp trackFormat = info->mFormat; - if (mHDCP != NULL && !info->mIsAudio) { - // HDCP2.0 _and_ HDCP 2.1 specs say to set the version - // inside the HDCP descriptor to 0x20!!! 
- trackFormat->setInt32("hdcp-version", 0x20); - } - ssize_t packetizerTrackIndex = - mTSPacketizer->addTrack(trackFormat); + mTSPacketizer->addTrack(info->mFormat); if (packetizerTrackIndex < 0) { err = packetizerTrackIndex; @@ -244,6 +248,10 @@ status_t MediaSender::queueAccessUnit( minTrackIndex, accessUnit, &tsPackets); if (err == OK) { + if (mLogFile != NULL) { + fwrite(tsPackets->data(), 1, tsPackets->size(), mLogFile); + } + err = mTSSender->queueBuffer( tsPackets, 33 /* packetType */, diff --git a/media/libstagefright/wifi-display/MediaSender.h b/media/libstagefright/wifi-display/MediaSender.h index 834780a..9a50f9a 100644 --- a/media/libstagefright/wifi-display/MediaSender.h +++ b/media/libstagefright/wifi-display/MediaSender.h @@ -107,6 +107,8 @@ private: size_t mInitDoneCount; + FILE *mLogFile; + void onSenderNotify(const sp &msg); void notifyInitDone(status_t err); diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp index 8420529..53b7187 100644 --- a/media/libstagefright/wifi-display/source/TSPacketizer.cpp +++ b/media/libstagefright/wifi-display/source/TSPacketizer.cpp @@ -325,12 +325,31 @@ void TSPacketizer::Track::finalize() { mDescriptors.push_back(descriptor); } - int32_t hdcpVersion; - if (mFormat->findInt32("hdcp-version", &hdcpVersion)) { - // HDCP descriptor + mFinalized = true; +} - CHECK(hdcpVersion == 0x20 || hdcpVersion == 0x21); +//////////////////////////////////////////////////////////////////////////////// + +TSPacketizer::TSPacketizer(uint32_t flags) + : mFlags(flags), + mPATContinuityCounter(0), + mPMTContinuityCounter(0) { + initCrcTable(); + if (flags & (EMIT_HDCP20_DESCRIPTOR | EMIT_HDCP21_DESCRIPTOR)) { + int32_t hdcpVersion; + if (flags & EMIT_HDCP20_DESCRIPTOR) { + CHECK(!(flags & EMIT_HDCP21_DESCRIPTOR)); + hdcpVersion = 0x20; + } else { + CHECK(!(flags & EMIT_HDCP20_DESCRIPTOR)); + + // HDCP2.0 _and_ HDCP 2.1 specs say to set the version + // inside the HDCP descriptor to 0x20!!! + hdcpVersion = 0x20; + } + + // HDCP descriptor sp descriptor = new ABuffer(7); uint8_t *data = descriptor->data(); data[0] = 0x05; // descriptor_tag @@ -341,18 +360,8 @@ void TSPacketizer::Track::finalize() { data[5] = 'P'; data[6] = hdcpVersion; - mDescriptors.push_back(descriptor); + mProgramInfoDescriptors.push_back(descriptor); } - - mFinalized = true; -} - -//////////////////////////////////////////////////////////////////////////////// - -TSPacketizer::TSPacketizer() - : mPATContinuityCounter(0), - mPMTContinuityCounter(0) { - initCrcTable(); } TSPacketizer::~TSPacketizer() { @@ -605,8 +614,9 @@ status_t TSPacketizer::packetize( // reserved = b111 // PCR_PID = kPCR_PID (13 bits) // reserved = b1111 - // program_info_length = 0x000 - // one or more elementary stream descriptions follow: + // program_info_length = 0x??? + // program_info_descriptors follow + // one or more elementary stream descriptions follow: // stream_type = 0x?? // reserved = b111 // elementary_PID = b? ???? ???? ???? 
(13 bits) @@ -638,8 +648,21 @@ status_t TSPacketizer::packetize( *ptr++ = 0x00; *ptr++ = 0xe0 | (kPID_PCR >> 8); *ptr++ = kPID_PCR & 0xff; - *ptr++ = 0xf0; - *ptr++ = 0x00; + + size_t program_info_length = 0; + for (size_t i = 0; i < mProgramInfoDescriptors.size(); ++i) { + program_info_length += mProgramInfoDescriptors.itemAt(i)->size(); + } + + CHECK_LT(program_info_length, 0x400); + *ptr++ = 0xf0 | (program_info_length >> 8); + *ptr++ = (program_info_length & 0xff); + + for (size_t i = 0; i < mProgramInfoDescriptors.size(); ++i) { + const sp &desc = mProgramInfoDescriptors.itemAt(i); + memcpy(ptr, desc->data(), desc->size()); + ptr += desc->size(); + } for (size_t i = 0; i < mTracks.size(); ++i) { const sp &track = mTracks.itemAt(i); diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.h b/media/libstagefright/wifi-display/source/TSPacketizer.h index 5d1d70e..4a664ee 100644 --- a/media/libstagefright/wifi-display/source/TSPacketizer.h +++ b/media/libstagefright/wifi-display/source/TSPacketizer.h @@ -32,7 +32,11 @@ struct AMessage; // Emits metadata tables (PAT and PMT) and timestamp stream (PCR) based // on flags. struct TSPacketizer : public RefBase { - TSPacketizer(); + enum { + EMIT_HDCP20_DESCRIPTOR = 1, + EMIT_HDCP21_DESCRIPTOR = 2, + }; + TSPacketizer(uint32_t flags); // Returns trackIndex or error. ssize_t addTrack(const sp &format); @@ -68,8 +72,11 @@ private: struct Track; + uint32_t mFlags; Vector > mTracks; + Vector > mProgramInfoDescriptors; + unsigned mPATContinuityCounter; unsigned mPMTContinuityCounter; -- cgit v1.1 From 5abf87f9af48149972eeb851ecaea679911da040 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 7 Mar 2013 10:57:07 -0800 Subject: Disable our fancy logic to respect both sink and source's native formats since it isn't actually supported by anything in the field. Change-Id: I9cd038d7631105de26303312ca87c472d67034d4 --- media/libstagefright/wifi-display/VideoFormats.cpp | 9 +++++++-- media/libstagefright/wifi-display/VideoFormats.h | 2 +- media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp | 8 -------- .../libstagefright/wifi-display/source/WifiDisplaySource.cpp | 12 ++---------- 4 files changed, 10 insertions(+), 21 deletions(-) diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp index 9ad8c3c..d171c6f 100644 --- a/media/libstagefright/wifi-display/VideoFormats.cpp +++ b/media/libstagefright/wifi-display/VideoFormats.cpp @@ -256,7 +256,7 @@ bool VideoFormats::parseFormatSpec(const char *spec) { return GetConfiguration(mNativeType, mNativeIndex, NULL, NULL, NULL, NULL); } -AString VideoFormats::getFormatSpec() const { +AString VideoFormats::getFormatSpec(bool forM4Message) const { CHECK_EQ(kNumResolutionTypes, 3); // wfd_video_formats: @@ -277,7 +277,7 @@ AString VideoFormats::getFormatSpec() const { return StringPrintf( "%02x 00 02 02 %08x %08x %08x 00 0000 0000 00 none none", - (mNativeIndex << 3) | mNativeType, + forM4Message ? 0x00 : ((mNativeIndex << 3) | mNativeType), mResolutionEnabled[0], mResolutionEnabled[1], mResolutionEnabled[2]); @@ -289,6 +289,10 @@ bool VideoFormats::PickBestFormat( const VideoFormats &sourceSupported, ResolutionType *chosenType, size_t *chosenIndex) { +#if 0 + // Support for the native format is a great idea, the spec includes + // these features, but nobody supports it and the tests don't validate it. 
+ ResolutionType nativeType; size_t nativeIndex; sinkSupported.getNativeResolution(&nativeType, &nativeIndex); @@ -316,6 +320,7 @@ bool VideoFormats::PickBestFormat( ALOGW("Source advertised native resolution that it doesn't " "actually support... ignoring"); } +#endif bool first = true; uint32_t bestScore = 0; diff --git a/media/libstagefright/wifi-display/VideoFormats.h b/media/libstagefright/wifi-display/VideoFormats.h index a84407a..69e2197 100644 --- a/media/libstagefright/wifi-display/VideoFormats.h +++ b/media/libstagefright/wifi-display/VideoFormats.h @@ -60,7 +60,7 @@ struct VideoFormats { bool *interlaced); bool parseFormatSpec(const char *spec); - AString getFormatSpec() const; + AString getFormatSpec(bool forM4Message = false) const; static bool PickBestFormat( const VideoFormats &sinkSupported, diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index a6f58cd..158c2da 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -47,19 +47,11 @@ WifiDisplaySink::WifiDisplaySink( mSessionID(0), mNextCSeq(1), mIDRFrameRequestPending(false) { -#if 1 // We support any and all resolutions, but prefer 720p30 mSinkSupportedVideoFormats.setNativeResolution( VideoFormats::RESOLUTION_CEA, 5); // 1280 x 720 p30 mSinkSupportedVideoFormats.enableAll(); -#else - // We only support 640 x 360 p30. - mSinkSupportedVideoFormats.disableAll(); - - mSinkSupportedVideoFormats.setNativeResolution( - VideoFormats::RESOLUTION_HH, 6); // 640 x 360 p30 -#endif } WifiDisplaySink::~WifiDisplaySink() { diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 07eb237..b8524f6 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -59,18 +59,10 @@ WifiDisplaySource::WifiDisplaySource( mHDCPPort(0), mHDCPInitializationComplete(false), mSetupTriggerDeferred(false) { - mSupportedSourceVideoFormats.enableAll(); + mSupportedSourceVideoFormats.disableAll(); mSupportedSourceVideoFormats.setNativeResolution( VideoFormats::RESOLUTION_CEA, 5); // 1280x720 p30 - - // Disable resolutions above 1080p since the encoder won't be able to - // handle them. - mSupportedSourceVideoFormats.setResolutionEnabled( - VideoFormats::RESOLUTION_VESA, 28, false); // 1920x1200 p30 - - mSupportedSourceVideoFormats.setResolutionEnabled( - VideoFormats::RESOLUTION_VESA, 29, false); // 1920x1200 p60 } WifiDisplaySource::~WifiDisplaySource() { @@ -607,7 +599,7 @@ status_t WifiDisplaySource::sendM4(int32_t sessionID) { chosenVideoFormat.setNativeResolution( mChosenVideoResolutionType, mChosenVideoResolutionIndex); - body.append(chosenVideoFormat.getFormatSpec()); + body.append(chosenVideoFormat.getFormatSpec(true /* forM4Message */)); body.append("\r\n"); } -- cgit v1.1 From 3db62dfc5102247d415df4667bd9609e669fc022 Mon Sep 17 00:00:00 2001 From: ztenghui Date: Fri, 22 Feb 2013 14:32:59 -0800 Subject: Clean up the native code to match Java update 1. Add flags to match the java side change. 2. Update the interface. 
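For illustration only, a minimal usage sketch (not part of the patch itself) of the updated native interface, assuming the track format and sample buffer are prepared elsewhere; the output path, timestamp, and helper function name are placeholders:

#include <media/stagefright/MediaMuxer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

// 'format' and 'buffer' are assumed to come from an extractor or encoder.
static void writeOneSyncSample(const sp<AMessage> &format,
                               const sp<ABuffer> &buffer, int64_t timeUs) {
    // The constructor now takes an explicit output format.
    sp<MediaMuxer> muxer = new MediaMuxer("/sdcard/out.mp4",
                                          MediaMuxer::OUTPUT_FORMAT_MPEG_4);
    ssize_t trackIndex = muxer->addTrack(format);
    muxer->start();
    // Callers pass MediaMuxer::SAMPLE_FLAG_SYNC rather than the
    // MediaCodec buffer flag to mark a sync sample.
    muxer->writeSampleData(buffer, trackIndex, timeUs,
                           MediaMuxer::SAMPLE_FLAG_SYNC);
    muxer->stop();
}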
bug:7991013 Change-Id: I8ffe84c466b2a68e2e1e48b35b78db9e44640265 --- cmds/stagefright/muxer.cpp | 3 ++- include/media/stagefright/MediaMuxer.h | 19 +++++++++++++++++-- media/libstagefright/MediaMuxer.cpp | 23 ++++++++++++++--------- 3 files changed, 33 insertions(+), 12 deletions(-) diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp index 1b127c7..fac2acc 100644 --- a/cmds/stagefright/muxer.cpp +++ b/cmds/stagefright/muxer.cpp @@ -69,7 +69,8 @@ static int muxing( ALOGV("input file %s, output file %s", path, outputFileName); ALOGV("useAudio %d, useVideo %d", useAudio, useVideo); - sp muxer = new MediaMuxer(outputFileName); + sp muxer = new MediaMuxer(outputFileName, + MediaMuxer::OUTPUT_FORMAT_MPEG_4); size_t trackCount = extractor->countTracks(); // Map the extractor's track index to the muxer's track index. diff --git a/include/media/stagefright/MediaMuxer.h b/include/media/stagefright/MediaMuxer.h index 27a141e..167d0d9 100644 --- a/include/media/stagefright/MediaMuxer.h +++ b/include/media/stagefright/MediaMuxer.h @@ -40,11 +40,25 @@ struct MPEG4Writer; // deleting the output file after stop. struct MediaMuxer : public RefBase { public: + // Please update media/java/android/media/MediaMuxer.java if the + // SampleFlags is updated. + enum SampleFlags { + SAMPLE_FLAG_SYNC = 1, + }; + + // Please update media/java/android/media/MediaMuxer.java if the + // OutputFormat is updated. + enum OutputFormat { + OUTPUT_FORMAT_MPEG_4 = 0, + OUTPUT_FORMAT_LIST_END // must be last - used to validate format type + }; + // Construct the muxer with the output file path. - MediaMuxer(const char* pathOut); + MediaMuxer(const char *path, OutputFormat format); + // Construct the muxer with the file descriptor. Note that the MediaMuxer // will close this file at stop(). - MediaMuxer(int fd); + MediaMuxer(int fd, OutputFormat format); virtual ~MediaMuxer(); @@ -94,6 +108,7 @@ private: Mutex mMuxerLock; enum State { + UNINITED, INITED, STARTED, STOPPED diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp index 30bed90..aefc270 100644 --- a/media/libstagefright/MediaMuxer.cpp +++ b/media/libstagefright/MediaMuxer.cpp @@ -35,14 +35,20 @@ namespace android { -MediaMuxer::MediaMuxer(const char* pathOut) - : mState(INITED) { - mWriter = new MPEG4Writer(pathOut); +MediaMuxer::MediaMuxer(const char *path, OutputFormat format) + : mState(UNINITED) { + if (format == OUTPUT_FORMAT_MPEG_4) { + mWriter = new MPEG4Writer(path); + mState = INITED; + } } -MediaMuxer::MediaMuxer(int fd) - : mState(INITED) { - mWriter = new MPEG4Writer(fd); +MediaMuxer::MediaMuxer(int fd, OutputFormat format) + : mState(UNINITED) { + if (format == OUTPUT_FORMAT_MPEG_4) { + mWriter = new MPEG4Writer(fd); + mState = INITED; + } } MediaMuxer::~MediaMuxer() { @@ -107,8 +113,6 @@ status_t MediaMuxer::writeSampleData(const sp &buffer, size_t trackInde int64_t timeUs, uint32_t flags) { Mutex::Autolock autoLock(mMuxerLock); - sp currentTrack = mTrackList[trackIndex]; - if (buffer.get() == NULL) { ALOGE("WriteSampleData() get an NULL buffer."); return -EINVAL; @@ -134,10 +138,11 @@ status_t MediaMuxer::writeSampleData(const sp &buffer, size_t trackInde // Just set the kKeyDecodingTime as the presentation time for now. metaData->setInt64(kKeyDecodingTime, timeUs); - if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) { + if (flags & SAMPLE_FLAG_SYNC) { metaData->setInt32(kKeyIsSyncFrame, true); } + sp currentTrack = mTrackList[trackIndex]; // This pushBuffer will wait until the mediaBuffer is consumed. 
return currentTrack->pushBuffer(mediaBuffer); } -- cgit v1.1 From 0d35f7818dedd67844cc90218d5c131a8644f802 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 11 Mar 2013 10:58:50 -0700 Subject: Increase size of statistics buffer Bug: 8354576 Change-Id: I327e9dc203e09df8abc21e589ce9056540618abd --- services/audioflinger/FastMixer.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/audioflinger/FastMixer.h b/services/audioflinger/FastMixer.h index 462739b..2ab1d04 100644 --- a/services/audioflinger/FastMixer.h +++ b/services/audioflinger/FastMixer.h @@ -107,7 +107,7 @@ struct FastMixerDumpState { #ifdef FAST_MIXER_STATISTICS // Recently collected samples of per-cycle monotonic time, thread CPU time, and CPU frequency. // kSamplingN is the size of the sampling frame, and must be a power of 2 <= 0x8000. - static const uint32_t kSamplingN = 0x1000; + static const uint32_t kSamplingN = 0x8000; // The bounds define the interval of valid samples, and are represented as follows: // newest open (excluded) endpoint = lower 16 bits of bounds, modulo N // oldest closed (included) endpoint = upper 16 bits of bounds, modulo N -- cgit v1.1 From 44cfcf00b9008c1c04f4c8277c6c06af039fd976 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Fri, 1 Mar 2013 16:22:28 -0800 Subject: CameraService: Refactor Camera2Client to share a base with ProCameraClient Change-Id: I249e2a0fc47ae84f29c9d9c4a223fba13da3ee66 --- services/camera/libcameraservice/Android.mk | 1 + services/camera/libcameraservice/Camera2Client.cpp | 152 +++------- services/camera/libcameraservice/Camera2Client.h | 45 +-- .../camera/libcameraservice/Camera2ClientBase.cpp | 315 +++++++++++++++++++++ .../camera/libcameraservice/Camera2ClientBase.h | 128 +++++++++ services/camera/libcameraservice/CameraClient.cpp | 27 +- services/camera/libcameraservice/CameraService.cpp | 14 +- services/camera/libcameraservice/CameraService.h | 14 +- .../camera/libcameraservice/ProCamera2Client.cpp | 223 ++------------- .../camera/libcameraservice/ProCamera2Client.h | 84 +----- .../libcameraservice/camera2/CallbackProcessor.cpp | 7 +- .../libcameraservice/camera2/CaptureSequencer.cpp | 20 +- .../libcameraservice/camera2/FrameProcessor.cpp | 10 +- .../libcameraservice/camera2/ProFrameProcessor.cpp | 7 +- .../camera2/StreamingProcessor.cpp | 6 +- 15 files changed, 583 insertions(+), 470 deletions(-) create mode 100644 services/camera/libcameraservice/Camera2ClientBase.cpp create mode 100644 services/camera/libcameraservice/Camera2ClientBase.h diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index d6ad889..8600735 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -11,6 +11,7 @@ LOCAL_SRC_FILES:= \ CameraClient.cpp \ Camera2Client.cpp \ ProCamera2Client.cpp \ + Camera2ClientBase.cpp \ CameraDeviceBase.cpp \ Camera2Device.cpp \ Camera3Device.cpp \ diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index 8295905..eb7a8d8 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -49,9 +49,8 @@ Camera2Client::Camera2Client(const sp& cameraService, uid_t clientUid, int servicePid, int deviceVersion): - Client(cameraService, cameraClient, clientPackageName, + Camera2ClientBase(cameraService, cameraClient, clientPackageName, cameraId, cameraFacing, clientPid, clientUid, servicePid), - 
mSharedCameraClient(cameraClient), mParameters(cameraId, cameraFacing) { ATRACE_CALL(); @@ -76,15 +75,6 @@ Camera2Client::Camera2Client(const sp& cameraService, l.mParameters.state = Parameters::DISCONNECTED; } -status_t Camera2Client::checkPid(const char* checkLocation) const { - int callingPid = getCallingPid(); - if (callingPid == mClientPid) return NO_ERROR; - - ALOGE("%s: attempt to use a locked camera from a different process" - " (old pid %d, new pid %d)", checkLocation, mClientPid, callingPid); - return PERMISSION_DENIED; -} - status_t Camera2Client::initialize(camera_module_t *module) { ATRACE_CALL(); @@ -173,7 +163,7 @@ status_t Camera2Client::dump(int fd, const Vector& args) { String8 result; result.appendFormat("Client2[%d] (%p) PID: %d, dump:\n", mCameraId, - getCameraClient()->asBinder().get(), + getRemoteCallback()->asBinder().get(), mClientPid); result.append(" State: "); #define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break; @@ -376,25 +366,15 @@ status_t Camera2Client::dump(int fd, const Vector& args) { mZslProcessor->dump(fd, args); - result = " Device dump:\n"; - write(fd, result.string(), result.size()); - - status_t res = mDevice->dump(fd, args); - if (res != OK) { - result = String8::format(" Error dumping device: %s (%d)", - strerror(-res), res); - write(fd, result.string(), result.size()); - } - + return dumpDevice(fd, args); #undef CASE_APPEND_ENUM - return NO_ERROR; } // ICamera interface void Camera2Client::disconnect() { ATRACE_CALL(); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); // Allow both client and the media server to disconnect at all times int callingPid = getCallingPid(); @@ -444,7 +424,7 @@ void Camera2Client::disconnect() { status_t Camera2Client::connect(const sp& client) { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); if (mClientPid != 0 && getCallingPid() != mClientPid) { ALOGE("%s: Camera %d: Connection attempt from pid %d; " @@ -455,8 +435,8 @@ status_t Camera2Client::connect(const sp& client) { mClientPid = getCallingPid(); - mCameraClient = client; - mSharedCameraClient = client; + mRemoteCallback = client; + mSharedCameraCallbacks = client; return OK; } @@ -464,7 +444,7 @@ status_t Camera2Client::connect(const sp& client) { status_t Camera2Client::lock() { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d", __FUNCTION__, mCameraId, getCallingPid(), mClientPid); @@ -485,7 +465,7 @@ status_t Camera2Client::lock() { status_t Camera2Client::unlock() { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d", __FUNCTION__, mCameraId, getCallingPid(), mClientPid); @@ -497,8 +477,8 @@ status_t Camera2Client::unlock() { return INVALID_OPERATION; } mClientPid = 0; - mCameraClient.clear(); - mSharedCameraClient.clear(); + mRemoteCallback.clear(); + mSharedCameraCallbacks.clear(); return OK; } @@ -511,7 +491,7 @@ status_t Camera2Client::setPreviewDisplay( const sp& surface) { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; @@ -529,7 +509,7 @@ status_t 
Camera2Client::setPreviewTexture( const sp& bufferProducer) { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; @@ -598,7 +578,7 @@ status_t Camera2Client::setPreviewWindowL(const sp& binder, void Camera2Client::setPreviewCallbackFlag(int flag) { ATRACE_CALL(); ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); if ( checkPid(__FUNCTION__) != OK) return; @@ -637,7 +617,7 @@ void Camera2Client::setPreviewCallbackFlagL(Parameters ¶ms, int flag) { status_t Camera2Client::startPreview() { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; SharedParameters::Lock l(mParameters); @@ -753,7 +733,7 @@ status_t Camera2Client::startPreviewL(Parameters ¶ms, bool restart) { void Camera2Client::stopPreview() { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return; stopPreviewL(); @@ -801,7 +781,7 @@ void Camera2Client::stopPreviewL() { bool Camera2Client::previewEnabled() { ATRACE_CALL(); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return false; @@ -811,7 +791,7 @@ bool Camera2Client::previewEnabled() { status_t Camera2Client::storeMetaDataInBuffers(bool enabled) { ATRACE_CALL(); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; @@ -836,7 +816,7 @@ status_t Camera2Client::storeMetaDataInBuffers(bool enabled) { status_t Camera2Client::startRecording() { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; SharedParameters::Lock l(mParameters); @@ -927,7 +907,7 @@ status_t Camera2Client::startRecordingL(Parameters ¶ms, bool restart) { void Camera2Client::stopRecording() { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); SharedParameters::Lock l(mParameters); status_t res; @@ -959,7 +939,7 @@ void Camera2Client::stopRecording() { bool Camera2Client::recordingEnabled() { ATRACE_CALL(); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); if ( checkPid(__FUNCTION__) != OK) return false; @@ -976,7 +956,7 @@ bool Camera2Client::recordingEnabledL() { void Camera2Client::releaseRecordingFrame(const sp& mem) { ATRACE_CALL(); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); if ( checkPid(__FUNCTION__) != OK) return; mStreamingProcessor->releaseRecordingFrame(mem); @@ -984,7 +964,7 @@ void Camera2Client::releaseRecordingFrame(const sp& mem) { status_t Camera2Client::autoFocus() { ATRACE_CALL(); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; @@ -1022,9 +1002,9 @@ status_t Camera2Client::autoFocus() { * Send immediate notification 
back to client */ if (notifyImmediately) { - SharedCameraClient::Lock l(mSharedCameraClient); - if (l.mCameraClient != 0) { - l.mCameraClient->notifyCallback(CAMERA_MSG_FOCUS, + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, notifySuccess ? 1 : 0, 0); } return OK; @@ -1055,7 +1035,7 @@ status_t Camera2Client::autoFocus() { status_t Camera2Client::cancelAutoFocus() { ATRACE_CALL(); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; @@ -1087,7 +1067,7 @@ status_t Camera2Client::cancelAutoFocus() { status_t Camera2Client::takePicture(int msgType) { ATRACE_CALL(); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; @@ -1146,7 +1126,7 @@ status_t Camera2Client::takePicture(int msgType) { status_t Camera2Client::setParameters(const String8& params) { ATRACE_CALL(); ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; @@ -1163,7 +1143,7 @@ status_t Camera2Client::setParameters(const String8& params) { String8 Camera2Client::getParameters() const { ATRACE_CALL(); ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); if ( checkPid(__FUNCTION__) != OK) return String8(); SharedParameters::ReadLock l(mParameters); @@ -1173,7 +1153,7 @@ String8 Camera2Client::getParameters() const { status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) { ATRACE_CALL(); - Mutex::Autolock icl(mICameraLock); + Mutex::Autolock icl(mBinderSerializationLock); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; @@ -1348,18 +1328,6 @@ status_t Camera2Client::commandSetVideoBufferCountL(size_t count) { } /** Device-related methods */ - -void Camera2Client::notifyError(int errorCode, int arg1, int arg2) { - ALOGE("Error condition %d reported by HAL, arguments %d, %d", errorCode, arg1, arg2); -} - -void Camera2Client::notifyShutter(int frameNumber, nsecs_t timestamp) { - (void)frameNumber; - (void)timestamp; - ALOGV("%s: Shutter notification for frame %d at time %lld", __FUNCTION__, - frameNumber, timestamp); -} - void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) { ALOGV("%s: Autofocus state now %d, last trigger %d", __FUNCTION__, newState, triggerId); @@ -1455,16 +1423,16 @@ void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) { } } if (sendMovingMessage) { - SharedCameraClient::Lock l(mSharedCameraClient); - if (l.mCameraClient != 0) { - l.mCameraClient->notifyCallback(CAMERA_MSG_FOCUS_MOVE, + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE, afInMotion ? 1 : 0, 0); } } if (sendCompletedMessage) { - SharedCameraClient::Lock l(mSharedCameraClient); - if (l.mCameraClient != 0) { - l.mCameraClient->notifyCallback(CAMERA_MSG_FOCUS, + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, success ? 
1 : 0, 0); } } @@ -1476,25 +1444,6 @@ void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) { mCaptureSequencer->notifyAutoExposure(newState, triggerId); } -void Camera2Client::notifyAutoWhitebalance(uint8_t newState, int triggerId) { - (void)newState; - (void)triggerId; - ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", - __FUNCTION__, newState, triggerId); -} - -int Camera2Client::getCameraId() const { - return mCameraId; -} - -const sp& Camera2Client::getCameraDevice() { - return mDevice; -} - -const sp& Camera2Client::getCameraService() { - return mCameraService; -} - camera2::SharedParameters& Camera2Client::getParameters() { return mParameters; } @@ -1533,32 +1482,6 @@ status_t Camera2Client::stopStream() { return mStreamingProcessor->stopStream(); } -Camera2Client::SharedCameraClient::Lock::Lock(SharedCameraClient &client): - mCameraClient(client.mCameraClient), - mSharedClient(client) { - mSharedClient.mCameraClientLock.lock(); -} - -Camera2Client::SharedCameraClient::Lock::~Lock() { - mSharedClient.mCameraClientLock.unlock(); -} - -Camera2Client::SharedCameraClient::SharedCameraClient(const sp&client): - mCameraClient(client) { -} - -Camera2Client::SharedCameraClient& Camera2Client::SharedCameraClient::operator=( - const sp&client) { - Mutex::Autolock l(mCameraClientLock); - mCameraClient = client; - return *this; -} - -void Camera2Client::SharedCameraClient::clear() { - Mutex::Autolock l(mCameraClientLock); - mCameraClient.clear(); -} - const int32_t Camera2Client::kPreviewRequestIdStart; const int32_t Camera2Client::kPreviewRequestIdEnd; const int32_t Camera2Client::kRecordingRequestIdStart; @@ -1660,4 +1583,5 @@ status_t Camera2Client::syncWithDevice() { return res; } + } // namespace android diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h index 80b88f4..713fab3 100644 --- a/services/camera/libcameraservice/Camera2Client.h +++ b/services/camera/libcameraservice/Camera2Client.h @@ -26,6 +26,7 @@ #include "camera2/ZslProcessor.h" #include "camera2/CaptureSequencer.h" #include "camera2/CallbackProcessor.h" +#include "Camera2ClientBase.h" namespace android { @@ -35,8 +36,7 @@ class IMemory; * CAMERA_DEVICE_API_VERSION_2_0 and 3_0. */ class Camera2Client : - public CameraService::Client, - public CameraDeviceBase::NotificationListener + public Camera2ClientBase { public: /** @@ -90,19 +90,13 @@ public: * Interface used by CameraDeviceBase */ - virtual void notifyError(int errorCode, int arg1, int arg2); - virtual void notifyShutter(int frameNumber, nsecs_t timestamp); virtual void notifyAutoFocus(uint8_t newState, int triggerId); virtual void notifyAutoExposure(uint8_t newState, int triggerId); - virtual void notifyAutoWhitebalance(uint8_t newState, int triggerId); /** * Interface used by independent components of Camera2Client. */ - int getCameraId() const; - const sp& getCameraDevice(); - const sp& getCameraService(); camera2::SharedParameters& getParameters(); int getPreviewStreamId() const; @@ -118,27 +112,6 @@ public: status_t stopStream(); - // Simple class to ensure that access to ICameraClient is serialized by - // requiring mCameraClientLock to be locked before access to mCameraClient - // is possible. 
- class SharedCameraClient { - public: - class Lock { - public: - Lock(SharedCameraClient &client); - ~Lock(); - sp &mCameraClient; - private: - SharedCameraClient &mSharedClient; - }; - SharedCameraClient(const sp& client); - SharedCameraClient& operator=(const sp& client); - void clear(); - private: - sp mCameraClient; - mutable Mutex mCameraClientLock; - } mSharedCameraClient; - static size_t calculateBufferSize(int width, int height, int format, int stride); @@ -153,13 +126,6 @@ public: private: /** ICamera interface-related private members */ - - // Mutex that must be locked by methods implementing the ICamera interface. - // Ensures serialization between incoming ICamera calls. All methods below - // that append 'L' to the name assume that mICameraLock is locked when - // they're called - mutable Mutex mICameraLock; - typedef camera2::Parameters Parameters; status_t setPreviewWindowL(const sp& binder, @@ -213,17 +179,10 @@ private: bool mAfInMotion; - /** CameraDevice instance, wraps HAL camera device */ - - sp mDevice; - /** Utility members */ // Wait until the camera device has received the latest control settings status_t syncWithDevice(); - - // Verify that caller is the owner of the camera - status_t checkPid(const char *checkLocation) const; }; }; // namespace android diff --git a/services/camera/libcameraservice/Camera2ClientBase.cpp b/services/camera/libcameraservice/Camera2ClientBase.cpp new file mode 100644 index 0000000..e92ad1c --- /dev/null +++ b/services/camera/libcameraservice/Camera2ClientBase.cpp @@ -0,0 +1,315 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera2ClientBase" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include + +#include +#include +#include +#include "camera2/Parameters.h" +#include "Camera2ClientBase.h" +#include "camera2/ProFrameProcessor.h" + +#include "Camera2Device.h" + +namespace android { +using namespace camera2; + +static int getCallingPid() { + return IPCThreadState::self()->getCallingPid(); +} + +// Interface used by CameraService + +template +Camera2ClientBase::Camera2ClientBase( + const sp& cameraService, + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid): + TClientBase(cameraService, remoteCallback, clientPackageName, + cameraId, cameraFacing, clientPid, clientUid, servicePid), + mSharedCameraCallbacks(remoteCallback) +{ + ALOGI("Camera %d: Opened", cameraId); + mDevice = new Camera2Device(cameraId); +} + +template +status_t Camera2ClientBase::checkPid(const char* checkLocation) + const { + + int callingPid = getCallingPid(); + if (callingPid == TClientBase::mClientPid) return NO_ERROR; + + ALOGE("%s: attempt to use a locked camera from a different process" + " (old pid %d, new pid %d)", checkLocation, TClientBase::mClientPid, callingPid); + return PERMISSION_DENIED; +} + +template +status_t Camera2ClientBase::initialize(camera_module_t *module) { + ATRACE_CALL(); + ALOGV("%s: Initializing client for camera %d", __FUNCTION__, + TClientBase::mCameraId); + status_t res; + + res = mDevice->initialize(module); + if (res != OK) { + ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", + __FUNCTION__, TClientBase::mCameraId, strerror(-res), res); + return NO_INIT; + } + + res = mDevice->setNotifyCallback(this); + + return OK; +} + +template +Camera2ClientBase::~Camera2ClientBase() { + ATRACE_CALL(); + + TClientBase::mDestructionStarted = true; + + disconnect(); + + ALOGI("Closed Camera %d", TClientBase::mCameraId); +} + +template +status_t Camera2ClientBase::dump(int fd, + const Vector& args) { + String8 result; + result.appendFormat("Camera2ClientBase[%d] (%p) PID: %d, dump:\n", + TClientBase::mCameraId, + TClientBase::getRemoteCallback()->asBinder().get(), + TClientBase::mClientPid); + result.append(" State: "); + + write(fd, result.string(), result.size()); + // TODO: print dynamic/request section from most recent requests + + return dumpDevice(fd, args); +} + +template +status_t Camera2ClientBase::dumpDevice( + int fd, + const Vector& args) { + String8 result; + + result = " Device dump:\n"; + write(fd, result.string(), result.size()); + + if (!mDevice.get()) { + result = " *** Device is detached\n"; + write(fd, result.string(), result.size()); + return NO_ERROR; + } + + status_t res = mDevice->dump(fd, args); + if (res != OK) { + result = String8::format(" Error dumping device: %s (%d)", + strerror(-res), res); + write(fd, result.string(), result.size()); + } + + return NO_ERROR; +} + +// ICameraClient2BaseUser interface + + +template +void Camera2ClientBase::disconnect() { + ATRACE_CALL(); + Mutex::Autolock icl(mBinderSerializationLock); + + // Allow both client and the media server to disconnect at all times + int callingPid = getCallingPid(); + if (callingPid != TClientBase::mClientPid && + callingPid != TClientBase::mServicePid) return; + + ALOGV("Camera %d: Shutting down", TClientBase::mCameraId); + + detachDevice(); + + TClientBase::disconnect(); + + ALOGV("Camera %d: Shut down complete complete", TClientBase::mCameraId); +} + +template 
+void Camera2ClientBase::detachDevice() { + if (mDevice == 0) return; + mDevice->disconnect(); + + mDevice.clear(); + + ALOGV("Camera %d: Detach complete", TClientBase::mCameraId); +} + +template +status_t Camera2ClientBase::connect( + const sp& client) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + + if (TClientBase::mClientPid != 0 && + getCallingPid() != TClientBase::mClientPid) { + + ALOGE("%s: Camera %d: Connection attempt from pid %d; " + "current locked to pid %d", + __FUNCTION__, + TClientBase::mCameraId, + getCallingPid(), + TClientBase::mClientPid); + return BAD_VALUE; + } + + TClientBase::mClientPid = getCallingPid(); + + TClientBase::mRemoteCallback = client; + mSharedCameraCallbacks = client; + + return OK; +} + +/** Device-related methods */ + +template +void Camera2ClientBase::notifyError(int errorCode, int arg1, + int arg2) { + ALOGE("Error condition %d reported by HAL, arguments %d, %d", errorCode, + arg1, arg2); +} + +template +void Camera2ClientBase::notifyShutter(int frameNumber, + nsecs_t timestamp) { + (void)frameNumber; + (void)timestamp; + + ALOGV("%s: Shutter notification for frame %d at time %lld", __FUNCTION__, + frameNumber, timestamp); +} + +template +void Camera2ClientBase::notifyAutoFocus(uint8_t newState, + int triggerId) { + (void)newState; + (void)triggerId; + + ALOGV("%s: Autofocus state now %d, last trigger %d", + __FUNCTION__, newState, triggerId); + + typename SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE, 1, 0); + } + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, 1, 0); + } +} + +template +void Camera2ClientBase::notifyAutoExposure(uint8_t newState, + int triggerId) { + (void)newState; + (void)triggerId; + + ALOGV("%s: Autoexposure state now %d, last trigger %d", + __FUNCTION__, newState, triggerId); +} + +template +void Camera2ClientBase::notifyAutoWhitebalance(uint8_t newState, + int triggerId) { + (void)newState; + (void)triggerId; + + ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", + __FUNCTION__, newState, triggerId); +} + +template +int Camera2ClientBase::getCameraId() const { + return TClientBase::mCameraId; +} + +template +const sp& Camera2ClientBase::getCameraDevice() { + return mDevice; +} + +template +const sp& Camera2ClientBase::getCameraService() { + return TClientBase::mCameraService; +} + +template +Camera2ClientBase::SharedCameraCallbacks::Lock::Lock( + SharedCameraCallbacks &client) : + + mRemoteCallback(client.mRemoteCallback), + mSharedClient(client) { + + mSharedClient.mRemoteCallbackLock.lock(); +} + +template +Camera2ClientBase::SharedCameraCallbacks::Lock::~Lock() { + mSharedClient.mRemoteCallbackLock.unlock(); +} + +template +Camera2ClientBase::SharedCameraCallbacks::SharedCameraCallbacks( + const sp&client) : + + mRemoteCallback(client) { +} + +template +typename Camera2ClientBase::SharedCameraCallbacks& +Camera2ClientBase::SharedCameraCallbacks::operator=( + const sp&client) { + + Mutex::Autolock l(mRemoteCallbackLock); + mRemoteCallback = client; + return *this; +} + +template +void Camera2ClientBase::SharedCameraCallbacks::clear() { + Mutex::Autolock l(mRemoteCallbackLock); + mRemoteCallback.clear(); +} + +template class Camera2ClientBase; +template class Camera2ClientBase; + +} // namespace android diff --git a/services/camera/libcameraservice/Camera2ClientBase.h b/services/camera/libcameraservice/Camera2ClientBase.h new 
file mode 100644 index 0000000..9001efb --- /dev/null +++ b/services/camera/libcameraservice/Camera2ClientBase.h @@ -0,0 +1,128 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_BASE_H +#define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_BASE_H + +#include "CameraDeviceBase.h" +#include "CameraService.h" + +namespace android { + +class IMemory; + +template +class Camera2ClientBase : + public TClientBase, + public CameraDeviceBase::NotificationListener +{ +public: + typedef typename TClientBase::TCamCallbacks TCamCallbacks; + + /** + * Base binder interface (see ICamera/IProCameraUser for details) + */ + virtual status_t connect(const sp& callbacks); + virtual void disconnect(); + + /** + * Interface used by CameraService + */ + + // TODO: too many params, move into a ClientArgs + Camera2ClientBase(const sp& cameraService, + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid); + virtual ~Camera2ClientBase(); + + virtual status_t initialize(camera_module_t *module); + virtual status_t dump(int fd, const Vector& args); + + /** + * CameraDeviceBase::NotificationListener implementation + */ + + virtual void notifyError(int errorCode, int arg1, int arg2); + virtual void notifyShutter(int frameNumber, nsecs_t timestamp); + virtual void notifyAutoFocus(uint8_t newState, int triggerId); + virtual void notifyAutoExposure(uint8_t newState, int triggerId); + virtual void notifyAutoWhitebalance(uint8_t newState, + int triggerId); + + + int getCameraId() const; + const sp& + getCameraDevice(); + const sp& + getCameraService(); + + /** + * Interface used by independent components of CameraClient2Base. + */ + + // Simple class to ensure that access to TCamCallbacks is serialized + // by requiring mRemoteCallbackLock to be locked before access to + // mRemoteCallback is possible. + class SharedCameraCallbacks { + public: + class Lock { + public: + Lock(SharedCameraCallbacks &client); + ~Lock(); + sp &mRemoteCallback; + private: + SharedCameraCallbacks &mSharedClient; + }; + SharedCameraCallbacks(const sp& client); + SharedCameraCallbacks& operator=(const sp& client); + void clear(); + private: + sp mRemoteCallback; + mutable Mutex mRemoteCallbackLock; + } mSharedCameraCallbacks; + +protected: + + virtual status_t dumpDevice(int fd, const Vector& args); + + /** Binder client interface-related private members */ + + // Mutex that must be locked by methods implementing the binder client + // interface. Ensures serialization between incoming client calls. 
+ // All methods in this class hierarchy that append 'L' to the name assume + // that mBinderSerializationLock is locked when they're called + mutable Mutex mBinderSerializationLock; + + /** CameraDeviceBase instance wrapping HAL2+ entry */ + + sp mDevice; + + /** Utility members */ + + // Verify that caller is the owner of the camera + status_t checkPid(const char *checkLocation) const; + + virtual void detachDevice(); +}; + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/CameraClient.cpp b/services/camera/libcameraservice/CameraClient.cpp index 90f8f40..e577fa3 100644 --- a/services/camera/libcameraservice/CameraClient.cpp +++ b/services/camera/libcameraservice/CameraClient.cpp @@ -117,7 +117,7 @@ status_t CameraClient::dump(int fd, const Vector& args) { size_t len = snprintf(buffer, SIZE, "Client[%d] (%p) PID: %d\n", mCameraId, - getCameraClient()->asBinder().get(), + getRemoteCallback()->asBinder().get(), mClientPid); len = (len > SIZE - 1) ? SIZE - 1 : len; write(fd, buffer, len); @@ -173,10 +173,10 @@ status_t CameraClient::unlock() { return INVALID_OPERATION; } mClientPid = 0; - LOG1("clear mCameraClient (pid %d)", callingPid); + LOG1("clear mRemoteCallback (pid %d)", callingPid); // we need to remove the reference to ICameraClient so that when the app // goes away, the reference count goes to 0. - mCameraClient.clear(); + mRemoteCallback.clear(); } return result; } @@ -193,14 +193,15 @@ status_t CameraClient::connect(const sp& client) { return EBUSY; } - if (mCameraClient != 0 && (client->asBinder() == mCameraClient->asBinder())) { + if (mRemoteCallback != 0 && + (client->asBinder() == mRemoteCallback->asBinder())) { LOG1("Connect to the same client"); return NO_ERROR; } mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP; mClientPid = callingPid; - mCameraClient = client; + mRemoteCallback = client; LOG1("connect X (pid %d)", callingPid); return NO_ERROR; @@ -780,7 +781,7 @@ void CameraClient::handleShutter(void) { mCameraService->playSound(CameraService::SOUND_SHUTTER); } - sp c = mCameraClient; + sp c = mRemoteCallback; if (c != 0) { mLock.unlock(); c->notifyCallback(CAMERA_MSG_SHUTTER, 0, 0); @@ -811,7 +812,7 @@ void CameraClient::handlePreviewData(int32_t msgType, } // hold a strong pointer to the client - sp c = mCameraClient; + sp c = mRemoteCallback; // clear callback flags if no client or one-shot mode if (c == 0 || (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) { @@ -841,7 +842,7 @@ void CameraClient::handlePreviewData(int32_t msgType, void CameraClient::handlePostview(const sp& mem) { disableMsgType(CAMERA_MSG_POSTVIEW_FRAME); - sp c = mCameraClient; + sp c = mRemoteCallback; mLock.unlock(); if (c != 0) { c->dataCallback(CAMERA_MSG_POSTVIEW_FRAME, mem, NULL); @@ -856,7 +857,7 @@ void CameraClient::handleRawPicture(const sp& mem) { size_t size; sp heap = mem->getMemory(&offset, &size); - sp c = mCameraClient; + sp c = mRemoteCallback; mLock.unlock(); if (c != 0) { c->dataCallback(CAMERA_MSG_RAW_IMAGE, mem, NULL); @@ -867,7 +868,7 @@ void CameraClient::handleRawPicture(const sp& mem) { void CameraClient::handleCompressedPicture(const sp& mem) { disableMsgType(CAMERA_MSG_COMPRESSED_IMAGE); - sp c = mCameraClient; + sp c = mRemoteCallback; mLock.unlock(); if (c != 0) { c->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, mem, NULL); @@ -877,7 +878,7 @@ void CameraClient::handleCompressedPicture(const sp& mem) { void CameraClient::handleGenericNotify(int32_t msgType, int32_t ext1, int32_t ext2) { - sp c = mCameraClient; + sp c = 
mRemoteCallback; mLock.unlock(); if (c != 0) { c->notifyCallback(msgType, ext1, ext2); @@ -886,7 +887,7 @@ void CameraClient::handleGenericNotify(int32_t msgType, void CameraClient::handleGenericData(int32_t msgType, const sp& dataPtr, camera_frame_metadata_t *metadata) { - sp c = mCameraClient; + sp c = mRemoteCallback; mLock.unlock(); if (c != 0) { c->dataCallback(msgType, dataPtr, metadata); @@ -895,7 +896,7 @@ void CameraClient::handleGenericData(int32_t msgType, void CameraClient::handleGenericDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp& dataPtr) { - sp c = mCameraClient; + sp c = mRemoteCallback; mLock.unlock(); if (c != 0) { c->dataCallbackTimestamp(timestamp, msgType, dataPtr); diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 8c4f619..d46ca88 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -223,7 +223,9 @@ sp CameraService::connect( if (mClient[cameraId] != 0) { client = mClient[cameraId].promote(); if (client != 0) { - if (cameraClient->asBinder() == client->getCameraClient()->asBinder()) { + if (cameraClient->asBinder() == + client->getRemoteCallback()->asBinder()) { + LOG1("CameraService::connect X (pid %d) (the same client)", callingPid); return client; @@ -496,7 +498,7 @@ sp CameraService::findClientUnsafe( continue; } - if (cameraClient == client->getCameraClient()->asBinder()) { + if (cameraClient == client->getRemoteCallback()->asBinder()) { // Found our camera outIndex = i; return client; @@ -639,7 +641,7 @@ CameraService::Client::Client(const sp& cameraService, int callingPid = getCallingPid(); LOG1("Client::Client E (pid %d, id %d)", callingPid, cameraId); - mCameraClient = cameraClient; + mRemoteCallback = cameraClient; cameraService->setCameraBusy(cameraId); cameraService->loadSound(); @@ -666,7 +668,7 @@ CameraService::BasicClient::BasicClient(const sp& cameraService, mClientPackageName(clientPackageName) { mCameraService = cameraService; - mRemoteCallback = remoteCallback; + mRemoteBinder = remoteCallback; mCameraId = cameraId; mCameraFacing = cameraFacing; mClientPid = clientPid; @@ -681,7 +683,7 @@ CameraService::BasicClient::~BasicClient() { } void CameraService::BasicClient::disconnect() { - mCameraService->removeClientByRemote(mRemoteCallback); + mCameraService->removeClientByRemote(mRemoteBinder); } status_t CameraService::BasicClient::startCameraOps() { @@ -767,7 +769,7 @@ CameraService::Client* CameraService::Client::getClientFromCookie(void* user) { } void CameraService::Client::notifyError() { - mCameraClient->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0); + mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0); } // NOTE: function is idempotent diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index 8acc63f..d7a336c 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -109,7 +109,7 @@ public: virtual void disconnect() = 0; wp getRemote() { - return mRemoteCallback; + return mRemoteBinder; } protected: @@ -140,7 +140,7 @@ public: pid_t mServicePid; // immutable after constructor // - The app-side Binder interface to receive callbacks from us - wp mRemoteCallback; // immutable after constructor + wp mRemoteBinder; // immutable after constructor // permissions management status_t startCameraOps(); @@ -173,6 +173,8 @@ public: class 
Client : public BnCamera, public BasicClient { public: + typedef ICameraClient TCamCallbacks; + // ICamera interface (see ICamera for details) virtual void disconnect(); virtual status_t connect(const sp& client) = 0; @@ -208,8 +210,8 @@ public: ~Client(); // return our camera client - const sp& getCameraClient() { - return mCameraClient; + const sp& getRemoteCallback() { + return mRemoteCallback; } protected: @@ -222,12 +224,14 @@ public: // Initialized in constructor // - The app-side Binder interface to receive callbacks from us - sp mCameraClient; + sp mRemoteCallback; }; // class Client class ProClient : public BnProCameraUser, public BasicClient { public: + typedef IProCameraCallbacks TCamCallbacks; + ProClient(const sp& cameraService, const sp& remoteCallback, const String16& clientPackageName, diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp index 6fed8b4..4a5a3d5 100644 --- a/services/camera/libcameraservice/ProCamera2Client.cpp +++ b/services/camera/libcameraservice/ProCamera2Client.cpp @@ -31,64 +31,38 @@ namespace android { using namespace camera2; -static int getCallingPid() { - return IPCThreadState::self()->getCallingPid(); -} - -static int getCallingUid() { - return IPCThreadState::self()->getCallingUid(); -} - // Interface used by CameraService ProCamera2Client::ProCamera2Client(const sp& cameraService, - const sp& remoteCallback, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid): - ProClient(cameraService, remoteCallback, clientPackageName, - cameraId, cameraFacing, clientPid, clientUid, servicePid), - mSharedCameraCallbacks(remoteCallback) + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid) : + Camera2ClientBase(cameraService, remoteCallback, clientPackageName, + cameraId, cameraFacing, clientPid, clientUid, servicePid) { ATRACE_CALL(); ALOGI("ProCamera %d: Opened", cameraId); - mDevice = new Camera2Device(cameraId); - mExclusiveLock = false; } -status_t ProCamera2Client::checkPid(const char* checkLocation) const { - int callingPid = getCallingPid(); - if (callingPid == mClientPid) return NO_ERROR; - - ALOGE("%s: attempt to use a locked camera from a different process" - " (old pid %d, new pid %d)", checkLocation, mClientPid, callingPid); - return PERMISSION_DENIED; -} - status_t ProCamera2Client::initialize(camera_module_t *module) { ATRACE_CALL(); - ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId); status_t res; - res = mDevice->initialize(module); + res = Camera2ClientBase::initialize(module); if (res != OK) { - ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return NO_INIT; + return res; } - res = mDevice->setNotifyCallback(this); - String8 threadName; mFrameProcessor = new ProFrameProcessor(this); - threadName = String8::format("PC2-%d-FrameProc", - mCameraId); + threadName = String8::format("PC2-%d-FrameProc", mCameraId); mFrameProcessor->run(threadName.string()); mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID, @@ -99,20 +73,13 @@ status_t ProCamera2Client::initialize(camera_module_t *module) } ProCamera2Client::~ProCamera2Client() { - ATRACE_CALL(); - - mDestructionStarted = true; - - disconnect(); - - ALOGI("ProCamera %d: Closed", mCameraId); } status_t ProCamera2Client::exclusiveTryLock() { ATRACE_CALL(); 
ALOGV("%s", __FUNCTION__); - Mutex::Autolock icl(mIProCameraUserLock); + Mutex::Autolock icl(mBinderSerializationLock); SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); if (!mDevice.get()) return PERMISSION_DENIED; @@ -143,7 +110,7 @@ status_t ProCamera2Client::exclusiveLock() { ATRACE_CALL(); ALOGV("%s", __FUNCTION__); - Mutex::Autolock icl(mIProCameraUserLock); + Mutex::Autolock icl(mBinderSerializationLock); SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); if (!mDevice.get()) return PERMISSION_DENIED; @@ -177,7 +144,7 @@ status_t ProCamera2Client::exclusiveUnlock() { ATRACE_CALL(); ALOGV("%s", __FUNCTION__); - Mutex::Autolock icl(mIProCameraUserLock); + Mutex::Autolock icl(mBinderSerializationLock); SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); // don't allow unlocking if we have no lock @@ -198,6 +165,7 @@ status_t ProCamera2Client::exclusiveUnlock() { } bool ProCamera2Client::hasExclusiveLock() { + Mutex::Autolock icl(mBinderSerializationLock); return mExclusiveLock; } @@ -205,7 +173,7 @@ void ProCamera2Client::onExclusiveLockStolen() { ALOGV("%s: ProClient lost exclusivity (id %d)", __FUNCTION__, mCameraId); - Mutex::Autolock icl(mIProCameraUserLock); + Mutex::Autolock icl(mBinderSerializationLock); SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); if (mExclusiveLock && mRemoteCallback.get() != NULL) { @@ -224,7 +192,7 @@ status_t ProCamera2Client::submitRequest(camera_metadata_t* request, ATRACE_CALL(); ALOGV("%s", __FUNCTION__); - Mutex::Autolock icl(mIProCameraUserLock); + Mutex::Autolock icl(mBinderSerializationLock); if (!mDevice.get()) return DEAD_OBJECT; @@ -248,7 +216,7 @@ status_t ProCamera2Client::cancelRequest(int requestId) { ATRACE_CALL(); ALOGV("%s", __FUNCTION__); - Mutex::Autolock icl(mIProCameraUserLock); + Mutex::Autolock icl(mBinderSerializationLock); if (!mDevice.get()) return DEAD_OBJECT; @@ -256,10 +224,12 @@ status_t ProCamera2Client::cancelRequest(int requestId) { return PERMISSION_DENIED; } + // TODO: implement ALOGE("%s: not fully implemented yet", __FUNCTION__); return INVALID_OPERATION; } +//TODO: Remove status_t ProCamera2Client::requestStream(int streamId) { ALOGE("%s: not implemented yet", __FUNCTION__); @@ -273,7 +243,7 @@ status_t ProCamera2Client::cancelStream(int streamId) { status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - Mutex::Autolock icl(mIProCameraUserLock); + Mutex::Autolock icl(mBinderSerializationLock); if (!mDevice.get()) return DEAD_OBJECT; mDevice->clearStreamingRequest(); @@ -301,7 +271,7 @@ status_t ProCamera2Client::createStream(int width, int height, int format, status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - Mutex::Autolock icl(mIProCameraUserLock); + Mutex::Autolock icl(mBinderSerializationLock); if (!mDevice.get()) return DEAD_OBJECT; @@ -332,7 +302,7 @@ status_t ProCamera2Client::createDefaultRequest(int templateId, status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - Mutex::Autolock icl(mIProCameraUserLock); + Mutex::Autolock icl(mBinderSerializationLock); if (!mDevice.get()) return DEAD_OBJECT; @@ -352,7 +322,7 @@ status_t ProCamera2Client::getCameraInfo(int cameraId, return INVALID_OPERATION; } - Mutex::Autolock icl(mIProCameraUserLock); + Mutex::Autolock icl(mBinderSerializationLock); if (!mDevice.get()) return DEAD_OBJECT; @@ -373,47 +343,11 @@ status_t ProCamera2Client::dump(int fd, const Vector& args) { // TODO: print dynamic/request section from most recent requests mFrameProcessor->dump(fd, args); -#define CASE_APPEND_ENUM(x) case 
x: result.append(#x "\n"); break; - - result = " Device dump:\n"; - write(fd, result.string(), result.size()); - - if (!mDevice.get()) { - result = " *** Device is detached\n"; - write(fd, result.string(), result.size()); - return NO_ERROR; - } - - status_t res = mDevice->dump(fd, args); - if (res != OK) { - result = String8::format(" Error dumping device: %s (%d)", - strerror(-res), res); - write(fd, result.string(), result.size()); - } - -#undef CASE_APPEND_ENUM - return NO_ERROR; + return dumpDevice(fd, args); } // IProCameraUser interface -void ProCamera2Client::disconnect() { - ATRACE_CALL(); - Mutex::Autolock icl(mIProCameraUserLock); - status_t res; - - // Allow both client and the media server to disconnect at all times - int callingPid = getCallingPid(); - if (callingPid != mClientPid && callingPid != mServicePid) return; - - ALOGV("Camera %d: Shutting down", mCameraId); - - detachDevice(); - ProClient::disconnect(); - - ALOGV("Camera %d: Shut down complete complete", mCameraId); -} - void ProCamera2Client::detachDevice() { if (mDevice == 0) return; @@ -438,117 +372,16 @@ void ProCamera2Client::detachDevice() { } } - mDevice->disconnect(); - - mDevice.clear(); - - ALOGV("Camera %d: Detach complete", mCameraId); -} - -status_t ProCamera2Client::connect(const sp& client) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mIProCameraUserLock); - - if (mClientPid != 0 && getCallingPid() != mClientPid) { - ALOGE("%s: Camera %d: Connection attempt from pid %d; " - "current locked to pid %d", __FUNCTION__, - mCameraId, getCallingPid(), mClientPid); - return BAD_VALUE; - } - - mClientPid = getCallingPid(); - - mRemoteCallback = client; - mSharedCameraCallbacks = client; - - return OK; + Camera2ClientBase::detachDevice(); } /** Device-related methods */ - -void ProCamera2Client::notifyError(int errorCode, int arg1, int arg2) { - ALOGE("Error condition %d reported by HAL, arguments %d, %d", errorCode, - arg1, arg2); -} - -void ProCamera2Client::notifyShutter(int frameNumber, nsecs_t timestamp) { - ALOGV("%s: Shutter notification for frame %d at time %lld", __FUNCTION__, - frameNumber, timestamp); -} - -void ProCamera2Client::notifyAutoFocus(uint8_t newState, int triggerId) { - ALOGV("%s: Autofocus state now %d, last trigger %d", - __FUNCTION__, newState, triggerId); - - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - if (l.mRemoteCallback != 0) { - l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE, - 1, 0); - } - if (l.mRemoteCallback != 0) { - l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, - 1, 0); - } -} - -void ProCamera2Client::notifyAutoExposure(uint8_t newState, int triggerId) { - ALOGV("%s: Autoexposure state now %d, last trigger %d", - __FUNCTION__, newState, triggerId); -} - -void ProCamera2Client::notifyAutoWhitebalance(uint8_t newState, int triggerId) { - ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", - __FUNCTION__, newState, triggerId); -} - -int ProCamera2Client::getCameraId() const { - return mCameraId; -} - -const sp& ProCamera2Client::getCameraDevice() { - return mDevice; -} - -const sp& ProCamera2Client::getCameraService() { - return mCameraService; -} - -ProCamera2Client::SharedCameraCallbacks::Lock::Lock( - SharedCameraCallbacks &client): - mRemoteCallback(client.mRemoteCallback), - mSharedClient(client) { - mSharedClient.mRemoteCallbackLock.lock(); -} - -ProCamera2Client::SharedCameraCallbacks::Lock::~Lock() { - mSharedClient.mRemoteCallbackLock.unlock(); -} - 
-ProCamera2Client::SharedCameraCallbacks::SharedCameraCallbacks - (const sp&client): - mRemoteCallback(client) { -} - -ProCamera2Client::SharedCameraCallbacks& - ProCamera2Client::SharedCameraCallbacks::operator=( - const sp&client) { - Mutex::Autolock l(mRemoteCallbackLock); - mRemoteCallback = client; - return *this; -} - -void ProCamera2Client::SharedCameraCallbacks::clear() { - Mutex::Autolock l(mRemoteCallbackLock); - mRemoteCallback.clear(); -} - void ProCamera2Client::onFrameAvailable(int32_t frameId, const CameraMetadata& frame) { ATRACE_CALL(); ALOGV("%s", __FUNCTION__); - Mutex::Autolock icl(mIProCameraUserLock); + Mutex::Autolock icl(mBinderSerializationLock); SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); if (mRemoteCallback != NULL) { diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/ProCamera2Client.h index ff6f4e2..f69021e 100644 --- a/services/camera/libcameraservice/ProCamera2Client.h +++ b/services/camera/libcameraservice/ProCamera2Client.h @@ -20,6 +20,7 @@ #include "Camera2Device.h" #include "CameraService.h" #include "camera2/ProFrameProcessor.h" +#include "Camera2ClientBase.h" namespace android { @@ -29,17 +30,13 @@ class IMemory; * meant for HAL2-level private API access. */ class ProCamera2Client : - public CameraService::ProClient, - public Camera2Device::NotificationListener, + public Camera2ClientBase, public camera2::ProFrameProcessor::FilteredListener { public: /** * IProCameraUser interface (see IProCameraUser for details) */ - virtual status_t connect(const sp& callbacks); - virtual void disconnect(); - virtual status_t exclusiveTryLock(); virtual status_t exclusiveLock(); virtual status_t exclusiveUnlock(); @@ -54,10 +51,13 @@ public: virtual status_t requestStream(int streamId); virtual status_t cancelStream(int streamId); - virtual status_t createStream(int width, int height, int format, - const sp& bufferProducer, - /*out*/ - int* streamId); + virtual status_t createStream( + int width, + int height, + int format, + const sp& bufferProducer, + /*out*/ + int* streamId); // Create a request object from a template. // -- Caller owns the newly allocated metadata @@ -85,24 +85,9 @@ public: int servicePid); virtual ~ProCamera2Client(); - status_t initialize(camera_module_t *module); - - virtual status_t dump(int fd, const Vector& args); - - /** - * Interface used by Camera2Device - */ - - virtual void notifyError(int errorCode, int arg1, int arg2); - virtual void notifyShutter(int frameNumber, nsecs_t timestamp); - virtual void notifyAutoFocus(uint8_t newState, int triggerId); - virtual void notifyAutoExposure(uint8_t newState, int triggerId); - virtual void notifyAutoWhitebalance(uint8_t newState, int triggerId); + virtual status_t initialize(camera_module_t *module); - - int getCameraId() const; - const sp& getCameraDevice(); - const sp& getCameraService(); + virtual status_t dump(int fd, const Vector& args); // Callbacks from camera service virtual void onExclusiveLockStolen(); @@ -111,67 +96,26 @@ public: * Interface used by independent components of ProCamera2Client. */ - // Simple class to ensure that access to IProCameraCallbacks is serialized - // by requiring mRemoteCallbackLock to be locked before access to - // mCameraClient is possible. 
- class SharedCameraCallbacks { - public: - class Lock { - public: - Lock(SharedCameraCallbacks &client); - ~Lock(); - sp &mRemoteCallback; - private: - SharedCameraCallbacks &mSharedClient; - }; - SharedCameraCallbacks(const sp& client); - SharedCameraCallbacks& operator=(const sp& client); - void clear(); - private: - sp mRemoteCallback; - mutable Mutex mRemoteCallbackLock; - } mSharedCameraCallbacks; - protected: /** FilteredListener implementation **/ - virtual void onFrameAvailable(int32_t frameId, const CameraMetadata& frame); + virtual void onFrameAvailable(int32_t frameId, + const CameraMetadata& frame); + virtual void detachDevice(); private: /** IProCameraUser interface-related private members */ - // Mutex that must be locked by methods implementing the IProCameraUser - // interface. Ensures serialization between incoming IProCameraUser calls. - // All methods below that append 'L' to the name assume that - // mIProCameraUserLock is locked when they're called - mutable Mutex mIProCameraUserLock; - - // Used with stream IDs - static const int NO_STREAM = -1; - - /* Preview/Recording related members */ - - sp mPreviewSurface; - /** Preview callback related members */ sp mFrameProcessor; static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0; static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL; - /** Camera2Device instance wrapping HAL2 entry */ - - sp mDevice; - /** Utility members */ - // Verify that caller is the owner of the camera - status_t checkPid(const char *checkLocation) const; - // Whether or not we have an exclusive lock on the device // - if no we can't modify the request queue. // note that creating/deleting streams we own is still OK bool mExclusiveLock; - - void detachDevice(); }; }; // namespace android diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp index 9a14758..30c14ef 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp @@ -278,11 +278,12 @@ status_t CallbackProcessor::processNewCallback(sp &client) { // Call outside parameter lock to allow re-entrancy from notification { - Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient); - if (l.mCameraClient != 0) { + Camera2Client::SharedCameraCallbacks::Lock + l(client->mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { ALOGV("%s: Camera %d: Invoking client data callback", __FUNCTION__, client->getCameraId()); - l.mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_FRAME, + l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_FRAME, mCallbackHeap->mBuffers[heapIdx], NULL); } } diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp index 513a47e..1880912 100644 --- a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp +++ b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp @@ -271,10 +271,11 @@ CaptureSequencer::CaptureState CaptureSequencer::manageDone(sp &c } if (mCaptureBuffer != 0 && res == OK) { - Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient); + Camera2Client::SharedCameraCallbacks::Lock + l(client->mSharedCameraCallbacks); ALOGV("%s: Sending still image to client", __FUNCTION__); - if (l.mCameraClient != 0) { - l.mCameraClient->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, 
mCaptureBuffer, NULL); } else { ALOGV("%s: No client!", __FUNCTION__); @@ -344,7 +345,7 @@ CaptureSequencer::CaptureState CaptureSequencer::manageZslStart( } SharedParameters::Lock l(client->getParameters()); - /* warning: this also locks a SharedCameraClient */ + /* warning: this also locks a SharedCameraCallbacks */ shutterNotifyLocked(l.mParameters, client, mMsgType); mShutterNotified = true; mTimeoutCount = kMaxTimeoutsForCaptureEnd; @@ -496,7 +497,7 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCaptureWait( } if (mNewFrameReceived && !mShutterNotified) { SharedParameters::Lock l(client->getParameters()); - /* warning: this also locks a SharedCameraClient */ + /* warning: this also locks a SharedCameraCallbacks */ shutterNotifyLocked(l.mParameters, client, mMsgType); mShutterNotified = true; } @@ -651,16 +652,17 @@ status_t CaptureSequencer::updateCaptureRequest(const Parameters ¶ms, } { - Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient); + Camera2Client::SharedCameraCallbacks::Lock + l(client->mSharedCameraCallbacks); ALOGV("%s: Notifying of shutter close to client", __FUNCTION__); - if (l.mCameraClient != 0) { + if (l.mRemoteCallback != 0) { // ShutterCallback - l.mCameraClient->notifyCallback(CAMERA_MSG_SHUTTER, + l.mRemoteCallback->notifyCallback(CAMERA_MSG_SHUTTER, /*ext1*/0, /*ext2*/0); // RawCallback with null buffer - l.mCameraClient->notifyCallback(CAMERA_MSG_RAW_IMAGE_NOTIFY, + l.mRemoteCallback->notifyCallback(CAMERA_MSG_RAW_IMAGE_NOTIFY, /*ext1*/0, /*ext2*/0); } else { ALOGV("%s: No client!", __FUNCTION__); diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp index 1f2659c..09b4b27 100644 --- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp +++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp @@ -195,7 +195,7 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES); if (entry.count == 0) { // No faces this frame - /* warning: locks SharedCameraClient */ + /* warning: locks SharedCameraCallbacks */ callbackFaceDetection(client, metadata); return OK; } @@ -286,7 +286,7 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, metadata.faces = faces.editArray(); } - /* warning: locks SharedCameraClient */ + /* warning: locks SharedCameraCallbacks */ callbackFaceDetection(client, metadata); return OK; @@ -297,9 +297,9 @@ void FrameProcessor::callbackFaceDetection(sp client, /* Filter out repeated 0-face callbacks, but not when the last frame was >0 */ if (metadata.number_of_faces != 0 || mLastFrameNumberOfFaces != metadata.number_of_faces) { - Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient); - if (l.mCameraClient != NULL) { - l.mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA, + Camera2Client::SharedCameraCallbacks::Lock l(client->mSharedCameraCallbacks); + if (l.mRemoteCallback != NULL) { + l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA, NULL, &metadata); } } diff --git a/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp b/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp index 8d4933c..742577a 100644 --- a/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp +++ b/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp @@ -22,7 +22,7 @@ #include #include "ProFrameProcessor.h" -#include "../Camera2Device.h" +#include "../CameraDeviceBase.h" #include 
"../ProCamera2Client.h" namespace android { @@ -62,7 +62,7 @@ status_t ProFrameProcessor::removeListener(int32_t minId, return OK; } -void ProFrameProcessor::dump(int fd, const Vector& args) { +void ProFrameProcessor::dump(int fd, const Vector& /*args*/) { String8 result(" Latest received frame:\n"); write(fd, result.string(), result.size()); mLastFrame.dump(fd, 2, 6); @@ -71,7 +71,7 @@ void ProFrameProcessor::dump(int fd, const Vector& args) { bool ProFrameProcessor::threadLoop() { status_t res; - sp device; + sp device; { sp client = mClient.promote(); if (client == 0) return false; @@ -125,7 +125,6 @@ void ProFrameProcessor::processNewFrames(sp &client) { status_t ProFrameProcessor::processListeners(const CameraMetadata &frame, sp &client) { - status_t res; ATRACE_CALL(); camera_metadata_ro_entry_t entry; diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp index 6a4b95d..fbc5b93 100644 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp @@ -556,9 +556,9 @@ void StreamingProcessor::onFrameAvailable() { } // Call outside locked parameters to allow re-entrancy from notification - Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient); - if (l.mCameraClient != 0) { - l.mCameraClient->dataCallbackTimestamp(timestamp, + Camera2Client::SharedCameraCallbacks::Lock l(client->mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->dataCallbackTimestamp(timestamp, CAMERA_MSG_VIDEO_FRAME, recordingHeap->mBuffers[heapIdx]); } -- cgit v1.1 From ce124da179775a81ad7dcc3f33315eca451e66f2 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Mon, 4 Mar 2013 14:53:08 -0800 Subject: (Camera)FrameProcessor: Refactor to share code with ProFrameProcessor Change-Id: Ie8cd0df7caf83f9d0134f560ae31ab72f2f7d1fc --- services/camera/libcameraservice/Camera2Client.cpp | 2 +- services/camera/libcameraservice/Camera2Device.cpp | 4 + services/camera/libcameraservice/Camera2Device.h | 1 + services/camera/libcameraservice/Camera3Device.cpp | 4 + services/camera/libcameraservice/Camera3Device.h | 1 + .../camera/libcameraservice/CameraDeviceBase.h | 5 + .../camera/libcameraservice/ProCamera2Client.cpp | 3 +- .../libcameraservice/camera2/FrameProcessor.cpp | 175 +++++---------------- .../libcameraservice/camera2/FrameProcessor.h | 45 ++---- .../libcameraservice/camera2/ProFrameProcessor.cpp | 42 ++--- .../libcameraservice/camera2/ProFrameProcessor.h | 27 ++-- 11 files changed, 106 insertions(+), 203 deletions(-) diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index eb7a8d8..eb94d9f 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -115,7 +115,7 @@ status_t Camera2Client::initialize(camera_module_t *module) mStreamingProcessor = new StreamingProcessor(this); - mFrameProcessor = new FrameProcessor(this); + mFrameProcessor = new FrameProcessor(mDevice, this); threadName = String8::format("C2-%d-FrameProc", mCameraId); mFrameProcessor->run(threadName.string()); diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/Camera2Device.cpp index 81e58ca..37ba5ae 100644 --- a/services/camera/libcameraservice/Camera2Device.cpp +++ b/services/camera/libcameraservice/Camera2Device.cpp @@ -47,6 +47,10 @@ Camera2Device::~Camera2Device() 
disconnect(); } +int Camera2Device::getId() const { + return mId; +} + status_t Camera2Device::initialize(camera_module_t *module) { ATRACE_CALL(); diff --git a/services/camera/libcameraservice/Camera2Device.h b/services/camera/libcameraservice/Camera2Device.h index 1adb7a9..3034a1d 100644 --- a/services/camera/libcameraservice/Camera2Device.h +++ b/services/camera/libcameraservice/Camera2Device.h @@ -38,6 +38,7 @@ class Camera2Device: public CameraDeviceBase { /** * CameraDevice interface */ + virtual int getId() const; virtual status_t initialize(camera_module_t *module); virtual status_t disconnect(); virtual status_t dump(int fd, const Vector& args); diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index 2a1be09..04a6e6a 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -50,6 +50,10 @@ Camera3Device::~Camera3Device() disconnect(); } +int Camera3Device::getId() const { + return mId; +} + status_t Camera3Device::initialize(camera_module_t *module) { ATRACE_CALL(); diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/Camera3Device.h index 2bc7cf0..df7352c 100644 --- a/services/camera/libcameraservice/Camera3Device.h +++ b/services/camera/libcameraservice/Camera3Device.h @@ -57,6 +57,7 @@ class Camera3Device : /** * CameraDevice interface */ + virtual int getId() const; virtual status_t initialize(camera_module_t *module); virtual status_t disconnect(); virtual status_t dump(int fd, const Vector &args); diff --git a/services/camera/libcameraservice/CameraDeviceBase.h b/services/camera/libcameraservice/CameraDeviceBase.h index 8252af7..8c457d9 100644 --- a/services/camera/libcameraservice/CameraDeviceBase.h +++ b/services/camera/libcameraservice/CameraDeviceBase.h @@ -36,6 +36,11 @@ class CameraDeviceBase : public virtual RefBase { public: virtual ~CameraDeviceBase(); + /** + * The device's camera ID + */ + virtual int getId() const = 0; + virtual status_t initialize(camera_module_t *module) = 0; virtual status_t disconnect() = 0; diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp index 4a5a3d5..1270751 100644 --- a/services/camera/libcameraservice/ProCamera2Client.cpp +++ b/services/camera/libcameraservice/ProCamera2Client.cpp @@ -27,6 +27,7 @@ #include "camera2/Parameters.h" #include "ProCamera2Client.h" #include "camera2/ProFrameProcessor.h" +#include "CameraDeviceBase.h" namespace android { using namespace camera2; @@ -61,7 +62,7 @@ status_t ProCamera2Client::initialize(camera_module_t *module) } String8 threadName; - mFrameProcessor = new ProFrameProcessor(this); + mFrameProcessor = new ProFrameProcessor(mDevice); threadName = String8::format("PC2-%d-FrameProc", mCameraId); mFrameProcessor->run(threadName.string()); diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp index 09b4b27..d13d398 100644 --- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp +++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp @@ -28,146 +28,37 @@ namespace android { namespace camera2 { -FrameProcessor::FrameProcessor(wp client): - Thread(false), mClient(client), mLastFrameNumberOfFaces(0) { +FrameProcessor::FrameProcessor(wp device, + wp client) : + ProFrameProcessor(device), + mClient(client), + mLastFrameNumberOfFaces(0) { } 
FrameProcessor::~FrameProcessor() { - ALOGV("%s: Exit", __FUNCTION__); } -status_t FrameProcessor::registerListener(int32_t minId, - int32_t maxId, wp listener) { - Mutex::Autolock l(mInputMutex); - ALOGV("%s: Registering listener for frame id range %d - %d", - __FUNCTION__, minId, maxId); - RangeListener rListener = { minId, maxId, listener }; - mRangeListeners.push_back(rListener); - return OK; -} +bool FrameProcessor::processSingleFrame(CameraMetadata &frame, + const sp &device) { -status_t FrameProcessor::removeListener(int32_t minId, - int32_t maxId, wp listener) { - Mutex::Autolock l(mInputMutex); - List::iterator item = mRangeListeners.begin(); - while (item != mRangeListeners.end()) { - if (item->minId == minId && - item->maxId == maxId && - item->listener == listener) { - item = mRangeListeners.erase(item); - } else { - item++; - } + sp client = mClient.promote(); + if (!client.get()) { + return false; } - return OK; -} -void FrameProcessor::dump(int fd, const Vector& /*args*/) { - String8 result(" Latest received frame:\n"); - write(fd, result.string(), result.size()); - mLastFrame.dump(fd, 2, 6); -} - -bool FrameProcessor::threadLoop() { - status_t res; - - sp device; - { - sp client = mClient.promote(); - if (client == 0) return false; - device = client->getCameraDevice(); - if (device == 0) return false; + if (processFaceDetect(frame, client) != OK) { + return false; } - res = device->waitForNextFrame(kWaitDuration); - if (res == OK) { - sp client = mClient.promote(); - if (client == 0) return false; - processNewFrames(client); - } else if (res != TIMED_OUT) { - ALOGE("Camera2Client::FrameProcessor: Error waiting for new " - "frames: %s (%d)", strerror(-res), res); + if (!ProFrameProcessor::processSingleFrame(frame, device)) { + return false; } return true; } -void FrameProcessor::processNewFrames(sp &client) { - status_t res; - ATRACE_CALL(); - CameraMetadata frame; - while ( (res = client->getCameraDevice()->getNextFrame(&frame)) == OK) { - camera_metadata_entry_t entry; - - entry = frame.find(ANDROID_REQUEST_FRAME_COUNT); - if (entry.count == 0) { - ALOGE("%s: Camera %d: Error reading frame number", - __FUNCTION__, client->getCameraId()); - break; - } - ATRACE_INT("cam2_frame", entry.data.i32[0]); - - res = processFaceDetect(frame, client); - if (res != OK) break; - - res = processListeners(frame, client); - if (res != OK) break; - - if (!frame.isEmpty()) { - mLastFrame.acquire(frame); - } - } - if (res != NOT_ENOUGH_DATA) { - ALOGE("%s: Camera %d: Error getting next frame: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); - return; - } - - return; -} - -status_t FrameProcessor::processListeners(const CameraMetadata &frame, - sp &client) { - ATRACE_CALL(); - camera_metadata_ro_entry_t entry; - - entry = frame.find(ANDROID_REQUEST_ID); - if (entry.count == 0) { - ALOGE("%s: Camera %d: Error reading frame id", - __FUNCTION__, client->getCameraId()); - return BAD_VALUE; - } - int32_t frameId = entry.data.i32[0]; - - List > listeners; - { - Mutex::Autolock l(mInputMutex); - - List::iterator item = mRangeListeners.begin(); - while (item != mRangeListeners.end()) { - if (frameId >= item->minId && - frameId < item->maxId) { - sp listener = item->listener.promote(); - if (listener == 0) { - item = mRangeListeners.erase(item); - continue; - } else { - listeners.push_back(listener); - } - } - item++; - } - } - ALOGV("Got %d range listeners out of %d", listeners.size(), mRangeListeners.size()); - List >::iterator item = listeners.begin(); - for (; item != 
listeners.end(); item++) { - (*item)->onFrameAvailable(frameId, frame); - } - return OK; -} - status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, - sp &client) { + const sp &client) { status_t res = BAD_VALUE; ATRACE_CALL(); camera_metadata_ro_entry_t entry; @@ -190,7 +81,9 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, Vector faces; metadata.number_of_faces = 0; - if (enableFaceDetect && faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) { + if (enableFaceDetect && + faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) { + SharedParameters::Lock l(client->getParameters()); entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES); if (entry.count == 0) { @@ -263,17 +156,17 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) { face.id = faceIds[i]; face.left_eye[0] = - l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]); + l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]); face.left_eye[1] = - l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]); + l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]); face.right_eye[0] = - l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]); + l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]); face.right_eye[1] = - l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]); + l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]); face.mouth[0] = - l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]); + l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]); face.mouth[1] = - l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]); + l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]); } else { face.id = 0; face.left_eye[0] = face.left_eye[1] = -2000; @@ -293,14 +186,24 @@ status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, } void FrameProcessor::callbackFaceDetection(sp client, - /*in*/camera_frame_metadata &metadata) { + const camera_frame_metadata &metadata) { + + camera_frame_metadata *metadata_ptr = + const_cast(&metadata); + + /** + * Filter out repeated 0-face callbacks, + * but not when the last frame was >0 + */ + if (metadata.number_of_faces != 0 || + mLastFrameNumberOfFaces != metadata.number_of_faces) { - /* Filter out repeated 0-face callbacks, but not when the last frame was >0 */ - if (metadata.number_of_faces != 0 || mLastFrameNumberOfFaces != metadata.number_of_faces) { - Camera2Client::SharedCameraCallbacks::Lock l(client->mSharedCameraCallbacks); + Camera2Client::SharedCameraCallbacks::Lock + l(client->mSharedCameraCallbacks); if (l.mRemoteCallback != NULL) { l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA, - NULL, &metadata); + NULL, + metadata_ptr); } } diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.h b/services/camera/libcameraservice/camera2/FrameProcessor.h index 66e3cda..27ed8f6 100644 --- a/services/camera/libcameraservice/camera2/FrameProcessor.h +++ b/services/camera/libcameraservice/camera2/FrameProcessor.h @@ -22,7 +22,9 @@ #include #include #include -#include "camera/CameraMetadata.h" +#include + +#include "ProFrameProcessor.h" struct camera_frame_metadata; @@ -35,51 +37,26 @@ namespace camera2 { /* Output frame metadata processing thread. This thread waits for new * frames from the device, and analyzes them as necessary. 
*/ -class FrameProcessor: public Thread { +class FrameProcessor : public ProFrameProcessor { public: - FrameProcessor(wp client); + FrameProcessor(wp device, wp client); ~FrameProcessor(); - struct FilteredListener: virtual public RefBase { - virtual void onFrameAvailable(int32_t frameId, - const CameraMetadata &frame) = 0; - }; - - // Register a listener for a range of IDs [minId, maxId). Multiple listeners - // can be listening to the same range - status_t registerListener(int32_t minId, int32_t maxId, wp listener); - status_t removeListener(int32_t minId, int32_t maxId, wp listener); - - void dump(int fd, const Vector& args); private: - static const nsecs_t kWaitDuration = 10000000; // 10 ms wp mClient; + int mLastFrameNumberOfFaces; - virtual bool threadLoop(); - - Mutex mInputMutex; - - struct RangeListener { - int32_t minId; - int32_t maxId; - wp listener; - }; - List mRangeListeners; + void processNewFrames(const sp &client); - void processNewFrames(sp &client); + virtual bool processSingleFrame(CameraMetadata &frame, + const sp &device); status_t processFaceDetect(const CameraMetadata &frame, - sp &client); - - status_t processListeners(const CameraMetadata &frame, - sp &client); - - CameraMetadata mLastFrame; - int mLastFrameNumberOfFaces; + const sp &client); // Emit FaceDetection event to java if faces changed void callbackFaceDetection(sp client, - camera_frame_metadata &metadata); + const camera_frame_metadata &metadata); }; diff --git a/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp b/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp index 742577a..257a45f 100644 --- a/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp +++ b/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp @@ -23,13 +23,13 @@ #include "ProFrameProcessor.h" #include "../CameraDeviceBase.h" -#include "../ProCamera2Client.h" namespace android { namespace camera2 { -ProFrameProcessor::ProFrameProcessor(wp client): - Thread(false), mClient(client) { +ProFrameProcessor::ProFrameProcessor(wp device) : + Thread(/*canCallJava*/false), + mDevice(device) { } ProFrameProcessor::~ProFrameProcessor() { @@ -47,7 +47,8 @@ status_t ProFrameProcessor::registerListener(int32_t minId, } status_t ProFrameProcessor::removeListener(int32_t minId, - int32_t maxId, wp listener) { + int32_t maxId, + wp listener) { Mutex::Autolock l(mInputMutex); List::iterator item = mRangeListeners.begin(); while (item != mRangeListeners.end()) { @@ -73,42 +74,40 @@ bool ProFrameProcessor::threadLoop() { sp device; { - sp client = mClient.promote(); - if (client == 0) return false; - device = client->getCameraDevice(); + device = mDevice.promote(); if (device == 0) return false; } res = device->waitForNextFrame(kWaitDuration); if (res == OK) { - sp client = mClient.promote(); - if (client == 0) return false; - processNewFrames(client); + processNewFrames(device); } else if (res != TIMED_OUT) { - ALOGE("ProCamera2Client::ProFrameProcessor: Error waiting for new " + ALOGE("ProFrameProcessor: Error waiting for new " "frames: %s (%d)", strerror(-res), res); } return true; } -void ProFrameProcessor::processNewFrames(sp &client) { +void ProFrameProcessor::processNewFrames(const sp &device) { status_t res; ATRACE_CALL(); CameraMetadata frame; - while ( (res = client->getCameraDevice()->getNextFrame(&frame)) == OK) { + while ( (res = device->getNextFrame(&frame)) == OK) { + camera_metadata_entry_t entry; entry = frame.find(ANDROID_REQUEST_FRAME_COUNT); if (entry.count == 0) { ALOGE("%s: Camera %d: Error 
reading frame number", - __FUNCTION__, client->getCameraId()); + __FUNCTION__, device->getId()); break; } ATRACE_INT("cam2_frame", entry.data.i32[0]); - res = processListeners(frame, client); - if (res != OK) break; + if (!processSingleFrame(frame, device)) { + break; + } if (!frame.isEmpty()) { mLastFrame.acquire(frame); @@ -116,22 +115,27 @@ void ProFrameProcessor::processNewFrames(sp &client) { } if (res != NOT_ENOUGH_DATA) { ALOGE("%s: Camera %d: Error getting next frame: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); + __FUNCTION__, device->getId(), strerror(-res), res); return; } return; } +bool ProFrameProcessor::processSingleFrame(CameraMetadata &frame, + const sp &device) { + return processListeners(frame, device) == OK; +} + status_t ProFrameProcessor::processListeners(const CameraMetadata &frame, - sp &client) { + const sp &device) { ATRACE_CALL(); camera_metadata_ro_entry_t entry; entry = frame.find(ANDROID_REQUEST_ID); if (entry.count == 0) { ALOGE("%s: Camera %d: Error reading frame id", - __FUNCTION__, client->getCameraId()); + __FUNCTION__, device->getId()); return BAD_VALUE; } int32_t frameId = entry.data.i32[0]; diff --git a/services/camera/libcameraservice/camera2/ProFrameProcessor.h b/services/camera/libcameraservice/camera2/ProFrameProcessor.h index e4094a6..b82942c 100644 --- a/services/camera/libcameraservice/camera2/ProFrameProcessor.h +++ b/services/camera/libcameraservice/camera2/ProFrameProcessor.h @@ -24,11 +24,9 @@ #include #include -struct camera_frame_metadata; - namespace android { -class ProCamera2Client; +class CameraDeviceBase; namespace camera2 { @@ -37,23 +35,25 @@ namespace camera2 { */ class ProFrameProcessor: public Thread { public: - ProFrameProcessor(wp client); - ~ProFrameProcessor(); + ProFrameProcessor(wp device); + virtual ~ProFrameProcessor(); struct FilteredListener: virtual public RefBase { virtual void onFrameAvailable(int32_t frameId, - const CameraMetadata &frame) = 0; + const CameraMetadata &frame) = 0; }; // Register a listener for a range of IDs [minId, maxId). 
Multiple listeners // can be listening to the same range - status_t registerListener(int32_t minId, int32_t maxId, wp listener); - status_t removeListener(int32_t minId, int32_t maxId, wp listener); + status_t registerListener(int32_t minId, int32_t maxId, + wp listener); + status_t removeListener(int32_t minId, int32_t maxId, + wp listener); void dump(int fd, const Vector& args); - private: + protected: static const nsecs_t kWaitDuration = 10000000; // 10 ms - wp mClient; + wp mDevice; virtual bool threadLoop(); @@ -66,10 +66,13 @@ class ProFrameProcessor: public Thread { }; List mRangeListeners; - void processNewFrames(sp &client); + void processNewFrames(const sp &device); + + virtual bool processSingleFrame(CameraMetadata &frame, + const sp &device); status_t processListeners(const CameraMetadata &frame, - sp &client); + const sp &device); CameraMetadata mLastFrame; }; -- cgit v1.1 From fa4cf9d310685b4c25877cba772ff7da84caf517 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Mon, 4 Mar 2013 16:14:23 -0800 Subject: (Camera)ProCamera: Remove unused functions from binder interface Change-Id: I0582268cef6e84b630bc87c8a03dcd69d54c440d --- camera/Camera.cpp | 19 ++++++- camera/CameraBase.cpp | 35 ------------- camera/IProCameraCallbacks.cpp | 60 ---------------------- camera/IProCameraUser.cpp | 26 ++-------- camera/ProCamera.cpp | 22 +------- camera/tests/ProCameraTests.cpp | 14 ++--- include/camera/CameraBase.h | 8 +-- include/camera/IProCameraCallbacks.h | 18 +++---- include/camera/IProCameraUser.h | 3 +- include/camera/ProCamera.h | 33 ++++-------- .../camera/libcameraservice/ProCamera2Client.cpp | 9 +--- .../camera/libcameraservice/ProCamera2Client.h | 3 +- 12 files changed, 51 insertions(+), 199 deletions(-) diff --git a/camera/Camera.cpp b/camera/Camera.cpp index f417c90..e8908d2 100644 --- a/camera/Camera.cpp +++ b/camera/Camera.cpp @@ -283,7 +283,14 @@ void Camera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) void Camera::dataCallback(int32_t msgType, const sp& dataPtr, camera_frame_metadata_t *metadata) { - return CameraBaseT::dataCallback(msgType, dataPtr, metadata); + sp listener; + { + Mutex::Autolock _l(mLock); + listener = mListener; + } + if (listener != NULL) { + listener->postData(msgType, dataPtr, metadata); + } } // callback from camera service when timestamped frame is ready @@ -302,7 +309,15 @@ void Camera::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp< return; } - if (!CameraBaseT::dataCallbackTimestamp(timestamp, msgType, dataPtr)) { + sp listener; + { + Mutex::Autolock _l(mLock); + listener = mListener; + } + + if (listener != NULL) { + listener->postDataTimestamp(timestamp, msgType, dataPtr); + } else { ALOGW("No listener was set. 
Drop a recording frame."); releaseRecordingFrame(dataPtr); } diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp index 29096da..c25c5fd 100644 --- a/camera/CameraBase.cpp +++ b/camera/CameraBase.cpp @@ -176,41 +176,6 @@ void CameraBase::notifyCallback(int32_t msgType, } } -// callback from camera service when frame or image is ready -template -void CameraBase::dataCallback(int32_t msgType, - const sp& dataPtr, - camera_frame_metadata *metadata) -{ - sp listener; - { - Mutex::Autolock _l(mLock); - listener = mListener; - } - if (listener != NULL) { - listener->postData(msgType, dataPtr, metadata); - } -} - -// callback from camera service when timestamped frame is ready -template -bool CameraBase::dataCallbackTimestamp(nsecs_t timestamp, - int32_t msgType, - const sp& dataPtr) -{ - sp listener; - { - Mutex::Autolock _l(mLock); - listener = mListener; - } - if (listener != NULL) { - listener->postDataTimestamp(timestamp, msgType, dataPtr); - return true; - } - - return false; -} - template int CameraBase::getNumberOfCameras() { const sp cs = getCameraService(); diff --git a/camera/IProCameraCallbacks.cpp b/camera/IProCameraCallbacks.cpp index 6cd36bf..b9cd14d 100644 --- a/camera/IProCameraCallbacks.cpp +++ b/camera/IProCameraCallbacks.cpp @@ -34,8 +34,6 @@ namespace android { enum { NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION, - DATA_CALLBACK, - DATA_CALLBACK_TIMESTAMP, LOCK_STATUS_CHANGED, RESULT_RECEIVED, }; @@ -63,37 +61,6 @@ public: remote()->transact(NOTIFY_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY); } - // generic data callback from camera service to app with image data - void dataCallback(int32_t msgType, const sp& imageData, - camera_frame_metadata_t *metadata) - { - ALOGV("dataCallback"); - Parcel data, reply; - data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor()); - data.writeInt32(msgType); - data.writeStrongBinder(imageData->asBinder()); - if (metadata) { - data.writeInt32(metadata->number_of_faces); - data.write(metadata->faces, - sizeof(camera_face_t) * metadata->number_of_faces); - } - remote()->transact(DATA_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY); - } - - // generic data callback from camera service to app with image data - void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, - const sp& imageData) - { - ALOGV("dataCallback"); - Parcel data, reply; - data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor()); - data.writeInt64(timestamp); - data.writeInt32(msgType); - data.writeStrongBinder(imageData->asBinder()); - remote()->transact(DATA_CALLBACK_TIMESTAMP, data, &reply, - IBinder::FLAG_ONEWAY); - } - void onLockStatusChanged(LockStatus newLockStatus) { ALOGV("onLockStatusChanged"); Parcel data, reply; @@ -132,33 +99,6 @@ status_t BnProCameraCallbacks::onTransact( notifyCallback(msgType, ext1, ext2); return NO_ERROR; } break; - case DATA_CALLBACK: { - ALOGV("DATA_CALLBACK"); - CHECK_INTERFACE(IProCameraCallbacks, data, reply); - int32_t msgType = data.readInt32(); - sp imageData = interface_cast( - data.readStrongBinder()); - camera_frame_metadata_t *metadata = NULL; - if (data.dataAvail() > 0) { - metadata = new camera_frame_metadata_t; - metadata->number_of_faces = data.readInt32(); - metadata->faces = (camera_face_t *) data.readInplace( - sizeof(camera_face_t) * metadata->number_of_faces); - } - dataCallback(msgType, imageData, metadata); - if (metadata) delete metadata; - return NO_ERROR; - } break; - case DATA_CALLBACK_TIMESTAMP: { - ALOGV("DATA_CALLBACK_TIMESTAMP"); - CHECK_INTERFACE(IProCameraCallbacks, 
data, reply); - nsecs_t timestamp = data.readInt64(); - int32_t msgType = data.readInt32(); - sp imageData = interface_cast( - data.readStrongBinder()); - dataCallbackTimestamp(timestamp, msgType, imageData); - return NO_ERROR; - } break; case LOCK_STATUS_CHANGED: { ALOGV("LOCK_STATUS_CHANGED"); CHECK_INTERFACE(IProCameraCallbacks, data, reply); diff --git a/camera/IProCameraUser.cpp b/camera/IProCameraUser.cpp index c9d98aa..0c94bd4 100644 --- a/camera/IProCameraUser.cpp +++ b/camera/IProCameraUser.cpp @@ -40,8 +40,7 @@ enum { HAS_EXCLUSIVE_LOCK, SUBMIT_REQUEST, CANCEL_REQUEST, - REQUEST_STREAM, - CANCEL_STREAM, + DELETE_STREAM, CREATE_STREAM, CREATE_DEFAULT_REQUEST, GET_CAMERA_INFO, @@ -200,22 +199,13 @@ public: return reply.readInt32(); } - virtual status_t requestStream(int streamId) + virtual status_t deleteStream(int streamId) { Parcel data, reply; data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); data.writeInt32(streamId); - remote()->transact(REQUEST_STREAM, data, &reply); - return reply.readInt32(); - } - virtual status_t cancelStream(int streamId) - { - Parcel data, reply; - data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - data.writeInt32(streamId); - - remote()->transact(CANCEL_STREAM, data, &reply); + remote()->transact(DELETE_STREAM, data, &reply); return reply.readInt32(); } @@ -334,16 +324,10 @@ status_t BnProCameraUser::onTransact( reply->writeInt32(cancelRequest(requestId)); return NO_ERROR; } break; - case REQUEST_STREAM: { - CHECK_INTERFACE(IProCameraUser, data, reply); - int streamId = data.readInt32(); - reply->writeInt32(requestStream(streamId)); - return NO_ERROR; - } break; - case CANCEL_STREAM: { + case DELETE_STREAM: { CHECK_INTERFACE(IProCameraUser, data, reply); int streamId = data.readInt32(); - reply->writeInt32(cancelStream(streamId)); + reply->writeInt32(deleteStream(streamId)); return NO_ERROR; } break; case CREATE_STREAM: { diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 3cfabf6..396b009 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -60,21 +60,6 @@ void ProCamera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) return CameraBaseT::notifyCallback(msgType, ext1, ext2); } -// callback from camera service when frame or image is ready -void ProCamera::dataCallback(int32_t msgType, const sp& dataPtr, - camera_frame_metadata_t *metadata) -{ - return CameraBaseT::dataCallback(msgType, dataPtr, metadata); -} - -// callback from camera service when timestamped frame is ready -void ProCamera::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, - const sp& dataPtr) -{ - CameraBaseT::dataCallbackTimestamp(timestamp, msgType, dataPtr); -} - - void ProCamera::onLockStatusChanged( IProCameraCallbacks::LockStatus newLockStatus) { @@ -185,7 +170,7 @@ status_t ProCamera::deleteStream(int streamId) sp c = mCamera; if (c == 0) return NO_INIT; - status_t s = c->cancelStream(streamId); + status_t s = c->deleteStream(streamId); mStreams.removeItem(streamId); @@ -330,10 +315,7 @@ void ProCamera::onFrameAvailable(int streamId) { CpuConsumer::LockedBuffer buf; if (listener.get() != NULL) { - if (listener->useOnFrameAvailable()) { - listener->onFrameAvailable(streamId, stream.cpuConsumer); - return; - } + listener->onFrameAvailable(streamId, stream.cpuConsumer); } // Unblock waitForFrame(id) callers diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index 1a8564e..71813ae 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -271,13 
+271,11 @@ protected: free_camera_metadata(request); } - // TODO: remove - - virtual void notify(int32_t , int32_t , int32_t ) {} - virtual void postData(int32_t , const sp& , - camera_frame_metadata_t *) {} - virtual void postDataTimestamp(nsecs_t , int32_t , const sp& ) {} - + virtual void notify(int32_t msg, int32_t ext1, int32_t ext2) { + dout << "Notify received: msg " << std::hex << msg + << ", ext1: " << std::hex << ext1 << ", ext2: " << std::hex << ext2 + << std::endl; + } Vector mProEventList; Mutex mListenerMutex; @@ -717,6 +715,7 @@ TEST_F(ProCameraTest, CpuConsumerSingle) { return; } + // FIXME: Note this test is broken because onBufferReceived was removed mListener->SetEventMask(ProEvent_Mask(BUFFER_RECEIVED)); int streamId = -1; @@ -783,6 +782,7 @@ TEST_F(ProCameraTest, CpuConsumerDual) { return; } + // FIXME: Note this test is broken because onBufferReceived was removed mListener->SetEventMask(ProEvent_Mask(BUFFER_RECEIVED)); int streamId = -1; diff --git a/include/camera/CameraBase.h b/include/camera/CameraBase.h index 2735a86..9b08c0f 100644 --- a/include/camera/CameraBase.h +++ b/include/camera/CameraBase.h @@ -91,12 +91,6 @@ protected: //////////////////////////////////////////////////////// virtual void notifyCallback(int32_t msgType, int32_t ext, int32_t ext2); - virtual void dataCallback(int32_t msgType, - const sp& dataPtr, - camera_frame_metadata *metadata); - bool dataCallbackTimestamp(nsecs_t timestamp, - int32_t msgType, - const sp& dataPtr); //////////////////////////////////////////////////////// // Common instance variables @@ -115,7 +109,7 @@ protected: const int mCameraId; - typedef CameraBase CameraBaseT; + typedef CameraBase CameraBaseT; }; }; // namespace android diff --git a/include/camera/IProCameraCallbacks.h b/include/camera/IProCameraCallbacks.h index fc24026..563ec17 100644 --- a/include/camera/IProCameraCallbacks.h +++ b/include/camera/IProCameraCallbacks.h @@ -28,19 +28,14 @@ struct camera_metadata; namespace android { -class IProCameraCallbacks: public IInterface +class IProCameraCallbacks : public IInterface { public: DECLARE_META_INTERFACE(ProCameraCallbacks); - virtual void notifyCallback(int32_t msgType, int32_t ext1, - int32_t ext2) = 0; - virtual void dataCallback(int32_t msgType, - const sp& data, - camera_frame_metadata_t *metadata) = 0; - virtual void dataCallbackTimestamp(nsecs_t timestamp, - int32_t msgType, - const sp& data) = 0; + virtual void notifyCallback(int32_t msgType, + int32_t ext1, + int32_t ext2) = 0; enum LockStatus { LOCK_ACQUIRED, @@ -53,12 +48,13 @@ public: /** Missing by design: implementation is client-side in ProCamera.cpp **/ // virtual void onBufferReceived(int streamId, // const CpuConsumer::LockedBufer& buf); - virtual void onResultReceived(int32_t frameId, camera_metadata* result) = 0; + virtual void onResultReceived(int32_t frameId, + camera_metadata* result) = 0; }; // ---------------------------------------------------------------------------- -class BnProCameraCallbacks: public BnInterface +class BnProCameraCallbacks : public BnInterface { public: virtual status_t onTransact( uint32_t code, diff --git a/include/camera/IProCameraUser.h b/include/camera/IProCameraUser.h index 7bddb0c..45b818c 100644 --- a/include/camera/IProCameraUser.h +++ b/include/camera/IProCameraUser.h @@ -61,8 +61,7 @@ public: bool streaming = false) = 0; virtual status_t cancelRequest(int requestId) = 0; - virtual status_t requestStream(int streamId) = 0; - virtual status_t cancelStream(int streamId) = 0; + virtual status_t 
deleteStream(int streamId) = 0; virtual status_t createStream( int width, int height, int format, const sp& bufferProducer, diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index 5d6cfaa..3d1652f 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -40,9 +40,11 @@ namespace android { // All callbacks on this class are concurrent // (they come from separate threads) -class ProCameraListener : public CameraListener +class ProCameraListener : virtual public RefBase { public: + virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2) = 0; + // Lock has been acquired. Write operations now available. virtual void onLockAcquired() = 0; // Lock has been released with exclusiveUnlock. @@ -53,19 +55,9 @@ public: // Lock free. virtual void onTriggerNotify(int32_t msgType, int32_t ext1, int32_t ext2) = 0; - - // OnBufferReceived and OnRequestReceived can come in with any order, + // onFrameAvailable and OnResultReceived can come in with any order, // use android.sensor.timestamp and LockedBuffer.timestamp to correlate them - // TODO: remove onBufferReceived - - // A new frame buffer has been received for this stream. - // -- This callback only fires for createStreamCpu streams - // -- Use buf.timestamp to correlate with metadata's - // android.sensor.timestamp - // -- The buffer must not be accessed after this function call completes - virtual void onBufferReceived(int streamId, - const CpuConsumer::LockedBuffer& buf) = 0; /** * A new metadata buffer has been received. * -- Ownership of request passes on to the callee, free with @@ -77,17 +69,14 @@ public: // A new frame buffer has been received for this stream. // -- This callback only fires for createStreamCpu streams - // -- Use buf.timestamp to correlate with metadata's android.sensor.timestamp + // -- A buffer may be obtained by calling cpuConsumer->lockNextBuffer + // -- Use buf.timestamp to correlate with result's android.sensor.timestamp // -- The buffer should be accessed with CpuConsumer::lockNextBuffer // and CpuConsumer::unlockBuffer virtual void onFrameAvailable(int /*streamId*/, const sp& /*cpuConsumer*/) { } - // TODO: Remove useOnFrameAvailable - virtual bool useOnFrameAvailable() { - return false; - } }; class ProCamera; @@ -249,14 +238,10 @@ protected: //////////////////////////////////////////////////////// // IProCameraCallbacks implementation //////////////////////////////////////////////////////// - virtual void notifyCallback(int32_t msgType, int32_t ext, + virtual void notifyCallback(int32_t msgType, + int32_t ext, int32_t ext2); - virtual void dataCallback(int32_t msgType, - const sp& dataPtr, - camera_frame_metadata_t *metadata); - virtual void dataCallbackTimestamp(nsecs_t timestamp, - int32_t msgType, - const sp& dataPtr); + virtual void onLockStatusChanged( IProCameraCallbacks::LockStatus newLockStatus); diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp index 1270751..575b075 100644 --- a/services/camera/libcameraservice/ProCamera2Client.cpp +++ b/services/camera/libcameraservice/ProCamera2Client.cpp @@ -230,14 +230,7 @@ status_t ProCamera2Client::cancelRequest(int requestId) { return INVALID_OPERATION; } -//TODO: Remove -status_t ProCamera2Client::requestStream(int streamId) { - ALOGE("%s: not implemented yet", __FUNCTION__); - - return INVALID_OPERATION; -} - -status_t ProCamera2Client::cancelStream(int streamId) { +status_t ProCamera2Client::deleteStream(int streamId) { ATRACE_CALL(); ALOGV("%s 
(streamId = 0x%x)", __FUNCTION__, streamId); diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/ProCamera2Client.h index f69021e..1dec263 100644 --- a/services/camera/libcameraservice/ProCamera2Client.h +++ b/services/camera/libcameraservice/ProCamera2Client.h @@ -48,8 +48,7 @@ public: bool streaming = false); virtual status_t cancelRequest(int requestId); - virtual status_t requestStream(int streamId); - virtual status_t cancelStream(int streamId); + virtual status_t deleteStream(int streamId); virtual status_t createStream( int width, -- cgit v1.1 From e6800cea0678dbc0bf697b44c3e4548b0253085c Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Mon, 4 Mar 2013 17:25:57 -0800 Subject: Camera: Make ProCamera connect take the same paths as Camera connect * ProCamera uses the app ops manager * Refactored connect calls to be as common as possible * Removed some useless not implemented function calls in ProClient Change-Id: I5dab30d20f0c202a494a07b2cfe4c1fa04a2a076 --- services/camera/libcameraservice/Camera2Client.cpp | 18 +- .../camera/libcameraservice/Camera2ClientBase.cpp | 16 +- services/camera/libcameraservice/CameraService.cpp | 215 +++++++++------------ services/camera/libcameraservice/CameraService.h | 45 +++-- 4 files changed, 130 insertions(+), 164 deletions(-) diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index eb94d9f..056271d 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -81,27 +81,11 @@ status_t Camera2Client::initialize(camera_module_t *module) ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId); status_t res; - // Verify ops permissions - res = startCameraOps(); + res = Camera2ClientBase::initialize(module); if (res != OK) { return res; } - if (mDevice == NULL) { - ALOGE("%s: Camera %d: No device connected", - __FUNCTION__, mCameraId); - return NO_INIT; - } - - res = mDevice->initialize(module); - if (res != OK) { - ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return NO_INIT; - } - - res = mDevice->setNotifyCallback(this); - SharedParameters::Lock l(mParameters); res = l.mParameters.initialize(&(mDevice->info())); diff --git a/services/camera/libcameraservice/Camera2ClientBase.cpp b/services/camera/libcameraservice/Camera2ClientBase.cpp index e92ad1c..0623b89 100644 --- a/services/camera/libcameraservice/Camera2ClientBase.cpp +++ b/services/camera/libcameraservice/Camera2ClientBase.cpp @@ -76,6 +76,18 @@ status_t Camera2ClientBase::initialize(camera_module_t *module) { TClientBase::mCameraId); status_t res; + // Verify ops permissions + res = TClientBase::startCameraOps(); + if (res != OK) { + return res; + } + + if (mDevice == NULL) { + ALOGE("%s: Camera %d: No device connected", + __FUNCTION__, TClientBase::mCameraId); + return NO_INIT; + } + res = mDevice->initialize(module); if (res != OK) { ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", @@ -94,6 +106,8 @@ Camera2ClientBase::~Camera2ClientBase() { TClientBase::mDestructionStarted = true; + TClientBase::finishCameraOps(); + disconnect(); ALOGI("Closed Camera %d", TClientBase::mCameraId); @@ -157,7 +171,7 @@ void Camera2ClientBase::disconnect() { detachDevice(); - TClientBase::disconnect(); + CameraService::BasicClient::disconnect(); ALOGV("Camera %d: Shut down complete complete", TClientBase::mCameraId); } diff --git 
a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index d46ca88..7636143 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -176,18 +176,12 @@ bool CameraService::isValidCameraId(int cameraId) { return false; } -sp CameraService::connect( - const sp& cameraClient, - int cameraId, - const String16& clientPackageName, - int clientUid) { +bool CameraService::validateConnect(int cameraId, + /*inout*/ + int& clientUid) const { - String8 clientName8(clientPackageName); int callingPid = getCallingPid(); - LOG1("CameraService::connect E (pid %d \"%s\", id %d)", callingPid, - clientName8.string(), cameraId); - if (clientUid == USE_CALLING_UID) { clientUid = getCallingUid(); } else { @@ -195,20 +189,19 @@ sp CameraService::connect( if (callingPid != getpid()) { ALOGE("CameraService::connect X (pid %d) rejected (don't trust clientUid)", callingPid); - return NULL; + return false; } } if (!mModule) { ALOGE("Camera HAL module not loaded"); - return NULL; + return false; } - sp client; if (cameraId < 0 || cameraId >= mNumberOfCameras) { ALOGE("CameraService::connect X (pid %d) rejected (invalid cameraId %d).", callingPid, cameraId); - return NULL; + return false; } char value[PROPERTY_VALUE_MAX]; @@ -216,24 +209,32 @@ sp CameraService::connect( if (strcmp(value, "1") == 0) { // Camera is disabled by DevicePolicyManager. ALOGI("Camera is disabled. connect X (pid %d) rejected", callingPid); - return NULL; + return false; } - Mutex::Autolock lock(mServiceLock); + return true; +} + +bool CameraService::canConnectUnsafe(int cameraId, + const String16& clientPackageName, + const sp& remoteCallback, + sp &client) { + String8 clientName8(clientPackageName); + int callingPid = getCallingPid(); + if (mClient[cameraId] != 0) { client = mClient[cameraId].promote(); if (client != 0) { - if (cameraClient->asBinder() == - client->getRemoteCallback()->asBinder()) { - + if (remoteCallback == client->getRemoteCallback()->asBinder()) { LOG1("CameraService::connect X (pid %d) (the same client)", callingPid); - return client; + return true; } else { - // TODOSC: need to support 1 regular client, multiple shared clients here - ALOGW("CameraService::connect X (pid %d) rejected (existing client).", - callingPid); - return NULL; + // TODOSC: need to support 1 regular client, + // multiple shared clients here + ALOGW("CameraService::connect X (pid %d) rejected" + " (existing client).", callingPid); + return false; } } mClient[cameraId].clear(); @@ -249,16 +250,47 @@ sp CameraService::connect( would be fine */ if (mBusy[cameraId]) { - ALOGW("CameraService::connect X (pid %d, \"%s\") rejected" " (camera %d is still busy).", callingPid, clientName8.string(), cameraId); + return false; + } + + return true; +} + +sp CameraService::connect( + const sp& cameraClient, + int cameraId, + const String16& clientPackageName, + int clientUid) { + + String8 clientName8(clientPackageName); + int callingPid = getCallingPid(); + + LOG1("CameraService::connect E (pid %d \"%s\", id %d)", callingPid, + clientName8.string(), cameraId); + + if (!validateConnect(cameraId, /*inout*/clientUid)) { + return NULL; + } + + sp client; + + Mutex::Autolock lock(mServiceLock); + if (!canConnectUnsafe(cameraId, clientPackageName, + cameraClient->asBinder(), + /*out*/client)) { return NULL; + } else if (client.get() != NULL) { + return client; } int facing = -1; int deviceVersion = getDeviceVersion(cameraId, &facing); + // If there are 
other non-exclusive users of the camera, + // this will tear them down before we can reuse the camera if (isValidCameraId(cameraId)) { updateStatus(ICameraServiceListener::STATUS_NOT_AVAILABLE, cameraId); } @@ -285,21 +317,30 @@ sp CameraService::connect( return NULL; } - if (client->initialize(mModule) != OK) { + if (!connectFinishUnsafe(client, client->asBinder())) { // this is probably not recoverable.. but maybe the client can try again updateStatus(ICameraServiceListener::STATUS_AVAILABLE, cameraId); return NULL; } - cameraClient->asBinder()->linkToDeath(this); - mClient[cameraId] = client; LOG1("CameraService::connect X (id %d, this pid is %d)", cameraId, getpid()); return client; } +bool CameraService::connectFinishUnsafe(const sp& client, + const sp& clientBinder) { + if (client->initialize(mModule) != OK) { + return false; + } + + clientBinder->linkToDeath(this); + + return true; +} + sp CameraService::connect( const sp& cameraCb, int cameraId, @@ -309,38 +350,24 @@ sp CameraService::connect( String8 clientName8(clientPackageName); int callingPid = getCallingPid(); - // TODO: use clientPackageName and clientUid with appOpsMangr - - LOG1("CameraService::connectPro E (pid %d, id %d)", callingPid, cameraId); - - if (!mModule) { - ALOGE("Camera HAL module not loaded"); - return NULL; - } + LOG1("CameraService::connectPro E (pid %d \"%s\", id %d)", callingPid, + clientName8.string(), cameraId); - sp client; - if (cameraId < 0 || cameraId >= mNumberOfCameras) { - ALOGE("CameraService::connectPro X (pid %d) rejected (invalid cameraId %d).", - callingPid, cameraId); + if (!validateConnect(cameraId, /*inout*/clientUid)) { return NULL; } - char value[PROPERTY_VALUE_MAX]; - property_get("sys.secpolicy.camera.disabled", value, "0"); - if (strcmp(value, "1") == 0) { - // Camera is disabled by DevicePolicyManager. - ALOGI("Camera is disabled. connect X (pid %d) rejected", callingPid); - return NULL; + Mutex::Autolock lock(mServiceLock); + { + sp client; + if (!canConnectUnsafe(cameraId, clientPackageName, + cameraCb->asBinder(), + /*out*/client)) { + return NULL; + } } - // TODO: allow concurrent connections with a ProCamera - if (mBusy[cameraId]) { - - ALOGW("CameraService::connectPro X (pid %d, \"%s\") rejected" - " (camera %d is still busy).", callingPid, - clientName8.string(), cameraId); - return NULL; - } + sp client; int facing = -1; int deviceVersion = getDeviceVersion(cameraId, &facing); @@ -363,16 +390,15 @@ sp CameraService::connect( return NULL; } - if (client->initialize(mModule) != OK) { + if (!connectFinishUnsafe(client, client->asBinder())) { return NULL; } mProClientList[cameraId].push(client); - cameraCb->asBinder()->linkToDeath(this); - LOG1("CameraService::connectPro X (id %d, this pid is %d)", cameraId, getpid()); + return client; } @@ -654,7 +680,6 @@ CameraService::Client::~Client() { mDestructionStarted = true; mCameraService->releaseSound(); - finishCameraOps(); // unconditionally disconnect. 
function is idempotent Client::disconnect(); } @@ -691,6 +716,11 @@ status_t CameraService::BasicClient::startCameraOps() { mOpsCallback = new OpsCallback(this); + { + ALOGV("%s: Start camera ops, package name = %s, client UID = %d", + __FUNCTION__, String8(mClientPackageName).string(), mClientUid); + } + mAppOpsManager.startWatchingMode(AppOpsManager::OP_CAMERA, mClientPackageName, mOpsCallback); res = mAppOpsManager.startOp(AppOpsManager::OP_CAMERA, @@ -812,79 +842,10 @@ CameraService::ProClient::ProClient(const sp& cameraService, } CameraService::ProClient::~ProClient() { - mDestructionStarted = true; - - ProClient::disconnect(); -} - -status_t CameraService::ProClient::connect(const sp& callbacks) { - ALOGE("%s: not implemented yet", __FUNCTION__); - - return INVALID_OPERATION; -} - -void CameraService::ProClient::disconnect() { - BasicClient::disconnect(); -} - -status_t CameraService::ProClient::initialize(camera_module_t* module) -{ - ALOGW("%s: not implemented yet", __FUNCTION__); - return OK; -} - -status_t CameraService::ProClient::exclusiveTryLock() { - ALOGE("%s: not implemented yet", __FUNCTION__); - return INVALID_OPERATION; -} - -status_t CameraService::ProClient::exclusiveLock() { - ALOGE("%s: not implemented yet", __FUNCTION__); - return INVALID_OPERATION; -} - -status_t CameraService::ProClient::exclusiveUnlock() { - ALOGE("%s: not implemented yet", __FUNCTION__); - return INVALID_OPERATION; -} - -bool CameraService::ProClient::hasExclusiveLock() { - ALOGE("%s: not implemented yet", __FUNCTION__); - return false; -} - -void CameraService::ProClient::onExclusiveLockStolen() { - ALOGE("%s: not implemented yet", __FUNCTION__); -} - -status_t CameraService::ProClient::submitRequest(camera_metadata_t* request, bool streaming) { - ALOGE("%s: not implemented yet", __FUNCTION__); - - free_camera_metadata(request); - - return INVALID_OPERATION; -} - -status_t CameraService::ProClient::cancelRequest(int requestId) { - ALOGE("%s: not implemented yet", __FUNCTION__); - - return INVALID_OPERATION; -} - -status_t CameraService::ProClient::requestStream(int streamId) { - ALOGE("%s: not implemented yet", __FUNCTION__); - - return INVALID_OPERATION; -} - -status_t CameraService::ProClient::cancelStream(int streamId) { - ALOGE("%s: not implemented yet", __FUNCTION__); - - return INVALID_OPERATION; } void CameraService::ProClient::notifyError() { - ALOGE("%s: not implemented yet", __FUNCTION__); + mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0); } // ---------------------------------------------------------------------------- diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index d7a336c..c5e495f 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -108,6 +108,7 @@ public: virtual void disconnect() = 0; + // Return the remote callback binder object (e.g. 
IProCameraCallbacks) wp getRemote() { return mRemoteBinder; } @@ -247,34 +248,24 @@ public: return mRemoteCallback; } - // BasicClient implementation - virtual status_t initialize(camera_module_t *module); - /*** IProCamera implementation ***/ + virtual status_t connect(const sp& callbacks) + = 0; + virtual status_t exclusiveTryLock() = 0; + virtual status_t exclusiveLock() = 0; + virtual status_t exclusiveUnlock() = 0; - - virtual status_t connect( - const sp& callbacks); - virtual void disconnect(); - - virtual status_t exclusiveTryLock(); - virtual status_t exclusiveLock(); - virtual status_t exclusiveUnlock(); - - virtual bool hasExclusiveLock(); + virtual bool hasExclusiveLock() = 0; // Note that the callee gets a copy of the metadata. virtual int submitRequest(camera_metadata_t* metadata, - bool streaming = false); - virtual status_t cancelRequest(int requestId); - - virtual status_t requestStream(int streamId); - virtual status_t cancelStream(int streamId); + bool streaming = false) = 0; + virtual status_t cancelRequest(int requestId) = 0; // Callbacks from camera service - virtual void onExclusiveLockStolen(); + virtual void onExclusiveLockStolen() = 0; protected: virtual void notifyError(); @@ -287,6 +278,22 @@ private: // Delay-load the Camera HAL module virtual void onFirstRef(); + // Step 1. Check if we can connect, before we acquire the service lock. + bool validateConnect(int cameraId, + /*inout*/ + int& clientUid) const; + + // Step 2. Check if we can connect, after we acquire the service lock. + bool canConnectUnsafe(int cameraId, + const String16& clientPackageName, + const sp& remoteCallback, + /*out*/ + sp &client); + + // When connection is successful, initialize client and track its death + bool connectFinishUnsafe(const sp& client, + const sp& clientBinder); + virtual sp getClientByRemote(const wp& cameraClient); Mutex mServiceLock; -- cgit v1.1 From b9f4140b374b56277f8aec47d1a31ec713ad8668 Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Mon, 11 Mar 2013 15:09:32 -0700 Subject: GraphicBufferSource fixes Various fixes: - Set the maximum number of BQ buffers we're allowed to acquire equal to the actual number of codec buffers. That way we keep the codec as full as possible, and never try to acquire more than we're allowed from the BufferQueue. - Actually use "end of stream sent" flag. - Name the BufferQueue (for debug messages). 
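The first bullet above is the behavioral core of this change: the consumer is capped at exactly as many acquired buffers as the codec actually has, so acquireBuffer() can keep the codec full without ever over-acquiring from the BufferQueue. A minimal sketch of that consumer setup, mirroring the constructor changes in the diff below (the free-standing helper name and exact headers are illustrative, not part of the patch):

    #include <gui/BufferQueue.h>
    #include <hardware/gralloc.h>
    #include <utils/String8.h>

    // Configure a BufferQueue consumer feeding a video encoder: name it for
    // debugging, fix the default buffer size, and cap the number of
    // simultaneously acquired buffers at the codec's actual buffer count.
    static android::status_t configureEncoderConsumer(
            const android::sp<android::BufferQueue> &bq,
            uint32_t width, uint32_t height, uint32_t codecBufferCount) {
        bq->setConsumerName(android::String8("GraphicBufferSource"));
        bq->setDefaultBufferSize(width, height);
        bq->setSynchronousMode(true);
        bq->setConsumerUsageBits(
                GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_HW_TEXTURE);
        // e.g. codecBufferCount comes from the port definition's
        // nBufferCountActual, as in OMXNodeInstance::createInputSurface().
        return bq->setMaxAcquiredBufferCount(codecBufferCount);
    }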
Bug 8359403 Change-Id: I3b8c1f679bbebf6a89e623e13ca029eda7f657ba --- media/libstagefright/omx/GraphicBufferSource.cpp | 128 +++++++++++++---------- media/libstagefright/omx/GraphicBufferSource.h | 6 +- media/libstagefright/omx/OMXNodeInstance.cpp | 3 +- 3 files changed, 81 insertions(+), 56 deletions(-) diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp index 211e1d1..3854e52 100644 --- a/media/libstagefright/omx/GraphicBufferSource.cpp +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -32,7 +32,7 @@ static const bool EXTRA_CHECK = true; GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, - uint32_t bufferWidth, uint32_t bufferHeight) : + uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount) : mInitCheck(UNKNOWN_ERROR), mNodeInstance(nodeInstance), mExecuting(false), @@ -40,20 +40,31 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, mEndOfStream(false), mEndOfStreamSent(false) { - ALOGV("GraphicBufferSource w=%u h=%u", bufferWidth, bufferHeight); + ALOGV("GraphicBufferSource w=%u h=%u c=%u", + bufferWidth, bufferHeight, bufferCount); if (bufferWidth == 0 || bufferHeight == 0) { - ALOGE("Invalid dimensions %dx%d", bufferWidth, bufferHeight); + ALOGE("Invalid dimensions %ux%u", bufferWidth, bufferHeight); mInitCheck = BAD_VALUE; return; } + String8 name("GraphicBufferSource"); + mBufferQueue = new BufferQueue(true); + mBufferQueue->setConsumerName(name); mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight); mBufferQueue->setSynchronousMode(true); mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_HW_TEXTURE); + mInitCheck = mBufferQueue->setMaxAcquiredBufferCount(bufferCount); + if (mInitCheck != NO_ERROR) { + ALOGE("Unable to set BQ max acquired buffer count to %u: %d", + bufferCount, mInitCheck); + return; + } + // Note that we can't create an sp<...>(this) in a ctor that will not keep a // reference once the ctor ends, as that would cause the refcount of 'this' // dropping to 0 at the end of the ctor. Since all we need is a wp<...> @@ -64,21 +75,23 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, sp proxy; proxy = new BufferQueue::ProxyConsumerListener(listener); - status_t err = mBufferQueue->consumerConnect(proxy); - if (err != NO_ERROR) { + mInitCheck = mBufferQueue->consumerConnect(proxy); + if (mInitCheck != NO_ERROR) { ALOGE("Error connecting to BufferQueue: %s (%d)", - strerror(-err), err); + strerror(-mInitCheck), mInitCheck); return; } - mInitCheck = OK; + CHECK(mInitCheck == NO_ERROR); } GraphicBufferSource::~GraphicBufferSource() { ALOGV("~GraphicBufferSource"); - status_t err = mBufferQueue->consumerDisconnect(); - if (err != NO_ERROR) { - ALOGW("consumerDisconnect failed: %d", err); + if (mBufferQueue != NULL) { + status_t err = mBufferQueue->consumerDisconnect(); + if (err != NO_ERROR) { + ALOGW("consumerDisconnect failed: %d", err); + } } } @@ -98,8 +111,12 @@ void GraphicBufferSource::omxExecuting() { // one codec buffer simultaneously. (We could instead try to submit // all BQ buffers whenever any codec buffer is freed, but if we get the // initial conditions right that will never be useful.) 
- while (mNumFramesAvailable && isCodecBufferAvailable_l()) { - fillCodecBuffer_l(); + while (mNumFramesAvailable) { + if (!fillCodecBuffer_l()) { + ALOGV("stop load with frames available (codecAvail=%d)", + isCodecBufferAvailable_l()); + break; + } } ALOGV("done loading initial frames, avail=%d", mNumFramesAvailable); @@ -166,7 +183,7 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { // see if the GraphicBuffer reference was null, which should only ever // happen for EOS. if (codecBuffer.mGraphicBuffer == NULL) { - CHECK(mEndOfStream); + CHECK(mEndOfStream && mEndOfStreamSent); // No GraphicBuffer to deal with, no additional input or output is // expected, so just return. return; @@ -216,8 +233,9 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { if (mNumFramesAvailable) { // Fill this codec buffer. - CHECK(!mEndOfStream); - ALOGV("buffer freed, %d frames avail", mNumFramesAvailable); + CHECK(!mEndOfStreamSent); + ALOGV("buffer freed, %d frames avail (eos=%d)", + mNumFramesAvailable, mEndOfStream); fillCodecBuffer_l(); } else if (mEndOfStream) { // No frames available, but EOS is pending, so use this buffer to @@ -228,56 +246,58 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { return; } -status_t GraphicBufferSource::fillCodecBuffer_l() { +bool GraphicBufferSource::fillCodecBuffer_l() { CHECK(mExecuting && mNumFramesAvailable > 0); + int cbi = findAvailableCodecBuffer_l(); if (cbi < 0) { // No buffers available, bail. ALOGV("fillCodecBuffer_l: no codec buffers, avail now %d", mNumFramesAvailable); - } else { - ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%d", - mNumFramesAvailable); - BufferQueue::BufferItem item; - status_t err = mBufferQueue->acquireBuffer(&item); - if (err == BufferQueue::NO_BUFFER_AVAILABLE) { - // shouldn't happen - ALOGW("fillCodecBuffer_l: frame was not available"); - return err; - } else if (err != OK) { - // now what? fake end-of-stream? - ALOGW("fillCodecBuffer_l: acquireBuffer returned err=%d", err); - return err; - } + return false; + } - mNumFramesAvailable--; + ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%d", + mNumFramesAvailable); + BufferQueue::BufferItem item; + status_t err = mBufferQueue->acquireBuffer(&item); + if (err == BufferQueue::NO_BUFFER_AVAILABLE) { + // shouldn't happen + ALOGW("fillCodecBuffer_l: frame was not available"); + return false; + } else if (err != OK) { + // now what? fake end-of-stream? + ALOGW("fillCodecBuffer_l: acquireBuffer returned err=%d", err); + return false; + } - // Wait for it to become available. - err = item.mFence->waitForever(1000, - "GraphicBufferSource::fillCodecBuffer_l"); - if (err != OK) { - ALOGW("failed to wait for buffer fence: %d", err); - // keep going - } + mNumFramesAvailable--; - // If this is the first time we're seeing this buffer, add it to our - // slot table. - if (item.mGraphicBuffer != NULL) { - ALOGV("fillCodecBuffer_l: setting mBufferSlot %d", item.mBuf); - mBufferSlot[item.mBuf] = item.mGraphicBuffer; - } + // Wait for it to become available. 
+ err = item.mFence->waitForever(1000, + "GraphicBufferSource::fillCodecBuffer_l"); + if (err != OK) { + ALOGW("failed to wait for buffer fence: %d", err); + // keep going + } - err = submitBuffer_l(mBufferSlot[item.mBuf], item.mTimestamp, cbi); - if (err != OK) { - ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf); - mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY, + // If this is the first time we're seeing this buffer, add it to our + // slot table. + if (item.mGraphicBuffer != NULL) { + ALOGV("fillCodecBuffer_l: setting mBufferSlot %d", item.mBuf); + mBufferSlot[item.mBuf] = item.mGraphicBuffer; + } + + err = submitBuffer_l(mBufferSlot[item.mBuf], item.mTimestamp, cbi); + if (err != OK) { + ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf); + mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); - } else { - ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi); - } + } else { + ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi); } - return OK; + return true; } status_t GraphicBufferSource::signalEndOfInputStream() { @@ -372,6 +392,7 @@ void GraphicBufferSource::submitEndOfInputStream_l() { } else { ALOGV("submitEndOfInputStream_l: buffer submitted, header=%p cbi=%d", header, cbi); + mEndOfStreamSent = true; } } @@ -400,7 +421,8 @@ int GraphicBufferSource::findMatchingCodecBuffer_l( void GraphicBufferSource::onFrameAvailable() { Mutex::Autolock autoLock(mMutex); - ALOGV("onFrameAvailable exec=%d avail=%d", mExecuting, mNumFramesAvailable); + ALOGV("onFrameAvailable exec=%d avail=%d", + mExecuting, mNumFramesAvailable); if (mEndOfStream) { // This should only be possible if a new buffer was queued after diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h index 6a34bc5..7f1f22e 100644 --- a/media/libstagefright/omx/GraphicBufferSource.h +++ b/media/libstagefright/omx/GraphicBufferSource.h @@ -47,7 +47,7 @@ namespace android { class GraphicBufferSource : public BufferQueue::ConsumerListener { public: GraphicBufferSource(OMXNodeInstance* nodeInstance, - uint32_t bufferWidth, uint32_t bufferHeight); + uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount); virtual ~GraphicBufferSource(); // We can't throw an exception if the constructor fails, so we just set @@ -124,7 +124,9 @@ private: // in the onFrameAvailable callback, or if we're in codecBufferEmptied // and mNumFramesAvailable is nonzero). Returns without doing anything if // we don't have a codec buffer available. - status_t fillCodecBuffer_l(); + // + // Returns true if we successfully filled a codec buffer with a BQ buffer. + bool fillCodecBuffer_l(); // Marks the mCodecBuffers entry as in-use, copies the GraphicBuffer // reference into the codec buffer, and submits the data to the codec. 
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index f3d8d14..46ff22f 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -590,7 +590,8 @@ status_t OMXNodeInstance::createInputSurface( } GraphicBufferSource* bufferSource = new GraphicBufferSource( - this, def.format.video.nFrameWidth, def.format.video.nFrameHeight); + this, def.format.video.nFrameWidth, def.format.video.nFrameHeight, + def.nBufferCountActual); if ((err = bufferSource->initCheck()) != OK) { delete bufferSource; return err; -- cgit v1.1 From d5e56231a598b180a1d898bb7dc61b75580e59a4 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 12 Mar 2013 11:01:43 -0700 Subject: Squashed commit of the following: commit f4edf442741886cdbe071e2d15f6e6247269f7c5 Author: Andreas Huber Date: Tue Mar 12 09:09:18 2013 -0700 Pass additional flags to the sink, use TCP by default in wolfiecast mode. Change-Id: I41e11a2375d4199656e45c4f149d8441d0016092 commit 6302602ed280a38287f507159abfb40a1da38c5a Author: Andreas Huber Date: Tue Mar 12 08:51:58 2013 -0700 tweaks Change-Id: Ie29e422d7258be522f4bb1f6c5afcf74c937e547 commit a38a860e4979ba563cadbaafa21b084439449d26 Author: Andreas Huber Date: Mon Mar 11 16:57:43 2013 -0700 Report average lateness all the way from NuPlayerRenderer... Change-Id: I2e7700703ae656515e44b9c25610d26c75778111 commit a7d49b11675ea88be4029dd8451d1649db94571d Author: Andreas Huber Date: Mon Mar 11 14:54:19 2013 -0700 Make TimeSyncer smarter, enable TunnelRenderer Change-Id: I27377a60cd8feb01589da456967fddd34532c20e commit 0f214c8ef68179f7b61512c37040939554013151 Author: Andreas Huber Date: Thu Mar 7 15:57:56 2013 -0800 convert source timestamps to sink timestamps, report lateness. Change-Id: I051a60fbbceca2f7b508ae3dac6e01e402bae39e commit 04a4f8e16bad09157b5615a5fa45310438955832 Author: Andreas Huber Date: Thu Mar 7 09:00:28 2013 -0800 Sync time between sink and source. 
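The "convert source timestamps to sink timestamps" step above reduces to a single mapping, which this patch implements in WifiDisplaySink::onMediaReceiverNotify() (setTimeOffset(-mTimeOffsetUs + mTargetLatencyUs)) and in DirectRenderer: the TimeSyncer reports offset = source (server) clock minus sink (client) clock, so each source timestamp is shifted into the sink's clock and padded by the configured target latency before it is scheduled for rendering. A minimal stand-alone sketch of that mapping (the function name is illustrative only):

    #include <stdint.h>

    // offsetUs: (source clock - sink clock), as measured by TimeSyncer.
    // targetLatencyUs: how far behind "now" the sink renders
    // (media.wfd-sink.latency, 300 ms by default in this patch).
    static int64_t sourceToSinkRenderTimeUs(
            int64_t sourceTimeUs, int64_t offsetUs, int64_t targetLatencyUs) {
        // sink time = source time - offset; render targetLatencyUs later.
        return sourceTimeUs - offsetUs + targetLatencyUs;
    }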
Change-Id: Ie8b4d75c957aa48310e7c81d1279761b9f821efe commit aebe20e6184e3636a99082f8ece08e708015cb8d Author: Andreas Huber Date: Wed Mar 6 09:03:12 2013 -0800 play with back pressure Change-Id: I51eb69257e6a79e76f5f9c75ff99d8adbd083947 Change-Id: Ifdf57228667fed7fc71c5090a2c3f7cea1037c5c --- include/media/IStreamSource.h | 3 + media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 9 +- .../nuplayer/NuPlayerDriver.cpp | 1 + .../nuplayer/NuPlayerRenderer.cpp | 22 +- .../nuplayer/NuPlayerRenderer.h | 7 +- .../nuplayer/NuPlayerSource.h | 4 + .../nuplayer/StreamingSource.cpp | 4 + .../nuplayer/StreamingSource.h | 2 + media/libstagefright/mpeg2ts/ATSParser.cpp | 13 + media/libstagefright/mpeg2ts/ATSParser.h | 4 + .../wifi-display/ANetworkSession.cpp | 9 + media/libstagefright/wifi-display/Android.mk | 1 + .../libstagefright/wifi-display/MediaReceiver.cpp | 14 +- media/libstagefright/wifi-display/MediaReceiver.h | 2 + media/libstagefright/wifi-display/TimeSyncer.cpp | 332 +++++++++++++++++++++ media/libstagefright/wifi-display/TimeSyncer.h | 109 +++++++ .../wifi-display/rtp/RTPAssembler.cpp | 5 + .../wifi-display/rtp/RTPReceiver.cpp | 44 ++- .../libstagefright/wifi-display/rtp/RTPReceiver.h | 2 + .../libstagefright/wifi-display/rtp/RTPSender.cpp | 2 + .../wifi-display/sink/DirectRenderer.cpp | 53 +++- .../wifi-display/sink/DirectRenderer.h | 14 +- .../wifi-display/sink/TunnelRenderer.cpp | 40 ++- .../wifi-display/sink/TunnelRenderer.h | 6 + .../wifi-display/sink/WifiDisplaySink.cpp | 105 ++++++- .../wifi-display/sink/WifiDisplaySink.h | 19 ++ .../wifi-display/source/WifiDisplaySource.cpp | 12 + .../wifi-display/source/WifiDisplaySource.h | 3 + media/libstagefright/wifi-display/udptest.cpp | 283 +----------------- media/libstagefright/wifi-display/wfd.cpp | 5 +- 30 files changed, 802 insertions(+), 327 deletions(-) create mode 100644 media/libstagefright/wifi-display/TimeSyncer.cpp create mode 100644 media/libstagefright/wifi-display/TimeSyncer.h diff --git a/include/media/IStreamSource.h b/include/media/IStreamSource.h index 39e0a9e..677119b 100644 --- a/include/media/IStreamSource.h +++ b/include/media/IStreamSource.h @@ -37,6 +37,9 @@ struct IStreamSource : public IInterface { enum { // Video PES packets contain exactly one (aligned) access unit. kFlagAlignedVideoData = 1, + + // Timestamps are in ALooper::GetNowUs() units. 
+ kFlagIsRealTimeData = 2, }; virtual uint32_t flags() const { return 0; } }; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index 2ba6c22..5387e1a 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -381,9 +381,16 @@ void NuPlayer::onMessageReceived(const sp &msg) { mSource->start(); + uint32_t flags = 0; + + if (mSource->isRealTime()) { + flags |= Renderer::FLAG_REAL_TIME; + } + mRenderer = new Renderer( mAudioSink, - new AMessage(kWhatRendererNotify, id())); + new AMessage(kWhatRendererNotify, id()), + flags); looper()->registerHandler(mRenderer); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index 3c63e80..723af09 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -378,6 +378,7 @@ status_t NuPlayerDriver::invoke(const Parcel &request, Parcel *reply) { int mode = request.readInt32(); return mPlayer->setVideoScalingMode(mode); } + default: { return INVALID_OPERATION; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp index 1ba76a5..404b56f 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp @@ -31,9 +31,11 @@ const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll; NuPlayer::Renderer::Renderer( const sp &sink, - const sp ¬ify) + const sp ¬ify, + uint32_t flags) : mAudioSink(sink), mNotify(notify), + mFlags(flags), mNumFramesWritten(0), mDrainAudioQueuePending(false), mDrainVideoQueuePending(false), @@ -323,6 +325,11 @@ void NuPlayer::Renderer::postDrainVideoQueue() { if (entry.mBuffer == NULL) { // EOS doesn't carry a timestamp. 
delayUs = 0; + } else if (mFlags & FLAG_REAL_TIME) { + int64_t mediaTimeUs; + CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); + + delayUs = mediaTimeUs - ALooper::GetNowUs(); } else { int64_t mediaTimeUs; CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); @@ -368,12 +375,17 @@ void NuPlayer::Renderer::onDrainVideoQueue() { return; } - int64_t mediaTimeUs; - CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); + int64_t realTimeUs; + if (mFlags & FLAG_REAL_TIME) { + CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs)); + } else { + int64_t mediaTimeUs; + CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); + + realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs; + } - int64_t realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs; mVideoLateByUs = ALooper::GetNowUs() - realTimeUs; - bool tooLate = (mVideoLateByUs > 40000); if (tooLate) { diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h index e4368c7..c9796e2 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h @@ -25,8 +25,12 @@ namespace android { struct ABuffer; struct NuPlayer::Renderer : public AHandler { + enum Flags { + FLAG_REAL_TIME = 1, + }; Renderer(const sp &sink, - const sp ¬ify); + const sp ¬ify, + uint32_t flags = 0); void queueBuffer( bool audio, @@ -79,6 +83,7 @@ private: sp mAudioSink; sp mNotify; + uint32_t mFlags; List mAudioQueue; List mVideoQueue; uint32_t mNumFramesWritten; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h index 8622abe..1cbf575 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h @@ -74,6 +74,10 @@ struct NuPlayer::Source : public AHandler { return INVALID_OPERATION; } + virtual bool isRealTime() const { + return false; + } + protected: virtual ~Source() {} diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp index df03f86..28f0d50 100644 --- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp +++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp @@ -182,5 +182,9 @@ status_t NuPlayer::StreamingSource::dequeueAccessUnit( return err; } +bool NuPlayer::StreamingSource::isRealTime() const { + return mSource->flags() & IStreamSource::kFlagIsRealTimeData; +} + } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h b/media/libmediaplayerservice/nuplayer/StreamingSource.h index 80b061c..412b6c4 100644 --- a/media/libmediaplayerservice/nuplayer/StreamingSource.h +++ b/media/libmediaplayerservice/nuplayer/StreamingSource.h @@ -38,6 +38,8 @@ struct NuPlayer::StreamingSource : public NuPlayer::Source { virtual status_t dequeueAccessUnit(bool audio, sp *accessUnit); + virtual bool isRealTime() const; + protected: virtual ~StreamingSource(); diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp index a167b5a..c12572f 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.cpp +++ b/media/libstagefright/mpeg2ts/ATSParser.cpp @@ -452,6 +452,10 @@ int64_t ATSParser::Program::convertPTSToTimestamp(uint64_t PTS) { timeUs += mParser->mAbsoluteTimeAnchorUs; } + if (mParser->mTimeOffsetValid) { + timeUs += mParser->mTimeOffsetUs; + } + return timeUs; } @@ -930,6 
+934,8 @@ sp ATSParser::Stream::getSource(SourceType type) { ATSParser::ATSParser(uint32_t flags) : mFlags(flags), mAbsoluteTimeAnchorUs(-1ll), + mTimeOffsetValid(false), + mTimeOffsetUs(0ll), mNumTSPacketsParsed(0), mNumPCRs(0) { mPSISections.add(0 /* PID */, new PSISection); @@ -960,6 +966,13 @@ void ATSParser::signalDiscontinuity( CHECK(mPrograms.empty()); mAbsoluteTimeAnchorUs = timeUs; return; + } else if (type == DISCONTINUITY_TIME_OFFSET) { + int64_t offset; + CHECK(extra->findInt64("offset", &offset)); + + mTimeOffsetValid = true; + mTimeOffsetUs = offset; + return; } for (size_t i = 0; i < mPrograms.size(); ++i) { diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h index 46edc45..a10edc9 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.h +++ b/media/libstagefright/mpeg2ts/ATSParser.h @@ -39,6 +39,7 @@ struct ATSParser : public RefBase { DISCONTINUITY_AUDIO_FORMAT = 2, DISCONTINUITY_VIDEO_FORMAT = 4, DISCONTINUITY_ABSOLUTE_TIME = 8, + DISCONTINUITY_TIME_OFFSET = 16, DISCONTINUITY_SEEK = DISCONTINUITY_TIME, @@ -106,6 +107,9 @@ private: int64_t mAbsoluteTimeAnchorUs; + bool mTimeOffsetValid; + int64_t mTimeOffsetUs; + size_t mNumTSPacketsParsed; void parseProgramAssociationTable(ABitReader *br); diff --git a/media/libstagefright/wifi-display/ANetworkSession.cpp b/media/libstagefright/wifi-display/ANetworkSession.cpp index cb6011c..465f4c4 100644 --- a/media/libstagefright/wifi-display/ANetworkSession.cpp +++ b/media/libstagefright/wifi-display/ANetworkSession.cpp @@ -27,6 +27,7 @@ #include #include #include +#include #include #include @@ -507,6 +508,14 @@ status_t ANetworkSession::Session::writeMore() { mSawSendFailure = true; } +#if 0 + int numBytesQueued; + int res = ioctl(mSocket, SIOCOUTQ, &numBytesQueued); + if (res == 0 && numBytesQueued > 102400) { + ALOGI("numBytesQueued = %d", numBytesQueued); + } +#endif + return err; } diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index 19f560c..f81929c 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -15,6 +15,7 @@ LOCAL_SRC_FILES:= \ sink/TunnelRenderer.cpp \ sink/WifiDisplaySink.cpp \ SNTPClient.cpp \ + TimeSyncer.cpp \ source/Converter.cpp \ source/MediaPuller.cpp \ source/PlaybackSession.cpp \ diff --git a/media/libstagefright/wifi-display/MediaReceiver.cpp b/media/libstagefright/wifi-display/MediaReceiver.cpp index 3c92d41..10a2dff 100644 --- a/media/libstagefright/wifi-display/MediaReceiver.cpp +++ b/media/libstagefright/wifi-display/MediaReceiver.cpp @@ -127,7 +127,10 @@ void MediaReceiver::onMessageReceived(const sp &msg) { notifyInitDone(mInitStatus); } - mTSParser = new ATSParser(ATSParser::ALIGNED_VIDEO_DATA); + mTSParser = new ATSParser( + ATSParser::ALIGNED_VIDEO_DATA + | ATSParser::TS_TIMESTAMPS_ARE_ABSOLUTE); + mFormatKnownMask = 0; break; } @@ -306,6 +309,15 @@ void MediaReceiver::postAccessUnit( notify->post(); } +status_t MediaReceiver::notifyLateness(size_t trackIndex, int64_t latenessUs) { + if (trackIndex >= mTrackInfos.size()) { + return -ERANGE; + } + + TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); + return info->mReceiver->notifyLateness(latenessUs); +} + } // namespace android diff --git a/media/libstagefright/wifi-display/MediaReceiver.h b/media/libstagefright/wifi-display/MediaReceiver.h index 7adc3c4..cdfde99 100644 --- a/media/libstagefright/wifi-display/MediaReceiver.h +++ b/media/libstagefright/wifi-display/MediaReceiver.h @@ -60,6 
+60,8 @@ struct MediaReceiver : public AHandler { }; status_t initAsync(Mode mode); + status_t notifyLateness(size_t trackIndex, int64_t latenessUs); + protected: virtual void onMessageReceived(const sp &msg); virtual ~MediaReceiver(); diff --git a/media/libstagefright/wifi-display/TimeSyncer.cpp b/media/libstagefright/wifi-display/TimeSyncer.cpp new file mode 100644 index 0000000..64e182e --- /dev/null +++ b/media/libstagefright/wifi-display/TimeSyncer.cpp @@ -0,0 +1,332 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NEBUG 0 +#define LOG_TAG "TimeSyncer" +#include + +#include "TimeSyncer.h" + +#include "ANetworkSession.h" + +#include +#include +#include +#include +#include +#include + +namespace android { + +TimeSyncer::TimeSyncer( + const sp &netSession, const sp ¬ify) + : mNetSession(netSession), + mNotify(notify), + mIsServer(false), + mConnected(false), + mUDPSession(0), + mSeqNo(0), + mTotalTimeUs(0.0), + mPendingT1(0ll), + mTimeoutGeneration(0) { +} + +TimeSyncer::~TimeSyncer() { +} + +void TimeSyncer::startServer(unsigned localPort) { + sp msg = new AMessage(kWhatStartServer, id()); + msg->setInt32("localPort", localPort); + msg->post(); +} + +void TimeSyncer::startClient(const char *remoteHost, unsigned remotePort) { + sp msg = new AMessage(kWhatStartClient, id()); + msg->setString("remoteHost", remoteHost); + msg->setInt32("remotePort", remotePort); + msg->post(); +} + +void TimeSyncer::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatStartClient: + { + AString remoteHost; + CHECK(msg->findString("remoteHost", &remoteHost)); + + int32_t remotePort; + CHECK(msg->findInt32("remotePort", &remotePort)); + + sp notify = new AMessage(kWhatUDPNotify, id()); + + CHECK_EQ((status_t)OK, + mNetSession->createUDPSession( + 0 /* localPort */, + remoteHost.c_str(), + remotePort, + notify, + &mUDPSession)); + + postSendPacket(); + break; + } + + case kWhatStartServer: + { + mIsServer = true; + + int32_t localPort; + CHECK(msg->findInt32("localPort", &localPort)); + + sp notify = new AMessage(kWhatUDPNotify, id()); + + CHECK_EQ((status_t)OK, + mNetSession->createUDPSession( + localPort, notify, &mUDPSession)); + + break; + } + + case kWhatSendPacket: + { + TimeInfo ti; + memset(&ti, 0, sizeof(ti)); + + ti.mT1 = ALooper::GetNowUs(); + + CHECK_EQ((status_t)OK, + mNetSession->sendRequest( + mUDPSession, &ti, sizeof(ti))); + + mPendingT1 = ti.mT1; + postTimeout(); + break; + } + + case kWhatTimedOut: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mTimeoutGeneration) { + break; + } + + ALOGI("timed out, sending another request"); + postSendPacket(); + break; + } + + case kWhatUDPNotify: + { + int32_t reason; + CHECK(msg->findInt32("reason", &reason)); + + switch (reason) { + case ANetworkSession::kWhatError: + { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + int32_t err; + CHECK(msg->findInt32("err", &err)); + 
+ AString detail; + CHECK(msg->findString("detail", &detail)); + + ALOGE("An error occurred in session %d (%d, '%s/%s').", + sessionID, + err, + detail.c_str(), + strerror(-err)); + + mNetSession->destroySession(sessionID); + + cancelTimeout(); + + notifyError(err); + break; + } + + case ANetworkSession::kWhatDatagram: + { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + sp packet; + CHECK(msg->findBuffer("data", &packet)); + + int64_t arrivalTimeUs; + CHECK(packet->meta()->findInt64( + "arrivalTimeUs", &arrivalTimeUs)); + + CHECK_EQ(packet->size(), sizeof(TimeInfo)); + + TimeInfo *ti = (TimeInfo *)packet->data(); + + if (mIsServer) { + if (!mConnected) { + AString fromAddr; + CHECK(msg->findString("fromAddr", &fromAddr)); + + int32_t fromPort; + CHECK(msg->findInt32("fromPort", &fromPort)); + + CHECK_EQ((status_t)OK, + mNetSession->connectUDPSession( + mUDPSession, fromAddr.c_str(), fromPort)); + + mConnected = true; + } + + ti->mT2 = arrivalTimeUs; + ti->mT3 = ALooper::GetNowUs(); + + CHECK_EQ((status_t)OK, + mNetSession->sendRequest( + mUDPSession, ti, sizeof(*ti))); + } else { + if (ti->mT1 != mPendingT1) { + break; + } + + cancelTimeout(); + mPendingT1 = 0; + + ti->mT4 = arrivalTimeUs; + + // One way delay for a packet to travel from client + // to server or back (assumed to be the same either way). + int64_t delay = + (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2; + + // Offset between the client clock (T1, T4) and the + // server clock (T2, T3) timestamps. + int64_t offset = + (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2; + + mHistory.push_back(*ti); + + ALOGV("delay = %lld us,\toffset %lld us", + delay, + offset); + + if (mHistory.size() < kNumPacketsPerBatch) { + postSendPacket(1000000ll / 30); + } else { + notifyOffset(); + + mHistory.clear(); + postSendPacket(kBatchDelayUs); + } + } + break; + } + + default: + TRESPASS(); + } + + break; + } + + default: + TRESPASS(); + } +} + +void TimeSyncer::postSendPacket(int64_t delayUs) { + (new AMessage(kWhatSendPacket, id()))->post(delayUs); +} + +void TimeSyncer::postTimeout() { + sp msg = new AMessage(kWhatTimedOut, id()); + msg->setInt32("generation", mTimeoutGeneration); + msg->post(kTimeoutDelayUs); +} + +void TimeSyncer::cancelTimeout() { + ++mTimeoutGeneration; +} + +void TimeSyncer::notifyError(status_t err) { + if (mNotify == NULL) { + looper()->stop(); + return; + } + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); +} + +// static +int TimeSyncer::CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2) { + int64_t rt1 = ti1->mT4 - ti1->mT1; + int64_t rt2 = ti2->mT4 - ti2->mT1; + + if (rt1 < rt2) { + return -1; + } else if (rt1 > rt2) { + return 1; + } + + return 0; +} + +void TimeSyncer::notifyOffset() { + mHistory.sort(CompareRountripTime); + + int64_t sum = 0ll; + size_t count = 0; + + // Only consider the third of the information associated with the best + // (smallest) roundtrip times. + for (size_t i = 0; i < mHistory.size() / 3; ++i) { + const TimeInfo *ti = &mHistory[i]; + +#if 0 + // One way delay for a packet to travel from client + // to server or back (assumed to be the same either way). + int64_t delay = + (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2; +#endif + + // Offset between the client clock (T1, T4) and the + // server clock (T2, T3) timestamps. 
+ int64_t offset = + (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2; + + ALOGV("(%d) RT: %lld us, offset: %lld us", + i, ti->mT4 - ti->mT1, offset); + + sum += offset; + ++count; + } + + if (mNotify == NULL) { + ALOGI("avg. offset is %lld", sum / count); + return; + } + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatTimeOffset); + notify->setInt64("offset", sum / count); + notify->post(); +} + +} // namespace android diff --git a/media/libstagefright/wifi-display/TimeSyncer.h b/media/libstagefright/wifi-display/TimeSyncer.h new file mode 100644 index 0000000..0e3aed7 --- /dev/null +++ b/media/libstagefright/wifi-display/TimeSyncer.h @@ -0,0 +1,109 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef TIME_SYNCER_H_ + +#define TIME_SYNCER_H_ + +#include + +namespace android { + +struct ANetworkSession; + +/* + TimeSyncer allows us to synchronize time between a client and a server. + The client sends a UDP packet containing its send-time to the server, + the server sends that packet back to the client amended with information + about when it was received as well as the time the reply was sent back. + Finally the client receives the reply and has now enough information to + compute the clock offset between client and server assuming that packet + exchange is symmetric, i.e. time for a packet client->server and + server->client is roughly equal. + This exchange is repeated a number of times and the average offset computed + over the 30% of packets that had the lowest roundtrip times. + The offset is determined every 10 secs to account for slight differences in + clock frequency. 
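   A worked example with made-up numbers (purely illustrative): suppose the
   client stamps T1 = 1000 us when its packet leaves, the server stamps
   T2 = 1600 us on receipt and T3 = 1650 us when its reply leaves, and the
   client stamps T4 = 1850 us when the reply arrives. Matching the arithmetic
   in the datagram handler and notifyOffset() below:

       delay  = ((T2 - T1) + (T4 - T3)) / 2 = (600 + 200) / 2 = 400 us
       offset = ((T2 - T1) - (T4 - T3)) / 2 = (600 - 200) / 2 = 200 us

   so the server clock is estimated to run about 200 us ahead of the client
   clock, with a symmetric one-way delay of about 400 us.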
+*/ +struct TimeSyncer : public AHandler { + enum { + kWhatError, + kWhatTimeOffset, + }; + TimeSyncer( + const sp &netSession, + const sp ¬ify); + + void startServer(unsigned localPort); + void startClient(const char *remoteHost, unsigned remotePort); + +protected: + virtual ~TimeSyncer(); + + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatStartServer, + kWhatStartClient, + kWhatUDPNotify, + kWhatSendPacket, + kWhatTimedOut, + }; + + struct TimeInfo { + int64_t mT1; // client timestamp at send + int64_t mT2; // server timestamp at receive + int64_t mT3; // server timestamp at send + int64_t mT4; // client timestamp at receive + }; + + enum { + kNumPacketsPerBatch = 30, + }; + static const int64_t kTimeoutDelayUs = 500000ll; + static const int64_t kBatchDelayUs = 10000000ll; // every 10 secs + + sp mNetSession; + sp mNotify; + + bool mIsServer; + bool mConnected; + int32_t mUDPSession; + uint32_t mSeqNo; + double mTotalTimeUs; + + Vector mHistory; + + int64_t mPendingT1; + int32_t mTimeoutGeneration; + + void postSendPacket(int64_t delayUs = 0ll); + + void postTimeout(); + void cancelTimeout(); + + void notifyError(status_t err); + void notifyOffset(); + + static int CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2); + + DISALLOW_EVIL_CONSTRUCTORS(TimeSyncer); +}; + +} // namespace android + +#endif // TIME_SYNCER_H_ diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp index d0ab60d..5f189e7 100644 --- a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp @@ -53,6 +53,11 @@ void RTPReceiver::TSAssembler::signalDiscontinuity() { } status_t RTPReceiver::TSAssembler::processPacket(const sp &packet) { + int32_t rtpTime; + CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); + + packet->meta()->setInt64("timeUs", (rtpTime * 100ll) / 9); + postAccessUnit(packet, mSawDiscontinuity); if (mSawDiscontinuity) { diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp index 29482af..8711b08 100644 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp @@ -221,10 +221,12 @@ void RTPReceiver::Source::dequeueMore() { mNumDeclaredLostPrior = mNumDeclaredLost; - ALOGI("lost %lld packets (%.2f %%), declared %d lost\n", - lostInterval, - 100.0f * lostInterval / expectedInterval, - declaredLostInterval); + if (declaredLostInterval > 0) { + ALOGI("lost %lld packets (%.2f %%), declared %d lost\n", + lostInterval, + 100.0f * lostInterval / expectedInterval, + declaredLostInterval); + } } mNextReportTimeUs = nowUs + kReportIntervalUs; @@ -530,6 +532,40 @@ status_t RTPReceiver::connect( return OK; } +status_t RTPReceiver::notifyLateness(int64_t latenessUs) { + sp buf = new ABuffer(20); + + uint8_t *ptr = buf->data(); + ptr[0] = 0x80 | 0; + ptr[1] = 204; // APP + ptr[2] = 0; + + CHECK((buf->size() % 4) == 0u); + ptr[3] = (buf->size() / 4) - 1; + + ptr[4] = kSourceID >> 24; // SSRC + ptr[5] = (kSourceID >> 16) & 0xff; + ptr[6] = (kSourceID >> 8) & 0xff; + ptr[7] = kSourceID & 0xff; + ptr[8] = 'l'; + ptr[9] = 'a'; + ptr[10] = 't'; + ptr[11] = 'e'; + + ptr[12] = latenessUs >> 56; + ptr[13] = (latenessUs >> 48) & 0xff; + ptr[14] = (latenessUs >> 40) & 0xff; + ptr[15] = (latenessUs >> 32) & 0xff; + ptr[16] = (latenessUs >> 24) & 0xff; + ptr[17] = (latenessUs >> 16) & 0xff; + ptr[18] = (latenessUs >> 8) & 0xff; + 
ptr[19] = latenessUs & 0xff; + + mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); + + return OK; +} + void RTPReceiver::onMessageReceived(const sp &msg) { switch (msg->what()) { case kWhatRTPNotify: diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.h b/media/libstagefright/wifi-display/rtp/RTPReceiver.h index 2ae864a..ec4671d 100644 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.h +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.h @@ -53,6 +53,8 @@ struct RTPReceiver : public RTPBase, public AHandler { int32_t remoteRTPPort, int32_t remoteRTCPPort); + status_t notifyLateness(int64_t latenessUs); + protected: virtual ~RTPReceiver(); virtual void onMessageReceived(const sp &msg); diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp index 85c5933..b60853d 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -577,6 +577,8 @@ status_t RTPSender::onRTCPData(const sp &buffer) { case 202: // SDES case 203: + break; + case 204: // APP break; diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp index b53252d..5efcd17 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp @@ -39,8 +39,11 @@ DirectRenderer::DirectRenderer( : mSurfaceTex(bufferProducer), mVideoDecoderNotificationPending(false), mRenderPending(false), - mFirstRenderTimeUs(-1ll), - mFirstRenderRealUs(-1ll) { + mTimeOffsetUs(0ll), + mLatencySum(0ll), + mLatencyCount(0), + mNumFramesLate(0), + mNumFrames(0) { } DirectRenderer::~DirectRenderer() { @@ -53,6 +56,29 @@ DirectRenderer::~DirectRenderer() { } } +void DirectRenderer::setTimeOffset(int64_t offset) { + mTimeOffsetUs = offset; +} + +int64_t DirectRenderer::getAvgLatenessUs() { + if (mLatencyCount == 0) { + return 0ll; + } + + int64_t avgLatencyUs = mLatencySum / mLatencyCount; + + mLatencySum = 0ll; + mLatencyCount = 0; + + if (mNumFrames > 0) { + ALOGI("%d / %d frames late", mNumFramesLate, mNumFrames); + mNumFramesLate = 0; + mNumFrames = 0; + } + + return avgLatencyUs; +} + void DirectRenderer::onMessageReceived(const sp &msg) { switch (msg->what()) { case kWhatVideoDecoderNotify: @@ -224,14 +250,17 @@ void DirectRenderer::onVideoDecoderNotify() { } void DirectRenderer::queueOutputBuffer(size_t index, int64_t timeUs) { -#if 0 +#if 1 OutputInfo info; info.mIndex = index; - info.mTimeUs = timeUs; + info.mTimeUs = timeUs + mTimeOffsetUs; mOutputBuffers.push_back(info); scheduleRenderIfNecessary(); #else + mLatencySum += ALooper::GetNowUs() - (timeUs + mTimeOffsetUs); + ++mLatencyCount; + status_t err = mVideoDecoder->renderOutputBufferAndRelease(index); CHECK_EQ(err, (status_t)OK); #endif @@ -247,13 +276,7 @@ void DirectRenderer::scheduleRenderIfNecessary() { int64_t timeUs = (*mOutputBuffers.begin()).mTimeUs; int64_t nowUs = ALooper::GetNowUs(); - if (mFirstRenderTimeUs < 0ll) { - mFirstRenderTimeUs = timeUs; - mFirstRenderRealUs = nowUs; - } - - int64_t whenUs = timeUs - mFirstRenderTimeUs + mFirstRenderRealUs; - int64_t delayUs = whenUs - nowUs; + int64_t delayUs = timeUs - nowUs; (new AMessage(kWhatRender, id()))->post(delayUs); } @@ -270,6 +293,14 @@ void DirectRenderer::onRender() { break; } + if (info.mTimeUs + 15000ll < nowUs) { + ++mNumFramesLate; + } + ++mNumFrames; + + mLatencySum += nowUs - info.mTimeUs; + ++mLatencyCount; + status_t 
err = mVideoDecoder->renderOutputBufferAndRelease(info.mIndex); CHECK_EQ(err, (status_t)OK); diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h index 7219080..44be8f8 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.h +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.h @@ -36,6 +36,10 @@ struct DirectRenderer : public AHandler { void setFormat(size_t trackIndex, const sp &format); void queueAccessUnit(size_t trackIndex, const sp &accessUnit); + void setTimeOffset(int64_t offset); + + int64_t getAvgLatenessUs(); + protected: virtual void onMessageReceived(const sp &msg); virtual ~DirectRenderer(); @@ -63,8 +67,14 @@ private: List mOutputBuffers; bool mRenderPending; - int64_t mFirstRenderTimeUs; - int64_t mFirstRenderRealUs; + + int64_t mTimeOffsetUs; + + int64_t mLatencySum; + size_t mLatencyCount; + + int32_t mNumFramesLate; + int32_t mNumFrames; void onVideoDecoderNotify(); void onRender(); diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp index d9d8a76..6b185db 100644 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp @@ -27,6 +27,7 @@ #include #include #include +#include #include #include #include @@ -60,6 +61,8 @@ struct TunnelRenderer::StreamSource : public BnStreamSource { void doSomeWork(); + void setTimeOffset(int64_t offset); + protected: virtual ~StreamSource(); @@ -75,6 +78,9 @@ private: size_t mNumDeqeued; + int64_t mTimeOffsetUs; + bool mTimeOffsetChanged; + DISALLOW_EVIL_CONSTRUCTORS(StreamSource); }; @@ -82,7 +88,9 @@ private: TunnelRenderer::StreamSource::StreamSource(TunnelRenderer *owner) : mOwner(owner), - mNumDeqeued(0) { + mNumDeqeued(0), + mTimeOffsetUs(0ll), + mTimeOffsetChanged(false) { } TunnelRenderer::StreamSource::~StreamSource() { @@ -110,7 +118,7 @@ void TunnelRenderer::StreamSource::onBufferAvailable(size_t index) { } uint32_t TunnelRenderer::StreamSource::flags() const { - return kFlagAlignedVideoData; + return kFlagAlignedVideoData | kFlagIsRealTimeData; } void TunnelRenderer::StreamSource::doSomeWork() { @@ -124,21 +132,21 @@ void TunnelRenderer::StreamSource::doSomeWork() { ++mNumDeqeued; - if (mNumDeqeued == 1) { - ALOGI("fixing real time now."); - + if (mTimeOffsetChanged) { sp extra = new AMessage; extra->setInt32( IStreamListener::kKeyDiscontinuityMask, - ATSParser::DISCONTINUITY_ABSOLUTE_TIME); + ATSParser::DISCONTINUITY_TIME_OFFSET); - extra->setInt64("timeUs", ALooper::GetNowUs()); + extra->setInt64("offset", mTimeOffsetUs); mListener->issueCommand( IStreamListener::DISCONTINUITY, false /* synchronous */, extra); + + mTimeOffsetChanged = false; } ALOGV("dequeue TS packet of size %d", srcBuffer->size()); @@ -155,18 +163,32 @@ void TunnelRenderer::StreamSource::doSomeWork() { } } +void TunnelRenderer::StreamSource::setTimeOffset(int64_t offset) { + Mutex::Autolock autoLock(mLock); + + if (offset != mTimeOffsetUs) { + mTimeOffsetUs = offset; + mTimeOffsetChanged = true; + } +} + //////////////////////////////////////////////////////////////////////////////// TunnelRenderer::TunnelRenderer( const sp &bufferProducer) : mSurfaceTex(bufferProducer), mStartup(true) { + mStreamSource = new StreamSource(this); } TunnelRenderer::~TunnelRenderer() { destroyPlayer(); } +void TunnelRenderer::setTimeOffset(int64_t offset) { + mStreamSource->setTimeOffset(offset); +} + void 
TunnelRenderer::onMessageReceived(const sp &msg) { switch (msg->what()) { default: @@ -209,8 +231,6 @@ void TunnelRenderer::initPlayer() { sp service = interface_cast(binder); CHECK(service.get() != NULL); - mStreamSource = new StreamSource(this); - mPlayerClient = new PlayerClient; mPlayer = service->create(mPlayerClient, 0); @@ -226,6 +246,8 @@ void TunnelRenderer::initPlayer() { void TunnelRenderer::destroyPlayer() { mStreamSource.clear(); + mPlayer->setVideoSurfaceTexture(NULL); + mPlayer->stop(); mPlayer.clear(); diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.h b/media/libstagefright/wifi-display/sink/TunnelRenderer.h index 8e96665..479e73c 100644 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.h +++ b/media/libstagefright/wifi-display/sink/TunnelRenderer.h @@ -39,6 +39,12 @@ struct TunnelRenderer : public AHandler { void queueBuffer(const sp &buffer); sp dequeueBuffer(); + void setTimeOffset(int64_t offset); + + int64_t getAvgLatenessUs() { + return 0ll; + } + protected: virtual void onMessageReceived(const sp &msg); virtual ~TunnelRenderer(); diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index 158c2da..0d2e347 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -23,22 +23,24 @@ #include "DirectRenderer.h" #include "MediaReceiver.h" #include "ParsedMessage.h" +#include "TimeSyncer.h" #include "TunnelRenderer.h" +#include #include #include #include #include -#include - namespace android { WifiDisplaySink::WifiDisplaySink( + uint32_t flags, const sp &netSession, const sp &bufferProducer, const sp ¬ify) : mState(UNDEFINED), + mFlags(flags), mNetSession(netSession), mSurfaceTex(bufferProducer), mNotify(notify), @@ -46,7 +48,11 @@ WifiDisplaySink::WifiDisplaySink( mUsingTCPInterleaving(false), mSessionID(0), mNextCSeq(1), - mIDRFrameRequestPending(false) { + mIDRFrameRequestPending(false), + mTimeOffsetUs(0ll), + mTimeOffsetValid(false), + mTargetLatencyUs(-1ll), + mSetupDeferred(false) { // We support any and all resolutions, but prefer 720p30 mSinkSupportedVideoFormats.setNativeResolution( VideoFormats::RESOLUTION_CEA, 5); // 1280 x 720 p30 @@ -199,6 +205,16 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { { ALOGI("We're now connected."); mState = CONNECTED; + + if (mFlags & FLAG_SPECIAL_MODE) { + sp notify = new AMessage( + kWhatTimeSyncerNotify, id()); + + mTimeSyncer = new TimeSyncer(mNetSession, notify); + looper()->registerHandler(mTimeSyncer); + + mTimeSyncer->startClient(mRTSPHost.c_str(), 8123); + } break; } @@ -226,6 +242,41 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { break; } + case kWhatTimeSyncerNotify: + { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + if (what == TimeSyncer::kWhatTimeOffset) { + CHECK(msg->findInt64("offset", &mTimeOffsetUs)); + mTimeOffsetValid = true; + + if (mSetupDeferred) { + CHECK_EQ((status_t)OK, + sendSetup( + mSessionID, + "rtsp://x.x.x.x:x/wfd1.0/streamid=0")); + + mSetupDeferred = false; + } + } + break; + } + + case kWhatReportLateness: + { + int64_t latenessUs = mRenderer->getAvgLatenessUs(); + + ALOGI("avg. 
lateness = %lld ms", + (latenessUs + mTargetLatencyUs) / 1000ll); + + mMediaReceiver->notifyLateness( + 0 /* trackIndex */, latenessUs); + + msg->post(kReportLatenessEveryUs); + break; + } + default: TRESPASS(); } @@ -266,15 +317,39 @@ void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { looper()->registerHandler(mRenderer); } + CHECK(mTimeOffsetValid); + + int64_t latencyUs = 300000ll; // 300ms by default + + char val[PROPERTY_VALUE_MAX]; + if (property_get("media.wfd-sink.latency", val, NULL)) { + char *end; + int64_t x = strtoll(val, &end, 10); + + if (end > val && *end == '\0' && x >= 0ll) { + latencyUs = x; + } + } + + if (latencyUs != mTargetLatencyUs) { + mTargetLatencyUs = latencyUs; + + ALOGI("Assuming %lld ms of latency.", latencyUs / 1000ll); + } + + // We are the timesync _client_, + // client time = server time - time offset. + mRenderer->setTimeOffset(-mTimeOffsetUs + mTargetLatencyUs); + sp accessUnit; CHECK(msg->findBuffer("accessUnit", &accessUnit)); -#if USE_TUNNEL_RENDERER - mRenderer->queueBuffer(accessUnit); -#else size_t trackIndex; CHECK(msg->findSize("trackIndex", &trackIndex)); +#if USE_TUNNEL_RENDERER + mRenderer->queueBuffer(accessUnit); +#else sp format; if (msg->findMessage("format", &format)) { mRenderer->setFormat(trackIndex, format); @@ -445,6 +520,8 @@ status_t WifiDisplaySink::onReceivePlayResponse( mState = PLAYING; + (new AMessage(kWhatReportLateness, id()))->post(kReportLatenessEveryUs); + return OK; } @@ -555,6 +632,8 @@ void WifiDisplaySink::onGetParameterRequest( mUsingTCPTransport = true; mUsingTCPInterleaving = true; } + } else if (mFlags & FLAG_SPECIAL_MODE) { + mUsingTCPTransport = true; } body = "wfd_video_formats: "; @@ -735,12 +814,16 @@ void WifiDisplaySink::onSetParameterRequest( const char *content = data->getContent(); if (strstr(content, "wfd_trigger_method: SETUP\r\n") != NULL) { - status_t err = - sendSetup( - sessionID, - "rtsp://x.x.x.x:x/wfd1.0/streamid=0"); + if ((mFlags & FLAG_SPECIAL_MODE) && !mTimeOffsetValid) { + mSetupDeferred = true; + } else { + status_t err = + sendSetup( + sessionID, + "rtsp://x.x.x.x:x/wfd1.0/streamid=0"); - CHECK_EQ(err, (status_t)OK); + CHECK_EQ(err, (status_t)OK); + } } AString response = "RTSP/1.0 200 OK\r\n"; diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h index 01af58b..2b8c6f7 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h @@ -31,6 +31,7 @@ struct AMessage; struct DirectRenderer; struct MediaReceiver; struct ParsedMessage; +struct TimeSyncer; struct TunnelRenderer; #define USE_TUNNEL_RENDERER 0 @@ -43,11 +44,16 @@ struct WifiDisplaySink : public AHandler { kWhatDisconnected, }; + enum Flags { + FLAG_SPECIAL_MODE = 1, + }; + // If no notification message is specified (notify == NULL) // the sink will stop its looper() once the session ends, // otherwise it will post an appropriate notification but leave // the looper() running. 
WifiDisplaySink( + uint32_t flags, const sp &netSession, const sp &bufferProducer = NULL, const sp ¬ify = NULL); @@ -73,6 +79,8 @@ private: kWhatRTSPNotify, kWhatStop, kWhatMediaReceiverNotify, + kWhatTimeSyncerNotify, + kWhatReportLateness, }; struct ResponseID { @@ -89,11 +97,15 @@ private: typedef status_t (WifiDisplaySink::*HandleRTSPResponseFunc)( int32_t sessionID, const sp &msg); + static const int64_t kReportLatenessEveryUs = 1000000ll; + State mState; + uint32_t mFlags; VideoFormats mSinkSupportedVideoFormats; sp mNetSession; sp mSurfaceTex; sp mNotify; + sp mTimeSyncer; bool mUsingTCPTransport; bool mUsingTCPInterleaving; AString mRTSPHost; @@ -117,6 +129,13 @@ private: bool mIDRFrameRequestPending; + int64_t mTimeOffsetUs; + bool mTimeOffsetValid; + + int64_t mTargetLatencyUs; + + bool mSetupDeferred; + status_t sendM2(int32_t sessionID); status_t sendSetup(int32_t sessionID, const char *uri); status_t sendPlay(int32_t sessionID, const char *uri); diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index b8524f6..de66bde 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -23,6 +23,7 @@ #include "Parameters.h" #include "ParsedMessage.h" #include "rtp/RTPSender.h" +#include "TimeSyncer.h" #include #include @@ -157,6 +158,12 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { } if (err == OK) { + sp notify = new AMessage(kWhatTimeSyncerNotify, id()); + mTimeSyncer = new TimeSyncer(mNetSession, notify); + looper()->registerHandler(mTimeSyncer); + + mTimeSyncer->startServer(8123); + mState = AWAITING_CLIENT_CONNECTION; } @@ -520,6 +527,11 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { break; } + case kWhatTimeSyncerNotify: + { + break; + } + default: TRESPASS(); } diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index 724462c..9e72682 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ -30,6 +30,7 @@ namespace android { struct IHDCP; struct IRemoteDisplayClient; struct ParsedMessage; +struct TimeSyncer; // Represents the RTSP server acting as a wifi display source. // Manages incoming connections, sets up Playback sessions as necessary. 
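[Note on the time-sync exchange used by these hunks: TimeSyncer itself is not shown here, but udptest.cpp further down in this patch swaps its local TestHandler out for a TimeSyncer, and the clock-offset arithmetic that handler performed is the relevant background. A minimal sketch of that math follows; the helper names are hypothetical, the TimeInfo fields and formulas come from the removed test code, and the last comment refers to the sink-side call visible in the WifiDisplaySink.cpp hunk above.]

    #include <cstdint>

    struct TimeInfo {
        int64_t mT1;  // client timestamp at send
        int64_t mT2;  // server timestamp at receive
        int64_t mT3;  // server timestamp at send
        int64_t mT4;  // client timestamp at receive
    };

    // Sketch only: one-way delay, assumed symmetric in both directions.
    static int64_t estimateOneWayDelayUs(const TimeInfo &ti) {
        return (ti.mT2 - ti.mT1 + ti.mT4 - ti.mT3) / 2;
    }

    // Offset between the client clock (T1, T4) and the server clock (T2, T3).
    // The sink applies the negated offset plus its target latency when it
    // calls mRenderer->setTimeOffset(-mTimeOffsetUs + mTargetLatencyUs).
    static int64_t estimateClockOffsetUs(const TimeInfo &ti) {
        return (ti.mT2 - ti.mT1 - ti.mT4 + ti.mT3) / 2;
    }
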
@@ -81,6 +82,7 @@ private: kWhatHDCPNotify, kWhatFinishStop2, kWhatTeardownTriggerTimedOut, + kWhatTimeSyncerNotify, }; struct ResponseID { @@ -114,6 +116,7 @@ private: VideoFormats mSupportedSourceVideoFormats; sp mNetSession; sp mClient; + sp mTimeSyncer; struct in_addr mInterfaceAddr; int32_t mSessionID; diff --git a/media/libstagefright/wifi-display/udptest.cpp b/media/libstagefright/wifi-display/udptest.cpp index 86437e0..111846d 100644 --- a/media/libstagefright/wifi-display/udptest.cpp +++ b/media/libstagefright/wifi-display/udptest.cpp @@ -19,292 +19,13 @@ #include #include "ANetworkSession.h" +#include "TimeSyncer.h" #include -#include -#include -#include -#include #include -#include namespace android { -struct TestHandler : public AHandler { - TestHandler(const sp &netSession); - - void startServer(unsigned localPort); - void startClient(const char *remoteHost, unsigned remotePort); - -protected: - virtual ~TestHandler(); - - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatStartServer, - kWhatStartClient, - kWhatUDPNotify, - kWhatSendPacket, - kWhatTimedOut, - }; - - struct TimeInfo { - int64_t mT1; // client timestamp at send - int64_t mT2; // server timestamp at receive - int64_t mT3; // server timestamp at send - int64_t mT4; // client timestamp at receive - }; - - static const int64_t kTimeoutDelayUs = 1000000ll; - - sp mNetSession; - - bool mIsServer; - bool mConnected; - int32_t mUDPSession; - uint32_t mSeqNo; - double mTotalTimeUs; - int32_t mCount; - int64_t mSumOffsets; - - int64_t mPendingT1; - int32_t mTimeoutGeneration; - - void postSendPacket(int64_t delayUs = 0ll); - - void postTimeout(); - void cancelTimeout(); - - DISALLOW_EVIL_CONSTRUCTORS(TestHandler); -}; - -TestHandler::TestHandler(const sp &netSession) - : mNetSession(netSession), - mIsServer(false), - mConnected(false), - mUDPSession(0), - mSeqNo(0), - mTotalTimeUs(0.0), - mCount(0), - mSumOffsets(0ll), - mPendingT1(0ll), - mTimeoutGeneration(0) { -} - -TestHandler::~TestHandler() { -} - -void TestHandler::startServer(unsigned localPort) { - sp msg = new AMessage(kWhatStartServer, id()); - msg->setInt32("localPort", localPort); - msg->post(); -} - -void TestHandler::startClient(const char *remoteHost, unsigned remotePort) { - sp msg = new AMessage(kWhatStartClient, id()); - msg->setString("remoteHost", remoteHost); - msg->setInt32("remotePort", remotePort); - msg->post(); -} - -void TestHandler::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatStartClient: - { - AString remoteHost; - CHECK(msg->findString("remoteHost", &remoteHost)); - - int32_t remotePort; - CHECK(msg->findInt32("remotePort", &remotePort)); - - sp notify = new AMessage(kWhatUDPNotify, id()); - - CHECK_EQ((status_t)OK, - mNetSession->createUDPSession( - 0 /* localPort */, - remoteHost.c_str(), - remotePort, - notify, - &mUDPSession)); - - postSendPacket(); - break; - } - - case kWhatStartServer: - { - mIsServer = true; - - int32_t localPort; - CHECK(msg->findInt32("localPort", &localPort)); - - sp notify = new AMessage(kWhatUDPNotify, id()); - - CHECK_EQ((status_t)OK, - mNetSession->createUDPSession( - localPort, notify, &mUDPSession)); - - break; - } - - case kWhatSendPacket: - { - TimeInfo ti; - memset(&ti, 0, sizeof(ti)); - - ti.mT1 = ALooper::GetNowUs(); - - CHECK_EQ((status_t)OK, - mNetSession->sendRequest( - mUDPSession, &ti, sizeof(ti))); - - mPendingT1 = ti.mT1; - postTimeout(); - break; - } - - case kWhatTimedOut: - { - int32_t generation; - CHECK(msg->findInt32("generation", 
&generation)); - - if (generation != mTimeoutGeneration) { - break; - } - - ALOGI("timed out, sending another request"); - postSendPacket(); - break; - } - - case kWhatUDPNotify: - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - ALOGE("An error occurred in session %d (%d, '%s/%s').", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - mNetSession->destroySession(sessionID); - - cancelTimeout(); - looper()->stop(); - break; - } - - case ANetworkSession::kWhatDatagram: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - sp packet; - CHECK(msg->findBuffer("data", &packet)); - - int64_t arrivalTimeUs; - CHECK(packet->meta()->findInt64( - "arrivalTimeUs", &arrivalTimeUs)); - - CHECK_EQ(packet->size(), sizeof(TimeInfo)); - - TimeInfo *ti = (TimeInfo *)packet->data(); - - if (mIsServer) { - if (!mConnected) { - AString fromAddr; - CHECK(msg->findString("fromAddr", &fromAddr)); - - int32_t fromPort; - CHECK(msg->findInt32("fromPort", &fromPort)); - - CHECK_EQ((status_t)OK, - mNetSession->connectUDPSession( - mUDPSession, fromAddr.c_str(), fromPort)); - - mConnected = true; - } - - ti->mT2 = arrivalTimeUs; - ti->mT3 = ALooper::GetNowUs(); - - CHECK_EQ((status_t)OK, - mNetSession->sendRequest( - mUDPSession, ti, sizeof(*ti))); - } else { - if (ti->mT1 != mPendingT1) { - break; - } - - cancelTimeout(); - mPendingT1 = 0; - - ti->mT4 = arrivalTimeUs; - - // One way delay for a packet to travel from client - // to server or back (assumed to be the same either way). - int64_t delay = - (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2; - - // Offset between the client clock (T1, T4) and the - // server clock (T2, T3) timestamps. 
- int64_t offset = - (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2; - - mSumOffsets += offset; - ++mCount; - - printf("delay = %lld us,\toffset %lld us\n", - delay, - offset); - fflush(stdout); - - postSendPacket(1000000ll / 30); - } - break; - } - - default: - TRESPASS(); - } - - break; - } - - default: - TRESPASS(); - } -} - -void TestHandler::postSendPacket(int64_t delayUs) { - (new AMessage(kWhatSendPacket, id()))->post(delayUs); -} - -void TestHandler::postTimeout() { - sp msg = new AMessage(kWhatTimedOut, id()); - msg->setInt32("generation", mTimeoutGeneration); - msg->post(kTimeoutDelayUs); -} - -void TestHandler::cancelTimeout() { - ++mTimeoutGeneration; -} - } // namespace android static void usage(const char *me) { @@ -379,7 +100,7 @@ int main(int argc, char **argv) { sp looper = new ALooper; - sp handler = new TestHandler(netSession); + sp handler = new TimeSyncer(netSession, NULL /* notify */); looper->registerHandler(handler); if (localPort >= 0) { diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index 3f4216a..0b18484 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -321,7 +321,10 @@ int main(int argc, char **argv) { sp looper = new ALooper; sp sink = new WifiDisplaySink( - session, surface->getIGraphicBufferProducer()); + 0 /* flags */, + session, + surface->getIGraphicBufferProducer()); + looper->registerHandler(sink); if (connectToPort >= 0) { -- cgit v1.1 From 3eca0ac4428aa74f197a2a498d84490c4e30f11d Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Tue, 12 Mar 2013 17:40:55 -0700 Subject: remove CameraServiceTest which wasn't used this test hasn't been compiled or run in a long long time and isn't up to date or relevant. Change-Id: I5a7f1190d4a783e088d44e3e6ef66231cf0cfb49 --- services/camera/tests/CameraServiceTest/Android.mk | 26 - .../tests/CameraServiceTest/CameraServiceTest.cpp | 924 --------------------- .../tests/CameraServiceTest/MODULE_LICENSE_APACHE2 | 0 services/camera/tests/CameraServiceTest/NOTICE | 190 ----- 4 files changed, 1140 deletions(-) delete mode 100644 services/camera/tests/CameraServiceTest/Android.mk delete mode 100644 services/camera/tests/CameraServiceTest/CameraServiceTest.cpp delete mode 100644 services/camera/tests/CameraServiceTest/MODULE_LICENSE_APACHE2 delete mode 100644 services/camera/tests/CameraServiceTest/NOTICE diff --git a/services/camera/tests/CameraServiceTest/Android.mk b/services/camera/tests/CameraServiceTest/Android.mk deleted file mode 100644 index 41b6f63..0000000 --- a/services/camera/tests/CameraServiceTest/Android.mk +++ /dev/null @@ -1,26 +0,0 @@ -LOCAL_PATH:= $(call my-dir) - -include $(CLEAR_VARS) - -LOCAL_SRC_FILES:= CameraServiceTest.cpp - -LOCAL_MODULE:= CameraServiceTest - -LOCAL_MODULE_TAGS := tests - -LOCAL_C_INCLUDES += \ - frameworks/av/libs - -LOCAL_CFLAGS := - -LOCAL_SHARED_LIBRARIES += \ - libbinder \ - libcutils \ - libutils \ - libui \ - libcamera_client \ - libgui - -# Disable it because the ISurface interface may change, and before we have a -# chance to fix this test, we don't want to break normal builds. 
-#include $(BUILD_EXECUTABLE) diff --git a/services/camera/tests/CameraServiceTest/CameraServiceTest.cpp b/services/camera/tests/CameraServiceTest/CameraServiceTest.cpp deleted file mode 100644 index e417b79..0000000 --- a/services/camera/tests/CameraServiceTest/CameraServiceTest.cpp +++ /dev/null @@ -1,924 +0,0 @@ -/* - * Copyright (C) 2010 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "CameraServiceTest" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -using namespace android; - -// -// Assertion and Logging utilities -// -#define INFO(...) \ - do { \ - printf(__VA_ARGS__); \ - printf("\n"); \ - ALOGD(__VA_ARGS__); \ - } while(0) - -void assert_fail(const char *file, int line, const char *func, const char *expr) { - INFO("assertion failed at file %s, line %d, function %s:", - file, line, func); - INFO("%s", expr); - abort(); -} - -void assert_eq_fail(const char *file, int line, const char *func, - const char *expr, int actual) { - INFO("assertion failed at file %s, line %d, function %s:", - file, line, func); - INFO("(expected) %s != (actual) %d", expr, actual); - abort(); -} - -#define ASSERT(e) \ - do { \ - if (!(e)) \ - assert_fail(__FILE__, __LINE__, __func__, #e); \ - } while(0) - -#define ASSERT_EQ(expected, actual) \ - do { \ - int _x = (actual); \ - if (_x != (expected)) \ - assert_eq_fail(__FILE__, __LINE__, __func__, #expected, _x); \ - } while(0) - -// -// Holder service for pass objects between processes. 
-// -class IHolder : public IInterface { -protected: - enum { - HOLDER_PUT = IBinder::FIRST_CALL_TRANSACTION, - HOLDER_GET, - HOLDER_CLEAR - }; -public: - DECLARE_META_INTERFACE(Holder); - - virtual void put(sp obj) = 0; - virtual sp get() = 0; - virtual void clear() = 0; -}; - -class BnHolder : public BnInterface { - virtual status_t onTransact(uint32_t code, - const Parcel& data, - Parcel* reply, - uint32_t flags = 0); -}; - -class BpHolder : public BpInterface { -public: - BpHolder(const sp& impl) - : BpInterface(impl) { - } - - virtual void put(sp obj) { - Parcel data, reply; - data.writeStrongBinder(obj); - remote()->transact(HOLDER_PUT, data, &reply, IBinder::FLAG_ONEWAY); - } - - virtual sp get() { - Parcel data, reply; - remote()->transact(HOLDER_GET, data, &reply); - return reply.readStrongBinder(); - } - - virtual void clear() { - Parcel data, reply; - remote()->transact(HOLDER_CLEAR, data, &reply); - } -}; - -IMPLEMENT_META_INTERFACE(Holder, "CameraServiceTest.Holder"); - -status_t BnHolder::onTransact( - uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { - switch(code) { - case HOLDER_PUT: { - put(data.readStrongBinder()); - return NO_ERROR; - } break; - case HOLDER_GET: { - reply->writeStrongBinder(get()); - return NO_ERROR; - } break; - case HOLDER_CLEAR: { - clear(); - return NO_ERROR; - } break; - default: - return BBinder::onTransact(code, data, reply, flags); - } -} - -class HolderService : public BnHolder { - virtual void put(sp obj) { - mObj = obj; - } - virtual sp get() { - return mObj; - } - virtual void clear() { - mObj.clear(); - } -private: - sp mObj; -}; - -// -// A mock CameraClient -// -class MCameraClient : public BnCameraClient { -public: - virtual void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2); - virtual void dataCallback(int32_t msgType, const sp& data); - virtual void dataCallbackTimestamp(nsecs_t timestamp, - int32_t msgType, const sp& data); - - // new functions - void clearStat(); - enum OP { EQ, GE, LE, GT, LT }; - void assertNotify(int32_t msgType, OP op, int count); - void assertData(int32_t msgType, OP op, int count); - void waitNotify(int32_t msgType, OP op, int count); - void waitData(int32_t msgType, OP op, int count); - void assertDataSize(int32_t msgType, OP op, int dataSize); - - void setReleaser(ICamera *releaser) { - mReleaser = releaser; - } -private: - Mutex mLock; - Condition mCond; - DefaultKeyedVector mNotifyCount; - DefaultKeyedVector mDataCount; - DefaultKeyedVector mDataSize; - bool test(OP op, int v1, int v2); - void assertTest(OP op, int v1, int v2); - - ICamera *mReleaser; -}; - -void MCameraClient::clearStat() { - Mutex::Autolock _l(mLock); - mNotifyCount.clear(); - mDataCount.clear(); - mDataSize.clear(); -} - -bool MCameraClient::test(OP op, int v1, int v2) { - switch (op) { - case EQ: return v1 == v2; - case GT: return v1 > v2; - case LT: return v1 < v2; - case GE: return v1 >= v2; - case LE: return v1 <= v2; - default: ASSERT(0); break; - } - return false; -} - -void MCameraClient::assertTest(OP op, int v1, int v2) { - if (!test(op, v1, v2)) { - ALOGE("assertTest failed: op=%d, v1=%d, v2=%d", op, v1, v2); - ASSERT(0); - } -} - -void MCameraClient::assertNotify(int32_t msgType, OP op, int count) { - Mutex::Autolock _l(mLock); - int v = mNotifyCount.valueFor(msgType); - assertTest(op, v, count); -} - -void MCameraClient::assertData(int32_t msgType, OP op, int count) { - Mutex::Autolock _l(mLock); - int v = mDataCount.valueFor(msgType); - assertTest(op, v, count); -} - -void 
MCameraClient::assertDataSize(int32_t msgType, OP op, int dataSize) { - Mutex::Autolock _l(mLock); - int v = mDataSize.valueFor(msgType); - assertTest(op, v, dataSize); -} - -void MCameraClient::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) { - INFO("%s", __func__); - Mutex::Autolock _l(mLock); - ssize_t i = mNotifyCount.indexOfKey(msgType); - if (i < 0) { - mNotifyCount.add(msgType, 1); - } else { - ++mNotifyCount.editValueAt(i); - } - mCond.signal(); -} - -void MCameraClient::dataCallback(int32_t msgType, const sp& data) { - INFO("%s", __func__); - int dataSize = data->size(); - INFO("data type = %d, size = %d", msgType, dataSize); - Mutex::Autolock _l(mLock); - ssize_t i = mDataCount.indexOfKey(msgType); - if (i < 0) { - mDataCount.add(msgType, 1); - mDataSize.add(msgType, dataSize); - } else { - ++mDataCount.editValueAt(i); - mDataSize.editValueAt(i) = dataSize; - } - mCond.signal(); - - if (msgType == CAMERA_MSG_VIDEO_FRAME) { - ASSERT(mReleaser != NULL); - mReleaser->releaseRecordingFrame(data); - } -} - -void MCameraClient::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, - const sp& data) { - dataCallback(msgType, data); -} - -void MCameraClient::waitNotify(int32_t msgType, OP op, int count) { - INFO("waitNotify: %d, %d, %d", msgType, op, count); - Mutex::Autolock _l(mLock); - while (true) { - int v = mNotifyCount.valueFor(msgType); - if (test(op, v, count)) { - break; - } - mCond.wait(mLock); - } -} - -void MCameraClient::waitData(int32_t msgType, OP op, int count) { - INFO("waitData: %d, %d, %d", msgType, op, count); - Mutex::Autolock _l(mLock); - while (true) { - int v = mDataCount.valueFor(msgType); - if (test(op, v, count)) { - break; - } - mCond.wait(mLock); - } -} - -// -// A mock Surface -// -class MSurface : public BnSurface { -public: - virtual status_t registerBuffers(const BufferHeap& buffers); - virtual void postBuffer(ssize_t offset); - virtual void unregisterBuffers(); - virtual sp requestBuffer(int bufferIdx, int usage); - virtual status_t setBufferCount(int bufferCount); - - // new functions - void clearStat(); - void waitUntil(int c0, int c1, int c2); - -private: - // check callback count - Condition mCond; - Mutex mLock; - int registerBuffersCount; - int postBufferCount; - int unregisterBuffersCount; -}; - -status_t MSurface::registerBuffers(const BufferHeap& buffers) { - INFO("%s", __func__); - Mutex::Autolock _l(mLock); - ++registerBuffersCount; - mCond.signal(); - return NO_ERROR; -} - -void MSurface::postBuffer(ssize_t offset) { - // INFO("%s", __func__); - Mutex::Autolock _l(mLock); - ++postBufferCount; - mCond.signal(); -} - -void MSurface::unregisterBuffers() { - INFO("%s", __func__); - Mutex::Autolock _l(mLock); - ++unregisterBuffersCount; - mCond.signal(); -} - -sp MSurface::requestBuffer(int bufferIdx, int usage) { - INFO("%s", __func__); - return NULL; -} - -status_t MSurface::setBufferCount(int bufferCount) { - INFO("%s", __func__); - return NULL; -} - -void MSurface::clearStat() { - Mutex::Autolock _l(mLock); - registerBuffersCount = 0; - postBufferCount = 0; - unregisterBuffersCount = 0; -} - -void MSurface::waitUntil(int c0, int c1, int c2) { - INFO("waitUntil: %d %d %d", c0, c1, c2); - Mutex::Autolock _l(mLock); - while (true) { - if (registerBuffersCount >= c0 && - postBufferCount >= c1 && - unregisterBuffersCount >= c2) { - break; - } - mCond.wait(mLock); - } -} - -// -// Utilities to use the Holder service -// -sp getHolder() { - sp sm = defaultServiceManager(); - ASSERT(sm != 0); - sp binder = 
sm->getService(String16("CameraServiceTest.Holder")); - ASSERT(binder != 0); - sp holder = interface_cast(binder); - ASSERT(holder != 0); - return holder; -} - -void putTempObject(sp obj) { - INFO("%s", __func__); - getHolder()->put(obj); -} - -sp getTempObject() { - INFO("%s", __func__); - return getHolder()->get(); -} - -void clearTempObject() { - INFO("%s", __func__); - getHolder()->clear(); -} - -// -// Get a Camera Service -// -sp getCameraService() { - sp sm = defaultServiceManager(); - ASSERT(sm != 0); - sp binder = sm->getService(String16("media.camera")); - ASSERT(binder != 0); - sp cs = interface_cast(binder); - ASSERT(cs != 0); - return cs; -} - -int getNumberOfCameras() { - sp cs = getCameraService(); - return cs->getNumberOfCameras(); -} - -// -// Various Connect Tests -// -void testConnect(int cameraId) { - INFO("%s", __func__); - sp cs = getCameraService(); - sp cc = new MCameraClient(); - sp c = cs->connect(cc, cameraId); - ASSERT(c != 0); - c->disconnect(); -} - -void testAllowConnectOnceOnly(int cameraId) { - INFO("%s", __func__); - sp cs = getCameraService(); - // Connect the first client. - sp cc = new MCameraClient(); - sp c = cs->connect(cc, cameraId); - ASSERT(c != 0); - // Same client -- ok. - ASSERT(cs->connect(cc, cameraId) != 0); - // Different client -- not ok. - sp cc2 = new MCameraClient(); - ASSERT(cs->connect(cc2, cameraId) == 0); - c->disconnect(); -} - -void testReconnectFailed() { - INFO("%s", __func__); - sp c = interface_cast(getTempObject()); - sp cc = new MCameraClient(); - ASSERT(c->connect(cc) != NO_ERROR); -} - -void testReconnectSuccess() { - INFO("%s", __func__); - sp c = interface_cast(getTempObject()); - sp cc = new MCameraClient(); - ASSERT(c->connect(cc) == NO_ERROR); - c->disconnect(); -} - -void testLockFailed() { - INFO("%s", __func__); - sp c = interface_cast(getTempObject()); - ASSERT(c->lock() != NO_ERROR); -} - -void testLockUnlockSuccess() { - INFO("%s", __func__); - sp c = interface_cast(getTempObject()); - ASSERT(c->lock() == NO_ERROR); - ASSERT(c->unlock() == NO_ERROR); -} - -void testLockSuccess() { - INFO("%s", __func__); - sp c = interface_cast(getTempObject()); - ASSERT(c->lock() == NO_ERROR); - c->disconnect(); -} - -// -// Run the connect tests in another process. -// -const char *gExecutable; - -struct FunctionTableEntry { - const char *name; - void (*func)(); -}; - -FunctionTableEntry function_table[] = { -#define ENTRY(x) {#x, &x} - ENTRY(testReconnectFailed), - ENTRY(testReconnectSuccess), - ENTRY(testLockUnlockSuccess), - ENTRY(testLockFailed), - ENTRY(testLockSuccess), -#undef ENTRY -}; - -void runFunction(const char *tag) { - INFO("runFunction: %s", tag); - int entries = sizeof(function_table) / sizeof(function_table[0]); - for (int i = 0; i < entries; i++) { - if (strcmp(function_table[i].name, tag) == 0) { - (*function_table[i].func)(); - return; - } - } - ASSERT(0); -} - -void runInAnotherProcess(const char *tag) { - pid_t pid = fork(); - if (pid == 0) { - execlp(gExecutable, gExecutable, tag, NULL); - ASSERT(0); - } else { - int status; - ASSERT_EQ(pid, wait(&status)); - ASSERT_EQ(0, status); - } -} - -void testReconnect(int cameraId) { - INFO("%s", __func__); - sp cs = getCameraService(); - sp cc = new MCameraClient(); - sp c = cs->connect(cc, cameraId); - ASSERT(c != 0); - // Reconnect to the same client -- ok. - ASSERT(c->connect(cc) == NO_ERROR); - // Reconnect to a different client (but the same pid) -- ok. 
- sp cc2 = new MCameraClient(); - ASSERT(c->connect(cc2) == NO_ERROR); - c->disconnect(); - cc->assertNotify(CAMERA_MSG_ERROR, MCameraClient::EQ, 0); -} - -void testLockUnlock(int cameraId) { - sp cs = getCameraService(); - sp cc = new MCameraClient(); - sp c = cs->connect(cc, cameraId); - ASSERT(c != 0); - // We can lock as many times as we want. - ASSERT(c->lock() == NO_ERROR); - ASSERT(c->lock() == NO_ERROR); - // Lock from a different process -- not ok. - putTempObject(c->asBinder()); - runInAnotherProcess("testLockFailed"); - // Unlock then lock from a different process -- ok. - ASSERT(c->unlock() == NO_ERROR); - runInAnotherProcess("testLockUnlockSuccess"); - // Unlock then lock from a different process -- ok. - runInAnotherProcess("testLockSuccess"); - clearTempObject(); -} - -void testReconnectFromAnotherProcess(int cameraId) { - INFO("%s", __func__); - - sp cs = getCameraService(); - sp cc = new MCameraClient(); - sp c = cs->connect(cc, cameraId); - ASSERT(c != 0); - // Reconnect from a different process -- not ok. - putTempObject(c->asBinder()); - runInAnotherProcess("testReconnectFailed"); - // Unlock then reconnect from a different process -- ok. - ASSERT(c->unlock() == NO_ERROR); - runInAnotherProcess("testReconnectSuccess"); - clearTempObject(); -} - -// We need to flush the command buffer after the reference -// to ICamera is gone. The sleep is for the server to run -// the destructor for it. -static void flushCommands() { - IPCThreadState::self()->flushCommands(); - usleep(200000); // 200ms -} - -// Run a test case -#define RUN(class_name, cameraId) do { \ - { \ - INFO(#class_name); \ - class_name instance; \ - instance.init(cameraId); \ - instance.run(); \ - } \ - flushCommands(); \ -} while(0) - -// Base test case after the the camera is connected. -class AfterConnect { -public: - void init(int cameraId) { - cs = getCameraService(); - cc = new MCameraClient(); - c = cs->connect(cc, cameraId); - ASSERT(c != 0); - } - -protected: - sp cs; - sp cc; - sp c; - - ~AfterConnect() { - c->disconnect(); - c.clear(); - cc.clear(); - cs.clear(); - } -}; - -class TestSetPreviewDisplay : public AfterConnect { -public: - void run() { - sp surface = new MSurface(); - ASSERT(c->setPreviewDisplay(surface) == NO_ERROR); - c->disconnect(); - cc->assertNotify(CAMERA_MSG_ERROR, MCameraClient::EQ, 0); - } -}; - -class TestStartPreview : public AfterConnect { -public: - void run() { - sp surface = new MSurface(); - ASSERT(c->setPreviewDisplay(surface) == NO_ERROR); - - ASSERT(c->startPreview() == NO_ERROR); - ASSERT(c->previewEnabled() == true); - - surface->waitUntil(1, 10, 0); // needs 1 registerBuffers and 10 postBuffer - surface->clearStat(); - - sp another_surface = new MSurface(); - c->setPreviewDisplay(another_surface); // just to make sure unregisterBuffers - // is called. - surface->waitUntil(0, 0, 1); // needs unregisterBuffers - - cc->assertNotify(CAMERA_MSG_ERROR, MCameraClient::EQ, 0); - } -}; - -class TestStartPreviewWithoutDisplay : public AfterConnect { -public: - void run() { - ASSERT(c->startPreview() == NO_ERROR); - ASSERT(c->previewEnabled() == true); - c->disconnect(); - cc->assertNotify(CAMERA_MSG_ERROR, MCameraClient::EQ, 0); - } -}; - -// Base test case after the the camera is connected and the preview is started. 
-class AfterStartPreview : public AfterConnect { -public: - void init(int cameraId) { - AfterConnect::init(cameraId); - surface = new MSurface(); - ASSERT(c->setPreviewDisplay(surface) == NO_ERROR); - ASSERT(c->startPreview() == NO_ERROR); - } - -protected: - sp surface; - - ~AfterStartPreview() { - surface.clear(); - } -}; - -class TestAutoFocus : public AfterStartPreview { -public: - void run() { - cc->assertNotify(CAMERA_MSG_FOCUS, MCameraClient::EQ, 0); - c->autoFocus(); - cc->waitNotify(CAMERA_MSG_FOCUS, MCameraClient::EQ, 1); - c->disconnect(); - cc->assertNotify(CAMERA_MSG_ERROR, MCameraClient::EQ, 0); - } -}; - -class TestStopPreview : public AfterStartPreview { -public: - void run() { - ASSERT(c->previewEnabled() == true); - c->stopPreview(); - ASSERT(c->previewEnabled() == false); - c->disconnect(); - cc->assertNotify(CAMERA_MSG_ERROR, MCameraClient::EQ, 0); - } -}; - -class TestTakePicture: public AfterStartPreview { -public: - void run() { - ASSERT(c->takePicture() == NO_ERROR); - cc->waitNotify(CAMERA_MSG_SHUTTER, MCameraClient::EQ, 1); - cc->waitData(CAMERA_MSG_RAW_IMAGE, MCameraClient::EQ, 1); - cc->waitData(CAMERA_MSG_COMPRESSED_IMAGE, MCameraClient::EQ, 1); - c->stopPreview(); - c->disconnect(); - cc->assertNotify(CAMERA_MSG_ERROR, MCameraClient::EQ, 0); - } -}; - -class TestTakeMultiplePictures: public AfterStartPreview { -public: - void run() { - for (int i = 0; i < 10; i++) { - cc->clearStat(); - ASSERT(c->takePicture() == NO_ERROR); - cc->waitNotify(CAMERA_MSG_SHUTTER, MCameraClient::EQ, 1); - cc->waitData(CAMERA_MSG_RAW_IMAGE, MCameraClient::EQ, 1); - cc->waitData(CAMERA_MSG_COMPRESSED_IMAGE, MCameraClient::EQ, 1); - } - c->disconnect(); - cc->assertNotify(CAMERA_MSG_ERROR, MCameraClient::EQ, 0); - } -}; - -class TestGetParameters: public AfterStartPreview { -public: - void run() { - String8 param_str = c->getParameters(); - INFO("%s", static_cast(param_str)); - } -}; - -static bool getNextSize(const char **ptrS, int *w, int *h) { - const char *s = *ptrS; - - // skip over ',' - if (*s == ',') s++; - - // remember start position in p - const char *p = s; - while (*s != '\0' && *s != 'x') { - s++; - } - if (*s == '\0') return false; - - // get the width - *w = atoi(p); - - // skip over 'x' - ASSERT(*s == 'x'); - p = s + 1; - while (*s != '\0' && *s != ',') { - s++; - } - - // get the height - *h = atoi(p); - *ptrS = s; - return true; -} - -class TestPictureSize : public AfterStartPreview { -public: - void checkOnePicture(int w, int h) { - const float rate = 0.9; // byte per pixel limit - int pixels = w * h; - - CameraParameters param(c->getParameters()); - param.setPictureSize(w, h); - // disable thumbnail to get more accurate size. 
- param.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, 0); - param.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, 0); - c->setParameters(param.flatten()); - - cc->clearStat(); - ASSERT(c->takePicture() == NO_ERROR); - cc->waitData(CAMERA_MSG_RAW_IMAGE, MCameraClient::EQ, 1); - //cc->assertDataSize(CAMERA_MSG_RAW_IMAGE, MCameraClient::EQ, pixels*3/2); - cc->waitData(CAMERA_MSG_COMPRESSED_IMAGE, MCameraClient::EQ, 1); - cc->assertDataSize(CAMERA_MSG_COMPRESSED_IMAGE, MCameraClient::LT, - int(pixels * rate)); - cc->assertDataSize(CAMERA_MSG_COMPRESSED_IMAGE, MCameraClient::GT, 0); - cc->assertNotify(CAMERA_MSG_ERROR, MCameraClient::EQ, 0); - } - - void run() { - CameraParameters param(c->getParameters()); - int w, h; - const char *s = param.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES); - while (getNextSize(&s, &w, &h)) { - ALOGD("checking picture size %dx%d", w, h); - checkOnePicture(w, h); - } - } -}; - -class TestPreviewCallbackFlag : public AfterConnect { -public: - void run() { - sp surface = new MSurface(); - ASSERT(c->setPreviewDisplay(surface) == NO_ERROR); - - // Try all flag combinations. - for (int v = 0; v < 8; v++) { - ALOGD("TestPreviewCallbackFlag: flag=%d", v); - usleep(100000); // sleep a while to clear the in-flight callbacks. - cc->clearStat(); - c->setPreviewCallbackFlag(v); - ASSERT(c->previewEnabled() == false); - ASSERT(c->startPreview() == NO_ERROR); - ASSERT(c->previewEnabled() == true); - sleep(2); - c->stopPreview(); - if ((v & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) == 0) { - cc->assertData(CAMERA_MSG_PREVIEW_FRAME, MCameraClient::EQ, 0); - } else { - if ((v & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) == 0) { - cc->assertData(CAMERA_MSG_PREVIEW_FRAME, MCameraClient::GE, 10); - } else { - cc->assertData(CAMERA_MSG_PREVIEW_FRAME, MCameraClient::EQ, 1); - } - } - } - } -}; - -class TestRecording : public AfterConnect { -public: - void run() { - ASSERT(c->recordingEnabled() == false); - sp surface = new MSurface(); - ASSERT(c->setPreviewDisplay(surface) == NO_ERROR); - c->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK); - cc->setReleaser(c.get()); - c->startRecording(); - ASSERT(c->recordingEnabled() == true); - sleep(2); - c->stopRecording(); - usleep(100000); // sleep a while to clear the in-flight callbacks. 
- cc->setReleaser(NULL); - cc->assertData(CAMERA_MSG_VIDEO_FRAME, MCameraClient::GE, 10); - } -}; - -class TestPreviewSize : public AfterStartPreview { -public: - void checkOnePicture(int w, int h) { - int size = w*h*3/2; // should read from parameters - - c->stopPreview(); - - CameraParameters param(c->getParameters()); - param.setPreviewSize(w, h); - c->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK); - c->setParameters(param.flatten()); - - c->startPreview(); - - cc->clearStat(); - cc->waitData(CAMERA_MSG_PREVIEW_FRAME, MCameraClient::GE, 1); - cc->assertDataSize(CAMERA_MSG_PREVIEW_FRAME, MCameraClient::EQ, size); - } - - void run() { - CameraParameters param(c->getParameters()); - int w, h; - const char *s = param.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES); - while (getNextSize(&s, &w, &h)) { - ALOGD("checking preview size %dx%d", w, h); - checkOnePicture(w, h); - } - } -}; - -void runHolderService() { - defaultServiceManager()->addService( - String16("CameraServiceTest.Holder"), new HolderService()); - ProcessState::self()->startThreadPool(); -} - -int main(int argc, char **argv) -{ - if (argc != 1) { - runFunction(argv[1]); - return 0; - } - INFO("CameraServiceTest start"); - gExecutable = argv[0]; - runHolderService(); - int n = getNumberOfCameras(); - INFO("%d Cameras available", n); - - for (int id = 0; id < n; id++) { - INFO("Testing camera %d", id); - testConnect(id); flushCommands(); - testAllowConnectOnceOnly(id); flushCommands(); - testReconnect(id); flushCommands(); - testLockUnlock(id); flushCommands(); - testReconnectFromAnotherProcess(id); flushCommands(); - - RUN(TestSetPreviewDisplay, id); - RUN(TestStartPreview, id); - RUN(TestStartPreviewWithoutDisplay, id); - RUN(TestAutoFocus, id); - RUN(TestStopPreview, id); - RUN(TestTakePicture, id); - RUN(TestTakeMultiplePictures, id); - RUN(TestGetParameters, id); - RUN(TestPictureSize, id); - RUN(TestPreviewCallbackFlag, id); - RUN(TestRecording, id); - RUN(TestPreviewSize, id); - } - - INFO("CameraServiceTest finished"); -} diff --git a/services/camera/tests/CameraServiceTest/MODULE_LICENSE_APACHE2 b/services/camera/tests/CameraServiceTest/MODULE_LICENSE_APACHE2 deleted file mode 100644 index e69de29..0000000 diff --git a/services/camera/tests/CameraServiceTest/NOTICE b/services/camera/tests/CameraServiceTest/NOTICE deleted file mode 100644 index c5b1efa..0000000 --- a/services/camera/tests/CameraServiceTest/NOTICE +++ /dev/null @@ -1,190 +0,0 @@ - - Copyright (c) 2005-2008, The Android Open Source Project - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - -- cgit v1.1 From 99617adda9bc46c43f511f0940bc735c73de61de Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Tue, 12 Mar 2013 18:42:23 -0700 Subject: remove uses of Surface in favor or IGraphicBufferProducer Change-Id: I13d7a9553aa335bca790a3a59d389d7533c83d57 --- camera/Camera.cpp | 22 ++-------------------- camera/ICamera.cpp | 19 ------------------- include/camera/Camera.h | 3 --- include/camera/ICamera.h | 3 --- include/media/IMediaRecorder.h | 2 +- include/media/MediaRecorderBase.h | 2 +- include/media/mediarecorder.h | 2 +- include/media/stagefright/CameraSource.h | 6 +++--- include/media/stagefright/CameraSourceTimeLapse.h | 4 ++-- media/libmedia/IMediaRecorder.cpp | 6 +++--- media/libmedia/mediarecorder.cpp | 2 +- .../libmediaplayerservice/MediaRecorderClient.cpp | 2 +- media/libmediaplayerservice/MediaRecorderClient.h | 2 +- .../libmediaplayerservice/StagefrightRecorder.cpp | 2 +- media/libmediaplayerservice/StagefrightRecorder.h | 4 ++-- media/libstagefright/CameraSource.cpp | 6 +++--- media/libstagefright/CameraSourceTimeLapse.cpp | 4 ++-- 17 files changed, 24 insertions(+), 67 deletions(-) diff --git a/camera/Camera.cpp b/camera/Camera.cpp index e8908d2..1b136de 100644 --- a/camera/Camera.cpp +++ b/camera/Camera.cpp @@ -96,32 +96,14 @@ status_t Camera::unlock() return c->unlock(); } -// pass the buffered Surface to the camera service -status_t Camera::setPreviewDisplay(const sp& surface) -{ - ALOGV("setPreviewDisplay(%p)", surface.get()); - sp c = mCamera; - if (c == 0) return NO_INIT; - if (surface != 0) { - return c->setPreviewDisplay(surface); - } else { - ALOGD("app passed NULL surface"); - return c->setPreviewDisplay(0); - } -} - // pass the buffered IGraphicBufferProducer to the camera service status_t Camera::setPreviewTexture(const sp& bufferProducer) { ALOGV("setPreviewTexture(%p)", bufferProducer.get()); sp c = mCamera; if (c == 0) return NO_INIT; - if (bufferProducer != 0) { - return c->setPreviewTexture(bufferProducer); - } else { - ALOGD("app passed NULL surface"); - return c->setPreviewTexture(0); - } + ALOGD_IF(bufferProducer == 0, "app passed NULL surface"); + return c->setPreviewTexture(bufferProducer); } // start preview mode diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp index 5d210e7..8900867 100644 --- a/camera/ICamera.cpp +++ b/camera/ICamera.cpp @@ -29,7 +29,6 @@ namespace android { enum { DISCONNECT = IBinder::FIRST_CALL_TRANSACTION, - SET_PREVIEW_DISPLAY, SET_PREVIEW_TEXTURE, SET_PREVIEW_CALLBACK_FLAG, START_PREVIEW, @@ -68,17 +67,6 @@ public: remote()->transact(DISCONNECT, data, &reply); } - // pass the buffered Surface to the camera service - status_t setPreviewDisplay(const sp& surface) - { - ALOGV("setPreviewDisplay"); - Parcel data, reply; - data.writeInterfaceToken(ICamera::getInterfaceDescriptor()); - Surface::writeToParcel(surface, &data); - remote()->transact(SET_PREVIEW_DISPLAY, data, &reply); - return reply.readInt32(); - } - // pass the buffered IGraphicBufferProducer to the camera service status_t setPreviewTexture(const sp& bufferProducer) { @@ -282,13 +270,6 @@ status_t BnCamera::onTransact( disconnect(); return NO_ERROR; } break; - case SET_PREVIEW_DISPLAY: { - ALOGV("SET_PREVIEW_DISPLAY"); - CHECK_INTERFACE(ICamera, data, reply); - sp surface = Surface::readFromParcel(data); - reply->writeInt32(setPreviewDisplay(surface)); - return NO_ERROR; - } break; case SET_PREVIEW_TEXTURE: { ALOGV("SET_PREVIEW_TEXTURE"); CHECK_INTERFACE(ICamera, data, reply); diff --git a/include/camera/Camera.h 
b/include/camera/Camera.h index 71c66ce..37626a4 100644 --- a/include/camera/Camera.h +++ b/include/camera/Camera.h @@ -74,9 +74,6 @@ public: status_t lock(); status_t unlock(); - // pass the buffered Surface to the camera service - status_t setPreviewDisplay(const sp& surface); - // pass the buffered IGraphicBufferProducer to the camera service status_t setPreviewTexture(const sp& bufferProducer); diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h index eccaa41..2236c1f 100644 --- a/include/camera/ICamera.h +++ b/include/camera/ICamera.h @@ -46,9 +46,6 @@ public: // allow other processes to use this ICamera interface virtual status_t unlock() = 0; - // pass the buffered Surface to the camera service - virtual status_t setPreviewDisplay(const sp& surface) = 0; - // pass the buffered IGraphicBufferProducer to the camera service virtual status_t setPreviewTexture( const sp& bufferProducer) = 0; diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h index 8d7f11d..3e67550 100644 --- a/include/media/IMediaRecorder.h +++ b/include/media/IMediaRecorder.h @@ -35,7 +35,7 @@ public: virtual status_t setCamera(const sp& camera, const sp& proxy) = 0; - virtual status_t setPreviewSurface(const sp& surface) = 0; + virtual status_t setPreviewSurface(const sp& surface) = 0; virtual status_t setVideoSource(int vs) = 0; virtual status_t setAudioSource(int as) = 0; virtual status_t setOutputFormat(int of) = 0; diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h index 8dd40d2..d7ac302 100644 --- a/include/media/MediaRecorderBase.h +++ b/include/media/MediaRecorderBase.h @@ -42,7 +42,7 @@ struct MediaRecorderBase { virtual status_t setVideoFrameRate(int frames_per_second) = 0; virtual status_t setCamera(const sp& camera, const sp& proxy) = 0; - virtual status_t setPreviewSurface(const sp& surface) = 0; + virtual status_t setPreviewSurface(const sp& surface) = 0; virtual status_t setOutputFile(const char *path) = 0; virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0; virtual status_t setOutputFileAuxiliary(int fd) {return INVALID_OPERATION;} diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h index 3b33479..88a42a0 100644 --- a/include/media/mediarecorder.h +++ b/include/media/mediarecorder.h @@ -207,7 +207,7 @@ public: void died(); status_t initCheck(); status_t setCamera(const sp& camera, const sp& proxy); - status_t setPreviewSurface(const sp& surface); + status_t setPreviewSurface(const sp& surface); status_t setVideoSource(int vs); status_t setAudioSource(int as); status_t setOutputFormat(int of); diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h index cf38b14..a829916 100644 --- a/include/media/stagefright/CameraSource.h +++ b/include/media/stagefright/CameraSource.h @@ -82,7 +82,7 @@ public: uid_t clientUid, Size videoSize, int32_t frameRate, - const sp& surface, + const sp& surface, bool storeMetaDataInVideoBuffers = false); virtual ~CameraSource(); @@ -154,7 +154,7 @@ protected: sp mCamera; sp mCameraRecordingProxy; sp mDeathNotifier; - sp mSurface; + sp mSurface; sp mMeta; int64_t mStartTimeUs; @@ -169,7 +169,7 @@ protected: CameraSource(const sp& camera, const sp& proxy, int32_t cameraId, const String16& clientName, uid_t clientUid, Size videoSize, int32_t frameRate, - const sp& surface, + const sp& surface, bool storeMetaDataInVideoBuffers); virtual void startCameraRecording(); diff --git a/include/media/stagefright/CameraSourceTimeLapse.h 
b/include/media/stagefright/CameraSourceTimeLapse.h index 774772b..6b7a63c 100644 --- a/include/media/stagefright/CameraSourceTimeLapse.h +++ b/include/media/stagefright/CameraSourceTimeLapse.h @@ -40,7 +40,7 @@ public: uid_t clientUid, Size videoSize, int32_t videoFrameRate, - const sp& surface, + const sp& surface, int64_t timeBetweenTimeLapseFrameCaptureUs); virtual ~CameraSourceTimeLapse(); @@ -115,7 +115,7 @@ private: uid_t clientUid, Size videoSize, int32_t videoFrameRate, - const sp& surface, + const sp& surface, int64_t timeBetweenTimeLapseFrameCaptureUs); // Wrapper over CameraSource::signalBufferReturned() to implement quick stop. diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp index c935d97..8e58162 100644 --- a/media/libmedia/IMediaRecorder.cpp +++ b/media/libmedia/IMediaRecorder.cpp @@ -87,12 +87,12 @@ public: return interface_cast(reply.readStrongBinder()); } - status_t setPreviewSurface(const sp& surface) + status_t setPreviewSurface(const sp& surface) { ALOGV("setPreviewSurface(%p)", surface.get()); Parcel data, reply; data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor()); - Surface::writeToParcel(surface, &data); + data.writeStrongBinder(surface->asBinder()); remote()->transact(SET_PREVIEW_SURFACE, data, &reply); return reply.readInt32(); } @@ -443,7 +443,7 @@ status_t BnMediaRecorder::onTransact( case SET_PREVIEW_SURFACE: { ALOGV("SET_PREVIEW_SURFACE"); CHECK_INTERFACE(IMediaRecorder, data, reply); - sp surface = Surface::readFromParcel(data); + sp surface = interface_cast(data.readStrongBinder()); reply->writeInt32(setPreviewSurface(surface)); return NO_ERROR; } break; diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp index 3ac98cc..3710e46 100644 --- a/media/libmedia/mediarecorder.cpp +++ b/media/libmedia/mediarecorder.cpp @@ -49,7 +49,7 @@ status_t MediaRecorder::setCamera(const sp& camera, const sp& surface) +status_t MediaRecorder::setPreviewSurface(const sp& surface) { ALOGV("setPreviewSurface(%p)", surface.get()); if (mMediaRecorder == NULL) { diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp index a52b238..a9820e0 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.cpp +++ b/media/libmediaplayerservice/MediaRecorderClient.cpp @@ -81,7 +81,7 @@ status_t MediaRecorderClient::setCamera(const sp& camera, return mRecorder->setCamera(camera, proxy); } -status_t MediaRecorderClient::setPreviewSurface(const sp& surface) +status_t MediaRecorderClient::setPreviewSurface(const sp& surface) { ALOGV("setPreviewSurface"); Mutex::Autolock lock(mLock); diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h index bd0eaf1..a65ec9f 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.h +++ b/media/libmediaplayerservice/MediaRecorderClient.h @@ -32,7 +32,7 @@ class MediaRecorderClient : public BnMediaRecorder public: virtual status_t setCamera(const sp& camera, const sp& proxy); - virtual status_t setPreviewSurface(const sp& surface); + virtual status_t setPreviewSurface(const sp& surface); virtual status_t setVideoSource(int vs); virtual status_t setAudioSource(int as); virtual status_t setOutputFormat(int of); diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp index f570856..c2c9985 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.cpp +++ 
b/media/libmediaplayerservice/StagefrightRecorder.cpp @@ -224,7 +224,7 @@ status_t StagefrightRecorder::setCamera(const sp &camera, return OK; } -status_t StagefrightRecorder::setPreviewSurface(const sp &surface) { +status_t StagefrightRecorder::setPreviewSurface(const sp &surface) { ALOGV("setPreviewSurface: %p", surface.get()); mPreviewSurface = surface; diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h index fbe6fa6..c864207 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.h +++ b/media/libmediaplayerservice/StagefrightRecorder.h @@ -51,7 +51,7 @@ struct StagefrightRecorder : public MediaRecorderBase { virtual status_t setVideoSize(int width, int height); virtual status_t setVideoFrameRate(int frames_per_second); virtual status_t setCamera(const sp& camera, const sp& proxy); - virtual status_t setPreviewSurface(const sp& surface); + virtual status_t setPreviewSurface(const sp& surface); virtual status_t setOutputFile(const char *path); virtual status_t setOutputFile(int fd, int64_t offset, int64_t length); virtual status_t setParameters(const String8& params); @@ -71,7 +71,7 @@ struct StagefrightRecorder : public MediaRecorderBase { private: sp mCamera; sp mCameraProxy; - sp mPreviewSurface; + sp mPreviewSurface; sp mListener; String16 mClientName; uid_t mClientUid; diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp index f8557d0..5a26b06 100644 --- a/media/libstagefright/CameraSource.cpp +++ b/media/libstagefright/CameraSource.cpp @@ -140,7 +140,7 @@ CameraSource *CameraSource::CreateFromCamera( uid_t clientUid, Size videoSize, int32_t frameRate, - const sp& surface, + const sp& surface, bool storeMetaDataInVideoBuffers) { CameraSource *source = new CameraSource(camera, proxy, cameraId, @@ -157,7 +157,7 @@ CameraSource::CameraSource( uid_t clientUid, Size videoSize, int32_t frameRate, - const sp& surface, + const sp& surface, bool storeMetaDataInVideoBuffers) : mCameraFlags(0), mNumInputBuffers(0), @@ -536,7 +536,7 @@ status_t CameraSource::initWithCameraAccess( if (mSurface != NULL) { // This CHECK is good, since we just passed the lock/unlock // check earlier by calling mCamera->setParameters(). - CHECK_EQ((status_t)OK, mCamera->setPreviewDisplay(mSurface)); + CHECK_EQ((status_t)OK, mCamera->setPreviewTexture(mSurface)); } // By default, do not store metadata in video buffers diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp index 2ed2223..20214e8 100644 --- a/media/libstagefright/CameraSourceTimeLapse.cpp +++ b/media/libstagefright/CameraSourceTimeLapse.cpp @@ -40,7 +40,7 @@ CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera( uid_t clientUid, Size videoSize, int32_t videoFrameRate, - const sp& surface, + const sp& surface, int64_t timeBetweenFrameCaptureUs) { CameraSourceTimeLapse *source = new @@ -66,7 +66,7 @@ CameraSourceTimeLapse::CameraSourceTimeLapse( uid_t clientUid, Size videoSize, int32_t videoFrameRate, - const sp& surface, + const sp& surface, int64_t timeBetweenFrameCaptureUs) : CameraSource(camera, proxy, cameraId, clientName, clientUid, videoSize, videoFrameRate, surface, true), -- cgit v1.1 From 126568c7aeeb5570789e70a310477f44dbdbd885 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 12 Mar 2013 15:55:43 -0700 Subject: Attempt to recover from network stalls by dropping frames on the source side. 
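The recovery path wired up in the diffs below watches the TCP send queue on the outgoing socket: ANetworkSession polls the number of unsent bytes with the SIOCOUTQ ioctl after each write, and when the backlog exceeds roughly 50 KiB it posts a kWhatNetworkStall notification, rate-limited to one report per 500 ms. That notification is relayed through RTPSender and MediaSender up to the WifiDisplaySource playback session, which asks the video Converter to drop an incoming access unit before it ever reaches the encoder. A minimal sketch of the send-queue check, assuming a plain POSIX socket fd (the helper name and return convention are illustrative, not part of the patch; the 50 KiB threshold mirrors the figure used below):

    // Report whether the kernel send queue for this socket has backed up past
    // the stall threshold. SIOCOUTQ returns the bytes queued but not yet sent.
    #include <linux/sockios.h>   // SIOCOUTQ
    #include <sys/ioctl.h>

    static bool sendQueueIsBackedUp(int sockfd, int *numBytesQueued) {
        static const int kStallThresholdBytes = 50 * 1024;
        if (ioctl(sockfd, SIOCOUTQ, numBytesQueued) != 0) {
            return false;  // ioctl failed; treat as "no stall detected"
        }
        return *numBytesQueued > kStallThresholdBytes;
    }

Dropping a frame on the source side keeps the sender from queueing ever more data behind a stalled connection, at the cost of a momentary glitch on the sink.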
Change-Id: I5f9eb4f5acb624a9e5bc0087801fb5a4a9ade35c --- .../wifi-display/ANetworkSession.cpp | 26 ++++++++++++++++++---- .../libstagefright/wifi-display/ANetworkSession.h | 1 + media/libstagefright/wifi-display/MediaSender.cpp | 16 +++++++++++++ media/libstagefright/wifi-display/MediaSender.h | 2 ++ .../libstagefright/wifi-display/rtp/RTPSender.cpp | 19 ++++++++++++++++ media/libstagefright/wifi-display/rtp/RTPSender.h | 2 ++ .../wifi-display/sink/WifiDisplaySink.cpp | 2 +- .../wifi-display/source/Converter.cpp | 18 +++++++++++++++ .../libstagefright/wifi-display/source/Converter.h | 5 +++++ .../wifi-display/source/PlaybackSession.cpp | 10 +++++++++ .../wifi-display/source/WifiDisplaySource.cpp | 5 +++++ 11 files changed, 101 insertions(+), 5 deletions(-) diff --git a/media/libstagefright/wifi-display/ANetworkSession.cpp b/media/libstagefright/wifi-display/ANetworkSession.cpp index 465f4c4..23bb04e 100644 --- a/media/libstagefright/wifi-display/ANetworkSession.cpp +++ b/media/libstagefright/wifi-display/ANetworkSession.cpp @@ -104,6 +104,8 @@ private: AString mInBuffer; + int64_t mLastStallReportUs; + void notifyError(bool send, status_t err, const char *detail); void notify(NotificationReason reason); @@ -137,7 +139,8 @@ ANetworkSession::Session::Session( mSocket(s), mNotify(notify), mSawReceiveFailure(false), - mSawSendFailure(false) { + mSawSendFailure(false), + mLastStallReportUs(-1ll) { if (mState == CONNECTED) { struct sockaddr_in localAddr; socklen_t localAddrLen = sizeof(localAddr); @@ -508,11 +511,26 @@ status_t ANetworkSession::Session::writeMore() { mSawSendFailure = true; } -#if 0 +#if 1 int numBytesQueued; int res = ioctl(mSocket, SIOCOUTQ, &numBytesQueued); - if (res == 0 && numBytesQueued > 102400) { - ALOGI("numBytesQueued = %d", numBytesQueued); + if (res == 0 && numBytesQueued > 50 * 1024) { + if (numBytesQueued > 409600) { + ALOGW("!!! 
numBytesQueued = %d", numBytesQueued); + } + + int64_t nowUs = ALooper::GetNowUs(); + + if (mLastStallReportUs < 0ll + || nowUs > mLastStallReportUs + 500000ll) { + sp msg = mNotify->dup(); + msg->setInt32("sessionID", mSessionID); + msg->setInt32("reason", kWhatNetworkStall); + msg->setSize("numBytesQueued", numBytesQueued); + msg->post(); + + mLastStallReportUs = nowUs; + } } #endif diff --git a/media/libstagefright/wifi-display/ANetworkSession.h b/media/libstagefright/wifi-display/ANetworkSession.h index c1acdcc..0d7cbd6 100644 --- a/media/libstagefright/wifi-display/ANetworkSession.h +++ b/media/libstagefright/wifi-display/ANetworkSession.h @@ -83,6 +83,7 @@ struct ANetworkSession : public RefBase { kWhatData, kWhatDatagram, kWhatBinaryData, + kWhatNetworkStall, }; protected: diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp index 105c642..e1e957a 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -325,6 +325,15 @@ void MediaSender::onSenderNotify(const sp &msg) { break; } + case kWhatNetworkStall: + { + size_t numBytesQueued; + CHECK(msg->findSize("numBytesQueued", &numBytesQueued)); + + notifyNetworkStall(numBytesQueued); + break; + } + default: TRESPASS(); } @@ -344,6 +353,13 @@ void MediaSender::notifyError(status_t err) { notify->post(); } +void MediaSender::notifyNetworkStall(size_t numBytesQueued) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatNetworkStall); + notify->setSize("numBytesQueued", numBytesQueued); + notify->post(); +} + status_t MediaSender::packetizeAccessUnit( size_t trackIndex, sp accessUnit, diff --git a/media/libstagefright/wifi-display/MediaSender.h b/media/libstagefright/wifi-display/MediaSender.h index 9a50f9a..447abf7 100644 --- a/media/libstagefright/wifi-display/MediaSender.h +++ b/media/libstagefright/wifi-display/MediaSender.h @@ -42,6 +42,7 @@ struct MediaSender : public AHandler { enum { kWhatInitDone, kWhatError, + kWhatNetworkStall, }; MediaSender( @@ -113,6 +114,7 @@ private: void notifyInitDone(status_t err); void notifyError(status_t err); + void notifyNetworkStall(size_t numBytesQueued); status_t packetizeAccessUnit( size_t trackIndex, diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp index b60853d..8cd712d 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -530,6 +530,18 @@ void RTPSender::onNetNotify(bool isRTP, const sp &msg) { } break; } + + case ANetworkSession::kWhatNetworkStall: + { + size_t numBytesQueued; + CHECK(msg->findSize("numBytesQueued", &numBytesQueued)); + + notifyNetworkStall(numBytesQueued); + break; + } + + default: + TRESPASS(); } } @@ -699,5 +711,12 @@ void RTPSender::notifyError(status_t err) { notify->post(); } +void RTPSender::notifyNetworkStall(size_t numBytesQueued) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatNetworkStall); + notify->setSize("numBytesQueued", numBytesQueued); + notify->post(); +} + } // namespace android diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h index 2b683a4..83c6223 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.h +++ b/media/libstagefright/wifi-display/rtp/RTPSender.h @@ -36,6 +36,7 @@ struct RTPSender : public RTPBase, public AHandler { enum { kWhatInitDone, kWhatError, + kWhatNetworkStall, }; 
RTPSender( const sp &netSession, @@ -103,6 +104,7 @@ private: void notifyInitDone(status_t err); void notifyError(status_t err); + void notifyNetworkStall(size_t numBytesQueued); DISALLOW_EVIL_CONSTRUCTORS(RTPSender); }; diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index 0d2e347..d635c3a 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -319,7 +319,7 @@ void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { CHECK(mTimeOffsetValid); - int64_t latencyUs = 300000ll; // 300ms by default + int64_t latencyUs = 200000ll; // 200ms by default char val[PROPERTY_VALUE_MAX]; if (property_get("media.wfd-sink.latency", val, NULL)) { diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index 2861aa9..bb8c387 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -55,6 +55,7 @@ Converter::Converter( ,mInSilentMode(false) #endif ,mPrevVideoBitrate(-1) + ,mNumFramesToDrop(0) { AString mime; CHECK(mInputFormat->findString("mime", &mime)); @@ -327,6 +328,13 @@ void Converter::onMessageReceived(const sp &msg) { sp accessUnit; CHECK(msg->findBuffer("accessUnit", &accessUnit)); + if (mIsVideo && mNumFramesToDrop) { + --mNumFramesToDrop; + ALOGI("dropping frame."); + ReleaseMediaBufferReference(accessUnit); + break; + } + #if 0 void *mbuf; if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf) @@ -422,6 +430,12 @@ void Converter::onMessageReceived(const sp &msg) { break; } + case kWhatDropAFrame: + { + ++mNumFramesToDrop; + break; + } + default: TRESPASS(); } @@ -690,4 +704,8 @@ void Converter::requestIDRFrame() { (new AMessage(kWhatRequestIDRFrame, id()))->post(); } +void Converter::dropAFrame() { + (new AMessage(kWhatDropAFrame, id()))->post(); +} + } // namespace android diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h index 57802bd..a418f69 100644 --- a/media/libstagefright/wifi-display/source/Converter.h +++ b/media/libstagefright/wifi-display/source/Converter.h @@ -51,6 +51,8 @@ struct Converter : public AHandler { void requestIDRFrame(); + void dropAFrame(); + enum { kWhatAccessUnit, kWhatEOS, @@ -63,6 +65,7 @@ struct Converter : public AHandler { kWhatShutdown, kWhatMediaPullerNotify, kWhatEncoderActivity, + kWhatDropAFrame, }; void shutdownAsync(); @@ -102,6 +105,8 @@ private: int32_t mPrevVideoBitrate; + int32_t mNumFramesToDrop; + status_t initEncoder(); void releaseEncoder(); diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index ea195b3..94cb2a4 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -515,6 +515,16 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( } } else if (what == MediaSender::kWhatError) { notifySessionDead(); + } else if (what == MediaSender::kWhatNetworkStall) { + size_t numBytesQueued; + CHECK(msg->findSize("numBytesQueued", &numBytesQueued)); + + if (mVideoTrackIndex >= 0) { + const sp &videoTrack = + mTracks.valueFor(mVideoTrackIndex); + + videoTrack->converter()->dropAFrame(); + } } else { TRESPASS(); } diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp 
b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index de66bde..c8798c6 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -272,6 +272,11 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { break; } + case ANetworkSession::kWhatNetworkStall: + { + break; + } + default: TRESPASS(); } -- cgit v1.1 From 441a78d5e224e0d67f9b52fa9adc795c6944159b Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Fri, 8 Feb 2013 10:18:35 -0800 Subject: Implementing MediaDrm APIs Change-Id: I9ff8eeb7d0c383b5c0c68cd54eb54ce7d2d22fe6 --- drm/mediadrm/plugins/mock/Android.mk | 38 ++ drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp | 417 ++++++++++++++++ drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h | 122 +++++ include/media/IDrm.h | 89 ++++ include/media/IMediaPlayerService.h | 2 + media/libmedia/Android.mk | 1 + media/libmedia/IDrm.cpp | 551 +++++++++++++++++++++ media/libmedia/IMediaPlayerService.cpp | 15 + media/libmediaplayerservice/Android.mk | 2 + media/libmediaplayerservice/Drm.cpp | 423 ++++++++++++++++ media/libmediaplayerservice/Drm.h | 100 ++++ media/libmediaplayerservice/MediaPlayerService.cpp | 5 + media/libmediaplayerservice/MediaPlayerService.h | 1 + media/libmediaplayerservice/SharedLibrary.cpp | 49 ++ media/libmediaplayerservice/SharedLibrary.h | 39 ++ 15 files changed, 1854 insertions(+) create mode 100644 drm/mediadrm/plugins/mock/Android.mk create mode 100644 drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp create mode 100644 drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h create mode 100644 include/media/IDrm.h create mode 100644 media/libmedia/IDrm.cpp create mode 100644 media/libmediaplayerservice/Drm.cpp create mode 100644 media/libmediaplayerservice/Drm.h create mode 100644 media/libmediaplayerservice/SharedLibrary.cpp create mode 100644 media/libmediaplayerservice/SharedLibrary.h diff --git a/drm/mediadrm/plugins/mock/Android.mk b/drm/mediadrm/plugins/mock/Android.mk new file mode 100644 index 0000000..a056cd8 --- /dev/null +++ b/drm/mediadrm/plugins/mock/Android.mk @@ -0,0 +1,38 @@ +# +# Copyright (C) 2013 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + MockDrmCryptoPlugin.cpp + +LOCAL_MODULE := libmockdrmcryptoplugin + +LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_SHARED_LIBRARIES)/mediadrm + +LOCAL_SHARED_LIBRARIES := \ + libutils + +LOCAL_C_INCLUDES += \ + $(TOP)/frameworks/av/include \ + $(TOP)/frameworks/native/include/media + +# Set the following flag to enable the decryption passthru flow +#LOCAL_CFLAGS += -DENABLE_PASSTHRU_DECRYPTION + +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp new file mode 100644 index 0000000..91f5c9c --- /dev/null +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp @@ -0,0 +1,417 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MockDrmCryptoPlugin" +#include + + +#include "drm/DrmAPI.h" +#include "MockDrmCryptoPlugin.h" + +using namespace android; + +// Shared library entry point +DrmFactory *createDrmFactory() +{ + return new MockDrmFactory(); +} + +// Shared library entry point +CryptoFactory *createCryptoFactory() +{ + return new MockCryptoFactory(); +} + +const uint8_t mock_uuid[16] = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10}; + +namespace android { + + // MockDrmFactory + bool MockDrmFactory::isCryptoSchemeSupported(const uint8_t uuid[16]) + { + return (!memcmp(uuid, mock_uuid, sizeof(uuid))); + } + + status_t MockDrmFactory::createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin) + { + *plugin = new MockDrmPlugin(); + return OK; + } + + // MockCryptoFactory + bool MockCryptoFactory::isCryptoSchemeSupported(const uint8_t uuid[16]) const + { + return (!memcmp(uuid, mock_uuid, sizeof(uuid))); + } + + status_t MockCryptoFactory::createPlugin(const uint8_t uuid[16], const void *data, + size_t size, CryptoPlugin **plugin) + { + *plugin = new MockCryptoPlugin(); + return OK; + } + + + // MockDrmPlugin methods + + status_t MockDrmPlugin::openSession(Vector &sessionId) + { + const size_t kSessionIdSize = 8; + + Mutex::Autolock lock(mLock); + for (size_t i = 0; i < kSessionIdSize / sizeof(long); i++) { + long r = random(); + sessionId.appendArray((uint8_t *)&r, sizeof(long)); + } + mSessions.add(sessionId); + + ALOGD("MockDrmPlugin::openSession() -> %s", vectorToString(sessionId).string()); + return OK; + } + + status_t MockDrmPlugin::closeSession(Vector const &sessionId) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::closeSession(%s)", vectorToString(sessionId).string()); + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + mSessions.removeAt(index); + return OK; + } + + + status_t MockDrmPlugin::getLicenseRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, LicenseType licenseType, + KeyedVector const 
&optionalParameters, + Vector &request, String8 &defaultUrl) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::getLicenseRequest(sessionId=%s, initData=%s, mimeType=%s" + ", licenseType=%d, optionalParameters=%s))", + vectorToString(sessionId).string(), vectorToString(initData).string(), mimeType.string(), + licenseType, stringMapToString(optionalParameters).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] initData -> mock-initdata + // string mimeType -> mock-mimetype + // string licenseType -> mock-licensetype + // string optionalParameters -> mock-optparams formatted as {key1,value1},{key2,value2} + + mByteArrayProperties.add(String8("mock-initdata"), initData); + mStringProperties.add(String8("mock-mimetype"), mimeType); + + String8 licenseTypeStr; + licenseTypeStr.appendFormat("%d", (int)licenseType); + mStringProperties.add(String8("mock-licensetype"), licenseTypeStr); + + String8 params; + for (size_t i = 0; i < optionalParameters.size(); i++) { + params.appendFormat("%s{%s,%s}", i ? "," : "", + optionalParameters.keyAt(i).string(), + optionalParameters.valueAt(i).string()); + } + mStringProperties.add(String8("mock-optparams"), params); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-request -> request + // string mock-default-url -> defaultUrl + + index = mByteArrayProperties.indexOfKey(String8("mock-request")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + request = mByteArrayProperties.valueAt(index); + } + + index = mStringProperties.indexOfKey(String8("mock-defaultUrl")); + if (index < 0) { + ALOGD("Missing 'mock-defaultUrl' parameter for mock"); + return BAD_VALUE; + } else { + defaultUrl = mStringProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::provideLicenseResponse(Vector const &sessionId, + Vector const &response) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::provideLicenseResponse(sessionId=%s, response=%s)", + vectorToString(sessionId).string(), vectorToString(response).string()); + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + if (response.size() == 0) { + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] response -> mock-response + + mByteArrayProperties.add(String8("mock-response"), response); + + return OK; + } + + status_t MockDrmPlugin::removeLicense(Vector const &sessionId) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::removeLicense(sessionId=%s)", + vectorToString(sessionId).string()); + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + return OK; + } + + status_t MockDrmPlugin::queryLicenseStatus(Vector const &sessionId, + KeyedVector &infoMap) const + { + ALOGD("MockDrmPlugin::queryLicenseStatus(sessionId=%s)", + vectorToString(sessionId).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + infoMap.add(String8("purchaseDuration"), String8("1000")); + infoMap.add(String8("licenseDuration"), String8("100")); + return OK; + } + + status_t MockDrmPlugin::getProvisionRequest(Vector &request, + String8 &defaultUrl) + 
{ + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::getProvisionRequest()"); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-request -> request + // string mock-default-url -> defaultUrl + + ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-request")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + request = mByteArrayProperties.valueAt(index); + } + + index = mStringProperties.indexOfKey(String8("mock-defaultUrl")); + if (index < 0) { + ALOGD("Missing 'mock-defaultUrl' parameter for mock"); + return BAD_VALUE; + } else { + defaultUrl = mStringProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::provideProvisionResponse(Vector const &response) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::provideProvisionResponse(%s)", + vectorToString(response).string()); + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] response -> mock-response + + mByteArrayProperties.add(String8("mock-response"), response); + return OK; + } + + status_t MockDrmPlugin::getSecureStops(List > &secureStops) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::getSecureStops()"); + const uint8_t ss1[] = {0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89}; + const uint8_t ss2[] = {0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99}; + + Vector vec; + vec.appendArray(ss1, sizeof(ss1)); + secureStops.push_back(vec); + + vec.clear(); + vec.appendArray(ss2, sizeof(ss2)); + secureStops.push_back(vec); + return OK; + } + + status_t MockDrmPlugin::releaseSecureStops(Vector const &ssRelease) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::releaseSecureStops(%s)", + vectorToString(ssRelease).string()); + return OK; + } + + status_t MockDrmPlugin::getPropertyString(String8 const &name, String8 &value) const + { + ALOGD("MockDrmPlugin::getPropertyString(name=%s)", name.string()); + ssize_t index = mStringProperties.indexOfKey(name); + if (index < 0) { + ALOGD("no property for '%s'", name.string()); + return BAD_VALUE; + } + value = mStringProperties.valueAt(index); + return OK; + } + + status_t MockDrmPlugin::getPropertyByteArray(String8 const &name, + Vector &value) const + { + ALOGD("MockDrmPlugin::getPropertyByteArray(name=%s)", name.string()); + ssize_t index = mByteArrayProperties.indexOfKey(name); + if (index < 0) { + ALOGD("no property for '%s'", name.string()); + return BAD_VALUE; + } + value = mByteArrayProperties.valueAt(index); + return OK; + } + + status_t MockDrmPlugin::setPropertyString(String8 const &name, + String8 const &value) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::setPropertyString(name=%s, value=%s)", + name.string(), value.string()); + mStringProperties.add(name, value); + return OK; + } + + status_t MockDrmPlugin::setPropertyByteArray(String8 const &name, + Vector const &value) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::setPropertyByteArray(name=%s, value=%s)", + name.string(), vectorToString(value).string()); + mByteArrayProperties.add(name, value); + return OK; + } + + ssize_t MockDrmPlugin::findSession(Vector const &sessionId) const + { + ALOGD("findSession: nsessions=%d, size=%d", mSessions.size(), sessionId.size()); + for (size_t i = 0; i < mSessions.size(); ++i) { + if (memcmp(mSessions[i].array(), sessionId.array(), sessionId.size()) == 0) { + return i; + } + } + return kNotFound; + } + + // Conversion utilities + String8 
MockDrmPlugin::vectorToString(Vector const &vector) const + { + return arrayToString(vector.array(), vector.size()); + } + + String8 MockDrmPlugin::arrayToString(uint8_t const *array, size_t len) const + { + String8 result("{ "); + for (size_t i = 0; i < len; i++) { + result.appendFormat("0x%02x ", array[i]); + } + result += "}"; + return result; + } + + String8 MockDrmPlugin::stringMapToString(KeyedVector map) const + { + String8 result("{ "); + for (size_t i = 0; i < map.size(); i++) { + result.appendFormat("%s{name=%s, value=%s}", i > 0 ? ", " : "", + map.keyAt(i).string(), map.valueAt(i).string()); + } + return result + " }"; + } + + bool operator<(Vector const &lhs, Vector const &rhs) { + return lhs.size() < rhs.size() || (memcmp(lhs.array(), rhs.array(), lhs.size()) < 0); + } + + // + // Crypto Plugin + // + + bool MockCryptoPlugin::requiresSecureDecoderComponent(const char *mime) const + { + ALOGD("MockCryptoPlugin::requiresSecureDecoderComponent(mime=%s)", mime); + return false; + } + + ssize_t + MockCryptoPlugin::decrypt(bool secure, const uint8_t key[16], const uint8_t iv[16], + Mode mode, const void *srcPtr, const SubSample *subSamples, + size_t numSubSamples, void *dstPtr, AString *errorDetailMsg) + { + ALOGD("MockCryptoPlugin::decrypt(secure=%d, key=%s, iv=%s, mode=%d, src=%p, " + "subSamples=%s, dst=%p)", + (int)secure, + arrayToString(key, sizeof(key)).string(), + arrayToString(iv, sizeof(iv)).string(), + (int)mode, srcPtr, + subSamplesToString(subSamples, numSubSamples).string(), + dstPtr); + return OK; + } + + // Conversion utilities + String8 MockCryptoPlugin::arrayToString(uint8_t const *array, size_t len) const + { + String8 result("{ "); + for (size_t i = 0; i < len; i++) { + result.appendFormat("0x%02x ", array[i]); + } + result += "}"; + return result; + } + + String8 MockCryptoPlugin::subSamplesToString(SubSample const *subSamples, + size_t numSubSamples) const + { + String8 result; + for (size_t i = 0; i < numSubSamples; i++) { + result.appendFormat("[%d] {clear:%d, encrypted:%d} ", i, + subSamples[i].mNumBytesOfClearData, + subSamples[i].mNumBytesOfEncryptedData); + } + return result; + } + +}; diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h new file mode 100644 index 0000000..d46a127 --- /dev/null +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h @@ -0,0 +1,122 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "drm/DrmAPI.h" +#include "hardware/CryptoAPI.h" + +extern "C" { + android::DrmFactory *createDrmFactory(); + android::CryptoFactory *createCryptoFactory(); +} + +namespace android { + + class MockDrmFactory : public DrmFactory { + public: + MockDrmFactory() {} + virtual ~MockDrmFactory() {} + + bool isCryptoSchemeSupported(const uint8_t uuid[16]); + status_t createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin); + }; + + class MockCryptoFactory : public CryptoFactory { + public: + MockCryptoFactory() {} + virtual ~MockCryptoFactory() {} + + bool isCryptoSchemeSupported(const uint8_t uuid[16]) const; + status_t createPlugin( + const uint8_t uuid[16], const void *data, size_t size, + CryptoPlugin **plugin); + }; + + + + class MockDrmPlugin : public DrmPlugin { + public: + MockDrmPlugin() {} + virtual ~MockDrmPlugin() {} + + // from DrmPlugin + status_t openSession(Vector &sessionId); + status_t closeSession(Vector const &sessionId); + + status_t + getLicenseRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, LicenseType licenseType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl); + + status_t provideLicenseResponse(Vector const &sessionId, + Vector const &response); + + status_t removeLicense(Vector const &sessionId); + + status_t + queryLicenseStatus(Vector const &sessionId, + KeyedVector &infoMap) const; + + status_t getProvisionRequest(Vector &request, + String8 &defaultUrl); + + status_t provideProvisionResponse(Vector const &response); + + status_t getSecureStops(List > &secureStops); + status_t releaseSecureStops(Vector const &ssRelease); + + status_t getPropertyString(String8 const &name, String8 &value ) const; + status_t getPropertyByteArray(String8 const &name, + Vector &value ) const; + + status_t setPropertyString(String8 const &name, + String8 const &value ); + status_t setPropertyByteArray(String8 const &name, + Vector const &value ); + + private: + String8 vectorToString(Vector const &vector) const; + String8 arrayToString(uint8_t const *array, size_t len) const; + String8 stringMapToString(KeyedVector map) const; + + SortedVector > mSessions; + + static const ssize_t kNotFound = -1; + ssize_t findSession(Vector const &sessionId) const; + + Mutex mLock; + KeyedVector mStringProperties; + KeyedVector > mByteArrayProperties; + }; + + + class MockCryptoPlugin : public CryptoPlugin { + + bool requiresSecureDecoderComponent(const char *mime) const; + + ssize_t decrypt(bool secure, + const uint8_t key[16], const uint8_t iv[16], + Mode mode, const void *srcPtr, + const SubSample *subSamples, size_t numSubSamples, + void *dstPtr, AString *errorDetailMsg); + private: + String8 subSamplesToString(CryptoPlugin::SubSample const *subSamples, size_t numSubSamples) const; + String8 arrayToString(uint8_t const *array, size_t len) const; + }; +}; diff --git a/include/media/IDrm.h b/include/media/IDrm.h new file mode 100644 index 0000000..38e2378 --- /dev/null +++ b/include/media/IDrm.h @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include + +#ifndef ANDROID_IDRM_H_ + +#define ANDROID_IDRM_H_ + +namespace android { + +struct AString; + +struct IDrm : public IInterface { + DECLARE_META_INTERFACE(Drm); + + virtual status_t initCheck() const = 0; + + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) = 0; + + virtual status_t createPlugin(const uint8_t uuid[16]) = 0; + + virtual status_t destroyPlugin() = 0; + + virtual status_t openSession(Vector &sessionId) = 0; + + virtual status_t closeSession(Vector const &sessionId) = 0; + + virtual status_t + getLicenseRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, DrmPlugin::LicenseType licenseType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl) = 0; + + virtual status_t provideLicenseResponse(Vector const &sessionId, + Vector const &response) = 0; + + virtual status_t removeLicense(Vector const &sessionId) = 0; + + virtual status_t queryLicenseStatus(Vector const &sessionId, + KeyedVector &infoMap) const = 0; + + virtual status_t getProvisionRequest(Vector &request, + String8 &defaulUrl) = 0; + + virtual status_t provideProvisionResponse(Vector const &response) = 0; + + virtual status_t getSecureStops(List > &secureStops) = 0; + + virtual status_t releaseSecureStops(Vector const &ssRelease) = 0; + + virtual status_t getPropertyString(String8 const &name, String8 &value ) const = 0; + virtual status_t getPropertyByteArray(String8 const &name, + Vector &value ) const = 0; + virtual status_t setPropertyString(String8 const &name, + String8 const &value ) const = 0; + virtual status_t setPropertyByteArray(String8 const &name, + Vector const &value ) const = 0; + +private: + DISALLOW_EVIL_CONSTRUCTORS(IDrm); +}; + +struct BnDrm : public BnInterface { + virtual status_t onTransact( + uint32_t code, const Parcel &data, Parcel *reply, + uint32_t flags = 0); +}; + +} // namespace android + +#endif // ANDROID_IDRM_H_ + diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h index b29d3c7..182d563 100644 --- a/include/media/IMediaPlayerService.h +++ b/include/media/IMediaPlayerService.h @@ -32,6 +32,7 @@ namespace android { struct ICrypto; +struct IDrm; struct IHDCP; class IMediaRecorder; class IOMX; @@ -52,6 +53,7 @@ public: virtual sp decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) = 0; virtual sp getOMX() = 0; virtual sp makeCrypto() = 0; + virtual sp makeDrm() = 0; virtual sp makeHDCP(bool createEncryptionModule) = 0; // Connects to a remote display. 
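With makeDrm() exposed on IMediaPlayerService, a client can obtain the new IDrm binder interface and bind it to a crypto scheme before opening sessions. A rough usage sketch with error handling trimmed, assuming the existing "media.player" service name (not part of this patch) and treating the helper name and uuid value as placeholders:

    #include <binder/IServiceManager.h>
    #include <media/IDrm.h>
    #include <media/IMediaPlayerService.h>
    #include <utils/String16.h>

    using namespace android;

    static sp<IDrm> makeDrmForScheme(const uint8_t uuid[16]) {
        sp<IBinder> binder =
            defaultServiceManager()->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        sp<IDrm> drm = service->makeDrm();               // added by this change
        if (drm == NULL || !drm->isCryptoSchemeSupported(uuid)) {
            return NULL;                                 // no plugin handles this scheme
        }
        if (drm->createPlugin(uuid) != OK) {
            return NULL;
        }
        return drm;                                      // ready for openSession(), etc.
    }

On the service side, the Drm object resolves the scheme to a plugin shared library under /vendor/lib/mediadrm (see findFactoryForScheme() further below) and proxies every IDrm call to that plugin.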
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index 6b48991..1ada9c3 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -19,6 +19,7 @@ LOCAL_SRC_FILES:= \ IAudioTrack.cpp \ IAudioRecord.cpp \ ICrypto.cpp \ + IDrm.cpp \ IHDCP.cpp \ AudioRecord.cpp \ AudioSystem.cpp \ diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp new file mode 100644 index 0000000..3b13ec6 --- /dev/null +++ b/media/libmedia/IDrm.cpp @@ -0,0 +1,551 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IDrm" +#include + +#include +#include +#include +#include +#include + +namespace android { + +enum { + INIT_CHECK = IBinder::FIRST_CALL_TRANSACTION, + IS_CRYPTO_SUPPORTED, + CREATE_PLUGIN, + DESTROY_PLUGIN, + OPEN_SESSION, + CLOSE_SESSION, + GET_LICENSE_REQUEST, + PROVIDE_LICENSE_RESPONSE, + REMOVE_LICENSE, + QUERY_LICENSE_STATUS, + GET_PROVISION_REQUEST, + PROVIDE_PROVISION_RESPONSE, + GET_SECURE_STOPS, + RELEASE_SECURE_STOPS, + GET_PROPERTY_STRING, + GET_PROPERTY_BYTE_ARRAY, + SET_PROPERTY_STRING, + SET_PROPERTY_BYTE_ARRAY +}; + +struct BpDrm : public BpInterface { + BpDrm(const sp &impl) + : BpInterface(impl) { + } + + virtual status_t initCheck() const { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + remote()->transact(INIT_CHECK, data, &reply); + + return reply.readInt32(); + } + + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + data.write(uuid, 16); + remote()->transact(IS_CRYPTO_SUPPORTED, data, &reply); + + return reply.readInt32() != 0; + } + + virtual status_t createPlugin(const uint8_t uuid[16]) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + data.write(uuid, 16); + + remote()->transact(CREATE_PLUGIN, data, &reply); + + return reply.readInt32(); + } + + virtual status_t destroyPlugin() { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + remote()->transact(DESTROY_PLUGIN, data, &reply); + + return reply.readInt32(); + } + + virtual status_t openSession(Vector &sessionId) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + remote()->transact(OPEN_SESSION, data, &reply); + uint32_t size = reply.readInt32(); + sessionId.insertAt((size_t)0, size); + reply.read(sessionId.editArray(), size); + + return reply.readInt32(); + } + + virtual status_t closeSession(Vector const &sessionId) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeInt32(sessionId.size()); + data.write(sessionId.array(), sessionId.size()); + remote()->transact(CLOSE_SESSION, data, &reply); + + return reply.readInt32(); + } + + virtual status_t + getLicenseRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, DrmPlugin::LicenseType licenseType, + KeyedVector const 
&optionalParameters, + Vector &request, String8 &defaultUrl) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeInt32(sessionId.size()); + data.write(sessionId.array(), sessionId.size()); + + data.writeInt32(initData.size()); + data.write(initData.array(), initData.size()); + + data.writeString8(mimeType); + data.writeInt32((uint32_t)licenseType); + + data.writeInt32(optionalParameters.size()); + for (size_t i = 0; i < optionalParameters.size(); ++i) { + data.writeString8(optionalParameters.keyAt(i)); + data.writeString8(optionalParameters.valueAt(i)); + } + remote()->transact(GET_LICENSE_REQUEST, data, &reply); + + uint32_t len = reply.readInt32(); + request.insertAt((size_t)0, len); + reply.read(request.editArray(), len); + defaultUrl = reply.readString8(); + + return reply.readInt32(); + } + + virtual status_t provideLicenseResponse(Vector const &sessionId, + Vector const &response) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeInt32(sessionId.size()); + data.write(sessionId.array(), sessionId.size()); + data.writeInt32(response.size()); + data.write(response.array(), response.size()); + remote()->transact(PROVIDE_LICENSE_RESPONSE, data, &reply); + + return reply.readInt32(); + } + + virtual status_t removeLicense(Vector const &sessionId) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeInt32(sessionId.size()); + data.write(sessionId.array(), sessionId.size()); + remote()->transact(REMOVE_LICENSE, data, &reply); + + return reply.readInt32(); + } + + virtual status_t queryLicenseStatus(Vector const &sessionId, + KeyedVector &infoMap) const { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeInt32(sessionId.size()); + data.write(sessionId.array(), sessionId.size()); + + remote()->transact(QUERY_LICENSE_STATUS, data, &reply); + + infoMap.clear(); + size_t count = reply.readInt32(); + for (size_t i = 0; i < count; i++) { + String8 key = reply.readString8(); + String8 value = reply.readString8(); + infoMap.add(key, value); + } + return reply.readInt32(); + } + + virtual status_t getProvisionRequest(Vector &request, + String8 &defaultUrl) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + remote()->transact(GET_PROVISION_REQUEST, data, &reply); + + uint32_t len = reply.readInt32(); + request.insertAt((size_t)0, len); + reply.read(request.editArray(), len); + defaultUrl = reply.readString8(); + + return reply.readInt32(); + } + + virtual status_t provideProvisionResponse(Vector const &response) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeInt32(response.size()); + data.write(response.array(), response.size()); + remote()->transact(PROVIDE_PROVISION_RESPONSE, data, &reply); + + return reply.readInt32(); + } + + virtual status_t getSecureStops(List > &secureStops) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + remote()->transact(GET_SECURE_STOPS, data, &reply); + + secureStops.clear(); + uint32_t count = reply.readInt32(); + for (size_t i = 0; i < count; i++) { + Vector secureStop; + uint32_t len = reply.readInt32(); + secureStop.insertAt((size_t)0, len); + reply.read(secureStop.editArray(), len); + secureStops.push_back(secureStop); + } + return reply.readInt32(); + } + + virtual status_t releaseSecureStops(Vector const &ssRelease) { + Parcel data, reply; + 
data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeInt32(ssRelease.size()); + data.write(ssRelease.array(), ssRelease.size()); + remote()->transact(RELEASE_SECURE_STOPS, data, &reply); + + return reply.readInt32(); + } + + virtual status_t getPropertyString(String8 const &name, String8 &value) const { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeString8(name); + remote()->transact(GET_PROPERTY_STRING, data, &reply); + + value = reply.readString8(); + return reply.readInt32(); + } + + virtual status_t getPropertyByteArray(String8 const &name, Vector &value) const { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeString8(name); + remote()->transact(GET_PROPERTY_BYTE_ARRAY, data, &reply); + + uint32_t len = reply.readInt32(); + value.insertAt((size_t)0, len); + reply.read(value.editArray(), len); + + return reply.readInt32(); + } + + virtual status_t setPropertyString(String8 const &name, String8 const &value) const { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeString8(name); + data.writeString8(value); + remote()->transact(SET_PROPERTY_STRING, data, &reply); + + return reply.readInt32(); + } + + virtual status_t setPropertyByteArray(String8 const &name, + Vector const &value) const { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeString8(name); + data.writeInt32(value.size()); + data.write(value.array(), value.size()); + remote()->transact(SET_PROPERTY_BYTE_ARRAY, data, &reply); + + return reply.readInt32(); + } + + +private: + DISALLOW_EVIL_CONSTRUCTORS(BpDrm); +}; + +IMPLEMENT_META_INTERFACE(Drm, "android.drm.IDrm"); + +//////////////////////////////////////////////////////////////////////////////// + +status_t BnDrm::onTransact( + uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) { + switch (code) { + case INIT_CHECK: + { + CHECK_INTERFACE(IDrm, data, reply); + reply->writeInt32(initCheck()); + return OK; + } + + case IS_CRYPTO_SUPPORTED: + { + CHECK_INTERFACE(IDrm, data, reply); + uint8_t uuid[16]; + data.read(uuid, sizeof(uuid)); + reply->writeInt32(isCryptoSchemeSupported(uuid)); + return OK; + } + + case CREATE_PLUGIN: + { + CHECK_INTERFACE(IDrm, data, reply); + uint8_t uuid[16]; + data.read(uuid, sizeof(uuid)); + reply->writeInt32(createPlugin(uuid)); + return OK; + } + + case DESTROY_PLUGIN: + { + CHECK_INTERFACE(IDrm, data, reply); + reply->writeInt32(destroyPlugin()); + return OK; + } + + case OPEN_SESSION: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector sessionId; + status_t result = openSession(sessionId); + reply->writeInt32(sessionId.size()); + reply->write(sessionId.array(), sessionId.size()); + reply->writeInt32(result); + return OK; + } + + case CLOSE_SESSION: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector sessionId; + uint32_t size = data.readInt32(); + sessionId.insertAt((size_t)0, size); + data.read(sessionId.editArray(), size); + reply->writeInt32(closeSession(sessionId)); + return OK; + } + + case GET_LICENSE_REQUEST: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector sessionId; + uint32_t size = data.readInt32(); + sessionId.insertAt((size_t)0, size); + data.read(sessionId.editArray(), size); + + Vector initData; + size = data.readInt32(); + initData.insertAt((size_t)0, size); + data.read(initData.editArray(), size); + + String8 mimeType = data.readString8(); + DrmPlugin::LicenseType licenseType = 
(DrmPlugin::LicenseType)data.readInt32(); + + KeyedVector optionalParameters; + uint32_t count = data.readInt32(); + for (size_t i = 0; i < count; ++i) { + String8 key, value; + key = data.readString8(); + value = data.readString8(); + optionalParameters.add(key, value); + } + + Vector request; + String8 defaultUrl; + + status_t result = getLicenseRequest(sessionId, initData, + mimeType, licenseType, + optionalParameters, + request, defaultUrl); + reply->writeInt32(request.size()); + reply->write(request.array(), request.size()); + reply->writeString8(defaultUrl); + reply->writeInt32(result); + return OK; + } + + case PROVIDE_LICENSE_RESPONSE: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector sessionId; + uint32_t size = data.readInt32(); + sessionId.insertAt((size_t)0, size); + data.read(sessionId.editArray(), size); + Vector response; + size = data.readInt32(); + response.insertAt((size_t)0, size); + data.read(response.editArray(), size); + + reply->writeInt32(provideLicenseResponse(sessionId, response)); + return OK; + } + + case REMOVE_LICENSE: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector sessionId; + uint32_t size = data.readInt32(); + sessionId.insertAt((size_t)0, size); + data.read(sessionId.editArray(), size); + reply->writeInt32(removeLicense(sessionId)); + return OK; + } + + case QUERY_LICENSE_STATUS: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector sessionId; + uint32_t size = data.readInt32(); + sessionId.insertAt((size_t)0, size); + data.read(sessionId.editArray(), size); + KeyedVector infoMap; + + status_t result = queryLicenseStatus(sessionId, infoMap); + + size_t count = infoMap.size(); + reply->writeInt32(count); + for (size_t i = 0; i < count; ++i) { + reply->writeString8(infoMap.keyAt(i)); + reply->writeString8(infoMap.valueAt(i)); + } + reply->writeInt32(result); + return OK; + } + + case GET_PROVISION_REQUEST: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector request; + String8 defaultUrl; + status_t result = getProvisionRequest(request, defaultUrl); + reply->writeInt32(request.size()); + reply->write(request.array(), request.size()); + reply->writeString8(defaultUrl); + reply->writeInt32(result); + return OK; + } + + case PROVIDE_PROVISION_RESPONSE: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector response; + uint32_t size = data.readInt32(); + response.insertAt((size_t)0, size); + data.read(response.editArray(), size); + reply->writeInt32(provideProvisionResponse(response)); + + return OK; + } + + case GET_SECURE_STOPS: + { + CHECK_INTERFACE(IDrm, data, reply); + List > secureStops; + status_t result = getSecureStops(secureStops); + size_t count = secureStops.size(); + reply->writeInt32(count); + List >::iterator iter = secureStops.begin(); + while(iter != secureStops.end()) { + size_t size = iter->size(); + reply->writeInt32(size); + reply->write(iter->array(), iter->size()); + } + reply->writeInt32(result); + return OK; + } + + case RELEASE_SECURE_STOPS: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector ssRelease; + uint32_t size = data.readInt32(); + ssRelease.insertAt((size_t)0, size); + data.read(ssRelease.editArray(), size); + reply->writeInt32(releaseSecureStops(ssRelease)); + return OK; + } + + case GET_PROPERTY_STRING: + { + CHECK_INTERFACE(IDrm, data, reply); + String8 name = data.readString8(); + String8 value; + status_t result = getPropertyString(name, value); + reply->writeString8(value); + reply->writeInt32(result); + return OK; + } + + case GET_PROPERTY_BYTE_ARRAY: + { + CHECK_INTERFACE(IDrm, data, reply); + String8 name = 
data.readString8(); + Vector value; + status_t result = getPropertyByteArray(name, value); + reply->writeInt32(value.size()); + reply->write(value.array(), value.size()); + reply->writeInt32(result); + return OK; + } + + case SET_PROPERTY_STRING: + { + CHECK_INTERFACE(IDrm, data, reply); + String8 name = data.readString8(); + String8 value = data.readString8(); + reply->writeInt32(setPropertyString(name, value)); + return OK; + } + + case SET_PROPERTY_BYTE_ARRAY: + { + CHECK_INTERFACE(IDrm, data, reply); + String8 name = data.readString8(); + Vector value; + size_t count = data.readInt32(); + value.insertAt((size_t)0, count); + data.read(value.editArray(), count); + reply->writeInt32(setPropertyByteArray(name, value)); + return OK; + } + + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +} // namespace android + diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp index a95f4c9..e1ce5a9 100644 --- a/media/libmedia/IMediaPlayerService.cpp +++ b/media/libmedia/IMediaPlayerService.cpp @@ -21,6 +21,7 @@ #include #include #include +#include #include #include #include @@ -42,6 +43,7 @@ enum { CREATE_METADATA_RETRIEVER, GET_OMX, MAKE_CRYPTO, + MAKE_DRM, MAKE_HDCP, ADD_BATTERY_DATA, PULL_BATTERY_DATA, @@ -123,6 +125,13 @@ public: return interface_cast(reply.readStrongBinder()); } + virtual sp makeDrm() { + Parcel data, reply; + data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); + remote()->transact(MAKE_DRM, data, &reply); + return interface_cast(reply.readStrongBinder()); + } + virtual sp makeHDCP(bool createEncryptionModule) { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); @@ -225,6 +234,12 @@ status_t BnMediaPlayerService::onTransact( reply->writeStrongBinder(crypto->asBinder()); return NO_ERROR; } break; + case MAKE_DRM: { + CHECK_INTERFACE(IMediaPlayerService, data, reply); + sp drm = makeDrm(); + reply->writeStrongBinder(drm->asBinder()); + return NO_ERROR; + } break; case MAKE_HDCP: { CHECK_INTERFACE(IMediaPlayerService, data, reply); bool createEncryptionModule = data.readInt32(); diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk index 48f48e4..2a6f3c7 100644 --- a/media/libmediaplayerservice/Android.mk +++ b/media/libmediaplayerservice/Android.mk @@ -9,6 +9,7 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ ActivityManager.cpp \ Crypto.cpp \ + Drm.cpp \ HDCP.cpp \ MediaPlayerFactory.cpp \ MediaPlayerService.cpp \ @@ -17,6 +18,7 @@ LOCAL_SRC_FILES:= \ MidiFile.cpp \ MidiMetadataRetriever.cpp \ RemoteDisplay.cpp \ + SharedLibrary.cpp \ StagefrightPlayer.cpp \ StagefrightRecorder.cpp \ TestPlayerStub.cpp \ diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp new file mode 100644 index 0000000..6ac7530 --- /dev/null +++ b/media/libmediaplayerservice/Drm.cpp @@ -0,0 +1,423 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "Drm" +#include + +#include +#include + +#include "Drm.h" + +#include +#include +#include +#include +#include + +namespace android { + +KeyedVector, String8> Drm::mUUIDToLibraryPathMap; +KeyedVector > Drm::mLibraryPathToOpenLibraryMap; +Mutex Drm::mMapLock; + +static bool operator<(const Vector &lhs, const Vector &rhs) { + if (lhs.size() < rhs.size()) { + return true; + } else if (lhs.size() > rhs.size()) { + return false; + } + + return memcmp((void *)lhs.array(), (void *)rhs.array(), rhs.size()) < 0; +} + +Drm::Drm() + : mInitCheck(NO_INIT), + mFactory(NULL), + mPlugin(NULL) { +} + +Drm::~Drm() { + delete mPlugin; + mPlugin = NULL; + closeFactory(); +} + +void Drm::closeFactory() { + delete mFactory; + mFactory = NULL; + mLibrary.clear(); +} + +status_t Drm::initCheck() const { + return mInitCheck; +} + + +/* + * Search the plugins directory for a plugin that supports the scheme + * specified by uuid + * + * If found: + * mLibrary holds a strong pointer to the dlopen'd library + * mFactory is set to the library's factory method + * mInitCheck is set to OK + * + * If not found: + * mLibrary is cleared and mFactory are set to NULL + * mInitCheck is set to an error (!OK) + */ +void Drm::findFactoryForScheme(const uint8_t uuid[16]) { + + closeFactory(); + + // lock static maps + Mutex::Autolock autoLock(mMapLock); + + // first check cache + Vector uuidVector; + uuidVector.appendArray(uuid, sizeof(uuid)); + ssize_t index = mUUIDToLibraryPathMap.indexOfKey(uuidVector); + if (index >= 0) { + if (loadLibraryForScheme(mUUIDToLibraryPathMap[index], uuid)) { + mInitCheck = OK; + return; + } else { + ALOGE("Failed to load from cached library path!"); + mInitCheck = ERROR_UNSUPPORTED; + return; + } + } + + // no luck, have to search + String8 dirPath("/vendor/lib/mediadrm"); + DIR* pDir = opendir(dirPath.string()); + + if (pDir == NULL) { + mInitCheck = ERROR_UNSUPPORTED; + ALOGE("Failed to open plugin directory %s", dirPath.string()); + return; + } + + + struct dirent* pEntry; + while ((pEntry = readdir(pDir))) { + + String8 pluginPath = dirPath + "/" + pEntry->d_name; + + if (pluginPath.getPathExtension() == ".so") { + + if (loadLibraryForScheme(pluginPath, uuid)) { + mUUIDToLibraryPathMap.add(uuidVector, pluginPath); + mInitCheck = OK; + closedir(pDir); + return; + } + } + } + + closedir(pDir); + + ALOGE("Failed to find drm plugin"); + mInitCheck = ERROR_UNSUPPORTED; +} + +bool Drm::loadLibraryForScheme(const String8 &path, const uint8_t uuid[16]) { + + // get strong pointer to open shared library + ssize_t index = mLibraryPathToOpenLibraryMap.indexOfKey(path); + if (index >= 0) { + mLibrary = mLibraryPathToOpenLibraryMap[index].promote(); + } else { + index = mLibraryPathToOpenLibraryMap.add(path, NULL); + } + + if (!mLibrary.get()) { + mLibrary = new SharedLibrary(path); + if (!*mLibrary) { + return false; + } + + mLibraryPathToOpenLibraryMap.replaceValueAt(index, mLibrary); + } + + typedef DrmFactory *(*CreateDrmFactoryFunc)(); + + CreateDrmFactoryFunc createDrmFactory = + (CreateDrmFactoryFunc)mLibrary->lookup("createDrmFactory"); + + if (createDrmFactory == NULL || + (mFactory = createDrmFactory()) == NULL || + !mFactory->isCryptoSchemeSupported(uuid)) { + closeFactory(); + return false; + } + return true; +} + +bool Drm::isCryptoSchemeSupported(const uint8_t uuid[16]) { + Mutex::Autolock autoLock(mLock); + + if (mFactory && mFactory->isCryptoSchemeSupported(uuid)) { + return true; + } + + findFactoryForScheme(uuid); + return (mInitCheck == OK); 
+} + +status_t Drm::createPlugin(const uint8_t uuid[16]) { + Mutex::Autolock autoLock(mLock); + + if (mPlugin != NULL) { + return -EINVAL; + } + + if (!mFactory || !mFactory->isCryptoSchemeSupported(uuid)) { + findFactoryForScheme(uuid); + } + + if (mInitCheck != OK) { + return mInitCheck; + } + + return mFactory->createDrmPlugin(uuid, &mPlugin); +} + +status_t Drm::destroyPlugin() { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + delete mPlugin; + mPlugin = NULL; + + return OK; +} + +status_t Drm::openSession(Vector &sessionId) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->openSession(sessionId); +} + +status_t Drm::closeSession(Vector const &sessionId) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->closeSession(sessionId); +} + +status_t Drm::getLicenseRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, DrmPlugin::LicenseType licenseType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->getLicenseRequest(sessionId, initData, mimeType, licenseType, + optionalParameters, request, defaultUrl); +} + +status_t Drm::provideLicenseResponse(Vector const &sessionId, + Vector const &response) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->provideLicenseResponse(sessionId, response); +} + +status_t Drm::removeLicense(Vector const &sessionId) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->removeLicense(sessionId); +} + +status_t Drm::queryLicenseStatus(Vector const &sessionId, + KeyedVector &infoMap) const { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->queryLicenseStatus(sessionId, infoMap); +} + +status_t Drm::getProvisionRequest(Vector &request, String8 &defaultUrl) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->getProvisionRequest(request, defaultUrl); +} + +status_t Drm::provideProvisionResponse(Vector const &response) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->provideProvisionResponse(response); +} + + +status_t Drm::getSecureStops(List > &secureStops) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->getSecureStops(secureStops); +} + +status_t Drm::releaseSecureStops(Vector const &ssRelease) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->releaseSecureStops(ssRelease); +} + +status_t Drm::getPropertyString(String8 const &name, String8 &value ) const { + Mutex::Autolock autoLock(mLock); + + if 
(mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    if (mPlugin == NULL) {
+        return -EINVAL;
+    }
+
+    return mPlugin->getPropertyString(name, value);
+}
+
+status_t Drm::getPropertyByteArray(String8 const &name, Vector<uint8_t> &value ) const {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    if (mPlugin == NULL) {
+        return -EINVAL;
+    }
+
+    return mPlugin->getPropertyByteArray(name, value);
+}
+
+status_t Drm::setPropertyString(String8 const &name, String8 const &value ) const {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    if (mPlugin == NULL) {
+        return -EINVAL;
+    }
+
+    return mPlugin->setPropertyString(name, value);
+}
+
+status_t Drm::setPropertyByteArray(String8 const &name,
+                                   Vector<uint8_t> const &value ) const {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+
+    if (mPlugin == NULL) {
+        return -EINVAL;
+    }
+
+    return mPlugin->setPropertyByteArray(name, value);
+}
+
+} // namespace android
diff --git a/media/libmediaplayerservice/Drm.h b/media/libmediaplayerservice/Drm.h
new file mode 100644
index 0000000..1b10958
--- /dev/null
+++ b/media/libmediaplayerservice/Drm.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef DRM_H_ + +#define DRM_H_ + +#include "SharedLibrary.h" + +#include +#include + +namespace android { + +struct DrmFactory; +struct DrmPlugin; + +struct Drm : public BnDrm { + Drm(); + virtual ~Drm(); + + virtual status_t initCheck() const; + + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]); + + virtual status_t createPlugin(const uint8_t uuid[16]); + + virtual status_t destroyPlugin(); + + virtual status_t openSession(Vector &sessionId); + + virtual status_t closeSession(Vector const &sessionId); + + virtual status_t + getLicenseRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, DrmPlugin::LicenseType licenseType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl); + + virtual status_t provideLicenseResponse(Vector const &sessionId, + Vector const &response); + + virtual status_t removeLicense(Vector const &sessionId); + + virtual status_t queryLicenseStatus(Vector const &sessionId, + KeyedVector &infoMap) const; + + virtual status_t getProvisionRequest(Vector &request, + String8 &defaulUrl); + + virtual status_t provideProvisionResponse(Vector const &response); + + virtual status_t getSecureStops(List > &secureStops); + + virtual status_t releaseSecureStops(Vector const &ssRelease); + + virtual status_t getPropertyString(String8 const &name, String8 &value ) const; + virtual status_t getPropertyByteArray(String8 const &name, + Vector &value ) const; + virtual status_t setPropertyString(String8 const &name, String8 const &value ) const; + virtual status_t setPropertyByteArray(String8 const &name, + Vector const &value ) const; + +private: + mutable Mutex mLock; + + status_t mInitCheck; + sp mLibrary; + DrmFactory *mFactory; + DrmPlugin *mPlugin; + + static KeyedVector, String8> mUUIDToLibraryPathMap; + static KeyedVector > mLibraryPathToOpenLibraryMap; + static Mutex mMapLock; + + void findFactoryForScheme(const uint8_t uuid[16]); + bool loadLibraryForScheme(const String8 &path, const uint8_t uuid[16]); + void closeFactory(); + + + DISALLOW_EVIL_CONSTRUCTORS(Drm); +}; + +} // namespace android + +#endif // CRYPTO_H_ diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index 16f1317..ec6ace1 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -72,6 +72,7 @@ #include #include "Crypto.h" +#include "Drm.h" #include "HDCP.h" #include "RemoteDisplay.h" @@ -285,6 +286,10 @@ sp MediaPlayerService::makeCrypto() { return new Crypto; } +sp MediaPlayerService::makeDrm() { + return new Drm; +} + sp MediaPlayerService::makeHDCP(bool createEncryptionModule) { return new HDCP(createEncryptionModule); } diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index 2d2a09d..82dc29b 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -249,6 +249,7 @@ public: virtual sp decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat); virtual sp getOMX(); virtual sp makeCrypto(); + virtual sp makeDrm(); virtual sp makeHDCP(bool createEncryptionModule); virtual sp listenForRemoteDisplay(const sp& client, diff --git a/media/libmediaplayerservice/SharedLibrary.cpp b/media/libmediaplayerservice/SharedLibrary.cpp new file mode 100644 index 0000000..178e15d --- /dev/null +++ 
b/media/libmediaplayerservice/SharedLibrary.cpp @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "Drm" +#include +#include + +#include + +#include "SharedLibrary.h" + +namespace android { + + SharedLibrary::SharedLibrary(const String8 &path) { + mLibHandle = dlopen(path.string(), RTLD_NOW); + } + + SharedLibrary::~SharedLibrary() { + if (mLibHandle != NULL) { + dlclose(mLibHandle); + mLibHandle = NULL; + } + } + + bool SharedLibrary::operator!() const { + return mLibHandle == NULL; + } + + void *SharedLibrary::lookup(const char *symbol) const { + if (!mLibHandle) { + return NULL; + } + return dlsym(mLibHandle, symbol); + } +}; diff --git a/media/libmediaplayerservice/SharedLibrary.h b/media/libmediaplayerservice/SharedLibrary.h new file mode 100644 index 0000000..5353642 --- /dev/null +++ b/media/libmediaplayerservice/SharedLibrary.h @@ -0,0 +1,39 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SHARED_LIBRARY_H_ +#define SHARED_LIBRARY_H_ + +#include +#include +#include + +namespace android { + class SharedLibrary : public RefBase { + public: + SharedLibrary(const String8 &path); + ~SharedLibrary(); + + bool operator!() const; + void *lookup(const char *symbol) const; + + private: + void *mLibHandle; + DISALLOW_EVIL_CONSTRUCTORS(SharedLibrary); + }; +}; + +#endif // SHARED_LIBRARY_H_ -- cgit v1.1 From c66f8788871b2ae2d240e0e16dd3bdc5c277ce31 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 13 Mar 2013 10:22:46 -0700 Subject: Adds audio support to DirectRenderer. 
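The key idea of the new AudioRenderer is to hand the AudioTrack only as much PCM as it
can accept right now, so AudioTrack::write() never blocks the handler's looper. A minimal
sketch of that write-budget calculation, assuming an already started track (the helper
name and variables below are illustrative, not part of this patch):

    #include <media/AudioTrack.h>
    #include <utils/Errors.h>

    using namespace android;

    // Sketch only: "track" is a started AudioTrack, "framesWritten" counts the
    // frames this client has written so far.
    ssize_t writeWithoutBlocking(
            const sp<AudioTrack> &track, size_t &framesWritten,
            const uint8_t *data, size_t size) {
        uint32_t framesPlayed;
        if (track->getPosition(&framesPlayed) != OK) {
            return -1;  // cannot size the budget, try again later
        }

        // Room left in the sink buffer, in frames and then bytes.
        size_t framesPending = framesWritten - framesPlayed;
        size_t framesFree = track->frameCount() - framesPending;
        size_t bytesFree = framesFree * track->frameSize();

        if (size > bytesFree) {
            size = bytesFree;  // clamp so AudioTrack::write() returns immediately
        }

        ssize_t written = track->write(data, size);
        if (written > 0) {
            framesWritten += written / track->frameSize();
        }
        return written;
    }

The renderer then re-posts its push message after roughly half of the currently buffered
playout time, keeping the track fed without ever stalling the event loop.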
Change-Id: Ibf4df90aca29d638215e2da9b39e78bf3a2c4d08 --- .../wifi-display/sink/DirectRenderer.cpp | 607 ++++++++++++++++----- .../wifi-display/sink/DirectRenderer.h | 42 +- .../wifi-display/sink/WifiDisplaySink.cpp | 11 +- 3 files changed, 491 insertions(+), 169 deletions(-) diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp index 5efcd17..12338e9 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp @@ -22,6 +22,7 @@ #include #include +#include #include #include #include @@ -34,159 +35,208 @@ namespace android { -DirectRenderer::DirectRenderer( - const sp &bufferProducer) - : mSurfaceTex(bufferProducer), - mVideoDecoderNotificationPending(false), - mRenderPending(false), - mTimeOffsetUs(0ll), - mLatencySum(0ll), - mLatencyCount(0), - mNumFramesLate(0), - mNumFrames(0) { -} +/* + Drives the decoding process using a MediaCodec instance. Input buffers + queued by calls to "queueInputBuffer" are fed to the decoder as soon + as the decoder is ready for them, the client is notified about output + buffers as the decoder spits them out. +*/ +struct DirectRenderer::DecoderContext : public AHandler { + enum { + kWhatOutputBufferReady, + }; + DecoderContext(const sp ¬ify); -DirectRenderer::~DirectRenderer() { - if (mVideoDecoder != NULL) { - mVideoDecoder->release(); - mVideoDecoder.clear(); + status_t init( + const sp &format, + const sp &surfaceTex); - mVideoDecoderLooper->stop(); - mVideoDecoderLooper.clear(); - } -} + void queueInputBuffer(const sp &accessUnit); -void DirectRenderer::setTimeOffset(int64_t offset) { - mTimeOffsetUs = offset; -} + status_t renderOutputBufferAndRelease(size_t index); + status_t releaseOutputBuffer(size_t index); -int64_t DirectRenderer::getAvgLatenessUs() { - if (mLatencyCount == 0) { - return 0ll; - } +protected: + virtual ~DecoderContext(); - int64_t avgLatencyUs = mLatencySum / mLatencyCount; + virtual void onMessageReceived(const sp &msg); - mLatencySum = 0ll; - mLatencyCount = 0; +private: + enum { + kWhatDecoderNotify, + }; - if (mNumFrames > 0) { - ALOGI("%d / %d frames late", mNumFramesLate, mNumFrames); - mNumFramesLate = 0; - mNumFrames = 0; - } + sp mNotify; + sp mDecoderLooper; + sp mDecoder; + Vector > mDecoderInputBuffers; + Vector > mDecoderOutputBuffers; + List mDecoderInputBuffersAvailable; + bool mDecoderNotificationPending; - return avgLatencyUs; -} + List > mAccessUnits; -void DirectRenderer::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatVideoDecoderNotify: - { - onVideoDecoderNotify(); - break; - } + void onDecoderNotify(); + void scheduleDecoderNotification(); + void queueDecoderInputBuffers(); - case kWhatRender: - { - onRender(); - break; - } + void queueOutputBuffer( + size_t index, int64_t timeUs, const sp &buffer); - default: - TRESPASS(); - } + DISALLOW_EVIL_CONSTRUCTORS(DecoderContext); +}; + +//////////////////////////////////////////////////////////////////////////////// + +/* + A "push" audio renderer. The primary function of this renderer is to use + an AudioTrack in push mode and making sure not to block the event loop + be ensuring that calls to AudioTrack::write never block. This is done by + estimating an upper bound of data that can be written to the AudioTrack + buffer without delay. 
+*/ +struct DirectRenderer::AudioRenderer : public AHandler { + AudioRenderer(const sp &decoderContext); + + void queueInputBuffer( + size_t index, int64_t timeUs, const sp &buffer); + +protected: + virtual ~AudioRenderer(); + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatPushAudio, + }; + + struct BufferInfo { + size_t mIndex; + int64_t mTimeUs; + sp mBuffer; + }; + + sp mDecoderContext; + sp mAudioTrack; + + List mInputBuffers; + bool mPushPending; + + size_t mNumFramesWritten; + + void schedulePushIfNecessary(); + void onPushAudio(); + + ssize_t writeNonBlocking(const uint8_t *data, size_t size); + + DISALLOW_EVIL_CONSTRUCTORS(AudioRenderer); +}; + +//////////////////////////////////////////////////////////////////////////////// + +DirectRenderer::DecoderContext::DecoderContext(const sp ¬ify) + : mNotify(notify), + mDecoderNotificationPending(false) { } -void DirectRenderer::setFormat( - size_t trackIndex, const sp &format) { - if (trackIndex == 1) { - // Ignore audio for now. - return; +DirectRenderer::DecoderContext::~DecoderContext() { + if (mDecoder != NULL) { + mDecoder->release(); + mDecoder.clear(); + + mDecoderLooper->stop(); + mDecoderLooper.clear(); } +} - CHECK(mVideoDecoder == NULL); +status_t DirectRenderer::DecoderContext::init( + const sp &format, + const sp &surfaceTex) { + CHECK(mDecoder == NULL); AString mime; CHECK(format->findString("mime", &mime)); - mVideoDecoderLooper = new ALooper; - mVideoDecoderLooper->setName("video codec looper"); + mDecoderLooper = new ALooper; + mDecoderLooper->setName("video codec looper"); - mVideoDecoderLooper->start( + mDecoderLooper->start( false /* runOnCallingThread */, false /* canCallJava */, PRIORITY_DEFAULT); - mVideoDecoder = MediaCodec::CreateByType( - mVideoDecoderLooper, mime.c_str(), false /* encoder */); + mDecoder = MediaCodec::CreateByType( + mDecoderLooper, mime.c_str(), false /* encoder */); - CHECK(mVideoDecoder != NULL); + CHECK(mDecoder != NULL); - status_t err = mVideoDecoder->configure( + status_t err = mDecoder->configure( format, - mSurfaceTex == NULL - ? NULL : new Surface(mSurfaceTex), + surfaceTex == NULL + ? NULL : new Surface(surfaceTex), NULL /* crypto */, 0 /* flags */); CHECK_EQ(err, (status_t)OK); - err = mVideoDecoder->start(); + err = mDecoder->start(); CHECK_EQ(err, (status_t)OK); - err = mVideoDecoder->getInputBuffers( - &mVideoDecoderInputBuffers); + err = mDecoder->getInputBuffers( + &mDecoderInputBuffers); CHECK_EQ(err, (status_t)OK); - scheduleVideoDecoderNotification(); + err = mDecoder->getOutputBuffers( + &mDecoderOutputBuffers); + CHECK_EQ(err, (status_t)OK); + + scheduleDecoderNotification(); + + return OK; } -void DirectRenderer::queueAccessUnit( - size_t trackIndex, const sp &accessUnit) { - if (trackIndex == 1) { - // Ignore audio for now. 
- return; - } +void DirectRenderer::DecoderContext::queueInputBuffer( + const sp &accessUnit) { + CHECK(mDecoder != NULL); - if (mVideoDecoder == NULL) { - sp format = new AMessage; - format->setString("mime", "video/avc"); - format->setInt32("width", 640); - format->setInt32("height", 360); + mAccessUnits.push_back(accessUnit); + queueDecoderInputBuffers(); +} - setFormat(0, format); - } +status_t DirectRenderer::DecoderContext::renderOutputBufferAndRelease( + size_t index) { + return mDecoder->renderOutputBufferAndRelease(index); +} - mVideoAccessUnits.push_back(accessUnit); - queueVideoDecoderInputBuffers(); +status_t DirectRenderer::DecoderContext::releaseOutputBuffer(size_t index) { + return mDecoder->releaseOutputBuffer(index); } -void DirectRenderer::queueVideoDecoderInputBuffers() { - if (mVideoDecoder == NULL) { +void DirectRenderer::DecoderContext::queueDecoderInputBuffers() { + if (mDecoder == NULL) { return; } bool submittedMore = false; - while (!mVideoAccessUnits.empty() - && !mVideoDecoderInputBuffersAvailable.empty()) { - size_t index = *mVideoDecoderInputBuffersAvailable.begin(); + while (!mAccessUnits.empty() + && !mDecoderInputBuffersAvailable.empty()) { + size_t index = *mDecoderInputBuffersAvailable.begin(); - mVideoDecoderInputBuffersAvailable.erase( - mVideoDecoderInputBuffersAvailable.begin()); + mDecoderInputBuffersAvailable.erase( + mDecoderInputBuffersAvailable.begin()); - sp srcBuffer = *mVideoAccessUnits.begin(); - mVideoAccessUnits.erase(mVideoAccessUnits.begin()); + sp srcBuffer = *mAccessUnits.begin(); + mAccessUnits.erase(mAccessUnits.begin()); const sp &dstBuffer = - mVideoDecoderInputBuffers.itemAt(index); + mDecoderInputBuffers.itemAt(index); memcpy(dstBuffer->data(), srcBuffer->data(), srcBuffer->size()); int64_t timeUs; CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs)); - status_t err = mVideoDecoder->queueInputBuffer( + status_t err = mDecoder->queueInputBuffer( index, 0 /* offset */, srcBuffer->size(), @@ -198,19 +248,33 @@ void DirectRenderer::queueVideoDecoderInputBuffers() { } if (submittedMore) { - scheduleVideoDecoderNotification(); + scheduleDecoderNotification(); + } +} + +void DirectRenderer::DecoderContext::onMessageReceived( + const sp &msg) { + switch (msg->what()) { + case kWhatDecoderNotify: + { + onDecoderNotify(); + break; + } + + default: + TRESPASS(); } } -void DirectRenderer::onVideoDecoderNotify() { - mVideoDecoderNotificationPending = false; +void DirectRenderer::DecoderContext::onDecoderNotify() { + mDecoderNotificationPending = false; for (;;) { size_t index; - status_t err = mVideoDecoder->dequeueInputBuffer(&index); + status_t err = mDecoder->dequeueInputBuffer(&index); if (err == OK) { - mVideoDecoderInputBuffersAvailable.push_back(index); + mDecoderInputBuffersAvailable.push_back(index); } else if (err == -EAGAIN) { break; } else { @@ -218,7 +282,7 @@ void DirectRenderer::onVideoDecoderNotify() { } } - queueVideoDecoderInputBuffers(); + queueDecoderInputBuffers(); for (;;) { size_t index; @@ -226,7 +290,7 @@ void DirectRenderer::onVideoDecoderNotify() { size_t size; int64_t timeUs; uint32_t flags; - status_t err = mVideoDecoder->dequeueOutputBuffer( + status_t err = mDecoder->dequeueOutputBuffer( &index, &offset, &size, @@ -234,9 +298,12 @@ void DirectRenderer::onVideoDecoderNotify() { &flags); if (err == OK) { - queueOutputBuffer(index, timeUs); + queueOutputBuffer( + index, timeUs, mDecoderOutputBuffers.itemAt(index)); } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) { - // We don't care. 
+ err = mDecoder->getOutputBuffers( + &mDecoderOutputBuffers); + CHECK_EQ(err, (status_t)OK); } else if (err == INFO_FORMAT_CHANGED) { // We don't care. } else if (err == -EAGAIN) { @@ -246,48 +313,315 @@ void DirectRenderer::onVideoDecoderNotify() { } } - scheduleVideoDecoderNotification(); + scheduleDecoderNotification(); } -void DirectRenderer::queueOutputBuffer(size_t index, int64_t timeUs) { -#if 1 - OutputInfo info; +void DirectRenderer::DecoderContext::scheduleDecoderNotification() { + if (mDecoderNotificationPending) { + return; + } + + sp notify = + new AMessage(kWhatDecoderNotify, id()); + + mDecoder->requestActivityNotification(notify); + mDecoderNotificationPending = true; +} + +void DirectRenderer::DecoderContext::queueOutputBuffer( + size_t index, int64_t timeUs, const sp &buffer) { + sp msg = mNotify->dup(); + msg->setInt32("what", kWhatOutputBufferReady); + msg->setSize("index", index); + msg->setInt64("timeUs", timeUs); + msg->setBuffer("buffer", buffer); + msg->post(); +} + +//////////////////////////////////////////////////////////////////////////////// + +DirectRenderer::AudioRenderer::AudioRenderer( + const sp &decoderContext) + : mDecoderContext(decoderContext), + mPushPending(false), + mNumFramesWritten(0) { + mAudioTrack = new AudioTrack( + AUDIO_STREAM_DEFAULT, + 48000.0f, + AUDIO_FORMAT_PCM, + AUDIO_CHANNEL_OUT_STEREO, + (int)0 /* frameCount */); + + CHECK_EQ((status_t)OK, mAudioTrack->initCheck()); + + mAudioTrack->start(); +} + +DirectRenderer::AudioRenderer::~AudioRenderer() { +} + +void DirectRenderer::AudioRenderer::queueInputBuffer( + size_t index, int64_t timeUs, const sp &buffer) { + BufferInfo info; info.mIndex = index; - info.mTimeUs = timeUs + mTimeOffsetUs; - mOutputBuffers.push_back(info); + info.mTimeUs = timeUs; + info.mBuffer = buffer; - scheduleRenderIfNecessary(); -#else - mLatencySum += ALooper::GetNowUs() - (timeUs + mTimeOffsetUs); - ++mLatencyCount; + mInputBuffers.push_back(info); + schedulePushIfNecessary(); +} - status_t err = mVideoDecoder->renderOutputBufferAndRelease(index); - CHECK_EQ(err, (status_t)OK); -#endif +void DirectRenderer::AudioRenderer::onMessageReceived( + const sp &msg) { + switch (msg->what()) { + case kWhatPushAudio: + { + onPushAudio(); + break; + } + + default: + break; + } } -void DirectRenderer::scheduleRenderIfNecessary() { - if (mRenderPending || mOutputBuffers.empty()) { +void DirectRenderer::AudioRenderer::schedulePushIfNecessary() { + if (mPushPending || mInputBuffers.empty()) { return; } - mRenderPending = true; + mPushPending = true; + + uint32_t numFramesPlayed; + CHECK_EQ(mAudioTrack->getPosition(&numFramesPlayed), + (status_t)OK); + + uint32_t numFramesPendingPlayout = mNumFramesWritten - numFramesPlayed; + + // This is how long the audio sink will have data to + // play back. + const float msecsPerFrame = 1000.0f / mAudioTrack->getSampleRate(); + + int64_t delayUs = + msecsPerFrame * numFramesPendingPlayout * 1000ll; - int64_t timeUs = (*mOutputBuffers.begin()).mTimeUs; + // Let's give it more data after about half that time + // has elapsed. 
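+    // Illustrative arithmetic (not from this patch): at 48 kHz with 2048
+    // frames still pending, delayUs comes out to about 2048 / 48 = ~42.7 ms,
+    // so the next push is scheduled roughly 21 ms from now.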
+ (new AMessage(kWhatPushAudio, id()))->post(delayUs / 2); +} + +void DirectRenderer::AudioRenderer::onPushAudio() { + mPushPending = false; + + while (!mInputBuffers.empty()) { + const BufferInfo &info = *mInputBuffers.begin(); + + ssize_t n = writeNonBlocking( + info.mBuffer->data(), info.mBuffer->size()); + + if (n < (ssize_t)info.mBuffer->size()) { + CHECK_GE(n, 0); + + info.mBuffer->setRange( + info.mBuffer->offset() + n, info.mBuffer->size() - n); + break; + } + + mDecoderContext->releaseOutputBuffer(info.mIndex); + + mInputBuffers.erase(mInputBuffers.begin()); + } + + schedulePushIfNecessary(); +} + +ssize_t DirectRenderer::AudioRenderer::writeNonBlocking( + const uint8_t *data, size_t size) { + uint32_t numFramesPlayed; + status_t err = mAudioTrack->getPosition(&numFramesPlayed); + if (err != OK) { + return err; + } + + ssize_t numFramesAvailableToWrite = + mAudioTrack->frameCount() - (mNumFramesWritten - numFramesPlayed); + + size_t numBytesAvailableToWrite = + numFramesAvailableToWrite * mAudioTrack->frameSize(); + + if (size > numBytesAvailableToWrite) { + size = numBytesAvailableToWrite; + } + + CHECK_EQ(mAudioTrack->write(data, size), (ssize_t)size); + + size_t numFramesWritten = size / mAudioTrack->frameSize(); + mNumFramesWritten += numFramesWritten; + + return size; +} + +//////////////////////////////////////////////////////////////////////////////// + +DirectRenderer::DirectRenderer( + const sp &bufferProducer) + : mSurfaceTex(bufferProducer), + mVideoRenderPending(false), + mLatencySum(0ll), + mLatencyCount(0), + mNumFramesLate(0), + mNumFrames(0) { +} + +DirectRenderer::~DirectRenderer() { +} + +int64_t DirectRenderer::getAvgLatenessUs() { + if (mLatencyCount == 0) { + return 0ll; + } + + int64_t avgLatencyUs = mLatencySum / mLatencyCount; + + mLatencySum = 0ll; + mLatencyCount = 0; + + if (mNumFrames > 0) { + ALOGI("%d / %d frames late", mNumFramesLate, mNumFrames); + mNumFramesLate = 0; + mNumFrames = 0; + } + + return avgLatencyUs; +} + +void DirectRenderer::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatDecoderNotify: + { + onDecoderNotify(msg); + break; + } + + case kWhatRenderVideo: + { + onRenderVideo(); + break; + } + + default: + TRESPASS(); + } +} + +void DirectRenderer::setFormat(size_t trackIndex, const sp &format) { + CHECK_LT(trackIndex, 2u); + + CHECK(mDecoderContext[trackIndex] == NULL); + + sp notify = new AMessage(kWhatDecoderNotify, id()); + notify->setSize("trackIndex", trackIndex); + + mDecoderContext[trackIndex] = new DecoderContext(notify); + looper()->registerHandler(mDecoderContext[trackIndex]); + + CHECK_EQ((status_t)OK, + mDecoderContext[trackIndex]->init( + format, trackIndex == 0 ? 
mSurfaceTex : NULL)); + + if (trackIndex == 1) { + // Audio + mAudioRenderer = new AudioRenderer(mDecoderContext[1]); + looper()->registerHandler(mAudioRenderer); + } +} + +void DirectRenderer::queueAccessUnit( + size_t trackIndex, const sp &accessUnit) { + CHECK_LT(trackIndex, 2u); + + if (mDecoderContext[trackIndex] == NULL) { + CHECK_EQ(trackIndex, 0u); + + sp format = new AMessage; + format->setString("mime", "video/avc"); + format->setInt32("width", 640); + format->setInt32("height", 360); + + setFormat(trackIndex, format); + } + + mDecoderContext[trackIndex]->queueInputBuffer(accessUnit); +} + +void DirectRenderer::onDecoderNotify(const sp &msg) { + size_t trackIndex; + CHECK(msg->findSize("trackIndex", &trackIndex)); + + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case DecoderContext::kWhatOutputBufferReady: + { + size_t index; + CHECK(msg->findSize("index", &index)); + + int64_t timeUs; + CHECK(msg->findInt64("timeUs", &timeUs)); + + sp buffer; + CHECK(msg->findBuffer("buffer", &buffer)); + + queueOutputBuffer(trackIndex, index, timeUs, buffer); + break; + } + + default: + TRESPASS(); + } +} + +void DirectRenderer::queueOutputBuffer( + size_t trackIndex, + size_t index, int64_t timeUs, const sp &buffer) { + if (trackIndex == 1) { + // Audio + mAudioRenderer->queueInputBuffer(index, timeUs, buffer); + return; + } + + OutputInfo info; + info.mIndex = index; + info.mTimeUs = timeUs; + info.mBuffer = buffer; + mVideoOutputBuffers.push_back(info); + + scheduleVideoRenderIfNecessary(); +} + +void DirectRenderer::scheduleVideoRenderIfNecessary() { + if (mVideoRenderPending || mVideoOutputBuffers.empty()) { + return; + } + + mVideoRenderPending = true; + + int64_t timeUs = (*mVideoOutputBuffers.begin()).mTimeUs; int64_t nowUs = ALooper::GetNowUs(); int64_t delayUs = timeUs - nowUs; - (new AMessage(kWhatRender, id()))->post(delayUs); + (new AMessage(kWhatRenderVideo, id()))->post(delayUs); } -void DirectRenderer::onRender() { - mRenderPending = false; +void DirectRenderer::onRenderVideo() { + mVideoRenderPending = false; int64_t nowUs = ALooper::GetNowUs(); - while (!mOutputBuffers.empty()) { - const OutputInfo &info = *mOutputBuffers.begin(); + while (!mVideoOutputBuffers.empty()) { + const OutputInfo &info = *mVideoOutputBuffers.begin(); if (info.mTimeUs > nowUs) { break; @@ -301,25 +635,14 @@ void DirectRenderer::onRender() { mLatencySum += nowUs - info.mTimeUs; ++mLatencyCount; - status_t err = mVideoDecoder->renderOutputBufferAndRelease(info.mIndex); + status_t err = + mDecoderContext[0]->renderOutputBufferAndRelease(info.mIndex); CHECK_EQ(err, (status_t)OK); - mOutputBuffers.erase(mOutputBuffers.begin()); + mVideoOutputBuffers.erase(mVideoOutputBuffers.begin()); } - scheduleRenderIfNecessary(); -} - -void DirectRenderer::scheduleVideoDecoderNotification() { - if (mVideoDecoderNotificationPending) { - return; - } - - sp notify = - new AMessage(kWhatVideoDecoderNotify, id()); - - mVideoDecoder->requestActivityNotification(notify); - mVideoDecoderNotificationPending = true; + scheduleVideoRenderIfNecessary(); } } // namespace android diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h index 44be8f8..92c176a 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.h +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.h @@ -23,21 +23,17 @@ namespace android { struct ABuffer; +struct AudioTrack; struct IGraphicBufferProducer; struct MediaCodec; -// An experimental 
renderer that only supports video and decodes video data -// as soon as it arrives using a MediaCodec instance, rendering it without -// delay. Primarily meant to finetune packet loss discovery and minimize -// latency. +// Renders audio and video data queued by calls to "queueAccessUnit". struct DirectRenderer : public AHandler { DirectRenderer(const sp &bufferProducer); void setFormat(size_t trackIndex, const sp &format); void queueAccessUnit(size_t trackIndex, const sp &accessUnit); - void setTimeOffset(int64_t offset); - int64_t getAvgLatenessUs(); protected: @@ -45,30 +41,28 @@ protected: virtual ~DirectRenderer(); private: + struct DecoderContext; + struct AudioRenderer; + enum { - kWhatVideoDecoderNotify, - kWhatRender, + kWhatDecoderNotify, + kWhatRenderVideo, }; struct OutputInfo { size_t mIndex; int64_t mTimeUs; + sp mBuffer; }; sp mSurfaceTex; - sp mVideoDecoderLooper; - sp mVideoDecoder; - Vector > mVideoDecoderInputBuffers; - List mVideoDecoderInputBuffersAvailable; - bool mVideoDecoderNotificationPending; - - List > mVideoAccessUnits; + sp mDecoderContext[2]; + List mVideoOutputBuffers; - List mOutputBuffers; - bool mRenderPending; + bool mVideoRenderPending; - int64_t mTimeOffsetUs; + sp mAudioRenderer; int64_t mLatencySum; size_t mLatencyCount; @@ -76,14 +70,14 @@ private: int32_t mNumFramesLate; int32_t mNumFrames; - void onVideoDecoderNotify(); - void onRender(); + void onDecoderNotify(const sp &msg); - void queueVideoDecoderInputBuffers(); - void scheduleVideoDecoderNotification(); - void scheduleRenderIfNecessary(); + void queueOutputBuffer( + size_t trackIndex, + size_t index, int64_t timeUs, const sp &buffer); - void queueOutputBuffer(size_t index, int64_t timeUs); + void scheduleVideoRenderIfNecessary(); + void onRenderVideo(); DISALLOW_EVIL_CONSTRUCTORS(DirectRenderer); }; diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index d635c3a..62021c0 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -337,12 +337,17 @@ void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { ALOGI("Assuming %lld ms of latency.", latencyUs / 1000ll); } + sp accessUnit; + CHECK(msg->findBuffer("accessUnit", &accessUnit)); + + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + // We are the timesync _client_, // client time = server time - time offset. 
- mRenderer->setTimeOffset(-mTimeOffsetUs + mTargetLatencyUs); + timeUs += mTargetLatencyUs - mTimeOffsetUs; - sp accessUnit; - CHECK(msg->findBuffer("accessUnit", &accessUnit)); + accessUnit->meta()->setInt64("timeUs", timeUs); size_t trackIndex; CHECK(msg->findSize("trackIndex", &trackIndex)); -- cgit v1.1 From acd695c42749f8821b0a0cc27739ddf096c6d4e8 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 13 Mar 2013 17:23:00 -0700 Subject: ProCamera: Fix rare deadlock when client destructs inside the connect call Bug: 8337737 Change-Id: Ia6fca4365fa20fdbfd6a1ec8d047639a002f2aba --- services/camera/libcameraservice/CameraService.cpp | 164 +++++++++++---------- 1 file changed, 87 insertions(+), 77 deletions(-) diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 7636143..5a6a3c8 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -277,55 +277,61 @@ sp CameraService::connect( sp client; - Mutex::Autolock lock(mServiceLock); - if (!canConnectUnsafe(cameraId, clientPackageName, - cameraClient->asBinder(), - /*out*/client)) { - return NULL; - } else if (client.get() != NULL) { - return client; - } + { + Mutex::Autolock lock(mServiceLock); + if (!canConnectUnsafe(cameraId, clientPackageName, + cameraClient->asBinder(), + /*out*/client)) { + return NULL; + } else if (client.get() != NULL) { + return client; + } - int facing = -1; - int deviceVersion = getDeviceVersion(cameraId, &facing); + int facing = -1; + int deviceVersion = getDeviceVersion(cameraId, &facing); - // If there are other non-exclusive users of the camera, - // this will tear them down before we can reuse the camera - if (isValidCameraId(cameraId)) { - updateStatus(ICameraServiceListener::STATUS_NOT_AVAILABLE, cameraId); - } + // If there are other non-exclusive users of the camera, + // this will tear them down before we can reuse the camera + if (isValidCameraId(cameraId)) { + updateStatus(ICameraServiceListener::STATUS_NOT_AVAILABLE, + cameraId); + } - switch(deviceVersion) { - case CAMERA_DEVICE_API_VERSION_1_0: - client = new CameraClient(this, cameraClient, - clientPackageName, cameraId, - facing, callingPid, clientUid, getpid()); - break; - case CAMERA_DEVICE_API_VERSION_2_0: - case CAMERA_DEVICE_API_VERSION_2_1: - case CAMERA_DEVICE_API_VERSION_3_0: - client = new Camera2Client(this, cameraClient, - clientPackageName, cameraId, - facing, callingPid, clientUid, getpid(), - deviceVersion); - break; - case -1: - ALOGE("Invalid camera id %d", cameraId); - return NULL; - default: - ALOGE("Unknown camera device HAL version: %d", deviceVersion); - return NULL; - } + switch(deviceVersion) { + case CAMERA_DEVICE_API_VERSION_1_0: + client = new CameraClient(this, cameraClient, + clientPackageName, cameraId, + facing, callingPid, clientUid, getpid()); + break; + case CAMERA_DEVICE_API_VERSION_2_0: + case CAMERA_DEVICE_API_VERSION_2_1: + case CAMERA_DEVICE_API_VERSION_3_0: + client = new Camera2Client(this, cameraClient, + clientPackageName, cameraId, + facing, callingPid, clientUid, getpid(), + deviceVersion); + break; + case -1: + ALOGE("Invalid camera id %d", cameraId); + return NULL; + default: + ALOGE("Unknown camera device HAL version: %d", deviceVersion); + return NULL; + } - if (!connectFinishUnsafe(client, client->asBinder())) { - // this is probably not recoverable.. 
but maybe the client can try again - updateStatus(ICameraServiceListener::STATUS_AVAILABLE, cameraId); + if (!connectFinishUnsafe(client, client->asBinder())) { + // this is probably not recoverable.. maybe the client can try again + updateStatus(ICameraServiceListener::STATUS_AVAILABLE, cameraId); - return NULL; - } + return NULL; + } - mClient[cameraId] = client; - LOG1("CameraService::connect X (id %d, this pid is %d)", cameraId, getpid()); + mClient[cameraId] = client; + LOG1("CameraService::connect X (id %d, this pid is %d)", cameraId, + getpid()); + } + // important: release the mutex here so the client can call back + // into the service from its destructor (can be at the end of the call) return client; } @@ -357,47 +363,51 @@ sp CameraService::connect( return NULL; } - Mutex::Autolock lock(mServiceLock); + sp client; { - sp client; - if (!canConnectUnsafe(cameraId, clientPackageName, - cameraCb->asBinder(), - /*out*/client)) { - return NULL; + Mutex::Autolock lock(mServiceLock); + { + sp client; + if (!canConnectUnsafe(cameraId, clientPackageName, + cameraCb->asBinder(), + /*out*/client)) { + return NULL; + } } - } - sp client; + int facing = -1; + int deviceVersion = getDeviceVersion(cameraId, &facing); - int facing = -1; - int deviceVersion = getDeviceVersion(cameraId, &facing); - - switch(deviceVersion) { - case CAMERA_DEVICE_API_VERSION_1_0: - ALOGE("Camera id %d uses HALv1, doesn't support ProCamera", cameraId); - return NULL; - break; - case CAMERA_DEVICE_API_VERSION_2_0: - case CAMERA_DEVICE_API_VERSION_2_1: - client = new ProCamera2Client(this, cameraCb, String16(), - cameraId, facing, callingPid, USE_CALLING_UID, getpid()); - break; - case -1: - ALOGE("Invalid camera id %d", cameraId); - return NULL; - default: - ALOGE("Unknown camera device HAL version: %d", deviceVersion); - return NULL; - } + switch(deviceVersion) { + case CAMERA_DEVICE_API_VERSION_1_0: + ALOGE("Camera id %d uses HALv1, doesn't support ProCamera", + cameraId); + return NULL; + break; + case CAMERA_DEVICE_API_VERSION_2_0: + case CAMERA_DEVICE_API_VERSION_2_1: + client = new ProCamera2Client(this, cameraCb, String16(), + cameraId, facing, callingPid, USE_CALLING_UID, getpid()); + break; + case -1: + ALOGE("Invalid camera id %d", cameraId); + return NULL; + default: + ALOGE("Unknown camera device HAL version: %d", deviceVersion); + return NULL; + } - if (!connectFinishUnsafe(client, client->asBinder())) { - return NULL; - } + if (!connectFinishUnsafe(client, client->asBinder())) { + return NULL; + } - mProClientList[cameraId].push(client); + mProClientList[cameraId].push(client); - LOG1("CameraService::connectPro X (id %d, this pid is %d)", cameraId, - getpid()); + LOG1("CameraService::connectPro X (id %d, this pid is %d)", cameraId, + getpid()); + } + // important: release the mutex here so the client can call back + // into the service from its destructor (can be at the end of the call) return client; } -- cgit v1.1 From acc47642e0f5d962f6289e6ba687fabf68f8312b Mon Sep 17 00:00:00 2001 From: James Dong Date: Tue, 12 Mar 2013 10:40:20 -0700 Subject: Make limitations of MPEG4Writer explicit o No more than 2 tracks will be supported o No more than one video and/or one audio tracks will be supported o Only take video and/or audio track (for instance, no text tracks) o If there is no track before start() is called, bail out. At the same time, make sure the errors from addSource() report to addTrack(), not to start(). 
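For MediaMuxer clients this means track problems now surface from addTrack() itself
rather than later from start(). A rough usage sketch under the new limits (the helper
and the format variables below are illustrative, not part of this change):

    #include <media/stagefright/MediaMuxer.h>
    #include <media/stagefright/foundation/AMessage.h>
    #include <utils/Errors.h>

    using namespace android;

    // Sketch only: videoFormat/audioFormat are AMessage formats whose "mime"
    // starts with "video/" and "audio/" respectively.
    status_t muxOneVideoOneAudio(
            int fd, const sp<AMessage> &videoFormat, const sp<AMessage> &audioFormat) {
        sp<MediaMuxer> muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);

        ssize_t videoTrack = muxer->addTrack(videoFormat);  // first video track
        ssize_t audioTrack = muxer->addTrack(audioFormat);  // first audio track
        if (videoTrack < 0 || audioTrack < 0) {
            // A second video/audio track, a third track, or a non-A/V track
            // is rejected here rather than at start().
            return UNKNOWN_ERROR;
        }

        return muxer->start();  // INVALID_OPERATION if no track was ever added
    }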
Bug: 7991013 Change-Id: I1ca35aaeb75b5448d75ed2c6c10dd12ecea720ab --- include/media/stagefright/MPEG4Writer.h | 6 +++++ media/libstagefright/MPEG4Writer.cpp | 41 +++++++++++++++++++++++++++++++++ media/libstagefright/MediaMuxer.cpp | 14 ++++++----- 3 files changed, 55 insertions(+), 6 deletions(-) diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h index 3596b38..88df6b0 100644 --- a/include/media/stagefright/MPEG4Writer.h +++ b/include/media/stagefright/MPEG4Writer.h @@ -35,7 +35,13 @@ public: MPEG4Writer(const char *filename); MPEG4Writer(int fd); + // Limitations + // 1. No more than 2 tracks can be added + // 2. Only video or audio source can be added + // 3. No more than one video and/or one audio source can be added. virtual status_t addSource(const sp &source); + + // Returns INVALID_OPERATION if there is no source or track. virtual status_t start(MetaData *param = NULL); virtual status_t stop() { return reset(); } virtual status_t pause(); diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp index 056b47a..316f669 100644 --- a/media/libstagefright/MPEG4Writer.cpp +++ b/media/libstagefright/MPEG4Writer.cpp @@ -428,6 +428,42 @@ status_t MPEG4Writer::addSource(const sp &source) { ALOGE("Attempt to add source AFTER recording is started"); return UNKNOWN_ERROR; } + + // At most 2 tracks can be supported. + if (mTracks.size() >= 2) { + ALOGE("Too many tracks (%d) to add", mTracks.size()); + return ERROR_UNSUPPORTED; + } + + CHECK(source.get() != NULL); + + // A track of type other than video or audio is not supported. + const char *mime; + source->getFormat()->findCString(kKeyMIMEType, &mime); + bool isAudio = !strncasecmp(mime, "audio/", 6); + bool isVideo = !strncasecmp(mime, "video/", 6); + if (!isAudio && !isVideo) { + ALOGE("Track (%s) other than video or audio is not supported", + mime); + return ERROR_UNSUPPORTED; + } + + // At this point, we know the track to be added is either + // video or audio. Thus, we only need to check whether it + // is an audio track or not (if it is not, then it must be + // a video track). + + // No more than one video or one audio track is supported. + for (List::iterator it = mTracks.begin(); + it != mTracks.end(); ++it) { + if ((*it)->isAudio() == isAudio) { + ALOGE("%s track already exists", isAudio? "Audio": "Video"); + return ERROR_UNSUPPORTED; + } + } + + // This is the first track of either audio or video. + // Go ahead to add the track. 
Track *track = new Track(this, source, 1 + mTracks.size()); mTracks.push_back(track); @@ -435,6 +471,11 @@ status_t MPEG4Writer::addSource(const sp &source) { } status_t MPEG4Writer::startTracks(MetaData *params) { + if (mTracks.empty()) { + ALOGE("No source added"); + return INVALID_OPERATION; + } + for (List::iterator it = mTracks.begin(); it != mTracks.end(); ++it) { status_t err = (*it)->start(params); diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp index aefc270..21841b3 100644 --- a/media/libstagefright/MediaMuxer.cpp +++ b/media/libstagefright/MediaMuxer.cpp @@ -76,17 +76,17 @@ ssize_t MediaMuxer::addTrack(const sp &format) { convertMessageToMetaData(format, meta); sp newTrack = new MediaAdapter(meta); - return mTrackList.add(newTrack); + status_t result = mWriter->addSource(newTrack); + if (result == OK) { + return mTrackList.add(newTrack); + } + return -1; } status_t MediaMuxer::start() { Mutex::Autolock autoLock(mMuxerLock); - if (mState == INITED) { mState = STARTED; - for (size_t i = 0 ; i < mTrackList.size(); i++) { - mWriter->addSource(mTrackList[i]); - } return mWriter->start(); } else { ALOGE("start() is called in invalid state %d", mState); @@ -100,7 +100,9 @@ status_t MediaMuxer::stop() { if (mState == STARTED) { mState = STOPPED; for (size_t i = 0; i < mTrackList.size(); i++) { - mTrackList[i]->stop(); + if (mTrackList[i]->stop() != OK) { + return INVALID_OPERATION; + } } return mWriter->stop(); } else { -- cgit v1.1 From 595ee7ba5f988ff34527226d0142a109ca3b049e Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Thu, 14 Mar 2013 14:50:47 -0700 Subject: Parse SBR extension sample rate Change-Id: Ib6f6994228a279ee10b389515fba04516c7c42ba --- media/libstagefright/MPEG4Extractor.cpp | 31 ++++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index b2e60be..56fad60 100644 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -2067,17 +2067,30 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio( sampleRate = br.getBits(24); numChannels = br.getBits(4); } else { - static uint32_t kSamplingRate[] = { - 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, - 16000, 12000, 11025, 8000, 7350 - }; - - if (freqIndex == 13 || freqIndex == 14) { - return ERROR_MALFORMED; + numChannels = br.getBits(4); + if (objectType == 5) { + // SBR specific config per 14496-3 table 1.13 + freqIndex = br.getBits(4); + if (freqIndex == 15) { + if (csd_size < 8) { + return ERROR_MALFORMED; + } + sampleRate = br.getBits(24); + } } - sampleRate = kSamplingRate[freqIndex]; - numChannels = br.getBits(4); + if (sampleRate == 0) { + static uint32_t kSamplingRate[] = { + 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, + 16000, 12000, 11025, 8000, 7350 + }; + + if (freqIndex == 13 || freqIndex == 14) { + return ERROR_MALFORMED; + } + + sampleRate = kSamplingRate[freqIndex]; + } } if (numChannels == 0) { -- cgit v1.1 From e0fb528f8e3bbab04620c8534177168b358e837b Mon Sep 17 00:00:00 2001 From: ztenghui Date: Tue, 12 Mar 2013 15:43:56 -0700 Subject: Add the presentation rotation support bug:7991013 Change-Id: I10cb034b432876c724baa4974efcb3d67b8a99b6 --- cmds/stagefright/muxer.cpp | 15 ++++++++--- include/media/stagefright/MediaMuxer.h | 14 ++++++++-- media/libstagefright/MediaMuxer.cpp | 48 ++++++++++++++++++++++++---------- 3 files changed, 58 insertions(+), 19 deletions(-) diff --git 
a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp index fac2acc..cca33e0 100644 --- a/cmds/stagefright/muxer.cpp +++ b/cmds/stagefright/muxer.cpp @@ -55,7 +55,8 @@ static int muxing( const char *outputFileName, bool enableTrim, int trimStartTimeMs, - int trimEndTimeMs) { + int trimEndTimeMs, + int rotationDegrees) { sp extractor = new NuMediaExtractor; if (extractor->setDataSource(path) != OK) { fprintf(stderr, "unable to instantiate extractor. %s\n", path); @@ -141,6 +142,7 @@ static int muxing( size_t trackIndex = -1; sp newBuffer = new ABuffer(bufferSize); + muxer->setOrientationHint(rotationDegrees); muxer->start(); while (!sawInputEOS) { @@ -210,12 +212,13 @@ int main(int argc, char **argv) { char *outputFileName = NULL; int trimStartTimeMs = -1; int trimEndTimeMs = -1; + int rotationDegrees = 0; // When trimStartTimeMs and trimEndTimeMs seems valid, we turn this switch // to true. bool enableTrim = false; int res; - while ((res = getopt(argc, argv, "h?avo:s:e:")) >= 0) { + while ((res = getopt(argc, argv, "h?avo:s:e:r:")) >= 0) { switch (res) { case 'a': { @@ -247,6 +250,12 @@ int main(int argc, char **argv) { break; } + case 'r': + { + rotationDegrees = atoi(optarg); + break; + } + case '?': case 'h': default: @@ -288,7 +297,7 @@ int main(int argc, char **argv) { looper->start(); int result = muxing(looper, argv[0], useAudio, useVideo, outputFileName, - enableTrim, trimStartTimeMs, trimEndTimeMs); + enableTrim, trimStartTimeMs, trimEndTimeMs, rotationDegrees); looper->stop(); diff --git a/include/media/stagefright/MediaMuxer.h b/include/media/stagefright/MediaMuxer.h index 167d0d9..fad319f 100644 --- a/include/media/stagefright/MediaMuxer.h +++ b/include/media/stagefright/MediaMuxer.h @@ -77,11 +77,20 @@ public: status_t start(); /** + * Set the orientation hint. + * @param degrees The rotation degrees. It has to be either 0, + * 90, 180 or 270. + * @return OK if no error. + */ + status_t setOrientationHint(int degrees); + + /** * Stop muxing. * This method is a blocking call. Depending on how * much data is bufferred internally, the time needed for stopping * the muxer may be time consuming. UI thread is * not recommended for launching this call. + * @return OK if no error. */ status_t stop(); @@ -104,12 +113,13 @@ public: private: sp mWriter; Vector< sp > mTrackList; // Each track has its MediaAdapter. + sp mFileMeta; // Metadata for the whole file. Mutex mMuxerLock; enum State { - UNINITED, - INITED, + UNINITIALIZED, + INITIALIZED, STARTED, STOPPED }; diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp index 21841b3..b948fe2 100644 --- a/media/libstagefright/MediaMuxer.cpp +++ b/media/libstagefright/MediaMuxer.cpp @@ -36,18 +36,21 @@ namespace android { MediaMuxer::MediaMuxer(const char *path, OutputFormat format) - : mState(UNINITED) { + : mState(UNINITIALIZED) { if (format == OUTPUT_FORMAT_MPEG_4) { mWriter = new MPEG4Writer(path); - mState = INITED; + mFileMeta = new MetaData; + mState = INITIALIZED; } + } MediaMuxer::MediaMuxer(int fd, OutputFormat format) - : mState(UNINITED) { + : mState(UNINITIALIZED) { if (format == OUTPUT_FORMAT_MPEG_4) { mWriter = new MPEG4Writer(fd); - mState = INITED; + mFileMeta = new MetaData; + mState = INITIALIZED; } } @@ -55,6 +58,7 @@ MediaMuxer::~MediaMuxer() { Mutex::Autolock autoLock(mMuxerLock); // Clean up all the internal resources. 
+ mFileMeta.clear(); mWriter.clear(); mTrackList.clear(); } @@ -67,15 +71,15 @@ ssize_t MediaMuxer::addTrack(const sp &format) { return -EINVAL; } - if (mState != INITED) { + if (mState != INITIALIZED) { ALOGE("addTrack() must be called after constructor and before start()."); return INVALID_OPERATION; } - sp meta = new MetaData; - convertMessageToMetaData(format, meta); + sp trackMeta = new MetaData; + convertMessageToMetaData(format, trackMeta); - sp newTrack = new MediaAdapter(meta); + sp newTrack = new MediaAdapter(trackMeta); status_t result = mWriter->addSource(newTrack); if (result == OK) { return mTrackList.add(newTrack); @@ -83,11 +87,27 @@ ssize_t MediaMuxer::addTrack(const sp &format) { return -1; } +status_t MediaMuxer::setOrientationHint(int degrees) { + Mutex::Autolock autoLock(mMuxerLock); + if (mState != INITIALIZED) { + ALOGE("setOrientationHint() must be called before start()."); + return INVALID_OPERATION; + } + + if (degrees != 0 && degrees != 90 && degrees != 180 && degrees != 270) { + ALOGE("setOrientationHint() get invalid degrees"); + return -EINVAL; + } + + mFileMeta->setInt32(kKeyRotation, degrees); + return OK; +} + status_t MediaMuxer::start() { Mutex::Autolock autoLock(mMuxerLock); - if (mState == INITED) { + if (mState == INITIALIZED) { mState = STARTED; - return mWriter->start(); + return mWriter->start(mFileMeta.get()); } else { ALOGE("start() is called in invalid state %d", mState); return INVALID_OPERATION; @@ -135,13 +155,13 @@ status_t MediaMuxer::writeSampleData(const sp &buffer, size_t trackInde mediaBuffer->add_ref(); // Released in MediaAdapter::signalBufferReturned(). mediaBuffer->set_range(buffer->offset(), buffer->size()); - sp metaData = mediaBuffer->meta_data(); - metaData->setInt64(kKeyTime, timeUs); + sp sampleMetaData = mediaBuffer->meta_data(); + sampleMetaData->setInt64(kKeyTime, timeUs); // Just set the kKeyDecodingTime as the presentation time for now. 
- metaData->setInt64(kKeyDecodingTime, timeUs); + sampleMetaData->setInt64(kKeyDecodingTime, timeUs); if (flags & SAMPLE_FLAG_SYNC) { - metaData->setInt32(kKeyIsSyncFrame, true); + sampleMetaData->setInt32(kKeyIsSyncFrame, true); } sp currentTrack = mTrackList[trackIndex]; -- cgit v1.1 From c13c8bc609be74c89a23a4e3b396b807e59047f1 Mon Sep 17 00:00:00 2001 From: Alex Ray Date: Sun, 17 Mar 2013 03:23:18 -0700 Subject: Camera: fix waitUntilDrained log message Change-Id: Iab6197e0674752de7933dd812bff430a5c487c4b --- services/camera/libcameraservice/Camera2Device.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/Camera2Device.cpp index 37ba5ae..946cdba 100644 --- a/services/camera/libcameraservice/Camera2Device.cpp +++ b/services/camera/libcameraservice/Camera2Device.cpp @@ -426,7 +426,7 @@ status_t Camera2Device::waitUntilDrained() { totalTime += kSleepTime; if (totalTime > kMaxSleepTime) { ALOGE("%s: Waited %d us, %d requests still in flight", __FUNCTION__, - mHal2Device->ops->get_in_progress_count(mHal2Device), totalTime); + totalTime, mHal2Device->ops->get_in_progress_count(mHal2Device)); return TIMED_OUT; } } -- cgit v1.1 From 0b530f1050150bb751ae642d5a9dce34141d9475 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 18 Mar 2013 11:09:22 -0700 Subject: Allow for streaming of media files (without recompression) Change-Id: I1de356cc37506ba986822d12a1a59e7b64069e02 --- media/libstagefright/wifi-display/MediaSender.cpp | 31 ++++ .../wifi-display/source/PlaybackSession.cpp | 196 ++++++++++++++++++++- .../wifi-display/source/PlaybackSession.h | 18 +- .../wifi-display/source/TSPacketizer.cpp | 2 +- .../wifi-display/source/WifiDisplaySource.cpp | 36 +++- .../wifi-display/source/WifiDisplaySource.h | 8 +- media/libstagefright/wifi-display/wfd.cpp | 62 +++++-- 7 files changed, 318 insertions(+), 35 deletions(-) diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp index e1e957a..a41f81b 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -256,6 +256,37 @@ status_t MediaSender::queueAccessUnit( tsPackets, 33 /* packetType */, RTPSender::PACKETIZATION_TRANSPORT_STREAM); + +#if 0 + { + int64_t nowUs = ALooper::GetNowUs(); + + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + + int64_t delayMs = (nowUs - timeUs) / 1000ll; + + static const int64_t kMinDelayMs = 0; + static const int64_t kMaxDelayMs = 300; + + const char *kPattern = "########################################"; + size_t kPatternSize = strlen(kPattern); + + int n = (kPatternSize * (delayMs - kMinDelayMs)) + / (kMaxDelayMs - kMinDelayMs); + + if (n < 0) { + n = 0; + } else if ((size_t)n > kPatternSize) { + n = kPatternSize; + } + + ALOGI("[%lld]: (%4lld ms) %s\n", + timeUs / 1000, + delayMs, + kPattern + kPatternSize - n); + } +#endif } if (err != OK) { diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index 94cb2a4..a3b6542 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -39,6 +39,7 @@ #include #include #include +#include #include #include @@ -57,6 +58,8 @@ struct WifiDisplaySource::PlaybackSession::Track : public AHandler { const sp &mediaPuller, const sp &converter); + Track(const sp ¬ify, const 
sp &format); + void setRepeaterSource(const sp &source); sp getFormat(); @@ -104,6 +107,7 @@ private: sp mCodecLooper; sp mMediaPuller; sp mConverter; + sp mFormat; bool mStarted; ssize_t mMediaSenderTrackIndex; bool mIsAudio; @@ -133,6 +137,15 @@ WifiDisplaySource::PlaybackSession::Track::Track( mLastOutputBufferQueuedTimeUs(-1ll) { } +WifiDisplaySource::PlaybackSession::Track::Track( + const sp ¬ify, const sp &format) + : mNotify(notify), + mFormat(format), + mStarted(false), + mIsAudio(IsAudioFormat(format)), + mLastOutputBufferQueuedTimeUs(-1ll) { +} + WifiDisplaySource::PlaybackSession::Track::~Track() { CHECK(!mStarted); } @@ -147,7 +160,7 @@ bool WifiDisplaySource::PlaybackSession::Track::IsAudioFormat( } sp WifiDisplaySource::PlaybackSession::Track::getFormat() { - return mConverter->getOutputFormat(); + return mFormat != NULL ? mFormat : mConverter->getOutputFormat(); } bool WifiDisplaySource::PlaybackSession::Track::isAudio() const { @@ -189,7 +202,9 @@ status_t WifiDisplaySource::PlaybackSession::Track::start() { void WifiDisplaySource::PlaybackSession::Track::stopAsync() { ALOGV("Track::stopAsync isAudio=%d", mIsAudio); - mConverter->shutdownAsync(); + if (mConverter != NULL) { + mConverter->shutdownAsync(); + } sp msg = new AMessage(kWhatMediaPullerStopped, id()); @@ -201,6 +216,7 @@ void WifiDisplaySource::PlaybackSession::Track::stopAsync() { mMediaPuller->stopAsync(msg); } else { + mStarted = false; msg->post(); } } @@ -324,7 +340,8 @@ WifiDisplaySource::PlaybackSession::PlaybackSession( const sp &netSession, const sp ¬ify, const in_addr &interfaceAddr, - const sp &hdcp) + const sp &hdcp, + const char *path) : mNetSession(netSession), mNotify(notify), mInterfaceAddr(interfaceAddr), @@ -334,7 +351,14 @@ WifiDisplaySource::PlaybackSession::PlaybackSession( mPaused(false), mLastLifesignUs(), mVideoTrackIndex(-1), - mPrevTimeUs(-1ll) { + mPrevTimeUs(-1ll), + mPullExtractorPending(false), + mPullExtractorGeneration(0), + mFirstSampleTimeRealUs(-1ll), + mFirstSampleTimeUs(-1ll) { + if (path != NULL) { + mMediaPath.setTo(path); + } } status_t WifiDisplaySource::PlaybackSession::init( @@ -405,10 +429,6 @@ status_t WifiDisplaySource::PlaybackSession::play() { return OK; } -status_t WifiDisplaySource::PlaybackSession::finishPlay() { - return OK; -} - status_t WifiDisplaySource::PlaybackSession::onMediaSenderInitialized() { for (size_t i = 0; i < mTracks.size(); ++i) { CHECK_EQ((status_t)OK, mTracks.editValueAt(i)->start()); @@ -523,7 +543,10 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( const sp &videoTrack = mTracks.valueFor(mVideoTrackIndex); - videoTrack->converter()->dropAFrame(); + sp converter = videoTrack->converter(); + if (converter != NULL) { + converter->dropAFrame(); + } } } else { TRESPASS(); @@ -564,6 +587,12 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( case kWhatPause: { + if (mExtractor != NULL) { + ++mPullExtractorGeneration; + mFirstSampleTimeRealUs = -1ll; + mFirstSampleTimeUs = -1ll; + } + if (mPaused) { break; } @@ -578,6 +607,10 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( case kWhatResume: { + if (mExtractor != NULL) { + schedulePullExtractor(); + } + if (!mPaused) { break; } @@ -590,11 +623,152 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( break; } + case kWhatPullExtractorSample: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mPullExtractorGeneration) { + break; + } + + mPullExtractorPending = false; + + onPullExtractor(); + break; + } + 
default: TRESPASS(); } } +status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer( + bool enableAudio, bool enableVideo) { + DataSource::RegisterDefaultSniffers(); + + mExtractor = new NuMediaExtractor; + + status_t err = mExtractor->setDataSource(mMediaPath.c_str()); + + if (err != OK) { + return err; + } + + size_t n = mExtractor->countTracks(); + bool haveAudio = false; + bool haveVideo = false; + for (size_t i = 0; i < n; ++i) { + sp format; + err = mExtractor->getTrackFormat(i, &format); + + if (err != OK) { + continue; + } + + AString mime; + CHECK(format->findString("mime", &mime)); + + bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6); + bool isVideo = !strncasecmp(mime.c_str(), "video/", 6); + + if (isAudio && enableAudio && !haveAudio) { + haveAudio = true; + } else if (isVideo && enableVideo && !haveVideo) { + haveVideo = true; + } else { + continue; + } + + err = mExtractor->selectTrack(i); + + size_t trackIndex = mTracks.size(); + + sp notify = new AMessage(kWhatTrackNotify, id()); + notify->setSize("trackIndex", trackIndex); + + sp track = new Track(notify, format); + looper()->registerHandler(track); + + mTracks.add(trackIndex, track); + + mExtractorTrackToInternalTrack.add(i, trackIndex); + + if (isVideo) { + mVideoTrackIndex = trackIndex; + } + + uint32_t flags = MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS; + + ssize_t mediaSenderTrackIndex = + mMediaSender->addTrack(format, flags); + CHECK_GE(mediaSenderTrackIndex, 0); + + track->setMediaSenderTrackIndex(mediaSenderTrackIndex); + + if ((haveAudio || !enableAudio) && (haveVideo || !enableVideo)) { + break; + } + } + + return OK; +} + +void WifiDisplaySource::PlaybackSession::schedulePullExtractor() { + if (mPullExtractorPending) { + return; + } + + int64_t sampleTimeUs; + status_t err = mExtractor->getSampleTime(&sampleTimeUs); + + int64_t nowUs = ALooper::GetNowUs(); + + if (mFirstSampleTimeRealUs < 0ll) { + mFirstSampleTimeRealUs = nowUs; + mFirstSampleTimeUs = sampleTimeUs; + } + + int64_t whenUs = sampleTimeUs - mFirstSampleTimeUs + mFirstSampleTimeRealUs; + + sp msg = new AMessage(kWhatPullExtractorSample, id()); + msg->setInt32("generation", mPullExtractorGeneration); + msg->post(whenUs - nowUs); + + mPullExtractorPending = true; +} + +void WifiDisplaySource::PlaybackSession::onPullExtractor() { + sp accessUnit = new ABuffer(1024 * 1024); + status_t err = mExtractor->readSampleData(accessUnit); + if (err != OK) { + // EOS. 
+ return; + } + + int64_t timeUs; + CHECK_EQ((status_t)OK, mExtractor->getSampleTime(&timeUs)); + + accessUnit->meta()->setInt64( + "timeUs", mFirstSampleTimeRealUs + timeUs - mFirstSampleTimeUs); + + size_t trackIndex; + CHECK_EQ((status_t)OK, mExtractor->getSampleTrackIndex(&trackIndex)); + + sp msg = new AMessage(kWhatConverterNotify, id()); + + msg->setSize( + "trackIndex", mExtractorTrackToInternalTrack.valueFor(trackIndex)); + + msg->setInt32("what", Converter::kWhatAccessUnit); + msg->setBuffer("accessUnit", accessUnit); + msg->post(); + + mExtractor->advance(); + + schedulePullExtractor(); +} + status_t WifiDisplaySource::PlaybackSession::setupPacketizer( bool enableAudio, bool usePCMAudio, @@ -603,6 +777,10 @@ status_t WifiDisplaySource::PlaybackSession::setupPacketizer( size_t videoResolutionIndex) { CHECK(enableAudio || enableVideo); + if (!mMediaPath.empty()) { + return setupMediaPacketizer(enableAudio, enableVideo); + } + if (enableVideo) { status_t err = addVideoSource( videoResolutionType, videoResolutionIndex); diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h index cd6da85..da207e2 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.h +++ b/media/libstagefright/wifi-display/source/PlaybackSession.h @@ -31,6 +31,7 @@ struct IGraphicBufferProducer; struct MediaPuller; struct MediaSource; struct MediaSender; +struct NuMediaExtractor; // Encapsulates the state of an RTP/RTCP session in the context of wifi // display. @@ -39,7 +40,8 @@ struct WifiDisplaySource::PlaybackSession : public AHandler { const sp &netSession, const sp ¬ify, const struct in_addr &interfaceAddr, - const sp &hdcp); + const sp &hdcp, + const char *path = NULL); status_t init( const char *clientIP, int32_t clientRtp, int32_t clientRtcp, @@ -87,12 +89,14 @@ private: kWhatPause, kWhatResume, kWhatMediaSenderNotify, + kWhatPullExtractorSample, }; sp mNetSession; sp mNotify; in_addr mInterfaceAddr; sp mHDCP; + AString mMediaPath; sp mMediaSender; int32_t mLocalRTPPort; @@ -109,6 +113,15 @@ private: int64_t mPrevTimeUs; + sp mExtractor; + KeyedVector mExtractorTrackToInternalTrack; + bool mPullExtractorPending; + int32_t mPullExtractorGeneration; + int64_t mFirstSampleTimeRealUs; + int64_t mFirstSampleTimeUs; + + status_t setupMediaPacketizer(bool enableAudio, bool enableVideo); + status_t setupPacketizer( bool enableAudio, bool usePCMAudio, @@ -133,6 +146,9 @@ private: void notifySessionDead(); + void schedulePullExtractor(); + void onPullExtractor(); + DISALLOW_EVIL_CONSTRUCTORS(PlaybackSession); }; diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp index 53b7187..d993764 100644 --- a/media/libstagefright/wifi-display/source/TSPacketizer.cpp +++ b/media/libstagefright/wifi-display/source/TSPacketizer.cpp @@ -261,7 +261,7 @@ void TSPacketizer::Track::finalize() { data[0] = 40; // descriptor_tag data[1] = 4; // descriptor_length - CHECK_EQ(mCSD.size(), 1u); + CHECK_GE(mCSD.size(), 1u); const sp &sps = mCSD.itemAt(0); CHECK(!memcmp("\x00\x00\x00\x01", sps->data(), 4)); CHECK_GE(sps->size(), 7u); diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index c8798c6..5167cb3 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -44,7 +44,8 @@ namespace android { 
WifiDisplaySource::WifiDisplaySource( const sp &netSession, - const sp &client) + const sp &client, + const char *path) : mState(INITIALIZED), mNetSession(netSession), mClient(client), @@ -59,7 +60,12 @@ WifiDisplaySource::WifiDisplaySource( mIsHDCP2_0(false), mHDCPPort(0), mHDCPInitializationComplete(false), - mSetupTriggerDeferred(false) { + mSetupTriggerDeferred(false), + mPlaybackSessionEstablished(false) { + if (path != NULL) { + mMediaPath.setTo(path); + } + mSupportedSourceVideoFormats.disableAll(); mSupportedSourceVideoFormats.setNativeResolution( @@ -389,6 +395,8 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { mClient->onDisplayError( IRemoteDisplayClient::kDisplayErrorUnknown); } else if (what == PlaybackSession::kWhatSessionEstablished) { + mPlaybackSessionEstablished = true; + if (mClient != NULL) { if (!mSinkSupportsVideo) { mClient->onDisplayConnected( @@ -419,6 +427,8 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { } } + finishPlay(); + if (mState == ABOUT_TO_PLAY) { mState = PLAYING; } @@ -1222,7 +1232,7 @@ status_t WifiDisplaySource::onSetupRequest( sp playbackSession = new PlaybackSession( - mNetSession, notify, mInterfaceAddr, mHDCP); + mNetSession, notify, mInterfaceAddr, mHDCP, mMediaPath.c_str()); looper()->registerHandler(playbackSession); @@ -1332,16 +1342,18 @@ status_t WifiDisplaySource::onPlayRequest( } ALOGI("Received PLAY request."); - - status_t err = playbackSession->play(); - CHECK_EQ(err, (status_t)OK); + if (mPlaybackSessionEstablished) { + finishPlay(); + } else { + ALOGI("deferring PLAY request until session established."); + } AString response = "RTSP/1.0 200 OK\r\n"; AppendCommonResponse(&response, cseq, playbackSessionID); response.append("Range: npt=now-\r\n"); response.append("\r\n"); - err = mNetSession->sendRequest(sessionID, response.c_str()); + status_t err = mNetSession->sendRequest(sessionID, response.c_str()); if (err != OK) { return err; @@ -1352,14 +1364,20 @@ status_t WifiDisplaySource::onPlayRequest( return OK; } - playbackSession->finishPlay(); - CHECK_EQ(mState, AWAITING_CLIENT_PLAY); mState = ABOUT_TO_PLAY; return OK; } +void WifiDisplaySource::finishPlay() { + const sp &playbackSession = + mClientInfo.mPlaybackSession; + + status_t err = playbackSession->play(); + CHECK_EQ(err, (status_t)OK); +} + status_t WifiDisplaySource::onPauseRequest( int32_t sessionID, int32_t cseq, diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index 9e72682..3a1b0f9 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ -39,7 +39,8 @@ struct WifiDisplaySource : public AHandler { WifiDisplaySource( const sp &netSession, - const sp &client); + const sp &client, + const char *path = NULL); status_t start(const char *iface); status_t stop(); @@ -116,6 +117,7 @@ private: VideoFormats mSupportedSourceVideoFormats; sp mNetSession; sp mClient; + AString mMediaPath; sp mTimeSyncer; struct in_addr mInterfaceAddr; int32_t mSessionID; @@ -161,6 +163,8 @@ private: bool mHDCPInitializationComplete; bool mSetupTriggerDeferred; + bool mPlaybackSessionEstablished; + status_t makeHDCP(); // <<<< HDCP specific section @@ -257,6 +261,8 @@ private: void finishStopAfterDisconnectingClient(); void finishStop2(); + void finishPlay(); + DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySource); }; diff --git a/media/libstagefright/wifi-display/wfd.cpp 
b/media/libstagefright/wifi-display/wfd.cpp index 0b18484..3a7a6e2 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -42,6 +42,7 @@ static void usage(const char *me) { " %s -c host[:port]\tconnect to wifi source\n" " -u uri \tconnect to an rtsp uri\n" " -l ip[:port] \tlisten on the specified port " + " -f(ilename) \tstream media " "(create a sink)\n", me); } @@ -93,22 +94,24 @@ void RemoteDisplayClient::onDisplayConnected( ALOGI("onDisplayConnected width=%u, height=%u, flags = 0x%08x", width, height, flags); - mSurfaceTexture = bufferProducer; - mDisplayBinder = mComposerClient->createDisplay( - String8("foo"), false /* secure */); + if (bufferProducer != NULL) { + mSurfaceTexture = bufferProducer; + mDisplayBinder = mComposerClient->createDisplay( + String8("foo"), false /* secure */); - SurfaceComposerClient::openGlobalTransaction(); - mComposerClient->setDisplaySurface(mDisplayBinder, mSurfaceTexture); + SurfaceComposerClient::openGlobalTransaction(); + mComposerClient->setDisplaySurface(mDisplayBinder, mSurfaceTexture); - Rect layerStackRect(1280, 720); // XXX fix this. - Rect displayRect(1280, 720); + Rect layerStackRect(1280, 720); // XXX fix this. + Rect displayRect(1280, 720); - mComposerClient->setDisplayProjection( - mDisplayBinder, 0 /* 0 degree rotation */, - layerStackRect, - displayRect); + mComposerClient->setDisplayProjection( + mDisplayBinder, 0 /* 0 degree rotation */, + layerStackRect, + displayRect); - SurfaceComposerClient::closeGlobalTransaction(); + SurfaceComposerClient::closeGlobalTransaction(); + } } void RemoteDisplayClient::onDisplayDisconnected() { @@ -181,6 +184,24 @@ static void createSource(const AString &addr, int32_t port) { enableAudioSubmix(false /* enable */); } +static void createFileSource( + const AString &addr, int32_t port, const char *path) { + sp session = new ANetworkSession; + session->start(); + + sp looper = new ALooper; + looper->start(); + + sp client = new RemoteDisplayClient; + sp source = new WifiDisplaySource(session, client, path); + looper->registerHandler(source); + + AString iface = StringPrintf("%s:%d", addr.c_str(), port); + CHECK_EQ((status_t)OK, source->start(iface.c_str())); + + client->waitUntilDone(); +} + } // namespace android int main(int argc, char **argv) { @@ -197,8 +218,10 @@ int main(int argc, char **argv) { AString listenOnAddr; int32_t listenOnPort = -1; + AString path; + int res; - while ((res = getopt(argc, argv, "hc:l:u:")) >= 0) { + while ((res = getopt(argc, argv, "hc:l:u:f:")) >= 0) { switch (res) { case 'c': { @@ -228,6 +251,12 @@ int main(int argc, char **argv) { break; } + case 'f': + { + path = optarg; + break; + } + case 'l': { const char *colonPos = strrchr(optarg, ':'); @@ -266,7 +295,12 @@ int main(int argc, char **argv) { } if (listenOnPort >= 0) { - createSource(listenOnAddr, listenOnPort); + if (path.empty()) { + createSource(listenOnAddr, listenOnPort); + } else { + createFileSource(listenOnAddr, listenOnPort, path.c_str()); + } + exit(0); } -- cgit v1.1 From 475391897f0886aae1833d5e8b2e5aea47cc36bb Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Mon, 18 Mar 2013 11:10:48 -0700 Subject: ProCameraTests: Minor add grey/frame count debug-only print outs Change-Id: I0f7b7f3c083a644e9bb0c2302244a5ad434e96a8 --- camera/tests/ProCameraTests.cpp | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index 71813ae..ecc0854 100644 --- a/camera/tests/ProCameraTests.cpp +++ 
b/camera/tests/ProCameraTests.cpp @@ -1015,6 +1015,9 @@ TEST_F(ProCameraTest, WaitForDualStreamBuffer) { ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/2, /*requests*/REQUEST_COUNT)); + int depthFrames = 0; + int greyFrames = 0; + // Consume two frames simultaneously. Unsynchronized by timestamps. for (int i = 0; i < REQUEST_COUNT; ++i) { @@ -1041,6 +1044,8 @@ TEST_F(ProCameraTest, WaitForDualStreamBuffer) { EXPECT_OK(depthConsumer->unlockBuffer(depthBuffer)); + depthFrames++; + /** Consume Greyscale frames if there are any. * There may not be since it runs at half FPS */ @@ -1053,9 +1058,14 @@ TEST_F(ProCameraTest, WaitForDualStreamBuffer) { ", timestamp = " << greyBuffer.timestamp << std::endl; EXPECT_OK(consumer->unlockBuffer(greyBuffer)); + + greyFrames++; } } + dout << "Done, summary: depth frames " << std::dec << depthFrames + << ", grey frames " << std::dec << greyFrames << std::endl; + // Done: clean up EXPECT_OK(mCamera->deleteStream(streamId)); EXPECT_OK(mCamera->exclusiveUnlock()); -- cgit v1.1 From a239dd722e760fe4fd7379b454d7722e1f312928 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 18 Mar 2013 15:11:40 -0700 Subject: Change ANetworkSession implementation to optionally attach timestamps to fragments of data to be transferred and to log statistics when data is finally submitted to the POSIX layer. Change-Id: Icbfcac203cdc5c9eac1634e84d34bb380b316a01 --- .../wifi-display/ANetworkSession.cpp | 150 +++++++++++++++------ .../libstagefright/wifi-display/ANetworkSession.h | 3 +- media/libstagefright/wifi-display/MediaSender.cpp | 4 + .../libstagefright/wifi-display/rtp/RTPSender.cpp | 21 ++- media/libstagefright/wifi-display/rtp/RTPSender.h | 4 +- media/libstagefright/wifi-display/wfd.cpp | 2 + 6 files changed, 135 insertions(+), 49 deletions(-) diff --git a/media/libstagefright/wifi-display/ANetworkSession.cpp b/media/libstagefright/wifi-display/ANetworkSession.cpp index 23bb04e..df20ae2 100644 --- a/media/libstagefright/wifi-display/ANetworkSession.cpp +++ b/media/libstagefright/wifi-display/ANetworkSession.cpp @@ -81,7 +81,8 @@ struct ANetworkSession::Session : public RefBase { status_t readMore(); status_t writeMore(); - status_t sendRequest(const void *data, ssize_t size); + status_t sendRequest( + const void *data, ssize_t size, bool timeValid, int64_t timeUs); void setIsRTSPConnection(bool yesno); @@ -89,6 +90,15 @@ protected: virtual ~Session(); private: + enum { + FRAGMENT_FLAG_TIME_VALID = 1, + }; + struct Fragment { + uint32_t mFlags; + int64_t mTimeUs; + sp mBuffer; + }; + int32_t mSessionID; State mState; bool mIsRTSPConnection; @@ -96,11 +106,7 @@ private: sp mNotify; bool mSawReceiveFailure, mSawSendFailure; - // for TCP / stream data - AString mOutBuffer; - - // for UDP / datagrams - List > mOutDatagrams; + List mOutFragments; AString mInBuffer; @@ -109,6 +115,8 @@ private: void notifyError(bool send, status_t err, const char *detail); void notify(NotificationReason reason); + void dumpFragmentStats(const Fragment &frag); + DISALLOW_EVIL_CONSTRUCTORS(Session); }; //////////////////////////////////////////////////////////////////////////////// @@ -221,8 +229,8 @@ bool ANetworkSession::Session::wantsToRead() { bool ANetworkSession::Session::wantsToWrite() { return !mSawSendFailure && (mState == CONNECTING - || (mState == CONNECTED && !mOutBuffer.empty()) - || (mState == DATAGRAM && !mOutDatagrams.empty())); + || (mState == CONNECTED && !mOutFragments.empty()) + || (mState == DATAGRAM && !mOutFragments.empty())); } status_t 
ANetworkSession::Session::readMore() { @@ -407,13 +415,41 @@ status_t ANetworkSession::Session::readMore() { return err; } +void ANetworkSession::Session::dumpFragmentStats(const Fragment &frag) { +#if 0 + int64_t nowUs = ALooper::GetNowUs(); + int64_t delayMs = (nowUs - frag.mTimeUs) / 1000ll; + + static const int64_t kMinDelayMs = 0; + static const int64_t kMaxDelayMs = 300; + + const char *kPattern = "########################################"; + size_t kPatternSize = strlen(kPattern); + + int n = (kPatternSize * (delayMs - kMinDelayMs)) + / (kMaxDelayMs - kMinDelayMs); + + if (n < 0) { + n = 0; + } else if ((size_t)n > kPatternSize) { + n = kPatternSize; + } + + ALOGI("[%lld]: (%4lld ms) %s\n", + frag.mTimeUs / 1000, + delayMs, + kPattern + kPatternSize - n); +#endif +} + status_t ANetworkSession::Session::writeMore() { if (mState == DATAGRAM) { - CHECK(!mOutDatagrams.empty()); + CHECK(!mOutFragments.empty()); status_t err; do { - const sp &datagram = *mOutDatagrams.begin(); + const Fragment &frag = *mOutFragments.begin(); + const sp &datagram = frag.mBuffer; uint8_t *data = datagram->data(); if (data[0] == 0x80 && (data[1] & 0x7f) == 33) { @@ -441,17 +477,21 @@ status_t ANetworkSession::Session::writeMore() { err = OK; if (n > 0) { - mOutDatagrams.erase(mOutDatagrams.begin()); + if (frag.mFlags & FRAGMENT_FLAG_TIME_VALID) { + dumpFragmentStats(frag); + } + + mOutFragments.erase(mOutFragments.begin()); } else if (n < 0) { err = -errno; } else if (n == 0) { err = -ECONNRESET; } - } while (err == OK && !mOutDatagrams.empty()); + } while (err == OK && !mOutFragments.empty()); if (err == -EAGAIN) { - if (!mOutDatagrams.empty()) { - ALOGI("%d datagrams remain queued.", mOutDatagrams.size()); + if (!mOutFragments.empty()) { + ALOGI("%d datagrams remain queued.", mOutFragments.size()); } err = OK; } @@ -484,23 +524,37 @@ status_t ANetworkSession::Session::writeMore() { } CHECK_EQ(mState, CONNECTED); - CHECK(!mOutBuffer.empty()); + CHECK(!mOutFragments.empty()); ssize_t n; - do { - n = send(mSocket, mOutBuffer.c_str(), mOutBuffer.size(), 0); - } while (n < 0 && errno == EINTR); + while (!mOutFragments.empty()) { + const Fragment &frag = *mOutFragments.begin(); - status_t err = OK; + do { + n = send(mSocket, frag.mBuffer->data(), frag.mBuffer->size(), 0); + } while (n < 0 && errno == EINTR); - if (n > 0) { -#if 0 - ALOGI("out:"); - hexdump(mOutBuffer.c_str(), n); -#endif + if (n <= 0) { + break; + } - mOutBuffer.erase(0, n); - } else if (n < 0) { + frag.mBuffer->setRange( + frag.mBuffer->offset() + n, frag.mBuffer->size() - n); + + if (frag.mBuffer->size() > 0) { + break; + } + + if (frag.mFlags & FRAGMENT_FLAG_TIME_VALID) { + dumpFragmentStats(frag); + } + + mOutFragments.erase(mOutFragments.begin()); + } + + status_t err = OK; + + if (n < 0) { err = -errno; } else if (n == 0) { err = -ECONNRESET; @@ -537,32 +591,43 @@ status_t ANetworkSession::Session::writeMore() { return err; } -status_t ANetworkSession::Session::sendRequest(const void *data, ssize_t size) { +status_t ANetworkSession::Session::sendRequest( + const void *data, ssize_t size, bool timeValid, int64_t timeUs) { CHECK(mState == CONNECTED || mState == DATAGRAM); - if (mState == DATAGRAM) { - CHECK_GE(size, 0); - - sp datagram = new ABuffer(size); - memcpy(datagram->data(), data, size); + if (size < 0) { + size = strlen((const char *)data); + } - mOutDatagrams.push_back(datagram); + if (size == 0) { return OK; } + sp buffer; + if (mState == CONNECTED && !mIsRTSPConnection) { CHECK_LE(size, 65535); - uint8_t prefix[2]; - prefix[0] = 
size >> 8; - prefix[1] = size & 0xff; + buffer = new ABuffer(size + 2); + buffer->data()[0] = size >> 8; + buffer->data()[1] = size & 0xff; + memcpy(buffer->data() + 2, data, size); + } else { + buffer = new ABuffer(size); + memcpy(buffer->data(), data, size); + } + + Fragment frag; - mOutBuffer.append((const char *)prefix, sizeof(prefix)); + frag.mFlags = 0; + if (timeValid) { + frag.mFlags = FRAGMENT_FLAG_TIME_VALID; + frag.mTimeUs = timeUs; } - mOutBuffer.append( - (const char *)data, - (size >= 0) ? size : strlen((const char *)data)); + frag.mBuffer = buffer; + + mOutFragments.push_back(frag); return OK; } @@ -985,7 +1050,8 @@ status_t ANetworkSession::connectUDPSession( } status_t ANetworkSession::sendRequest( - int32_t sessionID, const void *data, ssize_t size) { + int32_t sessionID, const void *data, ssize_t size, + bool timeValid, int64_t timeUs) { Mutex::Autolock autoLock(mLock); ssize_t index = mSessions.indexOfKey(sessionID); @@ -996,7 +1062,7 @@ status_t ANetworkSession::sendRequest( const sp session = mSessions.valueAt(index); - status_t err = session->sendRequest(data, size); + status_t err = session->sendRequest(data, size, timeValid, timeUs); interrupt(); diff --git a/media/libstagefright/wifi-display/ANetworkSession.h b/media/libstagefright/wifi-display/ANetworkSession.h index 0d7cbd6..7c62b29 100644 --- a/media/libstagefright/wifi-display/ANetworkSession.h +++ b/media/libstagefright/wifi-display/ANetworkSession.h @@ -74,7 +74,8 @@ struct ANetworkSession : public RefBase { status_t destroySession(int32_t sessionID); status_t sendRequest( - int32_t sessionID, const void *data, ssize_t size = -1); + int32_t sessionID, const void *data, ssize_t size = -1, + bool timeValid = false, int64_t timeUs = -1ll); enum NotificationReason { kWhatError, diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp index a41f81b..d13a92e 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -252,6 +252,10 @@ status_t MediaSender::queueAccessUnit( fwrite(tsPackets->data(), 1, tsPackets->size(), mLogFile); } + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + tsPackets->meta()->setInt64("timeUs", timeUs); + err = mTSSender->queueBuffer( tsPackets, 33 /* packetType */, diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp index 8cd712d..c8e265c 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -194,6 +194,9 @@ status_t RTPSender::queueTSPackets( const sp &tsPackets, uint8_t packetType) { CHECK_EQ(0, tsPackets->size() % 188); + int64_t timeUs; + CHECK(tsPackets->meta()->findInt64("timeUs", &timeUs)); + const size_t numTSPackets = tsPackets->size() / 188; size_t srcOffset = 0; @@ -232,13 +235,19 @@ status_t RTPSender::queueTSPackets( memcpy(&rtp[12], tsPackets->data() + srcOffset, numTSPackets * 188); udpPacket->setRange(0, 12 + numTSPackets * 188); - status_t err = sendRTPPacket(udpPacket, true /* storeInHistory */); + + srcOffset += numTSPackets * 188; + bool isLastPacket = (srcOffset == tsPackets->size()); + + status_t err = sendRTPPacket( + udpPacket, + true /* storeInHistory */, + isLastPacket /* timeValid */, + timeUs); if (err != OK) { return err; } - - srcOffset += numTSPackets * 188; } return OK; @@ -395,11 +404,13 @@ status_t RTPSender::queueAVCBuffer( } status_t RTPSender::sendRTPPacket( - const sp 
&buffer, bool storeInHistory) { + const sp &buffer, bool storeInHistory, + bool timeValid, int64_t timeUs) { CHECK(mRTPConnected); status_t err = mNetSession->sendRequest( - mRTPSessionID, buffer->data(), buffer->size()); + mRTPSessionID, buffer->data(), buffer->size(), + timeValid, timeUs); if (err != OK) { return err; diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h index 83c6223..90b1796 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.h +++ b/media/libstagefright/wifi-display/rtp/RTPSender.h @@ -94,7 +94,9 @@ private: status_t queueTSPackets(const sp &tsPackets, uint8_t packetType); status_t queueAVCBuffer(const sp &accessUnit, uint8_t packetType); - status_t sendRTPPacket(const sp &packet, bool storeInHistory); + status_t sendRTPPacket( + const sp &packet, bool storeInHistory, + bool timeValid = false, int64_t timeUs = -1ll); void onNetNotify(bool isRTP, const sp &msg); diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index 3a7a6e2..4f7dcc8 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -200,6 +200,8 @@ static void createFileSource( CHECK_EQ((status_t)OK, source->start(iface.c_str())); client->waitUntilDone(); + + source->stop(); } } // namespace android -- cgit v1.1 From 0003b9b56e77764c77fd4e4e1a5d6e44a55e5b8a Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Tue, 19 Mar 2013 09:57:29 -0700 Subject: Fix valgrind error The volume member of the BundledEffectContext class was not being initialized, resulting in uninitialized data being used for calculations and control flow. Change-Id: I84bf9fd478e5d0479e781323b21c7c03dea958c5 --- media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp index 94b9acf..54f8d9e 100644 --- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp +++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp @@ -224,6 +224,7 @@ extern "C" int EffectCreate(const effect_uuid_t *uuid, pContext->pBundledContext->NumberEffectsEnabled = 0; pContext->pBundledContext->NumberEffectsCalled = 0; pContext->pBundledContext->firstVolume = LVM_TRUE; + pContext->pBundledContext->volume = 0; #ifdef LVM_PCM char fileName[256]; -- cgit v1.1 From 0e6858d6aea12fc585a8c7d217c1271878655081 Mon Sep 17 00:00:00 2001 From: Dan Morrill Date: Thu, 7 Mar 2013 14:40:40 -0800 Subject: Turn off debug tags in stagefright modules. LOCAL_MODULE_TAGS := debug causes the module to be included in every userdebug build, regardless of whether it's specified as a dep by the device config. This CL switches them all to optional (i.e. default behavior) so that we can do (userdebug) device builds without pulling these in. 
Change-Id: I4b7b65afea61865dd38b3af55550fb8f10edf66d --- cmds/stagefright/Android.mk | 16 +++++++++------- media/libstagefright/codecs/aacenc/SampleCode/Android.mk | 2 +- .../libstagefright/codecs/amrwbenc/SampleCode/Android.mk | 2 +- media/libstagefright/codecs/on2/h264dec/Android.mk | 2 +- media/libstagefright/id3/Android.mk | 2 +- media/libstagefright/rtsp/Android.mk | 2 +- media/libstagefright/wifi-display/Android.mk | 6 +++--- 7 files changed, 17 insertions(+), 15 deletions(-) diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk index d583e65..908d449 100644 --- a/cmds/stagefright/Android.mk +++ b/cmds/stagefright/Android.mk @@ -19,7 +19,9 @@ LOCAL_C_INCLUDES:= \ LOCAL_CFLAGS += -Wno-multichar +ifneq (true,$(ANDROID_BUILD_EMBEDDED)) LOCAL_MODULE_TAGS := debug +endif LOCAL_MODULE:= stagefright @@ -42,7 +44,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_CFLAGS += -Wno-multichar -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional LOCAL_MODULE:= record @@ -65,7 +67,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_CFLAGS += -Wno-multichar -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional LOCAL_MODULE:= recordvideo @@ -89,7 +91,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_CFLAGS += -Wno-multichar -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional LOCAL_MODULE:= audioloop @@ -112,7 +114,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_CFLAGS += -Wno-multichar -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional LOCAL_MODULE:= stream @@ -135,7 +137,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_CFLAGS += -Wno-multichar -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional LOCAL_MODULE:= sf2 @@ -159,7 +161,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_CFLAGS += -Wno-multichar -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional LOCAL_MODULE:= codec @@ -182,7 +184,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_CFLAGS += -Wno-multichar -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional LOCAL_MODULE:= muxer diff --git a/media/libstagefright/codecs/aacenc/SampleCode/Android.mk b/media/libstagefright/codecs/aacenc/SampleCode/Android.mk index 01016e7..d06dcf6 100644 --- a/media/libstagefright/codecs/aacenc/SampleCode/Android.mk +++ b/media/libstagefright/codecs/aacenc/SampleCode/Android.mk @@ -5,7 +5,7 @@ LOCAL_SRC_FILES := \ AAC_E_SAMPLES.c \ ../../common/cmnMemory.c -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional LOCAL_MODULE := AACEncTest diff --git a/media/libstagefright/codecs/amrwbenc/SampleCode/Android.mk b/media/libstagefright/codecs/amrwbenc/SampleCode/Android.mk index db34d08..c203f77 100644 --- a/media/libstagefright/codecs/amrwbenc/SampleCode/Android.mk +++ b/media/libstagefright/codecs/amrwbenc/SampleCode/Android.mk @@ -5,7 +5,7 @@ LOCAL_SRC_FILES := \ AMRWB_E_SAMPLE.c \ ../../common/cmnMemory.c -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional LOCAL_MODULE := AMRWBEncTest LOCAL_ARM_MODE := arm diff --git a/media/libstagefright/codecs/on2/h264dec/Android.mk b/media/libstagefright/codecs/on2/h264dec/Android.mk index 772fd60..0a273e2 100644 --- a/media/libstagefright/codecs/on2/h264dec/Android.mk +++ b/media/libstagefright/codecs/on2/h264dec/Android.mk @@ -119,7 +119,7 @@ LOCAL_C_INCLUDES := $(LOCAL_PATH)/inc LOCAL_SHARED_LIBRARIES := libstagefright_soft_h264dec -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional LOCAL_MODULE := decoder diff --git a/media/libstagefright/id3/Android.mk b/media/libstagefright/id3/Android.mk index ff35d4a..995ab83 100644 --- a/media/libstagefright/id3/Android.mk +++ b/media/libstagefright/id3/Android.mk @@ -21,7 +21,7 @@ LOCAL_SHARED_LIBRARIES := \ 
LOCAL_STATIC_LIBRARIES := \ libstagefright_id3 -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional LOCAL_MODULE := testid3 diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk index 9e2724d..e77c69c 100644 --- a/media/libstagefright/rtsp/Android.mk +++ b/media/libstagefright/rtsp/Android.mk @@ -51,7 +51,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_CFLAGS += -Wno-multichar -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional LOCAL_MODULE:= rtp_test diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index f81929c..137ebe3 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -63,7 +63,7 @@ LOCAL_SHARED_LIBRARIES:= \ LOCAL_MODULE:= wfd -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional include $(BUILD_EXECUTABLE) @@ -85,7 +85,7 @@ LOCAL_SHARED_LIBRARIES:= \ LOCAL_MODULE:= udptest -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional include $(BUILD_EXECUTABLE) @@ -107,6 +107,6 @@ LOCAL_SHARED_LIBRARIES:= \ LOCAL_MODULE:= rtptest -LOCAL_MODULE_TAGS := debug +LOCAL_MODULE_TAGS := optional include $(BUILD_EXECUTABLE) -- cgit v1.1 From caf3a9c1447f602c658f558025b90413d1b4114d Mon Sep 17 00:00:00 2001 From: Ziv Hendel Date: Thu, 21 Mar 2013 03:25:32 +0200 Subject: commandStartFaceDetectionL returned the wrong value when face detection is not supported by the HAL. This caused the JNI function to fail since it expected a BAD_VALUE response in that case. Change-Id: I53107a3958d541c25930b81eda638d4b6a394254 Signed-off-by: Igor Murashkin --- services/camera/libcameraservice/Camera2Client.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index 056271d..d3adbdc 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -1255,7 +1255,7 @@ status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) { ALOGE("%s: Camera %d: Face detection not supported", __FUNCTION__, mCameraId); - return INVALID_OPERATION; + return BAD_VALUE; } if (l.mParameters.enableFaceDetect) return OK; -- cgit v1.1 From cc76ec941661206e3a3c53f9aff789c839a869bb Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 21 Mar 2013 11:19:00 -0700 Subject: Camera2: getSceneMode should return NULL when there are no supported scene modes Bug: 8444806 Change-Id: Ic92924ceda3de738a7971605552baf00a4fc0546 --- services/camera/libcameraservice/camera2/Parameters.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index 859e2e9..d13fe8b 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -534,6 +534,8 @@ status_t Parameters::initialize(const CameraMetadata *info) { if (!noSceneModes) { params.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, supportedSceneModes); + } else { + params.remove(CameraParameters::KEY_SCENE_MODE); } } -- cgit v1.1 From 820ebf8d452165d9a7619e2667ffa3c0b638da39 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 21 Mar 2013 11:35:48 -0700 Subject: Ensure that the payload in each TS packet is an even multiple of 16 bytes long (except for the final TS packet) as specified by HDCP. 
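The alignment rule amounts to rounding the payload of every intermediate TS packet down to the nearest multiple of 16 bytes, with only the final packet of a PES allowed to carry an unaligned remainder. A minimal C++ sketch of that arithmetic follows; the names kTSPacketSize and computePayloadSize are invented for the example and do not appear in TSPacketizer, which interleaves this logic with PES header and adaptation-field handling as shown in the diff below.

    #include <algorithm>
    #include <cstddef>

    static const size_t kTSPacketSize = 188;

    // headerSize: TS header (4 bytes) plus, for the first packet, the PES header.
    // remaining:  PES payload bytes not yet packetized.
    // align:      true when an HDCP descriptor is emitted for this track.
    static size_t computePayloadSize(size_t headerSize, size_t remaining, bool align) {
        size_t available = kTSPacketSize - headerSize;
        size_t copy = std::min(remaining, available);
        if (align && copy > 16 && copy < remaining) {
            copy -= copy % 16;  // intermediate packets must stay 16-byte aligned
        }
        // 'available - copy' bytes are filled with adaptation-field stuffing.
        return copy;
    }
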
Change-Id: I45d49d347c06f5daae310f196d9a8484be0f3ca0 related-to-bug: 7549145 --- .../wifi-display/source/TSPacketizer.cpp | 201 ++++++++++++++++----- 1 file changed, 158 insertions(+), 43 deletions(-) diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp index d993764..2c4a373 100644 --- a/media/libstagefright/wifi-display/source/TSPacketizer.cpp +++ b/media/libstagefright/wifi-display/source/TSPacketizer.cpp @@ -502,16 +502,121 @@ status_t TSPacketizer::packetize( // reserved = b1 // the first fragment of "buffer" follows + // Each transport packet (except for the last one contributing to the PES + // payload) must contain a multiple of 16 bytes of payload per HDCP spec. + bool alignPayload = + (mFlags & (EMIT_HDCP20_DESCRIPTOR | EMIT_HDCP21_DESCRIPTOR)); + + /* + a) The very first PES transport stream packet contains + + 4 bytes of TS header + ... padding + 14 bytes of static PES header + PES_private_data_len + 1 bytes (only if PES_private_data_len > 0) + numStuffingBytes bytes + + followed by the payload + + b) Subsequent PES transport stream packets contain + + 4 bytes of TS header + ... padding + + followed by the payload + */ + size_t PES_packet_length = accessUnit->size() + 8 + numStuffingBytes; if (PES_private_data_len > 0) { PES_packet_length += PES_private_data_len + 1; } - size_t numTSPackets; - if (PES_packet_length <= 178) { - numTSPackets = 1; - } else { - numTSPackets = 1 + ((PES_packet_length - 178) + 183) / 184; + size_t numTSPackets = 1; + + { + // Make sure the PES header fits into a single TS packet: + size_t PES_header_size = 14 + numStuffingBytes; + if (PES_private_data_len > 0) { + PES_header_size += PES_private_data_len + 1; + } + + CHECK_LE(PES_header_size, 188u - 4u); + + size_t sizeAvailableForPayload = 188 - 4 - PES_header_size; + size_t numBytesOfPayload = accessUnit->size(); + + if (numBytesOfPayload > sizeAvailableForPayload) { + numBytesOfPayload = sizeAvailableForPayload; + + if (alignPayload && numBytesOfPayload > 16) { + numBytesOfPayload -= (numBytesOfPayload % 16); + } + } + + // size_t numPaddingBytes = sizeAvailableForPayload - numBytesOfPayload; + ALOGV("packet 1 contains %zd padding bytes and %zd bytes of payload", + numPaddingBytes, numBytesOfPayload); + + size_t numBytesOfPayloadRemaining = accessUnit->size() - numBytesOfPayload; + +#if 0 + // The following hopefully illustrates the logic that led to the + // more efficient computation in the #else block... + + while (numBytesOfPayloadRemaining > 0) { + size_t sizeAvailableForPayload = 188 - 4; + + size_t numBytesOfPayload = numBytesOfPayloadRemaining; + + if (numBytesOfPayload > sizeAvailableForPayload) { + numBytesOfPayload = sizeAvailableForPayload; + + if (alignPayload && numBytesOfPayload > 16) { + numBytesOfPayload -= (numBytesOfPayload % 16); + } + } + + size_t numPaddingBytes = sizeAvailableForPayload - numBytesOfPayload; + ALOGI("packet %zd contains %zd padding bytes and %zd bytes of payload", + numTSPackets + 1, numPaddingBytes, numBytesOfPayload); + + numBytesOfPayloadRemaining -= numBytesOfPayload; + ++numTSPackets; + } +#else + // This is how many bytes of payload each subsequent TS packet + // can contain at most. + sizeAvailableForPayload = 188 - 4; + size_t sizeAvailableForAlignedPayload = sizeAvailableForPayload; + if (alignPayload) { + // We're only going to use a subset of the available space + // since we need to make each fragment a multiple of 16 in size. 
+ sizeAvailableForAlignedPayload -= + (sizeAvailableForAlignedPayload % 16); + } + + size_t numFullTSPackets = + numBytesOfPayloadRemaining / sizeAvailableForAlignedPayload; + + numTSPackets += numFullTSPackets; + + numBytesOfPayloadRemaining -= + numFullTSPackets * sizeAvailableForAlignedPayload; + + // numBytesOfPayloadRemaining < sizeAvailableForAlignedPayload + if (numFullTSPackets == 0 && numBytesOfPayloadRemaining > 0) { + // There wasn't enough payload left to form a full aligned payload, + // the last packet doesn't have to be aligned. + ++numTSPackets; + } else if (numFullTSPackets > 0 + && numBytesOfPayloadRemaining + + sizeAvailableForAlignedPayload > sizeAvailableForPayload) { + // The last packet emitted had a full aligned payload and together + // with the bytes remaining does exceed the unaligned payload + // size, so we need another packet. + ++numTSPackets; + } +#endif } if (flags & EMIT_PAT_AND_PMT) { @@ -755,8 +860,6 @@ status_t TSPacketizer::packetize( uint64_t PTS = (timeUs * 9ll) / 100ll; - bool padding = (PES_packet_length < (188 - 10)); - if (PES_packet_length >= 65536) { // This really should only happen for video. CHECK(track->isVideo()); @@ -765,19 +868,37 @@ status_t TSPacketizer::packetize( PES_packet_length = 0; } + size_t sizeAvailableForPayload = 188 - 4 - 14 - numStuffingBytes; + if (PES_private_data_len > 0) { + sizeAvailableForPayload -= PES_private_data_len + 1; + } + + size_t copy = accessUnit->size(); + + if (copy > sizeAvailableForPayload) { + copy = sizeAvailableForPayload; + + if (alignPayload && copy > 16) { + copy -= (copy % 16); + } + } + + size_t numPaddingBytes = sizeAvailableForPayload - copy; + uint8_t *ptr = packetDataStart; *ptr++ = 0x47; *ptr++ = 0x40 | (track->PID() >> 8); *ptr++ = track->PID() & 0xff; - *ptr++ = (padding ? 0x30 : 0x10) | track->incrementContinuityCounter(); - if (padding) { - size_t paddingSize = 188 - 10 - PES_packet_length; - *ptr++ = paddingSize - 1; - if (paddingSize >= 2) { + *ptr++ = (numPaddingBytes > 0 ? 0x30 : 0x10) + | track->incrementContinuityCounter(); + + if (numPaddingBytes > 0) { + *ptr++ = numPaddingBytes - 1; + if (numPaddingBytes >= 2) { *ptr++ = 0x00; - memset(ptr, 0xff, paddingSize - 2); - ptr += paddingSize - 2; + memset(ptr, 0xff, numPaddingBytes - 2); + ptr += numPaddingBytes - 2; } } @@ -813,25 +934,14 @@ status_t TSPacketizer::packetize( *ptr++ = 0xff; } - // 18 bytes of TS/PES header leave 188 - 18 = 170 bytes for the payload - - size_t sizeLeft = packetDataStart + 188 - ptr; - size_t copy = accessUnit->size(); - if (copy > sizeLeft) { - copy = sizeLeft; - } - memcpy(ptr, accessUnit->data(), copy); ptr += copy; - CHECK_EQ(sizeLeft, copy); - memset(ptr, 0xff, sizeLeft - copy); + CHECK_EQ(ptr, packetDataStart + 188); packetDataStart += 188; size_t offset = copy; while (offset < accessUnit->size()) { - bool padding = (accessUnit->size() - offset) < (188 - 4); - // for subsequent fragments of "buffer": // 0x47 // transport_error_indicator = b0 @@ -843,35 +953,40 @@ status_t TSPacketizer::packetize( // continuity_counter = b???? // the fragment of "buffer" follows. + size_t sizeAvailableForPayload = 188 - 4; + + size_t copy = accessUnit->size() - offset; + + if (copy > sizeAvailableForPayload) { + copy = sizeAvailableForPayload; + + if (alignPayload && copy > 16) { + copy -= (copy % 16); + } + } + + size_t numPaddingBytes = sizeAvailableForPayload - copy; + uint8_t *ptr = packetDataStart; *ptr++ = 0x47; *ptr++ = 0x00 | (track->PID() >> 8); *ptr++ = track->PID() & 0xff; - *ptr++ = (padding ? 
0x30 : 0x10) | track->incrementContinuityCounter(); + *ptr++ = (numPaddingBytes > 0 ? 0x30 : 0x10) + | track->incrementContinuityCounter(); - if (padding) { - size_t paddingSize = 188 - 4 - (accessUnit->size() - offset); - *ptr++ = paddingSize - 1; - if (paddingSize >= 2) { + if (numPaddingBytes > 0) { + *ptr++ = numPaddingBytes - 1; + if (numPaddingBytes >= 2) { *ptr++ = 0x00; - memset(ptr, 0xff, paddingSize - 2); - ptr += paddingSize - 2; + memset(ptr, 0xff, numPaddingBytes - 2); + ptr += numPaddingBytes - 2; } } - // 4 bytes of TS header leave 188 - 4 = 184 bytes for the payload - - size_t sizeLeft = packetDataStart + 188 - ptr; - size_t copy = accessUnit->size() - offset; - if (copy > sizeLeft) { - copy = sizeLeft; - } - memcpy(ptr, accessUnit->data() + offset, copy); ptr += copy; - CHECK_EQ(sizeLeft, copy); - memset(ptr, 0xff, sizeLeft - copy); + CHECK_EQ(ptr, packetDataStart + 188); offset += copy; packetDataStart += 188; -- cgit v1.1 From b652df67cbdc2a92825144b23ea47eba4c276edd Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Mon, 18 Mar 2013 13:36:48 -0700 Subject: ICameraServiceListener.h: Add STATUS_ENUMERATING enum Change-Id: I41a9f358f63a19e7b746a9ccf24722001e5e7475 --- include/camera/ICameraServiceListener.h | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/include/camera/ICameraServiceListener.h b/include/camera/ICameraServiceListener.h index 207116a..88860dd 100644 --- a/include/camera/ICameraServiceListener.h +++ b/include/camera/ICameraServiceListener.h @@ -28,11 +28,30 @@ class ICameraServiceListener : public IInterface { public: + /** + * Initial status will be transmitted with onStatusChange immediately + * after this listener is added to the service listener list. + * + * Allowed transitions: + * + * (Any) -> NOT_PRESENT + * NOT_PRESENT -> PRESENT + * NOT_PRESENT -> ENUMERATING + * ENUMERATING -> PRESENT + * PRESENT -> AVAILABLE + * AVAILABLE -> NOT_AVAILABLE + * NOT_AVAILABLE -> AVAILABLE + * + * A state will never immediately transition back to itself. + */ enum Status { // Device physically unplugged - STATUS_PRESENT = CAMERA_DEVICE_STATUS_PRESENT, - // Device physically re-plugged STATUS_NOT_PRESENT = CAMERA_DEVICE_STATUS_NOT_PRESENT, + // Device physically has been plugged in + STATUS_PRESENT = CAMERA_DEVICE_STATUS_PRESENT, + // Device physically has been plugged in + // but it will not be connect-able until enumeration is complete + STATUS_ENUMERATING = CAMERA_DEVICE_STATUS_ENUMERATING, // Camera can be used exclusively STATUS_AVAILABLE = 0x80000000, -- cgit v1.1 From bafb682ec7f51486e751fea954169deb91846063 Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Fri, 22 Mar 2013 15:26:39 -0700 Subject: Load crypto plugins from additional shared libraries Currently crypto plugins are expected to be in libdrmdecrypt.so. When there are multiple plugins supporting different schemes, this approach requires source code integration across vendors which is unmanagable. Also, for integration with MediaDrm where the crypto keys are obtained from a drm server, the MediaCrypto plugin needs to interoperate with the MediaDrm plugin. This change allows {MediaCrypto, MediaDrm} pairs that are logically related to be implemented in a common shared library. 
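The discovery flow introduced here boils down to scanning /vendor/lib/mediadrm, dlopen()ing each candidate .so, resolving its createCryptoFactory entry point and asking the resulting factory whether it supports the requested scheme UUID. A condensed C++ sketch of that flow is shown below; findFactory and the supports callback are invented for the example, and the caching, SharedLibrary wrapper and legacy libdrmdecrypt.so fallback from the actual change are omitted.

    #include <dirent.h>
    #include <dlfcn.h>
    #include <stdint.h>
    #include <string>

    struct CryptoFactory;  // opaque here; defined by the plugin API
    typedef CryptoFactory *(*CreateCryptoFactoryFunc)();

    // Returns a factory whose plugin supports 'uuid', or NULL if none is found.
    // 'supports' stands in for CryptoFactory::isCryptoSchemeSupported().
    static CryptoFactory *findFactory(
            const uint8_t uuid[16],
            bool (*supports)(CryptoFactory *, const uint8_t[16])) {
        const char *dirPath = "/vendor/lib/mediadrm";
        DIR *dir = opendir(dirPath);
        if (dir == NULL) {
            return NULL;
        }
        struct dirent *entry;
        while ((entry = readdir(dir)) != NULL) {
            std::string path = std::string(dirPath) + "/" + entry->d_name;
            if (path.size() < 3 || path.compare(path.size() - 3, 3, ".so") != 0) {
                continue;
            }
            void *lib = dlopen(path.c_str(), RTLD_NOW);
            if (lib == NULL) {
                continue;
            }
            CreateCryptoFactoryFunc create =
                (CreateCryptoFactoryFunc)dlsym(lib, "createCryptoFactory");
            CryptoFactory *factory = (create != NULL) ? create() : NULL;
            if (factory != NULL && supports(factory, uuid)) {
                closedir(dir);
                return factory;  // keep the library loaded for the plugin's lifetime
            }
            dlclose(lib);  // the real code also tears down the unused factory first
        }
        closedir(dir);
        return NULL;
    }
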
Change-Id: I7f6638f29171f91609fc2d944396365568630b56 --- include/media/ICrypto.h | 2 +- media/libmedia/ICrypto.cpp | 2 +- media/libmediaplayerservice/Crypto.cpp | 161 ++++++++++++++++++++++++++------- media/libmediaplayerservice/Crypto.h | 15 ++- 4 files changed, 140 insertions(+), 40 deletions(-) diff --git a/include/media/ICrypto.h b/include/media/ICrypto.h index 61059bd..9dcb8d9 100644 --- a/include/media/ICrypto.h +++ b/include/media/ICrypto.h @@ -31,7 +31,7 @@ struct ICrypto : public IInterface { virtual status_t initCheck() const = 0; - virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) const = 0; + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) = 0; virtual status_t createPlugin( const uint8_t uuid[16], const void *data, size_t size) = 0; diff --git a/media/libmedia/ICrypto.cpp b/media/libmedia/ICrypto.cpp index 2defc2d..98b183a 100644 --- a/media/libmedia/ICrypto.cpp +++ b/media/libmedia/ICrypto.cpp @@ -48,7 +48,7 @@ struct BpCrypto : public BpInterface { return reply.readInt32(); } - virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) const { + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) { Parcel data, reply; data.writeInterfaceToken(ICrypto::getInterfaceDescriptor()); data.write(uuid, 16); diff --git a/media/libmediaplayerservice/Crypto.cpp b/media/libmediaplayerservice/Crypto.cpp index 0e8f913..ae4d845 100644 --- a/media/libmediaplayerservice/Crypto.cpp +++ b/media/libmediaplayerservice/Crypto.cpp @@ -17,6 +17,8 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "Crypto" #include +#include +#include #include "Crypto.h" @@ -26,87 +28,176 @@ #include #include -#include - namespace android { +KeyedVector, String8> Crypto::mUUIDToLibraryPathMap; +KeyedVector > Crypto::mLibraryPathToOpenLibraryMap; +Mutex Crypto::mMapLock; + +static bool operator<(const Vector &lhs, const Vector &rhs) { + if (lhs.size() < rhs.size()) { + return true; + } else if (lhs.size() > rhs.size()) { + return false; + } + + return memcmp((void *)lhs.array(), (void *)rhs.array(), rhs.size()) < 0; +} + Crypto::Crypto() : mInitCheck(NO_INIT), - mLibHandle(NULL), mFactory(NULL), mPlugin(NULL) { - mInitCheck = init(); } Crypto::~Crypto() { delete mPlugin; mPlugin = NULL; + closeFactory(); +} +void Crypto::closeFactory() { delete mFactory; mFactory = NULL; - - if (mLibHandle != NULL) { - dlclose(mLibHandle); - mLibHandle = NULL; - } + mLibrary.clear(); } status_t Crypto::initCheck() const { return mInitCheck; } -status_t Crypto::init() { - mLibHandle = dlopen("libdrmdecrypt.so", RTLD_NOW); +/* + * Search the plugins directory for a plugin that supports the scheme + * specified by uuid + * + * If found: + * mLibrary holds a strong pointer to the dlopen'd library + * mFactory is set to the library's factory method + * mInitCheck is set to OK + * + * If not found: + * mLibrary is cleared and mFactory are set to NULL + * mInitCheck is set to an error (!OK) + */ +void Crypto::findFactoryForScheme(const uint8_t uuid[16]) { - if (mLibHandle == NULL) { - ALOGE("Unable to locate libdrmdecrypt.so"); + closeFactory(); - return ERROR_UNSUPPORTED; + // lock static maps + Mutex::Autolock autoLock(mMapLock); + + // first check cache + Vector uuidVector; + uuidVector.appendArray(uuid, sizeof(uuid)); + ssize_t index = mUUIDToLibraryPathMap.indexOfKey(uuidVector); + if (index >= 0) { + if (loadLibraryForScheme(mUUIDToLibraryPathMap[index], uuid)) { + mInitCheck = OK; + return; + } else { + ALOGE("Failed to load from cached library path!"); + mInitCheck = ERROR_UNSUPPORTED; + return; + } } - 
typedef CryptoFactory *(*CreateCryptoFactoryFunc)(); - CreateCryptoFactoryFunc createCryptoFactory = - (CreateCryptoFactoryFunc)dlsym(mLibHandle, "createCryptoFactory"); + // no luck, have to search + String8 dirPath("/vendor/lib/mediadrm"); + String8 pluginPath; - if (createCryptoFactory == NULL - || ((mFactory = createCryptoFactory()) == NULL)) { - if (createCryptoFactory == NULL) { - ALOGE("Unable to find symbol 'createCryptoFactory'."); - } else { - ALOGE("createCryptoFactory() failed."); + DIR* pDir = opendir(dirPath.string()); + if (pDir) { + struct dirent* pEntry; + while ((pEntry = readdir(pDir))) { + + pluginPath = dirPath + "/" + pEntry->d_name; + + if (pluginPath.getPathExtension() == ".so") { + + if (loadLibraryForScheme(pluginPath, uuid)) { + mUUIDToLibraryPathMap.add(uuidVector, pluginPath); + mInitCheck = OK; + closedir(pDir); + return; + } + } } - dlclose(mLibHandle); - mLibHandle = NULL; + closedir(pDir); + } - return ERROR_UNSUPPORTED; + // try the legacy libdrmdecrypt.so + pluginPath = "libdrmdecrypt.so"; + if (loadLibraryForScheme(pluginPath, uuid)) { + mUUIDToLibraryPathMap.add(uuidVector, pluginPath); + mInitCheck = OK; + return; } - return OK; + ALOGE("Failed to find crypto plugin"); + mInitCheck = ERROR_UNSUPPORTED; } -bool Crypto::isCryptoSchemeSupported(const uint8_t uuid[16]) const { - Mutex::Autolock autoLock(mLock); +bool Crypto::loadLibraryForScheme(const String8 &path, const uint8_t uuid[16]) { - if (mInitCheck != OK) { + // get strong pointer to open shared library + ssize_t index = mLibraryPathToOpenLibraryMap.indexOfKey(path); + if (index >= 0) { + mLibrary = mLibraryPathToOpenLibraryMap[index].promote(); + } else { + index = mLibraryPathToOpenLibraryMap.add(path, NULL); + } + + if (!mLibrary.get()) { + mLibrary = new SharedLibrary(path); + if (!*mLibrary) { + return false; + } + + mLibraryPathToOpenLibraryMap.replaceValueAt(index, mLibrary); + } + + typedef CryptoFactory *(*CreateCryptoFactoryFunc)(); + + CreateCryptoFactoryFunc createCryptoFactory = + (CreateCryptoFactoryFunc)mLibrary->lookup("createCryptoFactory"); + + if (createCryptoFactory == NULL || + (mFactory = createCryptoFactory()) == NULL || + !mFactory->isCryptoSchemeSupported(uuid)) { + closeFactory(); return false; } + return true; +} - return mFactory->isCryptoSchemeSupported(uuid); +bool Crypto::isCryptoSchemeSupported(const uint8_t uuid[16]) { + Mutex::Autolock autoLock(mLock); + + if (mFactory && mFactory->isCryptoSchemeSupported(uuid)) { + return true; + } + + findFactoryForScheme(uuid); + return (mInitCheck == OK); } status_t Crypto::createPlugin( const uint8_t uuid[16], const void *data, size_t size) { Mutex::Autolock autoLock(mLock); - if (mInitCheck != OK) { - return mInitCheck; - } - if (mPlugin != NULL) { return -EINVAL; } + if (!mFactory || !mFactory->isCryptoSchemeSupported(uuid)) { + findFactoryForScheme(uuid); + } + + if (mInitCheck != OK) { + return mInitCheck; + } + return mFactory->createPlugin(uuid, data, size, &mPlugin); } diff --git a/media/libmediaplayerservice/Crypto.h b/media/libmediaplayerservice/Crypto.h index d066774..c44ae34 100644 --- a/media/libmediaplayerservice/Crypto.h +++ b/media/libmediaplayerservice/Crypto.h @@ -20,6 +20,9 @@ #include #include +#include + +#include "SharedLibrary.h" namespace android { @@ -32,7 +35,7 @@ struct Crypto : public BnCrypto { virtual status_t initCheck() const; - virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) const; + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]); virtual status_t createPlugin( const 
uint8_t uuid[16], const void *data, size_t size); @@ -56,11 +59,17 @@ private: mutable Mutex mLock; status_t mInitCheck; - void *mLibHandle; + sp mLibrary; CryptoFactory *mFactory; CryptoPlugin *mPlugin; - status_t init(); + static KeyedVector, String8> mUUIDToLibraryPathMap; + static KeyedVector > mLibraryPathToOpenLibraryMap; + static Mutex mMapLock; + + void findFactoryForScheme(const uint8_t uuid[16]); + bool loadLibraryForScheme(const String8 &path, const uint8_t uuid[16]); + void closeFactory(); DISALLOW_EVIL_CONSTRUCTORS(Crypto); }; -- cgit v1.1 From 6e98aba4d23d00cab236d993d895f57ea76ea0e5 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 22 Mar 2013 09:56:29 -0700 Subject: Separate the mode of the RTP and RTCP channels. I now can use a TCP reliable data channel with a UDP back channel. Change-Id: Ieb0f0970e3a6da4cff250e9547e181c0c961b9fb --- .../libstagefright/wifi-display/MediaReceiver.cpp | 7 +- media/libstagefright/wifi-display/MediaSender.cpp | 11 +- media/libstagefright/wifi-display/rtp/RTPBase.h | 1 + .../wifi-display/rtp/RTPReceiver.cpp | 143 ++++++++++++++------- .../libstagefright/wifi-display/rtp/RTPReceiver.h | 11 +- .../libstagefright/wifi-display/rtp/RTPSender.cpp | 55 +++++--- media/libstagefright/wifi-display/rtp/RTPSender.h | 6 +- media/libstagefright/wifi-display/rtptest.cpp | 7 +- 8 files changed, 167 insertions(+), 74 deletions(-) diff --git a/media/libstagefright/wifi-display/MediaReceiver.cpp b/media/libstagefright/wifi-display/MediaReceiver.cpp index 10a2dff..e2e791d 100644 --- a/media/libstagefright/wifi-display/MediaReceiver.cpp +++ b/media/libstagefright/wifi-display/MediaReceiver.cpp @@ -73,7 +73,12 @@ ssize_t MediaReceiver::addTrack( info.mReceiver->registerPacketType( 97, RTPReceiver::PACKETIZATION_H264); - status_t err = info.mReceiver->initAsync(transportMode, localRTPPort); + status_t err = info.mReceiver->initAsync( + transportMode, // rtpMode + transportMode == RTPReceiver::TRANSPORT_UDP + ? transportMode + : RTPReceiver::TRANSPORT_NONE, // rtcpMode + localRTPPort); if (err != OK) { looper()->unregisterHandler(info.mReceiver->id()); diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp index d13a92e..6fc50f7 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -124,10 +124,14 @@ status_t MediaSender::initAsync( looper()->registerHandler(mTSSender); err = mTSSender->initAsync( - transportMode, remoteHost, remoteRTPPort, + transportMode, // rtpMode remoteRTCPPort, + (transportMode == RTPSender::TRANSPORT_UDP + && remoteRTCPPort >= 0) + ? transportMode + : RTPSender::TRANSPORT_NONE, // rtcpMode localRTPPort); if (err != OK) { @@ -174,10 +178,13 @@ status_t MediaSender::initAsync( looper()->registerHandler(info->mSender); status_t err = info->mSender->initAsync( - transportMode, remoteHost, remoteRTPPort, + transportMode, // rtpMode remoteRTCPPort, + (transportMode == RTPSender::TRANSPORT_UDP && remoteRTCPPort >= 0) + ? 
transportMode + : RTPSender::TRANSPORT_NONE, // rtcpMode localRTPPort); if (err != OK) { diff --git a/media/libstagefright/wifi-display/rtp/RTPBase.h b/media/libstagefright/wifi-display/rtp/RTPBase.h index 6507a6f..e3fa845 100644 --- a/media/libstagefright/wifi-display/rtp/RTPBase.h +++ b/media/libstagefright/wifi-display/rtp/RTPBase.h @@ -29,6 +29,7 @@ struct RTPBase { enum TransportMode { TRANSPORT_UNDEFINED, + TRANSPORT_NONE, TRANSPORT_UDP, TRANSPORT_TCP, TRANSPORT_TCP_INTERLEAVED, diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp index 8711b08..c55e0be 100644 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp @@ -407,13 +407,22 @@ RTPReceiver::RTPReceiver( const sp ¬ify) : mNetSession(netSession), mNotify(notify), - mMode(TRANSPORT_UNDEFINED), + mRTPMode(TRANSPORT_UNDEFINED), + mRTCPMode(TRANSPORT_UNDEFINED), mRTPSessionID(0), mRTCPSessionID(0), - mRTPClientSessionID(0) { + mRTPConnected(false), + mRTCPConnected(false), + mRTPClientSessionID(0), + mRTCPClientSessionID(0) { } RTPReceiver::~RTPReceiver() { + if (mRTCPClientSessionID != 0) { + mNetSession->destroySession(mRTCPClientSessionID); + mRTCPClientSessionID = 0; + } + if (mRTPClientSessionID != 0) { mNetSession->destroySession(mRTPClientSessionID); mRTPClientSessionID = 0; @@ -430,17 +439,24 @@ RTPReceiver::~RTPReceiver() { } } -status_t RTPReceiver::initAsync(TransportMode mode, int32_t *outLocalRTPPort) { - if (mMode != TRANSPORT_UNDEFINED || mode == TRANSPORT_UNDEFINED) { +status_t RTPReceiver::initAsync( + TransportMode rtpMode, + TransportMode rtcpMode, + int32_t *outLocalRTPPort) { + if (mRTPMode != TRANSPORT_UNDEFINED + || rtpMode == TRANSPORT_UNDEFINED + || rtpMode == TRANSPORT_NONE + || rtcpMode == TRANSPORT_UNDEFINED) { return INVALID_OPERATION; } - CHECK_NE(mMode, TRANSPORT_TCP_INTERLEAVED); + CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED); + CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED); sp rtpNotify = new AMessage(kWhatRTPNotify, id()); sp rtcpNotify; - if (mode == TRANSPORT_UDP) { + if (rtcpMode != TRANSPORT_NONE) { rtcpNotify = new AMessage(kWhatRTCPNotify, id()); } @@ -456,13 +472,13 @@ status_t RTPReceiver::initAsync(TransportMode mode, int32_t *outLocalRTPPort) { localRTPPort = PickRandomRTPPort(); status_t err; - if (mode == TRANSPORT_UDP) { + if (rtpMode == TRANSPORT_UDP) { err = mNetSession->createUDPSession( localRTPPort, rtpNotify, &mRTPSessionID); } else { - CHECK_EQ(mode, TRANSPORT_TCP); + CHECK_EQ(rtpMode, TRANSPORT_TCP); err = mNetSession->createTCPDatagramSession( ifaceAddr, localRTPPort, @@ -474,15 +490,22 @@ status_t RTPReceiver::initAsync(TransportMode mode, int32_t *outLocalRTPPort) { continue; } - if (mode == TRANSPORT_TCP) { + if (rtcpMode == TRANSPORT_NONE) { break; + } else if (rtcpMode == TRANSPORT_UDP) { + err = mNetSession->createUDPSession( + localRTPPort + 1, + rtcpNotify, + &mRTCPSessionID); + } else { + CHECK_EQ(rtpMode, TRANSPORT_TCP); + err = mNetSession->createTCPDatagramSession( + ifaceAddr, + localRTPPort + 1, + rtcpNotify, + &mRTCPSessionID); } - err = mNetSession->createUDPSession( - localRTPPort + 1, - rtcpNotify, - &mRTCPSessionID); - if (err == OK) { break; } @@ -491,7 +514,8 @@ status_t RTPReceiver::initAsync(TransportMode mode, int32_t *outLocalRTPPort) { mRTPSessionID = 0; } - mMode = mode; + mRTPMode = rtpMode; + mRTCPMode = rtcpMode; *outLocalRTPPort = localRTPPort; return OK; @@ -499,35 +523,46 @@ status_t 
RTPReceiver::initAsync(TransportMode mode, int32_t *outLocalRTPPort) { status_t RTPReceiver::connect( const char *remoteHost, int32_t remoteRTPPort, int32_t remoteRTCPPort) { - if (mMode == TRANSPORT_TCP) { - return OK; - } + status_t err; + + if (mRTPMode == TRANSPORT_UDP) { + CHECK(!mRTPConnected); + + err = mNetSession->connectUDPSession( + mRTPSessionID, remoteHost, remoteRTPPort); + + if (err != OK) { + notifyInitDone(err); + return err; + } - status_t err = mNetSession->connectUDPSession( - mRTPSessionID, remoteHost, remoteRTPPort); + ALOGI("connectUDPSession RTP successful."); - if (err != OK) { - notifyInitDone(err); - return err; + mRTPConnected = true; } - ALOGI("connectUDPSession RTP successful."); + if (mRTCPMode == TRANSPORT_UDP) { + CHECK(!mRTCPConnected); - if (remoteRTCPPort >= 0) { err = mNetSession->connectUDPSession( mRTCPSessionID, remoteHost, remoteRTCPPort); if (err != OK) { - ALOGI("connect failed w/ err %d", err); - notifyInitDone(err); return err; } scheduleSendRR(); + + ALOGI("connectUDPSession RTCP successful."); + + mRTCPConnected = true; } - notifyInitDone(OK); + if (mRTPConnected + && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) { + notifyInitDone(OK); + } return OK; } @@ -615,15 +650,18 @@ void RTPReceiver::onNetNotify(bool isRTP, const sp &msg) { if (sessionID == mRTPSessionID) { mRTPSessionID = 0; - - if (mMode == TRANSPORT_TCP && mRTPClientSessionID == 0) { - notifyInitDone(err); - break; - } } else if (sessionID == mRTCPSessionID) { mRTCPSessionID = 0; } else if (sessionID == mRTPClientSessionID) { mRTPClientSessionID = 0; + } else if (sessionID == mRTCPClientSessionID) { + mRTCPClientSessionID = 0; + } + + if (!mRTPConnected + || (mRTCPMode != TRANSPORT_NONE && !mRTCPConnected)) { + notifyInitDone(err); + break; } notifyError(err); @@ -645,22 +683,39 @@ void RTPReceiver::onNetNotify(bool isRTP, const sp &msg) { case ANetworkSession::kWhatClientConnected: { - CHECK_EQ(mMode, TRANSPORT_TCP); - CHECK(isRTP); - int32_t sessionID; CHECK(msg->findInt32("sessionID", &sessionID)); - if (mRTPClientSessionID != 0) { - // We only allow a single client connection. - mNetSession->destroySession(sessionID); - sessionID = 0; - break; - } + if (isRTP) { + CHECK_EQ(mRTPMode, TRANSPORT_TCP); - mRTPClientSessionID = sessionID; + if (mRTPClientSessionID != 0) { + // We only allow a single client connection. + mNetSession->destroySession(sessionID); + sessionID = 0; + break; + } + + mRTPClientSessionID = sessionID; + mRTPConnected = true; + } else { + CHECK_EQ(mRTCPMode, TRANSPORT_TCP); + + if (mRTCPClientSessionID != 0) { + // We only allow a single client connection. 
+ mNetSession->destroySession(sessionID); + sessionID = 0; + break; + } - notifyInitDone(OK); + mRTCPClientSessionID = sessionID; + mRTCPConnected = true; + } + + if (mRTPConnected + && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) { + notifyInitDone(OK); + } break; } } diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.h b/media/libstagefright/wifi-display/rtp/RTPReceiver.h index ec4671d..abbe6a8 100644 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.h +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.h @@ -46,7 +46,10 @@ struct RTPReceiver : public RTPBase, public AHandler { status_t registerPacketType( uint8_t packetType, PacketizationMode mode); - status_t initAsync(TransportMode mode, int32_t *outLocalRTPPort); + status_t initAsync( + TransportMode rtpMode, + TransportMode rtcpMode, + int32_t *outLocalRTPPort); status_t connect( const char *remoteHost, @@ -79,11 +82,15 @@ private: sp mNetSession; sp mNotify; - TransportMode mMode; + TransportMode mRTPMode; + TransportMode mRTCPMode; int32_t mRTPSessionID; int32_t mRTCPSessionID; + bool mRTPConnected; + bool mRTCPConnected; int32_t mRTPClientSessionID; // in TRANSPORT_TCP mode. + int32_t mRTCPClientSessionID; // in TRANSPORT_TCP mode. KeyedVector mPacketTypes; KeyedVector > mSources; diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp index c8e265c..c686e01 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -38,7 +38,8 @@ RTPSender::RTPSender( const sp ¬ify) : mNetSession(netSession), mNotify(notify), - mMode(TRANSPORT_UNDEFINED), + mRTPMode(TRANSPORT_UNDEFINED), + mRTCPMode(TRANSPORT_UNDEFINED), mRTPSessionID(0), mRTCPSessionID(0), mRTPConnected(false), @@ -74,18 +75,24 @@ int32_t RTPBase::PickRandomRTPPort() { } status_t RTPSender::initAsync( - TransportMode mode, const char *remoteHost, int32_t remoteRTPPort, + TransportMode rtpMode, int32_t remoteRTCPPort, + TransportMode rtcpMode, int32_t *outLocalRTPPort) { - if (mMode != TRANSPORT_UNDEFINED || mode == TRANSPORT_UNDEFINED) { + if (mRTPMode != TRANSPORT_UNDEFINED + || rtpMode == TRANSPORT_UNDEFINED + || rtpMode == TRANSPORT_NONE + || rtcpMode == TRANSPORT_UNDEFINED) { return INVALID_OPERATION; } - CHECK_NE(mMode, TRANSPORT_TCP_INTERLEAVED); + CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED); + CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED); - if (mode == TRANSPORT_TCP && remoteRTCPPort >= 0) { + if (rtcpMode == TRANSPORT_NONE && remoteRTCPPort >= 0 + || rtcpMode != TRANSPORT_NONE && remoteRTCPPort < 0) { return INVALID_OPERATION; } @@ -105,7 +112,7 @@ status_t RTPSender::initAsync( localRTPPort = PickRandomRTPPort(); status_t err; - if (mode == TRANSPORT_UDP) { + if (rtpMode == TRANSPORT_UDP) { err = mNetSession->createUDPSession( localRTPPort, remoteHost, @@ -113,7 +120,7 @@ status_t RTPSender::initAsync( rtpNotify, &mRTPSessionID); } else { - CHECK_EQ(mode, TRANSPORT_TCP); + CHECK_EQ(rtpMode, TRANSPORT_TCP); err = mNetSession->createTCPDatagramSession( localRTPPort, remoteHost, @@ -130,7 +137,7 @@ status_t RTPSender::initAsync( break; } - if (mode == TRANSPORT_UDP) { + if (rtcpMode == TRANSPORT_UDP) { err = mNetSession->createUDPSession( localRTPPort + 1, remoteHost, @@ -138,7 +145,7 @@ status_t RTPSender::initAsync( rtcpNotify, &mRTCPSessionID); } else { - CHECK_EQ(mode, TRANSPORT_TCP); + CHECK_EQ(rtcpMode, TRANSPORT_TCP); err = mNetSession->createTCPDatagramSession( localRTPPort + 1, remoteHost, @@ -155,15 +162,20 
@@ status_t RTPSender::initAsync( mRTPSessionID = 0; } - if (mode == TRANSPORT_UDP) { + if (rtpMode == TRANSPORT_UDP) { mRTPConnected = true; + } + + if (rtcpMode == TRANSPORT_UDP) { mRTCPConnected = true; } - mMode = mode; + mRTPMode = rtpMode; + mRTCPMode = rtcpMode; *outLocalRTPPort = localRTPPort; - if (mMode == TRANSPORT_UDP) { + if (mRTPMode == TRANSPORT_UDP + && (mRTCPMode == TRANSPORT_UDP || mRTCPMode == TRANSPORT_NONE)) { notifyInitDone(OK); } @@ -496,12 +508,12 @@ void RTPSender::onNetNotify(bool isRTP, const sp &msg) { mRTCPSessionID = 0; } - if (mMode == TRANSPORT_TCP) { - if (!mRTPConnected - || (mRTCPSessionID > 0 && !mRTCPConnected)) { - notifyInitDone(err); - break; - } + if (!mRTPConnected + || (mRTPMode != TRANSPORT_NONE && !mRTCPConnected)) { + // We haven't completed initialization, attach the error + // to the notification instead. + notifyInitDone(err); + break; } notifyError(err); @@ -523,20 +535,21 @@ void RTPSender::onNetNotify(bool isRTP, const sp &msg) { case ANetworkSession::kWhatConnected: { - CHECK_EQ(mMode, TRANSPORT_TCP); - int32_t sessionID; CHECK(msg->findInt32("sessionID", &sessionID)); if (isRTP) { + CHECK_EQ(mRTPMode, TRANSPORT_TCP); CHECK_EQ(sessionID, mRTPSessionID); mRTPConnected = true; } else { + CHECK_EQ(mRTCPMode, TRANSPORT_TCP); CHECK_EQ(sessionID, mRTCPSessionID); mRTCPConnected = true; } - if (mRTPConnected && (mRTCPSessionID == 0 || mRTCPConnected)) { + if (mRTPConnected + && (mRTCPMode == TRANSPORT_NONE || mRTCPConnected)) { notifyInitDone(OK); } break; diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h index 90b1796..8409b8d 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.h +++ b/media/libstagefright/wifi-display/rtp/RTPSender.h @@ -43,10 +43,11 @@ struct RTPSender : public RTPBase, public AHandler { const sp ¬ify); status_t initAsync( - TransportMode mode, const char *remoteHost, int32_t remoteRTPPort, + TransportMode rtpMode, int32_t remoteRTCPPort, + TransportMode rtcpMode, int32_t *outLocalRTPPort); status_t queueBuffer( @@ -72,7 +73,8 @@ private: sp mNetSession; sp mNotify; - TransportMode mMode; + TransportMode mRTPMode; + TransportMode mRTCPMode; int32_t mRTPSessionID; int32_t mRTCPSessionID; bool mRTPConnected; diff --git a/media/libstagefright/wifi-display/rtptest.cpp b/media/libstagefright/wifi-display/rtptest.cpp index 607d9d2..eade832 100644 --- a/media/libstagefright/wifi-display/rtptest.cpp +++ b/media/libstagefright/wifi-display/rtptest.cpp @@ -106,7 +106,9 @@ void TestHandler::onMessageReceived(const sp &msg) { int32_t receiverRTPPort; CHECK_EQ((status_t)OK, mReceiver->initAsync( - RTPReceiver::TRANSPORT_UDP, &receiverRTPPort)); + RTPReceiver::TRANSPORT_UDP, // rtpMode + RTPReceiver::TRANSPORT_UDP, // rtcpMode + &receiverRTPPort)); printf("picked receiverRTPPort %d\n", receiverRTPPort); @@ -155,10 +157,11 @@ void TestHandler::onMessageReceived(const sp &msg) { int32_t senderRTPPort; CHECK_EQ((status_t)OK, mSender->initAsync( - RTPSender::TRANSPORT_UDP, host.c_str(), receiverRTPPort, + RTPSender::TRANSPORT_UDP, // rtpMode receiverRTPPort + 1, + RTPSender::TRANSPORT_UDP, // rtcpMode &senderRTPPort)); printf("picked senderRTPPort %d\n", senderRTPPort); -- cgit v1.1 From cc8623a7af8c1f7f40dd7810e2b5cf24a008faf3 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 25 Mar 2013 13:25:39 -0700 Subject: Misc debugging support and handling of latency changes. 
Change-Id: I682944f793690842219cf1adbae5e61e061b6b62 --- media/libstagefright/wifi-display/MediaSender.cpp | 31 --------- .../wifi-display/sink/DirectRenderer.cpp | 24 ------- .../wifi-display/sink/DirectRenderer.h | 5 -- .../wifi-display/sink/WifiDisplaySink.cpp | 78 ++++++++++++++-------- .../wifi-display/sink/WifiDisplaySink.h | 6 +- media/libstagefright/wifi-display/wfd.cpp | 15 ++++- 6 files changed, 68 insertions(+), 91 deletions(-) diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp index 6fc50f7..123bc1c 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -267,37 +267,6 @@ status_t MediaSender::queueAccessUnit( tsPackets, 33 /* packetType */, RTPSender::PACKETIZATION_TRANSPORT_STREAM); - -#if 0 - { - int64_t nowUs = ALooper::GetNowUs(); - - int64_t timeUs; - CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); - - int64_t delayMs = (nowUs - timeUs) / 1000ll; - - static const int64_t kMinDelayMs = 0; - static const int64_t kMaxDelayMs = 300; - - const char *kPattern = "########################################"; - size_t kPatternSize = strlen(kPattern); - - int n = (kPatternSize * (delayMs - kMinDelayMs)) - / (kMaxDelayMs - kMinDelayMs); - - if (n < 0) { - n = 0; - } else if ((size_t)n > kPatternSize) { - n = kPatternSize; - } - - ALOGI("[%lld]: (%4lld ms) %s\n", - timeUs / 1000, - delayMs, - kPattern + kPatternSize - n); - } -#endif } if (err != OK) { diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp index 12338e9..15f9c88 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp @@ -467,8 +467,6 @@ DirectRenderer::DirectRenderer( const sp &bufferProducer) : mSurfaceTex(bufferProducer), mVideoRenderPending(false), - mLatencySum(0ll), - mLatencyCount(0), mNumFramesLate(0), mNumFrames(0) { } @@ -476,25 +474,6 @@ DirectRenderer::DirectRenderer( DirectRenderer::~DirectRenderer() { } -int64_t DirectRenderer::getAvgLatenessUs() { - if (mLatencyCount == 0) { - return 0ll; - } - - int64_t avgLatencyUs = mLatencySum / mLatencyCount; - - mLatencySum = 0ll; - mLatencyCount = 0; - - if (mNumFrames > 0) { - ALOGI("%d / %d frames late", mNumFramesLate, mNumFrames); - mNumFramesLate = 0; - mNumFrames = 0; - } - - return avgLatencyUs; -} - void DirectRenderer::onMessageReceived(const sp &msg) { switch (msg->what()) { case kWhatDecoderNotify: @@ -632,9 +611,6 @@ void DirectRenderer::onRenderVideo() { } ++mNumFrames; - mLatencySum += nowUs - info.mTimeUs; - ++mLatencyCount; - status_t err = mDecoderContext[0]->renderOutputBufferAndRelease(info.mIndex); CHECK_EQ(err, (status_t)OK); diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h index 92c176a..c5a4a83 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.h +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.h @@ -34,8 +34,6 @@ struct DirectRenderer : public AHandler { void setFormat(size_t trackIndex, const sp &format); void queueAccessUnit(size_t trackIndex, const sp &accessUnit); - int64_t getAvgLatenessUs(); - protected: virtual void onMessageReceived(const sp &msg); virtual ~DirectRenderer(); @@ -64,9 +62,6 @@ private: sp mAudioRenderer; - int64_t mLatencySum; - size_t mLatencyCount; - int32_t mNumFramesLate; int32_t mNumFrames; diff --git 
a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index 62021c0..bc98402 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -51,8 +51,10 @@ WifiDisplaySink::WifiDisplaySink( mIDRFrameRequestPending(false), mTimeOffsetUs(0ll), mTimeOffsetValid(false), - mTargetLatencyUs(-1ll), - mSetupDeferred(false) { + mSetupDeferred(false), + mLatencyCount(0), + mLatencySumUs(0ll), + mLatencyMaxUs(0ll) { // We support any and all resolutions, but prefer 720p30 mSinkSupportedVideoFormats.setNativeResolution( VideoFormats::RESOLUTION_CEA, 5); // 1280 x 720 p30 @@ -265,13 +267,20 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { case kWhatReportLateness: { - int64_t latenessUs = mRenderer->getAvgLatenessUs(); + if (mLatencyCount > 0) { + int64_t avgLatencyUs = mLatencySumUs / mLatencyCount; - ALOGI("avg. lateness = %lld ms", - (latenessUs + mTargetLatencyUs) / 1000ll); + ALOGI("avg. latency = %lld ms (max %lld ms)", + avgLatencyUs / 1000ll, + mLatencyMaxUs / 1000ll); - mMediaReceiver->notifyLateness( - 0 /* trackIndex */, latenessUs); + mMediaReceiver->notifyLateness( + 0 /* trackIndex */, avgLatencyUs); + } + + mLatencyCount = 0; + mLatencySumUs = 0ll; + mLatencyMaxUs = 0ll; msg->post(kReportLatenessEveryUs); break; @@ -282,6 +291,30 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { } } +static void dumpDelay(size_t trackIndex, int64_t timeUs) { + int64_t delayMs = (ALooper::GetNowUs() - timeUs) / 1000ll; + + static const int64_t kMinDelayMs = 0; + static const int64_t kMaxDelayMs = 300; + + const char *kPattern = "########################################"; + size_t kPatternSize = strlen(kPattern); + + int n = (kPatternSize * (delayMs - kMinDelayMs)) + / (kMaxDelayMs - kMinDelayMs); + + if (n < 0) { + n = 0; + } else if ((size_t)n > kPatternSize) { + n = kPatternSize; + } + + ALOGI("[%lld]: (%4lld ms) %s", + timeUs / 1000, + delayMs, + kPattern + kPatternSize - n); +} + void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { int32_t what; CHECK(msg->findInt32("what", &what)); @@ -319,24 +352,6 @@ void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { CHECK(mTimeOffsetValid); - int64_t latencyUs = 200000ll; // 200ms by default - - char val[PROPERTY_VALUE_MAX]; - if (property_get("media.wfd-sink.latency", val, NULL)) { - char *end; - int64_t x = strtoll(val, &end, 10); - - if (end > val && *end == '\0' && x >= 0ll) { - latencyUs = x; - } - } - - if (latencyUs != mTargetLatencyUs) { - mTargetLatencyUs = latencyUs; - - ALOGI("Assuming %lld ms of latency.", latencyUs / 1000ll); - } - sp accessUnit; CHECK(msg->findBuffer("accessUnit", &accessUnit)); @@ -345,13 +360,24 @@ void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { // We are the timesync _client_, // client time = server time - time offset. 
- timeUs += mTargetLatencyUs - mTimeOffsetUs; + timeUs -= mTimeOffsetUs; accessUnit->meta()->setInt64("timeUs", timeUs); size_t trackIndex; CHECK(msg->findSize("trackIndex", &trackIndex)); + int64_t nowUs = ALooper::GetNowUs(); + int64_t delayUs = nowUs - timeUs; + + mLatencySumUs += delayUs; + if (mLatencyCount == 0 || delayUs > mLatencyMaxUs) { + mLatencyMaxUs = delayUs; + } + ++mLatencyCount; + + // dumpDelay(trackIndex, timeUs); + #if USE_TUNNEL_RENDERER mRenderer->queueBuffer(accessUnit); #else diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h index 2b8c6f7..f515177 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h @@ -132,10 +132,12 @@ private: int64_t mTimeOffsetUs; bool mTimeOffsetValid; - int64_t mTargetLatencyUs; - bool mSetupDeferred; + size_t mLatencyCount; + int64_t mLatencySumUs; + int64_t mLatencyMaxUs; + status_t sendM2(int32_t sessionID); status_t sendSetup(int32_t sessionID, const char *uri); status_t sendPlay(int32_t sessionID, const char *uri); diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index 4f7dcc8..9fee4d0 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -43,7 +43,8 @@ static void usage(const char *me) { " -u uri \tconnect to an rtsp uri\n" " -l ip[:port] \tlisten on the specified port " " -f(ilename) \tstream media " - "(create a sink)\n", + "(create a sink)\n" + " -s(pecial) \trun in 'special' mode\n", me); } @@ -222,8 +223,10 @@ int main(int argc, char **argv) { AString path; + bool specialMode = false; + int res; - while ((res = getopt(argc, argv, "hc:l:u:f:")) >= 0) { + while ((res = getopt(argc, argv, "hc:l:u:f:s")) >= 0) { switch (res) { case 'c': { @@ -281,6 +284,12 @@ int main(int argc, char **argv) { break; } + case 's': + { + specialMode = true; + break; + } + case '?': case 'h': default: @@ -357,7 +366,7 @@ int main(int argc, char **argv) { sp looper = new ALooper; sp sink = new WifiDisplaySink( - 0 /* flags */, + specialMode ? 
WifiDisplaySink::FLAG_SPECIAL_MODE : 0 /* flags */, session, surface->getIGraphicBufferProducer()); -- cgit v1.1 From f90debb467a0daf5288e7d8684642ef1119c4bad Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 25 Mar 2013 14:15:24 -0700 Subject: Get rid of TunnelRenderer Change-Id: I40dc00e2e689d7a6b8717ce524016c2948229807 --- media/libstagefright/wifi-display/Android.mk | 1 - .../wifi-display/sink/TunnelRenderer.cpp | 290 --------------------- .../wifi-display/sink/TunnelRenderer.h | 78 ------ .../wifi-display/sink/WifiDisplaySink.cpp | 18 +- .../wifi-display/sink/WifiDisplaySink.h | 8 - 5 files changed, 1 insertion(+), 394 deletions(-) delete mode 100644 media/libstagefright/wifi-display/sink/TunnelRenderer.cpp delete mode 100644 media/libstagefright/wifi-display/sink/TunnelRenderer.h diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index f81929c..f1f9f45 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -12,7 +12,6 @@ LOCAL_SRC_FILES:= \ rtp/RTPReceiver.cpp \ rtp/RTPSender.cpp \ sink/DirectRenderer.cpp \ - sink/TunnelRenderer.cpp \ sink/WifiDisplaySink.cpp \ SNTPClient.cpp \ TimeSyncer.cpp \ diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp deleted file mode 100644 index 6b185db..0000000 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp +++ /dev/null @@ -1,290 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "TunnelRenderer" -#include - -#include "TunnelRenderer.h" - -#include "ATSParser.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace android { - -struct TunnelRenderer::PlayerClient : public BnMediaPlayerClient { - PlayerClient() {} - - virtual void notify(int msg, int ext1, int ext2, const Parcel *obj) { - ALOGI("notify %d, %d, %d", msg, ext1, ext2); - } - -protected: - virtual ~PlayerClient() {} - -private: - DISALLOW_EVIL_CONSTRUCTORS(PlayerClient); -}; - -struct TunnelRenderer::StreamSource : public BnStreamSource { - StreamSource(TunnelRenderer *owner); - - virtual void setListener(const sp &listener); - virtual void setBuffers(const Vector > &buffers); - - virtual void onBufferAvailable(size_t index); - - virtual uint32_t flags() const; - - void doSomeWork(); - - void setTimeOffset(int64_t offset); - -protected: - virtual ~StreamSource(); - -private: - mutable Mutex mLock; - - TunnelRenderer *mOwner; - - sp mListener; - - Vector > mBuffers; - List mIndicesAvailable; - - size_t mNumDeqeued; - - int64_t mTimeOffsetUs; - bool mTimeOffsetChanged; - - DISALLOW_EVIL_CONSTRUCTORS(StreamSource); -}; - -//////////////////////////////////////////////////////////////////////////////// - -TunnelRenderer::StreamSource::StreamSource(TunnelRenderer *owner) - : mOwner(owner), - mNumDeqeued(0), - mTimeOffsetUs(0ll), - mTimeOffsetChanged(false) { -} - -TunnelRenderer::StreamSource::~StreamSource() { -} - -void TunnelRenderer::StreamSource::setListener( - const sp &listener) { - mListener = listener; -} - -void TunnelRenderer::StreamSource::setBuffers( - const Vector > &buffers) { - mBuffers = buffers; -} - -void TunnelRenderer::StreamSource::onBufferAvailable(size_t index) { - CHECK_LT(index, mBuffers.size()); - - { - Mutex::Autolock autoLock(mLock); - mIndicesAvailable.push_back(index); - } - - doSomeWork(); -} - -uint32_t TunnelRenderer::StreamSource::flags() const { - return kFlagAlignedVideoData | kFlagIsRealTimeData; -} - -void TunnelRenderer::StreamSource::doSomeWork() { - Mutex::Autolock autoLock(mLock); - - while (!mIndicesAvailable.empty()) { - sp srcBuffer = mOwner->dequeueBuffer(); - if (srcBuffer == NULL) { - break; - } - - ++mNumDeqeued; - - if (mTimeOffsetChanged) { - sp extra = new AMessage; - - extra->setInt32( - IStreamListener::kKeyDiscontinuityMask, - ATSParser::DISCONTINUITY_TIME_OFFSET); - - extra->setInt64("offset", mTimeOffsetUs); - - mListener->issueCommand( - IStreamListener::DISCONTINUITY, - false /* synchronous */, - extra); - - mTimeOffsetChanged = false; - } - - ALOGV("dequeue TS packet of size %d", srcBuffer->size()); - - size_t index = *mIndicesAvailable.begin(); - mIndicesAvailable.erase(mIndicesAvailable.begin()); - - sp mem = mBuffers.itemAt(index); - CHECK_LE(srcBuffer->size(), mem->size()); - CHECK_EQ((srcBuffer->size() % 188), 0u); - - memcpy(mem->pointer(), srcBuffer->data(), srcBuffer->size()); - mListener->queueBuffer(index, srcBuffer->size()); - } -} - -void TunnelRenderer::StreamSource::setTimeOffset(int64_t offset) { - Mutex::Autolock autoLock(mLock); - - if (offset != mTimeOffsetUs) { - mTimeOffsetUs = offset; - mTimeOffsetChanged = true; - } -} - -//////////////////////////////////////////////////////////////////////////////// - -TunnelRenderer::TunnelRenderer( - const sp &bufferProducer) - : mSurfaceTex(bufferProducer), - mStartup(true) { - mStreamSource = new StreamSource(this); -} - -TunnelRenderer::~TunnelRenderer() { - destroyPlayer(); -} - 
-void TunnelRenderer::setTimeOffset(int64_t offset) { - mStreamSource->setTimeOffset(offset); -} - -void TunnelRenderer::onMessageReceived(const sp &msg) { - switch (msg->what()) { - default: - TRESPASS(); - } -} - -void TunnelRenderer::initPlayer() { - if (mSurfaceTex == NULL) { - mComposerClient = new SurfaceComposerClient; - CHECK_EQ(mComposerClient->initCheck(), (status_t)OK); - - DisplayInfo info; - SurfaceComposerClient::getDisplayInfo(0, &info); - ssize_t displayWidth = info.w; - ssize_t displayHeight = info.h; - - mSurfaceControl = - mComposerClient->createSurface( - String8("A Surface"), - displayWidth, - displayHeight, - PIXEL_FORMAT_RGB_565, - 0); - - CHECK(mSurfaceControl != NULL); - CHECK(mSurfaceControl->isValid()); - - SurfaceComposerClient::openGlobalTransaction(); - CHECK_EQ(mSurfaceControl->setLayer(INT_MAX), (status_t)OK); - CHECK_EQ(mSurfaceControl->show(), (status_t)OK); - SurfaceComposerClient::closeGlobalTransaction(); - - mSurface = mSurfaceControl->getSurface(); - CHECK(mSurface != NULL); - } - - sp sm = defaultServiceManager(); - sp binder = sm->getService(String16("media.player")); - sp service = interface_cast(binder); - CHECK(service.get() != NULL); - - mPlayerClient = new PlayerClient; - - mPlayer = service->create(mPlayerClient, 0); - CHECK(mPlayer != NULL); - CHECK_EQ(mPlayer->setDataSource(mStreamSource), (status_t)OK); - - mPlayer->setVideoSurfaceTexture( - mSurfaceTex != NULL ? mSurfaceTex : mSurface->getIGraphicBufferProducer()); - - mPlayer->start(); -} - -void TunnelRenderer::destroyPlayer() { - mStreamSource.clear(); - - mPlayer->setVideoSurfaceTexture(NULL); - - mPlayer->stop(); - mPlayer.clear(); - - if (mSurfaceTex == NULL) { - mSurface.clear(); - mSurfaceControl.clear(); - - mComposerClient->dispose(); - mComposerClient.clear(); - } -} - -void TunnelRenderer::queueBuffer(const sp &buffer) { - { - Mutex::Autolock autoLock(mLock); - mBuffers.push_back(buffer); - } - - if (mStartup) { - initPlayer(); - mStartup = false; - } - - mStreamSource->doSomeWork(); -} - -sp TunnelRenderer::dequeueBuffer() { - Mutex::Autolock autoLock(mLock); - if (mBuffers.empty()) { - return NULL; - } - - sp buf = *mBuffers.begin(); - mBuffers.erase(mBuffers.begin()); - - return buf; -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.h b/media/libstagefright/wifi-display/sink/TunnelRenderer.h deleted file mode 100644 index 479e73c..0000000 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.h +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef TUNNEL_RENDERER_H_ - -#define TUNNEL_RENDERER_H_ - -#include -#include - -namespace android { - -struct ABuffer; -struct SurfaceComposerClient; -struct SurfaceControl; -struct Surface; -struct IMediaPlayer; -struct IStreamListener; - -// This class reassembles incoming RTP packets into the correct order -// and sends the resulting transport stream to a mediaplayer instance -// for playback. -struct TunnelRenderer : public AHandler { - TunnelRenderer(const sp &bufferProducer); - - void queueBuffer(const sp &buffer); - sp dequeueBuffer(); - - void setTimeOffset(int64_t offset); - - int64_t getAvgLatenessUs() { - return 0ll; - } - -protected: - virtual void onMessageReceived(const sp &msg); - virtual ~TunnelRenderer(); - -private: - struct PlayerClient; - struct StreamSource; - - mutable Mutex mLock; - - sp mSurfaceTex; - - bool mStartup; - List > mBuffers; - - sp mComposerClient; - sp mSurfaceControl; - sp mSurface; - sp mPlayerClient; - sp mPlayer; - sp mStreamSource; - - void initPlayer(); - void destroyPlayer(); - - DISALLOW_EVIL_CONSTRUCTORS(TunnelRenderer); -}; - -} // namespace android - -#endif // TUNNEL_RENDERER_H_ diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index bc98402..639634b 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -24,7 +24,6 @@ #include "MediaReceiver.h" #include "ParsedMessage.h" #include "TimeSyncer.h" -#include "TunnelRenderer.h" #include #include @@ -341,12 +340,7 @@ void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { case MediaReceiver::kWhatAccessUnit: { if (mRenderer == NULL) { -#if USE_TUNNEL_RENDERER - mRenderer = new TunnelRenderer(mSurfaceTex); -#else mRenderer = new DirectRenderer(mSurfaceTex); -#endif - looper()->registerHandler(mRenderer); } @@ -378,16 +372,12 @@ void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { // dumpDelay(trackIndex, timeUs); -#if USE_TUNNEL_RENDERER - mRenderer->queueBuffer(accessUnit); -#else sp format; if (msg->findMessage("format", &format)) { mRenderer->setFormat(trackIndex, format); } mRenderer->queueAccessUnit(trackIndex, accessUnit); -#endif break; } @@ -726,13 +716,7 @@ status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) { status_t err = mMediaReceiver->addTrack(mode, &localRTPPort); if (err == OK) { - err = mMediaReceiver->initAsync( -#if USE_TUNNEL_RENDERER - MediaReceiver::MODE_TRANSPORT_STREAM_RAW -#else - MediaReceiver::MODE_TRANSPORT_STREAM -#endif - ); + err = mMediaReceiver->initAsync(MediaReceiver::MODE_TRANSPORT_STREAM); } if (err != OK) { diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h index f515177..4587fb5 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h @@ -32,9 +32,6 @@ struct DirectRenderer; struct MediaReceiver; struct ParsedMessage; struct TimeSyncer; -struct TunnelRenderer; - -#define USE_TUNNEL_RENDERER 0 // Represents the RTSP client acting as a wifi display sink. 
// Connects to a wifi display source and renders the incoming @@ -117,12 +114,7 @@ private: sp mMediaReceiverLooper; sp mMediaReceiver; - -#if USE_TUNNEL_RENDERER - sp mRenderer; -#else sp mRenderer; -#endif AString mPlaybackSessionID; int32_t mPlaybackSessionTimeoutSecs; -- cgit v1.1 From 6eb954f54e4a92b3c4bfbee177a3259d1320500d Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Mon, 25 Mar 2013 14:38:10 -0700 Subject: ToneGenerator: optimize silent tone Do not create an AudioTrack and start playback when a silent tone is requested to ToneGenerator. Bug 7946399 Change-Id: Ib9282871a56f7a862af7d1504ce3fbd7c18e34e2 --- media/libmedia/ToneGenerator.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index 42584fe..1c0268f 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -885,6 +885,11 @@ bool ToneGenerator::startTone(tone_type toneType, int durationMs) { if ((toneType < 0) || (toneType >= NUM_TONES)) return lResult; + toneType = getToneForRegion(toneType); + if (toneType == TONE_CDMA_SIGNAL_OFF) { + return true; + } + if (mState == TONE_IDLE) { ALOGV("startTone: try to re-init AudioTrack"); if (!initAudioTrack()) { @@ -897,7 +902,6 @@ bool ToneGenerator::startTone(tone_type toneType, int durationMs) { mLock.lock(); // Get descriptor for requested tone - toneType = getToneForRegion(toneType); mpNewToneDesc = &sToneDescriptors[toneType]; mDurationMs = durationMs; -- cgit v1.1 From 2aea9552aeba92bbaf9e56c666049ea2d14057b5 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 25 Mar 2013 15:46:52 -0700 Subject: In "special" mode we now establish a UDP RTCP channel in addition to the TCP RTP channel and provide feedback on the latency of arriving packets from the sink back to the source. This information is then used to throttle video bitrate. 
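The bitrate throttling mentioned here is implemented further down in this patch, in PlaybackSession::onSinkFeedback. As a rough standalone sketch of that policy, using only the thresholds and caps visible in the hunk (the function name chooseBitrate is illustrative and not part of the patch):

    #include <cstdint>

    // Sketch of the latency-driven bitrate adaptation: shrink the video
    // bitrate when the sink reports high average latency, grow it again
    // when latency is low, clamped to [500 kbit/s, 10 Mbit/s].
    static int32_t chooseBitrate(int32_t currentBps, int64_t avgLatencyUs) {
        if (avgLatencyUs > 300000ll) {          // sink is falling behind
            currentBps = static_cast<int32_t>(currentBps * 0.6);
            if (currentBps < 500000) {
                currentBps = 500000;            // floor at 500 kbit/s
            }
        } else if (avgLatencyUs < 100000ll) {   // plenty of headroom
            currentBps = static_cast<int32_t>(currentBps * 1.1);
            if (currentBps > 10000000) {
                currentBps = 10000000;          // cap at 10 Mbit/s
            }
        }
        return currentBps;  // would then be handed to Converter::setVideoBitrate()
    }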
Change-Id: Ic589a3cb65e4893a3ff67de947da6063d32a1c6e --- .../wifi-display/ANetworkSession.cpp | 4 +- .../libstagefright/wifi-display/MediaReceiver.cpp | 14 +++--- media/libstagefright/wifi-display/MediaReceiver.h | 5 ++- media/libstagefright/wifi-display/MediaSender.cpp | 32 ++++++++----- media/libstagefright/wifi-display/MediaSender.h | 4 +- .../wifi-display/rtp/RTPReceiver.cpp | 39 +++++++++++----- .../libstagefright/wifi-display/rtp/RTPReceiver.h | 2 +- .../libstagefright/wifi-display/rtp/RTPSender.cpp | 20 ++++++++- media/libstagefright/wifi-display/rtp/RTPSender.h | 2 + .../wifi-display/sink/WifiDisplaySink.cpp | 47 ++++++++++++------- .../wifi-display/source/Converter.cpp | 17 +++++++ .../libstagefright/wifi-display/source/Converter.h | 3 ++ .../wifi-display/source/PlaybackSession.cpp | 52 ++++++++++++++++++++-- .../wifi-display/source/PlaybackSession.h | 9 +++- .../wifi-display/source/WifiDisplaySource.cpp | 18 +++++--- 15 files changed, 206 insertions(+), 62 deletions(-) diff --git a/media/libstagefright/wifi-display/ANetworkSession.cpp b/media/libstagefright/wifi-display/ANetworkSession.cpp index df20ae2..88ca1cc 100644 --- a/media/libstagefright/wifi-display/ANetworkSession.cpp +++ b/media/libstagefright/wifi-display/ANetworkSession.cpp @@ -565,7 +565,7 @@ status_t ANetworkSession::Session::writeMore() { mSawSendFailure = true; } -#if 1 +#if 0 int numBytesQueued; int res = ioctl(mSocket, SIOCOUTQ, &numBytesQueued); if (res == 0 && numBytesQueued > 50 * 1024) { @@ -576,7 +576,7 @@ status_t ANetworkSession::Session::writeMore() { int64_t nowUs = ALooper::GetNowUs(); if (mLastStallReportUs < 0ll - || nowUs > mLastStallReportUs + 500000ll) { + || nowUs > mLastStallReportUs + 100000ll) { sp msg = mNotify->dup(); msg->setInt32("sessionID", mSessionID); msg->setInt32("reason", kWhatNetworkStall); diff --git a/media/libstagefright/wifi-display/MediaReceiver.cpp b/media/libstagefright/wifi-display/MediaReceiver.cpp index e2e791d..364acb9 100644 --- a/media/libstagefright/wifi-display/MediaReceiver.cpp +++ b/media/libstagefright/wifi-display/MediaReceiver.cpp @@ -47,7 +47,8 @@ MediaReceiver::~MediaReceiver() { } ssize_t MediaReceiver::addTrack( - RTPReceiver::TransportMode transportMode, + RTPReceiver::TransportMode rtpMode, + RTPReceiver::TransportMode rtcpMode, int32_t *localRTPPort) { if (mMode != MODE_UNDEFINED) { return INVALID_OPERATION; @@ -74,10 +75,8 @@ ssize_t MediaReceiver::addTrack( 97, RTPReceiver::PACKETIZATION_H264); status_t err = info.mReceiver->initAsync( - transportMode, // rtpMode - transportMode == RTPReceiver::TRANSPORT_UDP - ? 
transportMode - : RTPReceiver::TRANSPORT_NONE, // rtcpMode + rtpMode, + rtcpMode, localRTPPort); if (err != OK) { @@ -314,13 +313,14 @@ void MediaReceiver::postAccessUnit( notify->post(); } -status_t MediaReceiver::notifyLateness(size_t trackIndex, int64_t latenessUs) { +status_t MediaReceiver::informSender( + size_t trackIndex, const sp ¶ms) { if (trackIndex >= mTrackInfos.size()) { return -ERANGE; } TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); - return info->mReceiver->notifyLateness(latenessUs); + return info->mReceiver->informSender(params); } } // namespace android diff --git a/media/libstagefright/wifi-display/MediaReceiver.h b/media/libstagefright/wifi-display/MediaReceiver.h index cdfde99..afbb407 100644 --- a/media/libstagefright/wifi-display/MediaReceiver.h +++ b/media/libstagefright/wifi-display/MediaReceiver.h @@ -43,7 +43,8 @@ struct MediaReceiver : public AHandler { const sp ¬ify); ssize_t addTrack( - RTPReceiver::TransportMode transportMode, + RTPReceiver::TransportMode rtpMode, + RTPReceiver::TransportMode rtcpMode, int32_t *localRTPPort); status_t connectTrack( @@ -60,7 +61,7 @@ struct MediaReceiver : public AHandler { }; status_t initAsync(Mode mode); - status_t notifyLateness(size_t trackIndex, int64_t latenessUs); + status_t informSender(size_t trackIndex, const sp ¶ms); protected: virtual void onMessageReceived(const sp &msg); diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp index 123bc1c..33af66d 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -85,10 +85,11 @@ ssize_t MediaSender::addTrack(const sp &format, uint32_t flags) { status_t MediaSender::initAsync( ssize_t trackIndex, - RTPSender::TransportMode transportMode, const char *remoteHost, int32_t remoteRTPPort, + RTPSender::TransportMode rtpMode, int32_t remoteRTCPPort, + RTPSender::TransportMode rtcpMode, int32_t *localRTPPort) { if (trackIndex < 0) { if (mMode != MODE_UNDEFINED) { @@ -126,12 +127,9 @@ status_t MediaSender::initAsync( err = mTSSender->initAsync( remoteHost, remoteRTPPort, - transportMode, // rtpMode + rtpMode, remoteRTCPPort, - (transportMode == RTPSender::TRANSPORT_UDP - && remoteRTCPPort >= 0) - ? transportMode - : RTPSender::TRANSPORT_NONE, // rtcpMode + rtcpMode, localRTPPort); if (err != OK) { @@ -180,11 +178,9 @@ status_t MediaSender::initAsync( status_t err = info->mSender->initAsync( remoteHost, remoteRTPPort, - transportMode, // rtpMode + rtpMode, remoteRTCPPort, - (transportMode == RTPSender::TRANSPORT_UDP && remoteRTCPPort >= 0) - ? 
transportMode - : RTPSender::TRANSPORT_NONE, // rtcpMode + rtcpMode, localRTPPort); if (err != OK) { @@ -345,6 +341,22 @@ void MediaSender::onSenderNotify(const sp &msg) { break; } + case kWhatInformSender: + { + int64_t avgLatencyUs; + CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs)); + + int64_t maxLatencyUs; + CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs)); + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatInformSender); + notify->setInt64("avgLatencyUs", avgLatencyUs); + notify->setInt64("maxLatencyUs", maxLatencyUs); + notify->post(); + break; + } + default: TRESPASS(); } diff --git a/media/libstagefright/wifi-display/MediaSender.h b/media/libstagefright/wifi-display/MediaSender.h index 447abf7..04538ea 100644 --- a/media/libstagefright/wifi-display/MediaSender.h +++ b/media/libstagefright/wifi-display/MediaSender.h @@ -43,6 +43,7 @@ struct MediaSender : public AHandler { kWhatInitDone, kWhatError, kWhatNetworkStall, + kWhatInformSender, }; MediaSender( @@ -59,10 +60,11 @@ struct MediaSender : public AHandler { // If trackIndex == -1, initialize for transport stream muxing. status_t initAsync( ssize_t trackIndex, - RTPSender::TransportMode transportMode, const char *remoteHost, int32_t remoteRTPPort, + RTPSender::TransportMode rtpMode, int32_t remoteRTCPPort, + RTPSender::TransportMode rtcpMode, int32_t *localRTPPort); status_t queueAccessUnit( diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp index c55e0be..238fb82 100644 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp @@ -567,8 +567,18 @@ status_t RTPReceiver::connect( return OK; } -status_t RTPReceiver::notifyLateness(int64_t latenessUs) { - sp buf = new ABuffer(20); +status_t RTPReceiver::informSender(const sp ¶ms) { + if (!mRTCPConnected) { + return INVALID_OPERATION; + } + + int64_t avgLatencyUs; + CHECK(params->findInt64("avgLatencyUs", &avgLatencyUs)); + + int64_t maxLatencyUs; + CHECK(params->findInt64("maxLatencyUs", &maxLatencyUs)); + + sp buf = new ABuffer(28); uint8_t *ptr = buf->data(); ptr[0] = 0x80 | 0; @@ -587,14 +597,23 @@ status_t RTPReceiver::notifyLateness(int64_t latenessUs) { ptr[10] = 't'; ptr[11] = 'e'; - ptr[12] = latenessUs >> 56; - ptr[13] = (latenessUs >> 48) & 0xff; - ptr[14] = (latenessUs >> 40) & 0xff; - ptr[15] = (latenessUs >> 32) & 0xff; - ptr[16] = (latenessUs >> 24) & 0xff; - ptr[17] = (latenessUs >> 16) & 0xff; - ptr[18] = (latenessUs >> 8) & 0xff; - ptr[19] = latenessUs & 0xff; + ptr[12] = avgLatencyUs >> 56; + ptr[13] = (avgLatencyUs >> 48) & 0xff; + ptr[14] = (avgLatencyUs >> 40) & 0xff; + ptr[15] = (avgLatencyUs >> 32) & 0xff; + ptr[16] = (avgLatencyUs >> 24) & 0xff; + ptr[17] = (avgLatencyUs >> 16) & 0xff; + ptr[18] = (avgLatencyUs >> 8) & 0xff; + ptr[19] = avgLatencyUs & 0xff; + + ptr[20] = maxLatencyUs >> 56; + ptr[21] = (maxLatencyUs >> 48) & 0xff; + ptr[22] = (maxLatencyUs >> 40) & 0xff; + ptr[23] = (maxLatencyUs >> 32) & 0xff; + ptr[24] = (maxLatencyUs >> 24) & 0xff; + ptr[25] = (maxLatencyUs >> 16) & 0xff; + ptr[26] = (maxLatencyUs >> 8) & 0xff; + ptr[27] = maxLatencyUs & 0xff; mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.h b/media/libstagefright/wifi-display/rtp/RTPReceiver.h index abbe6a8..630bce9 100644 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.h +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.h @@ -56,7 +56,7 
@@ struct RTPReceiver : public RTPBase, public AHandler { int32_t remoteRTPPort, int32_t remoteRTCPPort); - status_t notifyLateness(int64_t latenessUs); + status_t informSender(const sp ¶ms); protected: virtual ~RTPReceiver(); diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp index c686e01..9eeeabd 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -91,8 +91,8 @@ status_t RTPSender::initAsync( CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED); CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED); - if (rtcpMode == TRANSPORT_NONE && remoteRTCPPort >= 0 - || rtcpMode != TRANSPORT_NONE && remoteRTCPPort < 0) { + if ((rtcpMode == TRANSPORT_NONE && remoteRTCPPort >= 0) + || (rtcpMode != TRANSPORT_NONE && remoteRTCPPort < 0)) { return INVALID_OPERATION; } @@ -616,6 +616,7 @@ status_t RTPSender::onRTCPData(const sp &buffer) { break; case 204: // APP + parseAPP(data, headerLength); break; case 205: // TSFB (transport layer specific feedback) @@ -721,6 +722,21 @@ status_t RTPSender::parseTSFB(const uint8_t *data, size_t size) { return OK; } +status_t RTPSender::parseAPP(const uint8_t *data, size_t size) { + if (!memcmp("late", &data[8], 4)) { + int64_t avgLatencyUs = (int64_t)U64_AT(&data[12]); + int64_t maxLatencyUs = (int64_t)U64_AT(&data[20]); + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatInformSender); + notify->setInt64("avgLatencyUs", avgLatencyUs); + notify->setInt64("maxLatencyUs", maxLatencyUs); + notify->post(); + } + + return OK; +} + void RTPSender::notifyInitDone(status_t err) { sp notify = mNotify->dup(); notify->setInt32("what", kWhatInitDone); diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h index 8409b8d..3a926ea 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.h +++ b/media/libstagefright/wifi-display/rtp/RTPSender.h @@ -37,6 +37,7 @@ struct RTPSender : public RTPBase, public AHandler { kWhatInitDone, kWhatError, kWhatNetworkStall, + kWhatInformSender, }; RTPSender( const sp &netSession, @@ -105,6 +106,7 @@ private: status_t onRTCPData(const sp &data); status_t parseReceiverReport(const uint8_t *data, size_t size); status_t parseTSFB(const uint8_t *data, size_t size); + status_t parseAPP(const uint8_t *data, size_t size); void notifyInitDone(status_t err); void notifyError(status_t err); diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index 639634b..f45a47f 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -269,12 +269,14 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { if (mLatencyCount > 0) { int64_t avgLatencyUs = mLatencySumUs / mLatencyCount; - ALOGI("avg. latency = %lld ms (max %lld ms)", + ALOGV("avg. latency = %lld ms (max %lld ms)", avgLatencyUs / 1000ll, mLatencyMaxUs / 1000ll); - mMediaReceiver->notifyLateness( - 0 /* trackIndex */, avgLatencyUs); + sp params = new AMessage; + params->setInt64("avgLatencyUs", avgLatencyUs); + params->setInt64("maxLatencyUs", mLatencyMaxUs); + mMediaReceiver->informSender(0 /* trackIndex */, params); } mLatencyCount = 0; @@ -356,8 +358,6 @@ void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { // client time = server time - time offset. 
timeUs -= mTimeOffsetUs; - accessUnit->meta()->setInt64("timeUs", timeUs); - size_t trackIndex; CHECK(msg->findSize("trackIndex", &trackIndex)); @@ -372,6 +372,9 @@ void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { // dumpDelay(trackIndex, timeUs); + timeUs += 220000ll; // Assume 220 ms of latency + accessUnit->meta()->setInt64("timeUs", timeUs); + sp format; if (msg->findMessage("format", &format)) { mRenderer->setFormat(trackIndex, format); @@ -486,7 +489,9 @@ status_t WifiDisplaySink::onReceiveSetupResponse( } status_t WifiDisplaySink::configureTransport(const sp &msg) { - if (mUsingTCPTransport) { + if (mUsingTCPTransport && !(mFlags & FLAG_SPECIAL_MODE)) { + // In "special" mode we still use a UDP RTCP back-channel that + // needs connecting. return OK; } @@ -703,17 +708,18 @@ status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) { mMediaReceiver = new MediaReceiver(mNetSession, notify); mMediaReceiverLooper->registerHandler(mMediaReceiver); - RTPReceiver::TransportMode mode = RTPReceiver::TRANSPORT_UDP; + RTPReceiver::TransportMode rtpMode = RTPReceiver::TRANSPORT_UDP; if (mUsingTCPTransport) { if (mUsingTCPInterleaving) { - mode = RTPReceiver::TRANSPORT_TCP_INTERLEAVED; + rtpMode = RTPReceiver::TRANSPORT_TCP_INTERLEAVED; } else { - mode = RTPReceiver::TRANSPORT_TCP; + rtpMode = RTPReceiver::TRANSPORT_TCP; } } int32_t localRTPPort; - status_t err = mMediaReceiver->addTrack(mode, &localRTPPort); + status_t err = mMediaReceiver->addTrack( + rtpMode, RTPReceiver::TRANSPORT_UDP /* rtcpMode */, &localRTPPort); if (err == OK) { err = mMediaReceiver->initAsync(MediaReceiver::MODE_TRANSPORT_STREAM); @@ -733,13 +739,22 @@ status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) { AppendCommonResponse(&request, mNextCSeq); - if (mode == RTPReceiver::TRANSPORT_TCP_INTERLEAVED) { + if (rtpMode == RTPReceiver::TRANSPORT_TCP_INTERLEAVED) { request.append("Transport: RTP/AVP/TCP;interleaved=0-1\r\n"); - } else if (mode == RTPReceiver::TRANSPORT_TCP) { - request.append( - StringPrintf( - "Transport: RTP/AVP/TCP;unicast;client_port=%d\r\n", - localRTPPort)); + } else if (rtpMode == RTPReceiver::TRANSPORT_TCP) { + if (mFlags & FLAG_SPECIAL_MODE) { + // This isn't quite true, since the RTP connection is through TCP + // and the RTCP connection through UDP... 
+ request.append( + StringPrintf( + "Transport: RTP/AVP/TCP;unicast;client_port=%d-%d\r\n", + localRTPPort, localRTPPort + 1)); + } else { + request.append( + StringPrintf( + "Transport: RTP/AVP/TCP;unicast;client_port=%d\r\n", + localRTPPort)); + } } else { request.append( StringPrintf( diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index bb8c387..d41e1e6 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -622,6 +622,7 @@ status_t Converter::feedEncoderInputBuffers() { } status_t Converter::doMoreWork() { +#if 0 if (mIsVideo) { int32_t videoBitrate = getBitrate("media.wfd.video-bitrate", 5000000); if (videoBitrate != mPrevVideoBitrate) { @@ -633,6 +634,7 @@ status_t Converter::doMoreWork() { mPrevVideoBitrate = videoBitrate; } } +#endif status_t err; @@ -708,4 +710,19 @@ void Converter::dropAFrame() { (new AMessage(kWhatDropAFrame, id()))->post(); } +int32_t Converter::getVideoBitrate() const { + return mPrevVideoBitrate; +} + +void Converter::setVideoBitrate(int32_t bitRate) { + if (mIsVideo && mEncoder != NULL && bitRate != mPrevVideoBitrate) { + sp params = new AMessage; + params->setInt32("videoBitrate", bitRate); + + mEncoder->setParameters(params); + + mPrevVideoBitrate = bitRate; + } +} + } // namespace android diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h index a418f69..538f10a 100644 --- a/media/libstagefright/wifi-display/source/Converter.h +++ b/media/libstagefright/wifi-display/source/Converter.h @@ -70,6 +70,9 @@ struct Converter : public AHandler { void shutdownAsync(); + int32_t getVideoBitrate() const; + void setVideoBitrate(int32_t bitrate); + protected: virtual ~Converter(); virtual void onMessageReceived(const sp &msg); diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index a3b6542..68aa9cb 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -362,8 +362,11 @@ WifiDisplaySource::PlaybackSession::PlaybackSession( } status_t WifiDisplaySource::PlaybackSession::init( - const char *clientIP, int32_t clientRtp, int32_t clientRtcp, - RTPSender::TransportMode transportMode, + const char *clientIP, + int32_t clientRtp, + RTPSender::TransportMode rtpMode, + int32_t clientRtcp, + RTPSender::TransportMode rtcpMode, bool enableAudio, bool usePCMAudio, bool enableVideo, @@ -385,10 +388,11 @@ status_t WifiDisplaySource::PlaybackSession::init( if (err == OK) { err = mMediaSender->initAsync( -1 /* trackIndex */, - transportMode, clientIP, clientRtp, + rtpMode, clientRtcp, + rtcpMode, &mLocalRTPPort); } @@ -548,6 +552,8 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( converter->dropAFrame(); } } + } else if (what == MediaSender::kWhatInformSender) { + onSinkFeedback(msg); } else { TRESPASS(); } @@ -643,6 +649,46 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( } } +void WifiDisplaySource::PlaybackSession::onSinkFeedback(const sp &msg) { + int64_t avgLatencyUs; + CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs)); + + int64_t maxLatencyUs; + CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs)); + + ALOGI("sink reports avg. 
latency of %lld ms (max %lld ms)", + avgLatencyUs / 1000ll, + maxLatencyUs / 1000ll); + + if (mVideoTrackIndex >= 0) { + const sp &videoTrack = mTracks.valueFor(mVideoTrackIndex); + sp converter = videoTrack->converter(); + if (converter != NULL) { + int32_t videoBitrate = converter->getVideoBitrate(); + + if (avgLatencyUs > 300000ll) { + videoBitrate *= 0.6; + + if (videoBitrate < 500000) { + videoBitrate = 500000; // cap at 500kbit/sec + } + } else if (avgLatencyUs < 100000ll) { + videoBitrate *= 1.1; + + if (videoBitrate > 10000000) { + videoBitrate = 10000000; // cap at 10Mbit/sec + } + } + + if (videoBitrate != converter->getVideoBitrate()) { + ALOGI("setting video bitrate to %d bps", videoBitrate); + + converter->setVideoBitrate(videoBitrate); + } + } + } +} + status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer( bool enableAudio, bool enableVideo) { DataSource::RegisterDefaultSniffers(); diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h index da207e2..39086a1 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.h +++ b/media/libstagefright/wifi-display/source/PlaybackSession.h @@ -44,8 +44,11 @@ struct WifiDisplaySource::PlaybackSession : public AHandler { const char *path = NULL); status_t init( - const char *clientIP, int32_t clientRtp, int32_t clientRtcp, - RTPSender::TransportMode transportMode, + const char *clientIP, + int32_t clientRtp, + RTPSender::TransportMode rtpMode, + int32_t clientRtcp, + RTPSender::TransportMode rtcpMode, bool enableAudio, bool usePCMAudio, bool enableVideo, @@ -149,6 +152,8 @@ private: void schedulePullExtractor(); void onPullExtractor(); + void onSinkFeedback(const sp &msg); + DISALLOW_EVIL_CONSTRUCTORS(PlaybackSession); }; diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 5167cb3..f2e659a 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -1159,7 +1159,7 @@ status_t WifiDisplaySource::onSetupRequest( return ERROR_MALFORMED; } - RTPSender::TransportMode transportMode = RTPSender::TRANSPORT_UDP; + RTPSender::TransportMode rtpMode = RTPSender::TRANSPORT_UDP; int clientRtp, clientRtcp; if (transport.startsWith("RTP/AVP/TCP;")) { @@ -1168,7 +1168,7 @@ status_t WifiDisplaySource::onSetupRequest( transport.c_str(), "interleaved", &interleaved) && sscanf(interleaved.c_str(), "%d-%d", &clientRtp, &clientRtcp) == 2) { - transportMode = RTPSender::TRANSPORT_TCP_INTERLEAVED; + rtpMode = RTPSender::TRANSPORT_TCP_INTERLEAVED; } else { bool badRequest = false; @@ -1190,7 +1190,7 @@ status_t WifiDisplaySource::onSetupRequest( return ERROR_MALFORMED; } - transportMode = RTPSender::TRANSPORT_TCP; + rtpMode = RTPSender::TRANSPORT_TCP; } } else if (transport.startsWith("RTP/AVP;unicast;") || transport.startsWith("RTP/AVP/UDP;unicast;")) { @@ -1249,11 +1249,17 @@ status_t WifiDisplaySource::onSetupRequest( return ERROR_MALFORMED; } + RTPSender::TransportMode rtcpMode = RTPSender::TRANSPORT_UDP; + if (clientRtcp < 0) { + rtcpMode = RTPSender::TRANSPORT_NONE; + } + status_t err = playbackSession->init( mClientInfo.mRemoteIP.c_str(), clientRtp, + rtpMode, clientRtcp, - transportMode, + rtcpMode, mSinkSupportsAudio, mUsingPCMAudio, mSinkSupportsVideo, @@ -1282,7 +1288,7 @@ status_t WifiDisplaySource::onSetupRequest( AString response = "RTSP/1.0 200 OK\r\n"; 
AppendCommonResponse(&response, cseq, playbackSessionID); - if (transportMode == RTPSender::TRANSPORT_TCP_INTERLEAVED) { + if (rtpMode == RTPSender::TRANSPORT_TCP_INTERLEAVED) { response.append( StringPrintf( "Transport: RTP/AVP/TCP;interleaved=%d-%d;", @@ -1291,7 +1297,7 @@ status_t WifiDisplaySource::onSetupRequest( int32_t serverRtp = playbackSession->getRTPPort(); AString transportString = "UDP"; - if (transportMode == RTPSender::TRANSPORT_TCP) { + if (rtpMode == RTPSender::TRANSPORT_TCP) { transportString = "TCP"; } -- cgit v1.1 From cd77d4a1d38b7609a03f6826a1ff5fa7c98aa34f Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 26 Mar 2013 10:19:24 -0700 Subject: Identify network servers and clients with a OS version related string and put the logic to create that string in one location instead of many... Change-Id: I1f729f2e7376cd3b45eea0e48f7bd10084b41b39 --- include/media/stagefright/Utils.h | 3 +++ media/libstagefright/Utils.cpp | 17 ++++++++++++++- media/libstagefright/chromium_http/support.cpp | 16 ++------------ media/libstagefright/rtsp/ARTSPConnection.cpp | 25 ++++++---------------- media/libstagefright/rtsp/ARTSPConnection.h | 6 ++---- media/libstagefright/rtsp/MyHandler.h | 18 ++-------------- .../wifi-display/sink/WifiDisplaySink.cpp | 6 +++++- .../wifi-display/sink/WifiDisplaySink.h | 2 ++ .../wifi-display/source/WifiDisplaySource.cpp | 6 +++++- .../wifi-display/source/WifiDisplaySource.h | 2 ++ 10 files changed, 45 insertions(+), 56 deletions(-) diff --git a/include/media/stagefright/Utils.h b/include/media/stagefright/Utils.h index 8213af9..73940d3 100644 --- a/include/media/stagefright/Utils.h +++ b/include/media/stagefright/Utils.h @@ -18,6 +18,7 @@ #define UTILS_H_ +#include #include #include #include @@ -45,6 +46,8 @@ status_t convertMetaDataToMessage( void convertMessageToMetaData( const sp &format, sp &meta); +AString MakeUserAgent(); + } // namespace android #endif // UTILS_H_ diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp index 8ed07bf..b0df379 100644 --- a/media/libstagefright/Utils.cpp +++ b/media/libstagefright/Utils.cpp @@ -21,7 +21,7 @@ #include "include/ESDS.h" #include - +#include #include #include #include @@ -455,6 +455,21 @@ void convertMessageToMetaData(const sp &msg, sp &meta) { #endif } +AString MakeUserAgent() { + AString ua; + ua.append("stagefright/1.2 (Linux;Android "); + +#if (PROPERTY_VALUE_MAX < 8) +#error "PROPERTY_VALUE_MAX must be at least 8" +#endif + + char value[PROPERTY_VALUE_MAX]; + property_get("ro.build.version.release", value, "Unknown"); + ua.append(value); + ua.append(")"); + + return ua; +} } // namespace android diff --git a/media/libstagefright/chromium_http/support.cpp b/media/libstagefright/chromium_http/support.cpp index 13ae3df..832e86d 100644 --- a/media/libstagefright/chromium_http/support.cpp +++ b/media/libstagefright/chromium_http/support.cpp @@ -36,8 +36,8 @@ #include "include/ChromiumHTTPDataSource.h" #include -#include #include +#include #include namespace android { @@ -156,19 +156,7 @@ net::NetLog::LogLevel SfNetLog::GetLogLevel() const { //////////////////////////////////////////////////////////////////////////////// SfRequestContext::SfRequestContext() { - AString ua; - ua.append("stagefright/1.2 (Linux;Android "); - -#if (PROPERTY_VALUE_MAX < 8) -#error "PROPERTY_VALUE_MAX must be at least 8" -#endif - - char value[PROPERTY_VALUE_MAX]; - property_get("ro.build.version.release", value, "Unknown"); - ua.append(value); - ua.append(")"); - - mUserAgent = ua.c_str(); + mUserAgent = 
MakeUserAgent().c_str(); set_net_log(new SfNetLog()); diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp index 161bd4f..3068541 100644 --- a/media/libstagefright/rtsp/ARTSPConnection.cpp +++ b/media/libstagefright/rtsp/ARTSPConnection.cpp @@ -20,13 +20,12 @@ #include "ARTSPConnection.h" -#include - #include #include #include #include #include +#include #include #include @@ -41,6 +40,10 @@ namespace android { // static const int64_t ARTSPConnection::kSelectTimeoutUs = 1000ll; +// static +const AString ARTSPConnection::sUserAgent = + StringPrintf("User-Agent: %s\r\n", MakeUserAgent().c_str()); + ARTSPConnection::ARTSPConnection(bool uidValid, uid_t uid) : mUIDValid(uidValid), mUID(uid), @@ -50,7 +53,6 @@ ARTSPConnection::ARTSPConnection(bool uidValid, uid_t uid) mConnectionID(0), mNextCSeq(0), mReceiveResponseEventPending(false) { - MakeUserAgent(&mUserAgent); } ARTSPConnection::~ARTSPConnection() { @@ -1032,27 +1034,12 @@ void ARTSPConnection::addAuthentication(AString *request) { #endif } -// static -void ARTSPConnection::MakeUserAgent(AString *userAgent) { - userAgent->clear(); - userAgent->setTo("User-Agent: stagefright/1.1 (Linux;Android "); - -#if (PROPERTY_VALUE_MAX < 8) -#error "PROPERTY_VALUE_MAX must be at least 8" -#endif - - char value[PROPERTY_VALUE_MAX]; - property_get("ro.build.version.release", value, "Unknown"); - userAgent->append(value); - userAgent->append(")\r\n"); -} - void ARTSPConnection::addUserAgent(AString *request) const { // Find the boundary between headers and the body. ssize_t i = request->find("\r\n\r\n"); CHECK_GE(i, 0); - request->insert(mUserAgent, i + 2); + request->insert(sUserAgent, i + 2); } } // namespace android diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h index 68f2d59..1fe9c99 100644 --- a/media/libstagefright/rtsp/ARTSPConnection.h +++ b/media/libstagefright/rtsp/ARTSPConnection.h @@ -74,6 +74,8 @@ private: static const int64_t kSelectTimeoutUs; + static const AString sUserAgent; + bool mUIDValid; uid_t mUID; State mState; @@ -89,8 +91,6 @@ private: sp mObserveBinaryMessage; - AString mUserAgent; - void performDisconnect(); void onConnect(const sp &msg); @@ -122,8 +122,6 @@ private: static bool ParseSingleUnsignedLong( const char *from, unsigned long *x); - static void MakeUserAgent(AString *userAgent); - DISALLOW_EVIL_CONSTRUCTORS(ARTSPConnection); }; diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index 95ed43a..e067e20 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -28,13 +28,13 @@ #include "ASessionDescription.h" #include -#include #include #include #include #include #include +#include #include #include @@ -56,19 +56,6 @@ static int64_t kPauseDelayUs = 3000000ll; namespace android { -static void MakeUserAgentString(AString *s) { - s->setTo("stagefright/1.1 (Linux;Android "); - -#if (PROPERTY_VALUE_MAX < 8) -#error "PROPERTY_VALUE_MAX must be at least 8" -#endif - - char value[PROPERTY_VALUE_MAX]; - property_get("ro.build.version.release", value, "Unknown"); - s->append(value); - s->append(")"); -} - static bool GetAttribute(const char *s, const char *key, AString *value) { value->clear(); @@ -279,8 +266,7 @@ struct MyHandler : public AHandler { data[offset++] = 6; // TOOL - AString tool; - MakeUserAgentString(&tool); + AString tool = MakeUserAgent(); data[offset++] = tool.size(); diff --git 
a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index f45a47f..1a08bf5 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -30,9 +30,13 @@ #include #include #include +#include namespace android { +// static +const AString WifiDisplaySink::sUserAgent = MakeUserAgent(); + WifiDisplaySink::WifiDisplaySink( uint32_t flags, const sp &netSession, @@ -892,7 +896,7 @@ void WifiDisplaySink::AppendCommonResponse(AString *response, int32_t cseq) { response->append(buf); response->append("\r\n"); - response->append("User-Agent: stagefright/1.1 (Linux;Android 4.1)\r\n"); + response->append(StringPrintf("User-Agent: %s\r\n", sUserAgent.c_str())); if (cseq >= 0) { response->append(StringPrintf("CSeq: %d\r\n", cseq)); diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h index 4587fb5..7c62057 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h @@ -96,6 +96,8 @@ private: static const int64_t kReportLatenessEveryUs = 1000000ll; + static const AString sUserAgent; + State mState; uint32_t mFlags; VideoFormats mSinkSupportedVideoFormats; diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index f2e659a..792a9c5 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -34,6 +34,7 @@ #include #include #include +#include #include #include @@ -42,6 +43,9 @@ namespace android { +// static +const AString WifiDisplaySource::sUserAgent = MakeUserAgent(); + WifiDisplaySource::WifiDisplaySource( const sp &netSession, const sp &client, @@ -1559,7 +1563,7 @@ void WifiDisplaySource::AppendCommonResponse( response->append(buf); response->append("\r\n"); - response->append("Server: Mine/1.0\r\n"); + response->append(StringPrintf("Server: %s\r\n", sUserAgent.c_str())); if (cseq >= 0) { response->append(StringPrintf("CSeq: %d\r\n", cseq)); diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index 3a1b0f9..3efa0b4 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ -113,6 +113,8 @@ private: static const int64_t kPlaybackSessionTimeoutUs = kPlaybackSessionTimeoutSecs * 1000000ll; + static const AString sUserAgent; + State mState; VideoFormats mSupportedSourceVideoFormats; sp mNetSession; -- cgit v1.1 From 6386b50b67185a966d43ee761acdfe7add569d10 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Tue, 26 Mar 2013 12:25:30 -0700 Subject: ToneGenerator: fix AudioTrack pointer init The pointer to AudioTrack should be initialized before early return from ToneGenerator constructor because it is tested by the destructor. 
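The rationale above boils down to an initialization-ordering rule: any member the destructor inspects must be assigned before the constructor can bail out early. A minimal sketch of the pattern, with a plain pointer standing in for the real AudioTrack member, might be:

    #include <cstdio>

    // Illustrative only -- not the ToneGenerator code itself. The destructor
    // releases mTrack, so mTrack must be NULL before any early return in the
    // constructor, which is exactly why the fix moves the mpAudioTrack = NULL
    // assignment ahead of the early-return path.
    class Generator {
    public:
        explicit Generator(bool initFails) : mTrack(nullptr) {
            if (initFails) {
                return;              // early return: mTrack is already a safe NULL
            }
            mTrack = new int(42);    // stand-in for creating the AudioTrack later
        }
        ~Generator() { delete mTrack; }  // tested/used here, so it must always be set
    private:
        int* mTrack;
    };

    int main() {
        Generator failed(true);   // the early-return path no longer leaves garbage behind
        Generator ok(false);
        (void)failed; (void)ok;
        return 0;
    }
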
Bug 8140963 Change-Id: I9a7dfb60ba162b75dfaa5630ab7fc9485afd0074 --- media/libmedia/ToneGenerator.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index 1c0268f..58d495e 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -803,6 +803,7 @@ ToneGenerator::ToneGenerator(audio_stream_type_t streamType, float volume, bool ALOGV("ToneGenerator constructor: streamType=%d, volume=%f", streamType, volume); mState = TONE_IDLE; + mpAudioTrack = NULL; if (AudioSystem::getOutputSamplingRate(&mSamplingRate, streamType) != NO_ERROR) { ALOGE("Unable to marshal AudioFlinger"); @@ -811,7 +812,6 @@ ToneGenerator::ToneGenerator(audio_stream_type_t streamType, float volume, bool mThreadCanCallJava = threadCanCallJava; mStreamType = streamType; mVolume = volume; - mpAudioTrack = NULL; mpToneDesc = NULL; mpNewToneDesc = NULL; // Generate tone by chunks of 20 ms to keep cadencing precision -- cgit v1.1 From eaf5381f38bf6c3ecb5fe32a8351c26a447549f5 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 26 Mar 2013 13:52:43 -0700 Subject: Adapt frame rate instead of keeping that constant and tweaking bitrate Change-Id: I889abbbe1237e1a8fdd7135cdc91a2e9728ff39b --- .../wifi-display/source/Converter.cpp | 22 ++----- .../libstagefright/wifi-display/source/Converter.h | 2 + .../wifi-display/source/PlaybackSession.cpp | 71 ++++++++++++++++++---- .../wifi-display/source/RepeaterSource.cpp | 19 ++++++ .../wifi-display/source/RepeaterSource.h | 3 + 5 files changed, 88 insertions(+), 29 deletions(-) diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index d41e1e6..0a8462c 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -135,7 +135,9 @@ bool Converter::needToManuallyPrependSPSPPS() const { return mNeedToManuallyPrependSPSPPS; } -static int32_t getBitrate(const char *propName, int32_t defaultValue) { +// static +int32_t Converter::GetInt32Property( + const char *propName, int32_t defaultValue) { char val[PROPERTY_VALUE_MAX]; if (property_get(propName, val, NULL)) { char *end; @@ -185,8 +187,8 @@ status_t Converter::initEncoder() { mOutputFormat->setString("mime", outputMIME.c_str()); - int32_t audioBitrate = getBitrate("media.wfd.audio-bitrate", 128000); - int32_t videoBitrate = getBitrate("media.wfd.video-bitrate", 5000000); + int32_t audioBitrate = GetInt32Property("media.wfd.audio-bitrate", 128000); + int32_t videoBitrate = GetInt32Property("media.wfd.video-bitrate", 5000000); mPrevVideoBitrate = videoBitrate; ALOGI("using audio bitrate of %d bps, video bitrate of %d bps", @@ -622,20 +624,6 @@ status_t Converter::feedEncoderInputBuffers() { } status_t Converter::doMoreWork() { -#if 0 - if (mIsVideo) { - int32_t videoBitrate = getBitrate("media.wfd.video-bitrate", 5000000); - if (videoBitrate != mPrevVideoBitrate) { - sp params = new AMessage; - - params->setInt32("videoBitrate", videoBitrate); - mEncoder->setParameters(params); - - mPrevVideoBitrate = videoBitrate; - } - } -#endif - status_t err; for (;;) { diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h index 538f10a..ba297c4 100644 --- a/media/libstagefright/wifi-display/source/Converter.h +++ b/media/libstagefright/wifi-display/source/Converter.h @@ -73,6 +73,8 @@ struct Converter : public AHandler { int32_t 
getVideoBitrate() const; void setVideoBitrate(int32_t bitrate); + static int32_t GetInt32Property(const char *propName, int32_t defaultValue); + protected: virtual ~Converter(); virtual void onMessageReceived(const sp &msg); diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index 68aa9cb..715d0b5 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -27,6 +27,7 @@ #include "WifiDisplaySource.h" #include +#include #include #include #include @@ -66,6 +67,7 @@ struct WifiDisplaySource::PlaybackSession::Track : public AHandler { bool isAudio() const; const sp &converter() const; + const sp &repeaterSource() const; ssize_t mediaSenderTrackIndex() const; void setMediaSenderTrackIndex(size_t index); @@ -171,6 +173,11 @@ const sp &WifiDisplaySource::PlaybackSession::Track::converter() cons return mConverter; } +const sp & +WifiDisplaySource::PlaybackSession::Track::repeaterSource() const { + return mRepeaterSource; +} + ssize_t WifiDisplaySource::PlaybackSession::Track::mediaSenderTrackIndex() const { CHECK_GE(mMediaSenderTrackIndex, 0); return mMediaSenderTrackIndex; @@ -663,27 +670,67 @@ void WifiDisplaySource::PlaybackSession::onSinkFeedback(const sp &msg) if (mVideoTrackIndex >= 0) { const sp &videoTrack = mTracks.valueFor(mVideoTrackIndex); sp converter = videoTrack->converter(); - if (converter != NULL) { - int32_t videoBitrate = converter->getVideoBitrate(); - if (avgLatencyUs > 300000ll) { - videoBitrate *= 0.6; + if (converter != NULL) { + int32_t videoBitrate = + Converter::GetInt32Property("media.wfd.video-bitrate", -1); + + char val[PROPERTY_VALUE_MAX]; + if (videoBitrate < 0 + && property_get("media.wfd.video-bitrate", val, NULL) + && !strcasecmp("adaptive", val)) { + videoBitrate = converter->getVideoBitrate(); + + if (avgLatencyUs > 300000ll) { + videoBitrate *= 0.6; + } else if (avgLatencyUs < 100000ll) { + videoBitrate *= 1.1; + } + } + if (videoBitrate > 0) { if (videoBitrate < 500000) { - videoBitrate = 500000; // cap at 500kbit/sec + videoBitrate = 500000; + } else if (videoBitrate > 10000000) { + videoBitrate = 10000000; + } + + if (videoBitrate != converter->getVideoBitrate()) { + ALOGI("setting video bitrate to %d bps", videoBitrate); + + converter->setVideoBitrate(videoBitrate); } - } else if (avgLatencyUs < 100000ll) { - videoBitrate *= 1.1; + } + } + + sp repeaterSource = videoTrack->repeaterSource(); + if (repeaterSource != NULL) { + double rateHz = + Converter::GetInt32Property( + "media.wfd.video-framerate", -1); - if (videoBitrate > 10000000) { - videoBitrate = 10000000; // cap at 10Mbit/sec + if (rateHz < 0.0) { + rateHz = repeaterSource->getFrameRate(); + + if (avgLatencyUs > 300000ll) { + rateHz *= 0.9; + } else if (avgLatencyUs < 200000ll) { + rateHz *= 1.1; } } - if (videoBitrate != converter->getVideoBitrate()) { - ALOGI("setting video bitrate to %d bps", videoBitrate); + if (rateHz > 0) { + if (rateHz < 5.0) { + rateHz = 5.0; + } else if (rateHz > 30.0) { + rateHz = 30.0; + } + + if (rateHz != repeaterSource->getFrameRate()) { + ALOGI("setting frame rate to %.2f Hz", rateHz); - converter->setVideoBitrate(videoBitrate); + repeaterSource->setFrameRate(rateHz); + } } } } diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.cpp b/media/libstagefright/wifi-display/source/RepeaterSource.cpp index 72be927..cc8dee3 100644 --- 
a/media/libstagefright/wifi-display/source/RepeaterSource.cpp +++ b/media/libstagefright/wifi-display/source/RepeaterSource.cpp @@ -27,6 +27,25 @@ RepeaterSource::~RepeaterSource() { CHECK(!mStarted); } +double RepeaterSource::getFrameRate() const { + return mRateHz; +} + +void RepeaterSource::setFrameRate(double rateHz) { + Mutex::Autolock autoLock(mLock); + + if (rateHz == mRateHz) { + return; + } + + if (mStartTimeUs >= 0ll) { + int64_t nextTimeUs = mStartTimeUs + (mFrameCount * 1000000ll) / mRateHz; + mStartTimeUs = nextTimeUs; + mFrameCount = 0; + } + mRateHz = rateHz; +} + status_t RepeaterSource::start(MetaData *params) { CHECK(!mStarted); diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.h b/media/libstagefright/wifi-display/source/RepeaterSource.h index 146af32..8d414fd 100644 --- a/media/libstagefright/wifi-display/source/RepeaterSource.h +++ b/media/libstagefright/wifi-display/source/RepeaterSource.h @@ -28,6 +28,9 @@ struct RepeaterSource : public MediaSource { // send updates in a while, this is its wakeup call. void wakeUp(); + double getFrameRate() const; + void setFrameRate(double rateHz); + protected: virtual ~RepeaterSource(); -- cgit v1.1 From bc5120797e0d2995b63a96306b638d1d0d8457d0 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 26 Mar 2013 15:09:04 -0700 Subject: media.log dump Bug: 8446008 Change-Id: Ia47b204f5c4c91b99591eec5420a25a3dc8e7631 --- services/medialog/MediaLogService.cpp | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/services/medialog/MediaLogService.cpp b/services/medialog/MediaLogService.cpp index 2332b3e..f60749d 100644 --- a/services/medialog/MediaLogService.cpp +++ b/services/medialog/MediaLogService.cpp @@ -19,6 +19,7 @@ #include #include +#include #include #include #include "MediaLogService.h" @@ -55,6 +56,14 @@ void MediaLogService::unregisterWriter(const sp& shared) status_t MediaLogService::dump(int fd, const Vector& args) { + // FIXME merge with similar but not identical code at services/audioflinger/ServiceUtilities.cpp + static const String16 sDump("android.permission.DUMP"); + if (!(IPCThreadState::self()->getCallingUid() == AID_MEDIA || + PermissionCache::checkCallingPermission(sDump))) { + fdprintf(fd, "Permission denied.\n"); + return NO_ERROR; + } + Vector namedReaders; { Mutex::Autolock _l(mLock); -- cgit v1.1 From 09108adeca8cbbf3fbb21f8aea2a2ff250db9531 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Tue, 26 Mar 2013 16:37:19 -0700 Subject: ToneGenerator: fix stop/destroy concurrency There is a problem if the stopTone() method is called from two different threads (for instance if the destructor is called while stopTone() is waiting for the audio callback to finish). In this case, the second call to stopTone() will not wait for the condition to be signaled and call clearWaveGens() while the callback can still be active, thus causing a crash. There is a similar problem in case of concurrent calls to startTone() and stopTone(). The fix consists in making sure that stopTone() always waits for call back completion or timeout and exits before calling clearWaveGens() if a concurrent start request is detected. 
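Two details in that description are worth calling out: every thread entering stopTone() now waits for the callback to finish (not just the first caller), and the callback must therefore wake all waiters, which is why the hunk below switches signal() to broadcast(). A standalone sketch of that waiting pattern with standard C++ primitives (not the Android Mutex/Condition classes) could be:

    #include <condition_variable>
    #include <cstdio>
    #include <mutex>
    #include <thread>

    // Sketch only: models why the fix needs a broadcast-style wakeup. If both
    // stopTone() and the destructor can block waiting for the audio callback,
    // a single notify_one()/signal() may wake only one of them and leave the
    // other stuck until the timeout.
    std::mutex gLock;
    std::condition_variable gCallbackDone;
    bool gFinished = false;

    void waitForCallback(const char* who) {
        std::unique_lock<std::mutex> l(gLock);
        gCallbackDone.wait(l, [] { return gFinished; });
        std::printf("%s: callback finished, safe to continue teardown\n", who);
    }

    int main() {
        std::thread t1(waitForCallback, "stopTone");
        std::thread t2(waitForCallback, "destructor");
        {
            std::lock_guard<std::mutex> l(gLock);
            gFinished = true;
        }
        gCallbackDone.notify_all();   // broadcast(): wake every waiter, not just one
        t1.join();
        t2.join();
        return 0;
    }
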
Bug 8163071 Change-Id: I9ddb4390407701dcad5bf83660fd9903f0d72268 --- media/libmedia/ToneGenerator.cpp | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index 58d495e..3554608 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -976,21 +976,26 @@ void ToneGenerator::stopTone() { ALOGV("stopTone"); mLock.lock(); - if (mState == TONE_PLAYING || mState == TONE_STARTING || mState == TONE_RESTARTING) { - mState = TONE_STOPPING; + if (mState != TONE_IDLE && mState != TONE_INIT) { + if (mState == TONE_PLAYING || mState == TONE_STARTING || mState == TONE_RESTARTING) { + mState = TONE_STOPPING; + } ALOGV("waiting cond"); status_t lStatus = mWaitCbkCond.waitRelative(mLock, seconds(3)); if (lStatus == NO_ERROR) { + // If the tone was restarted exit now before calling clearWaveGens(); + if (mState != TONE_INIT) { + return; + } ALOGV("track stop complete, time %d", (unsigned int)(systemTime()/1000000)); } else { ALOGE("--- Stop timed out"); mState = TONE_IDLE; mpAudioTrack->stop(); } + clearWaveGens(); } - clearWaveGens(); - mLock.unlock(); } @@ -1299,7 +1304,7 @@ audioCallback_EndLoop: } if (lSignal) - lpToneGen->mWaitCbkCond.signal(); + lpToneGen->mWaitCbkCond.broadcast(); lpToneGen->mLock.unlock(); } } -- cgit v1.1 From cba2c163555cd329f49d40658ea3ee902e94dda3 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 20 Mar 2013 15:56:31 -0700 Subject: Camera: Add hotplug support (for fixed # of cameras) * Minor: also change addListener to fire the current status upon subscription * Minor: STATUS_AVAILABLE is now an alias for STATUS_PRESENT and deprecated Change-Id: I254608a7332095e3ef201ffea64cff156cfc1b3e --- camera/tests/ProCameraTests.cpp | 18 ++- include/camera/ICameraServiceListener.h | 11 +- services/camera/libcameraservice/CameraService.cpp | 134 ++++++++++++++++++++- services/camera/libcameraservice/CameraService.h | 14 ++- 4 files changed, 162 insertions(+), 15 deletions(-) diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index ecc0854..5f8f772 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -587,14 +587,19 @@ TEST_F(ProCameraTest, DISABLED_StreamingImageSingle) { sp listener = new ServiceListener(); EXPECT_OK(ProCamera::addServiceListener(listener)); - ServiceListener::Status currentStatus = ServiceListener::STATUS_AVAILABLE; + ServiceListener::Status currentStatus; + + // when subscribing a new listener, + // we immediately get a callback to the current status + while (listener->waitForStatusChange(/*out*/currentStatus) != OK); + EXPECT_EQ(ServiceListener::STATUS_PRESENT, currentStatus); dout << "Will now stream and resume infinitely..." 
<< std::endl; while (true) { - if (currentStatus == ServiceListener::STATUS_AVAILABLE) { + if (currentStatus == ServiceListener::STATUS_PRESENT) { - EXPECT_OK(mCamera->createStream(mDisplayW, mDisplayH, mDisplayFmt, + ASSERT_OK(mCamera->createStream(mDisplayW, mDisplayH, mDisplayFmt, surface, &depthStreamId)); EXPECT_NE(-1, depthStreamId); @@ -613,12 +618,15 @@ TEST_F(ProCameraTest, DISABLED_StreamingImageSingle) { while (listener->waitForStatusChange(/*out*/stat) != OK); if (currentStatus != stat) { - if (stat == ServiceListener::STATUS_AVAILABLE) { + if (stat == ServiceListener::STATUS_PRESENT) { dout << "Reconnecting to camera" << std::endl; mCamera = ProCamera::connect(CAMERA_ID); } else if (stat == ServiceListener::STATUS_NOT_AVAILABLE) { dout << "Disconnecting from camera" << std::endl; mCamera->disconnect(); + } else if (stat == ServiceListener::STATUS_NOT_PRESENT) { + dout << "Camera unplugged" << std::endl; + mCamera = NULL; } else { dout << "Unknown status change " << std::hex << stat << std::endl; @@ -1216,7 +1224,7 @@ TEST_F(ProCameraTest, ServiceListenersFunctional) { } EXPECT_OK(listener->waitForStatusChange(/*out*/stat)); - EXPECT_EQ(ServiceListener::STATUS_AVAILABLE, stat); + EXPECT_EQ(ServiceListener::STATUS_PRESENT, stat); EXPECT_OK(ProCamera::removeServiceListener(listener)); } diff --git a/include/camera/ICameraServiceListener.h b/include/camera/ICameraServiceListener.h index 88860dd..f2a11c2 100644 --- a/include/camera/ICameraServiceListener.h +++ b/include/camera/ICameraServiceListener.h @@ -38,9 +38,8 @@ public: * NOT_PRESENT -> PRESENT * NOT_PRESENT -> ENUMERATING * ENUMERATING -> PRESENT - * PRESENT -> AVAILABLE - * AVAILABLE -> NOT_AVAILABLE - * NOT_AVAILABLE -> AVAILABLE + * PRESENT -> NOT_AVAILABLE + * NOT_AVAILABLE -> PRESENT * * A state will never immediately transition back to itself. 
*/ @@ -48,15 +47,17 @@ public: // Device physically unplugged STATUS_NOT_PRESENT = CAMERA_DEVICE_STATUS_NOT_PRESENT, // Device physically has been plugged in + // and the camera can be used exlusively STATUS_PRESENT = CAMERA_DEVICE_STATUS_PRESENT, // Device physically has been plugged in // but it will not be connect-able until enumeration is complete STATUS_ENUMERATING = CAMERA_DEVICE_STATUS_ENUMERATING, // Camera can be used exclusively - STATUS_AVAILABLE = 0x80000000, + STATUS_AVAILABLE = STATUS_PRESENT, // deprecated, will be removed + // Camera is in use by another app and cannot be used exclusively - STATUS_NOT_AVAILABLE, + STATUS_NOT_AVAILABLE = 0x80000000, // Use to initialize variables only STATUS_UNKNOWN = 0xFFFFFFFF, diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 5a6a3c8..2db5224 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -66,6 +66,20 @@ static int getCallingUid() { return IPCThreadState::self()->getCallingUid(); } +extern "C" { +static void camera_device_status_change( + const struct camera_module_callbacks* callbacks, + int camera_id, + int new_status) { + sp cs = const_cast( + static_cast(callbacks)); + + cs->onDeviceStatusChanged( + camera_id, + new_status); +} +} // extern "C" + // ---------------------------------------------------------------------------- // This is ugly and only safe if we never re-create the CameraService, but @@ -79,8 +93,10 @@ CameraService::CameraService() gCameraService = this; for (size_t i = 0; i < MAX_CAMERAS; ++i) { - mStatusList[i] = ICameraServiceListener::STATUS_AVAILABLE; + mStatusList[i] = ICameraServiceListener::STATUS_PRESENT; } + + this->camera_device_status_change = android::camera_device_status_change; } void CameraService::onFirstRef() @@ -105,6 +121,11 @@ void CameraService::onFirstRef() for (int i = 0; i < mNumberOfCameras; i++) { setCameraFree(i); } + + if (mModule->common.module_api_version >= + CAMERA_MODULE_API_VERSION_2_1) { + mModule->set_callbacks(this); + } } } @@ -118,6 +139,67 @@ CameraService::~CameraService() { gCameraService = NULL; } +void CameraService::onDeviceStatusChanged(int cameraId, + int newStatus) +{ + ALOGI("%s: Status changed for cameraId=%d, newStatus=%d", __FUNCTION__, + cameraId, newStatus); + + if (cameraId < 0 || cameraId >= MAX_CAMERAS) { + ALOGE("%s: Bad camera ID %d", __FUNCTION__, cameraId); + return; + } + + if ((int)getStatus(cameraId) == newStatus) { + ALOGE("%s: State transition to the same status 0x%x not allowed", + __FUNCTION__, (uint32_t)newStatus); + return; + } + + /* don't do this in updateStatus + since it is also called from connect and we could get into a deadlock */ + if (newStatus == CAMERA_DEVICE_STATUS_NOT_PRESENT) { + Vector > clientsToDisconnect; + { + Mutex::Autolock al(mServiceLock); + + /* Find all clients that we need to disconnect */ + sp client = mClient[cameraId].promote(); + if (client.get() != NULL) { + clientsToDisconnect.push_back(client); + } + + int i = cameraId; + for (size_t j = 0; j < mProClientList[i].size(); ++j) { + sp cl = mProClientList[i][j].promote(); + if (cl != NULL) { + clientsToDisconnect.push_back(cl); + } + } + } + + /* now disconnect them. 
don't hold the lock + or we can get into a deadlock */ + + for (size_t i = 0; i < clientsToDisconnect.size(); ++i) { + sp client = clientsToDisconnect[i]; + + client->disconnect(); + /** + * The remote app will no longer be able to call methods on the + * client since the client PID will be reset to 0 + */ + } + + ALOGV("%s: After unplug, disconnected %d clients", + __FUNCTION__, clientsToDisconnect.size()); + } + + updateStatus( + static_cast(newStatus), cameraId); + +} + int32_t CameraService::getNumberOfCameras() { return mNumberOfCameras; } @@ -212,6 +294,19 @@ bool CameraService::validateConnect(int cameraId, return false; } + ICameraServiceListener::Status currentStatus = getStatus(cameraId); + if (currentStatus == ICameraServiceListener::STATUS_NOT_PRESENT) { + ALOGI("Camera is not plugged in," + " connect X (pid %d) rejected", callingPid); + return false; + } else if (currentStatus == ICameraServiceListener::STATUS_ENUMERATING) { + ALOGI("Camera is enumerating," + " connect X (pid %d) rejected", callingPid); + return false; + } + // Else don't check for STATUS_NOT_AVAILABLE. + // -- It's done implicitly in canConnectUnsafe /w the mBusy array + return true; } @@ -293,6 +388,7 @@ sp CameraService::connect( // If there are other non-exclusive users of the camera, // this will tear them down before we can reuse the camera if (isValidCameraId(cameraId)) { + // transition from PRESENT -> NOT_AVAILABLE updateStatus(ICameraServiceListener::STATUS_NOT_AVAILABLE, cameraId); } @@ -321,7 +417,8 @@ sp CameraService::connect( if (!connectFinishUnsafe(client, client->asBinder())) { // this is probably not recoverable.. maybe the client can try again - updateStatus(ICameraServiceListener::STATUS_AVAILABLE, cameraId); + // OK: we can only get here if we were originally in PRESENT state + updateStatus(ICameraServiceListener::STATUS_PRESENT, cameraId); return NULL; } @@ -429,6 +526,15 @@ status_t CameraService::addListener( mListenerList.push_back(listener); + /* Immediately signal current status to this listener only */ + { + Mutex::Autolock m(mStatusMutex) ; + int numCams = getNumberOfCameras(); + for (int i = 0; i < numCams; ++i) { + listener->onStatusChanged(mStatusList[i], i); + } + } + return OK; } status_t CameraService::removeListener( @@ -719,6 +825,8 @@ CameraService::BasicClient::~BasicClient() { void CameraService::BasicClient::disconnect() { mCameraService->removeClientByRemote(mRemoteBinder); + // client shouldn't be able to call into us anymore + mClientPid = 0; } status_t CameraService::BasicClient::startCameraOps() { @@ -816,7 +924,7 @@ void CameraService::Client::notifyError() { void CameraService::Client::disconnect() { BasicClient::disconnect(); mCameraService->setCameraFree(mCameraId); - mCameraService->updateStatus(ICameraServiceListener::STATUS_AVAILABLE, + mCameraService->updateStatus(ICameraServiceListener::STATUS_PRESENT, mCameraId); } @@ -1017,6 +1125,16 @@ void CameraService::updateStatusUnsafe(ICameraServiceListener::Status status, ALOGV("%s: Status has changed for camera ID %d from 0x%x to 0x%x", __FUNCTION__, cameraId, (uint32_t)oldStatus, (uint32_t)status); + if (oldStatus == ICameraServiceListener::STATUS_NOT_PRESENT && + (status != ICameraServiceListener::STATUS_PRESENT && + status != ICameraServiceListener::STATUS_ENUMERATING)) { + + ALOGW("%s: From NOT_PRESENT can only transition into PRESENT" + " or ENUMERATING", __FUNCTION__); + mStatusList[cameraId] = oldStatus; + return; + } + /** * ProClients lose their exclusive lock. 
* - Done before the CameraClient can initialize the HAL device, @@ -1041,4 +1159,14 @@ void CameraService::updateStatusUnsafe(ICameraServiceListener::Status status, } } +ICameraServiceListener::Status CameraService::getStatus(int cameraId) const { + if (cameraId < 0 || cameraId >= MAX_CAMERAS) { + ALOGE("%s: Invalid camera ID %d", __FUNCTION__, cameraId); + return ICameraServiceListener::STATUS_UNKNOWN; + } + + Mutex::Autolock al(mStatusMutex); + return mStatusList[cameraId]; +} + }; // namespace android diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index c5e495f..8cb1691 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -45,7 +45,8 @@ class MediaPlayer; class CameraService : public BinderService, public BnCameraService, - public IBinder::DeathRecipient + public IBinder::DeathRecipient, + public camera_module_callbacks_t { friend class BinderService; public: @@ -59,6 +60,11 @@ public: virtual ~CameraService(); ///////////////////////////////////////////////////////////////////// + // HAL Callbacks + virtual void onDeviceStatusChanged(int cameraId, + int newStatus); + + ///////////////////////////////////////////////////////////////////// // ICameraService virtual int32_t getNumberOfCameras(); virtual status_t getCameraInfo(int cameraId, @@ -327,10 +333,14 @@ private: mListenerList; // guard only mStatusList and the broadcasting of ICameraServiceListener - Mutex mStatusMutex; + mutable Mutex mStatusMutex; ICameraServiceListener::Status mStatusList[MAX_CAMERAS]; + // Read the current status (locks mStatusMutex) + ICameraServiceListener::Status + getStatus(int cameraId) const; + // Broadcast the new status if it changed (locks the service mutex) void updateStatus( ICameraServiceListener::Status status, -- cgit v1.1 From 65d7986ceac6e35426749ac7e05bbd2a38949db4 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 27 Mar 2013 11:07:06 -0700 Subject: ProCamera: Fix memory leak in consumeFrameMetadata Bug: 8402301 Change-Id: I975990185f45bb8552712271b9ac457af08a170c --- camera/ProCamera.cpp | 6 ++---- camera/tests/ProCameraTests.cpp | 3 +++ 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 396b009..fec5461 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -103,7 +103,7 @@ void ProCamera::onResultReceived(int32_t frameId, camera_metadata* result) { { Mutex::Autolock al(mWaitMutex); mMetadataReady = true; - mLatestMetadata = tmp; + mLatestMetadata = tmp; // make copy mWaitCondition.broadcast(); } @@ -312,8 +312,6 @@ void ProCamera::onFrameAvailable(int streamId) { sp listener = mListener; StreamInfo& stream = getStreamInfo(streamId); - CpuConsumer::LockedBuffer buf; - if (listener.get() != NULL) { listener->onFrameAvailable(streamId, stream.cpuConsumer); } @@ -421,7 +419,7 @@ CameraMetadata ProCamera::consumeFrameMetadata() { // Destructive: Subsequent calls return empty metadatas CameraMetadata tmp = mLatestMetadata; - mLatestMetadata.release(); + mLatestMetadata.clear(); return tmp; } diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index ecc0854..87f817a 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -1021,6 +1021,9 @@ TEST_F(ProCameraTest, WaitForDualStreamBuffer) { // Consume two frames simultaneously. Unsynchronized by timestamps. 
for (int i = 0; i < REQUEST_COUNT; ++i) { + // Exhaust event queue so it doesn't keep growing + while (mListener->ReadEvent() != UNKNOWN); + // Get the metadata EXPECT_OK(mCamera->waitForFrameMetadata()); CameraMetadata meta = mCamera->consumeFrameMetadata(); -- cgit v1.1 From 59ca8040a0cfb0324c6be7dded74d02e9f5cf6e8 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Wed, 27 Mar 2013 13:48:36 -0700 Subject: Fix valgrind error. The constructor calls reset(), which in turn calls stop(), which then accesses mCaptureTimeLapse before it has been initialized. Change-Id: Ia94ac740b9bd1a0389c72647a5639dd25320d92c --- media/libmediaplayerservice/StagefrightRecorder.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp index c2c9985..095d5ca 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.cpp +++ b/media/libmediaplayerservice/StagefrightRecorder.cpp @@ -70,7 +70,8 @@ StagefrightRecorder::StagefrightRecorder() mOutputFd(-1), mAudioSource(AUDIO_SOURCE_CNT), mVideoSource(VIDEO_SOURCE_LIST_END), - mStarted(false), mSurfaceMediaSource(NULL) { + mStarted(false), mSurfaceMediaSource(NULL), + mCaptureTimeLapse(false) { ALOGV("Constructor"); reset(); -- cgit v1.1 From 4a7fe9cd333c4cf533f78a074c8c2c820b94c6d2 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Wed, 27 Mar 2013 16:44:54 -0700 Subject: Fix valgrind error in software h264 decoder h264bsdNextMbAddress could read past the end of an allocation, which could conceivably result in a segfault if the allocation was at the very end of a page. Change-Id: Id7a0c5733d66e609f36feb0e15b2d67b9bbc0b4d --- media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c index 53b2fd8..cc838fd 100755 --- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c +++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c @@ -220,7 +220,7 @@ u32 h264bsdNextMbAddress(u32 *pSliceGroupMap, u32 picSizeInMbs, u32 currMbAddr) /* Variables */ - u32 i, sliceGroup, tmp; + u32 i, sliceGroup; /* Code */ @@ -231,11 +231,9 @@ u32 h264bsdNextMbAddress(u32 *pSliceGroupMap, u32 picSizeInMbs, u32 currMbAddr) sliceGroup = pSliceGroupMap[currMbAddr]; i = currMbAddr + 1; - tmp = pSliceGroupMap[i]; - while ((i < picSizeInMbs) && (tmp != sliceGroup)) + while ((i < picSizeInMbs) && (pSliceGroupMap[i] != sliceGroup)) { i++; - tmp = pSliceGroupMap[i]; } if (i == picSizeInMbs) -- cgit v1.1 From 8be20f50711a94426f1394ec113672e41c1224e8 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Wed, 6 Mar 2013 16:20:06 -0800 Subject: Camera: Add streams to camera3 HAL device - Generic stream interface - Functional output stream - Skeleton input/zsl stream Change-Id: I143794eac1a2217031d62b51912662fc6d1db900 --- services/camera/libcameraservice/Android.mk | 4 + .../camera3/Camera3InputStream.cpp | 77 ++++ .../libcameraservice/camera3/Camera3InputStream.h | 70 ++++ .../camera3/Camera3OutputStream.cpp | 437 +++++++++++++++++++++ .../libcameraservice/camera3/Camera3OutputStream.h | 95 +++++ .../libcameraservice/camera3/Camera3Stream.cpp | 264 +++++++++++++ .../libcameraservice/camera3/Camera3Stream.h | 248 ++++++++++++ .../libcameraservice/camera3/Camera3ZslStream.cpp | 89 +++++ .../libcameraservice/camera3/Camera3ZslStream.h | 79 ++++ 9 files changed, 1363 
insertions(+) create mode 100644 services/camera/libcameraservice/camera3/Camera3InputStream.cpp create mode 100644 services/camera/libcameraservice/camera3/Camera3InputStream.h create mode 100644 services/camera/libcameraservice/camera3/Camera3OutputStream.cpp create mode 100644 services/camera/libcameraservice/camera3/Camera3OutputStream.h create mode 100644 services/camera/libcameraservice/camera3/Camera3Stream.cpp create mode 100644 services/camera/libcameraservice/camera3/Camera3Stream.h create mode 100644 services/camera/libcameraservice/camera3/Camera3ZslStream.cpp create mode 100644 services/camera/libcameraservice/camera3/Camera3ZslStream.h diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index 8600735..6847bf8 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -25,6 +25,10 @@ LOCAL_SRC_FILES:= \ camera2/JpegCompressor.cpp \ camera2/CaptureSequencer.cpp \ camera2/ProFrameProcessor.cpp \ + camera3/Camera3Stream.cpp \ + camera3/Camera3InputStream.cpp \ + camera3/Camera3OutputStream.cpp \ + camera3/Camera3ZslStream.cpp LOCAL_SHARED_LIBRARIES:= \ libui \ diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp new file mode 100644 index 0000000..8a48ee5 --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera3-InputStream" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include +#include "Camera3InputStream.h" + +namespace android { + +namespace camera3 { + +Camera3InputStream::Camera3InputStream(int id, + uint32_t width, uint32_t height, int format) : + Camera3Stream(id, CAMERA3_STREAM_INPUT, width, height, 0, format) { +} + +status_t Camera3InputStream::getBufferLocked(camera3_stream_buffer *buffer) { + (void) buffer; + ALOGE("%s: Not implemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3InputStream::returnBufferLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp) { + (void) timestamp; + (void) buffer; + ALOGE("%s: Not implemented", __FUNCTION__); + return INVALID_OPERATION; +} + +bool Camera3InputStream::hasOutstandingBuffersLocked() const { + ALOGE("%s: Not implemented", __FUNCTION__); + return false; +} + +status_t Camera3InputStream::waitUntilIdle(nsecs_t timeout) { + (void) timeout; + ALOGE("%s: Not implemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3InputStream::disconnectLocked() { + ALOGE("%s: Not implemented", __FUNCTION__); + return INVALID_OPERATION; +} + +sp Camera3InputStream::getProducerInterface() const { + return mConsumer->getProducerInterface(); +} + +void Camera3InputStream::dump(int fd, const Vector &args) const { + (void) fd; + (void) args; + ALOGE("%s: Not implemented", __FUNCTION__); +} + +}; // namespace camera3 + +}; // namespace android diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.h b/services/camera/libcameraservice/camera3/Camera3InputStream.h new file mode 100644 index 0000000..c4b5dd9 --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3InputStream.h @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_INPUT_STREAM_H +#define ANDROID_SERVERS_CAMERA3_INPUT_STREAM_H + +#include +#include +#include + +#include "Camera3Stream.h" + +namespace android { + +namespace camera3 { + +/** + * A class for managing a single stream of input data to the camera device. + */ +class Camera3InputStream : public Camera3Stream { + public: + /** + * Set up a stream for formats that have fixed size, such as RAW and YUV. + */ + Camera3InputStream(int id, uint32_t width, uint32_t height, int format); + + virtual status_t waitUntilIdle(nsecs_t timeout); + virtual void dump(int fd, const Vector &args) const; + + /** + * Get the producer interface for this stream, to hand off to a producer. + * The producer must be connected to the provided interface before + * finishConfigure is called on this stream. 
+ */ + sp getProducerInterface() const; + + private: + + sp mConsumer; + + /** + * Camera3Stream interface + */ + + virtual status_t getBufferLocked(camera3_stream_buffer *buffer); + virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer, + nsecs_t timestamp); + virtual bool hasOutstandingBuffersLocked() const; + virtual status_t disconnectLocked(); + +}; // class Camera3InputStream + +}; // namespace camera3 + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp new file mode 100644 index 0000000..d07ae94 --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp @@ -0,0 +1,437 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera3-OutputStream" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +// This is needed for stdint.h to define INT64_MAX in C++ +#define __STDC_LIMIT_MACROS + +#include +#include +#include "Camera3OutputStream.h" + +#ifndef container_of +#define container_of(ptr, type, member) \ + (type *)((char*)(ptr) - offsetof(type, member)) +#endif + +namespace android { + +namespace camera3 { + +Camera3OutputStream::Camera3OutputStream(int id, + sp consumer, + uint32_t width, uint32_t height, int format) : + Camera3Stream(id, CAMERA3_STREAM_OUTPUT, width, height, 0, format), + mConsumer(consumer), + mTransform(0), + mTotalBufferCount(0), + mDequeuedBufferCount(0), + mFrameCount(0), + mLastTimestamp(0) { + + mCombinedFence = new Fence(); + if (mConsumer == NULL) { + ALOGE("%s: Consumer is NULL!", __FUNCTION__); + mState = STATE_ERROR; + } +} + +Camera3OutputStream::Camera3OutputStream(int id, + sp consumer, + uint32_t width, uint32_t height, size_t maxSize, int format) : + Camera3Stream(id, CAMERA3_STREAM_OUTPUT, + width, height, maxSize, format), + mConsumer(consumer) { + + if (format != HAL_PIXEL_FORMAT_BLOB) { + ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__, + format); + mState = STATE_ERROR; + } + + if (mConsumer == NULL) { + ALOGE("%s: Consumer is NULL!", __FUNCTION__); + mState = STATE_ERROR; + } +} + +Camera3OutputStream::~Camera3OutputStream() { + disconnectLocked(); +} + +status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer) { + ATRACE_CALL(); + status_t res; + + // Allow dequeue during IN_[RE]CONFIG for registration + if (mState != STATE_CONFIGURED && + mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) { + ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + + // Only limit dequeue amount when fully configured + if (mState == STATE_CONFIGURED && + mDequeuedBufferCount == camera3_stream::max_buffers) { + ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous" + " buffers (%d)", __FUNCTION__, mId, + camera3_stream::max_buffers); + return INVALID_OPERATION; + 
} + + ANativeWindowBuffer* anb; + int fenceFd; + + res = mConsumer->dequeueBuffer(mConsumer.get(), &anb, &fenceFd); + if (res != OK) { + ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + // Handing out a raw pointer to this object. Increment internal refcount. + incStrong(this); + buffer->stream = this; + buffer->buffer = &(anb->handle); + buffer->acquire_fence = fenceFd; + buffer->release_fence = -1; + buffer->status = CAMERA3_BUFFER_STATUS_OK; + + mDequeuedBufferCount++; + + return OK; +} + +status_t Camera3OutputStream::returnBufferLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp) { + ATRACE_CALL(); + status_t res; + + // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be + // decrementing the internal refcount next. In case this is the last ref, we + // might get destructed on the decStrong(), so keep an sp around until the + // end of the call - otherwise have to sprinkle the decStrong on all exit + // points. + sp keepAlive(this); + decStrong(this); + + // Allow buffers to be returned in the error state, to allow for disconnect + // and in the in-config states for registration + if (mState == STATE_CONSTRUCTED) { + ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + if (mDequeuedBufferCount == 0) { + ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__, + mId); + return INVALID_OPERATION; + } + if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { + res = mConsumer->cancelBuffer(mConsumer.get(), + container_of(buffer.buffer, ANativeWindowBuffer, handle), + buffer.release_fence); + if (res != OK) { + ALOGE("%s: Stream %d: Error cancelling buffer to native window:" + " %s (%d)", __FUNCTION__, mId, strerror(-res), res); + return res; + } + } else { + res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp); + if (res != OK) { + ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + sp releaseFence = new Fence(buffer.release_fence); + int anwReleaseFence = releaseFence->dup(); + + res = mConsumer->queueBuffer(mConsumer.get(), + container_of(buffer.buffer, ANativeWindowBuffer, handle), + anwReleaseFence); + if (res != OK) { + ALOGE("%s: Stream %d: Error queueing buffer to native window: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + close(anwReleaseFence); + return res; + } + + mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); + } + + mDequeuedBufferCount--; + mBufferReturnedSignal.signal(); + mLastTimestamp = timestamp; + + return OK; +} + +bool Camera3OutputStream::hasOutstandingBuffersLocked() const { + nsecs_t signalTime = mCombinedFence->getSignalTime(); + ALOGV("%s: Stream %d: Has %d outstanding buffers," + " buffer signal time is %lld", + __FUNCTION__, mId, mDequeuedBufferCount, signalTime); + if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) { + return true; + } + return false; +} + +status_t Camera3OutputStream::waitUntilIdle(nsecs_t timeout) { + status_t res; + { + Mutex::Autolock l(mLock); + while (mDequeuedBufferCount > 0) { + if (timeout != TIMEOUT_NEVER) { + nsecs_t startTime = systemTime(); + res = mBufferReturnedSignal.waitRelative(mLock, timeout); + if (res == TIMED_OUT) { + return res; + } else if (res != OK) { + ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + nsecs_t deltaTime 
= systemTime() - startTime; + if (timeout <= deltaTime) { + timeout = 0; + } else { + timeout -= deltaTime; + } + } else { + res = mBufferReturnedSignal.wait(mLock); + if (res != OK) { + ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + } + } + } + + // No lock + + unsigned int timeoutMs; + if (timeout == TIMEOUT_NEVER) { + timeoutMs = Fence::TIMEOUT_NEVER; + } else if (timeout == 0) { + timeoutMs = 0; + } else { + // Round up to wait at least 1 ms + timeoutMs = (timeout + 999999) / 1000000; + } + + return mCombinedFence->wait(timeoutMs); +} + +void Camera3OutputStream::dump(int fd, const Vector &args) const { + (void) args; + String8 lines; + lines.appendFormat(" Stream[%d]: Output\n", mId); + lines.appendFormat(" State: %d\n", mState); + lines.appendFormat(" Dims: %d x %d, format 0x%x\n", + camera3_stream::width, camera3_stream::height, + camera3_stream::format); + lines.appendFormat(" Max size: %d\n", mMaxSize); + lines.appendFormat(" Usage: %d, max HAL buffers: %d\n", + camera3_stream::usage, camera3_stream::max_buffers); + lines.appendFormat(" Frames produced: %d, last timestamp: %lld ns\n", + mFrameCount, mLastTimestamp); + lines.appendFormat(" Total buffers: %d, currently dequeued: %d\n", + mTotalBufferCount, mDequeuedBufferCount); + write(fd, lines.string(), lines.size()); +} + +status_t Camera3OutputStream::setTransform(int transform) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + return setTransformLocked(transform); +} + +status_t Camera3OutputStream::setTransformLocked(int transform) { + status_t res = OK; + if (mState == STATE_ERROR) { + ALOGE("%s: Stream in error state", __FUNCTION__); + return INVALID_OPERATION; + } + + mTransform = transform; + if (mState == STATE_CONFIGURED) { + res = native_window_set_buffers_transform(mConsumer.get(), + transform); + if (res != OK) { + ALOGE("%s: Unable to configure stream transform to %x: %s (%d)", + __FUNCTION__, transform, strerror(-res), res); + } + } + return res; +} + +status_t Camera3OutputStream::configureQueueLocked() { + status_t res; + + switch (mState) { + case STATE_IN_RECONFIG: + res = disconnect(); + if (res != OK) { + return res; + } + break; + case STATE_IN_CONFIG: + // OK + break; + default: + ALOGE("%s: Bad state: %d", __FUNCTION__, mState); + return INVALID_OPERATION; + } + + // Configure consumer-side ANativeWindow interface + res = native_window_api_connect(mConsumer.get(), + NATIVE_WINDOW_API_CAMERA); + if (res != OK) { + ALOGE("%s: Unable to connect to native window for stream %d", + __FUNCTION__, mId); + return res; + } + + res = native_window_set_usage(mConsumer.get(), camera3_stream::usage); + if (res != OK) { + ALOGE("%s: Unable to configure usage %08x for stream %d", + __FUNCTION__, camera3_stream::usage, mId); + return res; + } + + res = native_window_set_scaling_mode(mConsumer.get(), + NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); + if (res != OK) { + ALOGE("%s: Unable to configure stream scaling: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + res = setTransformLocked(0); + if (res != OK) { + return res; + } + + if (mMaxSize == 0) { + // For buffers of known size + res = native_window_set_buffers_geometry(mConsumer.get(), + camera3_stream::width, camera3_stream::height, + camera3_stream::format); + } else { + // For buffers with bounded size + res = native_window_set_buffers_geometry(mConsumer.get(), + mMaxSize, 1, + camera3_stream::format); + } + if (res != OK) { + ALOGE("%s: Unable to configure stream buffer 
geometry" + " %d x %d, format %x for stream %d", + __FUNCTION__, camera3_stream::width, camera3_stream::height, + camera3_stream::format, mId); + return res; + } + + int maxConsumerBuffers; + res = mConsumer->query(mConsumer.get(), + NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers); + if (res != OK) { + ALOGE("%s: Unable to query consumer undequeued" + " buffer count for stream %d", __FUNCTION__, mId); + return res; + } + + ALOGV("%s: Consumer wants %d buffers", __FUNCTION__, + maxConsumerBuffers); + + mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers; + mDequeuedBufferCount = 0; + mFrameCount = 0; + mLastTimestamp = 0; + + res = native_window_set_buffer_count(mConsumer.get(), + mTotalBufferCount); + if (res != OK) { + ALOGE("%s: Unable to set buffer count for stream %d", + __FUNCTION__, mId); + return res; + } + + res = native_window_set_buffers_transform(mConsumer.get(), + mTransform); + if (res != OK) { + ALOGE("%s: Unable to configure stream transform to %x: %s (%d)", + __FUNCTION__, mTransform, strerror(-res), res); + } + + return OK; +} + +size_t Camera3OutputStream::getBufferCountLocked() { + return mTotalBufferCount; +} + +status_t Camera3OutputStream::disconnectLocked() { + status_t res; + + switch (mState) { + case STATE_IN_RECONFIG: + case STATE_CONFIGURED: + // OK + break; + default: + // No connection, nothing to do + return OK; + } + + if (mDequeuedBufferCount > 0) { + ALOGE("%s: Can't disconnect with %d buffers still dequeued!", + __FUNCTION__, mDequeuedBufferCount); + return INVALID_OPERATION; + } + + res = native_window_api_disconnect(mConsumer.get(), NATIVE_WINDOW_API_CAMERA); + + /** + * This is not an error. if client calling process dies, the window will + * also die and all calls to it will return DEAD_OBJECT, thus it's already + * "disconnected" + */ + if (res == DEAD_OBJECT) { + ALOGW("%s: While disconnecting stream %d from native window, the" + " native window died from under us", __FUNCTION__, mId); + } + else if (res != OK) { + ALOGE("%s: Unable to disconnect stream %d from native window (error %d %s)", + __FUNCTION__, mId, res, strerror(-res)); + mState = STATE_ERROR; + return res; + } + + mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG : STATE_CONSTRUCTED; + return OK; +} + +}; // namespace camera3 + +}; // namespace android diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.h b/services/camera/libcameraservice/camera3/Camera3OutputStream.h new file mode 100644 index 0000000..d331a94 --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.h @@ -0,0 +1,95 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H +#define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H + +#include +#include + +#include "Camera3Stream.h" + +namespace android { + +namespace camera3 { + +/** + * A class for managing a single stream of output data from the camera device. 
+ */ +class Camera3OutputStream : public Camera3Stream { + public: + /** + * Set up a stream for formats that have 2 dimensions, such as RAW and YUV. + */ + Camera3OutputStream(int id, sp consumer, + uint32_t width, uint32_t height, int format); + + /** + * Set up a stream for formats that have a variable buffer size for the same + * dimensions, such as compressed JPEG. + */ + Camera3OutputStream(int id, sp consumer, + uint32_t width, uint32_t height, size_t maxSize, int format); + + virtual ~Camera3OutputStream(); + + /** + * Camera3Stream interface + */ + + virtual status_t waitUntilIdle(nsecs_t timeout); + virtual void dump(int fd, const Vector &args) const; + + /** + * Set the transform on the output stream; one of the + * HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants. + */ + status_t setTransform(int transform); + + private: + sp mConsumer; + int mTransform; + size_t mTotalBufferCount; + size_t mDequeuedBufferCount; + Condition mBufferReturnedSignal; + uint32_t mFrameCount; + nsecs_t mLastTimestamp; + + // The merged release fence for all returned buffers + sp mCombinedFence; + + status_t setTransformLocked(int transform); + + /** + * Internal Camera3Stream interface + */ + virtual status_t getBufferLocked(camera3_stream_buffer *buffer); + virtual status_t returnBufferLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp); + virtual bool hasOutstandingBuffersLocked() const; + + virtual status_t configureQueueLocked(); + virtual size_t getBufferCountLocked(); + virtual status_t disconnectLocked(); + +}; // class Camera3OutputStream + +} // namespace camera3 + +} // namespace android + +#endif diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.cpp b/services/camera/libcameraservice/camera3/Camera3Stream.cpp new file mode 100644 index 0000000..cf3072b --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3Stream.cpp @@ -0,0 +1,264 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera3-Stream" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include +#include "Camera3Stream.h" + +namespace android { + +namespace camera3 { + +Camera3Stream::~Camera3Stream() { +} + +Camera3Stream* Camera3Stream::cast(camera3_stream *stream) { + return static_cast(stream); +} + +const Camera3Stream* Camera3Stream::cast(const camera3_stream *stream) { + return static_cast(stream); +} + +Camera3Stream::Camera3Stream(int id, + camera3_stream_type type, + uint32_t width, uint32_t height, size_t maxSize, int format) : + camera3_stream(), + mId(id), + mName(String8::format("Camera3Stream[%d]", id)), + mMaxSize(maxSize), + mState(STATE_CONSTRUCTED) { + + camera3_stream::stream_type = type; + camera3_stream::width = width; + camera3_stream::height = height; + camera3_stream::format = format; + camera3_stream::usage = 0; + camera3_stream::max_buffers = 0; + camera3_stream::priv = NULL; + + if (format == HAL_PIXEL_FORMAT_BLOB && maxSize == 0) { + ALOGE("%s: BLOB format with size == 0", __FUNCTION__); + mState = STATE_ERROR; + } +} + +int Camera3Stream::getId() const { + return mId; +} + +uint32_t Camera3Stream::getWidth() const { + return camera3_stream::width; +} + +uint32_t Camera3Stream::getHeight() const { + return camera3_stream::height; +} + +int Camera3Stream::getFormat() const { + return camera3_stream::format; +} + +camera3_stream* Camera3Stream::startConfiguration() { + Mutex::Autolock l(mLock); + + switch (mState) { + case STATE_ERROR: + ALOGE("%s: In error state", __FUNCTION__); + return NULL; + case STATE_CONSTRUCTED: + // OK + break; + case STATE_IN_CONFIG: + case STATE_IN_RECONFIG: + // Can start config again with no trouble; but don't redo + // oldUsage/oldMaxBuffers + return this; + case STATE_CONFIGURED: + if (stream_type == CAMERA3_STREAM_INPUT) { + ALOGE("%s: Cannot configure an input stream twice", + __FUNCTION__); + return NULL; + } else if (hasOutstandingBuffersLocked()) { + ALOGE("%s: Cannot configure stream; has outstanding buffers", + __FUNCTION__); + return NULL; + } + break; + default: + ALOGE("%s: Unknown state %d", __FUNCTION__, mState); + return NULL; + } + + oldUsage = usage; + oldMaxBuffers = max_buffers; + + if (mState == STATE_CONSTRUCTED) { + mState = STATE_IN_CONFIG; + } else { // mState == STATE_CONFIGURED + mState = STATE_IN_RECONFIG; + } + + return this; +} + +bool Camera3Stream::isConfiguring() const { + Mutex::Autolock l(mLock); + return (mState == STATE_IN_CONFIG) || (mState == STATE_IN_RECONFIG); +} + +status_t Camera3Stream::finishConfiguration(camera3_device *hal3Device) { + Mutex::Autolock l(mLock); + switch (mState) { + case STATE_ERROR: + ALOGE("%s: In error state", __FUNCTION__); + return INVALID_OPERATION; + case STATE_IN_CONFIG: + case STATE_IN_RECONFIG: + // OK + break; + case STATE_CONSTRUCTED: + case STATE_CONFIGURED: + ALOGE("%s: Cannot finish configuration that hasn't been started", + __FUNCTION__); + return INVALID_OPERATION; + default: + ALOGE("%s: Unknown state", __FUNCTION__); + return INVALID_OPERATION; + } + + // Check if the stream configuration is unchanged, and skip reallocation if + // so. As documented in hardware/camera3.h:configure_streams(). 
+ if (mState == STATE_IN_RECONFIG && + oldUsage == usage && + oldMaxBuffers == max_buffers) { + mState = STATE_CONFIGURED; + return OK; + } + + status_t res; + res = configureQueueLocked(); + if (res != OK) { + ALOGE("%s: Unable to configure stream %d queue: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + mState = STATE_ERROR; + return res; + } + + res = registerBuffersLocked(hal3Device); + if (res != OK) { + ALOGE("%s: Unable to register stream buffers with HAL: %s (%d)", + __FUNCTION__, strerror(-res), res); + mState = STATE_ERROR; + return res; + } + + mState = STATE_CONFIGURED; + + return res; +} + +status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + return getBufferLocked(buffer); +} + +status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer, + nsecs_t timestamp) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + return returnBufferLocked(buffer, timestamp); +} + +bool Camera3Stream::hasOutstandingBuffers() const { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + return hasOutstandingBuffersLocked(); +} + +status_t Camera3Stream::disconnect() { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + return disconnectLocked(); +} + +status_t Camera3Stream::registerBuffersLocked(camera3_device *hal3Device) { + ATRACE_CALL(); + status_t res; + + size_t bufferCount = getBufferCountLocked(); + + Vector buffers; + buffers.insertAt(NULL, 0, bufferCount); + + camera3_stream_buffer_set bufferSet = camera3_stream_buffer_set(); + bufferSet.stream = this; + bufferSet.num_buffers = bufferCount; + bufferSet.buffers = buffers.editArray(); + + Vector streamBuffers; + streamBuffers.insertAt(camera3_stream_buffer_t(), 0, bufferCount); + + // Register all buffers with the HAL. This means getting all the buffers + // from the stream, providing them to the HAL with the + // register_stream_buffers() method, and then returning them back to the + // stream in the error state, since they won't have valid data. + // + // Only registered buffers can be sent to the HAL. + + uint32_t bufferIdx = 0; + for (; bufferIdx < bufferCount; bufferIdx++) { + res = getBufferLocked( &streamBuffers.editItemAt(bufferIdx) ); + if (res != OK) { + ALOGE("%s: Unable to get buffer %d for registration with HAL", + __FUNCTION__, bufferIdx); + // Skip registering, go straight to cleanup + break; + } + + sp fence = new Fence(streamBuffers[bufferIdx].acquire_fence); + fence->waitForever(kRegisterFenceTimeoutMs, + "Camera3Stream::registerBuffers"); + + buffers.editItemAt(bufferIdx) = streamBuffers[bufferIdx].buffer; + } + if (bufferIdx == bufferCount) { + // Got all buffers, register with HAL + ALOGV("%s: Registering %d buffers with camera HAL", + __FUNCTION__, bufferCount); + res = hal3Device->ops->register_stream_buffers(hal3Device, + &bufferSet); + } + + // Return all valid buffers to stream, in ERROR state to indicate + // they weren't filled. 
+ for (size_t i = 0; i < bufferIdx; i++) { + streamBuffers.editItemAt(i).release_fence = -1; + streamBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR; + returnBufferLocked(streamBuffers[i], 0); + } + + return res; +} + +}; // namespace camera3 + +}; // namespace android diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.h b/services/camera/libcameraservice/camera3/Camera3Stream.h new file mode 100644 index 0000000..2364cfd --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3Stream.h @@ -0,0 +1,248 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_STREAM_H +#define ANDROID_SERVERS_CAMERA3_STREAM_H + +#include +#include +#include +#include + +#include "hardware/camera3.h" + +namespace android { + +namespace camera3 { + +/** + * A class for managing a single stream of input or output data from the camera + * device. + * + * The stream has an internal state machine to track whether it's + * connected/configured/etc. + * + * States: + * + * STATE_ERROR: A serious error has occurred, stream is unusable. Outstanding + * buffers may still be returned. + * + * STATE_CONSTRUCTED: The stream is ready for configuration, but buffers cannot + * be gotten yet. Not connected to any endpoint, no buffers are registered + * with the HAL. + * + * STATE_IN_CONFIG: Configuration has started, but not yet concluded. During this + * time, the usage, max_buffers, and priv fields of camera3_stream returned by + * startConfiguration() may be modified. + * + * STATE_IN_RE_CONFIG: Configuration has started, and the stream has been + * configured before. Need to track separately from IN_CONFIG to avoid + * re-registering buffers with HAL. + * + * STATE_CONFIGURED: Stream is configured, and has registered buffers with the + * HAL. The stream's getBuffer/returnBuffer work. The priv pointer may still be + * modified. + * + * Transition table: + * + * => STATE_CONSTRUCTED: + * When constructed with valid arguments + * => STATE_ERROR: + * When constructed with invalid arguments + * STATE_CONSTRUCTED => STATE_IN_CONFIG: + * When startConfiguration() is called + * STATE_IN_CONFIG => STATE_CONFIGURED: + * When finishConfiguration() is called + * STATE_IN_CONFIG => STATE_ERROR: + * When finishConfiguration() fails to allocate or register buffers. + * STATE_CONFIGURED => STATE_IN_RE_CONFIG: * + * When startConfiguration() is called again, after making sure stream is + * idle with waitUntilIdle(). + * STATE_IN_RE_CONFIG => STATE_CONFIGURED: + * When finishConfiguration() is called. + * STATE_IN_RE_CONFIG => STATE_ERROR: + * When finishConfiguration() fails to allocate or register buffers. + * STATE_CONFIGURED => STATE_CONSTRUCTED: + * When disconnect() is called after making sure stream is idle with + * waitUntilIdle(). 
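The transition table above is small enough to encode directly; the standalone sketch below restates it as a validity check (state and helper names are illustrative, not part of the patch):

    // Encodes only the documented transitions; ERROR is terminal here because
    // the table defines no transitions out of it.
    #include <cstdio>

    enum StreamState { ERROR, CONSTRUCTED, IN_CONFIG, IN_RECONFIG, CONFIGURED };

    static bool isValidTransition(StreamState from, StreamState to) {
        switch (from) {
            case CONSTRUCTED: return to == IN_CONFIG;                  // startConfiguration()
            case IN_CONFIG:   return to == CONFIGURED || to == ERROR;  // finishConfiguration()
            case CONFIGURED:  return to == IN_RECONFIG                 // startConfiguration() again
                                  || to == CONSTRUCTED;                // disconnect()
            case IN_RECONFIG: return to == CONFIGURED || to == ERROR;  // finishConfiguration()
            default:          return false;
        }
    }

    int main() {
        printf("%d\n", isValidTransition(CONSTRUCTED, IN_CONFIG));  // 1
        printf("%d\n", isValidTransition(CONFIGURED, IN_CONFIG));   // 0: must go through IN_RECONFIG
        return 0;
    }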
+ */ +class Camera3Stream : + protected camera3_stream, + public LightRefBase { + public: + + virtual ~Camera3Stream(); + + static Camera3Stream* cast(camera3_stream *stream); + static const Camera3Stream* cast(const camera3_stream *stream); + + /** + * Get the stream's ID + */ + int getId() const; + + /** + * Get the stream's dimensions and format + */ + uint32_t getWidth() const; + uint32_t getHeight() const; + int getFormat() const; + + /** + * Start the stream configuration process. Returns a handle to the stream's + * information to be passed into the HAL device's configure_streams call. + * + * Until finishConfiguration() is called, no other methods on the stream may be + * called. The usage and max_buffers fields of camera3_stream may be modified + * between start/finishConfiguration, but may not be changed after that. + * The priv field of camera3_stream may be modified at any time after + * startConfiguration. + * + * Returns NULL in case of error starting configuration. + */ + camera3_stream* startConfiguration(); + + /** + * Check if the stream is mid-configuration (start has been called, but not + * finish). Used for lazy completion of configuration. + */ + bool isConfiguring() const; + + /** + * Completes the stream configuration process. During this call, the stream + * may call the device's register_stream_buffers() method. The stream + * information structure returned by startConfiguration() may no longer be + * modified after this call, but can still be read until the destruction of + * the stream. + * + * Returns: + * OK on a successful configuration + * NO_INIT in case of a serious error from the HAL device + * NO_MEMORY in case of an error registering buffers + * INVALID_OPERATION in case connecting to the consumer failed + */ + status_t finishConfiguration(camera3_device *hal3Device); + + /** + * Fill in the camera3_stream_buffer with the next valid buffer for this + * stream, to hand over to the HAL. + * + * This method may only be called once finishConfiguration has been called. + * For bidirectional streams, this method applies to the output-side + * buffers. + * + */ + status_t getBuffer(camera3_stream_buffer *buffer); + + /** + * Return a buffer to the stream after use by the HAL. + * + * This method may only be called for buffers provided by getBuffer(). + * For bidirectional streams, this method applies to the output-side buffers + */ + status_t returnBuffer(const camera3_stream_buffer &buffer, + nsecs_t timestamp); + + /** + * Whether any of the stream's buffers are currently in use by the HAL, + * including buffers that have been returned but not yet had their + * release fence signaled. + */ + bool hasOutstandingBuffers() const; + + enum { + TIMEOUT_NEVER = -1 + }; + /** + * Wait until the HAL is done with all of this stream's buffers, including + * signalling all release fences. Returns TIMED_OUT if the timeout is exceeded, + * OK on success. Pass in TIMEOUT_NEVER for timeout to indicate an indefinite wait. + */ + virtual status_t waitUntilIdle(nsecs_t timeout) = 0; + + /** + * Disconnect stream from its non-HAL endpoint. After this, + * start/finishConfiguration must be called before the stream can be used + * again. This cannot be called if the stream has outstanding dequeued + * buffers. + */ + status_t disconnect(); + + /** + * Debug dump of the stream's state. + */ + virtual void dump(int fd, const Vector &args) const = 0; + + protected: + const int mId; + const String8 mName; + // Zero for formats with fixed buffer size for given dimensions. 
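Taken together, the public interface above implies a fixed call order for any concrete stream. A hedged sketch of that order (error handling trimmed; the HAL handshake between the two configuration calls is only indicated in a comment):

    // Sketch only: assumes a concrete Camera3Stream subclass instance and an
    // opened camera3_device from elsewhere in the camera service.
    status_t driveStream(const sp<Camera3Stream>& stream,
                         camera3_device* hal3Device) {
        camera3_stream* halStream = stream->startConfiguration();
        if (halStream == NULL) return UNKNOWN_ERROR;

        // ... include halStream in the HAL's configure_streams() call here;
        //     the HAL fills in usage and max_buffers ...

        status_t res = stream->finishConfiguration(hal3Device);
        if (res != OK) return res;

        camera3_stream_buffer buffer;
        res = stream->getBuffer(&buffer);            // hand to the HAL in a request
        if (res != OK) return res;
        res = stream->returnBuffer(buffer, /*timestamp*/ 0);
        if (res != OK) return res;

        res = stream->waitUntilIdle(Camera3Stream::TIMEOUT_NEVER);
        if (res != OK) return res;
        return stream->disconnect();                 // back to STATE_CONSTRUCTED
    }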
+ const size_t mMaxSize; + + enum { + STATE_ERROR, + STATE_CONSTRUCTED, + STATE_IN_CONFIG, + STATE_IN_RECONFIG, + STATE_CONFIGURED + } mState; + + mutable Mutex mLock; + + Camera3Stream(int id, camera3_stream_type type, + uint32_t width, uint32_t height, size_t maxSize, int format); + + /** + * Interface to be implemented by derived classes + */ + + // getBuffer / returnBuffer implementations + + // Since camera3_stream_buffer includes a raw pointer to the stream, + // cast to camera3_stream*, implementations must increment the + // refcount of the stream manually in getBufferLocked, and decrement it in + // returnBufferLocked. + virtual status_t getBufferLocked(camera3_stream_buffer *buffer) = 0; + virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer, + nsecs_t timestamp) = 0; + virtual bool hasOutstandingBuffersLocked() const = 0; + virtual status_t disconnectLocked() = 0; + + // Configure the buffer queue interface to the other end of the stream, + // after the HAL has provided usage and max_buffers values. After this call, + // the stream must be ready to produce all buffers for registration with + // HAL. + virtual status_t configureQueueLocked() = 0; + + // Get the total number of buffers in the queue + virtual size_t getBufferCountLocked() = 0; + + private: + static const unsigned int kRegisterFenceTimeoutMs = 5000; + + uint32_t oldUsage; + uint32_t oldMaxBuffers; + + // Gets all buffers from endpoint and registers them with the HAL. + status_t registerBuffersLocked(camera3_device *hal3Device); + +}; // class Camera3Stream + +}; // namespace camera3 + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp new file mode 100644 index 0000000..e8a5ca6 --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
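The protected section above fixes the contract for subclasses: six virtual hooks plus waitUntilIdle() and dump() from the public interface. A minimal, do-nothing skeleton showing just the required overrides (illustrative only; the Vector element type for dump() is assumed to be String16, since template arguments were lost in this copy of the diff):

    // Not a useful stream -- it only shows which members a subclass must
    // implement per the interface above.
    class NullStream : public Camera3Stream {
      public:
        NullStream(int id, uint32_t w, uint32_t h, int format) :
                Camera3Stream(id, CAMERA3_STREAM_OUTPUT, w, h,
                              /*maxSize*/ 0, format) {}

        virtual status_t waitUntilIdle(nsecs_t /*timeout*/) { return OK; }
        virtual void dump(int /*fd*/, const Vector<String16>& /*args*/) const {}

      private:
        virtual status_t getBufferLocked(camera3_stream_buffer* /*buffer*/) {
            return INVALID_OPERATION;   // no real buffer queue behind this stream
        }
        virtual status_t returnBufferLocked(const camera3_stream_buffer& /*buffer*/,
                                            nsecs_t /*timestamp*/) {
            return INVALID_OPERATION;
        }
        virtual bool hasOutstandingBuffersLocked() const { return false; }
        virtual status_t disconnectLocked()       { return OK; }
        virtual status_t configureQueueLocked()   { return OK; }
        virtual size_t   getBufferCountLocked()   { return 0; }
    };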
+ */ + +#define LOG_TAG "Camera3-ZslStream" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include +#include "Camera3ZslStream.h" + +namespace android { + +namespace camera3 { + +Camera3ZslStream::Camera3ZslStream(int id, uint32_t width, uint32_t height, + int depth) : + Camera3Stream(id, CAMERA3_STREAM_BIDIRECTIONAL, width, height, 0, + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), + mDepth(depth) { +} + +status_t Camera3ZslStream::getBufferLocked(camera3_stream_buffer *buffer) { + (void) buffer; + ALOGE("%s: Not implemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3ZslStream::returnBufferLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp) { + (void) buffer; + (void) timestamp; + ALOGE("%s: Not implemented", __FUNCTION__); + return INVALID_OPERATION; +} + +bool Camera3ZslStream::hasOutstandingBuffersLocked() const { + ALOGE("%s: Not implemented", __FUNCTION__); + return false; +} + +status_t Camera3ZslStream::waitUntilIdle(nsecs_t timeout) { + (void) timeout; + ALOGE("%s: Not implemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3ZslStream::disconnectLocked() { + ALOGE("%s: Not implemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3ZslStream::getInputBuffer(camera3_stream_buffer *buffer, + nsecs_t timestamp) { + (void) buffer; + (void) timestamp; + ALOGE("%s: Not implemented", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t Camera3ZslStream::returnInputBuffer(const camera3_stream_buffer &buffer) { + (void) buffer; + ALOGE("%s: Not implemented", __FUNCTION__); + return INVALID_OPERATION; +} + +void Camera3ZslStream::dump(int fd, const Vector &args) const { + (void) fd; + (void) args; + ALOGE("%s: Not implemented", __FUNCTION__); +} + +}; // namespace camera3 + +}; // namespace android diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.h b/services/camera/libcameraservice/camera3/Camera3ZslStream.h new file mode 100644 index 0000000..39d5995 --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3ZslStream.h @@ -0,0 +1,79 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_ZSL_STREAM_H +#define ANDROID_SERVERS_CAMERA3_ZSL_STREAM_H + +#include +#include + +#include "Camera3Stream.h" + +namespace android { + +namespace camera3 { + +/** + * A class for managing a single opaque ZSL stream to/from the camera device. + * This acts as a bidirectional stream at the HAL layer, caching and discarding + * most output buffers, and when directed, pushes a buffer back to the HAL for + * processing. + */ +class Camera3ZslStream: public Camera3Stream { + public: + /** + * Set up a ZSL stream of a given resolution. Depth is the number of buffers + * cached within the stream that can be retrieved for input. 
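Every method in the implementation above is still a stub, but the class comment describes the intended use: buffers are cached on the output side, and an input buffer matching a chosen timestamp is pulled back for reprocessing via the input-side calls declared just below. A hedged sketch of that intended flow:

    // Sketch only: getInputBuffer()/returnInputBuffer() are declared in the
    // header below; all of these paths return "not implemented" in this patch.
    using namespace android;
    using namespace android::camera3;

    void reprocessFrame(const sp<Camera3ZslStream>& zsl,
                        nsecs_t chosenTimestamp) {
        // Output side: the device fills the ZSL ring like any other stream,
        // keeping the most recent 'depth' buffers cached.

        // Input side: pull the cached buffer whose timestamp matches the
        // frame the user selected, and hand it back to the HAL as the input
        // buffer of a reprocessing request.
        camera3_stream_buffer inputBuffer;
        if (zsl->getInputBuffer(&inputBuffer, chosenTimestamp) == OK) {
            // ... submit inputBuffer with a capture request here ...
            zsl->returnInputBuffer(inputBuffer);
        }
    }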
+ */ + Camera3ZslStream(int id, uint32_t width, uint32_t height, int depth); + + virtual status_t waitUntilIdle(nsecs_t timeout); + virtual void dump(int fd, const Vector &args) const; + + /** + * Get an input buffer matching a specific timestamp. If no buffer matching + * the timestamp is available, NO_MEMORY is returned. + */ + status_t getInputBuffer(camera3_stream_buffer *buffer, nsecs_t timestamp); + + /** + * Return input buffer from HAL. The buffer is then marked as unfilled, and + * returned to the output-side stream for refilling. + */ + status_t returnInputBuffer(const camera3_stream_buffer &buffer); + + private: + + int mDepth; + + /** + * Camera3Stream interface + */ + + // getBuffer/returnBuffer operate the output stream side of the ZslStream. + virtual status_t getBufferLocked(camera3_stream_buffer *buffer); + virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer, + nsecs_t timestamp); + virtual bool hasOutstandingBuffersLocked() const; + virtual status_t disconnectLocked(); + +}; // class Camera3ZslStream + +}; // namespace camera3 + +}; // namespace android + +#endif -- cgit v1.1 From 3b53bc9b41c262d22f094406e3751bc5a41ef2ef Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Wed, 27 Feb 2013 18:02:26 -0800 Subject: Camera: Add input side to camera3 HAL device - Stream creation/deletion - Request submission - Request thread loop - Add get() to CameraMetadata to allow HAL submission while retaining ownership. Change-Id: I271f72bcbe9557eded43cbcbfe789109857f8144 --- camera/CameraMetadata.cpp | 117 ++- include/camera/CameraMetadata.h | 20 +- services/camera/libcameraservice/Camera3Device.cpp | 820 ++++++++++++++++++++- services/camera/libcameraservice/Camera3Device.h | 163 +++- 4 files changed, 1058 insertions(+), 62 deletions(-) diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp index fdd0610..6c3e233 100644 --- a/camera/CameraMetadata.cpp +++ b/camera/CameraMetadata.cpp @@ -23,19 +23,22 @@ namespace android { CameraMetadata::CameraMetadata() : - mBuffer(NULL) { + mBuffer(NULL), mLocked(false) { } -CameraMetadata::CameraMetadata(size_t entryCapacity, size_t dataCapacity) +CameraMetadata::CameraMetadata(size_t entryCapacity, size_t dataCapacity) : + mLocked(false) { mBuffer = allocate_camera_metadata(entryCapacity, dataCapacity); } -CameraMetadata::CameraMetadata(const CameraMetadata &other) { +CameraMetadata::CameraMetadata(const CameraMetadata &other) : + mLocked(false) { mBuffer = clone_camera_metadata(other.mBuffer); } -CameraMetadata::CameraMetadata(camera_metadata_t *buffer) : mBuffer(NULL) { +CameraMetadata::CameraMetadata(camera_metadata_t *buffer) : + mBuffer(NULL), mLocked(false) { acquire(buffer); } @@ -44,6 +47,11 @@ CameraMetadata &CameraMetadata::operator=(const CameraMetadata &other) { } CameraMetadata &CameraMetadata::operator=(const camera_metadata_t *buffer) { + if (mLocked) { + ALOGE("%s: Assignment to a locked CameraMetadata!", __FUNCTION__); + return *this; + } + if (CC_LIKELY(buffer != mBuffer)) { camera_metadata_t *newBuffer = clone_camera_metadata(buffer); clear(); @@ -53,16 +61,44 @@ CameraMetadata &CameraMetadata::operator=(const camera_metadata_t *buffer) { } CameraMetadata::~CameraMetadata() { + mLocked = false; clear(); } +const camera_metadata_t* CameraMetadata::getAndLock() { + mLocked = true; + return mBuffer; +} + +status_t CameraMetadata::unlock(const camera_metadata_t *buffer) { + if (!mLocked) { + ALOGE("%s: Can't unlock a non-locked CameraMetadata!", __FUNCTION__); + return INVALID_OPERATION; + } + if 
(buffer != mBuffer) { + ALOGE("%s: Can't unlock CameraMetadata with wrong pointer!", + __FUNCTION__); + return BAD_VALUE; + } + mLocked = false; + return OK; +} + camera_metadata_t* CameraMetadata::release() { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return NULL; + } camera_metadata_t *released = mBuffer; mBuffer = NULL; return released; } void CameraMetadata::clear() { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return; + } if (mBuffer) { free_camera_metadata(mBuffer); mBuffer = NULL; @@ -70,15 +106,27 @@ void CameraMetadata::clear() { } void CameraMetadata::acquire(camera_metadata_t *buffer) { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return; + } clear(); mBuffer = buffer; } void CameraMetadata::acquire(CameraMetadata &other) { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return; + } acquire(other.release()); } status_t CameraMetadata::append(const CameraMetadata &other) { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } return append_camera_metadata(mBuffer, other.mBuffer); } @@ -92,6 +140,10 @@ bool CameraMetadata::isEmpty() const { } status_t CameraMetadata::sort() { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } return sort_camera_metadata(mBuffer); } @@ -115,69 +167,101 @@ status_t CameraMetadata::checkType(uint32_t tag, uint8_t expectedType) { status_t CameraMetadata::update(uint32_t tag, const int32_t *data, size_t data_count) { status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } if ( (res = checkType(tag, TYPE_INT32)) != OK) { return res; } - return update(tag, (const void*)data, data_count); + return updateImpl(tag, (const void*)data, data_count); } status_t CameraMetadata::update(uint32_t tag, const uint8_t *data, size_t data_count) { status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } if ( (res = checkType(tag, TYPE_BYTE)) != OK) { return res; } - return update(tag, (const void*)data, data_count); + return updateImpl(tag, (const void*)data, data_count); } status_t CameraMetadata::update(uint32_t tag, const float *data, size_t data_count) { status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } if ( (res = checkType(tag, TYPE_FLOAT)) != OK) { return res; } - return update(tag, (const void*)data, data_count); + return updateImpl(tag, (const void*)data, data_count); } status_t CameraMetadata::update(uint32_t tag, const int64_t *data, size_t data_count) { status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } if ( (res = checkType(tag, TYPE_INT64)) != OK) { return res; } - return update(tag, (const void*)data, data_count); + return updateImpl(tag, (const void*)data, data_count); } status_t CameraMetadata::update(uint32_t tag, const double *data, size_t data_count) { status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } if ( (res = checkType(tag, TYPE_DOUBLE)) != OK) { return res; } - return update(tag, (const void*)data, data_count); + return updateImpl(tag, (const void*)data, data_count); } status_t CameraMetadata::update(uint32_t tag, const camera_metadata_rational_t *data, size_t data_count) { status_t res; + if (mLocked) { + 
ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } if ( (res = checkType(tag, TYPE_RATIONAL)) != OK) { return res; } - return update(tag, (const void*)data, data_count); + return updateImpl(tag, (const void*)data, data_count); } status_t CameraMetadata::update(uint32_t tag, const String8 &string) { status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } if ( (res = checkType(tag, TYPE_BYTE)) != OK) { return res; } - return update(tag, (const void*)string.string(), string.size()); + return updateImpl(tag, (const void*)string.string(), string.size()); } -status_t CameraMetadata::update(uint32_t tag, const void *data, +status_t CameraMetadata::updateImpl(uint32_t tag, const void *data, size_t data_count) { status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } int type = get_camera_metadata_tag_type(tag); if (type == -1) { ALOGE("%s: Tag %d not found", __FUNCTION__, tag); @@ -216,6 +300,11 @@ bool CameraMetadata::exists(uint32_t tag) const { camera_metadata_entry_t CameraMetadata::find(uint32_t tag) { status_t res; camera_metadata_entry entry; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + entry.count = 0; + return entry; + } res = find_camera_metadata_entry(mBuffer, tag, &entry); if (CC_UNLIKELY( res != OK )) { entry.count = 0; @@ -238,6 +327,10 @@ camera_metadata_ro_entry_t CameraMetadata::find(uint32_t tag) const { status_t CameraMetadata::erase(uint32_t tag) { camera_metadata_entry_t entry; status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } res = find_camera_metadata_entry(mBuffer, tag, &entry); if (res == NAME_NOT_FOUND) { return OK; diff --git a/include/camera/CameraMetadata.h b/include/camera/CameraMetadata.h index 4289126..8eeb2e7 100644 --- a/include/camera/CameraMetadata.h +++ b/include/camera/CameraMetadata.h @@ -49,6 +49,23 @@ class CameraMetadata { CameraMetadata &operator=(const camera_metadata_t *buffer); /** + * Get reference to the underlying metadata buffer. Ownership remains with + * the CameraMetadata object, but non-const CameraMetadata methods will not + * work until unlock() is called. Note that the lock has nothing to do with + * thread-safety, it simply prevents the camera_metadata_t pointer returned + * here from being accidentally invalidated by CameraMetadata operations. + */ + const camera_metadata_t* getAndLock(); + + /** + * Unlock the CameraMetadata for use again. After this unlock, the pointer + * given from getAndLock() may no longer be used. The pointer passed out + * from getAndLock must be provided to guarantee that the right object is + * being unlocked. + */ + status_t unlock(const camera_metadata_t *buffer); + + /** * Release a raw metadata buffer to the caller. After this call, * CameraMetadata no longer references the buffer, and the caller takes * responsibility for freeing the raw metadata buffer (using @@ -154,6 +171,7 @@ class CameraMetadata { private: camera_metadata_t *mBuffer; + bool mLocked; /** * Check if tag has a given type @@ -163,7 +181,7 @@ class CameraMetadata { /** * Base update entry method */ - status_t update(uint32_t tag, const void *data, size_t data_count); + status_t updateImpl(uint32_t tag, const void *data, size_t data_count); /** * Resize metadata buffer if needed by reallocating it and copying it over. 
diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index 04a6e6a..e6fb33e 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -29,13 +29,16 @@ #include #include #include "Camera3Device.h" +#include "camera3/Camera3OutputStream.h" -namespace android { +using namespace android::camera3; +namespace android { Camera3Device::Camera3Device(int id): mId(id), - mHal3Device(NULL) + mHal3Device(NULL), + mStatus(STATUS_UNINITIALIZED) { ATRACE_CALL(); camera3_callback_ops::notify = &sNotify; @@ -54,11 +57,17 @@ int Camera3Device::getId() const { return mId; } +/** + * CameraDeviceBase interface + */ + status_t Camera3Device::initialize(camera_module_t *module) { ATRACE_CALL(); + Mutex::Autolock l(mLock); + ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId); - if (mHal3Device != NULL) { + if (mStatus != STATUS_UNINITIALIZED) { ALOGE("%s: Already initialized!", __FUNCTION__); return INVALID_OPERATION; } @@ -76,6 +85,7 @@ status_t Camera3Device::initialize(camera_module_t *module) if (res != OK) { ALOGE("%s: Could not open camera %d: %s (%d)", __FUNCTION__, mId, strerror(-res), res); + mStatus = STATUS_ERROR; return res; } @@ -87,6 +97,7 @@ status_t Camera3Device::initialize(camera_module_t *module) __FUNCTION__, mId, CAMERA_DEVICE_API_VERSION_3_0, device->common.version); device->common.close(&device->common); + mStatus = STATUS_ERROR; return BAD_VALUE; } @@ -99,6 +110,7 @@ status_t Camera3Device::initialize(camera_module_t *module) " and device version (%x).", __FUNCTION__, device->common.version, info.device_version); device->common.close(&device->common); + mStatus = STATUS_ERROR; return BAD_VALUE; } @@ -109,6 +121,7 @@ status_t Camera3Device::initialize(camera_module_t *module) ALOGE("%s: Camera %d: Unable to initialize HAL device: %s (%d)", __FUNCTION__, mId, strerror(-res), res); device->common.close(&device->common); + mStatus = STATUS_ERROR; return BAD_VALUE; } @@ -124,18 +137,21 @@ status_t Camera3Device::initialize(camera_module_t *module) ALOGE("%s: Camera %d: Unable to set tag ops: %s (%d)", __FUNCTION__, mId, strerror(-res), res); device->common.close(&device->common); + mStatus = STATUS_ERROR; return res; } } /** Start up request queue thread */ - requestThread = new RequestThread(this); - res = requestThread->run(String8::format("C3Dev-%d-ReqQueue", mId).string()); + mRequestThread = new RequestThread(this, device); + res = mRequestThread->run(String8::format("C3Dev-%d-ReqQueue", mId).string()); if (res != OK) { ALOGE("%s: Camera %d: Unable to start request queue thread: %s (%d)", __FUNCTION__, mId, strerror(-res), res); device->common.close(&device->common); + mRequestThread.clear(); + mStatus = STATUS_ERROR; return res; } @@ -143,54 +159,205 @@ status_t Camera3Device::initialize(camera_module_t *module) mDeviceInfo = info.static_camera_characteristics; mHal3Device = device; + mStatus = STATUS_IDLE; + mNextStreamId = 0; return OK; } status_t Camera3Device::disconnect() { ATRACE_CALL(); + Mutex::Autolock l(mLock); - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + ALOGV("%s: E", __FUNCTION__); + + status_t res; + if (mStatus == STATUS_UNINITIALIZED) return OK; + + if (mStatus == STATUS_ACTIVE || + (mStatus == STATUS_ERROR && mRequestThread != NULL)) { + res = mRequestThread->clearRepeatingRequests(); + if (res != OK) { + ALOGE("%s: Can't stop streaming", __FUNCTION__); + return res; + } + res = 
waitUntilDrainedLocked(); + if (res != OK) { + ALOGE("%s: Timeout waiting for HAL to drain", __FUNCTION__); + return res; + } + } + assert(mStatus == STATUS_IDLE || mStatus == STATUS_ERROR); + + if (mRequestThread != NULL) { + mRequestThread->requestExit(); + } + + mOutputStreams.clear(); + mInputStream.clear(); + + if (mRequestThread != NULL) { + mRequestThread->join(); + mRequestThread.clear(); + } + + if (mHal3Device != NULL) { + mHal3Device->common.close(&mHal3Device->common); + mHal3Device = NULL; + } + + mStatus = STATUS_UNINITIALIZED; + + ALOGV("%s: X", __FUNCTION__); + return OK; } status_t Camera3Device::dump(int fd, const Vector &args) { ATRACE_CALL(); (void)args; + String8 lines; + + const char *status = + mStatus == STATUS_ERROR ? "ERROR" : + mStatus == STATUS_UNINITIALIZED ? "UNINITIALIZED" : + mStatus == STATUS_IDLE ? "IDLE" : + mStatus == STATUS_ACTIVE ? "ACTIVE" : + "Unknown"; + lines.appendFormat(" Device status: %s\n", status); + lines.appendFormat(" Stream configuration:\n"); + + if (mInputStream != NULL) { + write(fd, lines.string(), lines.size()); + mInputStream->dump(fd, args); + } else { + lines.appendFormat(" No input stream.\n"); + write(fd, lines.string(), lines.size()); + } + for (size_t i = 0; i < mOutputStreams.size(); i++) { + mOutputStreams[i]->dump(fd,args); + } - mHal3Device->ops->dump(mHal3Device, fd); + if (mHal3Device != NULL) { + lines = String8(" HAL device dump:\n"); + write(fd, lines.string(), lines.size()); + mHal3Device->ops->dump(mHal3Device, fd); + } return OK; } const CameraMetadata& Camera3Device::info() const { ALOGVV("%s: E", __FUNCTION__); - + if (CC_UNLIKELY(mStatus == STATUS_UNINITIALIZED || + mStatus == STATUS_ERROR)) { + ALOGE("%s: Access to static info %s!", __FUNCTION__, + mStatus == STATUS_ERROR ? 
+ "when in error state" : "before init"); + } return mDeviceInfo; } status_t Camera3Device::capture(CameraMetadata &request) { ATRACE_CALL(); - (void)request; + Mutex::Autolock l(mLock); + + switch (mStatus) { + case STATUS_ERROR: + ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + ALOGE("%s: Device not initialized", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_IDLE: + case STATUS_ACTIVE: + // OK + break; + default: + ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + return INVALID_OPERATION; + } - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + sp newRequest = setUpRequestLocked(request); + if (newRequest == NULL) { + ALOGE("%s: Can't create capture request", __FUNCTION__); + return BAD_VALUE; + } + + return mRequestThread->queueRequest(newRequest); } status_t Camera3Device::setStreamingRequest(const CameraMetadata &request) { ATRACE_CALL(); - (void)request; + Mutex::Autolock l(mLock); + + switch (mStatus) { + case STATUS_ERROR: + ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + ALOGE("%s: Device not initialized", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_IDLE: + case STATUS_ACTIVE: + // OK + break; + default: + ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + return INVALID_OPERATION; + } - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + sp newRepeatingRequest = setUpRequestLocked(request); + if (newRepeatingRequest == NULL) { + ALOGE("%s: Can't create repeating request", __FUNCTION__); + return BAD_VALUE; + } + + RequestList newRepeatingRequests; + newRepeatingRequests.push_back(newRepeatingRequest); + + return mRequestThread->setRepeatingRequests(newRepeatingRequests); +} + + +sp Camera3Device::setUpRequestLocked( + const CameraMetadata &request) { + status_t res; + + if (mStatus == STATUS_IDLE) { + res = configureStreamsLocked(); + if (res != OK) { + ALOGE("%s: Can't set up streams: %s (%d)", + __FUNCTION__, strerror(-res), res); + return NULL; + } + } + + sp newRequest = createCaptureRequest(request); + return newRequest; } status_t Camera3Device::clearStreamingRequest() { ATRACE_CALL(); + Mutex::Autolock l(mLock); + + switch (mStatus) { + case STATUS_ERROR: + ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + ALOGE("%s: Device not initialized", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_IDLE: + case STATUS_ACTIVE: + // OK + break; + default: + ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + return INVALID_OPERATION; + } - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + return mRequestThread->clearRepeatingRequests(); } status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) { @@ -204,11 +371,70 @@ status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t time status_t Camera3Device::createStream(sp consumer, uint32_t width, uint32_t height, int format, size_t size, int *id) { ATRACE_CALL(); - (void)consumer; (void)width; (void)height; (void)format; - (void)size; (void)id; + Mutex::Autolock l(mLock); - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + status_t res; + bool wasActive = false; + + switch (mStatus) { + case STATUS_ERROR: + ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + return INVALID_OPERATION; + case 
STATUS_UNINITIALIZED: + ALOGE("%s: Device not initialized", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_IDLE: + // OK + break; + case STATUS_ACTIVE: + ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); + mRequestThread->setPaused(true); + res = waitUntilDrainedLocked(); + if (res != OK) { + ALOGE("%s: Can't pause captures to reconfigure streams!", + __FUNCTION__); + mStatus = STATUS_ERROR; + return res; + } + wasActive = true; + break; + default: + ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + return INVALID_OPERATION; + } + assert(mStatus == STATUS_IDLE); + + sp newStream; + if (format == HAL_PIXEL_FORMAT_BLOB) { + newStream = new Camera3OutputStream(mNextStreamId, consumer, + width, height, size, format); + } else { + newStream = new Camera3OutputStream(mNextStreamId, consumer, + width, height, format); + } + + res = mOutputStreams.add(mNextStreamId, newStream); + if (res < 0) { + ALOGE("%s: Can't add new stream to set: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + *id = mNextStreamId++; + + // Continue captures if active at start + if (wasActive) { + ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__); + res = configureStreamsLocked(); + if (res != OK) { + ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)", + __FUNCTION__, mNextStreamId, strerror(-res), res); + return res; + } + mRequestThread->setPaused(false); + } + + return OK; } status_t Camera3Device::createReprocessStreamFromStream(int outputId, int *id) { @@ -223,27 +449,104 @@ status_t Camera3Device::createReprocessStreamFromStream(int outputId, int *id) { status_t Camera3Device::getStreamInfo(int id, uint32_t *width, uint32_t *height, uint32_t *format) { ATRACE_CALL(); - (void)id; (void)width; (void)height; (void)format; + Mutex::Autolock l(mLock); + + switch (mStatus) { + case STATUS_ERROR: + ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + ALOGE("%s: Device not initialized!", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_IDLE: + case STATUS_ACTIVE: + // OK + break; + default: + ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + return INVALID_OPERATION; + } - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + ssize_t idx = mOutputStreams.indexOfKey(id); + if (idx == NAME_NOT_FOUND) { + ALOGE("%s: Stream %d is unknown", __FUNCTION__, id); + return idx; + } + + if (width) *width = mOutputStreams[idx]->getWidth(); + if (height) *height = mOutputStreams[idx]->getHeight(); + if (format) *format = mOutputStreams[idx]->getFormat(); + + return OK; } status_t Camera3Device::setStreamTransform(int id, int transform) { ATRACE_CALL(); - (void)id; (void)transform; + Mutex::Autolock l(mLock); + + switch (mStatus) { + case STATUS_ERROR: + ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + ALOGE("%s: Device not initialized", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_IDLE: + case STATUS_ACTIVE: + // OK + break; + default: + ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + return INVALID_OPERATION; + } - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + ssize_t idx = mOutputStreams.indexOfKey(id); + if (idx == NAME_NOT_FOUND) { + ALOGE("%s: Stream %d does not exist", + __FUNCTION__, id); + return BAD_VALUE; + } + + return mOutputStreams.editValueAt(idx)->setTransform(transform); } status_t 
Camera3Device::deleteStream(int id) { ATRACE_CALL(); - (void)id; + Mutex::Autolock l(mLock); + status_t res; - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + // CameraDevice semantics require device to already be idle before + // deleteStream is called, unlike for createStream. + if (mStatus != STATUS_IDLE) { + ALOGE("%s: Device not idle", __FUNCTION__); + return INVALID_OPERATION; + } + + sp deletedStream; + if (mInputStream != NULL && id == mInputStream->getId()) { + deletedStream = mInputStream; + mInputStream.clear(); + } else { + ssize_t idx = mOutputStreams.indexOfKey(id); + if (idx == NAME_NOT_FOUND) { + ALOGE("%s: Stream %d does not exist", + __FUNCTION__, id); + return BAD_VALUE; + } + deletedStream = mOutputStreams.editValueAt(idx); + mOutputStreams.removeItem(id); + } + + // Free up the stream endpoint so that it can be used by some other stream + res = deletedStream->disconnect(); + if (res != OK) { + ALOGE("%s: Can't disconnect deleted stream", __FUNCTION__); + // fall through since we want to still list the stream as deleted. + } + mDeletedStreams.add(deletedStream); + + return res; } status_t Camera3Device::deleteReprocessStream(int id) { @@ -259,6 +562,23 @@ status_t Camera3Device::createDefaultRequest(int templateId, CameraMetadata *request) { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock l(mLock); + + switch (mStatus) { + case STATUS_ERROR: + ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + ALOGE("%s: Device is not initialized!", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_IDLE: + case STATUS_ACTIVE: + // OK + break; + default: + ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + return INVALID_OPERATION; + } const camera_metadata_t *rawRequest; rawRequest = mHal3Device->ops->construct_default_request_settings( @@ -271,9 +591,63 @@ status_t Camera3Device::createDefaultRequest(int templateId, status_t Camera3Device::waitUntilDrained() { ATRACE_CALL(); + Mutex::Autolock l(mLock); - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + return waitUntilDrainedLocked(); +} + +status_t Camera3Device::waitUntilDrainedLocked() { + ATRACE_CALL(); + status_t res; + + switch (mStatus) { + case STATUS_UNINITIALIZED: + case STATUS_IDLE: + ALOGV("%s: Already idle", __FUNCTION__); + return OK; + case STATUS_ERROR: + case STATUS_ACTIVE: + // Need to shut down + break; + default: + ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + return INVALID_OPERATION; + } + + if (mRequestThread != NULL) { + res = mRequestThread->waitUntilPaused(kShutdownTimeout); + if (res != OK) { + ALOGE("%s: Can't stop request thread in %f seconds!", + __FUNCTION__, kShutdownTimeout/1e9); + mStatus = STATUS_ERROR; + return res; + } + } + if (mInputStream != NULL) { + res = mInputStream->waitUntilIdle(kShutdownTimeout); + if (res != OK) { + ALOGE("%s: Can't idle input stream %d in %f seconds!", + __FUNCTION__, mInputStream->getId(), kShutdownTimeout/1e9); + mStatus = STATUS_ERROR; + return res; + } + } + for (size_t i = 0; i < mOutputStreams.size(); i++) { + res = mOutputStreams.editValueAt(i)->waitUntilIdle(kShutdownTimeout); + if (res != OK) { + ALOGE("%s: Can't idle output stream %d in %f seconds!", + __FUNCTION__, mOutputStreams.keyAt(i), + kShutdownTimeout/1e9); + mStatus = STATUS_ERROR; + return res; + } + } + + if (mStatus != STATUS_ERROR) { + mStatus = STATUS_IDLE; + } + + return OK; } status_t 
Camera3Device::setNotifyCallback(NotificationListener *listener) { @@ -335,17 +709,149 @@ status_t Camera3Device::pushReprocessBuffer(int reprocessStreamId, return INVALID_OPERATION; } -Camera3Device::RequestThread::RequestThread(wp parent) : - Thread(false), - mParent(parent) { +/** + * Camera3Device private methods + */ + +sp Camera3Device::createCaptureRequest( + const CameraMetadata &request) { + ATRACE_CALL(); + status_t res; + + sp newRequest = new CaptureRequest; + newRequest->mSettings = request; + + camera_metadata_entry_t inputStreams = + newRequest->mSettings.find(ANDROID_REQUEST_INPUT_STREAMS); + if (inputStreams.count > 0) { + if (mInputStream == NULL || + mInputStream->getId() != inputStreams.data.u8[0]) { + ALOGE("%s: Request references unknown input stream %d", + __FUNCTION__, inputStreams.data.u8[0]); + return NULL; + } + // Lazy completion of stream configuration (allocation/registration) + // on first use + if (mInputStream->isConfiguring()) { + res = mInputStream->finishConfiguration(mHal3Device); + if (res != OK) { + ALOGE("%s: Unable to finish configuring input stream %d:" + " %s (%d)", + __FUNCTION__, mInputStream->getId(), + strerror(-res), res); + return NULL; + } + } + + newRequest->mInputStream = mInputStream; + newRequest->mSettings.erase(ANDROID_REQUEST_INPUT_STREAMS); + } + + camera_metadata_entry_t streams = + newRequest->mSettings.find(ANDROID_REQUEST_OUTPUT_STREAMS); + if (streams.count == 0) { + ALOGE("%s: Zero output streams specified!", __FUNCTION__); + return NULL; + } + + for (size_t i = 0; i < streams.count; i++) { + int idx = mOutputStreams.indexOfKey(streams.data.u8[i]); + if (idx == NAME_NOT_FOUND) { + ALOGE("%s: Request references unknown stream %d", + __FUNCTION__, streams.data.u8[i]); + return NULL; + } + sp stream = mOutputStreams.editValueAt(idx); + + // Lazy completion of stream configuration (allocation/registration) + // on first use + if (stream->isConfiguring()) { + res = stream->finishConfiguration(mHal3Device); + if (res != OK) { + ALOGE("%s: Unable to finish configuring stream %d: %s (%d)", + __FUNCTION__, stream->getId(), strerror(-res), res); + return NULL; + } + } + + newRequest->mOutputStreams.push(stream); + } + newRequest->mSettings.erase(ANDROID_REQUEST_OUTPUT_STREAMS); + + return newRequest; } -bool Camera3Device::RequestThread::threadLoop() { - ALOGE("%s: Unimplemented", __FUNCTION__); +status_t Camera3Device::configureStreamsLocked() { + ATRACE_CALL(); + status_t res; - return false; + if (mStatus != STATUS_IDLE) { + ALOGE("%s: Not idle", __FUNCTION__); + return INVALID_OPERATION; + } + + // Start configuring the streams + + camera3_stream_configuration config; + + config.num_streams = (mInputStream != NULL) + mOutputStreams.size(); + + Vector streams; + streams.setCapacity(config.num_streams); + + if (mInputStream != NULL) { + camera3_stream_t *inputStream; + inputStream = mInputStream->startConfiguration(); + if (inputStream == NULL) { + ALOGE("%s: Can't start input stream configuration", + __FUNCTION__); + // TODO: Make sure the error flow here is correct + return INVALID_OPERATION; + } + streams.add(inputStream); + } + + for (size_t i = 0; i < mOutputStreams.size(); i++) { + camera3_stream_t *outputStream; + outputStream = mOutputStreams.editValueAt(i)->startConfiguration(); + if (outputStream == NULL) { + ALOGE("%s: Can't start output stream configuration", + __FUNCTION__); + // TODO: Make sure the error flow here is correct + return INVALID_OPERATION; + } + streams.add(outputStream); + } + + config.streams = 
streams.editArray(); + + // Do the HAL configuration; will potentially touch stream + // max_buffers, usage, priv fields. + + res = mHal3Device->ops->configure_streams(mHal3Device, &config); + + if (res != OK) { + ALOGE("%s: Unable to configure streams with HAL: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + // Request thread needs to know to avoid using repeat-last-settings protocol + // across configure_streams() calls + mRequestThread->configurationComplete(); + + // Finish configuring the streams lazily on first reference + + mStatus = STATUS_ACTIVE; + + return OK; } + +/** + * Camera HAL device callback methods + */ + void Camera3Device::processCaptureResult(const camera3_capture_result *result) { (void)result; @@ -359,6 +865,244 @@ void Camera3Device::notify(const camera3_notify_msg *msg) { } /** + * RequestThread inner class methods + */ + +Camera3Device::RequestThread::RequestThread(wp parent, + camera3_device_t *hal3Device) : + Thread(false), + mParent(parent), + mHal3Device(hal3Device), + mReconfigured(false), + mDoPause(false), + mPaused(true), + mFrameNumber(0) { +} + +void Camera3Device::RequestThread::configurationComplete() { + Mutex::Autolock l(mRequestLock); + mReconfigured = true; +} + +status_t Camera3Device::RequestThread::queueRequest( + sp request) { + Mutex::Autolock l(mRequestLock); + mRequestQueue.push_back(request); + + return OK; +} + +status_t Camera3Device::RequestThread::setRepeatingRequests( + const RequestList &requests) { + Mutex::Autolock l(mRequestLock); + mRepeatingRequests.clear(); + mRepeatingRequests.insert(mRepeatingRequests.begin(), + requests.begin(), requests.end()); + return OK; +} + +status_t Camera3Device::RequestThread::clearRepeatingRequests() { + Mutex::Autolock l(mRequestLock); + mRepeatingRequests.clear(); + return OK; +} + +void Camera3Device::RequestThread::setPaused(bool paused) { + Mutex::Autolock l(mPauseLock); + mDoPause = paused; + mDoPauseSignal.signal(); +} + +status_t Camera3Device::RequestThread::waitUntilPaused(nsecs_t timeout) { + status_t res; + Mutex::Autolock l(mPauseLock); + while (!mPaused) { + res = mPausedSignal.waitRelative(mPauseLock, timeout); + if (res == TIMED_OUT) { + return res; + } + } + return OK; +} + +bool Camera3Device::RequestThread::threadLoop() { + + status_t res; + + // Handle paused state. 
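The setPaused()/waitUntilPaused() pair above is a two-flag handshake: the controller toggles a "please pause" flag and then waits on a separate "actually paused" flag that only the worker thread sets. The standalone sketch below restates that handshake with standard-library types (illustrative only, not Android code):

    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    struct PauseState {
        std::mutex lock;
        std::condition_variable doPauseSignal;   // controller -> worker
        std::condition_variable pausedSignal;    // worker -> controller
        bool doPause = false;                    // request
        bool paused  = true;                     // acknowledgement
    };

    void setPaused(PauseState& s, bool paused) {
        std::lock_guard<std::mutex> l(s.lock);
        s.doPause = paused;
        s.doPauseSignal.notify_one();
    }

    bool waitUntilPaused(PauseState& s, std::chrono::nanoseconds timeout) {
        std::unique_lock<std::mutex> l(s.lock);
        return s.pausedSignal.wait_for(l, timeout, [&] { return s.paused; });
    }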
+ if (waitIfPaused()) { + return true; + } + + // Get work to do + + sp nextRequest = waitForNextRequest(); + if (nextRequest == NULL) { + return true; + } + + // Create request to HAL + + camera3_capture_request_t request = camera3_capture_request_t(); + + if (mPrevRequest != nextRequest) { + request.settings = nextRequest->mSettings.getAndLock(); + mPrevRequest = nextRequest; + } // else leave request.settings NULL to indicate 'reuse latest given' + + camera3_stream_buffer_t inputBuffer; + Vector outputBuffers; + + // Fill in buffers + + if (nextRequest->mInputStream != NULL) { + request.input_buffer = &inputBuffer; + res = nextRequest->mInputStream->getBuffer(&inputBuffer); + if (res != OK) { + ALOGE("RequestThread: Can't get input buffer, skipping request:" + " %s (%d)", strerror(-res), res); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return true; + } + } else { + request.input_buffer = NULL; + } + + outputBuffers.insertAt(camera3_stream_buffer_t(), 0, + nextRequest->mOutputStreams.size()); + request.output_buffers = outputBuffers.array(); + for (size_t i = 0; i < nextRequest->mOutputStreams.size(); i++) { + res = nextRequest->mOutputStreams.editItemAt(i)-> + getBuffer(&outputBuffers.editItemAt(i)); + if (res != OK) { + ALOGE("RequestThread: Can't get output buffer, skipping request:" + "%s (%d)", strerror(-res), res); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return true; + } + request.num_output_buffers++; + } + + request.frame_number = mFrameNumber++; + + // Submit request and block until ready for next one + + res = mHal3Device->ops->process_capture_request(mHal3Device, &request); + if (res != OK) { + ALOGE("RequestThread: Unable to submit capture request %d to HAL" + " device: %s (%d)", request.frame_number, strerror(-res), res); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return false; + } + + if (request.settings != NULL) { + nextRequest->mSettings.unlock(request.settings); + } + return true; +} + +void Camera3Device::RequestThread::cleanUpFailedRequest( + camera3_capture_request_t &request, + sp &nextRequest, + Vector &outputBuffers) { + + if (request.settings != NULL) { + nextRequest->mSettings.unlock(request.settings); + } + if (request.input_buffer != NULL) { + request.input_buffer->status = CAMERA3_BUFFER_STATUS_ERROR; + nextRequest->mInputStream->returnBuffer(*(request.input_buffer), 0); + } + for (size_t i = 0; i < request.num_output_buffers; i++) { + outputBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR; + nextRequest->mOutputStreams.editItemAt(i)->returnBuffer( + outputBuffers[i], 0); + } + // TODO: Report error upstream +} + +sp + Camera3Device::RequestThread::waitForNextRequest() { + status_t res; + sp nextRequest; + + // Optimized a bit for the simple steady-state case (single repeating + // request), to avoid putting that request in the queue temporarily. + Mutex::Autolock l(mRequestLock); + + while (mRequestQueue.empty()) { + if (!mRepeatingRequests.empty()) { + // Always atomically enqueue all requests in a repeating request + // list. Guarantees a complete in-sequence set of captures to + // application. 
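From the caller's side, the queueing policy implemented in this function gives a simple model: single captures are served ahead of starting the next repeat, and a repeating list is always enqueued as a complete unit so bursts are never split. A hedged caller-side sketch (request and thread names are assumed, not from the patch):

    RequestList preview;
    preview.push_back(previewRequest);              // sp<CaptureRequest>, assumed
    requestThread->setRepeatingRequests(preview);   // streamed until cleared

    requestThread->queueRequest(stillRequest);      // interleaved once, then
                                                    // preview resumes

    requestThread->clearRepeatingRequests();        // thread signals paused once
                                                    // the queue runs dry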
+ const RequestList &requests = mRepeatingRequests; + RequestList::const_iterator firstRequest = + requests.begin(); + nextRequest = *firstRequest; + mRequestQueue.insert(mRequestQueue.end(), + ++firstRequest, + requests.end()); + // No need to wait any longer + break; + } + + res = mRequestSignal.waitRelative(mRequestLock, kRequestTimeout); + + if (res == TIMED_OUT) { + // Signal that we're paused by starvation + Mutex::Autolock pl(mPauseLock); + if (mPaused == false) { + mPaused = true; + mPausedSignal.signal(); + } + // Stop waiting for now and let thread management happen + return NULL; + } + } + + if (nextRequest == NULL) { + // Don't have a repeating request already in hand, so queue + // must have an entry now. + RequestList::iterator firstRequest = + mRequestQueue.begin(); + nextRequest = *firstRequest; + mRequestQueue.erase(firstRequest); + } + + // Not paused + Mutex::Autolock pl(mPauseLock); + mPaused = false; + + // Check if we've reconfigured since last time, and reset the preview + // request if so. Can't use 'NULL request == repeat' across configure calls. + if (mReconfigured) { + mPrevRequest.clear(); + mReconfigured = false; + } + + return nextRequest; +} + +bool Camera3Device::RequestThread::waitIfPaused() { + status_t res; + Mutex::Autolock l(mPauseLock); + while (mDoPause) { + // Signal that we're paused by request + if (mPaused == false) { + mPaused = true; + mPausedSignal.signal(); + } + res = mDoPauseSignal.waitRelative(mPauseLock, kRequestTimeout); + if (res == TIMED_OUT) { + return true; + } + } + // We don't set mPaused to false here, because waitForNextRequest needs + // to further manage the paused state in case of starvation. + return false; +} + +/** * Static callback forwarding methods from HAL to instance */ diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/Camera3Device.h index df7352c..86f4c6a 100644 --- a/services/camera/libcameraservice/Camera3Device.h +++ b/services/camera/libcameraservice/Camera3Device.h @@ -14,8 +14,8 @@ * limitations under the License. */ -#ifndef ANDROID_SERVERS_CAMERA_CAMERA3DEVICE_H -#define ANDROID_SERVERS_CAMERA_CAMERA3DEVICE_H +#ifndef ANDROID_SERVERS_CAMERA3DEVICE_H +#define ANDROID_SERVERS_CAMERA3DEVICE_H #include #include @@ -24,6 +24,8 @@ #include #include "CameraDeviceBase.h" +#include "camera3/Camera3Stream.h" +#include "camera3/Camera3OutputStream.h" #include "hardware/camera3.h" @@ -55,63 +57,202 @@ class Camera3Device : virtual ~Camera3Device(); /** - * CameraDevice interface + * CameraDeviceBase interface */ + virtual int getId() const; + + // Transitions to idle state on success. virtual status_t initialize(camera_module_t *module); virtual status_t disconnect(); virtual status_t dump(int fd, const Vector &args); virtual const CameraMetadata& info() const; + + // Capture and setStreamingRequest will configure streams if currently in + // idle state virtual status_t capture(CameraMetadata &request); virtual status_t setStreamingRequest(const CameraMetadata &request); virtual status_t clearStreamingRequest(); + virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout); + + // Actual stream creation/deletion is delayed until first request is submitted + // If adding streams while actively capturing, will pause device before adding + // stream, reconfiguring device, and unpausing. 
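The interface annotations in this header (lazy stream configuration on first request, pause-and-reconfigure when adding streams while active) imply the following top-level flow. A hedged sketch with assumed inputs; 'module' and 'surface' would come from the camera service, and error handling is trimmed:

    sp<Camera3Device> device = new Camera3Device(/*id*/ 0);
    device->initialize(module);                       // -> IDLE

    int streamId;
    device->createStream(surface, 1280, 720,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, /*size*/ 0, &streamId);

    CameraMetadata previewRequest;
    device->createDefaultRequest(CAMERA3_TEMPLATE_PREVIEW, &previewRequest);
    // ... set ANDROID_REQUEST_OUTPUT_STREAMS on previewRequest to { streamId } ...

    device->setStreamingRequest(previewRequest);      // configures streams, -> ACTIVE
    device->clearStreamingRequest();
    device->waitUntilDrained();                       // -> IDLE
    device->disconnect();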
virtual status_t createStream(sp consumer, uint32_t width, uint32_t height, int format, size_t size, int *id); virtual status_t createReprocessStreamFromStream(int outputId, int *id); + virtual status_t getStreamInfo(int id, uint32_t *width, uint32_t *height, uint32_t *format); virtual status_t setStreamTransform(int id, int transform); + virtual status_t deleteStream(int id); virtual status_t deleteReprocessStream(int id); + virtual status_t createDefaultRequest(int templateId, CameraMetadata *request); + + // Transitions to the idle state on success virtual status_t waitUntilDrained(); + virtual status_t setNotifyCallback(NotificationListener *listener); virtual status_t waitForNextFrame(nsecs_t timeout); virtual status_t getNextFrame(CameraMetadata *frame); + virtual status_t triggerAutofocus(uint32_t id); virtual status_t triggerCancelAutofocus(uint32_t id); virtual status_t triggerPrecaptureMetering(uint32_t id); + virtual status_t pushReprocessBuffer(int reprocessStreamId, buffer_handle_t *buffer, wp listener); private: - const int mId; - camera3_device_t *mHal3Device; + static const nsecs_t kShutdownTimeout = 5000000000; // 5 sec + + + Mutex mLock; + + /**** Scope for mLock ****/ + + const int mId; + camera3_device_t *mHal3Device; + + CameraMetadata mDeviceInfo; + vendor_tag_query_ops_t mVendorTagOps; + + enum { + STATUS_ERROR, + STATUS_UNINITIALIZED, + STATUS_IDLE, + STATUS_ACTIVE + } mStatus; + + // Mapping of stream IDs to stream instances + typedef KeyedVector > StreamSet; + + StreamSet mOutputStreams; + sp mInputStream; + int mNextStreamId; + + // Need to hold on to stream references until configure completes. + Vector > mDeletedStreams; + + /**** End scope for mLock ****/ + + class CaptureRequest : public LightRefBase { + public: + CameraMetadata mSettings; + sp mInputStream; + Vector > mOutputStreams; + }; + typedef List > RequestList; + + /** + * Lock-held version of waitUntilDrained. Will transition to IDLE on + * success. + */ + status_t waitUntilDrainedLocked(); + + /** + * Do common work for setting up a streaming or single capture request. + * On success, will transition to ACTIVE if in IDLE. + */ + sp setUpRequestLocked(const CameraMetadata &request); + + /** + * Build a CaptureRequest request from the CameraDeviceBase request + * settings. + */ + sp createCaptureRequest(const CameraMetadata &request); - CameraMetadata mDeviceInfo; - vendor_tag_query_ops_t mVendorTagOps; + /** + * Take the currently-defined set of streams and configure the HAL to use + * them. This is a long-running operation (may be several hundered ms). + */ + status_t configureStreamsLocked(); /** * Thread for managing capture request submission to HAL device. */ - class RequestThread: public Thread { + class RequestThread : public Thread { public: - RequestThread(wp parent); + RequestThread(wp parent, + camera3_device_t *hal3Device); + + /** + * Call after stream (re)-configuration is completed. + */ + void configurationComplete(); + + /** + * Set or clear the list of repeating requests. Does not block + * on either. Use waitUntilPaused to wait until request queue + * has emptied out. + */ + status_t setRepeatingRequests(const RequestList& requests); + status_t clearRepeatingRequests(); + + status_t queueRequest(sp request); + + /** + * Pause/unpause the capture thread. Doesn't block, so use + * waitUntilPaused to wait until the thread is paused. + */ + void setPaused(bool paused); + + /** + * Wait until thread is paused, either due to setPaused(true) + * or due to lack of input requests. 
Returns TIMED_OUT in case + * the thread does not pause within the timeout. + */ + status_t waitUntilPaused(nsecs_t timeout); protected: virtual bool threadLoop(); private: + static const nsecs_t kRequestTimeout = 50e6; // 50 ms + + // Waits for a request, or returns NULL if times out. + sp waitForNextRequest(); + + // Return buffers, etc, for a request that couldn't be fully + // constructed. The buffers will be returned in the ERROR state + // to mark them as not having valid data. + // All arguments will be modified. + void cleanUpFailedRequest(camera3_capture_request_t &request, + sp &nextRequest, + Vector &outputBuffers); + + // Pause handling + bool waitIfPaused(); + + wp mParent; + camera3_device_t *mHal3Device; + + Mutex mRequestLock; + Condition mRequestSignal; + RequestList mRequestQueue; + RequestList mRepeatingRequests; + + bool mReconfigured; + + // Used by waitIfPaused, waitForNextRequest, and waitUntilPaused + Mutex mPauseLock; + bool mDoPause; + Condition mDoPauseSignal; + bool mPaused; + Condition mPausedSignal; - wp mParent; + sp mPrevRequest; + int32_t mFrameNumber; }; - sp requestThread; + sp mRequestThread; /** * Callback functions from HAL device -- cgit v1.1 From d0158c38ad82c1c3033a6dd5806435def3727784 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Mon, 11 Mar 2013 14:13:50 -0700 Subject: Camera: Add output side to camera3 HAL device - Notifications - Result queue and processing Change-Id: Id6b4746708ce6c6dcc7262666b6ac3130fa3d225 --- services/camera/libcameraservice/Camera2Client.cpp | 3 +- services/camera/libcameraservice/Camera3Device.cpp | 205 +++++++++++++++++++-- services/camera/libcameraservice/Camera3Device.h | 27 +++ 3 files changed, 219 insertions(+), 16 deletions(-) diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index d3adbdc..9421a77 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -145,9 +145,10 @@ Camera2Client::~Camera2Client() { status_t Camera2Client::dump(int fd, const Vector& args) { String8 result; - result.appendFormat("Client2[%d] (%p) PID: %d, dump:\n", + result.appendFormat("Client2[%d] (%p) Client: %s PID: %d, dump:\n", mCameraId, getRemoteCallback()->asBinder().get(), + String8(mClientPackageName).string(), mClientPid); result.append(" State: "); #define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break; diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index e6fb33e..6cf652c 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -38,7 +38,8 @@ namespace android { Camera3Device::Camera3Device(int id): mId(id), mHal3Device(NULL), - mStatus(STATUS_UNINITIALIZED) + mStatus(STATUS_UNINITIALIZED), + mListener(NULL) { ATRACE_CALL(); camera3_callback_ops::notify = &sNotify; @@ -652,32 +653,53 @@ status_t Camera3Device::waitUntilDrainedLocked() { status_t Camera3Device::setNotifyCallback(NotificationListener *listener) { ATRACE_CALL(); - (void)listener; + Mutex::Autolock l(mOutputLock); - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + if (listener != NULL && mListener != NULL) { + ALOGW("%s: Replacing old callback listener", __FUNCTION__); + } + mListener = listener; + + return OK; } status_t Camera3Device::waitForNextFrame(nsecs_t timeout) { - (void)timeout; + ATRACE_CALL(); + status_t res; + Mutex::Autolock l(mOutputLock); - 
ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + while (mResultQueue.empty()) { + res = mResultSignal.waitRelative(mOutputLock, timeout); + if (res == TIMED_OUT) { + return res; + } else if (res != OK) { + ALOGE("%s: Camera %d: Error waiting for frame: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + } + return OK; } status_t Camera3Device::getNextFrame(CameraMetadata *frame) { ATRACE_CALL(); - (void)frame; + Mutex::Autolock l(mOutputLock); - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + if (mResultQueue.empty()) { + return NOT_ENOUGH_DATA; + } + + CameraMetadata &result = *(mResultQueue.begin()); + frame->acquire(result); + mResultQueue.erase(mResultQueue.begin()); + + return OK; } status_t Camera3Device::triggerAutofocus(uint32_t id) { ATRACE_CALL(); (void)id; - ALOGE("%s: Unimplemented", __FUNCTION__); return INVALID_OPERATION; } @@ -853,15 +875,168 @@ status_t Camera3Device::configureStreamsLocked() { */ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { - (void)result; + ATRACE_CALL(); + + status_t res; + + if (result->result == NULL) { + // TODO: Report error upstream + ALOGW("%s: No metadata for frame %d", __FUNCTION__, + result->frame_number); + return; + } + + nsecs_t timestamp = 0; + AlgState cur3aState; + AlgState new3aState; + int32_t aeTriggerId = 0; + int32_t afTriggerId = 0; + + NotificationListener *listener; + + { + Mutex::Autolock l(mOutputLock); + + // Push result metadata into queue + mResultQueue.push_back(CameraMetadata()); + CameraMetadata &captureResult = *(mResultQueue.end()); + + captureResult = result->result; + captureResult.update(ANDROID_REQUEST_FRAME_COUNT, + (int32_t*)&result->frame_number, 1); + + // Get timestamp from result metadata + + camera_metadata_entry entry = + captureResult.find(ANDROID_SENSOR_TIMESTAMP); + if (entry.count == 0) { + ALOGE("%s: Camera %d: No timestamp provided by HAL for frame %d!", + __FUNCTION__, mId, result->frame_number); + // TODO: Report error upstream + } else { + timestamp = entry.data.i64[0]; + } + + // Get 3A states from result metadata + + entry = captureResult.find(ANDROID_CONTROL_AE_STATE); + if (entry.count == 0) { + ALOGE("%s: Camera %d: No AE state provided by HAL for frame %d!", + __FUNCTION__, mId, result->frame_number); + } else { + new3aState.aeState = + static_cast( + entry.data.u8[0]); + } + + entry = captureResult.find(ANDROID_CONTROL_AF_STATE); + if (entry.count == 0) { + ALOGE("%s: Camera %d: No AF state provided by HAL for frame %d!", + __FUNCTION__, mId, result->frame_number); + } else { + new3aState.afState = + static_cast( + entry.data.u8[0]); + } + + entry = captureResult.find(ANDROID_CONTROL_AWB_STATE); + if (entry.count == 0) { + ALOGE("%s: Camera %d: No AWB state provided by HAL for frame %d!", + __FUNCTION__, mId, result->frame_number); + } else { + new3aState.awbState = + static_cast( + entry.data.u8[0]); + } + + entry = captureResult.find(ANDROID_CONTROL_AF_TRIGGER_ID); + if (entry.count == 0) { + ALOGE("%s: Camera %d: No AF trigger ID provided by HAL for frame %d!", + __FUNCTION__, mId, result->frame_number); + } else { + afTriggerId = entry.data.i32[0]; + } + + entry = captureResult.find(ANDROID_CONTROL_AE_PRECAPTURE_ID); + if (entry.count == 0) { + ALOGE("%s: Camera %d: No AE precapture trigger ID provided by HAL" + " for frame %d!", __FUNCTION__, mId, result->frame_number); + } else { + aeTriggerId = entry.data.i32[0]; + } + + listener = mListener; + cur3aState = m3AState; + + m3AState = 
new3aState; + } // scope for mOutputLock + + // Return completed buffers to their streams + for (size_t i = 0; i < result->num_output_buffers; i++) { + Camera3Stream *stream = + Camera3Stream::cast(result->output_buffers[i].stream); + res = stream->returnBuffer(result->output_buffers[i], timestamp); + // Note: stream may be deallocated at this point, if this buffer was the + // last reference to it. + if (res != OK) { + ALOGE("%s: Camera %d: Can't return buffer %d for frame %d to its" + " stream:%s (%d)", __FUNCTION__, mId, i, + result->frame_number, strerror(-res), res); + // TODO: Report error upstream + } + } + + // Dispatch any 3A change events to listeners + if (listener != NULL) { + if (new3aState.aeState != cur3aState.aeState) { + listener->notifyAutoExposure(new3aState.aeState, aeTriggerId); + } + if (new3aState.afState != cur3aState.afState) { + listener->notifyAutoFocus(new3aState.afState, afTriggerId); + } + if (new3aState.awbState != cur3aState.awbState) { + listener->notifyAutoWhitebalance(new3aState.awbState, aeTriggerId); + } + } - ALOGE("%s: Unimplemented", __FUNCTION__); } void Camera3Device::notify(const camera3_notify_msg *msg) { - (void)msg; + NotificationListener *listener; + { + Mutex::Autolock l(mOutputLock); + if (mListener == NULL) return; + listener = mListener; + } - ALOGE("%s: Unimplemented", __FUNCTION__); + if (msg == NULL) { + ALOGE("%s: Camera %d: HAL sent NULL notify message!", + __FUNCTION__, mId); + return; + } + + switch (msg->type) { + case CAMERA3_MSG_ERROR: { + int streamId = 0; + if (msg->message.error.error_stream != NULL) { + Camera3Stream *stream = + Camera3Stream::cast( + msg->message.error.error_stream); + streamId = stream->getId(); + } + listener->notifyError(msg->message.error.error_code, + msg->message.error.frame_number, streamId); + break; + } + case CAMERA3_MSG_SHUTTER: { + listener->notifyShutter(msg->message.shutter.frame_number, + msg->message.shutter.timestamp); + break; + } + default: + ALOGE("%s: Camera %d: Unknown notify message from HAL: %d", + __FUNCTION__, mId, msg->type); + } } /** diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/Camera3Device.h index 86f4c6a..8600c6c 100644 --- a/services/camera/libcameraservice/Camera3Device.h +++ b/services/camera/libcameraservice/Camera3Device.h @@ -255,6 +255,33 @@ class Camera3Device : sp mRequestThread; /** + * Output result queue and current HAL device 3A state + */ + + // Lock for output side of device + Mutex mOutputLock; + + /**** Scope for mOutputLock ****/ + + List mResultQueue; + Condition mResultSignal; + NotificationListener *mListener; + + struct AlgState { + camera_metadata_enum_android_control_ae_state aeState; + camera_metadata_enum_android_control_af_state afState; + camera_metadata_enum_android_control_awb_state awbState; + + AlgState() : + aeState(ANDROID_CONTROL_AE_STATE_INACTIVE), + afState(ANDROID_CONTROL_AF_STATE_INACTIVE), + awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE) { + } + } m3AState; + + /**** End scope for mOutputLock ****/ + + /** * Callback functions from HAL device */ void processCaptureResult(const camera3_capture_result *result); -- cgit v1.1 From 681be0398a06da7e24db4ed934a92af64d1409b1 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Wed, 27 Mar 2013 12:05:40 -0700 Subject: ToneGenerator: fix truncated tones The first DTMF tone after a silent period is truncated. This is because the phone app starts and stops the tone when the user presses and releases the key. 
This combined to the fact that the tones use the low latency path and that when the output stream exists standby there is a period of several milliseconds during which no audio is mixed until the stream is "warmed up". The result is that much less audio is generated than the actual key press duration. The fix consists in storing the tone start time and making sure that the number of samples generated corresponds at least to the time difference between the tone start and stop commands. Bug 6607077 Change-Id: I070d20dd8600c25a9e5d5a60c1d3313b7917b00d --- include/media/ToneGenerator.h | 1 + media/libmedia/ToneGenerator.cpp | 32 +++++++++++++++++++++++++++++++- 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/include/media/ToneGenerator.h b/include/media/ToneGenerator.h index 0529bcd..2183fbe 100644 --- a/include/media/ToneGenerator.h +++ b/include/media/ToneGenerator.h @@ -271,6 +271,7 @@ private: float mVolume; // Volume applied to audio track audio_stream_type_t mStreamType; // Audio stream used for output unsigned int mProcessSize; // Size of audio blocks generated at a time by audioCallback() (in PCM frames). + struct timespec mStartTime; // tone start time: needed to guaranty actual tone duration bool initAudioTrack(); static void audioCallback(int event, void* user, void *info); diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index 3554608..9ea3ea7 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -922,6 +922,9 @@ bool ToneGenerator::startTone(tone_type toneType, int durationMs) { ALOGV("Immediate start, time %d", (unsigned int)(systemTime()/1000000)); lResult = true; mState = TONE_STARTING; + if (clock_gettime(CLOCK_MONOTONIC, &mStartTime) != 0) { + mStartTime.tv_sec = 0; + } mLock.unlock(); mpAudioTrack->start(); mLock.lock(); @@ -940,6 +943,7 @@ bool ToneGenerator::startTone(tone_type toneType, int durationMs) { } else { ALOGV("Delayed start"); mState = TONE_RESTARTING; + mStartTime.tv_sec = 0; lStatus = mWaitCbkCond.waitRelative(mLock, seconds(3)); if (lStatus == NO_ERROR) { if (mState != TONE_IDLE) { @@ -978,7 +982,30 @@ void ToneGenerator::stopTone() { mLock.lock(); if (mState != TONE_IDLE && mState != TONE_INIT) { if (mState == TONE_PLAYING || mState == TONE_STARTING || mState == TONE_RESTARTING) { - mState = TONE_STOPPING; + struct timespec stopTime; + // If the start time is valid, make sure that the number of audio samples produced + // corresponds at least to the time between the start and stop commands. + // This is needed in case of cold start of the output stream. + if ((mStartTime. 
tv_sec != 0) && (clock_gettime(CLOCK_MONOTONIC, &stopTime) == 0)) { + time_t sec = stopTime.tv_sec - mStartTime.tv_sec; + long nsec = stopTime.tv_nsec - mStartTime.tv_nsec; + long durationMs; + if (nsec < 0) { + --sec; + nsec += 1000000000; + } + + if ((sec + 1) > ((long)(INT_MAX / mSamplingRate))) { + mMaxSmp = sec * mSamplingRate; + } else { + // mSamplingRate is always > 1000 + sec = sec * 1000 + nsec / 1000000; // duration in milliseconds + mMaxSmp = (sec * mSamplingRate) / 1000; + } + ALOGV("stopTone() forcing mMaxSmp to %d, total for far %d", mMaxSmp, mTotalSmp); + } else { + mState = TONE_STOPPING; + } } ALOGV("waiting cond"); status_t lStatus = mWaitCbkCond.waitRelative(mLock, seconds(3)); @@ -1263,6 +1290,9 @@ audioCallback_EndLoop: ALOGV("Cbk restarting track"); if (lpToneGen->prepareWave()) { lpToneGen->mState = TONE_STARTING; + if (clock_gettime(CLOCK_MONOTONIC, &lpToneGen->mStartTime) != 0) { + lpToneGen->mStartTime.tv_sec = 0; + } // must reload lpToneDesc as prepareWave() may change mpToneDesc lpToneDesc = lpToneGen->mpToneDesc; } else { -- cgit v1.1 From e467ef084b75b074d0081616080b54212a7024c8 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Wed, 27 Mar 2013 19:04:12 -0700 Subject: stagefright: matroska: don't crash on parsing error MatroskaExtractor crashed with an assertion if mkvparser::ParseNext() returned a negative number. Now handle all error values the same way as the EOF. Change-Id: I173c4f878d692a0cbdb915ad1118d0686249d625 Signed-off-by: Lajos Molnar Bug: 8433794 --- media/libstagefright/matroska/MatroskaExtractor.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp index 7fc7037..b304749 100644 --- a/media/libstagefright/matroska/MatroskaExtractor.cpp +++ b/media/libstagefright/matroska/MatroskaExtractor.cpp @@ -263,8 +263,8 @@ void BlockIterator::advance_l() { mCluster, nextCluster, pos, len); ALOGV("ParseNext returned %ld", res); - if (res > 0) { - // EOF + if (res != 0) { + // EOF or error mCluster = NULL; break; -- cgit v1.1 From d477b8d071826c0768620f7ac302f31d8b12b1ca Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 28 Mar 2013 11:16:43 -0700 Subject: Make sure resume() and flush() are handled appropriately even if the codec is in Idle->Executing state. Change-Id: I16a10791fae0e062a19299732c472cc93e4ed971 related-to-bug: 8347958 --- media/libstagefright/ACodec.cpp | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 1a2eeb1..c9f8741 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -3461,6 +3461,22 @@ bool ACodec::IdleToExecutingState::onMessageReceived(const sp &msg) { return true; } + case kWhatResume: + { + // We'll be active soon enough. + return true; + } + + case kWhatFlush: + { + // We haven't even started yet, so we're flushed alright... + sp notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatFlushCompleted); + notify->post(); + + return true; + } + case kWhatSignalEndOfInputStream: { mCodec->onSignalEndOfInputStream(); -- cgit v1.1 From a8eccec73a40d4afcff505eb463a016c89aeed42 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Thu, 28 Mar 2013 11:58:45 -0700 Subject: ToneGenerator: fix overflow in stopTone Fix overflow in tone duration calculation introduced in commit 681be039. 
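The arithmetic involved in the two ToneGenerator changes is small enough to show in a standalone sketch. The helper below is hypothetical (it is not part of either patch) and only mirrors the logic now in stopTone(): measure the elapsed CLOCK_MONOTONIC time between the start and stop calls, convert it to milliseconds, and derive the minimum number of samples to generate, keeping the intermediate product in 64 bits because the 32-bit product durationMs * samplingRate wraps for tones held longer than roughly 44 seconds at 48 kHz (2147483647 / 48000 is about 44739 ms), which is the overflow this commit fixes.

#include <stdint.h>
#include <time.h>

// Hypothetical helper mirroring the duration clamp added to ToneGenerator::stopTone():
// returns the minimum number of PCM frames to produce so the audible tone lasts at
// least as long as the interval between the start and stop calls.
static uint32_t minSamplesForElapsedTime(const timespec &start,
                                         const timespec &stop,
                                         uint32_t samplingRate) {
    time_t sec = stop.tv_sec - start.tv_sec;
    long nsec = stop.tv_nsec - start.tv_nsec;
    if (nsec < 0) {              // borrow one second when the nanosecond delta is negative
        --sec;
        nsec += 1000000000;
    }
    // Elapsed time in milliseconds, held in 64 bits.
    int64_t durationMs = (int64_t)sec * 1000 + nsec / 1000000;
    // Multiply in 64 bits before dividing: e.g. 50000 ms * 48000 Hz = 2.4e9,
    // which already wraps a signed 32-bit value.
    return (uint32_t)((durationMs * (int64_t)samplingRate) / 1000);
}
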
Bug 6607077 Change-Id: Ie12f13701345c2b2d3be0b3c4d71cbfa2394a29b --- media/libmedia/ToneGenerator.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index 9ea3ea7..f09ce75 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -986,7 +986,7 @@ void ToneGenerator::stopTone() { // If the start time is valid, make sure that the number of audio samples produced // corresponds at least to the time between the start and stop commands. // This is needed in case of cold start of the output stream. - if ((mStartTime. tv_sec != 0) && (clock_gettime(CLOCK_MONOTONIC, &stopTime) == 0)) { + if ((mStartTime.tv_sec != 0) && (clock_gettime(CLOCK_MONOTONIC, &stopTime) == 0)) { time_t sec = stopTime.tv_sec - mStartTime.tv_sec; long nsec = stopTime.tv_nsec - mStartTime.tv_nsec; long durationMs; @@ -1000,7 +1000,7 @@ void ToneGenerator::stopTone() { } else { // mSamplingRate is always > 1000 sec = sec * 1000 + nsec / 1000000; // duration in milliseconds - mMaxSmp = (sec * mSamplingRate) / 1000; + mMaxSmp = (unsigned int)(((int64_t)sec * mSamplingRate) / 1000); } ALOGV("stopTone() forcing mMaxSmp to %d, total for far %d", mMaxSmp, mTotalSmp); } else { -- cgit v1.1 From ff9297ac908aa01e44fda4ab9ca7a4bb514c00fd Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 28 Mar 2013 13:34:10 -0700 Subject: Fix SHUTDOWN after SETUP and before PLAY in wifi display. Change-Id: Ieb8ce1ac3130254839975a3677162b64156735bc related-to-bug: 8499893 --- .../wifi-display/source/WifiDisplaySource.cpp | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 792a9c5..4a49811 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -273,7 +273,8 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { if (!strcasecmp(val, "pause") && mState == PLAYING) { mState = PLAYING_TO_PAUSED; sendTrigger(mClientSessionID, TRIGGER_PAUSE); - } else if (!strcasecmp(val, "play") && mState == PAUSED) { + } else if (!strcasecmp(val, "play") + && mState == PAUSED) { mState = PAUSED_TO_PLAYING; sendTrigger(mClientSessionID, TRIGGER_PLAY); } @@ -422,7 +423,8 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { NULL /* interlaced */)); mClient->onDisplayConnected( - mClientInfo.mPlaybackSession->getSurfaceTexture(), + mClientInfo.mPlaybackSession + ->getSurfaceTexture(), width, height, mUsingHDCP @@ -1351,6 +1353,15 @@ status_t WifiDisplaySource::onPlayRequest( return ERROR_MALFORMED; } + if (mState != AWAITING_CLIENT_PLAY) { + ALOGW("Received PLAY request but we're in state %d", mState); + + sendErrorResponse( + sessionID, "455 Method Not Valid in This State", cseq); + + return INVALID_OPERATION; + } + ALOGI("Received PLAY request."); if (mPlaybackSessionEstablished) { finishPlay(); @@ -1673,7 +1684,10 @@ void WifiDisplaySource::HDCPObserver::notify( status_t WifiDisplaySource::makeHDCP() { sp sm = defaultServiceManager(); sp binder = sm->getService(String16("media.player")); - sp service = interface_cast(binder); + + sp service = + interface_cast(binder); + CHECK(service != NULL); mHDCP = service->makeHDCP(true /* createEncryptionModule */); -- cgit v1.1 From ec24fa46443634cd29627182c5812ccf43682692 Mon Sep 17 00:00:00 2001 From: Mike Lockwood Date: Mon, 1 Apr 2013 
10:51:35 -0700 Subject: MTP: Implement date created field in GetObjectInfo Bug: 8293874 Change-Id: I6b74fe73362bd4fac34ca8a54e127ae91f82aef9 --- media/mtp/MtpServer.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp index 8568dfc..df87db4 100644 --- a/media/mtp/MtpServer.cpp +++ b/media/mtp/MtpServer.cpp @@ -704,7 +704,8 @@ MtpResponseCode MtpServer::doGetObjectInfo() { mData.putUInt32(info.mAssociationDesc); mData.putUInt32(info.mSequenceNumber); mData.putString(info.mName); - mData.putEmptyString(); // date created + formatDateTime(info.mDateCreated, date, sizeof(date)); + mData.putString(date); // date created formatDateTime(info.mDateModified, date, sizeof(date)); mData.putString(date); // date modified mData.putEmptyString(); // keywords -- cgit v1.1 From 997594088164cfb33c1cb8c376884346fbf1e7ae Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 1 Apr 2013 14:28:31 -0700 Subject: Fix seek in response to OnPrepared() for HLS content Change-Id: I0e52352845398a4db074e939487f6f6de94bd523 related-to-bug: 8225122 --- media/libmediaplayerservice/MediaPlayerFactory.cpp | 3 ++- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 19 +++++++++-------- media/libstagefright/httplive/LiveSession.cpp | 24 ++++++++++------------ 3 files changed, 23 insertions(+), 23 deletions(-) diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp index 1fb8b1a..90aed39 100644 --- a/media/libmediaplayerservice/MediaPlayerFactory.cpp +++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp @@ -206,7 +206,8 @@ class NuPlayerFactory : public MediaPlayerFactory::IFactory { return 0.0; if (!strncasecmp("http://", url, 7) - || !strncasecmp("https://", url, 8)) { + || !strncasecmp("https://", url, 8) + || !strncasecmp("file://", url, 7)) { size_t len = strlen(url); if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) { return kOurScore; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index 5387e1a..46d0a5a 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -161,7 +161,8 @@ void NuPlayer::setDataSourceAsync(const sp &source) { static bool IsHTTPLiveURL(const char *url) { if (!strncasecmp("http://", url, 7) - || !strncasecmp("https://", url, 8)) { + || !strncasecmp("https://", url, 8) + || !strncasecmp("file://", url, 7)) { size_t len = strlen(url); if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) { return true; @@ -833,14 +834,6 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp *decoder) { (*decoder)->configure(format); - int64_t durationUs; - if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) { - sp driver = mDriver.promote(); - if (driver != NULL) { - driver->notifyDuration(durationUs); - } - } - return OK; } @@ -1271,6 +1264,14 @@ void NuPlayer::onSourceNotify(const sp &msg) { if (driver != NULL) { driver->notifyPrepareCompleted(err); } + + int64_t durationUs; + if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) { + sp driver = mDriver.promote(); + if (driver != NULL) { + driver->notifyDuration(durationUs); + } + } break; } diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp index 962b01c..505bdb3 100644 --- a/media/libstagefright/httplive/LiveSession.cpp +++ b/media/libstagefright/httplive/LiveSession.cpp @@ -631,22 +631,20 @@ rinse_repeat: if (index < 
mPlaylist->size()) { int32_t newSeqNumber = firstSeqNumberInPlaylist + index; - if (newSeqNumber != mSeqNumber) { - ALOGI("seeking to seq no %d", newSeqNumber); + ALOGI("seeking to seq no %d", newSeqNumber); - mSeqNumber = newSeqNumber; + mSeqNumber = newSeqNumber; - mDataSource->reset(); + mDataSource->reset(); - // reseting the data source will have had the - // side effect of discarding any previously queued - // bandwidth change discontinuity. - // Therefore we'll need to treat these seek - // discontinuities as involving a bandwidth change - // even if they aren't directly. - seekDiscontinuity = true; - bandwidthChanged = true; - } + // reseting the data source will have had the + // side effect of discarding any previously queued + // bandwidth change discontinuity. + // Therefore we'll need to treat these seek + // discontinuities as involving a bandwidth change + // even if they aren't directly. + seekDiscontinuity = true; + bandwidthChanged = true; } } -- cgit v1.1 From 4c63a239c404af1e055e5f9939939ab0fd09d98a Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Sat, 30 Mar 2013 16:19:44 -0700 Subject: MediaDrm API update Clarify offline usage of sessions and keys and implement implement CryptoSession to support additional crypto use cases. Change-Id: I5d8000ce7e1dd7eba08969fc50296c9e1456c4fc --- drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp | 279 +++++++++++++-- drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h | 57 ++- include/media/IDrm.h | 62 +++- media/libmedia/IDrm.cpp | 408 +++++++++++++++------- media/libmediaplayerservice/Drm.cpp | 148 +++++++- media/libmediaplayerservice/Drm.h | 53 ++- 6 files changed, 817 insertions(+), 190 deletions(-) diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp index 91f5c9c..c34690b 100644 --- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp @@ -21,6 +21,7 @@ #include "drm/DrmAPI.h" #include "MockDrmCryptoPlugin.h" +#include "media/stagefright/MediaErrors.h" using namespace android; @@ -98,17 +99,17 @@ namespace android { } - status_t MockDrmPlugin::getLicenseRequest(Vector const &sessionId, - Vector const &initData, - String8 const &mimeType, LicenseType licenseType, - KeyedVector const &optionalParameters, - Vector &request, String8 &defaultUrl) + status_t MockDrmPlugin::getKeyRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, KeyType keyType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl) { Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::getLicenseRequest(sessionId=%s, initData=%s, mimeType=%s" - ", licenseType=%d, optionalParameters=%s))", + ALOGD("MockDrmPlugin::getKeyRequest(sessionId=%s, initData=%s, mimeType=%s" + ", keyType=%d, optionalParameters=%s))", vectorToString(sessionId).string(), vectorToString(initData).string(), mimeType.string(), - licenseType, stringMapToString(optionalParameters).string()); + keyType, stringMapToString(optionalParameters).string()); ssize_t index = findSession(sessionId); if (index == kNotFound) { @@ -119,15 +120,15 @@ namespace android { // Properties used in mock test, set by mock plugin and verifed cts test app // byte[] initData -> mock-initdata // string mimeType -> mock-mimetype - // string licenseType -> mock-licensetype + // string keyType -> mock-keytype // string optionalParameters -> mock-optparams formatted as {key1,value1},{key2,value2} mByteArrayProperties.add(String8("mock-initdata"), initData); 
mStringProperties.add(String8("mock-mimetype"), mimeType); - String8 licenseTypeStr; - licenseTypeStr.appendFormat("%d", (int)licenseType); - mStringProperties.add(String8("mock-licensetype"), licenseTypeStr); + String8 keyTypeStr; + keyTypeStr.appendFormat("%d", (int)keyType); + mStringProperties.add(String8("mock-keytype"), keyTypeStr); String8 params; for (size_t i = 0; i < optionalParameters.size(); i++) { @@ -159,11 +160,12 @@ namespace android { return OK; } - status_t MockDrmPlugin::provideLicenseResponse(Vector const &sessionId, - Vector const &response) + status_t MockDrmPlugin::provideKeyResponse(Vector const &sessionId, + Vector const &response, + Vector &keySetId) { Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::provideLicenseResponse(sessionId=%s, response=%s)", + ALOGD("MockDrmPlugin::provideKeyResponse(sessionId=%s, response=%s)", vectorToString(sessionId).string(), vectorToString(response).string()); ssize_t index = findSession(sessionId); if (index == kNotFound) { @@ -176,30 +178,61 @@ namespace android { // Properties used in mock test, set by mock plugin and verifed cts test app // byte[] response -> mock-response - mByteArrayProperties.add(String8("mock-response"), response); + const size_t kKeySetIdSize = 8; + + for (size_t i = 0; i < kKeySetIdSize / sizeof(long); i++) { + long r = random(); + keySetId.appendArray((uint8_t *)&r, sizeof(long)); + } + mKeySets.add(keySetId); + return OK; } - status_t MockDrmPlugin::removeLicense(Vector const &sessionId) + status_t MockDrmPlugin::removeKeys(Vector const &keySetId) { Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::removeLicense(sessionId=%s)", - vectorToString(sessionId).string()); + ALOGD("MockDrmPlugin::removeKeys(keySetId=%s)", + vectorToString(keySetId).string()); + + ssize_t index = findKeySet(keySetId); + if (index == kNotFound) { + ALOGD("Invalid keySetId"); + return BAD_VALUE; + } + mKeySets.removeAt(index); + + return OK; + } + + status_t MockDrmPlugin::restoreKeys(Vector const &sessionId, + Vector const &keySetId) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::restoreKeys(sessionId=%s, keySetId=%s)", + vectorToString(sessionId).string(), + vectorToString(keySetId).string()); ssize_t index = findSession(sessionId); if (index == kNotFound) { ALOGD("Invalid sessionId"); return BAD_VALUE; } + index = findKeySet(keySetId); + if (index == kNotFound) { + ALOGD("Invalid keySetId"); + return BAD_VALUE; + } + return OK; } - status_t MockDrmPlugin::queryLicenseStatus(Vector const &sessionId, + status_t MockDrmPlugin::queryKeyStatus(Vector const &sessionId, KeyedVector &infoMap) const { - ALOGD("MockDrmPlugin::queryLicenseStatus(sessionId=%s)", + ALOGD("MockDrmPlugin::queryKeyStatus(sessionId=%s)", vectorToString(sessionId).string()); ssize_t index = findSession(sessionId); @@ -324,6 +357,198 @@ namespace android { return OK; } + status_t MockDrmPlugin::setCipherAlgorithm(Vector const &sessionId, + String8 const &algorithm) + { + Mutex::Autolock lock(mLock); + + ALOGD("MockDrmPlugin::setCipherAlgorithm(sessionId=%s, algorithm=%s)", + vectorToString(sessionId).string(), algorithm.string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + if (algorithm == "AES/CBC/NoPadding") { + return OK; + } + return BAD_VALUE; + } + + status_t MockDrmPlugin::setMacAlgorithm(Vector const &sessionId, + String8 const &algorithm) + { + Mutex::Autolock lock(mLock); + + ALOGD("MockDrmPlugin::setMacAlgorithm(sessionId=%s, algorithm=%s)", + 
vectorToString(sessionId).string(), algorithm.string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + if (algorithm == "HmacSHA256") { + return OK; + } + return BAD_VALUE; + } + + status_t MockDrmPlugin::encrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::encrypt(sessionId=%s, keyId=%s, input=%s, iv=%s)", + vectorToString(sessionId).string(), + vectorToString(keyId).string(), + vectorToString(input).string(), + vectorToString(iv).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] keyId -> mock-keyid + // byte[] input -> mock-input + // byte[] iv -> mock-iv + mByteArrayProperties.add(String8("mock-keyid"), keyId); + mByteArrayProperties.add(String8("mock-input"), input); + mByteArrayProperties.add(String8("mock-iv"), iv); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-output -> output + index = mByteArrayProperties.indexOfKey(String8("mock-output")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + output = mByteArrayProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::decrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::decrypt(sessionId=%s, keyId=%s, input=%s, iv=%s)", + vectorToString(sessionId).string(), + vectorToString(keyId).string(), + vectorToString(input).string(), + vectorToString(iv).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] keyId -> mock-keyid + // byte[] input -> mock-input + // byte[] iv -> mock-iv + mByteArrayProperties.add(String8("mock-keyid"), keyId); + mByteArrayProperties.add(String8("mock-input"), input); + mByteArrayProperties.add(String8("mock-iv"), iv); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-output -> output + index = mByteArrayProperties.indexOfKey(String8("mock-output")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + output = mByteArrayProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::sign(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector &signature) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::sign(sessionId=%s, keyId=%s, message=%s)", + vectorToString(sessionId).string(), + vectorToString(keyId).string(), + vectorToString(message).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] keyId -> mock-keyid + // byte[] message -> mock-message + mByteArrayProperties.add(String8("mock-keyid"), keyId); + mByteArrayProperties.add(String8("mock-message"), message); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-signature -> signature + index 
= mByteArrayProperties.indexOfKey(String8("mock-signature")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + signature = mByteArrayProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::verify(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector const &signature, + bool &match) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::verify(sessionId=%s, keyId=%s, message=%s, signature=%s)", + vectorToString(sessionId).string(), + vectorToString(keyId).string(), + vectorToString(message).string(), + vectorToString(signature).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] keyId -> mock-keyid + // byte[] message -> mock-message + // byte[] signature -> mock-signature + mByteArrayProperties.add(String8("mock-keyid"), keyId); + mByteArrayProperties.add(String8("mock-message"), message); + mByteArrayProperties.add(String8("mock-signature"), signature); + + // Properties used in mock test, set by cts test app returned from mock plugin + // String mock-match "1" or "0" -> match + index = mStringProperties.indexOfKey(String8("mock-match")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + match = atol(mStringProperties.valueAt(index).string()); + } + return OK; + } + ssize_t MockDrmPlugin::findSession(Vector const &sessionId) const { ALOGD("findSession: nsessions=%d, size=%d", mSessions.size(), sessionId.size()); @@ -335,6 +560,18 @@ namespace android { return kNotFound; } + ssize_t MockDrmPlugin::findKeySet(Vector const &keySetId) const + { + ALOGD("findKeySet: nkeySets=%d, size=%d", mKeySets.size(), keySetId.size()); + for (size_t i = 0; i < mKeySets.size(); ++i) { + if (memcmp(mKeySets[i].array(), keySetId.array(), keySetId.size()) == 0) { + return i; + } + } + return kNotFound; + } + + // Conversion utilities String8 MockDrmPlugin::vectorToString(Vector const &vector) const { diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h index d46a127..ca9eac7 100644 --- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h @@ -57,21 +57,23 @@ namespace android { status_t openSession(Vector &sessionId); status_t closeSession(Vector const &sessionId); - status_t - getLicenseRequest(Vector const &sessionId, - Vector const &initData, - String8 const &mimeType, LicenseType licenseType, - KeyedVector const &optionalParameters, - Vector &request, String8 &defaultUrl); + status_t getKeyRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, KeyType keyType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl); - status_t provideLicenseResponse(Vector const &sessionId, - Vector const &response); + status_t provideKeyResponse(Vector const &sessionId, + Vector const &response, + Vector &keySetId); - status_t removeLicense(Vector const &sessionId); + status_t removeKeys(Vector const &keySetId); - status_t - queryLicenseStatus(Vector const &sessionId, - KeyedVector &infoMap) const; + status_t restoreKeys(Vector const &sessionId, + Vector const &keySetId); + + status_t queryKeyStatus(Vector const &sessionId, + KeyedVector &infoMap) const; status_t getProvisionRequest(Vector &request, String8 &defaultUrl); @@ 
-90,15 +92,46 @@ namespace android { status_t setPropertyByteArray(String8 const &name, Vector const &value ); + status_t setCipherAlgorithm(Vector const &sessionId, + String8 const &algorithm); + + status_t setMacAlgorithm(Vector const &sessionId, + String8 const &algorithm); + + status_t encrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output); + + status_t decrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output); + + status_t sign(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector &signature); + + status_t verify(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector const &signature, + bool &match); + private: String8 vectorToString(Vector const &vector) const; String8 arrayToString(uint8_t const *array, size_t len) const; String8 stringMapToString(KeyedVector map) const; SortedVector > mSessions; + SortedVector > mKeySets; static const ssize_t kNotFound = -1; ssize_t findSession(Vector const &sessionId) const; + ssize_t findKeySet(Vector const &keySetId) const; Mutex mLock; KeyedVector mStringProperties; diff --git a/include/media/IDrm.h b/include/media/IDrm.h index 38e2378..15d0a75 100644 --- a/include/media/IDrm.h +++ b/include/media/IDrm.h @@ -42,19 +42,23 @@ struct IDrm : public IInterface { virtual status_t closeSession(Vector const &sessionId) = 0; virtual status_t - getLicenseRequest(Vector const &sessionId, - Vector const &initData, - String8 const &mimeType, DrmPlugin::LicenseType licenseType, - KeyedVector const &optionalParameters, - Vector &request, String8 &defaultUrl) = 0; + getKeyRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, DrmPlugin::KeyType keyType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl) = 0; - virtual status_t provideLicenseResponse(Vector const &sessionId, - Vector const &response) = 0; + virtual status_t provideKeyResponse(Vector const &sessionId, + Vector const &response, + Vector &keySetId) = 0; - virtual status_t removeLicense(Vector const &sessionId) = 0; + virtual status_t removeKeys(Vector const &keySetId) = 0; - virtual status_t queryLicenseStatus(Vector const &sessionId, - KeyedVector &infoMap) const = 0; + virtual status_t restoreKeys(Vector const &sessionId, + Vector const &keySetId) = 0; + + virtual status_t queryKeyStatus(Vector const &sessionId, + KeyedVector &infoMap) const = 0; virtual status_t getProvisionRequest(Vector &request, String8 &defaulUrl) = 0; @@ -65,13 +69,42 @@ struct IDrm : public IInterface { virtual status_t releaseSecureStops(Vector const &ssRelease) = 0; - virtual status_t getPropertyString(String8 const &name, String8 &value ) const = 0; + virtual status_t getPropertyString(String8 const &name, String8 &value) const = 0; virtual status_t getPropertyByteArray(String8 const &name, - Vector &value ) const = 0; + Vector &value) const = 0; virtual status_t setPropertyString(String8 const &name, String8 const &value ) const = 0; virtual status_t setPropertyByteArray(String8 const &name, - Vector const &value ) const = 0; + Vector const &value) const = 0; + + virtual status_t setCipherAlgorithm(Vector const &sessionId, + String8 const &algorithm) = 0; + + virtual status_t setMacAlgorithm(Vector const &sessionId, + String8 const &algorithm) = 0; + + virtual status_t encrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output) = 
0; + + virtual status_t decrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output) = 0; + + virtual status_t sign(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector &signature) = 0; + + virtual status_t verify(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector const &signature, + bool &match) = 0; private: DISALLOW_EVIL_CONSTRUCTORS(IDrm); @@ -81,6 +114,9 @@ struct BnDrm : public BnInterface { virtual status_t onTransact( uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags = 0); +private: + void readVector(const Parcel &data, Vector &vector) const; + void writeVector(Parcel *reply, Vector const &vector) const; }; } // namespace android diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp index 3b13ec6..1641b56 100644 --- a/media/libmedia/IDrm.cpp +++ b/media/libmedia/IDrm.cpp @@ -33,10 +33,11 @@ enum { DESTROY_PLUGIN, OPEN_SESSION, CLOSE_SESSION, - GET_LICENSE_REQUEST, - PROVIDE_LICENSE_RESPONSE, - REMOVE_LICENSE, - QUERY_LICENSE_STATUS, + GET_KEY_REQUEST, + PROVIDE_KEY_RESPONSE, + REMOVE_KEYS, + RESTORE_KEYS, + QUERY_KEY_STATUS, GET_PROVISION_REQUEST, PROVIDE_PROVISION_RESPONSE, GET_SECURE_STOPS, @@ -44,7 +45,13 @@ enum { GET_PROPERTY_STRING, GET_PROPERTY_BYTE_ARRAY, SET_PROPERTY_STRING, - SET_PROPERTY_BYTE_ARRAY + SET_PROPERTY_BYTE_ARRAY, + SET_CIPHER_ALGORITHM, + SET_MAC_ALGORITHM, + ENCRYPT, + DECRYPT, + SIGN, + VERIFY }; struct BpDrm : public BpInterface { @@ -92,9 +99,7 @@ struct BpDrm : public BpInterface { data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); remote()->transact(OPEN_SESSION, data, &reply); - uint32_t size = reply.readInt32(); - sessionId.insertAt((size_t)0, size); - reply.read(sessionId.editArray(), size); + readVector(reply, sessionId); return reply.readInt32(); } @@ -103,80 +108,81 @@ struct BpDrm : public BpInterface { Parcel data, reply; data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); - data.writeInt32(sessionId.size()); - data.write(sessionId.array(), sessionId.size()); + writeVector(data, sessionId); remote()->transact(CLOSE_SESSION, data, &reply); return reply.readInt32(); } virtual status_t - getLicenseRequest(Vector const &sessionId, - Vector const &initData, - String8 const &mimeType, DrmPlugin::LicenseType licenseType, - KeyedVector const &optionalParameters, - Vector &request, String8 &defaultUrl) { + getKeyRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, DrmPlugin::KeyType keyType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl) { Parcel data, reply; data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); - data.writeInt32(sessionId.size()); - data.write(sessionId.array(), sessionId.size()); - - data.writeInt32(initData.size()); - data.write(initData.array(), initData.size()); - + writeVector(data, sessionId); + writeVector(data, initData); data.writeString8(mimeType); - data.writeInt32((uint32_t)licenseType); + data.writeInt32((uint32_t)keyType); data.writeInt32(optionalParameters.size()); for (size_t i = 0; i < optionalParameters.size(); ++i) { data.writeString8(optionalParameters.keyAt(i)); data.writeString8(optionalParameters.valueAt(i)); } - remote()->transact(GET_LICENSE_REQUEST, data, &reply); + remote()->transact(GET_KEY_REQUEST, data, &reply); - uint32_t len = reply.readInt32(); - request.insertAt((size_t)0, len); - reply.read(request.editArray(), len); + readVector(reply, request); defaultUrl = reply.readString8(); 
return reply.readInt32(); } - virtual status_t provideLicenseResponse(Vector const &sessionId, - Vector const &response) { + virtual status_t provideKeyResponse(Vector const &sessionId, + Vector const &response, + Vector &keySetId) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + writeVector(data, sessionId); + writeVector(data, response); + remote()->transact(PROVIDE_KEY_RESPONSE, data, &reply); + readVector(reply, keySetId); + + return reply.readInt32(); + } + + virtual status_t removeKeys(Vector const &keySetId) { Parcel data, reply; data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); - data.writeInt32(sessionId.size()); - data.write(sessionId.array(), sessionId.size()); - data.writeInt32(response.size()); - data.write(response.array(), response.size()); - remote()->transact(PROVIDE_LICENSE_RESPONSE, data, &reply); + writeVector(data, keySetId); + remote()->transact(REMOVE_KEYS, data, &reply); return reply.readInt32(); } - virtual status_t removeLicense(Vector const &sessionId) { + virtual status_t restoreKeys(Vector const &sessionId, + Vector const &keySetId) { Parcel data, reply; data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); - data.writeInt32(sessionId.size()); - data.write(sessionId.array(), sessionId.size()); - remote()->transact(REMOVE_LICENSE, data, &reply); + writeVector(data, sessionId); + writeVector(data, keySetId); + remote()->transact(RESTORE_KEYS, data, &reply); return reply.readInt32(); } - virtual status_t queryLicenseStatus(Vector const &sessionId, + virtual status_t queryKeyStatus(Vector const &sessionId, KeyedVector &infoMap) const { Parcel data, reply; data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); - data.writeInt32(sessionId.size()); - data.write(sessionId.array(), sessionId.size()); - - remote()->transact(QUERY_LICENSE_STATUS, data, &reply); + writeVector(data, sessionId); + remote()->transact(QUERY_KEY_STATUS, data, &reply); infoMap.clear(); size_t count = reply.readInt32(); @@ -195,9 +201,7 @@ struct BpDrm : public BpInterface { remote()->transact(GET_PROVISION_REQUEST, data, &reply); - uint32_t len = reply.readInt32(); - request.insertAt((size_t)0, len); - reply.read(request.editArray(), len); + readVector(reply, request); defaultUrl = reply.readString8(); return reply.readInt32(); @@ -207,8 +211,7 @@ struct BpDrm : public BpInterface { Parcel data, reply; data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); - data.writeInt32(response.size()); - data.write(response.array(), response.size()); + writeVector(data, response); remote()->transact(PROVIDE_PROVISION_RESPONSE, data, &reply); return reply.readInt32(); @@ -224,9 +227,7 @@ struct BpDrm : public BpInterface { uint32_t count = reply.readInt32(); for (size_t i = 0; i < count; i++) { Vector secureStop; - uint32_t len = reply.readInt32(); - secureStop.insertAt((size_t)0, len); - reply.read(secureStop.editArray(), len); + readVector(reply, secureStop); secureStops.push_back(secureStop); } return reply.readInt32(); @@ -236,8 +237,7 @@ struct BpDrm : public BpInterface { Parcel data, reply; data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); - data.writeInt32(ssRelease.size()); - data.write(ssRelease.array(), ssRelease.size()); + writeVector(data, ssRelease); remote()->transact(RELEASE_SECURE_STOPS, data, &reply); return reply.readInt32(); @@ -261,10 +261,7 @@ struct BpDrm : public BpInterface { data.writeString8(name); remote()->transact(GET_PROPERTY_BYTE_ARRAY, data, &reply); - uint32_t len = reply.readInt32(); - value.insertAt((size_t)0, 
len); - reply.read(value.editArray(), len); - + readVector(reply, value); return reply.readInt32(); } @@ -285,15 +282,120 @@ struct BpDrm : public BpInterface { data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); data.writeString8(name); - data.writeInt32(value.size()); - data.write(value.array(), value.size()); + writeVector(data, value); remote()->transact(SET_PROPERTY_BYTE_ARRAY, data, &reply); return reply.readInt32(); } + virtual status_t setCipherAlgorithm(Vector const &sessionId, + String8 const &algorithm) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + data.writeString8(algorithm); + remote()->transact(SET_CIPHER_ALGORITHM, data, &reply); + return reply.readInt32(); + } + + virtual status_t setMacAlgorithm(Vector const &sessionId, + String8 const &algorithm) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + data.writeString8(algorithm); + remote()->transact(SET_MAC_ALGORITHM, data, &reply); + return reply.readInt32(); + } + + virtual status_t encrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + writeVector(data, keyId); + writeVector(data, input); + writeVector(data, iv); + + remote()->transact(ENCRYPT, data, &reply); + readVector(reply, output); + + return reply.readInt32(); + } + + virtual status_t decrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + writeVector(data, keyId); + writeVector(data, input); + writeVector(data, iv); + + remote()->transact(DECRYPT, data, &reply); + readVector(reply, output); + + return reply.readInt32(); + } + + virtual status_t sign(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector &signature) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + writeVector(data, keyId); + writeVector(data, message); + + remote()->transact(SIGN, data, &reply); + readVector(reply, signature); + + return reply.readInt32(); + } + + virtual status_t verify(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector const &signature, + bool &match) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + writeVector(data, keyId); + writeVector(data, message); + writeVector(data, signature); + + remote()->transact(VERIFY, data, &reply); + match = (bool)reply.readInt32(); + return reply.readInt32(); + } + private: + void readVector(Parcel &reply, Vector &vector) const { + uint32_t size = reply.readInt32(); + vector.insertAt((size_t)0, size); + reply.read(vector.editArray(), size); + } + + void writeVector(Parcel &data, Vector const &vector) const { + data.writeInt32(vector.size()); + data.write(vector.array(), vector.size()); + } + DISALLOW_EVIL_CONSTRUCTORS(BpDrm); }; @@ -301,6 +403,17 @@ IMPLEMENT_META_INTERFACE(Drm, "android.drm.IDrm"); //////////////////////////////////////////////////////////////////////////////// +void BnDrm::readVector(const Parcel &data, Vector &vector) const { + uint32_t size = data.readInt32(); + vector.insertAt((size_t)0, size); + data.read(vector.editArray(), 
size); +} + +void BnDrm::writeVector(Parcel *reply, Vector const &vector) const { + reply->writeInt32(vector.size()); + reply->write(vector.array(), vector.size()); +} + status_t BnDrm::onTransact( uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) { switch (code) { @@ -341,8 +454,7 @@ status_t BnDrm::onTransact( CHECK_INTERFACE(IDrm, data, reply); Vector sessionId; status_t result = openSession(sessionId); - reply->writeInt32(sessionId.size()); - reply->write(sessionId.array(), sessionId.size()); + writeVector(reply, sessionId); reply->writeInt32(result); return OK; } @@ -351,28 +463,20 @@ status_t BnDrm::onTransact( { CHECK_INTERFACE(IDrm, data, reply); Vector sessionId; - uint32_t size = data.readInt32(); - sessionId.insertAt((size_t)0, size); - data.read(sessionId.editArray(), size); + readVector(data, sessionId); reply->writeInt32(closeSession(sessionId)); return OK; } - case GET_LICENSE_REQUEST: + case GET_KEY_REQUEST: { CHECK_INTERFACE(IDrm, data, reply); - Vector sessionId; - uint32_t size = data.readInt32(); - sessionId.insertAt((size_t)0, size); - data.read(sessionId.editArray(), size); - - Vector initData; - size = data.readInt32(); - initData.insertAt((size_t)0, size); - data.read(initData.editArray(), size); + Vector sessionId, initData; + readVector(data, sessionId); + readVector(data, initData); String8 mimeType = data.readString8(); - DrmPlugin::LicenseType licenseType = (DrmPlugin::LicenseType)data.readInt32(); + DrmPlugin::KeyType keyType = (DrmPlugin::KeyType)data.readInt32(); KeyedVector optionalParameters; uint32_t count = data.readInt32(); @@ -386,55 +490,54 @@ status_t BnDrm::onTransact( Vector request; String8 defaultUrl; - status_t result = getLicenseRequest(sessionId, initData, - mimeType, licenseType, - optionalParameters, - request, defaultUrl); - reply->writeInt32(request.size()); - reply->write(request.array(), request.size()); + status_t result = getKeyRequest(sessionId, initData, + mimeType, keyType, + optionalParameters, + request, defaultUrl); + writeVector(reply, request); reply->writeString8(defaultUrl); reply->writeInt32(result); return OK; } - case PROVIDE_LICENSE_RESPONSE: + case PROVIDE_KEY_RESPONSE: { CHECK_INTERFACE(IDrm, data, reply); - Vector sessionId; - uint32_t size = data.readInt32(); - sessionId.insertAt((size_t)0, size); - data.read(sessionId.editArray(), size); - Vector response; - size = data.readInt32(); - response.insertAt((size_t)0, size); - data.read(response.editArray(), size); + Vector sessionId, response, keySetId; + readVector(data, sessionId); + readVector(data, response); + uint32_t result = provideKeyResponse(sessionId, response, keySetId); + writeVector(reply, keySetId); + reply->writeInt32(result); + return OK; + } - reply->writeInt32(provideLicenseResponse(sessionId, response)); + case REMOVE_KEYS: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector keySetId; + readVector(data, keySetId); + reply->writeInt32(removeKeys(keySetId)); return OK; } - case REMOVE_LICENSE: + case RESTORE_KEYS: { CHECK_INTERFACE(IDrm, data, reply); - Vector sessionId; - uint32_t size = data.readInt32(); - sessionId.insertAt((size_t)0, size); - data.read(sessionId.editArray(), size); - reply->writeInt32(removeLicense(sessionId)); + Vector sessionId, keySetId; + readVector(data, sessionId); + readVector(data, keySetId); + reply->writeInt32(restoreKeys(sessionId, keySetId)); return OK; } - case QUERY_LICENSE_STATUS: + case QUERY_KEY_STATUS: { CHECK_INTERFACE(IDrm, data, reply); Vector sessionId; - uint32_t size = data.readInt32(); - 
sessionId.insertAt((size_t)0, size); - data.read(sessionId.editArray(), size); + readVector(data, sessionId); KeyedVector infoMap; - - status_t result = queryLicenseStatus(sessionId, infoMap); - + status_t result = queryKeyStatus(sessionId, infoMap); size_t count = infoMap.size(); reply->writeInt32(count); for (size_t i = 0; i < count; ++i) { @@ -451,8 +554,7 @@ status_t BnDrm::onTransact( Vector request; String8 defaultUrl; status_t result = getProvisionRequest(request, defaultUrl); - reply->writeInt32(request.size()); - reply->write(request.array(), request.size()); + writeVector(reply, request); reply->writeString8(defaultUrl); reply->writeInt32(result); return OK; @@ -462,11 +564,8 @@ status_t BnDrm::onTransact( { CHECK_INTERFACE(IDrm, data, reply); Vector response; - uint32_t size = data.readInt32(); - response.insertAt((size_t)0, size); - data.read(response.editArray(), size); + readVector(data, response); reply->writeInt32(provideProvisionResponse(response)); - return OK; } @@ -491,9 +590,7 @@ status_t BnDrm::onTransact( { CHECK_INTERFACE(IDrm, data, reply); Vector ssRelease; - uint32_t size = data.readInt32(); - ssRelease.insertAt((size_t)0, size); - data.read(ssRelease.editArray(), size); + readVector(data, ssRelease); reply->writeInt32(releaseSecureStops(ssRelease)); return OK; } @@ -515,8 +612,7 @@ status_t BnDrm::onTransact( String8 name = data.readString8(); Vector value; status_t result = getPropertyByteArray(name, value); - reply->writeInt32(value.size()); - reply->write(value.array(), value.size()); + writeVector(reply, value); reply->writeInt32(result); return OK; } @@ -535,15 +631,89 @@ status_t BnDrm::onTransact( CHECK_INTERFACE(IDrm, data, reply); String8 name = data.readString8(); Vector value; - size_t count = data.readInt32(); - value.insertAt((size_t)0, count); - data.read(value.editArray(), count); + readVector(data, value); reply->writeInt32(setPropertyByteArray(name, value)); return OK; } - default: - return BBinder::onTransact(code, data, reply, flags); + case SET_CIPHER_ALGORITHM: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector sessionId; + readVector(data, sessionId); + String8 algorithm = data.readString8(); + reply->writeInt32(setCipherAlgorithm(sessionId, algorithm)); + return OK; + } + + case SET_MAC_ALGORITHM: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector sessionId; + readVector(data, sessionId); + String8 algorithm = data.readString8(); + reply->writeInt32(setMacAlgorithm(sessionId, algorithm)); + return OK; + } + + case ENCRYPT: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector sessionId, keyId, input, iv, output; + readVector(data, sessionId); + readVector(data, keyId); + readVector(data, input); + readVector(data, iv); + uint32_t result = encrypt(sessionId, keyId, input, iv, output); + writeVector(reply, output); + reply->writeInt32(result); + return OK; + } + + case DECRYPT: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector sessionId, keyId, input, iv, output; + readVector(data, sessionId); + readVector(data, keyId); + readVector(data, input); + readVector(data, iv); + uint32_t result = decrypt(sessionId, keyId, input, iv, output); + writeVector(reply, output); + reply->writeInt32(result); + return OK; + } + + case SIGN: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector sessionId, keyId, message, signature; + readVector(data, sessionId); + readVector(data, keyId); + readVector(data, message); + uint32_t result = sign(sessionId, keyId, message, signature); + writeVector(reply, signature); + reply->writeInt32(result); + return OK; + } + + 
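+ // As in the cases above, out-parameters are written to the reply first and
+ // the status code last, matching the read order used by the BpDrm proxy.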
case VERIFY: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector sessionId, keyId, message, signature; + readVector(data, sessionId); + readVector(data, keyId); + readVector(data, message); + readVector(data, signature); + bool match; + uint32_t result = verify(sessionId, keyId, message, signature, match); + reply->writeInt32(match); + reply->writeInt32(result); + return OK; + } + + default: + return BBinder::onTransact(code, data, reply, flags); } } diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp index 6ac7530..5fdb9f4 100644 --- a/media/libmediaplayerservice/Drm.cpp +++ b/media/libmediaplayerservice/Drm.cpp @@ -243,11 +243,11 @@ status_t Drm::closeSession(Vector const &sessionId) { return mPlugin->closeSession(sessionId); } -status_t Drm::getLicenseRequest(Vector const &sessionId, - Vector const &initData, - String8 const &mimeType, DrmPlugin::LicenseType licenseType, - KeyedVector const &optionalParameters, - Vector &request, String8 &defaultUrl) { +status_t Drm::getKeyRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, DrmPlugin::KeyType keyType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl) { Mutex::Autolock autoLock(mLock); if (mInitCheck != OK) { @@ -258,12 +258,13 @@ status_t Drm::getLicenseRequest(Vector const &sessionId, return -EINVAL; } - return mPlugin->getLicenseRequest(sessionId, initData, mimeType, licenseType, - optionalParameters, request, defaultUrl); + return mPlugin->getKeyRequest(sessionId, initData, mimeType, keyType, + optionalParameters, request, defaultUrl); } -status_t Drm::provideLicenseResponse(Vector const &sessionId, - Vector const &response) { +status_t Drm::provideKeyResponse(Vector const &sessionId, + Vector const &response, + Vector &keySetId) { Mutex::Autolock autoLock(mLock); if (mInitCheck != OK) { @@ -274,10 +275,10 @@ status_t Drm::provideLicenseResponse(Vector const &sessionId, return -EINVAL; } - return mPlugin->provideLicenseResponse(sessionId, response); + return mPlugin->provideKeyResponse(sessionId, response, keySetId); } -status_t Drm::removeLicense(Vector const &sessionId) { +status_t Drm::removeKeys(Vector const &keySetId) { Mutex::Autolock autoLock(mLock); if (mInitCheck != OK) { @@ -288,11 +289,11 @@ status_t Drm::removeLicense(Vector const &sessionId) { return -EINVAL; } - return mPlugin->removeLicense(sessionId); + return mPlugin->removeKeys(keySetId); } -status_t Drm::queryLicenseStatus(Vector const &sessionId, - KeyedVector &infoMap) const { +status_t Drm::restoreKeys(Vector const &sessionId, + Vector const &keySetId) { Mutex::Autolock autoLock(mLock); if (mInitCheck != OK) { @@ -303,7 +304,22 @@ status_t Drm::queryLicenseStatus(Vector const &sessionId, return -EINVAL; } - return mPlugin->queryLicenseStatus(sessionId, infoMap); + return mPlugin->restoreKeys(sessionId, keySetId); +} + +status_t Drm::queryKeyStatus(Vector const &sessionId, + KeyedVector &infoMap) const { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->queryKeyStatus(sessionId, infoMap); } status_t Drm::getProvisionRequest(Vector &request, String8 &defaultUrl) { @@ -420,4 +436,106 @@ status_t Drm::setPropertyByteArray(String8 const &name, return mPlugin->setPropertyByteArray(name, value); } + +status_t Drm::setCipherAlgorithm(Vector const &sessionId, + String8 const &algorithm) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return 
mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->setCipherAlgorithm(sessionId, algorithm); +} + +status_t Drm::setMacAlgorithm(Vector const &sessionId, + String8 const &algorithm) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->setMacAlgorithm(sessionId, algorithm); +} + +status_t Drm::encrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->encrypt(sessionId, keyId, input, iv, output); +} + +status_t Drm::decrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->decrypt(sessionId, keyId, input, iv, output); +} + +status_t Drm::sign(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector &signature) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->sign(sessionId, keyId, message, signature); +} + +status_t Drm::verify(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector const &signature, + bool &match) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->verify(sessionId, keyId, message, signature, match); +} + } // namespace android diff --git a/media/libmediaplayerservice/Drm.h b/media/libmediaplayerservice/Drm.h index 1b10958..f24921e 100644 --- a/media/libmediaplayerservice/Drm.h +++ b/media/libmediaplayerservice/Drm.h @@ -45,19 +45,23 @@ struct Drm : public BnDrm { virtual status_t closeSession(Vector const &sessionId); virtual status_t - getLicenseRequest(Vector const &sessionId, - Vector const &initData, - String8 const &mimeType, DrmPlugin::LicenseType licenseType, - KeyedVector const &optionalParameters, - Vector &request, String8 &defaultUrl); + getKeyRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, DrmPlugin::KeyType keyType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl); - virtual status_t provideLicenseResponse(Vector const &sessionId, - Vector const &response); + virtual status_t provideKeyResponse(Vector const &sessionId, + Vector const &response, + Vector &keySetId); - virtual status_t removeLicense(Vector const &sessionId); + virtual status_t removeKeys(Vector const &keySetId); - virtual status_t queryLicenseStatus(Vector const &sessionId, - KeyedVector &infoMap) const; + virtual status_t restoreKeys(Vector const &sessionId, + Vector const &keySetId); + + virtual status_t queryKeyStatus(Vector const &sessionId, + KeyedVector &infoMap) const; virtual status_t getProvisionRequest(Vector &request, String8 &defaulUrl); @@ -75,6 +79,35 @@ struct Drm : public BnDrm { virtual status_t setPropertyByteArray(String8 const &name, Vector const &value ) const; + virtual status_t setCipherAlgorithm(Vector const &sessionId, + String8 const &algorithm); + + virtual status_t setMacAlgorithm(Vector const &sessionId, + String8 const &algorithm); + + virtual status_t encrypt(Vector 
const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output); + + virtual status_t decrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output); + + virtual status_t sign(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector &signature); + + virtual status_t verify(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector const &signature, + bool &match); + private: mutable Mutex mLock; -- cgit v1.1 From b7c9d61b9d398b272c8138a2c6aace069229ac03 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 2 Apr 2013 12:32:32 -0700 Subject: Camera3: Fix metadata result not being rewritten after capture Generally this problem manifested itself as the Frame Count not being set, which then errored out ProFrameProcessor with a "Success" error. Change-Id: Ifa927f43d0ce92100651ad3f714099a2f1ec4c2a --- services/camera/libcameraservice/Camera3Device.cpp | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index 6cf652c..f2c8c04 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -899,11 +899,19 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { // Push result metadata into queue mResultQueue.push_back(CameraMetadata()); - CameraMetadata &captureResult = *(mResultQueue.end()); + // Lets avoid copies! Too bad there's not a #back method + CameraMetadata &captureResult = *(--mResultQueue.end()); captureResult = result->result; - captureResult.update(ANDROID_REQUEST_FRAME_COUNT, - (int32_t*)&result->frame_number, 1); + if (captureResult.update(ANDROID_REQUEST_FRAME_COUNT, + (int32_t*)&result->frame_number, 1) != OK) { + ALOGE("%s: Camera %d: Failed to set frame# in metadata (%d)", + __FUNCTION__, mId, result->frame_number); + // TODO: Report error upstream + } else { + ALOGVV("%s: Camera %d: Set frame# in metadata (%d)", + __FUNCTION__, mId, result->frame_number); + } // Get timestamp from result metadata -- cgit v1.1 From e09f486085c7e7f3de329db73ad6bb9899740dbe Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 2 Apr 2013 16:36:33 -0700 Subject: Camera3: Fix output stream creation for JPEG streams Change-Id: Iadf85c103c21614abb1aeb69a832ca01919c2aa5 --- services/camera/libcameraservice/camera3/Camera3OutputStream.cpp | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp index d07ae94..276b940 100644 --- a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp @@ -57,7 +57,14 @@ Camera3OutputStream::Camera3OutputStream(int id, uint32_t width, uint32_t height, size_t maxSize, int format) : Camera3Stream(id, CAMERA3_STREAM_OUTPUT, width, height, maxSize, format), - mConsumer(consumer) { + mConsumer(consumer), + mTransform(0), + mTotalBufferCount(0), + mDequeuedBufferCount(0), + mFrameCount(0), + mLastTimestamp(0) { + + mCombinedFence = new Fence(); if (format != HAL_PIXEL_FORMAT_BLOB) { ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__, -- cgit v1.1 From 2a9c5cd47159c41051fe7b8366d8205a7d1d5296 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Tue, 2 Apr 2013 16:41:41 -0700 Subject: 
audioflinger: add effect config status check Check the result of the effect engine configuration command and do not attempt to send parameters to, enable, or process the effect if configuration fails. Bug 8512027 Change-Id: I8c78a05d79fba36b1a387aa5cf2700612301ac91 --- services/audioflinger/Effects.cpp | 63 +++++++++++++++++++++++++++++++-------- 1 file changed, 51 insertions(+), 12 deletions(-) diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp index 74ba59e..d66294c 100644 --- a/services/audioflinger/Effects.cpp +++ b/services/audioflinger/Effects.cpp @@ -225,12 +225,18 @@ void AudioFlinger::EffectModule::updateState() { 0, mConfig.inputCfg.buffer.frameCount*sizeof(int32_t)); } - start_l(); - mState = ACTIVE; + if (start_l() == NO_ERROR) { + mState = ACTIVE; + } else { + mState = IDLE; + } break; case STOPPING: - stop_l(); - mDisableWaitCnt = mMaxDisableWaitCnt; + if (stop_l() == NO_ERROR) { + mDisableWaitCnt = mMaxDisableWaitCnt; + } else { + mDisableWaitCnt = 1; // will cause immediate transition to IDLE + } mState = STOPPED; break; case STOPPED: @@ -297,7 +303,7 @@ void AudioFlinger::EffectModule::process() void AudioFlinger::EffectModule::reset_l() { - if (mEffectInterface == NULL) { + if (mStatus != NO_ERROR || mEffectInterface == NULL) { return; } (*mEffectInterface)->command(mEffectInterface, EFFECT_CMD_RESET, 0, NULL, 0, NULL); @@ -305,17 +311,24 @@ void AudioFlinger::EffectModule::reset_l() status_t AudioFlinger::EffectModule::configure() { + status_t status; + sp thread; + uint32_t size; + audio_channel_mask_t channelMask; + if (mEffectInterface == NULL) { - return NO_INIT; + status = NO_INIT; + goto exit; } - sp thread = mThread.promote(); + thread = mThread.promote(); if (thread == 0) { - return DEAD_OBJECT; + status = DEAD_OBJECT; + goto exit; } // TODO: handle configuration of effects replacing track process - audio_channel_mask_t channelMask = thread->channelMask(); + channelMask = thread->channelMask(); if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) { mConfig.inputCfg.channels = AUDIO_CHANNEL_OUT_MONO; @@ -357,8 +370,8 @@ status_t AudioFlinger::EffectModule::configure() this, thread.get(), mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount); status_t cmdStatus; - uint32_t size = sizeof(int); - status_t status = (*mEffectInterface)->command(mEffectInterface, + size = sizeof(int); + status = (*mEffectInterface)->command(mEffectInterface, EFFECT_CMD_SET_CONFIG, sizeof(effect_config_t), &mConfig, @@ -396,6 +409,8 @@ status_t AudioFlinger::EffectModule::configure() mMaxDisableWaitCnt = (MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate) / (1000 * mConfig.outputCfg.buffer.frameCount); +exit: + mStatus = status; return status; } @@ -430,6 +445,9 @@ status_t AudioFlinger::EffectModule::start_l() if (mEffectInterface == NULL) { return NO_INIT; } + if (mStatus != NO_ERROR) { + return mStatus; + } status_t cmdStatus; uint32_t size = sizeof(status_t); status_t status = (*mEffectInterface)->command(mEffectInterface, @@ -466,6 +484,9 @@ status_t AudioFlinger::EffectModule::stop_l() if (mEffectInterface == NULL) { return NO_INIT; } + if (mStatus != NO_ERROR) { + return mStatus; + } status_t cmdStatus; uint32_t size = sizeof(status_t); status_t status = (*mEffectInterface)->command(mEffectInterface, @@ -503,6 +524,9 @@ status_t AudioFlinger::EffectModule::command(uint32_t cmdCode, if (mState == DESTROYED || mEffectInterface == NULL) { return NO_INIT; } + if (mStatus != NO_ERROR) { + return mStatus; + } 
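+ // (mStatus is the cached result of the last configure(); the guard above keeps
+ // a misconfigured engine from receiving any further commands.)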
status_t status = (*mEffectInterface)->command(mEffectInterface, cmdCode, cmdSize, @@ -592,6 +616,10 @@ bool AudioFlinger::EffectModule::isEnabled() const bool AudioFlinger::EffectModule::isProcessEnabled() const { + if (mStatus != NO_ERROR) { + return false; + } + switch (mState) { case RESTART: case ACTIVE: @@ -609,8 +637,10 @@ bool AudioFlinger::EffectModule::isProcessEnabled() const status_t AudioFlinger::EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller) { Mutex::Autolock _l(mLock); + if (mStatus != NO_ERROR) { + return mStatus; + } status_t status = NO_ERROR; - // Send volume indication if EFFECT_FLAG_VOLUME_IND is set and read back altered volume // if controller flag is set (Note that controller == TRUE => EFFECT_FLAG_VOLUME_CTRL set) if (isProcessEnabled() && @@ -646,6 +676,9 @@ status_t AudioFlinger::EffectModule::setDevice(audio_devices_t device) } Mutex::Autolock _l(mLock); + if (mStatus != NO_ERROR) { + return mStatus; + } status_t status = NO_ERROR; if (device && (mDescriptor.flags & EFFECT_FLAG_DEVICE_MASK) == EFFECT_FLAG_DEVICE_IND) { status_t cmdStatus; @@ -665,6 +698,9 @@ status_t AudioFlinger::EffectModule::setDevice(audio_devices_t device) status_t AudioFlinger::EffectModule::setMode(audio_mode_t mode) { Mutex::Autolock _l(mLock); + if (mStatus != NO_ERROR) { + return mStatus; + } status_t status = NO_ERROR; if ((mDescriptor.flags & EFFECT_FLAG_AUDIO_MODE_MASK) == EFFECT_FLAG_AUDIO_MODE_IND) { status_t cmdStatus; @@ -685,6 +721,9 @@ status_t AudioFlinger::EffectModule::setMode(audio_mode_t mode) status_t AudioFlinger::EffectModule::setAudioSource(audio_source_t source) { Mutex::Autolock _l(mLock); + if (mStatus != NO_ERROR) { + return mStatus; + } status_t status = NO_ERROR; if ((mDescriptor.flags & EFFECT_FLAG_AUDIO_SOURCE_MASK) == EFFECT_FLAG_AUDIO_SOURCE_IND) { uint32_t size = 0; -- cgit v1.1 From c0d5f1f8405de861ed6f1725f26cd6601e7103ab Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Tue, 2 Apr 2013 13:08:05 -0700 Subject: Implement async event callout from drm plugin to Java app Change-Id: I007f147d693664e777b8758be2bb8a4c7ec0236b --- drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp | 26 +++++++- include/media/IDrm.h | 3 + include/media/IDrmClient.h | 48 ++++++++++++++ media/libmedia/Android.mk | 1 + media/libmedia/IDrm.cpp | 19 +++++- media/libmedia/IDrmClient.cpp | 81 +++++++++++++++++++++++ media/libmediaplayerservice/Drm.cpp | 40 ++++++++++- media/libmediaplayerservice/Drm.h | 14 +++- 8 files changed, 228 insertions(+), 4 deletions(-) create mode 100644 include/media/IDrmClient.h create mode 100644 media/libmedia/IDrmClient.cpp diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp index c34690b..00f6de3 100644 --- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp @@ -343,7 +343,31 @@ namespace android { Mutex::Autolock lock(mLock); ALOGD("MockDrmPlugin::setPropertyString(name=%s, value=%s)", name.string(), value.string()); - mStringProperties.add(name, value); + + if (name == "mock-send-event") { + unsigned code, extra; + sscanf(value.string(), "%d %d", &code, &extra); + DrmPlugin::EventType eventType = (DrmPlugin::EventType)code; + + Vector const *pSessionId = NULL; + ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-event-session-id")); + if (index >= 0) { + pSessionId = &mByteArrayProperties[index]; + } + + Vector const *pData = NULL; + index = mByteArrayProperties.indexOfKey(String8("mock-event-data")); 
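+ // If the test never set "mock-event-data", the key is absent and the event
+ // is sent without a payload (pData stays NULL).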
+ if (index >= 0) { + pData = &mByteArrayProperties[index]; + } + ALOGD("sending event from mock drm plugin: %d %d %s %s", + (int)code, extra, pSessionId ? vectorToString(*pSessionId) : "{}", + pData ? vectorToString(*pData) : "{}"); + + sendEvent(eventType, extra, pSessionId, pData); + } else { + mStringProperties.add(name, value); + } return OK; } diff --git a/include/media/IDrm.h b/include/media/IDrm.h index 15d0a75..d630c40 100644 --- a/include/media/IDrm.h +++ b/include/media/IDrm.h @@ -17,6 +17,7 @@ #include #include #include +#include #ifndef ANDROID_IDRM_H_ @@ -106,6 +107,8 @@ struct IDrm : public IInterface { Vector const &signature, bool &match) = 0; + virtual status_t setListener(const sp& listener) = 0; + private: DISALLOW_EVIL_CONSTRUCTORS(IDrm); }; diff --git a/include/media/IDrmClient.h b/include/media/IDrmClient.h new file mode 100644 index 0000000..3b2fc7c --- /dev/null +++ b/include/media/IDrmClient.h @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_IDRMCLIENT_H +#define ANDROID_IDRMCLIENT_H + +#include +#include +#include +#include + +namespace android { + +class IDrmClient: public IInterface +{ +public: + DECLARE_META_INTERFACE(DrmClient); + + virtual void notify(DrmPlugin::EventType eventType, int extra, const Parcel *obj) = 0; +}; + +// ---------------------------------------------------------------------------- + +class BnDrmClient: public BnInterface +{ +public: + virtual status_t onTransact(uint32_t code, + const Parcel& data, + Parcel* reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif // ANDROID_IDRMCLIENT_H diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index 1ada9c3..fbe71ad 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -20,6 +20,7 @@ LOCAL_SRC_FILES:= \ IAudioRecord.cpp \ ICrypto.cpp \ IDrm.cpp \ + IDrmClient.cpp \ IHDCP.cpp \ AudioRecord.cpp \ AudioSystem.cpp \ diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp index 1641b56..1578846 100644 --- a/media/libmedia/IDrm.cpp +++ b/media/libmedia/IDrm.cpp @@ -51,7 +51,8 @@ enum { ENCRYPT, DECRYPT, SIGN, - VERIFY + VERIFY, + SET_LISTENER }; struct BpDrm : public BpInterface { @@ -384,6 +385,14 @@ struct BpDrm : public BpInterface { return reply.readInt32(); } + virtual status_t setListener(const sp& listener) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + data.writeStrongBinder(listener->asBinder()); + remote()->transact(SET_LISTENER, data, &reply); + return reply.readInt32(); + } + private: void readVector(Parcel &reply, Vector &vector) const { uint32_t size = reply.readInt32(); @@ -712,6 +721,14 @@ status_t BnDrm::onTransact( return OK; } + case SET_LISTENER: { + CHECK_INTERFACE(IDrm, data, reply); + sp listener = + interface_cast(data.readStrongBinder()); + reply->writeInt32(setListener(listener)); + return NO_ERROR; + } break; + default: return BBinder::onTransact(code, data, reply, flags); 
} diff --git a/media/libmedia/IDrmClient.cpp b/media/libmedia/IDrmClient.cpp new file mode 100644 index 0000000..f50715e --- /dev/null +++ b/media/libmedia/IDrmClient.cpp @@ -0,0 +1,81 @@ +/* +** +** Copyright 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IDrmClient" +#include + +#include +#include +#include + +#include +#include + +namespace android { + +enum { + NOTIFY = IBinder::FIRST_CALL_TRANSACTION, +}; + +class BpDrmClient: public BpInterface +{ +public: + BpDrmClient(const sp& impl) + : BpInterface(impl) + { + } + + virtual void notify(DrmPlugin::EventType eventType, int extra, const Parcel *obj) + { + Parcel data, reply; + data.writeInterfaceToken(IDrmClient::getInterfaceDescriptor()); + data.writeInt32((int)eventType); + data.writeInt32(extra); + if (obj && obj->dataSize() > 0) { + data.appendFrom(const_cast(obj), 0, obj->dataSize()); + } + remote()->transact(NOTIFY, data, &reply, IBinder::FLAG_ONEWAY); + } +}; + +IMPLEMENT_META_INTERFACE(DrmClient, "android.media.IDrmClient"); + +// ---------------------------------------------------------------------- + +status_t BnDrmClient::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch (code) { + case NOTIFY: { + CHECK_INTERFACE(IDrmClient, data, reply); + int eventType = data.readInt32(); + int extra = data.readInt32(); + Parcel obj; + if (data.dataAvail() > 0) { + obj.appendFrom(const_cast(&data), data.dataPosition(), data.dataAvail()); + } + + notify((DrmPlugin::EventType)eventType, extra, &obj); + return NO_ERROR; + } break; + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +}; // namespace android diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp index 5fdb9f4..1e6cd94 100644 --- a/media/libmediaplayerservice/Drm.cpp +++ b/media/libmediaplayerservice/Drm.cpp @@ -47,6 +47,7 @@ static bool operator<(const Vector &lhs, const Vector &rhs) { Drm::Drm() : mInitCheck(NO_INIT), + mListener(NULL), mFactory(NULL), mPlugin(NULL) { } @@ -67,6 +68,41 @@ status_t Drm::initCheck() const { return mInitCheck; } +status_t Drm::setListener(const sp& listener) +{ + Mutex::Autolock lock(mEventLock); + mListener = listener; + return NO_ERROR; +} + +void Drm::sendEvent(DrmPlugin::EventType eventType, int extra, + Vector const *sessionId, + Vector const *data) +{ + mEventLock.lock(); + sp listener = mListener; + mEventLock.unlock(); + + if (listener != NULL) { + Parcel obj; + if (sessionId && sessionId->size()) { + obj.writeInt32(sessionId->size()); + obj.write(sessionId->array(), sessionId->size()); + } else { + obj.writeInt32(0); + } + + if (data && data->size()) { + obj.writeInt32(data->size()); + obj.write(data->array(), data->size()); + } else { + obj.writeInt32(0); + } + + Mutex::Autolock lock(mNotifyLock); + listener->notify(eventType, extra, &obj); + } +} /* * Search the plugins directory for a plugin that supports the scheme @@ -195,7 +231,9 @@ 
status_t Drm::createPlugin(const uint8_t uuid[16]) { return mInitCheck; } - return mFactory->createDrmPlugin(uuid, &mPlugin); + status_t result = mFactory->createDrmPlugin(uuid, &mPlugin); + mPlugin->setListener(this); + return result; } status_t Drm::destroyPlugin() { diff --git a/media/libmediaplayerservice/Drm.h b/media/libmediaplayerservice/Drm.h index f24921e..3da8ad4 100644 --- a/media/libmediaplayerservice/Drm.h +++ b/media/libmediaplayerservice/Drm.h @@ -21,6 +21,7 @@ #include "SharedLibrary.h" #include +#include #include namespace android { @@ -28,7 +29,7 @@ namespace android { struct DrmFactory; struct DrmPlugin; -struct Drm : public BnDrm { +struct Drm : public BnDrm, public DrmPluginListener { Drm(); virtual ~Drm(); @@ -108,10 +109,21 @@ struct Drm : public BnDrm { Vector const &signature, bool &match); + virtual status_t setListener(const sp& listener); + + virtual void sendEvent(DrmPlugin::EventType eventType, int extra, + Vector const *sessionId, + Vector const *data); + private: mutable Mutex mLock; status_t mInitCheck; + + sp mListener; + mutable Mutex mEventLock; + mutable Mutex mNotifyLock; + sp mLibrary; DrmFactory *mFactory; DrmPlugin *mPlugin; -- cgit v1.1 From a5761dcb5f9f017ca9b83ec868565d7ce54a92ac Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 4 Apr 2013 10:42:23 -0700 Subject: Fix a number of warnings in stagefright code. Change-Id: If3edd00d991851797aeccdfe795a4a405e3a2ea3 --- media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp | 2 +- media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp | 2 +- media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp | 3 ++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp index 7719435..5749733 100644 --- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp +++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp @@ -481,7 +481,7 @@ void SoftAACEncoder2::onQueueFilled(OMX_U32 portIndex) { void* inBuffer[] = { (unsigned char *)mInputFrame }; INT inBufferIds[] = { IN_AUDIO_DATA }; - INT inBufferSize[] = { numBytesPerInputFrame }; + INT inBufferSize[] = { (INT)numBytesPerInputFrame }; INT inBufferElSize[] = { sizeof(int16_t) }; AACENC_BufDesc inBufDesc; diff --git a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp index 07f8b4f..50b739c 100644 --- a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp +++ b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp @@ -257,7 +257,7 @@ OMX_ERRORTYPE SoftAMRNBEncoder::internalSetParameter( } if (pcmParams->nChannels != 1 - || pcmParams->nSamplingRate != kSampleRate) { + || pcmParams->nSamplingRate != (OMX_U32)kSampleRate) { return OMX_ErrorUndefined; } diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp index cc38dc3..e25637a 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp @@ -655,7 +655,8 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { vpx_codec_iter_t encoded_packet_iterator = NULL; const vpx_codec_cx_pkt_t* encoded_packet; - while (encoded_packet = vpx_codec_get_cx_data(mCodecContext, &encoded_packet_iterator)) { + while ((encoded_packet = vpx_codec_get_cx_data( + mCodecContext, &encoded_packet_iterator))) { if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) { outputBufferHeader->nTimeStamp = 
encoded_packet->data.frame.pts; outputBufferHeader->nFlags = 0; -- cgit v1.1 From 5340cef8c137e7002ff196f2b88b508675bd5b24 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 4 Apr 2013 10:48:50 -0700 Subject: Fix adaptive frame rate handling to be non-adaptive unless specified through adb shell setprop media.wfd.video-framerate adaptive Change-Id: I452576b62ad465680232b40464977e126616df18 --- media/libstagefright/wifi-display/source/PlaybackSession.cpp | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index 715d0b5..cacfcca 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -709,8 +709,11 @@ void WifiDisplaySource::PlaybackSession::onSinkFeedback(const sp &msg) Converter::GetInt32Property( "media.wfd.video-framerate", -1); - if (rateHz < 0.0) { - rateHz = repeaterSource->getFrameRate(); + char val[PROPERTY_VALUE_MAX]; + if (rateHz < 0.0 + && property_get("media.wfd.video-framerate", val, NULL) + && !strcasecmp("adaptive", val)) { + rateHz = repeaterSource->getFrameRate(); if (avgLatencyUs > 300000ll) { rateHz *= 0.9; -- cgit v1.1 From 013673916b6badf0b6dfb65e7165cef84241ede3 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 4 Apr 2013 10:50:18 -0700 Subject: Removed rtp timestamp patching hack in ANetworkSession, advertise differentiated service tags to help prioritize our traffic. Change-Id: Ibe42df64da272f4bb6b978e46a4ea2efdadfcbc7 --- .../wifi-display/ANetworkSession.cpp | 26 +++++++--------------- 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/media/libstagefright/wifi-display/ANetworkSession.cpp b/media/libstagefright/wifi-display/ANetworkSession.cpp index 88ca1cc..f074438 100644 --- a/media/libstagefright/wifi-display/ANetworkSession.cpp +++ b/media/libstagefright/wifi-display/ANetworkSession.cpp @@ -451,24 +451,6 @@ status_t ANetworkSession::Session::writeMore() { const Fragment &frag = *mOutFragments.begin(); const sp &datagram = frag.mBuffer; - uint8_t *data = datagram->data(); - if (data[0] == 0x80 && (data[1] & 0x7f) == 33) { - int64_t nowUs = ALooper::GetNowUs(); - - uint32_t prevRtpTime = U32_AT(&data[4]); - - // 90kHz time scale - uint32_t rtpTime = (nowUs * 9ll) / 100ll; - int32_t diffTime = (int32_t)rtpTime - (int32_t)prevRtpTime; - - ALOGV("correcting rtpTime by %.0f ms", diffTime / 90.0); - - data[4] = rtpTime >> 24; - data[5] = (rtpTime >> 16) & 0xff; - data[6] = (rtpTime >> 8) & 0xff; - data[7] = rtpTime & 0xff; - } - int n; do { n = send(mSocket, datagram->data(), datagram->size(), 0); @@ -874,6 +856,14 @@ status_t ANetworkSession::createClientOrServer( err = -errno; goto bail2; } + + int tos = 224; // VOICE + res = setsockopt(s, IPPROTO_IP, IP_TOS, &tos, sizeof(tos)); + + if (res < 0) { + err = -errno; + goto bail2; + } } err = MakeSocketNonBlocking(s); -- cgit v1.1 From 8f24c039fb3418c15f476988f12383b26c1201bc Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 4 Apr 2013 10:51:52 -0700 Subject: Reduce TimeSyncer frequency to once every 60 secs, added some logs to indicate when syncing intervals start and end. 
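For context, each sync batch gathers a number of request/response round trips and reduces them to a single clock-offset estimate. The snippet below is only an illustrative sketch of that idea, not the TimeSyncer implementation; every type and function name in it is invented for the example. It keeps the sample with the smallest round-trip time and applies the usual NTP-style offset formula.

#include <cstdint>
#include <vector>

struct RoundTrip {
    int64_t clientSendUs;   // T1: client clock when the request left
    int64_t serverRecvUs;   // T2: server clock when the request arrived
    int64_t serverSendUs;   // T3: server clock when the reply left
    int64_t clientRecvUs;   // T4: client clock when the reply arrived
};

// offset = ((T2 - T1) + (T3 - T4)) / 2, taken from the lowest-RTT sample,
// which is the one least distorted by queueing delay.
static int64_t estimateOffsetUs(const std::vector<RoundTrip> &batch) {
    int64_t bestRtt = INT64_MAX;
    int64_t bestOffsetUs = 0;
    for (const RoundTrip &s : batch) {
        int64_t rtt = (s.clientRecvUs - s.clientSendUs)
                - (s.serverSendUs - s.serverRecvUs);
        if (rtt < bestRtt) {
            bestRtt = rtt;
            bestOffsetUs = ((s.serverRecvUs - s.clientSendUs)
                    + (s.serverSendUs - s.clientRecvUs)) / 2;
        }
    }
    return bestOffsetUs;
}

Preferring the lowest-RTT sample discounts transient queueing delay, which is why collecting a whole batch of probes remains worthwhile even at a once-per-minute cadence.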
Change-Id: I3b2b997d6723ff592af7c31082c6020cc1eca433 --- media/libstagefright/wifi-display/TimeSyncer.cpp | 6 ++++++ media/libstagefright/wifi-display/TimeSyncer.h | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/media/libstagefright/wifi-display/TimeSyncer.cpp b/media/libstagefright/wifi-display/TimeSyncer.cpp index 64e182e..cb429bc 100644 --- a/media/libstagefright/wifi-display/TimeSyncer.cpp +++ b/media/libstagefright/wifi-display/TimeSyncer.cpp @@ -102,6 +102,10 @@ void TimeSyncer::onMessageReceived(const sp &msg) { case kWhatSendPacket: { + if (mHistory.size() == 0) { + ALOGI("starting batch"); + } + TimeInfo ti; memset(&ti, 0, sizeof(ti)); @@ -229,6 +233,8 @@ void TimeSyncer::onMessageReceived(const sp &msg) { } else { notifyOffset(); + ALOGI("batch done"); + mHistory.clear(); postSendPacket(kBatchDelayUs); } diff --git a/media/libstagefright/wifi-display/TimeSyncer.h b/media/libstagefright/wifi-display/TimeSyncer.h index 0e3aed7..4e7571f 100644 --- a/media/libstagefright/wifi-display/TimeSyncer.h +++ b/media/libstagefright/wifi-display/TimeSyncer.h @@ -75,7 +75,7 @@ private: kNumPacketsPerBatch = 30, }; static const int64_t kTimeoutDelayUs = 500000ll; - static const int64_t kBatchDelayUs = 10000000ll; // every 10 secs + static const int64_t kBatchDelayUs = 60000000ll; // every minute sp mNetSession; sp mNotify; -- cgit v1.1 From 30bf97b3665aa5a9024517697b8459d7662eb8b9 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 4 Apr 2013 10:53:34 -0700 Subject: Track max latency in WifiDisplaySink also unbreak non-special-mode by choosing a proper time offset based on arrival time of the first access unit even when no time synchronization is present. Change-Id: I133050afc6f70d4639ca45de68a31d5bc3594e96 --- .../wifi-display/sink/WifiDisplaySink.cpp | 21 ++++++++++++++++----- .../wifi-display/sink/WifiDisplaySink.h | 4 ++++ 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index 1a08bf5..5db2099 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -57,7 +57,8 @@ WifiDisplaySink::WifiDisplaySink( mSetupDeferred(false), mLatencyCount(0), mLatencySumUs(0ll), - mLatencyMaxUs(0ll) { + mLatencyMaxUs(0ll), + mMaxDelayMs(-1ll) { // We support any and all resolutions, but prefer 720p30 mSinkSupportedVideoFormats.setNativeResolution( VideoFormats::RESOLUTION_CEA, 5); // 1280 x 720 p30 @@ -296,9 +297,13 @@ void WifiDisplaySink::onMessageReceived(const sp &msg) { } } -static void dumpDelay(size_t trackIndex, int64_t timeUs) { +void WifiDisplaySink::dumpDelay(size_t trackIndex, int64_t timeUs) { int64_t delayMs = (ALooper::GetNowUs() - timeUs) / 1000ll; + if (delayMs > mMaxDelayMs) { + mMaxDelayMs = delayMs; + } + static const int64_t kMinDelayMs = 0; static const int64_t kMaxDelayMs = 300; @@ -314,9 +319,10 @@ static void dumpDelay(size_t trackIndex, int64_t timeUs) { n = kPatternSize; } - ALOGI("[%lld]: (%4lld ms) %s", + ALOGI("[%lld]: (%4lld ms / %4lld ms) %s", timeUs / 1000, delayMs, + mMaxDelayMs, kPattern + kPatternSize - n); } @@ -350,14 +356,19 @@ void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { looper()->registerHandler(mRenderer); } - CHECK(mTimeOffsetValid); - sp accessUnit; CHECK(msg->findBuffer("accessUnit", &accessUnit)); int64_t timeUs; CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + if (!mTimeOffsetValid && !(mFlags & 
FLAG_SPECIAL_MODE)) { + mTimeOffsetUs = timeUs - ALooper::GetNowUs(); + mTimeOffsetValid = true; + } + + CHECK(mTimeOffsetValid); + // We are the timesync _client_, // client time = server time - time offset. timeUs -= mTimeOffsetUs; diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h index 7c62057..adb9d89 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h @@ -132,6 +132,8 @@ private: int64_t mLatencySumUs; int64_t mLatencyMaxUs; + int64_t mMaxDelayMs; + status_t sendM2(int32_t sessionID); status_t sendSetup(int32_t sessionID, const char *uri); status_t sendPlay(int32_t sessionID, const char *uri); @@ -184,6 +186,8 @@ private: const char *url, AString *host, int32_t *port, AString *path, AString *user, AString *pass); + void dumpDelay(size_t trackIndex, int64_t timeUs); + DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySink); }; -- cgit v1.1 From 48c0addff1e943393272a5ed698d24afbf6b2471 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 4 Apr 2013 11:08:21 -0700 Subject: RTPTest updated to allow for UDP/TCP transport and abstracted where the data is coming from, also added time synchronization. Change-Id: Iecc2201a2bd17be06f16690a28261bef5b4e439c --- media/libstagefright/wifi-display/rtptest.cpp | 277 +++++++++++++++++++++----- 1 file changed, 228 insertions(+), 49 deletions(-) diff --git a/media/libstagefright/wifi-display/rtptest.cpp b/media/libstagefright/wifi-display/rtptest.cpp index eade832..cf5199d 100644 --- a/media/libstagefright/wifi-display/rtptest.cpp +++ b/media/libstagefright/wifi-display/rtptest.cpp @@ -21,6 +21,7 @@ #include "ANetworkSession.h" #include "rtp/RTPSender.h" #include "rtp/RTPReceiver.h" +#include "TimeSyncer.h" #include #include @@ -28,12 +29,115 @@ #include #include #include +#include #include #include #include +#include namespace android { +struct PacketSource : public RefBase { + PacketSource() {} + + virtual sp getNextAccessUnit() = 0; + +protected: + virtual ~PacketSource() {} + +private: + DISALLOW_EVIL_CONSTRUCTORS(PacketSource); +}; + +struct MediaPacketSource : public PacketSource { + MediaPacketSource() + : mMaxSampleSize(1024 * 1024) { + mExtractor = new NuMediaExtractor; + CHECK_EQ((status_t)OK, + mExtractor->setDataSource( + "/sdcard/Frame Counter HD 30FPS_1080p.mp4")); + + bool haveVideo = false; + for (size_t i = 0; i < mExtractor->countTracks(); ++i) { + sp format; + CHECK_EQ((status_t)OK, mExtractor->getTrackFormat(i, &format)); + + AString mime; + CHECK(format->findString("mime", &mime)); + + if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str())) { + mExtractor->selectTrack(i); + haveVideo = true; + break; + } + } + + CHECK(haveVideo); + } + + virtual sp getNextAccessUnit() { + int64_t timeUs; + status_t err = mExtractor->getSampleTime(&timeUs); + + if (err != OK) { + return NULL; + } + + sp accessUnit = new ABuffer(mMaxSampleSize); + CHECK_EQ((status_t)OK, mExtractor->readSampleData(accessUnit)); + + accessUnit->meta()->setInt64("timeUs", timeUs); + + CHECK_EQ((status_t)OK, mExtractor->advance()); + + return accessUnit; + } + +protected: + virtual ~MediaPacketSource() { + } + +private: + sp mExtractor; + size_t mMaxSampleSize; + + DISALLOW_EVIL_CONSTRUCTORS(MediaPacketSource); +}; + +struct SimplePacketSource : public PacketSource { + SimplePacketSource() + : mCounter(0) { + } + + virtual sp getNextAccessUnit() { + sp buffer = new ABuffer(4); + uint8_t *dst = buffer->data(); + dst[0] = 
mCounter >> 24; + dst[1] = (mCounter >> 16) & 0xff; + dst[2] = (mCounter >> 8) & 0xff; + dst[3] = mCounter & 0xff; + + buffer->meta()->setInt64("timeUs", mCounter * 1000000ll / kFrameRate); + + ++mCounter; + + return buffer; + } + +protected: + virtual ~SimplePacketSource() { + } + +private: + enum { + kFrameRate = 30 + }; + + uint32_t mCounter; + + DISALLOW_EVIL_CONSTRUCTORS(SimplePacketSource); +}; + struct TestHandler : public AHandler { TestHandler(const sp &netSession); @@ -52,18 +156,39 @@ private: kWhatSenderNotify, kWhatSendMore, kWhatStop, + kWhatTimeSyncerNotify, }; +#if 1 + static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_UDP; + static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_UDP; +#else + static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_TCP; + static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_NONE; +#endif + +#if 1 + static const RTPBase::PacketizationMode kPacketizationMode + = RTPBase::PACKETIZATION_H264; +#else + static const RTPBase::PacketizationMode kPacketizationMode + = RTPBase::PACKETIZATION_NONE; +#endif + sp mNetSession; - sp mExtractor; + sp mSource; sp mSender; sp mReceiver; - size_t mMaxSampleSize; + sp mTimeSyncer; + bool mTimeSyncerStarted; int64_t mFirstTimeRealUs; int64_t mFirstTimeMediaUs; + int64_t mTimeOffsetUs; + bool mTimeOffsetValid; + status_t readMore(); DISALLOW_EVIL_CONSTRUCTORS(TestHandler); @@ -71,9 +196,11 @@ private: TestHandler::TestHandler(const sp &netSession) : mNetSession(netSession), - mMaxSampleSize(1024 * 1024), + mTimeSyncerStarted(false), mFirstTimeRealUs(-1ll), - mFirstTimeMediaUs(-1ll) { + mFirstTimeMediaUs(-1ll), + mTimeOffsetUs(-1ll), + mTimeOffsetValid(false) { } TestHandler::~TestHandler() { @@ -91,23 +218,48 @@ void TestHandler::connect(const char *host, int32_t port) { msg->post(); } +static void dumpDelay(int64_t delayMs) { + static const int64_t kMinDelayMs = 0; + static const int64_t kMaxDelayMs = 300; + + const char *kPattern = "########################################"; + size_t kPatternSize = strlen(kPattern); + + int n = (kPatternSize * (delayMs - kMinDelayMs)) + / (kMaxDelayMs - kMinDelayMs); + + if (n < 0) { + n = 0; + } else if ((size_t)n > kPatternSize) { + n = kPatternSize; + } + + ALOGI("(%4lld ms) %s\n", + delayMs, + kPattern + kPatternSize - n); +} + void TestHandler::onMessageReceived(const sp &msg) { switch (msg->what()) { case kWhatListen: { - sp notify = new AMessage(kWhatReceiverNotify, id()); - mReceiver = new RTPReceiver(mNetSession, notify); + sp notify = new AMessage(kWhatTimeSyncerNotify, id()); + mTimeSyncer = new TimeSyncer(mNetSession, notify); + looper()->registerHandler(mTimeSyncer); + + notify = new AMessage(kWhatReceiverNotify, id()); + mReceiver = new RTPReceiver( + mNetSession, notify, RTPReceiver::FLAG_AUTO_CONNECT); looper()->registerHandler(mReceiver); CHECK_EQ((status_t)OK, - mReceiver->registerPacketType( - 33, RTPReceiver::PACKETIZATION_H264)); + mReceiver->registerPacketType(33, kPacketizationMode)); int32_t receiverRTPPort; CHECK_EQ((status_t)OK, mReceiver->initAsync( - RTPReceiver::TRANSPORT_UDP, // rtpMode - RTPReceiver::TRANSPORT_UDP, // rtcpMode + kRTPMode, + kRTCPMode, &receiverRTPPort)); printf("picked receiverRTPPort %d\n", receiverRTPPort); @@ -125,33 +277,23 @@ void TestHandler::onMessageReceived(const sp &msg) { AString host; CHECK(msg->findString("host", &host)); + sp notify = new AMessage(kWhatTimeSyncerNotify, id()); + mTimeSyncer = new TimeSyncer(mNetSession, notify); + 
looper()->registerHandler(mTimeSyncer); + mTimeSyncer->startServer(8123); + int32_t receiverRTPPort; CHECK(msg->findInt32("port", &receiverRTPPort)); - mExtractor = new NuMediaExtractor; - CHECK_EQ((status_t)OK, - mExtractor->setDataSource( - "/sdcard/Frame Counter HD 30FPS_1080p.mp4")); - - bool haveVideo = false; - for (size_t i = 0; i < mExtractor->countTracks(); ++i) { - sp format; - CHECK_EQ((status_t)OK, mExtractor->getTrackFormat(i, &format)); - - AString mime; - CHECK(format->findString("mime", &mime)); - - if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str())) { - mExtractor->selectTrack(i); - haveVideo = true; - break; - } - } - - CHECK(haveVideo); +#if 1 + mSource = new MediaPacketSource; +#else + mSource = new SimplePacketSource; +#endif - sp notify = new AMessage(kWhatSenderNotify, id()); + notify = new AMessage(kWhatSenderNotify, id()); mSender = new RTPSender(mNetSession, notify); + looper()->registerHandler(mSender); int32_t senderRTPPort; @@ -159,9 +301,10 @@ void TestHandler::onMessageReceived(const sp &msg) { mSender->initAsync( host.c_str(), receiverRTPPort, - RTPSender::TRANSPORT_UDP, // rtpMode - receiverRTPPort + 1, - RTPSender::TRANSPORT_UDP, // rtcpMode + kRTPMode, + kRTCPMode == RTPBase::TRANSPORT_NONE + ? -1 : receiverRTPPort + 1, + kRTCPMode, &senderRTPPort)); printf("picked senderRTPPort %d\n", senderRTPPort); @@ -201,7 +344,7 @@ void TestHandler::onMessageReceived(const sp &msg) { case kWhatReceiverNotify: { - ALOGI("kWhatReceiverNotify"); + ALOGV("kWhatReceiverNotify"); int32_t what; CHECK(msg->findInt32("what", &what)); @@ -216,8 +359,40 @@ void TestHandler::onMessageReceived(const sp &msg) { break; } - case RTPSender::kWhatError: + case RTPReceiver::kWhatError: break; + + case RTPReceiver::kWhatAccessUnit: + { +#if 0 + if (!mTimeSyncerStarted) { + mTimeSyncer->startClient("172.18.41.216", 8123); + mTimeSyncerStarted = true; + } + + sp accessUnit; + CHECK(msg->findBuffer("accessUnit", &accessUnit)); + + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + + if (mTimeOffsetValid) { + timeUs -= mTimeOffsetUs; + int64_t nowUs = ALooper::GetNowUs(); + int64_t delayMs = (nowUs - timeUs) / 1000ll; + + dumpDelay(delayMs); + } +#endif + break; + } + + case RTPReceiver::kWhatPacketLost: + ALOGV("kWhatPacketLost"); + break; + + default: + TRESPASS(); } break; } @@ -231,7 +406,7 @@ void TestHandler::onMessageReceived(const sp &msg) { mSender->queueBuffer( accessUnit, 33, - RTPSender::PACKETIZATION_H264)); + kPacketizationMode)); status_t err = readMore(); @@ -253,31 +428,33 @@ void TestHandler::onMessageReceived(const sp &msg) { mSender.clear(); } - mExtractor.clear(); + mSource.clear(); looper()->stop(); break; } + case kWhatTimeSyncerNotify: + { + CHECK(msg->findInt64("offset", &mTimeOffsetUs)); + mTimeOffsetValid = true; + break; + } + default: TRESPASS(); } } status_t TestHandler::readMore() { - int64_t timeUs; - status_t err = mExtractor->getSampleTime(&timeUs); + sp accessUnit = mSource->getNextAccessUnit(); - if (err != OK) { - return err; + if (accessUnit == NULL) { + return ERROR_END_OF_STREAM; } - sp accessUnit = new ABuffer(mMaxSampleSize); - CHECK_EQ((status_t)OK, mExtractor->readSampleData(accessUnit)); - - accessUnit->meta()->setInt64("timeUs", timeUs); - - CHECK_EQ((status_t)OK, mExtractor->advance()); + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); int64_t nowUs = ALooper::GetNowUs(); int64_t whenUs; @@ -289,6 +466,8 @@ status_t TestHandler::readMore() { whenUs = mFirstTimeRealUs + timeUs - 
mFirstTimeMediaUs; } + accessUnit->meta()->setInt64("timeUs", whenUs); + sp msg = new AMessage(kWhatSendMore, id()); msg->setBuffer("accessUnit", accessUnit); msg->post(whenUs - nowUs); -- cgit v1.1 From bd8319b282f39822d5f89bfb098c3317d2be6f03 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 4 Apr 2013 11:12:24 -0700 Subject: New nettest, a simple TCP connection test to measure latency between two devices exchanging low-bitrate data. Change-Id: Iac138fa70d2a69bbc86c517c953011c80bed071f --- media/libstagefright/wifi-display/Android.mk | 22 ++ media/libstagefright/wifi-display/nettest.cpp | 396 ++++++++++++++++++++++++++ 2 files changed, 418 insertions(+) create mode 100644 media/libstagefright/wifi-display/nettest.cpp diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index f1f9f45..1578c21 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -109,3 +109,25 @@ LOCAL_MODULE:= rtptest LOCAL_MODULE_TAGS := debug include $(BUILD_EXECUTABLE) + +################################################################################ + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + nettest.cpp \ + +LOCAL_SHARED_LIBRARIES:= \ + libbinder \ + libgui \ + libmedia \ + libstagefright \ + libstagefright_foundation \ + libstagefright_wfd \ + libutils \ + +LOCAL_MODULE:= nettest + +LOCAL_MODULE_TAGS := debug + +include $(BUILD_EXECUTABLE) diff --git a/media/libstagefright/wifi-display/nettest.cpp b/media/libstagefright/wifi-display/nettest.cpp new file mode 100644 index 0000000..130016d --- /dev/null +++ b/media/libstagefright/wifi-display/nettest.cpp @@ -0,0 +1,396 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NEBUG 0 +#define LOG_TAG "nettest" +#include + +#include "ANetworkSession.h" +#include "TimeSyncer.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { + +struct TestHandler : public AHandler { + TestHandler(const sp &netSession); + + void listen(int32_t port); + void connect(const char *host, int32_t port); + +protected: + virtual ~TestHandler(); + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatListen, + kWhatConnect, + kWhatTimeSyncerNotify, + kWhatNetNotify, + kWhatSendMore, + kWhatStop, + }; + + sp mNetSession; + sp mTimeSyncer; + + int32_t mServerSessionID; + int32_t mSessionID; + + int64_t mTimeOffsetUs; + bool mTimeOffsetValid; + + int32_t mCounter; + + int64_t mMaxDelayMs; + + void dumpDelay(int32_t counter, int64_t delayMs); + + DISALLOW_EVIL_CONSTRUCTORS(TestHandler); +}; + +TestHandler::TestHandler(const sp &netSession) + : mNetSession(netSession), + mServerSessionID(0), + mSessionID(0), + mTimeOffsetUs(-1ll), + mTimeOffsetValid(false), + mCounter(0), + mMaxDelayMs(-1ll) { +} + +TestHandler::~TestHandler() { +} + +void TestHandler::listen(int32_t port) { + sp msg = new AMessage(kWhatListen, id()); + msg->setInt32("port", port); + msg->post(); +} + +void TestHandler::connect(const char *host, int32_t port) { + sp msg = new AMessage(kWhatConnect, id()); + msg->setString("host", host); + msg->setInt32("port", port); + msg->post(); +} + +void TestHandler::dumpDelay(int32_t counter, int64_t delayMs) { + static const int64_t kMinDelayMs = 0; + static const int64_t kMaxDelayMs = 300; + + const char *kPattern = "########################################"; + size_t kPatternSize = strlen(kPattern); + + int n = (kPatternSize * (delayMs - kMinDelayMs)) + / (kMaxDelayMs - kMinDelayMs); + + if (n < 0) { + n = 0; + } else if ((size_t)n > kPatternSize) { + n = kPatternSize; + } + + if (delayMs > mMaxDelayMs) { + mMaxDelayMs = delayMs; + } + + ALOGI("[%d] (%4lld ms / %4lld ms) %s", + counter, + delayMs, + mMaxDelayMs, + kPattern + kPatternSize - n); +} + +void TestHandler::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatListen: + { + sp notify = new AMessage(kWhatTimeSyncerNotify, id()); + mTimeSyncer = new TimeSyncer(mNetSession, notify); + looper()->registerHandler(mTimeSyncer); + + notify = new AMessage(kWhatNetNotify, id()); + + int32_t port; + CHECK(msg->findInt32("port", &port)); + + struct in_addr ifaceAddr; + ifaceAddr.s_addr = INADDR_ANY; + + CHECK_EQ((status_t)OK, + mNetSession->createTCPDatagramSession( + ifaceAddr, + port, + notify, + &mServerSessionID)); + break; + } + + case kWhatConnect: + { + sp notify = new AMessage(kWhatTimeSyncerNotify, id()); + mTimeSyncer = new TimeSyncer(mNetSession, notify); + looper()->registerHandler(mTimeSyncer); + mTimeSyncer->startServer(8123); + + AString host; + CHECK(msg->findString("host", &host)); + + int32_t port; + CHECK(msg->findInt32("port", &port)); + + notify = new AMessage(kWhatNetNotify, id()); + + CHECK_EQ((status_t)OK, + mNetSession->createTCPDatagramSession( + 0 /* localPort */, + host.c_str(), + port, + notify, + &mSessionID)); + break; + } + + case kWhatNetNotify: + { + int32_t reason; + CHECK(msg->findInt32("reason", &reason)); + + switch (reason) { + case ANetworkSession::kWhatConnected: + { + ALOGI("kWhatConnected"); + + (new AMessage(kWhatSendMore, id()))->post(); + break; + } + + case ANetworkSession::kWhatClientConnected: + { + ALOGI("kWhatClientConnected"); + + 
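+ // Only one client is expected per run; a second incoming connection
+ // would make this CHECK abort.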
CHECK_EQ(mSessionID, 0); + CHECK(msg->findInt32("sessionID", &mSessionID)); + + AString clientIP; + CHECK(msg->findString("client-ip", &clientIP)); + + mTimeSyncer->startClient(clientIP.c_str(), 8123); + break; + } + + case ANetworkSession::kWhatDatagram: + { + sp packet; + CHECK(msg->findBuffer("data", &packet)); + + CHECK_EQ(packet->size(), 12u); + + int32_t counter = U32_AT(packet->data()); + int64_t timeUs = U64_AT(packet->data() + 4); + + if (mTimeOffsetValid) { + timeUs -= mTimeOffsetUs; + int64_t nowUs = ALooper::GetNowUs(); + int64_t delayMs = (nowUs - timeUs) / 1000ll; + + dumpDelay(counter, delayMs); + } else { + ALOGI("received %d", counter); + } + break; + } + + case ANetworkSession::kWhatError: + { + ALOGE("kWhatError"); + break; + } + + default: + TRESPASS(); + } + break; + } + + case kWhatTimeSyncerNotify: + { + CHECK(msg->findInt64("offset", &mTimeOffsetUs)); + mTimeOffsetValid = true; + break; + } + + case kWhatSendMore: + { + uint8_t buffer[4 + 8]; + buffer[0] = mCounter >> 24; + buffer[1] = (mCounter >> 16) & 0xff; + buffer[2] = (mCounter >> 8) & 0xff; + buffer[3] = mCounter & 0xff; + + int64_t nowUs = ALooper::GetNowUs(); + + buffer[4] = nowUs >> 56; + buffer[5] = (nowUs >> 48) & 0xff; + buffer[6] = (nowUs >> 40) & 0xff; + buffer[7] = (nowUs >> 32) & 0xff; + buffer[8] = (nowUs >> 24) & 0xff; + buffer[9] = (nowUs >> 16) & 0xff; + buffer[10] = (nowUs >> 8) & 0xff; + buffer[11] = nowUs & 0xff; + + ++mCounter; + + CHECK_EQ((status_t)OK, + mNetSession->sendRequest( + mSessionID, + buffer, + sizeof(buffer), + true /* timeValid */, + nowUs)); + + msg->post(100000ll); + break; + } + + case kWhatStop: + { + if (mSessionID != 0) { + mNetSession->destroySession(mSessionID); + mSessionID = 0; + } + + if (mServerSessionID != 0) { + mNetSession->destroySession(mServerSessionID); + mServerSessionID = 0; + } + + looper()->stop(); + break; + } + + default: + TRESPASS(); + } +} + +} // namespace android + +static void usage(const char *me) { + fprintf(stderr, + "usage: %s -c host:port\tconnect to remote host\n" + " -l port \tlisten\n", + me); +} + +int main(int argc, char **argv) { + using namespace android; + + // srand(time(NULL)); + + ProcessState::self()->startThreadPool(); + + DataSource::RegisterDefaultSniffers(); + + int32_t connectToPort = -1; + AString connectToHost; + + int32_t listenOnPort = -1; + + int res; + while ((res = getopt(argc, argv, "hc:l:")) >= 0) { + switch (res) { + case 'c': + { + const char *colonPos = strrchr(optarg, ':'); + + if (colonPos == NULL) { + usage(argv[0]); + exit(1); + } + + connectToHost.setTo(optarg, colonPos - optarg); + + char *end; + connectToPort = strtol(colonPos + 1, &end, 10); + + if (*end != '\0' || end == colonPos + 1 + || connectToPort < 1 || connectToPort > 65535) { + fprintf(stderr, "Illegal port specified.\n"); + exit(1); + } + break; + } + + case 'l': + { + char *end; + listenOnPort = strtol(optarg, &end, 10); + + if (*end != '\0' || end == optarg + 1 + || listenOnPort < 1 || listenOnPort > 65535) { + fprintf(stderr, "Illegal port specified.\n"); + exit(1); + } + break; + } + + case '?': + case 'h': + usage(argv[0]); + exit(1); + } + } + + if ((listenOnPort < 0 && connectToPort < 0) + || (listenOnPort >= 0 && connectToPort >= 0)) { + fprintf(stderr, + "You need to select either client or server mode.\n"); + exit(1); + } + + sp netSession = new ANetworkSession; + netSession->start(); + + sp looper = new ALooper; + + sp handler = new TestHandler(netSession); + looper->registerHandler(handler); + + if (listenOnPort) { + 
handler->listen(listenOnPort); + } + + if (connectToPort >= 0) { + handler->connect(connectToHost.c_str(), connectToPort); + } + + looper->start(true /* runOnCallingThread */); + + return 0; +} -- cgit v1.1 From 8f1f6a4814403dd78539250c845f8326f6137a61 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 4 Apr 2013 11:16:39 -0700 Subject: Support "raw" packetization in RTPSender. Change-Id: I14d59573ee0f57eccc104fea0fb46377476d213d --- .../wifi-display/rtp/RTPAssembler.cpp | 1 - media/libstagefright/wifi-display/rtp/RTPBase.h | 1 + .../libstagefright/wifi-display/rtp/RTPSender.cpp | 44 ++++++++++++++++++++++ media/libstagefright/wifi-display/rtp/RTPSender.h | 1 + 4 files changed, 46 insertions(+), 1 deletion(-) diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp index 5f189e7..7a96081 100644 --- a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp @@ -40,7 +40,6 @@ void RTPReceiver::Assembler::postAccessUnit( notify->setInt32("followsDiscontinuity", followsDiscontinuity); notify->post(); } - //////////////////////////////////////////////////////////////////////////////// RTPReceiver::TSAssembler::TSAssembler(const sp ¬ify) diff --git a/media/libstagefright/wifi-display/rtp/RTPBase.h b/media/libstagefright/wifi-display/rtp/RTPBase.h index e3fa845..6178f00 100644 --- a/media/libstagefright/wifi-display/rtp/RTPBase.h +++ b/media/libstagefright/wifi-display/rtp/RTPBase.h @@ -25,6 +25,7 @@ struct RTPBase { PACKETIZATION_TRANSPORT_STREAM, PACKETIZATION_H264, PACKETIZATION_AAC, + PACKETIZATION_NONE, }; enum TransportMode { diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp index 9eeeabd..ed5a50e 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -187,6 +187,10 @@ status_t RTPSender::queueBuffer( status_t err; switch (mode) { + case PACKETIZATION_NONE: + err = queueRawPacket(buffer, packetType); + break; + case PACKETIZATION_TRANSPORT_STREAM: err = queueTSPackets(buffer, packetType); break; @@ -202,6 +206,46 @@ status_t RTPSender::queueBuffer( return err; } +status_t RTPSender::queueRawPacket( + const sp &packet, uint8_t packetType) { + CHECK_LE(packet->size(), kMaxUDPPacketSize - 12); + + int64_t timeUs; + CHECK(packet->meta()->findInt64("timeUs", &timeUs)); + + sp udpPacket = new ABuffer(12 + packet->size()); + + udpPacket->setInt32Data(mRTPSeqNo); + + uint8_t *rtp = udpPacket->data(); + rtp[0] = 0x80; + rtp[1] = packetType; + + rtp[2] = (mRTPSeqNo >> 8) & 0xff; + rtp[3] = mRTPSeqNo & 0xff; + ++mRTPSeqNo; + + uint32_t rtpTime = (timeUs * 9) / 100ll; + + rtp[4] = rtpTime >> 24; + rtp[5] = (rtpTime >> 16) & 0xff; + rtp[6] = (rtpTime >> 8) & 0xff; + rtp[7] = rtpTime & 0xff; + + rtp[8] = kSourceID >> 24; + rtp[9] = (kSourceID >> 16) & 0xff; + rtp[10] = (kSourceID >> 8) & 0xff; + rtp[11] = kSourceID & 0xff; + + memcpy(&rtp[12], packet->data(), packet->size()); + + return sendRTPPacket( + udpPacket, + true /* storeInHistory */, + true /* timeValid */, + timeUs); +} + status_t RTPSender::queueTSPackets( const sp &tsPackets, uint8_t packetType) { CHECK_EQ(0, tsPackets->size() % 188); diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h index 3a926ea..fefcab7 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.h +++ 
b/media/libstagefright/wifi-display/rtp/RTPSender.h @@ -94,6 +94,7 @@ private: static uint64_t GetNowNTP(); + status_t queueRawPacket(const sp &tsPackets, uint8_t packetType); status_t queueTSPackets(const sp &tsPackets, uint8_t packetType); status_t queueAVCBuffer(const sp &accessUnit, uint8_t packetType); -- cgit v1.1 From 2be6121a47d3df2a0efcb73afd214f2958eb9927 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 4 Apr 2013 11:17:05 -0700 Subject: RTPReceiver can now track packet loss, account for late arrivals it also uses timers to trigger retransmission and packet loss declaration Change-Id: If1f9324783b3bef950076c2edf321f7c33ff9fea --- .../wifi-display/rtp/RTPReceiver.cpp | 272 ++++++++++++++++----- .../libstagefright/wifi-display/rtp/RTPReceiver.h | 8 +- 2 files changed, 215 insertions(+), 65 deletions(-) diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp index 238fb82..8fa1dae 100644 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp @@ -30,11 +30,13 @@ #include #include +#define TRACK_PACKET_LOSS 0 + namespace android { //////////////////////////////////////////////////////////////////////////////// -struct RTPReceiver::Source : public RefBase { +struct RTPReceiver::Source : public AHandler { Source(RTPReceiver *receiver, uint32_t ssrc); void onPacketReceived(uint16_t seq, const sp &buffer); @@ -44,7 +46,14 @@ struct RTPReceiver::Source : public RefBase { protected: virtual ~Source(); + virtual void onMessageReceived(const sp &msg); + private: + enum { + kWhatRetransmit, + kWhatDeclareLost, + }; + static const uint32_t kMinSequential = 2; static const uint32_t kMaxDropout = 3000; static const uint32_t kMaxMisorder = 100; @@ -67,6 +76,17 @@ private: // Ordered by extended seq number. 
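
As a reference for the RTPSender change above, queueRawPacket() prepends the standard 12-byte RTP fixed header: version 2 (0x80), the payload type, a 16-bit sequence number, a 32-bit timestamp on the 90 kHz RTP clock (timeUs * 9 / 100, i.e. timeUs * 90000 / 1e6), and the 32-bit SSRC. A hedged standalone sketch; the ssrc parameter stands in for the kSourceID constant used in the file:

    #include <cstdint>

    // Sketch: the 12-byte RTP fixed header written for "raw" packetization.
    static void writeRtpHeader(uint8_t rtp[12], uint8_t packetType,
                               uint16_t seqNo, int64_t timeUs, uint32_t ssrc) {
        rtp[0] = 0x80;                      // V=2, no padding/extension/CSRCs
        rtp[1] = packetType;                // payload type (marker bit, if any, set by caller)

        rtp[2] = (uint8_t)((seqNo >> 8) & 0xff);
        rtp[3] = (uint8_t)(seqNo & 0xff);

        // Microseconds -> 90 kHz RTP clock: rtpTime = timeUs * 9 / 100.
        uint32_t rtpTime = (uint32_t)((timeUs * 9) / 100);
        rtp[4] = (uint8_t)(rtpTime >> 24);
        rtp[5] = (uint8_t)((rtpTime >> 16) & 0xff);
        rtp[6] = (uint8_t)((rtpTime >> 8) & 0xff);
        rtp[7] = (uint8_t)(rtpTime & 0xff);

        rtp[8] = (uint8_t)(ssrc >> 24);
        rtp[9] = (uint8_t)((ssrc >> 16) & 0xff);
        rtp[10] = (uint8_t)((ssrc >> 8) & 0xff);
        rtp[11] = (uint8_t)(ssrc & 0xff);
    }
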
List > mPackets; + enum StatusBits { + STATUS_DECLARED_LOST = 1, + STATUS_REQUESTED_RETRANSMISSION = 2, + STATUS_ARRIVED_LATE = 4, + }; +#if TRACK_PACKET_LOSS + KeyedVector mLostPackets; +#endif + + void modifyPacketStatus(int32_t extSeqNo, uint32_t mask); + int32_t mAwaitingExtSeqNo; bool mRequestedRetransmission; @@ -78,12 +98,20 @@ private: int32_t mNumDeclaredLost; int32_t mNumDeclaredLostPrior; + int32_t mRetransmitGeneration; + int32_t mDeclareLostGeneration; + bool mDeclareLostTimerPending; + void queuePacket(const sp &packet); void dequeueMore(); sp getNextPacket(); void resync(); + void postRetransmitTimer(int64_t delayUs); + void postDeclareLostTimer(int64_t delayUs); + void cancelTimers(); + DISALLOW_EVIL_CONSTRUCTORS(Source); }; @@ -106,12 +134,71 @@ RTPReceiver::Source::Source(RTPReceiver *receiver, uint32_t ssrc) mActivePacketType(-1), mNextReportTimeUs(-1ll), mNumDeclaredLost(0), - mNumDeclaredLostPrior(0) { + mNumDeclaredLostPrior(0), + mRetransmitGeneration(0), + mDeclareLostGeneration(0), + mDeclareLostTimerPending(false) { } RTPReceiver::Source::~Source() { } +void RTPReceiver::Source::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatRetransmit: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mRetransmitGeneration) { + break; + } + + mRequestedRetransmission = true; + mReceiver->requestRetransmission(mSSRC, mAwaitingExtSeqNo); + + modifyPacketStatus( + mAwaitingExtSeqNo, STATUS_REQUESTED_RETRANSMISSION); + break; + } + + case kWhatDeclareLost: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mDeclareLostGeneration) { + break; + } + + cancelTimers(); + + ALOGV("Lost packet extSeqNo %d %s", + mAwaitingExtSeqNo, + mRequestedRetransmission ? "*" : ""); + + mRequestedRetransmission = false; + if (mActiveAssembler != NULL) { + mActiveAssembler->signalDiscontinuity(); + } + + modifyPacketStatus(mAwaitingExtSeqNo, STATUS_DECLARED_LOST); + + // resync(); + ++mAwaitingExtSeqNo; + ++mNumDeclaredLost; + + mReceiver->notifyPacketLost(); + + dequeueMore(); + break; + } + + default: + TRESPASS(); + } +} + void RTPReceiver::Source::onPacketReceived( uint16_t seq, const sp &buffer) { if (mFirst) { @@ -164,6 +251,8 @@ void RTPReceiver::Source::queuePacket(const sp &packet) { if (mAwaitingExtSeqNo >= 0 && newExtendedSeqNo < mAwaitingExtSeqNo) { // We're no longer interested in these. They're old. 
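
The retransmit and declare-lost timers introduced in this commit are cancelled by bumping a generation counter rather than by unposting messages: each posted AMessage carries the generation that was current when it was scheduled, and a stale message is simply ignored when it fires. A generic sketch of the idiom (this is the pattern only, not the class from this file):

    #include <cstdint>
    #include <cstdio>

    struct TimerOwner {
        int32_t mGeneration = 0;

        // In the real code, scheduling posts an AMessage carrying mGeneration.
        int32_t schedule() { return mGeneration; }

        // Cancelling never touches the message queue; it invalidates older timers.
        void cancel() { ++mGeneration; }

        void onTimerFired(int32_t generation) {
            if (generation != mGeneration) {
                return;                 // stale timer, dropped silently
            }
            printf("timer fired and is still current\n");
        }
    };
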
ALOGV("dropping stale extSeqNo %d", newExtendedSeqNo); + + modifyPacketStatus(newExtendedSeqNo, STATUS_ARRIVED_LATE); return; } @@ -230,85 +319,89 @@ void RTPReceiver::Source::dequeueMore() { } mNextReportTimeUs = nowUs + kReportIntervalUs; - } - for (;;) { - sp packet = getNextPacket(); +#if TRACK_PACKET_LOSS + for (size_t i = 0; i < mLostPackets.size(); ++i) { + int32_t key = mLostPackets.keyAt(i); + uint32_t value = mLostPackets.valueAt(i); - if (packet == NULL) { - if (mPackets.empty()) { - break; + AString status; + if (value & STATUS_REQUESTED_RETRANSMISSION) { + status.append("retrans "); + } + if (value & STATUS_ARRIVED_LATE) { + status.append("arrived-late "); } + ALOGI("Packet %d declared lost %s", key, status.c_str()); + } +#endif + } + + sp packet; + while ((packet = getNextPacket()) != NULL) { + if (mDeclareLostTimerPending) { + cancelTimers(); + } + + CHECK_GE(mAwaitingExtSeqNo, 0); +#if TRACK_PACKET_LOSS + mLostPackets.removeItem(mAwaitingExtSeqNo); +#endif - CHECK_GE(mAwaitingExtSeqNo, 0); + int32_t packetType; + CHECK(packet->meta()->findInt32("PT", &packetType)); - const sp &firstPacket = *mPackets.begin(); + if (packetType != mActivePacketType) { + mActiveAssembler = mReceiver->makeAssembler(packetType); + mActivePacketType = packetType; + } - uint32_t rtpTime; - CHECK(firstPacket->meta()->findInt32( - "rtp-time", (int32_t *)&rtpTime)); + if (mActiveAssembler != NULL) { + status_t err = mActiveAssembler->processPacket(packet); + if (err != OK) { + ALOGV("assembler returned error %d", err); + } + } + ++mAwaitingExtSeqNo; + } - int64_t rtpUs = (rtpTime * 100ll) / 9ll; + if (mDeclareLostTimerPending) { + return; + } - int64_t maxArrivalTimeUs = - mFirstArrivalTimeUs + rtpUs - mFirstRTPTimeUs; + if (mPackets.empty()) { + return; + } - int64_t nowUs = ALooper::GetNowUs(); + CHECK_GE(mAwaitingExtSeqNo, 0); - CHECK_LT(mAwaitingExtSeqNo, firstPacket->int32Data()); + const sp &firstPacket = *mPackets.begin(); - ALOGV("waiting for %d, comparing against %d, %lld us left", - mAwaitingExtSeqNo, - firstPacket->int32Data(), - maxArrivalTimeUs - nowUs); + uint32_t rtpTime; + CHECK(firstPacket->meta()->findInt32( + "rtp-time", (int32_t *)&rtpTime)); - if (maxArrivalTimeUs + kPacketLostAfterUs <= nowUs) { - ALOGV("Lost packet extSeqNo %d %s", - mAwaitingExtSeqNo, - mRequestedRetransmission ? 
"*" : ""); - mRequestedRetransmission = false; - if (mActiveAssembler != NULL) { - mActiveAssembler->signalDiscontinuity(); - } + int64_t rtpUs = (rtpTime * 100ll) / 9ll; - // resync(); - ++mAwaitingExtSeqNo; - ++mNumDeclaredLost; - - mReceiver->notifyPacketLost(); - continue; - } else if (kRequestRetransmissionAfterUs > 0 - && maxArrivalTimeUs + kRequestRetransmissionAfterUs <= nowUs - && !mRequestedRetransmission - && mAwaitingExtSeqNo >= 0) { - mRequestedRetransmission = true; - mReceiver->requestRetransmission(mSSRC, mAwaitingExtSeqNo); - break; - } else { - break; - } - } + int64_t maxArrivalTimeUs = + mFirstArrivalTimeUs + rtpUs - mFirstRTPTimeUs; - mRequestedRetransmission = false; + nowUs = ALooper::GetNowUs(); - int32_t packetType; - CHECK(packet->meta()->findInt32("PT", &packetType)); + CHECK_LT(mAwaitingExtSeqNo, firstPacket->int32Data()); - if (packetType != mActivePacketType) { - mActiveAssembler = mReceiver->makeAssembler(packetType); - mActivePacketType = packetType; - } + ALOGV("waiting for %d, comparing against %d, %lld us left", + mAwaitingExtSeqNo, + firstPacket->int32Data(), + maxArrivalTimeUs - nowUs); - if (mActiveAssembler == NULL) { - continue; - } + postDeclareLostTimer(maxArrivalTimeUs + kPacketLostAfterUs); - status_t err = mActiveAssembler->processPacket(packet); - if (err != OK) { - ALOGV("assembler returned error %d", err); - } + if (kRequestRetransmissionAfterUs > 0ll) { + postRetransmitTimer( + maxArrivalTimeUs + kRequestRetransmissionAfterUs); } } @@ -328,8 +421,6 @@ sp RTPReceiver::Source::getNextPacket() { sp packet = *mPackets.begin(); mPackets.erase(mPackets.begin()); - ++mAwaitingExtSeqNo; - return packet; } @@ -404,9 +495,11 @@ void RTPReceiver::Source::addReportBlock( RTPReceiver::RTPReceiver( const sp &netSession, - const sp ¬ify) + const sp ¬ify, + uint32_t flags) : mNetSession(netSession), mNotify(notify), + mFlags(flags), mRTPMode(TRANSPORT_UNDEFINED), mRTCPMode(TRANSPORT_UNDEFINED), mRTPSessionID(0), @@ -693,6 +786,20 @@ void RTPReceiver::onNetNotify(bool isRTP, const sp &msg) { CHECK(msg->findBuffer("data", &data)); if (isRTP) { + if (mFlags & FLAG_AUTO_CONNECT) { + AString fromAddr; + CHECK(msg->findString("fromAddr", &fromAddr)); + + int32_t fromPort; + CHECK(msg->findInt32("fromPort", &fromPort)); + + CHECK_EQ((status_t)OK, + connect( + fromAddr.c_str(), fromPort, fromPort + 1)); + + mFlags &= ~FLAG_AUTO_CONNECT; + } + onRTPData(data); } else { onRTCPData(data); @@ -835,6 +942,8 @@ status_t RTPReceiver::onRTPData(const sp &buffer) { sp source; if (index < 0) { source = new Source(this, srcId); + looper()->registerHandler(source); + mSources.add(srcId, source); } else { source = mSources.valueAt(index); @@ -965,6 +1074,7 @@ sp RTPReceiver::makeAssembler(uint8_t packetType) { PacketizationMode mode = mPacketTypes.valueAt(index); switch (mode) { + case PACKETIZATION_NONE: case PACKETIZATION_TRANSPORT_STREAM: return new TSAssembler(mNotify); @@ -1005,5 +1115,39 @@ void RTPReceiver::requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo) { mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); } +void RTPReceiver::Source::modifyPacketStatus(int32_t extSeqNo, uint32_t mask) { +#if TRACK_PACKET_LOSS + ssize_t index = mLostPackets.indexOfKey(extSeqNo); + if (index < 0) { + mLostPackets.add(extSeqNo, mask); + } else { + mLostPackets.editValueAt(index) |= mask; + } +#endif +} + +void RTPReceiver::Source::postRetransmitTimer(int64_t timeUs) { + int64_t delayUs = timeUs - ALooper::GetNowUs(); + sp msg = new AMessage(kWhatRetransmit, 
id()); + msg->setInt32("generation", mRetransmitGeneration); + msg->post(delayUs); +} + +void RTPReceiver::Source::postDeclareLostTimer(int64_t timeUs) { + CHECK(!mDeclareLostTimerPending); + mDeclareLostTimerPending = true; + + int64_t delayUs = timeUs - ALooper::GetNowUs(); + sp msg = new AMessage(kWhatDeclareLost, id()); + msg->setInt32("generation", mDeclareLostGeneration); + msg->post(delayUs); +} + +void RTPReceiver::Source::cancelTimers() { + ++mRetransmitGeneration; + ++mDeclareLostGeneration; + mDeclareLostTimerPending = false; +} + } // namespace android diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.h b/media/libstagefright/wifi-display/rtp/RTPReceiver.h index 630bce9..240ab2e 100644 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.h +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.h @@ -39,9 +39,14 @@ struct RTPReceiver : public RTPBase, public AHandler { kWhatAccessUnit, kWhatPacketLost, }; + + enum Flags { + FLAG_AUTO_CONNECT = 1, + }; RTPReceiver( const sp &netSession, - const sp ¬ify); + const sp ¬ify, + uint32_t flags = 0); status_t registerPacketType( uint8_t packetType, PacketizationMode mode); @@ -82,6 +87,7 @@ private: sp mNetSession; sp mNotify; + uint32_t mFlags; TransportMode mRTPMode; TransportMode mRTCPMode; int32_t mRTPSessionID; -- cgit v1.1 From 4eac4e624f6930966d208d8e1ee99eefee077b50 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 4 Apr 2013 13:03:03 -0700 Subject: Fix previous changes. Change-Id: I1cd3803b6507156174591c3252f1d89ef2e6140a --- media/libstagefright/wifi-display/nettest.cpp | 14 +++++++++----- media/libstagefright/wifi-display/rtp/RTPSender.cpp | 2 +- media/libstagefright/wifi-display/rtptest.cpp | 5 +++-- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/media/libstagefright/wifi-display/nettest.cpp b/media/libstagefright/wifi-display/nettest.cpp index 130016d..0779bf5 100644 --- a/media/libstagefright/wifi-display/nettest.cpp +++ b/media/libstagefright/wifi-display/nettest.cpp @@ -47,6 +47,10 @@ protected: private: enum { + kTimeSyncerPort = 8123, + }; + + enum { kWhatListen, kWhatConnect, kWhatTimeSyncerNotify, @@ -156,7 +160,7 @@ void TestHandler::onMessageReceived(const sp &msg) { sp notify = new AMessage(kWhatTimeSyncerNotify, id()); mTimeSyncer = new TimeSyncer(mNetSession, notify); looper()->registerHandler(mTimeSyncer); - mTimeSyncer->startServer(8123); + mTimeSyncer->startServer(kTimeSyncerPort); AString host; CHECK(msg->findString("host", &host)); @@ -200,7 +204,7 @@ void TestHandler::onMessageReceived(const sp &msg) { AString clientIP; CHECK(msg->findString("client-ip", &clientIP)); - mTimeSyncer->startClient(clientIP.c_str(), 8123); + mTimeSyncer->startClient(clientIP.c_str(), kTimeSyncerPort); break; } @@ -340,7 +344,7 @@ int main(int argc, char **argv) { connectToPort = strtol(colonPos + 1, &end, 10); if (*end != '\0' || end == colonPos + 1 - || connectToPort < 1 || connectToPort > 65535) { + || connectToPort < 0 || connectToPort > 65535) { fprintf(stderr, "Illegal port specified.\n"); exit(1); } @@ -352,8 +356,8 @@ int main(int argc, char **argv) { char *end; listenOnPort = strtol(optarg, &end, 10); - if (*end != '\0' || end == optarg + 1 - || listenOnPort < 1 || listenOnPort > 65535) { + if (*end != '\0' || end == optarg + || listenOnPort < 0 || listenOnPort > 65535) { fprintf(stderr, "Illegal port specified.\n"); exit(1); } diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp index ed5a50e..6bbe650 100644 --- 
a/media/libstagefright/wifi-display/rtp/RTPSender.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -243,7 +243,7 @@ status_t RTPSender::queueRawPacket( udpPacket, true /* storeInHistory */, true /* timeValid */, - timeUs); + ALooper::GetNowUs()); } status_t RTPSender::queueTSPackets( diff --git a/media/libstagefright/wifi-display/rtptest.cpp b/media/libstagefright/wifi-display/rtptest.cpp index cf5199d..764a38b 100644 --- a/media/libstagefright/wifi-display/rtptest.cpp +++ b/media/libstagefright/wifi-display/rtptest.cpp @@ -35,6 +35,8 @@ #include #include +#define MEDIA_FILENAME "/sdcard/Frame Counter HD 30FPS_1080p.mp4" + namespace android { struct PacketSource : public RefBase { @@ -54,8 +56,7 @@ struct MediaPacketSource : public PacketSource { : mMaxSampleSize(1024 * 1024) { mExtractor = new NuMediaExtractor; CHECK_EQ((status_t)OK, - mExtractor->setDataSource( - "/sdcard/Frame Counter HD 30FPS_1080p.mp4")); + mExtractor->setDataSource(MEDIA_FILENAME)); bool haveVideo = false; for (size_t i = 0; i < mExtractor->countTracks(); ++i) { -- cgit v1.1 From d595b7c858c481a07745674ce2d8a6690e980e74 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Wed, 3 Apr 2013 17:27:56 -0700 Subject: audioflinger: multiple tracks on direct output Fix problems when multiple tracks exist on a direct output thread: when multiple tracks are active, continue to update all track states but only take into account the last track started for audio HAL volume and mixer state control. Bug 8388941 Change-Id: I57e6757286f41651dda99b11a5449e431812431b --- services/audioflinger/Threads.cpp | 97 +++++++++++++++++++++------------------ 1 file changed, 52 insertions(+), 45 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 9d98f0b..47ca100 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -3122,16 +3122,15 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep Vector< sp > *tracksToRemove ) { - sp trackToRemove; - + size_t count = mActiveTracks.size(); mixer_state mixerStatus = MIXER_IDLE; // find out which tracks need to be processed - if (mActiveTracks.size() != 0) { - sp t = mActiveTracks[0].promote(); + for (size_t i = 0; i < count; i++) { + sp t = mActiveTracks[i].promote(); // The track died recently if (t == 0) { - return MIXER_IDLE; + continue; } Track* const track = t.get(); @@ -3180,35 +3179,40 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep } right = v_clamped/MAX_GAIN; } - - if (left != mLeftVolFloat || right != mRightVolFloat) { - mLeftVolFloat = left; - mRightVolFloat = right; - - // Convert volumes from float to 8.24 - uint32_t vl = (uint32_t)(left * (1 << 24)); - uint32_t vr = (uint32_t)(right * (1 << 24)); - - // Delegate volume control to effect in track effect chain if needed - // only one effect chain can be present on DirectOutputThread, so if - // there is one, the track is connected to it - if (!mEffectChains.isEmpty()) { - // Do not ramp volume if volume is controlled by effect - mEffectChains[0]->setVolume_l(&vl, &vr); - left = (float)vl / (1 << 24); - right = (float)vr / (1 << 24); + // Only consider last track started for volume and mixer state control. + // This is the last entry in mActiveTracks unless a track underruns. + // As we only care about the transition phase between two tracks on a + // direct output, it is not a problem to ignore the underrun case. 
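
The volumes handed to the effect chain in the hunk above are in 8.24 fixed point, i.e. the float gain scaled by 2^24, which is why the code multiplies by (1 << 24) on the way in and divides on the way out. A minimal sketch with worked values:

    #include <cstdint>
    #include <cstdio>

    // 8.24 fixed point: 8 integer bits, 24 fractional bits.
    static uint32_t floatTo8_24(float gain) { return (uint32_t)(gain * (1 << 24)); }
    static float    from8_24(uint32_t v)    { return (float)v / (1 << 24); }

    int main() {
        printf("1.0f -> 0x%08x\n", floatTo8_24(1.0f));        // 0x01000000 (unity gain)
        printf("0.5f -> 0x%08x\n", floatTo8_24(0.5f));        // 0x00800000 (half gain)
        printf("0x00800000 -> %f\n", from8_24(0x00800000u));  // 0.500000
        return 0;
    }
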
+ if (i == (count - 1)) { + if (left != mLeftVolFloat || right != mRightVolFloat) { + mLeftVolFloat = left; + mRightVolFloat = right; + + // Convert volumes from float to 8.24 + uint32_t vl = (uint32_t)(left * (1 << 24)); + uint32_t vr = (uint32_t)(right * (1 << 24)); + + // Delegate volume control to effect in track effect chain if needed + // only one effect chain can be present on DirectOutputThread, so if + // there is one, the track is connected to it + if (!mEffectChains.isEmpty()) { + // Do not ramp volume if volume is controlled by effect + mEffectChains[0]->setVolume_l(&vl, &vr); + left = (float)vl / (1 << 24); + right = (float)vr / (1 << 24); + } + mOutput->stream->set_volume(mOutput->stream, left, right); } - mOutput->stream->set_volume(mOutput->stream, left, right); - } - // reset retry count - track->mRetryCount = kMaxTrackRetriesDirect; - mActiveTrack = t; - mixerStatus = MIXER_TRACKS_READY; + // reset retry count + track->mRetryCount = kMaxTrackRetriesDirect; + mActiveTrack = t; + mixerStatus = MIXER_TRACKS_READY; + } } else { - // clear effect chain input buffer if an active track underruns to avoid sending - // previous audio buffer again to effects - if (!mEffectChains.isEmpty()) { + // clear effect chain input buffer if the last active track started underruns + // to avoid sending previous audio buffer again to effects + if (!mEffectChains.isEmpty() && (i == (count -1))) { mEffectChains[0]->clearInputBuffer(); } @@ -3224,33 +3228,36 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep if (track->isStopped()) { track->reset(); } - trackToRemove = track; + tracksToRemove->add(track); } } else { // No buffers for this track. Give it a few chances to // fill a buffer, then remove it from active list. + // Only consider last track started for mixer state control if (--(track->mRetryCount) <= 0) { ALOGV("BUFFER TIMEOUT: remove(%d) from active list", track->name()); - trackToRemove = track; - } else { + tracksToRemove->add(track); + } else if (i == (count -1)){ mixerStatus = MIXER_TRACKS_ENABLED; } } } } - // FIXME merge this with similar code for removing multiple tracks // remove all the tracks that need to be... - if (CC_UNLIKELY(trackToRemove != 0)) { - tracksToRemove->add(trackToRemove); - mActiveTracks.remove(trackToRemove); - if (!mEffectChains.isEmpty()) { - ALOGV("stopping track on chain %p for session Id: %d", mEffectChains[0].get(), - trackToRemove->sessionId()); - mEffectChains[0]->decActiveTrackCnt(); - } - if (trackToRemove->isTerminated()) { - removeTrack_l(trackToRemove); + count = tracksToRemove->size(); + if (CC_UNLIKELY(count)) { + for (size_t i = 0 ; i < count ; i++) { + const sp& track = tracksToRemove->itemAt(i); + mActiveTracks.remove(track); + if (!mEffectChains.isEmpty()) { + ALOGV("stopping track on chain %p for session Id: %d", mEffectChains[0].get(), + track->sessionId()); + mEffectChains[0]->decActiveTrackCnt(); + } + if (track->isTerminated()) { + removeTrack_l(track); + } } } -- cgit v1.1 From 6463e76d41430f9b03a79b221de84255f2475658 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Thu, 4 Apr 2013 15:37:21 -0700 Subject: Make sure resume() and flush() are handled appropriately even if the codec is in Loaded->Idle state. 
b/8347958 Change-Id: Ic14d29502a7effc636251379bb1bbc25739db98e --- media/libstagefright/ACodec.cpp | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index c9f8741..ff72b71 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -3416,6 +3416,21 @@ bool ACodec::LoadedToIdleState::onMessageReceived(const sp &msg) { return true; } + case kWhatResume: + { + // We'll be active soon enough. + return true; + } + + case kWhatFlush: + { + // We haven't even started yet, so we're flushed alright... + sp notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatFlushCompleted); + notify->post(); + return true; + } + default: return BaseState::onMessageReceived(msg); } -- cgit v1.1 From a1cc7d579888554a59f35c6cdfae3e7f85645ae2 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 5 Apr 2013 09:27:29 -0700 Subject: In certain cases where AAC audio frames extended into the next PES payload (inside transport streams) timestamps would be miscalculated. This fixes it. Change-Id: I9d74eeea474d2b89e8a9cdc478ed6085282fb3be --- media/libstagefright/mpeg2ts/ESQueue.cpp | 61 +++++++++++++++++++------------- 1 file changed, 37 insertions(+), 24 deletions(-) diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp index 9499712..9f3b19c 100644 --- a/media/libstagefright/mpeg2ts/ESQueue.cpp +++ b/media/libstagefright/mpeg2ts/ESQueue.cpp @@ -147,9 +147,9 @@ status_t ElementaryStreamQueue::appendData( } if (startOffset > 0) { - ALOGI("found something resembling an H.264/MPEG syncword at " - "offset %ld", - startOffset); + ALOGI("found something resembling an H.264/MPEG syncword " + "at offset %d", + startOffset); } data = &ptr[startOffset]; @@ -180,9 +180,9 @@ status_t ElementaryStreamQueue::appendData( } if (startOffset > 0) { - ALOGI("found something resembling an H.264/MPEG syncword at " - "offset %ld", - startOffset); + ALOGI("found something resembling an H.264/MPEG syncword " + "at offset %d", + startOffset); } data = &ptr[startOffset]; @@ -213,8 +213,9 @@ status_t ElementaryStreamQueue::appendData( } if (startOffset > 0) { - ALOGI("found something resembling an AAC syncword at offset %ld", - startOffset); + ALOGI("found something resembling an AAC syncword at " + "offset %d", + startOffset); } data = &ptr[startOffset]; @@ -241,8 +242,8 @@ status_t ElementaryStreamQueue::appendData( if (startOffset > 0) { ALOGI("found something resembling an MPEG audio " - "syncword at offset %ld", - startOffset); + "syncword at offset %d", + startOffset); } data = &ptr[startOffset]; @@ -394,10 +395,30 @@ sp ElementaryStreamQueue::dequeueAccessUnitPCMAudio() { } sp ElementaryStreamQueue::dequeueAccessUnitAAC() { - int64_t timeUs; + if (mBuffer->size() == 0) { + return NULL; + } + + CHECK(!mRangeInfos.empty()); + const RangeInfo &info = *mRangeInfos.begin(); + if (mBuffer->size() < info.mLength) { + return NULL; + } + + CHECK_GE(info.mTimestampUs, 0ll); + + // The idea here is consume all AAC frames starting at offsets before + // info.mLength so we can assign a meaningful timestamp without + // having to interpolate. + // The final AAC frame may well extend into the next RangeInfo but + // that's ok. 
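
The loop that follows walks consecutive ADTS headers to decide how many whole AAC frames to emit as one access unit. For orientation, a minimal standalone reader for the two header fields the loop depends on, assuming a well-formed ADTS header (syncword 0xFFF, 13-bit aac_frame_length spanning bytes 3..5, 7-byte header or 9 bytes when a CRC is present):

    #include <cstddef>
    #include <cstdint>

    // Sketch: extract protection_absent and aac_frame_length from an ADTS header.
    // Returns false if the syncword is missing.
    static bool parseAdtsHeader(const uint8_t *data, size_t size,
                                size_t *frameLength, size_t *headerSize) {
        if (size < 7 || data[0] != 0xff || (data[1] & 0xf0) != 0xf0) {
            return false;
        }

        bool protectionAbsent = (data[1] & 0x01) != 0;

        // aac_frame_length counts the whole frame, including this header.
        *frameLength = ((data[3] & 0x03) << 11) | (data[4] << 3) | (data[5] >> 5);

        // 7 bytes without CRC, 9 bytes with the 16-bit CRC appended.
        *headerSize = protectionAbsent ? 7 : 9;
        return true;
    }
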
size_t offset = 0; - while (offset + 7 <= mBuffer->size()) { + while (offset < info.mLength) { + if (offset + 7 > mBuffer->size()) { + return NULL; + } + ABitReader bits(mBuffer->data() + offset, mBuffer->size() - offset); // adts_fixed_header @@ -450,24 +471,15 @@ sp ElementaryStreamQueue::dequeueAccessUnitAAC() { } if (offset + aac_frame_length > mBuffer->size()) { - break; + return NULL; } size_t headerSize = protection_absent ? 7 : 9; - int64_t tmpUs = fetchTimestamp(aac_frame_length); - CHECK_GE(tmpUs, 0ll); - - if (offset == 0) { - timeUs = tmpUs; - } - offset += aac_frame_length; } - if (offset == 0) { - return NULL; - } + int64_t timeUs = fetchTimestamp(offset); sp accessUnit = new ABuffer(offset); memcpy(accessUnit->data(), mBuffer->data(), offset); @@ -492,7 +504,6 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) { if (first) { timeUs = info->mTimestampUs; - first = false; } if (info->mLength > size) { @@ -509,6 +520,8 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) { mRangeInfos.erase(mRangeInfos.begin()); info = NULL; } + + first = false; } if (timeUs == 0ll) { -- cgit v1.1 From b3a95a56f0adaf2b60ff164275487d1870442734 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Mon, 1 Apr 2013 17:29:07 -0700 Subject: Camera: implement takePicture for HAL3-using clients * Implements Camera2Device-style triggers by mutating the next request * Implements Camera3Device::waitUntilRequestReceived Change-Id: Ie0b5591158872513a0bffbfab33123cf18dacf8a --- services/camera/libcameraservice/Camera3Device.cpp | 350 +++++++++++++++++++-- services/camera/libcameraservice/Camera3Device.h | 55 +++- .../libcameraservice/camera2/CaptureSequencer.cpp | 29 +- .../libcameraservice/camera2/JpegProcessor.h | 1 + 4 files changed, 414 insertions(+), 21 deletions(-) diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index f2c8c04..e53dbb5 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -262,6 +262,8 @@ status_t Camera3Device::capture(CameraMetadata &request) { ATRACE_CALL(); Mutex::Autolock l(mLock); + // TODO: take ownership of the request + switch (mStatus) { case STATUS_ERROR: ALOGE("%s: Device has encountered a serious error", __FUNCTION__); @@ -363,10 +365,8 @@ status_t Camera3Device::clearStreamingRequest() { status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) { ATRACE_CALL(); - (void)requestId; (void)timeout; - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + return mRequestThread->waitUntilRequestProcessed(requestId, timeout); } status_t Camera3Device::createStream(sp consumer, @@ -698,28 +698,62 @@ status_t Camera3Device::getNextFrame(CameraMetadata *frame) { status_t Camera3Device::triggerAutofocus(uint32_t id) { ATRACE_CALL(); - (void)id; - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id); + // Mix-in this trigger into the next request and only the next request. 
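
Mixing in a trigger means the RequestThread temporarily rewrites the settings of the next request only: the trigger tag/value pairs are written into that request's CameraMetadata before it is handed to the HAL, then erased or restored afterwards (see insertTriggers()/removeTriggers() later in this patch). A condensed, hedged sketch of that round trip, reusing the metadata tags and the CameraMetadata::update()/erase() calls that appear in the diff; the helper itself is illustrative and assumes the camera metadata headers from this tree:

    // Sketch only: apply a one-shot AF trigger to the next request's settings.
    // 'metadata' stands for nextRequest->mSettings; error handling omitted.
    static void applyOneShotAfTrigger(CameraMetadata &metadata, int32_t triggerId) {
        const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_START;

        metadata.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
        metadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &triggerId, 1);

        // ... the request is submitted to the HAL here ...

        // Afterwards the tags are erased (or restored to their previous values)
        // so that later requests do not re-fire the trigger.
        metadata.erase(ANDROID_CONTROL_AF_TRIGGER);
        metadata.erase(ANDROID_CONTROL_AF_TRIGGER_ID);
    }
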
+ RequestTrigger trigger[] = { + { + ANDROID_CONTROL_AF_TRIGGER, + ANDROID_CONTROL_AF_TRIGGER_START + }, + { + ANDROID_CONTROL_AF_TRIGGER_ID, + static_cast(id) + }, + }; + + return mRequestThread->queueTrigger(trigger, + sizeof(trigger)/sizeof(trigger[0])); } status_t Camera3Device::triggerCancelAutofocus(uint32_t id) { ATRACE_CALL(); - (void)id; - - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + ALOGV("%s: Triggering cancel autofocus, id %d", __FUNCTION__, id); + // Mix-in this trigger into the next request and only the next request. + RequestTrigger trigger[] = { + { + ANDROID_CONTROL_AF_TRIGGER, + ANDROID_CONTROL_AF_TRIGGER_CANCEL + }, + { + ANDROID_CONTROL_AF_TRIGGER_ID, + static_cast(id) + }, + }; + + return mRequestThread->queueTrigger(trigger, + sizeof(trigger)/sizeof(trigger[0])); } status_t Camera3Device::triggerPrecaptureMetering(uint32_t id) { ATRACE_CALL(); - (void)id; - - ALOGE("%s: Unimplemented", __FUNCTION__); - return INVALID_OPERATION; + ALOGV("%s: Triggering precapture metering, id %d", __FUNCTION__, id); + // Mix-in this trigger into the next request and only the next request. + RequestTrigger trigger[] = { + { + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START + }, + { + ANDROID_CONTROL_AE_PRECAPTURE_ID, + static_cast(id) + }, + }; + + return mRequestThread->queueTrigger(trigger, + sizeof(trigger)/sizeof(trigger[0])); } status_t Camera3Device::pushReprocessBuffer(int reprocessStreamId, @@ -997,9 +1031,13 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { // Dispatch any 3A change events to listeners if (listener != NULL) { if (new3aState.aeState != cur3aState.aeState) { + ALOGVV("%s: AE state changed from 0x%x to 0x%x", + __FUNCTION__, cur3aState.aeState, new3aState.aeState); listener->notifyAutoExposure(new3aState.aeState, aeTriggerId); } if (new3aState.afState != cur3aState.afState) { + ALOGVV("%s: AF state changed from 0x%x to 0x%x", + __FUNCTION__, cur3aState.afState, new3aState.afState); listener->notifyAutoFocus(new3aState.afState, afTriggerId); } if (new3aState.awbState != cur3aState.awbState) { @@ -1059,7 +1097,8 @@ Camera3Device::RequestThread::RequestThread(wp parent, mReconfigured(false), mDoPause(false), mPaused(true), - mFrameNumber(0) { + mFrameNumber(0), + mLatestRequestId(NAME_NOT_FOUND) { } void Camera3Device::RequestThread::configurationComplete() { @@ -1075,6 +1114,57 @@ status_t Camera3Device::RequestThread::queueRequest( return OK; } + +status_t Camera3Device::RequestThread::queueTrigger( + RequestTrigger trigger[], + size_t count) { + + Mutex::Autolock l(mTriggerMutex); + status_t ret; + + for (size_t i = 0; i < count; ++i) { + ret = queueTriggerLocked(trigger[i]); + + if (ret != OK) { + return ret; + } + } + + return OK; +} + +status_t Camera3Device::RequestThread::queueTriggerLocked( + RequestTrigger trigger) { + + uint32_t tag = trigger.metadataTag; + ssize_t index = mTriggerMap.indexOfKey(tag); + + switch (trigger.getTagType()) { + case TYPE_BYTE: + // fall-through + case TYPE_INT32: + break; + default: + ALOGE("%s: Type not supported: 0x%x", + __FUNCTION__, + trigger.getTagType()); + return INVALID_OPERATION; + } + + /** + * Collect only the latest trigger, since we only have 1 field + * in the request settings per trigger tag, and can't send more than 1 + * trigger per request. 
+ */ + if (index != NAME_NOT_FOUND) { + mTriggerMap.editValueAt(index) = trigger; + } else { + mTriggerMap.add(tag, trigger); + } + + return OK; +} + status_t Camera3Device::RequestThread::setRepeatingRequests( const RequestList &requests) { Mutex::Autolock l(mRequestLock); @@ -1108,6 +1198,24 @@ status_t Camera3Device::RequestThread::waitUntilPaused(nsecs_t timeout) { return OK; } +status_t Camera3Device::RequestThread::waitUntilRequestProcessed( + int32_t requestId, nsecs_t timeout) { + Mutex::Autolock l(mLatestRequestMutex); + status_t res; + while (mLatestRequestId != requestId) { + nsecs_t startTime = systemTime(); + + res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout); + if (res != OK) return res; + + timeout -= (systemTime() - startTime); + } + + return OK; +} + + + bool Camera3Device::RequestThread::threadLoop() { status_t res; @@ -1125,16 +1233,55 @@ bool Camera3Device::RequestThread::threadLoop() { } // Create request to HAL - camera3_capture_request_t request = camera3_capture_request_t(); + Vector outputBuffers; - if (mPrevRequest != nextRequest) { + // Insert any queued triggers (before metadata is locked) + int32_t triggerCount; + res = insertTriggers(nextRequest); + if (res < 0) { + ALOGE("RequestThread: Unable to insert triggers " + "(capture request %d, HAL device: %s (%d)", + (mFrameNumber+1), strerror(-res), res); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return false; + } + triggerCount = res; + + bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0); + + // If the request is the same as last, or we had triggers last time + if (mPrevRequest != nextRequest || triggersMixedIn) { + /** + * The request should be presorted so accesses in HAL + * are O(logn). Sidenote, sorting a sorted metadata is nop. 
+ */ + nextRequest->mSettings.sort(); request.settings = nextRequest->mSettings.getAndLock(); mPrevRequest = nextRequest; - } // else leave request.settings NULL to indicate 'reuse latest given' + ALOGVV("%s: Request settings are NEW", __FUNCTION__); + + IF_ALOGV() { + camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t(); + find_camera_metadata_ro_entry( + request.settings, + ANDROID_CONTROL_AF_TRIGGER, + &e + ); + if (e.count > 0) { + ALOGV("%s: Request (frame num %d) had AF trigger 0x%x", + __FUNCTION__, + mFrameNumber+1, + e.data.u8[0]); + } + } + } else { + // leave request.settings NULL to indicate 'reuse latest given' + ALOGVV("%s: Request settings are REUSED", + __FUNCTION__); + } camera3_stream_buffer_t inputBuffer; - Vector outputBuffers; // Fill in buffers @@ -1168,6 +1315,7 @@ bool Camera3Device::RequestThread::threadLoop() { request.frame_number = mFrameNumber++; + // Submit request and block until ready for next one res = mHal3Device->ops->process_capture_request(mHal3Device, &request); @@ -1181,6 +1329,35 @@ bool Camera3Device::RequestThread::threadLoop() { if (request.settings != NULL) { nextRequest->mSettings.unlock(request.settings); } + + // Remove any previously queued triggers (after unlock) + res = removeTriggers(mPrevRequest); + if (res != OK) { + ALOGE("RequestThread: Unable to remove triggers " + "(capture request %d, HAL device: %s (%d)", + request.frame_number, strerror(-res), res); + return false; + } + mPrevTriggers = triggerCount; + + // Read android.request.id from the request settings metadata + // - inform waitUntilRequestProcessed thread of a new request ID + { + Mutex::Autolock al(mLatestRequestMutex); + + camera_metadata_entry_t requestIdEntry = + nextRequest->mSettings.find(ANDROID_REQUEST_ID); + if (requestIdEntry.count > 0) { + mLatestRequestId = requestIdEntry.data.i32[0]; + } else { + ALOGW("%s: Did not have android.request.id set in the request", + __FUNCTION__); + mLatestRequestId = NAME_NOT_FOUND; + } + + mLatestRequestSignal.signal(); + } + return true; } @@ -1285,6 +1462,141 @@ bool Camera3Device::RequestThread::waitIfPaused() { return false; } +status_t Camera3Device::RequestThread::insertTriggers( + const sp &request) { + + Mutex::Autolock al(mTriggerMutex); + + CameraMetadata &metadata = request->mSettings; + size_t count = mTriggerMap.size(); + + for (size_t i = 0; i < count; ++i) { + RequestTrigger trigger = mTriggerMap.valueAt(i); + + uint32_t tag = trigger.metadataTag; + camera_metadata_entry entry = metadata.find(tag); + + if (entry.count > 0) { + /** + * Already has an entry for this trigger in the request. + * Rewrite it with our requested trigger value. 
+ */ + RequestTrigger oldTrigger = trigger; + + oldTrigger.entryValue = entry.data.u8[0]; + + mTriggerReplacedMap.add(tag, oldTrigger); + } else { + /** + * More typical, no trigger entry, so we just add it + */ + mTriggerRemovedMap.add(tag, trigger); + } + + status_t res; + + switch (trigger.getTagType()) { + case TYPE_BYTE: { + uint8_t entryValue = static_cast(trigger.entryValue); + res = metadata.update(tag, + &entryValue, + /*count*/1); + break; + } + case TYPE_INT32: + res = metadata.update(tag, + &trigger.entryValue, + /*count*/1); + break; + default: + ALOGE("%s: Type not supported: 0x%x", + __FUNCTION__, + trigger.getTagType()); + return INVALID_OPERATION; + } + + if (res != OK) { + ALOGE("%s: Failed to update request metadata with trigger tag %s" + ", value %d", __FUNCTION__, trigger.getTagName(), + trigger.entryValue); + return res; + } + + ALOGV("%s: Mixed in trigger %s, value %d", __FUNCTION__, + trigger.getTagName(), + trigger.entryValue); + } + + mTriggerMap.clear(); + + return count; +} + +status_t Camera3Device::RequestThread::removeTriggers( + const sp &request) { + Mutex::Autolock al(mTriggerMutex); + + CameraMetadata &metadata = request->mSettings; + + /** + * Replace all old entries with their old values. + */ + for (size_t i = 0; i < mTriggerReplacedMap.size(); ++i) { + RequestTrigger trigger = mTriggerReplacedMap.valueAt(i); + + status_t res; + + uint32_t tag = trigger.metadataTag; + switch (trigger.getTagType()) { + case TYPE_BYTE: { + uint8_t entryValue = static_cast(trigger.entryValue); + res = metadata.update(tag, + &entryValue, + /*count*/1); + break; + } + case TYPE_INT32: + res = metadata.update(tag, + &trigger.entryValue, + /*count*/1); + break; + default: + ALOGE("%s: Type not supported: 0x%x", + __FUNCTION__, + trigger.getTagType()); + return INVALID_OPERATION; + } + + if (res != OK) { + ALOGE("%s: Failed to restore request metadata with trigger tag %s" + ", trigger value %d", __FUNCTION__, + trigger.getTagName(), trigger.entryValue); + return res; + } + } + mTriggerReplacedMap.clear(); + + /** + * Remove all new entries. + */ + for (size_t i = 0; i < mTriggerRemovedMap.size(); ++i) { + RequestTrigger trigger = mTriggerRemovedMap.valueAt(i); + status_t res = metadata.erase(trigger.metadataTag); + + if (res != OK) { + ALOGE("%s: Failed to erase metadata with trigger tag %s" + ", trigger value %d", __FUNCTION__, + trigger.getTagName(), trigger.entryValue); + return res; + } + } + mTriggerRemovedMap.clear(); + + return OK; +} + + + /** * Static callback forwarding methods from HAL to instance */ diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/Camera3Device.h index 8600c6c..7f294e6 100644 --- a/services/camera/libcameraservice/Camera3Device.h +++ b/services/camera/libcameraservice/Camera3Device.h @@ -109,7 +109,7 @@ class Camera3Device : private: static const nsecs_t kShutdownTimeout = 5000000000; // 5 sec - + struct RequestTrigger; Mutex mLock; @@ -172,6 +172,23 @@ class Camera3Device : */ status_t configureStreamsLocked(); + struct RequestTrigger { + // Metadata tag number, e.g. android.control.aePrecaptureTrigger + uint32_t metadataTag; + // Metadata value, e.g. 'START' or the trigger ID + int32_t entryValue; + + // The last part of the fully qualified path, e.g. afTrigger + const char *getTagName() const { + return get_camera_metadata_tag_name(metadataTag) ?: "NULL"; + } + + // e.g. TYPE_BYTE, TYPE_INT32, etc. 
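
waitUntilRequestProcessed() above uses a common condition-variable idiom: wait for a predicate under a fixed overall budget, shrinking the remaining timeout after every wakeup that did not satisfy the predicate. A generic restatement in standard C++ (the Android code uses Mutex/Condition rather than the std types):

    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    // Sketch: wait until done() holds, spending at most 'budget' in total.
    template <typename Pred>
    bool waitWithBudget(std::mutex &m, std::condition_variable &cv,
                        std::chrono::nanoseconds budget, Pred done) {
        std::unique_lock<std::mutex> lock(m);
        while (!done()) {
            auto start = std::chrono::steady_clock::now();
            if (cv.wait_for(lock, budget) == std::cv_status::timeout) {
                return false;
            }
            budget -= std::chrono::duration_cast<std::chrono::nanoseconds>(
                    std::chrono::steady_clock::now() - start);
            if (budget <= std::chrono::nanoseconds::zero()) {
                return done();
            }
        }
        return true;
    }
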
+ int getTagType() const { + return get_camera_metadata_tag_type(metadataTag); + } + }; + /** * Thread for managing capture request submission to HAL device. */ @@ -198,6 +215,14 @@ class Camera3Device : status_t queueRequest(sp request); /** + * Queue a trigger to be dispatched with the next outgoing + * process_capture_request. The settings for that request only + * will be temporarily rewritten to add the trigger tag/value. + * Subsequent requests will not be rewritten (for this tag). + */ + status_t queueTrigger(RequestTrigger trigger[], size_t count); + + /** * Pause/unpause the capture thread. Doesn't block, so use * waitUntilPaused to wait until the thread is paused. */ @@ -210,11 +235,27 @@ class Camera3Device : */ status_t waitUntilPaused(nsecs_t timeout); + /** + * Wait until thread processes the capture request with settings' + * android.request.id == requestId. + * + * Returns TIMED_OUT in case the thread does not process the request + * within the timeout. + */ + status_t waitUntilRequestProcessed(int32_t requestId, nsecs_t timeout); + protected: virtual bool threadLoop(); private: + status_t queueTriggerLocked(RequestTrigger trigger); + // Mix-in queued triggers into this request + int32_t insertTriggers(const sp &request); + // Purge the queued triggers from this request, + // restoring the old field values for those tags. + status_t removeTriggers(const sp &request); + static const nsecs_t kRequestTimeout = 50e6; // 50 ms // Waits for a request, or returns NULL if times out. @@ -249,8 +290,20 @@ class Camera3Device : Condition mPausedSignal; sp mPrevRequest; + int32_t mPrevTriggers; int32_t mFrameNumber; + + Mutex mLatestRequestMutex; + Condition mLatestRequestSignal; + // android.request.id for latest process_capture_request + int32_t mLatestRequestId; + + typedef KeyedVector TriggerMap; + Mutex mTriggerMutex; + TriggerMap mTriggerMap; + TriggerMap mTriggerRemovedMap; + TriggerMap mTriggerReplacedMap; }; sp mRequestThread; diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp index 1880912..ee03329 100644 --- a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp +++ b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp @@ -270,6 +270,9 @@ CaptureSequencer::CaptureState CaptureSequencer::manageDone(sp &c processor->clearZslQueue(); } + /** + * Fire the jpegCallback in Camera#takePicture(..., jpegCallback) + */ if (mCaptureBuffer != 0 && res == OK) { Camera2Client::SharedCameraCallbacks::Lock l(client->mSharedCameraCallbacks); @@ -367,6 +370,8 @@ CaptureSequencer::CaptureState CaptureSequencer::manageZslReprocessing( CaptureSequencer::CaptureState CaptureSequencer::manageStandardStart( sp &client) { ATRACE_CALL(); + + // Get the onFrameAvailable callback when the requestID == mCaptureId client->registerFrameListener(mCaptureId, mCaptureId + 1, this); { @@ -426,6 +431,13 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture( SharedParameters::Lock l(client->getParameters()); Vector outputStreams; + /** + * Set up output streams in the request + * - preview + * - capture/jpeg + * - callback (if preview callbacks enabled) + * - recording (if recording enabled) + */ outputStreams.push(client->getPreviewStreamId()); outputStreams.push(client->getCaptureStreamId()); @@ -454,6 +466,7 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture( return DONE; } + // Create a capture copy since CameraDeviceBase#capture takes ownership CameraMetadata 
captureCopy = mCaptureRequest; if (captureCopy.entryCount() == 0) { ALOGE("%s: Camera %d: Unable to copy capture request for HAL device", @@ -461,7 +474,12 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture( return DONE; } + /** + * Clear the streaming request for still-capture pictures + * (as opposed to i.e. video snapshots) + */ if (l.mParameters.state == Parameters::STILL_CAPTURE) { + // API definition of takePicture() - stop preview before taking pic res = client->stopStream(); if (res != OK) { ALOGE("%s: Camera %d: Unable to stop preview for still capture: " @@ -488,6 +506,8 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCaptureWait( status_t res; ATRACE_CALL(); Mutex::Autolock l(mInputMutex); + + // Wait for new metadata result (mNewFrame) while (!mNewFrameReceived) { res = mNewFrameSignal.waitRelative(mInputMutex, kWaitDuration); if (res == TIMED_OUT) { @@ -495,12 +515,17 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCaptureWait( break; } } + + // Approximation of the shutter being closed + // - TODO: use the hal3 exposure callback in Camera3Device instead if (mNewFrameReceived && !mShutterNotified) { SharedParameters::Lock l(client->getParameters()); /* warning: this also locks a SharedCameraCallbacks */ shutterNotifyLocked(l.mParameters, client, mMsgType); mShutterNotified = true; } + + // Wait until jpeg was captured by JpegProcessor while (mNewFrameReceived && !mNewCaptureReceived) { res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration); if (res == TIMED_OUT) { @@ -524,7 +549,9 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCaptureWait( } if (entry.data.i64[0] != mCaptureTimestamp) { ALOGW("Mismatched capture timestamps: Metadata frame %lld," - " captured buffer %lld", entry.data.i64[0], mCaptureTimestamp); + " captured buffer %lld", + entry.data.i64[0], + mCaptureTimestamp); } client->removeFrameListener(mCaptureId, mCaptureId + 1, this); diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.h b/services/camera/libcameraservice/camera2/JpegProcessor.h index 2283f28..74f4738 100644 --- a/services/camera/libcameraservice/camera2/JpegProcessor.h +++ b/services/camera/libcameraservice/camera2/JpegProcessor.h @@ -44,6 +44,7 @@ class JpegProcessor: JpegProcessor(wp client, wp sequencer); ~JpegProcessor(); + // CpuConsumer listener implementation void onFrameAvailable(); status_t updateStream(const Parameters ¶ms); -- cgit v1.1 From ec77122351b4e78c1fe5b60a208f76baf8c67591 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Mon, 8 Apr 2013 14:30:57 -0700 Subject: Add support for common encryption b/7465749 Change-Id: I5403b74a5ae428ad28b382863a09daafc400b137 --- include/media/stagefright/DataSource.h | 1 + include/media/stagefright/MetaData.h | 4 + include/media/stagefright/NuMediaExtractor.h | 2 + media/libstagefright/DataSource.cpp | 13 + media/libstagefright/MPEG4Extractor.cpp | 364 +++++++++++++++++++++++++- media/libstagefright/NuMediaExtractor.cpp | 28 ++ media/libstagefright/include/MPEG4Extractor.h | 7 + 7 files changed, 411 insertions(+), 8 deletions(-) diff --git a/include/media/stagefright/DataSource.h b/include/media/stagefright/DataSource.h index b0c1b34..742bc0e 100644 --- a/include/media/stagefright/DataSource.h +++ b/include/media/stagefright/DataSource.h @@ -54,6 +54,7 @@ public: // Convenience methods: bool getUInt16(off64_t offset, uint16_t *x); + bool getUInt24(off64_t offset, uint32_t *x); // 3 byte int, returned as a 32-bit int bool getUInt32(off64_t offset, 
uint32_t *x); bool getUInt64(off64_t offset, uint64_t *x); diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h index be08c19..9ab3edc 100644 --- a/include/media/stagefright/MetaData.h +++ b/include/media/stagefright/MetaData.h @@ -157,6 +157,10 @@ enum { kKeyCryptoKey = 'cryK', // uint8_t[16] kKeyCryptoIV = 'cryI', // uint8_t[16] kKeyCryptoMode = 'cryM', // int32_t + + kKeyCryptoDefaultIVSize = 'cryS', // int32_t + + kKeyPssh = 'pssh', // raw data }; enum { diff --git a/include/media/stagefright/NuMediaExtractor.h b/include/media/stagefright/NuMediaExtractor.h index 0833110..5ae6f6b 100644 --- a/include/media/stagefright/NuMediaExtractor.h +++ b/include/media/stagefright/NuMediaExtractor.h @@ -55,6 +55,8 @@ struct NuMediaExtractor : public RefBase { size_t countTracks() const; status_t getTrackFormat(size_t index, sp *format) const; + status_t getFileFormat(sp *format) const; + status_t selectTrack(size_t index); status_t unselectTrack(size_t index); diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp index 19b38ee..fc6fd9c 100644 --- a/media/libstagefright/DataSource.cpp +++ b/media/libstagefright/DataSource.cpp @@ -58,6 +58,19 @@ bool DataSource::getUInt16(off64_t offset, uint16_t *x) { return true; } +bool DataSource::getUInt24(off64_t offset, uint32_t *x) { + *x = 0; + + uint8_t byte[3]; + if (readAt(offset, byte, 3) != 3) { + return false; + } + + *x = (byte[0] << 16) | (byte[1] << 8) | byte[2]; + + return true; +} + bool DataSource::getUInt32(off64_t offset, uint32_t *x) { *x = 0; diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index 56fad60..3503aaf 100644 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -78,6 +78,19 @@ private: int32_t mLastParsedTrackId; int32_t mTrackId; + int32_t mCryptoMode; // passed in from extractor + int32_t mDefaultIVSize; // passed in from extractor + uint8_t mCryptoKey[16]; // passed in from extractor + uint32_t mCurrentAuxInfoType; + uint32_t mCurrentAuxInfoTypeParameter; + uint32_t mCurrentDefaultSampleInfoSize; + uint32_t mCurrentSampleInfoCount; + uint32_t mCurrentSampleInfoAllocSize; + uint8_t* mCurrentSampleInfoSizes; + uint32_t mCurrentSampleInfoOffsetCount; + uint32_t mCurrentSampleInfoOffsetsAllocSize; + uint64_t* mCurrentSampleInfoOffsets; + bool mIsAVC; size_t mNALLengthSize; @@ -95,6 +108,8 @@ private: status_t parseChunk(off64_t *offset); status_t parseTrackFragmentHeader(off64_t offset, off64_t size); status_t parseTrackFragmentRun(off64_t offset, off64_t size); + status_t parseSampleAuxiliaryInformationSizes(off64_t offset, off64_t size); + status_t parseSampleAuxiliaryInformationOffsets(off64_t offset, off64_t size); struct TrackFragmentHeaderInfo { enum Flags { @@ -122,6 +137,9 @@ private: off64_t offset; size_t size; uint32_t duration; + uint8_t iv[16]; + Vector clearsizes; + Vector encryptedsizes; }; Vector mCurrentSamples; @@ -333,6 +351,10 @@ MPEG4Extractor::~MPEG4Extractor() { sinf = next; } mFirstSINF = NULL; + + for (size_t i = 0; i < mPssh.size(); i++) { + delete [] mPssh[i].data; + } } uint32_t MPEG4Extractor::flags() const { @@ -353,6 +375,7 @@ sp MPEG4Extractor::getMetaData() { size_t MPEG4Extractor::countTracks() { status_t err; if ((err = readMetaData()) != OK) { + ALOGV("MPEG4Extractor::countTracks: no tracks"); return 0; } @@ -363,6 +386,7 @@ size_t MPEG4Extractor::countTracks() { track = track->next; } + ALOGV("MPEG4Extractor::countTracks: %d tracks", n); 
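
getUInt24() above follows the same convention as getUInt16/getUInt32: MP4 box fields are big-endian, so the three bytes are folded most-significant first. A trivial worked sketch:

    #include <cstdint>
    #include <cstdio>

    // Fold three big-endian bytes into a 32-bit value, as getUInt24() does.
    static uint32_t be24(const uint8_t b[3]) {
        return ((uint32_t)b[0] << 16) | ((uint32_t)b[1] << 8) | b[2];
    }

    int main() {
        const uint8_t bytes[3] = { 0x01, 0x02, 0x03 };
        printf("0x%06x (%u)\n", be24(bytes), be24(bytes));   // 0x010203 (66051)
        return 0;
    }
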
return n; } @@ -461,6 +485,23 @@ status_t MPEG4Extractor::readMetaData() { } CHECK_NE(err, (status_t)NO_INIT); + + // copy pssh data into file metadata + int psshsize = 0; + for (size_t i = 0; i < mPssh.size(); i++) { + psshsize += 20 + mPssh[i].datalen; + } + if (psshsize) { + char *buf = (char*)malloc(psshsize); + char *ptr = buf; + for (size_t i = 0; i < mPssh.size(); i++) { + memcpy(ptr, mPssh[i].uuid, 20); // uuid + length + memcpy(ptr + 20, mPssh[i].data, mPssh[i].datalen); + ptr += (20 + mPssh[i].datalen); + } + mFileMetaData->setData(kKeyPssh, 'pssh', buf, psshsize); + free(buf); + } return mInitCheck; } @@ -759,6 +800,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('m', 'f', 'r', 'a'): case FOURCC('u', 'd', 't', 'a'): case FOURCC('i', 'l', 's', 't'): + case FOURCC('s', 'i', 'n', 'f'): + case FOURCC('s', 'c', 'h', 'i'): { if (chunk_type == FOURCC('s', 't', 'b', 'l')) { ALOGV("sampleTable chunk is %d bytes long.", (size_t)chunk_size); @@ -846,6 +889,69 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { break; } + case FOURCC('f', 'r', 'm', 'a'): + { + int32_t original_fourcc; + if (mDataSource->readAt(data_offset, &original_fourcc, 4) < 4) { + return ERROR_IO; + } + original_fourcc = ntohl(original_fourcc); + ALOGV("read original format: %d", original_fourcc); + mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(original_fourcc)); + *offset += chunk_size; + break; + } + + case FOURCC('t', 'e', 'n', 'c'): + { + if (chunk_size < 32) { + return ERROR_MALFORMED; + } + + // tenc box contains 1 byte version, 3 byte flags, 3 byte default algorithm id, one byte + // default IV size, 16 bytes default KeyID + // (ISO 23001-7) + char buf[4]; + memset(buf, 0, 4); + if (mDataSource->readAt(data_offset + 4, buf + 1, 3) < 3) { + return ERROR_IO; + } + uint32_t defaultAlgorithmId = ntohl(*((int32_t*)buf)); + if (defaultAlgorithmId > 1) { + // only 0 (clear) and 1 (AES-128) are valid + return ERROR_MALFORMED; + } + + memset(buf, 0, 4); + if (mDataSource->readAt(data_offset + 7, buf + 3, 1) < 1) { + return ERROR_IO; + } + uint32_t defaultIVSize = ntohl(*((int32_t*)buf)); + + if ((defaultAlgorithmId == 0 && defaultIVSize != 0) || + (defaultAlgorithmId != 0 && defaultIVSize == 0)) { + // only unencrypted data must have 0 IV size + return ERROR_MALFORMED; + } else if (defaultIVSize != 0 && + defaultIVSize != 8 && + defaultIVSize != 16) { + // only supported sizes are 0, 8 and 16 + return ERROR_MALFORMED; + } + + uint8_t defaultKeyId[16]; + + if (mDataSource->readAt(data_offset + 8, &defaultKeyId, 16) < 16) { + return ERROR_IO; + } + + mLastTrack->meta->setInt32(kKeyCryptoMode, defaultAlgorithmId); + mLastTrack->meta->setInt32(kKeyCryptoDefaultIVSize, defaultIVSize); + mLastTrack->meta->setData(kKeyCryptoKey, 'tenc', defaultKeyId, 16); + *offset += chunk_size; + break; + } + case FOURCC('t', 'k', 'h', 'd'): { status_t err; @@ -857,6 +963,37 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { break; } + case FOURCC('p', 's', 's', 'h'): + { + PsshInfo pssh; + + if (mDataSource->readAt(data_offset + 4, &pssh.uuid, 16) < 16) { + return ERROR_IO; + } + + uint32_t psshdatalen = 0; + if (mDataSource->readAt(data_offset + 20, &psshdatalen, 4) < 4) { + return ERROR_IO; + } + pssh.datalen = ntohl(psshdatalen); + ALOGV("pssh data size: %d", pssh.datalen); + if (pssh.datalen + 20 > chunk_size) { + // pssh data length exceeds size of containing box + return ERROR_MALFORMED; + } + + pssh.data = new uint8_t[pssh.datalen]; + ALOGV("allocated pssh 
@ %p", pssh.data); + ssize_t requested = (ssize_t) pssh.datalen; + if (mDataSource->readAt(data_offset + 24, pssh.data, requested) < requested) { + return ERROR_IO; + } + mPssh.push_back(pssh); + + *offset += chunk_size; + break; + } + case FOURCC('m', 'd', 'h', 'd'): { if (chunk_data_size < 4) { @@ -970,16 +1107,17 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { // For 3GPP timed text, there could be multiple tx3g boxes contain // multiple text display formats. These formats will be used to // display the timed text. + // For encrypted files, there may also be more than one entry. const char *mime; CHECK(mLastTrack->meta->findCString(kKeyMIMEType, &mime)); - if (strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) { + if (strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP) && + strcasecmp(mime, "application/octet-stream")) { // For now we only support a single type of media per track. mLastTrack->skipTrack = true; *offset += chunk_size; break; } } - off64_t stop_offset = *offset + chunk_size; *offset = data_offset + 8; for (uint32_t i = 0; i < entry_count; ++i) { @@ -1053,6 +1191,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { } case FOURCC('m', 'p', '4', 'v'): + case FOURCC('e', 'n', 'c', 'v'): case FOURCC('s', '2', '6', '3'): case FOURCC('H', '2', '6', '3'): case FOURCC('h', '2', '6', '3'): @@ -1075,7 +1214,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { uint16_t width = U16_AT(&buffer[6 + 18]); uint16_t height = U16_AT(&buffer[6 + 20]); - // The video sample is not stand-compliant if it has invalid dimension. + // The video sample is not standard-compliant if it has invalid dimension. // Use some default width and height value, and // let the decoder figure out the actual width and height (and thus // be prepared for INFO_FOMRAT_CHANGED event). 
@@ -1085,7 +1224,10 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { // printf("*** coding='%s' width=%d height=%d\n", // chunk, width, height); - mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type)); + if (chunk_type != FOURCC('e', 'n', 'c', 'v')) { + // if the chunk type is encv, we'll get the type from the sinf/frma box later + mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type)); + } mLastTrack->meta->setInt32(kKeyWidth, width); mLastTrack->meta->setInt32(kKeyHeight, height); @@ -1442,6 +1584,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('m', 'd', 'a', 't'): { + ALOGV("mdat chunk, drm: %d", mIsDrm); if (!mIsDrm) { *offset += chunk_size; break; @@ -1968,6 +2111,8 @@ sp MPEG4Extractor::getTrack(size_t index) { return NULL; } + ALOGV("getTrack called, pssh: %d", mPssh.size()); + return new MPEG4Source( track->meta, mDataSource, track->timescale, track->sampleTable, mSidxEntries, mMoofOffset); @@ -2139,6 +2284,10 @@ MPEG4Source::MPEG4Source( mFirstMoofOffset(firstMoofOffset), mCurrentMoofOffset(firstMoofOffset), mCurrentTime(0), + mCurrentSampleInfoAllocSize(0), + mCurrentSampleInfoSizes(NULL), + mCurrentSampleInfoOffsetsAllocSize(0), + mCurrentSampleInfoOffsets(NULL), mIsAVC(false), mNALLengthSize(0), mStarted(false), @@ -2146,6 +2295,18 @@ MPEG4Source::MPEG4Source( mBuffer(NULL), mWantsNALFragments(false), mSrcBuffer(NULL) { + + mFormat->findInt32(kKeyCryptoMode, &mCryptoMode); + mFormat->findInt32(kKeyCryptoDefaultIVSize, &mDefaultIVSize); + uint32_t keytype; + const void *key; + size_t keysize; + if (mFormat->findData(kKeyCryptoKey, &keytype, &key, &keysize)) { + CHECK(keysize <= 16); + memset(mCryptoKey, 0, 16); + memcpy(mCryptoKey, key, keysize); + } + const char *mime; bool success = mFormat->findCString(kKeyMIMEType, &mime); CHECK(success); @@ -2179,6 +2340,8 @@ MPEG4Source::~MPEG4Source() { if (mStarted) { stop(); } + free(mCurrentSampleInfoSizes); + free(mCurrentSampleInfoOffsets); } status_t MPEG4Source::start(MetaData *params) { @@ -2274,7 +2437,7 @@ status_t MPEG4Source::parseChunk(off64_t *offset) { } } if (chunk_type == FOURCC('m', 'o', 'o', 'f')) { - // *offset points to then mdat box following this moof + // *offset points to the mdat box following this moof parseChunk(offset); // doesn't actually parse it, just updates offset mNextMoofOffset = *offset; } @@ -2302,6 +2465,31 @@ status_t MPEG4Source::parseChunk(off64_t *offset) { break; } + case FOURCC('s', 'a', 'i', 'z'): { + status_t err; + if ((err = parseSampleAuxiliaryInformationSizes(data_offset, chunk_data_size)) != OK) { + return err; + } + *offset += chunk_size; + break; + } + case FOURCC('s', 'a', 'i', 'o'): { + status_t err; + if ((err = parseSampleAuxiliaryInformationOffsets(data_offset, chunk_data_size)) != OK) { + return err; + } + *offset += chunk_size; + break; + } + + case FOURCC('m', 'd', 'a', 't'): { + // parse DRM info if present + ALOGV("MPEG4Source::parseChunk mdat"); + // if saiz/saoi was previously observed, do something with the sampleinfos + *offset += chunk_size; + break; + } + default: { *offset += chunk_size; break; @@ -2310,6 +2498,152 @@ status_t MPEG4Source::parseChunk(off64_t *offset) { return OK; } +status_t MPEG4Source::parseSampleAuxiliaryInformationSizes(off64_t offset, off64_t size) { + ALOGV("parseSampleAuxiliaryInformationSizes"); + // 14496-12 8.7.12 + uint8_t version; + if (mDataSource->readAt( + offset, &version, sizeof(version)) + < (ssize_t)sizeof(version)) { + return ERROR_IO; + } + + if 
(version != 0) { + return ERROR_UNSUPPORTED; + } + offset++; + + uint32_t flags; + if (!mDataSource->getUInt24(offset, &flags)) { + return ERROR_IO; + } + offset += 3; + + if (flags & 1) { + uint32_t tmp; + if (!mDataSource->getUInt32(offset, &tmp)) { + return ERROR_MALFORMED; + } + mCurrentAuxInfoType = tmp; + offset += 4; + if (!mDataSource->getUInt32(offset, &tmp)) { + return ERROR_MALFORMED; + } + mCurrentAuxInfoTypeParameter = tmp; + offset += 4; + } + + uint8_t defsize; + if (mDataSource->readAt(offset, &defsize, 1) != 1) { + return ERROR_MALFORMED; + } + mCurrentDefaultSampleInfoSize = defsize; + offset++; + + uint32_t smplcnt; + if (!mDataSource->getUInt32(offset, &smplcnt)) { + return ERROR_MALFORMED; + } + offset += 4; + + if (smplcnt > mCurrentSampleInfoAllocSize) { + mCurrentSampleInfoSizes = (uint8_t*) realloc(mCurrentSampleInfoSizes, smplcnt); + mCurrentSampleInfoAllocSize = smplcnt; + } + mCurrentSampleInfoCount = smplcnt; + + mDataSource->readAt(offset, mCurrentSampleInfoSizes, smplcnt); + return OK; +} + +status_t MPEG4Source::parseSampleAuxiliaryInformationOffsets(off64_t offset, off64_t size) { + ALOGV("parseSampleAuxiliaryInformationOffsets"); + // 14496-12 8.7.13 + uint8_t version; + if (mDataSource->readAt(offset, &version, sizeof(version)) != 1) { + return ERROR_IO; + } + offset++; + + uint32_t flags; + if (!mDataSource->getUInt24(offset, &flags)) { + return ERROR_IO; + } + offset += 3; + + uint32_t entrycount; + if (!mDataSource->getUInt32(offset, &entrycount)) { + return ERROR_IO; + } + offset += 4; + + if (entrycount > mCurrentSampleInfoOffsetsAllocSize) { + mCurrentSampleInfoOffsets = (uint64_t*) realloc(mCurrentSampleInfoOffsets, entrycount * 8); + mCurrentSampleInfoOffsetsAllocSize = entrycount; + } + mCurrentSampleInfoOffsetCount = entrycount; + + for (size_t i = 0; i < entrycount; i++) { + if (version == 0) { + uint32_t tmp; + if (!mDataSource->getUInt32(offset, &tmp)) { + return ERROR_IO; + } + mCurrentSampleInfoOffsets[i] = tmp; + offset += 4; + } else { + uint64_t tmp; + if (!mDataSource->getUInt64(offset, &tmp)) { + return ERROR_IO; + } + mCurrentSampleInfoOffsets[i] = tmp; + offset += 8; + } + } + + // parse clear/encrypted data + + off64_t drmoffset = mCurrentSampleInfoOffsets[0]; // from moof + + drmoffset += mCurrentMoofOffset; + int ivlength; + CHECK(mFormat->findInt32(kKeyCryptoDefaultIVSize, &ivlength)); + int foo = 1; + for (size_t i = 0; i < mCurrentSampleInfoCount; i++) { + Sample *smpl = &mCurrentSamples.editItemAt(i); + + memset(smpl->iv, 0, 16); + if (mDataSource->readAt(drmoffset, smpl->iv, ivlength) != ivlength) { + return ERROR_IO; + } + + drmoffset += ivlength; + + uint16_t numsubsamples; + if (!mDataSource->getUInt16(drmoffset, &numsubsamples)) { + return ERROR_IO; + } + drmoffset += 2; + for (size_t j = 0; j < numsubsamples; j++) { + uint16_t numclear; + uint32_t numencrypted; + if (!mDataSource->getUInt16(drmoffset, &numclear)) { + return ERROR_IO; + } + drmoffset += 2; + if (!mDataSource->getUInt32(drmoffset, &numencrypted)) { + return ERROR_IO; + } + drmoffset += 4; + smpl->clearsizes.add(numclear); + smpl->encryptedsizes.add(numencrypted); + } + } + + + return OK; +} + status_t MPEG4Source::parseTrackFragmentHeader(off64_t offset, off64_t size) { if (size < 8) { @@ -2317,7 +2651,7 @@ status_t MPEG4Source::parseTrackFragmentHeader(off64_t offset, off64_t size) { } uint32_t flags; - if (!mDataSource->getUInt32(offset, &flags)) { + if (!mDataSource->getUInt32(offset, &flags)) { // actually version + flags return ERROR_MALFORMED; } @@ 
-2550,8 +2884,8 @@ status_t MPEG4Source::parseTrackFragmentRun(off64_t offset, off64_t size) { offset += 4; } - ALOGV("adding sample at offset 0x%08llx, size %u, duration %u, " - " flags 0x%08x", + ALOGV("adding sample %d at offset 0x%08llx, size %u, duration %u, " + " flags 0x%08x", i + 1, dataOffset, sampleSize, sampleDuration, (flags & kFirstSampleFlagsPresent) && i == 0 ? firstSampleFlags : sampleFlags); @@ -3111,6 +3445,20 @@ status_t MPEG4Source::fragmentedRead( mBuffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); } + const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex]; + if (smpl->encryptedsizes.size()) { + // store clear/encrypted lengths in metadata + sp bufmeta = mBuffer->meta_data(); + bufmeta->setData(kKeyPlainSizes, 0, + smpl->clearsizes.array(), smpl->clearsizes.size() * 4); + bufmeta->setData(kKeyEncryptedSizes, 0, + smpl->encryptedsizes.array(), smpl->encryptedsizes.size() * 4); + bufmeta->setData(kKeyCryptoIV, 0, smpl->iv, 16); // use 16 or the actual size? + bufmeta->setInt32(kKeyCryptoDefaultIVSize, mDefaultIVSize); + bufmeta->setInt32(kKeyCryptoMode, mCryptoMode); + bufmeta->setData(kKeyCryptoKey, 0, mCryptoKey, 16); + } + ++mCurrentSampleIndex; *out = mBuffer; diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp index 404fa94..7bc7da2 100644 --- a/media/libstagefright/NuMediaExtractor.cpp +++ b/media/libstagefright/NuMediaExtractor.cpp @@ -228,6 +228,34 @@ status_t NuMediaExtractor::getTrackFormat( return convertMetaDataToMessage(meta, format); } +status_t NuMediaExtractor::getFileFormat(sp *format) const { + Mutex::Autolock autoLock(mLock); + + *format = NULL; + + if (mImpl == NULL) { + return -EINVAL; + } + + sp meta = mImpl->getMetaData(); + + const char *mime; + CHECK(meta->findCString(kKeyMIMEType, &mime)); + *format = new AMessage(); + (*format)->setString("mime", mime); + + uint32_t type; + const void *pssh; + size_t psshsize; + if (meta->findData(kKeyPssh, &type, &pssh, &psshsize)) { + sp buf = new ABuffer(psshsize); + memcpy(buf->data(), pssh, psshsize); + (*format)->setBuffer("pssh", buf); + } + + return OK; +} + status_t NuMediaExtractor::selectTrack(size_t index) { Mutex::Autolock autoLock(mLock); diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h index c68623a..35eff96 100644 --- a/media/libstagefright/include/MPEG4Extractor.h +++ b/media/libstagefright/include/MPEG4Extractor.h @@ -59,6 +59,11 @@ protected: private: + struct PsshInfo { + uint8_t uuid[16]; + uint32_t datalen; + uint8_t *data; + }; struct Track { Track *next; sp meta; @@ -72,6 +77,8 @@ private: uint64_t mSidxDuration; off64_t mMoofOffset; + Vector mPssh; + sp mDataSource; status_t mInitCheck; bool mHasVideo; -- cgit v1.1 From 13ec8c4eb54067a9c982ee141121d0ec8230348b Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Tue, 9 Apr 2013 13:49:56 -0700 Subject: Camera3: Improve error logging In preparation for supporting fragmented result calls. 
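In practice the new macros divide the work as follows (a usage sketch distilled from the hunks below, not an additional change to the patch): CLOGE() reports a transient failure, prefixing the camera ID and calling function but leaving the device state alone, whereas SET_ERR()/SET_ERR_L() also record the first failure cause in the new mErrorCause field and latch the device into STATUS_ERROR, which dump() then prints.

    // Transient failure: log with camera ID and function name; device state
    // is left untouched and the caller decides how to recover.
    if (idx == NAME_NOT_FOUND) {
        CLOGE("Stream %d does not exist", id);
        return BAD_VALUE;
    }

    // Fatal failure: only the first cause is kept in mErrorCause; it is
    // logged and the device is latched into STATUS_ERROR.
    if (res != OK) {
        SET_ERR_L("Unable to configure streams with HAL: %s (%d)",
                strerror(-res), res);
        return res;
    }
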
Bug: 8565103 Change-Id: Id834637d7cbecf7b550333beedab3281b5400748 --- services/camera/libcameraservice/Camera3Device.cpp | 298 ++++++++++++--------- services/camera/libcameraservice/Camera3Device.h | 16 ++ 2 files changed, 183 insertions(+), 131 deletions(-) diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index e53dbb5..08aef83 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -25,6 +25,18 @@ #define ALOGVV(...) ((void)0) #endif +// Convenience macro for transient errors +#define CLOGE(fmt, ...) ALOGE("Camera %d: %s: " fmt, mId, __FUNCTION__, \ + ##__VA_ARGS__) + +// Convenience macros for transitioning to the error state +#define SET_ERR(fmt, ...) setErrorState( \ + "%s: " fmt, __FUNCTION__, \ + ##__VA_ARGS__) +#define SET_ERR_L(fmt, ...) setErrorStateLocked( \ + "%s: " fmt, __FUNCTION__, \ + ##__VA_ARGS__) + #include #include #include @@ -69,7 +81,7 @@ status_t Camera3Device::initialize(camera_module_t *module) ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId); if (mStatus != STATUS_UNINITIALIZED) { - ALOGE("%s: Already initialized!", __FUNCTION__); + CLOGE("Already initialized!"); return INVALID_OPERATION; } @@ -84,21 +96,18 @@ status_t Camera3Device::initialize(camera_module_t *module) reinterpret_cast(&device)); if (res != OK) { - ALOGE("%s: Could not open camera %d: %s (%d)", __FUNCTION__, - mId, strerror(-res), res); - mStatus = STATUS_ERROR; + SET_ERR_L("Could not open camera: %s (%d)", strerror(-res), res); return res; } /** Cross-check device version */ if (device->common.version != CAMERA_DEVICE_API_VERSION_3_0) { - ALOGE("%s: Could not open camera %d: " + SET_ERR_L("Could not open camera: " "Camera device is not version %x, reports %x instead", - __FUNCTION__, mId, CAMERA_DEVICE_API_VERSION_3_0, + CAMERA_DEVICE_API_VERSION_3_0, device->common.version); device->common.close(&device->common); - mStatus = STATUS_ERROR; return BAD_VALUE; } @@ -107,11 +116,10 @@ status_t Camera3Device::initialize(camera_module_t *module) if (res != OK) return res; if (info.device_version != device->common.version) { - ALOGE("%s: HAL reporting mismatched camera_info version (%x)" - " and device version (%x).", __FUNCTION__, + SET_ERR_L("HAL reporting mismatched camera_info version (%x)" + " and device version (%x).", device->common.version, info.device_version); device->common.close(&device->common); - mStatus = STATUS_ERROR; return BAD_VALUE; } @@ -119,10 +127,9 @@ status_t Camera3Device::initialize(camera_module_t *module) res = device->ops->initialize(device, this); if (res != OK) { - ALOGE("%s: Camera %d: Unable to initialize HAL device: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); + SET_ERR_L("Unable to initialize HAL device: %s (%d)", + strerror(-res), res); device->common.close(&device->common); - mStatus = STATUS_ERROR; return BAD_VALUE; } @@ -135,10 +142,9 @@ status_t Camera3Device::initialize(camera_module_t *module) if (mVendorTagOps.get_camera_vendor_section_name != NULL) { res = set_camera_metadata_vendor_tag_ops(&mVendorTagOps); if (res != OK) { - ALOGE("%s: Camera %d: Unable to set tag ops: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); + SET_ERR_L("Unable to set tag ops: %s (%d)", + strerror(-res), res); device->common.close(&device->common); - mStatus = STATUS_ERROR; return res; } } @@ -148,11 +154,10 @@ status_t Camera3Device::initialize(camera_module_t *module) mRequestThread = new RequestThread(this, device); 
res = mRequestThread->run(String8::format("C3Dev-%d-ReqQueue", mId).string()); if (res != OK) { - ALOGE("%s: Camera %d: Unable to start request queue thread: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); + SET_ERR_L("Unable to start request queue thread: %s (%d)", + strerror(-res), res); device->common.close(&device->common); mRequestThread.clear(); - mStatus = STATUS_ERROR; return res; } @@ -179,12 +184,12 @@ status_t Camera3Device::disconnect() { (mStatus == STATUS_ERROR && mRequestThread != NULL)) { res = mRequestThread->clearRepeatingRequests(); if (res != OK) { - ALOGE("%s: Can't stop streaming", __FUNCTION__); + SET_ERR_L("Can't stop streaming"); return res; } res = waitUntilDrainedLocked(); if (res != OK) { - ALOGE("%s: Timeout waiting for HAL to drain", __FUNCTION__); + SET_ERR_L("Timeout waiting for HAL to drain"); return res; } } @@ -225,6 +230,9 @@ status_t Camera3Device::dump(int fd, const Vector &args) { mStatus == STATUS_ACTIVE ? "ACTIVE" : "Unknown"; lines.appendFormat(" Device status: %s\n", status); + if (mStatus == STATUS_ERROR) { + lines.appendFormat(" Error cause: %s\n", mErrorCause.string()); + } lines.appendFormat(" Stream configuration:\n"); if (mInputStream != NULL) { @@ -251,7 +259,7 @@ const CameraMetadata& Camera3Device::info() const { ALOGVV("%s: E", __FUNCTION__); if (CC_UNLIKELY(mStatus == STATUS_UNINITIALIZED || mStatus == STATUS_ERROR)) { - ALOGE("%s: Access to static info %s!", __FUNCTION__, + ALOGW("%s: Access to static info %s!", __FUNCTION__, mStatus == STATUS_ERROR ? "when in error state" : "before init"); } @@ -266,23 +274,23 @@ status_t Camera3Device::capture(CameraMetadata &request) { switch (mStatus) { case STATUS_ERROR: - ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + CLOGE("Device has encountered a serious error"); return INVALID_OPERATION; case STATUS_UNINITIALIZED: - ALOGE("%s: Device not initialized", __FUNCTION__); + CLOGE("Device not initialized"); return INVALID_OPERATION; case STATUS_IDLE: case STATUS_ACTIVE: // OK break; default: - ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + SET_ERR_L("Unexpected status: %d", mStatus); return INVALID_OPERATION; } sp newRequest = setUpRequestLocked(request); if (newRequest == NULL) { - ALOGE("%s: Can't create capture request", __FUNCTION__); + CLOGE("Can't create capture request"); return BAD_VALUE; } @@ -296,23 +304,23 @@ status_t Camera3Device::setStreamingRequest(const CameraMetadata &request) { switch (mStatus) { case STATUS_ERROR: - ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + CLOGE("Device has encountered a serious error"); return INVALID_OPERATION; case STATUS_UNINITIALIZED: - ALOGE("%s: Device not initialized", __FUNCTION__); + CLOGE("Device not initialized"); return INVALID_OPERATION; case STATUS_IDLE: case STATUS_ACTIVE: // OK break; default: - ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + SET_ERR_L("Unexpected status: %d", mStatus); return INVALID_OPERATION; } sp newRepeatingRequest = setUpRequestLocked(request); if (newRepeatingRequest == NULL) { - ALOGE("%s: Can't create repeating request", __FUNCTION__); + CLOGE("Can't create repeating request"); return BAD_VALUE; } @@ -330,8 +338,7 @@ sp Camera3Device::setUpRequestLocked( if (mStatus == STATUS_IDLE) { res = configureStreamsLocked(); if (res != OK) { - ALOGE("%s: Can't set up streams: %s (%d)", - __FUNCTION__, strerror(-res), res); + SET_ERR_L("Can't set up streams: %s (%d)", strerror(-res), res); return NULL; } } @@ -346,17 +353,17 @@ status_t 
Camera3Device::clearStreamingRequest() { switch (mStatus) { case STATUS_ERROR: - ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + CLOGE("Device has encountered a serious error"); return INVALID_OPERATION; case STATUS_UNINITIALIZED: - ALOGE("%s: Device not initialized", __FUNCTION__); + CLOGE("Device not initialized"); return INVALID_OPERATION; case STATUS_IDLE: case STATUS_ACTIVE: // OK break; default: - ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + SET_ERR_L("Unexpected status: %d", mStatus); return INVALID_OPERATION; } @@ -379,10 +386,10 @@ status_t Camera3Device::createStream(sp consumer, switch (mStatus) { case STATUS_ERROR: - ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + CLOGE("Device has encountered a serious error"); return INVALID_OPERATION; case STATUS_UNINITIALIZED: - ALOGE("%s: Device not initialized", __FUNCTION__); + CLOGE("Device not initialized"); return INVALID_OPERATION; case STATUS_IDLE: // OK @@ -394,13 +401,12 @@ status_t Camera3Device::createStream(sp consumer, if (res != OK) { ALOGE("%s: Can't pause captures to reconfigure streams!", __FUNCTION__); - mStatus = STATUS_ERROR; return res; } wasActive = true; break; default: - ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + SET_ERR_L("Unexpected status: %d", mStatus); return INVALID_OPERATION; } assert(mStatus == STATUS_IDLE); @@ -416,8 +422,7 @@ status_t Camera3Device::createStream(sp consumer, res = mOutputStreams.add(mNextStreamId, newStream); if (res < 0) { - ALOGE("%s: Can't add new stream to set: %s (%d)", - __FUNCTION__, strerror(-res), res); + SET_ERR_L("Can't add new stream to set: %s (%d)", strerror(-res), res); return res; } @@ -428,8 +433,8 @@ status_t Camera3Device::createStream(sp consumer, ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__); res = configureStreamsLocked(); if (res != OK) { - ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)", - __FUNCTION__, mNextStreamId, strerror(-res), res); + CLOGE("Can't reconfigure device for new stream %d: %s (%d)", + mNextStreamId, strerror(-res), res); return res; } mRequestThread->setPaused(false); @@ -442,7 +447,7 @@ status_t Camera3Device::createReprocessStreamFromStream(int outputId, int *id) { ATRACE_CALL(); (void)outputId; (void)id; - ALOGE("%s: Unimplemented", __FUNCTION__); + CLOGE("Unimplemented"); return INVALID_OPERATION; } @@ -454,23 +459,23 @@ status_t Camera3Device::getStreamInfo(int id, switch (mStatus) { case STATUS_ERROR: - ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + CLOGE("Device has encountered a serious error"); return INVALID_OPERATION; case STATUS_UNINITIALIZED: - ALOGE("%s: Device not initialized!", __FUNCTION__); + CLOGE("Device not initialized!"); return INVALID_OPERATION; case STATUS_IDLE: case STATUS_ACTIVE: // OK break; default: - ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + SET_ERR_L("Unexpected status: %d", mStatus); return INVALID_OPERATION; } ssize_t idx = mOutputStreams.indexOfKey(id); if (idx == NAME_NOT_FOUND) { - ALOGE("%s: Stream %d is unknown", __FUNCTION__, id); + CLOGE("Stream %d is unknown", id); return idx; } @@ -488,24 +493,24 @@ status_t Camera3Device::setStreamTransform(int id, switch (mStatus) { case STATUS_ERROR: - ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + CLOGE("Device has encountered a serious error"); return INVALID_OPERATION; case STATUS_UNINITIALIZED: - ALOGE("%s: Device not initialized", __FUNCTION__); + CLOGE("Device not initialized"); return 
INVALID_OPERATION; case STATUS_IDLE: case STATUS_ACTIVE: // OK break; default: - ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + SET_ERR_L("Unexpected status: %d", mStatus); return INVALID_OPERATION; } ssize_t idx = mOutputStreams.indexOfKey(id); if (idx == NAME_NOT_FOUND) { - ALOGE("%s: Stream %d does not exist", - __FUNCTION__, id); + CLOGE("Stream %d does not exist", + id); return BAD_VALUE; } @@ -520,7 +525,7 @@ status_t Camera3Device::deleteStream(int id) { // CameraDevice semantics require device to already be idle before // deleteStream is called, unlike for createStream. if (mStatus != STATUS_IDLE) { - ALOGE("%s: Device not idle", __FUNCTION__); + CLOGE("Device not idle"); return INVALID_OPERATION; } @@ -531,8 +536,7 @@ status_t Camera3Device::deleteStream(int id) { } else { ssize_t idx = mOutputStreams.indexOfKey(id); if (idx == NAME_NOT_FOUND) { - ALOGE("%s: Stream %d does not exist", - __FUNCTION__, id); + CLOGE("Stream %d does not exist", id); return BAD_VALUE; } deletedStream = mOutputStreams.editValueAt(idx); @@ -542,7 +546,7 @@ status_t Camera3Device::deleteStream(int id) { // Free up the stream endpoint so that it can be used by some other stream res = deletedStream->disconnect(); if (res != OK) { - ALOGE("%s: Can't disconnect deleted stream", __FUNCTION__); + SET_ERR_L("Can't disconnect deleted stream %d", id); // fall through since we want to still list the stream as deleted. } mDeletedStreams.add(deletedStream); @@ -554,7 +558,7 @@ status_t Camera3Device::deleteReprocessStream(int id) { ATRACE_CALL(); (void)id; - ALOGE("%s: Unimplemented", __FUNCTION__); + CLOGE("Unimplemented"); return INVALID_OPERATION; } @@ -567,24 +571,28 @@ status_t Camera3Device::createDefaultRequest(int templateId, switch (mStatus) { case STATUS_ERROR: - ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + CLOGE("Device has encountered a serious error"); return INVALID_OPERATION; case STATUS_UNINITIALIZED: - ALOGE("%s: Device is not initialized!", __FUNCTION__); + CLOGE("Device is not initialized!"); return INVALID_OPERATION; case STATUS_IDLE: case STATUS_ACTIVE: // OK break; default: - ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + SET_ERR_L("Unexpected status: %d", mStatus); return INVALID_OPERATION; } const camera_metadata_t *rawRequest; rawRequest = mHal3Device->ops->construct_default_request_settings( mHal3Device, templateId); - if (rawRequest == NULL) return DEAD_OBJECT; + if (rawRequest == NULL) { + SET_ERR_L("HAL is unable to construct default settings for template %d", + templateId); + return DEAD_OBJECT; + } *request = rawRequest; return OK; @@ -611,35 +619,31 @@ status_t Camera3Device::waitUntilDrainedLocked() { // Need to shut down break; default: - ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + SET_ERR_L("Unexpected status: %d",mStatus); return INVALID_OPERATION; } if (mRequestThread != NULL) { res = mRequestThread->waitUntilPaused(kShutdownTimeout); if (res != OK) { - ALOGE("%s: Can't stop request thread in %f seconds!", - __FUNCTION__, kShutdownTimeout/1e9); - mStatus = STATUS_ERROR; + SET_ERR_L("Can't stop request thread in %f seconds!", + kShutdownTimeout/1e9); return res; } } if (mInputStream != NULL) { res = mInputStream->waitUntilIdle(kShutdownTimeout); if (res != OK) { - ALOGE("%s: Can't idle input stream %d in %f seconds!", - __FUNCTION__, mInputStream->getId(), kShutdownTimeout/1e9); - mStatus = STATUS_ERROR; + SET_ERR_L("Can't idle input stream %d in %f seconds!", + mInputStream->getId(), kShutdownTimeout/1e9); return 
res; } } for (size_t i = 0; i < mOutputStreams.size(); i++) { res = mOutputStreams.editValueAt(i)->waitUntilIdle(kShutdownTimeout); if (res != OK) { - ALOGE("%s: Can't idle output stream %d in %f seconds!", - __FUNCTION__, mOutputStreams.keyAt(i), - kShutdownTimeout/1e9); - mStatus = STATUS_ERROR; + SET_ERR_L("Can't idle output stream %d in %f seconds!", + mOutputStreams.keyAt(i), kShutdownTimeout/1e9); return res; } } @@ -673,8 +677,8 @@ status_t Camera3Device::waitForNextFrame(nsecs_t timeout) { if (res == TIMED_OUT) { return res; } else if (res != OK) { - ALOGE("%s: Camera %d: Error waiting for frame: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); + ALOGW("%s: Camera %d: No frame in %lld ns: %s (%d)", + __FUNCTION__, mId, timeout, strerror(-res), res); return res; } } @@ -761,7 +765,7 @@ status_t Camera3Device::pushReprocessBuffer(int reprocessStreamId, ATRACE_CALL(); (void)reprocessStreamId; (void)buffer; (void)listener; - ALOGE("%s: Unimplemented", __FUNCTION__); + CLOGE("Unimplemented"); return INVALID_OPERATION; } @@ -782,8 +786,8 @@ sp Camera3Device::createCaptureRequest( if (inputStreams.count > 0) { if (mInputStream == NULL || mInputStream->getId() != inputStreams.data.u8[0]) { - ALOGE("%s: Request references unknown input stream %d", - __FUNCTION__, inputStreams.data.u8[0]); + CLOGE("Request references unknown input stream %d", + inputStreams.data.u8[0]); return NULL; } // Lazy completion of stream configuration (allocation/registration) @@ -791,10 +795,9 @@ sp Camera3Device::createCaptureRequest( if (mInputStream->isConfiguring()) { res = mInputStream->finishConfiguration(mHal3Device); if (res != OK) { - ALOGE("%s: Unable to finish configuring input stream %d:" + SET_ERR_L("Unable to finish configuring input stream %d:" " %s (%d)", - __FUNCTION__, mInputStream->getId(), - strerror(-res), res); + mInputStream->getId(), strerror(-res), res); return NULL; } } @@ -806,15 +809,15 @@ sp Camera3Device::createCaptureRequest( camera_metadata_entry_t streams = newRequest->mSettings.find(ANDROID_REQUEST_OUTPUT_STREAMS); if (streams.count == 0) { - ALOGE("%s: Zero output streams specified!", __FUNCTION__); + CLOGE("Zero output streams specified!"); return NULL; } for (size_t i = 0; i < streams.count; i++) { int idx = mOutputStreams.indexOfKey(streams.data.u8[i]); if (idx == NAME_NOT_FOUND) { - ALOGE("%s: Request references unknown stream %d", - __FUNCTION__, streams.data.u8[i]); + CLOGE("Request references unknown stream %d", + streams.data.u8[i]); return NULL; } sp stream = mOutputStreams.editValueAt(idx); @@ -824,8 +827,8 @@ sp Camera3Device::createCaptureRequest( if (stream->isConfiguring()) { res = stream->finishConfiguration(mHal3Device); if (res != OK) { - ALOGE("%s: Unable to finish configuring stream %d: %s (%d)", - __FUNCTION__, stream->getId(), strerror(-res), res); + SET_ERR_L("Unable to finish configuring stream %d: %s (%d)", + stream->getId(), strerror(-res), res); return NULL; } } @@ -842,7 +845,7 @@ status_t Camera3Device::configureStreamsLocked() { status_t res; if (mStatus != STATUS_IDLE) { - ALOGE("%s: Not idle", __FUNCTION__); + CLOGE("Not idle"); return INVALID_OPERATION; } @@ -859,9 +862,7 @@ status_t Camera3Device::configureStreamsLocked() { camera3_stream_t *inputStream; inputStream = mInputStream->startConfiguration(); if (inputStream == NULL) { - ALOGE("%s: Can't start input stream configuration", - __FUNCTION__); - // TODO: Make sure the error flow here is correct + SET_ERR_L("Can't start input stream configuration"); return INVALID_OPERATION; } 
streams.add(inputStream); @@ -871,9 +872,7 @@ status_t Camera3Device::configureStreamsLocked() { camera3_stream_t *outputStream; outputStream = mOutputStreams.editValueAt(i)->startConfiguration(); if (outputStream == NULL) { - ALOGE("%s: Can't start output stream configuration", - __FUNCTION__); - // TODO: Make sure the error flow here is correct + SET_ERR_L("Can't start output stream configuration"); return INVALID_OPERATION; } streams.add(outputStream); @@ -887,8 +886,8 @@ status_t Camera3Device::configureStreamsLocked() { res = mHal3Device->ops->configure_streams(mHal3Device, &config); if (res != OK) { - ALOGE("%s: Unable to configure streams with HAL: %s (%d)", - __FUNCTION__, strerror(-res), res); + SET_ERR_L("Unable to configure streams with HAL: %s (%d)", + strerror(-res), res); return res; } @@ -903,6 +902,38 @@ status_t Camera3Device::configureStreamsLocked() { return OK; } +void Camera3Device::setErrorState(const char *fmt, ...) { + Mutex::Autolock l(mLock); + va_list args; + va_start(args, fmt); + + setErrorStateLockedV(fmt, args); + + va_end(args); +} + +void Camera3Device::setErrorStateV(const char *fmt, va_list args) { + Mutex::Autolock l(mLock); + setErrorStateLockedV(fmt, args); +} + +void Camera3Device::setErrorStateLocked(const char *fmt, ...) { + va_list args; + va_start(args, fmt); + + setErrorStateLockedV(fmt, args); + + va_end(args); +} + +void Camera3Device::setErrorStateLockedV(const char *fmt, va_list args) { + // Only accept the first failure cause + if (mStatus == STATUS_ERROR) return; + + mErrorCause = String8::formatV(fmt, args); + ALOGE("Camera %d: %s", mId, mErrorCause.string()); + mStatus = STATUS_ERROR; +} /** * Camera HAL device callback methods @@ -914,8 +945,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { status_t res; if (result->result == NULL) { - // TODO: Report error upstream - ALOGW("%s: No metadata for frame %d", __FUNCTION__, + SET_ERR("No metadata provided by HAL for frame %d", result->frame_number); return; } @@ -938,13 +968,12 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { captureResult = result->result; if (captureResult.update(ANDROID_REQUEST_FRAME_COUNT, - (int32_t*)&result->frame_number, 1) != OK) { - ALOGE("%s: Camera %d: Failed to set frame# in metadata (%d)", - __FUNCTION__, mId, result->frame_number); - // TODO: Report error upstream + (int32_t*)&result->frame_number, 1) != OK) { + SET_ERR("Failed to set frame# in metadata (%d)", + result->frame_number); } else { ALOGVV("%s: Camera %d: Set frame# in metadata (%d)", - __FUNCTION__, mId, result->frame_number); + __FUNCTION__, mId, result->frame_number); } // Get timestamp from result metadata @@ -952,9 +981,8 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { camera_metadata_entry entry = captureResult.find(ANDROID_SENSOR_TIMESTAMP); if (entry.count == 0) { - ALOGE("%s: Camera %d: No timestamp provided by HAL for frame %d!", - __FUNCTION__, mId, result->frame_number); - // TODO: Report error upstream + SET_ERR("No timestamp provided by HAL for frame %d!", + result->frame_number); } else { timestamp = entry.data.i64[0]; } @@ -963,8 +991,8 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { entry = captureResult.find(ANDROID_CONTROL_AE_STATE); if (entry.count == 0) { - ALOGE("%s: Camera %d: No AE state provided by HAL for frame %d!", - __FUNCTION__, mId, result->frame_number); + CLOGE("No AE state provided by HAL for frame %d!", + 
result->frame_number); } else { new3aState.aeState = static_cast( @@ -973,8 +1001,8 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { entry = captureResult.find(ANDROID_CONTROL_AF_STATE); if (entry.count == 0) { - ALOGE("%s: Camera %d: No AF state provided by HAL for frame %d!", - __FUNCTION__, mId, result->frame_number); + CLOGE("No AF state provided by HAL for frame %d!", + result->frame_number); } else { new3aState.afState = static_cast( @@ -983,8 +1011,8 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { entry = captureResult.find(ANDROID_CONTROL_AWB_STATE); if (entry.count == 0) { - ALOGE("%s: Camera %d: No AWB state provided by HAL for frame %d!", - __FUNCTION__, mId, result->frame_number); + CLOGE("No AWB state provided by HAL for frame %d!", + result->frame_number); } else { new3aState.awbState = static_cast( @@ -993,16 +1021,16 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { entry = captureResult.find(ANDROID_CONTROL_AF_TRIGGER_ID); if (entry.count == 0) { - ALOGE("%s: Camera %d: No AF trigger ID provided by HAL for frame %d!", - __FUNCTION__, mId, result->frame_number); + CLOGE("No AF trigger ID provided by HAL for frame %d!", + result->frame_number); } else { afTriggerId = entry.data.i32[0]; } entry = captureResult.find(ANDROID_CONTROL_AE_PRECAPTURE_ID); if (entry.count == 0) { - ALOGE("%s: Camera %d: No AE precapture trigger ID provided by HAL" - " for frame %d!", __FUNCTION__, mId, result->frame_number); + CLOGE("No AE precapture trigger ID provided by HAL" + " for frame %d!", result->frame_number); } else { aeTriggerId = entry.data.i32[0]; } @@ -1021,10 +1049,8 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { // Note: stream may be deallocated at this point, if this buffer was the // last reference to it. 
if (res != OK) { - ALOGE("%s: Camera %d: Can't return buffer %d for frame %d to its" - " stream:%s (%d)", __FUNCTION__, mId, i, - result->frame_number, strerror(-res), res); - // TODO: Report error upstream + SET_ERR("Can't return buffer %d for frame %d to its stream: " + " %s (%d)", i, result->frame_number, strerror(-res), res); } } @@ -1056,8 +1082,7 @@ void Camera3Device::notify(const camera3_notify_msg *msg) { } if (msg == NULL) { - ALOGE("%s: Camera %d: HAL sent NULL notify message!", - __FUNCTION__, mId); + SET_ERR_L("HAL sent NULL notify message!"); return; } @@ -1080,8 +1105,8 @@ void Camera3Device::notify(const camera3_notify_msg *msg) { break; } default: - ALOGE("%s: Camera %d: Unknown notify message from HAL: %d", - __FUNCTION__, mId, msg->type); + SET_ERR_L("Unknown notify message from HAL: %d", + msg->type); } } @@ -1240,9 +1265,9 @@ bool Camera3Device::RequestThread::threadLoop() { int32_t triggerCount; res = insertTriggers(nextRequest); if (res < 0) { - ALOGE("RequestThread: Unable to insert triggers " - "(capture request %d, HAL device: %s (%d)", - (mFrameNumber+1), strerror(-res), res); + SET_ERR("RequestThread: Unable to insert triggers " + "(capture request %d, HAL device: %s (%d)", + (mFrameNumber+1), strerror(-res), res); cleanUpFailedRequest(request, nextRequest, outputBuffers); return false; } @@ -1289,7 +1314,7 @@ bool Camera3Device::RequestThread::threadLoop() { request.input_buffer = &inputBuffer; res = nextRequest->mInputStream->getBuffer(&inputBuffer); if (res != OK) { - ALOGE("RequestThread: Can't get input buffer, skipping request:" + SET_ERR("RequestThread: Can't get input buffer, skipping request:" " %s (%d)", strerror(-res), res); cleanUpFailedRequest(request, nextRequest, outputBuffers); return true; @@ -1305,7 +1330,7 @@ bool Camera3Device::RequestThread::threadLoop() { res = nextRequest->mOutputStreams.editItemAt(i)-> getBuffer(&outputBuffers.editItemAt(i)); if (res != OK) { - ALOGE("RequestThread: Can't get output buffer, skipping request:" + SET_ERR("RequestThread: Can't get output buffer, skipping request:" "%s (%d)", strerror(-res), res); cleanUpFailedRequest(request, nextRequest, outputBuffers); return true; @@ -1320,7 +1345,7 @@ bool Camera3Device::RequestThread::threadLoop() { res = mHal3Device->ops->process_capture_request(mHal3Device, &request); if (res != OK) { - ALOGE("RequestThread: Unable to submit capture request %d to HAL" + SET_ERR("RequestThread: Unable to submit capture request %d to HAL" " device: %s (%d)", request.frame_number, strerror(-res), res); cleanUpFailedRequest(request, nextRequest, outputBuffers); return false; @@ -1333,7 +1358,7 @@ bool Camera3Device::RequestThread::threadLoop() { // Remove any previously queued triggers (after unlock) res = removeTriggers(mPrevRequest); if (res != OK) { - ALOGE("RequestThread: Unable to remove triggers " + SET_ERR("RequestThread: Unable to remove triggers " "(capture request %d, HAL device: %s (%d)", request.frame_number, strerror(-res), res); return false; @@ -1378,7 +1403,6 @@ void Camera3Device::RequestThread::cleanUpFailedRequest( nextRequest->mOutputStreams.editItemAt(i)->returnBuffer( outputBuffers[i], 0); } - // TODO: Report error upstream } sp @@ -1462,6 +1486,18 @@ bool Camera3Device::RequestThread::waitIfPaused() { return false; } +void Camera3Device::RequestThread::setErrorState(const char *fmt, ...) 
{ + sp parent = mParent.promote(); + if (parent != NULL) { + va_list args; + va_start(args, fmt); + + parent->setErrorStateV(fmt, args); + + va_end(args); + } +} + status_t Camera3Device::RequestThread::insertTriggers( const sp &request) { diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/Camera3Device.h index 7f294e6..2e4a303 100644 --- a/services/camera/libcameraservice/Camera3Device.h +++ b/services/camera/libcameraservice/Camera3Device.h @@ -128,6 +128,9 @@ class Camera3Device : STATUS_ACTIVE } mStatus; + // Tracking cause of fatal errors when in STATUS_ERROR + String8 mErrorCause; + // Mapping of stream IDs to stream instances typedef KeyedVector > StreamSet; @@ -172,6 +175,16 @@ class Camera3Device : */ status_t configureStreamsLocked(); + /** + * Set device into an error state due to some fatal failure, and set an + * error message to indicate why. Only the first call's message will be + * used. The message is also sent to the log. + */ + void setErrorState(const char *fmt, ...); + void setErrorStateV(const char *fmt, va_list args); + void setErrorStateLocked(const char *fmt, ...); + void setErrorStateLockedV(const char *fmt, va_list args); + struct RequestTrigger { // Metadata tag number, e.g. android.control.aePrecaptureTrigger uint32_t metadataTag; @@ -272,6 +285,9 @@ class Camera3Device : // Pause handling bool waitIfPaused(); + // Relay error to parent device object setErrorState + void setErrorState(const char *fmt, ...); + wp mParent; camera3_device_t *mHal3Device; -- cgit v1.1 From 63594e8f83d982e45bd454224f2d20739b662c40 Mon Sep 17 00:00:00 2001 From: Jamie Gennis Date: Tue, 9 Apr 2013 16:40:54 -0700 Subject: OMXNodeInstance: fix OMX_GetExtensionIndex logging This change fixes the logging of OMX_GetExtensionIndex errors. Under certain circumstances these errors are not harmful and should not be logged. 
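Concretely, the storeMetaDataInBuffers_l() hunk below now logs the lookup failure only when the caller is actually enabling the feature; in sketch form (drawn from the hunk, shown here just to summarize the intent):

    OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
    if (err != OMX_ErrorNone) {
        // A missing "store metadata in buffers" extension is expected when
        // the caller is merely disabling the feature, so stay quiet then.
        if (enable) {
            ALOGE("OMX_GetExtensionIndex %s failed", name);
        }
        return StatusFromOMXError(err);
    }

The other call sites keep logging unconditionally but now include the extension name in the message.
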
Bug: 8538872 Change-Id: I19a13d29ca6263454a9a7a8be205e10363725f31 --- media/libstagefright/omx/OMXNodeInstance.cpp | 32 +++++++++++++--------------- 1 file changed, 15 insertions(+), 17 deletions(-) diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index 46ff22f..971875f 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -292,15 +292,14 @@ status_t OMXNodeInstance::getState(OMX_STATETYPE* state) { status_t OMXNodeInstance::enableGraphicBuffers( OMX_U32 portIndex, OMX_BOOL enable) { Mutex::Autolock autoLock(mLock); + OMX_STRING name = const_cast( + "OMX.google.android.index.enableAndroidNativeBuffers"); OMX_INDEXTYPE index; - OMX_ERRORTYPE err = OMX_GetExtensionIndex( - mHandle, - const_cast("OMX.google.android.index.enableAndroidNativeBuffers"), - &index); + OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index); if (err != OMX_ErrorNone) { - ALOGE("OMX_GetExtensionIndex failed"); + ALOGE("OMX_GetExtensionIndex %s failed", name); return StatusFromOMXError(err); } @@ -331,14 +330,12 @@ status_t OMXNodeInstance::getGraphicBufferUsage( Mutex::Autolock autoLock(mLock); OMX_INDEXTYPE index; - OMX_ERRORTYPE err = OMX_GetExtensionIndex( - mHandle, - const_cast( - "OMX.google.android.index.getAndroidNativeBufferUsage"), - &index); + OMX_STRING name = const_cast( + "OMX.google.android.index.getAndroidNativeBufferUsage"); + OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index); if (err != OMX_ErrorNone) { - ALOGE("OMX_GetExtensionIndex failed"); + ALOGE("OMX_GetExtensionIndex %s failed", name); return StatusFromOMXError(err); } @@ -381,7 +378,9 @@ status_t OMXNodeInstance::storeMetaDataInBuffers_l( OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index); if (err != OMX_ErrorNone) { - ALOGE("OMX_GetExtensionIndex %s failed", name); + if (enable) { + ALOGE("OMX_GetExtensionIndex %s failed", name); + } return StatusFromOMXError(err); } @@ -507,13 +506,12 @@ status_t OMXNodeInstance::useGraphicBuffer( return useGraphicBuffer2_l(portIndex, graphicBuffer, buffer); } - OMX_ERRORTYPE err = OMX_GetExtensionIndex( - mHandle, - const_cast("OMX.google.android.index.useAndroidNativeBuffer"), - &index); + OMX_STRING name = const_cast( + "OMX.google.android.index.useAndroidNativeBuffer"); + OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index); if (err != OMX_ErrorNone) { - ALOGE("OMX_GetExtensionIndex failed"); + ALOGE("OMX_GetExtensionIndex %s failed", name); return StatusFromOMXError(err); } -- cgit v1.1 From da0dc0af0effe9fbfb3ce3187c8472fca2baf3c6 Mon Sep 17 00:00:00 2001 From: Ying Wang Date: Tue, 9 Apr 2013 21:53:49 -0700 Subject: Add liblog Bug: 8580410 Change-Id: If493d87d60d71be664ad75b140c62acadb75b0d0 --- camera/Android.mk | 1 + cmds/stagefright/Android.mk | 3 +-- drm/drmserver/Android.mk | 1 + drm/libdrmframework/Android.mk | 1 + drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.mk | 1 + drm/libdrmframework/plugins/passthru/Android.mk | 1 + drm/mediadrm/plugins/mock/Android.mk | 2 +- libvideoeditor/lvpp/Android.mk | 1 + libvideoeditor/osal/src/Android.mk | 3 +-- libvideoeditor/vss/src/Android.mk | 2 +- media/common_time/Android.mk | 3 ++- media/libeffects/downmix/Android.mk | 2 +- media/libeffects/factory/Android.mk | 2 +- media/libeffects/preprocessing/Android.mk | 3 ++- media/libeffects/visualizer/Android.mk | 1 + media/libmedia/Android.mk | 2 +- media/libmediaplayerservice/Android.mk | 1 + media/libnbaio/Android.mk | 3 ++- 
media/libstagefright/chromium_http/Android.mk | 1 + media/libstagefright/codecs/aacdec/Android.mk | 2 +- media/libstagefright/codecs/aacenc/Android.mk | 4 ++-- media/libstagefright/codecs/amrnb/dec/Android.mk | 2 +- media/libstagefright/codecs/amrnb/enc/Android.mk | 2 +- media/libstagefright/codecs/amrwbenc/Android.mk | 2 +- media/libstagefright/codecs/avc/enc/Android.mk | 1 + media/libstagefright/codecs/flac/enc/Android.mk | 2 +- media/libstagefright/codecs/g711/dec/Android.mk | 2 +- media/libstagefright/codecs/gsm/dec/Android.mk | 2 +- media/libstagefright/codecs/m4v_h263/dec/Android.mk | 2 +- media/libstagefright/codecs/m4v_h263/enc/Android.mk | 1 + media/libstagefright/codecs/mp3dec/Android.mk | 2 +- media/libstagefright/codecs/on2/dec/Android.mk | 2 +- media/libstagefright/codecs/on2/enc/Android.mk | 2 +- media/libstagefright/codecs/on2/h264dec/Android.mk | 3 +-- media/libstagefright/codecs/raw/Android.mk | 2 +- media/libstagefright/codecs/vorbis/dec/Android.mk | 3 +-- media/libstagefright/foundation/Android.mk | 1 + media/libstagefright/id3/Android.mk | 2 +- media/libstagefright/omx/Android.mk | 1 + media/libstagefright/omx/tests/Android.mk | 2 +- media/libstagefright/tests/Android.mk | 1 + media/libstagefright/wifi-display/Android.mk | 5 +++++ media/libstagefright/yuv/Android.mk | 3 ++- media/mediaserver/Android.mk | 1 + media/mtp/Android.mk | 2 +- services/audioflinger/Android.mk | 8 +++++--- services/camera/libcameraservice/Android.mk | 1 + services/medialog/Android.mk | 2 +- 48 files changed, 61 insertions(+), 38 deletions(-) diff --git a/camera/Android.mk b/camera/Android.mk index e33fb50..fa518ff 100644 --- a/camera/Android.mk +++ b/camera/Android.mk @@ -22,6 +22,7 @@ LOCAL_SRC_FILES:= \ LOCAL_SHARED_LIBRARIES := \ libcutils \ libutils \ + liblog \ libbinder \ libhardware \ libui \ diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk index d583e65..3844487 100644 --- a/cmds/stagefright/Android.mk +++ b/cmds/stagefright/Android.mk @@ -9,7 +9,7 @@ LOCAL_SRC_FILES:= \ LOCAL_SHARED_LIBRARIES := \ libstagefright libmedia libutils libbinder libstagefright_foundation \ - libjpeg libgui libcutils + libjpeg libgui libcutils liblog LOCAL_C_INCLUDES:= \ frameworks/av/media/libstagefright \ @@ -187,4 +187,3 @@ LOCAL_MODULE_TAGS := debug LOCAL_MODULE:= muxer include $(BUILD_EXECUTABLE) - diff --git a/drm/drmserver/Android.mk b/drm/drmserver/Android.mk index 96205a1..dc973da 100644 --- a/drm/drmserver/Android.mk +++ b/drm/drmserver/Android.mk @@ -24,6 +24,7 @@ LOCAL_SRC_FILES:= \ LOCAL_SHARED_LIBRARIES := \ libmedia \ libutils \ + liblog \ libbinder \ libdl diff --git a/drm/libdrmframework/Android.mk b/drm/libdrmframework/Android.mk index 9e07fe3..49c4f9b 100644 --- a/drm/libdrmframework/Android.mk +++ b/drm/libdrmframework/Android.mk @@ -25,6 +25,7 @@ LOCAL_MODULE:= libdrmframework LOCAL_SHARED_LIBRARIES := \ libutils \ + liblog \ libbinder \ libdl diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.mk b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.mk index 205b9a5..e251f82 100644 --- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.mk +++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.mk @@ -36,6 +36,7 @@ LOCAL_SHARED_LIBRARIES := \ libicui18n \ libicuuc \ libutils \ + liblog \ libdl \ libcrypto \ libssl \ diff --git a/drm/libdrmframework/plugins/passthru/Android.mk b/drm/libdrmframework/plugins/passthru/Android.mk index d170d49..cb3a2e2 100644 --- a/drm/libdrmframework/plugins/passthru/Android.mk 
+++ b/drm/libdrmframework/plugins/passthru/Android.mk @@ -25,6 +25,7 @@ LOCAL_STATIC_LIBRARIES := libdrmframeworkcommon LOCAL_SHARED_LIBRARIES := \ libutils \ + liblog \ libdl diff --git a/drm/mediadrm/plugins/mock/Android.mk b/drm/mediadrm/plugins/mock/Android.mk index a056cd8..ada23a2 100644 --- a/drm/mediadrm/plugins/mock/Android.mk +++ b/drm/mediadrm/plugins/mock/Android.mk @@ -24,7 +24,7 @@ LOCAL_MODULE := libmockdrmcryptoplugin LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_SHARED_LIBRARIES)/mediadrm LOCAL_SHARED_LIBRARIES := \ - libutils + libutils liblog LOCAL_C_INCLUDES += \ $(TOP)/frameworks/av/include \ diff --git a/libvideoeditor/lvpp/Android.mk b/libvideoeditor/lvpp/Android.mk index 778c5ac..2286827 100755 --- a/libvideoeditor/lvpp/Android.mk +++ b/libvideoeditor/lvpp/Android.mk @@ -50,6 +50,7 @@ LOCAL_SHARED_LIBRARIES := \ libaudioutils \ libbinder \ libcutils \ + liblog \ libEGL \ libGLESv2 \ libgui \ diff --git a/libvideoeditor/osal/src/Android.mk b/libvideoeditor/osal/src/Android.mk index b73b9ae..4f38b0c 100755 --- a/libvideoeditor/osal/src/Android.mk +++ b/libvideoeditor/osal/src/Android.mk @@ -41,7 +41,7 @@ LOCAL_SRC_FILES:= \ LOCAL_MODULE_TAGS := optional -LOCAL_SHARED_LIBRARIES := libcutils libutils +LOCAL_SHARED_LIBRARIES := libcutils libutils liblog LOCAL_C_INCLUDES += \ $(TOP)/frameworks/av/libvideoeditor/osal/inc \ @@ -64,4 +64,3 @@ LOCAL_CFLAGS += -Wno-multichar \ -DUSE_STAGEFRIGHT_3GPP_READER include $(BUILD_SHARED_LIBRARY) - diff --git a/libvideoeditor/vss/src/Android.mk b/libvideoeditor/vss/src/Android.mk index cda7a83..0caa15b 100755 --- a/libvideoeditor/vss/src/Android.mk +++ b/libvideoeditor/vss/src/Android.mk @@ -57,6 +57,7 @@ LOCAL_SHARED_LIBRARIES := \ libaudioutils \ libbinder \ libcutils \ + liblog \ libmedia \ libstagefright \ libstagefright_foundation \ @@ -96,4 +97,3 @@ LOCAL_CFLAGS += -Wno-multichar \ -DDECODE_GIF_ON_SAVING include $(BUILD_SHARED_LIBRARY) - diff --git a/media/common_time/Android.mk b/media/common_time/Android.mk index 526f17b..632acbc 100644 --- a/media/common_time/Android.mk +++ b/media/common_time/Android.mk @@ -16,6 +16,7 @@ LOCAL_SRC_FILES := cc_helper.cpp \ utils.cpp LOCAL_SHARED_LIBRARIES := libbinder \ libhardware \ - libutils + libutils \ + liblog include $(BUILD_SHARED_LIBRARY) diff --git a/media/libeffects/downmix/Android.mk b/media/libeffects/downmix/Android.mk index 3052ad9..5d0a87c 100644 --- a/media/libeffects/downmix/Android.mk +++ b/media/libeffects/downmix/Android.mk @@ -7,7 +7,7 @@ LOCAL_SRC_FILES:= \ EffectDownmix.c LOCAL_SHARED_LIBRARIES := \ - libcutils + libcutils liblog LOCAL_MODULE:= libdownmix diff --git a/media/libeffects/factory/Android.mk b/media/libeffects/factory/Android.mk index 6e69151..60a6ce5 100644 --- a/media/libeffects/factory/Android.mk +++ b/media/libeffects/factory/Android.mk @@ -7,7 +7,7 @@ LOCAL_SRC_FILES:= \ EffectsFactory.c LOCAL_SHARED_LIBRARIES := \ - libcutils + libcutils liblog LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES) LOCAL_MODULE:= libeffects diff --git a/media/libeffects/preprocessing/Android.mk b/media/libeffects/preprocessing/Android.mk index dfa1711..c344352 100644 --- a/media/libeffects/preprocessing/Android.mk +++ b/media/libeffects/preprocessing/Android.mk @@ -21,7 +21,8 @@ LOCAL_C_INCLUDES += $(call include-path-for, speex) LOCAL_SHARED_LIBRARIES := \ libwebrtc_audio_preprocessing \ libspeexresampler \ - libutils + libutils \ + liblog ifeq ($(TARGET_SIMULATOR),true) LOCAL_LDLIBS += -ldl diff --git a/media/libeffects/visualizer/Android.mk 
b/media/libeffects/visualizer/Android.mk index 49cf4fa..e196eb2 100644 --- a/media/libeffects/visualizer/Android.mk +++ b/media/libeffects/visualizer/Android.mk @@ -10,6 +10,7 @@ LOCAL_CFLAGS+= -O2 -fvisibility=hidden LOCAL_SHARED_LIBRARIES := \ libcutils \ + liblog \ libdl LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index fbe71ad..2c0c3a5 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -63,7 +63,7 @@ LOCAL_SRC_FILES += SingleStateQueue.cpp LOCAL_CFLAGS += -DSINGLE_STATE_QUEUE_INSTANTIATIONS='"SingleStateQueueInstantiations.cpp"' LOCAL_SHARED_LIBRARIES := \ - libui libcutils libutils libbinder libsonivox libicuuc libexpat \ + libui liblog libcutils libutils libbinder libsonivox libicuuc libexpat \ libcamera_client libstagefright_foundation \ libgui libdl libaudioutils diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk index 2a6f3c7..d87bc7f 100644 --- a/media/libmediaplayerservice/Android.mk +++ b/media/libmediaplayerservice/Android.mk @@ -27,6 +27,7 @@ LOCAL_SHARED_LIBRARIES := \ libbinder \ libcamera_client \ libcutils \ + liblog \ libdl \ libgui \ libmedia \ diff --git a/media/libnbaio/Android.mk b/media/libnbaio/Android.mk index d372d20..5d00d15 100644 --- a/media/libnbaio/Android.mk +++ b/media/libnbaio/Android.mk @@ -30,6 +30,7 @@ LOCAL_SHARED_LIBRARIES := \ libbinder \ libcommon_time_client \ libcutils \ - libutils + libutils \ + liblog include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/chromium_http/Android.mk b/media/libstagefright/chromium_http/Android.mk index 2c6d84c..f26f386 100644 --- a/media/libstagefright/chromium_http/Android.mk +++ b/media/libstagefright/chromium_http/Android.mk @@ -22,6 +22,7 @@ LOCAL_SHARED_LIBRARIES += \ libchromium_net \ libutils \ libcutils \ + liblog \ libstagefright_foundation \ libstagefright \ libdrmframework diff --git a/media/libstagefright/codecs/aacdec/Android.mk b/media/libstagefright/codecs/aacdec/Android.mk index 4dc38a8..ffa64f9 100644 --- a/media/libstagefright/codecs/aacdec/Android.mk +++ b/media/libstagefright/codecs/aacdec/Android.mk @@ -20,7 +20,7 @@ LOCAL_CFLAGS := LOCAL_STATIC_LIBRARIES := libFraunhoferAAC LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils libcutils + libstagefright_omx libstagefright_foundation libutils libcutils liblog LOCAL_MODULE := libstagefright_soft_aacdec LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/codecs/aacenc/Android.mk b/media/libstagefright/codecs/aacenc/Android.mk index 820734d..057c69b 100644 --- a/media/libstagefright/codecs/aacenc/Android.mk +++ b/media/libstagefright/codecs/aacenc/Android.mk @@ -109,7 +109,7 @@ ifeq ($(AAC_LIBRARY), fraunhofer) LOCAL_STATIC_LIBRARIES := libFraunhoferAAC LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils + libstagefright_omx libstagefright_foundation libutils liblog LOCAL_MODULE := libstagefright_soft_aacenc LOCAL_MODULE_TAGS := optional @@ -132,7 +132,7 @@ else # visualon libstagefright_aacenc LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils \ + libstagefright_omx libstagefright_foundation libutils liblog \ libstagefright_enc_common LOCAL_MODULE := libstagefright_soft_aacenc diff --git a/media/libstagefright/codecs/amrnb/dec/Android.mk b/media/libstagefright/codecs/amrnb/dec/Android.mk index b48a459..8d6c6f8 100644 --- a/media/libstagefright/codecs/amrnb/dec/Android.mk +++ 
b/media/libstagefright/codecs/amrnb/dec/Android.mk @@ -72,7 +72,7 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_amrnbdec libstagefright_amrwbdec LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils \ + libstagefright_omx libstagefright_foundation libutils liblog \ libstagefright_amrnb_common LOCAL_MODULE := libstagefright_soft_amrdec diff --git a/media/libstagefright/codecs/amrnb/enc/Android.mk b/media/libstagefright/codecs/amrnb/enc/Android.mk index 457656a..f4e467a 100644 --- a/media/libstagefright/codecs/amrnb/enc/Android.mk +++ b/media/libstagefright/codecs/amrnb/enc/Android.mk @@ -92,7 +92,7 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_amrnbenc LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils \ + libstagefright_omx libstagefright_foundation libutils liblog \ libstagefright_amrnb_common LOCAL_MODULE := libstagefright_soft_amrnbenc diff --git a/media/libstagefright/codecs/amrwbenc/Android.mk b/media/libstagefright/codecs/amrwbenc/Android.mk index edfd7b7..c5b8e0c 100644 --- a/media/libstagefright/codecs/amrwbenc/Android.mk +++ b/media/libstagefright/codecs/amrwbenc/Android.mk @@ -130,7 +130,7 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_amrwbenc LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils \ + libstagefright_omx libstagefright_foundation libutils liblog \ libstagefright_enc_common LOCAL_MODULE := libstagefright_soft_amrwbenc diff --git a/media/libstagefright/codecs/avc/enc/Android.mk b/media/libstagefright/codecs/avc/enc/Android.mk index cffe469..7d17c2a 100644 --- a/media/libstagefright/codecs/avc/enc/Android.mk +++ b/media/libstagefright/codecs/avc/enc/Android.mk @@ -62,6 +62,7 @@ LOCAL_SHARED_LIBRARIES := \ libstagefright_foundation \ libstagefright_omx \ libutils \ + liblog \ libui diff --git a/media/libstagefright/codecs/flac/enc/Android.mk b/media/libstagefright/codecs/flac/enc/Android.mk index 546a357..f01d605 100644 --- a/media/libstagefright/codecs/flac/enc/Android.mk +++ b/media/libstagefright/codecs/flac/enc/Android.mk @@ -10,7 +10,7 @@ LOCAL_C_INCLUDES := \ external/flac/include LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils + libstagefright libstagefright_omx libstagefright_foundation libutils liblog LOCAL_STATIC_LIBRARIES := \ libFLAC \ diff --git a/media/libstagefright/codecs/g711/dec/Android.mk b/media/libstagefright/codecs/g711/dec/Android.mk index 28be646..4c80da6 100644 --- a/media/libstagefright/codecs/g711/dec/Android.mk +++ b/media/libstagefright/codecs/g711/dec/Android.mk @@ -9,7 +9,7 @@ LOCAL_C_INCLUDES := \ frameworks/native/include/media/openmax LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils + libstagefright libstagefright_omx libstagefright_foundation libutils liblog LOCAL_MODULE := libstagefright_soft_g711dec LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/codecs/gsm/dec/Android.mk b/media/libstagefright/codecs/gsm/dec/Android.mk index 9c0c6ae..71613d2 100644 --- a/media/libstagefright/codecs/gsm/dec/Android.mk +++ b/media/libstagefright/codecs/gsm/dec/Android.mk @@ -10,7 +10,7 @@ LOCAL_C_INCLUDES := \ external/libgsm/inc LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils + libstagefright libstagefright_omx libstagefright_foundation libutils liblog LOCAL_STATIC_LIBRARIES := \ libgsm diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.mk 
b/media/libstagefright/codecs/m4v_h263/dec/Android.mk index a6b1edc..a3d5779 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/Android.mk +++ b/media/libstagefright/codecs/m4v_h263/dec/Android.mk @@ -67,7 +67,7 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_m4vh263dec LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils + libstagefright libstagefright_omx libstagefright_foundation libutils liblog LOCAL_MODULE := libstagefright_soft_mpeg4dec LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.mk b/media/libstagefright/codecs/m4v_h263/enc/Android.mk index 865cc9c..83a2dd2 100644 --- a/media/libstagefright/codecs/m4v_h263/enc/Android.mk +++ b/media/libstagefright/codecs/m4v_h263/enc/Android.mk @@ -65,6 +65,7 @@ LOCAL_SHARED_LIBRARIES := \ libstagefright_foundation \ libstagefright_omx \ libutils \ + liblog \ libui diff --git a/media/libstagefright/codecs/mp3dec/Android.mk b/media/libstagefright/codecs/mp3dec/Android.mk index ec8d7ec..135c715 100644 --- a/media/libstagefright/codecs/mp3dec/Android.mk +++ b/media/libstagefright/codecs/mp3dec/Android.mk @@ -70,7 +70,7 @@ LOCAL_C_INCLUDES := \ $(LOCAL_PATH)/include LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils + libstagefright libstagefright_omx libstagefright_foundation libutils liblog LOCAL_STATIC_LIBRARIES := \ libstagefright_mp3dec diff --git a/media/libstagefright/codecs/on2/dec/Android.mk b/media/libstagefright/codecs/on2/dec/Android.mk index 0082d7c..7f2c46d 100644 --- a/media/libstagefright/codecs/on2/dec/Android.mk +++ b/media/libstagefright/codecs/on2/dec/Android.mk @@ -15,7 +15,7 @@ LOCAL_STATIC_LIBRARIES := \ libvpx LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils + libstagefright libstagefright_omx libstagefright_foundation libutils liblog LOCAL_MODULE := libstagefright_soft_vpxdec LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/codecs/on2/enc/Android.mk b/media/libstagefright/codecs/on2/enc/Android.mk index 5d3317c..a92d376 100644 --- a/media/libstagefright/codecs/on2/enc/Android.mk +++ b/media/libstagefright/codecs/on2/enc/Android.mk @@ -16,7 +16,7 @@ LOCAL_STATIC_LIBRARIES := \ libvpx LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils \ + libstagefright libstagefright_omx libstagefright_foundation libutils liblog \ LOCAL_MODULE := libstagefright_soft_vpxenc LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/codecs/on2/h264dec/Android.mk b/media/libstagefright/codecs/on2/h264dec/Android.mk index 772fd60..2539f98 100644 --- a/media/libstagefright/codecs/on2/h264dec/Android.mk +++ b/media/libstagefright/codecs/on2/h264dec/Android.mk @@ -97,7 +97,7 @@ ifeq ($(ARCH_ARM_HAVE_NEON),true) endif LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils \ + libstagefright libstagefright_omx libstagefright_foundation libutils liblog \ LOCAL_MODULE := libstagefright_soft_h264dec @@ -124,4 +124,3 @@ LOCAL_MODULE_TAGS := debug LOCAL_MODULE := decoder include $(BUILD_EXECUTABLE) - diff --git a/media/libstagefright/codecs/raw/Android.mk b/media/libstagefright/codecs/raw/Android.mk index 285c747..fe90a03 100644 --- a/media/libstagefright/codecs/raw/Android.mk +++ b/media/libstagefright/codecs/raw/Android.mk @@ -9,7 +9,7 @@ LOCAL_C_INCLUDES := \ frameworks/native/include/media/openmax LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx 
libstagefright_foundation libutils + libstagefright_omx libstagefright_foundation libutils liblog LOCAL_MODULE := libstagefright_soft_rawdec LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/codecs/vorbis/dec/Android.mk b/media/libstagefright/codecs/vorbis/dec/Android.mk index 395dd6b..2232353 100644 --- a/media/libstagefright/codecs/vorbis/dec/Android.mk +++ b/media/libstagefright/codecs/vorbis/dec/Android.mk @@ -11,10 +11,9 @@ LOCAL_C_INCLUDES := \ LOCAL_SHARED_LIBRARIES := \ libvorbisidec libstagefright libstagefright_omx \ - libstagefright_foundation libutils + libstagefright_foundation libutils liblog LOCAL_MODULE := libstagefright_soft_vorbisdec LOCAL_MODULE_TAGS := optional include $(BUILD_SHARED_LIBRARY) - diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk index b7577d6..d65e213 100644 --- a/media/libstagefright/foundation/Android.mk +++ b/media/libstagefright/foundation/Android.mk @@ -20,6 +20,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_SHARED_LIBRARIES := \ libbinder \ libutils \ + liblog LOCAL_CFLAGS += -Wno-multichar diff --git a/media/libstagefright/id3/Android.mk b/media/libstagefright/id3/Android.mk index ff35d4a..80a1a3a 100644 --- a/media/libstagefright/id3/Android.mk +++ b/media/libstagefright/id3/Android.mk @@ -16,7 +16,7 @@ LOCAL_SRC_FILES := \ testid3.cpp LOCAL_SHARED_LIBRARIES := \ - libstagefright libutils libbinder libstagefright_foundation + libstagefright libutils liblog libbinder libstagefright_foundation LOCAL_STATIC_LIBRARIES := \ libstagefright_id3 diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk index 9129f08..a8b4939 100644 --- a/media/libstagefright/omx/Android.mk +++ b/media/libstagefright/omx/Android.mk @@ -19,6 +19,7 @@ LOCAL_SHARED_LIBRARIES := \ libbinder \ libmedia \ libutils \ + liblog \ libui \ libgui \ libcutils \ diff --git a/media/libstagefright/omx/tests/Android.mk b/media/libstagefright/omx/tests/Android.mk index 04441ca..1061c39 100644 --- a/media/libstagefright/omx/tests/Android.mk +++ b/media/libstagefright/omx/tests/Android.mk @@ -5,7 +5,7 @@ LOCAL_SRC_FILES = \ OMXHarness.cpp \ LOCAL_SHARED_LIBRARIES := \ - libstagefright libbinder libmedia libutils libstagefright_foundation + libstagefright libbinder libmedia libutils liblog libstagefright_foundation LOCAL_C_INCLUDES := \ $(TOP)/frameworks/av/media/libstagefright \ diff --git a/media/libstagefright/tests/Android.mk b/media/libstagefright/tests/Android.mk index 57fff0b..06ce16b 100644 --- a/media/libstagefright/tests/Android.mk +++ b/media/libstagefright/tests/Android.mk @@ -26,6 +26,7 @@ LOCAL_SHARED_LIBRARIES := \ libsync \ libui \ libutils \ + liblog LOCAL_STATIC_LIBRARIES := \ libgtest \ diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index 1578c21..f99ef60 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -31,6 +31,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_SHARED_LIBRARIES:= \ libbinder \ libcutils \ + liblog \ libgui \ libmedia \ libstagefright \ @@ -59,6 +60,7 @@ LOCAL_SHARED_LIBRARIES:= \ libstagefright_foundation \ libstagefright_wfd \ libutils \ + liblog \ LOCAL_MODULE:= wfd @@ -81,6 +83,7 @@ LOCAL_SHARED_LIBRARIES:= \ libstagefright_foundation \ libstagefright_wfd \ libutils \ + liblog \ LOCAL_MODULE:= udptest @@ -103,6 +106,7 @@ LOCAL_SHARED_LIBRARIES:= \ libstagefright_foundation \ libstagefright_wfd \ libutils \ + liblog \ LOCAL_MODULE:= rtptest @@ -125,6 +129,7 @@ 
LOCAL_SHARED_LIBRARIES:= \ libstagefright_foundation \ libstagefright_wfd \ libutils \ + liblog \ LOCAL_MODULE:= nettest diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk index a4253f6..b3f7b1b 100644 --- a/media/libstagefright/yuv/Android.mk +++ b/media/libstagefright/yuv/Android.mk @@ -6,7 +6,8 @@ LOCAL_SRC_FILES:= \ YUVCanvas.cpp LOCAL_SHARED_LIBRARIES := \ - libcutils + libcutils \ + liblog LOCAL_MODULE:= libstagefright_yuv diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk index a485646..1ac647a 100644 --- a/media/mediaserver/Android.mk +++ b/media/mediaserver/Android.mk @@ -22,6 +22,7 @@ LOCAL_SHARED_LIBRARIES := \ libmedia \ libmediaplayerservice \ libutils \ + liblog \ libbinder LOCAL_STATIC_LIBRARIES := \ diff --git a/media/mtp/Android.mk b/media/mtp/Android.mk index bee28d4..ac608a1 100644 --- a/media/mtp/Android.mk +++ b/media/mtp/Android.mk @@ -42,6 +42,6 @@ LOCAL_CFLAGS := -DMTP_DEVICE -DMTP_HOST # Needed for LOCAL_C_INCLUDES := bionic/libc/private -LOCAL_SHARED_LIBRARIES := libutils libcutils libusbhost libbinder +LOCAL_SHARED_LIBRARIES := libutils libcutils liblog libusbhost libbinder include $(BUILD_SHARED_LIBRARY) diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index 7806f48..061a079 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -22,7 +22,7 @@ LOCAL_SRC_FILES:= \ AudioResampler.cpp.arm \ AudioPolicyService.cpp \ ServiceUtilities.cpp \ - AudioResamplerCubic.cpp.arm \ + AudioResamplerCubic.cpp.arm \ AudioResamplerSinc.cpp.arm LOCAL_SRC_FILES += StateQueue.cpp @@ -39,6 +39,7 @@ LOCAL_SHARED_LIBRARIES := \ libcommon_time_client \ libcutils \ libutils \ + liblog \ libbinder \ libmedia \ libnbaio \ @@ -94,9 +95,10 @@ LOCAL_SRC_FILES:= \ AudioResamplerSinc.cpp.arm LOCAL_SHARED_LIBRARIES := \ - libdl \ + libdl \ libcutils \ - libutils + libutils \ + liblog LOCAL_MODULE:= test-resample diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index 6847bf8..3c84703 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -32,6 +32,7 @@ LOCAL_SRC_FILES:= \ LOCAL_SHARED_LIBRARIES:= \ libui \ + liblog \ libutils \ libbinder \ libcutils \ diff --git a/services/medialog/Android.mk b/services/medialog/Android.mk index 559b1ed..08006c8 100644 --- a/services/medialog/Android.mk +++ b/services/medialog/Android.mk @@ -4,7 +4,7 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES := MediaLogService.cpp -LOCAL_SHARED_LIBRARIES := libmedia libbinder libutils libnbaio +LOCAL_SHARED_LIBRARIES := libmedia libbinder libutils liblog libnbaio LOCAL_MODULE:= libmedialogservice -- cgit v1.1 From 2c65be2298f055d015c31dea9956855236a0b465 Mon Sep 17 00:00:00 2001 From: Rom Lemarchand Date: Wed, 10 Apr 2013 16:58:15 -0700 Subject: Add support for OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar32m color format Change-Id: Ib862ee341ccf668445f0dff29c5a39e91e769244 --- media/libstagefright/ACodec.cpp | 1 + media/libstagefright/OMXCodec.cpp | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index ff72b71..9c4378e 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -1438,6 +1438,7 @@ status_t ACodec::setSupportedOutputFormat() { || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar || format.eColorFormat == 
OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka + || format.eColorFormat == OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar32m || format.eColorFormat == OMX_SEC_COLOR_FormatNV12Tiled); return mOMX->setParameter( diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 22aefcc..c537557 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -1218,7 +1218,8 @@ status_t OMXCodec::setVideoOutputFormat( || format.eColorFormat == OMX_COLOR_FormatCbYCrY || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar - || format.eColorFormat == OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka); + || format.eColorFormat == OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka + || format.eColorFormat == OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar32m); int32_t colorFormat; if (meta->findInt32(kKeyColorFormat, &colorFormat) -- cgit v1.1 From d85929f6086e050d7cb33bfe0d29f339ad7279e5 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 11 Apr 2013 11:07:55 -0700 Subject: Instead of returning an error, return an invalid duration (-1 ms) if no duration information was available. This prevents us from entering ERROR state, effectively rendering the player instance useless. Change-Id: I602d2661ae8b8633360306c0ea9208fb11e2bf17 related-to-bug: 8596285 --- media/libmedia/mediaplayer.cpp | 7 +++++++ media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp | 6 +++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index 3defec3..ecae3d3 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -398,6 +398,13 @@ status_t MediaPlayer::getDuration_l(int *msec) if (mPlayer != 0 && isValidState) { int durationMs; status_t ret = mPlayer->getDuration(&durationMs); + + if (ret != OK) { + // Do not enter error state just because no duration was available. + durationMs = -1; + ret = OK; + } + if (msec) { *msec = durationMs; } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index 723af09..bdafb29 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -314,11 +314,11 @@ status_t NuPlayerDriver::getDuration(int *msec) { Mutex::Autolock autoLock(mLock); if (mDurationUs < 0) { - *msec = 0; - } else { - *msec = (mDurationUs + 500ll) / 1000; + return UNKNOWN_ERROR; } + *msec = (mDurationUs + 500ll) / 1000; + return OK; } -- cgit v1.1 From 76ab6df08a2069eac6317715dadccbb6041e7fab Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 11 Apr 2013 11:37:28 -0700 Subject: The framework really doesn't need to know the specifics of the codec color format, so stop trying to enforce a whitelist. 
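For illustration only (this snippet is not part of the change; it follows the InitOMXParams/getParameter pattern already used in ACodec and OMXCodec, and the variable names are assumptions): with the whitelist gone, the framework just reads whatever color format the component reports and forwards it.

    // Sketch: query the component's current output color format and pass it
    // through untouched instead of CHECK()-ing it against a hard-coded list.
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;
    format.nIndex = 0;

    CHECK_EQ(mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat, &format, sizeof(format)),
             (status_t)OK);

    // Vendor-specific values such as OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar32m
    // are accepted as-is; consumers learn the value from the reported output format.
    int32_t colorFormat = format.eColorFormat;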
Change-Id: I0ceb3bdd5bcc6c1bbd56740b0cd662a2b5820dfe related-to-bug: 8596546 --- media/libstagefright/ACodec.cpp | 8 -------- media/libstagefright/OMXCodec.cpp | 7 ------- 2 files changed, 15 deletions(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index ff72b71..d24bd64 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -1432,14 +1432,6 @@ status_t ACodec::setSupportedOutputFormat() { CHECK_EQ(err, (status_t)OK); CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused); - CHECK(format.eColorFormat == OMX_COLOR_FormatYUV420Planar - || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar - || format.eColorFormat == OMX_COLOR_FormatCbYCrY - || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar - || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar - || format.eColorFormat == OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka - || format.eColorFormat == OMX_SEC_COLOR_FormatNV12Tiled); - return mOMX->setParameter( mNode, OMX_IndexParamVideoPortFormat, &format, sizeof(format)); diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 22aefcc..6c0779d 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -1213,13 +1213,6 @@ status_t OMXCodec::setVideoOutputFormat( CHECK_EQ(err, (status_t)OK); CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused); - CHECK(format.eColorFormat == OMX_COLOR_FormatYUV420Planar - || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar - || format.eColorFormat == OMX_COLOR_FormatCbYCrY - || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar - || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar - || format.eColorFormat == OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka); - int32_t colorFormat; if (meta->findInt32(kKeyColorFormat, &colorFormat) && colorFormat != OMX_COLOR_FormatUnused -- cgit v1.1 From ac0230da14a3d223c2144b165a3a163e8519d239 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 11 Apr 2013 16:06:14 -0700 Subject: ACodec now signals an error if the mediaserver died while it is in anything other than "uninitialized" state. 
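A minimal sketch of the binder death-notification pattern the change below builds on (simplified stand-in for ACodec's DeathNotifier, not part of the patch; mCodec denotes the ACodec instance):

    // Register a DeathRecipient on the remote IOMX binder so a mediaserver
    // crash becomes a codec error instead of a silent hang.
    struct OmxDeathWatcher : public IBinder::DeathRecipient {
        OmxDeathWatcher(const sp<AMessage> &notify) : mNotify(notify) {}
        virtual void binderDied(const wp<IBinder> & /* who */) {
            mNotify->post();  // delivers kWhatOMXDied to the codec's looper
        }
    private:
        sp<AMessage> mNotify;
    };

    sp<IOMX> omx = client.interface();
    sp<IBinder::DeathRecipient> watcher =
            new OmxDeathWatcher(new AMessage(kWhatOMXDied, mCodec->id()));
    if (omx->asBinder()->linkToDeath(watcher) != OK) {
        // Local, in-process binder: if it dies so do we, nothing to watch.
        watcher.clear();
    }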
Change-Id: Id133d897ac65b455b34e5de17ff9c39b47285630 related-to-bug: 8397711 --- include/media/stagefright/ACodec.h | 2 ++ media/libstagefright/ACodec.cpp | 57 ++++++++++++++++++++++++++++++++------ media/libstagefright/OMXClient.cpp | 2 +- 3 files changed, 52 insertions(+), 9 deletions(-) diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h index 96baf34..5cfe5bc 100644 --- a/include/media/stagefright/ACodec.h +++ b/include/media/stagefright/ACodec.h @@ -46,6 +46,7 @@ struct ACodec : public AHierarchicalStateMachine { kWhatInputSurfaceCreated = 'isfc', kWhatSignaledInputEOS = 'seos', kWhatBuffersAllocated = 'allc', + kWhatOMXDied = 'OMXd', }; ACodec(); @@ -97,6 +98,7 @@ private: struct ExecutingToIdleState; struct IdleToLoadedState; struct FlushingState; + struct DeathNotifier; enum { kWhatSetup = 'setu', diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index ff72b71..01ff07e 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -166,6 +166,24 @@ private: //////////////////////////////////////////////////////////////////////////////// +struct ACodec::DeathNotifier : public IBinder::DeathRecipient { + DeathNotifier(const sp ¬ify) + : mNotify(notify) { + } + + virtual void binderDied(const wp &) { + mNotify->post(); + } + +protected: + virtual ~DeathNotifier() {} + +private: + sp mNotify; + + DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); +}; + struct ACodec::UninitializedState : public ACodec::BaseState { UninitializedState(ACodec *codec); @@ -177,6 +195,8 @@ private: void onSetup(const sp &msg); bool onAllocateComponent(const sp &msg); + sp mDeathNotifier; + DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); }; @@ -2487,6 +2507,13 @@ bool ACodec::BaseState::onMessageReceived(const sp &msg) { return true; } + case ACodec::kWhatOMXDied: + { + ALOGE("OMX/mediaserver died, signalling error!"); + mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); + break; + } + default: return false; } @@ -3035,6 +3062,18 @@ ACodec::UninitializedState::UninitializedState(ACodec *codec) void ACodec::UninitializedState::stateEntered() { ALOGV("Now uninitialized"); + + if (mDeathNotifier != NULL) { + mCodec->mOMX->asBinder()->unlinkToDeath(mDeathNotifier); + mDeathNotifier.clear(); + } + + mCodec->mNativeWindow.clear(); + mCodec->mNode = NULL; + mCodec->mOMX.clear(); + mCodec->mQuirks = 0; + mCodec->mFlags = 0; + mCodec->mComponentName.clear(); } bool ACodec::UninitializedState::onMessageReceived(const sp &msg) { @@ -3106,6 +3145,15 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp &msg) { sp omx = client.interface(); + sp notify = new AMessage(kWhatOMXDied, mCodec->id()); + + mDeathNotifier = new DeathNotifier(notify); + if (omx->asBinder()->linkToDeath(mDeathNotifier) != OK) { + // This was a local binder, if it dies so do we, we won't care + // about any notifications in the afterlife. 
+ mDeathNotifier.clear(); + } + Vector matchingCodecs; AString mime; @@ -3170,7 +3218,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp &msg) { return false; } - sp notify = new AMessage(kWhatOMXMessage, mCodec->id()); + notify = new AMessage(kWhatOMXMessage, mCodec->id()); observer->setNotificationMessage(notify); mCodec->mComponentName = componentName; @@ -3224,13 +3272,6 @@ void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { if (!keepComponentAllocated) { CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK); - mCodec->mNativeWindow.clear(); - mCodec->mNode = NULL; - mCodec->mOMX.clear(); - mCodec->mQuirks = 0; - mCodec->mFlags = 0; - mCodec->mComponentName.clear(); - mCodec->changeState(mCodec->mUninitializedState); } diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp index ff72e0e..1822f07 100644 --- a/media/libstagefright/OMXClient.cpp +++ b/media/libstagefright/OMXClient.cpp @@ -32,7 +32,7 @@ struct MuxOMX : public IOMX { MuxOMX(const sp &remoteOMX); virtual ~MuxOMX(); - virtual IBinder *onAsBinder() { return NULL; } + virtual IBinder *onAsBinder() { return mRemoteOMX->asBinder().get(); } virtual bool livesLocally(node_id node, pid_t pid); -- cgit v1.1 From 9a62391fa187a345a5d1301d4bc0b165c2ea4737 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 11 Apr 2013 17:21:41 -0700 Subject: Workaround: requestPriority() is one-way Bug: 8565696 Change-Id: If5fcdf8593e1ca15bea45217bd683b43a2a106c7 --- services/audioflinger/ISchedulingPolicyService.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/audioflinger/ISchedulingPolicyService.cpp b/services/audioflinger/ISchedulingPolicyService.cpp index 909b77e..218aa6b 100644 --- a/services/audioflinger/ISchedulingPolicyService.cpp +++ b/services/audioflinger/ISchedulingPolicyService.cpp @@ -44,7 +44,7 @@ public: data.writeInt32(pid); data.writeInt32(tid); data.writeInt32(prio); - remote()->transact(REQUEST_PRIORITY_TRANSACTION, data, &reply); + remote()->transact(REQUEST_PRIORITY_TRANSACTION, data, &reply, IBinder::FLAG_ONEWAY); // fail on exception if (reply.readExceptionCode() != 0) return -1; return reply.readInt32(); -- cgit v1.1 From f3bd1972e039c6ded5154db715e5a32f1813a239 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Tue, 9 Apr 2013 14:57:38 -0700 Subject: Fix MediaCodec.flush() There were two problems here. One was that the skip/cut buffer wasn't cleared when it should be, and the second was that we were always sending the first buffer of encoded data to the AAC decoder twice. 
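To make the client-visible contract concrete, a hedged sketch (not part of the change) using the C++ MediaCodec API; `codec` is assumed to be an already configured and started sp<MediaCodec> audio decoder:

    // After flush() the decoder must behave as if freshly started: the
    // SkipCutBuffer (encoder-delay trimming) state is cleared and the next
    // queued access unit is decoded exactly once.
    CHECK_EQ(codec->flush(), (status_t)OK);   // drops all pending input/output

    size_t index;
    status_t err = codec->dequeueInputBuffer(&index, 10000ll /* timeoutUs */);
    // ... resume queueing input from a position the decoder can start from cleanly ...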
b/8543366 Change-Id: Ic040edabf16cccd1f6ef8c9e5c9cfbacbdd8a089 --- media/libstagefright/ACodec.cpp | 5 +- media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 103 +++++++++++------------- media/libstagefright/codecs/aacdec/SoftAAC2.h | 3 +- 3 files changed, 55 insertions(+), 56 deletions(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index ff72b71..31a9490 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -3626,7 +3626,6 @@ bool ACodec::ExecutingState::onMessageReceived(const sp &msg) { (status_t)OK); mCodec->changeState(mCodec->mFlushingState); - handled = true; break; } @@ -4141,6 +4140,10 @@ void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { mCodec->mInputEOSResult = OK; + if (mCodec->mSkipCutBuffer != NULL) { + mCodec->mSkipCutBuffer->clear(); + } + mCodec->changeState(mCodec->mExecutingState); } } diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp index a8ab2ac..8ba2afb 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp @@ -118,7 +118,7 @@ status_t SoftAAC2::initDecoder() { status = OK; } } - mIsFirst = true; + mDecoderHasData = false; // for streams that contain metadata, use the mobile profile DRC settings unless overridden // by platform properties: @@ -327,6 +327,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL); return; } + inQueue.erase(inQueue.begin()); info->mOwnedByUs = false; notifyEmptyBufferDone(header); @@ -358,7 +359,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { inInfo->mOwnedByUs = false; notifyEmptyBufferDone(inHeader); - if (!mIsFirst) { + if (mDecoderHasData) { // flush out the decoder's delayed data by calling DecodeFrame // one more time, with the AACDEC_FLUSH flag set INT_PCM *outBuffer = @@ -370,6 +371,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { outBuffer, outHeader->nAllocLen, AACDEC_FLUSH); + mDecoderHasData = false; if (decoderErr != AAC_DEC_OK) { mSignalledError = true; @@ -385,9 +387,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { * sizeof(int16_t) * mStreamInfo->numChannels; } else { - // Since we never discarded frames from the start, we won't have - // to add any padding at the end either. - + // we never submitted any data to the decoder, so there's nothing to flush out outHeader->nFilledLen = 0; } @@ -473,6 +473,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { inBuffer, inBufferLength, bytesValid); + mDecoderHasData = true; decoderErr = aacDecoder_DecodeFrame(mAACDecoder, outBuffer, @@ -484,6 +485,35 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { } } + size_t numOutBytes = + mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels; + + if (decoderErr == AAC_DEC_OK) { + UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0]; + inHeader->nFilledLen -= inBufferUsedLength; + inHeader->nOffset += inBufferUsedLength; + } else { + ALOGW("AAC decoder returned error %d, substituting silence", + decoderErr); + + memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes); + + // Discard input buffer. 
+ inHeader->nFilledLen = 0; + + aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); + + // fall through + } + + if (inHeader->nFilledLen == 0) { + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + } + /* * AAC+/eAAC+ streams can be signalled in two ways: either explicitly * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual @@ -502,15 +532,9 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { if (mStreamInfo->sampleRate != prevSampleRate || mStreamInfo->numChannels != prevNumChannels) { maybeConfigureDownmix(); - ALOGI("Reconfiguring decoder: %d Hz, %d channels", - mStreamInfo->sampleRate, - mStreamInfo->numChannels); - - // We're going to want to revisit this input buffer, but - // may have already advanced the offset. Undo that if - // necessary. - inHeader->nOffset -= adtsHeaderSize; - inHeader->nFilledLen += adtsHeaderSize; + ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels", + prevSampleRate, mStreamInfo->sampleRate, + prevNumChannels, mStreamInfo->numChannels); notify(OMX_EventPortSettingsChanged, 1, 0, NULL); mOutputPortSettingsChange = AWAITING_DISABLED; @@ -523,38 +547,10 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { return; } - size_t numOutBytes = - mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels; - - if (decoderErr == AAC_DEC_OK) { - UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0]; - inHeader->nFilledLen -= inBufferUsedLength; - inHeader->nOffset += inBufferUsedLength; - } else { - ALOGW("AAC decoder returned error %d, substituting silence", - decoderErr); - - memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes); - - // Discard input buffer. - inHeader->nFilledLen = 0; - - aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); - - // fall through - } - if (decoderErr == AAC_DEC_OK || mNumSamplesOutput > 0) { // We'll only output data if we successfully decoded it or // we've previously decoded valid data, in the latter case // (decode failed) we'll output a silent frame. - if (mIsFirst) { - mIsFirst = false; - // the first decoded frame should be discarded to account - // for decoder delay - numOutBytes = 0; - } - outHeader->nFilledLen = numOutBytes; outHeader->nFlags = 0; @@ -571,14 +567,6 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { outHeader = NULL; } - if (inHeader->nFilledLen == 0) { - inInfo->mOwnedByUs = false; - inQueue.erase(inQueue.begin()); - inInfo = NULL; - notifyEmptyBufferDone(inHeader); - inHeader = NULL; - } - if (decoderErr == AAC_DEC_OK) { ++mInputBufferCount; } @@ -589,14 +577,21 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) { if (portIndex == 0) { // Make sure that the next buffer output does not still // depend on fragments from the last one decoded. 
- aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); - mIsFirst = true; + // drain all existing data + drainDecoder(); } } -void SoftAAC2::onReset() { +void SoftAAC2::drainDecoder() { + short buf [2048]; + aacDecoder_DecodeFrame(mAACDecoder, buf, 4096, AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR); + aacDecoder_DecodeFrame(mAACDecoder, buf, 4096, AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR); aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); - mIsFirst = true; + mDecoderHasData = false; +} + +void SoftAAC2::onReset() { + drainDecoder(); } void SoftAAC2::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h index 6957ade..2d960ab 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.h +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h @@ -52,7 +52,7 @@ private: HANDLE_AACDECODER mAACDecoder; CStreamInfo *mStreamInfo; bool mIsADTS; - bool mIsFirst; + bool mDecoderHasData; size_t mInputBufferCount; bool mSignalledError; int64_t mAnchorTimeUs; @@ -68,6 +68,7 @@ private: status_t initDecoder(); bool isConfigured() const; void maybeConfigureDownmix() const; + void drainDecoder(); DISALLOW_EVIL_CONSTRUCTORS(SoftAAC2); }; -- cgit v1.1 From 41f37e7c29b5cc3bbd9ee60ea73c4d857d2ad717 Mon Sep 17 00:00:00 2001 From: Yu Shan Emily Lau Date: Fri, 12 Apr 2013 14:52:52 -0700 Subject: Update the test output path. In order for the automated test to pick up the test. The output must be in /data/nativetest. Change-Id: Idc15036afb5617ac1ef67ff74ba332c5f73fe155 --- camera/tests/Android.mk | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/camera/tests/Android.mk b/camera/tests/Android.mk index e455943..ec13911 100644 --- a/camera/tests/Android.mk +++ b/camera/tests/Android.mk @@ -35,4 +35,4 @@ LOCAL_CFLAGS += -Wall -Wextra LOCAL_MODULE:= camera_client_test LOCAL_MODULE_TAGS := tests -include $(BUILD_EXECUTABLE) +include $(BUILD_NATIVE_TEST) -- cgit v1.1 From de05c8eab188e98798f2b9c3dfac53dbc18ef584 Mon Sep 17 00:00:00 2001 From: ztenghui Date: Fri, 12 Apr 2013 13:50:38 -0700 Subject: MediaMuxer prefer not to use the MPEG4Writer in real time recording mode. By default, MPEG4Write will keep running in real time recording mode. 
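Clients that are not recording in real time opt out explicitly; a minimal sketch mirroring the stagefright.cpp and MediaMuxer.cpp hunks below (not part of the patch itself; `fd` is assumed to be an open, writable file descriptor):

    // Offline muxing: tell MPEG4Writer not to use real-time chunk scheduling
    // by passing kKeyRealTimeRecording = false at start().
    sp<MPEG4Writer> writer = new MPEG4Writer(fd);
    // ... writer->addSource(...) for each track ...

    sp<MetaData> params = new MetaData;
    params->setInt32(kKeyRealTimeRecording, false);   // default remains true
    CHECK_EQ(writer->start(params.get()), (status_t)OK);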
bug:8598944 Change-Id: Idf7fbd4e0feb7763660a74279ba8817b79098aaf --- cmds/stagefright/stagefright.cpp | 2 +- include/media/stagefright/MPEG4Writer.h | 8 ++++ include/media/stagefright/MetaData.h | 2 +- .../vss/stagefrightshells/src/VideoEditorUtils.cpp | 4 +- media/libstagefright/MPEG4Writer.cpp | 43 ++++++++++++++-------- media/libstagefright/MediaMuxer.cpp | 1 + 6 files changed, 40 insertions(+), 20 deletions(-) diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 5bdbfbb..115b07c 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -523,7 +523,7 @@ static void writeSourcesToMP4( } sp params = new MetaData; - params->setInt32(kKeyNotRealTime, true); + params->setInt32(kKeyRealTimeRecording, false); CHECK_EQ(writer->start(params.get()), (status_t)OK); while (!writer->reachedEOS()) { diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h index 88df6b0..3ef6b9a 100644 --- a/include/media/stagefright/MPEG4Writer.h +++ b/include/media/stagefright/MPEG4Writer.h @@ -74,6 +74,7 @@ private: int mFd; status_t mInitCheck; + bool mIsRealTimeRecording; bool mUse4ByteNalLength; bool mUse32BitOffset; bool mIsFileSizeLimitExplicitlyRequested; @@ -168,6 +169,13 @@ private: // Only makes sense for H.264/AVC bool useNalLengthFour(); + // Return whether the writer is used for real time recording. + // In real time recording mode, new samples will be allowed to buffered into + // chunks in higher priority thread, even though the file writer has not + // drained the chunks yet. + // By default, real time recording is on. + bool isRealTimeRecording() const; + void lock(); void unlock(); diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h index be08c19..df54923 100644 --- a/include/media/stagefright/MetaData.h +++ b/include/media/stagefright/MetaData.h @@ -112,7 +112,7 @@ enum { // kKeyTrackTimeStatus is used to track progress in elapsed time kKeyTrackTimeStatus = 'tktm', // int64_t - kKeyNotRealTime = 'ntrt', // bool (int32_t) + kKeyRealTimeRecording = 'rtrc', // bool (int32_t) kKeyNumBuffers = 'nbbf', // int32_t // Ogg files can be tagged to be automatically looping... 
diff --git a/libvideoeditor/vss/stagefrightshells/src/VideoEditorUtils.cpp b/libvideoeditor/vss/stagefrightshells/src/VideoEditorUtils.cpp index 5309bd4..5a7237d 100755 --- a/libvideoeditor/vss/stagefrightshells/src/VideoEditorUtils.cpp +++ b/libvideoeditor/vss/stagefrightshells/src/VideoEditorUtils.cpp @@ -189,8 +189,8 @@ void displayMetaData(const sp meta) { if (meta->findInt64(kKeyTrackTimeStatus, &int64Data)) { LOG1("displayMetaData kKeyTrackTimeStatus %lld", int64Data); } - if (meta->findInt32(kKeyNotRealTime, &int32Data)) { - LOG1("displayMetaData kKeyNotRealTime %d", int32Data); + if (meta->findInt32(kKeyRealTimeRecording, &int32Data)) { + LOG1("displayMetaData kKeyRealTimeRecording %d", int32Data); } } diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp index 316f669..a0f17b5 100644 --- a/media/libstagefright/MPEG4Writer.cpp +++ b/media/libstagefright/MPEG4Writer.cpp @@ -212,7 +212,6 @@ private: int64_t mTrackDurationUs; int64_t mMaxChunkDurationUs; - bool mIsRealTimeRecording; int64_t mEstimatedTrackSizeBytes; int64_t mMdatSizeBytes; int32_t mTimeScale; @@ -335,6 +334,7 @@ private: MPEG4Writer::MPEG4Writer(const char *filename) : mFd(-1), mInitCheck(NO_INIT), + mIsRealTimeRecording(true), mUse4ByteNalLength(true), mUse32BitOffset(true), mIsFileSizeLimitExplicitlyRequested(false), @@ -359,6 +359,7 @@ MPEG4Writer::MPEG4Writer(const char *filename) MPEG4Writer::MPEG4Writer(int fd) : mFd(dup(fd)), mInitCheck(mFd < 0? NO_INIT: OK), + mIsRealTimeRecording(true), mUse4ByteNalLength(true), mUse32BitOffset(true), mIsFileSizeLimitExplicitlyRequested(false), @@ -596,6 +597,11 @@ status_t MPEG4Writer::start(MetaData *param) { mUse4ByteNalLength = false; } + int32_t isRealTimeRecording; + if (param && param->findInt32(kKeyRealTimeRecording, &isRealTimeRecording)) { + mIsRealTimeRecording = isRealTimeRecording; + } + mStartTimestampUs = -1; if (mStarted) { @@ -1640,12 +1646,18 @@ void MPEG4Writer::threadFunc() { mChunkReadyCondition.wait(mLock); } - // Actual write without holding the lock in order to - // reduce the blocking time for media track threads. + // In real time recording mode, write without holding the lock in order + // to reduce the blocking time for media track threads. + // Otherwise, hold the lock until the existing chunks get written to the + // file. 
if (chunkFound) { - mLock.unlock(); + if (mIsRealTimeRecording) { + mLock.unlock(); + } writeChunkToFile(&chunk); - mLock.lock(); + if (mIsRealTimeRecording) { + mLock.lock(); + } } } @@ -1695,18 +1707,10 @@ status_t MPEG4Writer::Track::start(MetaData *params) { mRotation = rotationDegrees; } - mIsRealTimeRecording = true; - { - int32_t isNotRealTime; - if (params && params->findInt32(kKeyNotRealTime, &isNotRealTime)) { - mIsRealTimeRecording = (isNotRealTime == 0); - } - } - initTrackingProgressStatus(params); sp meta = new MetaData; - if (mIsRealTimeRecording && mOwner->numTracks() > 1) { + if (mOwner->isRealTimeRecording() && mOwner->numTracks() > 1) { /* * This extra delay of accepting incoming audio/video signals * helps to align a/v start time at the beginning of a recording @@ -2084,7 +2088,10 @@ status_t MPEG4Writer::Track::threadEntry() { } else { prctl(PR_SET_NAME, (unsigned long)"VideoTrackEncoding", 0, 0, 0); } - androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO); + + if (mOwner->isRealTimeRecording()) { + androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO); + } sp meta_data; @@ -2245,7 +2252,7 @@ status_t MPEG4Writer::Track::threadEntry() { } - if (mIsRealTimeRecording) { + if (mOwner->isRealTimeRecording()) { if (mIsAudio) { updateDriftTime(meta_data); } @@ -2531,6 +2538,10 @@ int64_t MPEG4Writer::getDriftTimeUs() { return mDriftTimeUs; } +bool MPEG4Writer::isRealTimeRecording() const { + return mIsRealTimeRecording; +} + bool MPEG4Writer::useNalLengthFour() { return mUse4ByteNalLength; } diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp index b948fe2..388c65b 100644 --- a/media/libstagefright/MediaMuxer.cpp +++ b/media/libstagefright/MediaMuxer.cpp @@ -107,6 +107,7 @@ status_t MediaMuxer::start() { Mutex::Autolock autoLock(mMuxerLock); if (mState == INITIALIZED) { mState = STARTED; + mFileMeta->setInt32(kKeyRealTimeRecording, false); return mWriter->start(mFileMeta.get()); } else { ALOGE("start() is called in invalid state %d", mState); -- cgit v1.1 From 7c5abbb0e1b20df4b265a08a8560899f637f9b44 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Mon, 15 Apr 2013 12:06:18 -0700 Subject: Fix AAC decoder reconfiguration The recent flush() changed made the codec behave differently after a reconfigure. Now we reset its state properly again. b/8543366 Change-Id: I8807b5ab02249b43fc1cf315d4e8d4ceb3f9b298 --- media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp index 8ba2afb..536cfde 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp @@ -592,6 +592,12 @@ void SoftAAC2::drainDecoder() { void SoftAAC2::onReset() { drainDecoder(); + // reset the "configured" state + mInputBufferCount = 0; + mNumSamplesOutput = 0; + // To make the codec behave the same before and after a reset, we need to invalidate the + // streaminfo struct. This does that: + mStreamInfo->sampleRate = 0; } void SoftAAC2::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { -- cgit v1.1 From 55137044fef6cb9dc4872fbbe963add28c4383a0 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Tue, 9 Apr 2013 14:13:50 -0700 Subject: Camera3: Allow multiple process_capture_result calls per request To simplify HAL implementation, allow the results for a single capture request to be sent from the HAL across multiple calls to process_capture_request. 
This requires that the HAL must call notify() with the SHUTTER message to indicate start of exposure time, before the first call to process_capture_result for a given frame. Bug: 8565103 Change-Id: I6a61449725f98af88769fafa07736848a226dad2 --- services/camera/libcameraservice/Camera3Device.cpp | 235 +++++++++++++++++---- services/camera/libcameraservice/Camera3Device.h | 44 +++- 2 files changed, 236 insertions(+), 43 deletions(-) diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index 08aef83..d67b535 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -51,6 +51,8 @@ Camera3Device::Camera3Device(int id): mId(id), mHal3Device(NULL), mStatus(STATUS_UNINITIALIZED), + mNextResultFrameNumber(0), + mNextShutterFrameNumber(0), mListener(NULL) { ATRACE_CALL(); @@ -246,8 +248,22 @@ status_t Camera3Device::dump(int fd, const Vector &args) { mOutputStreams[i]->dump(fd,args); } + lines = String8(" In-flight requests:\n"); + if (mInFlightMap.size() == 0) { + lines.append(" None\n"); + } else { + for (size_t i = 0; i < mInFlightMap.size(); i++) { + InFlightRequest r = mInFlightMap.valueAt(i); + lines.appendFormat(" Frame %d | Timestamp: %lld, metadata" + " arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i), + r.captureTimestamp, r.haveResultMetadata ? "true" : "false", + r.numBuffersLeft); + } + } + write(fd, lines.string(), lines.size()); + if (mHal3Device != NULL) { - lines = String8(" HAL device dump:\n"); + lines = String8(" HAL device dump:\n"); write(fd, lines.string(), lines.size()); mHal3Device->ops->dump(mHal3Device, fd); } @@ -927,15 +943,36 @@ void Camera3Device::setErrorStateLocked(const char *fmt, ...) { } void Camera3Device::setErrorStateLockedV(const char *fmt, va_list args) { - // Only accept the first failure cause + // Print out all error messages to log + String8 errorCause = String8::formatV(fmt, args); + ALOGE("Camera %d: %s", mId, errorCause.string()); + + // But only do error state transition steps for the first error if (mStatus == STATUS_ERROR) return; - mErrorCause = String8::formatV(fmt, args); - ALOGE("Camera %d: %s", mId, mErrorCause.string()); + mErrorCause = errorCause; + + mRequestThread->setPaused(true); mStatus = STATUS_ERROR; } /** + * In-flight request management + */ + +status_t Camera3Device::registerInFlight(int32_t frameNumber, + int32_t numBuffers) { + ATRACE_CALL(); + Mutex::Autolock l(mInFlightLock); + + ssize_t res; + res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers)); + if (res < 0) return res; + + return OK; +} + +/** * Camera HAL device callback methods */ @@ -944,47 +981,107 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { status_t res; - if (result->result == NULL) { - SET_ERR("No metadata provided by HAL for frame %d", - result->frame_number); + uint32_t frameNumber = result->frame_number; + if (result->result == NULL && result->num_output_buffers == 0) { + SET_ERR("No result data provided by HAL for frame %d", + frameNumber); return; } + // Get capture timestamp from list of in-flight requests, where it was added + // by the shutter notification for this frame. Then update the in-flight + // status and remove the in-flight entry if all result data has been + // received. 
nsecs_t timestamp = 0; + { + Mutex::Autolock l(mInFlightLock); + ssize_t idx = mInFlightMap.indexOfKey(frameNumber); + if (idx == NAME_NOT_FOUND) { + SET_ERR("Unknown frame number for capture result: %d", + frameNumber); + return; + } + InFlightRequest &request = mInFlightMap.editValueAt(idx); + timestamp = request.captureTimestamp; + if (timestamp == 0) { + SET_ERR("Called before shutter notify for frame %d", + frameNumber); + return; + } + + if (result->result != NULL) { + if (request.haveResultMetadata) { + SET_ERR("Called multiple times with metadata for frame %d", + frameNumber); + return; + } + request.haveResultMetadata = true; + } + + request.numBuffersLeft -= result->num_output_buffers; + + if (request.numBuffersLeft < 0) { + SET_ERR("Too many buffers returned for frame %d", + frameNumber); + return; + } + + if (request.haveResultMetadata && request.numBuffersLeft == 0) { + mInFlightMap.removeItemsAt(idx, 1); + } + + // Sanity check - if we have too many in-flight frames, something has + // likely gone wrong + if (mInFlightMap.size() > kInFlightWarnLimit) { + CLOGE("In-flight list too large: %d", mInFlightMap.size()); + } + + } + AlgState cur3aState; AlgState new3aState; int32_t aeTriggerId = 0; int32_t afTriggerId = 0; - NotificationListener *listener; + NotificationListener *listener = NULL; - { + // Process the result metadata, if provided + if (result->result != NULL) { Mutex::Autolock l(mOutputLock); - // Push result metadata into queue - mResultQueue.push_back(CameraMetadata()); - // Lets avoid copies! Too bad there's not a #back method - CameraMetadata &captureResult = *(--mResultQueue.end()); + if (frameNumber != mNextResultFrameNumber) { + SET_ERR("Out-of-order capture result metadata submitted! " + "(got frame number %d, expecting %d)", + frameNumber, mNextResultFrameNumber); + return; + } + mNextResultFrameNumber++; + + CameraMetadata &captureResult = + *mResultQueue.insert(mResultQueue.end(), CameraMetadata()); captureResult = result->result; if (captureResult.update(ANDROID_REQUEST_FRAME_COUNT, - (int32_t*)&result->frame_number, 1) != OK) { + (int32_t*)&frameNumber, 1) != OK) { SET_ERR("Failed to set frame# in metadata (%d)", - result->frame_number); + frameNumber); } else { ALOGVV("%s: Camera %d: Set frame# in metadata (%d)", - __FUNCTION__, mId, result->frame_number); + __FUNCTION__, mId, frameNumber); } - // Get timestamp from result metadata + // Check that there's a timestamp in the result metadata camera_metadata_entry entry = captureResult.find(ANDROID_SENSOR_TIMESTAMP); if (entry.count == 0) { SET_ERR("No timestamp provided by HAL for frame %d!", - result->frame_number); - } else { - timestamp = entry.data.i64[0]; + frameNumber); + } + if (timestamp != entry.data.i64[0]) { + SET_ERR("Timestamp mismatch between shutter notify and result" + " metadata for frame %d (%lld vs %lld respectively)", + frameNumber, timestamp, entry.data.i64[0]); } // Get 3A states from result metadata @@ -992,7 +1089,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { entry = captureResult.find(ANDROID_CONTROL_AE_STATE); if (entry.count == 0) { CLOGE("No AE state provided by HAL for frame %d!", - result->frame_number); + frameNumber); } else { new3aState.aeState = static_cast( @@ -1002,7 +1099,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { entry = captureResult.find(ANDROID_CONTROL_AF_STATE); if (entry.count == 0) { CLOGE("No AF state provided by HAL for frame %d!", - result->frame_number); + frameNumber); } else 
{ new3aState.afState = static_cast( @@ -1012,7 +1109,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { entry = captureResult.find(ANDROID_CONTROL_AWB_STATE); if (entry.count == 0) { CLOGE("No AWB state provided by HAL for frame %d!", - result->frame_number); + frameNumber); } else { new3aState.awbState = static_cast( @@ -1022,7 +1119,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { entry = captureResult.find(ANDROID_CONTROL_AF_TRIGGER_ID); if (entry.count == 0) { CLOGE("No AF trigger ID provided by HAL for frame %d!", - result->frame_number); + frameNumber); } else { afTriggerId = entry.data.i32[0]; } @@ -1030,7 +1127,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { entry = captureResult.find(ANDROID_CONTROL_AE_PRECAPTURE_ID); if (entry.count == 0) { CLOGE("No AE precapture trigger ID provided by HAL" - " for frame %d!", result->frame_number); + " for frame %d!", frameNumber); } else { aeTriggerId = entry.data.i32[0]; } @@ -1041,7 +1138,8 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { m3AState = new3aState; } // scope for mOutputLock - // Return completed buffers to their streams + // Return completed buffers to their streams with the timestamp + for (size_t i = 0; i < result->num_output_buffers; i++) { Camera3Stream *stream = Camera3Stream::cast(result->output_buffers[i].stream); @@ -1050,20 +1148,21 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { // last reference to it. if (res != OK) { SET_ERR("Can't return buffer %d for frame %d to its stream: " - " %s (%d)", i, result->frame_number, strerror(-res), res); + " %s (%d)", i, frameNumber, strerror(-res), res); } } - // Dispatch any 3A change events to listeners - if (listener != NULL) { + // Finally, dispatch any 3A change events to listeners if we got metadata + + if (result->result != NULL && listener != NULL) { if (new3aState.aeState != cur3aState.aeState) { ALOGVV("%s: AE state changed from 0x%x to 0x%x", - __FUNCTION__, cur3aState.aeState, new3aState.aeState); + __FUNCTION__, cur3aState.aeState, new3aState.aeState); listener->notifyAutoExposure(new3aState.aeState, aeTriggerId); } if (new3aState.afState != cur3aState.afState) { ALOGVV("%s: AF state changed from 0x%x to 0x%x", - __FUNCTION__, cur3aState.afState, new3aState.afState); + __FUNCTION__, cur3aState.afState, new3aState.afState); listener->notifyAutoFocus(new3aState.afState, afTriggerId); } if (new3aState.awbState != cur3aState.awbState) { @@ -1077,12 +1176,11 @@ void Camera3Device::notify(const camera3_notify_msg *msg) { NotificationListener *listener; { Mutex::Autolock l(mOutputLock); - if (mListener == NULL) return; listener = mListener; } if (msg == NULL) { - SET_ERR_L("HAL sent NULL notify message!"); + SET_ERR("HAL sent NULL notify message!"); return; } @@ -1095,17 +1193,50 @@ void Camera3Device::notify(const camera3_notify_msg *msg) { msg->message.error.error_stream); streamId = stream->getId(); } - listener->notifyError(msg->message.error.error_code, - msg->message.error.frame_number, streamId); + if (listener != NULL) { + listener->notifyError(msg->message.error.error_code, + msg->message.error.frame_number, streamId); + } break; } case CAMERA3_MSG_SHUTTER: { - listener->notifyShutter(msg->message.shutter.frame_number, - msg->message.shutter.timestamp); + ssize_t idx; + uint32_t frameNumber = msg->message.shutter.frame_number; + nsecs_t timestamp = msg->message.shutter.timestamp; + // 
Verify ordering of shutter notifications + { + Mutex::Autolock l(mOutputLock); + if (frameNumber != mNextShutterFrameNumber) { + SET_ERR("Shutter notification out-of-order. Expected " + "notification for frame %d, got frame %d", + mNextShutterFrameNumber, frameNumber); + break; + } + mNextShutterFrameNumber++; + } + + // Set timestamp for the request in the in-flight tracking + { + Mutex::Autolock l(mInFlightLock); + idx = mInFlightMap.indexOfKey(frameNumber); + if (idx >= 0) { + mInFlightMap.editValueAt(idx).captureTimestamp = timestamp; + } + } + if (idx < 0) { + SET_ERR("Shutter notification for non-existent frame number %d", + frameNumber); + break; + } + + // Call listener, if any + if (listener != NULL) { + listener->notifyShutter(frameNumber, timestamp); + } break; } default: - SET_ERR_L("Unknown notify message from HAL: %d", + SET_ERR("Unknown notify message from HAL: %d", msg->type); } } @@ -1119,6 +1250,7 @@ Camera3Device::RequestThread::RequestThread(wp parent, Thread(false), mParent(parent), mHal3Device(hal3Device), + mId(getId(parent)), mReconfigured(false), mDoPause(false), mPaused(true), @@ -1158,6 +1290,12 @@ status_t Camera3Device::RequestThread::queueTrigger( return OK; } +int Camera3Device::RequestThread::getId(const wp &device) { + sp d = device.promote(); + if (d != NULL) return d->mId; + return 0; +} + status_t Camera3Device::RequestThread::queueTriggerLocked( RequestTrigger trigger) { @@ -1170,9 +1308,8 @@ status_t Camera3Device::RequestThread::queueTriggerLocked( case TYPE_INT32: break; default: - ALOGE("%s: Type not supported: 0x%x", - __FUNCTION__, - trigger.getTagType()); + ALOGE("%s: Type not supported: 0x%x", __FUNCTION__, + trigger.getTagType()); return INVALID_OPERATION; } @@ -1340,6 +1477,22 @@ bool Camera3Device::RequestThread::threadLoop() { request.frame_number = mFrameNumber++; + // Log request in the in-flight queue + sp parent = mParent.promote(); + if (parent == NULL) { + CLOGE("RequestThread: Parent is gone"); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return false; + } + + res = parent->registerInFlight(request.frame_number, + request.num_output_buffers); + if (res != OK) { + SET_ERR("RequestThread: Unable to register new in-flight request:" + " %s (%d)", strerror(-res), res); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return false; + } // Submit request and block until ready for next one diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/Camera3Device.h index 2e4a303..6cad08e 100644 --- a/services/camera/libcameraservice/Camera3Device.h +++ b/services/camera/libcameraservice/Camera3Device.h @@ -108,7 +108,8 @@ class Camera3Device : buffer_handle_t *buffer, wp listener); private: - static const nsecs_t kShutdownTimeout = 5000000000; // 5 sec + static const size_t kInFlightWarnLimit = 20; + static const nsecs_t kShutdownTimeout = 5000000000; // 5 sec struct RequestTrigger; Mutex mLock; @@ -262,6 +263,8 @@ class Camera3Device : virtual bool threadLoop(); private: + static int getId(const wp &device); + status_t queueTriggerLocked(RequestTrigger trigger); // Mix-in queued triggers into this request int32_t insertTriggers(const sp &request); @@ -291,6 +294,8 @@ class Camera3Device : wp mParent; camera3_device_t *mHal3Device; + const int mId; + Mutex mRequestLock; Condition mRequestSignal; RequestList mRequestQueue; @@ -308,7 +313,7 @@ class Camera3Device : sp mPrevRequest; int32_t mPrevTriggers; - int32_t mFrameNumber; + uint32_t mFrameNumber; Mutex 
mLatestRequestMutex; Condition mLatestRequestSignal; @@ -324,6 +329,39 @@ class Camera3Device : sp mRequestThread; /** + * In-flight queue for tracking completion of capture requests. + */ + + struct InFlightRequest { + // Set by notify() SHUTTER call. + nsecs_t captureTimestamp; + // Set by process_capture_result call with valid metadata + bool haveResultMetadata; + // Decremented by calls to process_capture_result with valid output + // buffers + int numBuffersLeft; + + InFlightRequest() : + captureTimestamp(0), + haveResultMetadata(false), + numBuffersLeft(0) { + } + + explicit InFlightRequest(int numBuffers) : + captureTimestamp(0), + haveResultMetadata(false), + numBuffersLeft(numBuffers) { + } + }; + // Map from frame number to the in-flight request state + typedef KeyedVector InFlightMap; + + Mutex mInFlightLock; // Protects mInFlightMap + InFlightMap mInFlightMap; + + status_t registerInFlight(int32_t frameNumber, int32_t numBuffers); + + /** * Output result queue and current HAL device 3A state */ @@ -332,6 +370,8 @@ class Camera3Device : /**** Scope for mOutputLock ****/ + uint32_t mNextResultFrameNumber; + uint32_t mNextShutterFrameNumber; List mResultQueue; Condition mResultSignal; NotificationListener *mListener; -- cgit v1.1 From a68e7b98361692d4120bf99fa5dc18cd93673130 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 15 Apr 2013 16:18:56 -0700 Subject: Guard against mLooper == NULL. Change-Id: I01aa0e47b55d0dffe34525edf9f055a5cb4dc70f related-to-bug: 8620223 --- media/libmediaplayerservice/nuplayer/RTSPSource.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp index a5ff0ca..50ebf9c 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp @@ -66,7 +66,9 @@ NuPlayer::RTSPSource::RTSPSource( } NuPlayer::RTSPSource::~RTSPSource() { - mLooper->stop(); + if (mLooper != NULL) { + mLooper->stop(); + } } void NuPlayer::RTSPSource::prepareAsync() { -- cgit v1.1 From 94b66227ff5a57dd810aafa7b3aa810b9a185b8d Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Mon, 15 Apr 2013 16:28:49 -0700 Subject: Fix Vorbis decoder reset b/8543366 Change-Id: I9f32e96fdfc355cf444259a7c40554e2de184728 --- media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp index 922ac61..4115324 100644 --- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp +++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp @@ -411,8 +411,19 @@ void SoftVorbis::onPortFlushCompleted(OMX_U32 portIndex) { } void SoftVorbis::onReset() { + mInputBufferCount = 0; mNumFramesOutput = 0; - vorbis_dsp_restart(mState); + if (mState != NULL) { + vorbis_dsp_clear(mState); + delete mState; + mState = NULL; + } + + if (mVi != NULL) { + vorbis_info_clear(mVi); + delete mVi; + mVi = NULL; + } } void SoftVorbis::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { -- cgit v1.1 From 0e03e72e7bb2633f5a16c2fe7c164bc3ec8ca855 Mon Sep 17 00:00:00 2001 From: Jean-Michel Trivi Date: Mon, 15 Apr 2013 18:40:51 -0700 Subject: Better default AAC DRC setting By default, use boost and attenuation together. 
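As a hedged illustration (not part of the change; the reference-level and attenuation calls are shown only for context and follow the same FDK parameter naming as the boost factor), the effective DRC defaults after this change are:

    // Cut (attenuation) and boost are now both applied by default, unless the
    // corresponding system properties override them.
    aacDecoder_SetParam(mAACDecoder, AAC_DRC_REFERENCE_LEVEL, DRC_DEFAULT_MOBILE_REF_LEVEL);
    aacDecoder_SetParam(mAACDecoder, AAC_DRC_ATTENUATION_FACTOR, DRC_DEFAULT_MOBILE_DRC_CUT);
    aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST);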
bug 7140036 Change-Id: Ie0565f9e5cd24f7a59b5a218b4e6186661832601 --- media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp index 8ba2afb..5eeda5c 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp @@ -29,6 +29,7 @@ #define DRC_DEFAULT_MOBILE_REF_LEVEL 64 /* 64*-0.25dB = -16 dB below full scale for mobile conf */ #define DRC_DEFAULT_MOBILE_DRC_CUT 127 /* maximum compression of dynamic range for mobile conf */ +#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */ #define MAX_CHANNEL_COUNT 6 /* maximum number of audio channels that can be decoded */ // names of properties that can be used to override the default DRC settings #define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level" @@ -146,6 +147,8 @@ status_t SoftAAC2::initDecoder() { unsigned boost = atoi(value); ALOGV("AAC decoder using AAC_DRC_BOOST_FACTOR of %d", boost); aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, boost); + } else { + aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST); } return status; -- cgit v1.1 From 1e0757e0a63d876acc65991ac73284227c522bdc Mon Sep 17 00:00:00 2001 From: Sungsoo Lim Date: Mon, 31 Dec 2012 17:48:20 +0900 Subject: Parse the last non-empty line of .m3u8 file If the last line of the .m3u8 file is non-empty but has no trailing line feed, it is never processed, so M3UParser.isComplete() returns false even though the playlist is complete. Change-Id: I01b9f900d44247a3ef40369a2f9198bb7eaf01b7 related-to-bug: 8405824 --- media/libstagefright/httplive/M3UParser.cpp | 3 --- 1 file changed, 3 deletions(-) diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp index 44e03dc..68bbca2 100644 --- a/media/libstagefright/httplive/M3UParser.cpp +++ b/media/libstagefright/httplive/M3UParser.cpp @@ -163,9 +163,6 @@ status_t M3UParser::parse(const void *_data, size_t size) { while (offsetLF < size && data[offsetLF] != '\n') { ++offsetLF; } - if (offsetLF >= size) { - break; - } AString line; if (offsetLF > offset && data[offsetLF - 1] == '\r') { -- cgit v1.1 From da9740e63a835e610519bd235be9137d74b6d409 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 16 Apr 2013 10:54:03 -0700 Subject: Fix a typo that would cause us not to shutdown/flush the decoders in some cases.
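In short, the guard in performDecoderFlush()/performDecoderShutdown() was inverted for audio-only playback; a minimal illustration of the corrected logic (restating the diff below, not adding to it):

    // Only skip the work when there is nothing to flush or shut down.
    if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
        return;  // neither decoder instantiated
    }
    // The old test (mAudioDecoder != NULL && mVideoDecoder == NULL) made
    // audio-only playback skip the flush/shutdown entirely.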
related-to-bug: 8630032 Change-Id: I8e94b53b34e137e827e9630c65f3252ea91e4ebd --- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index 46d0a5a..607ec6a 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -1158,7 +1158,7 @@ void NuPlayer::performSeek(int64_t seekTimeUs) { void NuPlayer::performDecoderFlush() { ALOGV("performDecoderFlush"); - if (mAudioDecoder != NULL && mVideoDecoder == NULL) { + if (mAudioDecoder == NULL && mVideoDecoder == NULL) { return; } @@ -1176,7 +1176,7 @@ void NuPlayer::performDecoderFlush() { void NuPlayer::performDecoderShutdown() { ALOGV("performDecoderShutdown"); - if (mAudioDecoder != NULL && mVideoDecoder == NULL) { + if (mAudioDecoder == NULL && mVideoDecoder == NULL) { return; } -- cgit v1.1 From e42f027d19b20cf581be11a89e26b2c96c50c335 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Tue, 16 Apr 2013 15:57:38 -0700 Subject: Make sure the drain buffer is big enough b/8614909 Change-Id: I9d973dcd74100b793791359c262b821207ff9ddd --- media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp index cf81c16..cf50dc9 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp @@ -586,9 +586,12 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) { } void SoftAAC2::drainDecoder() { - short buf [2048]; - aacDecoder_DecodeFrame(mAACDecoder, buf, 4096, AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR); - aacDecoder_DecodeFrame(mAACDecoder, buf, 4096, AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR); + // a buffer big enough for 6 channels of decoded HE-AAC + short buf [2048*6]; + aacDecoder_DecodeFrame(mAACDecoder, + buf, sizeof(buf), AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR); + aacDecoder_DecodeFrame(mAACDecoder, + buf, sizeof(buf), AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR); aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); mDecoderHasData = false; } -- cgit v1.1 From 423e33ce6569cb14ecf772e9670208517f7b30c4 Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Mon, 8 Apr 2013 15:23:17 -0700 Subject: Added CTS test for secure stop APIs bug: 8604418 Change-Id: I173fa1ec904ba11dc4cff0343462b3f4bac0d365 --- drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp | 35 +++++++++++++++++------ media/libmedia/IDrm.cpp | 1 + 2 files changed, 28 insertions(+), 8 deletions(-) diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp index 00f6de3..06fc29d 100644 --- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp @@ -291,16 +291,30 @@ namespace android { { Mutex::Autolock lock(mLock); ALOGD("MockDrmPlugin::getSecureStops()"); - const uint8_t ss1[] = {0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89}; - const uint8_t ss2[] = {0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99}; - Vector vec; - vec.appendArray(ss1, sizeof(ss1)); - secureStops.push_back(vec); + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-secure-stop1 -> first secure stop in list + // byte[] mock-secure-stop2 -> second secure stop in list + + Vector ss1, ss2; + ssize_t index = 
mByteArrayProperties.indexOfKey(String8("mock-secure-stop1")); + if (index < 0) { + ALOGD("Missing 'mock-secure-stop1' parameter for mock"); + return BAD_VALUE; + } else { + ss1 = mByteArrayProperties.valueAt(index); + } + + index = mByteArrayProperties.indexOfKey(String8("mock-secure-stop2")); + if (index < 0) { + ALOGD("Missing 'mock-secure-stop2' parameter for mock"); + return BAD_VALUE; + } else { + ss2 = mByteArrayProperties.valueAt(index); + } - vec.clear(); - vec.appendArray(ss2, sizeof(ss2)); - secureStops.push_back(vec); + secureStops.push_back(ss1); + secureStops.push_back(ss2); return OK; } @@ -309,6 +323,11 @@ namespace android { Mutex::Autolock lock(mLock); ALOGD("MockDrmPlugin::releaseSecureStops(%s)", vectorToString(ssRelease).string()); + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] secure-stop-release -> mock-ssrelease + mByteArrayProperties.add(String8("mock-ssrelease"), ssRelease); + return OK; } diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp index 1578846..902aeb2 100644 --- a/media/libmedia/IDrm.cpp +++ b/media/libmedia/IDrm.cpp @@ -590,6 +590,7 @@ status_t BnDrm::onTransact( size_t size = iter->size(); reply->writeInt32(size); reply->write(iter->array(), iter->size()); + iter++; } reply->writeInt32(result); return OK; -- cgit v1.1 From 42392e49e167c6a0c573e55e1c1b4c7fa0ceb213 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 16 Apr 2013 16:35:19 -0700 Subject: Apparently the mp3 decoder glitches if it was configured with the wrong sample rate. Make sure we tell it about the one the extractor provides. Change-Id: Ice5b9cf55485ca1a8f099a14be0202be9540e781 related-to-bug: 8621639 --- media/libstagefright/ACodec.cpp | 13 +++++++++++++ media/libstagefright/OMXCodec.cpp | 11 +++++++++++ media/libstagefright/codecs/mp3dec/SoftMP3.cpp | 15 +++++++++++++++ 3 files changed, 39 insertions(+) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index ee49033..b3bc6d8 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -960,6 +960,19 @@ status_t ACodec::configureCodec( err = setupVideoDecoder(mime, width, height); } } + } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { + int32_t numChannels, sampleRate; + if (!msg->findInt32("channel-count", &numChannels) + || !msg->findInt32("sample-rate", &sampleRate)) { + // Since we did not always check for these, leave them optional + // and have the decoder figure it all out. + err = OK; + } else { + err = setupRawAudioFormat( + encoder ? kPortIndexInput : kPortIndexOutput, + sampleRate, + numChannels); + } } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { int32_t numChannels, sampleRate; if (!msg->findInt32("channel-count", &numChannels) diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 6c0779d..9d349a1 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -522,6 +522,17 @@ status_t OMXCodec::configureCodec(const sp &meta) { CODEC_LOGE("setAACFormat() failed (err = %d)", err); return err; } + } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_MPEG, mMIME)) { + int32_t numChannels, sampleRate; + if (meta->findInt32(kKeyChannelCount, &numChannels) + && meta->findInt32(kKeySampleRate, &sampleRate)) { + // Since we did not always check for these, leave them optional + // and have the decoder figure it all out. + setRawAudioFormat( + mIsEncoder ? 
kPortIndexInput : kPortIndexOutput, + sampleRate, + numChannels); + } } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_ALAW, mMIME) || !strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_MLAW, mMIME)) { // These are PCM-like formats with a fixed sample rate but diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp index 849be87..9f25536 100644 --- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp +++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp @@ -166,6 +166,21 @@ OMX_ERRORTYPE SoftMP3::internalSetParameter( return OMX_ErrorNone; } + case OMX_IndexParamAudioPcm: + { + const OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams = + (const OMX_AUDIO_PARAM_PCMMODETYPE *)params; + + if (pcmParams->nPortIndex != 1) { + return OMX_ErrorUndefined; + } + + mNumChannels = pcmParams->nChannels; + mSamplingRate = pcmParams->nSamplingRate; + + return OMX_ErrorNone; + } + default: return SimpleSoftOMXComponent::internalSetParameter(index, params); } -- cgit v1.1 From 3ee3550a2f529cbf56d87d8503f59a8f45dccf32 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Tue, 2 Apr 2013 15:45:11 -0700 Subject: Camera: Add preview callback surface support - Add call to set a preview callback surface - Implement support for HAL2/3 devices - Still need HAL1 implementation Change-Id: I0dc0bd72e43d871aa487858d1665c1efca633ffe --- camera/Camera.cpp | 8 ++ camera/ICamera.cpp | 21 +++++ include/camera/Camera.h | 8 ++ include/camera/ICamera.h | 9 +- services/camera/libcameraservice/Camera2Client.cpp | 95 +++++++++++++++++++--- services/camera/libcameraservice/Camera2Client.h | 3 + services/camera/libcameraservice/CameraClient.cpp | 6 ++ services/camera/libcameraservice/CameraClient.h | 2 + services/camera/libcameraservice/CameraService.h | 2 + .../libcameraservice/camera2/CallbackProcessor.cpp | 52 ++++++++++-- .../libcameraservice/camera2/CallbackProcessor.h | 5 ++ .../camera/libcameraservice/camera2/Parameters.cpp | 1 + .../camera/libcameraservice/camera2/Parameters.h | 1 + 13 files changed, 191 insertions(+), 22 deletions(-) diff --git a/camera/Camera.cpp b/camera/Camera.cpp index 1b136de..fd78572 100644 --- a/camera/Camera.cpp +++ b/camera/Camera.cpp @@ -255,6 +255,14 @@ void Camera::setPreviewCallbackFlags(int flag) mCamera->setPreviewCallbackFlag(flag); } +status_t Camera::setPreviewCallbackTarget( + const sp& callbackProducer) +{ + sp c = mCamera; + if (c == 0) return NO_INIT; + return c->setPreviewCallbackTarget(callbackProducer); +} + // callback from camera service void Camera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) { diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp index 8900867..732c204 100644 --- a/camera/ICamera.cpp +++ b/camera/ICamera.cpp @@ -31,6 +31,7 @@ enum { DISCONNECT = IBinder::FIRST_CALL_TRANSACTION, SET_PREVIEW_TEXTURE, SET_PREVIEW_CALLBACK_FLAG, + SET_PREVIEW_CALLBACK_TARGET, START_PREVIEW, STOP_PREVIEW, AUTO_FOCUS, @@ -90,6 +91,18 @@ public: remote()->transact(SET_PREVIEW_CALLBACK_FLAG, data, &reply); } + status_t setPreviewCallbackTarget( + const sp& callbackProducer) + { + ALOGV("setPreviewCallbackTarget"); + Parcel data, reply; + data.writeInterfaceToken(ICamera::getInterfaceDescriptor()); + sp b(callbackProducer->asBinder()); + data.writeStrongBinder(b); + remote()->transact(SET_PREVIEW_CALLBACK_TARGET, data, &reply); + return reply.readInt32(); + } + // start preview mode, must call setPreviewDisplay first status_t startPreview() { @@ -285,6 +298,14 @@ status_t BnCamera::onTransact( setPreviewCallbackFlag(callback_flag); return 
NO_ERROR; } break; + case SET_PREVIEW_CALLBACK_TARGET: { + ALOGV("SET_PREVIEW_CALLBACK_TARGET"); + CHECK_INTERFACE(ICamera, data, reply); + sp cp = + interface_cast(data.readStrongBinder()); + reply->writeInt32(setPreviewCallbackTarget(cp)); + return NO_ERROR; + } case START_PREVIEW: { ALOGV("START_PREVIEW"); CHECK_INTERFACE(ICamera, data, reply); diff --git a/include/camera/Camera.h b/include/camera/Camera.h index 37626a4..c34b3ea 100644 --- a/include/camera/Camera.h +++ b/include/camera/Camera.h @@ -121,7 +121,15 @@ public: void setListener(const sp& listener); void setRecordingProxyListener(const sp& listener); + + // Configure preview callbacks to app. Only one of the older + // callbacks or the callback surface can be active at the same time; + // enabling one will disable the other if active. Flags can be + // disabled by calling it with CAMERA_FRAME_CALLBACK_FLAG_NOOP, and + // Target by calling it with a NULL interface. void setPreviewCallbackFlags(int preview_callback_flag); + status_t setPreviewCallbackTarget( + const sp& callbackProducer); sp getRecordingProxy(); diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h index 2236c1f..b2125bd 100644 --- a/include/camera/ICamera.h +++ b/include/camera/ICamera.h @@ -51,8 +51,15 @@ public: const sp& bufferProducer) = 0; // set the preview callback flag to affect how the received frames from - // preview are handled. + // preview are handled. Enabling preview callback flags disables any active + // preview callback surface set by setPreviewCallbackTarget(). virtual void setPreviewCallbackFlag(int flag) = 0; + // set a buffer interface to use for client-received preview frames instead + // of preview callback buffers. Passing a valid interface here disables any + // active preview callbacks set by setPreviewCallbackFlag(). Passing NULL + // disables the use of the callback target. 
+ virtual status_t setPreviewCallbackTarget( + const sp& callbackProducer) = 0; // start preview mode, must call setPreviewDisplay first virtual status_t startPreview() = 0; diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index 9421a77..f3b3d94 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -578,27 +578,94 @@ void Camera2Client::setPreviewCallbackFlagL(Parameters ¶ms, int flag) { params.previewCallbackOneShot = true; } if (params.previewCallbackFlags != (uint32_t)flag) { + + if (flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) { + // Disable any existing preview callback window when enabling + // preview callback flags + res = mCallbackProcessor->setCallbackWindow(NULL); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to clear preview callback surface:" + " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); + return; + } + params.previewCallbackSurface = false; + } + params.previewCallbackFlags = flag; + switch(params.state) { + case Parameters::PREVIEW: + res = startPreviewL(params, true); + break; + case Parameters::RECORD: + case Parameters::VIDEO_SNAPSHOT: + res = startRecordingL(params, true); + break; + default: + break; + } + if (res != OK) { + ALOGE("%s: Camera %d: Unable to refresh request in state %s", + __FUNCTION__, mCameraId, + Parameters::getStateName(params.state)); + } + } + +} + +status_t Camera2Client::setPreviewCallbackTarget( + const sp& callbackProducer) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + sp window; + if (callbackProducer != 0) { + window = new Surface(callbackProducer); + } + + res = mCallbackProcessor->setCallbackWindow(window); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + + SharedParameters::Lock l(mParameters); + + if (window != NULL) { + // Disable traditional callbacks when a valid callback target is given + l.mParameters.previewCallbackFlags = CAMERA_FRAME_CALLBACK_FLAG_NOOP; + l.mParameters.previewCallbackOneShot = false; + l.mParameters.previewCallbackSurface = true; + } else { + // Disable callback target if given a NULL interface. 
+ l.mParameters.previewCallbackSurface = false; + } + + switch(l.mParameters.state) { case Parameters::PREVIEW: - res = startPreviewL(params, true); + res = startPreviewL(l.mParameters, true); break; case Parameters::RECORD: case Parameters::VIDEO_SNAPSHOT: - res = startRecordingL(params, true); + res = startRecordingL(l.mParameters, true); break; default: break; - } - if (res != OK) { - ALOGE("%s: Camera %d: Unable to refresh request in state %s", - __FUNCTION__, mCameraId, - Parameters::getStateName(params.state)); - } + } + if (res != OK) { + ALOGE("%s: Camera %d: Unable to refresh request in state %s", + __FUNCTION__, mCameraId, + Parameters::getStateName(l.mParameters.state)); } + return OK; } + status_t Camera2Client::startPreview() { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); @@ -645,8 +712,10 @@ status_t Camera2Client::startPreviewL(Parameters ¶ms, bool restart) { } Vector outputStreams; - bool callbacksEnabled = params.previewCallbackFlags & - CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK; + bool callbacksEnabled = (params.previewCallbackFlags & + CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) || + params.previewCallbackSurface; + if (callbacksEnabled) { res = mCallbackProcessor->updateStream(params); if (res != OK) { @@ -860,8 +929,10 @@ status_t Camera2Client::startRecordingL(Parameters ¶ms, bool restart) { } Vector outputStreams; - bool callbacksEnabled = params.previewCallbackFlags & - CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK; + bool callbacksEnabled = (params.previewCallbackFlags & + CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) || + params.previewCallbackSurface; + if (callbacksEnabled) { res = mCallbackProcessor->updateStream(params); if (res != OK) { diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h index 713fab3..46c6ceb 100644 --- a/services/camera/libcameraservice/Camera2Client.h +++ b/services/camera/libcameraservice/Camera2Client.h @@ -51,6 +51,9 @@ public: virtual status_t setPreviewTexture( const sp& bufferProducer); virtual void setPreviewCallbackFlag(int flag); + virtual status_t setPreviewCallbackTarget( + const sp& callbackProducer); + virtual status_t startPreview(); virtual void stopPreview(); virtual bool previewEnabled(); diff --git a/services/camera/libcameraservice/CameraClient.cpp b/services/camera/libcameraservice/CameraClient.cpp index e577fa3..be78f69 100644 --- a/services/camera/libcameraservice/CameraClient.cpp +++ b/services/camera/libcameraservice/CameraClient.cpp @@ -347,6 +347,12 @@ void CameraClient::setPreviewCallbackFlag(int callback_flag) { } } +status_t CameraClient::setPreviewCallbackTarget( + const sp& callbackProducer) { + ALOGE("%s: Unimplemented!", __FUNCTION__); + return INVALID_OPERATION; +} + // start preview mode status_t CameraClient::startPreview() { LOG1("startPreview (pid %d)", getCallingPid()); diff --git a/services/camera/libcameraservice/CameraClient.h b/services/camera/libcameraservice/CameraClient.h index 7f0cb29..abde75a 100644 --- a/services/camera/libcameraservice/CameraClient.h +++ b/services/camera/libcameraservice/CameraClient.h @@ -40,6 +40,8 @@ public: virtual status_t setPreviewDisplay(const sp& surface); virtual status_t setPreviewTexture(const sp& bufferProducer); virtual void setPreviewCallbackFlag(int flag); + virtual status_t setPreviewCallbackTarget( + const sp& callbackProducer); virtual status_t startPreview(); virtual void stopPreview(); virtual bool previewEnabled(); diff --git a/services/camera/libcameraservice/CameraService.h 
b/services/camera/libcameraservice/CameraService.h index 8cb1691..bb3fb25 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -190,6 +190,8 @@ public: virtual status_t setPreviewDisplay(const sp& surface) = 0; virtual status_t setPreviewTexture(const sp& bufferProducer)=0; virtual void setPreviewCallbackFlag(int flag) = 0; + virtual status_t setPreviewCallbackTarget( + const sp& callbackProducer) = 0; virtual status_t startPreview() = 0; virtual void stopPreview() = 0; virtual bool previewEnabled() = 0; diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp index 30c14ef..1734c6a 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp @@ -34,6 +34,7 @@ CallbackProcessor::CallbackProcessor(wp client): Thread(false), mClient(client), mCallbackAvailable(false), + mCallbackToApp(false), mCallbackStreamId(NO_STREAM) { } @@ -50,6 +51,35 @@ void CallbackProcessor::onFrameAvailable() { } } +status_t CallbackProcessor::setCallbackWindow( + sp callbackWindow) { + ATRACE_CALL(); + status_t res; + + Mutex::Autolock l(mInputMutex); + + sp client = mClient.promote(); + if (client == 0) return OK; + sp device = client->getCameraDevice(); + + // If the window is changing, clear out stream if it already exists + if (mCallbackWindow != callbackWindow && mCallbackStreamId != NO_STREAM) { + res = device->deleteStream(mCallbackStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete old stream " + "for callbacks: %s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + return res; + } + mCallbackStreamId = NO_STREAM; + mCallbackConsumer.clear(); + } + mCallbackWindow = callbackWindow; + mCallbackToApp = (mCallbackWindow != NULL); + + return OK; +} + status_t CallbackProcessor::updateStream(const Parameters ¶ms) { ATRACE_CALL(); status_t res; @@ -60,8 +90,8 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { if (client == 0) return OK; sp device = client->getCameraDevice(); - if (mCallbackConsumer == 0) { - // Create CPU buffer queue endpoint + if (!mCallbackToApp && mCallbackConsumer == 0) { + // Create CPU buffer queue endpoint, since app hasn't given us one mCallbackConsumer = new CpuConsumer(kCallbackHeapCount); mCallbackConsumer->setFrameAvailableListener(this); mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer")); @@ -69,6 +99,9 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { mCallbackConsumer->getProducerInterface()); } + uint32_t targetFormat = mCallbackToApp ? (uint32_t)HAL_PIXEL_FORMAT_YV12 : + (uint32_t)params.previewFormat; + if (mCallbackStreamId != NO_STREAM) { // Check if stream parameters have to change uint32_t currentWidth, currentHeight, currentFormat; @@ -82,17 +115,18 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { } if (currentWidth != (uint32_t)params.previewWidth || currentHeight != (uint32_t)params.previewHeight || - currentFormat != (uint32_t)params.previewFormat) { + currentFormat != targetFormat) { // Since size should only change while preview is not running, // assuming that all existing use of old callback stream is // completed. 
- ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed", - __FUNCTION__, client->getCameraId(), mCallbackStreamId); + ALOGV("%s: Camera %d: Deleting stream %d since the buffer" + " dimensions changed", __FUNCTION__, + client->getCameraId(), mCallbackStreamId); res = device->deleteStream(mCallbackStreamId); if (res != OK) { ALOGE("%s: Camera %d: Unable to delete old output stream " - "for callbacks: %s (%d)", __FUNCTION__, client->getCameraId(), - strerror(-res), res); + "for callbacks: %s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); return res; } mCallbackStreamId = NO_STREAM; @@ -102,10 +136,10 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { if (mCallbackStreamId == NO_STREAM) { ALOGV("Creating callback stream: %d %d format 0x%x", params.previewWidth, params.previewHeight, - params.previewFormat); + targetFormat); res = device->createStream(mCallbackWindow, params.previewWidth, params.previewHeight, - params.previewFormat, 0, &mCallbackStreamId); + targetFormat, 0, &mCallbackStreamId); if (res != OK) { ALOGE("%s: Camera %d: Can't create output stream for callbacks: " "%s (%d)", __FUNCTION__, client->getCameraId(), diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.h b/services/camera/libcameraservice/camera2/CallbackProcessor.h index e68bb75..5c46e0d 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.h +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.h @@ -44,6 +44,8 @@ class CallbackProcessor: void onFrameAvailable(); + // Set to NULL to disable the direct-to-app callback window + status_t setCallbackWindow(sp callbackWindow); status_t updateStream(const Parameters ¶ms); status_t deleteStream(); int getStreamId() const; @@ -61,6 +63,9 @@ class CallbackProcessor: NO_STREAM = -1 }; + // True if mCallbackWindow is a remote consumer, false if just the local + // mCallbackConsumer + bool mCallbackToApp; int mCallbackStreamId; static const size_t kCallbackHeapCount = 6; sp mCallbackConsumer; diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index d13fe8b..1108535 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -787,6 +787,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { previewCallbackFlags = 0; previewCallbackOneShot = false; + previewCallbackSurface = false; char value[PROPERTY_VALUE_MAX]; property_get("camera.disable_zsl_mode", value, "0"); diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h index fe3ec1d..83743f8 100644 --- a/services/camera/libcameraservice/camera2/Parameters.h +++ b/services/camera/libcameraservice/camera2/Parameters.h @@ -142,6 +142,7 @@ struct Parameters { uint32_t previewCallbackFlags; bool previewCallbackOneShot; + bool previewCallbackSurface; bool zslMode; -- cgit v1.1 From 45ae3d644289f656cebf7aded0783efca0342dd3 Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Wed, 17 Apr 2013 16:12:27 -0700 Subject: Add new error codes for DRM to improve error reporting bug: 8621516 Change-Id: I9a06c9ea5bfde924906d3876a417b9451b6abde8 --- include/media/stagefright/MediaErrors.h | 2 ++ 1 file changed, 2 insertions(+) diff --git a/include/media/stagefright/MediaErrors.h b/include/media/stagefright/MediaErrors.h index bb01467..ee5e4e2 100644 --- a/include/media/stagefright/MediaErrors.h +++ 
b/include/media/stagefright/MediaErrors.h @@ -54,6 +54,8 @@ enum { ERROR_DRM_DECRYPT = DRM_ERROR_BASE - 5, ERROR_DRM_CANNOT_HANDLE = DRM_ERROR_BASE - 6, ERROR_DRM_TAMPER_DETECTED = DRM_ERROR_BASE - 7, + ERROR_DRM_NOT_PROVISIONED = DRM_ERROR_BASE - 8, + ERROR_DRM_DEVICE_REVOKED = DRM_ERROR_BASE - 9, ERROR_DRM_VENDOR_MAX = DRM_ERROR_BASE - 500, ERROR_DRM_VENDOR_MIN = DRM_ERROR_BASE - 999, -- cgit v1.1 From 48186b6ec99aa71ec48338a55f2a2d8291681fe4 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Wed, 17 Apr 2013 11:49:11 -0700 Subject: Fix sample info parsing b/8626561 Change-Id: Ibd5168282eb33d1abdc423e15a0d9aeb5a1ad687 --- media/libstagefright/MPEG4Extractor.cpp | 141 +++++++++++++++++++------------- 1 file changed, 83 insertions(+), 58 deletions(-) diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index 3503aaf..145869e 100644 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -83,7 +83,7 @@ private: uint8_t mCryptoKey[16]; // passed in from extractor uint32_t mCurrentAuxInfoType; uint32_t mCurrentAuxInfoTypeParameter; - uint32_t mCurrentDefaultSampleInfoSize; + int32_t mCurrentDefaultSampleInfoSize; uint32_t mCurrentSampleInfoCount; uint32_t mCurrentSampleInfoAllocSize; uint8_t* mCurrentSampleInfoSizes; @@ -320,6 +320,21 @@ static const char *FourCC2MIME(uint32_t fourcc) { } } +static bool AdjustChannelsAndRate(uint32_t fourcc, uint32_t *channels, uint32_t *rate) { + if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, FourCC2MIME(fourcc))) { + // AMR NB audio is always mono, 8kHz + *channels = 1; + *rate = 8000; + return true; + } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, FourCC2MIME(fourcc))) { + // AMR WB audio is always mono, 16kHz + *channels = 1; + *rate = 16000; + return true; + } + return false; +} + MPEG4Extractor::MPEG4Extractor(const sp &source) : mSidxDuration(0), mMoofOffset(0), @@ -443,6 +458,14 @@ sp MPEG4Extractor::getTrackMetaData( return track->meta; } +static void MakeFourCCString(uint32_t x, char *s) { + s[0] = x >> 24; + s[1] = (x >> 16) & 0xff; + s[2] = (x >> 8) & 0xff; + s[3] = x & 0xff; + s[4] = '\0'; +} + status_t MPEG4Extractor::readMetaData() { if (mInitCheck != NO_INIT) { return mInitCheck; @@ -673,14 +696,6 @@ status_t MPEG4Extractor::parseDrmSINF(off64_t *offset, off64_t data_offset) { return UNKNOWN_ERROR; // Return a dummy error. 
} -static void MakeFourCCString(uint32_t x, char *s) { - s[0] = x >> 24; - s[1] = (x >> 16) & 0xff; - s[2] = (x >> 8) & 0xff; - s[3] = x & 0xff; - s[4] = '\0'; -} - struct PathAdder { PathAdder(Vector *path, uint32_t chunkType) : mPath(path) { @@ -891,13 +906,19 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('f', 'r', 'm', 'a'): { - int32_t original_fourcc; + uint32_t original_fourcc; if (mDataSource->readAt(data_offset, &original_fourcc, 4) < 4) { return ERROR_IO; } original_fourcc = ntohl(original_fourcc); ALOGV("read original format: %d", original_fourcc); mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(original_fourcc)); + uint32_t num_channels = 0; + uint32_t sample_rate = 0; + if (AdjustChannelsAndRate(original_fourcc, &num_channels, &sample_rate)) { + mLastTrack->meta->setInt32(kKeyChannelCount, num_channels); + mLastTrack->meta->setInt32(kKeySampleRate, sample_rate); + } *offset += chunk_size; break; } @@ -1134,6 +1155,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { } case FOURCC('m', 'p', '4', 'a'): + case FOURCC('e', 'n', 'c', 'a'): case FOURCC('s', 'a', 'm', 'r'): case FOURCC('s', 'a', 'w', 'b'): { @@ -1149,29 +1171,18 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { } uint16_t data_ref_index = U16_AT(&buffer[6]); - uint16_t num_channels = U16_AT(&buffer[16]); + uint32_t num_channels = U16_AT(&buffer[16]); uint16_t sample_size = U16_AT(&buffer[18]); uint32_t sample_rate = U32_AT(&buffer[24]) >> 16; - if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, - FourCC2MIME(chunk_type))) { - // AMR NB audio is always mono, 8kHz - num_channels = 1; - sample_rate = 8000; - } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, - FourCC2MIME(chunk_type))) { - // AMR WB audio is always mono, 16kHz - num_channels = 1; - sample_rate = 16000; + if (chunk_type != FOURCC('e', 'n', 'c', 'a')) { + // if the chunk type is enca, we'll get the type from the sinf/frma box later + mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type)); + AdjustChannelsAndRate(chunk_type, &num_channels, &sample_rate); } - -#if 0 - printf("*** coding='%s' %d channels, size %d, rate %d\n", + ALOGV("*** coding='%s' %d channels, size %d, rate %d\n", chunk, num_channels, sample_size, sample_rate); -#endif - - mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type)); mLastTrack->meta->setInt32(kKeyChannelCount, num_channels); mLastTrack->meta->setInt32(kKeySampleRate, sample_rate); @@ -2297,6 +2308,7 @@ MPEG4Source::MPEG4Source( mSrcBuffer(NULL) { mFormat->findInt32(kKeyCryptoMode, &mCryptoMode); + mDefaultIVSize = 0; mFormat->findInt32(kKeyCryptoDefaultIVSize, &mDefaultIVSize); uint32_t keytype; const void *key; @@ -2544,13 +2556,17 @@ status_t MPEG4Source::parseSampleAuxiliaryInformationSizes(off64_t offset, off64 if (!mDataSource->getUInt32(offset, &smplcnt)) { return ERROR_MALFORMED; } + mCurrentSampleInfoCount = smplcnt; offset += 4; + if (mCurrentDefaultSampleInfoSize != 0) { + ALOGV("@@@@ using default sample info size of %d", mCurrentDefaultSampleInfoSize); + return OK; + } if (smplcnt > mCurrentSampleInfoAllocSize) { mCurrentSampleInfoSizes = (uint8_t*) realloc(mCurrentSampleInfoSizes, smplcnt); mCurrentSampleInfoAllocSize = smplcnt; } - mCurrentSampleInfoCount = smplcnt; mDataSource->readAt(offset, mCurrentSampleInfoSizes, smplcnt); return OK; @@ -2608,7 +2624,8 @@ status_t MPEG4Source::parseSampleAuxiliaryInformationOffsets(off64_t offset, off drmoffset += mCurrentMoofOffset; int ivlength; 
CHECK(mFormat->findInt32(kKeyCryptoDefaultIVSize, &ivlength)); - int foo = 1; + + // read CencSampleAuxiliaryDataFormats for (size_t i = 0; i < mCurrentSampleInfoCount; i++) { Sample *smpl = &mCurrentSamples.editItemAt(i); @@ -2619,24 +2636,33 @@ status_t MPEG4Source::parseSampleAuxiliaryInformationOffsets(off64_t offset, off drmoffset += ivlength; - uint16_t numsubsamples; - if (!mDataSource->getUInt16(drmoffset, &numsubsamples)) { - return ERROR_IO; + int32_t smplinfosize = mCurrentDefaultSampleInfoSize; + if (smplinfosize == 0) { + smplinfosize = mCurrentSampleInfoSizes[i]; } - drmoffset += 2; - for (size_t j = 0; j < numsubsamples; j++) { - uint16_t numclear; - uint32_t numencrypted; - if (!mDataSource->getUInt16(drmoffset, &numclear)) { + if (smplinfosize > ivlength) { + uint16_t numsubsamples; + if (!mDataSource->getUInt16(drmoffset, &numsubsamples)) { return ERROR_IO; } drmoffset += 2; - if (!mDataSource->getUInt32(drmoffset, &numencrypted)) { - return ERROR_IO; + for (size_t j = 0; j < numsubsamples; j++) { + uint16_t numclear; + uint32_t numencrypted; + if (!mDataSource->getUInt16(drmoffset, &numclear)) { + return ERROR_IO; + } + drmoffset += 2; + if (!mDataSource->getUInt32(drmoffset, &numencrypted)) { + return ERROR_IO; + } + drmoffset += 4; + smpl->clearsizes.add(numclear); + smpl->encryptedsizes.add(numencrypted); } - drmoffset += 4; - smpl->clearsizes.add(numclear); - smpl->encryptedsizes.add(numencrypted); + } else { + smpl->clearsizes.add(0); + smpl->encryptedsizes.add(smpl->size); } } @@ -3293,6 +3319,21 @@ status_t MPEG4Source::fragmentedRead( } } + const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex]; + const sp bufmeta = mBuffer->meta_data(); + bufmeta->clear(); + if (smpl->encryptedsizes.size()) { + // store clear/encrypted lengths in metadata + bufmeta->setData(kKeyPlainSizes, 0, + smpl->clearsizes.array(), smpl->clearsizes.size() * 4); + bufmeta->setData(kKeyEncryptedSizes, 0, + smpl->encryptedsizes.array(), smpl->encryptedsizes.size() * 4); + bufmeta->setData(kKeyCryptoIV, 0, smpl->iv, 16); // use 16 or the actual size? + bufmeta->setInt32(kKeyCryptoDefaultIVSize, mDefaultIVSize); + bufmeta->setInt32(kKeyCryptoMode, mCryptoMode); + bufmeta->setData(kKeyCryptoKey, 0, mCryptoKey, 16); + } + if (!mIsAVC || mWantsNALFragments) { if (newBuffer) { ssize_t num_bytes_read = @@ -3308,7 +3349,6 @@ status_t MPEG4Source::fragmentedRead( CHECK(mBuffer != NULL); mBuffer->set_range(0, size); - mBuffer->meta_data()->clear(); mBuffer->meta_data()->setInt64( kKeyTime, ((int64_t)cts * 1000000) / mTimescale); @@ -3432,7 +3472,6 @@ status_t MPEG4Source::fragmentedRead( mBuffer->set_range(0, dstOffset); } - mBuffer->meta_data()->clear(); mBuffer->meta_data()->setInt64( kKeyTime, ((int64_t)cts * 1000000) / mTimescale); @@ -3445,20 +3484,6 @@ status_t MPEG4Source::fragmentedRead( mBuffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); } - const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex]; - if (smpl->encryptedsizes.size()) { - // store clear/encrypted lengths in metadata - sp bufmeta = mBuffer->meta_data(); - bufmeta->setData(kKeyPlainSizes, 0, - smpl->clearsizes.array(), smpl->clearsizes.size() * 4); - bufmeta->setData(kKeyEncryptedSizes, 0, - smpl->encryptedsizes.array(), smpl->encryptedsizes.size() * 4); - bufmeta->setData(kKeyCryptoIV, 0, smpl->iv, 16); // use 16 or the actual size? 
- bufmeta->setInt32(kKeyCryptoDefaultIVSize, mDefaultIVSize); - bufmeta->setInt32(kKeyCryptoMode, mCryptoMode); - bufmeta->setData(kKeyCryptoKey, 0, mCryptoKey, 16); - } - ++mCurrentSampleIndex; *out = mBuffer; -- cgit v1.1 From 03ddaec84b65157af1dbf022a72de778dc59a63e Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 18 Apr 2013 13:24:17 -0700 Subject: Make sure MediaCodec::stop() and MediaCodec::release() still return instead of blocking indefinitely if the mediaserver died while the call is pending. Change-Id: If2789b7fe99634d947ce4a3bb69c04baff5f8b10 related-to-bug: 8397711 --- media/libstagefright/MediaCodec.cpp | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index 0d89c0f..e4e95d2 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -534,6 +534,20 @@ void MediaCodec::onMessageReceived(const sp &msg) { // the shutdown complete notification. sendErrorReponse = false; + + if (omxError == OMX_ErrorResourcesLost + && internalError == DEAD_OBJECT) { + // MediaServer died, there definitely won't + // be a shutdown complete notification after + // all. + + // note that we're directly going from + // STOPPING->UNINITIALIZED, instead of the + // usual STOPPING->INITIALIZED state. + setState(UNINITIALIZED); + + (new AMessage)->postReply(mReplyID); + } break; } @@ -1013,8 +1027,16 @@ void MediaCodec::onMessageReceived(const sp &msg) { if (mState != INITIALIZED && mState != CONFIGURED && mState != STARTED) { + // We may be in "UNINITIALIZED" state already without the + // client being aware of this if media server died while + // we were being stopped. The client would assume that + // after stop() returned, it would be safe to call release() + // and it should be in this case, no harm to allow a release() + // if we're already uninitialized. sp response = new AMessage; - response->setInt32("err", INVALID_OPERATION); + response->setInt32( + "err", + mState == UNINITIALIZED ? OK : INVALID_OPERATION); response->postReply(replyID); break; -- cgit v1.1 From c6deb68aa7d1b9a4a4ba4549411c3be2753723d2 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 18 Apr 2013 11:53:35 -0700 Subject: Fix ProCameraTests to pass on Wolfie, disabling failing tests Note: tests fail in racy conditions, but we don't occur those conditions in practice in other consumers of ProCamera. 
Bug: 8654735 Change-Id: Ic73395ad564031a6b30ad99762dee59f9c35cecf --- camera/tests/ProCameraTests.cpp | 88 ++++++++++++++++++++++++++++++----------- 1 file changed, 64 insertions(+), 24 deletions(-) diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index 2b5f3ad..f203949 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -80,7 +80,7 @@ struct ServiceListener : public BnCameraServiceListener { void onStatusChanged(Status status, int32_t cameraId) { dout << "On status changed: 0x" << std::hex - << status << " cameraId " << cameraId + << (unsigned int) status << " cameraId " << cameraId << std::endl; Mutex::Autolock al(mMutex); @@ -121,7 +121,7 @@ enum ProEvent { ACQUIRED, RELEASED, STOLEN, - BUFFER_RECEIVED, + FRAME_RECEIVED, RESULT_RECEIVED, }; @@ -158,6 +158,7 @@ public: ProCameraTestListener() { mEventMask = EVENT_MASK_ALL; + mDropFrames = false; } status_t WaitForEvent() { @@ -208,12 +209,19 @@ public: mEventMask = eventMask; } + // Automatically acquire/release frames as they are available + void SetDropFrames(bool dropFrames) { + Mutex::Autolock al(mListenerMutex); + mDropFrames = dropFrames; + } + private: void QueueEvent(ProEvent ev) { bool eventAdded = false; { Mutex::Autolock al(mListenerMutex); + // Drop events not part of mask if (ProEvent_Mask(ev) & mEventMask) { mProEventList.push(ev); eventAdded = true; @@ -253,16 +261,30 @@ protected: << " " << ext3 << std::endl; } - virtual void onBufferReceived(int streamId, - const CpuConsumer::LockedBuffer& buf) { + virtual void onFrameAvailable(int streamId, + const sp& consumer) { - dout << "Buffer received on streamId = " << streamId << - ", dataPtr = " << (void*)buf.data << - ", timestamp = " << buf.timestamp << std::endl; + QueueEvent(FRAME_RECEIVED); + + Mutex::Autolock al(mListenerMutex); + if (mDropFrames) { + CpuConsumer::LockedBuffer buf; + status_t ret; + + EXPECT_OK(ret); + if (OK == (ret = consumer->lockNextBuffer(&buf))) { - QueueEvent(BUFFER_RECEIVED); + dout << "Frame received on streamId = " << streamId << + ", dataPtr = " << (void*)buf.data << + ", timestamp = " << buf.timestamp << std::endl; + EXPECT_OK(consumer->unlockBuffer(buf)); + } + } else { + dout << "Frame received on streamId = " << streamId << std::endl; + } } + virtual void onResultReceived(int32_t frameId, camera_metadata* request) { dout << "Result received frameId = " << frameId @@ -282,6 +304,7 @@ protected: Mutex mConditionMutex; Condition mListenerCondition; int mEventMask; + bool mDropFrames; }; class ProCameraTest : public ::testing::Test { @@ -723,8 +746,11 @@ TEST_F(ProCameraTest, CpuConsumerSingle) { return; } - // FIXME: Note this test is broken because onBufferReceived was removed - mListener->SetEventMask(ProEvent_Mask(BUFFER_RECEIVED)); + mListener->SetEventMask(ProEvent_Mask(ACQUIRED) | + ProEvent_Mask(STOLEN) | + ProEvent_Mask(RELEASED) | + ProEvent_Mask(FRAME_RECEIVED)); + mListener->SetDropFrames(true); int streamId = -1; sp consumer; @@ -776,7 +802,7 @@ TEST_F(ProCameraTest, CpuConsumerSingle) { // Consume a couple of frames for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(BUFFER_RECEIVED, mListener->ReadEvent()); + EXPECT_EQ(FRAME_RECEIVED, mListener->ReadEvent()); } // Done: clean up @@ -790,8 +816,8 @@ TEST_F(ProCameraTest, CpuConsumerDual) { return; } - // FIXME: Note this test is broken because onBufferReceived was removed - mListener->SetEventMask(ProEvent_Mask(BUFFER_RECEIVED)); + 
mListener->SetEventMask(ProEvent_Mask(FRAME_RECEIVED)); + mListener->SetDropFrames(true); int streamId = -1; sp consumer; @@ -849,11 +875,11 @@ TEST_F(ProCameraTest, CpuConsumerDual) { for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { // stream id 1 EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(BUFFER_RECEIVED, mListener->ReadEvent()); + EXPECT_EQ(FRAME_RECEIVED, mListener->ReadEvent()); // stream id 2 EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(BUFFER_RECEIVED, mListener->ReadEvent()); + EXPECT_EQ(FRAME_RECEIVED, mListener->ReadEvent()); //TODO: events should be a struct with some data like the stream id } @@ -870,7 +896,8 @@ TEST_F(ProCameraTest, ResultReceiver) { } mListener->SetEventMask(ProEvent_Mask(RESULT_RECEIVED)); - //FIXME: if this is run right after the previous test we get BUFFER_RECEIVED + mListener->SetDropFrames(true); + //FIXME: if this is run right after the previous test we get FRAME_RECEIVED // need to filter out events at read time int streamId = -1; @@ -931,11 +958,14 @@ TEST_F(ProCameraTest, ResultReceiver) { EXPECT_OK(mCamera->exclusiveUnlock()); } -TEST_F(ProCameraTest, WaitForResult) { +// FIXME: This is racy and sometimes fails on waitForFrameMetadata +TEST_F(ProCameraTest, DISABLED_WaitForResult) { if (HasFatalFailure()) { return; } + mListener->SetDropFrames(true); + int streamId = -1; sp consumer; EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, @@ -955,7 +985,6 @@ TEST_F(ProCameraTest, WaitForResult) { } // Done: clean up - consumer->abandon(); // since we didn't consume any of the buffers EXPECT_OK(mCamera->deleteStream(streamId)); EXPECT_OK(mCamera->exclusiveUnlock()); } @@ -996,7 +1025,8 @@ TEST_F(ProCameraTest, WaitForSingleStreamBuffer) { EXPECT_OK(mCamera->exclusiveUnlock()); } -TEST_F(ProCameraTest, WaitForDualStreamBuffer) { +// FIXME: This is racy and sometimes fails on waitForFrameMetadata +TEST_F(ProCameraTest, DISABLED_WaitForDualStreamBuffer) { if (HasFatalFailure()) { return; } @@ -1142,6 +1172,7 @@ TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesAsync) { } const int NUM_REQUESTS = 20 * TEST_CPU_FRAME_COUNT; + const int CONSECUTIVE_FAILS_ASSUME_TIME_OUT = 5; int streamId = -1; sp consumer; @@ -1156,10 +1187,13 @@ TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesAsync) { ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1, /*requests*/NUM_REQUESTS)); + uint64_t lastFrameNumber = 0; + int numFrames; + // Consume a couple of results - for (int i = 0; i < NUM_REQUESTS; ++i) { - int numFrames; - EXPECT_TRUE((numFrames = mCamera->waitForFrameBuffer(streamId)) > 0); + int i; + for (i = 0; i < NUM_REQUESTS && lastFrameNumber < NUM_REQUESTS; ++i) { + EXPECT_LT(0, (numFrames = mCamera->waitForFrameBuffer(streamId))); dout << "Dropped " << (numFrames - 1) << " frames" << std::endl; @@ -1168,11 +1202,15 @@ TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesAsync) { // "Consume" the buffer CpuConsumer::LockedBuffer buf; - EXPECT_OK(consumer->lockNextBuffer(&buf)); + + EXPECT_EQ(OK, consumer->lockNextBuffer(&buf)); + + lastFrameNumber = buf.frameNumber; dout << "Buffer asynchronously received on streamId = " << streamId << ", dataPtr = " << (void*)buf.data << - ", timestamp = " << buf.timestamp << std::endl; + ", timestamp = " << buf.timestamp << + ", framenumber = " << buf.frameNumber << std::endl; // Process at 10fps, stream is at 15fps. 
// This means we will definitely fill up the buffer queue with @@ -1182,6 +1220,8 @@ TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesAsync) { EXPECT_OK(consumer->unlockBuffer(buf)); } + dout << "Done after " << i << " iterations " << std::endl; + // Done: clean up EXPECT_OK(mCamera->deleteStream(streamId)); EXPECT_OK(mCamera->exclusiveUnlock()); -- cgit v1.1 From 5ea77ae380a7ad9268fdbda1d7d3aa14b5839065 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 18 Apr 2013 15:26:47 -0700 Subject: Fix error message to match CTS Bug: 8657725 Change-Id: Ib608eb55f14d557d667f93e4f646c03d5ea470c1 --- services/medialog/MediaLogService.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/medialog/MediaLogService.cpp b/services/medialog/MediaLogService.cpp index f60749d..683fdf3 100644 --- a/services/medialog/MediaLogService.cpp +++ b/services/medialog/MediaLogService.cpp @@ -60,7 +60,9 @@ status_t MediaLogService::dump(int fd, const Vector& args) static const String16 sDump("android.permission.DUMP"); if (!(IPCThreadState::self()->getCallingUid() == AID_MEDIA || PermissionCache::checkCallingPermission(sDump))) { - fdprintf(fd, "Permission denied.\n"); + fdprintf(fd, "Permission Denial: can't dump media.log from pid=%d, uid=%d\n", + IPCThreadState::self()->getCallingPid(), + IPCThreadState::self()->getCallingUid()); return NO_ERROR; } -- cgit v1.1 From 949a926cadbc961fbb649c91d76d7aee8ea4d7bd Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 16 Apr 2013 12:35:20 -0700 Subject: Use correct pid when running under test harness This is a partial fix which addresses one of the root causes of this bug, but only when running under test harness. It has no effect otherwise. Bug: 8598539 Change-Id: I87444daef1d76b17544f331e651ba1c87893c381 --- services/audioflinger/AudioFlinger.cpp | 1 + services/audioflinger/ServiceUtilities.cpp | 5 +++-- services/audioflinger/ServiceUtilities.h | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index b3de526..87eb6aa 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -143,6 +143,7 @@ AudioFlinger::AudioFlinger() mMode(AUDIO_MODE_INVALID), mBtNrecIsOff(false) { + getpid_cached = getpid(); char value[PROPERTY_VALUE_MAX]; bool doLog = (property_get("ro.test_harness", value, "0") > 0) && (atoi(value) == 1); if (doLog) { diff --git a/services/audioflinger/ServiceUtilities.cpp b/services/audioflinger/ServiceUtilities.cpp index 6a58852..d15bd04 100644 --- a/services/audioflinger/ServiceUtilities.cpp +++ b/services/audioflinger/ServiceUtilities.cpp @@ -21,8 +21,9 @@ namespace android { -// This optimization assumes mediaserver process doesn't fork, which it doesn't -const pid_t getpid_cached = getpid(); +// Not valid until initialized by AudioFlinger constructor. It would have to be +// re-initialized if the process containing AudioFlinger service forks (which it doesn't). 
+pid_t getpid_cached; bool recordingAllowed() { if (getpid_cached == IPCThreadState::self()->getCallingPid()) return true; diff --git a/services/audioflinger/ServiceUtilities.h b/services/audioflinger/ServiceUtilities.h index f77ec5b..80cecba 100644 --- a/services/audioflinger/ServiceUtilities.h +++ b/services/audioflinger/ServiceUtilities.h @@ -18,7 +18,7 @@ namespace android { -extern const pid_t getpid_cached; +extern pid_t getpid_cached; bool recordingAllowed(); bool settingsAllowed(); -- cgit v1.1 From 5ffabf0664b80b3dd94d40a3a63db25ecd993fd0 Mon Sep 17 00:00:00 2001 From: Jamie Gennis Date: Thu, 18 Apr 2013 16:34:44 -0700 Subject: OMXNodeInstance: actually fix OMX_GetExtIndex logging Bug: 8538872 Change-Id: I228746e8eb502af4bba4054caa4d8569fab35025 --- media/libstagefright/omx/OMXNodeInstance.cpp | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index 971875f..e7d5e74 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -299,7 +299,9 @@ status_t OMXNodeInstance::enableGraphicBuffers( OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index); if (err != OMX_ErrorNone) { - ALOGE("OMX_GetExtensionIndex %s failed", name); + if (enable) { + ALOGE("OMX_GetExtensionIndex %s failed", name); + } return StatusFromOMXError(err); } @@ -378,9 +380,8 @@ status_t OMXNodeInstance::storeMetaDataInBuffers_l( OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index); if (err != OMX_ErrorNone) { - if (enable) { - ALOGE("OMX_GetExtensionIndex %s failed", name); - } + ALOGE("OMX_GetExtensionIndex %s failed", name); + return StatusFromOMXError(err); } -- cgit v1.1 From 7e7013392e302a28364df1dcee79b82ad90978b4 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 19 Apr 2013 11:55:18 -0700 Subject: A flush of a video decoder connected to a native window must reclaim output buffers already queued for rendering before considering a flush completed. Otherwise the decoder may not have enough output buffers to continue decoding after the discontinuity and we'll never dequeue more from the native window. Change-Id: I42e275dc336568e180081c6d7c0dc05fc9637c79 related-to-bug: 8578467 --- include/media/stagefright/ACodec.h | 6 +++++- media/libstagefright/ACodec.cpp | 40 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h index 5cfe5bc..34bae29 100644 --- a/include/media/stagefright/ACodec.h +++ b/include/media/stagefright/ACodec.h @@ -264,12 +264,16 @@ private: status_t pushBlankBuffersToNativeWindow(); - // Returns true iff all buffers on the given port have status OWNED_BY_US. + // Returns true iff all buffers on the given port have status + // OWNED_BY_US or OWNED_BY_NATIVE_WINDOW. 
bool allYourBuffersAreBelongToUs(OMX_U32 portIndex); bool allYourBuffersAreBelongToUs(); + void waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); + size_t countBuffersOwnedByComponent(OMX_U32 portIndex) const; + size_t countBuffersOwnedByNativeWindow() const; void deferMessage(const sp &msg); void processDeferredMessages(); diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index b3bc6d8..6d952c3 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -2135,6 +2135,42 @@ size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { return n; } +size_t ACodec::countBuffersOwnedByNativeWindow() const { + size_t n = 0; + + for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { + const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); + + if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { + ++n; + } + } + + return n; +} + +void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { + if (mNativeWindow == NULL) { + return; + } + + int minUndequeuedBufs = 0; + status_t err = mNativeWindow->query( + mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, + &minUndequeuedBufs); + + if (err != OK) { + ALOGE("[%s] NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", + mComponentName.c_str(), strerror(-err), -err); + + minUndequeuedBufs = 0; + } + + while (countBuffersOwnedByNativeWindow() > (size_t)minUndequeuedBufs + && dequeueBufferFromNativeWindow() != NULL) { + } +} + bool ACodec::allYourBuffersAreBelongToUs( OMX_U32 portIndex) { for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { @@ -4177,6 +4213,10 @@ void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput] && mCodec->allYourBuffersAreBelongToUs()) { + // We now own all buffers except possibly those still queued with + // the native window for rendering. Let's get those back as well. + mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); + sp notify = mCodec->mNotify->dup(); notify->setInt32("what", ACodec::kWhatFlushCompleted); notify->post(); -- cgit v1.1 From aa7f97bb9c70176245ffb7ed0ce52bee6c1a57d7 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 19 Apr 2013 14:33:45 -0700 Subject: 3rd time's the charm, right? Fix another instance where MediaCodec would not return from a stop() or release() call if mediaserver dies at just the right moment. 
Change-Id: I7728f8df82d62602d4d272f8023aa88678dd7d95 related-to-bug: 8397711 --- include/media/stagefright/MediaCodec.h | 1 + media/libstagefright/MediaCodec.cpp | 54 ++++++++++++++++++---------------- 2 files changed, 29 insertions(+), 26 deletions(-) diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h index 35f46dc..a06a8e1 100644 --- a/include/media/stagefright/MediaCodec.h +++ b/include/media/stagefright/MediaCodec.h @@ -176,6 +176,7 @@ private: kFlagDequeueInputPending = 16, kFlagDequeueOutputPending = 32, kFlagIsSecure = 64, + kFlagSawMediaServerDie = 128, }; struct BufferInfo { diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index e4e95d2..ae7bb17 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -506,6 +506,11 @@ void MediaCodec::onMessageReceived(const sp &msg) { "(omx error 0x%08x, internalError %d)", omxError, internalError); + if (omxError == OMX_ErrorResourcesLost + && internalError == DEAD_OBJECT) { + mFlags |= kFlagSawMediaServerDie; + } + bool sendErrorReponse = true; switch (mState) { @@ -535,8 +540,7 @@ void MediaCodec::onMessageReceived(const sp &msg) { sendErrorReponse = false; - if (omxError == OMX_ErrorResourcesLost - && internalError == DEAD_OBJECT) { + if (mFlags & kFlagSawMediaServerDie) { // MediaServer died, there definitely won't // be a shutdown complete notification after // all. @@ -999,29 +1003,11 @@ void MediaCodec::onMessageReceived(const sp &msg) { } case kWhatStop: - { - uint32_t replyID; - CHECK(msg->senderAwaitsResponse(&replyID)); - - if (mState != INITIALIZED - && mState != CONFIGURED && mState != STARTED) { - sp response = new AMessage; - response->setInt32("err", INVALID_OPERATION); - - response->postReply(replyID); - break; - } - - mReplyID = replyID; - setState(STOPPING); - - mCodec->initiateShutdown(true /* keepComponentAllocated */); - returnBuffersToCodec(); - break; - } - case kWhatRelease: { + State targetState = + (msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED; + uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); @@ -1033,19 +1019,30 @@ void MediaCodec::onMessageReceived(const sp &msg) { // after stop() returned, it would be safe to call release() // and it should be in this case, no harm to allow a release() // if we're already uninitialized. + // Similarly stopping a stopped MediaCodec should be benign. sp response = new AMessage; response->setInt32( "err", - mState == UNINITIALIZED ? OK : INVALID_OPERATION); + mState == targetState ? OK : INVALID_OPERATION); response->postReply(replyID); break; } + if (mFlags & kFlagSawMediaServerDie) { + // It's dead, Jim. Don't expect initiateShutdown to yield + // any useful results now... + setState(UNINITIALIZED); + (new AMessage)->postReply(replyID); + break; + } + mReplyID = replyID; - setState(RELEASING); + setState(msg->what() == kWhatStop ? STOPPING : RELEASING); + + mCodec->initiateShutdown( + msg->what() == kWhatStop /* keepComponentAllocated */); - mCodec->initiateShutdown(); returnBuffersToCodec(); break; } @@ -1422,6 +1419,11 @@ void MediaCodec::setState(State newState) { if (newState == UNINITIALIZED) { mComponentName.clear(); + + // The component is gone, mediaserver's probably back up already + // but should definitely be back up should we try to instantiate + // another component.. and the cycle continues. 
+ mFlags &= ~kFlagSawMediaServerDie; } mState = newState; -- cgit v1.1 From cc8d4f8f280dfdcc76df4f18f63e7f9c21684455 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Fri, 19 Apr 2013 17:26:13 -0700 Subject: Camera3: Register all stream buffers at stream configuration time. While registering them later would be nice for startup time, current hardware will have to reinitialize everything, resulting in glitches. Bug: 8657740 Change-Id: I1ed1f0a65d648f219f5228c8df15ffcf2a4b272e --- services/camera/libcameraservice/Camera3Device.cpp | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index d67b535..1433108 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -907,6 +907,28 @@ status_t Camera3Device::configureStreamsLocked() { return res; } + // Finish all stream configuration immediately. + // TODO: Try to relax this later back to lazy completion, which should be + // faster + + if (mInputStream != NULL) { + res = mInputStream->finishConfiguration(mHal3Device); + if (res != OK) { + SET_ERR_L("Can't finish configuring input stream %d: %s (%d)", + mInputStream->getId(), strerror(-res), res); + return res; + } + } + + for (size_t i = 0; i < mOutputStreams.size(); i++) { + res = mOutputStreams.editValueAt(i)->finishConfiguration(mHal3Device); + if (res != OK) { + SET_ERR_L("Can't finish configuring output stream %d: %s (%d)", + mOutputStreams[i]->getId(), strerror(-res), res); + return res; + } + } + // Request thread needs to know to avoid using repeat-last-settings protocol // across configure_streams() calls mRequestThread->configurationComplete(); -- cgit v1.1 From 3a9682a86ead84d6f60d3f3aa01b2b4d34af983d Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 23 Apr 2013 13:47:46 -0700 Subject: Remove all traces of wifi display sink implementation and supporting code. 
Change-Id: I64b681b7e3df1ef0dd80c0d261cacae293d5e684 related-to-bug: 8698812 --- media/libstagefright/wifi-display/Android.mk | 76 -- .../libstagefright/wifi-display/MediaReceiver.cpp | 328 ------ media/libstagefright/wifi-display/MediaReceiver.h | 111 -- media/libstagefright/wifi-display/MediaSender.cpp | 16 - media/libstagefright/wifi-display/MediaSender.h | 1 - media/libstagefright/wifi-display/SNTPClient.cpp | 174 --- media/libstagefright/wifi-display/SNTPClient.h | 62 -- media/libstagefright/wifi-display/TimeSyncer.cpp | 338 ------ media/libstagefright/wifi-display/TimeSyncer.h | 109 -- media/libstagefright/wifi-display/nettest.cpp | 400 ------- .../wifi-display/rtp/RTPAssembler.cpp | 328 ------ .../libstagefright/wifi-display/rtp/RTPAssembler.h | 92 -- .../wifi-display/rtp/RTPReceiver.cpp | 1153 -------------------- .../libstagefright/wifi-display/rtp/RTPReceiver.h | 125 --- .../libstagefright/wifi-display/rtp/RTPSender.cpp | 11 - media/libstagefright/wifi-display/rtp/RTPSender.h | 1 - media/libstagefright/wifi-display/rtptest.cpp | 565 ---------- .../wifi-display/sink/DirectRenderer.cpp | 625 ----------- .../wifi-display/sink/DirectRenderer.h | 82 -- .../wifi-display/sink/WifiDisplaySink.cpp | 917 ---------------- .../wifi-display/sink/WifiDisplaySink.h | 196 ---- .../wifi-display/source/PlaybackSession.cpp | 85 -- .../wifi-display/source/WifiDisplaySource.cpp | 14 - .../wifi-display/source/WifiDisplaySource.h | 3 - media/libstagefright/wifi-display/udptest.cpp | 116 -- media/libstagefright/wifi-display/wfd.cpp | 125 +-- 26 files changed, 4 insertions(+), 6049 deletions(-) delete mode 100644 media/libstagefright/wifi-display/MediaReceiver.cpp delete mode 100644 media/libstagefright/wifi-display/MediaReceiver.h delete mode 100644 media/libstagefright/wifi-display/SNTPClient.cpp delete mode 100644 media/libstagefright/wifi-display/SNTPClient.h delete mode 100644 media/libstagefright/wifi-display/TimeSyncer.cpp delete mode 100644 media/libstagefright/wifi-display/TimeSyncer.h delete mode 100644 media/libstagefright/wifi-display/nettest.cpp delete mode 100644 media/libstagefright/wifi-display/rtp/RTPAssembler.cpp delete mode 100644 media/libstagefright/wifi-display/rtp/RTPAssembler.h delete mode 100644 media/libstagefright/wifi-display/rtp/RTPReceiver.cpp delete mode 100644 media/libstagefright/wifi-display/rtp/RTPReceiver.h delete mode 100644 media/libstagefright/wifi-display/rtptest.cpp delete mode 100644 media/libstagefright/wifi-display/sink/DirectRenderer.cpp delete mode 100644 media/libstagefright/wifi-display/sink/DirectRenderer.h delete mode 100644 media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp delete mode 100644 media/libstagefright/wifi-display/sink/WifiDisplaySink.h delete mode 100644 media/libstagefright/wifi-display/udptest.cpp diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index f99ef60..061ae89 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -4,17 +4,10 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ ANetworkSession.cpp \ - MediaReceiver.cpp \ MediaSender.cpp \ Parameters.cpp \ ParsedMessage.cpp \ - rtp/RTPAssembler.cpp \ - rtp/RTPReceiver.cpp \ rtp/RTPSender.cpp \ - sink/DirectRenderer.cpp \ - sink/WifiDisplaySink.cpp \ - SNTPClient.cpp \ - TimeSyncer.cpp \ source/Converter.cpp \ source/MediaPuller.cpp \ source/PlaybackSession.cpp \ @@ -67,72 +60,3 @@ LOCAL_MODULE:= wfd LOCAL_MODULE_TAGS := debug include $(BUILD_EXECUTABLE) - 
-################################################################################ - -include $(CLEAR_VARS) - -LOCAL_SRC_FILES:= \ - udptest.cpp \ - -LOCAL_SHARED_LIBRARIES:= \ - libbinder \ - libgui \ - libmedia \ - libstagefright \ - libstagefright_foundation \ - libstagefright_wfd \ - libutils \ - liblog \ - -LOCAL_MODULE:= udptest - -LOCAL_MODULE_TAGS := debug - -include $(BUILD_EXECUTABLE) - -################################################################################ - -include $(CLEAR_VARS) - -LOCAL_SRC_FILES:= \ - rtptest.cpp \ - -LOCAL_SHARED_LIBRARIES:= \ - libbinder \ - libgui \ - libmedia \ - libstagefright \ - libstagefright_foundation \ - libstagefright_wfd \ - libutils \ - liblog \ - -LOCAL_MODULE:= rtptest - -LOCAL_MODULE_TAGS := debug - -include $(BUILD_EXECUTABLE) - -################################################################################ - -include $(CLEAR_VARS) - -LOCAL_SRC_FILES:= \ - nettest.cpp \ - -LOCAL_SHARED_LIBRARIES:= \ - libbinder \ - libgui \ - libmedia \ - libstagefright \ - libstagefright_foundation \ - libstagefright_wfd \ - libutils \ - liblog \ - -LOCAL_MODULE:= nettest - -LOCAL_MODULE_TAGS := debug - -include $(BUILD_EXECUTABLE) diff --git a/media/libstagefright/wifi-display/MediaReceiver.cpp b/media/libstagefright/wifi-display/MediaReceiver.cpp deleted file mode 100644 index 364acb9..0000000 --- a/media/libstagefright/wifi-display/MediaReceiver.cpp +++ /dev/null @@ -1,328 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "MediaReceiver" -#include - -#include "MediaReceiver.h" - -#include "ANetworkSession.h" -#include "AnotherPacketSource.h" -#include "rtp/RTPReceiver.h" - -#include -#include -#include -#include -#include - -namespace android { - -MediaReceiver::MediaReceiver( - const sp &netSession, - const sp ¬ify) - : mNetSession(netSession), - mNotify(notify), - mMode(MODE_UNDEFINED), - mGeneration(0), - mInitStatus(OK), - mInitDoneCount(0) { -} - -MediaReceiver::~MediaReceiver() { -} - -ssize_t MediaReceiver::addTrack( - RTPReceiver::TransportMode rtpMode, - RTPReceiver::TransportMode rtcpMode, - int32_t *localRTPPort) { - if (mMode != MODE_UNDEFINED) { - return INVALID_OPERATION; - } - - size_t trackIndex = mTrackInfos.size(); - - TrackInfo info; - - sp notify = new AMessage(kWhatReceiverNotify, id()); - notify->setInt32("generation", mGeneration); - notify->setSize("trackIndex", trackIndex); - - info.mReceiver = new RTPReceiver(mNetSession, notify); - looper()->registerHandler(info.mReceiver); - - info.mReceiver->registerPacketType( - 33, RTPReceiver::PACKETIZATION_TRANSPORT_STREAM); - - info.mReceiver->registerPacketType( - 96, RTPReceiver::PACKETIZATION_AAC); - - info.mReceiver->registerPacketType( - 97, RTPReceiver::PACKETIZATION_H264); - - status_t err = info.mReceiver->initAsync( - rtpMode, - rtcpMode, - localRTPPort); - - if (err != OK) { - looper()->unregisterHandler(info.mReceiver->id()); - info.mReceiver.clear(); - - return err; - } - - mTrackInfos.push_back(info); - - return trackIndex; -} - -status_t MediaReceiver::connectTrack( - size_t trackIndex, - const char *remoteHost, - int32_t remoteRTPPort, - int32_t remoteRTCPPort) { - if (trackIndex >= mTrackInfos.size()) { - return -ERANGE; - } - - TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); - return info->mReceiver->connect(remoteHost, remoteRTPPort, remoteRTCPPort); -} - -status_t MediaReceiver::initAsync(Mode mode) { - if ((mode == MODE_TRANSPORT_STREAM || mode == MODE_TRANSPORT_STREAM_RAW) - && mTrackInfos.size() > 1) { - return INVALID_OPERATION; - } - - sp msg = new AMessage(kWhatInit, id()); - msg->setInt32("mode", mode); - msg->post(); - - return OK; -} - -void MediaReceiver::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatInit: - { - int32_t mode; - CHECK(msg->findInt32("mode", &mode)); - - CHECK_EQ(mMode, MODE_UNDEFINED); - mMode = (Mode)mode; - - if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) { - notifyInitDone(mInitStatus); - } - - mTSParser = new ATSParser( - ATSParser::ALIGNED_VIDEO_DATA - | ATSParser::TS_TIMESTAMPS_ARE_ABSOLUTE); - - mFormatKnownMask = 0; - break; - } - - case kWhatReceiverNotify: - { - int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - if (generation != mGeneration) { - break; - } - - onReceiverNotify(msg); - break; - } - - default: - TRESPASS(); - } -} - -void MediaReceiver::onReceiverNotify(const sp &msg) { - int32_t what; - CHECK(msg->findInt32("what", &what)); - - switch (what) { - case RTPReceiver::kWhatInitDone: - { - ++mInitDoneCount; - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - if (err != OK) { - mInitStatus = err; - ++mGeneration; - } - - if (mMode != MODE_UNDEFINED) { - if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) { - notifyInitDone(mInitStatus); - } - } - break; - } - - case RTPReceiver::kWhatError: - { - int32_t err; - CHECK(msg->findInt32("err", &err)); - - notifyError(err); - break; - } - - case RTPReceiver::kWhatAccessUnit: - { - size_t 
trackIndex; - CHECK(msg->findSize("trackIndex", &trackIndex)); - - sp accessUnit; - CHECK(msg->findBuffer("accessUnit", &accessUnit)); - - int32_t followsDiscontinuity; - if (!msg->findInt32( - "followsDiscontinuity", &followsDiscontinuity)) { - followsDiscontinuity = 0; - } - - if (mMode == MODE_TRANSPORT_STREAM) { - if (followsDiscontinuity) { - mTSParser->signalDiscontinuity( - ATSParser::DISCONTINUITY_TIME, NULL /* extra */); - } - - for (size_t offset = 0; - offset < accessUnit->size(); offset += 188) { - status_t err = mTSParser->feedTSPacket( - accessUnit->data() + offset, 188); - - if (err != OK) { - notifyError(err); - break; - } - } - - drainPackets(0 /* trackIndex */, ATSParser::VIDEO); - drainPackets(1 /* trackIndex */, ATSParser::AUDIO); - } else { - postAccessUnit(trackIndex, accessUnit, NULL); - } - break; - } - - case RTPReceiver::kWhatPacketLost: - { - notifyPacketLost(); - break; - } - - default: - TRESPASS(); - } -} - -void MediaReceiver::drainPackets( - size_t trackIndex, ATSParser::SourceType type) { - sp source = - static_cast( - mTSParser->getSource(type).get()); - - if (source == NULL) { - return; - } - - sp format; - if (!(mFormatKnownMask & (1ul << trackIndex))) { - sp meta = source->getFormat(); - CHECK(meta != NULL); - - CHECK_EQ((status_t)OK, convertMetaDataToMessage(meta, &format)); - - mFormatKnownMask |= 1ul << trackIndex; - } - - status_t finalResult; - while (source->hasBufferAvailable(&finalResult)) { - sp accessUnit; - status_t err = source->dequeueAccessUnit(&accessUnit); - if (err == OK) { - postAccessUnit(trackIndex, accessUnit, format); - format.clear(); - } else if (err != INFO_DISCONTINUITY) { - notifyError(err); - } - } - - if (finalResult != OK) { - notifyError(finalResult); - } -} - -void MediaReceiver::notifyInitDone(status_t err) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatInitDone); - notify->setInt32("err", err); - notify->post(); -} - -void MediaReceiver::notifyError(status_t err) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatError); - notify->setInt32("err", err); - notify->post(); -} - -void MediaReceiver::notifyPacketLost() { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatPacketLost); - notify->post(); -} - -void MediaReceiver::postAccessUnit( - size_t trackIndex, - const sp &accessUnit, - const sp &format) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatAccessUnit); - notify->setSize("trackIndex", trackIndex); - notify->setBuffer("accessUnit", accessUnit); - - if (format != NULL) { - notify->setMessage("format", format); - } - - notify->post(); -} - -status_t MediaReceiver::informSender( - size_t trackIndex, const sp ¶ms) { - if (trackIndex >= mTrackInfos.size()) { - return -ERANGE; - } - - TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); - return info->mReceiver->informSender(params); -} - -} // namespace android - - diff --git a/media/libstagefright/wifi-display/MediaReceiver.h b/media/libstagefright/wifi-display/MediaReceiver.h deleted file mode 100644 index afbb407..0000000 --- a/media/libstagefright/wifi-display/MediaReceiver.h +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
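// The kWhatAccessUnit handling above walks the reassembled RTP payload in
// fixed 188-byte MPEG-TS packets before handing each one to the TS parser.
// A minimal, self-contained sketch of that slicing; the sync-byte check and
// the callback type are illustrative additions, not the original ATSParser API.
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Hypothetical per-packet callback standing in for a TS demuxer entry point.
using TsPacketSink = bool (*)(const uint8_t *pkt, size_t len);

// Feed a buffer to the sink in 188-byte MPEG-TS packets, as the removed
// MediaReceiver did for MODE_TRANSPORT_STREAM payloads.
static int feedTransportStream(const uint8_t *data, size_t size, TsPacketSink sink) {
    constexpr size_t kTSPacketSize = 188;
    int fed = 0;
    for (size_t offset = 0; offset + kTSPacketSize <= size; offset += kTSPacketSize) {
        if (data[offset] != 0x47) {
            return -1;   // 0x47 is the MPEG-TS sync byte; bail out if misaligned
        }
        if (!sink(data + offset, kTSPacketSize)) {
            return -1;
        }
        ++fed;
    }
    return fed;
}

int main() {
    uint8_t buf[188 * 2] = {};
    buf[0] = buf[188] = 0x47;
    int n = feedTransportStream(buf, sizeof(buf),
            [](const uint8_t *, size_t) { return true; });
    printf("fed %d packets\n", n);
    return 0;
}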
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include - -#include "ATSParser.h" -#include "rtp/RTPReceiver.h" - -namespace android { - -struct ABuffer; -struct ANetworkSession; -struct AMessage; -struct ATSParser; - -// This class facilitates receiving of media data for one or more tracks -// over RTP. Either a 1:1 track to RTP channel mapping is used or a single -// RTP channel provides the data for a transport stream that is consequently -// demuxed and its track's data provided to the observer. -struct MediaReceiver : public AHandler { - enum { - kWhatInitDone, - kWhatError, - kWhatAccessUnit, - kWhatPacketLost, - }; - - MediaReceiver( - const sp &netSession, - const sp ¬ify); - - ssize_t addTrack( - RTPReceiver::TransportMode rtpMode, - RTPReceiver::TransportMode rtcpMode, - int32_t *localRTPPort); - - status_t connectTrack( - size_t trackIndex, - const char *remoteHost, - int32_t remoteRTPPort, - int32_t remoteRTCPPort); - - enum Mode { - MODE_UNDEFINED, - MODE_TRANSPORT_STREAM, - MODE_TRANSPORT_STREAM_RAW, - MODE_ELEMENTARY_STREAMS, - }; - status_t initAsync(Mode mode); - - status_t informSender(size_t trackIndex, const sp ¶ms); - -protected: - virtual void onMessageReceived(const sp &msg); - virtual ~MediaReceiver(); - -private: - enum { - kWhatInit, - kWhatReceiverNotify, - }; - - struct TrackInfo { - sp mReceiver; - }; - - sp mNetSession; - sp mNotify; - - Mode mMode; - int32_t mGeneration; - - Vector mTrackInfos; - - status_t mInitStatus; - size_t mInitDoneCount; - - sp mTSParser; - uint32_t mFormatKnownMask; - - void onReceiverNotify(const sp &msg); - - void drainPackets(size_t trackIndex, ATSParser::SourceType type); - - void notifyInitDone(status_t err); - void notifyError(status_t err); - void notifyPacketLost(); - - void postAccessUnit( - size_t trackIndex, - const sp &accessUnit, - const sp &format); - - DISALLOW_EVIL_CONSTRUCTORS(MediaReceiver); -}; - -} // namespace android - diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp index 33af66d..8a3566f 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -341,22 +341,6 @@ void MediaSender::onSenderNotify(const sp &msg) { break; } - case kWhatInformSender: - { - int64_t avgLatencyUs; - CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs)); - - int64_t maxLatencyUs; - CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs)); - - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatInformSender); - notify->setInt64("avgLatencyUs", avgLatencyUs); - notify->setInt64("maxLatencyUs", maxLatencyUs); - notify->post(); - break; - } - default: TRESPASS(); } diff --git a/media/libstagefright/wifi-display/MediaSender.h b/media/libstagefright/wifi-display/MediaSender.h index 04538ea..64722c5 100644 --- a/media/libstagefright/wifi-display/MediaSender.h +++ b/media/libstagefright/wifi-display/MediaSender.h @@ -43,7 +43,6 @@ struct MediaSender : public AHandler { kWhatInitDone, kWhatError, kWhatNetworkStall, - kWhatInformSender, }; MediaSender( diff --git a/media/libstagefright/wifi-display/SNTPClient.cpp 
b/media/libstagefright/wifi-display/SNTPClient.cpp deleted file mode 100644 index 5c0af6a..0000000 --- a/media/libstagefright/wifi-display/SNTPClient.cpp +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include "SNTPClient.h" - -#include -#include - -#include -#include -#include -#include -#include - -namespace android { - -SNTPClient::SNTPClient() { -} - -status_t SNTPClient::requestTime(const char *host) { - struct hostent *ent; - int64_t requestTimeNTP, requestTimeUs; - ssize_t n; - int64_t responseTimeUs, responseTimeNTP; - int64_t originateTimeNTP, receiveTimeNTP, transmitTimeNTP; - int64_t roundTripTimeNTP, clockOffsetNTP; - - status_t err = UNKNOWN_ERROR; - - int s = socket(AF_INET, SOCK_DGRAM, 0); - - if (s < 0) { - err = -errno; - - goto bail; - } - - ent = gethostbyname(host); - - if (ent == NULL) { - err = -ENOENT; - goto bail2; - } - - struct sockaddr_in hostAddr; - memset(hostAddr.sin_zero, 0, sizeof(hostAddr.sin_zero)); - hostAddr.sin_family = AF_INET; - hostAddr.sin_port = htons(kNTPPort); - hostAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr; - - uint8_t packet[kNTPPacketSize]; - memset(packet, 0, sizeof(packet)); - - packet[0] = kNTPModeClient | (kNTPVersion << 3); - - requestTimeNTP = getNowNTP(); - requestTimeUs = ALooper::GetNowUs(); - writeTimeStamp(&packet[kNTPTransmitTimeOffset], requestTimeNTP); - - n = sendto( - s, packet, sizeof(packet), 0, - (const struct sockaddr *)&hostAddr, sizeof(hostAddr)); - - if (n < 0) { - err = -errno; - goto bail2; - } - - memset(packet, 0, sizeof(packet)); - - do { - n = recv(s, packet, sizeof(packet), 0); - } while (n < 0 && errno == EINTR); - - if (n < 0) { - err = -errno; - goto bail2; - } - - responseTimeUs = ALooper::GetNowUs(); - - responseTimeNTP = requestTimeNTP + makeNTP(responseTimeUs - requestTimeUs); - - originateTimeNTP = readTimeStamp(&packet[kNTPOriginateTimeOffset]); - receiveTimeNTP = readTimeStamp(&packet[kNTPReceiveTimeOffset]); - transmitTimeNTP = readTimeStamp(&packet[kNTPTransmitTimeOffset]); - - roundTripTimeNTP = - makeNTP(responseTimeUs - requestTimeUs) - - (transmitTimeNTP - receiveTimeNTP); - - clockOffsetNTP = - ((receiveTimeNTP - originateTimeNTP) - + (transmitTimeNTP - responseTimeNTP)) / 2; - - mTimeReferenceNTP = responseTimeNTP + clockOffsetNTP; - mTimeReferenceUs = responseTimeUs; - mRoundTripTimeNTP = roundTripTimeNTP; - - err = OK; - -bail2: - close(s); - s = -1; - -bail: - return err; -} - -int64_t SNTPClient::adjustTimeUs(int64_t timeUs) const { - uint64_t nowNTP = - mTimeReferenceNTP + makeNTP(timeUs - mTimeReferenceUs); - - int64_t nowUs = - (nowNTP >> 32) * 1000000ll - + ((nowNTP & 0xffffffff) * 1000000ll) / (1ll << 32); - - nowUs -= ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll; - - return nowUs; -} - -// static -void SNTPClient::writeTimeStamp(uint8_t *dst, uint64_t ntpTime) { - *dst++ = (ntpTime >> 56) & 0xff; - *dst++ = (ntpTime >> 48) & 0xff; - *dst++ = (ntpTime >> 40) & 0xff; - *dst++ 
= (ntpTime >> 32) & 0xff; - *dst++ = (ntpTime >> 24) & 0xff; - *dst++ = (ntpTime >> 16) & 0xff; - *dst++ = (ntpTime >> 8) & 0xff; - *dst++ = ntpTime & 0xff; -} - -// static -uint64_t SNTPClient::readTimeStamp(const uint8_t *dst) { - return U64_AT(dst); -} - -// static -uint64_t SNTPClient::getNowNTP() { - struct timeval tv; - gettimeofday(&tv, NULL /* time zone */); - - uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec; - - nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll; - - return makeNTP(nowUs); -} - -// static -uint64_t SNTPClient::makeNTP(uint64_t deltaUs) { - uint64_t hi = deltaUs / 1000000ll; - uint64_t lo = ((1ll << 32) * (deltaUs % 1000000ll)) / 1000000ll; - - return (hi << 32) | lo; -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/SNTPClient.h b/media/libstagefright/wifi-display/SNTPClient.h deleted file mode 100644 index 967d1fc..0000000 --- a/media/libstagefright/wifi-display/SNTPClient.h +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef SNTP_CLIENT_H_ - -#define SNTP_CLIENT_H_ - -#include -#include - -namespace android { - -// Implementation of the SNTP (Simple Network Time Protocol) -struct SNTPClient { - SNTPClient(); - - status_t requestTime(const char *host); - - // given a time obtained from ALooper::GetNowUs() - // return the number of us elapsed since Jan 1 1970 00:00:00 (UTC). - int64_t adjustTimeUs(int64_t timeUs) const; - -private: - enum { - kNTPPort = 123, - kNTPPacketSize = 48, - kNTPModeClient = 3, - kNTPVersion = 3, - kNTPTransmitTimeOffset = 40, - kNTPOriginateTimeOffset = 24, - kNTPReceiveTimeOffset = 32, - }; - - uint64_t mTimeReferenceNTP; - int64_t mTimeReferenceUs; - int64_t mRoundTripTimeNTP; - - static void writeTimeStamp(uint8_t *dst, uint64_t ntpTime); - static uint64_t readTimeStamp(const uint8_t *dst); - - static uint64_t getNowNTP(); - static uint64_t makeNTP(uint64_t deltaUs); - - DISALLOW_EVIL_CONSTRUCTORS(SNTPClient); -}; - -} // namespace android - -#endif // SNTP_CLIENT_H_ diff --git a/media/libstagefright/wifi-display/TimeSyncer.cpp b/media/libstagefright/wifi-display/TimeSyncer.cpp deleted file mode 100644 index cb429bc..0000000 --- a/media/libstagefright/wifi-display/TimeSyncer.cpp +++ /dev/null @@ -1,338 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
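// The SNTPClient code above converts between microseconds and the 64-bit NTP
// timestamp format (32.32 fixed point, epoch 1900-01-01); the (70*365+17)-day
// constant bridges the NTP and Unix epochs. A standalone sketch of just that
// arithmetic; the names below are illustrative, not from the original file.
#include <cstdint>
#include <cstdio>

// Microseconds between the NTP epoch (1900-01-01) and the Unix epoch
// (1970-01-01): 70 years, 17 of them leap years.
static const uint64_t kNtpUnixEpochDeltaUs =
        ((70ull * 365 + 17) * 24) * 60 * 60 * 1000000ull;

// Pack a microsecond count into NTP 32.32 fixed point
// (whole seconds in the high word, fractional seconds in the low word).
static uint64_t usToNtp(uint64_t us) {
    uint64_t hi = us / 1000000ull;
    uint64_t lo = ((1ull << 32) * (us % 1000000ull)) / 1000000ull;
    return (hi << 32) | lo;
}

// Inverse conversion, rounding down to the nearest microsecond.
static uint64_t ntpToUs(uint64_t ntp) {
    return (ntp >> 32) * 1000000ull
            + ((ntp & 0xffffffffull) * 1000000ull) / (1ull << 32);
}

int main() {
    uint64_t unixUs = 1500000000ull * 1000000ull;          // some Unix time in us
    uint64_t ntp = usToNtp(unixUs + kNtpUnixEpochDeltaUs);
    printf("ntp=0x%016llx  back=%llu us\n",
           (unsigned long long)ntp,
           (unsigned long long)(ntpToUs(ntp) - kNtpUnixEpochDeltaUs));
    return 0;
}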
- */ - -//#define LOG_NEBUG 0 -#define LOG_TAG "TimeSyncer" -#include - -#include "TimeSyncer.h" - -#include "ANetworkSession.h" - -#include -#include -#include -#include -#include -#include - -namespace android { - -TimeSyncer::TimeSyncer( - const sp &netSession, const sp ¬ify) - : mNetSession(netSession), - mNotify(notify), - mIsServer(false), - mConnected(false), - mUDPSession(0), - mSeqNo(0), - mTotalTimeUs(0.0), - mPendingT1(0ll), - mTimeoutGeneration(0) { -} - -TimeSyncer::~TimeSyncer() { -} - -void TimeSyncer::startServer(unsigned localPort) { - sp msg = new AMessage(kWhatStartServer, id()); - msg->setInt32("localPort", localPort); - msg->post(); -} - -void TimeSyncer::startClient(const char *remoteHost, unsigned remotePort) { - sp msg = new AMessage(kWhatStartClient, id()); - msg->setString("remoteHost", remoteHost); - msg->setInt32("remotePort", remotePort); - msg->post(); -} - -void TimeSyncer::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatStartClient: - { - AString remoteHost; - CHECK(msg->findString("remoteHost", &remoteHost)); - - int32_t remotePort; - CHECK(msg->findInt32("remotePort", &remotePort)); - - sp notify = new AMessage(kWhatUDPNotify, id()); - - CHECK_EQ((status_t)OK, - mNetSession->createUDPSession( - 0 /* localPort */, - remoteHost.c_str(), - remotePort, - notify, - &mUDPSession)); - - postSendPacket(); - break; - } - - case kWhatStartServer: - { - mIsServer = true; - - int32_t localPort; - CHECK(msg->findInt32("localPort", &localPort)); - - sp notify = new AMessage(kWhatUDPNotify, id()); - - CHECK_EQ((status_t)OK, - mNetSession->createUDPSession( - localPort, notify, &mUDPSession)); - - break; - } - - case kWhatSendPacket: - { - if (mHistory.size() == 0) { - ALOGI("starting batch"); - } - - TimeInfo ti; - memset(&ti, 0, sizeof(ti)); - - ti.mT1 = ALooper::GetNowUs(); - - CHECK_EQ((status_t)OK, - mNetSession->sendRequest( - mUDPSession, &ti, sizeof(ti))); - - mPendingT1 = ti.mT1; - postTimeout(); - break; - } - - case kWhatTimedOut: - { - int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - - if (generation != mTimeoutGeneration) { - break; - } - - ALOGI("timed out, sending another request"); - postSendPacket(); - break; - } - - case kWhatUDPNotify: - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - ALOGE("An error occurred in session %d (%d, '%s/%s').", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - mNetSession->destroySession(sessionID); - - cancelTimeout(); - - notifyError(err); - break; - } - - case ANetworkSession::kWhatDatagram: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - sp packet; - CHECK(msg->findBuffer("data", &packet)); - - int64_t arrivalTimeUs; - CHECK(packet->meta()->findInt64( - "arrivalTimeUs", &arrivalTimeUs)); - - CHECK_EQ(packet->size(), sizeof(TimeInfo)); - - TimeInfo *ti = (TimeInfo *)packet->data(); - - if (mIsServer) { - if (!mConnected) { - AString fromAddr; - CHECK(msg->findString("fromAddr", &fromAddr)); - - int32_t fromPort; - CHECK(msg->findInt32("fromPort", &fromPort)); - - CHECK_EQ((status_t)OK, - mNetSession->connectUDPSession( - mUDPSession, fromAddr.c_str(), fromPort)); - - mConnected = true; - } - - ti->mT2 = arrivalTimeUs; - ti->mT3 = ALooper::GetNowUs(); - - 
CHECK_EQ((status_t)OK, - mNetSession->sendRequest( - mUDPSession, ti, sizeof(*ti))); - } else { - if (ti->mT1 != mPendingT1) { - break; - } - - cancelTimeout(); - mPendingT1 = 0; - - ti->mT4 = arrivalTimeUs; - - // One way delay for a packet to travel from client - // to server or back (assumed to be the same either way). - int64_t delay = - (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2; - - // Offset between the client clock (T1, T4) and the - // server clock (T2, T3) timestamps. - int64_t offset = - (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2; - - mHistory.push_back(*ti); - - ALOGV("delay = %lld us,\toffset %lld us", - delay, - offset); - - if (mHistory.size() < kNumPacketsPerBatch) { - postSendPacket(1000000ll / 30); - } else { - notifyOffset(); - - ALOGI("batch done"); - - mHistory.clear(); - postSendPacket(kBatchDelayUs); - } - } - break; - } - - default: - TRESPASS(); - } - - break; - } - - default: - TRESPASS(); - } -} - -void TimeSyncer::postSendPacket(int64_t delayUs) { - (new AMessage(kWhatSendPacket, id()))->post(delayUs); -} - -void TimeSyncer::postTimeout() { - sp msg = new AMessage(kWhatTimedOut, id()); - msg->setInt32("generation", mTimeoutGeneration); - msg->post(kTimeoutDelayUs); -} - -void TimeSyncer::cancelTimeout() { - ++mTimeoutGeneration; -} - -void TimeSyncer::notifyError(status_t err) { - if (mNotify == NULL) { - looper()->stop(); - return; - } - - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatError); - notify->setInt32("err", err); - notify->post(); -} - -// static -int TimeSyncer::CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2) { - int64_t rt1 = ti1->mT4 - ti1->mT1; - int64_t rt2 = ti2->mT4 - ti2->mT1; - - if (rt1 < rt2) { - return -1; - } else if (rt1 > rt2) { - return 1; - } - - return 0; -} - -void TimeSyncer::notifyOffset() { - mHistory.sort(CompareRountripTime); - - int64_t sum = 0ll; - size_t count = 0; - - // Only consider the third of the information associated with the best - // (smallest) roundtrip times. - for (size_t i = 0; i < mHistory.size() / 3; ++i) { - const TimeInfo *ti = &mHistory[i]; - -#if 0 - // One way delay for a packet to travel from client - // to server or back (assumed to be the same either way). - int64_t delay = - (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2; -#endif - - // Offset between the client clock (T1, T4) and the - // server clock (T2, T3) timestamps. - int64_t offset = - (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2; - - ALOGV("(%d) RT: %lld us, offset: %lld us", - i, ti->mT4 - ti->mT1, offset); - - sum += offset; - ++count; - } - - if (mNotify == NULL) { - ALOGI("avg. offset is %lld", sum / count); - return; - } - - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatTimeOffset); - notify->setInt64("offset", sum / count); - notify->post(); -} - -} // namespace android diff --git a/media/libstagefright/wifi-display/TimeSyncer.h b/media/libstagefright/wifi-display/TimeSyncer.h deleted file mode 100644 index 4e7571f..0000000 --- a/media/libstagefright/wifi-display/TimeSyncer.h +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef TIME_SYNCER_H_ - -#define TIME_SYNCER_H_ - -#include - -namespace android { - -struct ANetworkSession; - -/* - TimeSyncer allows us to synchronize time between a client and a server. - The client sends a UDP packet containing its send-time to the server, - the server sends that packet back to the client amended with information - about when it was received as well as the time the reply was sent back. - Finally the client receives the reply and has now enough information to - compute the clock offset between client and server assuming that packet - exchange is symmetric, i.e. time for a packet client->server and - server->client is roughly equal. - This exchange is repeated a number of times and the average offset computed - over the 30% of packets that had the lowest roundtrip times. - The offset is determined every 10 secs to account for slight differences in - clock frequency. -*/ -struct TimeSyncer : public AHandler { - enum { - kWhatError, - kWhatTimeOffset, - }; - TimeSyncer( - const sp &netSession, - const sp ¬ify); - - void startServer(unsigned localPort); - void startClient(const char *remoteHost, unsigned remotePort); - -protected: - virtual ~TimeSyncer(); - - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatStartServer, - kWhatStartClient, - kWhatUDPNotify, - kWhatSendPacket, - kWhatTimedOut, - }; - - struct TimeInfo { - int64_t mT1; // client timestamp at send - int64_t mT2; // server timestamp at receive - int64_t mT3; // server timestamp at send - int64_t mT4; // client timestamp at receive - }; - - enum { - kNumPacketsPerBatch = 30, - }; - static const int64_t kTimeoutDelayUs = 500000ll; - static const int64_t kBatchDelayUs = 60000000ll; // every minute - - sp mNetSession; - sp mNotify; - - bool mIsServer; - bool mConnected; - int32_t mUDPSession; - uint32_t mSeqNo; - double mTotalTimeUs; - - Vector mHistory; - - int64_t mPendingT1; - int32_t mTimeoutGeneration; - - void postSendPacket(int64_t delayUs = 0ll); - - void postTimeout(); - void cancelTimeout(); - - void notifyError(status_t err); - void notifyOffset(); - - static int CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2); - - DISALLOW_EVIL_CONSTRUCTORS(TimeSyncer); -}; - -} // namespace android - -#endif // TIME_SYNCER_H_ diff --git a/media/libstagefright/wifi-display/nettest.cpp b/media/libstagefright/wifi-display/nettest.cpp deleted file mode 100644 index 0779bf5..0000000 --- a/media/libstagefright/wifi-display/nettest.cpp +++ /dev/null @@ -1,400 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
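// The TimeSyncer exchange described above yields, per round trip, the one-way
// delay and the client/server clock offset from the four timestamps
// (T1 client send, T2 server receive, T3 server send, T4 client receive):
//     delay  = ((T2 - T1) + (T4 - T3)) / 2
//     offset = ((T2 - T1) - (T4 - T3)) / 2
// and the final estimate averages the offset over the exchanges with the
// lowest round-trip times. A compact standalone sketch of that arithmetic and
// selection step; struct and function names are illustrative, not original.
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

struct Exchange {
    int64_t t1, t2, t3, t4;   // client send, server recv, server send, client recv (us)
};

static int64_t roundTripUs(const Exchange &e)   { return e.t4 - e.t1; }
static int64_t clockOffsetUs(const Exchange &e) { return ((e.t2 - e.t1) - (e.t4 - e.t3)) / 2; }

// Average the offset over the third of the exchanges with the smallest
// round-trip time, mirroring the best-30% selection described above.
static int64_t estimateOffsetUs(std::vector<Exchange> history) {
    std::sort(history.begin(), history.end(),
              [](const Exchange &a, const Exchange &b) {
                  return roundTripUs(a) < roundTripUs(b);
              });
    size_t count = std::max<size_t>(1, history.size() / 3);
    int64_t sum = 0;
    for (size_t i = 0; i < count; ++i) {
        sum += clockOffsetUs(history[i]);
    }
    return sum / static_cast<int64_t>(count);
}

int main() {
    // Server clock ~5000 us ahead of the client, ~2000 us one-way delay.
    std::vector<Exchange> h = {
        { 1000,  8000,  8100, 5100 },
        { 2000,  9100,  9200, 6300 },
        { 3000, 10050, 10150, 7200 },
    };
    printf("estimated offset: %lld us\n", (long long)estimateOffsetUs(h));
    return 0;
}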
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NEBUG 0 -#define LOG_TAG "nettest" -#include - -#include "ANetworkSession.h" -#include "TimeSyncer.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace android { - -struct TestHandler : public AHandler { - TestHandler(const sp &netSession); - - void listen(int32_t port); - void connect(const char *host, int32_t port); - -protected: - virtual ~TestHandler(); - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kTimeSyncerPort = 8123, - }; - - enum { - kWhatListen, - kWhatConnect, - kWhatTimeSyncerNotify, - kWhatNetNotify, - kWhatSendMore, - kWhatStop, - }; - - sp mNetSession; - sp mTimeSyncer; - - int32_t mServerSessionID; - int32_t mSessionID; - - int64_t mTimeOffsetUs; - bool mTimeOffsetValid; - - int32_t mCounter; - - int64_t mMaxDelayMs; - - void dumpDelay(int32_t counter, int64_t delayMs); - - DISALLOW_EVIL_CONSTRUCTORS(TestHandler); -}; - -TestHandler::TestHandler(const sp &netSession) - : mNetSession(netSession), - mServerSessionID(0), - mSessionID(0), - mTimeOffsetUs(-1ll), - mTimeOffsetValid(false), - mCounter(0), - mMaxDelayMs(-1ll) { -} - -TestHandler::~TestHandler() { -} - -void TestHandler::listen(int32_t port) { - sp msg = new AMessage(kWhatListen, id()); - msg->setInt32("port", port); - msg->post(); -} - -void TestHandler::connect(const char *host, int32_t port) { - sp msg = new AMessage(kWhatConnect, id()); - msg->setString("host", host); - msg->setInt32("port", port); - msg->post(); -} - -void TestHandler::dumpDelay(int32_t counter, int64_t delayMs) { - static const int64_t kMinDelayMs = 0; - static const int64_t kMaxDelayMs = 300; - - const char *kPattern = "########################################"; - size_t kPatternSize = strlen(kPattern); - - int n = (kPatternSize * (delayMs - kMinDelayMs)) - / (kMaxDelayMs - kMinDelayMs); - - if (n < 0) { - n = 0; - } else if ((size_t)n > kPatternSize) { - n = kPatternSize; - } - - if (delayMs > mMaxDelayMs) { - mMaxDelayMs = delayMs; - } - - ALOGI("[%d] (%4lld ms / %4lld ms) %s", - counter, - delayMs, - mMaxDelayMs, - kPattern + kPatternSize - n); -} - -void TestHandler::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatListen: - { - sp notify = new AMessage(kWhatTimeSyncerNotify, id()); - mTimeSyncer = new TimeSyncer(mNetSession, notify); - looper()->registerHandler(mTimeSyncer); - - notify = new AMessage(kWhatNetNotify, id()); - - int32_t port; - CHECK(msg->findInt32("port", &port)); - - struct in_addr ifaceAddr; - ifaceAddr.s_addr = INADDR_ANY; - - CHECK_EQ((status_t)OK, - mNetSession->createTCPDatagramSession( - ifaceAddr, - port, - notify, - &mServerSessionID)); - break; - } - - case kWhatConnect: - { - sp notify = new AMessage(kWhatTimeSyncerNotify, id()); - mTimeSyncer = new TimeSyncer(mNetSession, notify); - looper()->registerHandler(mTimeSyncer); - mTimeSyncer->startServer(kTimeSyncerPort); - - AString host; - CHECK(msg->findString("host", &host)); - - int32_t port; - CHECK(msg->findInt32("port", &port)); - - notify = new AMessage(kWhatNetNotify, id()); - - CHECK_EQ((status_t)OK, - mNetSession->createTCPDatagramSession( - 0 /* localPort */, - host.c_str(), - port, - notify, - &mSessionID)); - break; - } - - case kWhatNetNotify: - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatConnected: - { - ALOGI("kWhatConnected"); - 
- (new AMessage(kWhatSendMore, id()))->post(); - break; - } - - case ANetworkSession::kWhatClientConnected: - { - ALOGI("kWhatClientConnected"); - - CHECK_EQ(mSessionID, 0); - CHECK(msg->findInt32("sessionID", &mSessionID)); - - AString clientIP; - CHECK(msg->findString("client-ip", &clientIP)); - - mTimeSyncer->startClient(clientIP.c_str(), kTimeSyncerPort); - break; - } - - case ANetworkSession::kWhatDatagram: - { - sp packet; - CHECK(msg->findBuffer("data", &packet)); - - CHECK_EQ(packet->size(), 12u); - - int32_t counter = U32_AT(packet->data()); - int64_t timeUs = U64_AT(packet->data() + 4); - - if (mTimeOffsetValid) { - timeUs -= mTimeOffsetUs; - int64_t nowUs = ALooper::GetNowUs(); - int64_t delayMs = (nowUs - timeUs) / 1000ll; - - dumpDelay(counter, delayMs); - } else { - ALOGI("received %d", counter); - } - break; - } - - case ANetworkSession::kWhatError: - { - ALOGE("kWhatError"); - break; - } - - default: - TRESPASS(); - } - break; - } - - case kWhatTimeSyncerNotify: - { - CHECK(msg->findInt64("offset", &mTimeOffsetUs)); - mTimeOffsetValid = true; - break; - } - - case kWhatSendMore: - { - uint8_t buffer[4 + 8]; - buffer[0] = mCounter >> 24; - buffer[1] = (mCounter >> 16) & 0xff; - buffer[2] = (mCounter >> 8) & 0xff; - buffer[3] = mCounter & 0xff; - - int64_t nowUs = ALooper::GetNowUs(); - - buffer[4] = nowUs >> 56; - buffer[5] = (nowUs >> 48) & 0xff; - buffer[6] = (nowUs >> 40) & 0xff; - buffer[7] = (nowUs >> 32) & 0xff; - buffer[8] = (nowUs >> 24) & 0xff; - buffer[9] = (nowUs >> 16) & 0xff; - buffer[10] = (nowUs >> 8) & 0xff; - buffer[11] = nowUs & 0xff; - - ++mCounter; - - CHECK_EQ((status_t)OK, - mNetSession->sendRequest( - mSessionID, - buffer, - sizeof(buffer), - true /* timeValid */, - nowUs)); - - msg->post(100000ll); - break; - } - - case kWhatStop: - { - if (mSessionID != 0) { - mNetSession->destroySession(mSessionID); - mSessionID = 0; - } - - if (mServerSessionID != 0) { - mNetSession->destroySession(mServerSessionID); - mServerSessionID = 0; - } - - looper()->stop(); - break; - } - - default: - TRESPASS(); - } -} - -} // namespace android - -static void usage(const char *me) { - fprintf(stderr, - "usage: %s -c host:port\tconnect to remote host\n" - " -l port \tlisten\n", - me); -} - -int main(int argc, char **argv) { - using namespace android; - - // srand(time(NULL)); - - ProcessState::self()->startThreadPool(); - - DataSource::RegisterDefaultSniffers(); - - int32_t connectToPort = -1; - AString connectToHost; - - int32_t listenOnPort = -1; - - int res; - while ((res = getopt(argc, argv, "hc:l:")) >= 0) { - switch (res) { - case 'c': - { - const char *colonPos = strrchr(optarg, ':'); - - if (colonPos == NULL) { - usage(argv[0]); - exit(1); - } - - connectToHost.setTo(optarg, colonPos - optarg); - - char *end; - connectToPort = strtol(colonPos + 1, &end, 10); - - if (*end != '\0' || end == colonPos + 1 - || connectToPort < 0 || connectToPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - break; - } - - case 'l': - { - char *end; - listenOnPort = strtol(optarg, &end, 10); - - if (*end != '\0' || end == optarg - || listenOnPort < 0 || listenOnPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - break; - } - - case '?': - case 'h': - usage(argv[0]); - exit(1); - } - } - - if ((listenOnPort < 0 && connectToPort < 0) - || (listenOnPort >= 0 && connectToPort >= 0)) { - fprintf(stderr, - "You need to select either client or server mode.\n"); - exit(1); - } - - sp netSession = new ANetworkSession; - 
netSession->start(); - - sp looper = new ALooper; - - sp handler = new TestHandler(netSession); - looper->registerHandler(handler); - - if (listenOnPort) { - handler->listen(listenOnPort); - } - - if (connectToPort >= 0) { - handler->connect(connectToHost.c_str(), connectToPort); - } - - looper->start(true /* runOnCallingThread */); - - return 0; -} diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp deleted file mode 100644 index 7a96081..0000000 --- a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp +++ /dev/null @@ -1,328 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "RTPAssembler" -#include - -#include "RTPAssembler.h" - -#include -#include -#include -#include -#include - -namespace android { - -RTPReceiver::Assembler::Assembler(const sp ¬ify) - : mNotify(notify) { -} - -void RTPReceiver::Assembler::postAccessUnit( - const sp &accessUnit, bool followsDiscontinuity) { - sp notify = mNotify->dup(); - notify->setInt32("what", RTPReceiver::kWhatAccessUnit); - notify->setBuffer("accessUnit", accessUnit); - notify->setInt32("followsDiscontinuity", followsDiscontinuity); - notify->post(); -} -//////////////////////////////////////////////////////////////////////////////// - -RTPReceiver::TSAssembler::TSAssembler(const sp ¬ify) - : Assembler(notify), - mSawDiscontinuity(false) { -} - -void RTPReceiver::TSAssembler::signalDiscontinuity() { - mSawDiscontinuity = true; -} - -status_t RTPReceiver::TSAssembler::processPacket(const sp &packet) { - int32_t rtpTime; - CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); - - packet->meta()->setInt64("timeUs", (rtpTime * 100ll) / 9); - - postAccessUnit(packet, mSawDiscontinuity); - - if (mSawDiscontinuity) { - mSawDiscontinuity = false; - } - - return OK; -} - -//////////////////////////////////////////////////////////////////////////////// - -RTPReceiver::H264Assembler::H264Assembler(const sp ¬ify) - : Assembler(notify), - mState(0), - mIndicator(0), - mNALType(0), - mAccessUnitRTPTime(0) { -} - -void RTPReceiver::H264Assembler::signalDiscontinuity() { - reset(); -} - -status_t RTPReceiver::H264Assembler::processPacket(const sp &packet) { - status_t err = internalProcessPacket(packet); - - if (err != OK) { - reset(); - } - - return err; -} - -status_t RTPReceiver::H264Assembler::internalProcessPacket( - const sp &packet) { - const uint8_t *data = packet->data(); - size_t size = packet->size(); - - switch (mState) { - case 0: - { - if (size < 1 || (data[0] & 0x80)) { - ALOGV("Malformed H264 RTP packet (empty or F-bit set)"); - return ERROR_MALFORMED; - } - - unsigned nalType = data[0] & 0x1f; - if (nalType >= 1 && nalType <= 23) { - addSingleNALUnit(packet); - ALOGV("added single NAL packet"); - } else if (nalType == 28) { - // FU-A - unsigned indicator = data[0]; - CHECK((indicator & 0x1f) == 28); - - if (size < 2) { - ALOGV("Malformed H264 FU-A packet 
(single byte)"); - return ERROR_MALFORMED; - } - - if (!(data[1] & 0x80)) { - ALOGV("Malformed H264 FU-A packet (no start bit)"); - return ERROR_MALFORMED; - } - - mIndicator = data[0]; - mNALType = data[1] & 0x1f; - uint32_t nri = (data[0] >> 5) & 3; - - clearAccumulator(); - - uint8_t byte = mNALType | (nri << 5); - appendToAccumulator(&byte, 1); - appendToAccumulator(data + 2, size - 2); - - int32_t rtpTime; - CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); - mAccumulator->meta()->setInt32("rtp-time", rtpTime); - - if (data[1] & 0x40) { - // Huh? End bit also set on the first buffer. - addSingleNALUnit(mAccumulator); - clearAccumulator(); - - ALOGV("added FU-A"); - break; - } - - mState = 1; - } else if (nalType == 24) { - // STAP-A - - status_t err = addSingleTimeAggregationPacket(packet); - if (err != OK) { - return err; - } - } else { - ALOGV("Malformed H264 packet (unknown type %d)", nalType); - return ERROR_UNSUPPORTED; - } - break; - } - - case 1: - { - if (size < 2 - || data[0] != mIndicator - || (data[1] & 0x1f) != mNALType - || (data[1] & 0x80)) { - ALOGV("Malformed H264 FU-A packet (indicator, " - "type or start bit mismatch)"); - - return ERROR_MALFORMED; - } - - appendToAccumulator(data + 2, size - 2); - - if (data[1] & 0x40) { - addSingleNALUnit(mAccumulator); - - clearAccumulator(); - mState = 0; - - ALOGV("added FU-A"); - } - break; - } - - default: - TRESPASS(); - } - - int32_t marker; - CHECK(packet->meta()->findInt32("M", &marker)); - - if (marker) { - flushAccessUnit(); - } - - return OK; -} - -void RTPReceiver::H264Assembler::reset() { - mNALUnits.clear(); - - clearAccumulator(); - mState = 0; -} - -void RTPReceiver::H264Assembler::clearAccumulator() { - if (mAccumulator != NULL) { - // XXX Too expensive. - mAccumulator.clear(); - } -} - -void RTPReceiver::H264Assembler::appendToAccumulator( - const void *data, size_t size) { - if (mAccumulator == NULL) { - mAccumulator = new ABuffer(size); - memcpy(mAccumulator->data(), data, size); - return; - } - - if (mAccumulator->size() + size > mAccumulator->capacity()) { - sp buf = new ABuffer(mAccumulator->size() + size); - memcpy(buf->data(), mAccumulator->data(), mAccumulator->size()); - buf->setRange(0, mAccumulator->size()); - - int32_t rtpTime; - if (mAccumulator->meta()->findInt32("rtp-time", &rtpTime)) { - buf->meta()->setInt32("rtp-time", rtpTime); - } - - mAccumulator = buf; - } - - memcpy(mAccumulator->data() + mAccumulator->size(), data, size); - mAccumulator->setRange(0, mAccumulator->size() + size); -} - -void RTPReceiver::H264Assembler::addSingleNALUnit(const sp &packet) { - if (mNALUnits.empty()) { - int32_t rtpTime; - CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); - - mAccessUnitRTPTime = rtpTime; - } - - mNALUnits.push_back(packet); -} - -void RTPReceiver::H264Assembler::flushAccessUnit() { - if (mNALUnits.empty()) { - return; - } - - size_t totalSize = 0; - for (List >::iterator it = mNALUnits.begin(); - it != mNALUnits.end(); ++it) { - totalSize += 4 + (*it)->size(); - } - - sp accessUnit = new ABuffer(totalSize); - size_t offset = 0; - for (List >::iterator it = mNALUnits.begin(); - it != mNALUnits.end(); ++it) { - const sp nalUnit = *it; - - memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4); - - memcpy(accessUnit->data() + offset + 4, - nalUnit->data(), - nalUnit->size()); - - offset += 4 + nalUnit->size(); - } - - mNALUnits.clear(); - - accessUnit->meta()->setInt64("timeUs", mAccessUnitRTPTime * 100ll / 9ll); - postAccessUnit(accessUnit, false /* followsDiscontinuity */); -} - 
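// flushAccessUnit() above concatenates the collected NAL units into a single
// access unit, prefixing each with the 4-byte Annex B start code 00 00 00 01,
// and converts the 90 kHz RTP timestamp to microseconds (rtpTime * 100 / 9).
// A self-contained sketch of the same framing using std::vector in place of
// the framework buffer types; names are illustrative.
#include <cstdint>
#include <cstdio>
#include <vector>

// Concatenate NAL units into one Annex B access unit, each NAL preceded by
// the 00 00 00 01 start code.
static std::vector<uint8_t> makeAnnexBAccessUnit(
        const std::vector<std::vector<uint8_t>> &nalUnits) {
    static const uint8_t kStartCode[4] = { 0x00, 0x00, 0x00, 0x01 };
    std::vector<uint8_t> accessUnit;
    for (const auto &nal : nalUnits) {
        accessUnit.insert(accessUnit.end(), kStartCode, kStartCode + 4);
        accessUnit.insert(accessUnit.end(), nal.begin(), nal.end());
    }
    return accessUnit;
}

// 90 kHz RTP timestamp -> microseconds, the same *100/9 scaling used above.
static int64_t rtpTimeToUs(uint32_t rtpTime) {
    return (static_cast<int64_t>(rtpTime) * 100) / 9;
}

int main() {
    std::vector<std::vector<uint8_t>> nals = {
        { 0x67, 0x42, 0x00, 0x1e },   // e.g. an SPS
        { 0x68, 0xce, 0x38, 0x80 },   // e.g. a PPS
    };
    std::vector<uint8_t> au = makeAnnexBAccessUnit(nals);
    printf("access unit: %zu bytes, pts %lld us (rtp 90000)\n",
           au.size(), (long long)rtpTimeToUs(90000));
    return 0;
}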
-status_t RTPReceiver::H264Assembler::addSingleTimeAggregationPacket( - const sp &packet) { - const uint8_t *data = packet->data(); - size_t size = packet->size(); - - if (size < 3) { - ALOGV("Malformed H264 STAP-A packet (too small)"); - return ERROR_MALFORMED; - } - - int32_t rtpTime; - CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); - - ++data; - --size; - while (size >= 2) { - size_t nalSize = (data[0] << 8) | data[1]; - - if (size < nalSize + 2) { - ALOGV("Malformed H264 STAP-A packet (incomplete NAL unit)"); - return ERROR_MALFORMED; - } - - sp unit = new ABuffer(nalSize); - memcpy(unit->data(), &data[2], nalSize); - - unit->meta()->setInt32("rtp-time", rtpTime); - - addSingleNALUnit(unit); - - data += 2 + nalSize; - size -= 2 + nalSize; - } - - if (size != 0) { - ALOGV("Unexpected padding at end of STAP-A packet."); - } - - ALOGV("added STAP-A"); - - return OK; -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.h b/media/libstagefright/wifi-display/rtp/RTPAssembler.h deleted file mode 100644 index e456d32..0000000 --- a/media/libstagefright/wifi-display/rtp/RTPAssembler.h +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef RTP_ASSEMBLER_H_ - -#define RTP_ASSEMBLER_H_ - -#include "RTPReceiver.h" - -namespace android { - -// A helper class to reassemble the payload of RTP packets into access -// units depending on the packetization scheme. 
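// The assembler above handles the usual RFC 6184 packetization cases: single
// NAL units (types 1-23), fragmentation units (FU-A, type 28) and single-time
// aggregation packets (STAP-A, type 24). For FU-A, the original NAL header is
// rebuilt from the FU indicator's F/NRI bits plus the FU header's type bits,
// and the fragment payloads are concatenated between the start (S) and end (E)
// flags. A minimal free-standing sketch of that reconstruction; names are
// illustrative, not from the original file.
#include <cstdint>
#include <cstdio>
#include <vector>

// Reassemble one NAL unit from an in-order run of FU-A payloads
// (each payload = FU indicator, FU header, fragment data).
// Returns an empty vector if the run is malformed.
static std::vector<uint8_t> reassembleFuA(
        const std::vector<std::vector<uint8_t>> &fragments) {
    std::vector<uint8_t> nal;
    for (size_t i = 0; i < fragments.size(); ++i) {
        const std::vector<uint8_t> &f = fragments[i];
        if (f.size() < 2 || (f[0] & 0x1f) != 28) {
            return {};                         // not an FU-A payload
        }
        bool start = (f[1] & 0x80) != 0;
        bool end   = (f[1] & 0x40) != 0;
        if (start != (i == 0) || end != (i + 1 == fragments.size())) {
            return {};                         // S/E flags don't match position
        }
        if (start) {
            // Rebuild the NAL header: F and NRI from the indicator,
            // the original NAL type from the FU header.
            nal.push_back((f[0] & 0xe0) | (f[1] & 0x1f));
        }
        nal.insert(nal.end(), f.begin() + 2, f.end());
    }
    return nal;
}

int main() {
    // Two fragments of a type-5 (IDR) NAL unit with NRI = 3.
    std::vector<std::vector<uint8_t>> frags = {
        { 0x7c, 0x85, 0xaa, 0xbb },            // indicator 0x7c, header S | type 5
        { 0x7c, 0x45, 0xcc, 0xdd },            // header E | type 5
    };
    std::vector<uint8_t> nal = reassembleFuA(frags);
    printf("NAL: %zu bytes, header 0x%02x\n",
           nal.size(), (unsigned)(nal.empty() ? 0 : nal[0]));
    return 0;
}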
-struct RTPReceiver::Assembler : public RefBase { - Assembler(const sp ¬ify); - - virtual void signalDiscontinuity() = 0; - virtual status_t processPacket(const sp &packet) = 0; - -protected: - virtual ~Assembler() {} - - void postAccessUnit( - const sp &accessUnit, bool followsDiscontinuity); - -private: - sp mNotify; - - DISALLOW_EVIL_CONSTRUCTORS(Assembler); -}; - -struct RTPReceiver::TSAssembler : public RTPReceiver::Assembler { - TSAssembler(const sp ¬ify); - - virtual void signalDiscontinuity(); - virtual status_t processPacket(const sp &packet); - -private: - bool mSawDiscontinuity; - - DISALLOW_EVIL_CONSTRUCTORS(TSAssembler); -}; - -struct RTPReceiver::H264Assembler : public RTPReceiver::Assembler { - H264Assembler(const sp ¬ify); - - virtual void signalDiscontinuity(); - virtual status_t processPacket(const sp &packet); - -private: - int32_t mState; - - uint8_t mIndicator; - uint8_t mNALType; - - sp mAccumulator; - - List > mNALUnits; - int32_t mAccessUnitRTPTime; - - status_t internalProcessPacket(const sp &packet); - - void addSingleNALUnit(const sp &packet); - status_t addSingleTimeAggregationPacket(const sp &packet); - - void flushAccessUnit(); - - void clearAccumulator(); - void appendToAccumulator(const void *data, size_t size); - - void reset(); - - DISALLOW_EVIL_CONSTRUCTORS(H264Assembler); -}; - -} // namespace android - -#endif // RTP_ASSEMBLER_H_ - diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp deleted file mode 100644 index 8fa1dae..0000000 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp +++ /dev/null @@ -1,1153 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "RTPReceiver" -#include - -#include "RTPAssembler.h" -#include "RTPReceiver.h" - -#include "ANetworkSession.h" - -#include -#include -#include -#include -#include -#include - -#define TRACK_PACKET_LOSS 0 - -namespace android { - -//////////////////////////////////////////////////////////////////////////////// - -struct RTPReceiver::Source : public AHandler { - Source(RTPReceiver *receiver, uint32_t ssrc); - - void onPacketReceived(uint16_t seq, const sp &buffer); - - void addReportBlock(uint32_t ssrc, const sp &buf); - -protected: - virtual ~Source(); - - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatRetransmit, - kWhatDeclareLost, - }; - - static const uint32_t kMinSequential = 2; - static const uint32_t kMaxDropout = 3000; - static const uint32_t kMaxMisorder = 100; - static const uint32_t kRTPSeqMod = 1u << 16; - static const int64_t kReportIntervalUs = 10000000ll; - - RTPReceiver *mReceiver; - uint32_t mSSRC; - bool mFirst; - uint16_t mMaxSeq; - uint32_t mCycles; - uint32_t mBaseSeq; - uint32_t mReceived; - uint32_t mExpectedPrior; - uint32_t mReceivedPrior; - - int64_t mFirstArrivalTimeUs; - int64_t mFirstRTPTimeUs; - - // Ordered by extended seq number. 
- List > mPackets; - - enum StatusBits { - STATUS_DECLARED_LOST = 1, - STATUS_REQUESTED_RETRANSMISSION = 2, - STATUS_ARRIVED_LATE = 4, - }; -#if TRACK_PACKET_LOSS - KeyedVector mLostPackets; -#endif - - void modifyPacketStatus(int32_t extSeqNo, uint32_t mask); - - int32_t mAwaitingExtSeqNo; - bool mRequestedRetransmission; - - int32_t mActivePacketType; - sp mActiveAssembler; - - int64_t mNextReportTimeUs; - - int32_t mNumDeclaredLost; - int32_t mNumDeclaredLostPrior; - - int32_t mRetransmitGeneration; - int32_t mDeclareLostGeneration; - bool mDeclareLostTimerPending; - - void queuePacket(const sp &packet); - void dequeueMore(); - - sp getNextPacket(); - void resync(); - - void postRetransmitTimer(int64_t delayUs); - void postDeclareLostTimer(int64_t delayUs); - void cancelTimers(); - - DISALLOW_EVIL_CONSTRUCTORS(Source); -}; - -//////////////////////////////////////////////////////////////////////////////// - -RTPReceiver::Source::Source(RTPReceiver *receiver, uint32_t ssrc) - : mReceiver(receiver), - mSSRC(ssrc), - mFirst(true), - mMaxSeq(0), - mCycles(0), - mBaseSeq(0), - mReceived(0), - mExpectedPrior(0), - mReceivedPrior(0), - mFirstArrivalTimeUs(-1ll), - mFirstRTPTimeUs(-1ll), - mAwaitingExtSeqNo(-1), - mRequestedRetransmission(false), - mActivePacketType(-1), - mNextReportTimeUs(-1ll), - mNumDeclaredLost(0), - mNumDeclaredLostPrior(0), - mRetransmitGeneration(0), - mDeclareLostGeneration(0), - mDeclareLostTimerPending(false) { -} - -RTPReceiver::Source::~Source() { -} - -void RTPReceiver::Source::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatRetransmit: - { - int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - - if (generation != mRetransmitGeneration) { - break; - } - - mRequestedRetransmission = true; - mReceiver->requestRetransmission(mSSRC, mAwaitingExtSeqNo); - - modifyPacketStatus( - mAwaitingExtSeqNo, STATUS_REQUESTED_RETRANSMISSION); - break; - } - - case kWhatDeclareLost: - { - int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - - if (generation != mDeclareLostGeneration) { - break; - } - - cancelTimers(); - - ALOGV("Lost packet extSeqNo %d %s", - mAwaitingExtSeqNo, - mRequestedRetransmission ? "*" : ""); - - mRequestedRetransmission = false; - if (mActiveAssembler != NULL) { - mActiveAssembler->signalDiscontinuity(); - } - - modifyPacketStatus(mAwaitingExtSeqNo, STATUS_DECLARED_LOST); - - // resync(); - ++mAwaitingExtSeqNo; - ++mNumDeclaredLost; - - mReceiver->notifyPacketLost(); - - dequeueMore(); - break; - } - - default: - TRESPASS(); - } -} - -void RTPReceiver::Source::onPacketReceived( - uint16_t seq, const sp &buffer) { - if (mFirst) { - buffer->setInt32Data(mCycles | seq); - queuePacket(buffer); - - mFirst = false; - mBaseSeq = seq; - mMaxSeq = seq; - ++mReceived; - return; - } - - uint16_t udelta = seq - mMaxSeq; - - if (udelta < kMaxDropout) { - // In order, with permissible gap. - - if (seq < mMaxSeq) { - // Sequence number wrapped - count another 64K cycle - mCycles += kRTPSeqMod; - } - - mMaxSeq = seq; - - ++mReceived; - } else if (udelta <= kRTPSeqMod - kMaxMisorder) { - // The sequence number made a very large jump - return; - } else { - // Duplicate or reordered packet. 
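// The sequence bookkeeping in onPacketReceived() treats deltas below
// kMaxDropout as in-order (bumping a 64K cycle counter on wraparound), drops
// jumps too large to be plausible reordering, and passes everything else
// through as a duplicate or late packet. A stripped-down standalone sketch of
// the 16-bit to 32-bit sequence extension with the same thresholds; the class
// name is illustrative.
#include <cstdint>
#include <cstdio>

class SeqExtender {
public:
    // Returns the extended sequence number, or -1 if the packet should be
    // ignored (a jump too large to be a plausible reordering).
    int64_t feed(uint16_t seq) {
        if (mFirst) {
            mFirst = false;
            mMaxSeq = seq;
            return mCycles | seq;
        }
        uint16_t udelta = static_cast<uint16_t>(seq - mMaxSeq);
        if (udelta < kMaxDropout) {
            if (seq < mMaxSeq) {
                mCycles += kRTPSeqMod;         // sequence number wrapped
            }
            mMaxSeq = seq;
        } else if (udelta <= kRTPSeqMod - kMaxMisorder) {
            return -1;                         // very large jump: ignore
        }
        // else: duplicate or reordered packet, still gets an extended number
        return mCycles | seq;
    }

private:
    static constexpr uint32_t kMaxDropout = 3000;
    static constexpr uint32_t kMaxMisorder = 100;
    static constexpr uint32_t kRTPSeqMod = 1u << 16;

    bool mFirst = true;
    uint16_t mMaxSeq = 0;
    uint32_t mCycles = 0;
};

int main() {
    SeqExtender ext;
    printf("%lld\n", (long long)ext.feed(65534));   // 65534
    printf("%lld\n", (long long)ext.feed(65535));   // 65535
    printf("%lld\n", (long long)ext.feed(0));       // 65536 (wrapped)
    printf("%lld\n", (long long)ext.feed(1));       // 65537
    return 0;
}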
- } - - buffer->setInt32Data(mCycles | seq); - queuePacket(buffer); -} - -void RTPReceiver::Source::queuePacket(const sp &packet) { - int32_t newExtendedSeqNo = packet->int32Data(); - - if (mFirstArrivalTimeUs < 0ll) { - mFirstArrivalTimeUs = ALooper::GetNowUs(); - - uint32_t rtpTime; - CHECK(packet->meta()->findInt32("rtp-time", (int32_t *)&rtpTime)); - - mFirstRTPTimeUs = (rtpTime * 100ll) / 9ll; - } - - if (mAwaitingExtSeqNo >= 0 && newExtendedSeqNo < mAwaitingExtSeqNo) { - // We're no longer interested in these. They're old. - ALOGV("dropping stale extSeqNo %d", newExtendedSeqNo); - - modifyPacketStatus(newExtendedSeqNo, STATUS_ARRIVED_LATE); - return; - } - - if (mPackets.empty()) { - mPackets.push_back(packet); - dequeueMore(); - return; - } - - List >::iterator firstIt = mPackets.begin(); - List >::iterator it = --mPackets.end(); - for (;;) { - int32_t extendedSeqNo = (*it)->int32Data(); - - if (extendedSeqNo == newExtendedSeqNo) { - // Duplicate packet. - return; - } - - if (extendedSeqNo < newExtendedSeqNo) { - // Insert new packet after the one at "it". - mPackets.insert(++it, packet); - break; - } - - if (it == firstIt) { - // Insert new packet before the first existing one. - mPackets.insert(it, packet); - break; - } - - --it; - } - - dequeueMore(); -} - -void RTPReceiver::Source::dequeueMore() { - int64_t nowUs = ALooper::GetNowUs(); - if (mNextReportTimeUs < 0ll || nowUs >= mNextReportTimeUs) { - if (mNextReportTimeUs >= 0ll) { - uint32_t expected = (mMaxSeq | mCycles) - mBaseSeq + 1; - - uint32_t expectedInterval = expected - mExpectedPrior; - mExpectedPrior = expected; - - uint32_t receivedInterval = mReceived - mReceivedPrior; - mReceivedPrior = mReceived; - - int64_t lostInterval = - (int64_t)expectedInterval - (int64_t)receivedInterval; - - int32_t declaredLostInterval = - mNumDeclaredLost - mNumDeclaredLostPrior; - - mNumDeclaredLostPrior = mNumDeclaredLost; - - if (declaredLostInterval > 0) { - ALOGI("lost %lld packets (%.2f %%), declared %d lost\n", - lostInterval, - 100.0f * lostInterval / expectedInterval, - declaredLostInterval); - } - } - - mNextReportTimeUs = nowUs + kReportIntervalUs; - -#if TRACK_PACKET_LOSS - for (size_t i = 0; i < mLostPackets.size(); ++i) { - int32_t key = mLostPackets.keyAt(i); - uint32_t value = mLostPackets.valueAt(i); - - AString status; - if (value & STATUS_REQUESTED_RETRANSMISSION) { - status.append("retrans "); - } - if (value & STATUS_ARRIVED_LATE) { - status.append("arrived-late "); - } - ALOGI("Packet %d declared lost %s", key, status.c_str()); - } -#endif - } - - sp packet; - while ((packet = getNextPacket()) != NULL) { - if (mDeclareLostTimerPending) { - cancelTimers(); - } - - CHECK_GE(mAwaitingExtSeqNo, 0); -#if TRACK_PACKET_LOSS - mLostPackets.removeItem(mAwaitingExtSeqNo); -#endif - - int32_t packetType; - CHECK(packet->meta()->findInt32("PT", &packetType)); - - if (packetType != mActivePacketType) { - mActiveAssembler = mReceiver->makeAssembler(packetType); - mActivePacketType = packetType; - } - - if (mActiveAssembler != NULL) { - status_t err = mActiveAssembler->processPacket(packet); - if (err != OK) { - ALOGV("assembler returned error %d", err); - } - } - - ++mAwaitingExtSeqNo; - } - - if (mDeclareLostTimerPending) { - return; - } - - if (mPackets.empty()) { - return; - } - - CHECK_GE(mAwaitingExtSeqNo, 0); - - const sp &firstPacket = *mPackets.begin(); - - uint32_t rtpTime; - CHECK(firstPacket->meta()->findInt32( - "rtp-time", (int32_t *)&rtpTime)); - - - int64_t rtpUs = (rtpTime * 100ll) / 9ll; - - int64_t 
maxArrivalTimeUs = - mFirstArrivalTimeUs + rtpUs - mFirstRTPTimeUs; - - nowUs = ALooper::GetNowUs(); - - CHECK_LT(mAwaitingExtSeqNo, firstPacket->int32Data()); - - ALOGV("waiting for %d, comparing against %d, %lld us left", - mAwaitingExtSeqNo, - firstPacket->int32Data(), - maxArrivalTimeUs - nowUs); - - postDeclareLostTimer(maxArrivalTimeUs + kPacketLostAfterUs); - - if (kRequestRetransmissionAfterUs > 0ll) { - postRetransmitTimer( - maxArrivalTimeUs + kRequestRetransmissionAfterUs); - } -} - -sp RTPReceiver::Source::getNextPacket() { - if (mPackets.empty()) { - return NULL; - } - - int32_t extSeqNo = (*mPackets.begin())->int32Data(); - - if (mAwaitingExtSeqNo < 0) { - mAwaitingExtSeqNo = extSeqNo; - } else if (extSeqNo != mAwaitingExtSeqNo) { - return NULL; - } - - sp packet = *mPackets.begin(); - mPackets.erase(mPackets.begin()); - - return packet; -} - -void RTPReceiver::Source::resync() { - mAwaitingExtSeqNo = -1; -} - -void RTPReceiver::Source::addReportBlock( - uint32_t ssrc, const sp &buf) { - uint32_t extMaxSeq = mMaxSeq | mCycles; - uint32_t expected = extMaxSeq - mBaseSeq + 1; - - int64_t lost = (int64_t)expected - (int64_t)mReceived; - if (lost > 0x7fffff) { - lost = 0x7fffff; - } else if (lost < -0x800000) { - lost = -0x800000; - } - - uint32_t expectedInterval = expected - mExpectedPrior; - mExpectedPrior = expected; - - uint32_t receivedInterval = mReceived - mReceivedPrior; - mReceivedPrior = mReceived; - - int64_t lostInterval = expectedInterval - receivedInterval; - - uint8_t fractionLost; - if (expectedInterval == 0 || lostInterval <=0) { - fractionLost = 0; - } else { - fractionLost = (lostInterval << 8) / expectedInterval; - } - - uint8_t *ptr = buf->data() + buf->size(); - - ptr[0] = ssrc >> 24; - ptr[1] = (ssrc >> 16) & 0xff; - ptr[2] = (ssrc >> 8) & 0xff; - ptr[3] = ssrc & 0xff; - - ptr[4] = fractionLost; - - ptr[5] = (lost >> 16) & 0xff; - ptr[6] = (lost >> 8) & 0xff; - ptr[7] = lost & 0xff; - - ptr[8] = extMaxSeq >> 24; - ptr[9] = (extMaxSeq >> 16) & 0xff; - ptr[10] = (extMaxSeq >> 8) & 0xff; - ptr[11] = extMaxSeq & 0xff; - - // XXX TODO: - - ptr[12] = 0x00; // interarrival jitter - ptr[13] = 0x00; - ptr[14] = 0x00; - ptr[15] = 0x00; - - ptr[16] = 0x00; // last SR - ptr[17] = 0x00; - ptr[18] = 0x00; - ptr[19] = 0x00; - - ptr[20] = 0x00; // delay since last SR - ptr[21] = 0x00; - ptr[22] = 0x00; - ptr[23] = 0x00; -} - -//////////////////////////////////////////////////////////////////////////////// - -RTPReceiver::RTPReceiver( - const sp &netSession, - const sp ¬ify, - uint32_t flags) - : mNetSession(netSession), - mNotify(notify), - mFlags(flags), - mRTPMode(TRANSPORT_UNDEFINED), - mRTCPMode(TRANSPORT_UNDEFINED), - mRTPSessionID(0), - mRTCPSessionID(0), - mRTPConnected(false), - mRTCPConnected(false), - mRTPClientSessionID(0), - mRTCPClientSessionID(0) { -} - -RTPReceiver::~RTPReceiver() { - if (mRTCPClientSessionID != 0) { - mNetSession->destroySession(mRTCPClientSessionID); - mRTCPClientSessionID = 0; - } - - if (mRTPClientSessionID != 0) { - mNetSession->destroySession(mRTPClientSessionID); - mRTPClientSessionID = 0; - } - - if (mRTCPSessionID != 0) { - mNetSession->destroySession(mRTCPSessionID); - mRTCPSessionID = 0; - } - - if (mRTPSessionID != 0) { - mNetSession->destroySession(mRTPSessionID); - mRTPSessionID = 0; - } -} - -status_t RTPReceiver::initAsync( - TransportMode rtpMode, - TransportMode rtcpMode, - int32_t *outLocalRTPPort) { - if (mRTPMode != TRANSPORT_UNDEFINED - || rtpMode == TRANSPORT_UNDEFINED - || rtpMode == TRANSPORT_NONE - || rtcpMode == 
TRANSPORT_UNDEFINED) { - return INVALID_OPERATION; - } - - CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED); - CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED); - - sp rtpNotify = new AMessage(kWhatRTPNotify, id()); - - sp rtcpNotify; - if (rtcpMode != TRANSPORT_NONE) { - rtcpNotify = new AMessage(kWhatRTCPNotify, id()); - } - - CHECK_EQ(mRTPSessionID, 0); - CHECK_EQ(mRTCPSessionID, 0); - - int32_t localRTPPort; - - struct in_addr ifaceAddr; - ifaceAddr.s_addr = INADDR_ANY; - - for (;;) { - localRTPPort = PickRandomRTPPort(); - - status_t err; - if (rtpMode == TRANSPORT_UDP) { - err = mNetSession->createUDPSession( - localRTPPort, - rtpNotify, - &mRTPSessionID); - } else { - CHECK_EQ(rtpMode, TRANSPORT_TCP); - err = mNetSession->createTCPDatagramSession( - ifaceAddr, - localRTPPort, - rtpNotify, - &mRTPSessionID); - } - - if (err != OK) { - continue; - } - - if (rtcpMode == TRANSPORT_NONE) { - break; - } else if (rtcpMode == TRANSPORT_UDP) { - err = mNetSession->createUDPSession( - localRTPPort + 1, - rtcpNotify, - &mRTCPSessionID); - } else { - CHECK_EQ(rtpMode, TRANSPORT_TCP); - err = mNetSession->createTCPDatagramSession( - ifaceAddr, - localRTPPort + 1, - rtcpNotify, - &mRTCPSessionID); - } - - if (err == OK) { - break; - } - - mNetSession->destroySession(mRTPSessionID); - mRTPSessionID = 0; - } - - mRTPMode = rtpMode; - mRTCPMode = rtcpMode; - *outLocalRTPPort = localRTPPort; - - return OK; -} - -status_t RTPReceiver::connect( - const char *remoteHost, int32_t remoteRTPPort, int32_t remoteRTCPPort) { - status_t err; - - if (mRTPMode == TRANSPORT_UDP) { - CHECK(!mRTPConnected); - - err = mNetSession->connectUDPSession( - mRTPSessionID, remoteHost, remoteRTPPort); - - if (err != OK) { - notifyInitDone(err); - return err; - } - - ALOGI("connectUDPSession RTP successful."); - - mRTPConnected = true; - } - - if (mRTCPMode == TRANSPORT_UDP) { - CHECK(!mRTCPConnected); - - err = mNetSession->connectUDPSession( - mRTCPSessionID, remoteHost, remoteRTCPPort); - - if (err != OK) { - notifyInitDone(err); - return err; - } - - scheduleSendRR(); - - ALOGI("connectUDPSession RTCP successful."); - - mRTCPConnected = true; - } - - if (mRTPConnected - && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) { - notifyInitDone(OK); - } - - return OK; -} - -status_t RTPReceiver::informSender(const sp ¶ms) { - if (!mRTCPConnected) { - return INVALID_OPERATION; - } - - int64_t avgLatencyUs; - CHECK(params->findInt64("avgLatencyUs", &avgLatencyUs)); - - int64_t maxLatencyUs; - CHECK(params->findInt64("maxLatencyUs", &maxLatencyUs)); - - sp buf = new ABuffer(28); - - uint8_t *ptr = buf->data(); - ptr[0] = 0x80 | 0; - ptr[1] = 204; // APP - ptr[2] = 0; - - CHECK((buf->size() % 4) == 0u); - ptr[3] = (buf->size() / 4) - 1; - - ptr[4] = kSourceID >> 24; // SSRC - ptr[5] = (kSourceID >> 16) & 0xff; - ptr[6] = (kSourceID >> 8) & 0xff; - ptr[7] = kSourceID & 0xff; - ptr[8] = 'l'; - ptr[9] = 'a'; - ptr[10] = 't'; - ptr[11] = 'e'; - - ptr[12] = avgLatencyUs >> 56; - ptr[13] = (avgLatencyUs >> 48) & 0xff; - ptr[14] = (avgLatencyUs >> 40) & 0xff; - ptr[15] = (avgLatencyUs >> 32) & 0xff; - ptr[16] = (avgLatencyUs >> 24) & 0xff; - ptr[17] = (avgLatencyUs >> 16) & 0xff; - ptr[18] = (avgLatencyUs >> 8) & 0xff; - ptr[19] = avgLatencyUs & 0xff; - - ptr[20] = maxLatencyUs >> 56; - ptr[21] = (maxLatencyUs >> 48) & 0xff; - ptr[22] = (maxLatencyUs >> 40) & 0xff; - ptr[23] = (maxLatencyUs >> 32) & 0xff; - ptr[24] = (maxLatencyUs >> 24) & 0xff; - ptr[25] = (maxLatencyUs >> 16) & 0xff; - ptr[26] = (maxLatencyUs >> 8) & 0xff; - ptr[27] = 
maxLatencyUs & 0xff; - - mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); - - return OK; -} - -void RTPReceiver::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatRTPNotify: - case kWhatRTCPNotify: - onNetNotify(msg->what() == kWhatRTPNotify, msg); - break; - - case kWhatSendRR: - { - onSendRR(); - break; - } - - default: - TRESPASS(); - } -} - -void RTPReceiver::onNetNotify(bool isRTP, const sp &msg) { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - int32_t errorOccuredDuringSend; - CHECK(msg->findInt32("send", &errorOccuredDuringSend)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - ALOGE("An error occurred during %s in session %d " - "(%d, '%s' (%s)).", - errorOccuredDuringSend ? "send" : "receive", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - mNetSession->destroySession(sessionID); - - if (sessionID == mRTPSessionID) { - mRTPSessionID = 0; - } else if (sessionID == mRTCPSessionID) { - mRTCPSessionID = 0; - } else if (sessionID == mRTPClientSessionID) { - mRTPClientSessionID = 0; - } else if (sessionID == mRTCPClientSessionID) { - mRTCPClientSessionID = 0; - } - - if (!mRTPConnected - || (mRTCPMode != TRANSPORT_NONE && !mRTCPConnected)) { - notifyInitDone(err); - break; - } - - notifyError(err); - break; - } - - case ANetworkSession::kWhatDatagram: - { - sp data; - CHECK(msg->findBuffer("data", &data)); - - if (isRTP) { - if (mFlags & FLAG_AUTO_CONNECT) { - AString fromAddr; - CHECK(msg->findString("fromAddr", &fromAddr)); - - int32_t fromPort; - CHECK(msg->findInt32("fromPort", &fromPort)); - - CHECK_EQ((status_t)OK, - connect( - fromAddr.c_str(), fromPort, fromPort + 1)); - - mFlags &= ~FLAG_AUTO_CONNECT; - } - - onRTPData(data); - } else { - onRTCPData(data); - } - break; - } - - case ANetworkSession::kWhatClientConnected: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - if (isRTP) { - CHECK_EQ(mRTPMode, TRANSPORT_TCP); - - if (mRTPClientSessionID != 0) { - // We only allow a single client connection. - mNetSession->destroySession(sessionID); - sessionID = 0; - break; - } - - mRTPClientSessionID = sessionID; - mRTPConnected = true; - } else { - CHECK_EQ(mRTCPMode, TRANSPORT_TCP); - - if (mRTCPClientSessionID != 0) { - // We only allow a single client connection. - mNetSession->destroySession(sessionID); - sessionID = 0; - break; - } - - mRTCPClientSessionID = sessionID; - mRTCPConnected = true; - } - - if (mRTPConnected - && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) { - notifyInitDone(OK); - } - break; - } - } -} - -void RTPReceiver::notifyInitDone(status_t err) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatInitDone); - notify->setInt32("err", err); - notify->post(); -} - -void RTPReceiver::notifyError(status_t err) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatError); - notify->setInt32("err", err); - notify->post(); -} - -void RTPReceiver::notifyPacketLost() { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatPacketLost); - notify->post(); -} - -status_t RTPReceiver::onRTPData(const sp &buffer) { - size_t size = buffer->size(); - if (size < 12) { - // Too short to be a valid RTP header. - return ERROR_MALFORMED; - } - - const uint8_t *data = buffer->data(); - - if ((data[0] >> 6) != 2) { - // Unsupported version. 
- return ERROR_UNSUPPORTED; - } - - if (data[0] & 0x20) { - // Padding present. - - size_t paddingLength = data[size - 1]; - - if (paddingLength + 12 > size) { - // If we removed this much padding we'd end up with something - // that's too short to be a valid RTP header. - return ERROR_MALFORMED; - } - - size -= paddingLength; - } - - int numCSRCs = data[0] & 0x0f; - - size_t payloadOffset = 12 + 4 * numCSRCs; - - if (size < payloadOffset) { - // Not enough data to fit the basic header and all the CSRC entries. - return ERROR_MALFORMED; - } - - if (data[0] & 0x10) { - // Header eXtension present. - - if (size < payloadOffset + 4) { - // Not enough data to fit the basic header, all CSRC entries - // and the first 4 bytes of the extension header. - - return ERROR_MALFORMED; - } - - const uint8_t *extensionData = &data[payloadOffset]; - - size_t extensionLength = - 4 * (extensionData[2] << 8 | extensionData[3]); - - if (size < payloadOffset + 4 + extensionLength) { - return ERROR_MALFORMED; - } - - payloadOffset += 4 + extensionLength; - } - - uint32_t srcId = U32_AT(&data[8]); - uint32_t rtpTime = U32_AT(&data[4]); - uint16_t seqNo = U16_AT(&data[2]); - - sp meta = buffer->meta(); - meta->setInt32("ssrc", srcId); - meta->setInt32("rtp-time", rtpTime); - meta->setInt32("PT", data[1] & 0x7f); - meta->setInt32("M", data[1] >> 7); - - buffer->setRange(payloadOffset, size - payloadOffset); - - ssize_t index = mSources.indexOfKey(srcId); - sp source; - if (index < 0) { - source = new Source(this, srcId); - looper()->registerHandler(source); - - mSources.add(srcId, source); - } else { - source = mSources.valueAt(index); - } - - source->onPacketReceived(seqNo, buffer); - - return OK; -} - -status_t RTPReceiver::onRTCPData(const sp &data) { - ALOGI("onRTCPData"); - return OK; -} - -void RTPReceiver::addSDES(const sp &buffer) { - uint8_t *data = buffer->data() + buffer->size(); - data[0] = 0x80 | 1; - data[1] = 202; // SDES - data[4] = kSourceID >> 24; // SSRC - data[5] = (kSourceID >> 16) & 0xff; - data[6] = (kSourceID >> 8) & 0xff; - data[7] = kSourceID & 0xff; - - size_t offset = 8; - - data[offset++] = 1; // CNAME - - AString cname = "stagefright@somewhere"; - data[offset++] = cname.size(); - - memcpy(&data[offset], cname.c_str(), cname.size()); - offset += cname.size(); - - data[offset++] = 6; // TOOL - - AString tool = "stagefright/1.0"; - data[offset++] = tool.size(); - - memcpy(&data[offset], tool.c_str(), tool.size()); - offset += tool.size(); - - data[offset++] = 0; - - if ((offset % 4) > 0) { - size_t count = 4 - (offset % 4); - switch (count) { - case 3: - data[offset++] = 0; - case 2: - data[offset++] = 0; - case 1: - data[offset++] = 0; - } - } - - size_t numWords = (offset / 4) - 1; - data[2] = numWords >> 8; - data[3] = numWords & 0xff; - - buffer->setRange(buffer->offset(), buffer->size() + offset); -} - -void RTPReceiver::scheduleSendRR() { - (new AMessage(kWhatSendRR, id()))->post(5000000ll); -} - -void RTPReceiver::onSendRR() { -#if 0 - sp buf = new ABuffer(kMaxUDPPacketSize); - buf->setRange(0, 0); - - uint8_t *ptr = buf->data(); - ptr[0] = 0x80 | 0; - ptr[1] = 201; // RR - ptr[2] = 0; - ptr[3] = 1; - ptr[4] = kSourceID >> 24; // SSRC - ptr[5] = (kSourceID >> 16) & 0xff; - ptr[6] = (kSourceID >> 8) & 0xff; - ptr[7] = kSourceID & 0xff; - - buf->setRange(0, 8); - - size_t numReportBlocks = 0; - for (size_t i = 0; i < mSources.size(); ++i) { - uint32_t ssrc = mSources.keyAt(i); - sp source = mSources.valueAt(i); - - if (numReportBlocks > 31 || buf->size() + 24 > buf->capacity()) { - 
// Cannot fit another report block. - break; - } - - source->addReportBlock(ssrc, buf); - ++numReportBlocks; - } - - ptr[0] |= numReportBlocks; // 5 bit - - size_t sizeInWordsMinus1 = 1 + 6 * numReportBlocks; - ptr[2] = sizeInWordsMinus1 >> 8; - ptr[3] = sizeInWordsMinus1 & 0xff; - - buf->setRange(0, (sizeInWordsMinus1 + 1) * 4); - - addSDES(buf); - - mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); -#endif - - scheduleSendRR(); -} - -status_t RTPReceiver::registerPacketType( - uint8_t packetType, PacketizationMode mode) { - mPacketTypes.add(packetType, mode); - - return OK; -} - -sp RTPReceiver::makeAssembler(uint8_t packetType) { - ssize_t index = mPacketTypes.indexOfKey(packetType); - if (index < 0) { - return NULL; - } - - PacketizationMode mode = mPacketTypes.valueAt(index); - - switch (mode) { - case PACKETIZATION_NONE: - case PACKETIZATION_TRANSPORT_STREAM: - return new TSAssembler(mNotify); - - case PACKETIZATION_H264: - return new H264Assembler(mNotify); - - default: - return NULL; - } -} - -void RTPReceiver::requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo) { - int32_t blp = 0; - - sp buf = new ABuffer(16); - buf->setRange(0, 0); - - uint8_t *ptr = buf->data(); - ptr[0] = 0x80 | 1; // generic NACK - ptr[1] = 205; // TSFB - ptr[2] = 0; - ptr[3] = 3; - ptr[8] = (senderSSRC >> 24) & 0xff; - ptr[9] = (senderSSRC >> 16) & 0xff; - ptr[10] = (senderSSRC >> 8) & 0xff; - ptr[11] = (senderSSRC & 0xff); - ptr[8] = (kSourceID >> 24) & 0xff; - ptr[9] = (kSourceID >> 16) & 0xff; - ptr[10] = (kSourceID >> 8) & 0xff; - ptr[11] = (kSourceID & 0xff); - ptr[12] = (extSeqNo >> 8) & 0xff; - ptr[13] = (extSeqNo & 0xff); - ptr[14] = (blp >> 8) & 0xff; - ptr[15] = (blp & 0xff); - - buf->setRange(0, 16); - - mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); -} - -void RTPReceiver::Source::modifyPacketStatus(int32_t extSeqNo, uint32_t mask) { -#if TRACK_PACKET_LOSS - ssize_t index = mLostPackets.indexOfKey(extSeqNo); - if (index < 0) { - mLostPackets.add(extSeqNo, mask); - } else { - mLostPackets.editValueAt(index) |= mask; - } -#endif -} - -void RTPReceiver::Source::postRetransmitTimer(int64_t timeUs) { - int64_t delayUs = timeUs - ALooper::GetNowUs(); - sp msg = new AMessage(kWhatRetransmit, id()); - msg->setInt32("generation", mRetransmitGeneration); - msg->post(delayUs); -} - -void RTPReceiver::Source::postDeclareLostTimer(int64_t timeUs) { - CHECK(!mDeclareLostTimerPending); - mDeclareLostTimerPending = true; - - int64_t delayUs = timeUs - ALooper::GetNowUs(); - sp msg = new AMessage(kWhatDeclareLost, id()); - msg->setInt32("generation", mDeclareLostGeneration); - msg->post(delayUs); -} - -void RTPReceiver::Source::cancelTimers() { - ++mRetransmitGeneration; - ++mDeclareLostGeneration; - mDeclareLostTimerPending = false; -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.h b/media/libstagefright/wifi-display/rtp/RTPReceiver.h deleted file mode 100644 index 240ab2e..0000000 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.h +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RTP_RECEIVER_H_
-
-#define RTP_RECEIVER_H_
-
-#include "RTPBase.h"
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ABuffer;
-struct ANetworkSession;
-
-// An object of this class facilitates receiving of media data on an RTP
-// channel. The channel is established over a UDP or TCP connection depending
-// on which "TransportMode" was chosen. In addition different RTP packetization
-// schemes are supported such as "Transport Stream Packets over RTP",
-// or "AVC/H.264 encapsulation as specified in RFC 3984 (non-interleaved mode)"
-struct RTPReceiver : public RTPBase, public AHandler {
-    enum {
-        kWhatInitDone,
-        kWhatError,
-        kWhatAccessUnit,
-        kWhatPacketLost,
-    };
-
-    enum Flags {
-        FLAG_AUTO_CONNECT = 1,
-    };
-    RTPReceiver(
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify,
-            uint32_t flags = 0);
-
-    status_t registerPacketType(
-            uint8_t packetType, PacketizationMode mode);
-
-    status_t initAsync(
-            TransportMode rtpMode,
-            TransportMode rtcpMode,
-            int32_t *outLocalRTPPort);
-
-    status_t connect(
-            const char *remoteHost,
-            int32_t remoteRTPPort,
-            int32_t remoteRTCPPort);
-
-    status_t informSender(const sp<AMessage> &params);
-
-protected:
-    virtual ~RTPReceiver();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatRTPNotify,
-        kWhatRTCPNotify,
-        kWhatSendRR,
-    };
-
-    enum {
-        kSourceID = 0xdeadbeef,
-        kPacketLostAfterUs = 100000,
-        kRequestRetransmissionAfterUs = -1,
-    };
-
-    struct Assembler;
-    struct H264Assembler;
-    struct Source;
-    struct TSAssembler;
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-    uint32_t mFlags;
-    TransportMode mRTPMode;
-    TransportMode mRTCPMode;
-    int32_t mRTPSessionID;
-    int32_t mRTCPSessionID;
-    bool mRTPConnected;
-    bool mRTCPConnected;
-
-    int32_t mRTPClientSessionID;  // in TRANSPORT_TCP mode.
-    int32_t mRTCPClientSessionID;  // in TRANSPORT_TCP mode.
- - KeyedVector mPacketTypes; - KeyedVector > mSources; - - void onNetNotify(bool isRTP, const sp &msg); - status_t onRTPData(const sp &data); - status_t onRTCPData(const sp &data); - void onSendRR(); - - void scheduleSendRR(); - void addSDES(const sp &buffer); - - void notifyInitDone(status_t err); - void notifyError(status_t err); - void notifyPacketLost(); - - sp makeAssembler(uint8_t packetType); - - void requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo); - - DISALLOW_EVIL_CONSTRUCTORS(RTPReceiver); -}; - -} // namespace android - -#endif // RTP_RECEIVER_H_ diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp index 6bbe650..095fd97 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -767,17 +767,6 @@ status_t RTPSender::parseTSFB(const uint8_t *data, size_t size) { } status_t RTPSender::parseAPP(const uint8_t *data, size_t size) { - if (!memcmp("late", &data[8], 4)) { - int64_t avgLatencyUs = (int64_t)U64_AT(&data[12]); - int64_t maxLatencyUs = (int64_t)U64_AT(&data[20]); - - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatInformSender); - notify->setInt64("avgLatencyUs", avgLatencyUs); - notify->setInt64("maxLatencyUs", maxLatencyUs); - notify->post(); - } - return OK; } diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h index fefcab7..7dc138a 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.h +++ b/media/libstagefright/wifi-display/rtp/RTPSender.h @@ -37,7 +37,6 @@ struct RTPSender : public RTPBase, public AHandler { kWhatInitDone, kWhatError, kWhatNetworkStall, - kWhatInformSender, }; RTPSender( const sp &netSession, diff --git a/media/libstagefright/wifi-display/rtptest.cpp b/media/libstagefright/wifi-display/rtptest.cpp deleted file mode 100644 index 764a38b..0000000 --- a/media/libstagefright/wifi-display/rtptest.cpp +++ /dev/null @@ -1,565 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NEBUG 0 -#define LOG_TAG "rtptest" -#include - -#include "ANetworkSession.h" -#include "rtp/RTPSender.h" -#include "rtp/RTPReceiver.h" -#include "TimeSyncer.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#define MEDIA_FILENAME "/sdcard/Frame Counter HD 30FPS_1080p.mp4" - -namespace android { - -struct PacketSource : public RefBase { - PacketSource() {} - - virtual sp getNextAccessUnit() = 0; - -protected: - virtual ~PacketSource() {} - -private: - DISALLOW_EVIL_CONSTRUCTORS(PacketSource); -}; - -struct MediaPacketSource : public PacketSource { - MediaPacketSource() - : mMaxSampleSize(1024 * 1024) { - mExtractor = new NuMediaExtractor; - CHECK_EQ((status_t)OK, - mExtractor->setDataSource(MEDIA_FILENAME)); - - bool haveVideo = false; - for (size_t i = 0; i < mExtractor->countTracks(); ++i) { - sp format; - CHECK_EQ((status_t)OK, mExtractor->getTrackFormat(i, &format)); - - AString mime; - CHECK(format->findString("mime", &mime)); - - if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str())) { - mExtractor->selectTrack(i); - haveVideo = true; - break; - } - } - - CHECK(haveVideo); - } - - virtual sp getNextAccessUnit() { - int64_t timeUs; - status_t err = mExtractor->getSampleTime(&timeUs); - - if (err != OK) { - return NULL; - } - - sp accessUnit = new ABuffer(mMaxSampleSize); - CHECK_EQ((status_t)OK, mExtractor->readSampleData(accessUnit)); - - accessUnit->meta()->setInt64("timeUs", timeUs); - - CHECK_EQ((status_t)OK, mExtractor->advance()); - - return accessUnit; - } - -protected: - virtual ~MediaPacketSource() { - } - -private: - sp mExtractor; - size_t mMaxSampleSize; - - DISALLOW_EVIL_CONSTRUCTORS(MediaPacketSource); -}; - -struct SimplePacketSource : public PacketSource { - SimplePacketSource() - : mCounter(0) { - } - - virtual sp getNextAccessUnit() { - sp buffer = new ABuffer(4); - uint8_t *dst = buffer->data(); - dst[0] = mCounter >> 24; - dst[1] = (mCounter >> 16) & 0xff; - dst[2] = (mCounter >> 8) & 0xff; - dst[3] = mCounter & 0xff; - - buffer->meta()->setInt64("timeUs", mCounter * 1000000ll / kFrameRate); - - ++mCounter; - - return buffer; - } - -protected: - virtual ~SimplePacketSource() { - } - -private: - enum { - kFrameRate = 30 - }; - - uint32_t mCounter; - - DISALLOW_EVIL_CONSTRUCTORS(SimplePacketSource); -}; - -struct TestHandler : public AHandler { - TestHandler(const sp &netSession); - - void listen(); - void connect(const char *host, int32_t port); - -protected: - virtual ~TestHandler(); - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatListen, - kWhatConnect, - kWhatReceiverNotify, - kWhatSenderNotify, - kWhatSendMore, - kWhatStop, - kWhatTimeSyncerNotify, - }; - -#if 1 - static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_UDP; - static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_UDP; -#else - static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_TCP; - static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_NONE; -#endif - -#if 1 - static const RTPBase::PacketizationMode kPacketizationMode - = RTPBase::PACKETIZATION_H264; -#else - static const RTPBase::PacketizationMode kPacketizationMode - = RTPBase::PACKETIZATION_NONE; -#endif - - sp mNetSession; - sp mSource; - sp mSender; - sp mReceiver; - - sp mTimeSyncer; - bool mTimeSyncerStarted; - - int64_t mFirstTimeRealUs; - int64_t mFirstTimeMediaUs; - - int64_t mTimeOffsetUs; - bool mTimeOffsetValid; - - status_t readMore(); - - 
DISALLOW_EVIL_CONSTRUCTORS(TestHandler); -}; - -TestHandler::TestHandler(const sp &netSession) - : mNetSession(netSession), - mTimeSyncerStarted(false), - mFirstTimeRealUs(-1ll), - mFirstTimeMediaUs(-1ll), - mTimeOffsetUs(-1ll), - mTimeOffsetValid(false) { -} - -TestHandler::~TestHandler() { -} - -void TestHandler::listen() { - sp msg = new AMessage(kWhatListen, id()); - msg->post(); -} - -void TestHandler::connect(const char *host, int32_t port) { - sp msg = new AMessage(kWhatConnect, id()); - msg->setString("host", host); - msg->setInt32("port", port); - msg->post(); -} - -static void dumpDelay(int64_t delayMs) { - static const int64_t kMinDelayMs = 0; - static const int64_t kMaxDelayMs = 300; - - const char *kPattern = "########################################"; - size_t kPatternSize = strlen(kPattern); - - int n = (kPatternSize * (delayMs - kMinDelayMs)) - / (kMaxDelayMs - kMinDelayMs); - - if (n < 0) { - n = 0; - } else if ((size_t)n > kPatternSize) { - n = kPatternSize; - } - - ALOGI("(%4lld ms) %s\n", - delayMs, - kPattern + kPatternSize - n); -} - -void TestHandler::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatListen: - { - sp notify = new AMessage(kWhatTimeSyncerNotify, id()); - mTimeSyncer = new TimeSyncer(mNetSession, notify); - looper()->registerHandler(mTimeSyncer); - - notify = new AMessage(kWhatReceiverNotify, id()); - mReceiver = new RTPReceiver( - mNetSession, notify, RTPReceiver::FLAG_AUTO_CONNECT); - looper()->registerHandler(mReceiver); - - CHECK_EQ((status_t)OK, - mReceiver->registerPacketType(33, kPacketizationMode)); - - int32_t receiverRTPPort; - CHECK_EQ((status_t)OK, - mReceiver->initAsync( - kRTPMode, - kRTCPMode, - &receiverRTPPort)); - - printf("picked receiverRTPPort %d\n", receiverRTPPort); - -#if 0 - CHECK_EQ((status_t)OK, - mReceiver->connect( - "127.0.0.1", senderRTPPort, senderRTPPort + 1)); -#endif - break; - } - - case kWhatConnect: - { - AString host; - CHECK(msg->findString("host", &host)); - - sp notify = new AMessage(kWhatTimeSyncerNotify, id()); - mTimeSyncer = new TimeSyncer(mNetSession, notify); - looper()->registerHandler(mTimeSyncer); - mTimeSyncer->startServer(8123); - - int32_t receiverRTPPort; - CHECK(msg->findInt32("port", &receiverRTPPort)); - -#if 1 - mSource = new MediaPacketSource; -#else - mSource = new SimplePacketSource; -#endif - - notify = new AMessage(kWhatSenderNotify, id()); - mSender = new RTPSender(mNetSession, notify); - - looper()->registerHandler(mSender); - - int32_t senderRTPPort; - CHECK_EQ((status_t)OK, - mSender->initAsync( - host.c_str(), - receiverRTPPort, - kRTPMode, - kRTCPMode == RTPBase::TRANSPORT_NONE - ? 
-1 : receiverRTPPort + 1, - kRTCPMode, - &senderRTPPort)); - - printf("picked senderRTPPort %d\n", senderRTPPort); - break; - } - - case kWhatSenderNotify: - { - ALOGI("kWhatSenderNotify"); - - int32_t what; - CHECK(msg->findInt32("what", &what)); - - switch (what) { - case RTPSender::kWhatInitDone: - { - int32_t err; - CHECK(msg->findInt32("err", &err)); - - ALOGI("RTPSender::initAsync completed w/ err %d", err); - - if (err == OK) { - err = readMore(); - - if (err != OK) { - (new AMessage(kWhatStop, id()))->post(); - } - } - break; - } - - case RTPSender::kWhatError: - break; - } - break; - } - - case kWhatReceiverNotify: - { - ALOGV("kWhatReceiverNotify"); - - int32_t what; - CHECK(msg->findInt32("what", &what)); - - switch (what) { - case RTPReceiver::kWhatInitDone: - { - int32_t err; - CHECK(msg->findInt32("err", &err)); - - ALOGI("RTPReceiver::initAsync completed w/ err %d", err); - break; - } - - case RTPReceiver::kWhatError: - break; - - case RTPReceiver::kWhatAccessUnit: - { -#if 0 - if (!mTimeSyncerStarted) { - mTimeSyncer->startClient("172.18.41.216", 8123); - mTimeSyncerStarted = true; - } - - sp accessUnit; - CHECK(msg->findBuffer("accessUnit", &accessUnit)); - - int64_t timeUs; - CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); - - if (mTimeOffsetValid) { - timeUs -= mTimeOffsetUs; - int64_t nowUs = ALooper::GetNowUs(); - int64_t delayMs = (nowUs - timeUs) / 1000ll; - - dumpDelay(delayMs); - } -#endif - break; - } - - case RTPReceiver::kWhatPacketLost: - ALOGV("kWhatPacketLost"); - break; - - default: - TRESPASS(); - } - break; - } - - case kWhatSendMore: - { - sp accessUnit; - CHECK(msg->findBuffer("accessUnit", &accessUnit)); - - CHECK_EQ((status_t)OK, - mSender->queueBuffer( - accessUnit, - 33, - kPacketizationMode)); - - status_t err = readMore(); - - if (err != OK) { - (new AMessage(kWhatStop, id()))->post(); - } - break; - } - - case kWhatStop: - { - if (mReceiver != NULL) { - looper()->unregisterHandler(mReceiver->id()); - mReceiver.clear(); - } - - if (mSender != NULL) { - looper()->unregisterHandler(mSender->id()); - mSender.clear(); - } - - mSource.clear(); - - looper()->stop(); - break; - } - - case kWhatTimeSyncerNotify: - { - CHECK(msg->findInt64("offset", &mTimeOffsetUs)); - mTimeOffsetValid = true; - break; - } - - default: - TRESPASS(); - } -} - -status_t TestHandler::readMore() { - sp accessUnit = mSource->getNextAccessUnit(); - - if (accessUnit == NULL) { - return ERROR_END_OF_STREAM; - } - - int64_t timeUs; - CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); - - int64_t nowUs = ALooper::GetNowUs(); - int64_t whenUs; - - if (mFirstTimeRealUs < 0ll) { - mFirstTimeRealUs = whenUs = nowUs; - mFirstTimeMediaUs = timeUs; - } else { - whenUs = mFirstTimeRealUs + timeUs - mFirstTimeMediaUs; - } - - accessUnit->meta()->setInt64("timeUs", whenUs); - - sp msg = new AMessage(kWhatSendMore, id()); - msg->setBuffer("accessUnit", accessUnit); - msg->post(whenUs - nowUs); - - return OK; -} - -} // namespace android - -static void usage(const char *me) { - fprintf(stderr, - "usage: %s -c host:port\tconnect to remote host\n" - " -l \tlisten\n", - me); -} - -int main(int argc, char **argv) { - using namespace android; - - // srand(time(NULL)); - - ProcessState::self()->startThreadPool(); - - DataSource::RegisterDefaultSniffers(); - - bool listen = false; - int32_t connectToPort = -1; - AString connectToHost; - - int res; - while ((res = getopt(argc, argv, "hc:l")) >= 0) { - switch (res) { - case 'c': - { - const char *colonPos = strrchr(optarg, ':'); - - if 
(colonPos == NULL) { - usage(argv[0]); - exit(1); - } - - connectToHost.setTo(optarg, colonPos - optarg); - - char *end; - connectToPort = strtol(colonPos + 1, &end, 10); - - if (*end != '\0' || end == colonPos + 1 - || connectToPort < 1 || connectToPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - break; - } - - case 'l': - { - listen = true; - break; - } - - case '?': - case 'h': - usage(argv[0]); - exit(1); - } - } - - if (!listen && connectToPort < 0) { - fprintf(stderr, - "You need to select either client or server mode.\n"); - exit(1); - } - - sp netSession = new ANetworkSession; - netSession->start(); - - sp looper = new ALooper; - - sp handler = new TestHandler(netSession); - looper->registerHandler(handler); - - if (listen) { - handler->listen(); - } - - if (connectToPort >= 0) { - handler->connect(connectToHost.c_str(), connectToPort); - } - - looper->start(true /* runOnCallingThread */); - - return 0; -} - diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp deleted file mode 100644 index 15f9c88..0000000 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp +++ /dev/null @@ -1,625 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "DirectRenderer" -#include - -#include "DirectRenderer.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace android { - -/* - Drives the decoding process using a MediaCodec instance. Input buffers - queued by calls to "queueInputBuffer" are fed to the decoder as soon - as the decoder is ready for them, the client is notified about output - buffers as the decoder spits them out. -*/ -struct DirectRenderer::DecoderContext : public AHandler { - enum { - kWhatOutputBufferReady, - }; - DecoderContext(const sp ¬ify); - - status_t init( - const sp &format, - const sp &surfaceTex); - - void queueInputBuffer(const sp &accessUnit); - - status_t renderOutputBufferAndRelease(size_t index); - status_t releaseOutputBuffer(size_t index); - -protected: - virtual ~DecoderContext(); - - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatDecoderNotify, - }; - - sp mNotify; - sp mDecoderLooper; - sp mDecoder; - Vector > mDecoderInputBuffers; - Vector > mDecoderOutputBuffers; - List mDecoderInputBuffersAvailable; - bool mDecoderNotificationPending; - - List > mAccessUnits; - - void onDecoderNotify(); - void scheduleDecoderNotification(); - void queueDecoderInputBuffers(); - - void queueOutputBuffer( - size_t index, int64_t timeUs, const sp &buffer); - - DISALLOW_EVIL_CONSTRUCTORS(DecoderContext); -}; - -//////////////////////////////////////////////////////////////////////////////// - -/* - A "push" audio renderer. 
The primary function of this renderer is to use - an AudioTrack in push mode and making sure not to block the event loop - be ensuring that calls to AudioTrack::write never block. This is done by - estimating an upper bound of data that can be written to the AudioTrack - buffer without delay. -*/ -struct DirectRenderer::AudioRenderer : public AHandler { - AudioRenderer(const sp &decoderContext); - - void queueInputBuffer( - size_t index, int64_t timeUs, const sp &buffer); - -protected: - virtual ~AudioRenderer(); - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatPushAudio, - }; - - struct BufferInfo { - size_t mIndex; - int64_t mTimeUs; - sp mBuffer; - }; - - sp mDecoderContext; - sp mAudioTrack; - - List mInputBuffers; - bool mPushPending; - - size_t mNumFramesWritten; - - void schedulePushIfNecessary(); - void onPushAudio(); - - ssize_t writeNonBlocking(const uint8_t *data, size_t size); - - DISALLOW_EVIL_CONSTRUCTORS(AudioRenderer); -}; - -//////////////////////////////////////////////////////////////////////////////// - -DirectRenderer::DecoderContext::DecoderContext(const sp ¬ify) - : mNotify(notify), - mDecoderNotificationPending(false) { -} - -DirectRenderer::DecoderContext::~DecoderContext() { - if (mDecoder != NULL) { - mDecoder->release(); - mDecoder.clear(); - - mDecoderLooper->stop(); - mDecoderLooper.clear(); - } -} - -status_t DirectRenderer::DecoderContext::init( - const sp &format, - const sp &surfaceTex) { - CHECK(mDecoder == NULL); - - AString mime; - CHECK(format->findString("mime", &mime)); - - mDecoderLooper = new ALooper; - mDecoderLooper->setName("video codec looper"); - - mDecoderLooper->start( - false /* runOnCallingThread */, - false /* canCallJava */, - PRIORITY_DEFAULT); - - mDecoder = MediaCodec::CreateByType( - mDecoderLooper, mime.c_str(), false /* encoder */); - - CHECK(mDecoder != NULL); - - status_t err = mDecoder->configure( - format, - surfaceTex == NULL - ? 
NULL : new Surface(surfaceTex), - NULL /* crypto */, - 0 /* flags */); - CHECK_EQ(err, (status_t)OK); - - err = mDecoder->start(); - CHECK_EQ(err, (status_t)OK); - - err = mDecoder->getInputBuffers( - &mDecoderInputBuffers); - CHECK_EQ(err, (status_t)OK); - - err = mDecoder->getOutputBuffers( - &mDecoderOutputBuffers); - CHECK_EQ(err, (status_t)OK); - - scheduleDecoderNotification(); - - return OK; -} - -void DirectRenderer::DecoderContext::queueInputBuffer( - const sp &accessUnit) { - CHECK(mDecoder != NULL); - - mAccessUnits.push_back(accessUnit); - queueDecoderInputBuffers(); -} - -status_t DirectRenderer::DecoderContext::renderOutputBufferAndRelease( - size_t index) { - return mDecoder->renderOutputBufferAndRelease(index); -} - -status_t DirectRenderer::DecoderContext::releaseOutputBuffer(size_t index) { - return mDecoder->releaseOutputBuffer(index); -} - -void DirectRenderer::DecoderContext::queueDecoderInputBuffers() { - if (mDecoder == NULL) { - return; - } - - bool submittedMore = false; - - while (!mAccessUnits.empty() - && !mDecoderInputBuffersAvailable.empty()) { - size_t index = *mDecoderInputBuffersAvailable.begin(); - - mDecoderInputBuffersAvailable.erase( - mDecoderInputBuffersAvailable.begin()); - - sp srcBuffer = *mAccessUnits.begin(); - mAccessUnits.erase(mAccessUnits.begin()); - - const sp &dstBuffer = - mDecoderInputBuffers.itemAt(index); - - memcpy(dstBuffer->data(), srcBuffer->data(), srcBuffer->size()); - - int64_t timeUs; - CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs)); - - status_t err = mDecoder->queueInputBuffer( - index, - 0 /* offset */, - srcBuffer->size(), - timeUs, - 0 /* flags */); - CHECK_EQ(err, (status_t)OK); - - submittedMore = true; - } - - if (submittedMore) { - scheduleDecoderNotification(); - } -} - -void DirectRenderer::DecoderContext::onMessageReceived( - const sp &msg) { - switch (msg->what()) { - case kWhatDecoderNotify: - { - onDecoderNotify(); - break; - } - - default: - TRESPASS(); - } -} - -void DirectRenderer::DecoderContext::onDecoderNotify() { - mDecoderNotificationPending = false; - - for (;;) { - size_t index; - status_t err = mDecoder->dequeueInputBuffer(&index); - - if (err == OK) { - mDecoderInputBuffersAvailable.push_back(index); - } else if (err == -EAGAIN) { - break; - } else { - TRESPASS(); - } - } - - queueDecoderInputBuffers(); - - for (;;) { - size_t index; - size_t offset; - size_t size; - int64_t timeUs; - uint32_t flags; - status_t err = mDecoder->dequeueOutputBuffer( - &index, - &offset, - &size, - &timeUs, - &flags); - - if (err == OK) { - queueOutputBuffer( - index, timeUs, mDecoderOutputBuffers.itemAt(index)); - } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) { - err = mDecoder->getOutputBuffers( - &mDecoderOutputBuffers); - CHECK_EQ(err, (status_t)OK); - } else if (err == INFO_FORMAT_CHANGED) { - // We don't care. 
- } else if (err == -EAGAIN) { - break; - } else { - TRESPASS(); - } - } - - scheduleDecoderNotification(); -} - -void DirectRenderer::DecoderContext::scheduleDecoderNotification() { - if (mDecoderNotificationPending) { - return; - } - - sp notify = - new AMessage(kWhatDecoderNotify, id()); - - mDecoder->requestActivityNotification(notify); - mDecoderNotificationPending = true; -} - -void DirectRenderer::DecoderContext::queueOutputBuffer( - size_t index, int64_t timeUs, const sp &buffer) { - sp msg = mNotify->dup(); - msg->setInt32("what", kWhatOutputBufferReady); - msg->setSize("index", index); - msg->setInt64("timeUs", timeUs); - msg->setBuffer("buffer", buffer); - msg->post(); -} - -//////////////////////////////////////////////////////////////////////////////// - -DirectRenderer::AudioRenderer::AudioRenderer( - const sp &decoderContext) - : mDecoderContext(decoderContext), - mPushPending(false), - mNumFramesWritten(0) { - mAudioTrack = new AudioTrack( - AUDIO_STREAM_DEFAULT, - 48000.0f, - AUDIO_FORMAT_PCM, - AUDIO_CHANNEL_OUT_STEREO, - (int)0 /* frameCount */); - - CHECK_EQ((status_t)OK, mAudioTrack->initCheck()); - - mAudioTrack->start(); -} - -DirectRenderer::AudioRenderer::~AudioRenderer() { -} - -void DirectRenderer::AudioRenderer::queueInputBuffer( - size_t index, int64_t timeUs, const sp &buffer) { - BufferInfo info; - info.mIndex = index; - info.mTimeUs = timeUs; - info.mBuffer = buffer; - - mInputBuffers.push_back(info); - schedulePushIfNecessary(); -} - -void DirectRenderer::AudioRenderer::onMessageReceived( - const sp &msg) { - switch (msg->what()) { - case kWhatPushAudio: - { - onPushAudio(); - break; - } - - default: - break; - } -} - -void DirectRenderer::AudioRenderer::schedulePushIfNecessary() { - if (mPushPending || mInputBuffers.empty()) { - return; - } - - mPushPending = true; - - uint32_t numFramesPlayed; - CHECK_EQ(mAudioTrack->getPosition(&numFramesPlayed), - (status_t)OK); - - uint32_t numFramesPendingPlayout = mNumFramesWritten - numFramesPlayed; - - // This is how long the audio sink will have data to - // play back. - const float msecsPerFrame = 1000.0f / mAudioTrack->getSampleRate(); - - int64_t delayUs = - msecsPerFrame * numFramesPendingPlayout * 1000ll; - - // Let's give it more data after about half that time - // has elapsed. 
- (new AMessage(kWhatPushAudio, id()))->post(delayUs / 2); -} - -void DirectRenderer::AudioRenderer::onPushAudio() { - mPushPending = false; - - while (!mInputBuffers.empty()) { - const BufferInfo &info = *mInputBuffers.begin(); - - ssize_t n = writeNonBlocking( - info.mBuffer->data(), info.mBuffer->size()); - - if (n < (ssize_t)info.mBuffer->size()) { - CHECK_GE(n, 0); - - info.mBuffer->setRange( - info.mBuffer->offset() + n, info.mBuffer->size() - n); - break; - } - - mDecoderContext->releaseOutputBuffer(info.mIndex); - - mInputBuffers.erase(mInputBuffers.begin()); - } - - schedulePushIfNecessary(); -} - -ssize_t DirectRenderer::AudioRenderer::writeNonBlocking( - const uint8_t *data, size_t size) { - uint32_t numFramesPlayed; - status_t err = mAudioTrack->getPosition(&numFramesPlayed); - if (err != OK) { - return err; - } - - ssize_t numFramesAvailableToWrite = - mAudioTrack->frameCount() - (mNumFramesWritten - numFramesPlayed); - - size_t numBytesAvailableToWrite = - numFramesAvailableToWrite * mAudioTrack->frameSize(); - - if (size > numBytesAvailableToWrite) { - size = numBytesAvailableToWrite; - } - - CHECK_EQ(mAudioTrack->write(data, size), (ssize_t)size); - - size_t numFramesWritten = size / mAudioTrack->frameSize(); - mNumFramesWritten += numFramesWritten; - - return size; -} - -//////////////////////////////////////////////////////////////////////////////// - -DirectRenderer::DirectRenderer( - const sp &bufferProducer) - : mSurfaceTex(bufferProducer), - mVideoRenderPending(false), - mNumFramesLate(0), - mNumFrames(0) { -} - -DirectRenderer::~DirectRenderer() { -} - -void DirectRenderer::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatDecoderNotify: - { - onDecoderNotify(msg); - break; - } - - case kWhatRenderVideo: - { - onRenderVideo(); - break; - } - - default: - TRESPASS(); - } -} - -void DirectRenderer::setFormat(size_t trackIndex, const sp &format) { - CHECK_LT(trackIndex, 2u); - - CHECK(mDecoderContext[trackIndex] == NULL); - - sp notify = new AMessage(kWhatDecoderNotify, id()); - notify->setSize("trackIndex", trackIndex); - - mDecoderContext[trackIndex] = new DecoderContext(notify); - looper()->registerHandler(mDecoderContext[trackIndex]); - - CHECK_EQ((status_t)OK, - mDecoderContext[trackIndex]->init( - format, trackIndex == 0 ? 
mSurfaceTex : NULL)); - - if (trackIndex == 1) { - // Audio - mAudioRenderer = new AudioRenderer(mDecoderContext[1]); - looper()->registerHandler(mAudioRenderer); - } -} - -void DirectRenderer::queueAccessUnit( - size_t trackIndex, const sp &accessUnit) { - CHECK_LT(trackIndex, 2u); - - if (mDecoderContext[trackIndex] == NULL) { - CHECK_EQ(trackIndex, 0u); - - sp format = new AMessage; - format->setString("mime", "video/avc"); - format->setInt32("width", 640); - format->setInt32("height", 360); - - setFormat(trackIndex, format); - } - - mDecoderContext[trackIndex]->queueInputBuffer(accessUnit); -} - -void DirectRenderer::onDecoderNotify(const sp &msg) { - size_t trackIndex; - CHECK(msg->findSize("trackIndex", &trackIndex)); - - int32_t what; - CHECK(msg->findInt32("what", &what)); - - switch (what) { - case DecoderContext::kWhatOutputBufferReady: - { - size_t index; - CHECK(msg->findSize("index", &index)); - - int64_t timeUs; - CHECK(msg->findInt64("timeUs", &timeUs)); - - sp buffer; - CHECK(msg->findBuffer("buffer", &buffer)); - - queueOutputBuffer(trackIndex, index, timeUs, buffer); - break; - } - - default: - TRESPASS(); - } -} - -void DirectRenderer::queueOutputBuffer( - size_t trackIndex, - size_t index, int64_t timeUs, const sp &buffer) { - if (trackIndex == 1) { - // Audio - mAudioRenderer->queueInputBuffer(index, timeUs, buffer); - return; - } - - OutputInfo info; - info.mIndex = index; - info.mTimeUs = timeUs; - info.mBuffer = buffer; - mVideoOutputBuffers.push_back(info); - - scheduleVideoRenderIfNecessary(); -} - -void DirectRenderer::scheduleVideoRenderIfNecessary() { - if (mVideoRenderPending || mVideoOutputBuffers.empty()) { - return; - } - - mVideoRenderPending = true; - - int64_t timeUs = (*mVideoOutputBuffers.begin()).mTimeUs; - int64_t nowUs = ALooper::GetNowUs(); - - int64_t delayUs = timeUs - nowUs; - - (new AMessage(kWhatRenderVideo, id()))->post(delayUs); -} - -void DirectRenderer::onRenderVideo() { - mVideoRenderPending = false; - - int64_t nowUs = ALooper::GetNowUs(); - - while (!mVideoOutputBuffers.empty()) { - const OutputInfo &info = *mVideoOutputBuffers.begin(); - - if (info.mTimeUs > nowUs) { - break; - } - - if (info.mTimeUs + 15000ll < nowUs) { - ++mNumFramesLate; - } - ++mNumFrames; - - status_t err = - mDecoderContext[0]->renderOutputBufferAndRelease(info.mIndex); - CHECK_EQ(err, (status_t)OK); - - mVideoOutputBuffers.erase(mVideoOutputBuffers.begin()); - } - - scheduleVideoRenderIfNecessary(); -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h deleted file mode 100644 index c5a4a83..0000000 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.h +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */
-
-#ifndef DIRECT_RENDERER_H_
-
-#define DIRECT_RENDERER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ABuffer;
-struct AudioTrack;
-struct IGraphicBufferProducer;
-struct MediaCodec;
-
-// Renders audio and video data queued by calls to "queueAccessUnit".
-struct DirectRenderer : public AHandler {
-    DirectRenderer(const sp<IGraphicBufferProducer> &bufferProducer);
-
-    void setFormat(size_t trackIndex, const sp<AMessage> &format);
-    void queueAccessUnit(size_t trackIndex, const sp<ABuffer> &accessUnit);
-
-protected:
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-    virtual ~DirectRenderer();
-
-private:
-    struct DecoderContext;
-    struct AudioRenderer;
-
-    enum {
-        kWhatDecoderNotify,
-        kWhatRenderVideo,
-    };
-
-    struct OutputInfo {
-        size_t mIndex;
-        int64_t mTimeUs;
-        sp<ABuffer> mBuffer;
-    };
-
-    sp<IGraphicBufferProducer> mSurfaceTex;
-
-    sp<DecoderContext> mDecoderContext[2];
-    List<OutputInfo> mVideoOutputBuffers;
-
-    bool mVideoRenderPending;
-
-    sp<AudioRenderer> mAudioRenderer;
-
-    int32_t mNumFramesLate;
-    int32_t mNumFrames;
-
-    void onDecoderNotify(const sp<AMessage> &msg);
-
-    void queueOutputBuffer(
-            size_t trackIndex,
-            size_t index, int64_t timeUs, const sp<ABuffer> &buffer);
-
-    void scheduleVideoRenderIfNecessary();
-    void onRenderVideo();
-
-    DISALLOW_EVIL_CONSTRUCTORS(DirectRenderer);
-};
-
-}  // namespace android
-
-#endif  // DIRECT_RENDERER_H_
diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp
deleted file mode 100644
index 5db2099..0000000
--- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp
+++ /dev/null
@@ -1,917 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "WifiDisplaySink" -#include - -#include "WifiDisplaySink.h" - -#include "DirectRenderer.h" -#include "MediaReceiver.h" -#include "ParsedMessage.h" -#include "TimeSyncer.h" - -#include -#include -#include -#include -#include -#include - -namespace android { - -// static -const AString WifiDisplaySink::sUserAgent = MakeUserAgent(); - -WifiDisplaySink::WifiDisplaySink( - uint32_t flags, - const sp &netSession, - const sp &bufferProducer, - const sp ¬ify) - : mState(UNDEFINED), - mFlags(flags), - mNetSession(netSession), - mSurfaceTex(bufferProducer), - mNotify(notify), - mUsingTCPTransport(false), - mUsingTCPInterleaving(false), - mSessionID(0), - mNextCSeq(1), - mIDRFrameRequestPending(false), - mTimeOffsetUs(0ll), - mTimeOffsetValid(false), - mSetupDeferred(false), - mLatencyCount(0), - mLatencySumUs(0ll), - mLatencyMaxUs(0ll), - mMaxDelayMs(-1ll) { - // We support any and all resolutions, but prefer 720p30 - mSinkSupportedVideoFormats.setNativeResolution( - VideoFormats::RESOLUTION_CEA, 5); // 1280 x 720 p30 - - mSinkSupportedVideoFormats.enableAll(); -} - -WifiDisplaySink::~WifiDisplaySink() { -} - -void WifiDisplaySink::start(const char *sourceHost, int32_t sourcePort) { - sp msg = new AMessage(kWhatStart, id()); - msg->setString("sourceHost", sourceHost); - msg->setInt32("sourcePort", sourcePort); - msg->post(); -} - -void WifiDisplaySink::start(const char *uri) { - sp msg = new AMessage(kWhatStart, id()); - msg->setString("setupURI", uri); - msg->post(); -} - -// static -bool WifiDisplaySink::ParseURL( - const char *url, AString *host, int32_t *port, AString *path, - AString *user, AString *pass) { - host->clear(); - *port = 0; - path->clear(); - user->clear(); - pass->clear(); - - if (strncasecmp("rtsp://", url, 7)) { - return false; - } - - const char *slashPos = strchr(&url[7], '/'); - - if (slashPos == NULL) { - host->setTo(&url[7]); - path->setTo("/"); - } else { - host->setTo(&url[7], slashPos - &url[7]); - path->setTo(slashPos); - } - - ssize_t atPos = host->find("@"); - - if (atPos >= 0) { - // Split of user:pass@ from hostname. 
- - AString userPass(*host, 0, atPos); - host->erase(0, atPos + 1); - - ssize_t colonPos = userPass.find(":"); - - if (colonPos < 0) { - *user = userPass; - } else { - user->setTo(userPass, 0, colonPos); - pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1); - } - } - - const char *colonPos = strchr(host->c_str(), ':'); - - if (colonPos != NULL) { - char *end; - unsigned long x = strtoul(colonPos + 1, &end, 10); - - if (end == colonPos + 1 || *end != '\0' || x >= 65536) { - return false; - } - - *port = x; - - size_t colonOffset = colonPos - host->c_str(); - size_t trailing = host->size() - colonOffset; - host->erase(colonOffset, trailing); - } else { - *port = 554; - } - - return true; -} - -void WifiDisplaySink::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatStart: - { - sleep(2); // XXX - - int32_t sourcePort; - CHECK(msg->findString("sourceHost", &mRTSPHost)); - CHECK(msg->findInt32("sourcePort", &sourcePort)); - - sp notify = new AMessage(kWhatRTSPNotify, id()); - - status_t err = mNetSession->createRTSPClient( - mRTSPHost.c_str(), sourcePort, notify, &mSessionID); - CHECK_EQ(err, (status_t)OK); - - mState = CONNECTING; - break; - } - - case kWhatRTSPNotify: - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - ALOGE("An error occurred in session %d (%d, '%s/%s').", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - if (sessionID == mSessionID) { - ALOGI("Lost control connection."); - - // The control connection is dead now. - mNetSession->destroySession(mSessionID); - mSessionID = 0; - - if (mNotify == NULL) { - looper()->stop(); - } else { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatDisconnected); - notify->post(); - } - } - break; - } - - case ANetworkSession::kWhatConnected: - { - ALOGI("We're now connected."); - mState = CONNECTED; - - if (mFlags & FLAG_SPECIAL_MODE) { - sp notify = new AMessage( - kWhatTimeSyncerNotify, id()); - - mTimeSyncer = new TimeSyncer(mNetSession, notify); - looper()->registerHandler(mTimeSyncer); - - mTimeSyncer->startClient(mRTSPHost.c_str(), 8123); - } - break; - } - - case ANetworkSession::kWhatData: - { - onReceiveClientData(msg); - break; - } - - default: - TRESPASS(); - } - break; - } - - case kWhatStop: - { - looper()->stop(); - break; - } - - case kWhatMediaReceiverNotify: - { - onMediaReceiverNotify(msg); - break; - } - - case kWhatTimeSyncerNotify: - { - int32_t what; - CHECK(msg->findInt32("what", &what)); - - if (what == TimeSyncer::kWhatTimeOffset) { - CHECK(msg->findInt64("offset", &mTimeOffsetUs)); - mTimeOffsetValid = true; - - if (mSetupDeferred) { - CHECK_EQ((status_t)OK, - sendSetup( - mSessionID, - "rtsp://x.x.x.x:x/wfd1.0/streamid=0")); - - mSetupDeferred = false; - } - } - break; - } - - case kWhatReportLateness: - { - if (mLatencyCount > 0) { - int64_t avgLatencyUs = mLatencySumUs / mLatencyCount; - - ALOGV("avg. 
latency = %lld ms (max %lld ms)", - avgLatencyUs / 1000ll, - mLatencyMaxUs / 1000ll); - - sp params = new AMessage; - params->setInt64("avgLatencyUs", avgLatencyUs); - params->setInt64("maxLatencyUs", mLatencyMaxUs); - mMediaReceiver->informSender(0 /* trackIndex */, params); - } - - mLatencyCount = 0; - mLatencySumUs = 0ll; - mLatencyMaxUs = 0ll; - - msg->post(kReportLatenessEveryUs); - break; - } - - default: - TRESPASS(); - } -} - -void WifiDisplaySink::dumpDelay(size_t trackIndex, int64_t timeUs) { - int64_t delayMs = (ALooper::GetNowUs() - timeUs) / 1000ll; - - if (delayMs > mMaxDelayMs) { - mMaxDelayMs = delayMs; - } - - static const int64_t kMinDelayMs = 0; - static const int64_t kMaxDelayMs = 300; - - const char *kPattern = "########################################"; - size_t kPatternSize = strlen(kPattern); - - int n = (kPatternSize * (delayMs - kMinDelayMs)) - / (kMaxDelayMs - kMinDelayMs); - - if (n < 0) { - n = 0; - } else if ((size_t)n > kPatternSize) { - n = kPatternSize; - } - - ALOGI("[%lld]: (%4lld ms / %4lld ms) %s", - timeUs / 1000, - delayMs, - mMaxDelayMs, - kPattern + kPatternSize - n); -} - -void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { - int32_t what; - CHECK(msg->findInt32("what", &what)); - - switch (what) { - case MediaReceiver::kWhatInitDone: - { - status_t err; - CHECK(msg->findInt32("err", &err)); - - ALOGI("MediaReceiver initialization completed w/ err %d", err); - break; - } - - case MediaReceiver::kWhatError: - { - status_t err; - CHECK(msg->findInt32("err", &err)); - - ALOGE("MediaReceiver signaled error %d", err); - break; - } - - case MediaReceiver::kWhatAccessUnit: - { - if (mRenderer == NULL) { - mRenderer = new DirectRenderer(mSurfaceTex); - looper()->registerHandler(mRenderer); - } - - sp accessUnit; - CHECK(msg->findBuffer("accessUnit", &accessUnit)); - - int64_t timeUs; - CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); - - if (!mTimeOffsetValid && !(mFlags & FLAG_SPECIAL_MODE)) { - mTimeOffsetUs = timeUs - ALooper::GetNowUs(); - mTimeOffsetValid = true; - } - - CHECK(mTimeOffsetValid); - - // We are the timesync _client_, - // client time = server time - time offset. 
- timeUs -= mTimeOffsetUs; - - size_t trackIndex; - CHECK(msg->findSize("trackIndex", &trackIndex)); - - int64_t nowUs = ALooper::GetNowUs(); - int64_t delayUs = nowUs - timeUs; - - mLatencySumUs += delayUs; - if (mLatencyCount == 0 || delayUs > mLatencyMaxUs) { - mLatencyMaxUs = delayUs; - } - ++mLatencyCount; - - // dumpDelay(trackIndex, timeUs); - - timeUs += 220000ll; // Assume 220 ms of latency - accessUnit->meta()->setInt64("timeUs", timeUs); - - sp format; - if (msg->findMessage("format", &format)) { - mRenderer->setFormat(trackIndex, format); - } - - mRenderer->queueAccessUnit(trackIndex, accessUnit); - break; - } - - case MediaReceiver::kWhatPacketLost: - { -#if 0 - if (!mIDRFrameRequestPending) { - ALOGI("requesting IDR frame"); - - sendIDRFrameRequest(mSessionID); - } -#endif - break; - } - - default: - TRESPASS(); - } -} - -void WifiDisplaySink::registerResponseHandler( - int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func) { - ResponseID id; - id.mSessionID = sessionID; - id.mCSeq = cseq; - mResponseHandlers.add(id, func); -} - -status_t WifiDisplaySink::sendM2(int32_t sessionID) { - AString request = "OPTIONS * RTSP/1.0\r\n"; - AppendCommonResponse(&request, mNextCSeq); - - request.append( - "Require: org.wfa.wfd1.0\r\n" - "\r\n"); - - status_t err = - mNetSession->sendRequest(sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, mNextCSeq, &WifiDisplaySink::onReceiveM2Response); - - ++mNextCSeq; - - return OK; -} - -status_t WifiDisplaySink::onReceiveM2Response( - int32_t sessionID, const sp &msg) { - int32_t statusCode; - if (!msg->getStatusCode(&statusCode)) { - return ERROR_MALFORMED; - } - - if (statusCode != 200) { - return ERROR_UNSUPPORTED; - } - - return OK; -} - -status_t WifiDisplaySink::onReceiveSetupResponse( - int32_t sessionID, const sp &msg) { - int32_t statusCode; - if (!msg->getStatusCode(&statusCode)) { - return ERROR_MALFORMED; - } - - if (statusCode != 200) { - return ERROR_UNSUPPORTED; - } - - if (!msg->findString("session", &mPlaybackSessionID)) { - return ERROR_MALFORMED; - } - - if (!ParsedMessage::GetInt32Attribute( - mPlaybackSessionID.c_str(), - "timeout", - &mPlaybackSessionTimeoutSecs)) { - mPlaybackSessionTimeoutSecs = -1; - } - - ssize_t colonPos = mPlaybackSessionID.find(";"); - if (colonPos >= 0) { - // Strip any options from the returned session id. - mPlaybackSessionID.erase( - colonPos, mPlaybackSessionID.size() - colonPos); - } - - status_t err = configureTransport(msg); - - if (err != OK) { - return err; - } - - mState = PAUSED; - - return sendPlay( - sessionID, - "rtsp://x.x.x.x:x/wfd1.0/streamid=0"); -} - -status_t WifiDisplaySink::configureTransport(const sp &msg) { - if (mUsingTCPTransport && !(mFlags & FLAG_SPECIAL_MODE)) { - // In "special" mode we still use a UDP RTCP back-channel that - // needs connecting. 
- return OK; - } - - AString transport; - if (!msg->findString("transport", &transport)) { - ALOGE("Missing 'transport' field in SETUP response."); - return ERROR_MALFORMED; - } - - AString sourceHost; - if (!ParsedMessage::GetAttribute( - transport.c_str(), "source", &sourceHost)) { - sourceHost = mRTSPHost; - } - - AString serverPortStr; - if (!ParsedMessage::GetAttribute( - transport.c_str(), "server_port", &serverPortStr)) { - ALOGE("Missing 'server_port' in Transport field."); - return ERROR_MALFORMED; - } - - int rtpPort, rtcpPort; - if (sscanf(serverPortStr.c_str(), "%d-%d", &rtpPort, &rtcpPort) != 2 - || rtpPort <= 0 || rtpPort > 65535 - || rtcpPort <=0 || rtcpPort > 65535 - || rtcpPort != rtpPort + 1) { - ALOGE("Invalid server_port description '%s'.", - serverPortStr.c_str()); - - return ERROR_MALFORMED; - } - - if (rtpPort & 1) { - ALOGW("Server picked an odd numbered RTP port."); - } - - return mMediaReceiver->connectTrack( - 0 /* trackIndex */, sourceHost.c_str(), rtpPort, rtcpPort); -} - -status_t WifiDisplaySink::onReceivePlayResponse( - int32_t sessionID, const sp &msg) { - int32_t statusCode; - if (!msg->getStatusCode(&statusCode)) { - return ERROR_MALFORMED; - } - - if (statusCode != 200) { - return ERROR_UNSUPPORTED; - } - - mState = PLAYING; - - (new AMessage(kWhatReportLateness, id()))->post(kReportLatenessEveryUs); - - return OK; -} - -status_t WifiDisplaySink::onReceiveIDRFrameRequestResponse( - int32_t sessionID, const sp &msg) { - CHECK(mIDRFrameRequestPending); - mIDRFrameRequestPending = false; - - return OK; -} - -void WifiDisplaySink::onReceiveClientData(const sp &msg) { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - sp obj; - CHECK(msg->findObject("data", &obj)); - - sp data = - static_cast(obj.get()); - - ALOGV("session %d received '%s'", - sessionID, data->debugString().c_str()); - - AString method; - AString uri; - data->getRequestField(0, &method); - - int32_t cseq; - if (!data->findInt32("cseq", &cseq)) { - sendErrorResponse(sessionID, "400 Bad Request", -1 /* cseq */); - return; - } - - if (method.startsWith("RTSP/")) { - // This is a response. 
- - ResponseID id; - id.mSessionID = sessionID; - id.mCSeq = cseq; - - ssize_t index = mResponseHandlers.indexOfKey(id); - - if (index < 0) { - ALOGW("Received unsolicited server response, cseq %d", cseq); - return; - } - - HandleRTSPResponseFunc func = mResponseHandlers.valueAt(index); - mResponseHandlers.removeItemsAt(index); - - status_t err = (this->*func)(sessionID, data); - CHECK_EQ(err, (status_t)OK); - } else { - AString version; - data->getRequestField(2, &version); - if (!(version == AString("RTSP/1.0"))) { - sendErrorResponse(sessionID, "505 RTSP Version not supported", cseq); - return; - } - - if (method == "OPTIONS") { - onOptionsRequest(sessionID, cseq, data); - } else if (method == "GET_PARAMETER") { - onGetParameterRequest(sessionID, cseq, data); - } else if (method == "SET_PARAMETER") { - onSetParameterRequest(sessionID, cseq, data); - } else { - sendErrorResponse(sessionID, "405 Method Not Allowed", cseq); - } - } -} - -void WifiDisplaySink::onOptionsRequest( - int32_t sessionID, - int32_t cseq, - const sp &data) { - AString response = "RTSP/1.0 200 OK\r\n"; - AppendCommonResponse(&response, cseq); - response.append("Public: org.wfa.wfd1.0, GET_PARAMETER, SET_PARAMETER\r\n"); - response.append("\r\n"); - - status_t err = mNetSession->sendRequest(sessionID, response.c_str()); - CHECK_EQ(err, (status_t)OK); - - err = sendM2(sessionID); - CHECK_EQ(err, (status_t)OK); -} - -void WifiDisplaySink::onGetParameterRequest( - int32_t sessionID, - int32_t cseq, - const sp &data) { - AString body; - - if (mState == CONNECTED) { - mUsingTCPTransport = false; - mUsingTCPInterleaving = false; - - char val[PROPERTY_VALUE_MAX]; - if (property_get("media.wfd-sink.tcp-mode", val, NULL)) { - if (!strcasecmp("true", val) || !strcmp("1", val)) { - ALOGI("Using TCP unicast transport."); - mUsingTCPTransport = true; - mUsingTCPInterleaving = false; - } else if (!strcasecmp("interleaved", val)) { - ALOGI("Using TCP interleaved transport."); - mUsingTCPTransport = true; - mUsingTCPInterleaving = true; - } - } else if (mFlags & FLAG_SPECIAL_MODE) { - mUsingTCPTransport = true; - } - - body = "wfd_video_formats: "; - body.append(mSinkSupportedVideoFormats.getFormatSpec()); - - body.append( - "\r\nwfd_audio_codecs: AAC 0000000F 00\r\n" - "wfd_client_rtp_ports: RTP/AVP/"); - - if (mUsingTCPTransport) { - body.append("TCP;"); - if (mUsingTCPInterleaving) { - body.append("interleaved"); - } else { - body.append("unicast 19000 0"); - } - } else { - body.append("UDP;unicast 19000 0"); - } - - body.append(" mode=play\r\n"); - } - - AString response = "RTSP/1.0 200 OK\r\n"; - AppendCommonResponse(&response, cseq); - response.append("Content-Type: text/parameters\r\n"); - response.append(StringPrintf("Content-Length: %d\r\n", body.size())); - response.append("\r\n"); - response.append(body); - - status_t err = mNetSession->sendRequest(sessionID, response.c_str()); - CHECK_EQ(err, (status_t)OK); -} - -status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) { - sp notify = new AMessage(kWhatMediaReceiverNotify, id()); - - mMediaReceiverLooper = new ALooper; - mMediaReceiverLooper->setName("media_receiver"); - - mMediaReceiverLooper->start( - false /* runOnCallingThread */, - false /* canCallJava */, - PRIORITY_AUDIO); - - mMediaReceiver = new MediaReceiver(mNetSession, notify); - mMediaReceiverLooper->registerHandler(mMediaReceiver); - - RTPReceiver::TransportMode rtpMode = RTPReceiver::TRANSPORT_UDP; - if (mUsingTCPTransport) { - if (mUsingTCPInterleaving) { - rtpMode = 
RTPReceiver::TRANSPORT_TCP_INTERLEAVED; - } else { - rtpMode = RTPReceiver::TRANSPORT_TCP; - } - } - - int32_t localRTPPort; - status_t err = mMediaReceiver->addTrack( - rtpMode, RTPReceiver::TRANSPORT_UDP /* rtcpMode */, &localRTPPort); - - if (err == OK) { - err = mMediaReceiver->initAsync(MediaReceiver::MODE_TRANSPORT_STREAM); - } - - if (err != OK) { - mMediaReceiverLooper->unregisterHandler(mMediaReceiver->id()); - mMediaReceiver.clear(); - - mMediaReceiverLooper->stop(); - mMediaReceiverLooper.clear(); - - return err; - } - - AString request = StringPrintf("SETUP %s RTSP/1.0\r\n", uri); - - AppendCommonResponse(&request, mNextCSeq); - - if (rtpMode == RTPReceiver::TRANSPORT_TCP_INTERLEAVED) { - request.append("Transport: RTP/AVP/TCP;interleaved=0-1\r\n"); - } else if (rtpMode == RTPReceiver::TRANSPORT_TCP) { - if (mFlags & FLAG_SPECIAL_MODE) { - // This isn't quite true, since the RTP connection is through TCP - // and the RTCP connection through UDP... - request.append( - StringPrintf( - "Transport: RTP/AVP/TCP;unicast;client_port=%d-%d\r\n", - localRTPPort, localRTPPort + 1)); - } else { - request.append( - StringPrintf( - "Transport: RTP/AVP/TCP;unicast;client_port=%d\r\n", - localRTPPort)); - } - } else { - request.append( - StringPrintf( - "Transport: RTP/AVP/UDP;unicast;client_port=%d-%d\r\n", - localRTPPort, - localRTPPort + 1)); - } - - request.append("\r\n"); - - ALOGV("request = '%s'", request.c_str()); - - err = mNetSession->sendRequest(sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, mNextCSeq, &WifiDisplaySink::onReceiveSetupResponse); - - ++mNextCSeq; - - return OK; -} - -status_t WifiDisplaySink::sendPlay(int32_t sessionID, const char *uri) { - AString request = StringPrintf("PLAY %s RTSP/1.0\r\n", uri); - - AppendCommonResponse(&request, mNextCSeq); - - request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str())); - request.append("\r\n"); - - status_t err = - mNetSession->sendRequest(sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, mNextCSeq, &WifiDisplaySink::onReceivePlayResponse); - - ++mNextCSeq; - - return OK; -} - -status_t WifiDisplaySink::sendIDRFrameRequest(int32_t sessionID) { - CHECK(!mIDRFrameRequestPending); - - AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n"; - - AppendCommonResponse(&request, mNextCSeq); - - AString content = "wfd_idr_request\r\n"; - - request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str())); - request.append(StringPrintf("Content-Length: %d\r\n", content.size())); - request.append("\r\n"); - request.append(content); - - status_t err = - mNetSession->sendRequest(sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, - mNextCSeq, - &WifiDisplaySink::onReceiveIDRFrameRequestResponse); - - ++mNextCSeq; - - mIDRFrameRequestPending = true; - - return OK; -} - -void WifiDisplaySink::onSetParameterRequest( - int32_t sessionID, - int32_t cseq, - const sp &data) { - const char *content = data->getContent(); - - if (strstr(content, "wfd_trigger_method: SETUP\r\n") != NULL) { - if ((mFlags & FLAG_SPECIAL_MODE) && !mTimeOffsetValid) { - mSetupDeferred = true; - } else { - status_t err = - sendSetup( - sessionID, - "rtsp://x.x.x.x:x/wfd1.0/streamid=0"); - - CHECK_EQ(err, (status_t)OK); - } - } - - AString response = "RTSP/1.0 200 OK\r\n"; - AppendCommonResponse(&response, 
cseq); - response.append("\r\n"); - - status_t err = mNetSession->sendRequest(sessionID, response.c_str()); - CHECK_EQ(err, (status_t)OK); -} - -void WifiDisplaySink::sendErrorResponse( - int32_t sessionID, - const char *errorDetail, - int32_t cseq) { - AString response; - response.append("RTSP/1.0 "); - response.append(errorDetail); - response.append("\r\n"); - - AppendCommonResponse(&response, cseq); - - response.append("\r\n"); - - status_t err = mNetSession->sendRequest(sessionID, response.c_str()); - CHECK_EQ(err, (status_t)OK); -} - -// static -void WifiDisplaySink::AppendCommonResponse(AString *response, int32_t cseq) { - time_t now = time(NULL); - struct tm *now2 = gmtime(&now); - char buf[128]; - strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S %z", now2); - - response->append("Date: "); - response->append(buf); - response->append("\r\n"); - - response->append(StringPrintf("User-Agent: %s\r\n", sUserAgent.c_str())); - - if (cseq >= 0) { - response->append(StringPrintf("CSeq: %d\r\n", cseq)); - } -} - -} // namespace android diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h deleted file mode 100644 index adb9d89..0000000 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef WIFI_DISPLAY_SINK_H_ - -#define WIFI_DISPLAY_SINK_H_ - -#include "ANetworkSession.h" - -#include "VideoFormats.h" - -#include -#include - -namespace android { - -struct AMessage; -struct DirectRenderer; -struct MediaReceiver; -struct ParsedMessage; -struct TimeSyncer; - -// Represents the RTSP client acting as a wifi display sink. -// Connects to a wifi display source and renders the incoming -// transport stream using a MediaPlayer instance. -struct WifiDisplaySink : public AHandler { - enum { - kWhatDisconnected, - }; - - enum Flags { - FLAG_SPECIAL_MODE = 1, - }; - - // If no notification message is specified (notify == NULL) - // the sink will stop its looper() once the session ends, - // otherwise it will post an appropriate notification but leave - // the looper() running. 
- WifiDisplaySink( - uint32_t flags, - const sp &netSession, - const sp &bufferProducer = NULL, - const sp ¬ify = NULL); - - void start(const char *sourceHost, int32_t sourcePort); - void start(const char *uri); - -protected: - virtual ~WifiDisplaySink(); - virtual void onMessageReceived(const sp &msg); - -private: - enum State { - UNDEFINED, - CONNECTING, - CONNECTED, - PAUSED, - PLAYING, - }; - - enum { - kWhatStart, - kWhatRTSPNotify, - kWhatStop, - kWhatMediaReceiverNotify, - kWhatTimeSyncerNotify, - kWhatReportLateness, - }; - - struct ResponseID { - int32_t mSessionID; - int32_t mCSeq; - - bool operator<(const ResponseID &other) const { - return mSessionID < other.mSessionID - || (mSessionID == other.mSessionID - && mCSeq < other.mCSeq); - } - }; - - typedef status_t (WifiDisplaySink::*HandleRTSPResponseFunc)( - int32_t sessionID, const sp &msg); - - static const int64_t kReportLatenessEveryUs = 1000000ll; - - static const AString sUserAgent; - - State mState; - uint32_t mFlags; - VideoFormats mSinkSupportedVideoFormats; - sp mNetSession; - sp mSurfaceTex; - sp mNotify; - sp mTimeSyncer; - bool mUsingTCPTransport; - bool mUsingTCPInterleaving; - AString mRTSPHost; - int32_t mSessionID; - - int32_t mNextCSeq; - - KeyedVector mResponseHandlers; - - sp mMediaReceiverLooper; - sp mMediaReceiver; - sp mRenderer; - - AString mPlaybackSessionID; - int32_t mPlaybackSessionTimeoutSecs; - - bool mIDRFrameRequestPending; - - int64_t mTimeOffsetUs; - bool mTimeOffsetValid; - - bool mSetupDeferred; - - size_t mLatencyCount; - int64_t mLatencySumUs; - int64_t mLatencyMaxUs; - - int64_t mMaxDelayMs; - - status_t sendM2(int32_t sessionID); - status_t sendSetup(int32_t sessionID, const char *uri); - status_t sendPlay(int32_t sessionID, const char *uri); - status_t sendIDRFrameRequest(int32_t sessionID); - - status_t onReceiveM2Response( - int32_t sessionID, const sp &msg); - - status_t onReceiveSetupResponse( - int32_t sessionID, const sp &msg); - - status_t configureTransport(const sp &msg); - - status_t onReceivePlayResponse( - int32_t sessionID, const sp &msg); - - status_t onReceiveIDRFrameRequestResponse( - int32_t sessionID, const sp &msg); - - void registerResponseHandler( - int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func); - - void onReceiveClientData(const sp &msg); - - void onOptionsRequest( - int32_t sessionID, - int32_t cseq, - const sp &data); - - void onGetParameterRequest( - int32_t sessionID, - int32_t cseq, - const sp &data); - - void onSetParameterRequest( - int32_t sessionID, - int32_t cseq, - const sp &data); - - void onMediaReceiverNotify(const sp &msg); - - void sendErrorResponse( - int32_t sessionID, - const char *errorDetail, - int32_t cseq); - - static void AppendCommonResponse(AString *response, int32_t cseq); - - bool ParseURL( - const char *url, AString *host, int32_t *port, AString *path, - AString *user, AString *pass); - - void dumpDelay(size_t trackIndex, int64_t timeUs); - - DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySink); -}; - -} // namespace android - -#endif // WIFI_DISPLAY_SINK_H_ diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index cacfcca..3d7b865 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -559,8 +559,6 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( converter->dropAFrame(); } } - } else if (what == MediaSender::kWhatInformSender) { - 
onSinkFeedback(msg); } else { TRESPASS(); } @@ -656,89 +654,6 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( } } -void WifiDisplaySource::PlaybackSession::onSinkFeedback(const sp &msg) { - int64_t avgLatencyUs; - CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs)); - - int64_t maxLatencyUs; - CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs)); - - ALOGI("sink reports avg. latency of %lld ms (max %lld ms)", - avgLatencyUs / 1000ll, - maxLatencyUs / 1000ll); - - if (mVideoTrackIndex >= 0) { - const sp &videoTrack = mTracks.valueFor(mVideoTrackIndex); - sp converter = videoTrack->converter(); - - if (converter != NULL) { - int32_t videoBitrate = - Converter::GetInt32Property("media.wfd.video-bitrate", -1); - - char val[PROPERTY_VALUE_MAX]; - if (videoBitrate < 0 - && property_get("media.wfd.video-bitrate", val, NULL) - && !strcasecmp("adaptive", val)) { - videoBitrate = converter->getVideoBitrate(); - - if (avgLatencyUs > 300000ll) { - videoBitrate *= 0.6; - } else if (avgLatencyUs < 100000ll) { - videoBitrate *= 1.1; - } - } - - if (videoBitrate > 0) { - if (videoBitrate < 500000) { - videoBitrate = 500000; - } else if (videoBitrate > 10000000) { - videoBitrate = 10000000; - } - - if (videoBitrate != converter->getVideoBitrate()) { - ALOGI("setting video bitrate to %d bps", videoBitrate); - - converter->setVideoBitrate(videoBitrate); - } - } - } - - sp repeaterSource = videoTrack->repeaterSource(); - if (repeaterSource != NULL) { - double rateHz = - Converter::GetInt32Property( - "media.wfd.video-framerate", -1); - - char val[PROPERTY_VALUE_MAX]; - if (rateHz < 0.0 - && property_get("media.wfd.video-framerate", val, NULL) - && !strcasecmp("adaptive", val)) { - rateHz = repeaterSource->getFrameRate(); - - if (avgLatencyUs > 300000ll) { - rateHz *= 0.9; - } else if (avgLatencyUs < 200000ll) { - rateHz *= 1.1; - } - } - - if (rateHz > 0) { - if (rateHz < 5.0) { - rateHz = 5.0; - } else if (rateHz > 30.0) { - rateHz = 30.0; - } - - if (rateHz != repeaterSource->getFrameRate()) { - ALOGI("setting frame rate to %.2f Hz", rateHz); - - repeaterSource->setFrameRate(rateHz); - } - } - } - } -} - status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer( bool enableAudio, bool enableVideo) { DataSource::RegisterDefaultSniffers(); diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 4a49811..2b5bee9 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -23,7 +23,6 @@ #include "Parameters.h" #include "ParsedMessage.h" #include "rtp/RTPSender.h" -#include "TimeSyncer.h" #include #include @@ -165,14 +164,6 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { } else { err = -EINVAL; } - } - - if (err == OK) { - sp notify = new AMessage(kWhatTimeSyncerNotify, id()); - mTimeSyncer = new TimeSyncer(mNetSession, notify); - looper()->registerHandler(mTimeSyncer); - - mTimeSyncer->startServer(8123); mState = AWAITING_CLIENT_CONNECTION; } @@ -548,11 +539,6 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { break; } - case kWhatTimeSyncerNotify: - { - break; - } - default: TRESPASS(); } diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index 3efa0b4..44d3e4d 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ 
-30,7 +30,6 @@ namespace android { struct IHDCP; struct IRemoteDisplayClient; struct ParsedMessage; -struct TimeSyncer; // Represents the RTSP server acting as a wifi display source. // Manages incoming connections, sets up Playback sessions as necessary. @@ -83,7 +82,6 @@ private: kWhatHDCPNotify, kWhatFinishStop2, kWhatTeardownTriggerTimedOut, - kWhatTimeSyncerNotify, }; struct ResponseID { @@ -120,7 +118,6 @@ private: sp mNetSession; sp mClient; AString mMediaPath; - sp mTimeSyncer; struct in_addr mInterfaceAddr; int32_t mSessionID; diff --git a/media/libstagefright/wifi-display/udptest.cpp b/media/libstagefright/wifi-display/udptest.cpp deleted file mode 100644 index 111846d..0000000 --- a/media/libstagefright/wifi-display/udptest.cpp +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NEBUG 0 -#define LOG_TAG "udptest" -#include - -#include "ANetworkSession.h" -#include "TimeSyncer.h" - -#include -#include - -namespace android { - -} // namespace android - -static void usage(const char *me) { - fprintf(stderr, - "usage: %s -c host[:port]\tconnect to test server\n" - " -l \tcreate a test server\n", - me); -} - -int main(int argc, char **argv) { - using namespace android; - - ProcessState::self()->startThreadPool(); - - int32_t localPort = -1; - int32_t connectToPort = -1; - AString connectToHost; - - int res; - while ((res = getopt(argc, argv, "hc:l:")) >= 0) { - switch (res) { - case 'c': - { - const char *colonPos = strrchr(optarg, ':'); - - if (colonPos == NULL) { - connectToHost = optarg; - connectToPort = 49152; - } else { - connectToHost.setTo(optarg, colonPos - optarg); - - char *end; - connectToPort = strtol(colonPos + 1, &end, 10); - - if (*end != '\0' || end == colonPos + 1 - || connectToPort < 1 || connectToPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - } - break; - } - - case 'l': - { - char *end; - localPort = strtol(optarg, &end, 10); - - if (*end != '\0' || end == optarg - || localPort < 1 || localPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - break; - } - - case '?': - case 'h': - usage(argv[0]); - exit(1); - } - } - - if (localPort < 0 && connectToPort < 0) { - fprintf(stderr, - "You need to select either client or server mode.\n"); - exit(1); - } - - sp netSession = new ANetworkSession; - netSession->start(); - - sp looper = new ALooper; - - sp handler = new TimeSyncer(netSession, NULL /* notify */); - looper->registerHandler(handler); - - if (localPort >= 0) { - handler->startServer(localPort); - } else { - handler->startClient(connectToHost.c_str(), connectToPort); - } - - looper->start(true /* runOnCallingThread */); - - return 0; -} - diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index 9fee4d0..c947765 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -18,7 +18,6 @@ #define 
LOG_TAG "wfd" #include -#include "sink/WifiDisplaySink.h" #include "source/WifiDisplaySource.h" #include @@ -39,12 +38,8 @@ namespace android { static void usage(const char *me) { fprintf(stderr, "usage:\n" - " %s -c host[:port]\tconnect to wifi source\n" - " -u uri \tconnect to an rtsp uri\n" - " -l ip[:port] \tlisten on the specified port " - " -f(ilename) \tstream media " - "(create a sink)\n" - " -s(pecial) \trun in 'special' mode\n", + " %s -l iface[:port]\tcreate a wifi display source\n" + " -f(ilename) \tstream media\n", me); } @@ -214,48 +209,14 @@ int main(int argc, char **argv) { DataSource::RegisterDefaultSniffers(); - AString connectToHost; - int32_t connectToPort = -1; - AString uri; - AString listenOnAddr; int32_t listenOnPort = -1; AString path; - bool specialMode = false; - int res; - while ((res = getopt(argc, argv, "hc:l:u:f:s")) >= 0) { + while ((res = getopt(argc, argv, "hl:f:")) >= 0) { switch (res) { - case 'c': - { - const char *colonPos = strrchr(optarg, ':'); - - if (colonPos == NULL) { - connectToHost = optarg; - connectToPort = WifiDisplaySource::kWifiDisplayDefaultPort; - } else { - connectToHost.setTo(optarg, colonPos - optarg); - - char *end; - connectToPort = strtol(colonPos + 1, &end, 10); - - if (*end != '\0' || end == colonPos + 1 - || connectToPort < 1 || connectToPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - } - break; - } - - case 'u': - { - uri = optarg; - break; - } - case 'f': { path = optarg; @@ -284,12 +245,6 @@ int main(int argc, char **argv) { break; } - case 's': - { - specialMode = true; - break; - } - case '?': case 'h': default: @@ -298,13 +253,6 @@ int main(int argc, char **argv) { } } - if (connectToPort >= 0 && listenOnPort >= 0) { - fprintf(stderr, - "You can connect to a source or create one, " - "but not both at the same time.\n"); - exit(1); - } - if (listenOnPort >= 0) { if (path.empty()) { createSource(listenOnAddr, listenOnPort); @@ -315,72 +263,7 @@ int main(int argc, char **argv) { exit(0); } - if (connectToPort < 0 && uri.empty()) { - fprintf(stderr, - "You need to select either source host or uri.\n"); - - exit(1); - } - - if (connectToPort >= 0 && !uri.empty()) { - fprintf(stderr, - "You need to either connect to a wfd host or an rtsp url, " - "not both.\n"); - exit(1); - } - - sp composerClient = new SurfaceComposerClient; - CHECK_EQ(composerClient->initCheck(), (status_t)OK); - - sp display(SurfaceComposerClient::getBuiltInDisplay( - ISurfaceComposer::eDisplayIdMain)); - DisplayInfo info; - SurfaceComposerClient::getDisplayInfo(display, &info); - ssize_t displayWidth = info.w; - ssize_t displayHeight = info.h; - - ALOGV("display is %d x %d\n", displayWidth, displayHeight); - - sp control = - composerClient->createSurface( - String8("A Surface"), - displayWidth, - displayHeight, - PIXEL_FORMAT_RGB_565, - 0); - - CHECK(control != NULL); - CHECK(control->isValid()); - - SurfaceComposerClient::openGlobalTransaction(); - CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK); - CHECK_EQ(control->show(), (status_t)OK); - SurfaceComposerClient::closeGlobalTransaction(); - - sp surface = control->getSurface(); - CHECK(surface != NULL); - - sp session = new ANetworkSession; - session->start(); - - sp looper = new ALooper; - - sp sink = new WifiDisplaySink( - specialMode ? 
WifiDisplaySink::FLAG_SPECIAL_MODE : 0 /* flags */, - session, - surface->getIGraphicBufferProducer()); - - looper->registerHandler(sink); - - if (connectToPort >= 0) { - sink->start(connectToHost.c_str(), connectToPort); - } else { - sink->start(uri.c_str()); - } - - looper->start(true /* runOnCallingThread */); - - composerClient->dispose(); + usage(argv[0]); return 0; } -- cgit v1.1 From a07f17ca46db04c9d5d9e7d6b2878db59ca2b9ea Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 23 Apr 2013 12:39:37 -0700 Subject: Remove timing jitter during startup of audio This fixes a regression introduced recently, that increased timing jitter during the startup of the FastMixer and AudioTrack callback threads. The regression was to make requestPriority() asynchronous as a way to avoid an apparent priority inversion in system_server. This means that the target thread could run briefly with the initial priority, before the new priority takes effect. This change removes the startup jitter for FastMixer, by making the requestPriority() synchronous again for that case. It doesn't matter that this restores the priority inversion involving normal mixer thread, because it happens during startup of both threads. The change also removes the startup jitter for the AudioTrack callback thread, by having the target thread check whether the requestPriority() has completed yet. If not, the target thread blocks with a timeout until the priority boost finishes. Finally, we now log an error message if the expected priority boost doesn't happen. Bug: 8698989 Change-Id: Id590e9a274b70ec1ba85b44a585ee37a22e41cbc --- include/media/AudioTrack.h | 1 + media/libmedia/AudioTrack.cpp | 21 +++++++++++++++++++++ services/audioflinger/FastMixer.cpp | 4 ++++ services/audioflinger/ISchedulingPolicyService.cpp | 5 +++-- services/audioflinger/ISchedulingPolicyService.h | 2 +- services/audioflinger/SchedulingPolicyService.cpp | 4 ++-- services/audioflinger/SchedulingPolicyService.h | 5 ++++- services/audioflinger/Threads.cpp | 4 +++- 8 files changed, 39 insertions(+), 7 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index db5a7ab..64f82bb 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -599,6 +599,7 @@ protected: int mPreviousPriority; // before start() SchedPolicy mPreviousSchedulingGroup; AudioTrackClientProxy* mProxy; + bool mAwaitBoost; // thread should wait for priority boost before running }; class TimedAudioTrack : public AudioTrack diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 1bd839f..7eeb4f8 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -893,9 +893,11 @@ status_t AudioTrack::createTrack_l( ALOGW("Requested frameCount %u but received frameCount %u", frameCount, temp); } frameCount = temp; + mAwaitBoost = false; if (flags & AUDIO_OUTPUT_FLAG_FAST) { if (trackFlags & IAudioFlinger::TRACK_FAST) { ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", frameCount); + mAwaitBoost = true; } else { ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %u", frameCount); // once denied, do not request again if IAudioTrack is re-created @@ -1219,6 +1221,25 @@ bool AudioTrack::processAudioBuffer(const sp& thread) size_t writtenSize; mLock.lock(); + if (mAwaitBoost) { + mAwaitBoost = false; + mLock.unlock(); + static const int32_t kMaxTries = 5; + int32_t tryCounter = kMaxTries; + uint32_t pollUs = 10000; + do { + int policy = sched_getscheduler(0); + if (policy == SCHED_FIFO || policy == 
SCHED_RR) { + break; + } + usleep(pollUs); + pollUs <<= 1; + } while (tryCounter-- > 0); + if (tryCounter < 0) { + ALOGE("did not receive expected priority boost on time"); + } + return true; + } // acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed // while we are accessing the cblk sp audioTrack = mAudioTrack; diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 24a6dfe..21df1d7 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -170,6 +170,10 @@ bool FastMixer::threadLoop() if (old <= 0) { __futex_syscall4(coldFutexAddr, FUTEX_WAIT_PRIVATE, old - 1, NULL); } + int policy = sched_getscheduler(0); + if (!(policy == SCHED_FIFO || policy == SCHED_RR)) { + ALOGE("did not receive expected priority boost"); + } // This may be overly conservative; there could be times that the normal mixer // requests such a brief cold idle that it doesn't require resetting this flag. isWarm = false; diff --git a/services/audioflinger/ISchedulingPolicyService.cpp b/services/audioflinger/ISchedulingPolicyService.cpp index 218aa6b..0079968 100644 --- a/services/audioflinger/ISchedulingPolicyService.cpp +++ b/services/audioflinger/ISchedulingPolicyService.cpp @@ -37,14 +37,15 @@ public: { } - virtual int requestPriority(int32_t pid, int32_t tid, int32_t prio) + virtual int requestPriority(int32_t pid, int32_t tid, int32_t prio, bool asynchronous) { Parcel data, reply; data.writeInterfaceToken(ISchedulingPolicyService::getInterfaceDescriptor()); data.writeInt32(pid); data.writeInt32(tid); data.writeInt32(prio); - remote()->transact(REQUEST_PRIORITY_TRANSACTION, data, &reply, IBinder::FLAG_ONEWAY); + uint32_t flags = asynchronous ? IBinder::FLAG_ONEWAY : 0; + remote()->transact(REQUEST_PRIORITY_TRANSACTION, data, &reply, flags); // fail on exception if (reply.readExceptionCode() != 0) return -1; return reply.readInt32(); diff --git a/services/audioflinger/ISchedulingPolicyService.h b/services/audioflinger/ISchedulingPolicyService.h index a38e67e..b94b191 100644 --- a/services/audioflinger/ISchedulingPolicyService.h +++ b/services/audioflinger/ISchedulingPolicyService.h @@ -27,7 +27,7 @@ public: DECLARE_META_INTERFACE(SchedulingPolicyService); virtual int requestPriority(/*pid_t*/int32_t pid, /*pid_t*/int32_t tid, - int32_t prio) = 0; + int32_t prio, bool asynchronous) = 0; }; diff --git a/services/audioflinger/SchedulingPolicyService.cpp b/services/audioflinger/SchedulingPolicyService.cpp index 59cc99a..36e62e9 100644 --- a/services/audioflinger/SchedulingPolicyService.cpp +++ b/services/audioflinger/SchedulingPolicyService.cpp @@ -25,7 +25,7 @@ static sp sSchedulingPolicyService; static const String16 _scheduling_policy("scheduling_policy"); static Mutex sMutex; -int requestPriority(pid_t pid, pid_t tid, int32_t prio) +int requestPriority(pid_t pid, pid_t tid, int32_t prio, bool asynchronous) { // FIXME merge duplicated code related to service lookup, caching, and error recovery sp sps; @@ -46,7 +46,7 @@ int requestPriority(pid_t pid, pid_t tid, int32_t prio) } sleep(1); } - return sps->requestPriority(pid, tid, prio); + return sps->requestPriority(pid, tid, prio, asynchronous); } } // namespace android diff --git a/services/audioflinger/SchedulingPolicyService.h b/services/audioflinger/SchedulingPolicyService.h index 7ac8454..a9870d4 100644 --- a/services/audioflinger/SchedulingPolicyService.h +++ b/services/audioflinger/SchedulingPolicyService.h @@ -21,7 +21,10 @@ namespace android { // 
Request elevated priority for thread tid, whose thread group leader must be pid. // The priority parameter is currently restricted to either 1 or 2. -int requestPriority(pid_t pid, pid_t tid, int32_t prio); +// The asynchronous parameter should be 'true' to return immediately, +// after the request is enqueued but not necessarily executed. +// The default value 'false' means to return after request has been enqueued and executed. +int requestPriority(pid_t pid, pid_t tid, int32_t prio, bool asynchronous = false); } // namespace android diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 47ca100..539bb4f 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -373,7 +373,9 @@ void AudioFlinger::ThreadBase::processConfigEvents() switch(event->type()) { case CFG_EVENT_PRIO: { PrioConfigEvent *prioEvent = static_cast(event); - int err = requestPriority(prioEvent->pid(), prioEvent->tid(), prioEvent->prio()); + // FIXME Need to understand why this has be done asynchronously + int err = requestPriority(prioEvent->pid(), prioEvent->tid(), prioEvent->prio(), + true /*asynchronous*/); if (err != 0) { ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; " "error %d", -- cgit v1.1 From 8d2d4932b96632e9eb3af4a3d4000192ef603960 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Thu, 25 Apr 2013 12:56:18 -0700 Subject: audioflinger: fix duplicating track sampling rate Add missing initialization of client proxy sampling rate and volumes in OutputTrack constructor. Bug: 8687522 Change-Id: I813e700315bb97083a63dd32279f1998ac775483 --- services/audioflinger/Tracks.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index a6ab4f8..5ac3129 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -1415,6 +1415,9 @@ AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( // since client and server are in the same process, // the buffer has the same virtual address on both sides mClientProxy = new AudioTrackClientProxy(mCblk, mBuffer, mFrameCount, mFrameSize); + mClientProxy->setVolumeLR((uint32_t(uint16_t(0x1000)) << 16) | uint16_t(0x1000)); + mClientProxy->setSendLevel(0.0); + mClientProxy->setSampleRate(sampleRate); } else { ALOGW("Error creating output track on thread %p", playbackThread); } -- cgit v1.1 From 03ac850527ffb90348dcdaad95caceb97649fd6b Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 23 Apr 2013 14:51:29 -0700 Subject: ProCamera: Add security permission checks for disabling transmit LED Bug: 8554573 Change-Id: Ie909908a4cab3700bd622282e8342e8fa5b72376 --- .../camera/libcameraservice/ProCamera2Client.cpp | 55 ++++++++++++++++++++++ .../camera/libcameraservice/ProCamera2Client.h | 1 + .../camera/libcameraservice/camera2/Parameters.cpp | 26 ++++++++++ 3 files changed, 82 insertions(+) diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp index 575b075..251fdab 100644 --- a/services/camera/libcameraservice/ProCamera2Client.cpp +++ b/services/camera/libcameraservice/ProCamera2Client.cpp @@ -203,6 +203,10 @@ status_t ProCamera2Client::submitRequest(camera_metadata_t* request, CameraMetadata metadata(request); + if (!enforceRequestPermissions(metadata)) { + return PERMISSION_DENIED; + } + if (streaming) { return mDevice->setStreamingRequest(metadata); } else { @@ -388,4 +392,55 @@ void ProCamera2Client::onFrameAvailable(int32_t frameId, } 
+bool ProCamera2Client::enforceRequestPermissions(CameraMetadata& metadata) { + + const int pid = IPCThreadState::self()->getCallingPid(); + const int selfPid = getpid(); + camera_metadata_entry_t entry; + + /** + * Mixin default important security values + * - android.led.transmit = defaulted ON + */ + CameraMetadata staticInfo = mDevice->info(); + entry = staticInfo.find(ANDROID_LED_AVAILABLE_LEDS); + for(size_t i = 0; i < entry.count; ++i) { + uint8_t led = entry.data.u8[i]; + + switch(led) { + case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: { + uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON; + if (!metadata.exists(ANDROID_LED_TRANSMIT)) { + metadata.update(ANDROID_LED_TRANSMIT, + &transmitDefault, 1); + } + break; + } + } + } + + // We can do anything! + if (pid == selfPid) { + return true; + } + + /** + * Permission check special fields in the request + * - android.led.transmit = android.permission.CAMERA_DISABLE_TRANSMIT + */ + entry = metadata.find(ANDROID_LED_TRANSMIT); + if (entry.count > 0 && entry.data.u8[0] != ANDROID_LED_TRANSMIT_ON) { + String16 permissionString = + String16("android.permission.CAMERA_DISABLE_TRANSMIT_LED"); + if (!checkCallingPermission(permissionString)) { + const int uid = IPCThreadState::self()->getCallingUid(); + ALOGE("Permission Denial: " + "can't disable transmit LED pid=%d, uid=%d", pid, uid); + return false; + } + } + + return true; +} + } // namespace android diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/ProCamera2Client.h index 1dec263..faee9f9 100644 --- a/services/camera/libcameraservice/ProCamera2Client.h +++ b/services/camera/libcameraservice/ProCamera2Client.h @@ -110,6 +110,7 @@ private: static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL; /** Utility members */ + bool enforceRequestPermissions(CameraMetadata& metadata); // Whether or not we have an exclusive lock on the device // - if no we can't modify the request queue. diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index d13fe8b..a304b35 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -1588,6 +1588,32 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { ATRACE_CALL(); status_t res; + /** + * Mixin default important security values + * - android.led.transmit = defaulted ON + */ + camera_metadata_ro_entry_t entry = staticInfo(ANDROID_LED_AVAILABLE_LEDS, + /*minimumCount*/0); + for(size_t i = 0; i < entry.count; ++i) { + uint8_t led = entry.data.u8[i]; + + switch(led) { + // Transmit LED is unconditionally on when using + // the android.hardware.Camera API + case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: { + uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON; + res = request->update(ANDROID_LED_TRANSMIT, + &transmitDefault, 1); + if (res != OK) return res; + break; + } + } + } + + /** + * Construct metadata from parameters + */ + uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL; res = request->update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1); -- cgit v1.1 From b5f28d4749b898d92fe5e56236b417e37b6fe84f Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 25 Apr 2013 15:11:19 -0700 Subject: Handle the case where an asynchronous prepare was initiated and then the mediaplayer was reset. 
Change-Id: Ib241747c5dc002b88a3854569c1f8340b2a8ef41 related-to-bug: 8688986 --- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 7 +++++++ media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp | 16 ++++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index 607ec6a..b89b1c8 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -1257,6 +1257,13 @@ void NuPlayer::onSourceNotify(const sp &msg) { switch (what) { case Source::kWhatPrepared: { + if (mSource == NULL) { + // This is a stale notification from a source that was + // asynchronously preparing when the client called reset(). + // We handled the reset, the source is gone. + break; + } + int32_t err; CHECK(msg->findInt32("err", &err)); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index bdafb29..68b9623 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -333,6 +333,14 @@ status_t NuPlayerDriver::reset() { case STATE_RESET_IN_PROGRESS: return INVALID_OPERATION; + case STATE_PREPARING: + { + CHECK(mIsAsyncPrepare); + + notifyListener(MEDIA_PREPARED); + break; + } + default: break; } @@ -503,6 +511,14 @@ void NuPlayerDriver::notifySetDataSourceCompleted(status_t err) { void NuPlayerDriver::notifyPrepareCompleted(status_t err) { Mutex::Autolock autoLock(mLock); + if (mState != STATE_PREPARING) { + // We were preparing asynchronously when the client called + // reset(), we sent a premature "prepared" notification and + // then initiated the reset. This notification is stale. + CHECK(mState == STATE_RESET_IN_PROGRESS || mState == STATE_IDLE); + return; + } + CHECK_EQ(mState, STATE_PREPARING); mAsyncResult = err; -- cgit v1.1 From c86ef45279185b474bd6af0a7ae407f8ab577f13 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 26 Apr 2013 08:42:50 -0700 Subject: Revert "Remove all traces of wifi display sink implementation and supporting code." This reverts commit 3a9682a86ead84d6f60d3f3aa01b2b4d34af983d. 
--- media/libstagefright/wifi-display/Android.mk | 76 ++ .../libstagefright/wifi-display/MediaReceiver.cpp | 328 ++++++ media/libstagefright/wifi-display/MediaReceiver.h | 111 ++ media/libstagefright/wifi-display/MediaSender.cpp | 16 + media/libstagefright/wifi-display/MediaSender.h | 1 + media/libstagefright/wifi-display/SNTPClient.cpp | 174 +++ media/libstagefright/wifi-display/SNTPClient.h | 62 ++ media/libstagefright/wifi-display/TimeSyncer.cpp | 338 ++++++ media/libstagefright/wifi-display/TimeSyncer.h | 109 ++ media/libstagefright/wifi-display/nettest.cpp | 400 +++++++ .../wifi-display/rtp/RTPAssembler.cpp | 328 ++++++ .../libstagefright/wifi-display/rtp/RTPAssembler.h | 92 ++ .../wifi-display/rtp/RTPReceiver.cpp | 1153 ++++++++++++++++++++ .../libstagefright/wifi-display/rtp/RTPReceiver.h | 125 +++ .../libstagefright/wifi-display/rtp/RTPSender.cpp | 11 + media/libstagefright/wifi-display/rtp/RTPSender.h | 1 + media/libstagefright/wifi-display/rtptest.cpp | 565 ++++++++++ .../wifi-display/sink/DirectRenderer.cpp | 625 +++++++++++ .../wifi-display/sink/DirectRenderer.h | 82 ++ .../wifi-display/sink/WifiDisplaySink.cpp | 917 ++++++++++++++++ .../wifi-display/sink/WifiDisplaySink.h | 196 ++++ .../wifi-display/source/PlaybackSession.cpp | 85 ++ .../wifi-display/source/WifiDisplaySource.cpp | 14 + .../wifi-display/source/WifiDisplaySource.h | 3 + media/libstagefright/wifi-display/udptest.cpp | 116 ++ media/libstagefright/wifi-display/wfd.cpp | 125 ++- 26 files changed, 6049 insertions(+), 4 deletions(-) create mode 100644 media/libstagefright/wifi-display/MediaReceiver.cpp create mode 100644 media/libstagefright/wifi-display/MediaReceiver.h create mode 100644 media/libstagefright/wifi-display/SNTPClient.cpp create mode 100644 media/libstagefright/wifi-display/SNTPClient.h create mode 100644 media/libstagefright/wifi-display/TimeSyncer.cpp create mode 100644 media/libstagefright/wifi-display/TimeSyncer.h create mode 100644 media/libstagefright/wifi-display/nettest.cpp create mode 100644 media/libstagefright/wifi-display/rtp/RTPAssembler.cpp create mode 100644 media/libstagefright/wifi-display/rtp/RTPAssembler.h create mode 100644 media/libstagefright/wifi-display/rtp/RTPReceiver.cpp create mode 100644 media/libstagefright/wifi-display/rtp/RTPReceiver.h create mode 100644 media/libstagefright/wifi-display/rtptest.cpp create mode 100644 media/libstagefright/wifi-display/sink/DirectRenderer.cpp create mode 100644 media/libstagefright/wifi-display/sink/DirectRenderer.h create mode 100644 media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp create mode 100644 media/libstagefright/wifi-display/sink/WifiDisplaySink.h create mode 100644 media/libstagefright/wifi-display/udptest.cpp diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index 061ae89..f99ef60 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -4,10 +4,17 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ ANetworkSession.cpp \ + MediaReceiver.cpp \ MediaSender.cpp \ Parameters.cpp \ ParsedMessage.cpp \ + rtp/RTPAssembler.cpp \ + rtp/RTPReceiver.cpp \ rtp/RTPSender.cpp \ + sink/DirectRenderer.cpp \ + sink/WifiDisplaySink.cpp \ + SNTPClient.cpp \ + TimeSyncer.cpp \ source/Converter.cpp \ source/MediaPuller.cpp \ source/PlaybackSession.cpp \ @@ -60,3 +67,72 @@ LOCAL_MODULE:= wfd LOCAL_MODULE_TAGS := debug include $(BUILD_EXECUTABLE) + +################################################################################ + +include 
$(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + udptest.cpp \ + +LOCAL_SHARED_LIBRARIES:= \ + libbinder \ + libgui \ + libmedia \ + libstagefright \ + libstagefright_foundation \ + libstagefright_wfd \ + libutils \ + liblog \ + +LOCAL_MODULE:= udptest + +LOCAL_MODULE_TAGS := debug + +include $(BUILD_EXECUTABLE) + +################################################################################ + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + rtptest.cpp \ + +LOCAL_SHARED_LIBRARIES:= \ + libbinder \ + libgui \ + libmedia \ + libstagefright \ + libstagefright_foundation \ + libstagefright_wfd \ + libutils \ + liblog \ + +LOCAL_MODULE:= rtptest + +LOCAL_MODULE_TAGS := debug + +include $(BUILD_EXECUTABLE) + +################################################################################ + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + nettest.cpp \ + +LOCAL_SHARED_LIBRARIES:= \ + libbinder \ + libgui \ + libmedia \ + libstagefright \ + libstagefright_foundation \ + libstagefright_wfd \ + libutils \ + liblog \ + +LOCAL_MODULE:= nettest + +LOCAL_MODULE_TAGS := debug + +include $(BUILD_EXECUTABLE) diff --git a/media/libstagefright/wifi-display/MediaReceiver.cpp b/media/libstagefright/wifi-display/MediaReceiver.cpp new file mode 100644 index 0000000..364acb9 --- /dev/null +++ b/media/libstagefright/wifi-display/MediaReceiver.cpp @@ -0,0 +1,328 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaReceiver" +#include + +#include "MediaReceiver.h" + +#include "ANetworkSession.h" +#include "AnotherPacketSource.h" +#include "rtp/RTPReceiver.h" + +#include +#include +#include +#include +#include + +namespace android { + +MediaReceiver::MediaReceiver( + const sp &netSession, + const sp ¬ify) + : mNetSession(netSession), + mNotify(notify), + mMode(MODE_UNDEFINED), + mGeneration(0), + mInitStatus(OK), + mInitDoneCount(0) { +} + +MediaReceiver::~MediaReceiver() { +} + +ssize_t MediaReceiver::addTrack( + RTPReceiver::TransportMode rtpMode, + RTPReceiver::TransportMode rtcpMode, + int32_t *localRTPPort) { + if (mMode != MODE_UNDEFINED) { + return INVALID_OPERATION; + } + + size_t trackIndex = mTrackInfos.size(); + + TrackInfo info; + + sp notify = new AMessage(kWhatReceiverNotify, id()); + notify->setInt32("generation", mGeneration); + notify->setSize("trackIndex", trackIndex); + + info.mReceiver = new RTPReceiver(mNetSession, notify); + looper()->registerHandler(info.mReceiver); + + info.mReceiver->registerPacketType( + 33, RTPReceiver::PACKETIZATION_TRANSPORT_STREAM); + + info.mReceiver->registerPacketType( + 96, RTPReceiver::PACKETIZATION_AAC); + + info.mReceiver->registerPacketType( + 97, RTPReceiver::PACKETIZATION_H264); + + status_t err = info.mReceiver->initAsync( + rtpMode, + rtcpMode, + localRTPPort); + + if (err != OK) { + looper()->unregisterHandler(info.mReceiver->id()); + info.mReceiver.clear(); + + return err; + } + + mTrackInfos.push_back(info); + + return trackIndex; +} + +status_t MediaReceiver::connectTrack( + size_t trackIndex, + const char *remoteHost, + int32_t remoteRTPPort, + int32_t remoteRTCPPort) { + if (trackIndex >= mTrackInfos.size()) { + return -ERANGE; + } + + TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); + return info->mReceiver->connect(remoteHost, remoteRTPPort, remoteRTCPPort); +} + +status_t MediaReceiver::initAsync(Mode mode) { + if ((mode == MODE_TRANSPORT_STREAM || mode == MODE_TRANSPORT_STREAM_RAW) + && mTrackInfos.size() > 1) { + return INVALID_OPERATION; + } + + sp msg = new AMessage(kWhatInit, id()); + msg->setInt32("mode", mode); + msg->post(); + + return OK; +} + +void MediaReceiver::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatInit: + { + int32_t mode; + CHECK(msg->findInt32("mode", &mode)); + + CHECK_EQ(mMode, MODE_UNDEFINED); + mMode = (Mode)mode; + + if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) { + notifyInitDone(mInitStatus); + } + + mTSParser = new ATSParser( + ATSParser::ALIGNED_VIDEO_DATA + | ATSParser::TS_TIMESTAMPS_ARE_ABSOLUTE); + + mFormatKnownMask = 0; + break; + } + + case kWhatReceiverNotify: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + if (generation != mGeneration) { + break; + } + + onReceiverNotify(msg); + break; + } + + default: + TRESPASS(); + } +} + +void MediaReceiver::onReceiverNotify(const sp &msg) { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case RTPReceiver::kWhatInitDone: + { + ++mInitDoneCount; + + int32_t err; + CHECK(msg->findInt32("err", &err)); + + if (err != OK) { + mInitStatus = err; + ++mGeneration; + } + + if (mMode != MODE_UNDEFINED) { + if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) { + notifyInitDone(mInitStatus); + } + } + break; + } + + case RTPReceiver::kWhatError: + { + int32_t err; + CHECK(msg->findInt32("err", &err)); + + notifyError(err); + break; + } + + case RTPReceiver::kWhatAccessUnit: + { + size_t 
trackIndex; + CHECK(msg->findSize("trackIndex", &trackIndex)); + + sp accessUnit; + CHECK(msg->findBuffer("accessUnit", &accessUnit)); + + int32_t followsDiscontinuity; + if (!msg->findInt32( + "followsDiscontinuity", &followsDiscontinuity)) { + followsDiscontinuity = 0; + } + + if (mMode == MODE_TRANSPORT_STREAM) { + if (followsDiscontinuity) { + mTSParser->signalDiscontinuity( + ATSParser::DISCONTINUITY_TIME, NULL /* extra */); + } + + for (size_t offset = 0; + offset < accessUnit->size(); offset += 188) { + status_t err = mTSParser->feedTSPacket( + accessUnit->data() + offset, 188); + + if (err != OK) { + notifyError(err); + break; + } + } + + drainPackets(0 /* trackIndex */, ATSParser::VIDEO); + drainPackets(1 /* trackIndex */, ATSParser::AUDIO); + } else { + postAccessUnit(trackIndex, accessUnit, NULL); + } + break; + } + + case RTPReceiver::kWhatPacketLost: + { + notifyPacketLost(); + break; + } + + default: + TRESPASS(); + } +} + +void MediaReceiver::drainPackets( + size_t trackIndex, ATSParser::SourceType type) { + sp source = + static_cast( + mTSParser->getSource(type).get()); + + if (source == NULL) { + return; + } + + sp format; + if (!(mFormatKnownMask & (1ul << trackIndex))) { + sp meta = source->getFormat(); + CHECK(meta != NULL); + + CHECK_EQ((status_t)OK, convertMetaDataToMessage(meta, &format)); + + mFormatKnownMask |= 1ul << trackIndex; + } + + status_t finalResult; + while (source->hasBufferAvailable(&finalResult)) { + sp accessUnit; + status_t err = source->dequeueAccessUnit(&accessUnit); + if (err == OK) { + postAccessUnit(trackIndex, accessUnit, format); + format.clear(); + } else if (err != INFO_DISCONTINUITY) { + notifyError(err); + } + } + + if (finalResult != OK) { + notifyError(finalResult); + } +} + +void MediaReceiver::notifyInitDone(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatInitDone); + notify->setInt32("err", err); + notify->post(); +} + +void MediaReceiver::notifyError(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); +} + +void MediaReceiver::notifyPacketLost() { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatPacketLost); + notify->post(); +} + +void MediaReceiver::postAccessUnit( + size_t trackIndex, + const sp &accessUnit, + const sp &format) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatAccessUnit); + notify->setSize("trackIndex", trackIndex); + notify->setBuffer("accessUnit", accessUnit); + + if (format != NULL) { + notify->setMessage("format", format); + } + + notify->post(); +} + +status_t MediaReceiver::informSender( + size_t trackIndex, const sp ¶ms) { + if (trackIndex >= mTrackInfos.size()) { + return -ERANGE; + } + + TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); + return info->mReceiver->informSender(params); +} + +} // namespace android + + diff --git a/media/libstagefright/wifi-display/MediaReceiver.h b/media/libstagefright/wifi-display/MediaReceiver.h new file mode 100644 index 0000000..afbb407 --- /dev/null +++ b/media/libstagefright/wifi-display/MediaReceiver.h @@ -0,0 +1,111 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "ATSParser.h" +#include "rtp/RTPReceiver.h" + +namespace android { + +struct ABuffer; +struct ANetworkSession; +struct AMessage; +struct ATSParser; + +// This class facilitates receiving of media data for one or more tracks +// over RTP. Either a 1:1 track to RTP channel mapping is used or a single +// RTP channel provides the data for a transport stream that is consequently +// demuxed and its track's data provided to the observer. +struct MediaReceiver : public AHandler { + enum { + kWhatInitDone, + kWhatError, + kWhatAccessUnit, + kWhatPacketLost, + }; + + MediaReceiver( + const sp &netSession, + const sp ¬ify); + + ssize_t addTrack( + RTPReceiver::TransportMode rtpMode, + RTPReceiver::TransportMode rtcpMode, + int32_t *localRTPPort); + + status_t connectTrack( + size_t trackIndex, + const char *remoteHost, + int32_t remoteRTPPort, + int32_t remoteRTCPPort); + + enum Mode { + MODE_UNDEFINED, + MODE_TRANSPORT_STREAM, + MODE_TRANSPORT_STREAM_RAW, + MODE_ELEMENTARY_STREAMS, + }; + status_t initAsync(Mode mode); + + status_t informSender(size_t trackIndex, const sp ¶ms); + +protected: + virtual void onMessageReceived(const sp &msg); + virtual ~MediaReceiver(); + +private: + enum { + kWhatInit, + kWhatReceiverNotify, + }; + + struct TrackInfo { + sp mReceiver; + }; + + sp mNetSession; + sp mNotify; + + Mode mMode; + int32_t mGeneration; + + Vector mTrackInfos; + + status_t mInitStatus; + size_t mInitDoneCount; + + sp mTSParser; + uint32_t mFormatKnownMask; + + void onReceiverNotify(const sp &msg); + + void drainPackets(size_t trackIndex, ATSParser::SourceType type); + + void notifyInitDone(status_t err); + void notifyError(status_t err); + void notifyPacketLost(); + + void postAccessUnit( + size_t trackIndex, + const sp &accessUnit, + const sp &format); + + DISALLOW_EVIL_CONSTRUCTORS(MediaReceiver); +}; + +} // namespace android + diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp index 8a3566f..33af66d 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -341,6 +341,22 @@ void MediaSender::onSenderNotify(const sp &msg) { break; } + case kWhatInformSender: + { + int64_t avgLatencyUs; + CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs)); + + int64_t maxLatencyUs; + CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs)); + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatInformSender); + notify->setInt64("avgLatencyUs", avgLatencyUs); + notify->setInt64("maxLatencyUs", maxLatencyUs); + notify->post(); + break; + } + default: TRESPASS(); } diff --git a/media/libstagefright/wifi-display/MediaSender.h b/media/libstagefright/wifi-display/MediaSender.h index 64722c5..04538ea 100644 --- a/media/libstagefright/wifi-display/MediaSender.h +++ b/media/libstagefright/wifi-display/MediaSender.h @@ -43,6 +43,7 @@ struct MediaSender : public AHandler { kWhatInitDone, kWhatError, kWhatNetworkStall, + kWhatInformSender, }; MediaSender( diff --git a/media/libstagefright/wifi-display/SNTPClient.cpp 
b/media/libstagefright/wifi-display/SNTPClient.cpp new file mode 100644 index 0000000..5c0af6a --- /dev/null +++ b/media/libstagefright/wifi-display/SNTPClient.cpp @@ -0,0 +1,174 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "SNTPClient.h" + +#include +#include + +#include +#include +#include +#include +#include + +namespace android { + +SNTPClient::SNTPClient() { +} + +status_t SNTPClient::requestTime(const char *host) { + struct hostent *ent; + int64_t requestTimeNTP, requestTimeUs; + ssize_t n; + int64_t responseTimeUs, responseTimeNTP; + int64_t originateTimeNTP, receiveTimeNTP, transmitTimeNTP; + int64_t roundTripTimeNTP, clockOffsetNTP; + + status_t err = UNKNOWN_ERROR; + + int s = socket(AF_INET, SOCK_DGRAM, 0); + + if (s < 0) { + err = -errno; + + goto bail; + } + + ent = gethostbyname(host); + + if (ent == NULL) { + err = -ENOENT; + goto bail2; + } + + struct sockaddr_in hostAddr; + memset(hostAddr.sin_zero, 0, sizeof(hostAddr.sin_zero)); + hostAddr.sin_family = AF_INET; + hostAddr.sin_port = htons(kNTPPort); + hostAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr; + + uint8_t packet[kNTPPacketSize]; + memset(packet, 0, sizeof(packet)); + + packet[0] = kNTPModeClient | (kNTPVersion << 3); + + requestTimeNTP = getNowNTP(); + requestTimeUs = ALooper::GetNowUs(); + writeTimeStamp(&packet[kNTPTransmitTimeOffset], requestTimeNTP); + + n = sendto( + s, packet, sizeof(packet), 0, + (const struct sockaddr *)&hostAddr, sizeof(hostAddr)); + + if (n < 0) { + err = -errno; + goto bail2; + } + + memset(packet, 0, sizeof(packet)); + + do { + n = recv(s, packet, sizeof(packet), 0); + } while (n < 0 && errno == EINTR); + + if (n < 0) { + err = -errno; + goto bail2; + } + + responseTimeUs = ALooper::GetNowUs(); + + responseTimeNTP = requestTimeNTP + makeNTP(responseTimeUs - requestTimeUs); + + originateTimeNTP = readTimeStamp(&packet[kNTPOriginateTimeOffset]); + receiveTimeNTP = readTimeStamp(&packet[kNTPReceiveTimeOffset]); + transmitTimeNTP = readTimeStamp(&packet[kNTPTransmitTimeOffset]); + + roundTripTimeNTP = + makeNTP(responseTimeUs - requestTimeUs) + - (transmitTimeNTP - receiveTimeNTP); + + clockOffsetNTP = + ((receiveTimeNTP - originateTimeNTP) + + (transmitTimeNTP - responseTimeNTP)) / 2; + + mTimeReferenceNTP = responseTimeNTP + clockOffsetNTP; + mTimeReferenceUs = responseTimeUs; + mRoundTripTimeNTP = roundTripTimeNTP; + + err = OK; + +bail2: + close(s); + s = -1; + +bail: + return err; +} + +int64_t SNTPClient::adjustTimeUs(int64_t timeUs) const { + uint64_t nowNTP = + mTimeReferenceNTP + makeNTP(timeUs - mTimeReferenceUs); + + int64_t nowUs = + (nowNTP >> 32) * 1000000ll + + ((nowNTP & 0xffffffff) * 1000000ll) / (1ll << 32); + + nowUs -= ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll; + + return nowUs; +} + +// static +void SNTPClient::writeTimeStamp(uint8_t *dst, uint64_t ntpTime) { + *dst++ = (ntpTime >> 56) & 0xff; + *dst++ = (ntpTime >> 48) & 0xff; + *dst++ = (ntpTime >> 40) & 0xff; + *dst++ = 
(ntpTime >> 32) & 0xff; + *dst++ = (ntpTime >> 24) & 0xff; + *dst++ = (ntpTime >> 16) & 0xff; + *dst++ = (ntpTime >> 8) & 0xff; + *dst++ = ntpTime & 0xff; +} + +// static +uint64_t SNTPClient::readTimeStamp(const uint8_t *dst) { + return U64_AT(dst); +} + +// static +uint64_t SNTPClient::getNowNTP() { + struct timeval tv; + gettimeofday(&tv, NULL /* time zone */); + + uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec; + + nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll; + + return makeNTP(nowUs); +} + +// static +uint64_t SNTPClient::makeNTP(uint64_t deltaUs) { + uint64_t hi = deltaUs / 1000000ll; + uint64_t lo = ((1ll << 32) * (deltaUs % 1000000ll)) / 1000000ll; + + return (hi << 32) | lo; +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/SNTPClient.h b/media/libstagefright/wifi-display/SNTPClient.h new file mode 100644 index 0000000..967d1fc --- /dev/null +++ b/media/libstagefright/wifi-display/SNTPClient.h @@ -0,0 +1,62 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SNTP_CLIENT_H_ + +#define SNTP_CLIENT_H_ + +#include +#include + +namespace android { + +// Implementation of the SNTP (Simple Network Time Protocol) +struct SNTPClient { + SNTPClient(); + + status_t requestTime(const char *host); + + // given a time obtained from ALooper::GetNowUs() + // return the number of us elapsed since Jan 1 1970 00:00:00 (UTC). + int64_t adjustTimeUs(int64_t timeUs) const; + +private: + enum { + kNTPPort = 123, + kNTPPacketSize = 48, + kNTPModeClient = 3, + kNTPVersion = 3, + kNTPTransmitTimeOffset = 40, + kNTPOriginateTimeOffset = 24, + kNTPReceiveTimeOffset = 32, + }; + + uint64_t mTimeReferenceNTP; + int64_t mTimeReferenceUs; + int64_t mRoundTripTimeNTP; + + static void writeTimeStamp(uint8_t *dst, uint64_t ntpTime); + static uint64_t readTimeStamp(const uint8_t *dst); + + static uint64_t getNowNTP(); + static uint64_t makeNTP(uint64_t deltaUs); + + DISALLOW_EVIL_CONSTRUCTORS(SNTPClient); +}; + +} // namespace android + +#endif // SNTP_CLIENT_H_ diff --git a/media/libstagefright/wifi-display/TimeSyncer.cpp b/media/libstagefright/wifi-display/TimeSyncer.cpp new file mode 100644 index 0000000..cb429bc --- /dev/null +++ b/media/libstagefright/wifi-display/TimeSyncer.cpp @@ -0,0 +1,338 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NEBUG 0 +#define LOG_TAG "TimeSyncer" +#include + +#include "TimeSyncer.h" + +#include "ANetworkSession.h" + +#include +#include +#include +#include +#include +#include + +namespace android { + +TimeSyncer::TimeSyncer( + const sp &netSession, const sp ¬ify) + : mNetSession(netSession), + mNotify(notify), + mIsServer(false), + mConnected(false), + mUDPSession(0), + mSeqNo(0), + mTotalTimeUs(0.0), + mPendingT1(0ll), + mTimeoutGeneration(0) { +} + +TimeSyncer::~TimeSyncer() { +} + +void TimeSyncer::startServer(unsigned localPort) { + sp msg = new AMessage(kWhatStartServer, id()); + msg->setInt32("localPort", localPort); + msg->post(); +} + +void TimeSyncer::startClient(const char *remoteHost, unsigned remotePort) { + sp msg = new AMessage(kWhatStartClient, id()); + msg->setString("remoteHost", remoteHost); + msg->setInt32("remotePort", remotePort); + msg->post(); +} + +void TimeSyncer::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatStartClient: + { + AString remoteHost; + CHECK(msg->findString("remoteHost", &remoteHost)); + + int32_t remotePort; + CHECK(msg->findInt32("remotePort", &remotePort)); + + sp notify = new AMessage(kWhatUDPNotify, id()); + + CHECK_EQ((status_t)OK, + mNetSession->createUDPSession( + 0 /* localPort */, + remoteHost.c_str(), + remotePort, + notify, + &mUDPSession)); + + postSendPacket(); + break; + } + + case kWhatStartServer: + { + mIsServer = true; + + int32_t localPort; + CHECK(msg->findInt32("localPort", &localPort)); + + sp notify = new AMessage(kWhatUDPNotify, id()); + + CHECK_EQ((status_t)OK, + mNetSession->createUDPSession( + localPort, notify, &mUDPSession)); + + break; + } + + case kWhatSendPacket: + { + if (mHistory.size() == 0) { + ALOGI("starting batch"); + } + + TimeInfo ti; + memset(&ti, 0, sizeof(ti)); + + ti.mT1 = ALooper::GetNowUs(); + + CHECK_EQ((status_t)OK, + mNetSession->sendRequest( + mUDPSession, &ti, sizeof(ti))); + + mPendingT1 = ti.mT1; + postTimeout(); + break; + } + + case kWhatTimedOut: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mTimeoutGeneration) { + break; + } + + ALOGI("timed out, sending another request"); + postSendPacket(); + break; + } + + case kWhatUDPNotify: + { + int32_t reason; + CHECK(msg->findInt32("reason", &reason)); + + switch (reason) { + case ANetworkSession::kWhatError: + { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + int32_t err; + CHECK(msg->findInt32("err", &err)); + + AString detail; + CHECK(msg->findString("detail", &detail)); + + ALOGE("An error occurred in session %d (%d, '%s/%s').", + sessionID, + err, + detail.c_str(), + strerror(-err)); + + mNetSession->destroySession(sessionID); + + cancelTimeout(); + + notifyError(err); + break; + } + + case ANetworkSession::kWhatDatagram: + { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + sp packet; + CHECK(msg->findBuffer("data", &packet)); + + int64_t arrivalTimeUs; + CHECK(packet->meta()->findInt64( + "arrivalTimeUs", &arrivalTimeUs)); + + CHECK_EQ(packet->size(), sizeof(TimeInfo)); + + TimeInfo *ti = (TimeInfo *)packet->data(); + + if (mIsServer) { + if (!mConnected) { + AString fromAddr; + CHECK(msg->findString("fromAddr", &fromAddr)); + + int32_t fromPort; + CHECK(msg->findInt32("fromPort", &fromPort)); + + CHECK_EQ((status_t)OK, + mNetSession->connectUDPSession( + mUDPSession, fromAddr.c_str(), fromPort)); + + mConnected = true; + } + + ti->mT2 = arrivalTimeUs; + ti->mT3 = ALooper::GetNowUs(); + + 
CHECK_EQ((status_t)OK, + mNetSession->sendRequest( + mUDPSession, ti, sizeof(*ti))); + } else { + if (ti->mT1 != mPendingT1) { + break; + } + + cancelTimeout(); + mPendingT1 = 0; + + ti->mT4 = arrivalTimeUs; + + // One way delay for a packet to travel from client + // to server or back (assumed to be the same either way). + int64_t delay = + (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2; + + // Offset between the client clock (T1, T4) and the + // server clock (T2, T3) timestamps. + int64_t offset = + (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2; + + mHistory.push_back(*ti); + + ALOGV("delay = %lld us,\toffset %lld us", + delay, + offset); + + if (mHistory.size() < kNumPacketsPerBatch) { + postSendPacket(1000000ll / 30); + } else { + notifyOffset(); + + ALOGI("batch done"); + + mHistory.clear(); + postSendPacket(kBatchDelayUs); + } + } + break; + } + + default: + TRESPASS(); + } + + break; + } + + default: + TRESPASS(); + } +} + +void TimeSyncer::postSendPacket(int64_t delayUs) { + (new AMessage(kWhatSendPacket, id()))->post(delayUs); +} + +void TimeSyncer::postTimeout() { + sp msg = new AMessage(kWhatTimedOut, id()); + msg->setInt32("generation", mTimeoutGeneration); + msg->post(kTimeoutDelayUs); +} + +void TimeSyncer::cancelTimeout() { + ++mTimeoutGeneration; +} + +void TimeSyncer::notifyError(status_t err) { + if (mNotify == NULL) { + looper()->stop(); + return; + } + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); +} + +// static +int TimeSyncer::CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2) { + int64_t rt1 = ti1->mT4 - ti1->mT1; + int64_t rt2 = ti2->mT4 - ti2->mT1; + + if (rt1 < rt2) { + return -1; + } else if (rt1 > rt2) { + return 1; + } + + return 0; +} + +void TimeSyncer::notifyOffset() { + mHistory.sort(CompareRountripTime); + + int64_t sum = 0ll; + size_t count = 0; + + // Only consider the third of the information associated with the best + // (smallest) roundtrip times. + for (size_t i = 0; i < mHistory.size() / 3; ++i) { + const TimeInfo *ti = &mHistory[i]; + +#if 0 + // One way delay for a packet to travel from client + // to server or back (assumed to be the same either way). + int64_t delay = + (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2; +#endif + + // Offset between the client clock (T1, T4) and the + // server clock (T2, T3) timestamps. + int64_t offset = + (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2; + + ALOGV("(%d) RT: %lld us, offset: %lld us", + i, ti->mT4 - ti->mT1, offset); + + sum += offset; + ++count; + } + + if (mNotify == NULL) { + ALOGI("avg. offset is %lld", sum / count); + return; + } + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatTimeOffset); + notify->setInt64("offset", sum / count); + notify->post(); +} + +} // namespace android diff --git a/media/libstagefright/wifi-display/TimeSyncer.h b/media/libstagefright/wifi-display/TimeSyncer.h new file mode 100644 index 0000000..4e7571f --- /dev/null +++ b/media/libstagefright/wifi-display/TimeSyncer.h @@ -0,0 +1,109 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef TIME_SYNCER_H_ + +#define TIME_SYNCER_H_ + +#include + +namespace android { + +struct ANetworkSession; + +/* + TimeSyncer allows us to synchronize time between a client and a server. + The client sends a UDP packet containing its send-time to the server, + the server sends that packet back to the client amended with information + about when it was received as well as the time the reply was sent back. + Finally the client receives the reply and has now enough information to + compute the clock offset between client and server assuming that packet + exchange is symmetric, i.e. time for a packet client->server and + server->client is roughly equal. + This exchange is repeated a number of times and the average offset computed + over the 30% of packets that had the lowest roundtrip times. + The offset is determined every 10 secs to account for slight differences in + clock frequency. +*/ +struct TimeSyncer : public AHandler { + enum { + kWhatError, + kWhatTimeOffset, + }; + TimeSyncer( + const sp &netSession, + const sp ¬ify); + + void startServer(unsigned localPort); + void startClient(const char *remoteHost, unsigned remotePort); + +protected: + virtual ~TimeSyncer(); + + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatStartServer, + kWhatStartClient, + kWhatUDPNotify, + kWhatSendPacket, + kWhatTimedOut, + }; + + struct TimeInfo { + int64_t mT1; // client timestamp at send + int64_t mT2; // server timestamp at receive + int64_t mT3; // server timestamp at send + int64_t mT4; // client timestamp at receive + }; + + enum { + kNumPacketsPerBatch = 30, + }; + static const int64_t kTimeoutDelayUs = 500000ll; + static const int64_t kBatchDelayUs = 60000000ll; // every minute + + sp mNetSession; + sp mNotify; + + bool mIsServer; + bool mConnected; + int32_t mUDPSession; + uint32_t mSeqNo; + double mTotalTimeUs; + + Vector mHistory; + + int64_t mPendingT1; + int32_t mTimeoutGeneration; + + void postSendPacket(int64_t delayUs = 0ll); + + void postTimeout(); + void cancelTimeout(); + + void notifyError(status_t err); + void notifyOffset(); + + static int CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2); + + DISALLOW_EVIL_CONSTRUCTORS(TimeSyncer); +}; + +} // namespace android + +#endif // TIME_SYNCER_H_ diff --git a/media/libstagefright/wifi-display/nettest.cpp b/media/libstagefright/wifi-display/nettest.cpp new file mode 100644 index 0000000..0779bf5 --- /dev/null +++ b/media/libstagefright/wifi-display/nettest.cpp @@ -0,0 +1,400 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NEBUG 0 +#define LOG_TAG "nettest" +#include + +#include "ANetworkSession.h" +#include "TimeSyncer.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { + +struct TestHandler : public AHandler { + TestHandler(const sp &netSession); + + void listen(int32_t port); + void connect(const char *host, int32_t port); + +protected: + virtual ~TestHandler(); + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kTimeSyncerPort = 8123, + }; + + enum { + kWhatListen, + kWhatConnect, + kWhatTimeSyncerNotify, + kWhatNetNotify, + kWhatSendMore, + kWhatStop, + }; + + sp mNetSession; + sp mTimeSyncer; + + int32_t mServerSessionID; + int32_t mSessionID; + + int64_t mTimeOffsetUs; + bool mTimeOffsetValid; + + int32_t mCounter; + + int64_t mMaxDelayMs; + + void dumpDelay(int32_t counter, int64_t delayMs); + + DISALLOW_EVIL_CONSTRUCTORS(TestHandler); +}; + +TestHandler::TestHandler(const sp &netSession) + : mNetSession(netSession), + mServerSessionID(0), + mSessionID(0), + mTimeOffsetUs(-1ll), + mTimeOffsetValid(false), + mCounter(0), + mMaxDelayMs(-1ll) { +} + +TestHandler::~TestHandler() { +} + +void TestHandler::listen(int32_t port) { + sp msg = new AMessage(kWhatListen, id()); + msg->setInt32("port", port); + msg->post(); +} + +void TestHandler::connect(const char *host, int32_t port) { + sp msg = new AMessage(kWhatConnect, id()); + msg->setString("host", host); + msg->setInt32("port", port); + msg->post(); +} + +void TestHandler::dumpDelay(int32_t counter, int64_t delayMs) { + static const int64_t kMinDelayMs = 0; + static const int64_t kMaxDelayMs = 300; + + const char *kPattern = "########################################"; + size_t kPatternSize = strlen(kPattern); + + int n = (kPatternSize * (delayMs - kMinDelayMs)) + / (kMaxDelayMs - kMinDelayMs); + + if (n < 0) { + n = 0; + } else if ((size_t)n > kPatternSize) { + n = kPatternSize; + } + + if (delayMs > mMaxDelayMs) { + mMaxDelayMs = delayMs; + } + + ALOGI("[%d] (%4lld ms / %4lld ms) %s", + counter, + delayMs, + mMaxDelayMs, + kPattern + kPatternSize - n); +} + +void TestHandler::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatListen: + { + sp notify = new AMessage(kWhatTimeSyncerNotify, id()); + mTimeSyncer = new TimeSyncer(mNetSession, notify); + looper()->registerHandler(mTimeSyncer); + + notify = new AMessage(kWhatNetNotify, id()); + + int32_t port; + CHECK(msg->findInt32("port", &port)); + + struct in_addr ifaceAddr; + ifaceAddr.s_addr = INADDR_ANY; + + CHECK_EQ((status_t)OK, + mNetSession->createTCPDatagramSession( + ifaceAddr, + port, + notify, + &mServerSessionID)); + break; + } + + case kWhatConnect: + { + sp notify = new AMessage(kWhatTimeSyncerNotify, id()); + mTimeSyncer = new TimeSyncer(mNetSession, notify); + looper()->registerHandler(mTimeSyncer); + mTimeSyncer->startServer(kTimeSyncerPort); + + AString host; + CHECK(msg->findString("host", &host)); + + int32_t port; + CHECK(msg->findInt32("port", &port)); + + notify = new AMessage(kWhatNetNotify, id()); + + CHECK_EQ((status_t)OK, + mNetSession->createTCPDatagramSession( + 0 /* localPort */, + host.c_str(), + port, + notify, + &mSessionID)); + break; + } + + case kWhatNetNotify: + { + int32_t reason; + CHECK(msg->findInt32("reason", &reason)); + + switch (reason) { + case ANetworkSession::kWhatConnected: + { + ALOGI("kWhatConnected"); + 
+ (new AMessage(kWhatSendMore, id()))->post(); + break; + } + + case ANetworkSession::kWhatClientConnected: + { + ALOGI("kWhatClientConnected"); + + CHECK_EQ(mSessionID, 0); + CHECK(msg->findInt32("sessionID", &mSessionID)); + + AString clientIP; + CHECK(msg->findString("client-ip", &clientIP)); + + mTimeSyncer->startClient(clientIP.c_str(), kTimeSyncerPort); + break; + } + + case ANetworkSession::kWhatDatagram: + { + sp packet; + CHECK(msg->findBuffer("data", &packet)); + + CHECK_EQ(packet->size(), 12u); + + int32_t counter = U32_AT(packet->data()); + int64_t timeUs = U64_AT(packet->data() + 4); + + if (mTimeOffsetValid) { + timeUs -= mTimeOffsetUs; + int64_t nowUs = ALooper::GetNowUs(); + int64_t delayMs = (nowUs - timeUs) / 1000ll; + + dumpDelay(counter, delayMs); + } else { + ALOGI("received %d", counter); + } + break; + } + + case ANetworkSession::kWhatError: + { + ALOGE("kWhatError"); + break; + } + + default: + TRESPASS(); + } + break; + } + + case kWhatTimeSyncerNotify: + { + CHECK(msg->findInt64("offset", &mTimeOffsetUs)); + mTimeOffsetValid = true; + break; + } + + case kWhatSendMore: + { + uint8_t buffer[4 + 8]; + buffer[0] = mCounter >> 24; + buffer[1] = (mCounter >> 16) & 0xff; + buffer[2] = (mCounter >> 8) & 0xff; + buffer[3] = mCounter & 0xff; + + int64_t nowUs = ALooper::GetNowUs(); + + buffer[4] = nowUs >> 56; + buffer[5] = (nowUs >> 48) & 0xff; + buffer[6] = (nowUs >> 40) & 0xff; + buffer[7] = (nowUs >> 32) & 0xff; + buffer[8] = (nowUs >> 24) & 0xff; + buffer[9] = (nowUs >> 16) & 0xff; + buffer[10] = (nowUs >> 8) & 0xff; + buffer[11] = nowUs & 0xff; + + ++mCounter; + + CHECK_EQ((status_t)OK, + mNetSession->sendRequest( + mSessionID, + buffer, + sizeof(buffer), + true /* timeValid */, + nowUs)); + + msg->post(100000ll); + break; + } + + case kWhatStop: + { + if (mSessionID != 0) { + mNetSession->destroySession(mSessionID); + mSessionID = 0; + } + + if (mServerSessionID != 0) { + mNetSession->destroySession(mServerSessionID); + mServerSessionID = 0; + } + + looper()->stop(); + break; + } + + default: + TRESPASS(); + } +} + +} // namespace android + +static void usage(const char *me) { + fprintf(stderr, + "usage: %s -c host:port\tconnect to remote host\n" + " -l port \tlisten\n", + me); +} + +int main(int argc, char **argv) { + using namespace android; + + // srand(time(NULL)); + + ProcessState::self()->startThreadPool(); + + DataSource::RegisterDefaultSniffers(); + + int32_t connectToPort = -1; + AString connectToHost; + + int32_t listenOnPort = -1; + + int res; + while ((res = getopt(argc, argv, "hc:l:")) >= 0) { + switch (res) { + case 'c': + { + const char *colonPos = strrchr(optarg, ':'); + + if (colonPos == NULL) { + usage(argv[0]); + exit(1); + } + + connectToHost.setTo(optarg, colonPos - optarg); + + char *end; + connectToPort = strtol(colonPos + 1, &end, 10); + + if (*end != '\0' || end == colonPos + 1 + || connectToPort < 0 || connectToPort > 65535) { + fprintf(stderr, "Illegal port specified.\n"); + exit(1); + } + break; + } + + case 'l': + { + char *end; + listenOnPort = strtol(optarg, &end, 10); + + if (*end != '\0' || end == optarg + || listenOnPort < 0 || listenOnPort > 65535) { + fprintf(stderr, "Illegal port specified.\n"); + exit(1); + } + break; + } + + case '?': + case 'h': + usage(argv[0]); + exit(1); + } + } + + if ((listenOnPort < 0 && connectToPort < 0) + || (listenOnPort >= 0 && connectToPort >= 0)) { + fprintf(stderr, + "You need to select either client or server mode.\n"); + exit(1); + } + + sp netSession = new ANetworkSession; + 
netSession->start(); + + sp looper = new ALooper; + + sp handler = new TestHandler(netSession); + looper->registerHandler(handler); + + if (listenOnPort) { + handler->listen(listenOnPort); + } + + if (connectToPort >= 0) { + handler->connect(connectToHost.c_str(), connectToPort); + } + + looper->start(true /* runOnCallingThread */); + + return 0; +} diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp new file mode 100644 index 0000000..7a96081 --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp @@ -0,0 +1,328 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "RTPAssembler" +#include + +#include "RTPAssembler.h" + +#include +#include +#include +#include +#include + +namespace android { + +RTPReceiver::Assembler::Assembler(const sp ¬ify) + : mNotify(notify) { +} + +void RTPReceiver::Assembler::postAccessUnit( + const sp &accessUnit, bool followsDiscontinuity) { + sp notify = mNotify->dup(); + notify->setInt32("what", RTPReceiver::kWhatAccessUnit); + notify->setBuffer("accessUnit", accessUnit); + notify->setInt32("followsDiscontinuity", followsDiscontinuity); + notify->post(); +} +//////////////////////////////////////////////////////////////////////////////// + +RTPReceiver::TSAssembler::TSAssembler(const sp ¬ify) + : Assembler(notify), + mSawDiscontinuity(false) { +} + +void RTPReceiver::TSAssembler::signalDiscontinuity() { + mSawDiscontinuity = true; +} + +status_t RTPReceiver::TSAssembler::processPacket(const sp &packet) { + int32_t rtpTime; + CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); + + packet->meta()->setInt64("timeUs", (rtpTime * 100ll) / 9); + + postAccessUnit(packet, mSawDiscontinuity); + + if (mSawDiscontinuity) { + mSawDiscontinuity = false; + } + + return OK; +} + +//////////////////////////////////////////////////////////////////////////////// + +RTPReceiver::H264Assembler::H264Assembler(const sp ¬ify) + : Assembler(notify), + mState(0), + mIndicator(0), + mNALType(0), + mAccessUnitRTPTime(0) { +} + +void RTPReceiver::H264Assembler::signalDiscontinuity() { + reset(); +} + +status_t RTPReceiver::H264Assembler::processPacket(const sp &packet) { + status_t err = internalProcessPacket(packet); + + if (err != OK) { + reset(); + } + + return err; +} + +status_t RTPReceiver::H264Assembler::internalProcessPacket( + const sp &packet) { + const uint8_t *data = packet->data(); + size_t size = packet->size(); + + switch (mState) { + case 0: + { + if (size < 1 || (data[0] & 0x80)) { + ALOGV("Malformed H264 RTP packet (empty or F-bit set)"); + return ERROR_MALFORMED; + } + + unsigned nalType = data[0] & 0x1f; + if (nalType >= 1 && nalType <= 23) { + addSingleNALUnit(packet); + ALOGV("added single NAL packet"); + } else if (nalType == 28) { + // FU-A + unsigned indicator = data[0]; + CHECK((indicator & 0x1f) == 28); + + if (size < 2) { + ALOGV("Malformed H264 FU-A packet 
(single byte)"); + return ERROR_MALFORMED; + } + + if (!(data[1] & 0x80)) { + ALOGV("Malformed H264 FU-A packet (no start bit)"); + return ERROR_MALFORMED; + } + + mIndicator = data[0]; + mNALType = data[1] & 0x1f; + uint32_t nri = (data[0] >> 5) & 3; + + clearAccumulator(); + + uint8_t byte = mNALType | (nri << 5); + appendToAccumulator(&byte, 1); + appendToAccumulator(data + 2, size - 2); + + int32_t rtpTime; + CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); + mAccumulator->meta()->setInt32("rtp-time", rtpTime); + + if (data[1] & 0x40) { + // Huh? End bit also set on the first buffer. + addSingleNALUnit(mAccumulator); + clearAccumulator(); + + ALOGV("added FU-A"); + break; + } + + mState = 1; + } else if (nalType == 24) { + // STAP-A + + status_t err = addSingleTimeAggregationPacket(packet); + if (err != OK) { + return err; + } + } else { + ALOGV("Malformed H264 packet (unknown type %d)", nalType); + return ERROR_UNSUPPORTED; + } + break; + } + + case 1: + { + if (size < 2 + || data[0] != mIndicator + || (data[1] & 0x1f) != mNALType + || (data[1] & 0x80)) { + ALOGV("Malformed H264 FU-A packet (indicator, " + "type or start bit mismatch)"); + + return ERROR_MALFORMED; + } + + appendToAccumulator(data + 2, size - 2); + + if (data[1] & 0x40) { + addSingleNALUnit(mAccumulator); + + clearAccumulator(); + mState = 0; + + ALOGV("added FU-A"); + } + break; + } + + default: + TRESPASS(); + } + + int32_t marker; + CHECK(packet->meta()->findInt32("M", &marker)); + + if (marker) { + flushAccessUnit(); + } + + return OK; +} + +void RTPReceiver::H264Assembler::reset() { + mNALUnits.clear(); + + clearAccumulator(); + mState = 0; +} + +void RTPReceiver::H264Assembler::clearAccumulator() { + if (mAccumulator != NULL) { + // XXX Too expensive. + mAccumulator.clear(); + } +} + +void RTPReceiver::H264Assembler::appendToAccumulator( + const void *data, size_t size) { + if (mAccumulator == NULL) { + mAccumulator = new ABuffer(size); + memcpy(mAccumulator->data(), data, size); + return; + } + + if (mAccumulator->size() + size > mAccumulator->capacity()) { + sp buf = new ABuffer(mAccumulator->size() + size); + memcpy(buf->data(), mAccumulator->data(), mAccumulator->size()); + buf->setRange(0, mAccumulator->size()); + + int32_t rtpTime; + if (mAccumulator->meta()->findInt32("rtp-time", &rtpTime)) { + buf->meta()->setInt32("rtp-time", rtpTime); + } + + mAccumulator = buf; + } + + memcpy(mAccumulator->data() + mAccumulator->size(), data, size); + mAccumulator->setRange(0, mAccumulator->size() + size); +} + +void RTPReceiver::H264Assembler::addSingleNALUnit(const sp &packet) { + if (mNALUnits.empty()) { + int32_t rtpTime; + CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); + + mAccessUnitRTPTime = rtpTime; + } + + mNALUnits.push_back(packet); +} + +void RTPReceiver::H264Assembler::flushAccessUnit() { + if (mNALUnits.empty()) { + return; + } + + size_t totalSize = 0; + for (List >::iterator it = mNALUnits.begin(); + it != mNALUnits.end(); ++it) { + totalSize += 4 + (*it)->size(); + } + + sp accessUnit = new ABuffer(totalSize); + size_t offset = 0; + for (List >::iterator it = mNALUnits.begin(); + it != mNALUnits.end(); ++it) { + const sp nalUnit = *it; + + memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4); + + memcpy(accessUnit->data() + offset + 4, + nalUnit->data(), + nalUnit->size()); + + offset += 4 + nalUnit->size(); + } + + mNALUnits.clear(); + + accessUnit->meta()->setInt64("timeUs", mAccessUnitRTPTime * 100ll / 9ll); + postAccessUnit(accessUnit, false /* followsDiscontinuity */); +} + 
+status_t RTPReceiver::H264Assembler::addSingleTimeAggregationPacket( + const sp &packet) { + const uint8_t *data = packet->data(); + size_t size = packet->size(); + + if (size < 3) { + ALOGV("Malformed H264 STAP-A packet (too small)"); + return ERROR_MALFORMED; + } + + int32_t rtpTime; + CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); + + ++data; + --size; + while (size >= 2) { + size_t nalSize = (data[0] << 8) | data[1]; + + if (size < nalSize + 2) { + ALOGV("Malformed H264 STAP-A packet (incomplete NAL unit)"); + return ERROR_MALFORMED; + } + + sp unit = new ABuffer(nalSize); + memcpy(unit->data(), &data[2], nalSize); + + unit->meta()->setInt32("rtp-time", rtpTime); + + addSingleNALUnit(unit); + + data += 2 + nalSize; + size -= 2 + nalSize; + } + + if (size != 0) { + ALOGV("Unexpected padding at end of STAP-A packet."); + } + + ALOGV("added STAP-A"); + + return OK; +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.h b/media/libstagefright/wifi-display/rtp/RTPAssembler.h new file mode 100644 index 0000000..e456d32 --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPAssembler.h @@ -0,0 +1,92 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef RTP_ASSEMBLER_H_ + +#define RTP_ASSEMBLER_H_ + +#include "RTPReceiver.h" + +namespace android { + +// A helper class to reassemble the payload of RTP packets into access +// units depending on the packetization scheme. 
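// The intended calling convention is easiest to see condensed. The sketch below mirrors
// RTPReceiver::Source::dequeueMore() later in this patch (member names are taken from that
// code), so it illustrates existing usage rather than new API:
//
//     int32_t packetType;
//     CHECK(packet->meta()->findInt32("PT", &packetType));    // RTP payload type
//
//     if (packetType != mActivePacketType) {
//         // Picks a TSAssembler or H264Assembler based on the registered packetization mode.
//         mActiveAssembler = mReceiver->makeAssembler(packetType);
//         mActivePacketType = packetType;
//     }
//
//     if (mActiveAssembler != NULL) {
//         // A completed access unit is delivered via postAccessUnit(), i.e. a
//         // kWhatAccessUnit notification on the receiver's notify message.
//         status_t err = mActiveAssembler->processPacket(packet);
//         if (err != OK) {
//             ALOGV("assembler returned error %d", err);
//         }
//     }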
+struct RTPReceiver::Assembler : public RefBase { + Assembler(const sp ¬ify); + + virtual void signalDiscontinuity() = 0; + virtual status_t processPacket(const sp &packet) = 0; + +protected: + virtual ~Assembler() {} + + void postAccessUnit( + const sp &accessUnit, bool followsDiscontinuity); + +private: + sp mNotify; + + DISALLOW_EVIL_CONSTRUCTORS(Assembler); +}; + +struct RTPReceiver::TSAssembler : public RTPReceiver::Assembler { + TSAssembler(const sp ¬ify); + + virtual void signalDiscontinuity(); + virtual status_t processPacket(const sp &packet); + +private: + bool mSawDiscontinuity; + + DISALLOW_EVIL_CONSTRUCTORS(TSAssembler); +}; + +struct RTPReceiver::H264Assembler : public RTPReceiver::Assembler { + H264Assembler(const sp ¬ify); + + virtual void signalDiscontinuity(); + virtual status_t processPacket(const sp &packet); + +private: + int32_t mState; + + uint8_t mIndicator; + uint8_t mNALType; + + sp mAccumulator; + + List > mNALUnits; + int32_t mAccessUnitRTPTime; + + status_t internalProcessPacket(const sp &packet); + + void addSingleNALUnit(const sp &packet); + status_t addSingleTimeAggregationPacket(const sp &packet); + + void flushAccessUnit(); + + void clearAccumulator(); + void appendToAccumulator(const void *data, size_t size); + + void reset(); + + DISALLOW_EVIL_CONSTRUCTORS(H264Assembler); +}; + +} // namespace android + +#endif // RTP_ASSEMBLER_H_ + diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp new file mode 100644 index 0000000..8fa1dae --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp @@ -0,0 +1,1153 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "RTPReceiver" +#include + +#include "RTPAssembler.h" +#include "RTPReceiver.h" + +#include "ANetworkSession.h" + +#include +#include +#include +#include +#include +#include + +#define TRACK_PACKET_LOSS 0 + +namespace android { + +//////////////////////////////////////////////////////////////////////////////// + +struct RTPReceiver::Source : public AHandler { + Source(RTPReceiver *receiver, uint32_t ssrc); + + void onPacketReceived(uint16_t seq, const sp &buffer); + + void addReportBlock(uint32_t ssrc, const sp &buf); + +protected: + virtual ~Source(); + + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatRetransmit, + kWhatDeclareLost, + }; + + static const uint32_t kMinSequential = 2; + static const uint32_t kMaxDropout = 3000; + static const uint32_t kMaxMisorder = 100; + static const uint32_t kRTPSeqMod = 1u << 16; + static const int64_t kReportIntervalUs = 10000000ll; + + RTPReceiver *mReceiver; + uint32_t mSSRC; + bool mFirst; + uint16_t mMaxSeq; + uint32_t mCycles; + uint32_t mBaseSeq; + uint32_t mReceived; + uint32_t mExpectedPrior; + uint32_t mReceivedPrior; + + int64_t mFirstArrivalTimeUs; + int64_t mFirstRTPTimeUs; + + // Ordered by extended seq number. 
+ List > mPackets; + + enum StatusBits { + STATUS_DECLARED_LOST = 1, + STATUS_REQUESTED_RETRANSMISSION = 2, + STATUS_ARRIVED_LATE = 4, + }; +#if TRACK_PACKET_LOSS + KeyedVector mLostPackets; +#endif + + void modifyPacketStatus(int32_t extSeqNo, uint32_t mask); + + int32_t mAwaitingExtSeqNo; + bool mRequestedRetransmission; + + int32_t mActivePacketType; + sp mActiveAssembler; + + int64_t mNextReportTimeUs; + + int32_t mNumDeclaredLost; + int32_t mNumDeclaredLostPrior; + + int32_t mRetransmitGeneration; + int32_t mDeclareLostGeneration; + bool mDeclareLostTimerPending; + + void queuePacket(const sp &packet); + void dequeueMore(); + + sp getNextPacket(); + void resync(); + + void postRetransmitTimer(int64_t delayUs); + void postDeclareLostTimer(int64_t delayUs); + void cancelTimers(); + + DISALLOW_EVIL_CONSTRUCTORS(Source); +}; + +//////////////////////////////////////////////////////////////////////////////// + +RTPReceiver::Source::Source(RTPReceiver *receiver, uint32_t ssrc) + : mReceiver(receiver), + mSSRC(ssrc), + mFirst(true), + mMaxSeq(0), + mCycles(0), + mBaseSeq(0), + mReceived(0), + mExpectedPrior(0), + mReceivedPrior(0), + mFirstArrivalTimeUs(-1ll), + mFirstRTPTimeUs(-1ll), + mAwaitingExtSeqNo(-1), + mRequestedRetransmission(false), + mActivePacketType(-1), + mNextReportTimeUs(-1ll), + mNumDeclaredLost(0), + mNumDeclaredLostPrior(0), + mRetransmitGeneration(0), + mDeclareLostGeneration(0), + mDeclareLostTimerPending(false) { +} + +RTPReceiver::Source::~Source() { +} + +void RTPReceiver::Source::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatRetransmit: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mRetransmitGeneration) { + break; + } + + mRequestedRetransmission = true; + mReceiver->requestRetransmission(mSSRC, mAwaitingExtSeqNo); + + modifyPacketStatus( + mAwaitingExtSeqNo, STATUS_REQUESTED_RETRANSMISSION); + break; + } + + case kWhatDeclareLost: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mDeclareLostGeneration) { + break; + } + + cancelTimers(); + + ALOGV("Lost packet extSeqNo %d %s", + mAwaitingExtSeqNo, + mRequestedRetransmission ? "*" : ""); + + mRequestedRetransmission = false; + if (mActiveAssembler != NULL) { + mActiveAssembler->signalDiscontinuity(); + } + + modifyPacketStatus(mAwaitingExtSeqNo, STATUS_DECLARED_LOST); + + // resync(); + ++mAwaitingExtSeqNo; + ++mNumDeclaredLost; + + mReceiver->notifyPacketLost(); + + dequeueMore(); + break; + } + + default: + TRESPASS(); + } +} + +void RTPReceiver::Source::onPacketReceived( + uint16_t seq, const sp &buffer) { + if (mFirst) { + buffer->setInt32Data(mCycles | seq); + queuePacket(buffer); + + mFirst = false; + mBaseSeq = seq; + mMaxSeq = seq; + ++mReceived; + return; + } + + uint16_t udelta = seq - mMaxSeq; + + if (udelta < kMaxDropout) { + // In order, with permissible gap. + + if (seq < mMaxSeq) { + // Sequence number wrapped - count another 64K cycle + mCycles += kRTPSeqMod; + } + + mMaxSeq = seq; + + ++mReceived; + } else if (udelta <= kRTPSeqMod - kMaxMisorder) { + // The sequence number made a very large jump + return; + } else { + // Duplicate or reordered packet. 
+ } + + buffer->setInt32Data(mCycles | seq); + queuePacket(buffer); +} + +void RTPReceiver::Source::queuePacket(const sp &packet) { + int32_t newExtendedSeqNo = packet->int32Data(); + + if (mFirstArrivalTimeUs < 0ll) { + mFirstArrivalTimeUs = ALooper::GetNowUs(); + + uint32_t rtpTime; + CHECK(packet->meta()->findInt32("rtp-time", (int32_t *)&rtpTime)); + + mFirstRTPTimeUs = (rtpTime * 100ll) / 9ll; + } + + if (mAwaitingExtSeqNo >= 0 && newExtendedSeqNo < mAwaitingExtSeqNo) { + // We're no longer interested in these. They're old. + ALOGV("dropping stale extSeqNo %d", newExtendedSeqNo); + + modifyPacketStatus(newExtendedSeqNo, STATUS_ARRIVED_LATE); + return; + } + + if (mPackets.empty()) { + mPackets.push_back(packet); + dequeueMore(); + return; + } + + List >::iterator firstIt = mPackets.begin(); + List >::iterator it = --mPackets.end(); + for (;;) { + int32_t extendedSeqNo = (*it)->int32Data(); + + if (extendedSeqNo == newExtendedSeqNo) { + // Duplicate packet. + return; + } + + if (extendedSeqNo < newExtendedSeqNo) { + // Insert new packet after the one at "it". + mPackets.insert(++it, packet); + break; + } + + if (it == firstIt) { + // Insert new packet before the first existing one. + mPackets.insert(it, packet); + break; + } + + --it; + } + + dequeueMore(); +} + +void RTPReceiver::Source::dequeueMore() { + int64_t nowUs = ALooper::GetNowUs(); + if (mNextReportTimeUs < 0ll || nowUs >= mNextReportTimeUs) { + if (mNextReportTimeUs >= 0ll) { + uint32_t expected = (mMaxSeq | mCycles) - mBaseSeq + 1; + + uint32_t expectedInterval = expected - mExpectedPrior; + mExpectedPrior = expected; + + uint32_t receivedInterval = mReceived - mReceivedPrior; + mReceivedPrior = mReceived; + + int64_t lostInterval = + (int64_t)expectedInterval - (int64_t)receivedInterval; + + int32_t declaredLostInterval = + mNumDeclaredLost - mNumDeclaredLostPrior; + + mNumDeclaredLostPrior = mNumDeclaredLost; + + if (declaredLostInterval > 0) { + ALOGI("lost %lld packets (%.2f %%), declared %d lost\n", + lostInterval, + 100.0f * lostInterval / expectedInterval, + declaredLostInterval); + } + } + + mNextReportTimeUs = nowUs + kReportIntervalUs; + +#if TRACK_PACKET_LOSS + for (size_t i = 0; i < mLostPackets.size(); ++i) { + int32_t key = mLostPackets.keyAt(i); + uint32_t value = mLostPackets.valueAt(i); + + AString status; + if (value & STATUS_REQUESTED_RETRANSMISSION) { + status.append("retrans "); + } + if (value & STATUS_ARRIVED_LATE) { + status.append("arrived-late "); + } + ALOGI("Packet %d declared lost %s", key, status.c_str()); + } +#endif + } + + sp packet; + while ((packet = getNextPacket()) != NULL) { + if (mDeclareLostTimerPending) { + cancelTimers(); + } + + CHECK_GE(mAwaitingExtSeqNo, 0); +#if TRACK_PACKET_LOSS + mLostPackets.removeItem(mAwaitingExtSeqNo); +#endif + + int32_t packetType; + CHECK(packet->meta()->findInt32("PT", &packetType)); + + if (packetType != mActivePacketType) { + mActiveAssembler = mReceiver->makeAssembler(packetType); + mActivePacketType = packetType; + } + + if (mActiveAssembler != NULL) { + status_t err = mActiveAssembler->processPacket(packet); + if (err != OK) { + ALOGV("assembler returned error %d", err); + } + } + + ++mAwaitingExtSeqNo; + } + + if (mDeclareLostTimerPending) { + return; + } + + if (mPackets.empty()) { + return; + } + + CHECK_GE(mAwaitingExtSeqNo, 0); + + const sp &firstPacket = *mPackets.begin(); + + uint32_t rtpTime; + CHECK(firstPacket->meta()->findInt32( + "rtp-time", (int32_t *)&rtpTime)); + + + int64_t rtpUs = (rtpTime * 100ll) / 9ll; + + int64_t 
maxArrivalTimeUs = + mFirstArrivalTimeUs + rtpUs - mFirstRTPTimeUs; + + nowUs = ALooper::GetNowUs(); + + CHECK_LT(mAwaitingExtSeqNo, firstPacket->int32Data()); + + ALOGV("waiting for %d, comparing against %d, %lld us left", + mAwaitingExtSeqNo, + firstPacket->int32Data(), + maxArrivalTimeUs - nowUs); + + postDeclareLostTimer(maxArrivalTimeUs + kPacketLostAfterUs); + + if (kRequestRetransmissionAfterUs > 0ll) { + postRetransmitTimer( + maxArrivalTimeUs + kRequestRetransmissionAfterUs); + } +} + +sp RTPReceiver::Source::getNextPacket() { + if (mPackets.empty()) { + return NULL; + } + + int32_t extSeqNo = (*mPackets.begin())->int32Data(); + + if (mAwaitingExtSeqNo < 0) { + mAwaitingExtSeqNo = extSeqNo; + } else if (extSeqNo != mAwaitingExtSeqNo) { + return NULL; + } + + sp packet = *mPackets.begin(); + mPackets.erase(mPackets.begin()); + + return packet; +} + +void RTPReceiver::Source::resync() { + mAwaitingExtSeqNo = -1; +} + +void RTPReceiver::Source::addReportBlock( + uint32_t ssrc, const sp &buf) { + uint32_t extMaxSeq = mMaxSeq | mCycles; + uint32_t expected = extMaxSeq - mBaseSeq + 1; + + int64_t lost = (int64_t)expected - (int64_t)mReceived; + if (lost > 0x7fffff) { + lost = 0x7fffff; + } else if (lost < -0x800000) { + lost = -0x800000; + } + + uint32_t expectedInterval = expected - mExpectedPrior; + mExpectedPrior = expected; + + uint32_t receivedInterval = mReceived - mReceivedPrior; + mReceivedPrior = mReceived; + + int64_t lostInterval = expectedInterval - receivedInterval; + + uint8_t fractionLost; + if (expectedInterval == 0 || lostInterval <=0) { + fractionLost = 0; + } else { + fractionLost = (lostInterval << 8) / expectedInterval; + } + + uint8_t *ptr = buf->data() + buf->size(); + + ptr[0] = ssrc >> 24; + ptr[1] = (ssrc >> 16) & 0xff; + ptr[2] = (ssrc >> 8) & 0xff; + ptr[3] = ssrc & 0xff; + + ptr[4] = fractionLost; + + ptr[5] = (lost >> 16) & 0xff; + ptr[6] = (lost >> 8) & 0xff; + ptr[7] = lost & 0xff; + + ptr[8] = extMaxSeq >> 24; + ptr[9] = (extMaxSeq >> 16) & 0xff; + ptr[10] = (extMaxSeq >> 8) & 0xff; + ptr[11] = extMaxSeq & 0xff; + + // XXX TODO: + + ptr[12] = 0x00; // interarrival jitter + ptr[13] = 0x00; + ptr[14] = 0x00; + ptr[15] = 0x00; + + ptr[16] = 0x00; // last SR + ptr[17] = 0x00; + ptr[18] = 0x00; + ptr[19] = 0x00; + + ptr[20] = 0x00; // delay since last SR + ptr[21] = 0x00; + ptr[22] = 0x00; + ptr[23] = 0x00; +} + +//////////////////////////////////////////////////////////////////////////////// + +RTPReceiver::RTPReceiver( + const sp &netSession, + const sp ¬ify, + uint32_t flags) + : mNetSession(netSession), + mNotify(notify), + mFlags(flags), + mRTPMode(TRANSPORT_UNDEFINED), + mRTCPMode(TRANSPORT_UNDEFINED), + mRTPSessionID(0), + mRTCPSessionID(0), + mRTPConnected(false), + mRTCPConnected(false), + mRTPClientSessionID(0), + mRTCPClientSessionID(0) { +} + +RTPReceiver::~RTPReceiver() { + if (mRTCPClientSessionID != 0) { + mNetSession->destroySession(mRTCPClientSessionID); + mRTCPClientSessionID = 0; + } + + if (mRTPClientSessionID != 0) { + mNetSession->destroySession(mRTPClientSessionID); + mRTPClientSessionID = 0; + } + + if (mRTCPSessionID != 0) { + mNetSession->destroySession(mRTCPSessionID); + mRTCPSessionID = 0; + } + + if (mRTPSessionID != 0) { + mNetSession->destroySession(mRTPSessionID); + mRTPSessionID = 0; + } +} + +status_t RTPReceiver::initAsync( + TransportMode rtpMode, + TransportMode rtcpMode, + int32_t *outLocalRTPPort) { + if (mRTPMode != TRANSPORT_UNDEFINED + || rtpMode == TRANSPORT_UNDEFINED + || rtpMode == TRANSPORT_NONE + || rtcpMode == 
TRANSPORT_UNDEFINED) { + return INVALID_OPERATION; + } + + CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED); + CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED); + + sp rtpNotify = new AMessage(kWhatRTPNotify, id()); + + sp rtcpNotify; + if (rtcpMode != TRANSPORT_NONE) { + rtcpNotify = new AMessage(kWhatRTCPNotify, id()); + } + + CHECK_EQ(mRTPSessionID, 0); + CHECK_EQ(mRTCPSessionID, 0); + + int32_t localRTPPort; + + struct in_addr ifaceAddr; + ifaceAddr.s_addr = INADDR_ANY; + + for (;;) { + localRTPPort = PickRandomRTPPort(); + + status_t err; + if (rtpMode == TRANSPORT_UDP) { + err = mNetSession->createUDPSession( + localRTPPort, + rtpNotify, + &mRTPSessionID); + } else { + CHECK_EQ(rtpMode, TRANSPORT_TCP); + err = mNetSession->createTCPDatagramSession( + ifaceAddr, + localRTPPort, + rtpNotify, + &mRTPSessionID); + } + + if (err != OK) { + continue; + } + + if (rtcpMode == TRANSPORT_NONE) { + break; + } else if (rtcpMode == TRANSPORT_UDP) { + err = mNetSession->createUDPSession( + localRTPPort + 1, + rtcpNotify, + &mRTCPSessionID); + } else { + CHECK_EQ(rtpMode, TRANSPORT_TCP); + err = mNetSession->createTCPDatagramSession( + ifaceAddr, + localRTPPort + 1, + rtcpNotify, + &mRTCPSessionID); + } + + if (err == OK) { + break; + } + + mNetSession->destroySession(mRTPSessionID); + mRTPSessionID = 0; + } + + mRTPMode = rtpMode; + mRTCPMode = rtcpMode; + *outLocalRTPPort = localRTPPort; + + return OK; +} + +status_t RTPReceiver::connect( + const char *remoteHost, int32_t remoteRTPPort, int32_t remoteRTCPPort) { + status_t err; + + if (mRTPMode == TRANSPORT_UDP) { + CHECK(!mRTPConnected); + + err = mNetSession->connectUDPSession( + mRTPSessionID, remoteHost, remoteRTPPort); + + if (err != OK) { + notifyInitDone(err); + return err; + } + + ALOGI("connectUDPSession RTP successful."); + + mRTPConnected = true; + } + + if (mRTCPMode == TRANSPORT_UDP) { + CHECK(!mRTCPConnected); + + err = mNetSession->connectUDPSession( + mRTCPSessionID, remoteHost, remoteRTCPPort); + + if (err != OK) { + notifyInitDone(err); + return err; + } + + scheduleSendRR(); + + ALOGI("connectUDPSession RTCP successful."); + + mRTCPConnected = true; + } + + if (mRTPConnected + && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) { + notifyInitDone(OK); + } + + return OK; +} + +status_t RTPReceiver::informSender(const sp ¶ms) { + if (!mRTCPConnected) { + return INVALID_OPERATION; + } + + int64_t avgLatencyUs; + CHECK(params->findInt64("avgLatencyUs", &avgLatencyUs)); + + int64_t maxLatencyUs; + CHECK(params->findInt64("maxLatencyUs", &maxLatencyUs)); + + sp buf = new ABuffer(28); + + uint8_t *ptr = buf->data(); + ptr[0] = 0x80 | 0; + ptr[1] = 204; // APP + ptr[2] = 0; + + CHECK((buf->size() % 4) == 0u); + ptr[3] = (buf->size() / 4) - 1; + + ptr[4] = kSourceID >> 24; // SSRC + ptr[5] = (kSourceID >> 16) & 0xff; + ptr[6] = (kSourceID >> 8) & 0xff; + ptr[7] = kSourceID & 0xff; + ptr[8] = 'l'; + ptr[9] = 'a'; + ptr[10] = 't'; + ptr[11] = 'e'; + + ptr[12] = avgLatencyUs >> 56; + ptr[13] = (avgLatencyUs >> 48) & 0xff; + ptr[14] = (avgLatencyUs >> 40) & 0xff; + ptr[15] = (avgLatencyUs >> 32) & 0xff; + ptr[16] = (avgLatencyUs >> 24) & 0xff; + ptr[17] = (avgLatencyUs >> 16) & 0xff; + ptr[18] = (avgLatencyUs >> 8) & 0xff; + ptr[19] = avgLatencyUs & 0xff; + + ptr[20] = maxLatencyUs >> 56; + ptr[21] = (maxLatencyUs >> 48) & 0xff; + ptr[22] = (maxLatencyUs >> 40) & 0xff; + ptr[23] = (maxLatencyUs >> 32) & 0xff; + ptr[24] = (maxLatencyUs >> 24) & 0xff; + ptr[25] = (maxLatencyUs >> 16) & 0xff; + ptr[26] = (maxLatencyUs >> 8) & 0xff; + ptr[27] = 
maxLatencyUs & 0xff; + + mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); + + return OK; +} + +void RTPReceiver::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatRTPNotify: + case kWhatRTCPNotify: + onNetNotify(msg->what() == kWhatRTPNotify, msg); + break; + + case kWhatSendRR: + { + onSendRR(); + break; + } + + default: + TRESPASS(); + } +} + +void RTPReceiver::onNetNotify(bool isRTP, const sp &msg) { + int32_t reason; + CHECK(msg->findInt32("reason", &reason)); + + switch (reason) { + case ANetworkSession::kWhatError: + { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + int32_t err; + CHECK(msg->findInt32("err", &err)); + + int32_t errorOccuredDuringSend; + CHECK(msg->findInt32("send", &errorOccuredDuringSend)); + + AString detail; + CHECK(msg->findString("detail", &detail)); + + ALOGE("An error occurred during %s in session %d " + "(%d, '%s' (%s)).", + errorOccuredDuringSend ? "send" : "receive", + sessionID, + err, + detail.c_str(), + strerror(-err)); + + mNetSession->destroySession(sessionID); + + if (sessionID == mRTPSessionID) { + mRTPSessionID = 0; + } else if (sessionID == mRTCPSessionID) { + mRTCPSessionID = 0; + } else if (sessionID == mRTPClientSessionID) { + mRTPClientSessionID = 0; + } else if (sessionID == mRTCPClientSessionID) { + mRTCPClientSessionID = 0; + } + + if (!mRTPConnected + || (mRTCPMode != TRANSPORT_NONE && !mRTCPConnected)) { + notifyInitDone(err); + break; + } + + notifyError(err); + break; + } + + case ANetworkSession::kWhatDatagram: + { + sp data; + CHECK(msg->findBuffer("data", &data)); + + if (isRTP) { + if (mFlags & FLAG_AUTO_CONNECT) { + AString fromAddr; + CHECK(msg->findString("fromAddr", &fromAddr)); + + int32_t fromPort; + CHECK(msg->findInt32("fromPort", &fromPort)); + + CHECK_EQ((status_t)OK, + connect( + fromAddr.c_str(), fromPort, fromPort + 1)); + + mFlags &= ~FLAG_AUTO_CONNECT; + } + + onRTPData(data); + } else { + onRTCPData(data); + } + break; + } + + case ANetworkSession::kWhatClientConnected: + { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + if (isRTP) { + CHECK_EQ(mRTPMode, TRANSPORT_TCP); + + if (mRTPClientSessionID != 0) { + // We only allow a single client connection. + mNetSession->destroySession(sessionID); + sessionID = 0; + break; + } + + mRTPClientSessionID = sessionID; + mRTPConnected = true; + } else { + CHECK_EQ(mRTCPMode, TRANSPORT_TCP); + + if (mRTCPClientSessionID != 0) { + // We only allow a single client connection. + mNetSession->destroySession(sessionID); + sessionID = 0; + break; + } + + mRTCPClientSessionID = sessionID; + mRTCPConnected = true; + } + + if (mRTPConnected + && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) { + notifyInitDone(OK); + } + break; + } + } +} + +void RTPReceiver::notifyInitDone(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatInitDone); + notify->setInt32("err", err); + notify->post(); +} + +void RTPReceiver::notifyError(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); +} + +void RTPReceiver::notifyPacketLost() { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatPacketLost); + notify->post(); +} + +status_t RTPReceiver::onRTPData(const sp &buffer) { + size_t size = buffer->size(); + if (size < 12) { + // Too short to be a valid RTP header. + return ERROR_MALFORMED; + } + + const uint8_t *data = buffer->data(); + + if ((data[0] >> 6) != 2) { + // Unsupported version. 
+ return ERROR_UNSUPPORTED; + } + + if (data[0] & 0x20) { + // Padding present. + + size_t paddingLength = data[size - 1]; + + if (paddingLength + 12 > size) { + // If we removed this much padding we'd end up with something + // that's too short to be a valid RTP header. + return ERROR_MALFORMED; + } + + size -= paddingLength; + } + + int numCSRCs = data[0] & 0x0f; + + size_t payloadOffset = 12 + 4 * numCSRCs; + + if (size < payloadOffset) { + // Not enough data to fit the basic header and all the CSRC entries. + return ERROR_MALFORMED; + } + + if (data[0] & 0x10) { + // Header eXtension present. + + if (size < payloadOffset + 4) { + // Not enough data to fit the basic header, all CSRC entries + // and the first 4 bytes of the extension header. + + return ERROR_MALFORMED; + } + + const uint8_t *extensionData = &data[payloadOffset]; + + size_t extensionLength = + 4 * (extensionData[2] << 8 | extensionData[3]); + + if (size < payloadOffset + 4 + extensionLength) { + return ERROR_MALFORMED; + } + + payloadOffset += 4 + extensionLength; + } + + uint32_t srcId = U32_AT(&data[8]); + uint32_t rtpTime = U32_AT(&data[4]); + uint16_t seqNo = U16_AT(&data[2]); + + sp meta = buffer->meta(); + meta->setInt32("ssrc", srcId); + meta->setInt32("rtp-time", rtpTime); + meta->setInt32("PT", data[1] & 0x7f); + meta->setInt32("M", data[1] >> 7); + + buffer->setRange(payloadOffset, size - payloadOffset); + + ssize_t index = mSources.indexOfKey(srcId); + sp source; + if (index < 0) { + source = new Source(this, srcId); + looper()->registerHandler(source); + + mSources.add(srcId, source); + } else { + source = mSources.valueAt(index); + } + + source->onPacketReceived(seqNo, buffer); + + return OK; +} + +status_t RTPReceiver::onRTCPData(const sp &data) { + ALOGI("onRTCPData"); + return OK; +} + +void RTPReceiver::addSDES(const sp &buffer) { + uint8_t *data = buffer->data() + buffer->size(); + data[0] = 0x80 | 1; + data[1] = 202; // SDES + data[4] = kSourceID >> 24; // SSRC + data[5] = (kSourceID >> 16) & 0xff; + data[6] = (kSourceID >> 8) & 0xff; + data[7] = kSourceID & 0xff; + + size_t offset = 8; + + data[offset++] = 1; // CNAME + + AString cname = "stagefright@somewhere"; + data[offset++] = cname.size(); + + memcpy(&data[offset], cname.c_str(), cname.size()); + offset += cname.size(); + + data[offset++] = 6; // TOOL + + AString tool = "stagefright/1.0"; + data[offset++] = tool.size(); + + memcpy(&data[offset], tool.c_str(), tool.size()); + offset += tool.size(); + + data[offset++] = 0; + + if ((offset % 4) > 0) { + size_t count = 4 - (offset % 4); + switch (count) { + case 3: + data[offset++] = 0; + case 2: + data[offset++] = 0; + case 1: + data[offset++] = 0; + } + } + + size_t numWords = (offset / 4) - 1; + data[2] = numWords >> 8; + data[3] = numWords & 0xff; + + buffer->setRange(buffer->offset(), buffer->size() + offset); +} + +void RTPReceiver::scheduleSendRR() { + (new AMessage(kWhatSendRR, id()))->post(5000000ll); +} + +void RTPReceiver::onSendRR() { +#if 0 + sp buf = new ABuffer(kMaxUDPPacketSize); + buf->setRange(0, 0); + + uint8_t *ptr = buf->data(); + ptr[0] = 0x80 | 0; + ptr[1] = 201; // RR + ptr[2] = 0; + ptr[3] = 1; + ptr[4] = kSourceID >> 24; // SSRC + ptr[5] = (kSourceID >> 16) & 0xff; + ptr[6] = (kSourceID >> 8) & 0xff; + ptr[7] = kSourceID & 0xff; + + buf->setRange(0, 8); + + size_t numReportBlocks = 0; + for (size_t i = 0; i < mSources.size(); ++i) { + uint32_t ssrc = mSources.keyAt(i); + sp source = mSources.valueAt(i); + + if (numReportBlocks > 31 || buf->size() + 24 > buf->capacity()) { + 
// Cannot fit another report block. + break; + } + + source->addReportBlock(ssrc, buf); + ++numReportBlocks; + } + + ptr[0] |= numReportBlocks; // 5 bit + + size_t sizeInWordsMinus1 = 1 + 6 * numReportBlocks; + ptr[2] = sizeInWordsMinus1 >> 8; + ptr[3] = sizeInWordsMinus1 & 0xff; + + buf->setRange(0, (sizeInWordsMinus1 + 1) * 4); + + addSDES(buf); + + mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); +#endif + + scheduleSendRR(); +} + +status_t RTPReceiver::registerPacketType( + uint8_t packetType, PacketizationMode mode) { + mPacketTypes.add(packetType, mode); + + return OK; +} + +sp RTPReceiver::makeAssembler(uint8_t packetType) { + ssize_t index = mPacketTypes.indexOfKey(packetType); + if (index < 0) { + return NULL; + } + + PacketizationMode mode = mPacketTypes.valueAt(index); + + switch (mode) { + case PACKETIZATION_NONE: + case PACKETIZATION_TRANSPORT_STREAM: + return new TSAssembler(mNotify); + + case PACKETIZATION_H264: + return new H264Assembler(mNotify); + + default: + return NULL; + } +} + +void RTPReceiver::requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo) { + int32_t blp = 0; + + sp buf = new ABuffer(16); + buf->setRange(0, 0); + + uint8_t *ptr = buf->data(); + ptr[0] = 0x80 | 1; // generic NACK + ptr[1] = 205; // TSFB + ptr[2] = 0; + ptr[3] = 3; + ptr[8] = (senderSSRC >> 24) & 0xff; + ptr[9] = (senderSSRC >> 16) & 0xff; + ptr[10] = (senderSSRC >> 8) & 0xff; + ptr[11] = (senderSSRC & 0xff); + ptr[8] = (kSourceID >> 24) & 0xff; + ptr[9] = (kSourceID >> 16) & 0xff; + ptr[10] = (kSourceID >> 8) & 0xff; + ptr[11] = (kSourceID & 0xff); + ptr[12] = (extSeqNo >> 8) & 0xff; + ptr[13] = (extSeqNo & 0xff); + ptr[14] = (blp >> 8) & 0xff; + ptr[15] = (blp & 0xff); + + buf->setRange(0, 16); + + mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); +} + +void RTPReceiver::Source::modifyPacketStatus(int32_t extSeqNo, uint32_t mask) { +#if TRACK_PACKET_LOSS + ssize_t index = mLostPackets.indexOfKey(extSeqNo); + if (index < 0) { + mLostPackets.add(extSeqNo, mask); + } else { + mLostPackets.editValueAt(index) |= mask; + } +#endif +} + +void RTPReceiver::Source::postRetransmitTimer(int64_t timeUs) { + int64_t delayUs = timeUs - ALooper::GetNowUs(); + sp msg = new AMessage(kWhatRetransmit, id()); + msg->setInt32("generation", mRetransmitGeneration); + msg->post(delayUs); +} + +void RTPReceiver::Source::postDeclareLostTimer(int64_t timeUs) { + CHECK(!mDeclareLostTimerPending); + mDeclareLostTimerPending = true; + + int64_t delayUs = timeUs - ALooper::GetNowUs(); + sp msg = new AMessage(kWhatDeclareLost, id()); + msg->setInt32("generation", mDeclareLostGeneration); + msg->post(delayUs); +} + +void RTPReceiver::Source::cancelTimers() { + ++mRetransmitGeneration; + ++mDeclareLostGeneration; + mDeclareLostTimerPending = false; +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.h b/media/libstagefright/wifi-display/rtp/RTPReceiver.h new file mode 100644 index 0000000..240ab2e --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.h @@ -0,0 +1,125 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef RTP_RECEIVER_H_ + +#define RTP_RECEIVER_H_ + +#include "RTPBase.h" + +#include + +namespace android { + +struct ABuffer; +struct ANetworkSession; + +// An object of this class facilitates receiving of media data on an RTP +// channel. The channel is established over a UDP or TCP connection depending +// on which "TransportMode" was chosen. In addition different RTP packetization +// schemes are supported such as "Transport Stream Packets over RTP", +// or "AVC/H.264 encapsulation as specified in RFC 3984 (non-interleaved mode)" +struct RTPReceiver : public RTPBase, public AHandler { + enum { + kWhatInitDone, + kWhatError, + kWhatAccessUnit, + kWhatPacketLost, + }; + + enum Flags { + FLAG_AUTO_CONNECT = 1, + }; + RTPReceiver( + const sp &netSession, + const sp ¬ify, + uint32_t flags = 0); + + status_t registerPacketType( + uint8_t packetType, PacketizationMode mode); + + status_t initAsync( + TransportMode rtpMode, + TransportMode rtcpMode, + int32_t *outLocalRTPPort); + + status_t connect( + const char *remoteHost, + int32_t remoteRTPPort, + int32_t remoteRTCPPort); + + status_t informSender(const sp ¶ms); + +protected: + virtual ~RTPReceiver(); + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatRTPNotify, + kWhatRTCPNotify, + kWhatSendRR, + }; + + enum { + kSourceID = 0xdeadbeef, + kPacketLostAfterUs = 100000, + kRequestRetransmissionAfterUs = -1, + }; + + struct Assembler; + struct H264Assembler; + struct Source; + struct TSAssembler; + + sp mNetSession; + sp mNotify; + uint32_t mFlags; + TransportMode mRTPMode; + TransportMode mRTCPMode; + int32_t mRTPSessionID; + int32_t mRTCPSessionID; + bool mRTPConnected; + bool mRTCPConnected; + + int32_t mRTPClientSessionID; // in TRANSPORT_TCP mode. + int32_t mRTCPClientSessionID; // in TRANSPORT_TCP mode. 
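// The onRTPData() path shown in RTPReceiver.cpp above reduces to computing a
// payload offset from the RFC 3550 fixed header: 12 bytes, plus 4 bytes per
// CSRC entry, plus an optional extension whose length (in 32-bit words) is
// carried in bytes 2..3 of the extension header. A minimal standalone sketch
// of that arithmetic using only the C++ standard library; the function name
// is illustrative and not part of this patch.

#include <cstddef>
#include <cstdint>

// Returns the payload offset, or -1 if the packet is too short / malformed.
static ptrdiff_t rtpPayloadOffset(const uint8_t *data, size_t size) {
    if (size < 12 || (data[0] >> 6) != 2) {      // need the fixed header, version 2
        return -1;
    }
    if (data[0] & 0x20) {                        // P: padding present
        size_t padding = data[size - 1];
        if (padding + 12 > size) {
            return -1;
        }
        size -= padding;
    }
    size_t offset = 12 + 4 * (data[0] & 0x0f);   // CC: number of CSRC entries
    if (size < offset) {
        return -1;
    }
    if (data[0] & 0x10) {                        // X: header extension present
        if (size < offset + 4) {
            return -1;
        }
        size_t extLen = 4 * ((data[offset + 2] << 8) | data[offset + 3]);
        if (size < offset + 4 + extLen) {
            return -1;
        }
        offset += 4 + extLen;
    }
    return (ptrdiff_t)offset;
}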
+ + KeyedVector mPacketTypes; + KeyedVector > mSources; + + void onNetNotify(bool isRTP, const sp &msg); + status_t onRTPData(const sp &data); + status_t onRTCPData(const sp &data); + void onSendRR(); + + void scheduleSendRR(); + void addSDES(const sp &buffer); + + void notifyInitDone(status_t err); + void notifyError(status_t err); + void notifyPacketLost(); + + sp makeAssembler(uint8_t packetType); + + void requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo); + + DISALLOW_EVIL_CONSTRUCTORS(RTPReceiver); +}; + +} // namespace android + +#endif // RTP_RECEIVER_H_ diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp index 095fd97..6bbe650 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -767,6 +767,17 @@ status_t RTPSender::parseTSFB(const uint8_t *data, size_t size) { } status_t RTPSender::parseAPP(const uint8_t *data, size_t size) { + if (!memcmp("late", &data[8], 4)) { + int64_t avgLatencyUs = (int64_t)U64_AT(&data[12]); + int64_t maxLatencyUs = (int64_t)U64_AT(&data[20]); + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatInformSender); + notify->setInt64("avgLatencyUs", avgLatencyUs); + notify->setInt64("maxLatencyUs", maxLatencyUs); + notify->post(); + } + return OK; } diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h index 7dc138a..fefcab7 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.h +++ b/media/libstagefright/wifi-display/rtp/RTPSender.h @@ -37,6 +37,7 @@ struct RTPSender : public RTPBase, public AHandler { kWhatInitDone, kWhatError, kWhatNetworkStall, + kWhatInformSender, }; RTPSender( const sp &netSession, diff --git a/media/libstagefright/wifi-display/rtptest.cpp b/media/libstagefright/wifi-display/rtptest.cpp new file mode 100644 index 0000000..764a38b --- /dev/null +++ b/media/libstagefright/wifi-display/rtptest.cpp @@ -0,0 +1,565 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NEBUG 0 +#define LOG_TAG "rtptest" +#include + +#include "ANetworkSession.h" +#include "rtp/RTPSender.h" +#include "rtp/RTPReceiver.h" +#include "TimeSyncer.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define MEDIA_FILENAME "/sdcard/Frame Counter HD 30FPS_1080p.mp4" + +namespace android { + +struct PacketSource : public RefBase { + PacketSource() {} + + virtual sp getNextAccessUnit() = 0; + +protected: + virtual ~PacketSource() {} + +private: + DISALLOW_EVIL_CONSTRUCTORS(PacketSource); +}; + +struct MediaPacketSource : public PacketSource { + MediaPacketSource() + : mMaxSampleSize(1024 * 1024) { + mExtractor = new NuMediaExtractor; + CHECK_EQ((status_t)OK, + mExtractor->setDataSource(MEDIA_FILENAME)); + + bool haveVideo = false; + for (size_t i = 0; i < mExtractor->countTracks(); ++i) { + sp format; + CHECK_EQ((status_t)OK, mExtractor->getTrackFormat(i, &format)); + + AString mime; + CHECK(format->findString("mime", &mime)); + + if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str())) { + mExtractor->selectTrack(i); + haveVideo = true; + break; + } + } + + CHECK(haveVideo); + } + + virtual sp getNextAccessUnit() { + int64_t timeUs; + status_t err = mExtractor->getSampleTime(&timeUs); + + if (err != OK) { + return NULL; + } + + sp accessUnit = new ABuffer(mMaxSampleSize); + CHECK_EQ((status_t)OK, mExtractor->readSampleData(accessUnit)); + + accessUnit->meta()->setInt64("timeUs", timeUs); + + CHECK_EQ((status_t)OK, mExtractor->advance()); + + return accessUnit; + } + +protected: + virtual ~MediaPacketSource() { + } + +private: + sp mExtractor; + size_t mMaxSampleSize; + + DISALLOW_EVIL_CONSTRUCTORS(MediaPacketSource); +}; + +struct SimplePacketSource : public PacketSource { + SimplePacketSource() + : mCounter(0) { + } + + virtual sp getNextAccessUnit() { + sp buffer = new ABuffer(4); + uint8_t *dst = buffer->data(); + dst[0] = mCounter >> 24; + dst[1] = (mCounter >> 16) & 0xff; + dst[2] = (mCounter >> 8) & 0xff; + dst[3] = mCounter & 0xff; + + buffer->meta()->setInt64("timeUs", mCounter * 1000000ll / kFrameRate); + + ++mCounter; + + return buffer; + } + +protected: + virtual ~SimplePacketSource() { + } + +private: + enum { + kFrameRate = 30 + }; + + uint32_t mCounter; + + DISALLOW_EVIL_CONSTRUCTORS(SimplePacketSource); +}; + +struct TestHandler : public AHandler { + TestHandler(const sp &netSession); + + void listen(); + void connect(const char *host, int32_t port); + +protected: + virtual ~TestHandler(); + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatListen, + kWhatConnect, + kWhatReceiverNotify, + kWhatSenderNotify, + kWhatSendMore, + kWhatStop, + kWhatTimeSyncerNotify, + }; + +#if 1 + static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_UDP; + static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_UDP; +#else + static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_TCP; + static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_NONE; +#endif + +#if 1 + static const RTPBase::PacketizationMode kPacketizationMode + = RTPBase::PACKETIZATION_H264; +#else + static const RTPBase::PacketizationMode kPacketizationMode + = RTPBase::PACKETIZATION_NONE; +#endif + + sp mNetSession; + sp mSource; + sp mSender; + sp mReceiver; + + sp mTimeSyncer; + bool mTimeSyncerStarted; + + int64_t mFirstTimeRealUs; + int64_t mFirstTimeMediaUs; + + int64_t mTimeOffsetUs; + bool mTimeOffsetValid; + + status_t readMore(); + + 
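// SimplePacketSource above emits a 4-byte big-endian frame counter and stamps
// it with counter * 1000000 / kFrameRate microseconds, so at 30 fps frame 90
// carries timeUs = 3,000,000. The same encoding as a standalone sketch (plain
// C++; the struct and function names are illustrative only):

#include <cstdint>

struct CounterFrame {
    uint8_t payload[4];   // frame counter, most significant byte first
    int64_t timeUs;       // presentation time in microseconds
};

static CounterFrame makeCounterFrame(uint32_t counter, int frameRate) {
    CounterFrame f;
    f.payload[0] = counter >> 24;
    f.payload[1] = (counter >> 16) & 0xff;
    f.payload[2] = (counter >> 8) & 0xff;
    f.payload[3] = counter & 0xff;
    f.timeUs = (int64_t)counter * 1000000ll / frameRate;
    return f;
}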
DISALLOW_EVIL_CONSTRUCTORS(TestHandler); +}; + +TestHandler::TestHandler(const sp &netSession) + : mNetSession(netSession), + mTimeSyncerStarted(false), + mFirstTimeRealUs(-1ll), + mFirstTimeMediaUs(-1ll), + mTimeOffsetUs(-1ll), + mTimeOffsetValid(false) { +} + +TestHandler::~TestHandler() { +} + +void TestHandler::listen() { + sp msg = new AMessage(kWhatListen, id()); + msg->post(); +} + +void TestHandler::connect(const char *host, int32_t port) { + sp msg = new AMessage(kWhatConnect, id()); + msg->setString("host", host); + msg->setInt32("port", port); + msg->post(); +} + +static void dumpDelay(int64_t delayMs) { + static const int64_t kMinDelayMs = 0; + static const int64_t kMaxDelayMs = 300; + + const char *kPattern = "########################################"; + size_t kPatternSize = strlen(kPattern); + + int n = (kPatternSize * (delayMs - kMinDelayMs)) + / (kMaxDelayMs - kMinDelayMs); + + if (n < 0) { + n = 0; + } else if ((size_t)n > kPatternSize) { + n = kPatternSize; + } + + ALOGI("(%4lld ms) %s\n", + delayMs, + kPattern + kPatternSize - n); +} + +void TestHandler::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatListen: + { + sp notify = new AMessage(kWhatTimeSyncerNotify, id()); + mTimeSyncer = new TimeSyncer(mNetSession, notify); + looper()->registerHandler(mTimeSyncer); + + notify = new AMessage(kWhatReceiverNotify, id()); + mReceiver = new RTPReceiver( + mNetSession, notify, RTPReceiver::FLAG_AUTO_CONNECT); + looper()->registerHandler(mReceiver); + + CHECK_EQ((status_t)OK, + mReceiver->registerPacketType(33, kPacketizationMode)); + + int32_t receiverRTPPort; + CHECK_EQ((status_t)OK, + mReceiver->initAsync( + kRTPMode, + kRTCPMode, + &receiverRTPPort)); + + printf("picked receiverRTPPort %d\n", receiverRTPPort); + +#if 0 + CHECK_EQ((status_t)OK, + mReceiver->connect( + "127.0.0.1", senderRTPPort, senderRTPPort + 1)); +#endif + break; + } + + case kWhatConnect: + { + AString host; + CHECK(msg->findString("host", &host)); + + sp notify = new AMessage(kWhatTimeSyncerNotify, id()); + mTimeSyncer = new TimeSyncer(mNetSession, notify); + looper()->registerHandler(mTimeSyncer); + mTimeSyncer->startServer(8123); + + int32_t receiverRTPPort; + CHECK(msg->findInt32("port", &receiverRTPPort)); + +#if 1 + mSource = new MediaPacketSource; +#else + mSource = new SimplePacketSource; +#endif + + notify = new AMessage(kWhatSenderNotify, id()); + mSender = new RTPSender(mNetSession, notify); + + looper()->registerHandler(mSender); + + int32_t senderRTPPort; + CHECK_EQ((status_t)OK, + mSender->initAsync( + host.c_str(), + receiverRTPPort, + kRTPMode, + kRTCPMode == RTPBase::TRANSPORT_NONE + ? 
-1 : receiverRTPPort + 1, + kRTCPMode, + &senderRTPPort)); + + printf("picked senderRTPPort %d\n", senderRTPPort); + break; + } + + case kWhatSenderNotify: + { + ALOGI("kWhatSenderNotify"); + + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case RTPSender::kWhatInitDone: + { + int32_t err; + CHECK(msg->findInt32("err", &err)); + + ALOGI("RTPSender::initAsync completed w/ err %d", err); + + if (err == OK) { + err = readMore(); + + if (err != OK) { + (new AMessage(kWhatStop, id()))->post(); + } + } + break; + } + + case RTPSender::kWhatError: + break; + } + break; + } + + case kWhatReceiverNotify: + { + ALOGV("kWhatReceiverNotify"); + + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case RTPReceiver::kWhatInitDone: + { + int32_t err; + CHECK(msg->findInt32("err", &err)); + + ALOGI("RTPReceiver::initAsync completed w/ err %d", err); + break; + } + + case RTPReceiver::kWhatError: + break; + + case RTPReceiver::kWhatAccessUnit: + { +#if 0 + if (!mTimeSyncerStarted) { + mTimeSyncer->startClient("172.18.41.216", 8123); + mTimeSyncerStarted = true; + } + + sp accessUnit; + CHECK(msg->findBuffer("accessUnit", &accessUnit)); + + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + + if (mTimeOffsetValid) { + timeUs -= mTimeOffsetUs; + int64_t nowUs = ALooper::GetNowUs(); + int64_t delayMs = (nowUs - timeUs) / 1000ll; + + dumpDelay(delayMs); + } +#endif + break; + } + + case RTPReceiver::kWhatPacketLost: + ALOGV("kWhatPacketLost"); + break; + + default: + TRESPASS(); + } + break; + } + + case kWhatSendMore: + { + sp accessUnit; + CHECK(msg->findBuffer("accessUnit", &accessUnit)); + + CHECK_EQ((status_t)OK, + mSender->queueBuffer( + accessUnit, + 33, + kPacketizationMode)); + + status_t err = readMore(); + + if (err != OK) { + (new AMessage(kWhatStop, id()))->post(); + } + break; + } + + case kWhatStop: + { + if (mReceiver != NULL) { + looper()->unregisterHandler(mReceiver->id()); + mReceiver.clear(); + } + + if (mSender != NULL) { + looper()->unregisterHandler(mSender->id()); + mSender.clear(); + } + + mSource.clear(); + + looper()->stop(); + break; + } + + case kWhatTimeSyncerNotify: + { + CHECK(msg->findInt64("offset", &mTimeOffsetUs)); + mTimeOffsetValid = true; + break; + } + + default: + TRESPASS(); + } +} + +status_t TestHandler::readMore() { + sp accessUnit = mSource->getNextAccessUnit(); + + if (accessUnit == NULL) { + return ERROR_END_OF_STREAM; + } + + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + + int64_t nowUs = ALooper::GetNowUs(); + int64_t whenUs; + + if (mFirstTimeRealUs < 0ll) { + mFirstTimeRealUs = whenUs = nowUs; + mFirstTimeMediaUs = timeUs; + } else { + whenUs = mFirstTimeRealUs + timeUs - mFirstTimeMediaUs; + } + + accessUnit->meta()->setInt64("timeUs", whenUs); + + sp msg = new AMessage(kWhatSendMore, id()); + msg->setBuffer("accessUnit", accessUnit); + msg->post(whenUs - nowUs); + + return OK; +} + +} // namespace android + +static void usage(const char *me) { + fprintf(stderr, + "usage: %s -c host:port\tconnect to remote host\n" + " -l \tlisten\n", + me); +} + +int main(int argc, char **argv) { + using namespace android; + + // srand(time(NULL)); + + ProcessState::self()->startThreadPool(); + + DataSource::RegisterDefaultSniffers(); + + bool listen = false; + int32_t connectToPort = -1; + AString connectToHost; + + int res; + while ((res = getopt(argc, argv, "hc:l")) >= 0) { + switch (res) { + case 'c': + { + const char *colonPos = strrchr(optarg, ':'); + + if 
(colonPos == NULL) { + usage(argv[0]); + exit(1); + } + + connectToHost.setTo(optarg, colonPos - optarg); + + char *end; + connectToPort = strtol(colonPos + 1, &end, 10); + + if (*end != '\0' || end == colonPos + 1 + || connectToPort < 1 || connectToPort > 65535) { + fprintf(stderr, "Illegal port specified.\n"); + exit(1); + } + break; + } + + case 'l': + { + listen = true; + break; + } + + case '?': + case 'h': + usage(argv[0]); + exit(1); + } + } + + if (!listen && connectToPort < 0) { + fprintf(stderr, + "You need to select either client or server mode.\n"); + exit(1); + } + + sp netSession = new ANetworkSession; + netSession->start(); + + sp looper = new ALooper; + + sp handler = new TestHandler(netSession); + looper->registerHandler(handler); + + if (listen) { + handler->listen(); + } + + if (connectToPort >= 0) { + handler->connect(connectToHost.c_str(), connectToPort); + } + + looper->start(true /* runOnCallingThread */); + + return 0; +} + diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp new file mode 100644 index 0000000..15f9c88 --- /dev/null +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp @@ -0,0 +1,625 @@ +/* + * Copyright 2012, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "DirectRenderer" +#include + +#include "DirectRenderer.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { + +/* + Drives the decoding process using a MediaCodec instance. Input buffers + queued by calls to "queueInputBuffer" are fed to the decoder as soon + as the decoder is ready for them, the client is notified about output + buffers as the decoder spits them out. +*/ +struct DirectRenderer::DecoderContext : public AHandler { + enum { + kWhatOutputBufferReady, + }; + DecoderContext(const sp ¬ify); + + status_t init( + const sp &format, + const sp &surfaceTex); + + void queueInputBuffer(const sp &accessUnit); + + status_t renderOutputBufferAndRelease(size_t index); + status_t releaseOutputBuffer(size_t index); + +protected: + virtual ~DecoderContext(); + + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatDecoderNotify, + }; + + sp mNotify; + sp mDecoderLooper; + sp mDecoder; + Vector > mDecoderInputBuffers; + Vector > mDecoderOutputBuffers; + List mDecoderInputBuffersAvailable; + bool mDecoderNotificationPending; + + List > mAccessUnits; + + void onDecoderNotify(); + void scheduleDecoderNotification(); + void queueDecoderInputBuffers(); + + void queueOutputBuffer( + size_t index, int64_t timeUs, const sp &buffer); + + DISALLOW_EVIL_CONSTRUCTORS(DecoderContext); +}; + +//////////////////////////////////////////////////////////////////////////////// + +/* + A "push" audio renderer. 
The primary function of this renderer is to use + an AudioTrack in push mode and making sure not to block the event loop + be ensuring that calls to AudioTrack::write never block. This is done by + estimating an upper bound of data that can be written to the AudioTrack + buffer without delay. +*/ +struct DirectRenderer::AudioRenderer : public AHandler { + AudioRenderer(const sp &decoderContext); + + void queueInputBuffer( + size_t index, int64_t timeUs, const sp &buffer); + +protected: + virtual ~AudioRenderer(); + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kWhatPushAudio, + }; + + struct BufferInfo { + size_t mIndex; + int64_t mTimeUs; + sp mBuffer; + }; + + sp mDecoderContext; + sp mAudioTrack; + + List mInputBuffers; + bool mPushPending; + + size_t mNumFramesWritten; + + void schedulePushIfNecessary(); + void onPushAudio(); + + ssize_t writeNonBlocking(const uint8_t *data, size_t size); + + DISALLOW_EVIL_CONSTRUCTORS(AudioRenderer); +}; + +//////////////////////////////////////////////////////////////////////////////// + +DirectRenderer::DecoderContext::DecoderContext(const sp ¬ify) + : mNotify(notify), + mDecoderNotificationPending(false) { +} + +DirectRenderer::DecoderContext::~DecoderContext() { + if (mDecoder != NULL) { + mDecoder->release(); + mDecoder.clear(); + + mDecoderLooper->stop(); + mDecoderLooper.clear(); + } +} + +status_t DirectRenderer::DecoderContext::init( + const sp &format, + const sp &surfaceTex) { + CHECK(mDecoder == NULL); + + AString mime; + CHECK(format->findString("mime", &mime)); + + mDecoderLooper = new ALooper; + mDecoderLooper->setName("video codec looper"); + + mDecoderLooper->start( + false /* runOnCallingThread */, + false /* canCallJava */, + PRIORITY_DEFAULT); + + mDecoder = MediaCodec::CreateByType( + mDecoderLooper, mime.c_str(), false /* encoder */); + + CHECK(mDecoder != NULL); + + status_t err = mDecoder->configure( + format, + surfaceTex == NULL + ? 
NULL : new Surface(surfaceTex), + NULL /* crypto */, + 0 /* flags */); + CHECK_EQ(err, (status_t)OK); + + err = mDecoder->start(); + CHECK_EQ(err, (status_t)OK); + + err = mDecoder->getInputBuffers( + &mDecoderInputBuffers); + CHECK_EQ(err, (status_t)OK); + + err = mDecoder->getOutputBuffers( + &mDecoderOutputBuffers); + CHECK_EQ(err, (status_t)OK); + + scheduleDecoderNotification(); + + return OK; +} + +void DirectRenderer::DecoderContext::queueInputBuffer( + const sp &accessUnit) { + CHECK(mDecoder != NULL); + + mAccessUnits.push_back(accessUnit); + queueDecoderInputBuffers(); +} + +status_t DirectRenderer::DecoderContext::renderOutputBufferAndRelease( + size_t index) { + return mDecoder->renderOutputBufferAndRelease(index); +} + +status_t DirectRenderer::DecoderContext::releaseOutputBuffer(size_t index) { + return mDecoder->releaseOutputBuffer(index); +} + +void DirectRenderer::DecoderContext::queueDecoderInputBuffers() { + if (mDecoder == NULL) { + return; + } + + bool submittedMore = false; + + while (!mAccessUnits.empty() + && !mDecoderInputBuffersAvailable.empty()) { + size_t index = *mDecoderInputBuffersAvailable.begin(); + + mDecoderInputBuffersAvailable.erase( + mDecoderInputBuffersAvailable.begin()); + + sp srcBuffer = *mAccessUnits.begin(); + mAccessUnits.erase(mAccessUnits.begin()); + + const sp &dstBuffer = + mDecoderInputBuffers.itemAt(index); + + memcpy(dstBuffer->data(), srcBuffer->data(), srcBuffer->size()); + + int64_t timeUs; + CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs)); + + status_t err = mDecoder->queueInputBuffer( + index, + 0 /* offset */, + srcBuffer->size(), + timeUs, + 0 /* flags */); + CHECK_EQ(err, (status_t)OK); + + submittedMore = true; + } + + if (submittedMore) { + scheduleDecoderNotification(); + } +} + +void DirectRenderer::DecoderContext::onMessageReceived( + const sp &msg) { + switch (msg->what()) { + case kWhatDecoderNotify: + { + onDecoderNotify(); + break; + } + + default: + TRESPASS(); + } +} + +void DirectRenderer::DecoderContext::onDecoderNotify() { + mDecoderNotificationPending = false; + + for (;;) { + size_t index; + status_t err = mDecoder->dequeueInputBuffer(&index); + + if (err == OK) { + mDecoderInputBuffersAvailable.push_back(index); + } else if (err == -EAGAIN) { + break; + } else { + TRESPASS(); + } + } + + queueDecoderInputBuffers(); + + for (;;) { + size_t index; + size_t offset; + size_t size; + int64_t timeUs; + uint32_t flags; + status_t err = mDecoder->dequeueOutputBuffer( + &index, + &offset, + &size, + &timeUs, + &flags); + + if (err == OK) { + queueOutputBuffer( + index, timeUs, mDecoderOutputBuffers.itemAt(index)); + } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) { + err = mDecoder->getOutputBuffers( + &mDecoderOutputBuffers); + CHECK_EQ(err, (status_t)OK); + } else if (err == INFO_FORMAT_CHANGED) { + // We don't care. 
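                // This drain loop keeps polling dequeueOutputBuffer() until it
                // returns -EAGAIN: OK hands the finished buffer to
                // queueOutputBuffer(), INFO_OUTPUT_BUFFERS_CHANGED re-fetches
                // the output buffer vector, and a format change is ignored
                // here, presumably because buffers are rendered straight to
                // the Surface and the new format never has to be propagated.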
+ } else if (err == -EAGAIN) { + break; + } else { + TRESPASS(); + } + } + + scheduleDecoderNotification(); +} + +void DirectRenderer::DecoderContext::scheduleDecoderNotification() { + if (mDecoderNotificationPending) { + return; + } + + sp notify = + new AMessage(kWhatDecoderNotify, id()); + + mDecoder->requestActivityNotification(notify); + mDecoderNotificationPending = true; +} + +void DirectRenderer::DecoderContext::queueOutputBuffer( + size_t index, int64_t timeUs, const sp &buffer) { + sp msg = mNotify->dup(); + msg->setInt32("what", kWhatOutputBufferReady); + msg->setSize("index", index); + msg->setInt64("timeUs", timeUs); + msg->setBuffer("buffer", buffer); + msg->post(); +} + +//////////////////////////////////////////////////////////////////////////////// + +DirectRenderer::AudioRenderer::AudioRenderer( + const sp &decoderContext) + : mDecoderContext(decoderContext), + mPushPending(false), + mNumFramesWritten(0) { + mAudioTrack = new AudioTrack( + AUDIO_STREAM_DEFAULT, + 48000.0f, + AUDIO_FORMAT_PCM, + AUDIO_CHANNEL_OUT_STEREO, + (int)0 /* frameCount */); + + CHECK_EQ((status_t)OK, mAudioTrack->initCheck()); + + mAudioTrack->start(); +} + +DirectRenderer::AudioRenderer::~AudioRenderer() { +} + +void DirectRenderer::AudioRenderer::queueInputBuffer( + size_t index, int64_t timeUs, const sp &buffer) { + BufferInfo info; + info.mIndex = index; + info.mTimeUs = timeUs; + info.mBuffer = buffer; + + mInputBuffers.push_back(info); + schedulePushIfNecessary(); +} + +void DirectRenderer::AudioRenderer::onMessageReceived( + const sp &msg) { + switch (msg->what()) { + case kWhatPushAudio: + { + onPushAudio(); + break; + } + + default: + break; + } +} + +void DirectRenderer::AudioRenderer::schedulePushIfNecessary() { + if (mPushPending || mInputBuffers.empty()) { + return; + } + + mPushPending = true; + + uint32_t numFramesPlayed; + CHECK_EQ(mAudioTrack->getPosition(&numFramesPlayed), + (status_t)OK); + + uint32_t numFramesPendingPlayout = mNumFramesWritten - numFramesPlayed; + + // This is how long the audio sink will have data to + // play back. + const float msecsPerFrame = 1000.0f / mAudioTrack->getSampleRate(); + + int64_t delayUs = + msecsPerFrame * numFramesPendingPlayout * 1000ll; + + // Let's give it more data after about half that time + // has elapsed. 
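    // Worked example of the arithmetic above, assuming a 48 kHz track with
    // 4800 frames written but not yet played: msecsPerFrame = 1000 / 48000
    // ≈ 0.0208 ms, so numFramesPendingPlayout * msecsPerFrame ≈ 100 ms of
    // queued audio, delayUs ≈ 100000, and the next push is scheduled
    // delayUs / 2 = 50 ms from now.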
+ (new AMessage(kWhatPushAudio, id()))->post(delayUs / 2); +} + +void DirectRenderer::AudioRenderer::onPushAudio() { + mPushPending = false; + + while (!mInputBuffers.empty()) { + const BufferInfo &info = *mInputBuffers.begin(); + + ssize_t n = writeNonBlocking( + info.mBuffer->data(), info.mBuffer->size()); + + if (n < (ssize_t)info.mBuffer->size()) { + CHECK_GE(n, 0); + + info.mBuffer->setRange( + info.mBuffer->offset() + n, info.mBuffer->size() - n); + break; + } + + mDecoderContext->releaseOutputBuffer(info.mIndex); + + mInputBuffers.erase(mInputBuffers.begin()); + } + + schedulePushIfNecessary(); +} + +ssize_t DirectRenderer::AudioRenderer::writeNonBlocking( + const uint8_t *data, size_t size) { + uint32_t numFramesPlayed; + status_t err = mAudioTrack->getPosition(&numFramesPlayed); + if (err != OK) { + return err; + } + + ssize_t numFramesAvailableToWrite = + mAudioTrack->frameCount() - (mNumFramesWritten - numFramesPlayed); + + size_t numBytesAvailableToWrite = + numFramesAvailableToWrite * mAudioTrack->frameSize(); + + if (size > numBytesAvailableToWrite) { + size = numBytesAvailableToWrite; + } + + CHECK_EQ(mAudioTrack->write(data, size), (ssize_t)size); + + size_t numFramesWritten = size / mAudioTrack->frameSize(); + mNumFramesWritten += numFramesWritten; + + return size; +} + +//////////////////////////////////////////////////////////////////////////////// + +DirectRenderer::DirectRenderer( + const sp &bufferProducer) + : mSurfaceTex(bufferProducer), + mVideoRenderPending(false), + mNumFramesLate(0), + mNumFrames(0) { +} + +DirectRenderer::~DirectRenderer() { +} + +void DirectRenderer::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatDecoderNotify: + { + onDecoderNotify(msg); + break; + } + + case kWhatRenderVideo: + { + onRenderVideo(); + break; + } + + default: + TRESPASS(); + } +} + +void DirectRenderer::setFormat(size_t trackIndex, const sp &format) { + CHECK_LT(trackIndex, 2u); + + CHECK(mDecoderContext[trackIndex] == NULL); + + sp notify = new AMessage(kWhatDecoderNotify, id()); + notify->setSize("trackIndex", trackIndex); + + mDecoderContext[trackIndex] = new DecoderContext(notify); + looper()->registerHandler(mDecoderContext[trackIndex]); + + CHECK_EQ((status_t)OK, + mDecoderContext[trackIndex]->init( + format, trackIndex == 0 ? 
mSurfaceTex : NULL)); + + if (trackIndex == 1) { + // Audio + mAudioRenderer = new AudioRenderer(mDecoderContext[1]); + looper()->registerHandler(mAudioRenderer); + } +} + +void DirectRenderer::queueAccessUnit( + size_t trackIndex, const sp &accessUnit) { + CHECK_LT(trackIndex, 2u); + + if (mDecoderContext[trackIndex] == NULL) { + CHECK_EQ(trackIndex, 0u); + + sp format = new AMessage; + format->setString("mime", "video/avc"); + format->setInt32("width", 640); + format->setInt32("height", 360); + + setFormat(trackIndex, format); + } + + mDecoderContext[trackIndex]->queueInputBuffer(accessUnit); +} + +void DirectRenderer::onDecoderNotify(const sp &msg) { + size_t trackIndex; + CHECK(msg->findSize("trackIndex", &trackIndex)); + + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case DecoderContext::kWhatOutputBufferReady: + { + size_t index; + CHECK(msg->findSize("index", &index)); + + int64_t timeUs; + CHECK(msg->findInt64("timeUs", &timeUs)); + + sp buffer; + CHECK(msg->findBuffer("buffer", &buffer)); + + queueOutputBuffer(trackIndex, index, timeUs, buffer); + break; + } + + default: + TRESPASS(); + } +} + +void DirectRenderer::queueOutputBuffer( + size_t trackIndex, + size_t index, int64_t timeUs, const sp &buffer) { + if (trackIndex == 1) { + // Audio + mAudioRenderer->queueInputBuffer(index, timeUs, buffer); + return; + } + + OutputInfo info; + info.mIndex = index; + info.mTimeUs = timeUs; + info.mBuffer = buffer; + mVideoOutputBuffers.push_back(info); + + scheduleVideoRenderIfNecessary(); +} + +void DirectRenderer::scheduleVideoRenderIfNecessary() { + if (mVideoRenderPending || mVideoOutputBuffers.empty()) { + return; + } + + mVideoRenderPending = true; + + int64_t timeUs = (*mVideoOutputBuffers.begin()).mTimeUs; + int64_t nowUs = ALooper::GetNowUs(); + + int64_t delayUs = timeUs - nowUs; + + (new AMessage(kWhatRenderVideo, id()))->post(delayUs); +} + +void DirectRenderer::onRenderVideo() { + mVideoRenderPending = false; + + int64_t nowUs = ALooper::GetNowUs(); + + while (!mVideoOutputBuffers.empty()) { + const OutputInfo &info = *mVideoOutputBuffers.begin(); + + if (info.mTimeUs > nowUs) { + break; + } + + if (info.mTimeUs + 15000ll < nowUs) { + ++mNumFramesLate; + } + ++mNumFrames; + + status_t err = + mDecoderContext[0]->renderOutputBufferAndRelease(info.mIndex); + CHECK_EQ(err, (status_t)OK); + + mVideoOutputBuffers.erase(mVideoOutputBuffers.begin()); + } + + scheduleVideoRenderIfNecessary(); +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h new file mode 100644 index 0000000..c5a4a83 --- /dev/null +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.h @@ -0,0 +1,82 @@ +/* + * Copyright 2012, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef DIRECT_RENDERER_H_ + +#define DIRECT_RENDERER_H_ + +#include + +namespace android { + +struct ABuffer; +struct AudioTrack; +struct IGraphicBufferProducer; +struct MediaCodec; + +// Renders audio and video data queued by calls to "queueAccessUnit". +struct DirectRenderer : public AHandler { + DirectRenderer(const sp &bufferProducer); + + void setFormat(size_t trackIndex, const sp &format); + void queueAccessUnit(size_t trackIndex, const sp &accessUnit); + +protected: + virtual void onMessageReceived(const sp &msg); + virtual ~DirectRenderer(); + +private: + struct DecoderContext; + struct AudioRenderer; + + enum { + kWhatDecoderNotify, + kWhatRenderVideo, + }; + + struct OutputInfo { + size_t mIndex; + int64_t mTimeUs; + sp mBuffer; + }; + + sp mSurfaceTex; + + sp mDecoderContext[2]; + List mVideoOutputBuffers; + + bool mVideoRenderPending; + + sp mAudioRenderer; + + int32_t mNumFramesLate; + int32_t mNumFrames; + + void onDecoderNotify(const sp &msg); + + void queueOutputBuffer( + size_t trackIndex, + size_t index, int64_t timeUs, const sp &buffer); + + void scheduleVideoRenderIfNecessary(); + void onRenderVideo(); + + DISALLOW_EVIL_CONSTRUCTORS(DirectRenderer); +}; + +} // namespace android + +#endif // DIRECT_RENDERER_H_ diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp new file mode 100644 index 0000000..5db2099 --- /dev/null +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -0,0 +1,917 @@ +/* + * Copyright 2012, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "WifiDisplaySink" +#include + +#include "WifiDisplaySink.h" + +#include "DirectRenderer.h" +#include "MediaReceiver.h" +#include "ParsedMessage.h" +#include "TimeSyncer.h" + +#include +#include +#include +#include +#include +#include + +namespace android { + +// static +const AString WifiDisplaySink::sUserAgent = MakeUserAgent(); + +WifiDisplaySink::WifiDisplaySink( + uint32_t flags, + const sp &netSession, + const sp &bufferProducer, + const sp ¬ify) + : mState(UNDEFINED), + mFlags(flags), + mNetSession(netSession), + mSurfaceTex(bufferProducer), + mNotify(notify), + mUsingTCPTransport(false), + mUsingTCPInterleaving(false), + mSessionID(0), + mNextCSeq(1), + mIDRFrameRequestPending(false), + mTimeOffsetUs(0ll), + mTimeOffsetValid(false), + mSetupDeferred(false), + mLatencyCount(0), + mLatencySumUs(0ll), + mLatencyMaxUs(0ll), + mMaxDelayMs(-1ll) { + // We support any and all resolutions, but prefer 720p30 + mSinkSupportedVideoFormats.setNativeResolution( + VideoFormats::RESOLUTION_CEA, 5); // 1280 x 720 p30 + + mSinkSupportedVideoFormats.enableAll(); +} + +WifiDisplaySink::~WifiDisplaySink() { +} + +void WifiDisplaySink::start(const char *sourceHost, int32_t sourcePort) { + sp msg = new AMessage(kWhatStart, id()); + msg->setString("sourceHost", sourceHost); + msg->setInt32("sourcePort", sourcePort); + msg->post(); +} + +void WifiDisplaySink::start(const char *uri) { + sp msg = new AMessage(kWhatStart, id()); + msg->setString("setupURI", uri); + msg->post(); +} + +// static +bool WifiDisplaySink::ParseURL( + const char *url, AString *host, int32_t *port, AString *path, + AString *user, AString *pass) { + host->clear(); + *port = 0; + path->clear(); + user->clear(); + pass->clear(); + + if (strncasecmp("rtsp://", url, 7)) { + return false; + } + + const char *slashPos = strchr(&url[7], '/'); + + if (slashPos == NULL) { + host->setTo(&url[7]); + path->setTo("/"); + } else { + host->setTo(&url[7], slashPos - &url[7]); + path->setTo(slashPos); + } + + ssize_t atPos = host->find("@"); + + if (atPos >= 0) { + // Split of user:pass@ from hostname. 
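        // Worked example for ParseURL() as a whole (values illustrative):
        //   "rtsp://user:secret@192.168.1.1:7236/wfd1.0/streamid=0"
        //     host -> "192.168.1.1", port -> 7236,
        //     path -> "/wfd1.0/streamid=0", user -> "user", pass -> "secret"
        //   "rtsp://192.168.1.1/wfd1.0" leaves user/pass empty and falls back
        //   to the default RTSP port 554 below.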
+ + AString userPass(*host, 0, atPos); + host->erase(0, atPos + 1); + + ssize_t colonPos = userPass.find(":"); + + if (colonPos < 0) { + *user = userPass; + } else { + user->setTo(userPass, 0, colonPos); + pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1); + } + } + + const char *colonPos = strchr(host->c_str(), ':'); + + if (colonPos != NULL) { + char *end; + unsigned long x = strtoul(colonPos + 1, &end, 10); + + if (end == colonPos + 1 || *end != '\0' || x >= 65536) { + return false; + } + + *port = x; + + size_t colonOffset = colonPos - host->c_str(); + size_t trailing = host->size() - colonOffset; + host->erase(colonOffset, trailing); + } else { + *port = 554; + } + + return true; +} + +void WifiDisplaySink::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatStart: + { + sleep(2); // XXX + + int32_t sourcePort; + CHECK(msg->findString("sourceHost", &mRTSPHost)); + CHECK(msg->findInt32("sourcePort", &sourcePort)); + + sp notify = new AMessage(kWhatRTSPNotify, id()); + + status_t err = mNetSession->createRTSPClient( + mRTSPHost.c_str(), sourcePort, notify, &mSessionID); + CHECK_EQ(err, (status_t)OK); + + mState = CONNECTING; + break; + } + + case kWhatRTSPNotify: + { + int32_t reason; + CHECK(msg->findInt32("reason", &reason)); + + switch (reason) { + case ANetworkSession::kWhatError: + { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + int32_t err; + CHECK(msg->findInt32("err", &err)); + + AString detail; + CHECK(msg->findString("detail", &detail)); + + ALOGE("An error occurred in session %d (%d, '%s/%s').", + sessionID, + err, + detail.c_str(), + strerror(-err)); + + if (sessionID == mSessionID) { + ALOGI("Lost control connection."); + + // The control connection is dead now. + mNetSession->destroySession(mSessionID); + mSessionID = 0; + + if (mNotify == NULL) { + looper()->stop(); + } else { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatDisconnected); + notify->post(); + } + } + break; + } + + case ANetworkSession::kWhatConnected: + { + ALOGI("We're now connected."); + mState = CONNECTED; + + if (mFlags & FLAG_SPECIAL_MODE) { + sp notify = new AMessage( + kWhatTimeSyncerNotify, id()); + + mTimeSyncer = new TimeSyncer(mNetSession, notify); + looper()->registerHandler(mTimeSyncer); + + mTimeSyncer->startClient(mRTSPHost.c_str(), 8123); + } + break; + } + + case ANetworkSession::kWhatData: + { + onReceiveClientData(msg); + break; + } + + default: + TRESPASS(); + } + break; + } + + case kWhatStop: + { + looper()->stop(); + break; + } + + case kWhatMediaReceiverNotify: + { + onMediaReceiverNotify(msg); + break; + } + + case kWhatTimeSyncerNotify: + { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + if (what == TimeSyncer::kWhatTimeOffset) { + CHECK(msg->findInt64("offset", &mTimeOffsetUs)); + mTimeOffsetValid = true; + + if (mSetupDeferred) { + CHECK_EQ((status_t)OK, + sendSetup( + mSessionID, + "rtsp://x.x.x.x:x/wfd1.0/streamid=0")); + + mSetupDeferred = false; + } + } + break; + } + + case kWhatReportLateness: + { + if (mLatencyCount > 0) { + int64_t avgLatencyUs = mLatencySumUs / mLatencyCount; + + ALOGV("avg. 
latency = %lld ms (max %lld ms)", + avgLatencyUs / 1000ll, + mLatencyMaxUs / 1000ll); + + sp params = new AMessage; + params->setInt64("avgLatencyUs", avgLatencyUs); + params->setInt64("maxLatencyUs", mLatencyMaxUs); + mMediaReceiver->informSender(0 /* trackIndex */, params); + } + + mLatencyCount = 0; + mLatencySumUs = 0ll; + mLatencyMaxUs = 0ll; + + msg->post(kReportLatenessEveryUs); + break; + } + + default: + TRESPASS(); + } +} + +void WifiDisplaySink::dumpDelay(size_t trackIndex, int64_t timeUs) { + int64_t delayMs = (ALooper::GetNowUs() - timeUs) / 1000ll; + + if (delayMs > mMaxDelayMs) { + mMaxDelayMs = delayMs; + } + + static const int64_t kMinDelayMs = 0; + static const int64_t kMaxDelayMs = 300; + + const char *kPattern = "########################################"; + size_t kPatternSize = strlen(kPattern); + + int n = (kPatternSize * (delayMs - kMinDelayMs)) + / (kMaxDelayMs - kMinDelayMs); + + if (n < 0) { + n = 0; + } else if ((size_t)n > kPatternSize) { + n = kPatternSize; + } + + ALOGI("[%lld]: (%4lld ms / %4lld ms) %s", + timeUs / 1000, + delayMs, + mMaxDelayMs, + kPattern + kPatternSize - n); +} + +void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case MediaReceiver::kWhatInitDone: + { + status_t err; + CHECK(msg->findInt32("err", &err)); + + ALOGI("MediaReceiver initialization completed w/ err %d", err); + break; + } + + case MediaReceiver::kWhatError: + { + status_t err; + CHECK(msg->findInt32("err", &err)); + + ALOGE("MediaReceiver signaled error %d", err); + break; + } + + case MediaReceiver::kWhatAccessUnit: + { + if (mRenderer == NULL) { + mRenderer = new DirectRenderer(mSurfaceTex); + looper()->registerHandler(mRenderer); + } + + sp accessUnit; + CHECK(msg->findBuffer("accessUnit", &accessUnit)); + + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + + if (!mTimeOffsetValid && !(mFlags & FLAG_SPECIAL_MODE)) { + mTimeOffsetUs = timeUs - ALooper::GetNowUs(); + mTimeOffsetValid = true; + } + + CHECK(mTimeOffsetValid); + + // We are the timesync _client_, + // client time = server time - time offset. 
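            // Numeric example, assuming the convention stated above
            // (offset = source clock - sink clock): if the source clock reads
            // 10.000 s when the sink clock reads 7.000 s, mTimeOffsetUs is
            // 3,000,000, and a source timestamp of 10.100 s maps to 7.100 s
            // in sink time, so delayUs below measures network and queueing
            // latency on the sink's own clock.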
+ timeUs -= mTimeOffsetUs; + + size_t trackIndex; + CHECK(msg->findSize("trackIndex", &trackIndex)); + + int64_t nowUs = ALooper::GetNowUs(); + int64_t delayUs = nowUs - timeUs; + + mLatencySumUs += delayUs; + if (mLatencyCount == 0 || delayUs > mLatencyMaxUs) { + mLatencyMaxUs = delayUs; + } + ++mLatencyCount; + + // dumpDelay(trackIndex, timeUs); + + timeUs += 220000ll; // Assume 220 ms of latency + accessUnit->meta()->setInt64("timeUs", timeUs); + + sp format; + if (msg->findMessage("format", &format)) { + mRenderer->setFormat(trackIndex, format); + } + + mRenderer->queueAccessUnit(trackIndex, accessUnit); + break; + } + + case MediaReceiver::kWhatPacketLost: + { +#if 0 + if (!mIDRFrameRequestPending) { + ALOGI("requesting IDR frame"); + + sendIDRFrameRequest(mSessionID); + } +#endif + break; + } + + default: + TRESPASS(); + } +} + +void WifiDisplaySink::registerResponseHandler( + int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func) { + ResponseID id; + id.mSessionID = sessionID; + id.mCSeq = cseq; + mResponseHandlers.add(id, func); +} + +status_t WifiDisplaySink::sendM2(int32_t sessionID) { + AString request = "OPTIONS * RTSP/1.0\r\n"; + AppendCommonResponse(&request, mNextCSeq); + + request.append( + "Require: org.wfa.wfd1.0\r\n" + "\r\n"); + + status_t err = + mNetSession->sendRequest(sessionID, request.c_str(), request.size()); + + if (err != OK) { + return err; + } + + registerResponseHandler( + sessionID, mNextCSeq, &WifiDisplaySink::onReceiveM2Response); + + ++mNextCSeq; + + return OK; +} + +status_t WifiDisplaySink::onReceiveM2Response( + int32_t sessionID, const sp &msg) { + int32_t statusCode; + if (!msg->getStatusCode(&statusCode)) { + return ERROR_MALFORMED; + } + + if (statusCode != 200) { + return ERROR_UNSUPPORTED; + } + + return OK; +} + +status_t WifiDisplaySink::onReceiveSetupResponse( + int32_t sessionID, const sp &msg) { + int32_t statusCode; + if (!msg->getStatusCode(&statusCode)) { + return ERROR_MALFORMED; + } + + if (statusCode != 200) { + return ERROR_UNSUPPORTED; + } + + if (!msg->findString("session", &mPlaybackSessionID)) { + return ERROR_MALFORMED; + } + + if (!ParsedMessage::GetInt32Attribute( + mPlaybackSessionID.c_str(), + "timeout", + &mPlaybackSessionTimeoutSecs)) { + mPlaybackSessionTimeoutSecs = -1; + } + + ssize_t colonPos = mPlaybackSessionID.find(";"); + if (colonPos >= 0) { + // Strip any options from the returned session id. + mPlaybackSessionID.erase( + colonPos, mPlaybackSessionID.size() - colonPos); + } + + status_t err = configureTransport(msg); + + if (err != OK) { + return err; + } + + mState = PAUSED; + + return sendPlay( + sessionID, + "rtsp://x.x.x.x:x/wfd1.0/streamid=0"); +} + +status_t WifiDisplaySink::configureTransport(const sp &msg) { + if (mUsingTCPTransport && !(mFlags & FLAG_SPECIAL_MODE)) { + // In "special" mode we still use a UDP RTCP back-channel that + // needs connecting. 
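        //
        // I.e. with plain TCP transport there is no separate socket to set up
        // here, so we return immediately; in "special" mode the RTP data may
        // ride over TCP but the RTCP back-channel is still UDP, which is why
        // that case falls through to the connectTrack() call below.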
+ return OK; + } + + AString transport; + if (!msg->findString("transport", &transport)) { + ALOGE("Missing 'transport' field in SETUP response."); + return ERROR_MALFORMED; + } + + AString sourceHost; + if (!ParsedMessage::GetAttribute( + transport.c_str(), "source", &sourceHost)) { + sourceHost = mRTSPHost; + } + + AString serverPortStr; + if (!ParsedMessage::GetAttribute( + transport.c_str(), "server_port", &serverPortStr)) { + ALOGE("Missing 'server_port' in Transport field."); + return ERROR_MALFORMED; + } + + int rtpPort, rtcpPort; + if (sscanf(serverPortStr.c_str(), "%d-%d", &rtpPort, &rtcpPort) != 2 + || rtpPort <= 0 || rtpPort > 65535 + || rtcpPort <=0 || rtcpPort > 65535 + || rtcpPort != rtpPort + 1) { + ALOGE("Invalid server_port description '%s'.", + serverPortStr.c_str()); + + return ERROR_MALFORMED; + } + + if (rtpPort & 1) { + ALOGW("Server picked an odd numbered RTP port."); + } + + return mMediaReceiver->connectTrack( + 0 /* trackIndex */, sourceHost.c_str(), rtpPort, rtcpPort); +} + +status_t WifiDisplaySink::onReceivePlayResponse( + int32_t sessionID, const sp &msg) { + int32_t statusCode; + if (!msg->getStatusCode(&statusCode)) { + return ERROR_MALFORMED; + } + + if (statusCode != 200) { + return ERROR_UNSUPPORTED; + } + + mState = PLAYING; + + (new AMessage(kWhatReportLateness, id()))->post(kReportLatenessEveryUs); + + return OK; +} + +status_t WifiDisplaySink::onReceiveIDRFrameRequestResponse( + int32_t sessionID, const sp &msg) { + CHECK(mIDRFrameRequestPending); + mIDRFrameRequestPending = false; + + return OK; +} + +void WifiDisplaySink::onReceiveClientData(const sp &msg) { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + sp obj; + CHECK(msg->findObject("data", &obj)); + + sp data = + static_cast(obj.get()); + + ALOGV("session %d received '%s'", + sessionID, data->debugString().c_str()); + + AString method; + AString uri; + data->getRequestField(0, &method); + + int32_t cseq; + if (!data->findInt32("cseq", &cseq)) { + sendErrorResponse(sessionID, "400 Bad Request", -1 /* cseq */); + return; + } + + if (method.startsWith("RTSP/")) { + // This is a response. 
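        // The lookup below pairs each response with the request that produced
        // it, keyed on (sessionID, CSeq) exactly as ResponseID::operator< in
        // WifiDisplaySink.h orders it. The same one-shot dispatch in
        // isolation, as a plain std::map sketch (illustrative names,
        // standard C++ only):
        //
        //   #include <cstdint>
        //   #include <functional>
        //   #include <map>
        //   #include <utility>
        //
        //   using ResponseKey     = std::pair<int32_t, int32_t>;  // {sessionID, cseq}
        //   using ResponseHandler = std::function<void(int32_t /*statusCode*/)>;
        //
        //   static std::map<ResponseKey, ResponseHandler> sPending;
        //
        //   static bool dispatchResponse(int32_t sessionID, int32_t cseq,
        //                                int32_t statusCode) {
        //       auto it = sPending.find({sessionID, cseq});
        //       if (it == sPending.end()) {
        //           return false;                 // unsolicited response
        //       }
        //       ResponseHandler handler = std::move(it->second);
        //       sPending.erase(it);               // handlers fire exactly once
        //       handler(statusCode);
        //       return true;
        //   }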
+ + ResponseID id; + id.mSessionID = sessionID; + id.mCSeq = cseq; + + ssize_t index = mResponseHandlers.indexOfKey(id); + + if (index < 0) { + ALOGW("Received unsolicited server response, cseq %d", cseq); + return; + } + + HandleRTSPResponseFunc func = mResponseHandlers.valueAt(index); + mResponseHandlers.removeItemsAt(index); + + status_t err = (this->*func)(sessionID, data); + CHECK_EQ(err, (status_t)OK); + } else { + AString version; + data->getRequestField(2, &version); + if (!(version == AString("RTSP/1.0"))) { + sendErrorResponse(sessionID, "505 RTSP Version not supported", cseq); + return; + } + + if (method == "OPTIONS") { + onOptionsRequest(sessionID, cseq, data); + } else if (method == "GET_PARAMETER") { + onGetParameterRequest(sessionID, cseq, data); + } else if (method == "SET_PARAMETER") { + onSetParameterRequest(sessionID, cseq, data); + } else { + sendErrorResponse(sessionID, "405 Method Not Allowed", cseq); + } + } +} + +void WifiDisplaySink::onOptionsRequest( + int32_t sessionID, + int32_t cseq, + const sp &data) { + AString response = "RTSP/1.0 200 OK\r\n"; + AppendCommonResponse(&response, cseq); + response.append("Public: org.wfa.wfd1.0, GET_PARAMETER, SET_PARAMETER\r\n"); + response.append("\r\n"); + + status_t err = mNetSession->sendRequest(sessionID, response.c_str()); + CHECK_EQ(err, (status_t)OK); + + err = sendM2(sessionID); + CHECK_EQ(err, (status_t)OK); +} + +void WifiDisplaySink::onGetParameterRequest( + int32_t sessionID, + int32_t cseq, + const sp &data) { + AString body; + + if (mState == CONNECTED) { + mUsingTCPTransport = false; + mUsingTCPInterleaving = false; + + char val[PROPERTY_VALUE_MAX]; + if (property_get("media.wfd-sink.tcp-mode", val, NULL)) { + if (!strcasecmp("true", val) || !strcmp("1", val)) { + ALOGI("Using TCP unicast transport."); + mUsingTCPTransport = true; + mUsingTCPInterleaving = false; + } else if (!strcasecmp("interleaved", val)) { + ALOGI("Using TCP interleaved transport."); + mUsingTCPTransport = true; + mUsingTCPInterleaving = true; + } + } else if (mFlags & FLAG_SPECIAL_MODE) { + mUsingTCPTransport = true; + } + + body = "wfd_video_formats: "; + body.append(mSinkSupportedVideoFormats.getFormatSpec()); + + body.append( + "\r\nwfd_audio_codecs: AAC 0000000F 00\r\n" + "wfd_client_rtp_ports: RTP/AVP/"); + + if (mUsingTCPTransport) { + body.append("TCP;"); + if (mUsingTCPInterleaving) { + body.append("interleaved"); + } else { + body.append("unicast 19000 0"); + } + } else { + body.append("UDP;unicast 19000 0"); + } + + body.append(" mode=play\r\n"); + } + + AString response = "RTSP/1.0 200 OK\r\n"; + AppendCommonResponse(&response, cseq); + response.append("Content-Type: text/parameters\r\n"); + response.append(StringPrintf("Content-Length: %d\r\n", body.size())); + response.append("\r\n"); + response.append(body); + + status_t err = mNetSession->sendRequest(sessionID, response.c_str()); + CHECK_EQ(err, (status_t)OK); +} + +status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) { + sp notify = new AMessage(kWhatMediaReceiverNotify, id()); + + mMediaReceiverLooper = new ALooper; + mMediaReceiverLooper->setName("media_receiver"); + + mMediaReceiverLooper->start( + false /* runOnCallingThread */, + false /* canCallJava */, + PRIORITY_AUDIO); + + mMediaReceiver = new MediaReceiver(mNetSession, notify); + mMediaReceiverLooper->registerHandler(mMediaReceiver); + + RTPReceiver::TransportMode rtpMode = RTPReceiver::TRANSPORT_UDP; + if (mUsingTCPTransport) { + if (mUsingTCPInterleaving) { + rtpMode = 
RTPReceiver::TRANSPORT_TCP_INTERLEAVED; + } else { + rtpMode = RTPReceiver::TRANSPORT_TCP; + } + } + + int32_t localRTPPort; + status_t err = mMediaReceiver->addTrack( + rtpMode, RTPReceiver::TRANSPORT_UDP /* rtcpMode */, &localRTPPort); + + if (err == OK) { + err = mMediaReceiver->initAsync(MediaReceiver::MODE_TRANSPORT_STREAM); + } + + if (err != OK) { + mMediaReceiverLooper->unregisterHandler(mMediaReceiver->id()); + mMediaReceiver.clear(); + + mMediaReceiverLooper->stop(); + mMediaReceiverLooper.clear(); + + return err; + } + + AString request = StringPrintf("SETUP %s RTSP/1.0\r\n", uri); + + AppendCommonResponse(&request, mNextCSeq); + + if (rtpMode == RTPReceiver::TRANSPORT_TCP_INTERLEAVED) { + request.append("Transport: RTP/AVP/TCP;interleaved=0-1\r\n"); + } else if (rtpMode == RTPReceiver::TRANSPORT_TCP) { + if (mFlags & FLAG_SPECIAL_MODE) { + // This isn't quite true, since the RTP connection is through TCP + // and the RTCP connection through UDP... + request.append( + StringPrintf( + "Transport: RTP/AVP/TCP;unicast;client_port=%d-%d\r\n", + localRTPPort, localRTPPort + 1)); + } else { + request.append( + StringPrintf( + "Transport: RTP/AVP/TCP;unicast;client_port=%d\r\n", + localRTPPort)); + } + } else { + request.append( + StringPrintf( + "Transport: RTP/AVP/UDP;unicast;client_port=%d-%d\r\n", + localRTPPort, + localRTPPort + 1)); + } + + request.append("\r\n"); + + ALOGV("request = '%s'", request.c_str()); + + err = mNetSession->sendRequest(sessionID, request.c_str(), request.size()); + + if (err != OK) { + return err; + } + + registerResponseHandler( + sessionID, mNextCSeq, &WifiDisplaySink::onReceiveSetupResponse); + + ++mNextCSeq; + + return OK; +} + +status_t WifiDisplaySink::sendPlay(int32_t sessionID, const char *uri) { + AString request = StringPrintf("PLAY %s RTSP/1.0\r\n", uri); + + AppendCommonResponse(&request, mNextCSeq); + + request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str())); + request.append("\r\n"); + + status_t err = + mNetSession->sendRequest(sessionID, request.c_str(), request.size()); + + if (err != OK) { + return err; + } + + registerResponseHandler( + sessionID, mNextCSeq, &WifiDisplaySink::onReceivePlayResponse); + + ++mNextCSeq; + + return OK; +} + +status_t WifiDisplaySink::sendIDRFrameRequest(int32_t sessionID) { + CHECK(!mIDRFrameRequestPending); + + AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n"; + + AppendCommonResponse(&request, mNextCSeq); + + AString content = "wfd_idr_request\r\n"; + + request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str())); + request.append(StringPrintf("Content-Length: %d\r\n", content.size())); + request.append("\r\n"); + request.append(content); + + status_t err = + mNetSession->sendRequest(sessionID, request.c_str(), request.size()); + + if (err != OK) { + return err; + } + + registerResponseHandler( + sessionID, + mNextCSeq, + &WifiDisplaySink::onReceiveIDRFrameRequestResponse); + + ++mNextCSeq; + + mIDRFrameRequestPending = true; + + return OK; +} + +void WifiDisplaySink::onSetParameterRequest( + int32_t sessionID, + int32_t cseq, + const sp &data) { + const char *content = data->getContent(); + + if (strstr(content, "wfd_trigger_method: SETUP\r\n") != NULL) { + if ((mFlags & FLAG_SPECIAL_MODE) && !mTimeOffsetValid) { + mSetupDeferred = true; + } else { + status_t err = + sendSetup( + sessionID, + "rtsp://x.x.x.x:x/wfd1.0/streamid=0"); + + CHECK_EQ(err, (status_t)OK); + } + } + + AString response = "RTSP/1.0 200 OK\r\n"; + AppendCommonResponse(&response, 
cseq); + response.append("\r\n"); + + status_t err = mNetSession->sendRequest(sessionID, response.c_str()); + CHECK_EQ(err, (status_t)OK); +} + +void WifiDisplaySink::sendErrorResponse( + int32_t sessionID, + const char *errorDetail, + int32_t cseq) { + AString response; + response.append("RTSP/1.0 "); + response.append(errorDetail); + response.append("\r\n"); + + AppendCommonResponse(&response, cseq); + + response.append("\r\n"); + + status_t err = mNetSession->sendRequest(sessionID, response.c_str()); + CHECK_EQ(err, (status_t)OK); +} + +// static +void WifiDisplaySink::AppendCommonResponse(AString *response, int32_t cseq) { + time_t now = time(NULL); + struct tm *now2 = gmtime(&now); + char buf[128]; + strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S %z", now2); + + response->append("Date: "); + response->append(buf); + response->append("\r\n"); + + response->append(StringPrintf("User-Agent: %s\r\n", sUserAgent.c_str())); + + if (cseq >= 0) { + response->append(StringPrintf("CSeq: %d\r\n", cseq)); + } +} + +} // namespace android diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h new file mode 100644 index 0000000..adb9d89 --- /dev/null +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h @@ -0,0 +1,196 @@ +/* + * Copyright 2012, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef WIFI_DISPLAY_SINK_H_ + +#define WIFI_DISPLAY_SINK_H_ + +#include "ANetworkSession.h" + +#include "VideoFormats.h" + +#include +#include + +namespace android { + +struct AMessage; +struct DirectRenderer; +struct MediaReceiver; +struct ParsedMessage; +struct TimeSyncer; + +// Represents the RTSP client acting as a wifi display sink. +// Connects to a wifi display source and renders the incoming +// transport stream using a MediaPlayer instance. +struct WifiDisplaySink : public AHandler { + enum { + kWhatDisconnected, + }; + + enum Flags { + FLAG_SPECIAL_MODE = 1, + }; + + // If no notification message is specified (notify == NULL) + // the sink will stop its looper() once the session ends, + // otherwise it will post an appropriate notification but leave + // the looper() running. 
+ WifiDisplaySink( + uint32_t flags, + const sp &netSession, + const sp &bufferProducer = NULL, + const sp ¬ify = NULL); + + void start(const char *sourceHost, int32_t sourcePort); + void start(const char *uri); + +protected: + virtual ~WifiDisplaySink(); + virtual void onMessageReceived(const sp &msg); + +private: + enum State { + UNDEFINED, + CONNECTING, + CONNECTED, + PAUSED, + PLAYING, + }; + + enum { + kWhatStart, + kWhatRTSPNotify, + kWhatStop, + kWhatMediaReceiverNotify, + kWhatTimeSyncerNotify, + kWhatReportLateness, + }; + + struct ResponseID { + int32_t mSessionID; + int32_t mCSeq; + + bool operator<(const ResponseID &other) const { + return mSessionID < other.mSessionID + || (mSessionID == other.mSessionID + && mCSeq < other.mCSeq); + } + }; + + typedef status_t (WifiDisplaySink::*HandleRTSPResponseFunc)( + int32_t sessionID, const sp &msg); + + static const int64_t kReportLatenessEveryUs = 1000000ll; + + static const AString sUserAgent; + + State mState; + uint32_t mFlags; + VideoFormats mSinkSupportedVideoFormats; + sp mNetSession; + sp mSurfaceTex; + sp mNotify; + sp mTimeSyncer; + bool mUsingTCPTransport; + bool mUsingTCPInterleaving; + AString mRTSPHost; + int32_t mSessionID; + + int32_t mNextCSeq; + + KeyedVector mResponseHandlers; + + sp mMediaReceiverLooper; + sp mMediaReceiver; + sp mRenderer; + + AString mPlaybackSessionID; + int32_t mPlaybackSessionTimeoutSecs; + + bool mIDRFrameRequestPending; + + int64_t mTimeOffsetUs; + bool mTimeOffsetValid; + + bool mSetupDeferred; + + size_t mLatencyCount; + int64_t mLatencySumUs; + int64_t mLatencyMaxUs; + + int64_t mMaxDelayMs; + + status_t sendM2(int32_t sessionID); + status_t sendSetup(int32_t sessionID, const char *uri); + status_t sendPlay(int32_t sessionID, const char *uri); + status_t sendIDRFrameRequest(int32_t sessionID); + + status_t onReceiveM2Response( + int32_t sessionID, const sp &msg); + + status_t onReceiveSetupResponse( + int32_t sessionID, const sp &msg); + + status_t configureTransport(const sp &msg); + + status_t onReceivePlayResponse( + int32_t sessionID, const sp &msg); + + status_t onReceiveIDRFrameRequestResponse( + int32_t sessionID, const sp &msg); + + void registerResponseHandler( + int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func); + + void onReceiveClientData(const sp &msg); + + void onOptionsRequest( + int32_t sessionID, + int32_t cseq, + const sp &data); + + void onGetParameterRequest( + int32_t sessionID, + int32_t cseq, + const sp &data); + + void onSetParameterRequest( + int32_t sessionID, + int32_t cseq, + const sp &data); + + void onMediaReceiverNotify(const sp &msg); + + void sendErrorResponse( + int32_t sessionID, + const char *errorDetail, + int32_t cseq); + + static void AppendCommonResponse(AString *response, int32_t cseq); + + bool ParseURL( + const char *url, AString *host, int32_t *port, AString *path, + AString *user, AString *pass); + + void dumpDelay(size_t trackIndex, int64_t timeUs); + + DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySink); +}; + +} // namespace android + +#endif // WIFI_DISPLAY_SINK_H_ diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index 3d7b865..cacfcca 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -559,6 +559,8 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( converter->dropAFrame(); } } + } else if (what == MediaSender::kWhatInformSender) { + 
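The ResponseID key and registerResponseHandler() declared in the header above route each incoming RTSP reply to the method that issued the matching request, keyed by (session ID, CSeq). The same bookkeeping in portable C++, with std::map and std::function standing in for KeyedVector and the member-function-pointer typedef (a sketch, not the sink's actual dispatch code):

    #include <cstdint>
    #include <functional>
    #include <map>
    #include <string>

    struct ResponseKey {
        int32_t sessionID;
        int32_t cseq;
        bool operator<(const ResponseKey &other) const {
            return sessionID < other.sessionID
                    || (sessionID == other.sessionID && cseq < other.cseq);
        }
    };

    using ResponseHandler =
            std::function<int(int32_t sessionID, const std::string &msg)>;

    class ResponseDispatcher {
    public:
        void registerHandler(int32_t sessionID, int32_t cseq, ResponseHandler func) {
            mHandlers[{sessionID, cseq}] = std::move(func);
        }

        // Called when a reply with the given CSeq arrives on a session.
        int dispatch(int32_t sessionID, int32_t cseq, const std::string &msg) {
            auto it = mHandlers.find({sessionID, cseq});
            if (it == mHandlers.end()) {
                return -1;  // no handler registered; caller treats this as an error
            }
            ResponseHandler handler = std::move(it->second);
            mHandlers.erase(it);  // a given CSeq expects a single reply
            return handler(sessionID, msg);
        }

    private:
        std::map<ResponseKey, ResponseHandler> mHandlers;
    };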
onSinkFeedback(msg); } else { TRESPASS(); } @@ -654,6 +656,89 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( } } +void WifiDisplaySource::PlaybackSession::onSinkFeedback(const sp &msg) { + int64_t avgLatencyUs; + CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs)); + + int64_t maxLatencyUs; + CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs)); + + ALOGI("sink reports avg. latency of %lld ms (max %lld ms)", + avgLatencyUs / 1000ll, + maxLatencyUs / 1000ll); + + if (mVideoTrackIndex >= 0) { + const sp &videoTrack = mTracks.valueFor(mVideoTrackIndex); + sp converter = videoTrack->converter(); + + if (converter != NULL) { + int32_t videoBitrate = + Converter::GetInt32Property("media.wfd.video-bitrate", -1); + + char val[PROPERTY_VALUE_MAX]; + if (videoBitrate < 0 + && property_get("media.wfd.video-bitrate", val, NULL) + && !strcasecmp("adaptive", val)) { + videoBitrate = converter->getVideoBitrate(); + + if (avgLatencyUs > 300000ll) { + videoBitrate *= 0.6; + } else if (avgLatencyUs < 100000ll) { + videoBitrate *= 1.1; + } + } + + if (videoBitrate > 0) { + if (videoBitrate < 500000) { + videoBitrate = 500000; + } else if (videoBitrate > 10000000) { + videoBitrate = 10000000; + } + + if (videoBitrate != converter->getVideoBitrate()) { + ALOGI("setting video bitrate to %d bps", videoBitrate); + + converter->setVideoBitrate(videoBitrate); + } + } + } + + sp repeaterSource = videoTrack->repeaterSource(); + if (repeaterSource != NULL) { + double rateHz = + Converter::GetInt32Property( + "media.wfd.video-framerate", -1); + + char val[PROPERTY_VALUE_MAX]; + if (rateHz < 0.0 + && property_get("media.wfd.video-framerate", val, NULL) + && !strcasecmp("adaptive", val)) { + rateHz = repeaterSource->getFrameRate(); + + if (avgLatencyUs > 300000ll) { + rateHz *= 0.9; + } else if (avgLatencyUs < 200000ll) { + rateHz *= 1.1; + } + } + + if (rateHz > 0) { + if (rateHz < 5.0) { + rateHz = 5.0; + } else if (rateHz > 30.0) { + rateHz = 30.0; + } + + if (rateHz != repeaterSource->getFrameRate()) { + ALOGI("setting frame rate to %.2f Hz", rateHz); + + repeaterSource->setFrameRate(rateHz); + } + } + } + } +} + status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer( bool enableAudio, bool enableVideo) { DataSource::RegisterDefaultSniffers(); diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 2b5bee9..4a49811 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -23,6 +23,7 @@ #include "Parameters.h" #include "ParsedMessage.h" #include "rtp/RTPSender.h" +#include "TimeSyncer.h" #include #include @@ -164,6 +165,14 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { } else { err = -EINVAL; } + } + + if (err == OK) { + sp notify = new AMessage(kWhatTimeSyncerNotify, id()); + mTimeSyncer = new TimeSyncer(mNetSession, notify); + looper()->registerHandler(mTimeSyncer); + + mTimeSyncer->startServer(8123); mState = AWAITING_CLIENT_CONNECTION; } @@ -539,6 +548,11 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { break; } + case kWhatTimeSyncerNotify: + { + break; + } + default: TRESPASS(); } diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index 44d3e4d..3efa0b4 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ 
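onSinkFeedback() above is a small feedback controller: when the sink reports high average latency the source backs the video bitrate off, when latency is low it probes upward, and the result is clamped to a fixed range. Stripped of the Converter and system-property plumbing, the adjustment rule looks roughly like this (thresholds and clamps copied from the code above; a sketch, not the actual method):

    #include <cstdint>

    // Returns the adjusted video bitrate (bps) given the sink's average latency.
    // Thresholds and clamps mirror the adaptive branch in onSinkFeedback().
    static int32_t adaptVideoBitrate(int32_t currentBps, int64_t avgLatencyUs) {
        double bps = currentBps;
        if (avgLatencyUs > 300000ll) {
            bps *= 0.6;               // back off when the sink is falling behind
        } else if (avgLatencyUs < 100000ll) {
            bps *= 1.1;               // probe upward when latency is comfortably low
        }
        if (bps < 500000.0)   bps = 500000.0;     // 500 kbps floor
        if (bps > 10000000.0) bps = 10000000.0;   // 10 Mbps ceiling
        return static_cast<int32_t>(bps);
    }

The frame-rate branch applies the same idea with its own thresholds, factors of 0.9/1.1 and a 5-30 Hz clamp.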
-30,6 +30,7 @@ namespace android { struct IHDCP; struct IRemoteDisplayClient; struct ParsedMessage; +struct TimeSyncer; // Represents the RTSP server acting as a wifi display source. // Manages incoming connections, sets up Playback sessions as necessary. @@ -82,6 +83,7 @@ private: kWhatHDCPNotify, kWhatFinishStop2, kWhatTeardownTriggerTimedOut, + kWhatTimeSyncerNotify, }; struct ResponseID { @@ -118,6 +120,7 @@ private: sp mNetSession; sp mClient; AString mMediaPath; + sp mTimeSyncer; struct in_addr mInterfaceAddr; int32_t mSessionID; diff --git a/media/libstagefright/wifi-display/udptest.cpp b/media/libstagefright/wifi-display/udptest.cpp new file mode 100644 index 0000000..111846d --- /dev/null +++ b/media/libstagefright/wifi-display/udptest.cpp @@ -0,0 +1,116 @@ +/* + * Copyright 2012, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NEBUG 0 +#define LOG_TAG "udptest" +#include + +#include "ANetworkSession.h" +#include "TimeSyncer.h" + +#include +#include + +namespace android { + +} // namespace android + +static void usage(const char *me) { + fprintf(stderr, + "usage: %s -c host[:port]\tconnect to test server\n" + " -l \tcreate a test server\n", + me); +} + +int main(int argc, char **argv) { + using namespace android; + + ProcessState::self()->startThreadPool(); + + int32_t localPort = -1; + int32_t connectToPort = -1; + AString connectToHost; + + int res; + while ((res = getopt(argc, argv, "hc:l:")) >= 0) { + switch (res) { + case 'c': + { + const char *colonPos = strrchr(optarg, ':'); + + if (colonPos == NULL) { + connectToHost = optarg; + connectToPort = 49152; + } else { + connectToHost.setTo(optarg, colonPos - optarg); + + char *end; + connectToPort = strtol(colonPos + 1, &end, 10); + + if (*end != '\0' || end == colonPos + 1 + || connectToPort < 1 || connectToPort > 65535) { + fprintf(stderr, "Illegal port specified.\n"); + exit(1); + } + } + break; + } + + case 'l': + { + char *end; + localPort = strtol(optarg, &end, 10); + + if (*end != '\0' || end == optarg + || localPort < 1 || localPort > 65535) { + fprintf(stderr, "Illegal port specified.\n"); + exit(1); + } + break; + } + + case '?': + case 'h': + usage(argv[0]); + exit(1); + } + } + + if (localPort < 0 && connectToPort < 0) { + fprintf(stderr, + "You need to select either client or server mode.\n"); + exit(1); + } + + sp netSession = new ANetworkSession; + netSession->start(); + + sp looper = new ALooper; + + sp handler = new TimeSyncer(netSession, NULL /* notify */); + looper->registerHandler(handler); + + if (localPort >= 0) { + handler->startServer(localPort); + } else { + handler->startClient(connectToHost.c_str(), connectToPort); + } + + looper->start(true /* runOnCallingThread */); + + return 0; +} + diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index c947765..9fee4d0 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -18,6 +18,7 @@ #define LOG_TAG 
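Both udptest.cpp above and wfd.cpp below parse a "host[:port]" argument the same way: split on the last ':', convert the port with strtol(), and reject anything outside 1-65535. Factored out as a helper for illustration (the default port is whatever the caller passes; udptest.cpp uses 49152, wfd.cpp uses kWifiDisplayDefaultPort):

    #include <cstdint>
    #include <cstdlib>
    #include <cstring>
    #include <string>

    // Splits "host[:port]" into its parts; falls back to defaultPort when no
    // port is given. Returns false on a malformed port, mirroring the checks
    // done in udptest.cpp / wfd.cpp.
    static bool parseHostPort(const char *arg, int32_t defaultPort,
                              std::string *host, int32_t *port) {
        const char *colonPos = strrchr(arg, ':');
        if (colonPos == NULL) {
            *host = arg;
            *port = defaultPort;
            return true;
        }

        host->assign(arg, colonPos - arg);

        char *end;
        long value = strtol(colonPos + 1, &end, 10);
        if (*end != '\0' || end == colonPos + 1 || value < 1 || value > 65535) {
            return false;
        }
        *port = static_cast<int32_t>(value);
        return true;
    }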
"wfd" #include +#include "sink/WifiDisplaySink.h" #include "source/WifiDisplaySource.h" #include @@ -38,8 +39,12 @@ namespace android { static void usage(const char *me) { fprintf(stderr, "usage:\n" - " %s -l iface[:port]\tcreate a wifi display source\n" - " -f(ilename) \tstream media\n", + " %s -c host[:port]\tconnect to wifi source\n" + " -u uri \tconnect to an rtsp uri\n" + " -l ip[:port] \tlisten on the specified port " + " -f(ilename) \tstream media " + "(create a sink)\n" + " -s(pecial) \trun in 'special' mode\n", me); } @@ -209,14 +214,48 @@ int main(int argc, char **argv) { DataSource::RegisterDefaultSniffers(); + AString connectToHost; + int32_t connectToPort = -1; + AString uri; + AString listenOnAddr; int32_t listenOnPort = -1; AString path; + bool specialMode = false; + int res; - while ((res = getopt(argc, argv, "hl:f:")) >= 0) { + while ((res = getopt(argc, argv, "hc:l:u:f:s")) >= 0) { switch (res) { + case 'c': + { + const char *colonPos = strrchr(optarg, ':'); + + if (colonPos == NULL) { + connectToHost = optarg; + connectToPort = WifiDisplaySource::kWifiDisplayDefaultPort; + } else { + connectToHost.setTo(optarg, colonPos - optarg); + + char *end; + connectToPort = strtol(colonPos + 1, &end, 10); + + if (*end != '\0' || end == colonPos + 1 + || connectToPort < 1 || connectToPort > 65535) { + fprintf(stderr, "Illegal port specified.\n"); + exit(1); + } + } + break; + } + + case 'u': + { + uri = optarg; + break; + } + case 'f': { path = optarg; @@ -245,6 +284,12 @@ int main(int argc, char **argv) { break; } + case 's': + { + specialMode = true; + break; + } + case '?': case 'h': default: @@ -253,6 +298,13 @@ int main(int argc, char **argv) { } } + if (connectToPort >= 0 && listenOnPort >= 0) { + fprintf(stderr, + "You can connect to a source or create one, " + "but not both at the same time.\n"); + exit(1); + } + if (listenOnPort >= 0) { if (path.empty()) { createSource(listenOnAddr, listenOnPort); @@ -263,7 +315,72 @@ int main(int argc, char **argv) { exit(0); } - usage(argv[0]); + if (connectToPort < 0 && uri.empty()) { + fprintf(stderr, + "You need to select either source host or uri.\n"); + + exit(1); + } + + if (connectToPort >= 0 && !uri.empty()) { + fprintf(stderr, + "You need to either connect to a wfd host or an rtsp url, " + "not both.\n"); + exit(1); + } + + sp composerClient = new SurfaceComposerClient; + CHECK_EQ(composerClient->initCheck(), (status_t)OK); + + sp display(SurfaceComposerClient::getBuiltInDisplay( + ISurfaceComposer::eDisplayIdMain)); + DisplayInfo info; + SurfaceComposerClient::getDisplayInfo(display, &info); + ssize_t displayWidth = info.w; + ssize_t displayHeight = info.h; + + ALOGV("display is %d x %d\n", displayWidth, displayHeight); + + sp control = + composerClient->createSurface( + String8("A Surface"), + displayWidth, + displayHeight, + PIXEL_FORMAT_RGB_565, + 0); + + CHECK(control != NULL); + CHECK(control->isValid()); + + SurfaceComposerClient::openGlobalTransaction(); + CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK); + CHECK_EQ(control->show(), (status_t)OK); + SurfaceComposerClient::closeGlobalTransaction(); + + sp surface = control->getSurface(); + CHECK(surface != NULL); + + sp session = new ANetworkSession; + session->start(); + + sp looper = new ALooper; + + sp sink = new WifiDisplaySink( + specialMode ? 
WifiDisplaySink::FLAG_SPECIAL_MODE : 0 /* flags */, + session, + surface->getIGraphicBufferProducer()); + + looper->registerHandler(sink); + + if (connectToPort >= 0) { + sink->start(connectToHost.c_str(), connectToPort); + } else { + sink->start(uri.c_str()); + } + + looper->start(true /* runOnCallingThread */); + + composerClient->dispose(); return 0; } -- cgit v1.1 From d09801b99503b57c35e321ad9afa7e861e012813 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Tue, 23 Apr 2013 15:16:57 -0700 Subject: Camera2: Fix deadlock on shutdown due to client getting killed. When the binder connection dies and is the only holder of a strong pointer to the Camera2Client, disconnect is called from the destructor. At this point, all weak pointers to Camera2Client are no longer promotable, and lots of cleanup code paths are broken as a result. Rework all such code paths to not need the client pointer, and to discard image buffers that arrive during shutdown. Bug: 8696047 Change-Id: Ic0672ecde7c1baaf65079f925a45bd5be45f1fb3 --- services/camera/libcameraservice/Camera2Client.cpp | 20 ++- services/camera/libcameraservice/Camera2Device.cpp | 3 +- services/camera/libcameraservice/CameraService.cpp | 4 + .../libcameraservice/camera2/CallbackProcessor.cpp | 78 +++++++--- .../libcameraservice/camera2/CallbackProcessor.h | 8 +- .../libcameraservice/camera2/JpegProcessor.cpp | 45 +++--- .../libcameraservice/camera2/JpegProcessor.h | 8 +- .../camera2/StreamingProcessor.cpp | 161 ++++++++++++--------- .../libcameraservice/camera2/StreamingProcessor.h | 5 +- .../libcameraservice/camera2/ZslProcessor.cpp | 74 ++++++---- .../camera/libcameraservice/camera2/ZslProcessor.h | 6 +- 11 files changed, 263 insertions(+), 149 deletions(-) diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index 9421a77..eae7461 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -135,6 +135,7 @@ status_t Camera2Client::initialize(camera_module_t *module) Camera2Client::~Camera2Client() { ATRACE_CALL(); + ALOGV("~Camera2Client"); mDestructionStarted = true; @@ -369,6 +370,12 @@ void Camera2Client::disconnect() { ALOGV("Camera %d: Shutting down", mCameraId); + /** + * disconnect() cannot call any methods that might need to promote a + * wp, since disconnect can be called from the destructor, at + * which point all such promotions will fail. 
+ */ + stopPreviewL(); { @@ -538,7 +545,12 @@ status_t Camera2Client::setPreviewWindowL(const sp& binder, break; case Parameters::PREVIEW: // Already running preview - need to stop and create a new stream - mStreamingProcessor->stopStream(); + res = stopStream(); + if (res != OK) { + ALOGE("%s: Unable to stop preview to swap windows: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } state = Parameters::WAITING_FOR_PREVIEW_WINDOW; break; } @@ -745,7 +757,11 @@ void Camera2Client::stopPreviewL() { // no break case Parameters::RECORD: case Parameters::PREVIEW: - mStreamingProcessor->stopStream(); + res = stopStream(); + if (res != OK) { + ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + } res = mDevice->waitUntilDrained(); if (res != OK) { ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)", diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/Camera2Device.cpp index 946cdba..77df152 100644 --- a/services/camera/libcameraservice/Camera2Device.cpp +++ b/services/camera/libcameraservice/Camera2Device.cpp @@ -1133,7 +1133,8 @@ cleanUpBuffers: status_t Camera2Device::StreamAdapter::release() { ATRACE_CALL(); status_t res; - ALOGV("%s: Releasing stream %d", __FUNCTION__, mId); + ALOGV("%s: Releasing stream %d (%d x %d, format %d)", __FUNCTION__, mId, + mWidth, mHeight, mFormat); if (mState >= ALLOCATED) { res = mHal2Device->ops->release_stream(mHal2Device, mId); if (res != OK) { diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 2db5224..cdeb92e 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -793,6 +793,7 @@ CameraService::Client::Client(const sp& cameraService, // tear down the client CameraService::Client::~Client() { + ALOGV("~Client"); mDestructionStarted = true; mCameraService->releaseSound(); @@ -820,10 +821,12 @@ CameraService::BasicClient::BasicClient(const sp& cameraService, } CameraService::BasicClient::~BasicClient() { + ALOGV("~BasicClient"); mDestructionStarted = true; } void CameraService::BasicClient::disconnect() { + ALOGV("BasicClient::disconnect"); mCameraService->removeClientByRemote(mRemoteBinder); // client shouldn't be able to call into us anymore mClientPid = 0; @@ -922,6 +925,7 @@ void CameraService::Client::notifyError() { // NOTE: function is idempotent void CameraService::Client::disconnect() { + ALOGV("Client::disconnect"); BasicClient::disconnect(); mCameraService->setCameraFree(mCameraId); mCameraService->updateStatus(ICameraServiceListener::STATUS_PRESENT, diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp index 30c14ef..dd37283 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp @@ -30,9 +30,11 @@ namespace android { namespace camera2 { -CallbackProcessor::CallbackProcessor(wp client): +CallbackProcessor::CallbackProcessor(sp client): Thread(false), mClient(client), + mDevice(client->getCameraDevice()), + mId(client->getCameraId()), mCallbackAvailable(false), mCallbackStreamId(NO_STREAM) { } @@ -56,9 +58,11 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { Mutex::Autolock l(mInputMutex); - sp client = mClient.promote(); - if (client == 0) return OK; - sp device = client->getCameraDevice(); + sp 
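The processor changes that follow all apply the same fix: cache a wp<CameraDeviceBase> and the camera id at construction time instead of promoting mClient on every call, because once ~Camera2Client is running its strong count is already zero and any promote() of a weak pointer to it must fail. The hazard is easy to reproduce with standard smart pointers, where std::weak_ptr plays the role of wp<> (C++17 for weak_from_this(); a sketch of the failure mode, not of the camera code):

    #include <iostream>
    #include <memory>

    struct Client : std::enable_shared_from_this<Client> {
        ~Client() {
            // By the time the destructor runs, the strong count is zero, so a
            // weak reference to *this can no longer be promoted.
            std::weak_ptr<Client> weakSelf = weak_from_this();
            std::cout << "promote in dtor: "
                      << (weakSelf.lock() ? "succeeds" : "fails") << "\n";
        }
    };

    int main() {
        std::shared_ptr<Client> client = std::make_shared<Client>();
        client.reset();  // destructor runs here and prints "promote in dtor: fails"
        return 0;
    }

That is why disconnect() above is written so it never needs to promote a wp<Camera2Client>, and why buffers arriving during shutdown are simply discarded.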
device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } if (mCallbackConsumer == 0) { // Create CPU buffer queue endpoint @@ -76,7 +80,7 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { ¤tWidth, ¤tHeight, ¤tFormat); if (res != OK) { ALOGE("%s: Camera %d: Error querying callback output stream info: " - "%s (%d)", __FUNCTION__, client->getCameraId(), + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } @@ -87,11 +91,11 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { // assuming that all existing use of old callback stream is // completed. ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed", - __FUNCTION__, client->getCameraId(), mCallbackStreamId); + __FUNCTION__, mId, mCallbackStreamId); res = device->deleteStream(mCallbackStreamId); if (res != OK) { ALOGE("%s: Camera %d: Unable to delete old output stream " - "for callbacks: %s (%d)", __FUNCTION__, client->getCameraId(), + "for callbacks: %s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } @@ -108,7 +112,7 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { params.previewFormat, 0, &mCallbackStreamId); if (res != OK) { ALOGE("%s: Camera %d: Can't create output stream for callbacks: " - "%s (%d)", __FUNCTION__, client->getCameraId(), + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } @@ -119,15 +123,24 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { status_t CallbackProcessor::deleteStream() { ATRACE_CALL(); + sp device; - Mutex::Autolock l(mInputMutex); + { + Mutex::Autolock l(mInputMutex); - if (mCallbackStreamId != NO_STREAM) { - sp client = mClient.promote(); - if (client == 0) return OK; - sp device = client->getCameraDevice(); + if (mCallbackStreamId == NO_STREAM) { + return OK; + } + device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + } + device->deleteStream(mCallbackStreamId); - device->deleteStream(mCallbackStreamId); + { + Mutex::Autolock l(mInputMutex); mCallbackHeap.clear(); mCallbackWindow.clear(); @@ -161,13 +174,32 @@ bool CallbackProcessor::threadLoop() { do { sp client = mClient.promote(); - if (client == 0) return false; - res = processNewCallback(client); + if (client == 0) { + res = discardNewCallback(); + } else { + res = processNewCallback(client); + } } while (res == OK); return true; } +status_t CallbackProcessor::discardNewCallback() { + ATRACE_CALL(); + status_t res; + CpuConsumer::LockedBuffer imgBuffer; + res = mCallbackConsumer->lockNextBuffer(&imgBuffer); + if (res != OK) { + if (res != BAD_VALUE) { + ALOGE("%s: Camera %d: Error receiving next callback buffer: " + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); + } + return res; + } + mCallbackConsumer->unlockBuffer(imgBuffer); + return OK; +} + status_t CallbackProcessor::processNewCallback(sp &client) { ATRACE_CALL(); status_t res; @@ -181,12 +213,12 @@ status_t CallbackProcessor::processNewCallback(sp &client) { if (res != OK) { if (res != BAD_VALUE) { ALOGE("%s: Camera %d: Error receiving next callback buffer: " - "%s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res); + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); } return res; } ALOGV("%s: Camera %d: Preview callback available", __FUNCTION__, - client->getCameraId()); + mId); { SharedParameters::Lock l(client->getParameters()); @@ -195,7 +227,7 @@ 
status_t CallbackProcessor::processNewCallback(sp &client) { && l.mParameters.state != Parameters::RECORD && l.mParameters.state != Parameters::VIDEO_SNAPSHOT) { ALOGV("%s: Camera %d: No longer streaming", - __FUNCTION__, client->getCameraId()); + __FUNCTION__, mId); mCallbackConsumer->unlockBuffer(imgBuffer); return OK; } @@ -216,7 +248,7 @@ status_t CallbackProcessor::processNewCallback(sp &client) { if (imgBuffer.format != l.mParameters.previewFormat) { ALOGE("%s: Camera %d: Unexpected format for callback: " - "%x, expected %x", __FUNCTION__, client->getCameraId(), + "%x, expected %x", __FUNCTION__, mId, imgBuffer.format, l.mParameters.previewFormat); mCallbackConsumer->unlockBuffer(imgBuffer); return INVALID_OPERATION; @@ -241,7 +273,7 @@ status_t CallbackProcessor::processNewCallback(sp &client) { "Camera2Client::CallbackHeap"); if (mCallbackHeap->mHeap->getSize() == 0) { ALOGE("%s: Camera %d: Unable to allocate memory for callbacks", - __FUNCTION__, client->getCameraId()); + __FUNCTION__, mId); mCallbackConsumer->unlockBuffer(imgBuffer); return INVALID_OPERATION; } @@ -252,7 +284,7 @@ status_t CallbackProcessor::processNewCallback(sp &client) { if (mCallbackHeapFree == 0) { ALOGE("%s: Camera %d: No free callback buffers, dropping frame", - __FUNCTION__, client->getCameraId()); + __FUNCTION__, mId); mCallbackConsumer->unlockBuffer(imgBuffer); return OK; } @@ -282,7 +314,7 @@ status_t CallbackProcessor::processNewCallback(sp &client) { l(client->mSharedCameraCallbacks); if (l.mRemoteCallback != 0) { ALOGV("%s: Camera %d: Invoking client data callback", - __FUNCTION__, client->getCameraId()); + __FUNCTION__, mId); l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_FRAME, mCallbackHeap->mBuffers[heapIdx], NULL); } diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.h b/services/camera/libcameraservice/camera2/CallbackProcessor.h index e68bb75..1c40a03 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.h +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.h @@ -30,6 +30,7 @@ namespace android { class Camera2Client; +class CameraDeviceBase; namespace camera2 { @@ -39,7 +40,7 @@ namespace camera2 { class CallbackProcessor: public Thread, public CpuConsumer::FrameAvailableListener { public: - CallbackProcessor(wp client); + CallbackProcessor(sp client); ~CallbackProcessor(); void onFrameAvailable(); @@ -52,6 +53,8 @@ class CallbackProcessor: private: static const nsecs_t kWaitDuration = 10000000; // 10 ms wp mClient; + wp mDevice; + int mId; mutable Mutex mInputMutex; bool mCallbackAvailable; @@ -72,7 +75,8 @@ class CallbackProcessor: virtual bool threadLoop(); status_t processNewCallback(sp &client); - + // Used when shutting down + status_t discardNewCallback(); }; diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.cpp b/services/camera/libcameraservice/camera2/JpegProcessor.cpp index 286fac4..01d7f9c 100644 --- a/services/camera/libcameraservice/camera2/JpegProcessor.cpp +++ b/services/camera/libcameraservice/camera2/JpegProcessor.cpp @@ -35,11 +35,12 @@ namespace android { namespace camera2 { JpegProcessor::JpegProcessor( - wp client, + sp client, wp sequencer): Thread(false), - mClient(client), + mDevice(client->getCameraDevice()), mSequencer(sequencer), + mId(client->getCameraId()), mCaptureAvailable(false), mCaptureStreamId(NO_STREAM) { } @@ -64,16 +65,18 @@ status_t JpegProcessor::updateStream(const Parameters ¶ms) { Mutex::Autolock l(mInputMutex); - sp client = mClient.promote(); - if (client == 0) return OK; - 
sp device = client->getCameraDevice(); + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } // Find out buffer size for JPEG camera_metadata_ro_entry_t maxJpegSize = params.staticInfo(ANDROID_JPEG_MAX_SIZE); if (maxJpegSize.count == 0) { ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!", - __FUNCTION__, client->getCameraId()); + __FUNCTION__, mId); return INVALID_OPERATION; } @@ -89,7 +92,7 @@ status_t JpegProcessor::updateStream(const Parameters ¶ms) { "Camera2Client::CaptureHeap"); if (mCaptureHeap->getSize() == 0) { ALOGE("%s: Camera %d: Unable to allocate memory for capture", - __FUNCTION__, client->getCameraId()); + __FUNCTION__, mId); return NO_MEMORY; } } @@ -102,18 +105,18 @@ status_t JpegProcessor::updateStream(const Parameters ¶ms) { if (res != OK) { ALOGE("%s: Camera %d: Error querying capture output stream info: " "%s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); + mId, strerror(-res), res); return res; } if (currentWidth != (uint32_t)params.pictureWidth || currentHeight != (uint32_t)params.pictureHeight) { ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed", - __FUNCTION__, client->getCameraId(), mCaptureStreamId); + __FUNCTION__, mId, mCaptureStreamId); res = device->deleteStream(mCaptureStreamId); if (res != OK) { ALOGE("%s: Camera %d: Unable to delete old output stream " "for capture: %s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); + mId, strerror(-res), res); return res; } mCaptureStreamId = NO_STREAM; @@ -128,7 +131,7 @@ status_t JpegProcessor::updateStream(const Parameters ¶ms) { &mCaptureStreamId); if (res != OK) { ALOGE("%s: Camera %d: Can't create output stream for capture: " - "%s (%d)", __FUNCTION__, client->getCameraId(), + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } @@ -143,9 +146,11 @@ status_t JpegProcessor::deleteStream() { Mutex::Autolock l(mInputMutex); if (mCaptureStreamId != NO_STREAM) { - sp client = mClient.promote(); - if (client == 0) return OK; - sp device = client->getCameraDevice(); + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } device->deleteStream(mCaptureStreamId); @@ -180,15 +185,13 @@ bool JpegProcessor::threadLoop() { } do { - sp client = mClient.promote(); - if (client == 0) return false; - res = processNewCapture(client); + res = processNewCapture(); } while (res == OK); return true; } -status_t JpegProcessor::processNewCapture(sp &client) { +status_t JpegProcessor::processNewCapture() { ATRACE_CALL(); status_t res; sp captureHeap; @@ -200,17 +203,17 @@ status_t JpegProcessor::processNewCapture(sp &client) { if (res != BAD_VALUE) { ALOGE("%s: Camera %d: Error receiving still image buffer: " "%s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); + mId, strerror(-res), res); } return res; } ALOGV("%s: Camera %d: Still capture available", __FUNCTION__, - client->getCameraId()); + mId); if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) { ALOGE("%s: Camera %d: Unexpected format for still image: " - "%x, expected %x", __FUNCTION__, client->getCameraId(), + "%x, expected %x", __FUNCTION__, mId, imgBuffer.format, HAL_PIXEL_FORMAT_BLOB); mCaptureConsumer->unlockBuffer(imgBuffer); diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.h b/services/camera/libcameraservice/camera2/JpegProcessor.h index 74f4738..a38611c 100644 
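Because the capture heap above is sized from ANDROID_JPEG_MAX_SIZE, the locked BLOB buffer is normally much larger than the compressed image it contains, and the processor has to work out the real payload size (see findJpegSize() in the header below). A simplified way to do that is to scan for the JPEG end-of-image marker (a sketch; a production helper may also walk the marker segments rather than scanning blindly):

    #include <cstddef>
    #include <cstdint>

    // Returns the number of bytes up to and including the JPEG end-of-image
    // marker (0xFF 0xD9), or maxSize when no marker is found.
    static size_t simpleFindJpegSize(const uint8_t *buf, size_t maxSize) {
        if (maxSize < 2 || buf[0] != 0xFF || buf[1] != 0xD8) {
            return 0;  // not a JPEG: missing start-of-image marker
        }
        for (size_t i = 2; i + 1 < maxSize; ++i) {
            if (buf[i] == 0xFF && buf[i + 1] == 0xD9) {
                return i + 2;  // include the EOI marker itself
            }
        }
        return maxSize;
    }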
--- a/services/camera/libcameraservice/camera2/JpegProcessor.h +++ b/services/camera/libcameraservice/camera2/JpegProcessor.h @@ -29,6 +29,7 @@ namespace android { class Camera2Client; +class CameraDeviceBase; class MemoryHeapBase; namespace camera2 { @@ -41,7 +42,7 @@ class CaptureSequencer; class JpegProcessor: public Thread, public CpuConsumer::FrameAvailableListener { public: - JpegProcessor(wp client, wp sequencer); + JpegProcessor(sp client, wp sequencer); ~JpegProcessor(); // CpuConsumer listener implementation @@ -54,8 +55,9 @@ class JpegProcessor: void dump(int fd, const Vector& args) const; private: static const nsecs_t kWaitDuration = 10000000; // 10 ms - wp mClient; + wp mDevice; wp mSequencer; + int mId; mutable Mutex mInputMutex; bool mCaptureAvailable; @@ -72,7 +74,7 @@ class JpegProcessor: virtual bool threadLoop(); - status_t processNewCapture(sp &client); + status_t processNewCapture(); size_t findJpegSize(uint8_t* jpegBuffer, size_t maxSize); }; diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp index fbc5b93..c36cf87 100644 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp @@ -31,8 +31,10 @@ namespace android { namespace camera2 { -StreamingProcessor::StreamingProcessor(wp client): +StreamingProcessor::StreamingProcessor(sp client): mClient(client), + mDevice(client->getCameraDevice()), + mId(client->getCameraId()), mActiveRequest(NONE), mPreviewRequestId(Camera2Client::kPreviewRequestIdStart), mPreviewStreamId(NO_STREAM), @@ -40,7 +42,6 @@ StreamingProcessor::StreamingProcessor(wp client): mRecordingStreamId(NO_STREAM), mRecordingHeapCount(kDefaultRecordingHeapCount) { - } StreamingProcessor::~StreamingProcessor() { @@ -70,16 +71,19 @@ bool StreamingProcessor::haveValidPreviewWindow() const { status_t StreamingProcessor::updatePreviewRequest(const Parameters ¶ms) { ATRACE_CALL(); status_t res; - sp client = mClient.promote(); - if (client == 0) return INVALID_OPERATION; + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } Mutex::Autolock m(mMutex); if (mPreviewRequest.entryCount() == 0) { - res = client->getCameraDevice()->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, + res = device->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, &mPreviewRequest); if (res != OK) { ALOGE("%s: Camera %d: Unable to create default preview request: " - "%s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res); + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } } @@ -87,7 +91,7 @@ status_t StreamingProcessor::updatePreviewRequest(const Parameters ¶ms) { res = params.updateRequest(&mPreviewRequest); if (res != OK) { ALOGE("%s: Camera %d: Unable to update common entries of preview " - "request: %s (%d)", __FUNCTION__, client->getCameraId(), + "request: %s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } @@ -96,7 +100,7 @@ status_t StreamingProcessor::updatePreviewRequest(const Parameters ¶ms) { &mPreviewRequestId, 1); if (res != OK) { ALOGE("%s: Camera %d: Unable to update request id for preview: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); + __FUNCTION__, mId, strerror(-res), res); return res; } @@ -108,9 +112,11 @@ status_t StreamingProcessor::updatePreviewStream(const Parameters ¶ms) { Mutex::Autolock m(mMutex); status_t res; - sp client = 
mClient.promote(); - if (client == 0) return INVALID_OPERATION; - sp device = client->getCameraDevice(); + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } if (mPreviewStreamId != NO_STREAM) { // Check if stream parameters have to change @@ -119,24 +125,24 @@ status_t StreamingProcessor::updatePreviewStream(const Parameters ¶ms) { ¤tWidth, ¤tHeight, 0); if (res != OK) { ALOGE("%s: Camera %d: Error querying preview stream info: " - "%s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res); + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } if (currentWidth != (uint32_t)params.previewWidth || currentHeight != (uint32_t)params.previewHeight) { ALOGV("%s: Camera %d: Preview size switch: %d x %d -> %d x %d", - __FUNCTION__, client->getCameraId(), currentWidth, currentHeight, + __FUNCTION__, mId, currentWidth, currentHeight, params.previewWidth, params.previewHeight); res = device->waitUntilDrained(); if (res != OK) { ALOGE("%s: Camera %d: Error waiting for preview to drain: " - "%s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res); + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } res = device->deleteStream(mPreviewStreamId); if (res != OK) { ALOGE("%s: Camera %d: Unable to delete old output stream " - "for preview: %s (%d)", __FUNCTION__, client->getCameraId(), + "for preview: %s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } @@ -151,7 +157,7 @@ status_t StreamingProcessor::updatePreviewStream(const Parameters ¶ms) { &mPreviewStreamId); if (res != OK) { ALOGE("%s: Camera %d: Unable to create preview stream: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); + __FUNCTION__, mId, strerror(-res), res); return res; } } @@ -160,7 +166,7 @@ status_t StreamingProcessor::updatePreviewStream(const Parameters ¶ms) { params.previewTransform); if (res != OK) { ALOGE("%s: Camera %d: Unable to set preview stream transform: " - "%s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res); + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } @@ -174,12 +180,14 @@ status_t StreamingProcessor::deletePreviewStream() { Mutex::Autolock m(mMutex); if (mPreviewStreamId != NO_STREAM) { - sp client = mClient.promote(); - if (client == 0) return INVALID_OPERATION; - sp device = client->getCameraDevice(); + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } ALOGV("%s: for cameraId %d on streamId %d", - __FUNCTION__, client->getCameraId(), mPreviewStreamId); + __FUNCTION__, mId, mPreviewStreamId); res = device->waitUntilDrained(); if (res != OK) { @@ -206,11 +214,9 @@ int StreamingProcessor::getPreviewStreamId() const { status_t StreamingProcessor::setRecordingBufferCount(size_t count) { ATRACE_CALL(); // 32 is the current upper limit on the video buffer count for BufferQueue - sp client = mClient.promote(); - if (client == 0) return INVALID_OPERATION; if (count > 32) { ALOGE("%s: Camera %d: Error setting %d as video buffer count value", - __FUNCTION__, client->getCameraId(), count); + __FUNCTION__, mId, count); return BAD_VALUE; } @@ -233,15 +239,18 @@ status_t StreamingProcessor::updateRecordingRequest(const Parameters ¶ms) { status_t res; Mutex::Autolock m(mMutex); - sp client = mClient.promote(); - if (client == 0) return INVALID_OPERATION; + sp device = mDevice.promote(); + if (device == 0) { + 
ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } if (mRecordingRequest.entryCount() == 0) { - res = client->getCameraDevice()->createDefaultRequest(CAMERA2_TEMPLATE_VIDEO_RECORD, + res = device->createDefaultRequest(CAMERA2_TEMPLATE_VIDEO_RECORD, &mRecordingRequest); if (res != OK) { ALOGE("%s: Camera %d: Unable to create default recording request:" - " %s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res); + " %s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } } @@ -249,7 +258,7 @@ status_t StreamingProcessor::updateRecordingRequest(const Parameters ¶ms) { res = params.updateRequest(&mRecordingRequest); if (res != OK) { ALOGE("%s: Camera %d: Unable to update common entries of recording " - "request: %s (%d)", __FUNCTION__, client->getCameraId(), + "request: %s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } @@ -258,7 +267,7 @@ status_t StreamingProcessor::updateRecordingRequest(const Parameters ¶ms) { &mRecordingRequestId, 1); if (res != OK) { ALOGE("%s: Camera %d: Unable to update request id for request: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); + __FUNCTION__, mId, strerror(-res), res); return res; } @@ -270,9 +279,11 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters ¶ms) { status_t res; Mutex::Autolock m(mMutex); - sp client = mClient.promote(); - if (client == 0) return INVALID_OPERATION; - sp device = client->getCameraDevice(); + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } if (mRecordingConsumer == 0) { // Create CPU buffer queue endpoint. We need one more buffer here so that we can @@ -296,7 +307,7 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters ¶ms) { ¤tWidth, ¤tHeight, 0); if (res != OK) { ALOGE("%s: Camera %d: Error querying recording output stream info: " - "%s (%d)", __FUNCTION__, client->getCameraId(), + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } @@ -307,7 +318,7 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters ¶ms) { if (res != OK) { ALOGE("%s: Camera %d: Unable to delete old output stream " "for recording: %s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); + mId, strerror(-res), res); return res; } mRecordingStreamId = NO_STREAM; @@ -321,7 +332,7 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters ¶ms) { CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, 0, &mRecordingStreamId); if (res != OK) { ALOGE("%s: Camera %d: Can't create output stream for recording: " - "%s (%d)", __FUNCTION__, client->getCameraId(), + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } @@ -337,9 +348,11 @@ status_t StreamingProcessor::deleteRecordingStream() { Mutex::Autolock m(mMutex); if (mRecordingStreamId != NO_STREAM) { - sp client = mClient.promote(); - if (client == 0) return INVALID_OPERATION; - sp device = client->getCameraDevice(); + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } res = device->waitUntilDrained(); if (res != OK) { @@ -369,10 +382,13 @@ status_t StreamingProcessor::startStream(StreamType type, if (type == NONE) return INVALID_OPERATION; - sp client = mClient.promote(); - if (client == 0) return INVALID_OPERATION; + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", 
__FUNCTION__, mId); + return INVALID_OPERATION; + } - ALOGV("%s: Camera %d: type = %d", __FUNCTION__, client->getCameraId(), type); + ALOGV("%s: Camera %d: type = %d", __FUNCTION__, mId, type); Mutex::Autolock m(mMutex); @@ -384,22 +400,22 @@ status_t StreamingProcessor::startStream(StreamType type, outputStreams); if (res != OK) { ALOGE("%s: Camera %d: Unable to set up preview request: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); + __FUNCTION__, mId, strerror(-res), res); return res; } res = request.sort(); if (res != OK) { ALOGE("%s: Camera %d: Error sorting preview request: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); + __FUNCTION__, mId, strerror(-res), res); return res; } - res = client->getCameraDevice()->setStreamingRequest(request); + res = device->setStreamingRequest(request); if (res != OK) { ALOGE("%s: Camera %d: Unable to set preview request to start preview: " "%s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); + __FUNCTION__, mId, strerror(-res), res); return res; } mActiveRequest = type; @@ -413,16 +429,19 @@ status_t StreamingProcessor::stopStream() { Mutex::Autolock m(mMutex); - sp client = mClient.promote(); - if (client == 0) return INVALID_OPERATION; - sp device = client->getCameraDevice(); + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } res = device->clearStreamingRequest(); if (res != OK) { ALOGE("%s: Camera %d: Can't clear stream request: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); + __FUNCTION__, mId, strerror(-res), res); return res; } + mActiveRequest = NONE; return OK; @@ -466,7 +485,18 @@ void StreamingProcessor::onFrameAvailable() { nsecs_t timestamp; sp client = mClient.promote(); - if (client == 0) return; + if (client == 0) { + // Discard frames during shutdown + BufferItemConsumer::BufferItem imgBuffer; + res = mRecordingConsumer->acquireBuffer(&imgBuffer); + if (res != OK) { + ALOGE("%s: Camera %d: Error receiving recording buffer: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return; + } + mRecordingConsumer->releaseBuffer(imgBuffer); + return; + } { /* acquire SharedParameters before mMutex so we don't dead lock @@ -477,7 +507,7 @@ void StreamingProcessor::onFrameAvailable() { res = mRecordingConsumer->acquireBuffer(&imgBuffer); if (res != OK) { ALOGE("%s: Camera %d: Error receiving recording buffer: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); + __FUNCTION__, mId, strerror(-res), res); return; } timestamp = imgBuffer.mTimestamp; @@ -490,7 +520,7 @@ void StreamingProcessor::onFrameAvailable() { l.mParameters.state != Parameters::VIDEO_SNAPSHOT) { ALOGV("%s: Camera %d: Discarding recording image buffers " "received after recording done", __FUNCTION__, - client->getCameraId()); + mId); mRecordingConsumer->releaseBuffer(imgBuffer); return; } @@ -498,14 +528,14 @@ void StreamingProcessor::onFrameAvailable() { if (mRecordingHeap == 0) { const size_t bufferSize = 4 + sizeof(buffer_handle_t); ALOGV("%s: Camera %d: Creating recording heap with %d buffers of " - "size %d bytes", __FUNCTION__, client->getCameraId(), + "size %d bytes", __FUNCTION__, mId, mRecordingHeapCount, bufferSize); mRecordingHeap = new Camera2Heap(bufferSize, mRecordingHeapCount, "Camera2Client::RecordingHeap"); if (mRecordingHeap->mHeap->getSize() == 0) { ALOGE("%s: Camera %d: Unable to allocate memory for recording", - __FUNCTION__, 
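The shutdown branch added to onFrameAvailable() above shows the other half of the fix: when the client can no longer be promoted, incoming buffers are acquired and immediately released so the producer side never stalls waiting for free slots. The pattern, reduced to a generic drain loop (illustrative consumer interface, not the BufferItemConsumer signatures):

    // Drains a consumer whose output has nowhere to go: each pending buffer is
    // acquired and returned straight away so the producer keeps its slots.
    // Consumer is any type exposing acquire()/release() returning 0 on success,
    // standing in for the BufferItemConsumer calls used above.
    template <typename Consumer, typename Buffer>
    void drainAndDiscard(Consumer &consumer) {
        Buffer buffer;
        while (consumer.acquire(&buffer) == 0) {
            consumer.release(buffer);
        }
    }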
client->getCameraId()); + __FUNCTION__, mId); mRecordingConsumer->releaseBuffer(imgBuffer); return; } @@ -513,7 +543,7 @@ void StreamingProcessor::onFrameAvailable() { if (mRecordingBuffers[i].mBuf != BufferItemConsumer::INVALID_BUFFER_SLOT) { ALOGE("%s: Camera %d: Non-empty recording buffers list!", - __FUNCTION__, client->getCameraId()); + __FUNCTION__, mId); } } mRecordingBuffers.clear(); @@ -526,7 +556,7 @@ void StreamingProcessor::onFrameAvailable() { if ( mRecordingHeapFree == 0) { ALOGE("%s: Camera %d: No free recording buffers, dropping frame", - __FUNCTION__, client->getCameraId()); + __FUNCTION__, mId); mRecordingConsumer->releaseBuffer(imgBuffer); return; } @@ -536,7 +566,7 @@ void StreamingProcessor::onFrameAvailable() { mRecordingHeapFree--; ALOGV("%s: Camera %d: Timestamp %lld", - __FUNCTION__, client->getCameraId(), timestamp); + __FUNCTION__, mId, timestamp); ssize_t offset; size_t size; @@ -549,7 +579,7 @@ void StreamingProcessor::onFrameAvailable() { *((uint32_t*)data) = type; *((buffer_handle_t*)(data + 4)) = imgBuffer.mGraphicBuffer->handle; ALOGV("%s: Camera %d: Sending out buffer_handle_t %p", - __FUNCTION__, client->getCameraId(), + __FUNCTION__, mId, imgBuffer.mGraphicBuffer->handle); mRecordingBuffers.replaceAt(imgBuffer, heapIdx); recordingHeap = mRecordingHeap; @@ -568,9 +598,6 @@ void StreamingProcessor::releaseRecordingFrame(const sp& mem) { ATRACE_CALL(); status_t res; - sp client = mClient.promote(); - if (client == 0) return; - Mutex::Autolock m(mMutex); // Make sure this is for the current heap ssize_t offset; @@ -578,7 +605,7 @@ void StreamingProcessor::releaseRecordingFrame(const sp& mem) { sp heap = mem->getMemory(&offset, &size); if (heap->getHeapID() != mRecordingHeap->mHeap->getHeapID()) { ALOGW("%s: Camera %d: Mismatched heap ID, ignoring release " - "(got %x, expected %x)", __FUNCTION__, client->getCameraId(), + "(got %x, expected %x)", __FUNCTION__, mId, heap->getHeapID(), mRecordingHeap->mHeap->getHeapID()); return; } @@ -586,7 +613,7 @@ void StreamingProcessor::releaseRecordingFrame(const sp& mem) { uint32_t type = *(uint32_t*)data; if (type != kMetadataBufferTypeGrallocSource) { ALOGE("%s: Camera %d: Recording frame type invalid (got %x, expected %x)", - __FUNCTION__, client->getCameraId(), type, + __FUNCTION__, mId, type, kMetadataBufferTypeGrallocSource); return; } @@ -606,19 +633,19 @@ void StreamingProcessor::releaseRecordingFrame(const sp& mem) { } if (itemIndex == mRecordingBuffers.size()) { ALOGE("%s: Camera %d: Can't find buffer_handle_t %p in list of " - "outstanding buffers", __FUNCTION__, client->getCameraId(), + "outstanding buffers", __FUNCTION__, mId, imgHandle); return; } ALOGV("%s: Camera %d: Freeing buffer_handle_t %p", __FUNCTION__, - client->getCameraId(), imgHandle); + mId, imgHandle); res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]); if (res != OK) { ALOGE("%s: Camera %d: Unable to free recording frame " "(buffer_handle_t: %p): %s (%d)", __FUNCTION__, - client->getCameraId(), imgHandle, strerror(-res), res); + mId, imgHandle, strerror(-res), res); return; } mRecordingBuffers.replaceAt(itemIndex); diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.h b/services/camera/libcameraservice/camera2/StreamingProcessor.h index e5732ad..643114e 100644 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.h +++ b/services/camera/libcameraservice/camera2/StreamingProcessor.h @@ -27,6 +27,7 @@ namespace android { class Camera2Client; +class CameraDeviceBase; class IMemory; namespace 
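The recording path above never copies pixel data to the client: each IMemory handed out is a small metadata record, a 4-byte type tag followed by the gralloc buffer_handle_t, and releaseRecordingFrame() parses the same layout to find the buffer to hand back to the consumer. The layout, illustrated with portable code (void* stands in for buffer_handle_t, the tag value is a placeholder for kMetadataBufferTypeGrallocSource, and memcpy replaces the raw casts to sidestep alignment concerns):

    #include <cstdint>
    #include <cstring>

    static const uint32_t kTypeGrallocSource = 1;  // placeholder tag value

    struct MetadataRecord {
        uint8_t data[4 + sizeof(void *)];  // 4-byte type tag + native handle
    };

    static void packRecord(MetadataRecord *rec, uint32_t type, void *handle) {
        memcpy(rec->data, &type, sizeof(type));
        memcpy(rec->data + 4, &handle, sizeof(handle));
    }

    static bool unpackRecord(const MetadataRecord *rec, void **handleOut) {
        uint32_t type;
        memcpy(&type, rec->data, sizeof(type));
        if (type != kTypeGrallocSource) {
            return false;  // unexpected record type; caller ignores the frame
        }
        memcpy(handleOut, rec->data + 4, sizeof(*handleOut));
        return true;
    }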
camera2 { @@ -38,7 +39,7 @@ class Camera2Heap; */ class StreamingProcessor: public BufferItemConsumer::FrameAvailableListener { public: - StreamingProcessor(wp client); + StreamingProcessor(sp client); ~StreamingProcessor(); status_t setPreviewWindow(sp window); @@ -86,6 +87,8 @@ class StreamingProcessor: public BufferItemConsumer::FrameAvailableListener { }; wp mClient; + wp mDevice; + int mId; StreamType mActiveRequest; diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp index 769d9bd..2c12fb0 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp @@ -38,12 +38,14 @@ namespace android { namespace camera2 { ZslProcessor::ZslProcessor( - wp client, + sp client, wp sequencer): Thread(false), mState(RUNNING), mClient(client), + mDevice(client->getCameraDevice()), mSequencer(sequencer), + mId(client->getCameraId()), mZslBufferAvailable(false), mZslStreamId(NO_STREAM), mZslReprocessStreamId(NO_STREAM), @@ -69,7 +71,8 @@ void ZslProcessor::onFrameAvailable() { } } -void ZslProcessor::onFrameAvailable(int32_t /*frameId*/, const CameraMetadata &frame) { +void ZslProcessor::onFrameAvailable(int32_t /*frameId*/, + const CameraMetadata &frame) { Mutex::Autolock l(mInputMutex); camera_metadata_ro_entry_t entry; entry = frame.find(ANDROID_SENSOR_TIMESTAMP); @@ -113,8 +116,15 @@ status_t ZslProcessor::updateStream(const Parameters ¶ms) { Mutex::Autolock l(mInputMutex); sp client = mClient.promote(); - if (client == 0) return OK; - sp device = client->getCameraDevice(); + if (client == 0) { + ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } if (mZslConsumer == 0) { // Create CPU buffer queue endpoint @@ -136,7 +146,7 @@ status_t ZslProcessor::updateStream(const Parameters ¶ms) { if (res != OK) { ALOGE("%s: Camera %d: Error querying capture output stream info: " "%s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); + mId, strerror(-res), res); return res; } if (currentWidth != (uint32_t)params.fastInfo.arrayWidth || @@ -145,16 +155,16 @@ status_t ZslProcessor::updateStream(const Parameters ¶ms) { if (res != OK) { ALOGE("%s: Camera %d: Unable to delete old reprocess stream " "for ZSL: %s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); + mId, strerror(-res), res); return res; } ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed", - __FUNCTION__, client->getCameraId(), mZslStreamId); + __FUNCTION__, mId, mZslStreamId); res = device->deleteStream(mZslStreamId); if (res != OK) { ALOGE("%s: Camera %d: Unable to delete old output stream " "for ZSL: %s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); + mId, strerror(-res), res); return res; } mZslStreamId = NO_STREAM; @@ -173,7 +183,7 @@ status_t ZslProcessor::updateStream(const Parameters ¶ms) { &mZslStreamId); if (res != OK) { ALOGE("%s: Camera %d: Can't create output stream for ZSL: " - "%s (%d)", __FUNCTION__, client->getCameraId(), + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } @@ -181,7 +191,7 @@ status_t ZslProcessor::updateStream(const Parameters ¶ms) { &mZslReprocessStreamId); if (res != OK) { ALOGE("%s: Camera %d: Can't create reprocess stream for ZSL: " - "%s (%d)", __FUNCTION__, 
client->getCameraId(), + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); return res; } @@ -200,14 +210,18 @@ status_t ZslProcessor::deleteStream() { Mutex::Autolock l(mInputMutex); if (mZslStreamId != NO_STREAM) { - sp client = mClient.promote(); - if (client == 0) return OK; - sp device = client->getCameraDevice(); + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + clearZslQueueLocked(); res = device->deleteReprocessStream(mZslReprocessStreamId); if (res != OK) { ALOGE("%s: Camera %d: Cannot delete ZSL reprocessing stream %d: " - "%s (%d)", __FUNCTION__, client->getCameraId(), + "%s (%d)", __FUNCTION__, mId, mZslReprocessStreamId, strerror(-res), res); return res; } @@ -216,7 +230,7 @@ status_t ZslProcessor::deleteStream() { res = device->deleteStream(mZslStreamId); if (res != OK) { ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: " - "%s (%d)", __FUNCTION__, client->getCameraId(), + "%s (%d)", __FUNCTION__, mId, mZslStreamId, strerror(-res), res); return res; } @@ -246,7 +260,10 @@ status_t ZslProcessor::pushToReprocess(int32_t requestId) { status_t res; sp client = mClient.promote(); - if (client == 0) return INVALID_OPERATION; + if (client == 0) { + ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } IF_ALOGV() { dumpZslQueue(-1); @@ -309,7 +326,7 @@ status_t ZslProcessor::pushToReprocess(int32_t requestId) { if (res != OK) { ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: " "%s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); + __FUNCTION__, mId, strerror(-res), res); return INVALID_OPERATION; } // TODO: have push-and-clear be atomic @@ -328,7 +345,7 @@ status_t ZslProcessor::pushToReprocess(int32_t requestId) { if (res != OK) { ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL " "capture request: %s (%d)", __FUNCTION__, - client->getCameraId(), + mId, strerror(-res), res); return res; } @@ -397,26 +414,29 @@ bool ZslProcessor::threadLoop() { } do { - sp client = mClient.promote(); - if (client == 0) return false; - res = processNewZslBuffer(client); + res = processNewZslBuffer(); } while (res == OK); return true; } -status_t ZslProcessor::processNewZslBuffer(sp &client) { +status_t ZslProcessor::processNewZslBuffer() { ATRACE_CALL(); status_t res; - + sp zslConsumer; + { + Mutex::Autolock l(mInputMutex); + if (mZslConsumer == 0) return OK; + zslConsumer = mZslConsumer; + } ALOGVV("Trying to get next buffer"); BufferItemConsumer::BufferItem item; - res = mZslConsumer->acquireBuffer(&item); + res = zslConsumer->acquireBuffer(&item); if (res != OK) { if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { ALOGE("%s: Camera %d: Error receiving ZSL image buffer: " "%s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); + mId, strerror(-res), res); } else { ALOGVV(" No buffer"); } @@ -427,7 +447,7 @@ status_t ZslProcessor::processNewZslBuffer(sp &client) { if (mState == LOCKED) { ALOGVV("In capture, discarding new ZSL buffers"); - mZslConsumer->releaseBuffer(item); + zslConsumer->releaseBuffer(item); return OK; } @@ -435,7 +455,7 @@ status_t ZslProcessor::processNewZslBuffer(sp &client) { if ( (mZslQueueHead + 1) % kZslBufferDepth == mZslQueueTail) { ALOGVV("Releasing oldest buffer"); - mZslConsumer->releaseBuffer(mZslQueue[mZslQueueTail].buffer); + zslConsumer->releaseBuffer(mZslQueue[mZslQueueTail].buffer); mZslQueue.replaceAt(mZslQueueTail); mZslQueueTail = 
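The ZSL queue logic above keeps a fixed-depth ring of recent buffers: when a new buffer arrives and the queue is full, the oldest entry is released back to the consumer before the new one is stored. The same structure in isolation (a sketch; releasing a dropped buffer is delegated to the caller, much as the processor defers to mZslConsumer):

    #include <cstddef>

    // Fixed-depth queue in the spirit of the ZSL buffer queue: pushing into a
    // full queue first releases and overwrites the oldest entry.
    template <typename T, size_t Depth>
    class DropOldestQueue {
    public:
        template <typename ReleaseFn>
        void push(const T &item, ReleaseFn release) {
            if ((mHead + 1) % Depth == mTail) {   // full: drop the oldest entry
                release(mSlots[mTail]);
                mSlots[mTail] = T();
                mTail = (mTail + 1) % Depth;
            }
            mSlots[mHead] = item;
            mHead = (mHead + 1) % Depth;
        }

    private:
        T mSlots[Depth] = {};
        size_t mHead = 0;
        size_t mTail = 0;
    };

One slot is sacrificed to distinguish full from empty, matching the (head + 1) % depth == tail test in processNewZslBuffer().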
(mZslQueueTail + 1) % kZslBufferDepth; } diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/camera2/ZslProcessor.h index b2cf5b1..ee3bcd6 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor.h +++ b/services/camera/libcameraservice/camera2/ZslProcessor.h @@ -46,7 +46,7 @@ class ZslProcessor: virtual public FrameProcessor::FilteredListener, virtual public CameraDeviceBase::BufferReleasedListener { public: - ZslProcessor(wp client, wp sequencer); + ZslProcessor(sp client, wp sequencer); ~ZslProcessor(); // From mZslConsumer @@ -74,7 +74,9 @@ class ZslProcessor: } mState; wp mClient; + wp mDevice; wp mSequencer; + int mId; mutable Mutex mInputMutex; bool mZslBufferAvailable; @@ -109,7 +111,7 @@ class ZslProcessor: virtual bool threadLoop(); - status_t processNewZslBuffer(sp &client); + status_t processNewZslBuffer(); // Match up entries from frame list to buffers in ZSL queue void findMatchesLocked(); -- cgit v1.1 From 0776a14513300f04dc5c1d2f89c4156576b8b8ed Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Mon, 15 Apr 2013 14:59:22 -0700 Subject: Camera3: Add input stream support - Untested with actual CAMERA3_STREAM_INPUT streams. Bug: 8629088 Change-Id: Ia0c21ef0a2c951e401ea8babd15d3cceb4bb25a1 --- services/camera/libcameraservice/Camera3Device.cpp | 85 +++++- services/camera/libcameraservice/Camera3Device.h | 3 + .../camera3/Camera3InputStream.cpp | 329 +++++++++++++++++++-- .../libcameraservice/camera3/Camera3InputStream.h | 25 +- .../camera3/Camera3OutputStream.cpp | 2 +- .../libcameraservice/camera3/Camera3Stream.cpp | 31 ++ .../libcameraservice/camera3/Camera3Stream.h | 26 +- 7 files changed, 473 insertions(+), 28 deletions(-) diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index e53dbb5..3437130 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -30,6 +30,7 @@ #include #include "Camera3Device.h" #include "camera3/Camera3OutputStream.h" +#include "camera3/Camera3InputStream.h" using namespace android::camera3; @@ -369,6 +370,69 @@ status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t time return mRequestThread->waitUntilRequestProcessed(requestId, timeout); } +status_t Camera3Device::createInputStream( + uint32_t width, uint32_t height, int format, int *id) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + status_t res; + bool wasActive = false; + + switch (mStatus) { + case STATUS_ERROR: + ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + ALOGE("%s: Device not initialized", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_IDLE: + // OK + break; + case STATUS_ACTIVE: + ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); + mRequestThread->setPaused(true); + res = waitUntilDrainedLocked(); + if (res != OK) { + ALOGE("%s: Can't pause captures to reconfigure streams!", + __FUNCTION__); + mStatus = STATUS_ERROR; + return res; + } + wasActive = true; + break; + default: + ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + return INVALID_OPERATION; + } + assert(mStatus == STATUS_IDLE); + + if (mInputStream != 0) { + ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__); + return INVALID_OPERATION; + } + + sp newStream = new Camera3InputStream(mNextStreamId, + width, height, format); + + mInputStream = newStream; + + *id = mNextStreamId++; + 
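createInputStream() above (its tail continues below) follows the same discipline as the output-stream paths: if the device is actively capturing, requests are paused and the pipeline drained before the stream list changes, and captures resume only after a successful reconfiguration. The control flow, reduced to a skeleton with hypothetical names (not the actual Camera3Device API):

    // Hypothetical device interface used only to illustrate the control flow.
    struct StreamingDevice {
        bool active = false;
        void pauseRequests()    { /* stop issuing capture requests */ }
        void drain()            { /* wait for in-flight requests to finish */ }
        int  addStream()        { return 0; /* register the stream, return its id */ }
        void configureStreams() { /* push the new stream list to the HAL */ }
        void resumeRequests()   { /* restart capture requests */ }
    };

    // Adds a stream, pausing and resuming capture only if the device was active.
    static int addStreamSafely(StreamingDevice &dev) {
        bool wasActive = dev.active;
        if (wasActive) {
            dev.pauseRequests();
            dev.drain();             // nothing may reference the old configuration
        }

        int id = dev.addStream();

        if (wasActive) {
            dev.configureStreams();  // reconfigure before any new request is issued
            dev.resumeRequests();
        }
        return id;
    }

Camera3 also allows at most one input stream per device, which is why the method above bails out when mInputStream is already set.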
+ // Continue captures if active at start + if (wasActive) { + ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__); + res = configureStreamsLocked(); + if (res != OK) { + ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)", + __FUNCTION__, mNextStreamId, strerror(-res), res); + return res; + } + mRequestThread->setPaused(false); + } + + return OK; +} + status_t Camera3Device::createStream(sp consumer, uint32_t width, uint32_t height, int format, size_t size, int *id) { ATRACE_CALL(); @@ -1287,7 +1351,7 @@ bool Camera3Device::RequestThread::threadLoop() { if (nextRequest->mInputStream != NULL) { request.input_buffer = &inputBuffer; - res = nextRequest->mInputStream->getBuffer(&inputBuffer); + res = nextRequest->mInputStream->getInputBuffer(&inputBuffer); if (res != OK) { ALOGE("RequestThread: Can't get input buffer, skipping request:" " %s (%d)", strerror(-res), res); @@ -1358,6 +1422,23 @@ bool Camera3Device::RequestThread::threadLoop() { mLatestRequestSignal.signal(); } + // Return input buffer back to framework + if (request.input_buffer != NULL) { + Camera3Stream *stream = + Camera3Stream::cast(request.input_buffer->stream); + res = stream->returnInputBuffer(*(request.input_buffer)); + // Note: stream may be deallocated at this point, if this buffer was the + // last reference to it. + if (res != OK) { + ALOGE("%s: RequestThread: Can't return input buffer for frame %d to" + " its stream:%s (%d)", __FUNCTION__, + request.frame_number, strerror(-res), res); + // TODO: Report error upstream + } + } + + + return true; } @@ -1371,7 +1452,7 @@ void Camera3Device::RequestThread::cleanUpFailedRequest( } if (request.input_buffer != NULL) { request.input_buffer->status = CAMERA3_BUFFER_STATUS_ERROR; - nextRequest->mInputStream->returnBuffer(*(request.input_buffer), 0); + nextRequest->mInputStream->returnInputBuffer(*(request.input_buffer)); } for (size_t i = 0; i < request.num_output_buffers; i++) { outputBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR; diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/Camera3Device.h index 7f294e6..5c5faeb 100644 --- a/services/camera/libcameraservice/Camera3Device.h +++ b/services/camera/libcameraservice/Camera3Device.h @@ -82,6 +82,9 @@ class Camera3Device : virtual status_t createStream(sp consumer, uint32_t width, uint32_t height, int format, size_t size, int *id); + virtual status_t createInputStream( + uint32_t width, uint32_t height, int format, + int *id); virtual status_t createReprocessStreamFromStream(int outputId, int *id); virtual status_t getStreamInfo(int id, diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp index 8a48ee5..c7dd12a 100644 --- a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp @@ -18,6 +18,9 @@ #define ATRACE_TAG ATRACE_TAG_CAMERA //#define LOG_NDEBUG 0 +// This is needed for stdint.h to define INT64_MAX in C++ +#define __STDC_LIMIT_MACROS + #include #include #include "Camera3InputStream.h" @@ -28,38 +31,262 @@ namespace camera3 { Camera3InputStream::Camera3InputStream(int id, uint32_t width, uint32_t height, int format) : - Camera3Stream(id, CAMERA3_STREAM_INPUT, width, height, 0, format) { + Camera3Stream(id, CAMERA3_STREAM_INPUT, width, height, 0, format), + mTotalBufferCount(0), + mDequeuedBufferCount(0), + mFrameCount(0), + mLastTimestamp(0) { + mCombinedFence = new 
Fence(); + + if (format == HAL_PIXEL_FORMAT_BLOB) { + ALOGE("%s: Bad format, BLOB not supported", __FUNCTION__); + mState = STATE_ERROR; + } +} + +Camera3InputStream::~Camera3InputStream() { + disconnectLocked(); } -status_t Camera3InputStream::getBufferLocked(camera3_stream_buffer *buffer) { - (void) buffer; - ALOGE("%s: Not implemented", __FUNCTION__); - return INVALID_OPERATION; +status_t Camera3InputStream::getInputBufferLocked( + camera3_stream_buffer *buffer) { + ATRACE_CALL(); + status_t res; + + // FIXME: will not work in (re-)registration + if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) { + ALOGE("%s: Stream %d: Buffer registration for input streams" + " not implemented (state %d)", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + + // Allow acquire during IN_[RE]CONFIG for registration + if (mState != STATE_CONFIGURED && + mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) { + ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + + // Only limit acquire amount when fully configured + if (mState == STATE_CONFIGURED && + mDequeuedBufferCount == camera3_stream::max_buffers) { + ALOGE("%s: Stream %d: Already acquired maximum number of simultaneous" + " buffers (%d)", __FUNCTION__, mId, + camera3_stream::max_buffers); + return INVALID_OPERATION; + } + + ANativeWindowBuffer* anb; + int fenceFd; + + assert(mConsumer != 0); + + BufferItem bufferItem; + res = mConsumer->acquireBuffer(&bufferItem, /*waitForFence*/false); + + if (res != OK) { + ALOGE("%s: Stream %d: Can't acquire next output buffer: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + anb = bufferItem.mGraphicBuffer->getNativeBuffer(); + assert(anb != NULL); + fenceFd = bufferItem.mFence->dup(); + /** + * FenceFD now owned by HAL except in case of error, + * in which case we reassign it to acquire_fence + */ + + // Handing out a raw pointer to this object. Increment internal refcount. + incStrong(this); + buffer->stream = this; + buffer->buffer = &(anb->handle); + buffer->acquire_fence = fenceFd; + buffer->release_fence = -1; + buffer->status = CAMERA3_BUFFER_STATUS_OK; + + mDequeuedBufferCount++; + + mBuffersInFlight.push_back(bufferItem); + + return OK; } -status_t Camera3InputStream::returnBufferLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp) { - (void) timestamp; - (void) buffer; - ALOGE("%s: Not implemented", __FUNCTION__); - return INVALID_OPERATION; +status_t Camera3InputStream::returnInputBufferLocked( + const camera3_stream_buffer &buffer) { + ATRACE_CALL(); + status_t res; + + // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be + // decrementing the internal refcount next. In case this is the last ref, we + // might get destructed on the decStrong(), so keep an sp around until the + // end of the call - otherwise have to sprinkle the decStrong on all exit + // points. 
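// A standalone sketch of the manual refcount pattern described above, assuming
// only RefBase/sp<> from libutils (the `Holder` type and its method names are
// hypothetical):
//
//     struct Holder : public RefBase {
//         void handOut()  { incStrong(this); }   // raw pointer handed to HAL
//         void takeBack() {
//             sp<Holder> keepAlive(this);        // guard this call frame
//             decStrong(this);                   // may drop the last reference
//             // members stay valid until keepAlive goes out of scope
//         }
//     };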
+ sp keepAlive(this); + decStrong(this); + + // Allow buffers to be returned in the error state, to allow for disconnect + // and in the in-config states for registration + if (mState == STATE_CONSTRUCTED) { + ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + if (mDequeuedBufferCount == 0) { + ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__, + mId); + return INVALID_OPERATION; + } + + bool bufferFound = false; + BufferItem bufferItem; + { + // Find the buffer we are returning + Vector::iterator it, end; + for (it = mBuffersInFlight.begin(), end = mBuffersInFlight.end(); + it != end; + ++it) { + + const BufferItem& tmp = *it; + ANativeWindowBuffer *anb = tmp.mGraphicBuffer->getNativeBuffer(); + if (anb != NULL && &(anb->handle) == buffer.buffer) { + bufferFound = true; + bufferItem = tmp; + mBuffersInFlight.erase(it); + mDequeuedBufferCount--; + } + } + } + if (!bufferFound) { + ALOGE("%s: Stream %d: Can't return buffer that wasn't sent to HAL", + __FUNCTION__, mId); + return INVALID_OPERATION; + } + + if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { + if (buffer.release_fence != -1) { + ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when " + "there is an error", __FUNCTION__, mId, buffer.release_fence); + close(buffer.release_fence); + } + + /** + * Reassign release fence as the acquire fence incase of error + */ + const_cast(&buffer)->release_fence = + buffer.acquire_fence; + } + + /** + * Unconditionally return buffer to the buffer queue. + * - Fwk takes over the release_fence ownership + */ + sp releaseFence = new Fence(buffer.release_fence); + res = mConsumer->releaseBuffer(bufferItem, releaseFence); + if (res != OK) { + ALOGE("%s: Stream %d: Error releasing buffer back to buffer queue:" + " %s (%d)", __FUNCTION__, mId, strerror(-res), res); + return res; + } + + mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); + + mBufferReturnedSignal.signal(); + + return OK; + } bool Camera3InputStream::hasOutstandingBuffersLocked() const { - ALOGE("%s: Not implemented", __FUNCTION__); + nsecs_t signalTime = mCombinedFence->getSignalTime(); + ALOGV("%s: Stream %d: Has %d outstanding buffers," + " buffer signal time is %lld", + __FUNCTION__, mId, mDequeuedBufferCount, signalTime); + if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) { + return true; + } return false; } status_t Camera3InputStream::waitUntilIdle(nsecs_t timeout) { - (void) timeout; - ALOGE("%s: Not implemented", __FUNCTION__); - return INVALID_OPERATION; + status_t res; + { + Mutex::Autolock l(mLock); + while (mDequeuedBufferCount > 0) { + if (timeout != TIMEOUT_NEVER) { + nsecs_t startTime = systemTime(); + res = mBufferReturnedSignal.waitRelative(mLock, timeout); + if (res == TIMED_OUT) { + return res; + } else if (res != OK) { + ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + nsecs_t deltaTime = systemTime() - startTime; + if (timeout <= deltaTime) { + timeout = 0; + } else { + timeout -= deltaTime; + } + } else { + res = mBufferReturnedSignal.wait(mLock); + if (res != OK) { + ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + } + } + } + + // No lock + + unsigned int timeoutMs; + if (timeout == TIMEOUT_NEVER) { + timeoutMs = Fence::TIMEOUT_NEVER; + } else if (timeout == 0) { + timeoutMs = 0; + } else { + // Round up to wait at least 1 ms + timeoutMs = 
(timeout + 999999) / 1000000; + } + + return mCombinedFence->wait(timeoutMs); +} + +size_t Camera3InputStream::getBufferCountLocked() { + return mTotalBufferCount; } status_t Camera3InputStream::disconnectLocked() { - ALOGE("%s: Not implemented", __FUNCTION__); - return INVALID_OPERATION; + switch (mState) { + case STATE_IN_RECONFIG: + case STATE_CONFIGURED: + // OK + break; + default: + // No connection, nothing to do + return OK; + } + + if (mDequeuedBufferCount > 0) { + ALOGE("%s: Can't disconnect with %d buffers still acquired!", + __FUNCTION__, mDequeuedBufferCount); + return INVALID_OPERATION; + } + + assert(mBuffersInFlight.size() == 0); + + /** + * no-op since we can't disconnect the producer from the consumer-side + */ + + mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG : STATE_CONSTRUCTED; + return OK; } sp Camera3InputStream::getProducerInterface() const { @@ -67,9 +294,71 @@ sp Camera3InputStream::getProducerInterface() const { } void Camera3InputStream::dump(int fd, const Vector &args) const { - (void) fd; (void) args; - ALOGE("%s: Not implemented", __FUNCTION__); + String8 lines; + lines.appendFormat(" Stream[%d]: Input\n", mId); + lines.appendFormat(" State: %d\n", mState); + lines.appendFormat(" Dims: %d x %d, format 0x%x\n", + camera3_stream::width, camera3_stream::height, + camera3_stream::format); + lines.appendFormat(" Max size: %d\n", mMaxSize); + lines.appendFormat(" Usage: %d, max HAL buffers: %d\n", + camera3_stream::usage, camera3_stream::max_buffers); + lines.appendFormat(" Frames produced: %d, last timestamp: %lld ns\n", + mFrameCount, mLastTimestamp); + lines.appendFormat(" Total buffers: %d, currently acquired: %d\n", + mTotalBufferCount, mDequeuedBufferCount); + write(fd, lines.string(), lines.size()); +} + +status_t Camera3InputStream::configureQueueLocked() { + status_t res; + + switch (mState) { + case STATE_IN_RECONFIG: + res = disconnectLocked(); + if (res != OK) { + return res; + } + break; + case STATE_IN_CONFIG: + // OK + break; + default: + ALOGE("%s: Bad state: %d", __FUNCTION__, mState); + return INVALID_OPERATION; + } + + assert(mMaxSize == 0); + assert(camera3_stream::format != HAL_PIXEL_FORMAT_BLOB); + + mTotalBufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS + + camera3_stream::max_buffers; + mDequeuedBufferCount = 0; + mFrameCount = 0; + + if (mConsumer.get() == 0) { + mConsumer = new BufferItemConsumer(camera3_stream::usage, + mTotalBufferCount, + /*synchronousMode*/true); + mConsumer->setName(String8::format("Camera3-InputStream-%d", mId)); + } + + res = mConsumer->setDefaultBufferSize(camera3_stream::width, + camera3_stream::height); + if (res != OK) { + ALOGE("%s: Stream %d: Could not set buffer dimensions %dx%d", + __FUNCTION__, mId, camera3_stream::width, camera3_stream::height); + return res; + } + res = mConsumer->setDefaultBufferFormat(camera3_stream::format); + if (res != OK) { + ALOGE("%s: Stream %d: Could not set buffer format %d", + __FUNCTION__, mId, camera3_stream::format); + return res; + } + + return OK; } }; // namespace camera3 diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.h b/services/camera/libcameraservice/camera3/Camera3InputStream.h index c4b5dd9..fd9f464 100644 --- a/services/camera/libcameraservice/camera3/Camera3InputStream.h +++ b/services/camera/libcameraservice/camera3/Camera3InputStream.h @@ -29,6 +29,10 @@ namespace camera3 { /** * A class for managing a single stream of input data to the camera device. 
+ * + * This class serves as a consumer adapter for the HAL, and will consume the + * buffers by feeding them into the HAL, as well as releasing the buffers back + * the buffers once the HAL is done with them. */ class Camera3InputStream : public Camera3Stream { public: @@ -36,6 +40,7 @@ class Camera3InputStream : public Camera3Stream { * Set up a stream for formats that have fixed size, such as RAW and YUV. */ Camera3InputStream(int id, uint32_t width, uint32_t height, int format); + ~Camera3InputStream(); virtual status_t waitUntilIdle(nsecs_t timeout); virtual void dump(int fd, const Vector &args) const; @@ -49,18 +54,32 @@ class Camera3InputStream : public Camera3Stream { private: + typedef BufferItemConsumer::BufferItem BufferItem; + sp mConsumer; + Vector mBuffersInFlight; + size_t mTotalBufferCount; + size_t mDequeuedBufferCount; + Condition mBufferReturnedSignal; + uint32_t mFrameCount; + nsecs_t mLastTimestamp; + + // The merged release fence for all returned buffers + sp mCombinedFence; /** * Camera3Stream interface */ - virtual status_t getBufferLocked(camera3_stream_buffer *buffer); - virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer, - nsecs_t timestamp); + virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer); + virtual status_t returnInputBufferLocked( + const camera3_stream_buffer &buffer); virtual bool hasOutstandingBuffersLocked() const; virtual status_t disconnectLocked(); + virtual status_t configureQueueLocked(); + virtual size_t getBufferCountLocked(); + }; // class Camera3InputStream }; // namespace camera3 diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp index 276b940..ec8cf0d 100644 --- a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp @@ -298,7 +298,7 @@ status_t Camera3OutputStream::configureQueueLocked() { switch (mState) { case STATE_IN_RECONFIG: - res = disconnect(); + res = disconnectLocked(); if (res != OK) { return res; } diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.cpp b/services/camera/libcameraservice/camera3/Camera3Stream.cpp index cf3072b..bc259b6 100644 --- a/services/camera/libcameraservice/camera3/Camera3Stream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3Stream.cpp @@ -188,6 +188,18 @@ status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer, return returnBufferLocked(buffer, timestamp); } +status_t Camera3Stream::getInputBuffer(camera3_stream_buffer *buffer) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + return getInputBufferLocked(buffer); +} + +status_t Camera3Stream::returnInputBuffer(const camera3_stream_buffer &buffer) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + return returnInputBufferLocked(buffer); +} + bool Camera3Stream::hasOutstandingBuffers() const { ATRACE_CALL(); Mutex::Autolock l(mLock); @@ -259,6 +271,25 @@ status_t Camera3Stream::registerBuffersLocked(camera3_device *hal3Device) { return res; } +status_t Camera3Stream::getBufferLocked(camera3_stream_buffer *) { + ALOGE("%s: This type of stream does not support output", __FUNCTION__); + return INVALID_OPERATION; +} +status_t Camera3Stream::returnBufferLocked(const camera3_stream_buffer &, + nsecs_t) { + ALOGE("%s: This type of stream does not support output", __FUNCTION__); + return INVALID_OPERATION; +} +status_t Camera3Stream::getInputBufferLocked(camera3_stream_buffer *) { + ALOGE("%s: This type of 
stream does not support input", __FUNCTION__); + return INVALID_OPERATION; +} +status_t Camera3Stream::returnInputBufferLocked( + const camera3_stream_buffer &) { + ALOGE("%s: This type of stream does not support input", __FUNCTION__); + return INVALID_OPERATION; +} + }; // namespace camera3 }; // namespace android diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.h b/services/camera/libcameraservice/camera3/Camera3Stream.h index 2364cfd..46a3872 100644 --- a/services/camera/libcameraservice/camera3/Camera3Stream.h +++ b/services/camera/libcameraservice/camera3/Camera3Stream.h @@ -157,6 +157,25 @@ class Camera3Stream : nsecs_t timestamp); /** + * Fill in the camera3_stream_buffer with the next valid buffer for this + * stream, to hand over to the HAL. + * + * This method may only be called once finishConfiguration has been called. + * For bidirectional streams, this method applies to the input-side + * buffers. + * + */ + status_t getInputBuffer(camera3_stream_buffer *buffer); + + /** + * Return a buffer to the stream after use by the HAL. + * + * This method may only be called for buffers provided by getBuffer(). + * For bidirectional streams, this method applies to the input-side buffers + */ + status_t returnInputBuffer(const camera3_stream_buffer &buffer); + + /** * Whether any of the stream's buffers are currently in use by the HAL, * including buffers that have been returned but not yet had their * release fence signaled. @@ -215,9 +234,12 @@ class Camera3Stream : // cast to camera3_stream*, implementations must increment the // refcount of the stream manually in getBufferLocked, and decrement it in // returnBufferLocked. - virtual status_t getBufferLocked(camera3_stream_buffer *buffer) = 0; + virtual status_t getBufferLocked(camera3_stream_buffer *buffer); virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer, - nsecs_t timestamp) = 0; + nsecs_t timestamp); + virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer); + virtual status_t returnInputBufferLocked( + const camera3_stream_buffer &buffer); virtual bool hasOutstandingBuffersLocked() const = 0; virtual status_t disconnectLocked() = 0; -- cgit v1.1 From 2ded8b53014602d25b20bade8ce46db95a8da4b5 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Fri, 26 Apr 2013 16:11:45 -0700 Subject: stagefright: Fix port-reconfiguration & output-buffer-filled race condition Remove the invalid assumption that when a port-reconfiguration event is received, buffers cannot be downstream (waiting to be rendered). Luckily, these buffers are properly handled (freed) after they are sent to be rendered. Also, the case where buffers have been sent onto the native window is already handled. Change-Id: I1df39c1ffc2bfb96f8b7b4ee5be07cae654f956f Signed-off-by: Lajos Molnar Bug: 8736466 --- media/libstagefright/ACodec.cpp | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 6d952c3..73bf6ba 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -748,12 +748,10 @@ status_t ACodec::freeOutputBuffersNotOwnedByComponent() { BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); - if (info->mStatus != - BufferInfo::OWNED_BY_COMPONENT) { - // We shouldn't have sent out any buffers to the client at this - // point. - CHECK_NE((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM); - + // At this time some buffers may still be with the component + // or being drained. 
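// (Per the commit message above: buffers owned downstream are freed after they
// are sent to be rendered, and buffers already handed to the native window are
// covered by a separate path, so only the remaining buffers are freed here.)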
+ if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT && + info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { CHECK_EQ((status_t)OK, freeBuffer(kPortIndexOutput, i)); } } -- cgit v1.1 From 40602741ae87e6bf368c17dd28db4d2db344bded Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Mon, 29 Apr 2013 10:31:06 -0700 Subject: camera: Add new RingBufferConsumer to keep a ring buffer of acquired frames Bug: 8563838 Change-Id: I5a95e0be94e5388b30639905efae42d3c3279f72 --- services/camera/libcameraservice/Android.mk | 3 +- .../libcameraservice/gui/RingBufferConsumer.cpp | 346 +++++++++++++++++++++ .../libcameraservice/gui/RingBufferConsumer.h | 189 +++++++++++ 3 files changed, 537 insertions(+), 1 deletion(-) create mode 100644 services/camera/libcameraservice/gui/RingBufferConsumer.cpp create mode 100644 services/camera/libcameraservice/gui/RingBufferConsumer.h diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index 6847bf8..e1c36d5 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -28,7 +28,8 @@ LOCAL_SRC_FILES:= \ camera3/Camera3Stream.cpp \ camera3/Camera3InputStream.cpp \ camera3/Camera3OutputStream.cpp \ - camera3/Camera3ZslStream.cpp + camera3/Camera3ZslStream.cpp \ + gui/RingBufferConsumer.cpp \ LOCAL_SHARED_LIBRARIES:= \ libui \ diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp new file mode 100644 index 0000000..1b2a717 --- /dev/null +++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp @@ -0,0 +1,346 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#undef NDEBUG +#include + +//#define LOG_NDEBUG 0 +#define LOG_TAG "RingBufferConsumer" +#define ATRACE_TAG ATRACE_TAG_GRAPHICS +#include + +#include + +#define BI_LOGV(x, ...) ALOGV("[%s] "x, mName.string(), ##__VA_ARGS__) +#define BI_LOGD(x, ...) ALOGD("[%s] "x, mName.string(), ##__VA_ARGS__) +#define BI_LOGI(x, ...) ALOGI("[%s] "x, mName.string(), ##__VA_ARGS__) +#define BI_LOGW(x, ...) ALOGW("[%s] "x, mName.string(), ##__VA_ARGS__) +#define BI_LOGE(x, ...) 
ALOGE("[%s] "x, mName.string(), ##__VA_ARGS__) + +typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem; + +namespace android { + +RingBufferConsumer::RingBufferConsumer(uint32_t consumerUsage, + int bufferCount) : + ConsumerBase(new BufferQueue(true)), + mBufferCount(bufferCount) +{ + mBufferQueue->setConsumerUsageBits(consumerUsage); + mBufferQueue->setSynchronousMode(true); + mBufferQueue->setMaxAcquiredBufferCount(bufferCount); + + assert(bufferCount > 0); +} + +RingBufferConsumer::~RingBufferConsumer() { +} + +void RingBufferConsumer::setName(const String8& name) { + Mutex::Autolock _l(mMutex); + mName = name; + mBufferQueue->setConsumerName(name); +} + +sp RingBufferConsumer::pinSelectedBuffer( + const RingBufferComparator& filter, + bool waitForFence) { + + sp pinnedBuffer; + + { + List::iterator it, end, accIt; + BufferInfo acc, cur; + BufferInfo* accPtr = NULL; + + Mutex::Autolock _l(mMutex); + + for (it = mBufferItemList.begin(), end = mBufferItemList.end(); + it != end; + ++it) { + + const RingBufferItem& item = *it; + + cur.mCrop = item.mCrop; + cur.mTransform = item.mTransform; + cur.mScalingMode = item.mScalingMode; + cur.mTimestamp = item.mTimestamp; + cur.mFrameNumber = item.mFrameNumber; + cur.mPinned = item.mPinCount > 0; + + int ret = filter.compare(accPtr, &cur); + + if (ret == 0) { + accPtr = NULL; + } else if (ret > 0) { + acc = cur; + accPtr = &acc; + accIt = it; + } // else acc = acc + } + + if (!accPtr) { + return NULL; + } + + pinnedBuffer = new PinnedBufferItem(this, *accIt); + pinBufferLocked(pinnedBuffer->getBufferItem()); + + } // end scope of mMutex autolock + + if (pinnedBuffer != 0) { + BI_LOGV("Pinned buffer frame %lld, timestamp %lld", + pinnedBuffer->getBufferItem().mFrameNumber, + pinnedBuffer->getBufferItem().mTimestamp); + } + + if (waitForFence) { + status_t err = pinnedBuffer->getBufferItem().mFence->waitForever(1000, + "RingBufferConsumer::pinSelectedBuffer"); + if (err != OK) { + BI_LOGE("Failed to wait for fence of acquired buffer: %s (%d)", + strerror(-err), err); + } + } + + return pinnedBuffer; +} + +status_t RingBufferConsumer::clear() { + + status_t err; + Mutex::Autolock _l(mMutex); + + BI_LOGV("%s", __FUNCTION__); + + // Avoid annoying log warnings by returning early + if (mBufferItemList.size() == 0) { + return OK; + } + + do { + size_t pinnedFrames = 0; + err = releaseOldestBufferLocked(&pinnedFrames); + + if (err == NO_BUFFER_AVAILABLE) { + assert(pinnedFrames == mBufferItemList.size()); + break; + } + + if (err == NOT_ENOUGH_DATA) { + // Fine. Empty buffer item list. + break; + } + + if (err != OK) { + BI_LOGE("Clear failed, could not release buffer"); + return err; + } + + } while(true); + + return OK; +} + +void RingBufferConsumer::pinBufferLocked(const BufferItem& item) { + List::iterator it, end; + + for (it = mBufferItemList.begin(), end = mBufferItemList.end(); + it != end; + ++it) { + + RingBufferItem& find = *it; + if (item.mGraphicBuffer == find.mGraphicBuffer) { + find.mPinCount++; + break; + } + } + + if (it == end) { + BI_LOGE("Failed to pin buffer (timestamp %lld, framenumber %lld)", + item.mTimestamp, item.mFrameNumber); + } +} + +status_t RingBufferConsumer::releaseOldestBufferLocked(size_t* pinnedFrames) { + status_t err = OK; + + List::iterator it, end, accIt; + + it = mBufferItemList.begin(); + end = mBufferItemList.end(); + accIt = it; + + if (it == end) { + /** + * This is fine. 
We really care about being able to acquire a buffer + * successfully after this function completes, not about it releasing + * some buffer. + */ + BI_LOGV("%s: No buffers yet acquired, can't release anything", + __FUNCTION__); + return NOT_ENOUGH_DATA; + } + + for (; it != end; ++it) { + RingBufferItem& find = *it; + if (find.mTimestamp < accIt->mTimestamp && find.mPinCount <= 0) { + accIt = it; + } + + if (find.mPinCount > 0 && pinnedFrames != NULL) { + ++(*pinnedFrames); + } + } + + if (accIt != end) { + RingBufferItem& item = *accIt; + + // In case the object was never pinned, pass the acquire fence + // back to the release fence. If the fence was already waited on, + // it'll just be a no-op to wait on it again. + err = addReleaseFenceLocked(item.mBuf, item.mFence); + + if (err != OK) { + BI_LOGE("Failed to add release fence to buffer " + "(timestamp %lld, framenumber %lld", + item.mTimestamp, item.mFrameNumber); + return err; + } + + BI_LOGV("Attempting to release buffer timestamp %lld, frame %lld", + item.mTimestamp, item.mFrameNumber); + + err = releaseBufferLocked(item.mBuf, + EGL_NO_DISPLAY, + EGL_NO_SYNC_KHR); + if (err != OK) { + BI_LOGE("Failed to release buffer: %s (%d)", + strerror(-err), err); + return err; + } + + BI_LOGV("Buffer timestamp %lld, frame %lld evicted", + item.mTimestamp, item.mFrameNumber); + + size_t currentSize = mBufferItemList.size(); + mBufferItemList.erase(accIt); + assert(mBufferItemList.size() == currentSize - 1); + } else { + BI_LOGW("All buffers pinned, could not find any to release"); + return NO_BUFFER_AVAILABLE; + + } + + return OK; +} + +void RingBufferConsumer::onFrameAvailable() { + status_t err; + + { + Mutex::Autolock _l(mMutex); + + /** + * Release oldest frame + */ + if (mBufferItemList.size() >= (size_t)mBufferCount) { + err = releaseOldestBufferLocked(/*pinnedFrames*/NULL); + assert(err != NOT_ENOUGH_DATA); + + // TODO: implement the case for NO_BUFFER_AVAILABLE + assert(err != NO_BUFFER_AVAILABLE); + if (err != OK) { + return; + } + // TODO: in unpinBuffer rerun this routine if we had buffers + // we could've locked but didn't because there was no space + } + + RingBufferItem& item = *mBufferItemList.insert(mBufferItemList.end(), + RingBufferItem()); + + /** + * Acquire new frame + */ + err = acquireBufferLocked(&item); + if (err != OK) { + if (err != NO_BUFFER_AVAILABLE) { + BI_LOGE("Error acquiring buffer: %s (%d)", strerror(err), err); + } + + mBufferItemList.erase(--mBufferItemList.end()); + return; + } + + BI_LOGV("New buffer acquired (timestamp %lld), " + "buffer items %u out of %d", + item.mTimestamp, + mBufferItemList.size(), mBufferCount); + + item.mGraphicBuffer = mSlots[item.mBuf].mGraphicBuffer; + } // end of mMutex lock + + ConsumerBase::onFrameAvailable(); +} + +void RingBufferConsumer::unpinBuffer(const BufferItem& item) { + Mutex::Autolock _l(mMutex); + + List::iterator it, end, accIt; + + for (it = mBufferItemList.begin(), end = mBufferItemList.end(); + it != end; + ++it) { + + RingBufferItem& find = *it; + if (item.mGraphicBuffer == find.mGraphicBuffer) { + status_t res = addReleaseFenceLocked(item.mBuf, item.mFence); + + if (res != OK) { + BI_LOGE("Failed to add release fence to buffer " + "(timestamp %lld, framenumber %lld", + item.mTimestamp, item.mFrameNumber); + return; + } + + find.mPinCount--; + break; + } + } + + if (it == end) { + BI_LOGE("Failed to unpin buffer (timestamp %lld, framenumber %lld", + item.mTimestamp, item.mFrameNumber); + } +} + +status_t RingBufferConsumer::setDefaultBufferSize(uint32_t w, 
uint32_t h) { + Mutex::Autolock _l(mMutex); + return mBufferQueue->setDefaultBufferSize(w, h); +} + +status_t RingBufferConsumer::setDefaultBufferFormat(uint32_t defaultFormat) { + Mutex::Autolock _l(mMutex); + return mBufferQueue->setDefaultBufferFormat(defaultFormat); +} + +status_t RingBufferConsumer::setConsumerUsage(uint32_t usage) { + Mutex::Autolock _l(mMutex); + return mBufferQueue->setConsumerUsageBits(usage); +} + +} // namespace android diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h new file mode 100644 index 0000000..454fbae --- /dev/null +++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h @@ -0,0 +1,189 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_GUI_RINGBUFFERCONSUMER_H +#define ANDROID_GUI_RINGBUFFERCONSUMER_H + +#include + +#include + +#include +#include +#include +#include + +#define ANDROID_GRAPHICS_RINGBUFFERCONSUMER_JNI_ID "mRingBufferConsumer" + +namespace android { + +/** + * The RingBufferConsumer maintains a ring buffer of BufferItem objects, + * (which are 'acquired' as long as they are part of the ring buffer, and + * 'released' when they leave the ring buffer). + * + * When new buffers are produced, the oldest non-pinned buffer item is immediately + * dropped from the ring buffer, and overridden with the newest buffer. + * + * Users can only access a buffer item after pinning it (which also guarantees + * that during its duration it will not be released back into the BufferQueue). + * + * Note that the 'oldest' buffer is the one with the smallest timestamp. + * + * Edge cases: + * - If ringbuffer is not full, no drops occur when a buffer is produced. + * - If all the buffers get filled or pinned then there will be no empty + * buffers left, so the producer will block on dequeue. + */ +class RingBufferConsumer : public ConsumerBase, + public ConsumerBase::FrameAvailableListener +{ + public: + typedef ConsumerBase::FrameAvailableListener FrameAvailableListener; + + typedef BufferQueue::BufferItem BufferItem; + + enum { INVALID_BUFFER_SLOT = BufferQueue::INVALID_BUFFER_SLOT }; + enum { NO_BUFFER_AVAILABLE = BufferQueue::NO_BUFFER_AVAILABLE }; + + // Create a new ring buffer consumer. The consumerUsage parameter determines + // the consumer usage flags passed to the graphics allocator. The + // bufferCount parameter specifies how many buffers can be pinned for user + // access at the same time. + RingBufferConsumer(uint32_t consumerUsage, + int bufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS); + + virtual ~RingBufferConsumer(); + + // set the name of the RingBufferConsumer that will be used to identify it in + // log messages. + void setName(const String8& name); + + sp getProducerInterface() const { return getBufferQueue(); } + + // setDefaultBufferSize is used to set the size of buffers returned by + // requestBuffers when a with and height of zero is requested. 
+ status_t setDefaultBufferSize(uint32_t w, uint32_t h); + + // setDefaultBufferFormat allows the BufferQueue to create + // GraphicBuffers of a defaultFormat if no format is specified + // by the producer endpoint. + status_t setDefaultBufferFormat(uint32_t defaultFormat); + + // setConsumerUsage allows the BufferQueue consumer usage to be + // set at a later time after construction. + status_t setConsumerUsage(uint32_t usage); + + // Buffer info, minus the graphics buffer/slot itself. + struct BufferInfo { + // mCrop is the current crop rectangle for this buffer slot. + Rect mCrop; + + // mTransform is the current transform flags for this buffer slot. + uint32_t mTransform; + + // mScalingMode is the current scaling mode for this buffer slot. + uint32_t mScalingMode; + + // mTimestamp is the current timestamp for this buffer slot. This gets + // to set by queueBuffer each time this slot is queued. + int64_t mTimestamp; + + // mFrameNumber is the number of the queued frame for this slot. + uint64_t mFrameNumber; + + // mPinned is whether or not the buffer has been pinned already. + bool mPinned; + }; + + struct RingBufferComparator { + // Return < 0 to select i1, > 0 to select i2, 0 for neither + // i1 or i2 can be NULL. + // + // The comparator has to implement a total ordering. Otherwise + // a linear scan won't find the most preferred buffer. + virtual int compare(const BufferInfo* i1, + const BufferInfo* i2) const = 0; + + virtual ~RingBufferComparator() {} + }; + + struct PinnedBufferItem : public LightRefBase { + PinnedBufferItem(wp consumer, + const BufferItem& item) : + mConsumer(consumer), + mBufferItem(item) { + } + + ~PinnedBufferItem() { + sp consumer = mConsumer.promote(); + if (consumer != NULL) { + consumer->unpinBuffer(mBufferItem); + } + } + + bool isEmpty() { + return mBufferItem.mBuf == BufferQueue::INVALID_BUFFER_SLOT; + } + + BufferItem& getBufferItem() { return mBufferItem; } + const BufferItem& getBufferItem() const { return mBufferItem; } + + private: + wp mConsumer; + BufferItem mBufferItem; + }; + + // Find a buffer using the filter, then pin it before returning it. + // + // The filter will be invoked on each buffer item in the ring buffer, + // passing the item that was selected from each previous iteration, + // as well as the current iteration's item. + // + // Pinning will ensure that the buffer will not be dropped when a new + // frame is available. + sp pinSelectedBuffer(const RingBufferComparator& filter, + bool waitForFence = true); + + // Release all the non-pinned buffers in the ring buffer + status_t clear(); + + private: + + // Override ConsumerBase::onFrameAvailable + virtual void onFrameAvailable(); + + void pinBufferLocked(const BufferItem& item); + void unpinBuffer(const BufferItem& item); + + // Releases oldest buffer. Returns NO_BUFFER_AVAILABLE + // if all the buffers were pinned. + // Returns NOT_ENOUGH_DATA if list was empty. 
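// A hypothetical client-side comparator for pinSelectedBuffer() above
// (illustrative only, not part of this patch): select the newest unpinned
// buffer by timestamp. Per the RingBufferComparator contract, return > 0 to
// prefer i2, < 0 to keep i1; i1 is NULL until a first candidate is chosen.
//
//     struct NewestUnpinned : public RingBufferConsumer::RingBufferComparator {
//         virtual int compare(const RingBufferConsumer::BufferInfo* i1,
//                             const RingBufferConsumer::BufferInfo* i2) const {
//             if (i2 == NULL || i2->mPinned) return -1;  // never take i2
//             if (i1 == NULL) return 1;                  // first candidate
//             return (i2->mTimestamp > i1->mTimestamp) ? 1 : -1;
//         }
//     };
//
//     sp<RingBufferConsumer::PinnedBufferItem> pinned =
//             consumer->pinSelectedBuffer(NewestUnpinned(), /*waitForFence*/true);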
+ status_t releaseOldestBufferLocked(size_t* pinnedFrames); + + struct RingBufferItem : public BufferItem { + RingBufferItem() : BufferItem(), mPinCount(0) {} + int mPinCount; + }; + + // List of acquired buffers in our ring buffer + List mBufferItemList; + const int mBufferCount; +}; + +} // namespace android + +#endif // ANDROID_GUI_CPUCONSUMER_H -- cgit v1.1 From ae500e53efa1d26eb7c13e62d0ecc8d75db2473a Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Mon, 22 Apr 2013 14:03:54 -0700 Subject: Camera3: Add ZSL stream support Bug: 8563838 Change-Id: I2feda142ff5172aba17ade5c8d502f9bb5d5b766 --- services/camera/libcameraservice/Android.mk | 1 + services/camera/libcameraservice/Camera2Client.cpp | 35 +- services/camera/libcameraservice/Camera2Client.h | 6 +- services/camera/libcameraservice/Camera3Device.cpp | 89 ++- services/camera/libcameraservice/Camera3Device.h | 15 +- .../libcameraservice/camera2/CaptureSequencer.cpp | 9 +- .../libcameraservice/camera2/CaptureSequencer.h | 6 +- .../libcameraservice/camera2/ZslProcessor.cpp | 5 - .../camera/libcameraservice/camera2/ZslProcessor.h | 11 +- .../libcameraservice/camera2/ZslProcessor3.cpp | 442 ++++++++++++++ .../libcameraservice/camera2/ZslProcessor3.h | 137 +++++ .../camera2/ZslProcessorInterface.h | 59 ++ .../libcameraservice/camera3/Camera3OutputStream.h | 5 +- .../camera3/Camera3OutputStreamInterface.h | 43 ++ .../libcameraservice/camera3/Camera3Stream.cpp | 87 ++- .../libcameraservice/camera3/Camera3Stream.h | 16 +- .../camera3/Camera3StreamInterface.h | 162 ++++++ .../libcameraservice/camera3/Camera3ZslStream.cpp | 634 ++++++++++++++++++++- .../libcameraservice/camera3/Camera3ZslStream.h | 55 +- 19 files changed, 1751 insertions(+), 66 deletions(-) create mode 100644 services/camera/libcameraservice/camera2/ZslProcessor3.cpp create mode 100644 services/camera/libcameraservice/camera2/ZslProcessor3.h create mode 100644 services/camera/libcameraservice/camera2/ZslProcessorInterface.h create mode 100644 services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h create mode 100644 services/camera/libcameraservice/camera3/Camera3StreamInterface.h diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index e1c36d5..8ae414f 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -25,6 +25,7 @@ LOCAL_SRC_FILES:= \ camera2/JpegCompressor.cpp \ camera2/CaptureSequencer.cpp \ camera2/ProFrameProcessor.cpp \ + camera2/ZslProcessor3.cpp \ camera3/Camera3Stream.cpp \ camera3/Camera3InputStream.cpp \ camera3/Camera3OutputStream.cpp \ diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index eae7461..48f3606 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -28,6 +28,9 @@ #include "Camera2Device.h" #include "Camera3Device.h" +#include "camera2/ZslProcessor.h" +#include "camera2/ZslProcessor3.h" + #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__); #define ALOG2(...) 
ALOGD_IF(gLogLevel >= 2, __VA_ARGS__); @@ -51,12 +54,13 @@ Camera2Client::Camera2Client(const sp& cameraService, int deviceVersion): Camera2ClientBase(cameraService, cameraClient, clientPackageName, cameraId, cameraFacing, clientPid, clientUid, servicePid), - mParameters(cameraId, cameraFacing) + mParameters(cameraId, cameraFacing), + mDeviceVersion(deviceVersion) { ATRACE_CALL(); ALOGI("Camera %d: Opened", cameraId); - switch (deviceVersion) { + switch (mDeviceVersion) { case CAMERA_DEVICE_API_VERSION_2_0: mDevice = new Camera2Device(cameraId); break; @@ -65,7 +69,7 @@ Camera2Client::Camera2Client(const sp& cameraService, break; default: ALOGE("Camera %d: Unknown HAL device version %d", - cameraId, deviceVersion); + cameraId, mDeviceVersion); mDevice = NULL; break; } @@ -114,10 +118,27 @@ status_t Camera2Client::initialize(camera_module_t *module) mCameraId); mJpegProcessor->run(threadName.string()); - mZslProcessor = new ZslProcessor(this, mCaptureSequencer); + switch (mDeviceVersion) { + case CAMERA_DEVICE_API_VERSION_2_0: { + sp zslProc = + new ZslProcessor(this, mCaptureSequencer); + mZslProcessor = zslProc; + mZslProcessorThread = zslProc; + break; + } + case CAMERA_DEVICE_API_VERSION_3_0:{ + sp zslProc = + new ZslProcessor3(this, mCaptureSequencer); + mZslProcessor = zslProc; + mZslProcessorThread = zslProc; + break; + } + default: + break; + } threadName = String8::format("C2-%d-ZslProc", mCameraId); - mZslProcessor->run(threadName.string()); + mZslProcessorThread->run(threadName.string()); mCallbackProcessor = new CallbackProcessor(this); threadName = String8::format("C2-%d-CallbkProc", @@ -393,7 +414,7 @@ void Camera2Client::disconnect() { mFrameProcessor->requestExit(); mCaptureSequencer->requestExit(); mJpegProcessor->requestExit(); - mZslProcessor->requestExit(); + mZslProcessorThread->requestExit(); mCallbackProcessor->requestExit(); ALOGV("Camera %d: Waiting for threads", mCameraId); @@ -401,7 +422,7 @@ void Camera2Client::disconnect() { mFrameProcessor->join(); mCaptureSequencer->join(); mJpegProcessor->join(); - mZslProcessor->join(); + mZslProcessorThread->join(); mCallbackProcessor->join(); ALOGV("Camera %d: Disconnecting device", mCameraId); diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h index 713fab3..af72ab2 100644 --- a/services/camera/libcameraservice/Camera2Client.h +++ b/services/camera/libcameraservice/Camera2Client.h @@ -23,7 +23,7 @@ #include "camera2/FrameProcessor.h" #include "camera2/StreamingProcessor.h" #include "camera2/JpegProcessor.h" -#include "camera2/ZslProcessor.h" +#include "camera2/ZslProcessorInterface.h" #include "camera2/CaptureSequencer.h" #include "camera2/CallbackProcessor.h" #include "Camera2ClientBase.h" @@ -154,6 +154,7 @@ private: void setPreviewCallbackFlagL(Parameters ¶ms, int flag); status_t updateRequests(Parameters ¶ms); + int mDeviceVersion; // Used with stream IDs static const int NO_STREAM = -1; @@ -173,7 +174,8 @@ private: sp mCaptureSequencer; sp mJpegProcessor; - sp mZslProcessor; + sp mZslProcessor; + sp mZslProcessorThread; /** Notification-related members */ diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index 3437130..748f3a7 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -433,6 +433,81 @@ status_t Camera3Device::createInputStream( return OK; } + +status_t Camera3Device::createZslStream( + uint32_t width, uint32_t 
height, + int depth, + /*out*/ + int *id, + sp* zslStream) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + status_t res; + bool wasActive = false; + + switch (mStatus) { + case STATUS_ERROR: + ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + ALOGE("%s: Device not initialized", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_IDLE: + // OK + break; + case STATUS_ACTIVE: + ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); + mRequestThread->setPaused(true); + res = waitUntilDrainedLocked(); + if (res != OK) { + ALOGE("%s: Can't pause captures to reconfigure streams!", + __FUNCTION__); + mStatus = STATUS_ERROR; + return res; + } + wasActive = true; + break; + default: + ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + return INVALID_OPERATION; + } + assert(mStatus == STATUS_IDLE); + + if (mInputStream != 0) { + ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__); + return INVALID_OPERATION; + } + + sp newStream = new Camera3ZslStream(mNextStreamId, + width, height, depth); + + res = mOutputStreams.add(mNextStreamId, newStream); + if (res < 0) { + ALOGE("%s: Can't add new stream to set: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + mInputStream = newStream; + + *id = mNextStreamId++; + *zslStream = newStream; + + // Continue captures if active at start + if (wasActive) { + ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__); + res = configureStreamsLocked(); + if (res != OK) { + ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)", + __FUNCTION__, mNextStreamId, strerror(-res), res); + return res; + } + mRequestThread->setPaused(false); + } + + return OK; +} + status_t Camera3Device::createStream(sp consumer, uint32_t width, uint32_t height, int format, size_t size, int *id) { ATRACE_CALL(); @@ -588,7 +663,7 @@ status_t Camera3Device::deleteStream(int id) { return INVALID_OPERATION; } - sp deletedStream; + sp deletedStream; if (mInputStream != NULL && id == mInputStream->getId()) { deletedStream = mInputStream; mInputStream.clear(); @@ -881,7 +956,8 @@ sp Camera3Device::createCaptureRequest( __FUNCTION__, streams.data.u8[i]); return NULL; } - sp stream = mOutputStreams.editValueAt(idx); + sp stream = + mOutputStreams.editValueAt(idx); // Lazy completion of stream configuration (allocation/registration) // on first use @@ -932,6 +1008,15 @@ status_t Camera3Device::configureStreamsLocked() { } for (size_t i = 0; i < mOutputStreams.size(); i++) { + + // Don't configure bidi streams twice, nor add them twice to the list + if (mOutputStreams[i].get() == + static_cast(mInputStream.get())) { + + config.num_streams--; + continue; + } + camera3_stream_t *outputStream; outputStream = mOutputStreams.editValueAt(i)->startConfiguration(); if (outputStream == NULL) { diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/Camera3Device.h index 5c5faeb..9b26f06 100644 --- a/services/camera/libcameraservice/Camera3Device.h +++ b/services/camera/libcameraservice/Camera3Device.h @@ -26,6 +26,7 @@ #include "CameraDeviceBase.h" #include "camera3/Camera3Stream.h" #include "camera3/Camera3OutputStream.h" +#include "camera3/Camera3ZslStream.h" #include "hardware/camera3.h" @@ -85,6 +86,12 @@ class Camera3Device : virtual status_t createInputStream( uint32_t width, uint32_t height, int format, int *id); + virtual status_t createZslStream( + uint32_t width, uint32_t height, + int depth, + /*out*/ + 
int *id, + sp* zslStream); virtual status_t createReprocessStreamFromStream(int outputId, int *id); virtual status_t getStreamInfo(int id, @@ -132,14 +139,15 @@ class Camera3Device : } mStatus; // Mapping of stream IDs to stream instances - typedef KeyedVector > StreamSet; + typedef KeyedVector > + StreamSet; StreamSet mOutputStreams; sp mInputStream; int mNextStreamId; // Need to hold on to stream references until configure completes. - Vector > mDeletedStreams; + Vector > mDeletedStreams; /**** End scope for mLock ****/ @@ -147,7 +155,8 @@ class Camera3Device : public: CameraMetadata mSettings; sp mInputStream; - Vector > mOutputStreams; + Vector > + mOutputStreams; }; typedef List > RequestList; diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp index ee03329..266e516 100644 --- a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp +++ b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp @@ -27,6 +27,7 @@ #include "../Camera2Device.h" #include "../Camera2Client.h" #include "Parameters.h" +#include "ZslProcessorInterface.h" namespace android { namespace camera2 { @@ -54,7 +55,7 @@ CaptureSequencer::~CaptureSequencer() { ALOGV("%s: Exit", __FUNCTION__); } -void CaptureSequencer::setZslProcessor(wp processor) { +void CaptureSequencer::setZslProcessor(wp processor) { Mutex::Autolock l(mInputMutex); mZslProcessor = processor; } @@ -265,8 +266,10 @@ CaptureSequencer::CaptureState CaptureSequencer::manageDone(sp &c res = INVALID_OPERATION; } } - sp processor = mZslProcessor.promote(); + sp processor = mZslProcessor.promote(); if (processor != 0) { + ALOGV("%s: Memory optimization, clearing ZSL queue", + __FUNCTION__); processor->clearZslQueue(); } @@ -324,7 +327,7 @@ CaptureSequencer::CaptureState CaptureSequencer::manageZslStart( sp &client) { ALOGV("%s", __FUNCTION__); status_t res; - sp processor = mZslProcessor.promote(); + sp processor = mZslProcessor.promote(); if (processor == 0) { ALOGE("%s: No ZSL queue to use!", __FUNCTION__); return DONE; diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.h b/services/camera/libcameraservice/camera2/CaptureSequencer.h index 7db8007..76750aa 100644 --- a/services/camera/libcameraservice/camera2/CaptureSequencer.h +++ b/services/camera/libcameraservice/camera2/CaptureSequencer.h @@ -33,7 +33,7 @@ class Camera2Client; namespace camera2 { -class ZslProcessor; +class ZslProcessorInterface; class BurstCapture; /** @@ -48,7 +48,7 @@ class CaptureSequencer: ~CaptureSequencer(); // Get reference to the ZslProcessor, which holds the ZSL buffers and frames - void setZslProcessor(wp processor); + void setZslProcessor(wp processor); // Begin still image capture status_t startCapture(int msgType); @@ -105,7 +105,7 @@ class CaptureSequencer: static const int kMaxTimeoutsForCaptureEnd = 40; // 4 sec wp mClient; - wp mZslProcessor; + wp mZslProcessor; sp mBurstCapture; enum CaptureState { diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp index 2c12fb0..94059cd 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp @@ -248,11 +248,6 @@ int ZslProcessor::getStreamId() const { return mZslStreamId; } -int ZslProcessor::getReprocessStreamId() const { - Mutex::Autolock l(mInputMutex); - return mZslReprocessStreamId; -} - status_t ZslProcessor::pushToReprocess(int32_t requestId) { 
ALOGV("%s: Send in reprocess request with id %d", __FUNCTION__, requestId); diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/camera2/ZslProcessor.h index ee3bcd6..27b597e 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor.h +++ b/services/camera/libcameraservice/camera2/ZslProcessor.h @@ -28,6 +28,7 @@ #include "camera/CameraMetadata.h" #include "Camera2Heap.h" #include "../CameraDeviceBase.h" +#include "ZslProcessorInterface.h" namespace android { @@ -44,7 +45,8 @@ class ZslProcessor: virtual public Thread, virtual public BufferItemConsumer::FrameAvailableListener, virtual public FrameProcessor::FilteredListener, - virtual public CameraDeviceBase::BufferReleasedListener { + virtual public CameraDeviceBase::BufferReleasedListener, + public ZslProcessorInterface { public: ZslProcessor(sp client, wp sequencer); ~ZslProcessor(); @@ -56,10 +58,15 @@ class ZslProcessor: virtual void onBufferReleased(buffer_handle_t *handle); + /** + **************************************** + * ZslProcessorInterface implementation * + **************************************** + */ + status_t updateStream(const Parameters ¶ms); status_t deleteStream(); int getStreamId() const; - int getReprocessStreamId() const; status_t pushToReprocess(int32_t requestId); status_t clearZslQueue(); diff --git a/services/camera/libcameraservice/camera2/ZslProcessor3.cpp b/services/camera/libcameraservice/camera2/ZslProcessor3.cpp new file mode 100644 index 0000000..88bcefb --- /dev/null +++ b/services/camera/libcameraservice/camera2/ZslProcessor3.cpp @@ -0,0 +1,442 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera2-ZslProcessor3" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 +//#define LOG_NNDEBUG 0 + +#ifdef LOG_NNDEBUG +#define ALOGVV(...) ALOGV(__VA_ARGS__) +#else +#define ALOGVV(...) 
((void)0) +#endif + +#include +#include + +#include "ZslProcessor3.h" +#include +#include "../CameraDeviceBase.h" +#include "../Camera3Device.h" +#include "../Camera2Client.h" + + +namespace android { +namespace camera2 { + +ZslProcessor3::ZslProcessor3( + sp client, + wp sequencer): + Thread(false), + mState(RUNNING), + mClient(client), + mSequencer(sequencer), + mId(client->getCameraId()), + mZslStreamId(NO_STREAM), + mFrameListHead(0), + mZslQueueHead(0), + mZslQueueTail(0) { + mZslQueue.insertAt(0, kZslBufferDepth); + mFrameList.insertAt(0, kFrameListDepth); + sp captureSequencer = mSequencer.promote(); + if (captureSequencer != 0) captureSequencer->setZslProcessor(this); +} + +ZslProcessor3::~ZslProcessor3() { + ALOGV("%s: Exit", __FUNCTION__); + deleteStream(); +} + +void ZslProcessor3::onFrameAvailable(int32_t /*frameId*/, + const CameraMetadata &frame) { + Mutex::Autolock l(mInputMutex); + camera_metadata_ro_entry_t entry; + entry = frame.find(ANDROID_SENSOR_TIMESTAMP); + nsecs_t timestamp = entry.data.i64[0]; + (void)timestamp; + ALOGVV("Got preview metadata for timestamp %lld", timestamp); + + if (mState != RUNNING) return; + + mFrameList.editItemAt(mFrameListHead) = frame; + mFrameListHead = (mFrameListHead + 1) % kFrameListDepth; +} + +status_t ZslProcessor3::updateStream(const Parameters ¶ms) { + ATRACE_CALL(); + ALOGV("%s: Configuring ZSL streams", __FUNCTION__); + status_t res; + + Mutex::Autolock l(mInputMutex); + + sp client = mClient.promote(); + if (client == 0) { + ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + sp device = + static_cast(client->getCameraDevice().get()); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + if (mZslStreamId != NO_STREAM) { + // Check if stream parameters have to change + uint32_t currentWidth, currentHeight; + res = device->getStreamInfo(mZslStreamId, + ¤tWidth, ¤tHeight, 0); + if (res != OK) { + ALOGE("%s: Camera %d: Error querying capture output stream info: " + "%s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + return res; + } + if (currentWidth != (uint32_t)params.fastInfo.arrayWidth || + currentHeight != (uint32_t)params.fastInfo.arrayHeight) { + ALOGV("%s: Camera %d: Deleting stream %d since the buffer " + "dimensions changed", + __FUNCTION__, client->getCameraId(), mZslStreamId); + res = device->deleteStream(mZslStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete old output stream " + "for ZSL: %s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + return res; + } + mZslStreamId = NO_STREAM; + } + } + + if (mZslStreamId == NO_STREAM) { + // Create stream for HAL production + // TODO: Sort out better way to select resolution for ZSL + + // Note that format specified internally in Camera3ZslStream + res = device->createZslStream( + params.fastInfo.arrayWidth, params.fastInfo.arrayHeight, + kZslBufferDepth, + &mZslStreamId, + &mZslStream); + if (res != OK) { + ALOGE("%s: Camera %d: Can't create ZSL stream: " + "%s (%d)", __FUNCTION__, client->getCameraId(), + strerror(-res), res); + return res; + } + } + client->registerFrameListener(Camera2Client::kPreviewRequestIdStart, + Camera2Client::kPreviewRequestIdEnd, + this); + + return OK; +} + +status_t ZslProcessor3::deleteStream() { + ATRACE_CALL(); + status_t res; + + Mutex::Autolock l(mInputMutex); + + if (mZslStreamId != NO_STREAM) { + sp client = mClient.promote(); + if (client == 0) { + ALOGE("%s: 
Camera %d: Client does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + sp device = + reinterpret_cast(client->getCameraDevice().get()); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + res = device->deleteStream(mZslStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: " + "%s (%d)", __FUNCTION__, client->getCameraId(), + mZslStreamId, strerror(-res), res); + return res; + } + + mZslStreamId = NO_STREAM; + } + return OK; +} + +int ZslProcessor3::getStreamId() const { + Mutex::Autolock l(mInputMutex); + return mZslStreamId; +} + +status_t ZslProcessor3::pushToReprocess(int32_t requestId) { + ALOGV("%s: Send in reprocess request with id %d", + __FUNCTION__, requestId); + Mutex::Autolock l(mInputMutex); + status_t res; + sp client = mClient.promote(); + + if (client == 0) { + ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + IF_ALOGV() { + dumpZslQueue(-1); + } + + size_t metadataIdx; + nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx); + + if (candidateTimestamp == -1) { + ALOGE("%s: Could not find good candidate for ZSL reprocessing", + __FUNCTION__); + return NOT_ENOUGH_DATA; + } + + res = mZslStream->enqueueInputBufferByTimestamp(candidateTimestamp, + /*actualTimestamp*/NULL); + + if (res == mZslStream->NO_BUFFER_AVAILABLE) { + ALOGV("%s: No ZSL buffers yet", __FUNCTION__); + return NOT_ENOUGH_DATA; + } else if (res != OK) { + ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + { + CameraMetadata request = mFrameList[metadataIdx]; + + // Verify that the frame is reasonable for reprocessing + + camera_metadata_entry_t entry; + entry = request.find(ANDROID_CONTROL_AE_STATE); + if (entry.count == 0) { + ALOGE("%s: ZSL queue frame has no AE state field!", + __FUNCTION__); + return BAD_VALUE; + } + if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED && + entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) { + ALOGV("%s: ZSL queue frame AE state is %d, need full capture", + __FUNCTION__, entry.data.u8[0]); + return NOT_ENOUGH_DATA; + } + + uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS; + res = request.update(ANDROID_REQUEST_TYPE, + &requestType, 1); + uint8_t inputStreams[1] = + { static_cast(mZslStreamId) }; + if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS, + inputStreams, 1); + // TODO: Shouldn't we also update the latest preview frame? 
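// The remaining steps below finish assembling the reprocess request: the output
// stream is pointed at the JPEG capture stream, the request id is set, preview
// is stopped, the JPEG settings are refreshed from the current parameters, and
// the request is finally submitted through the device's capture() call.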
+ uint8_t outputStreams[1] = + { static_cast(client->getCaptureStreamId()) }; + if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS, + outputStreams, 1); + res = request.update(ANDROID_REQUEST_ID, + &requestId, 1); + + if (res != OK ) { + ALOGE("%s: Unable to update frame to a reprocess request", + __FUNCTION__); + return INVALID_OPERATION; + } + + res = client->stopStream(); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: " + "%s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return INVALID_OPERATION; + } + + // Update JPEG settings + { + SharedParameters::Lock l(client->getParameters()); + res = l.mParameters.updateRequestJpeg(&request); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL " + "capture request: %s (%d)", __FUNCTION__, + client->getCameraId(), + strerror(-res), res); + return res; + } + } + + mLatestCapturedRequest = request; + res = client->getCameraDevice()->capture(request); + if (res != OK ) { + ALOGE("%s: Unable to send ZSL reprocess request to capture: %s" + " (%d)", __FUNCTION__, strerror(-res), res); + return res; + } + + mState = LOCKED; + } + + return OK; +} + +status_t ZslProcessor3::clearZslQueue() { + Mutex::Autolock l(mInputMutex); + // If in middle of capture, can't clear out queue + if (mState == LOCKED) return OK; + + return clearZslQueueLocked(); +} + +status_t ZslProcessor3::clearZslQueueLocked() { + return mZslStream->clearInputRingBuffer(); +} + +void ZslProcessor3::dump(int fd, const Vector& /*args*/) const { + Mutex::Autolock l(mInputMutex); + if (!mLatestCapturedRequest.isEmpty()) { + String8 result(" Latest ZSL capture request:\n"); + write(fd, result.string(), result.size()); + mLatestCapturedRequest.dump(fd, 2, 6); + } else { + String8 result(" Latest ZSL capture request: none yet\n"); + write(fd, result.string(), result.size()); + } + dumpZslQueue(fd); +} + +bool ZslProcessor3::threadLoop() { + // TODO: remove dependency on thread + return true; +} + +void ZslProcessor3::dumpZslQueue(int fd) const { + String8 header("ZSL queue contents:"); + String8 indent(" "); + ALOGV("%s", header.string()); + if (fd != -1) { + header = indent + header + "\n"; + write(fd, header.string(), header.size()); + } + for (size_t i = 0; i < mZslQueue.size(); i++) { + const ZslPair &queueEntry = mZslQueue[i]; + nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp; + camera_metadata_ro_entry_t entry; + nsecs_t frameTimestamp = 0; + int frameAeState = -1; + if (!queueEntry.frame.isEmpty()) { + entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP); + if (entry.count > 0) frameTimestamp = entry.data.i64[0]; + entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE); + if (entry.count > 0) frameAeState = entry.data.u8[0]; + } + String8 result = + String8::format(" %d: b: %lld\tf: %lld, AE state: %d", i, + bufferTimestamp, frameTimestamp, frameAeState); + ALOGV("%s", result.string()); + if (fd != -1) { + result = indent + result + "\n"; + write(fd, result.string(), result.size()); + } + + } +} + +nsecs_t ZslProcessor3::getCandidateTimestampLocked(size_t* metadataIdx) const { + /** + * Find the smallest timestamp we know about so far + * - ensure that aeState is either converged or locked + */ + + size_t idx = 0; + nsecs_t minTimestamp = -1; + for (size_t j = 0; j < mFrameList.size(); j++) { + const CameraMetadata &frame = mFrameList[j]; + if (!frame.isEmpty()) { + camera_metadata_ro_entry_t entry; + entry = frame.find(ANDROID_SENSOR_TIMESTAMP); + if (entry.count == 0) { + 
ALOGE("%s: Can't find timestamp in frame!", + __FUNCTION__); + continue; + } + nsecs_t frameTimestamp = entry.data.i64[0]; + if (minTimestamp > frameTimestamp) { + + entry = frame.find(ANDROID_CONTROL_AE_STATE); + if (entry.count == 0) { + ALOGW("%s: ZSL queue frame has no AE state field!", + __FUNCTION__); + continue; + } + if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED && + entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) { + ALOGVV("%s: ZSL queue frame AE state is %d, need " + "full capture", __FUNCTION__, entry.data.u8[0]); + continue; + } + + minTimestamp = frameTimestamp; + idx = j; + } + } + } + + if (metadataIdx) { + *metadataIdx = idx; + } + + return minTimestamp; +} + +void ZslProcessor3::onBufferAcquired(const BufferInfo& /*bufferInfo*/) { + // Intentionally left empty + // Although theoretically we could use this to get better dump info +} + +void ZslProcessor3::onBufferReleased(const BufferInfo& bufferInfo) { + Mutex::Autolock l(mInputMutex); + + // ignore output buffers + if (bufferInfo.mOutput) { + return; + } + + // TODO: Verify that the buffer is in our queue by looking at timestamp + // theoretically unnecessary unless we change the following assumptions: + // -- only 1 buffer reprocessed at a time (which is the case now) + + // Erase entire ZSL queue since we've now completed the capture and preview + // is stopped. + // + // We need to guarantee that if we do two back-to-back captures, + // the second won't use a buffer that's older/the same as the first, which + // is theoretically possible if we don't clear out the queue and the + // selection criteria is something like 'newest'. Clearing out the queue + // on a completed capture ensures we'll only use new data. + ALOGV("%s: Memory optimization, clearing ZSL queue", + __FUNCTION__); + clearZslQueueLocked(); + + // Required so we accept more ZSL requests + mState = RUNNING; +} + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/camera2/ZslProcessor3.h b/services/camera/libcameraservice/camera2/ZslProcessor3.h new file mode 100644 index 0000000..cb98b99 --- /dev/null +++ b/services/camera/libcameraservice/camera2/ZslProcessor3.h @@ -0,0 +1,137 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR3_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR3_H + +#include +#include +#include +#include +#include +#include +#include "Parameters.h" +#include "FrameProcessor.h" +#include "camera/CameraMetadata.h" +#include "Camera2Heap.h" +#include "../CameraDeviceBase.h" +#include "ZslProcessorInterface.h" +#include "../camera3/Camera3ZslStream.h" + +namespace android { + +class Camera2Client; + +namespace camera2 { + +class CaptureSequencer; + +/*** + * ZSL queue processing + */ +class ZslProcessor3 : + public ZslProcessorInterface, + public camera3::Camera3StreamBufferListener, + virtual public Thread, + virtual public FrameProcessor::FilteredListener { + public: + ZslProcessor3(sp client, wp sequencer); + ~ZslProcessor3(); + + // From FrameProcessor + virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame); + + /** + **************************************** + * ZslProcessorInterface implementation * + **************************************** + */ + + virtual status_t updateStream(const Parameters ¶ms); + virtual status_t deleteStream(); + virtual int getStreamId() const; + + virtual status_t pushToReprocess(int32_t requestId); + virtual status_t clearZslQueue(); + + void dump(int fd, const Vector& args) const; + + protected: + /** + ********************************************** + * Camera3StreamBufferListener implementation * + ********************************************** + */ + typedef camera3::Camera3StreamBufferListener::BufferInfo BufferInfo; + // Buffer was acquired by the HAL + virtual void onBufferAcquired(const BufferInfo& bufferInfo); + // Buffer was released by the HAL + virtual void onBufferReleased(const BufferInfo& bufferInfo); + + private: + static const nsecs_t kWaitDuration = 10000000; // 10 ms + + enum { + RUNNING, + LOCKED + } mState; + + wp mClient; + wp mSequencer; + + const int mId; + + mutable Mutex mInputMutex; + + enum { + NO_STREAM = -1 + }; + + int mZslStreamId; + sp mZslStream; + + struct ZslPair { + BufferItemConsumer::BufferItem buffer; + CameraMetadata frame; + }; + + static const size_t kZslBufferDepth = 4; + static const size_t kFrameListDepth = kZslBufferDepth * 2; + Vector mFrameList; + size_t mFrameListHead; + + ZslPair mNextPair; + + Vector mZslQueue; + size_t mZslQueueHead; + size_t mZslQueueTail; + + CameraMetadata mLatestCapturedRequest; + + virtual bool threadLoop(); + + status_t clearZslQueueLocked(); + + void dumpZslQueue(int id) const; + + nsecs_t getCandidateTimestampLocked(size_t* metadataIdx) const; +}; + + +}; //namespace camera2 +}; //namespace android + +#endif diff --git a/services/camera/libcameraservice/camera2/ZslProcessorInterface.h b/services/camera/libcameraservice/camera2/ZslProcessorInterface.h new file mode 100644 index 0000000..183c0c2 --- /dev/null +++ b/services/camera/libcameraservice/camera2/ZslProcessorInterface.h @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSORINTERFACE_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSORINTERFACE_H + +#include +#include + +namespace android { +namespace camera2 { + +class Parameters; + +class ZslProcessorInterface : virtual public RefBase { +public: + + // Get ID for use with android.request.outputStreams / inputStreams + virtual int getStreamId() const = 0; + + // Update the streams by recreating them if the size/format has changed + virtual status_t updateStream(const Parameters& params) = 0; + + // Delete the underlying CameraDevice streams + virtual status_t deleteStream() = 0; + + /** + * Submits a ZSL capture request (id = requestId) + * + * An appropriate ZSL buffer is selected by the closest timestamp, + * then we push that buffer to be reprocessed by the HAL. + * A capture request is created and submitted on behalf of the client. + */ + virtual status_t pushToReprocess(int32_t requestId) = 0; + + // Flush the ZSL buffer queue, freeing up all the buffers + virtual status_t clearZslQueue() = 0; + + // (Debugging only) Dump the current state to the specified file descriptor + virtual void dump(int fd, const Vector& args) const = 0; +}; + +}; //namespace camera2 +}; //namespace android + +#endif diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.h b/services/camera/libcameraservice/camera3/Camera3OutputStream.h index d331a94..2464dce 100644 --- a/services/camera/libcameraservice/camera3/Camera3OutputStream.h +++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.h @@ -21,6 +21,7 @@ #include #include "Camera3Stream.h" +#include "Camera3OutputStreamInterface.h" namespace android { @@ -29,7 +30,9 @@ namespace camera3 { /** * A class for managing a single stream of output data from the camera device. */ -class Camera3OutputStream : public Camera3Stream { +class Camera3OutputStream : + public Camera3Stream, + public Camera3OutputStreamInterface { public: /** * Set up a stream for formats that have 2 dimensions, such as RAW and YUV. diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h new file mode 100644 index 0000000..aae72cf --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_INTERFACE_H +#define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_INTERFACE_H + +#include "Camera3StreamInterface.h" + +namespace android { + +namespace camera3 { + +/** + * An interface for managing a single stream of output data from the camera + * device. 
+ */ +class Camera3OutputStreamInterface : public virtual Camera3StreamInterface { + public: + /** + * Set the transform on the output stream; one of the + * HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants. + */ + virtual status_t setTransform(int transform) = 0; +}; + +} // namespace camera3 + +} // namespace android + +#endif diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.cpp b/services/camera/libcameraservice/camera3/Camera3Stream.cpp index bc259b6..f137227 100644 --- a/services/camera/libcameraservice/camera3/Camera3Stream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3Stream.cpp @@ -178,26 +178,75 @@ status_t Camera3Stream::finishConfiguration(camera3_device *hal3Device) { status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer) { ATRACE_CALL(); Mutex::Autolock l(mLock); - return getBufferLocked(buffer); + + status_t res = getBufferLocked(buffer); + if (res == OK) { + fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true); + } + + return res; } status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer, nsecs_t timestamp) { ATRACE_CALL(); Mutex::Autolock l(mLock); - return returnBufferLocked(buffer, timestamp); + + status_t res = returnBufferLocked(buffer, timestamp); + if (res == OK) { + fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/true); + } + + return res; } status_t Camera3Stream::getInputBuffer(camera3_stream_buffer *buffer) { ATRACE_CALL(); Mutex::Autolock l(mLock); - return getInputBufferLocked(buffer); + + status_t res = getInputBufferLocked(buffer); + if (res == OK) { + fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/false); + } + + return res; } status_t Camera3Stream::returnInputBuffer(const camera3_stream_buffer &buffer) { ATRACE_CALL(); Mutex::Autolock l(mLock); - return returnInputBufferLocked(buffer); + + status_t res = returnInputBufferLocked(buffer); + if (res == OK) { + fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/false); + } + return res; +} + +void Camera3Stream::fireBufferListenersLocked( + const camera3_stream_buffer& /*buffer*/, bool acquired, bool output) { + List >::iterator it, end; + + // TODO: finish implementing + + Camera3StreamBufferListener::BufferInfo info = + Camera3StreamBufferListener::BufferInfo(); + info.mOutput = output; + // TODO: rest of fields + + for (it = mBufferListenerList.begin(), end = mBufferListenerList.end(); + it != end; + ++it) { + + sp listener = it->promote(); + if (listener != 0) { + if (acquired) { + listener->onBufferAcquired(info); + } else { + listener->onBufferReleased(info); + } + } + } } bool Camera3Stream::hasOutstandingBuffers() const { @@ -290,6 +339,36 @@ status_t Camera3Stream::returnInputBufferLocked( return INVALID_OPERATION; } +void Camera3Stream::addBufferListener( + wp listener) { + Mutex::Autolock l(mLock); + mBufferListenerList.push_back(listener); +} + +void Camera3Stream::removeBufferListener( + const sp& listener) { + Mutex::Autolock l(mLock); + + bool erased = true; + List >::iterator it, end; + for (it = mBufferListenerList.begin(), end = mBufferListenerList.end(); + it != end; + ) { + + if (*it == listener) { + it = mBufferListenerList.erase(it); + erased = true; + } else { + ++it; + } + } + + if (!erased) { + ALOGW("%s: Could not find listener to remove, already removed", + __FUNCTION__); + } +} + }; // namespace camera3 }; // namespace android diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.h b/services/camera/libcameraservice/camera3/Camera3Stream.h index 
46a3872..d992cfe 100644 --- a/services/camera/libcameraservice/camera3/Camera3Stream.h +++ b/services/camera/libcameraservice/camera3/Camera3Stream.h @@ -21,9 +21,13 @@ #include #include #include +#include #include "hardware/camera3.h" +#include "Camera3StreamBufferListener.h" +#include "Camera3StreamInterface.h" + namespace android { namespace camera3 { @@ -81,7 +85,8 @@ namespace camera3 { */ class Camera3Stream : protected camera3_stream, - public LightRefBase { + public virtual Camera3StreamInterface, + public virtual RefBase { public: virtual ~Camera3Stream(); @@ -205,6 +210,11 @@ class Camera3Stream : */ virtual void dump(int fd, const Vector &args) const = 0; + void addBufferListener( + wp listener); + void removeBufferListener( + const sp& listener); + protected: const int mId; const String8 mName; @@ -261,6 +271,10 @@ class Camera3Stream : // Gets all buffers from endpoint and registers them with the HAL. status_t registerBuffersLocked(camera3_device *hal3Device); + void fireBufferListenersLocked(const camera3_stream_buffer& buffer, + bool acquired, bool output); + List > mBufferListenerList; + }; // class Camera3Stream }; // namespace camera3 diff --git a/services/camera/libcameraservice/camera3/Camera3StreamInterface.h b/services/camera/libcameraservice/camera3/Camera3StreamInterface.h new file mode 100644 index 0000000..4768536 --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3StreamInterface.h @@ -0,0 +1,162 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H +#define ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H + +#include +#include "Camera3StreamBufferListener.h" + +struct camera3_stream_buffer; + +namespace android { + +namespace camera3 { + +/** + * An interface for managing a single stream of input and/or output data from + * the camera device. + */ +class Camera3StreamInterface : public virtual RefBase { + public: + /** + * Get the stream's ID + */ + virtual int getId() const = 0; + + /** + * Get the stream's dimensions and format + */ + virtual uint32_t getWidth() const = 0; + virtual uint32_t getHeight() const = 0; + virtual int getFormat() const = 0; + + /** + * Start the stream configuration process. Returns a handle to the stream's + * information to be passed into the HAL device's configure_streams call. + * + * Until finishConfiguration() is called, no other methods on the stream may + * be called. The usage and max_buffers fields of camera3_stream may be + * modified between start/finishConfiguration, but may not be changed after + * that. The priv field of camera3_stream may be modified at any time after + * startConfiguration. + * + * Returns NULL in case of error starting configuration. + */ + virtual camera3_stream* startConfiguration() = 0; + + /** + * Check if the stream is mid-configuration (start has been called, but not + * finish). Used for lazy completion of configuration. 
+ */ + virtual bool isConfiguring() const = 0; + + /** + * Completes the stream configuration process. During this call, the stream + * may call the device's register_stream_buffers() method. The stream + * information structure returned by startConfiguration() may no longer be + * modified after this call, but can still be read until the destruction of + * the stream. + * + * Returns: + * OK on a successful configuration + * NO_INIT in case of a serious error from the HAL device + * NO_MEMORY in case of an error registering buffers + * INVALID_OPERATION in case connecting to the consumer failed + */ + virtual status_t finishConfiguration(camera3_device *hal3Device) = 0; + + /** + * Fill in the camera3_stream_buffer with the next valid buffer for this + * stream, to hand over to the HAL. + * + * This method may only be called once finishConfiguration has been called. + * For bidirectional streams, this method applies to the output-side + * buffers. + * + */ + virtual status_t getBuffer(camera3_stream_buffer *buffer) = 0; + + /** + * Return a buffer to the stream after use by the HAL. + * + * This method may only be called for buffers provided by getBuffer(). + * For bidirectional streams, this method applies to the output-side buffers + */ + virtual status_t returnBuffer(const camera3_stream_buffer &buffer, + nsecs_t timestamp) = 0; + + /** + * Fill in the camera3_stream_buffer with the next valid buffer for this + * stream, to hand over to the HAL. + * + * This method may only be called once finishConfiguration has been called. + * For bidirectional streams, this method applies to the input-side + * buffers. + * + */ + virtual status_t getInputBuffer(camera3_stream_buffer *buffer) = 0; + + /** + * Return a buffer to the stream after use by the HAL. + * + * This method may only be called for buffers provided by getBuffer(). + * For bidirectional streams, this method applies to the input-side buffers + */ + virtual status_t returnInputBuffer(const camera3_stream_buffer &buffer) = 0; + + /** + * Whether any of the stream's buffers are currently in use by the HAL, + * including buffers that have been returned but not yet had their + * release fence signaled. + */ + virtual bool hasOutstandingBuffers() const = 0; + + enum { + TIMEOUT_NEVER = -1 + }; + /** + * Wait until the HAL is done with all of this stream's buffers, including + * signalling all release fences. Returns TIMED_OUT if the timeout is + * exceeded, OK on success. Pass in TIMEOUT_NEVER for timeout to indicate + * an indefinite wait. + */ + virtual status_t waitUntilIdle(nsecs_t timeout) = 0; + + /** + * Disconnect stream from its non-HAL endpoint. After this, + * start/finishConfiguration must be called before the stream can be used + * again. This cannot be called if the stream has outstanding dequeued + * buffers. + */ + virtual status_t disconnect() = 0; + + /** + * Debug dump of the stream's state. 
+ */ + virtual void dump(int fd, const Vector &args) const = 0; + + virtual void addBufferListener( + wp listener) = 0; + virtual void removeBufferListener( + const sp& listener) = 0; +}; + +} // namespace camera3 + +} // namespace android + +#endif diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp index e8a5ca6..0345d5b 100644 --- a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp @@ -18,70 +18,652 @@ #define ATRACE_TAG ATRACE_TAG_CAMERA //#define LOG_NDEBUG 0 +// This is needed for stdint.h to define INT64_MAX in C++ +#define __STDC_LIMIT_MACROS + #include #include #include "Camera3ZslStream.h" +#ifndef container_of +#define container_of(ptr, type, member) \ + (type *)((char*)(ptr) - offsetof(type, member)) +#endif + +typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem; + namespace android { namespace camera3 { +namespace { +struct TimestampFinder : public RingBufferConsumer::RingBufferComparator { + typedef RingBufferConsumer::BufferInfo BufferInfo; + + enum { + SELECT_I1 = -1, + SELECT_I2 = 1, + SELECT_NEITHER = 0, + }; + + TimestampFinder(nsecs_t timestamp) : mTimestamp(timestamp) {} + ~TimestampFinder() {} + + template + static void swap(T& a, T& b) { + T tmp = a; + a = b; + b = tmp; + } + + /** + * Try to find the best candidate for a ZSL buffer. + * Match priority from best to worst: + * 1) Timestamps match. + * 2) Timestamp is closest to the needle (and lower). + * 3) Timestamp is closest to the needle (and higher). + * + */ + virtual int compare(const BufferInfo *i1, + const BufferInfo *i2) const { + // Try to select non-null object first. + if (i1 == NULL) { + return SELECT_I2; + } else if (i2 == NULL) { + return SELECT_I1; + } + + // Best result: timestamp is identical + if (i1->mTimestamp == mTimestamp) { + return SELECT_I1; + } else if (i2->mTimestamp == mTimestamp) { + return SELECT_I2; + } + + const BufferInfo* infoPtrs[2] = { + i1, + i2 + }; + int infoSelectors[2] = { + SELECT_I1, + SELECT_I2 + }; + + // Order i1,i2 so that always i1.timestamp < i2.timestamp + if (i1->mTimestamp > i2->mTimestamp) { + swap(infoPtrs[0], infoPtrs[1]); + swap(infoSelectors[0], infoSelectors[1]); + } + + // Second best: closest (lower) timestamp + if (infoPtrs[1]->mTimestamp < mTimestamp) { + return infoSelectors[1]; + } else if (infoPtrs[0]->mTimestamp < mTimestamp) { + return infoSelectors[0]; + } + + // Worst: closest (higher) timestamp + return infoSelectors[0]; + + /** + * The above cases should cover all the possibilities, + * and we get an 'empty' result only if the ring buffer + * was empty itself + */ + } + + const nsecs_t mTimestamp; +}; // struct TimestampFinder +} // namespace anonymous + Camera3ZslStream::Camera3ZslStream(int id, uint32_t width, uint32_t height, int depth) : Camera3Stream(id, CAMERA3_STREAM_BIDIRECTIONAL, width, height, 0, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), - mDepth(depth) { + mDepth(depth), + mProducer(new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL, + depth)), + mConsumer(new Surface(mProducer->getProducerInterface())), + //mTransform(0), + mTotalBufferCount(0), + mDequeuedBufferCount(0), + mFrameCount(0), + mLastTimestamp(0), + mCombinedFence(new Fence()) { +} + +Camera3ZslStream::~Camera3ZslStream() { + disconnectLocked(); } status_t Camera3ZslStream::getBufferLocked(camera3_stream_buffer *buffer) { - (void) buffer; - ALOGE("%s: Not implemented", __FUNCTION__); - 
return INVALID_OPERATION; + // same as output stream code + ATRACE_CALL(); + status_t res; + + // Allow dequeue during IN_[RE]CONFIG for registration + if (mState != STATE_CONFIGURED && + mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) { + ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + + // Only limit dequeue amount when fully configured + if (mState == STATE_CONFIGURED && + mDequeuedBufferCount == camera3_stream::max_buffers) { + ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous" + " buffers (%d)", __FUNCTION__, mId, + camera3_stream::max_buffers); + return INVALID_OPERATION; + } + + ANativeWindowBuffer* anb; + int fenceFd; + + res = mConsumer->dequeueBuffer(mConsumer.get(), &anb, &fenceFd); + if (res != OK) { + ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + // Handing out a raw pointer to this object. Increment internal refcount. + incStrong(this); + buffer->stream = this; + buffer->buffer = &(anb->handle); + buffer->acquire_fence = fenceFd; + buffer->release_fence = -1; + buffer->status = CAMERA3_BUFFER_STATUS_OK; + + mDequeuedBufferCount++; + + return OK; } status_t Camera3ZslStream::returnBufferLocked( const camera3_stream_buffer &buffer, nsecs_t timestamp) { - (void) buffer; - (void) timestamp; - ALOGE("%s: Not implemented", __FUNCTION__); - return INVALID_OPERATION; + // same as output stream code + ATRACE_CALL(); + status_t res; + + // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be + // decrementing the internal refcount next. In case this is the last ref, we + // might get destructed on the decStrong(), so keep an sp around until the + // end of the call - otherwise have to sprinkle the decStrong on all exit + // points. 
+ sp keepAlive(this); + decStrong(this); + + // Allow buffers to be returned in the error state, to allow for disconnect + // and in the in-config states for registration + if (mState == STATE_CONSTRUCTED) { + ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + if (mDequeuedBufferCount == 0) { + ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__, + mId); + return INVALID_OPERATION; + } + if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { + res = mConsumer->cancelBuffer(mConsumer.get(), + container_of(buffer.buffer, ANativeWindowBuffer, handle), + buffer.release_fence); + if (res != OK) { + ALOGE("%s: Stream %d: Error cancelling buffer to native window:" + " %s (%d)", __FUNCTION__, mId, strerror(-res), res); + return res; + } + } else { + res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp); + if (res != OK) { + ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + sp releaseFence = new Fence(buffer.release_fence); + int anwReleaseFence = releaseFence->dup(); + + res = mConsumer->queueBuffer(mConsumer.get(), + container_of(buffer.buffer, ANativeWindowBuffer, handle), + anwReleaseFence); + if (res != OK) { + ALOGE("%s: Stream %d: Error queueing buffer to native window: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + close(anwReleaseFence); + return res; + } + + mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); + } + + mDequeuedBufferCount--; + mBufferReturnedSignal.signal(); + mLastTimestamp = timestamp; + + return OK; } bool Camera3ZslStream::hasOutstandingBuffersLocked() const { - ALOGE("%s: Not implemented", __FUNCTION__); + // same as output stream + nsecs_t signalTime = mCombinedFence->getSignalTime(); + ALOGV("%s: Stream %d: Has %d outstanding buffers," + " buffer signal time is %lld", + __FUNCTION__, mId, mDequeuedBufferCount, signalTime); + if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) { + return true; + } return false; } status_t Camera3ZslStream::waitUntilIdle(nsecs_t timeout) { - (void) timeout; - ALOGE("%s: Not implemented", __FUNCTION__); - return INVALID_OPERATION; + // same as output stream + status_t res; + { + Mutex::Autolock l(mLock); + while (mDequeuedBufferCount > 0) { + if (timeout != TIMEOUT_NEVER) { + nsecs_t startTime = systemTime(); + res = mBufferReturnedSignal.waitRelative(mLock, timeout); + if (res == TIMED_OUT) { + return res; + } else if (res != OK) { + ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + nsecs_t deltaTime = systemTime() - startTime; + if (timeout <= deltaTime) { + timeout = 0; + } else { + timeout -= deltaTime; + } + } else { + res = mBufferReturnedSignal.wait(mLock); + if (res != OK) { + ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + } + } + } + + // No lock + + unsigned int timeoutMs; + if (timeout == TIMEOUT_NEVER) { + timeoutMs = Fence::TIMEOUT_NEVER; + } else if (timeout == 0) { + timeoutMs = 0; + } else { + // Round up to wait at least 1 ms + timeoutMs = (timeout + 999999) / 1000000; + } + + return mCombinedFence->wait(timeoutMs); +} + +status_t Camera3ZslStream::configureQueueLocked() { + status_t res; + + switch (mState) { + case STATE_IN_RECONFIG: + res = disconnectLocked(); + if (res != OK) { + return res; + } + break; + case STATE_IN_CONFIG: + // OK + break; + default: + ALOGE("%s: 
Bad state: %d", __FUNCTION__, mState); + return INVALID_OPERATION; + } + + // Configure consumer-side ANativeWindow interface + res = native_window_api_connect(mConsumer.get(), + NATIVE_WINDOW_API_CAMERA); + if (res != OK) { + ALOGE("%s: Unable to connect to native window for stream %d", + __FUNCTION__, mId); + return res; + } + + res = native_window_set_usage(mConsumer.get(), camera3_stream::usage); + if (res != OK) { + ALOGE("%s: Unable to configure usage %08x for stream %d", + __FUNCTION__, camera3_stream::usage, mId); + return res; + } + + res = native_window_set_scaling_mode(mConsumer.get(), + NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); + if (res != OK) { + ALOGE("%s: Unable to configure stream scaling: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + if (mMaxSize == 0) { + // For buffers of known size + res = native_window_set_buffers_geometry(mConsumer.get(), + camera3_stream::width, camera3_stream::height, + camera3_stream::format); + } else { + // For buffers with bounded size + res = native_window_set_buffers_geometry(mConsumer.get(), + mMaxSize, 1, + camera3_stream::format); + } + if (res != OK) { + ALOGE("%s: Unable to configure stream buffer geometry" + " %d x %d, format %x for stream %d", + __FUNCTION__, camera3_stream::width, camera3_stream::height, + camera3_stream::format, mId); + return res; + } + + int maxConsumerBuffers; + res = mConsumer->query(mConsumer.get(), + NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers); + if (res != OK) { + ALOGE("%s: Unable to query consumer undequeued" + " buffer count for stream %d", __FUNCTION__, mId); + return res; + } + + ALOGV("%s: Consumer wants %d buffers", __FUNCTION__, + maxConsumerBuffers); + + mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers; + mDequeuedBufferCount = 0; + mFrameCount = 0; + mLastTimestamp = 0; + + res = native_window_set_buffer_count(mConsumer.get(), + mTotalBufferCount); + if (res != OK) { + ALOGE("%s: Unable to set buffer count for stream %d", + __FUNCTION__, mId); + return res; + } + + return OK; +} + +size_t Camera3ZslStream::getBufferCountLocked() { + return mTotalBufferCount; } status_t Camera3ZslStream::disconnectLocked() { - ALOGE("%s: Not implemented", __FUNCTION__); - return INVALID_OPERATION; + status_t res; + + switch (mState) { + case STATE_IN_RECONFIG: + case STATE_CONFIGURED: + // OK + break; + default: + // No connection, nothing to do + return OK; + } + + if (mDequeuedBufferCount > 0) { + ALOGE("%s: Can't disconnect with %d buffers still dequeued!", + __FUNCTION__, mDequeuedBufferCount); + return INVALID_OPERATION; + } + + res = native_window_api_disconnect(mConsumer.get(), NATIVE_WINDOW_API_CAMERA); + + /** + * This is not an error. if client calling process dies, the window will + * also die and all calls to it will return DEAD_OBJECT, thus it's already + * "disconnected" + */ + if (res == DEAD_OBJECT) { + ALOGW("%s: While disconnecting stream %d from native window, the" + " native window died from under us", __FUNCTION__, mId); + } + else if (res != OK) { + ALOGE("%s: Unable to disconnect stream %d from native window (error %d %s)", + __FUNCTION__, mId, res, strerror(-res)); + mState = STATE_ERROR; + return res; + } + + mState = (mState == STATE_IN_RECONFIG) ? 
STATE_IN_CONFIG : STATE_CONSTRUCTED; + return OK; } -status_t Camera3ZslStream::getInputBuffer(camera3_stream_buffer *buffer, - nsecs_t timestamp) { - (void) buffer; - (void) timestamp; - ALOGE("%s: Not implemented", __FUNCTION__); - return INVALID_OPERATION; +status_t Camera3ZslStream::getInputBufferLocked(camera3_stream_buffer *buffer) { + ATRACE_CALL(); + + // TODO: potentially register from inputBufferLocked + // this should be ok, registerBuffersLocked only calls getBuffer for now + // register in output mode instead of input mode for ZSL streams. + if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) { + ALOGE("%s: Stream %d: Buffer registration for input streams" + " not implemented (state %d)", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + + // Allow dequeue during IN_[RE]CONFIG for registration + if (mState != STATE_CONFIGURED && + mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) { + ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + + // Only limit dequeue amount when fully configured + if (mState == STATE_CONFIGURED && + mDequeuedBufferCount == camera3_stream::max_buffers) { + ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous" + " buffers (%d)", __FUNCTION__, mId, + camera3_stream::max_buffers); + return INVALID_OPERATION; + } + + ANativeWindowBuffer* anb; + int fenceFd; + + assert(mProducer != 0); + + sp bufferItem; + { + List >::iterator it, end; + it = mInputBufferQueue.begin(); + end = mInputBufferQueue.end(); + + // Need to call enqueueInputBufferByTimestamp as a prerequisite + if (it == end) { + ALOGE("%s: Stream %d: No input buffer was queued", + __FUNCTION__, mId); + return INVALID_OPERATION; + } + bufferItem = *it; + mInputBufferQueue.erase(it); + } + + anb = bufferItem->getBufferItem().mGraphicBuffer->getNativeBuffer(); + assert(anb != NULL); + fenceFd = bufferItem->getBufferItem().mFence->dup(); + + /** + * FenceFD now owned by HAL except in case of error, + * in which case we reassign it to acquire_fence + */ + + // Handing out a raw pointer to this object. Increment internal refcount. + incStrong(this); + buffer->stream = this; + buffer->buffer = &(anb->handle); + buffer->acquire_fence = fenceFd; + buffer->release_fence = -1; + buffer->status = CAMERA3_BUFFER_STATUS_OK; + + mDequeuedBufferCount++; + + mBuffersInFlight.push_back(bufferItem); + + return OK; } -status_t Camera3ZslStream::returnInputBuffer(const camera3_stream_buffer &buffer) { - (void) buffer; - ALOGE("%s: Not implemented", __FUNCTION__); - return INVALID_OPERATION; +status_t Camera3ZslStream::returnInputBufferLocked( + const camera3_stream_buffer &buffer) { + ATRACE_CALL(); + + // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be + // decrementing the internal refcount next. In case this is the last ref, we + // might get destructed on the decStrong(), so keep an sp around until the + // end of the call - otherwise have to sprinkle the decStrong on all exit + // points. 
+ sp keepAlive(this); + decStrong(this); + + // Allow buffers to be returned in the error state, to allow for disconnect + // and in the in-config states for registration + if (mState == STATE_CONSTRUCTED) { + ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + if (mDequeuedBufferCount == 0) { + ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__, + mId); + return INVALID_OPERATION; + } + + bool bufferFound = false; + sp bufferItem; + { + // Find the buffer we are returning + Vector >::iterator it, end; + for (it = mBuffersInFlight.begin(), end = mBuffersInFlight.end(); + it != end; + ++it) { + + const sp& tmp = *it; + ANativeWindowBuffer *anb = + tmp->getBufferItem().mGraphicBuffer->getNativeBuffer(); + if (anb != NULL && &(anb->handle) == buffer.buffer) { + bufferFound = true; + bufferItem = tmp; + mBuffersInFlight.erase(it); + mDequeuedBufferCount--; + } + } + } + if (!bufferFound) { + ALOGE("%s: Stream %d: Can't return buffer that wasn't sent to HAL", + __FUNCTION__, mId); + return INVALID_OPERATION; + } + + int releaseFenceFd = buffer.release_fence; + + if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { + if (buffer.release_fence != -1) { + ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when " + "there is an error", __FUNCTION__, mId, buffer.release_fence); + close(buffer.release_fence); + } + + /** + * Reassign release fence as the acquire fence incase of error + */ + releaseFenceFd = buffer.acquire_fence; + } + + /** + * Unconditionally return buffer to the buffer queue. + * - Fwk takes over the release_fence ownership + */ + sp releaseFence = new Fence(releaseFenceFd); + bufferItem->getBufferItem().mFence = releaseFence; + bufferItem.clear(); // dropping last reference unpins buffer + + mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); + + mBufferReturnedSignal.signal(); + + return OK; + } void Camera3ZslStream::dump(int fd, const Vector &args) const { - (void) fd; (void) args; - ALOGE("%s: Not implemented", __FUNCTION__); + + String8 lines; + lines.appendFormat(" Stream[%d]: ZSL\n", mId); + lines.appendFormat(" State: %d\n", mState); + lines.appendFormat(" Dims: %d x %d, format 0x%x\n", + camera3_stream::width, camera3_stream::height, + camera3_stream::format); + lines.appendFormat(" Usage: %d, max HAL buffers: %d\n", + camera3_stream::usage, camera3_stream::max_buffers); + lines.appendFormat(" Frames produced: %d, last timestamp: %lld ns\n", + mFrameCount, mLastTimestamp); + lines.appendFormat(" Total buffers: %d, currently dequeued: %d\n", + mTotalBufferCount, mDequeuedBufferCount); + lines.appendFormat(" Input buffers pending: %d, in flight %d\n", + mInputBufferQueue.size(), mBuffersInFlight.size()); + write(fd, lines.string(), lines.size()); +} + +status_t Camera3ZslStream::enqueueInputBufferByTimestamp( + nsecs_t timestamp, + nsecs_t* actualTimestamp) { + + Mutex::Autolock l(mLock); + + TimestampFinder timestampFinder = TimestampFinder(timestamp); + + sp pinnedBuffer = + mProducer->pinSelectedBuffer(timestampFinder, + /*waitForFence*/false); + + if (pinnedBuffer == 0) { + ALOGE("%s: No ZSL buffers were available yet", __FUNCTION__); + return NO_BUFFER_AVAILABLE; + } + + nsecs_t actual = pinnedBuffer->getBufferItem().mTimestamp; + + if (actual != timestamp) { + ALOGW("%s: ZSL buffer candidate search didn't find an exact match --" + " requested timestamp = %lld, actual timestamp = %lld", + __FUNCTION__, timestamp, actual); + } + + 
mInputBufferQueue.push_back(pinnedBuffer); + + if (actualTimestamp != NULL) { + *actualTimestamp = actual; + } + + return OK; +} + +status_t Camera3ZslStream::clearInputRingBuffer() { + Mutex::Autolock l(mLock); + + mInputBufferQueue.clear(); + + return mProducer->clear(); +} + +status_t Camera3ZslStream::setTransform(int /*transform*/) { + ALOGV("%s: Not implemented", __FUNCTION__); + return INVALID_OPERATION; } }; // namespace camera3 diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.h b/services/camera/libcameraservice/camera3/Camera3ZslStream.h index 39d5995..b863e7f 100644 --- a/services/camera/libcameraservice/camera3/Camera3ZslStream.h +++ b/services/camera/libcameraservice/camera3/Camera3ZslStream.h @@ -19,8 +19,10 @@ #include #include +#include #include "Camera3Stream.h" +#include "Camera3OutputStreamInterface.h" namespace android { @@ -32,32 +34,62 @@ namespace camera3 { * most output buffers, and when directed, pushes a buffer back to the HAL for * processing. */ -class Camera3ZslStream: public Camera3Stream { +class Camera3ZslStream : + public Camera3Stream, + public Camera3OutputStreamInterface { public: /** * Set up a ZSL stream of a given resolution. Depth is the number of buffers * cached within the stream that can be retrieved for input. */ Camera3ZslStream(int id, uint32_t width, uint32_t height, int depth); + ~Camera3ZslStream(); virtual status_t waitUntilIdle(nsecs_t timeout); virtual void dump(int fd, const Vector &args) const; + enum { NO_BUFFER_AVAILABLE = BufferQueue::NO_BUFFER_AVAILABLE }; + + /** + * Locate a buffer matching this timestamp in the RingBufferConsumer, + * and mark it to be queued at the next getInputBufferLocked invocation. + * + * Errors: Returns NO_BUFFER_AVAILABLE if we could not find a match. + * + */ + status_t enqueueInputBufferByTimestamp(nsecs_t timestamp, + nsecs_t* actualTimestamp); + /** - * Get an input buffer matching a specific timestamp. If no buffer matching - * the timestamp is available, NO_MEMORY is returned. + * Clears the buffers that can be used by enqueueInputBufferByTimestamp */ - status_t getInputBuffer(camera3_stream_buffer *buffer, nsecs_t timestamp); + status_t clearInputRingBuffer(); /** - * Return input buffer from HAL. The buffer is then marked as unfilled, and - * returned to the output-side stream for refilling. + * Camera3OutputStreamInterface implementation */ - status_t returnInputBuffer(const camera3_stream_buffer &buffer); + status_t setTransform(int transform); private: int mDepth; + // Input buffers pending to be queued into HAL + List > mInputBufferQueue; + sp mProducer; + sp mConsumer; + + // Input buffers in flight to HAL + Vector > mBuffersInFlight; + size_t mTotalBufferCount; + // sum of input and output buffers that are currently acquired by HAL + size_t mDequeuedBufferCount; + Condition mBufferReturnedSignal; + uint32_t mFrameCount; + // Last received output buffer's timestamp + nsecs_t mLastTimestamp; + + // The merged release fence for all returned buffers + sp mCombinedFence; /** * Camera3Stream interface @@ -67,9 +99,18 @@ class Camera3ZslStream: public Camera3Stream { virtual status_t getBufferLocked(camera3_stream_buffer *buffer); virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer, nsecs_t timestamp); + // getInputBuffer/returnInputBuffer operate the input stream side of the + // ZslStream. 
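+    // Both are called by the Camera3Stream base class (getInputBuffer /
+    // returnInputBuffer) with mLock already held.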
+ virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer); + virtual status_t returnInputBufferLocked( + const camera3_stream_buffer &buffer); + virtual bool hasOutstandingBuffersLocked() const; virtual status_t disconnectLocked(); + virtual status_t configureQueueLocked(); + virtual size_t getBufferCountLocked(); + }; // class Camera3ZslStream }; // namespace camera3 -- cgit v1.1 From 4b4bb11b8747adeb2efe56c7df4ab6803dd7db41 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 29 Apr 2013 13:17:50 -0700 Subject: A reference to the psi section data could become invalid if more sections were added to the KeyedVector. Change-Id: I095b5452ccfad89d69fc502fb21ce39495e201c3 related-to-bug: 8754565 --- media/libstagefright/mpeg2ts/ATSParser.cpp | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp index c12572f..9850a46 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.cpp +++ b/media/libstagefright/mpeg2ts/ATSParser.cpp @@ -1059,7 +1059,7 @@ status_t ATSParser::parsePID( ssize_t sectionIndex = mPSISections.indexOfKey(PID); if (sectionIndex >= 0) { - const sp §ion = mPSISections.valueAt(sectionIndex); + sp section = mPSISections.valueAt(sectionIndex); if (payload_unit_start_indicator) { CHECK(section->isEmpty()); @@ -1068,7 +1068,6 @@ status_t ATSParser::parsePID( br->skipBits(skip * 8); } - CHECK((br->numBitsLeft() % 8) == 0); status_t err = section->append(br->data(), br->numBitsLeft() / 8); @@ -1103,10 +1102,13 @@ status_t ATSParser::parsePID( if (!handled) { mPSISections.removeItem(PID); + section.clear(); } } - section->clear(); + if (section != NULL) { + section->clear(); + } return OK; } -- cgit v1.1 From d8c7ad1cbc2e6830560a085380a5828cc3d41010 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 30 Apr 2013 13:06:15 -0700 Subject: Camera: Don't complain repeatedly about missing android.led.availableLeds Bug: 8765726 Change-Id: I78f5f98315068aaad3a39d56698b9c8a18cc8f4a --- services/camera/libcameraservice/camera2/Parameters.cpp | 8 +++++--- services/camera/libcameraservice/camera2/Parameters.h | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index a304b35..d02f17e 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -952,10 +952,10 @@ status_t Parameters::buildQuirks() { } camera_metadata_ro_entry_t Parameters::staticInfo(uint32_t tag, - size_t minCount, size_t maxCount) const { + size_t minCount, size_t maxCount, bool required) const { camera_metadata_ro_entry_t entry = info->find(tag); - if (CC_UNLIKELY( entry.count == 0 )) { + if (CC_UNLIKELY( entry.count == 0 ) && required) { const char* tagSection = get_camera_metadata_section_name(tag); if (tagSection == NULL) tagSection = ""; const char* tagName = get_camera_metadata_tag_name(tag); @@ -1593,7 +1593,9 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { * - android.led.transmit = defaulted ON */ camera_metadata_ro_entry_t entry = staticInfo(ANDROID_LED_AVAILABLE_LEDS, - /*minimumCount*/0); + /*minimumCount*/0, + /*maximumCount*/0, + /*required*/false); for(size_t i = 0; i < entry.count; ++i) { uint8_t led = entry.data.u8[i]; diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h index fe3ec1d..696ee2f 100644 
--- a/services/camera/libcameraservice/camera2/Parameters.h +++ b/services/camera/libcameraservice/camera2/Parameters.h @@ -213,7 +213,7 @@ struct Parameters { // max/minCount means to do no bounds check in that direction. In case of // error, the entry data pointer is null and the count is 0. camera_metadata_ro_entry_t staticInfo(uint32_t tag, - size_t minCount=0, size_t maxCount=0) const; + size_t minCount=0, size_t maxCount=0, bool required=true) const; // Validate and update camera parameters based on new settings status_t set(const String8 ¶mString); -- cgit v1.1 From e96ee699aca0f711d41e6c0833e5de2341c4a36d Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 30 Apr 2013 16:08:47 -0700 Subject: Support MediaCodec::getOutputFormat for encoders codec specific data is provided as part of the MediaFormat if available. Change-Id: I5a79c936e2411fe66ebc694791071faefc33941e related-to-bug: 8616651 --- include/media/stagefright/MediaCodec.h | 4 + media/libstagefright/ACodec.cpp | 268 +++++++++++++++++++++++---------- media/libstagefright/MediaCodec.cpp | 78 +++++++++- 3 files changed, 265 insertions(+), 85 deletions(-) diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h index a06a8e1..76aa503 100644 --- a/include/media/stagefright/MediaCodec.h +++ b/include/media/stagefright/MediaCodec.h @@ -177,6 +177,8 @@ private: kFlagDequeueOutputPending = 32, kFlagIsSecure = 64, kFlagSawMediaServerDie = 128, + kFlagIsEncoder = 256, + kFlagGatherCodecSpecificData = 512, }; struct BufferInfo { @@ -244,6 +246,8 @@ private: status_t onSetParameters(const sp ¶ms); + status_t amendOutputFormatWithCodecSpecificData(const sp &buffer); + DISALLOW_EVIL_CONSTRUCTORS(MediaCodec); }; diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 6d952c3..058852e 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -1470,24 +1470,47 @@ status_t ACodec::setSupportedOutputFormat() { &format, sizeof(format)); } +static const struct VideoCodingMapEntry { + const char *mMime; + OMX_VIDEO_CODINGTYPE mVideoCodingType; +} kVideoCodingMapEntry[] = { + { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, + { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, + { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, + { MEDIA_MIMETYPE_VIDEO_VPX, OMX_VIDEO_CodingVPX }, +}; + static status_t GetVideoCodingTypeFromMime( const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { - if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) { - *codingType = OMX_VIDEO_CodingAVC; - } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) { - *codingType = OMX_VIDEO_CodingMPEG4; - } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) { - *codingType = OMX_VIDEO_CodingH263; - } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG2, mime)) { - *codingType = OMX_VIDEO_CodingMPEG2; - } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VPX, mime)) { - *codingType = OMX_VIDEO_CodingVPX; - } else { - *codingType = OMX_VIDEO_CodingUnused; - return ERROR_UNSUPPORTED; + for (size_t i = 0; + i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); + ++i) { + if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { + *codingType = kVideoCodingMapEntry[i].mVideoCodingType; + return OK; + } } - return OK; + *codingType = OMX_VIDEO_CodingUnused; + + return ERROR_UNSUPPORTED; +} + +static status_t GetMimeTypeForVideoCoding( + OMX_VIDEO_CODINGTYPE codingType, AString *mime) { + for (size_t i = 0; + i < 
sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); + ++i) { + if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { + *mime = kVideoCodingMapEntry[i].mMime; + return OK; + } + } + + mime->clear(); + + return ERROR_UNSUPPORTED; } status_t ACodec::setupVideoDecoder( @@ -2227,49 +2250,61 @@ void ACodec::sendFormatChange() { { OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; - notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); + AString mime; + if (!mIsEncoder) { + notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); + } else if (GetMimeTypeForVideoCoding( + videoDef->eCompressionFormat, &mime) != OK) { + notify->setString("mime", "application/octet-stream"); + } else { + notify->setString("mime", mime.c_str()); + } + notify->setInt32("width", videoDef->nFrameWidth); notify->setInt32("height", videoDef->nFrameHeight); - notify->setInt32("stride", videoDef->nStride); - notify->setInt32("slice-height", videoDef->nSliceHeight); - notify->setInt32("color-format", videoDef->eColorFormat); - - OMX_CONFIG_RECTTYPE rect; - InitOMXParams(&rect); - rect.nPortIndex = kPortIndexOutput; - - if (mOMX->getConfig( - mNode, OMX_IndexConfigCommonOutputCrop, - &rect, sizeof(rect)) != OK) { - rect.nLeft = 0; - rect.nTop = 0; - rect.nWidth = videoDef->nFrameWidth; - rect.nHeight = videoDef->nFrameHeight; - } - CHECK_GE(rect.nLeft, 0); - CHECK_GE(rect.nTop, 0); - CHECK_GE(rect.nWidth, 0u); - CHECK_GE(rect.nHeight, 0u); - CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth); - CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight); - - notify->setRect( - "crop", - rect.nLeft, - rect.nTop, - rect.nLeft + rect.nWidth - 1, - rect.nTop + rect.nHeight - 1); - - if (mNativeWindow != NULL) { - android_native_rect_t crop; - crop.left = rect.nLeft; - crop.top = rect.nTop; - crop.right = rect.nLeft + rect.nWidth; - crop.bottom = rect.nTop + rect.nHeight; - - CHECK_EQ(0, native_window_set_crop( - mNativeWindow.get(), &crop)); + if (!mIsEncoder) { + notify->setInt32("stride", videoDef->nStride); + notify->setInt32("slice-height", videoDef->nSliceHeight); + notify->setInt32("color-format", videoDef->eColorFormat); + + OMX_CONFIG_RECTTYPE rect; + InitOMXParams(&rect); + rect.nPortIndex = kPortIndexOutput; + + if (mOMX->getConfig( + mNode, OMX_IndexConfigCommonOutputCrop, + &rect, sizeof(rect)) != OK) { + rect.nLeft = 0; + rect.nTop = 0; + rect.nWidth = videoDef->nFrameWidth; + rect.nHeight = videoDef->nFrameHeight; + } + + CHECK_GE(rect.nLeft, 0); + CHECK_GE(rect.nTop, 0); + CHECK_GE(rect.nWidth, 0u); + CHECK_GE(rect.nHeight, 0u); + CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth); + CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight); + + notify->setRect( + "crop", + rect.nLeft, + rect.nTop, + rect.nLeft + rect.nWidth - 1, + rect.nTop + rect.nHeight - 1); + + if (mNativeWindow != NULL) { + android_native_rect_t crop; + crop.left = rect.nLeft; + crop.top = rect.nTop; + crop.right = rect.nLeft + rect.nWidth; + crop.bottom = rect.nTop + rect.nHeight; + + CHECK_EQ(0, native_window_set_crop( + mNativeWindow.get(), &crop)); + } } break; } @@ -2277,41 +2312,108 @@ void ACodec::sendFormatChange() { case OMX_PortDomainAudio: { OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; - CHECK_EQ((int)audioDef->eEncoding, (int)OMX_AUDIO_CodingPCM); - OMX_AUDIO_PARAM_PCMMODETYPE params; - InitOMXParams(¶ms); - params.nPortIndex = kPortIndexOutput; + switch (audioDef->eEncoding) { + case OMX_AUDIO_CodingPCM: + { + OMX_AUDIO_PARAM_PCMMODETYPE params; + 
InitOMXParams(¶ms); + params.nPortIndex = kPortIndexOutput; - CHECK_EQ(mOMX->getParameter( - mNode, OMX_IndexParamAudioPcm, - ¶ms, sizeof(params)), - (status_t)OK); + CHECK_EQ(mOMX->getParameter( + mNode, OMX_IndexParamAudioPcm, + ¶ms, sizeof(params)), + (status_t)OK); + + CHECK(params.nChannels == 1 || params.bInterleaved); + CHECK_EQ(params.nBitPerSample, 16u); + CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned); + CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear); + + notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); + notify->setInt32("channel-count", params.nChannels); + notify->setInt32("sample-rate", params.nSamplingRate); + if (mEncoderDelay + mEncoderPadding) { + size_t frameSize = params.nChannels * sizeof(int16_t); + if (mSkipCutBuffer != NULL) { + size_t prevbufsize = mSkipCutBuffer->size(); + if (prevbufsize != 0) { + ALOGW("Replacing SkipCutBuffer holding %d bytes", prevbufsize); + } + } + mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay * frameSize, + mEncoderPadding * frameSize); + } - CHECK(params.nChannels == 1 || params.bInterleaved); - CHECK_EQ(params.nBitPerSample, 16u); - CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned); - CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear); - - notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); - notify->setInt32("channel-count", params.nChannels); - notify->setInt32("sample-rate", params.nSamplingRate); - if (mEncoderDelay + mEncoderPadding) { - size_t frameSize = params.nChannels * sizeof(int16_t); - if (mSkipCutBuffer != NULL) { - size_t prevbufsize = mSkipCutBuffer->size(); - if (prevbufsize != 0) { - ALOGW("Replacing SkipCutBuffer holding %d bytes", prevbufsize); + if (mChannelMaskPresent) { + notify->setInt32("channel-mask", mChannelMask); } + break; } - mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay * frameSize, - mEncoderPadding * frameSize); - } - if (mChannelMaskPresent) { - notify->setInt32("channel-mask", mChannelMask); - } + case OMX_AUDIO_CodingAAC: + { + OMX_AUDIO_PARAM_AACPROFILETYPE params; + InitOMXParams(¶ms); + params.nPortIndex = kPortIndexOutput; + + CHECK_EQ(mOMX->getParameter( + mNode, OMX_IndexParamAudioAac, + ¶ms, sizeof(params)), + (status_t)OK); + + notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); + notify->setInt32("channel-count", params.nChannels); + notify->setInt32("sample-rate", params.nSampleRate); + break; + } + + case OMX_AUDIO_CodingAMR: + { + OMX_AUDIO_PARAM_AMRTYPE params; + InitOMXParams(¶ms); + params.nPortIndex = kPortIndexOutput; + + CHECK_EQ(mOMX->getParameter( + mNode, OMX_IndexParamAudioAmr, + ¶ms, sizeof(params)), + (status_t)OK); + notify->setInt32("channel-count", 1); + if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { + notify->setString( + "mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); + + notify->setInt32("sample-rate", 16000); + } else { + notify->setString( + "mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); + + notify->setInt32("sample-rate", 8000); + } + break; + } + + case OMX_AUDIO_CodingFLAC: + { + OMX_AUDIO_PARAM_FLACTYPE params; + InitOMXParams(¶ms); + params.nPortIndex = kPortIndexOutput; + + CHECK_EQ(mOMX->getParameter( + mNode, OMX_IndexParamAudioFlac, + ¶ms, sizeof(params)), + (status_t)OK); + + notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); + notify->setInt32("channel-count", params.nChannels); + notify->setInt32("sample-rate", params.nSampleRate); + break; + } + + default: + TRESPASS(); + } break; } @@ -2957,7 +3059,7 @@ bool ACodec::BaseState::onOMXFillBufferDone( break; } - if (!mCodec->mIsEncoder && 
!mCodec->mSentFormat) { + if (!mCodec->mSentFormat) { mCodec->sendFormatChange(); } diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index ae7bb17..714da55 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -31,10 +31,13 @@ #include #include #include +#include #include #include #include +#include "include/avc_utils.h" + namespace android { // static @@ -741,8 +744,16 @@ void MediaCodec::onMessageReceived(const sp &msg) { } mOutputFormat = msg; - mFlags |= kFlagOutputFormatChanged; - postActivityNotificationIfPossible(); + + if (mFlags & kFlagIsEncoder) { + // Before we announce the format change we should + // collect codec specific data and amend the output + // format as necessary. + mFlags |= kFlagGatherCodecSpecificData; + } else { + mFlags |= kFlagOutputFormatChanged; + postActivityNotificationIfPossible(); + } break; } @@ -812,6 +823,25 @@ void MediaCodec::onMessageReceived(const sp &msg) { buffer->meta()->setInt32("omxFlags", omxFlags); + if (mFlags & kFlagGatherCodecSpecificData) { + // This is the very first output buffer after a + // format change was signalled, it'll either contain + // the one piece of codec specific data we can expect + // or there won't be codec specific data. + if (omxFlags & OMX_BUFFERFLAG_CODECCONFIG) { + status_t err = + amendOutputFormatWithCodecSpecificData(buffer); + + if (err != OK) { + ALOGE("Codec spit out malformed codec " + "specific data!"); + } + } + + mFlags &= ~kFlagGatherCodecSpecificData; + mFlags |= kFlagOutputFormatChanged; + } + if (mFlags & kFlagDequeueOutputPending) { CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID)); @@ -955,6 +985,7 @@ void MediaCodec::onMessageReceived(const sp &msg) { if (flags & CONFIGURE_FLAG_ENCODE) { format->setInt32("encoder", true); + mFlags |= kFlagIsEncoder; } extractCSD(format); @@ -1413,6 +1444,8 @@ void MediaCodec::setState(State newState) { mFlags &= ~kFlagOutputFormatChanged; mFlags &= ~kFlagOutputBuffersChanged; mFlags &= ~kFlagStickyError; + mFlags &= ~kFlagIsEncoder; + mFlags &= ~kFlagGatherCodecSpecificData; mActivityNotify.clear(); } @@ -1720,4 +1753,45 @@ status_t MediaCodec::onSetParameters(const sp ¶ms) { return OK; } +status_t MediaCodec::amendOutputFormatWithCodecSpecificData( + const sp &buffer) { + AString mime; + CHECK(mOutputFormat->findString("mime", &mime)); + + if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) { + // Codec specific data should be SPS and PPS in a single buffer, + // each prefixed by a startcode (0x00 0x00 0x00 0x01). + // We separate the two and put them into the output format + // under the keys "csd-0" and "csd-1". + + unsigned csdIndex = 0; + + const uint8_t *data = buffer->data(); + size_t size = buffer->size(); + + const uint8_t *nalStart; + size_t nalSize; + while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) { + sp csd = new ABuffer(nalSize + 4); + memcpy(csd->data(), "\x00\x00\x00\x01", 4); + memcpy(csd->data() + 4, nalStart, nalSize); + + mOutputFormat->setBuffer( + StringPrintf("csd-%u", csdIndex).c_str(), csd); + + ++csdIndex; + } + + if (csdIndex != 2) { + return ERROR_MALFORMED; + } + } else { + // For everything else we just stash the codec specific data into + // the output format as a single piece of csd under "csd-0". 
+ mOutputFormat->setBuffer("csd-0", buffer); + } + + return OK; +} + } // namespace android -- cgit v1.1 From eb10919cd3c8c26b0b9c09f933d152248ac3a666 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 1 May 2013 11:18:40 -0700 Subject: Camera3: Add missing buffer listener include (for ZSL) Bug: 8563838 Change-Id: Ie053655f5583d20003abdabccee68980d7cdcbfd --- .../camera3/Camera3StreamBufferListener.h | 48 ++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h diff --git a/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h b/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h new file mode 100644 index 0000000..62ea6c0 --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H +#define ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H + +#include +#include + +namespace android { + +namespace camera3 { + +class Camera3StreamBufferListener : public virtual RefBase { +public: + + struct BufferInfo { + bool mOutput; // if false then input buffer + Rect mCrop; + uint32_t mTransform; + uint32_t mScalingMode; + int64_t mTimestamp; + uint64_t mFrameNumber; + }; + + // Buffer was acquired by the HAL + virtual void onBufferAcquired(const BufferInfo& bufferInfo) = 0; + // Buffer was released by the HAL + virtual void onBufferReleased(const BufferInfo& bufferInfo) = 0; +}; + +}; //namespace camera3 +}; //namespace android + +#endif -- cgit v1.1 From eb8709e3c65b59e85b882b5ca8710068708671be Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 1 May 2013 13:58:36 -0700 Subject: The software FLAC encoder now properly signals an error if client attempts to configure it with too large an input buffer size. Previously this would lead to memory corruption during encoding due to a typo. 
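For reference, the bound being enforced is one maximal frame of interleaved
16-bit stereo PCM. A simplified sketch of the check added below (illustrative
only; port bookkeeping and error reporting elided):

    // 1152 samples * 2 bytes per sample * 2 channels
    enum {
        kMaxNumSamplesPerFrame = 1152,
        kMaxInputBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t) * 2,
    };

    // Reject any attempt to negotiate a larger input buffer on the input port.
    if (defParams->nPortIndex == 0 &&
            defParams->nBufferSize > kMaxInputBufferSize) {
        return OMX_ErrorUnsupportedSetting;
    }

Anything within that bound can then be widened into the FLAC__int32 work
buffer without overrunning it.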
Change-Id: I229b07b7dbe87fb8424419706671b66a8d58ec6b related-to-bug: 8778893 --- .../codecs/flac/enc/SoftFlacEncoder.cpp | 21 +++++++++++++++++++-- .../codecs/flac/enc/SoftFlacEncoder.h | 1 + 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp index 233aed3..e64fe72 100644 --- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp +++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp @@ -109,7 +109,7 @@ void SoftFlacEncoder::initPorts() { def.eDir = OMX_DirInput; def.nBufferCountMin = kNumBuffers;// TODO verify that 1 is enough def.nBufferCountActual = def.nBufferCountMin; - def.nBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t) * 2; + def.nBufferSize = kMaxInputBufferSize; def.bEnabled = OMX_TRUE; def.bPopulated = OMX_FALSE; def.eDomain = OMX_PortDomainAudio; @@ -234,6 +234,22 @@ OMX_ERRORTYPE SoftFlacEncoder::internalSetParameter( return OMX_ErrorNone; } + case OMX_IndexParamPortDefinition: + { + OMX_PARAM_PORTDEFINITIONTYPE *defParams = + (OMX_PARAM_PORTDEFINITIONTYPE *)params; + + if (defParams->nPortIndex == 0) { + if (defParams->nBufferSize > kMaxInputBufferSize) { + ALOGE("Input buffer size must be at most %zu bytes", + kMaxInputBufferSize); + return OMX_ErrorUnsupportedSetting; + } + } + + // fall through + } + default: ALOGV("SoftFlacEncoder::internalSetParameter(default)"); return SimpleSoftOMXComponent::internalSetParameter(index, params); @@ -273,7 +289,7 @@ void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) { return; } - if (inHeader->nFilledLen > kMaxNumSamplesPerFrame * sizeof(FLAC__int32) * 2) { + if (inHeader->nFilledLen > kMaxInputBufferSize) { ALOGE("input buffer too large (%ld).", inHeader->nFilledLen); mSignalledError = true; notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); @@ -290,6 +306,7 @@ void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) { const unsigned nbInputSamples = inHeader->nFilledLen / 2; const OMX_S16 * const pcm16 = reinterpret_cast(inHeader->pBuffer); + CHECK_LE(nbInputSamples, 2 * kMaxNumSamplesPerFrame); for (unsigned i=0 ; i < nbInputSamples ; i++) { mInputBufferPcm32[i] = (FLAC__int32) pcm16[i]; } diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h index 1e0148a..97361fa 100644 --- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h +++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h @@ -52,6 +52,7 @@ private: enum { kNumBuffers = 2, kMaxNumSamplesPerFrame = 1152, + kMaxInputBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t) * 2, kMaxOutputBufferSize = 65536, //TODO check if this can be reduced }; -- cgit v1.1 From 0182f9acca6f873ee127898e408cf75cc316c3ea Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 1 May 2013 14:13:26 -0700 Subject: Don't even try to verify the native resolution validity a miracast sink advertises, even if it were valid we couldn't use it since it's not consistently implemented by sinks. 
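In short, parseFormatSpec() still computes whether the advertised native mode
is valid, but a failure is now only logged. A simplified sketch of the
resulting logic (illustrative; mirrors the change below):

    // The high bits select the resolution-table index, the low 3 bits the
    // resolution type.
    mNativeIndex = native >> 3;
    mNativeType = (ResolutionType)(native & 7);

    bool success = (mNativeType < kNumResolutionTypes)
            && GetConfiguration(mNativeType, mNativeIndex,
                                NULL, NULL, NULL, NULL);
    if (!success) {
        ALOGW("sink advertised an illegal native resolution, ignoring it");
    }
    return true;  // format negotiation proceeds either way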
Change-Id: Ibee6b3e23b5a55270fc3c419a581e2626530e3af related-to-bug: 8772006 --- media/libstagefright/wifi-display/VideoFormats.cpp | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp index d171c6f..da557f7 100644 --- a/media/libstagefright/wifi-display/VideoFormats.cpp +++ b/media/libstagefright/wifi-display/VideoFormats.cpp @@ -249,11 +249,20 @@ bool VideoFormats::parseFormatSpec(const char *spec) { mNativeIndex = native >> 3; mNativeType = (ResolutionType)(native & 7); + bool success; if (mNativeType >= kNumResolutionTypes) { - return false; + success = false; + } else { + success = GetConfiguration( + mNativeType, mNativeIndex, NULL, NULL, NULL, NULL); } - return GetConfiguration(mNativeType, mNativeIndex, NULL, NULL, NULL, NULL); + if (!success) { + ALOGW("sink advertised an illegal native resolution, fortunately " + "this value is ignored for the time being..."); + } + + return true; } AString VideoFormats::getFormatSpec(bool forM4Message) const { -- cgit v1.1 From 1f1872f1e1f3705e235ce784ce6d79102d26c117 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Mon, 29 Apr 2013 13:50:24 -0700 Subject: Camera2/3: Fix still image FOV reporting. Still FOV can vary as a function of the output sizes, with the function depending on the HAL version. Calculate and update the FOV whenever the output sizes might change. Bug: 8484377 Change-Id: I56f2cc768a1e128a159b326588be55b2752db06e --- .../camera/libcameraservice/camera2/Parameters.cpp | 131 +++++++++++++++++++-- .../camera/libcameraservice/camera2/Parameters.h | 4 + 2 files changed, 125 insertions(+), 10 deletions(-) diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index a304b35..c1b1daf 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -659,15 +659,13 @@ status_t Parameters::initialize(const CameraMetadata *info) { float minFocalLength = availableFocalLengths.data.f[0]; params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, minFocalLength); - camera_metadata_ro_entry_t sensorSize = - staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2); - if (!sensorSize.count) return NO_INIT; + float horizFov, vertFov; + res = calculatePictureFovs(&horizFov, &vertFov); + if (res != OK) { + ALOGE("%s: Can't calculate field of views!", __FUNCTION__); + return res; + } - // The fields of view here assume infinity focus, maximum wide angle - float horizFov = 180 / M_PI * - 2 * atanf(sensorSize.data.f[0] / (2 * minFocalLength)); - float vertFov = 180 / M_PI * - 2 * atanf(sensorSize.data.f[1] / (2 * minFocalLength)); params.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, horizFov); params.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, vertFov); @@ -861,6 +859,10 @@ status_t Parameters::buildFastInfo() { staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE); bool fixedLens = (minFocusDistance.data.f[0] == 0); + camera_metadata_ro_entry_t availableFocalLengths = + staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS); + if (!availableFocalLengths.count) return NO_INIT; + if (sceneModeOverrides.count > 0) { // sceneModeOverrides is defined to have 3 entries for each scene mode, // which are AE, AWB, and AF override modes the HAL wants for that scene @@ -928,6 +930,16 @@ status_t Parameters::buildFastInfo() { fastInfo.arrayHeight = arrayHeight; 
fastInfo.bestFaceDetectMode = bestFaceDetectMode; fastInfo.maxFaces = maxFaces; + + // Find smallest (widest-angle) focal length to use as basis of still + // picture FOV reporting. + fastInfo.minFocalLength = availableFocalLengths.data.f[0]; + for (size_t i = 1; i < availableFocalLengths.count; i++) { + if (fastInfo.minFocalLength > availableFocalLengths.data.f[i]) { + fastInfo.minFocalLength = availableFocalLengths.data.f[i]; + } + } + return OK; } @@ -1577,6 +1589,21 @@ status_t Parameters::set(const String8& paramString) { *this = validatedParams; + /** Update external parameters calculated from the internal ones */ + + // HORIZONTAL/VERTICAL FIELD OF VIEW + float horizFov, vertFov; + res = calculatePictureFovs(&horizFov, &vertFov); + if (res != OK) { + ALOGE("%s: Can't calculate FOVs", __FUNCTION__); + // continue so parameters are at least consistent + } + newParams.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, + horizFov); + newParams.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, + vertFov); + ALOGV("Current still picture FOV: %f x %f deg", horizFov, vertFov); + // Need to flatten again in case of overrides paramsFlattened = newParams.flatten(); params = newParams; @@ -2244,7 +2271,7 @@ int Parameters::cropXToArray(int x) const { CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); ALOG_ASSERT(x < previewCrop.width, "Crop-relative X coordinate = '%d' " - "is out of bounds (upper = %d)", x, previewCrop.width); + "is out of bounds (upper = %f)", x, previewCrop.width); int ret = x + previewCrop.left; @@ -2260,7 +2287,7 @@ int Parameters::cropYToArray(int y) const { CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); ALOG_ASSERT(y < previewCrop.height, "Crop-relative Y coordinate = '%d' is " - "out of bounds (upper = %d)", y, previewCrop.height); + "out of bounds (upper = %f)", y, previewCrop.height); int ret = y + previewCrop.top; @@ -2466,6 +2493,90 @@ Parameters::CropRegion Parameters::calculateCropRegion( return crop; } +status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov) + const { + camera_metadata_ro_entry_t sensorSize = + staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2); + if (!sensorSize.count) return NO_INIT; + + camera_metadata_ro_entry_t availableFocalLengths = + staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS); + if (!availableFocalLengths.count) return NO_INIT; + + float arrayAspect = static_cast(fastInfo.arrayWidth) / + fastInfo.arrayHeight; + float stillAspect = static_cast(pictureWidth) / pictureHeight; + ALOGV("Array aspect: %f, still aspect: %f", arrayAspect, stillAspect); + + // The crop factors from the full sensor array to the still picture crop + // region + float horizCropFactor = 1.f; + float vertCropFactor = 1.f; + + /** + * Need to calculate the still image field of view based on the total pixel + * array field of view, and the relative aspect ratios of the pixel array + * and output streams. + * + * Special treatment for quirky definition of crop region and relative + * stream cropping. + */ + if (quirks.meteringCropRegion) { + /** + * All streams are the same in height, so narrower aspect ratios will + * get cropped on the sides. First find the largest (widest) aspect + * ratio, then calculate the crop of the still FOV based on that. 
+ */ + float cropAspect = arrayAspect; + float aspects[] = { + stillAspect, + static_cast(previewWidth) / previewHeight, + static_cast(videoWidth) / videoHeight + }; + for (size_t i = 0; i < sizeof(aspects)/sizeof(aspects[0]); i++) { + if (cropAspect < aspects[i]) cropAspect = aspects[i]; + } + ALOGV("Widest crop aspect: %f", cropAspect); + // Horizontal crop of still is done based on fitting in the widest + // aspect ratio + horizCropFactor = stillAspect / cropAspect; + // Vertical crop is a function of the array aspect ratio and the + // widest aspect ratio. + vertCropFactor = arrayAspect / cropAspect; + } else { + /** + * Crop are just a function of just the still/array relative aspect + * ratios. Since each stream will maximize its area within the crop + * region, and for FOV we assume a full-sensor crop region, we only ever + * crop the FOV either vertically or horizontally, never both. + */ + horizCropFactor = (arrayAspect > stillAspect) ? + (stillAspect / arrayAspect) : 1.f; + vertCropFactor = (arrayAspect < stillAspect) ? + (arrayAspect / stillAspect) : 1.f; + } + ALOGV("Horiz crop factor: %f, vert crop fact: %f", + horizCropFactor, vertCropFactor); + /** + * Basic field of view formula is: + * angle of view = 2 * arctangent ( d / 2f ) + * where d is the physical sensor dimension of interest, and f is + * the focal length. This only applies to rectilinear sensors, for focusing + * at distances >> f, etc. + */ + if (horizFov != NULL) { + *horizFov = 180 / M_PI * 2 * + atanf(horizCropFactor * sensorSize.data.f[0] / + (2 * fastInfo.minFocalLength)); + } + if (vertFov != NULL) { + *vertFov = 180 / M_PI * 2 * + atanf(vertCropFactor * sensorSize.data.f[1] / + (2 * fastInfo.minFocalLength)); + } + return OK; +} + int32_t Parameters::fpsFromRange(int32_t /*min*/, int32_t max) const { return max; } diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h index fe3ec1d..15a1ef8 100644 --- a/services/camera/libcameraservice/camera2/Parameters.h +++ b/services/camera/libcameraservice/camera2/Parameters.h @@ -183,6 +183,7 @@ struct Parameters { } }; DefaultKeyedVector sceneModeOverrides; + float minFocalLength; } fastInfo; // Quirks information; these are short-lived flags to enable workarounds for @@ -243,6 +244,9 @@ struct Parameters { }; CropRegion calculateCropRegion(CropRegion::Outputs outputs) const; + // Calculate the field of view of the high-resolution JPEG capture + status_t calculatePictureFovs(float *horizFov, float *vertFov) const; + // Static methods for debugging and converting between camera1 and camera2 // parameters -- cgit v1.1 From b489b1639c0c12fdd498def46d3f5be3e1fdf6b9 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 1 May 2013 16:03:28 -0700 Subject: Fix miracast source code to ignore the encoder output format change it doesn't care about. 
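A simplified sketch of the dequeue handling after this change (illustrative;
the surrounding retry loop and error paths are unchanged):

    status_t err = mEncoder->dequeueOutputBuffer(
            &bufferIndex, &offset, &size, &timeUs, &flags);

    if (err == INFO_FORMAT_CHANGED) {
        continue;  // the source does not consume the format, just retry
    } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
        mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
        continue;  // refresh the cached buffer array, then retry
    }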
Change-Id: Iec1594775a98b0c1aba662cc9f08652d2f8d4805 related-to-bug: 8616651 --- media/libstagefright/wifi-display/source/Converter.cpp | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index 0a8462c..5344623 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -649,6 +649,13 @@ status_t Converter::doMoreWork() { &bufferIndex, &offset, &size, &timeUs, &flags); if (err != OK) { + if (err == INFO_FORMAT_CHANGED) { + continue; + } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) { + mEncoder->getOutputBuffers(&mEncoderOutputBuffers); + continue; + } + if (err == -EAGAIN) { err = OK; } -- cgit v1.1 From c92d6b0d491df675c6728cd4ffb7217469cc9d72 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Wed, 1 May 2013 16:15:49 -0700 Subject: Fix reverb at 48kHz The LVM reverb wrapper had a test to only accept input sampling rate of 44.1 kHz. As the LVM reberb engine supports multiple sampling rate we can remove this test. The fix for issue 8512027 (commit 2a9c5cd4) caused a regression because the framework now checks the return code of the effect configure command and ignores subsequent commands in case of error. Bug: 8630044 Change-Id: I3146871f1ad8f7945a2e63ea763dd7b87368337d --- media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp index 87e2c85..8a96212 100644 --- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp +++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp @@ -616,10 +616,6 @@ int Reverb_setConfig(ReverbContext *pContext, effect_config_t *pConfig){ || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE); CHECK_ARG(pConfig->inputCfg.format == AUDIO_FORMAT_PCM_16_BIT); - if(pConfig->inputCfg.samplingRate != 44100){ - return -EINVAL; - } - //ALOGV("\tReverb_setConfig calling memcpy"); pContext->config = *pConfig; @@ -648,7 +644,7 @@ int Reverb_setConfig(ReverbContext *pContext, effect_config_t *pConfig){ return -EINVAL; } - if(pContext->SampleRate != SampleRate){ + if (pContext->SampleRate != SampleRate) { LVREV_ControlParams_st ActiveParams; LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; @@ -662,11 +658,14 @@ int Reverb_setConfig(ReverbContext *pContext, effect_config_t *pConfig){ LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "Reverb_setConfig") if(LvmStatus != LVREV_SUCCESS) return -EINVAL; + ActiveParams.SampleRate = SampleRate; + LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams); LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "Reverb_setConfig") + if(LvmStatus != LVREV_SUCCESS) return -EINVAL; //ALOGV("\tReverb_setConfig Succesfully called LVREV_SetControlParameters\n"); - + pContext->SampleRate = SampleRate; }else{ //ALOGV("\tReverb_setConfig keep sampling rate at %d", SampleRate); } @@ -818,6 +817,7 @@ int Reverb_init(ReverbContext *pContext){ /* General parameters */ params.OperatingMode = LVM_MODE_ON; params.SampleRate = LVM_FS_44100; + pContext->SampleRate = LVM_FS_44100; if(pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_MONO){ params.SourceFormat = LVM_MONO; -- cgit v1.1 From 768b7d07e8c660d7b42b0f6b76da275821ee0adb Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 2 May 2013 11:53:00 -0700 Subject: camera3: Don't crash when 
taking a still capture with ZSL disabled Bug: 8790212 Change-Id: I4bf7ac5ab856b0ea1738844182efb86137e14083 --- services/camera/libcameraservice/camera2/ZslProcessor3.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/camera2/ZslProcessor3.cpp b/services/camera/libcameraservice/camera2/ZslProcessor3.cpp index 88bcefb..be1ffeb 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor3.cpp +++ b/services/camera/libcameraservice/camera2/ZslProcessor3.cpp @@ -305,7 +305,10 @@ status_t ZslProcessor3::clearZslQueue() { } status_t ZslProcessor3::clearZslQueueLocked() { - return mZslStream->clearInputRingBuffer(); + if (mZslStream != 0) { + return mZslStream->clearInputRingBuffer(); + } + return OK; } void ZslProcessor3::dump(int fd, const Vector& /*args*/) const { -- cgit v1.1 From f2ae760602a948598a168ad43673bfbd9d50fc6b Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 29 Apr 2013 13:17:50 -0700 Subject: A reference to the psi section data could become invalid if more sections were added to the KeyedVector. Change-Id: I095b5452ccfad89d69fc502fb21ce39495e201c3 related-to-bug: 8754565 --- media/libstagefright/mpeg2ts/ATSParser.cpp | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp index c12572f..9850a46 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.cpp +++ b/media/libstagefright/mpeg2ts/ATSParser.cpp @@ -1059,7 +1059,7 @@ status_t ATSParser::parsePID( ssize_t sectionIndex = mPSISections.indexOfKey(PID); if (sectionIndex >= 0) { - const sp §ion = mPSISections.valueAt(sectionIndex); + sp section = mPSISections.valueAt(sectionIndex); if (payload_unit_start_indicator) { CHECK(section->isEmpty()); @@ -1068,7 +1068,6 @@ status_t ATSParser::parsePID( br->skipBits(skip * 8); } - CHECK((br->numBitsLeft() % 8) == 0); status_t err = section->append(br->data(), br->numBitsLeft() / 8); @@ -1103,10 +1102,13 @@ status_t ATSParser::parsePID( if (!handled) { mPSISections.removeItem(PID); + section.clear(); } } - section->clear(); + if (section != NULL) { + section->clear(); + } return OK; } -- cgit v1.1 From 0b918674b971c111b8cba322aad5848a6d53a913 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 2 May 2013 14:59:28 -0700 Subject: camera3: Don't eagerly finish configuring bidi streams more than once Finishing all stream configuration immediately is good, but when a stream is both input and output it attempted to finish configuring bidi streams twice. Since all ZSL streams are bidi, when we had a ZSL stream active preview would immediately stop working. 
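A simplified sketch of the guard introduced below (illustrative; stream types,
locking and error messages are as in Camera3Device.cpp and elided here):

    for (size_t i = 0; i < mOutputStreams.size(); i++) {
        // Only streams still marked as configuring need to be finished; a
        // bidirectional (ZSL) stream already finished as the input stream
        // must not be finished a second time as an output stream.
        if (mOutputStreams.editValueAt(i)->isConfiguring()) {
            res = mOutputStreams.editValueAt(i)->finishConfiguration(mHal3Device);
            if (res != OK) {
                return res;
            }
        }
    }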
Bug: 8563838 Change-Id: Iec998f11f6405fc15f3f31bd7cd29f03a7968d14 --- services/camera/libcameraservice/Camera3Device.cpp | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index 5f87e8b..5e5bfc2 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -1060,7 +1060,7 @@ status_t Camera3Device::configureStreamsLocked() { // TODO: Try to relax this later back to lazy completion, which should be // faster - if (mInputStream != NULL) { + if (mInputStream != NULL && mInputStream->isConfiguring()) { res = mInputStream->finishConfiguration(mHal3Device); if (res != OK) { SET_ERR_L("Can't finish configuring input stream %d: %s (%d)", @@ -1070,11 +1070,15 @@ status_t Camera3Device::configureStreamsLocked() { } for (size_t i = 0; i < mOutputStreams.size(); i++) { - res = mOutputStreams.editValueAt(i)->finishConfiguration(mHal3Device); - if (res != OK) { - SET_ERR_L("Can't finish configuring output stream %d: %s (%d)", - mOutputStreams[i]->getId(), strerror(-res), res); - return res; + sp outputStream = + mOutputStreams.editValueAt(i); + if (outputStream->isConfiguring()) { + res = outputStream->finishConfiguration(mHal3Device); + if (res != OK) { + SET_ERR_L("Can't finish configuring output stream %d: %s (%d)", + outputStream->getId(), strerror(-res), res); + return res; + } } } -- cgit v1.1 From e2d1e3d0436aec645739c65e6d3131dd814f40a1 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 30 Apr 2013 18:18:06 -0700 Subject: camera: Use new camera_metadata structure validation functions * Reject unvalidated metadata across binder boundaries * Sanity check in-process CameraMetadata when mutating data Bug: 8713951 Change-Id: I121d8e15f8fdc9cdbbaf27dfd947813e11831e1c --- camera/CameraMetadata.cpp | 15 +++++++++++ camera/IProCameraUser.cpp | 63 ++++++++++++++++++++++++++++++++++++----------- 2 files changed, 63 insertions(+), 15 deletions(-) diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp index 6c3e233..a8f9eff 100644 --- a/camera/CameraMetadata.cpp +++ b/camera/CameraMetadata.cpp @@ -14,6 +14,8 @@ * limitations under the License. */ +// #define LOG_NDEBUG 0 + #define LOG_TAG "Camera2-Metadata" #include #include @@ -112,6 +114,10 @@ void CameraMetadata::acquire(camera_metadata_t *buffer) { } clear(); mBuffer = buffer; + + ALOGE_IF(validate_camera_metadata_structure(mBuffer, /*size*/NULL) != OK, + "%s: Failed to validate metadata structure %p", + __FUNCTION__, buffer); } void CameraMetadata::acquire(CameraMetadata &other) { @@ -289,6 +295,15 @@ status_t CameraMetadata::updateImpl(uint32_t tag, const void *data, __FUNCTION__, get_camera_metadata_section_name(tag), get_camera_metadata_tag_name(tag), tag, strerror(-res), res); } + + IF_ALOGV() { + ALOGE_IF(validate_camera_metadata_structure(mBuffer, /*size*/NULL) != + OK, + + "%s: Failed to validate metadata structure after update %p", + __FUNCTION__, mBuffer); + } + return res; } diff --git a/camera/IProCameraUser.cpp b/camera/IProCameraUser.cpp index 0c94bd4..4c4dec3 100644 --- a/camera/IProCameraUser.cpp +++ b/camera/IProCameraUser.cpp @@ -50,17 +50,30 @@ enum { * Caller becomes the owner of the new metadata * 'const Parcel' doesnt prevent us from calling the read functions. 
* which is interesting since it changes the internal state + * + * NULL can be returned when no metadata was sent, OR if there was an issue + * unpacking the serialized data (i.e. bad parcel or invalid structure). */ void readMetadata(const Parcel& data, camera_metadata_t** out) { - camera_metadata_t* metadata; + + status_t err = OK; + + camera_metadata_t* metadata = NULL; + + if (out) { + *out = NULL; + } // arg0 = metadataSize (int32) - size_t metadataSize = static_cast(data.readInt32()); + int32_t metadataSizeTmp = -1; + if ((err = data.readInt32(&metadataSizeTmp)) != OK) { + ALOGE("%s: Failed to read metadata size (error %d %s)", + __FUNCTION__, err, strerror(-err)); + return; + } + const size_t metadataSize = static_cast(metadataSizeTmp); if (metadataSize == 0) { - if (out) { - *out = NULL; - } return; } @@ -70,21 +83,23 @@ void readMetadata(const Parcel& data, camera_metadata_t** out) { ReadableBlob blob; // arg1 = metadata (blob) - { - data.readBlob(metadataSize, &blob); + do { + if ((err = data.readBlob(metadataSize, &blob)) != OK) { + ALOGE("%s: Failed to read metadata blob (sized %d). Possible " + " serialization bug. Error %d %s", + __FUNCTION__, metadataSize, err, strerror(-err)); + break; + } const camera_metadata_t* tmp = reinterpret_cast(blob.data()); - size_t entry_capacity = get_camera_metadata_entry_capacity(tmp); - size_t data_capacity = get_camera_metadata_data_capacity(tmp); - metadata = allocate_camera_metadata(entry_capacity, data_capacity); - copy_camera_metadata(metadata, metadataSize, tmp); - } + metadata = allocate_copy_camera_metadata_checked(tmp, metadataSize); + } while(0); blob.release(); if (out) { *out = metadata; - } else { + } else if (metadata != NULL) { free_camera_metadata(metadata); } } @@ -95,14 +110,13 @@ void readMetadata(const Parcel& data, camera_metadata_t** out) { */ void writeMetadata(Parcel& data, camera_metadata_t* metadata) { // arg0 = metadataSize (int32) - size_t metadataSize; if (metadata == NULL) { data.writeInt32(0); return; } - metadataSize = get_camera_metadata_compact_size(metadata); + const size_t metadataSize = get_camera_metadata_compact_size(metadata); data.writeInt32(static_cast(metadataSize)); // arg1 = metadata (blob) @@ -110,6 +124,25 @@ void writeMetadata(Parcel& data, camera_metadata_t* metadata) { { data.writeBlob(metadataSize, &blob); copy_camera_metadata(blob.data(), metadataSize, metadata); + + IF_ALOGV() { + if (validate_camera_metadata_structure( + (const camera_metadata_t*)blob.data(), + &metadataSize) != OK) { + ALOGV("%s: Failed to validate metadata %p after writing blob", + __FUNCTION__, blob.data()); + } else { + ALOGV("%s: Metadata written to blob. Validation success", + __FUNCTION__); + } + } + + // Not too big of a problem since receiving side does hard validation + if (validate_camera_metadata_structure(metadata, &metadataSize) != OK) { + ALOGW("%s: Failed to validate metadata %p before writing blob", + __FUNCTION__, metadata); + } + } blob.release(); } -- cgit v1.1 From 93747b9c7724f690b3068300514c05629e0b0a3e Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 1 May 2013 15:42:20 -0700 Subject: Camera: Hotplug - conditionally transition to PRESENT when clients disconnect Fixes an issue where a client could unconditionally transition to PRESENT after a client disconnects, even though the underlying HAL status was actually NOT_PRESENT or ENUMERATING. 
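Illustrative usage of the new conditional transition (mirrors the
Client::disconnect() change below): on client disconnect, the camera is moved
back to PRESENT only if the HAL has not reported it unplugged in the meantime.

    StatusVector rejectSourceStates;
    rejectSourceStates.push_back(ICameraServiceListener::STATUS_NOT_PRESENT);
    rejectSourceStates.push_back(ICameraServiceListener::STATUS_ENUMERATING);

    // No-op if the current status is NOT_PRESENT or ENUMERATING.
    mCameraService->updateStatus(ICameraServiceListener::STATUS_PRESENT,
                                 mCameraId,
                                 &rejectSourceStates);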
Bug: 8780114 Change-Id: I68adb5fc819eec3b046ddcb2507b84bedc999a0f --- services/camera/libcameraservice/CameraService.cpp | 37 ++++++++++++++++++---- services/camera/libcameraservice/CameraService.h | 8 ++--- 2 files changed, 33 insertions(+), 12 deletions(-) diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index cdeb92e..757a781 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -928,8 +928,15 @@ void CameraService::Client::disconnect() { ALOGV("Client::disconnect"); BasicClient::disconnect(); mCameraService->setCameraFree(mCameraId); + + StatusVector rejectSourceStates; + rejectSourceStates.push_back(ICameraServiceListener::STATUS_NOT_PRESENT); + rejectSourceStates.push_back(ICameraServiceListener::STATUS_ENUMERATING); + + // Transition to PRESENT if the camera is not in either of above 2 states mCameraService->updateStatus(ICameraServiceListener::STATUS_PRESENT, - mCameraId); + mCameraId, + &rejectSourceStates); } CameraService::Client::OpsCallback::OpsCallback(wp client): @@ -1111,15 +1118,11 @@ status_t CameraService::dump(int fd, const Vector& args) { } void CameraService::updateStatus(ICameraServiceListener::Status status, - int32_t cameraId) { + int32_t cameraId, + const StatusVector *rejectSourceStates) { // do not lock mServiceLock here or can get into a deadlock from // connect() -> ProClient::disconnect -> updateStatus Mutex::Autolock lock(mStatusMutex); - updateStatusUnsafe(status, cameraId); -} - -void CameraService::updateStatusUnsafe(ICameraServiceListener::Status status, - int32_t cameraId) { ICameraServiceListener::Status oldStatus = mStatusList[cameraId]; @@ -1139,6 +1142,26 @@ void CameraService::updateStatusUnsafe(ICameraServiceListener::Status status, return; } + if (rejectSourceStates != NULL) { + const StatusVector &rejectList = *rejectSourceStates; + StatusVector::const_iterator it = rejectList.begin(); + + /** + * Sometimes we want to conditionally do a transition. + * For example if a client disconnects, we want to go to PRESENT + * only if we weren't already in NOT_PRESENT or ENUMERATING. + */ + for (; it != rejectList.end(); ++it) { + if (oldStatus == *it) { + ALOGV("%s: Rejecting status transition for Camera ID %d, " + " since the source state was was in one of the bad " + " states.", __FUNCTION__, cameraId); + mStatusList[cameraId] = oldStatus; + return; + } + } + } + /** * ProClients lose their exclusive lock. 
* - Done before the CameraClient can initialize the HAL device, diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index 8cb1691..710f164 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -341,14 +341,12 @@ private: ICameraServiceListener::Status getStatus(int cameraId) const; + typedef Vector StatusVector; // Broadcast the new status if it changed (locks the service mutex) void updateStatus( ICameraServiceListener::Status status, - int32_t cameraId); - // Call this one when the service mutex is already held (idempotent) - void updateStatusUnsafe( - ICameraServiceListener::Status status, - int32_t cameraId); + int32_t cameraId, + const StatusVector *rejectSourceStates = NULL); // IBinder::DeathRecipient implementation virtual void binderDied(const wp &who); -- cgit v1.1 From 831a0055665c3d15ff9c99ad23e5ab2b7346f2ac Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 6 May 2013 16:06:24 -0700 Subject: Fix Audioflinger crash when TeeSink is enabled Bug: 8834855 Change-Id: I54665f16d79901970348a8247d9a354da2990f42 --- services/audioflinger/AudioFlinger.h | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index d0ef922..b0efef6 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -588,11 +588,12 @@ private: status_t closeOutput_nonvirtual(audio_io_handle_t output); status_t closeInput_nonvirtual(audio_io_handle_t input); -#ifdef TEE_SINK +// do not use #ifdef here, since AudioFlinger.h is included by more than one module +//#ifdef TEE_SINK // all record threads serially share a common tee sink, which is re-created on format change sp mRecordTeeSink; sp mRecordTeeSource; -#endif +//#endif public: -- cgit v1.1 From 4a66ad403b21a256773c719ae39f7b5e705b244e Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Sat, 4 May 2013 18:24:30 -0700 Subject: Camera3: Support flexible YUV for preview callbacks When the HAL supports it, and the client asks for YV12 or NV21, use the new flexible YUV format instead. 
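A simplified sketch of the format substitution introduced below
(illustrative): the app keeps requesting NV21 or YV12, but the callback
stream is backed by the HAL's flexible YUV format whenever the HAL
advertises support for it.

    int32_t callbackFormat = params.previewFormat;
    if (params.fastInfo.useFlexibleYuv &&
            (params.previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP ||
             params.previewFormat == HAL_PIXEL_FORMAT_YV12)) {
        callbackFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
    }
    // Buffers are converted back to the requested layout (NV21 swizzle, or
    // YV12 with 16-byte-aligned strides) before being handed to the
    // preview callback.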
Bug: 8734880 Change-Id: Ib0129d9c26a6b30f3be7aa624c2439c6edba1bbd --- .../libcameraservice/camera2/CallbackProcessor.cpp | 142 +++++++++++++++++++-- .../libcameraservice/camera2/CallbackProcessor.h | 7 + .../camera/libcameraservice/camera2/Parameters.cpp | 52 ++++++-- .../camera/libcameraservice/camera2/Parameters.h | 1 + 4 files changed, 181 insertions(+), 21 deletions(-) diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp index dd37283..a3d6cb2 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp @@ -26,6 +26,7 @@ #include "../CameraDeviceBase.h" #include "../Camera2Client.h" +#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) ) namespace android { namespace camera2 { @@ -64,6 +65,14 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { return INVALID_OPERATION; } + // If possible, use the flexible YUV format + int32_t callbackFormat = params.previewFormat; + if (params.fastInfo.useFlexibleYuv && + (params.previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP || + params.previewFormat == HAL_PIXEL_FORMAT_YV12) ) { + callbackFormat = HAL_PIXEL_FORMAT_YCbCr_420_888; + } + if (mCallbackConsumer == 0) { // Create CPU buffer queue endpoint mCallbackConsumer = new CpuConsumer(kCallbackHeapCount); @@ -86,12 +95,12 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { } if (currentWidth != (uint32_t)params.previewWidth || currentHeight != (uint32_t)params.previewHeight || - currentFormat != (uint32_t)params.previewFormat) { + currentFormat != (uint32_t)callbackFormat) { // Since size should only change while preview is not running, // assuming that all existing use of old callback stream is // completed. - ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed", - __FUNCTION__, mId, mCallbackStreamId); + ALOGV("%s: Camera %d: Deleting stream %d since the buffer " + "parameters changed", __FUNCTION__, mId, mCallbackStreamId); res = device->deleteStream(mCallbackStreamId); if (res != OK) { ALOGE("%s: Camera %d: Unable to delete old output stream " @@ -104,12 +113,12 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { } if (mCallbackStreamId == NO_STREAM) { - ALOGV("Creating callback stream: %d %d format 0x%x", + ALOGV("Creating callback stream: %d x %d, format 0x%x, API format 0x%x", params.previewWidth, params.previewHeight, - params.previewFormat); + callbackFormat, params.previewFormat); res = device->createStream(mCallbackWindow, params.previewWidth, params.previewHeight, - params.previewFormat, 0, &mCallbackStreamId); + callbackFormat, 0, &mCallbackStreamId); if (res != OK) { ALOGE("%s: Camera %d: Can't create output stream for callbacks: " "%s (%d)", __FUNCTION__, mId, @@ -220,6 +229,8 @@ status_t CallbackProcessor::processNewCallback(sp &client) { ALOGV("%s: Camera %d: Preview callback available", __FUNCTION__, mId); + bool useFlexibleYuv = false; + int32_t previewFormat = 0; { SharedParameters::Lock l(client->getParameters()); @@ -246,10 +257,18 @@ status_t CallbackProcessor::processNewCallback(sp &client) { return OK; } - if (imgBuffer.format != l.mParameters.previewFormat) { + previewFormat = l.mParameters.previewFormat; + useFlexibleYuv = l.mParameters.fastInfo.useFlexibleYuv && + (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP || + previewFormat == HAL_PIXEL_FORMAT_YV12); + + int32_t expectedFormat = useFlexibleYuv ? 
+ HAL_PIXEL_FORMAT_YCbCr_420_888 : previewFormat; + + if (imgBuffer.format != expectedFormat) { ALOGE("%s: Camera %d: Unexpected format for callback: " - "%x, expected %x", __FUNCTION__, mId, - imgBuffer.format, l.mParameters.previewFormat); + "0x%x, expected 0x%x", __FUNCTION__, mId, + imgBuffer.format, expectedFormat); mCallbackConsumer->unlockBuffer(imgBuffer); return INVALID_OPERATION; } @@ -262,9 +281,28 @@ status_t CallbackProcessor::processNewCallback(sp &client) { } } + uint32_t destYStride = 0; + uint32_t destCStride = 0; + if (useFlexibleYuv) { + if (previewFormat == HAL_PIXEL_FORMAT_YV12) { + // Strides must align to 16 for YV12 + destYStride = ALIGN(imgBuffer.width, 16); + destCStride = ALIGN(destYStride / 2, 16); + } else { + // No padding for NV21 + ALOG_ASSERT(previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP, + "Unexpected preview format 0x%x", previewFormat); + destYStride = imgBuffer.width; + destCStride = destYStride / 2; + } + } else { + destYStride = imgBuffer.stride; + // don't care about cStride + } + size_t bufferSize = Camera2Client::calculateBufferSize( imgBuffer.width, imgBuffer.height, - imgBuffer.format, imgBuffer.stride); + previewFormat, destYStride); size_t currentBufferSize = (mCallbackHeap == 0) ? 0 : (mCallbackHeap->mHeap->getSize() / kCallbackHeapCount); if (bufferSize != currentBufferSize) { @@ -294,7 +332,7 @@ status_t CallbackProcessor::processNewCallback(sp &client) { mCallbackHeapHead = (mCallbackHeapHead + 1) & kCallbackHeapCount; mCallbackHeapFree--; - // TODO: Get rid of this memcpy by passing the gralloc queue all the way + // TODO: Get rid of this copy by passing the gralloc queue all the way // to app ssize_t offset; @@ -303,7 +341,20 @@ status_t CallbackProcessor::processNewCallback(sp &client) { mCallbackHeap->mBuffers[heapIdx]->getMemory(&offset, &size); uint8_t *data = (uint8_t*)heap->getBase() + offset; - memcpy(data, imgBuffer.data, bufferSize); + + if (!useFlexibleYuv) { + // Can just memcpy when HAL format matches API format + memcpy(data, imgBuffer.data, bufferSize); + } else { + res = convertFromFlexibleYuv(previewFormat, data, imgBuffer, + destYStride, destCStride); + if (res != OK) { + ALOGE("%s: Camera %d: Can't convert between 0x%x and 0x%x formats!", + __FUNCTION__, mId, imgBuffer.format, previewFormat); + mCallbackConsumer->unlockBuffer(imgBuffer); + return BAD_VALUE; + } + } ALOGV("%s: Freeing buffer", __FUNCTION__); mCallbackConsumer->unlockBuffer(imgBuffer); @@ -328,5 +379,72 @@ status_t CallbackProcessor::processNewCallback(sp &client) { return OK; } +status_t CallbackProcessor::convertFromFlexibleYuv(int32_t previewFormat, + uint8_t *dst, + const CpuConsumer::LockedBuffer &src, + uint32_t dstYStride, + uint32_t dstCStride) const { + + if (previewFormat != HAL_PIXEL_FORMAT_YCrCb_420_SP && + previewFormat != HAL_PIXEL_FORMAT_YV12) { + ALOGE("%s: Camera %d: Unexpected preview format when using " + "flexible YUV: 0x%x", __FUNCTION__, mId, previewFormat); + return INVALID_OPERATION; + } + + // Copy Y plane, adjusting for stride + const uint8_t *ySrc = src.data; + uint8_t *yDst = dst; + for (size_t row = 0; row < src.height; row++) { + memcpy(yDst, ySrc, src.width); + ySrc += src.stride; + yDst += dstYStride; + } + + // Copy/swizzle chroma planes, 4:2:0 subsampling + const uint8_t *uSrc = src.dataCb; + const uint8_t *vSrc = src.dataCr; + size_t chromaHeight = src.height / 2; + size_t chromaWidth = src.width / 2; + ssize_t chromaGap = src.chromaStride - + (chromaWidth * src.chromaStep); + size_t dstChromaGap = dstCStride - 
chromaWidth; + + if (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP) { + // NV21 + uint8_t *vuDst = yDst; + for (size_t row = 0; row < chromaHeight; row++) { + for (size_t col = 0; col < chromaWidth; col++) { + *(vuDst++) = *vSrc; + *(vuDst++) = *uSrc; + vSrc += src.chromaStep; + uSrc += src.chromaStep; + } + vSrc += chromaGap; + uSrc += chromaGap; + } + } else { + // YV12 + ALOG_ASSERT(previewFormat == HAL_PIXEL_FORMAT_YV12, + "Unexpected preview format 0x%x", previewFormat); + uint8_t *vDst = yDst; + uint8_t *uDst = yDst + chromaHeight * dstCStride; + for (size_t row = 0; row < chromaHeight; row++) { + for (size_t col = 0; col < chromaWidth; col++) { + *(vDst++) = *vSrc; + *(uDst++) = *uSrc; + vSrc += src.chromaStep; + uSrc += src.chromaStep; + } + vSrc += chromaGap; + uSrc += chromaGap; + vDst += dstChromaGap; + uDst += dstChromaGap; + } + } + + return OK; +} + }; // namespace camera2 }; // namespace android diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.h b/services/camera/libcameraservice/camera2/CallbackProcessor.h index 1c40a03..d851a84 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.h +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.h @@ -77,6 +77,13 @@ class CallbackProcessor: status_t processNewCallback(sp &client); // Used when shutting down status_t discardNewCallback(); + + // Convert from flexible YUV to NV21 or YV12 + status_t convertFromFlexibleYuv(int32_t previewFormat, + uint8_t *dst, + const CpuConsumer::LockedBuffer &src, + uint32_t dstYStride, + uint32_t dstCStride) const; }; diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index b26cd09..3503869 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -152,7 +152,16 @@ status_t Parameters::initialize(const CameraMetadata *info) { supportedPreviewFormats += CameraParameters::PIXEL_FORMAT_RGBA8888; break; + case HAL_PIXEL_FORMAT_YCbCr_420_888: + // Flexible YUV allows both YV12 and NV21 + supportedPreviewFormats += + CameraParameters::PIXEL_FORMAT_YUV420P; + supportedPreviewFormats += ","; + supportedPreviewFormats += + CameraParameters::PIXEL_FORMAT_YUV420SP; + break; // Not advertizing JPEG, RAW_SENSOR, etc, for preview formats + case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: case HAL_PIXEL_FORMAT_RAW_SENSOR: case HAL_PIXEL_FORMAT_BLOB: addComma = false; @@ -863,6 +872,11 @@ status_t Parameters::buildFastInfo() { staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS); if (!availableFocalLengths.count) return NO_INIT; + camera_metadata_ro_entry_t availableFormats = + staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS); + if (!availableFormats.count) return NO_INIT; + + if (sceneModeOverrides.count > 0) { // sceneModeOverrides is defined to have 3 entries for each scene mode, // which are AE, AWB, and AF override modes the HAL wants for that scene @@ -940,6 +954,17 @@ status_t Parameters::buildFastInfo() { } } + // Check if the HAL supports HAL_PIXEL_FORMAT_YCbCr_420_888 + fastInfo.useFlexibleYuv = false; + for (size_t i = 0; i < availableFormats.count; i++) { + if (availableFormats.data.i32[i] == HAL_PIXEL_FORMAT_YCbCr_420_888) { + fastInfo.useFlexibleYuv = true; + break; + } + } + ALOGV("Camera %d: Flexible YUV %s supported", + cameraId, fastInfo.useFlexibleYuv ? 
"is" : "is not"); + return OK; } @@ -1085,15 +1110,24 @@ status_t Parameters::set(const String8& paramString) { } camera_metadata_ro_entry_t availableFormats = staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS); - for (i = 0; i < availableFormats.count; i++) { - if (availableFormats.data.i32[i] == validatedParams.previewFormat) - break; - } - if (i == availableFormats.count) { - ALOGE("%s: Requested preview format %s (0x%x) is not supported", - __FUNCTION__, newParams.getPreviewFormat(), - validatedParams.previewFormat); - return BAD_VALUE; + // If using flexible YUV, always support NV21/YV12. Otherwise, check + // HAL's list. + if (! (fastInfo.useFlexibleYuv && + (validatedParams.previewFormat == + HAL_PIXEL_FORMAT_YCrCb_420_SP || + validatedParams.previewFormat == + HAL_PIXEL_FORMAT_YV12) ) ) { + // Not using flexible YUV format, so check explicitly + for (i = 0; i < availableFormats.count; i++) { + if (availableFormats.data.i32[i] == + validatedParams.previewFormat) break; + } + if (i == availableFormats.count) { + ALOGE("%s: Requested preview format %s (0x%x) is not supported", + __FUNCTION__, newParams.getPreviewFormat(), + validatedParams.previewFormat); + return BAD_VALUE; + } } } diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h index 6d85037..b994ec9 100644 --- a/services/camera/libcameraservice/camera2/Parameters.h +++ b/services/camera/libcameraservice/camera2/Parameters.h @@ -184,6 +184,7 @@ struct Parameters { }; DefaultKeyedVector sceneModeOverrides; float minFocalLength; + bool useFlexibleYuv; } fastInfo; // Quirks information; these are short-lived flags to enable workarounds for -- cgit v1.1 From 05bbae99e0fc30ffdef8a3c5037fb3db7c55211b Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 7 May 2013 13:22:15 -0700 Subject: camera2: Add height to the crop region metadata property Bug: 8756080 Change-Id: I90b9166440e92f7675255a01ddc4a980c2e52201 --- services/camera/libcameraservice/camera2/Parameters.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index b26cd09..f50ca9e 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -1810,13 +1810,14 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { CropRegion::OUTPUT_PREVIEW | CropRegion::OUTPUT_VIDEO | CropRegion::OUTPUT_PICTURE )); - int32_t reqCropRegion[3] = { + int32_t reqCropRegion[4] = { static_cast(crop.left), static_cast(crop.top), - static_cast(crop.width) + static_cast(crop.width), + static_cast(crop.height) }; res = request->update(ANDROID_SCALER_CROP_REGION, - reqCropRegion, 3); + reqCropRegion, 4); if (res != OK) return res; uint8_t reqVstabMode = videoStabilization ? 
-- cgit v1.1 From c3624cbf410c8915a103fcb3dacaab583badcfe0 Mon Sep 17 00:00:00 2001 From: Alex Ray Date: Tue, 7 May 2013 14:31:02 -0700 Subject: Camera2: FoV quirk crop regions Bug: 8484377 Change-Id: I5ffcc20b68dc92b502acc9898e57f12cadb92848 --- .../camera/libcameraservice/camera2/Parameters.cpp | 37 ++++++++++------------ 1 file changed, 17 insertions(+), 20 deletions(-) diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index b26cd09..b3d0984 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -2524,27 +2524,24 @@ status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov) * stream cropping. */ if (quirks.meteringCropRegion) { - /** - * All streams are the same in height, so narrower aspect ratios will - * get cropped on the sides. First find the largest (widest) aspect - * ratio, then calculate the crop of the still FOV based on that. - */ - float cropAspect = arrayAspect; - float aspects[] = { - stillAspect, - static_cast(previewWidth) / previewHeight, - static_cast(videoWidth) / videoHeight - }; - for (size_t i = 0; i < sizeof(aspects)/sizeof(aspects[0]); i++) { - if (cropAspect < aspects[i]) cropAspect = aspects[i]; + // Use max of preview and video as first crop + float previewAspect = static_cast(previewWidth) / previewHeight; + float videoAspect = static_cast(videoWidth) / videoHeight; + if (videoAspect > previewAspect) { + previewAspect = videoAspect; + } + // First crop sensor to preview aspect ratio + if (arrayAspect < previewAspect) { + vertCropFactor = arrayAspect / previewAspect; + } else { + horizCropFactor = previewAspect / arrayAspect; + } + // Second crop to still aspect ratio + if (stillAspect < previewAspect) { + horizCropFactor *= stillAspect / previewAspect; + } else { + vertCropFactor *= previewAspect / stillAspect; } - ALOGV("Widest crop aspect: %f", cropAspect); - // Horizontal crop of still is done based on fitting in the widest - // aspect ratio - horizCropFactor = stillAspect / cropAspect; - // Vertical crop is a function of the array aspect ratio and the - // widest aspect ratio. 
- vertCropFactor = arrayAspect / cropAspect; } else { /** * Crop are just a function of just the still/array relative aspect -- cgit v1.1 From 1f7d356fa094b975ad2ebf9217be6abba2c70825 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Mon, 6 May 2013 20:20:16 -0700 Subject: libutils clean-up Change-Id: I3bf32d72aa8eec627249a675c130c91a8aff6710 --- media/libmedia/Android.mk | 3 +- media/libmedia/MediaScannerClient.cpp | 2 +- media/libmedia/StringArray.cpp | 113 ++++++++++++++++++++++++++++++++++ media/libmedia/StringArray.h | 83 +++++++++++++++++++++++++ 4 files changed, 199 insertions(+), 2 deletions(-) create mode 100644 media/libmedia/StringArray.cpp create mode 100644 media/libmedia/StringArray.h diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index 2c0c3a5..96755bb 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -53,7 +53,8 @@ LOCAL_SRC_FILES:= \ Visualizer.cpp \ MemoryLeakTrackUtil.cpp \ SoundPool.cpp \ - SoundPoolThread.cpp + SoundPoolThread.cpp \ + StringArray.cpp LOCAL_SRC_FILES += ../libnbaio/roundup.c diff --git a/media/libmedia/MediaScannerClient.cpp b/media/libmedia/MediaScannerClient.cpp index e1e3348..93a4a4c 100644 --- a/media/libmedia/MediaScannerClient.cpp +++ b/media/libmedia/MediaScannerClient.cpp @@ -16,7 +16,7 @@ #include -#include +#include "StringArray.h" #include "autodetect.h" #include "unicode/ucnv.h" diff --git a/media/libmedia/StringArray.cpp b/media/libmedia/StringArray.cpp new file mode 100644 index 0000000..5f5b57a --- /dev/null +++ b/media/libmedia/StringArray.cpp @@ -0,0 +1,113 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// +// Sortable array of strings. STL-ish, but STL-free. +// + +#include +#include + +#include "StringArray.h" + +namespace android { + +// +// An expanding array of strings. Add, get, sort, delete. +// +StringArray::StringArray() + : mMax(0), mCurrent(0), mArray(NULL) +{ +} + +StringArray:: ~StringArray() { + for (int i = 0; i < mCurrent; i++) + delete[] mArray[i]; + delete[] mArray; +} + +// +// Add a string. A copy of the string is made. +// +bool StringArray::push_back(const char* str) { + if (mCurrent >= mMax) { + char** tmp; + + if (mMax == 0) + mMax = 16; // initial storage + else + mMax *= 2; + + tmp = new char*[mMax]; + if (tmp == NULL) + return false; + + memcpy(tmp, mArray, mCurrent * sizeof(char*)); + delete[] mArray; + mArray = tmp; + } + + int len = strlen(str); + mArray[mCurrent] = new char[len+1]; + memcpy(mArray[mCurrent], str, len+1); + mCurrent++; + + return true; +} + +// +// Delete an entry. +// +void StringArray::erase(int idx) { + if (idx < 0 || idx >= mCurrent) + return; + delete[] mArray[idx]; + if (idx < mCurrent-1) { + memmove(&mArray[idx], &mArray[idx+1], + (mCurrent-1 - idx) * sizeof(char*)); + } + mCurrent--; +} + +// +// Sort the array. 
+// +void StringArray::sort(int (*compare)(const void*, const void*)) { + qsort(mArray, mCurrent, sizeof(char*), compare); +} + +// +// Pass this to the sort routine to do an ascending alphabetical sort. +// +int StringArray::cmpAscendingAlpha(const void* pstr1, const void* pstr2) { + return strcmp(*(const char**)pstr1, *(const char**)pstr2); +} + +// +// Set entry N to specified string. +// [should use operator[] here] +// +void StringArray::setEntry(int idx, const char* str) { + if (idx < 0 || idx >= mCurrent) + return; + delete[] mArray[idx]; + int len = strlen(str); + mArray[idx] = new char[len+1]; + memcpy(mArray[idx], str, len+1); +} + + +}; // namespace android diff --git a/media/libmedia/StringArray.h b/media/libmedia/StringArray.h new file mode 100644 index 0000000..ae47085 --- /dev/null +++ b/media/libmedia/StringArray.h @@ -0,0 +1,83 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// +// Sortable array of strings. STL-ish, but STL-free. +// +#ifndef _LIBS_MEDIA_STRING_ARRAY_H +#define _LIBS_MEDIA_STRING_ARRAY_H + +#include +#include + +namespace android { + +// +// An expanding array of strings. Add, get, sort, delete. +// +class StringArray { +public: + StringArray(); + virtual ~StringArray(); + + // + // Add a string. A copy of the string is made. + // + bool push_back(const char* str); + + // + // Delete an entry. + // + void erase(int idx); + + // + // Sort the array. + // + void sort(int (*compare)(const void*, const void*)); + + // + // Pass this to the sort routine to do an ascending alphabetical sort. + // + static int cmpAscendingAlpha(const void* pstr1, const void* pstr2); + + // + // Get the #of items in the array. + // + inline int size(void) const { return mCurrent; } + + // + // Return entry N. + // [should use operator[] here] + // + const char* getEntry(int idx) const { + return (unsigned(idx) >= unsigned(mCurrent)) ? NULL : mArray[idx]; + } + + // + // Set entry N to specified string. 
+ // [should use operator[] here] + // + void setEntry(int idx, const char* str); + +private: + int mMax; + int mCurrent; + char** mArray; +}; + +}; // namespace android + +#endif // _LIBS_MEDIA_STRING_ARRAY_H -- cgit v1.1 From df2c8bfd48b687bfefacc9167c5dbc66b20f5e91 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 7 May 2013 10:58:13 -0700 Subject: camera3: Manage fences properly when buffers are returned with an error Bug: 8622089 Change-Id: I4df0258d6803d460d4dd98f0a80829eb844ca30d --- .../camera3/Camera3OutputStream.cpp | 50 ++++++++++++++++------ .../libcameraservice/camera3/Camera3ZslStream.cpp | 49 +++++++++++++++------ 2 files changed, 75 insertions(+), 24 deletions(-) diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp index ec8cf0d..9693346 100644 --- a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp @@ -152,15 +152,23 @@ status_t Camera3OutputStream::returnBufferLocked( mId); return INVALID_OPERATION; } + + sp releaseFence; + + /** + * Fence management - calculate Release Fence + */ if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { - res = mConsumer->cancelBuffer(mConsumer.get(), - container_of(buffer.buffer, ANativeWindowBuffer, handle), - buffer.release_fence); - if (res != OK) { - ALOGE("%s: Stream %d: Error cancelling buffer to native window:" - " %s (%d)", __FUNCTION__, mId, strerror(-res), res); - return res; + if (buffer.release_fence != -1) { + ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when " + "there is an error", __FUNCTION__, mId, buffer.release_fence); + close(buffer.release_fence); } + + /** + * Reassign release fence as the acquire fence in case of error + */ + releaseFence = new Fence(buffer.acquire_fence); } else { res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp); if (res != OK) { @@ -169,22 +177,40 @@ status_t Camera3OutputStream::returnBufferLocked( return res; } - sp releaseFence = new Fence(buffer.release_fence); - int anwReleaseFence = releaseFence->dup(); + releaseFence = new Fence(buffer.release_fence); + } + int anwReleaseFence = releaseFence->dup(); + + /** + * Return buffer back to ANativeWindow + */ + if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { + // Cancel buffer + res = mConsumer->cancelBuffer(mConsumer.get(), + container_of(buffer.buffer, ANativeWindowBuffer, handle), + anwReleaseFence); + if (res != OK) { + ALOGE("%s: Stream %d: Error cancelling buffer to native window:" + " %s (%d)", __FUNCTION__, mId, strerror(-res), res); + } + } else { res = mConsumer->queueBuffer(mConsumer.get(), container_of(buffer.buffer, ANativeWindowBuffer, handle), anwReleaseFence); if (res != OK) { ALOGE("%s: Stream %d: Error queueing buffer to native window: %s (%d)", __FUNCTION__, mId, strerror(-res), res); - close(anwReleaseFence); - return res; } + } - mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); + if (res != OK) { + close(anwReleaseFence); + return res; } + mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); + mDequeuedBufferCount--; mBufferReturnedSignal.signal(); mLastTimestamp = timestamp; diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp index 0345d5b..5a13dde 100644 --- a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp @@ 
-208,15 +208,23 @@ status_t Camera3ZslStream::returnBufferLocked( mId); return INVALID_OPERATION; } + + sp releaseFence; + + /** + * Fence management - calculate Release Fence + */ if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { - res = mConsumer->cancelBuffer(mConsumer.get(), - container_of(buffer.buffer, ANativeWindowBuffer, handle), - buffer.release_fence); - if (res != OK) { - ALOGE("%s: Stream %d: Error cancelling buffer to native window:" - " %s (%d)", __FUNCTION__, mId, strerror(-res), res); - return res; + if (buffer.release_fence != -1) { + ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when " + "there is an error", __FUNCTION__, mId, buffer.release_fence); + close(buffer.release_fence); } + + /** + * Reassign release fence as the acquire fence in case of error + */ + releaseFence = new Fence(buffer.acquire_fence); } else { res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp); if (res != OK) { @@ -225,21 +233,38 @@ status_t Camera3ZslStream::returnBufferLocked( return res; } - sp releaseFence = new Fence(buffer.release_fence); - int anwReleaseFence = releaseFence->dup(); + releaseFence = new Fence(buffer.release_fence); + } + int anwReleaseFence = releaseFence->dup(); + + /** + * Return buffer back to ANativeWindow + */ + if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { + // Cancel buffer + res = mConsumer->cancelBuffer(mConsumer.get(), + container_of(buffer.buffer, ANativeWindowBuffer, handle), + anwReleaseFence); + if (res != OK) { + ALOGE("%s: Stream %d: Error cancelling buffer to native window:" + " %s (%d)", __FUNCTION__, mId, strerror(-res), res); + } + } else { res = mConsumer->queueBuffer(mConsumer.get(), container_of(buffer.buffer, ANativeWindowBuffer, handle), anwReleaseFence); if (res != OK) { ALOGE("%s: Stream %d: Error queueing buffer to native window: %s (%d)", __FUNCTION__, mId, strerror(-res), res); - close(anwReleaseFence); - return res; } + } - mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); + if (res != OK) { + close(anwReleaseFence); + return res; } + mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); mDequeuedBufferCount--; mBufferReturnedSignal.signal(); -- cgit v1.1 From 7e66ebc932b4cfa9b4611dffeeb1e91399deb442 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Thu, 9 May 2013 12:10:35 -0700 Subject: Camera2/3: Optimize a few YCbCr_420_888 copy paths Covers cases where the HAL format and the API format have the same chroma layout. 
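For the shortcut cases the key observation is that when the flexible-YUV source already stores chroma interleaved in the order the output wants, each chroma row can be copied with one memcpy instead of a per-sample loop. A minimal standalone sketch of that idea follows; the FlexChroma struct and copyChromaToNv21 helper are hypothetical, the destination is assumed tightly packed, and only the field names echo the real diff below (whose pointer bookkeeping differs slightly):

#include <cstddef>
#include <cstdint>
#include <cstring>

// Hypothetical description of a flexible-YUV chroma source; field names
// loosely mirror the chromaStep/chromaStride layout used in the patch below.
struct FlexChroma {
    const uint8_t* vSrc;  // first V sample
    const uint8_t* uSrc;  // first U sample
    size_t chromaStep;    // bytes between consecutive samples of one plane
    size_t chromaStride;  // bytes between consecutive chroma rows
    size_t width;         // chroma width in samples (half the luma width)
    size_t height;        // chroma height in rows (half the luma height)
};

// Copy chroma into an NV21 interleaved VU plane (assumed tightly packed).
static void copyChromaToNv21(const FlexChroma& src, uint8_t* vuDst) {
    if (src.uSrc == src.vSrc + 1 && src.chromaStep == 2) {
        // Source is already semiplanar VU: copy whole rows at once.
        for (size_t row = 0; row < src.height; row++) {
            std::memcpy(vuDst, src.vSrc + row * src.chromaStride, src.width * 2);
            vuDst += src.width * 2;
        }
    } else {
        // Generic per-sample copy: always works, but much slower.
        for (size_t row = 0; row < src.height; row++) {
            const uint8_t* v = src.vSrc + row * src.chromaStride;
            const uint8_t* u = src.uSrc + row * src.chromaStride;
            for (size_t col = 0; col < src.width; col++) {
                *vuDst++ = *v;
                *vuDst++ = *u;
                v += src.chromaStep;
                u += src.chromaStep;
            }
        }
    }
}
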
Bug: 8734880 Change-Id: Ia735082c260b5914fc14f12551f91917c4e53b01 --- .../libcameraservice/camera2/CallbackProcessor.cpp | 66 +++++++++++++++------- 1 file changed, 46 insertions(+), 20 deletions(-) diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp index a3d6cb2..5e88102 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp @@ -411,35 +411,61 @@ status_t CallbackProcessor::convertFromFlexibleYuv(int32_t previewFormat, size_t dstChromaGap = dstCStride - chromaWidth; if (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP) { - // NV21 + // Flexible YUV chroma to NV21 chroma uint8_t *vuDst = yDst; - for (size_t row = 0; row < chromaHeight; row++) { - for (size_t col = 0; col < chromaWidth; col++) { - *(vuDst++) = *vSrc; - *(vuDst++) = *uSrc; - vSrc += src.chromaStep; - uSrc += src.chromaStep; + // Check for shortcuts + if (uSrc == vSrc + 1 && src.chromaStep == 2) { + // Source has semiplanar CrCb chroma layout, can copy by rows + for (size_t row = 0; row < chromaHeight; row++) { + memcpy(vuDst, uSrc, src.width); + vuDst += src.width; + uSrc += src.chromaStride; + } + } else { + // Generic copy, always works but not very efficient + for (size_t row = 0; row < chromaHeight; row++) { + for (size_t col = 0; col < chromaWidth; col++) { + *(vuDst++) = *vSrc; + *(vuDst++) = *uSrc; + vSrc += src.chromaStep; + uSrc += src.chromaStep; + } + vSrc += chromaGap; + uSrc += chromaGap; } - vSrc += chromaGap; - uSrc += chromaGap; } } else { - // YV12 + // flexible YUV chroma to YV12 chroma ALOG_ASSERT(previewFormat == HAL_PIXEL_FORMAT_YV12, "Unexpected preview format 0x%x", previewFormat); uint8_t *vDst = yDst; uint8_t *uDst = yDst + chromaHeight * dstCStride; - for (size_t row = 0; row < chromaHeight; row++) { - for (size_t col = 0; col < chromaWidth; col++) { - *(vDst++) = *vSrc; - *(uDst++) = *uSrc; - vSrc += src.chromaStep; - uSrc += src.chromaStep; + if (src.chromaStep == 1) { + // Source has planar chroma layout, can copy by row + for (size_t row = 0; row < chromaHeight; row++) { + memcpy(vDst, vSrc, chromaWidth); + vDst += dstCStride; + vSrc += src.chromaStride; + } + for (size_t row = 0; row < chromaHeight; row++) { + memcpy(uDst, uSrc, chromaWidth); + uDst += dstCStride; + uSrc += src.chromaStride; + } + } else { + // Generic copy, always works but not very efficient + for (size_t row = 0; row < chromaHeight; row++) { + for (size_t col = 0; col < chromaWidth; col++) { + *(vDst++) = *vSrc; + *(uDst++) = *uSrc; + vSrc += src.chromaStep; + uSrc += src.chromaStep; + } + vSrc += chromaGap; + uSrc += chromaGap; + vDst += dstChromaGap; + uDst += dstChromaGap; } - vSrc += chromaGap; - uSrc += chromaGap; - vDst += dstChromaGap; - uDst += dstChromaGap; } } -- cgit v1.1 From e762be91c3280d837b1d48455cba90459ced7511 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Thu, 9 May 2013 16:26:45 -0700 Subject: make libaudioflinger symbols visibility hidden we export only symbols needed by clients of this library. 
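The mechanism, sketched minimally: the library is compiled with -fvisibility=hidden, and only the classes and entry points that clients actually link against are re-exported through the ANDROID_API macro added in the diff below. The expansion shown here is an assumption (in AOSP the macro comes from cutils/compiler.h and maps to the usual GCC/Clang visibility attribute); the class names and bodies are placeholders.

#if defined(__GNUC__)
#define ANDROID_API __attribute__((visibility("default")))  // assumed expansion
#else
#define ANDROID_API
#endif

// Built with LOCAL_CFLAGS += -fvisibility=hidden, symbols default to hidden:
class InternalHelper { /* not exported; stays internal to the .so */ };

// Re-exported explicitly, so clients of the shared library can still link it:
class ANDROID_API ExportedEntryPoint { /* visible to library clients */ };
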
this saves about 130KB (1/3rd of the lib size) Change-Id: Id81f3ecb299ee3abc0811915cf6efe87180bf15c --- services/audioflinger/Android.mk | 2 ++ services/audioflinger/AudioFlinger.h | 6 ++++-- services/audioflinger/AudioPolicyService.h | 5 +++-- services/audioflinger/AudioResampler.h | 3 ++- 4 files changed, 11 insertions(+), 5 deletions(-) diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index 061a079..714854e 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -81,6 +81,8 @@ else LOCAL_CFLAGS += -DANDROID_SMP=0 endif +LOCAL_CFLAGS += -fvisibility=hidden + include $(BUILD_SHARED_LIBRARY) # diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index b0efef6..cf68848 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -24,6 +24,8 @@ #include +#include + #include #include #include @@ -89,7 +91,7 @@ class AudioFlinger : { friend class BinderService; // for AudioFlinger() public: - static const char* getServiceName() { return "media.audio_flinger"; } + static const char* getServiceName() ANDROID_API { return "media.audio_flinger"; } virtual status_t dump(int fd, const Vector& args); @@ -278,7 +280,7 @@ private: bool btNrecIsOff() const { return mBtNrecIsOff; } - AudioFlinger(); + AudioFlinger() ANDROID_API; virtual ~AudioFlinger(); // call in any IAudioFlinger method that accesses mPrimaryHardwareDev diff --git a/services/audioflinger/AudioPolicyService.h b/services/audioflinger/AudioPolicyService.h index 35cf368..53238fa 100644 --- a/services/audioflinger/AudioPolicyService.h +++ b/services/audioflinger/AudioPolicyService.h @@ -19,6 +19,7 @@ #include #include +#include #include #include #include @@ -44,7 +45,7 @@ class AudioPolicyService : public: // for BinderService - static const char *getServiceName() { return "media.audio_policy"; } + static const char *getServiceName() ANDROID_API { return "media.audio_policy"; } virtual status_t dump(int fd, const Vector& args); @@ -137,7 +138,7 @@ public: virtual status_t setVoiceVolume(float volume, int delayMs = 0); private: - AudioPolicyService(); + AudioPolicyService() ANDROID_API; virtual ~AudioPolicyService(); status_t dumpInternals(int fd); diff --git a/services/audioflinger/AudioResampler.h b/services/audioflinger/AudioResampler.h index 2b8694f..29dc5b6 100644 --- a/services/audioflinger/AudioResampler.h +++ b/services/audioflinger/AudioResampler.h @@ -19,13 +19,14 @@ #include #include +#include #include namespace android { // ---------------------------------------------------------------------------- -class AudioResampler { +class ANDROID_API AudioResampler { public: // Determines quality of SRC. // LOW_QUALITY: linear interpolator (1st order) -- cgit v1.1 From 747b84925886dec39cd7532637b9338054e649ef Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Thu, 9 May 2013 17:03:54 -0700 Subject: Camera3: Fix initial orientation configuration. One call to set orientation too many. 
Bug: 8683719 Change-Id: I4b776fc8665eed940a1f18a6f617be4f1406e41a --- services/camera/libcameraservice/camera3/Camera3OutputStream.cpp | 5 ----- 1 file changed, 5 deletions(-) diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp index 9693346..055913a 100644 --- a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp @@ -361,11 +361,6 @@ status_t Camera3OutputStream::configureQueueLocked() { return res; } - res = setTransformLocked(0); - if (res != OK) { - return res; - } - if (mMaxSize == 0) { // For buffers of known size res = native_window_set_buffers_geometry(mConsumer.get(), -- cgit v1.1 From 6aade6058521b0dbd35a9a4620f4d04f02f90444 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Thu, 9 May 2013 09:15:34 -0700 Subject: Don't render buffers that have size 0 b/8857451 Change-Id: I12a31a2f85af76602db9e6f0ec80632954b3f7ed --- media/libstagefright/ACodec.cpp | 3 ++- media/libstagefright/MediaCodec.cpp | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 058852e..994d3f4 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -3131,7 +3131,8 @@ void ACodec::BaseState::onOutputBufferDrained(const sp &msg) { int32_t render; if (mCodec->mNativeWindow != NULL - && msg->findInt32("render", &render) && render != 0) { + && msg->findInt32("render", &render) && render != 0 + && (info->mData == NULL || info->mData->size() != 0)) { // The client wants this buffer to be rendered. status_t err; diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index 714da55..f412dc8 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -1656,7 +1656,7 @@ status_t MediaCodec::onReleaseOutputBuffer(const sp &msg) { return -EACCES; } - if (render) { + if (render && (info->mData == NULL || info->mData->size() != 0)) { info->mNotify->setInt32("render", true); if (mSoftRenderer != NULL) { -- cgit v1.1 From da1a325bc0a1421f4233e62704da4fab8b0acf7b Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 10 May 2013 09:29:51 -0700 Subject: Replace obsolete CLOCK_MONOTONIC_HR by CLOCK_MONOTONIC Bug: 8895727 Change-Id: I0abf6da941965e9e29e232943184dbc72b95d03c --- services/audioflinger/test-resample.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/audioflinger/test-resample.cpp b/services/audioflinger/test-resample.cpp index b082e8c..7a314cf 100644 --- a/services/audioflinger/test-resample.cpp +++ b/services/audioflinger/test-resample.cpp @@ -219,12 +219,12 @@ int main(int argc, char* argv[]) { memset(output_vaddr, 0, output_size); timespec start, end; - clock_gettime(CLOCK_MONOTONIC_HR, &start); + clock_gettime(CLOCK_MONOTONIC, &start); resampler->resample((int*) output_vaddr, out_frames, &provider); resampler->resample((int*) output_vaddr, out_frames, &provider); resampler->resample((int*) output_vaddr, out_frames, &provider); resampler->resample((int*) output_vaddr, out_frames, &provider); - clock_gettime(CLOCK_MONOTONIC_HR, &end); + clock_gettime(CLOCK_MONOTONIC, &end); int64_t start_ns = start.tv_sec * 1000000000LL + start.tv_nsec; int64_t end_ns = end.tv_sec * 1000000000LL + end.tv_nsec; int64_t time = (end_ns - start_ns)/4; -- cgit v1.1 From 082aa3f335ffeedacafeb8982684cbb371a18c32 Mon Sep 17 00:00:00 2001 From: Marco 
Nelissen Date: Fri, 10 May 2013 10:10:42 -0700 Subject: Don't purge effects we're about to start using By acquiring the new session id before releasing the old, we prevent purging existing effects with the new session id that aren't currently attached but will be once the player is switched to the new session id. b/8767565 Change-Id: I703881b69c5accd8832ac834246925a20ada4c21 --- media/libmedia/mediaplayer.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index ecae3d3..4a34233 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -575,8 +575,8 @@ status_t MediaPlayer::setAudioSessionId(int sessionId) return BAD_VALUE; } if (sessionId != mAudioSessionId) { - AudioSystem::releaseAudioSessionId(mAudioSessionId); AudioSystem::acquireAudioSessionId(sessionId); + AudioSystem::releaseAudioSessionId(mAudioSessionId); mAudioSessionId = sessionId; } return NO_ERROR; -- cgit v1.1 From ae3d0babb9c5d68b107b53d5a67193309020c556 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 8 May 2013 18:03:15 -0700 Subject: Camera3: Refactor stream code to be DRY Bug: 8851039 Change-Id: Iaac2926bfa25dd6e9db8b307765d4fe709b88d21 --- services/camera/libcameraservice/Android.mk | 1 + .../camera3/Camera3IOStreamBase.cpp | 273 +++++++++++++ .../libcameraservice/camera3/Camera3IOStreamBase.h | 102 +++++ .../camera3/Camera3InputStream.cpp | 193 ++------- .../libcameraservice/camera3/Camera3InputStream.h | 23 +- .../camera3/Camera3OutputStream.cpp | 242 +++-------- .../libcameraservice/camera3/Camera3OutputStream.h | 30 +- .../libcameraservice/camera3/Camera3ZslStream.cpp | 452 ++------------------- .../libcameraservice/camera3/Camera3ZslStream.h | 37 +- 9 files changed, 554 insertions(+), 799 deletions(-) create mode 100644 services/camera/libcameraservice/camera3/Camera3IOStreamBase.cpp create mode 100644 services/camera/libcameraservice/camera3/Camera3IOStreamBase.h diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index 3479553..83d9ccd 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -27,6 +27,7 @@ LOCAL_SRC_FILES:= \ camera2/ProFrameProcessor.cpp \ camera2/ZslProcessor3.cpp \ camera3/Camera3Stream.cpp \ + camera3/Camera3IOStreamBase.cpp \ camera3/Camera3InputStream.cpp \ camera3/Camera3OutputStream.cpp \ camera3/Camera3ZslStream.cpp \ diff --git a/services/camera/libcameraservice/camera3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/camera3/Camera3IOStreamBase.cpp new file mode 100644 index 0000000..abc28fe --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3IOStreamBase.cpp @@ -0,0 +1,273 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera3-IOStreamBase" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +// This is needed for stdint.h to define INT64_MAX in C++ +#define __STDC_LIMIT_MACROS + +#include +#include +#include "Camera3IOStreamBase.h" + +namespace android { + +namespace camera3 { + +Camera3IOStreamBase::Camera3IOStreamBase(int id, camera3_stream_type_t type, + uint32_t width, uint32_t height, size_t maxSize, int format) : + Camera3Stream(id, type, + width, height, maxSize, format), + mTotalBufferCount(0), + mDequeuedBufferCount(0), + mFrameCount(0), + mLastTimestamp(0) { + + mCombinedFence = new Fence(); + + if (maxSize > 0 && format != HAL_PIXEL_FORMAT_BLOB) { + ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__, + format); + mState = STATE_ERROR; + } +} + +Camera3IOStreamBase::~Camera3IOStreamBase() { + disconnectLocked(); +} + +bool Camera3IOStreamBase::hasOutstandingBuffersLocked() const { + nsecs_t signalTime = mCombinedFence->getSignalTime(); + ALOGV("%s: Stream %d: Has %d outstanding buffers," + " buffer signal time is %lld", + __FUNCTION__, mId, mDequeuedBufferCount, signalTime); + if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) { + return true; + } + return false; +} + +status_t Camera3IOStreamBase::waitUntilIdle(nsecs_t timeout) { + status_t res; + { + Mutex::Autolock l(mLock); + while (mDequeuedBufferCount > 0) { + if (timeout != TIMEOUT_NEVER) { + nsecs_t startTime = systemTime(); + res = mBufferReturnedSignal.waitRelative(mLock, timeout); + if (res == TIMED_OUT) { + return res; + } else if (res != OK) { + ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + nsecs_t deltaTime = systemTime() - startTime; + if (timeout <= deltaTime) { + timeout = 0; + } else { + timeout -= deltaTime; + } + } else { + res = mBufferReturnedSignal.wait(mLock); + if (res != OK) { + ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + } + } + } + + // No lock + + unsigned int timeoutMs; + if (timeout == TIMEOUT_NEVER) { + timeoutMs = Fence::TIMEOUT_NEVER; + } else if (timeout == 0) { + timeoutMs = 0; + } else { + // Round up to wait at least 1 ms + timeoutMs = (timeout + 999999) / 1000000; + } + + return mCombinedFence->wait(timeoutMs); +} + +void Camera3IOStreamBase::dump(int fd, const Vector &args) const { + (void) args; + String8 lines; + lines.appendFormat(" State: %d\n", mState); + lines.appendFormat(" Dims: %d x %d, format 0x%x\n", + camera3_stream::width, camera3_stream::height, + camera3_stream::format); + lines.appendFormat(" Max size: %d\n", mMaxSize); + lines.appendFormat(" Usage: %d, max HAL buffers: %d\n", + camera3_stream::usage, camera3_stream::max_buffers); + lines.appendFormat(" Frames produced: %d, last timestamp: %lld ns\n", + mFrameCount, mLastTimestamp); + lines.appendFormat(" Total buffers: %d, currently dequeued: %d\n", + mTotalBufferCount, mDequeuedBufferCount); + write(fd, lines.string(), lines.size()); +} + +status_t Camera3IOStreamBase::configureQueueLocked() { + status_t res; + + switch (mState) { + case STATE_IN_RECONFIG: + res = disconnectLocked(); + if (res != OK) { + return res; + } + break; + case STATE_IN_CONFIG: + // OK + break; + default: + ALOGE("%s: Bad state: %d", __FUNCTION__, mState); + return INVALID_OPERATION; + } + + return OK; +} + +size_t Camera3IOStreamBase::getBufferCountLocked() { + return mTotalBufferCount; +} + +status_t Camera3IOStreamBase::disconnectLocked() { + switch (mState) { + 
case STATE_IN_RECONFIG: + case STATE_CONFIGURED: + // OK + break; + default: + // No connection, nothing to do + return OK; + } + + if (mDequeuedBufferCount > 0) { + ALOGE("%s: Can't disconnect with %d buffers still dequeued!", + __FUNCTION__, mDequeuedBufferCount); + return INVALID_OPERATION; + } + + return OK; +} + +void Camera3IOStreamBase::handoutBufferLocked(camera3_stream_buffer &buffer, + buffer_handle_t *handle, + int acquireFence, + int releaseFence, + camera3_buffer_status_t status) { + /** + * Note that all fences are now owned by HAL. + */ + + // Handing out a raw pointer to this object. Increment internal refcount. + incStrong(this); + buffer.stream = this; + buffer.buffer = handle; + buffer.acquire_fence = acquireFence; + buffer.release_fence = releaseFence; + buffer.status = status; + + mDequeuedBufferCount++; +} + +status_t Camera3IOStreamBase::getBufferPreconditionCheckLocked() const { + // Allow dequeue during IN_[RE]CONFIG for registration + if (mState != STATE_CONFIGURED && + mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) { + ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + + // Only limit dequeue amount when fully configured + if (mState == STATE_CONFIGURED && + mDequeuedBufferCount == camera3_stream::max_buffers) { + ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous" + " buffers (%d)", __FUNCTION__, mId, + camera3_stream::max_buffers); + return INVALID_OPERATION; + } + + return OK; +} + +status_t Camera3IOStreamBase::returnBufferPreconditionCheckLocked() const { + // Allow buffers to be returned in the error state, to allow for disconnect + // and in the in-config states for registration + if (mState == STATE_CONSTRUCTED) { + ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + if (mDequeuedBufferCount == 0) { + ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__, + mId); + return INVALID_OPERATION; + } + + return OK; +} + +status_t Camera3IOStreamBase::returnAnyBufferLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output) { + status_t res; + + // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be + // decrementing the internal refcount next. In case this is the last ref, we + // might get destructed on the decStrong(), so keep an sp around until the + // end of the call - otherwise have to sprinkle the decStrong on all exit + // points. 
+ sp keepAlive(this); + decStrong(this); + + if ((res = returnBufferPreconditionCheckLocked()) != OK) { + return res; + } + + sp releaseFence; + res = returnBufferCheckedLocked(buffer, timestamp, output, + &releaseFence); + if (res != OK) { + return res; + } + + mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); + + mDequeuedBufferCount--; + mBufferReturnedSignal.signal(); + + if (output) { + mLastTimestamp = timestamp; + } + + return OK; +} + + + +}; // namespace camera3 + +}; // namespace android diff --git a/services/camera/libcameraservice/camera3/Camera3IOStreamBase.h b/services/camera/libcameraservice/camera3/Camera3IOStreamBase.h new file mode 100644 index 0000000..74c4484 --- /dev/null +++ b/services/camera/libcameraservice/camera3/Camera3IOStreamBase.h @@ -0,0 +1,102 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_IO_STREAM_BASE_H +#define ANDROID_SERVERS_CAMERA3_IO_STREAM_BASE_H + +#include +#include + +#include "Camera3Stream.h" + +namespace android { + +namespace camera3 { + +/** + * A base class for managing a single stream of I/O data from the camera device. + */ +class Camera3IOStreamBase : + public Camera3Stream { + protected: + Camera3IOStreamBase(int id, camera3_stream_type_t type, + uint32_t width, uint32_t height, size_t maxSize, int format); + + public: + + virtual ~Camera3IOStreamBase(); + + /** + * Camera3Stream interface + */ + + virtual status_t waitUntilIdle(nsecs_t timeout); + virtual void dump(int fd, const Vector &args) const; + + protected: + size_t mTotalBufferCount; + // sum of input and output buffers that are currently acquired by HAL + size_t mDequeuedBufferCount; + Condition mBufferReturnedSignal; + uint32_t mFrameCount; + // Last received output buffer's timestamp + nsecs_t mLastTimestamp; + + // The merged release fence for all returned buffers + sp mCombinedFence; + + status_t returnAnyBufferLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output); + + virtual status_t returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut) = 0; + + /** + * Internal Camera3Stream interface + */ + virtual bool hasOutstandingBuffersLocked() const; + + virtual size_t getBufferCountLocked(); + + status_t getBufferPreconditionCheckLocked() const; + status_t returnBufferPreconditionCheckLocked() const; + + // State check only + virtual status_t configureQueueLocked(); + // State checks only + virtual status_t disconnectLocked(); + + // Hand out the buffer to a native location, + // incrementing the internal refcount and dequeued buffer count + void handoutBufferLocked(camera3_stream_buffer &buffer, + buffer_handle_t *handle, + int acquire_fence, + int release_fence, + camera3_buffer_status_t status); + +}; // class Camera3IOStreamBase + +} // namespace camera3 + +} // namespace android + +#endif diff --git 
a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp index c7dd12a..13e9c83 100644 --- a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp @@ -18,9 +18,6 @@ #define ATRACE_TAG ATRACE_TAG_CAMERA //#define LOG_NDEBUG 0 -// This is needed for stdint.h to define INT64_MAX in C++ -#define __STDC_LIMIT_MACROS - #include #include #include "Camera3InputStream.h" @@ -31,12 +28,8 @@ namespace camera3 { Camera3InputStream::Camera3InputStream(int id, uint32_t width, uint32_t height, int format) : - Camera3Stream(id, CAMERA3_STREAM_INPUT, width, height, 0, format), - mTotalBufferCount(0), - mDequeuedBufferCount(0), - mFrameCount(0), - mLastTimestamp(0) { - mCombinedFence = new Fence(); + Camera3IOStreamBase(id, CAMERA3_STREAM_INPUT, width, height, + /*maxSize*/0, format) { if (format == HAL_PIXEL_FORMAT_BLOB) { ALOGE("%s: Bad format, BLOB not supported", __FUNCTION__); @@ -61,21 +54,8 @@ status_t Camera3InputStream::getInputBufferLocked( return INVALID_OPERATION; } - // Allow acquire during IN_[RE]CONFIG for registration - if (mState != STATE_CONFIGURED && - mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) { - ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d", - __FUNCTION__, mId, mState); - return INVALID_OPERATION; - } - - // Only limit acquire amount when fully configured - if (mState == STATE_CONFIGURED && - mDequeuedBufferCount == camera3_stream::max_buffers) { - ALOGE("%s: Stream %d: Already acquired maximum number of simultaneous" - " buffers (%d)", __FUNCTION__, mId, - camera3_stream::max_buffers); - return INVALID_OPERATION; + if ((res = getBufferPreconditionCheckLocked()) != OK) { + return res; } ANativeWindowBuffer* anb; @@ -95,51 +75,30 @@ status_t Camera3InputStream::getInputBufferLocked( anb = bufferItem.mGraphicBuffer->getNativeBuffer(); assert(anb != NULL); fenceFd = bufferItem.mFence->dup(); + /** * FenceFD now owned by HAL except in case of error, * in which case we reassign it to acquire_fence */ - - // Handing out a raw pointer to this object. Increment internal refcount. - incStrong(this); - buffer->stream = this; - buffer->buffer = &(anb->handle); - buffer->acquire_fence = fenceFd; - buffer->release_fence = -1; - buffer->status = CAMERA3_BUFFER_STATUS_OK; - - mDequeuedBufferCount++; - + handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd, + /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK); mBuffersInFlight.push_back(bufferItem); return OK; } -status_t Camera3InputStream::returnInputBufferLocked( - const camera3_stream_buffer &buffer) { - ATRACE_CALL(); - status_t res; +status_t Camera3InputStream::returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut) { - // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be - // decrementing the internal refcount next. In case this is the last ref, we - // might get destructed on the decStrong(), so keep an sp around until the - // end of the call - otherwise have to sprinkle the decStrong on all exit - // points. 
- sp keepAlive(this); - decStrong(this); - - // Allow buffers to be returned in the error state, to allow for disconnect - // and in the in-config states for registration - if (mState == STATE_CONSTRUCTED) { - ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d", - __FUNCTION__, mId, mState); - return INVALID_OPERATION; - } - if (mDequeuedBufferCount == 0) { - ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__, - mId); - return INVALID_OPERATION; - } + (void)timestamp; + (void)output; + ALOG_ASSERT(!output, "Expected output to be false"); + + status_t res; bool bufferFound = false; BufferItem bufferItem; @@ -192,91 +151,24 @@ status_t Camera3InputStream::returnInputBufferLocked( return res; } - mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); - - mBufferReturnedSignal.signal(); + *releaseFenceOut = releaseFence; return OK; - -} - -bool Camera3InputStream::hasOutstandingBuffersLocked() const { - nsecs_t signalTime = mCombinedFence->getSignalTime(); - ALOGV("%s: Stream %d: Has %d outstanding buffers," - " buffer signal time is %lld", - __FUNCTION__, mId, mDequeuedBufferCount, signalTime); - if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) { - return true; - } - return false; } -status_t Camera3InputStream::waitUntilIdle(nsecs_t timeout) { - status_t res; - { - Mutex::Autolock l(mLock); - while (mDequeuedBufferCount > 0) { - if (timeout != TIMEOUT_NEVER) { - nsecs_t startTime = systemTime(); - res = mBufferReturnedSignal.waitRelative(mLock, timeout); - if (res == TIMED_OUT) { - return res; - } else if (res != OK) { - ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - nsecs_t deltaTime = systemTime() - startTime; - if (timeout <= deltaTime) { - timeout = 0; - } else { - timeout -= deltaTime; - } - } else { - res = mBufferReturnedSignal.wait(mLock); - if (res != OK) { - ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - } - } - } - - // No lock - - unsigned int timeoutMs; - if (timeout == TIMEOUT_NEVER) { - timeoutMs = Fence::TIMEOUT_NEVER; - } else if (timeout == 0) { - timeoutMs = 0; - } else { - // Round up to wait at least 1 ms - timeoutMs = (timeout + 999999) / 1000000; - } - - return mCombinedFence->wait(timeoutMs); -} +status_t Camera3InputStream::returnInputBufferLocked( + const camera3_stream_buffer &buffer) { + ATRACE_CALL(); -size_t Camera3InputStream::getBufferCountLocked() { - return mTotalBufferCount; + return returnAnyBufferLocked(buffer, /*timestamp*/0, /*output*/false); } status_t Camera3InputStream::disconnectLocked() { - switch (mState) { - case STATE_IN_RECONFIG: - case STATE_CONFIGURED: - // OK - break; - default: - // No connection, nothing to do - return OK; - } - if (mDequeuedBufferCount > 0) { - ALOGE("%s: Can't disconnect with %d buffers still acquired!", - __FUNCTION__, mDequeuedBufferCount); - return INVALID_OPERATION; + status_t res; + + if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) { + return res; } assert(mBuffersInFlight.size() == 0); @@ -285,7 +177,8 @@ status_t Camera3InputStream::disconnectLocked() { * no-op since we can't disconnect the producer from the consumer-side */ - mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG : STATE_CONSTRUCTED; + mState = (mState == STATE_IN_RECONFIG) ? 
STATE_IN_CONFIG + : STATE_CONSTRUCTED; return OK; } @@ -297,36 +190,16 @@ void Camera3InputStream::dump(int fd, const Vector &args) const { (void) args; String8 lines; lines.appendFormat(" Stream[%d]: Input\n", mId); - lines.appendFormat(" State: %d\n", mState); - lines.appendFormat(" Dims: %d x %d, format 0x%x\n", - camera3_stream::width, camera3_stream::height, - camera3_stream::format); - lines.appendFormat(" Max size: %d\n", mMaxSize); - lines.appendFormat(" Usage: %d, max HAL buffers: %d\n", - camera3_stream::usage, camera3_stream::max_buffers); - lines.appendFormat(" Frames produced: %d, last timestamp: %lld ns\n", - mFrameCount, mLastTimestamp); - lines.appendFormat(" Total buffers: %d, currently acquired: %d\n", - mTotalBufferCount, mDequeuedBufferCount); write(fd, lines.string(), lines.size()); + + Camera3IOStreamBase::dump(fd, args); } status_t Camera3InputStream::configureQueueLocked() { status_t res; - switch (mState) { - case STATE_IN_RECONFIG: - res = disconnectLocked(); - if (res != OK) { - return res; - } - break; - case STATE_IN_CONFIG: - // OK - break; - default: - ALOGE("%s: Bad state: %d", __FUNCTION__, mState); - return INVALID_OPERATION; + if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) { + return res; } assert(mMaxSize == 0); diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.h b/services/camera/libcameraservice/camera3/Camera3InputStream.h index fd9f464..8adda88 100644 --- a/services/camera/libcameraservice/camera3/Camera3InputStream.h +++ b/services/camera/libcameraservice/camera3/Camera3InputStream.h @@ -21,7 +21,7 @@ #include #include -#include "Camera3Stream.h" +#include "Camera3IOStreamBase.h" namespace android { @@ -34,7 +34,7 @@ namespace camera3 { * buffers by feeding them into the HAL, as well as releasing the buffers back * the buffers once the HAL is done with them. */ -class Camera3InputStream : public Camera3Stream { +class Camera3InputStream : public Camera3IOStreamBase { public: /** * Set up a stream for formats that have fixed size, such as RAW and YUV. 
@@ -42,7 +42,6 @@ class Camera3InputStream : public Camera3Stream { Camera3InputStream(int id, uint32_t width, uint32_t height, int format); ~Camera3InputStream(); - virtual status_t waitUntilIdle(nsecs_t timeout); virtual void dump(int fd, const Vector &args) const; /** @@ -58,14 +57,16 @@ class Camera3InputStream : public Camera3Stream { sp mConsumer; Vector mBuffersInFlight; - size_t mTotalBufferCount; - size_t mDequeuedBufferCount; - Condition mBufferReturnedSignal; - uint32_t mFrameCount; - nsecs_t mLastTimestamp; - // The merged release fence for all returned buffers - sp mCombinedFence; + /** + * Camera3IOStreamBase + */ + virtual status_t returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut); /** * Camera3Stream interface @@ -74,11 +75,9 @@ class Camera3InputStream : public Camera3Stream { virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer); virtual status_t returnInputBufferLocked( const camera3_stream_buffer &buffer); - virtual bool hasOutstandingBuffersLocked() const; virtual status_t disconnectLocked(); virtual status_t configureQueueLocked(); - virtual size_t getBufferCountLocked(); }; // class Camera3InputStream diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp index 9693346..bbcccaf 100644 --- a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp @@ -18,9 +18,6 @@ #define ATRACE_TAG ATRACE_TAG_CAMERA //#define LOG_NDEBUG 0 -// This is needed for stdint.h to define INT64_MAX in C++ -#define __STDC_LIMIT_MACROS - #include #include #include "Camera3OutputStream.h" @@ -37,15 +34,11 @@ namespace camera3 { Camera3OutputStream::Camera3OutputStream(int id, sp consumer, uint32_t width, uint32_t height, int format) : - Camera3Stream(id, CAMERA3_STREAM_OUTPUT, width, height, 0, format), + Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, + /*maxSize*/0, format), mConsumer(consumer), - mTransform(0), - mTotalBufferCount(0), - mDequeuedBufferCount(0), - mFrameCount(0), - mLastTimestamp(0) { + mTransform(0) { - mCombinedFence = new Fence(); if (mConsumer == NULL) { ALOGE("%s: Consumer is NULL!", __FUNCTION__); mState = STATE_ERROR; @@ -55,16 +48,10 @@ Camera3OutputStream::Camera3OutputStream(int id, Camera3OutputStream::Camera3OutputStream(int id, sp consumer, uint32_t width, uint32_t height, size_t maxSize, int format) : - Camera3Stream(id, CAMERA3_STREAM_OUTPUT, - width, height, maxSize, format), + Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, maxSize, + format), mConsumer(consumer), - mTransform(0), - mTotalBufferCount(0), - mDequeuedBufferCount(0), - mFrameCount(0), - mLastTimestamp(0) { - - mCombinedFence = new Fence(); + mTransform(0) { if (format != HAL_PIXEL_FORMAT_BLOB) { ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__, @@ -78,6 +65,18 @@ Camera3OutputStream::Camera3OutputStream(int id, } } +Camera3OutputStream::Camera3OutputStream(int id, camera3_stream_type_t type, + uint32_t width, uint32_t height, + int format) : + Camera3IOStreamBase(id, type, width, height, + /*maxSize*/0, + format), + mTransform(0) { + + // Subclasses expected to initialize mConsumer themselves +} + + Camera3OutputStream::~Camera3OutputStream() { disconnectLocked(); } @@ -86,21 +85,8 @@ status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer) { ATRACE_CALL(); status_t res; 
- // Allow dequeue during IN_[RE]CONFIG for registration - if (mState != STATE_CONFIGURED && - mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) { - ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d", - __FUNCTION__, mId, mState); - return INVALID_OPERATION; - } - - // Only limit dequeue amount when fully configured - if (mState == STATE_CONFIGURED && - mDequeuedBufferCount == camera3_stream::max_buffers) { - ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous" - " buffers (%d)", __FUNCTION__, mId, - camera3_stream::max_buffers); - return INVALID_OPERATION; + if ((res = getBufferPreconditionCheckLocked()) != OK) { + return res; } ANativeWindowBuffer* anb; @@ -113,15 +99,12 @@ status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer) { return res; } - // Handing out a raw pointer to this object. Increment internal refcount. - incStrong(this); - buffer->stream = this; - buffer->buffer = &(anb->handle); - buffer->acquire_fence = fenceFd; - buffer->release_fence = -1; - buffer->status = CAMERA3_BUFFER_STATUS_OK; - - mDequeuedBufferCount++; + /** + * FenceFD now owned by HAL except in case of error, + * in which case we reassign it to acquire_fence + */ + handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd, + /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK); return OK; } @@ -130,29 +113,29 @@ status_t Camera3OutputStream::returnBufferLocked( const camera3_stream_buffer &buffer, nsecs_t timestamp) { ATRACE_CALL(); - status_t res; - // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be - // decrementing the internal refcount next. In case this is the last ref, we - // might get destructed on the decStrong(), so keep an sp around until the - // end of the call - otherwise have to sprinkle the decStrong on all exit - // points. 
- sp keepAlive(this); - decStrong(this); - - // Allow buffers to be returned in the error state, to allow for disconnect - // and in the in-config states for registration - if (mState == STATE_CONSTRUCTED) { - ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d", - __FUNCTION__, mId, mState); - return INVALID_OPERATION; - } - if (mDequeuedBufferCount == 0) { - ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__, - mId); - return INVALID_OPERATION; + status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true); + + if (res != OK) { + return res; } + mLastTimestamp = timestamp; + + return OK; +} + +status_t Camera3OutputStream::returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut) { + + (void)output; + ALOG_ASSERT(output, "Expected output to be true"); + + status_t res; sp releaseFence; /** @@ -173,7 +156,7 @@ status_t Camera3OutputStream::returnBufferLocked( res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp); if (res != OK) { ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); + __FUNCTION__, mId, strerror(-res), res); return res; } @@ -192,15 +175,15 @@ status_t Camera3OutputStream::returnBufferLocked( anwReleaseFence); if (res != OK) { ALOGE("%s: Stream %d: Error cancelling buffer to native window:" - " %s (%d)", __FUNCTION__, mId, strerror(-res), res); + " %s (%d)", __FUNCTION__, mId, strerror(-res), res); } } else { res = mConsumer->queueBuffer(mConsumer.get(), container_of(buffer.buffer, ANativeWindowBuffer, handle), anwReleaseFence); if (res != OK) { - ALOGE("%s: Stream %d: Error queueing buffer to native window: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); + ALOGE("%s: Stream %d: Error queueing buffer to native window: " + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); } } @@ -209,89 +192,18 @@ status_t Camera3OutputStream::returnBufferLocked( return res; } - mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); - - mDequeuedBufferCount--; - mBufferReturnedSignal.signal(); - mLastTimestamp = timestamp; + *releaseFenceOut = releaseFence; return OK; } -bool Camera3OutputStream::hasOutstandingBuffersLocked() const { - nsecs_t signalTime = mCombinedFence->getSignalTime(); - ALOGV("%s: Stream %d: Has %d outstanding buffers," - " buffer signal time is %lld", - __FUNCTION__, mId, mDequeuedBufferCount, signalTime); - if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) { - return true; - } - return false; -} - -status_t Camera3OutputStream::waitUntilIdle(nsecs_t timeout) { - status_t res; - { - Mutex::Autolock l(mLock); - while (mDequeuedBufferCount > 0) { - if (timeout != TIMEOUT_NEVER) { - nsecs_t startTime = systemTime(); - res = mBufferReturnedSignal.waitRelative(mLock, timeout); - if (res == TIMED_OUT) { - return res; - } else if (res != OK) { - ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - nsecs_t deltaTime = systemTime() - startTime; - if (timeout <= deltaTime) { - timeout = 0; - } else { - timeout -= deltaTime; - } - } else { - res = mBufferReturnedSignal.wait(mLock); - if (res != OK) { - ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - } - } - } - - // No lock - - unsigned int timeoutMs; - if (timeout == TIMEOUT_NEVER) { - timeoutMs = Fence::TIMEOUT_NEVER; - } else if (timeout == 0) { - timeoutMs = 0; - } else { - // Round 
up to wait at least 1 ms - timeoutMs = (timeout + 999999) / 1000000; - } - - return mCombinedFence->wait(timeoutMs); -} - void Camera3OutputStream::dump(int fd, const Vector &args) const { (void) args; String8 lines; lines.appendFormat(" Stream[%d]: Output\n", mId); - lines.appendFormat(" State: %d\n", mState); - lines.appendFormat(" Dims: %d x %d, format 0x%x\n", - camera3_stream::width, camera3_stream::height, - camera3_stream::format); - lines.appendFormat(" Max size: %d\n", mMaxSize); - lines.appendFormat(" Usage: %d, max HAL buffers: %d\n", - camera3_stream::usage, camera3_stream::max_buffers); - lines.appendFormat(" Frames produced: %d, last timestamp: %lld ns\n", - mFrameCount, mLastTimestamp); - lines.appendFormat(" Total buffers: %d, currently dequeued: %d\n", - mTotalBufferCount, mDequeuedBufferCount); write(fd, lines.string(), lines.size()); + + Camera3IOStreamBase::dump(fd, args); } status_t Camera3OutputStream::setTransform(int transform) { @@ -322,21 +234,12 @@ status_t Camera3OutputStream::setTransformLocked(int transform) { status_t Camera3OutputStream::configureQueueLocked() { status_t res; - switch (mState) { - case STATE_IN_RECONFIG: - res = disconnectLocked(); - if (res != OK) { - return res; - } - break; - case STATE_IN_CONFIG: - // OK - break; - default: - ALOGE("%s: Bad state: %d", __FUNCTION__, mState); - return INVALID_OPERATION; + if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) { + return res; } + ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL"); + // Configure consumer-side ANativeWindow interface res = native_window_api_connect(mConsumer.get(), NATIVE_WINDOW_API_CAMERA); @@ -420,30 +323,15 @@ status_t Camera3OutputStream::configureQueueLocked() { return OK; } -size_t Camera3OutputStream::getBufferCountLocked() { - return mTotalBufferCount; -} - status_t Camera3OutputStream::disconnectLocked() { status_t res; - switch (mState) { - case STATE_IN_RECONFIG: - case STATE_CONFIGURED: - // OK - break; - default: - // No connection, nothing to do - return OK; - } - - if (mDequeuedBufferCount > 0) { - ALOGE("%s: Can't disconnect with %d buffers still dequeued!", - __FUNCTION__, mDequeuedBufferCount); - return INVALID_OPERATION; + if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) { + return res; } - res = native_window_api_disconnect(mConsumer.get(), NATIVE_WINDOW_API_CAMERA); + res = native_window_api_disconnect(mConsumer.get(), + NATIVE_WINDOW_API_CAMERA); /** * This is not an error. if client calling process dies, the window will @@ -455,13 +343,15 @@ status_t Camera3OutputStream::disconnectLocked() { " native window died from under us", __FUNCTION__, mId); } else if (res != OK) { - ALOGE("%s: Unable to disconnect stream %d from native window (error %d %s)", - __FUNCTION__, mId, res, strerror(-res)); + ALOGE("%s: Unable to disconnect stream %d from native window " + "(error %d %s)", + __FUNCTION__, mId, res, strerror(-res)); mState = STATE_ERROR; return res; } - mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG : STATE_CONSTRUCTED; + mState = (mState == STATE_IN_RECONFIG) ? 
STATE_IN_CONFIG + : STATE_CONSTRUCTED; return OK; } diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.h b/services/camera/libcameraservice/camera3/Camera3OutputStream.h index 2464dce..ce317f9 100644 --- a/services/camera/libcameraservice/camera3/Camera3OutputStream.h +++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.h @@ -21,6 +21,7 @@ #include #include "Camera3Stream.h" +#include "Camera3IOStreamBase.h" #include "Camera3OutputStreamInterface.h" namespace android { @@ -31,7 +32,7 @@ namespace camera3 { * A class for managing a single stream of output data from the camera device. */ class Camera3OutputStream : - public Camera3Stream, + public Camera3IOStreamBase, public Camera3OutputStreamInterface { public: /** @@ -53,7 +54,6 @@ class Camera3OutputStream : * Camera3Stream interface */ - virtual status_t waitUntilIdle(nsecs_t timeout); virtual void dump(int fd, const Vector &args) const; /** @@ -62,19 +62,22 @@ class Camera3OutputStream : */ status_t setTransform(int transform); - private: + protected: + Camera3OutputStream(int id, camera3_stream_type_t type, + uint32_t width, uint32_t height, int format); + + virtual status_t returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut); + sp mConsumer; + private: int mTransform; - size_t mTotalBufferCount; - size_t mDequeuedBufferCount; - Condition mBufferReturnedSignal; - uint32_t mFrameCount; - nsecs_t mLastTimestamp; - // The merged release fence for all returned buffers - sp mCombinedFence; - - status_t setTransformLocked(int transform); + virtual status_t setTransformLocked(int transform); /** * Internal Camera3Stream interface @@ -83,12 +86,9 @@ class Camera3OutputStream : virtual status_t returnBufferLocked( const camera3_stream_buffer &buffer, nsecs_t timestamp); - virtual bool hasOutstandingBuffersLocked() const; virtual status_t configureQueueLocked(); - virtual size_t getBufferCountLocked(); virtual status_t disconnectLocked(); - }; // class Camera3OutputStream } // namespace camera3 diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp index 5a13dde..8790c8c 100644 --- a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp @@ -18,18 +18,10 @@ #define ATRACE_TAG ATRACE_TAG_CAMERA //#define LOG_NDEBUG 0 -// This is needed for stdint.h to define INT64_MAX in C++ -#define __STDC_LIMIT_MACROS - #include #include #include "Camera3ZslStream.h" -#ifndef container_of -#define container_of(ptr, type, member) \ - (type *)((char*)(ptr) - offsetof(type, member)) -#endif - typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem; namespace android { @@ -118,358 +110,24 @@ struct TimestampFinder : public RingBufferConsumer::RingBufferComparator { Camera3ZslStream::Camera3ZslStream(int id, uint32_t width, uint32_t height, int depth) : - Camera3Stream(id, CAMERA3_STREAM_BIDIRECTIONAL, width, height, 0, - HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), + Camera3OutputStream(id, CAMERA3_STREAM_BIDIRECTIONAL, + width, height, + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), mDepth(depth), mProducer(new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL, - depth)), - mConsumer(new Surface(mProducer->getProducerInterface())), - //mTransform(0), - mTotalBufferCount(0), - mDequeuedBufferCount(0), - mFrameCount(0), - mLastTimestamp(0), - mCombinedFence(new Fence()) { -} + depth)) { 
-Camera3ZslStream::~Camera3ZslStream() { - disconnectLocked(); + mConsumer = new Surface(mProducer->getProducerInterface()); } -status_t Camera3ZslStream::getBufferLocked(camera3_stream_buffer *buffer) { - // same as output stream code - ATRACE_CALL(); - status_t res; - - // Allow dequeue during IN_[RE]CONFIG for registration - if (mState != STATE_CONFIGURED && - mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) { - ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d", - __FUNCTION__, mId, mState); - return INVALID_OPERATION; - } - - // Only limit dequeue amount when fully configured - if (mState == STATE_CONFIGURED && - mDequeuedBufferCount == camera3_stream::max_buffers) { - ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous" - " buffers (%d)", __FUNCTION__, mId, - camera3_stream::max_buffers); - return INVALID_OPERATION; - } - - ANativeWindowBuffer* anb; - int fenceFd; - - res = mConsumer->dequeueBuffer(mConsumer.get(), &anb, &fenceFd); - if (res != OK) { - ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - - // Handing out a raw pointer to this object. Increment internal refcount. - incStrong(this); - buffer->stream = this; - buffer->buffer = &(anb->handle); - buffer->acquire_fence = fenceFd; - buffer->release_fence = -1; - buffer->status = CAMERA3_BUFFER_STATUS_OK; - - mDequeuedBufferCount++; - - return OK; +Camera3ZslStream::~Camera3ZslStream() { } -status_t Camera3ZslStream::returnBufferLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp) { - // same as output stream code +status_t Camera3ZslStream::getInputBufferLocked(camera3_stream_buffer *buffer) { ATRACE_CALL(); - status_t res; - - // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be - // decrementing the internal refcount next. In case this is the last ref, we - // might get destructed on the decStrong(), so keep an sp around until the - // end of the call - otherwise have to sprinkle the decStrong on all exit - // points. 
- sp keepAlive(this); - decStrong(this); - - // Allow buffers to be returned in the error state, to allow for disconnect - // and in the in-config states for registration - if (mState == STATE_CONSTRUCTED) { - ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d", - __FUNCTION__, mId, mState); - return INVALID_OPERATION; - } - if (mDequeuedBufferCount == 0) { - ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__, - mId); - return INVALID_OPERATION; - } - - sp releaseFence; - - /** - * Fence management - calculate Release Fence - */ - if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { - if (buffer.release_fence != -1) { - ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when " - "there is an error", __FUNCTION__, mId, buffer.release_fence); - close(buffer.release_fence); - } - - /** - * Reassign release fence as the acquire fence in case of error - */ - releaseFence = new Fence(buffer.acquire_fence); - } else { - res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp); - if (res != OK) { - ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - - releaseFence = new Fence(buffer.release_fence); - } - - int anwReleaseFence = releaseFence->dup(); - - /** - * Return buffer back to ANativeWindow - */ - if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { - // Cancel buffer - res = mConsumer->cancelBuffer(mConsumer.get(), - container_of(buffer.buffer, ANativeWindowBuffer, handle), - anwReleaseFence); - if (res != OK) { - ALOGE("%s: Stream %d: Error cancelling buffer to native window:" - " %s (%d)", __FUNCTION__, mId, strerror(-res), res); - } - } else { - res = mConsumer->queueBuffer(mConsumer.get(), - container_of(buffer.buffer, ANativeWindowBuffer, handle), - anwReleaseFence); - if (res != OK) { - ALOGE("%s: Stream %d: Error queueing buffer to native window: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - } - } - - if (res != OK) { - close(anwReleaseFence); - return res; - } - mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); - - mDequeuedBufferCount--; - mBufferReturnedSignal.signal(); - mLastTimestamp = timestamp; - - return OK; -} - -bool Camera3ZslStream::hasOutstandingBuffersLocked() const { - // same as output stream - nsecs_t signalTime = mCombinedFence->getSignalTime(); - ALOGV("%s: Stream %d: Has %d outstanding buffers," - " buffer signal time is %lld", - __FUNCTION__, mId, mDequeuedBufferCount, signalTime); - if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) { - return true; - } - return false; -} - -status_t Camera3ZslStream::waitUntilIdle(nsecs_t timeout) { - // same as output stream - status_t res; - { - Mutex::Autolock l(mLock); - while (mDequeuedBufferCount > 0) { - if (timeout != TIMEOUT_NEVER) { - nsecs_t startTime = systemTime(); - res = mBufferReturnedSignal.waitRelative(mLock, timeout); - if (res == TIMED_OUT) { - return res; - } else if (res != OK) { - ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - nsecs_t deltaTime = systemTime() - startTime; - if (timeout <= deltaTime) { - timeout = 0; - } else { - timeout -= deltaTime; - } - } else { - res = mBufferReturnedSignal.wait(mLock); - if (res != OK) { - ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - } - } - } - - // No lock - - unsigned int timeoutMs; - if (timeout == TIMEOUT_NEVER) { - timeoutMs = Fence::TIMEOUT_NEVER; - } else if 
(timeout == 0) { - timeoutMs = 0; - } else { - // Round up to wait at least 1 ms - timeoutMs = (timeout + 999999) / 1000000; - } - - return mCombinedFence->wait(timeoutMs); -} -status_t Camera3ZslStream::configureQueueLocked() { status_t res; - switch (mState) { - case STATE_IN_RECONFIG: - res = disconnectLocked(); - if (res != OK) { - return res; - } - break; - case STATE_IN_CONFIG: - // OK - break; - default: - ALOGE("%s: Bad state: %d", __FUNCTION__, mState); - return INVALID_OPERATION; - } - - // Configure consumer-side ANativeWindow interface - res = native_window_api_connect(mConsumer.get(), - NATIVE_WINDOW_API_CAMERA); - if (res != OK) { - ALOGE("%s: Unable to connect to native window for stream %d", - __FUNCTION__, mId); - return res; - } - - res = native_window_set_usage(mConsumer.get(), camera3_stream::usage); - if (res != OK) { - ALOGE("%s: Unable to configure usage %08x for stream %d", - __FUNCTION__, camera3_stream::usage, mId); - return res; - } - - res = native_window_set_scaling_mode(mConsumer.get(), - NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); - if (res != OK) { - ALOGE("%s: Unable to configure stream scaling: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - - if (mMaxSize == 0) { - // For buffers of known size - res = native_window_set_buffers_geometry(mConsumer.get(), - camera3_stream::width, camera3_stream::height, - camera3_stream::format); - } else { - // For buffers with bounded size - res = native_window_set_buffers_geometry(mConsumer.get(), - mMaxSize, 1, - camera3_stream::format); - } - if (res != OK) { - ALOGE("%s: Unable to configure stream buffer geometry" - " %d x %d, format %x for stream %d", - __FUNCTION__, camera3_stream::width, camera3_stream::height, - camera3_stream::format, mId); - return res; - } - - int maxConsumerBuffers; - res = mConsumer->query(mConsumer.get(), - NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers); - if (res != OK) { - ALOGE("%s: Unable to query consumer undequeued" - " buffer count for stream %d", __FUNCTION__, mId); - return res; - } - - ALOGV("%s: Consumer wants %d buffers", __FUNCTION__, - maxConsumerBuffers); - - mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers; - mDequeuedBufferCount = 0; - mFrameCount = 0; - mLastTimestamp = 0; - - res = native_window_set_buffer_count(mConsumer.get(), - mTotalBufferCount); - if (res != OK) { - ALOGE("%s: Unable to set buffer count for stream %d", - __FUNCTION__, mId); - return res; - } - - return OK; -} - -size_t Camera3ZslStream::getBufferCountLocked() { - return mTotalBufferCount; -} - -status_t Camera3ZslStream::disconnectLocked() { - status_t res; - - switch (mState) { - case STATE_IN_RECONFIG: - case STATE_CONFIGURED: - // OK - break; - default: - // No connection, nothing to do - return OK; - } - - if (mDequeuedBufferCount > 0) { - ALOGE("%s: Can't disconnect with %d buffers still dequeued!", - __FUNCTION__, mDequeuedBufferCount); - return INVALID_OPERATION; - } - - res = native_window_api_disconnect(mConsumer.get(), NATIVE_WINDOW_API_CAMERA); - - /** - * This is not an error. 
if client calling process dies, the window will - * also die and all calls to it will return DEAD_OBJECT, thus it's already - * "disconnected" - */ - if (res == DEAD_OBJECT) { - ALOGW("%s: While disconnecting stream %d from native window, the" - " native window died from under us", __FUNCTION__, mId); - } - else if (res != OK) { - ALOGE("%s: Unable to disconnect stream %d from native window (error %d %s)", - __FUNCTION__, mId, res, strerror(-res)); - mState = STATE_ERROR; - return res; - } - - mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG : STATE_CONSTRUCTED; - return OK; -} - -status_t Camera3ZslStream::getInputBufferLocked(camera3_stream_buffer *buffer) { - ATRACE_CALL(); - // TODO: potentially register from inputBufferLocked // this should be ok, registerBuffersLocked only calls getBuffer for now // register in output mode instead of input mode for ZSL streams. @@ -480,21 +138,8 @@ status_t Camera3ZslStream::getInputBufferLocked(camera3_stream_buffer *buffer) { return INVALID_OPERATION; } - // Allow dequeue during IN_[RE]CONFIG for registration - if (mState != STATE_CONFIGURED && - mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) { - ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d", - __FUNCTION__, mId, mState); - return INVALID_OPERATION; - } - - // Only limit dequeue amount when fully configured - if (mState == STATE_CONFIGURED && - mDequeuedBufferCount == camera3_stream::max_buffers) { - ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous" - " buffers (%d)", __FUNCTION__, mId, - camera3_stream::max_buffers); - return INVALID_OPERATION; + if ((res = getBufferPreconditionCheckLocked()) != OK) { + return res; } ANativeWindowBuffer* anb; @@ -526,47 +171,32 @@ status_t Camera3ZslStream::getInputBufferLocked(camera3_stream_buffer *buffer) { * FenceFD now owned by HAL except in case of error, * in which case we reassign it to acquire_fence */ - - // Handing out a raw pointer to this object. Increment internal refcount. - incStrong(this); - buffer->stream = this; - buffer->buffer = &(anb->handle); - buffer->acquire_fence = fenceFd; - buffer->release_fence = -1; - buffer->status = CAMERA3_BUFFER_STATUS_OK; - - mDequeuedBufferCount++; + handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd, + /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK); mBuffersInFlight.push_back(bufferItem); return OK; } -status_t Camera3ZslStream::returnInputBufferLocked( - const camera3_stream_buffer &buffer) { - ATRACE_CALL(); +status_t Camera3ZslStream::returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut) { - // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be - // decrementing the internal refcount next. In case this is the last ref, we - // might get destructed on the decStrong(), so keep an sp around until the - // end of the call - otherwise have to sprinkle the decStrong on all exit - // points. 
- sp keepAlive(this); - decStrong(this); - - // Allow buffers to be returned in the error state, to allow for disconnect - // and in the in-config states for registration - if (mState == STATE_CONSTRUCTED) { - ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d", - __FUNCTION__, mId, mState); - return INVALID_OPERATION; - } - if (mDequeuedBufferCount == 0) { - ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__, - mId); - return INVALID_OPERATION; + if (output) { + // Output stream path + return Camera3OutputStream::returnBufferCheckedLocked(buffer, + timestamp, + output, + releaseFenceOut); } + /** + * Input stream path + */ bool bufferFound = false; sp bufferItem; { @@ -583,7 +213,7 @@ status_t Camera3ZslStream::returnInputBufferLocked( bufferFound = true; bufferItem = tmp; mBuffersInFlight.erase(it); - mDequeuedBufferCount--; + break; } } } @@ -616,12 +246,19 @@ status_t Camera3ZslStream::returnInputBufferLocked( bufferItem->getBufferItem().mFence = releaseFence; bufferItem.clear(); // dropping last reference unpins buffer - mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); - - mBufferReturnedSignal.signal(); + *releaseFenceOut = releaseFence; return OK; +} + +status_t Camera3ZslStream::returnInputBufferLocked( + const camera3_stream_buffer &buffer) { + ATRACE_CALL(); + status_t res = returnAnyBufferLocked(buffer, /*timestamp*/0, + /*output*/false); + + return res; } void Camera3ZslStream::dump(int fd, const Vector &args) const { @@ -629,16 +266,11 @@ void Camera3ZslStream::dump(int fd, const Vector &args) const { String8 lines; lines.appendFormat(" Stream[%d]: ZSL\n", mId); - lines.appendFormat(" State: %d\n", mState); - lines.appendFormat(" Dims: %d x %d, format 0x%x\n", - camera3_stream::width, camera3_stream::height, - camera3_stream::format); - lines.appendFormat(" Usage: %d, max HAL buffers: %d\n", - camera3_stream::usage, camera3_stream::max_buffers); - lines.appendFormat(" Frames produced: %d, last timestamp: %lld ns\n", - mFrameCount, mLastTimestamp); - lines.appendFormat(" Total buffers: %d, currently dequeued: %d\n", - mTotalBufferCount, mDequeuedBufferCount); + write(fd, lines.string(), lines.size()); + + Camera3IOStreamBase::dump(fd, args); + + lines = String8(); lines.appendFormat(" Input buffers pending: %d, in flight %d\n", mInputBufferQueue.size(), mBuffersInFlight.size()); write(fd, lines.string(), lines.size()); diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.h b/services/camera/libcameraservice/camera3/Camera3ZslStream.h index b863e7f..c7f4490 100644 --- a/services/camera/libcameraservice/camera3/Camera3ZslStream.h +++ b/services/camera/libcameraservice/camera3/Camera3ZslStream.h @@ -21,8 +21,7 @@ #include #include -#include "Camera3Stream.h" -#include "Camera3OutputStreamInterface.h" +#include "Camera3OutputStream.h" namespace android { @@ -35,8 +34,7 @@ namespace camera3 { * processing. */ class Camera3ZslStream : - public Camera3Stream, - public Camera3OutputStreamInterface { + public Camera3OutputStream { public: /** * Set up a ZSL stream of a given resolution. 
Depth is the number of buffers @@ -45,7 +43,6 @@ class Camera3ZslStream : Camera3ZslStream(int id, uint32_t width, uint32_t height, int depth); ~Camera3ZslStream(); - virtual status_t waitUntilIdle(nsecs_t timeout); virtual void dump(int fd, const Vector &args) const; enum { NO_BUFFER_AVAILABLE = BufferQueue::NO_BUFFER_AVAILABLE }; @@ -65,6 +62,8 @@ class Camera3ZslStream : */ status_t clearInputRingBuffer(); + protected: + /** * Camera3OutputStreamInterface implementation */ @@ -76,41 +75,27 @@ class Camera3ZslStream : // Input buffers pending to be queued into HAL List > mInputBufferQueue; sp mProducer; - sp mConsumer; // Input buffers in flight to HAL Vector > mBuffersInFlight; - size_t mTotalBufferCount; - // sum of input and output buffers that are currently acquired by HAL - size_t mDequeuedBufferCount; - Condition mBufferReturnedSignal; - uint32_t mFrameCount; - // Last received output buffer's timestamp - nsecs_t mLastTimestamp; - - // The merged release fence for all returned buffers - sp mCombinedFence; /** * Camera3Stream interface */ - // getBuffer/returnBuffer operate the output stream side of the ZslStream. - virtual status_t getBufferLocked(camera3_stream_buffer *buffer); - virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer, - nsecs_t timestamp); // getInputBuffer/returnInputBuffer operate the input stream side of the // ZslStream. virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer); virtual status_t returnInputBufferLocked( const camera3_stream_buffer &buffer); - virtual bool hasOutstandingBuffersLocked() const; - virtual status_t disconnectLocked(); - - virtual status_t configureQueueLocked(); - virtual size_t getBufferCountLocked(); - + // Actual body to return either input or output buffers + virtual status_t returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut); }; // class Camera3ZslStream }; // namespace camera3 -- cgit v1.1 From aef5c98cd3f67e0209e1fa28489078e9f40d6f46 Mon Sep 17 00:00:00 2001 From: Chong Zhang Date: Thu, 9 May 2013 01:27:59 -0700 Subject: wifi-display: misc fixes for M3 response parsing - fix wfd_video_format parsing w/ multiple H264-Codec - fix handling of CRLF at end of M3 response - fix missing wfd_client_rtp_ports Bug: 8871667 Bug: 8875749 Change-Id: Idb0b5605313bba6ff067bcd765512bbed42e5202 --- media/libstagefright/wifi-display/Parameters.cpp | 4 +- media/libstagefright/wifi-display/VideoFormats.cpp | 279 ++++++++++++--------- media/libstagefright/wifi-display/VideoFormats.h | 23 ++ .../wifi-display/source/WifiDisplaySource.cpp | 5 +- 4 files changed, 186 insertions(+), 125 deletions(-) diff --git a/media/libstagefright/wifi-display/Parameters.cpp b/media/libstagefright/wifi-display/Parameters.cpp index f7118b3..d2a61ea 100644 --- a/media/libstagefright/wifi-display/Parameters.cpp +++ b/media/libstagefright/wifi-display/Parameters.cpp @@ -65,7 +65,9 @@ status_t Parameters::parse(const char *data, size_t size) { mDict.add(name, value); - i += 2; + while (i + 1 < size && data[i] == '\r' && data[i + 1] == '\n') { + i += 2; + } } return OK; diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp index da557f7..458b163 100644 --- a/media/libstagefright/wifi-display/VideoFormats.cpp +++ b/media/libstagefright/wifi-display/VideoFormats.cpp @@ -24,6 +24,114 @@ namespace android { +VideoFormats::config_t VideoFormats::mConfigs[][32] = { + { + // CEA Resolutions + { 640, 480, 
60, false, 0, 0}, + { 720, 480, 60, false, 0, 0}, + { 720, 480, 60, true, 0, 0}, + { 720, 576, 50, false, 0, 0}, + { 720, 576, 50, true, 0, 0}, + { 1280, 720, 30, false, 0, 0}, + { 1280, 720, 60, false, 0, 0}, + { 1920, 1080, 30, false, 0, 0}, + { 1920, 1080, 60, false, 0, 0}, + { 1920, 1080, 60, true, 0, 0}, + { 1280, 720, 25, false, 0, 0}, + { 1280, 720, 50, false, 0, 0}, + { 1920, 1080, 25, false, 0, 0}, + { 1920, 1080, 50, false, 0, 0}, + { 1920, 1080, 50, true, 0, 0}, + { 1280, 720, 24, false, 0, 0}, + { 1920, 1080, 24, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + }, + { + // VESA Resolutions + { 800, 600, 30, false, 0, 0}, + { 800, 600, 60, false, 0, 0}, + { 1024, 768, 30, false, 0, 0}, + { 1024, 768, 60, false, 0, 0}, + { 1152, 864, 30, false, 0, 0}, + { 1152, 864, 60, false, 0, 0}, + { 1280, 768, 30, false, 0, 0}, + { 1280, 768, 60, false, 0, 0}, + { 1280, 800, 30, false, 0, 0}, + { 1280, 800, 60, false, 0, 0}, + { 1360, 768, 30, false, 0, 0}, + { 1360, 768, 60, false, 0, 0}, + { 1366, 768, 30, false, 0, 0}, + { 1366, 768, 60, false, 0, 0}, + { 1280, 1024, 30, false, 0, 0}, + { 1280, 1024, 60, false, 0, 0}, + { 1400, 1050, 30, false, 0, 0}, + { 1400, 1050, 60, false, 0, 0}, + { 1440, 900, 30, false, 0, 0}, + { 1440, 900, 60, false, 0, 0}, + { 1600, 900, 30, false, 0, 0}, + { 1600, 900, 60, false, 0, 0}, + { 1600, 1200, 30, false, 0, 0}, + { 1600, 1200, 60, false, 0, 0}, + { 1680, 1024, 30, false, 0, 0}, + { 1680, 1024, 60, false, 0, 0}, + { 1680, 1050, 30, false, 0, 0}, + { 1680, 1050, 60, false, 0, 0}, + { 1920, 1200, 30, false, 0, 0}, + { 1920, 1200, 60, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + }, + { + // HH Resolutions + { 800, 480, 30, false, 0, 0}, + { 800, 480, 60, false, 0, 0}, + { 854, 480, 30, false, 0, 0}, + { 854, 480, 60, false, 0, 0}, + { 864, 480, 30, false, 0, 0}, + { 864, 480, 60, false, 0, 0}, + { 640, 360, 30, false, 0, 0}, + { 640, 360, 60, false, 0, 0}, + { 960, 540, 30, false, 0, 0}, + { 960, 540, 60, false, 0, 0}, + { 848, 480, 30, false, 0, 0}, + { 848, 480, 60, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + } +}; + VideoFormats::VideoFormats() { for (size_t i = 0; i < kNumResolutionTypes; ++i) { mResolutionEnabled[i] = 0; @@ -51,12 +159,19 @@ void VideoFormats::getNativeResolution( void VideoFormats::disableAll() { for (size_t i = 0; i < kNumResolutionTypes; ++i) { mResolutionEnabled[i] = 0; + for (size_t j = 0; j < 32; j++) { + mConfigs[i][j].profile = mConfigs[i][j].level = 0; + } } } void VideoFormats::enableAll() { for (size_t i = 0; i < kNumResolutionTypes; ++i) { mResolutionEnabled[i] = 0xffffffff; + for (size_t j = 0; j < 32; j++) { + 
mConfigs[i][j].profile = (1ul << PROFILE_CBP); + mConfigs[i][j].level = (1ul << LEVEL_31); + } } } @@ -92,118 +207,7 @@ bool VideoFormats::GetConfiguration( return false; } - static const struct config_t { - size_t width, height, framesPerSecond; - bool interlaced; - } kConfigs[kNumResolutionTypes][32] = { - { - // CEA Resolutions - { 640, 480, 60, false }, - { 720, 480, 60, false }, - { 720, 480, 60, true }, - { 720, 576, 50, false }, - { 720, 576, 50, true }, - { 1280, 720, 30, false }, - { 1280, 720, 60, false }, - { 1920, 1080, 30, false }, - { 1920, 1080, 60, false }, - { 1920, 1080, 60, true }, - { 1280, 720, 25, false }, - { 1280, 720, 50, false }, - { 1920, 1080, 25, false }, - { 1920, 1080, 50, false }, - { 1920, 1080, 50, true }, - { 1280, 720, 24, false }, - { 1920, 1080, 24, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - }, - { - // VESA Resolutions - { 800, 600, 30, false }, - { 800, 600, 60, false }, - { 1024, 768, 30, false }, - { 1024, 768, 60, false }, - { 1152, 864, 30, false }, - { 1152, 864, 60, false }, - { 1280, 768, 30, false }, - { 1280, 768, 60, false }, - { 1280, 800, 30, false }, - { 1280, 800, 60, false }, - { 1360, 768, 30, false }, - { 1360, 768, 60, false }, - { 1366, 768, 30, false }, - { 1366, 768, 60, false }, - { 1280, 1024, 30, false }, - { 1280, 1024, 60, false }, - { 1400, 1050, 30, false }, - { 1400, 1050, 60, false }, - { 1440, 900, 30, false }, - { 1440, 900, 60, false }, - { 1600, 900, 30, false }, - { 1600, 900, 60, false }, - { 1600, 1200, 30, false }, - { 1600, 1200, 60, false }, - { 1680, 1024, 30, false }, - { 1680, 1024, 60, false }, - { 1680, 1050, 30, false }, - { 1680, 1050, 60, false }, - { 1920, 1200, 30, false }, - { 1920, 1200, 60, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - }, - { - // HH Resolutions - { 800, 480, 30, false }, - { 800, 480, 60, false }, - { 854, 480, 30, false }, - { 854, 480, 60, false }, - { 864, 480, 30, false }, - { 864, 480, 60, false }, - { 640, 360, 30, false }, - { 640, 360, 60, false }, - { 960, 540, 30, false }, - { 960, 540, 60, false }, - { 848, 480, 30, false }, - { 848, 480, 60, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - { 0, 0, 0, false }, - } - }; - - const config_t *config = &kConfigs[type][index]; + const config_t *config = &mConfigs[type][index]; if (config->width == 0) { return false; @@ -228,24 +232,55 @@ bool VideoFormats::GetConfiguration( return true; } +bool VideoFormats::parseH264Codec(const char *spec) { + unsigned profile, level, res[3]; + + if (sscanf( + spec, + "%02x %02x %08X %08X %08X", + &profile, + &level, + &res[0], + &res[1], + &res[2]) != 5) { + return false; + } + + for (size_t i = 0; i < kNumResolutionTypes; ++i) { + for (size_t j = 0; j < 32; ++j) { + if (res[i] & (1ul << j)){ + mResolutionEnabled[i] |= (1ul << j); + if (profile > mConfigs[i][j].profile) { + mConfigs[i][j].profile = profile; + if 
(level > mConfigs[i][j].level) + mConfigs[i][j].level = level; + } + } + } + } + + return true; +} + bool VideoFormats::parseFormatSpec(const char *spec) { CHECK_EQ(kNumResolutionTypes, 3); unsigned native, dummy; + unsigned res[3]; + size_t size = strlen(spec); + size_t offset = 0; - if (sscanf( - spec, - "%02x %02x %02x %02x %08X %08X %08X", - &native, - &dummy, - &dummy, - &dummy, - &mResolutionEnabled[0], - &mResolutionEnabled[1], - &mResolutionEnabled[2]) != 7) { + if (sscanf(spec, "%02x %02x ", &native, &dummy) != 2) { return false; } + offset += 6; // skip native and preferred-display-mode-supported + CHECK_LE(offset + 58, size); + while (offset < size) { + parseH264Codec(spec + offset); + offset += 60; // skip H.264-codec + ", " + } + mNativeIndex = native >> 3; mNativeType = (ResolutionType)(native & 7); diff --git a/media/libstagefright/wifi-display/VideoFormats.h b/media/libstagefright/wifi-display/VideoFormats.h index 69e2197..01de246 100644 --- a/media/libstagefright/wifi-display/VideoFormats.h +++ b/media/libstagefright/wifi-display/VideoFormats.h @@ -36,6 +36,27 @@ struct AString; struct VideoFormats { VideoFormats(); + struct config_t { + size_t width, height, framesPerSecond; + bool interlaced; + unsigned char profile, level; + }; + + enum ProfileType { + PROFILE_CBP = 0, + PROFILE_CHP, + kNumProfileTypes, + }; + + enum LevelType { + LEVEL_31 = 0, + LEVEL_32, + LEVEL_40, + LEVEL_41, + LEVEL_42, + kNumLevelTypes, + }; + enum ResolutionType { RESOLUTION_CEA, RESOLUTION_VESA, @@ -69,10 +90,12 @@ struct VideoFormats { size_t *chosenIndex); private: + bool parseH264Codec(const char *spec); ResolutionType mNativeType; size_t mNativeIndex; uint32_t mResolutionEnabled[kNumResolutionTypes]; + static config_t mConfigs[kNumResolutionTypes][32]; DISALLOW_EVIL_CONSTRUCTORS(VideoFormats); }; diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 2b5bee9..22dd0b1 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -635,8 +635,9 @@ status_t WifiDisplaySource::sendM4(int32_t sessionID) { "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n", mClientInfo.mLocalIP.c_str())); - body.append(mWfdClientRtpPorts); - body.append("\r\n"); + body.append( + StringPrintf( + "wfd_client_rtp_ports: %s\r\n", mWfdClientRtpPorts.c_str())); AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n"; AppendCommonResponse(&request, mNextCSeq); -- cgit v1.1 From 704455a5a6cd22f03bb8984e0c7f46108eb1afb7 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Mon, 13 May 2013 12:47:53 -0700 Subject: Fix unreleased mutex in ToneGenerator stopTone() Commit 09108ade introduced a regression by not releasing the ToneGenerator mutex before exiting in case of error. 
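A minimal sketch of the pattern being restored (illustrative only, not the patched file; stopToneSketch, gLock and gToneRestarted are hypothetical stand-ins for stopTone(), mLock and the mState != TONE_INIT check, and std::mutex stands in for the android Mutex):

    #include <mutex>

    static std::mutex gLock;            // stands in for ToneGenerator::mLock
    static bool gToneRestarted = false; // stands in for (mState != TONE_INIT)

    static void stopToneSketch() {
        gLock.lock();
        // ... wait for the playback thread to finish stopping the track ...
        if (gToneRestarted) {
            gLock.unlock();             // the release this change adds; returning
            return;                     // with the mutex still held blocks the
        }                               // next caller of the tone generator
        // ... clearWaveGens() and the rest of the teardown ...
        gLock.unlock();
    }

Because stopTone() takes and releases the mutex manually rather than through a scoped Mutex::Autolock, every early-return path has to unlock explicitly, which is exactly what the one-line change below does.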
Bug: 8852855 Change-Id: I8ba2755b218842e2034ed8dbd54b18bf2a5fc571 --- media/libmedia/ToneGenerator.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index f09ce75..f55b697 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -1012,6 +1012,7 @@ void ToneGenerator::stopTone() { if (lStatus == NO_ERROR) { // If the tone was restarted exit now before calling clearWaveGens(); if (mState != TONE_INIT) { + mLock.unlock(); return; } ALOGV("track stop complete, time %d", (unsigned int)(systemTime()/1000000)); -- cgit v1.1 From 614e95449a04ca495cddfa435fddca2945d03572 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 13 May 2013 13:29:46 -0700 Subject: Fix receiver report handling. Not adjusting the size of the buffer would in effect only add a single report block (the last one added would survive) and a whole lot of uninitialized data to the report. Change-Id: I5b4353d6d8c3becb1bc102afd42385b7851b1c3a --- media/libstagefright/wifi-display/rtp/RTPReceiver.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp index 8fa1dae..2d22e79 100644 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp @@ -489,6 +489,8 @@ void RTPReceiver::Source::addReportBlock( ptr[21] = 0x00; ptr[22] = 0x00; ptr[23] = 0x00; + + buf->setRange(buf->offset(), buf->size() + 24); } //////////////////////////////////////////////////////////////////////////////// @@ -1012,7 +1014,6 @@ void RTPReceiver::scheduleSendRR() { } void RTPReceiver::onSendRR() { -#if 0 sp buf = new ABuffer(kMaxUDPPacketSize); buf->setRange(0, 0); @@ -1053,7 +1054,6 @@ void RTPReceiver::onSendRR() { addSDES(buf); mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); -#endif scheduleSendRR(); } -- cgit v1.1 From dca254aef0c09867e50fe6613c6fad405de72842 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Mon, 13 May 2013 14:53:44 -0700 Subject: Camera: Pause preview when needing to delete jpeg/zsl streams Bug: 8876221 Change-Id: Idf9490a48462fa44d49c4ebe99425c3149ae6378 --- services/camera/libcameraservice/Camera2Client.cpp | 48 +++++++++++++++- services/camera/libcameraservice/Camera2Client.h | 3 + services/camera/libcameraservice/Camera3Device.cpp | 4 +- .../libcameraservice/camera2/JpegProcessor.cpp | 6 +- .../camera2/StreamingProcessor.cpp | 64 +++++++++++++++++++++- .../libcameraservice/camera2/StreamingProcessor.h | 4 ++ .../libcameraservice/camera2/ZslProcessor3.cpp | 6 +- 7 files changed, 126 insertions(+), 9 deletions(-) diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index 48f3606..2e01dae 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -690,7 +690,7 @@ status_t Camera2Client::startPreviewL(Parameters ¶ms, bool restart) { outputStreams.push(getCallbackStreamId()); } if (params.zslMode && !params.recordingHint) { - res = mZslProcessor->updateStream(params); + res = updateProcessorStream(mZslProcessor, params); if (res != OK) { ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); @@ -718,7 +718,7 @@ status_t Camera2Client::startPreviewL(Parameters ¶ms, bool restart) { // assumption that the user will record video. 
To optimize recording // startup time, create the necessary output streams for recording and // video snapshot now if they don't already exist. - res = mJpegProcessor->updateStream(params); + res = updateProcessorStream(mJpegProcessor, params); if (res != OK) { ALOGE("%s: Camera %d: Can't pre-configure still image " "stream: %s (%d)", @@ -1125,7 +1125,7 @@ status_t Camera2Client::takePicture(int msgType) { ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId); - res = mJpegProcessor->updateStream(l.mParameters); + res = updateProcessorStream(mJpegProcessor, l.mParameters); if (res != OK) { ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); @@ -1605,5 +1605,47 @@ status_t Camera2Client::syncWithDevice() { return res; } +template +status_t Camera2Client::updateProcessorStream(sp processor, + camera2::Parameters params) { + status_t res; + + res = processor->updateStream(params); + + /** + * Can't update the stream if it's busy? + * + * Then we need to stop the device (by temporarily clearing the request + * queue) and then try again. Resume streaming once we're done. + */ + if (res == -EBUSY) { + res = mStreamingProcessor->togglePauseStream(/*pause*/true); + if (res != OK) { + ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + } + + res = mDevice->waitUntilDrained(); + if (res != OK) { + ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + } + + res = processor->updateStream(params); + if (res != OK) { + ALOGE("%s: Camera %d: Failed to update processing stream " + " despite having halted streaming first: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + } + + res = mStreamingProcessor->togglePauseStream(/*pause*/false); + if (res != OK) { + ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + } + } + + return res; +} } // namespace android diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h index af72ab2..f42d022 100644 --- a/services/camera/libcameraservice/Camera2Client.h +++ b/services/camera/libcameraservice/Camera2Client.h @@ -159,6 +159,9 @@ private: // Used with stream IDs static const int NO_STREAM = -1; + template + status_t updateProcessorStream(sp processor, Parameters params); + sp mFrameProcessor; /* Preview/Recording related members */ diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index 5e5bfc2..6f4f4c8 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -680,8 +680,8 @@ status_t Camera3Device::deleteStream(int id) { // CameraDevice semantics require device to already be idle before // deleteStream is called, unlike for createStream. 
if (mStatus != STATUS_IDLE) { - CLOGE("Device not idle"); - return INVALID_OPERATION; + ALOGV("%s: Camera %d: Device not idle", __FUNCTION__, mId); + return -EBUSY; } sp deletedStream; diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.cpp b/services/camera/libcameraservice/camera2/JpegProcessor.cpp index 01d7f9c..f0a13ca 100644 --- a/services/camera/libcameraservice/camera2/JpegProcessor.cpp +++ b/services/camera/libcameraservice/camera2/JpegProcessor.cpp @@ -113,7 +113,11 @@ status_t JpegProcessor::updateStream(const Parameters ¶ms) { ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed", __FUNCTION__, mId, mCaptureStreamId); res = device->deleteStream(mCaptureStreamId); - if (res != OK) { + if (res == -EBUSY) { + ALOGV("%s: Camera %d: Device is busy, call updateStream again " + " after it becomes idle", __FUNCTION__, mId); + return res; + } else if (res != OK) { ALOGE("%s: Camera %d: Unable to delete old output stream " "for capture: %s (%d)", __FUNCTION__, mId, strerror(-res), res); diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp index c36cf87..ab83714 100644 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp @@ -36,6 +36,7 @@ StreamingProcessor::StreamingProcessor(sp client): mDevice(client->getCameraDevice()), mId(client->getCameraId()), mActiveRequest(NONE), + mPaused(false), mPreviewRequestId(Camera2Client::kPreviewRequestIdStart), mPreviewStreamId(NO_STREAM), mRecordingRequestId(Camera2Client::kRecordingRequestIdStart), @@ -419,10 +420,59 @@ status_t StreamingProcessor::startStream(StreamType type, return res; } mActiveRequest = type; + mPaused = false; return OK; } +status_t StreamingProcessor::togglePauseStream(bool pause) { + ATRACE_CALL(); + status_t res; + + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + ALOGV("%s: Camera %d: toggling pause to %d", __FUNCTION__, mId, pause); + + Mutex::Autolock m(mMutex); + + if (mActiveRequest == NONE) { + ALOGE("%s: Camera %d: Can't toggle pause, streaming was not started", + __FUNCTION__, mId); + return INVALID_OPERATION; + } + + if (mPaused == pause) { + return OK; + } + + if (pause) { + res = device->clearStreamingRequest(); + if (res != OK) { + ALOGE("%s: Camera %d: Can't clear stream request: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + } else { + CameraMetadata &request = + (mActiveRequest == PREVIEW) ? 
mPreviewRequest + : mRecordingRequest; + res = device->setStreamingRequest(request); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to set preview request to resume: " + "%s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + } + + mPaused = pause; + return OK; +} + status_t StreamingProcessor::stopStream() { ATRACE_CALL(); status_t res; @@ -443,6 +493,7 @@ status_t StreamingProcessor::stopStream() { } mActiveRequest = NONE; + mPaused = false; return OK; } @@ -662,20 +713,29 @@ status_t StreamingProcessor::dump(int fd, const Vector& /*args*/) { result.append(" Preview request:\n"); write(fd, result.string(), result.size()); mPreviewRequest.dump(fd, 2, 6); + result.clear(); } else { result.append(" Preview request: undefined\n"); - write(fd, result.string(), result.size()); } if (mRecordingRequest.entryCount() != 0) { result = " Recording request:\n"; write(fd, result.string(), result.size()); mRecordingRequest.dump(fd, 2, 6); + result.clear(); } else { result = " Recording request: undefined\n"; - write(fd, result.string(), result.size()); } + const char* streamTypeString[] = { + "none", "preview", "record" + }; + result.append(String8::format(" Active request: %s (paused: %s)\n", + streamTypeString[mActiveRequest], + mPaused ? "yes" : "no")); + + write(fd, result.string(), result.size()); + return OK; } diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.h b/services/camera/libcameraservice/camera2/StreamingProcessor.h index 643114e..281b344 100644 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.h +++ b/services/camera/libcameraservice/camera2/StreamingProcessor.h @@ -65,6 +65,9 @@ class StreamingProcessor: public BufferItemConsumer::FrameAvailableListener { status_t startStream(StreamType type, const Vector &outputStreams); + // Toggle between paused and unpaused. Stream must be started first. 
+ status_t togglePauseStream(bool pause); + status_t stopStream(); // Returns the request ID for the currently streaming request @@ -91,6 +94,7 @@ class StreamingProcessor: public BufferItemConsumer::FrameAvailableListener { int mId; StreamType mActiveRequest; + bool mPaused; // Preview-related members int32_t mPreviewRequestId; diff --git a/services/camera/libcameraservice/camera2/ZslProcessor3.cpp b/services/camera/libcameraservice/camera2/ZslProcessor3.cpp index be1ffeb..defcafc 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor3.cpp +++ b/services/camera/libcameraservice/camera2/ZslProcessor3.cpp @@ -112,7 +112,11 @@ status_t ZslProcessor3::updateStream(const Parameters ¶ms) { "dimensions changed", __FUNCTION__, client->getCameraId(), mZslStreamId); res = device->deleteStream(mZslStreamId); - if (res != OK) { + if (res == -EBUSY) { + ALOGV("%s: Camera %d: Device is busy, call updateStream again " + " after it becomes idle", __FUNCTION__, mId); + return res; + } else if(res != OK) { ALOGE("%s: Camera %d: Unable to delete old output stream " "for ZSL: %s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res); -- cgit v1.1 From 083a08ac82704fdcc37334a4253ff075f703cc87 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 14 May 2013 16:17:12 -0700 Subject: Camera: don't spam RingBufferConsumer logs in eng builds Bug: 8969579 Change-Id: Ia51d4072725754fd3b6ca028232a605885376287 --- services/camera/libcameraservice/gui/RingBufferConsumer.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp index 1b2a717..c7790fc 100644 --- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp +++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp @@ -14,9 +14,6 @@ * limitations under the License. */ -#undef NDEBUG -#include - //#define LOG_NDEBUG 0 #define LOG_TAG "RingBufferConsumer" #define ATRACE_TAG ATRACE_TAG_GRAPHICS @@ -30,6 +27,9 @@ #define BI_LOGW(x, ...) ALOGW("[%s] "x, mName.string(), ##__VA_ARGS__) #define BI_LOGE(x, ...) ALOGE("[%s] "x, mName.string(), ##__VA_ARGS__) +#undef assert +#define assert(x) ALOG_ASSERT((x), #x) + typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem; namespace android { -- cgit v1.1 From 9c910c2d780ce1afa221f963da4d4a307443d9be Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Tue, 14 May 2013 16:05:27 -0700 Subject: Camera3: Fix fast flexible YUV->NV21 path. A U by any other name may smell just as sweet, but the color's pretty awful if you call it a V. Henceforth they never will be UV. 
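A minimal sketch of the chroma ordering the converter has to produce (illustrative only, not the patched file; interleaveChromaNV21 and its parameters are hypothetical names). NV21, i.e. HAL_PIXEL_FORMAT_YCrCb_420_SP, stores a single interleaved chroma plane ordered Cr (V) first and Cb (U) second, and YV12 stores the Cr plane before the Cb plane, so every copy has to start from the Cr pointer:

    #include <cstddef>
    #include <cstdint>

    // Interleave chromaPixels chroma samples into one NV21 plane:
    // V0 U0 V1 U1 ... (Cr before Cb). Constant chromaStep; row gaps
    // are ignored here for brevity.
    static void interleaveChromaNV21(const uint8_t *cbSrc, const uint8_t *crSrc,
                                     uint8_t *vuDst, size_t chromaPixels,
                                     size_t chromaStep) {
        for (size_t i = 0; i < chromaPixels; ++i) {
            *vuDst++ = *crSrc;   // Cr (V) comes first in NV21
            *vuDst++ = *cbSrc;   // then Cb (U)
            crSrc += chromaStep;
            cbSrc += chromaStep;
        }
    }

The row-wise memcpy fast path in the change below is only valid when the source already has that Cr,Cb interleaving, i.e. cbSrc == crSrc + 1 with a chroma step of 2, which is why the source pointers are renamed from uSrc/vSrc to cbSrc/crSrc and the fast-path test now starts from crSrc.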
Bug: 8949720 Change-Id: I4897e93d5a293e576bb1d7ba44d91792b869ad42 --- .../libcameraservice/camera2/CallbackProcessor.cpp | 62 ++++++++++++---------- 1 file changed, 33 insertions(+), 29 deletions(-) diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp index 5e88102..5fa84e0 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp @@ -402,8 +402,8 @@ status_t CallbackProcessor::convertFromFlexibleYuv(int32_t previewFormat, } // Copy/swizzle chroma planes, 4:2:0 subsampling - const uint8_t *uSrc = src.dataCb; - const uint8_t *vSrc = src.dataCr; + const uint8_t *cbSrc = src.dataCb; + const uint8_t *crSrc = src.dataCr; size_t chromaHeight = src.height / 2; size_t chromaWidth = src.width / 2; ssize_t chromaGap = src.chromaStride - @@ -412,59 +412,63 @@ status_t CallbackProcessor::convertFromFlexibleYuv(int32_t previewFormat, if (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP) { // Flexible YUV chroma to NV21 chroma - uint8_t *vuDst = yDst; + uint8_t *crcbDst = yDst; // Check for shortcuts - if (uSrc == vSrc + 1 && src.chromaStep == 2) { + if (cbSrc == crSrc + 1 && src.chromaStep == 2) { + ALOGV("%s: Fast NV21->NV21", __FUNCTION__); // Source has semiplanar CrCb chroma layout, can copy by rows for (size_t row = 0; row < chromaHeight; row++) { - memcpy(vuDst, uSrc, src.width); - vuDst += src.width; - uSrc += src.chromaStride; + memcpy(crcbDst, crSrc, src.width); + crcbDst += src.width; + crSrc += src.chromaStride; } } else { + ALOGV("%s: Generic->NV21", __FUNCTION__); // Generic copy, always works but not very efficient for (size_t row = 0; row < chromaHeight; row++) { for (size_t col = 0; col < chromaWidth; col++) { - *(vuDst++) = *vSrc; - *(vuDst++) = *uSrc; - vSrc += src.chromaStep; - uSrc += src.chromaStep; + *(crcbDst++) = *crSrc; + *(crcbDst++) = *cbSrc; + crSrc += src.chromaStep; + cbSrc += src.chromaStep; } - vSrc += chromaGap; - uSrc += chromaGap; + crSrc += chromaGap; + cbSrc += chromaGap; } } } else { // flexible YUV chroma to YV12 chroma ALOG_ASSERT(previewFormat == HAL_PIXEL_FORMAT_YV12, "Unexpected preview format 0x%x", previewFormat); - uint8_t *vDst = yDst; - uint8_t *uDst = yDst + chromaHeight * dstCStride; + uint8_t *crDst = yDst; + uint8_t *cbDst = yDst + chromaHeight * dstCStride; if (src.chromaStep == 1) { + ALOGV("%s: Fast YV12->YV12", __FUNCTION__); // Source has planar chroma layout, can copy by row for (size_t row = 0; row < chromaHeight; row++) { - memcpy(vDst, vSrc, chromaWidth); - vDst += dstCStride; - vSrc += src.chromaStride; + memcpy(crDst, crSrc, chromaWidth); + crDst += dstCStride; + crSrc += src.chromaStride; } for (size_t row = 0; row < chromaHeight; row++) { - memcpy(uDst, uSrc, chromaWidth); - uDst += dstCStride; - uSrc += src.chromaStride; + memcpy(cbDst, cbSrc, chromaWidth); + cbDst += dstCStride; + cbSrc += src.chromaStride; } } else { + ALOGV("%s: Generic->YV12", __FUNCTION__); // Generic copy, always works but not very efficient for (size_t row = 0; row < chromaHeight; row++) { for (size_t col = 0; col < chromaWidth; col++) { - *(vDst++) = *vSrc; - *(uDst++) = *uSrc; - vSrc += src.chromaStep; - uSrc += src.chromaStep; + *(crDst++) = *crSrc; + *(cbDst++) = *cbSrc; + crSrc += src.chromaStep; + cbSrc += src.chromaStep; } - vSrc += chromaGap; - uSrc += chromaGap; - vDst += dstChromaGap; - uDst += dstChromaGap; + crSrc += chromaGap; + cbSrc += chromaGap; + crDst += dstChromaGap; + cbDst 
+= dstChromaGap; } } } -- cgit v1.1 From 308bcaa44e578279e61be32b572fdb0b11b1e4c7 Mon Sep 17 00:00:00 2001 From: Chong Zhang Date: Fri, 3 May 2013 21:54:17 -0700 Subject: wifi-display: add support for metadata mode on encoder output pass buffer_handle_t from encoder output to HDCP encryptor input Bug: 8968123 Change-Id: Iea8007ce568641e213fd2e3cf6947a6f7a95746c --- include/media/IHDCP.h | 15 +++ include/media/stagefright/ACodec.h | 2 +- media/libmedia/IHDCP.cpp | 54 +++++++++ media/libmediaplayerservice/HDCP.cpp | 18 +++ media/libmediaplayerservice/HDCP.h | 5 + media/libstagefright/ACodec.cpp | 54 +++++++-- media/libstagefright/wifi-display/MediaSender.cpp | 37 ++++++- media/libstagefright/wifi-display/VideoFormats.cpp | 121 ++++++++++++++++++++- media/libstagefright/wifi-display/VideoFormats.h | 20 +++- .../wifi-display/source/Converter.cpp | 51 ++++++++- .../libstagefright/wifi-display/source/Converter.h | 1 + .../wifi-display/source/PlaybackSession.cpp | 44 ++++++-- .../wifi-display/source/PlaybackSession.h | 15 ++- .../wifi-display/source/TSPacketizer.cpp | 24 +++- .../wifi-display/source/WifiDisplaySource.cpp | 19 +++- .../wifi-display/source/WifiDisplaySource.h | 2 + 16 files changed, 432 insertions(+), 50 deletions(-) diff --git a/include/media/IHDCP.h b/include/media/IHDCP.h index 6d27b18..54fefa3 100644 --- a/include/media/IHDCP.h +++ b/include/media/IHDCP.h @@ -17,6 +17,7 @@ #include #include #include +#include namespace android { @@ -59,6 +60,20 @@ struct IHDCP : public IInterface { const void *inData, size_t size, uint32_t streamCTR, uint64_t *outInputCTR, void *outData) = 0; + // Encrypt data according to the HDCP spec. "size" bytes of data starting + // at location "offset" are available in "buffer" (buffer handle). "size" + // may not be a multiple of 128 bits (16 bytes). An equal number of + // encrypted bytes should be written to the buffer at "outData" (virtual + // address). This operation is to be synchronous, i.e. this call does not + // return until outData contains size bytes of encrypted data. + // streamCTR will be assigned by the caller (to 0 for the first PES stream, + // 1 for the second and so on) + // inputCTR _will_be_maintained_by_the_callee_ for each PES stream. + virtual status_t encryptNative( + const sp &graphicBuffer, + size_t offset, size_t size, uint32_t streamCTR, + uint64_t *outInputCTR, void *outData) = 0; + // DECRYPTION only: // Decrypt data according to the HDCP spec. 
// "size" bytes of encrypted data are available at "inData" diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h index 34bae29..391c089 100644 --- a/include/media/stagefright/ACodec.h +++ b/include/media/stagefright/ACodec.h @@ -182,7 +182,7 @@ private: bool mSentFormat; bool mIsEncoder; - + bool mUseMetadataOnEncoderOutput; bool mShutdownInProgress; // If "mKeepComponentAllocated" we only transition back to Loaded state diff --git a/media/libmedia/IHDCP.cpp b/media/libmedia/IHDCP.cpp index f13addc..a46ff91 100644 --- a/media/libmedia/IHDCP.cpp +++ b/media/libmedia/IHDCP.cpp @@ -31,6 +31,7 @@ enum { HDCP_INIT_ASYNC, HDCP_SHUTDOWN_ASYNC, HDCP_ENCRYPT, + HDCP_ENCRYPT_NATIVE, HDCP_DECRYPT, }; @@ -108,6 +109,31 @@ struct BpHDCP : public BpInterface { return err; } + virtual status_t encryptNative( + const sp &graphicBuffer, + size_t offset, size_t size, uint32_t streamCTR, + uint64_t *outInputCTR, void *outData) { + Parcel data, reply; + data.writeInterfaceToken(IHDCP::getInterfaceDescriptor()); + data.write(*graphicBuffer); + data.writeInt32(offset); + data.writeInt32(size); + data.writeInt32(streamCTR); + remote()->transact(HDCP_ENCRYPT_NATIVE, data, &reply); + + status_t err = reply.readInt32(); + + if (err != OK) { + *outInputCTR = 0; + return err; + } + + *outInputCTR = reply.readInt64(); + reply.read(outData, size); + + return err; + } + virtual status_t decrypt( const void *inData, size_t size, uint32_t streamCTR, uint64_t inputCTR, @@ -222,6 +248,34 @@ status_t BnHDCP::onTransact( return OK; } + case HDCP_ENCRYPT_NATIVE: + { + CHECK_INTERFACE(IHDCP, data, reply); + + sp graphicBuffer = new GraphicBuffer(); + data.read(*graphicBuffer); + size_t offset = data.readInt32(); + size_t size = data.readInt32(); + uint32_t streamCTR = data.readInt32(); + void *outData = malloc(size); + uint64_t inputCTR; + + status_t err = encryptNative(graphicBuffer, offset, size, + streamCTR, &inputCTR, outData); + + reply->writeInt32(err); + + if (err == OK) { + reply->writeInt64(inputCTR); + reply->write(outData, size); + } + + free(outData); + outData = NULL; + + return OK; + } + case HDCP_DECRYPT: { size_t size = data.readInt32(); diff --git a/media/libmediaplayerservice/HDCP.cpp b/media/libmediaplayerservice/HDCP.cpp index 469a02e..8a3188c 100644 --- a/media/libmediaplayerservice/HDCP.cpp +++ b/media/libmediaplayerservice/HDCP.cpp @@ -116,6 +116,24 @@ status_t HDCP::encrypt( return mHDCPModule->encrypt(inData, size, streamCTR, outInputCTR, outData); } +status_t HDCP::encryptNative( + const sp &graphicBuffer, + size_t offset, size_t size, uint32_t streamCTR, + uint64_t *outInputCTR, void *outData) { + Mutex::Autolock autoLock(mLock); + + CHECK(mIsEncryptionModule); + + if (mHDCPModule == NULL) { + *outInputCTR = 0; + + return NO_INIT; + } + + return mHDCPModule->encryptNative(graphicBuffer->handle, + offset, size, streamCTR, outInputCTR, outData); +} + status_t HDCP::decrypt( const void *inData, size_t size, uint32_t streamCTR, uint64_t outInputCTR, void *outData) { diff --git a/media/libmediaplayerservice/HDCP.h b/media/libmediaplayerservice/HDCP.h index 42e6467..c60c2e0 100644 --- a/media/libmediaplayerservice/HDCP.h +++ b/media/libmediaplayerservice/HDCP.h @@ -35,6 +35,11 @@ struct HDCP : public BnHDCP { const void *inData, size_t size, uint32_t streamCTR, uint64_t *outInputCTR, void *outData); + virtual status_t encryptNative( + const sp &graphicBuffer, + size_t offset, size_t size, uint32_t streamCTR, + uint64_t *outInputCTR, void *outData); + virtual status_t decrypt( 
const void *inData, size_t size, uint32_t streamCTR, uint64_t outInputCTR, void *outData); diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 64e3885..d3ac734 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -359,6 +359,7 @@ ACodec::ACodec() mNode(NULL), mSentFormat(false), mIsEncoder(false), + mUseMetadataOnEncoderOutput(false), mShutdownInProgress(false), mEncoderDelay(0), mEncoderPadding(0), @@ -483,7 +484,8 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { ? OMXCodec::kRequiresAllocateBufferOnInputPorts : OMXCodec::kRequiresAllocateBufferOnOutputPorts; - if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { + if ((portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) + || mUseMetadataOnEncoderOutput) { mem.clear(); void *ptr; @@ -491,7 +493,10 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { mNode, portIndex, def.nBufferSize, &info.mBufferID, &ptr); - info.mData = new ABuffer(ptr, def.nBufferSize); + int32_t bufSize = mUseMetadataOnEncoderOutput ? + (4 + sizeof(buffer_handle_t)) : def.nBufferSize; + + info.mData = new ABuffer(ptr, bufSize); } else if (mQuirks & requiresAllocateBufferBit) { err = mOMX->allocateBufferWithBackup( mNode, portIndex, mem, &info.mBufferID); @@ -912,14 +917,14 @@ status_t ACodec::configureCodec( err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE); if (err != OK) { - ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", - mComponentName.c_str(), err); + ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", + mComponentName.c_str(), err); - return err; - } - } + return err; + } + } - int32_t prependSPSPPS; + int32_t prependSPSPPS = 0; if (encoder && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) && prependSPSPPS != 0) { @@ -946,7 +951,27 @@ status_t ACodec::configureCodec( } } - if (!strncasecmp(mime, "video/", 6)) { + // Only enable metadata mode on encoder output if encoder can prepend + // sps/pps to idr frames, since in metadata mode the bitstream is in an + // opaque handle, to which we don't have access. 
+ int32_t video = !strncasecmp(mime, "video/", 6); + if (encoder && video) { + OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS + && msg->findInt32("store-metadata-in-buffers-output", &storeMeta) + && storeMeta != 0); + + err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable); + + if (err != OK) { + ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", + mComponentName.c_str(), err); + mUseMetadataOnEncoderOutput = 0; + } else { + mUseMetadataOnEncoderOutput = enable; + } + } + + if (video) { if (encoder) { err = setupVideoEncoder(mime, msg); } else { @@ -3061,7 +3086,15 @@ bool ACodec::BaseState::onOMXFillBufferDone( mCodec->sendFormatChange(); } - info->mData->setRange(rangeOffset, rangeLength); + if (mCodec->mUseMetadataOnEncoderOutput) { + native_handle_t* handle = + *(native_handle_t**)(info->mData->data() + 4); + info->mData->meta()->setPointer("handle", handle); + info->mData->meta()->setInt32("rangeOffset", rangeOffset); + info->mData->meta()->setInt32("rangeLength", rangeLength); + } else { + info->mData->setRange(rangeOffset, rangeLength); + } #if 0 if (mCodec->mNativeWindow == NULL) { if (IsIDR(info->mData)) { @@ -3215,6 +3248,7 @@ void ACodec::UninitializedState::stateEntered() { mCodec->mOMX.clear(); mCodec->mQuirks = 0; mCodec->mFlags = 0; + mCodec->mUseMetadataOnEncoderOutput = 0; mCodec->mComponentName.clear(); } diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp index 33af66d..a230cd8 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -27,9 +27,11 @@ #include "include/avc_utils.h" #include +#include #include #include #include +#include namespace android { @@ -408,11 +410,36 @@ status_t MediaSender::packetizeAccessUnit( info.mPacketizerTrackIndex, accessUnit); } - status_t err = mHDCP->encrypt( - accessUnit->data(), accessUnit->size(), - trackIndex /* streamCTR */, - &inputCTR, - accessUnit->data()); + status_t err; + native_handle_t* handle; + if (accessUnit->meta()->findPointer("handle", (void**)&handle) + && handle != NULL) { + int32_t rangeLength, rangeOffset; + sp notify; + CHECK(accessUnit->meta()->findInt32("rangeOffset", &rangeOffset)); + CHECK(accessUnit->meta()->findInt32("rangeLength", &rangeLength)); + CHECK(accessUnit->meta()->findMessage("notify", ¬ify) + && notify != NULL); + CHECK_GE(accessUnit->size(), rangeLength); + + sp grbuf(new GraphicBuffer( + rangeOffset + rangeLength, 1, HAL_PIXEL_FORMAT_Y8, + GRALLOC_USAGE_HW_VIDEO_ENCODER, rangeOffset + rangeLength, + handle, false)); + + err = mHDCP->encryptNative( + grbuf, rangeOffset, rangeLength, + trackIndex /* streamCTR */, + &inputCTR, + accessUnit->data()); + notify->post(); + } else { + err = mHDCP->encrypt( + accessUnit->data(), accessUnit->size(), + trackIndex /* streamCTR */, + &inputCTR, + accessUnit->data()); + } if (err != OK) { ALOGE("Failed to HDCP-encrypt media data (err %d)", diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp index 458b163..c368c38 100644 --- a/media/libstagefright/wifi-display/VideoFormats.cpp +++ b/media/libstagefright/wifi-display/VideoFormats.cpp @@ -24,7 +24,8 @@ namespace android { -VideoFormats::config_t VideoFormats::mConfigs[][32] = { +// static +const VideoFormats::config_t VideoFormats::mResolutionTable[][32] = { { // CEA Resolutions { 640, 480, 60, false, 0, 0}, @@ -133,6 +134,8 @@ VideoFormats::config_t VideoFormats::mConfigs[][32] = { }; 
VideoFormats::VideoFormats() { + memcpy(mConfigs, mResolutionTable, sizeof(mConfigs)); + for (size_t i = 0; i < kNumResolutionTypes; ++i) { mResolutionEnabled[i] = 0; } @@ -182,11 +185,56 @@ void VideoFormats::setResolutionEnabled( if (enabled) { mResolutionEnabled[type] |= (1ul << index); + mConfigs[type][index].profile = (1ul << PROFILE_CBP); + mConfigs[type][index].level = (1ul << LEVEL_31); } else { mResolutionEnabled[type] &= ~(1ul << index); + mConfigs[type][index].profile = 0; + mConfigs[type][index].level = 0; } } +void VideoFormats::setProfileLevel( + ResolutionType type, size_t index, + ProfileType profile, LevelType level) { + CHECK_LT(type, kNumResolutionTypes); + CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL)); + + mConfigs[type][index].profile = (1ul << profile); + mConfigs[type][index].level = (1ul << level); +} + +void VideoFormats::getProfileLevel( + ResolutionType type, size_t index, + ProfileType *profile, LevelType *level) const{ + CHECK_LT(type, kNumResolutionTypes); + CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL)); + + int i, bestProfile = -1, bestLevel = -1; + + for (i = 0; i < kNumProfileTypes; ++i) { + if (mConfigs[type][index].profile & (1ul << i)) { + bestProfile = i; + } + } + + for (i = 0; i < kNumLevelTypes; ++i) { + if (mConfigs[type][index].level & (1ul << i)) { + bestLevel = i; + } + } + + if (bestProfile == -1 || bestLevel == -1) { + ALOGE("Profile or level not set for resolution type %d, index %d", + type, index); + bestProfile = PROFILE_CBP; + bestLevel = LEVEL_31; + } + + *profile = (ProfileType) bestProfile; + *level = (LevelType) bestLevel; +} + bool VideoFormats::isResolutionEnabled( ResolutionType type, size_t index) const { CHECK_LT(type, kNumResolutionTypes); @@ -207,7 +255,7 @@ bool VideoFormats::GetConfiguration( return false; } - const config_t *config = &mConfigs[type][index]; + const config_t *config = &mResolutionTable[type][index]; if (config->width == 0) { return false; @@ -251,9 +299,12 @@ bool VideoFormats::parseH264Codec(const char *spec) { if (res[i] & (1ul << j)){ mResolutionEnabled[i] |= (1ul << j); if (profile > mConfigs[i][j].profile) { + // prefer higher profile (even if level is lower) mConfigs[i][j].profile = profile; - if (level > mConfigs[i][j].level) - mConfigs[i][j].level = level; + mConfigs[i][j].level = level; + } else if (profile == mConfigs[i][j].profile && + level > mConfigs[i][j].level) { + mConfigs[i][j].level = level; } } } @@ -262,9 +313,51 @@ bool VideoFormats::parseH264Codec(const char *spec) { return true; } +// static +bool VideoFormats::GetProfileLevel( + ProfileType profile, LevelType level, unsigned *profileIdc, + unsigned *levelIdc, unsigned *constraintSet) { + CHECK_LT(profile, kNumProfileTypes); + CHECK_LT(level, kNumLevelTypes); + + static const unsigned kProfileIDC[kNumProfileTypes] = { + 66, // PROFILE_CBP + 100, // PROFILE_CHP + }; + + static const unsigned kLevelIDC[kNumLevelTypes] = { + 31, // LEVEL_31 + 32, // LEVEL_32 + 40, // LEVEL_40 + 41, // LEVEL_41 + 42, // LEVEL_42 + }; + + static const unsigned kConstraintSet[kNumProfileTypes] = { + 0xc0, // PROFILE_CBP + 0x0c, // PROFILE_CHP + }; + + if (profileIdc) { + *profileIdc = kProfileIDC[profile]; + } + + if (levelIdc) { + *levelIdc = kLevelIDC[level]; + } + + if (constraintSet) { + *constraintSet = kConstraintSet[profile]; + } + + return true; +} + bool VideoFormats::parseFormatSpec(const char *spec) { CHECK_EQ(kNumResolutionTypes, 3); + disableAll(); + unsigned native, dummy; unsigned res[3]; size_t size = strlen(spec); 
@@ -320,8 +413,10 @@ AString VideoFormats::getFormatSpec(bool forM4Message) const { // max-vres (none or 2 byte) return StringPrintf( - "%02x 00 02 02 %08x %08x %08x 00 0000 0000 00 none none", + "%02x 00 %02x %02x %08x %08x %08x 00 0000 0000 00 none none", forM4Message ? 0x00 : ((mNativeIndex << 3) | mNativeType), + mConfigs[mNativeType][mNativeIndex].profile, + mConfigs[mNativeType][mNativeIndex].level, mResolutionEnabled[0], mResolutionEnabled[1], mResolutionEnabled[2]); @@ -332,7 +427,9 @@ bool VideoFormats::PickBestFormat( const VideoFormats &sinkSupported, const VideoFormats &sourceSupported, ResolutionType *chosenType, - size_t *chosenIndex) { + size_t *chosenIndex, + ProfileType *chosenProfile, + LevelType *chosenLevel) { #if 0 // Support for the native format is a great idea, the spec includes // these features, but nobody supports it and the tests don't validate it. @@ -412,6 +509,18 @@ bool VideoFormats::PickBestFormat( *chosenType = (ResolutionType)bestType; *chosenIndex = bestIndex; + // Pick the best profile/level supported by both sink and source. + ProfileType srcProfile, sinkProfile; + LevelType srcLevel, sinkLevel; + sourceSupported.getProfileLevel( + (ResolutionType)bestType, bestIndex, + &srcProfile, &srcLevel); + sinkSupported.getProfileLevel( + (ResolutionType)bestType, bestIndex, + &sinkProfile, &sinkLevel); + *chosenProfile = srcProfile < sinkProfile ? srcProfile : sinkProfile; + *chosenLevel = srcLevel < sinkLevel ? srcLevel : sinkLevel; + return true; } diff --git a/media/libstagefright/wifi-display/VideoFormats.h b/media/libstagefright/wifi-display/VideoFormats.h index 01de246..b918652 100644 --- a/media/libstagefright/wifi-display/VideoFormats.h +++ b/media/libstagefright/wifi-display/VideoFormats.h @@ -75,11 +75,24 @@ struct VideoFormats { bool isResolutionEnabled(ResolutionType type, size_t index) const; + void setProfileLevel( + ResolutionType type, size_t index, + ProfileType profile, LevelType level); + + void getProfileLevel( + ResolutionType type, size_t index, + ProfileType *profile, LevelType *level) const; + static bool GetConfiguration( ResolutionType type, size_t index, size_t *width, size_t *height, size_t *framesPerSecond, bool *interlaced); + static bool GetProfileLevel( + ProfileType profile, LevelType level, + unsigned *profileIdc, unsigned *levelIdc, + unsigned *constraintSet); + bool parseFormatSpec(const char *spec); AString getFormatSpec(bool forM4Message = false) const; @@ -87,7 +100,9 @@ struct VideoFormats { const VideoFormats &sinkSupported, const VideoFormats &sourceSupported, ResolutionType *chosenType, - size_t *chosenIndex); + size_t *chosenIndex, + ProfileType *chosenProfile, + LevelType *chosenLevel); private: bool parseH264Codec(const char *spec); @@ -95,7 +110,8 @@ private: size_t mNativeIndex; uint32_t mResolutionEnabled[kNumResolutionTypes]; - static config_t mConfigs[kNumResolutionTypes][32]; + static const config_t mResolutionTable[kNumResolutionTypes][32]; + config_t mConfigs[kNumResolutionTypes][32]; DISALLOW_EVIL_CONSTRUCTORS(VideoFormats); }; diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index 5344623..e62505d 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -438,6 +438,17 @@ void Converter::onMessageReceived(const sp &msg) { break; } + case kWhatReleaseOutputBuffer: + { + if (mEncoder != NULL) { + size_t bufferIndex; + CHECK(msg->findInt32("bufferIndex", 
(int32_t*)&bufferIndex)); + CHECK(bufferIndex < mEncoderOutputBuffers.size()); + mEncoder->releaseOutputBuffer(bufferIndex); + } + break; + } + default: TRESPASS(); } @@ -645,6 +656,7 @@ status_t Converter::doMoreWork() { size_t size; int64_t timeUs; uint32_t flags; + native_handle_t* handle = NULL; err = mEncoder->dequeueOutputBuffer( &bufferIndex, &offset, &size, &timeUs, &flags); @@ -667,18 +679,43 @@ status_t Converter::doMoreWork() { notify->setInt32("what", kWhatEOS); notify->post(); } else { - sp buffer = new ABuffer(size); + sp buffer; + sp outbuf = mEncoderOutputBuffers.itemAt(bufferIndex); + + if (outbuf->meta()->findPointer("handle", (void**)&handle) && + handle != NULL) { + int32_t rangeLength, rangeOffset; + CHECK(outbuf->meta()->findInt32("rangeOffset", &rangeOffset)); + CHECK(outbuf->meta()->findInt32("rangeLength", &rangeLength)); + outbuf->meta()->setPointer("handle", NULL); + + // MediaSender will post the following message when HDCP + // is done, to release the output buffer back to encoder. + sp notify(new AMessage( + kWhatReleaseOutputBuffer, id())); + notify->setInt32("bufferIndex", bufferIndex); + + buffer = new ABuffer( + rangeLength > (int32_t)size ? rangeLength : size); + buffer->meta()->setPointer("handle", handle); + buffer->meta()->setInt32("rangeOffset", rangeOffset); + buffer->meta()->setInt32("rangeLength", rangeLength); + buffer->meta()->setMessage("notify", notify); + } else { + buffer = new ABuffer(size); + } + buffer->meta()->setInt64("timeUs", timeUs); ALOGV("[%s] time %lld us (%.2f secs)", mIsVideo ? "video" : "audio", timeUs, timeUs / 1E6); - memcpy(buffer->data(), - mEncoderOutputBuffers.itemAt(bufferIndex)->base() + offset, - size); + memcpy(buffer->data(), outbuf->base() + offset, size); if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) { - mOutputFormat->setBuffer("csd-0", buffer); + if (!handle) { + mOutputFormat->setBuffer("csd-0", buffer); + } } else { sp notify = mNotify->dup(); notify->setInt32("what", kWhatAccessUnit); @@ -687,7 +724,9 @@ status_t Converter::doMoreWork() { } } - mEncoder->releaseOutputBuffer(bufferIndex); + if (!handle) { + mEncoder->releaseOutputBuffer(bufferIndex); + } if (flags & MediaCodec::BUFFER_FLAG_EOS) { break; diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h index ba297c4..fceef55 100644 --- a/media/libstagefright/wifi-display/source/Converter.h +++ b/media/libstagefright/wifi-display/source/Converter.h @@ -66,6 +66,7 @@ struct Converter : public AHandler { kWhatMediaPullerNotify, kWhatEncoderActivity, kWhatDropAFrame, + kWhatReleaseOutputBuffer, }; void shutdownAsync(); diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index cacfcca..7f0ba96 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -378,7 +378,9 @@ status_t WifiDisplaySource::PlaybackSession::init( bool usePCMAudio, bool enableVideo, VideoFormats::ResolutionType videoResolutionType, - size_t videoResolutionIndex) { + size_t videoResolutionIndex, + VideoFormats::ProfileType videoProfileType, + VideoFormats::LevelType videoLevelType) { sp notify = new AMessage(kWhatMediaSenderNotify, id()); mMediaSender = new MediaSender(mNetSession, notify); looper()->registerHandler(mMediaSender); @@ -390,7 +392,9 @@ status_t WifiDisplaySource::PlaybackSession::init( usePCMAudio, enableVideo, videoResolutionType, - 
videoResolutionIndex); + videoResolutionIndex, + videoProfileType, + videoLevelType); if (err == OK) { err = mMediaSender->initAsync( @@ -870,7 +874,9 @@ status_t WifiDisplaySource::PlaybackSession::setupPacketizer( bool usePCMAudio, bool enableVideo, VideoFormats::ResolutionType videoResolutionType, - size_t videoResolutionIndex) { + size_t videoResolutionIndex, + VideoFormats::ProfileType videoProfileType, + VideoFormats::LevelType videoLevelType) { CHECK(enableAudio || enableVideo); if (!mMediaPath.empty()) { @@ -879,7 +885,8 @@ status_t WifiDisplaySource::PlaybackSession::setupPacketizer( if (enableVideo) { status_t err = addVideoSource( - videoResolutionType, videoResolutionIndex); + videoResolutionType, videoResolutionIndex, videoProfileType, + videoLevelType); if (err != OK) { return err; @@ -895,9 +902,13 @@ status_t WifiDisplaySource::PlaybackSession::setupPacketizer( status_t WifiDisplaySource::PlaybackSession::addSource( bool isVideo, const sp &source, bool isRepeaterSource, - bool usePCMAudio, size_t *numInputBuffers) { + bool usePCMAudio, unsigned profileIdc, unsigned levelIdc, + unsigned constraintSet, size_t *numInputBuffers) { CHECK(!usePCMAudio || !isVideo); CHECK(!isRepeaterSource || isVideo); + CHECK(!profileIdc || isVideo); + CHECK(!levelIdc || isVideo); + CHECK(!constraintSet || isVideo); sp pullLooper = new ALooper; pullLooper->setName("pull_looper"); @@ -927,9 +938,12 @@ status_t WifiDisplaySource::PlaybackSession::addSource( if (isVideo) { format->setInt32("store-metadata-in-buffers", true); - + format->setInt32("store-metadata-in-buffers-output", (mHDCP != NULL)); format->setInt32( "color-format", OMX_COLOR_FormatAndroidOpaque); + format->setInt32("profile-idc", profileIdc); + format->setInt32("level-idc", levelIdc); + format->setInt32("constraint-set", constraintSet); } notify = new AMessage(kWhatConverterNotify, id()); @@ -990,7 +1004,9 @@ status_t WifiDisplaySource::PlaybackSession::addSource( status_t WifiDisplaySource::PlaybackSession::addVideoSource( VideoFormats::ResolutionType videoResolutionType, - size_t videoResolutionIndex) { + size_t videoResolutionIndex, + VideoFormats::ProfileType videoProfileType, + VideoFormats::LevelType videoLevelType) { size_t width, height, framesPerSecond; bool interlaced; CHECK(VideoFormats::GetConfiguration( @@ -1001,6 +1017,14 @@ status_t WifiDisplaySource::PlaybackSession::addVideoSource( &framesPerSecond, &interlaced)); + unsigned profileIdc, levelIdc, constraintSet; + CHECK(VideoFormats::GetProfileLevel( + videoProfileType, + videoLevelType, + &profileIdc, + &levelIdc, + &constraintSet)); + sp source = new SurfaceMediaSource(width, height); source->setUseAbsoluteTimestamps(); @@ -1011,7 +1035,8 @@ status_t WifiDisplaySource::PlaybackSession::addVideoSource( size_t numInputBuffers; status_t err = addSource( true /* isVideo */, videoSource, true /* isRepeaterSource */, - false /* usePCMAudio */, &numInputBuffers); + false /* usePCMAudio */, profileIdc, levelIdc, constraintSet, + &numInputBuffers); if (err != OK) { return err; @@ -1034,7 +1059,8 @@ status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) { if (audioSource->initCheck() == OK) { return addSource( false /* isVideo */, audioSource, false /* isRepeaterSource */, - usePCMAudio, NULL /* numInputBuffers */); + usePCMAudio, 0 /* profileIdc */, 0 /* levelIdc */, + 0 /* constraintSet */, NULL /* numInputBuffers */); } ALOGW("Unable to instantiate audio source"); diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h 
b/media/libstagefright/wifi-display/source/PlaybackSession.h index 39086a1..5c8ee94 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.h +++ b/media/libstagefright/wifi-display/source/PlaybackSession.h @@ -53,7 +53,9 @@ struct WifiDisplaySource::PlaybackSession : public AHandler { bool usePCMAudio, bool enableVideo, VideoFormats::ResolutionType videoResolutionType, - size_t videoResolutionIndex); + size_t videoResolutionIndex, + VideoFormats::ProfileType videoProfileType, + VideoFormats::LevelType videoLevelType); void destroyAsync(); @@ -130,18 +132,25 @@ private: bool usePCMAudio, bool enableVideo, VideoFormats::ResolutionType videoResolutionType, - size_t videoResolutionIndex); + size_t videoResolutionIndex, + VideoFormats::ProfileType videoProfileType, + VideoFormats::LevelType videoLevelType); status_t addSource( bool isVideo, const sp &source, bool isRepeaterSource, bool usePCMAudio, + unsigned profileIdc, + unsigned levelIdc, + unsigned contraintSet, size_t *numInputBuffers); status_t addVideoSource( VideoFormats::ResolutionType videoResolutionType, - size_t videoResolutionIndex); + size_t videoResolutionIndex, + VideoFormats::ProfileType videoProfileType, + VideoFormats::LevelType videoLevelType); status_t addAudioSource(bool usePCMAudio); diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp index 2c4a373..c674700 100644 --- a/media/libstagefright/wifi-display/source/TSPacketizer.cpp +++ b/media/libstagefright/wifi-display/source/TSPacketizer.cpp @@ -261,12 +261,24 @@ void TSPacketizer::Track::finalize() { data[0] = 40; // descriptor_tag data[1] = 4; // descriptor_length - CHECK_GE(mCSD.size(), 1u); - const sp &sps = mCSD.itemAt(0); - CHECK(!memcmp("\x00\x00\x00\x01", sps->data(), 4)); - CHECK_GE(sps->size(), 7u); - // profile_idc, constraint_set*, level_idc - memcpy(&data[2], sps->data() + 4, 3); + if (mCSD.size() > 0) { + CHECK_GE(mCSD.size(), 1u); + const sp &sps = mCSD.itemAt(0); + CHECK(!memcmp("\x00\x00\x00\x01", sps->data(), 4)); + CHECK_GE(sps->size(), 7u); + // profile_idc, constraint_set*, level_idc + memcpy(&data[2], sps->data() + 4, 3); + } else { + int32_t profileIdc, levelIdc, constraintSet; + CHECK(mFormat->findInt32("profile-idc", &profileIdc)); + CHECK(mFormat->findInt32("level-idc", &levelIdc)); + CHECK(mFormat->findInt32("constraint-set", &constraintSet)); + CHECK_GE(profileIdc, 0u); + CHECK_GE(levelIdc, 0u); + data[2] = profileIdc; // profile_idc + data[3] = constraintSet; // constraint_set* + data[4] = levelIdc; // level_idc + } // AVC_still_present=0, AVC_24_hour_picture_flag=0, reserved data[5] = 0x3f; diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index b2cc66c..0b714f0 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -74,6 +74,11 @@ WifiDisplaySource::WifiDisplaySource( mSupportedSourceVideoFormats.setNativeResolution( VideoFormats::RESOLUTION_CEA, 5); // 1280x720 p30 + + mSupportedSourceVideoFormats.setProfileLevel( + VideoFormats::RESOLUTION_CEA, 5, + VideoFormats::PROFILE_CHP, // Constrained High Profile + VideoFormats::LEVEL_32); // Level 3.2 } WifiDisplaySource::~WifiDisplaySource() { @@ -631,6 +636,9 @@ status_t WifiDisplaySource::sendM4(int32_t sessionID) { chosenVideoFormat.disableAll(); chosenVideoFormat.setNativeResolution( mChosenVideoResolutionType, 
mChosenVideoResolutionIndex); + chosenVideoFormat.setProfileLevel( + mChosenVideoResolutionType, mChosenVideoResolutionIndex, + mChosenVideoProfile, mChosenVideoLevel); body.append(chosenVideoFormat.getFormatSpec(true /* forM4Message */)); body.append("\r\n"); @@ -859,7 +867,9 @@ status_t WifiDisplaySource::onReceiveM3Response( mSupportedSinkVideoFormats, mSupportedSourceVideoFormats, &mChosenVideoResolutionType, - &mChosenVideoResolutionIndex)) { + &mChosenVideoResolutionIndex, + &mChosenVideoProfile, + &mChosenVideoLevel)) { ALOGE("Sink and source share no commonly supported video " "formats."); @@ -878,6 +888,9 @@ status_t WifiDisplaySource::onReceiveM3Response( ALOGI("Picked video resolution %u x %u %c%u", width, height, interlaced ? 'i' : 'p', framesPerSecond); + + ALOGI("Picked AVC profile %d, level %d", + mChosenVideoProfile, mChosenVideoLevel); } else { ALOGI("Sink doesn't support video at all."); } @@ -1271,7 +1284,9 @@ status_t WifiDisplaySource::onSetupRequest( mUsingPCMAudio, mSinkSupportsVideo, mChosenVideoResolutionType, - mChosenVideoResolutionIndex); + mChosenVideoResolutionIndex, + mChosenVideoProfile, + mChosenVideoLevel); if (err != OK) { looper()->unregisterHandler(playbackSession->id()); diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index 3efa0b4..64186fc 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ -134,6 +134,8 @@ private: VideoFormats::ResolutionType mChosenVideoResolutionType; size_t mChosenVideoResolutionIndex; + VideoFormats::ProfileType mChosenVideoProfile; + VideoFormats::LevelType mChosenVideoLevel; bool mSinkSupportsAudio; -- cgit v1.1 From 61d404efeaaf3ea1b615c160b843ac9f274d0018 Mon Sep 17 00:00:00 2001 From: Dianne Hackborn Date: Mon, 20 May 2013 11:22:20 -0700 Subject: Update to supply correct name for new power manager app ops. Change-Id: Ia686319509d98a4f467479ea494e8a54f2c1e238 --- services/audioflinger/Threads.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 539bb4f..7e9550c 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -495,7 +495,8 @@ void AudioFlinger::ThreadBase::acquireWakeLock_l() sp binder = new BBinder(); status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK, binder, - String16(mName)); + String16(mName), + String16("media")); if (status == NO_ERROR) { mWakeLockToken = binder; } -- cgit v1.1 From a02eae5e911f3bdc3f84f39c0ef223261b646128 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Wed, 22 May 2013 14:36:06 -0700 Subject: stagefright: SoftVP8: Handle EOS flag on frames with content. SoftVP8 decoder ignored frame content if EOS flag was set on input frame. Now, decode the frame first, unless it is empty. 
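In outline, the new control flow looks like the following self-contained sketch (Buffer, kFlagEOS and handleInput are illustrative stand-ins, not the OMX types used in SoftVPX.cpp): a non-empty input carrying EOS is decoded first and the EOS flag is propagated on the corresponding output; only a genuinely empty EOS input is answered immediately with an empty EOS output.

    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-in for the OMX buffer header fields involved.
    struct Buffer {
        uint32_t flags;      // bit set: end of stream
        size_t   filledLen;  // payload size in bytes
    };

    static const uint32_t kFlagEOS = 1u << 0;

    // Mirrors the new logic: returns true once the EOS output has been emitted.
    bool handleInput(const Buffer &in, Buffer *out) {
        const bool eosSeen = (in.flags & kFlagEOS) != 0;

        if (eosSeen && in.filledLen == 0) {
            // Empty EOS input: nothing to decode, emit an empty EOS output now.
            out->filledLen = 0;
            out->flags = kFlagEOS;
            return true;
        }

        // Non-empty input (EOS or not): decode it first (decode step elided),
        // then tag the output with EOS only if the input carried it.
        out->filledLen = in.filledLen;   // stand-in for the decoded frame size
        out->flags = eosSeen ? kFlagEOS : 0;
        return eosSeen;
    }

    int main() {
        Buffer in = { kFlagEOS, 4096 };
        Buffer out = { 0, 0 };
        handleInput(in, &out);
        std::printf("out: flags=0x%x filledLen=%zu\n",
                    (unsigned)out.flags, out.filledLen);
        return 0;
    }

The previous behaviour corresponded to taking the early-return branch whenever EOS was set, which dropped the payload of the final frame.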
Change-Id: Id105a9eb86103a61390af3de60cae2507028e2d1 Signed-off-by: Lajos Molnar Bug: 9091495 --- media/libstagefright/codecs/on2/dec/SoftVPX.cpp | 28 ++++++++++++++----------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp index a400b4c..866e5b0 100644 --- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp @@ -226,6 +226,7 @@ void SoftVPX::onQueueFilled(OMX_U32 portIndex) { List &inQueue = getPortQueue(0); List &outQueue = getPortQueue(1); + bool EOSseen = false; while (!inQueue.empty() && !outQueue.empty()) { BufferInfo *inInfo = *inQueue.begin(); @@ -235,17 +236,20 @@ void SoftVPX::onQueueFilled(OMX_U32 portIndex) { OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { - inQueue.erase(inQueue.begin()); - inInfo->mOwnedByUs = false; - notifyEmptyBufferDone(inHeader); - - outHeader->nFilledLen = 0; - outHeader->nFlags = OMX_BUFFERFLAG_EOS; - - outQueue.erase(outQueue.begin()); - outInfo->mOwnedByUs = false; - notifyFillBufferDone(outHeader); - return; + EOSseen = true; + if (inHeader->nFilledLen == 0) { + inQueue.erase(inQueue.begin()); + inInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inHeader); + + outHeader->nFilledLen = 0; + outHeader->nFlags = OMX_BUFFERFLAG_EOS; + + outQueue.erase(outQueue.begin()); + outInfo->mOwnedByUs = false; + notifyFillBufferDone(outHeader); + return; + } } if (vpx_codec_decode( @@ -282,7 +286,7 @@ void SoftVPX::onQueueFilled(OMX_U32 portIndex) { outHeader->nOffset = 0; outHeader->nFilledLen = (width * height * 3) / 2; - outHeader->nFlags = 0; + outHeader->nFlags = EOSseen ? OMX_BUFFERFLAG_EOS : 0; outHeader->nTimeStamp = inHeader->nTimeStamp; const uint8_t *srcLine = (const uint8_t *)img->planes[PLANE_Y]; -- cgit v1.1 From d030447b617105b31bf3013e5e4b39d422b53b77 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Wed, 15 May 2013 12:59:19 -0700 Subject: stagefright: BufferProducer updates Update BufferQueue and ConsumerBase users to new BufferQueue API, to allow BufferQueue slots to be reused. Buffer consumers generally now need to track the unique frameNumber belonging to each frame acquired if they are using BufferQueue directly. Otherwise, they can simply track the graphicBuffer. 
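The required bookkeeping can be reduced to a small self-contained model (SlotCache, Slot and GraphicBufferStub below are illustrative names, not the real BufferQueue types): cache the buffer the first time a slot is seen, remember the frame number of every acquire, and hand that frame number back on release.

    #include <cstdint>
    #include <cstdio>
    #include <memory>
    #include <vector>

    struct GraphicBufferStub { int id; };   // stand-in for the real GraphicBuffer

    // Per-slot cache: the buffer (sent only the first time a slot is used)
    // and the frame number of the most recent acquire from that slot.
    struct Slot {
        std::shared_ptr<GraphicBufferStub> buffer;
        uint64_t frameNumber = 0;
    };

    class SlotCache {
    public:
        explicit SlotCache(size_t numSlots) : mSlots(numSlots) {}

        // Called on acquire: a non-null buffer means "first time seeing this slot".
        void onAcquired(int slot, std::shared_ptr<GraphicBufferStub> buf,
                        uint64_t frameNumber) {
            if (buf) {
                mSlots[slot].buffer = std::move(buf);
            }
            mSlots[slot].frameNumber = frameNumber;
        }

        // Called on release: the frame number recorded at acquire time is passed
        // back so the queue can tell whether the slot has been reused since.
        uint64_t frameNumberForRelease(int slot) const {
            return mSlots[slot].frameNumber;
        }

    private:
        std::vector<Slot> mSlots;
    };

    int main() {
        SlotCache cache(32);
        cache.onAcquired(3, std::make_shared<GraphicBufferStub>(), 17);
        cache.onAcquired(3, nullptr, 18);   // slot reused, buffer already cached
        std::printf("release slot 3 with frame %llu\n",
                    (unsigned long long)cache.frameNumberForRelease(3));
        return 0;
    }

The frame number matters because a reused slot may hold a different buffer than it did at acquire time, so the slot index alone is no longer enough to identify the frame being released.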
Change-Id: I30ee3158cf40fb10bbd085241646d5f1128ee480 Signed-off-by: Lajos Molnar Related-to-bug: 7093648 --- include/media/stagefright/SurfaceMediaSource.h | 8 +++- media/libstagefright/SurfaceMediaSource.cpp | 24 ++++++----- media/libstagefright/omx/GraphicBufferSource.cpp | 47 +++++++++------------- media/libstagefright/omx/GraphicBufferSource.h | 10 ++++- .../libcameraservice/gui/RingBufferConsumer.cpp | 13 ++++-- 5 files changed, 58 insertions(+), 44 deletions(-) diff --git a/include/media/stagefright/SurfaceMediaSource.h b/include/media/stagefright/SurfaceMediaSource.h index 5f21da9..7d40379 100644 --- a/include/media/stagefright/SurfaceMediaSource.h +++ b/include/media/stagefright/SurfaceMediaSource.h @@ -146,9 +146,13 @@ private: // this consumer sp mBufferQueue; - // mBufferSlot caches GraphicBuffers from the buffer queue - sp mBufferSlot[BufferQueue::NUM_BUFFER_SLOTS]; + struct SlotData { + sp mGraphicBuffer; + uint64_t mFrameNumber; + }; + // mSlots caches GraphicBuffers and frameNumbers from the buffer queue + SlotData mSlots[BufferQueue::NUM_BUFFER_SLOTS]; // The permenent width and height of SMS buffers int mWidth; diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp index 409038a..71b6569 100644 --- a/media/libstagefright/SurfaceMediaSource.cpp +++ b/media/libstagefright/SurfaceMediaSource.cpp @@ -305,8 +305,9 @@ status_t SurfaceMediaSource::read( MediaBuffer **buffer, // First time seeing the buffer? Added it to the SMS slot if (item.mGraphicBuffer != NULL) { - mBufferSlot[item.mBuf] = item.mGraphicBuffer; + mSlots[item.mBuf].mGraphicBuffer = item.mGraphicBuffer; } + mSlots[item.mBuf].mFrameNumber = item.mFrameNumber; // check for the timing of this buffer if (mNumFramesReceived == 0 && !mUseAbsoluteTimestamps) { @@ -315,7 +316,8 @@ status_t SurfaceMediaSource::read( MediaBuffer **buffer, if (mStartTimeNs > 0) { if (item.mTimestamp < mStartTimeNs) { // This frame predates start of record, discard - mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY, + mBufferQueue->releaseBuffer( + item.mBuf, item.mFrameNumber, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); continue; } @@ -345,17 +347,18 @@ status_t SurfaceMediaSource::read( MediaBuffer **buffer, // First time seeing the buffer? Added it to the SMS slot if (item.mGraphicBuffer != NULL) { - mBufferSlot[mCurrentSlot] = item.mGraphicBuffer; + mSlots[item.mBuf].mGraphicBuffer = item.mGraphicBuffer; } + mSlots[item.mBuf].mFrameNumber = item.mFrameNumber; - mCurrentBuffers.push_back(mBufferSlot[mCurrentSlot]); + mCurrentBuffers.push_back(mSlots[mCurrentSlot].mGraphicBuffer); int64_t prevTimeStamp = mCurrentTimestamp; mCurrentTimestamp = item.mTimestamp; mNumFramesEncoded++; // Pass the data to the MediaBuffer. 
Pass in only the metadata - passMetadataBuffer(buffer, mBufferSlot[mCurrentSlot]->handle); + passMetadataBuffer(buffer, mSlots[mCurrentSlot].mGraphicBuffer->handle); (*buffer)->setObserver(this); (*buffer)->add_ref(); @@ -405,15 +408,16 @@ void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) { } for (int id = 0; id < BufferQueue::NUM_BUFFER_SLOTS; id++) { - if (mBufferSlot[id] == NULL) { + if (mSlots[id].mGraphicBuffer == NULL) { continue; } - if (bufferHandle == mBufferSlot[id]->handle) { + if (bufferHandle == mSlots[id].mGraphicBuffer->handle) { ALOGV("Slot %d returned, matches handle = %p", id, - mBufferSlot[id]->handle); + mSlots[id].mGraphicBuffer->handle); - mBufferQueue->releaseBuffer(id, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, + mBufferQueue->releaseBuffer(id, mSlots[id].mFrameNumber, + EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); buffer->setObserver(0); @@ -469,7 +473,7 @@ void SurfaceMediaSource::onBuffersReleased() { mFrameAvailableCondition.signal(); for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) { - mBufferSlot[i] = 0; + mSlots[i].mGraphicBuffer = 0; } } diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp index ef27879..b3a8463 100644 --- a/media/libstagefright/omx/GraphicBufferSource.cpp +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -206,24 +206,15 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { // Find matching entry in our cached copy of the BufferQueue slots. // If we find a match, release that slot. If we don't, the BufferQueue // has dropped that GraphicBuffer, and there's nothing for us to release. - // - // (We could store "id" in CodecBuffer and avoid the slot search.) - int id; - for (id = 0; id < BufferQueue::NUM_BUFFER_SLOTS; id++) { - if (mBufferSlot[id] == NULL) { - continue; - } - - if (mBufferSlot[id]->handle == codecBuffer.mGraphicBuffer->handle) { - ALOGV("cbi %d matches bq slot %d, handle=%p", - cbi, id, mBufferSlot[id]->handle); - - mBufferQueue->releaseBuffer(id, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, - Fence::NO_FENCE); - break; - } - } - if (id == BufferQueue::NUM_BUFFER_SLOTS) { + int id = codecBuffer.mBuf; + if (mBufferSlot[id] != NULL && + mBufferSlot[id]->handle == codecBuffer.mGraphicBuffer->handle) { + ALOGV("cbi %d matches bq slot %d, handle=%p", + cbi, id, mBufferSlot[id]->handle); + + mBufferQueue->releaseBuffer(id, codecBuffer.mFrameNumber, + EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); + } else { ALOGV("codecBufferEmptied: no match for emptied buffer in cbi %d", cbi); } @@ -287,11 +278,11 @@ bool GraphicBufferSource::fillCodecBuffer_l() { mBufferSlot[item.mBuf] = item.mGraphicBuffer; } - err = submitBuffer_l(mBufferSlot[item.mBuf], item.mTimestamp / 1000, cbi); + err = submitBuffer_l(item, cbi); if (err != OK) { ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf); - mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY, - EGL_NO_SYNC_KHR, Fence::NO_FENCE); + mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber, + EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); } else { ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi); } @@ -326,11 +317,13 @@ status_t GraphicBufferSource::signalEndOfInputStream() { return OK; } -status_t GraphicBufferSource::submitBuffer_l(sp& graphicBuffer, - int64_t timestampUsec, int cbi) { +status_t GraphicBufferSource::submitBuffer_l( + const BufferQueue::BufferItem &item, int cbi) { ALOGV("submitBuffer_l cbi=%d", cbi); CodecBuffer& 
codecBuffer(mCodecBuffers.editItemAt(cbi)); - codecBuffer.mGraphicBuffer = graphicBuffer; + codecBuffer.mGraphicBuffer = mBufferSlot[item.mBuf]; + codecBuffer.mBuf = item.mBuf; + codecBuffer.mFrameNumber = item.mFrameNumber; OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader; CHECK(header->nAllocLen >= 4 + sizeof(buffer_handle_t)); @@ -342,7 +335,7 @@ status_t GraphicBufferSource::submitBuffer_l(sp& graphicBuffer, status_t err = mNodeInstance->emptyDirectBuffer(header, 0, 4 + sizeof(buffer_handle_t), OMX_BUFFERFLAG_ENDOFFRAME, - timestampUsec); + item.mTimestamp / 1000); if (err != OK) { ALOGW("WARNING: emptyDirectBuffer failed: 0x%x", err); codecBuffer.mGraphicBuffer = NULL; @@ -431,8 +424,8 @@ void GraphicBufferSource::onFrameAvailable() { BufferQueue::BufferItem item; status_t err = mBufferQueue->acquireBuffer(&item); if (err == OK) { - mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY, - EGL_NO_SYNC_KHR, item.mFence); + mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber, + EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence); } return; } diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h index 562d342..8c6b470 100644 --- a/media/libstagefright/omx/GraphicBufferSource.h +++ b/media/libstagefright/omx/GraphicBufferSource.h @@ -104,6 +104,13 @@ private: // (mGraphicBuffer == NULL) or in use by the codec. struct CodecBuffer { OMX_BUFFERHEADERTYPE* mHeader; + + // buffer producer's frame-number for buffer + uint64_t mFrameNumber; + + // buffer producer's buffer slot for buffer + int mBuf; + sp mGraphicBuffer; }; @@ -130,8 +137,7 @@ private: // Marks the mCodecBuffers entry as in-use, copies the GraphicBuffer // reference into the codec buffer, and submits the data to the codec. - status_t submitBuffer_l(sp& graphicBuffer, - int64_t timestampUsec, int cbi); + status_t submitBuffer_l(const BufferQueue::BufferItem &item, int cbi); // Submits an empty buffer, with the EOS flag set. Returns without // doing anything if we don't have a codec buffer available. diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp index cd39bad..dfa1066 100644 --- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp +++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp @@ -214,7 +214,11 @@ status_t RingBufferConsumer::releaseOldestBufferLocked(size_t* pinnedFrames) { // In case the object was never pinned, pass the acquire fence // back to the release fence. If the fence was already waited on, // it'll just be a no-op to wait on it again. 
- err = addReleaseFenceLocked(item.mBuf, item.mFence); + + // item.mGraphicBuffer was populated with the proper graphic-buffer + // at acquire even if it was previously acquired + err = addReleaseFenceLocked(item.mBuf, + item.mGraphicBuffer, item.mFence); if (err != OK) { BI_LOGE("Failed to add release fence to buffer " @@ -226,7 +230,9 @@ status_t RingBufferConsumer::releaseOldestBufferLocked(size_t* pinnedFrames) { BI_LOGV("Attempting to release buffer timestamp %lld, frame %lld", item.mTimestamp, item.mFrameNumber); - err = releaseBufferLocked(item.mBuf, + // item.mGraphicBuffer was populated with the proper graphic-buffer + // at acquire even if it was previously acquired + err = releaseBufferLocked(item.mBuf, item.mGraphicBuffer, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR); if (err != OK) { @@ -310,7 +316,8 @@ void RingBufferConsumer::unpinBuffer(const BufferItem& item) { RingBufferItem& find = *it; if (item.mGraphicBuffer == find.mGraphicBuffer) { - status_t res = addReleaseFenceLocked(item.mBuf, item.mFence); + status_t res = addReleaseFenceLocked(item.mBuf, + item.mGraphicBuffer, item.mFence); if (res != OK) { BI_LOGE("Failed to add release fence to buffer " -- cgit v1.1 From 0a69bd281c76bf777ddb51d0c6c08519634b192d Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Thu, 23 May 2013 15:17:24 -0700 Subject: Add support for MPEG editlist gapless info Change-Id: I862d89c805d738db9bdf81a8f1c5b317ff968dff --- media/libstagefright/MPEG4Extractor.cpp | 61 +++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index 145869e..7697d55 100644 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -817,6 +817,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('i', 'l', 's', 't'): case FOURCC('s', 'i', 'n', 'f'): case FOURCC('s', 'c', 'h', 'i'): + case FOURCC('e', 'd', 't', 's'): { if (chunk_type == FOURCC('s', 't', 'b', 'l')) { ALOGV("sampleTable chunk is %d bytes long.", (size_t)chunk_size); @@ -904,6 +905,66 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { break; } + case FOURCC('e', 'l', 's', 't'): + { + // See 14496-12 8.6.6 + uint8_t version; + if (mDataSource->readAt(data_offset, &version, 1) < 1) { + return ERROR_IO; + } + + uint32_t entry_count; + if (!mDataSource->getUInt32(data_offset + 4, &entry_count)) { + return ERROR_IO; + } + + if (entry_count != 1) { + // we only support a single entry at the moment, for gapless playback + ALOGW("ignoring edit list with %d entries", entry_count); + } else { + off64_t entriesoffset = data_offset + 8; + uint64_t segment_duration; + int64_t media_time; + + if (version == 1) { + if (!mDataSource->getUInt64(entriesoffset, &segment_duration) || + !mDataSource->getUInt64(entriesoffset + 8, (uint64_t*)&media_time)) { + return ERROR_IO; + } + } else if (version == 0) { + uint32_t sd; + int32_t mt; + if (!mDataSource->getUInt32(entriesoffset, &sd) || + !mDataSource->getUInt32(entriesoffset + 4, (uint32_t*)&mt)) { + return ERROR_IO; + } + segment_duration = sd; + media_time = mt; + } else { + return ERROR_IO; + } + + uint64_t halfscale = mLastTrack->timescale / 2; + segment_duration = (segment_duration * 1000000 + halfscale)/ mLastTrack->timescale; + media_time = (media_time * 1000000 + halfscale) / mLastTrack->timescale; + + int64_t duration; + int32_t samplerate; + if (mLastTrack->meta->findInt64(kKeyDuration, &duration) && + 
mLastTrack->meta->findInt32(kKeySampleRate, &samplerate)) { + + int64_t delay = (media_time * samplerate + 500000) / 1000000; + mLastTrack->meta->setInt32(kKeyEncoderDelay, delay); + + int64_t paddingus = duration - (segment_duration + media_time); + int64_t paddingsamples = (paddingus * samplerate + 500000) / 1000000; + mLastTrack->meta->setInt32(kKeyEncoderPadding, paddingsamples); + } + } + *offset += chunk_size; + break; + } + case FOURCC('f', 'r', 'm', 'a'): { uint32_t original_fourcc; -- cgit v1.1 From 776a0023f5146423e88474c35691eb0e20fc8102 Mon Sep 17 00:00:00 2001 From: Ajay Dudani Date: Wed, 22 May 2013 22:16:33 -0700 Subject: libstagefright: Check for duration > 0 to avoid divide-by-zero exception Change-Id: I58ccacbf7ede892dff9626715162ea7b1f2ddbc6 --- media/libstagefright/AwesomePlayer.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index bd28118..6c197e2 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -597,7 +597,7 @@ void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) { bool AwesomePlayer::getBitrate(int64_t *bitrate) { off64_t size; - if (mDurationUs >= 0 && mCachedSource != NULL + if (mDurationUs > 0 && mCachedSource != NULL && mCachedSource->getSize(&size) == OK) { *bitrate = size * 8000000ll / mDurationUs; // in bits/sec return true; -- cgit v1.1 From 608cff22d70a0eabb7e01ef39f13b4d6db5ba48a Mon Sep 17 00:00:00 2001 From: Alex Ray Date: Fri, 24 May 2013 15:41:53 -0700 Subject: libcameraservice: missing argument when logging Change-Id: I6f3da400f8f47a387968b9da4ee7cd8e0c9dea70 --- services/camera/libcameraservice/camera2/JpegCompressor.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/camera2/JpegCompressor.cpp b/services/camera/libcameraservice/camera2/JpegCompressor.cpp index c9af71e..2f0c67d 100644 --- a/services/camera/libcameraservice/camera2/JpegCompressor.cpp +++ b/services/camera/libcameraservice/camera2/JpegCompressor.cpp @@ -210,7 +210,8 @@ boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr /*cinfo*/) { return true; } -void JpegCompressor::jpegTermDestination(j_compress_ptr /*cinfo*/) { +void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) { + (void) cinfo; // TODO: clean up ALOGV("%s", __FUNCTION__); ALOGV("%s: Done writing JPEG data. %d bytes left in buffer", __FUNCTION__, cinfo->dest->free_in_buffer); -- cgit v1.1 From 211a2ed2c05044a10c8619d1dba1070b72a8b256 Mon Sep 17 00:00:00 2001 From: Alex Ray Date: Wed, 22 May 2013 17:44:26 -0700 Subject: Camera: static parameter default values Use sane default values for static metadata parameters. 
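The recurring pattern in this change is: query the static entry as optional rather than required, and when the HAL did not publish it, substitute a conservative default instead of failing initialization with NO_INIT. A minimal self-contained sketch of that pattern (staticInfoOpt, the map and the string tag below are illustrative, not the actual Parameters helpers):

    #include <cstdint>
    #include <cstdio>
    #include <map>
    #include <optional>
    #include <string>
    #include <vector>

    using Entry = std::vector<uint8_t>;

    // Illustrative stand-in for the static-metadata lookup: returns the entry
    // if the HAL published it, or nullopt instead of treating absence as fatal.
    std::optional<Entry> staticInfoOpt(const std::map<std::string, Entry> &info,
                                       const std::string &tag) {
        auto it = info.find(tag);
        if (it == info.end() || it->second.empty()) {
            return std::nullopt;
        }
        return it->second;
    }

    int main() {
        std::map<std::string, Entry> halInfo;  // HAL omitted the flash entry

        // Old behaviour: missing entry => initialization error (NO_INIT).
        // New behaviour: missing entry => assume a sane default, e.g. "no flash".
        bool flashAvailable = false;
        if (auto e = staticInfoOpt(halInfo, "android.flash.info.available")) {
            flashAvailable = (*e)[0] != 0;
        }
        std::printf("flashAvailable=%d\n", flashAvailable ? 1 : 0);
        return 0;
    }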
Change-Id: I180d9b6560ebba468a083e847b78e02b845db788 --- .../camera/libcameraservice/camera2/Parameters.cpp | 94 ++++++++++++---------- 1 file changed, 51 insertions(+), 43 deletions(-) diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index 5a9fd88..a567c15 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -292,8 +292,11 @@ status_t Parameters::initialize(const CameraMetadata *info) { CameraParameters::WHITE_BALANCE_AUTO); camera_metadata_ro_entry_t availableWhiteBalanceModes = - staticInfo(ANDROID_CONTROL_AWB_AVAILABLE_MODES); - { + staticInfo(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 0, 0, false); + if (!availableWhiteBalanceModes.count) { + params.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, + CameraParameters::WHITE_BALANCE_AUTO); + } else { String8 supportedWhiteBalance; bool addComma = false; for (size_t i=0; i < availableWhiteBalanceModes.count; i++) { @@ -353,9 +356,11 @@ status_t Parameters::initialize(const CameraMetadata *info) { CameraParameters::EFFECT_NONE); camera_metadata_ro_entry_t availableEffects = - staticInfo(ANDROID_CONTROL_AVAILABLE_EFFECTS); - if (!availableEffects.count) return NO_INIT; - { + staticInfo(ANDROID_CONTROL_AVAILABLE_EFFECTS, 0, 0, false); + if (!availableEffects.count) { + params.set(CameraParameters::KEY_SUPPORTED_EFFECTS, + CameraParameters::EFFECT_NONE); + } else { String8 supportedEffects; bool addComma = false; for (size_t i=0; i < availableEffects.count; i++) { @@ -413,9 +418,11 @@ status_t Parameters::initialize(const CameraMetadata *info) { CameraParameters::ANTIBANDING_AUTO); camera_metadata_ro_entry_t availableAntibandingModes = - staticInfo(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES); - if (!availableAntibandingModes.count) return NO_INIT; - { + staticInfo(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 0, 0, false); + if (!availableAntibandingModes.count) { + params.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, + CameraParameters::ANTIBANDING_OFF); + } else { String8 supportedAntibanding; bool addComma = false; for (size_t i=0; i < availableAntibandingModes.count; i++) { @@ -455,9 +462,10 @@ status_t Parameters::initialize(const CameraMetadata *info) { CameraParameters::SCENE_MODE_AUTO); camera_metadata_ro_entry_t availableSceneModes = - staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES); - if (!availableSceneModes.count) return NO_INIT; - { + staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 0, 0, false); + if (!availableSceneModes.count) { + params.remove(CameraParameters::KEY_SCENE_MODE); + } else { String8 supportedSceneModes(CameraParameters::SCENE_MODE_AUTO); bool addComma = true; bool noSceneModes = false; @@ -548,15 +556,17 @@ status_t Parameters::initialize(const CameraMetadata *info) { } } + bool isFlashAvailable = false; camera_metadata_ro_entry_t flashAvailable = - staticInfo(ANDROID_FLASH_INFO_AVAILABLE, 1, 1); - if (!flashAvailable.count) return NO_INIT; + staticInfo(ANDROID_FLASH_INFO_AVAILABLE, 0, 1, false); + if (flashAvailable.count) { + isFlashAvailable = flashAvailable.data.u8[0]; + } camera_metadata_ro_entry_t availableAeModes = - staticInfo(ANDROID_CONTROL_AE_AVAILABLE_MODES); - if (!availableAeModes.count) return NO_INIT; + staticInfo(ANDROID_CONTROL_AE_AVAILABLE_MODES, 0, 0, false); - if (flashAvailable.data.u8[0]) { + if (isFlashAvailable) { flashMode = Parameters::FLASH_MODE_OFF; params.set(CameraParameters::KEY_FLASH_MODE, 
CameraParameters::FLASH_MODE_OFF); @@ -585,14 +595,12 @@ status_t Parameters::initialize(const CameraMetadata *info) { } camera_metadata_ro_entry_t minFocusDistance = - staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1, 1); - if (!minFocusDistance.count) return NO_INIT; + staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 0, 1, false); camera_metadata_ro_entry_t availableAfModes = - staticInfo(ANDROID_CONTROL_AF_AVAILABLE_MODES); - if (!availableAfModes.count) return NO_INIT; + staticInfo(ANDROID_CONTROL_AF_AVAILABLE_MODES, 0, 0, false); - if (minFocusDistance.data.f[0] == 0) { + if (!minFocusDistance.count || minFocusDistance.data.f[0] == 0) { // Fixed-focus lens focusMode = Parameters::FOCUS_MODE_FIXED; params.set(CameraParameters::KEY_FOCUS_MODE, @@ -662,7 +670,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { focusingAreas.add(Parameters::Area(0,0,0,0,0)); camera_metadata_ro_entry_t availableFocalLengths = - staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS); + staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 0, 0, false); if (!availableFocalLengths.count) return NO_INIT; float minFocalLength = availableFocalLengths.data.f[0]; @@ -768,8 +776,8 @@ status_t Parameters::initialize(const CameraMetadata *info) { CameraParameters::FALSE); camera_metadata_ro_entry_t availableVideoStabilizationModes = - staticInfo(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES); - if (!availableVideoStabilizationModes.count) return NO_INIT; + staticInfo(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 0, 0, + false); if (availableVideoStabilizationModes.count > 1) { params.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, @@ -797,7 +805,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { previewCallbackSurface = false; camera_metadata_ro_entry_t supportedHardwareLevel = - staticInfo(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL); + staticInfo(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, 0, 0, false); if (!supportedHardwareLevel.count || (supportedHardwareLevel.data.u8[0] == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED)) { ALOGI("Camera %d: ZSL mode disabled for limited mode HALs", cameraId); @@ -835,8 +843,8 @@ status_t Parameters::buildFastInfo() { int32_t arrayHeight = activeArraySize.data.i32[1]; camera_metadata_ro_entry_t availableFaceDetectModes = - staticInfo(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES); - if (!availableFaceDetectModes.count) return NO_INIT; + staticInfo(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 0, 0, + false); uint8_t bestFaceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; @@ -863,19 +871,21 @@ status_t Parameters::buildFastInfo() { } } + int32_t maxFaces = 0; camera_metadata_ro_entry_t maxFacesDetected = - staticInfo(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1, 1); - if (!maxFacesDetected.count) return NO_INIT; - - int32_t maxFaces = maxFacesDetected.data.i32[0]; + staticInfo(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 0, 1, false); + if (maxFacesDetected.count) { + maxFaces = maxFacesDetected.data.i32[0]; + } camera_metadata_ro_entry_t availableSceneModes = - staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES); + staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 0, 0, false); camera_metadata_ro_entry_t sceneModeOverrides = - staticInfo(ANDROID_CONTROL_SCENE_MODE_OVERRIDES); + staticInfo(ANDROID_CONTROL_SCENE_MODE_OVERRIDES, 0, 0, false); camera_metadata_ro_entry_t minFocusDistance = - staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE); - bool fixedLens = (minFocusDistance.data.f[0] == 0); + 
staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 0, 0, false); + bool fixedLens = minFocusDistance.count == 0 || + minFocusDistance.data.f[0] == 0; camera_metadata_ro_entry_t availableFocalLengths = staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS); @@ -1466,7 +1476,7 @@ status_t Parameters::set(const String8& paramString) { } if (validatedParams.wbMode != wbMode) { camera_metadata_ro_entry_t availableWbModes = - staticInfo(ANDROID_CONTROL_AWB_AVAILABLE_MODES); + staticInfo(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 0, 0, false); for (i = 0; i < availableWbModes.count; i++) { if (validatedParams.wbMode == availableWbModes.data.u8[i]) break; } @@ -1497,8 +1507,9 @@ status_t Parameters::set(const String8& paramString) { validatedParams.currentAfTriggerId = -1; if (validatedParams.focusMode != Parameters::FOCUS_MODE_FIXED) { camera_metadata_ro_entry_t minFocusDistance = - staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE); - if (minFocusDistance.data.f[0] == 0) { + staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 0, 0, + false); + if (minFocusDistance.count && minFocusDistance.data.f[0] == 0) { ALOGE("%s: Requested focus mode \"%s\" is not available: " "fixed focus lens", __FUNCTION__, @@ -1618,7 +1629,8 @@ status_t Parameters::set(const String8& paramString) { validatedParams.videoStabilization = boolFromString( newParams.get(CameraParameters::KEY_VIDEO_STABILIZATION) ); camera_metadata_ro_entry_t availableVideoStabilizationModes = - staticInfo(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES); + staticInfo(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 0, 0, + false); if (validatedParams.videoStabilization && availableVideoStabilizationModes.count == 1) { ALOGE("%s: Video stabilization not supported", __FUNCTION__); @@ -2545,10 +2557,6 @@ status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov) staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2); if (!sensorSize.count) return NO_INIT; - camera_metadata_ro_entry_t availableFocalLengths = - staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS); - if (!availableFocalLengths.count) return NO_INIT; - float arrayAspect = static_cast(fastInfo.arrayWidth) / fastInfo.arrayHeight; float stillAspect = static_cast(pictureWidth) / pictureHeight; -- cgit v1.1 From 210efd48ed21ca0084d9440f9a1db7d9878f8094 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Wed, 29 May 2013 10:58:08 -0700 Subject: Don't crash if no timescale was given b/9175577 Change-Id: Ie159a9c9b42e6c8d9366d0ef6a607234af569e36 --- media/libstagefright/MPEG4Extractor.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index 7697d55..919766c 100644 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -921,6 +921,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { if (entry_count != 1) { // we only support a single entry at the moment, for gapless playback ALOGW("ignoring edit list with %d entries", entry_count); + } else if (mLastTrack->timescale == 0) { + ALOGW("ignoring edit list because timescale is 0"); } else { off64_t entriesoffset = data_offset + 8; uint64_t segment_duration; -- cgit v1.1 From 20cb300bce9a2b80966a422ef2de35b18533e1dd Mon Sep 17 00:00:00 2001 From: Alex Ray Date: Tue, 28 May 2013 20:18:22 -0700 Subject: camera3: Error on minimum buffer count Change-Id: Idf16de20e940e411286ec3e477534b36ef1c9b11 --- services/camera/libcameraservice/camera3/Camera3OutputStream.cpp | 9 +++++++-- 1 file 
changed, 7 insertions(+), 2 deletions(-) diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp index 2efeede..f085443 100644 --- a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp @@ -301,8 +301,13 @@ status_t Camera3OutputStream::configureQueueLocked() { return res; } - ALOGV("%s: Consumer wants %d buffers", __FUNCTION__, - maxConsumerBuffers); + ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__, + maxConsumerBuffers, camera3_stream::max_buffers); + if (camera3_stream::max_buffers == 0) { + ALOGE("%s: Camera HAL requested no max_buffers, requires at least 1", + __FUNCTION__, camera3_stream::max_buffers); + return INVALID_OPERATION; + } mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers; mDequeuedBufferCount = 0; -- cgit v1.1 From bd25dacce1187c827dde3fb72036c044c8106719 Mon Sep 17 00:00:00 2001 From: Chong Zhang Date: Thu, 30 May 2013 09:46:20 -0700 Subject: wifi-display: fixes for PAUSE/PLAY state PAUSE could be initiated by either source (via trigger method) or sink, in latter case we have to allow PAUSE from PLAYING state. Similarly PLAY should be allowed from PAUSED state. Bug: 8922515 Change-Id: I475534aa4ffa6dc6844f59c5868d8f88291019ae --- media/libstagefright/wifi-display/source/WifiDisplaySource.cpp | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 0b714f0..dee95eb 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -1369,7 +1369,9 @@ status_t WifiDisplaySource::onPlayRequest( return ERROR_MALFORMED; } - if (mState != AWAITING_CLIENT_PLAY) { + if (mState != AWAITING_CLIENT_PLAY + && mState != PAUSED_TO_PLAYING + && mState != PAUSED) { ALOGW("Received PLAY request but we're in state %d", mState); sendErrorResponse( @@ -1396,7 +1398,7 @@ status_t WifiDisplaySource::onPlayRequest( return err; } - if (mState == PAUSED_TO_PLAYING) { + if (mState == PAUSED_TO_PLAYING || mPlaybackSessionEstablished) { mState = PLAYING; return OK; } @@ -1430,7 +1432,7 @@ status_t WifiDisplaySource::onPauseRequest( ALOGI("Received PAUSE request."); - if (mState != PLAYING_TO_PAUSED) { + if (mState != PLAYING_TO_PAUSED && mState != PLAYING) { return INVALID_OPERATION; } -- cgit v1.1 From fc80e9ec5582770cb5a7fef172af3b52625ecce7 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Thu, 30 May 2013 11:00:47 -0700 Subject: Edit list uses timescale from movie header not from track media header Change-Id: I24063183f44027b999782cc9006e9a1b56e87355 --- media/libstagefright/MPEG4Extractor.cpp | 17 ++++++++++------- media/libstagefright/include/MPEG4Extractor.h | 1 + 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index 919766c..42a9c7a 100644 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -341,6 +341,7 @@ MPEG4Extractor::MPEG4Extractor(const sp &source) mDataSource(source), mInitCheck(NO_INIT), mHasVideo(false), + mHeaderTimescale(0), mFirstTrack(NULL), mLastTrack(NULL), mFileMetaData(new MetaData), @@ -921,7 +922,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { if (entry_count != 1) { // we 
only support a single entry at the moment, for gapless playback ALOGW("ignoring edit list with %d entries", entry_count); - } else if (mLastTrack->timescale == 0) { + } else if (mHeaderTimescale == 0) { ALOGW("ignoring edit list because timescale is 0"); } else { off64_t entriesoffset = data_offset + 8; @@ -946,9 +947,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { return ERROR_IO; } - uint64_t halfscale = mLastTrack->timescale / 2; - segment_duration = (segment_duration * 1000000 + halfscale)/ mLastTrack->timescale; - media_time = (media_time * 1000000 + halfscale) / mLastTrack->timescale; + uint64_t halfscale = mHeaderTimescale / 2; + segment_duration = (segment_duration * 1000000 + halfscale)/ mHeaderTimescale; + media_time = (media_time * 1000000 + halfscale) / mHeaderTimescale; int64_t duration; int32_t samplerate; @@ -1627,24 +1628,26 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('m', 'v', 'h', 'd'): { - if (chunk_data_size < 12) { + if (chunk_data_size < 24) { return ERROR_MALFORMED; } - uint8_t header[12]; + uint8_t header[24]; if (mDataSource->readAt( data_offset, header, sizeof(header)) < (ssize_t)sizeof(header)) { return ERROR_IO; } - int64_t creationTime; + uint64_t creationTime; if (header[0] == 1) { creationTime = U64_AT(&header[4]); + mHeaderTimescale = U32_AT(&header[20]); } else if (header[0] != 0) { return ERROR_MALFORMED; } else { creationTime = U32_AT(&header[4]); + mHeaderTimescale = U32_AT(&header[12]); } String8 s; diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h index 35eff96..bbec1c4 100644 --- a/media/libstagefright/include/MPEG4Extractor.h +++ b/media/libstagefright/include/MPEG4Extractor.h @@ -82,6 +82,7 @@ private: sp mDataSource; status_t mInitCheck; bool mHasVideo; + uint32_t mHeaderTimescale; Track *mFirstTrack, *mLastTrack; -- cgit v1.1 From fe7e0c6154309f2491463ee6ca4920d225289638 Mon Sep 17 00:00:00 2001 From: Alex Ray Date: Thu, 30 May 2013 00:12:13 -0700 Subject: camera: Dont segfault when result lacks timestamp Also make a log message more useful. 
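The crash came from reading the first element of a metadata entry even after noting that the entry was empty; the fix turns the second check into an else branch so the empty entry is never dereferenced. A self-contained sketch of the guarded access (Entry and checkTimestamp are illustrative names, not the actual Camera3Device types):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-in for a metadata entry: 'count' says how many
    // values are present; 'values' must not be read when count == 0.
    struct Entry {
        size_t count;
        const int64_t *values;
    };

    void checkTimestamp(const Entry &entry, int64_t shutterTimestamp) {
        if (entry.count == 0) {
            // Old code logged this error but then read values[0] anyway.
            std::fprintf(stderr, "No timestamp provided by HAL!\n");
        } else if (shutterTimestamp != entry.values[0]) {
            std::fprintf(stderr, "Timestamp mismatch: %lld vs %lld\n",
                         (long long)shutterTimestamp,
                         (long long)entry.values[0]);
        }
    }

    int main() {
        Entry empty = { 0, nullptr };
        checkTimestamp(empty, 12345);   // must not crash
        return 0;
    }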
Change-Id: Id8b65a9b55e1ebe41598dc2db4b0b2aa3f161965 --- services/camera/libcameraservice/Camera3Device.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index 0b5e9c4..3f2287f 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ b/services/camera/libcameraservice/Camera3Device.cpp @@ -723,7 +723,7 @@ status_t Camera3Device::deleteReprocessStream(int id) { status_t Camera3Device::createDefaultRequest(int templateId, CameraMetadata *request) { ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); + ALOGV("%s: for template %d", __FUNCTION__, templateId); Mutex::Autolock l(mLock); switch (mStatus) { @@ -1254,8 +1254,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { if (entry.count == 0) { SET_ERR("No timestamp provided by HAL for frame %d!", frameNumber); - } - if (timestamp != entry.data.i64[0]) { + } else if (timestamp != entry.data.i64[0]) { SET_ERR("Timestamp mismatch between shutter notify and result" " metadata for frame %d (%lld vs %lld respectively)", frameNumber, timestamp, entry.data.i64[0]); -- cgit v1.1 From 7c027248e1a4ccd5b22bc4deafb03e2d87ac8f38 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 26 Dec 2012 14:43:16 -0800 Subject: Consistent whitespace Change-Id: I118cce68d3b777f9ec9b6bfb70367496422a40f2 --- media/libmedia/AudioRecord.cpp | 2 +- media/libmedia/AudioTrack.cpp | 4 +++- media/libmedia/IMediaDeathNotifier.cpp | 8 ++++---- services/audioflinger/AudioFlinger.cpp | 28 +++++++++++++++++++++------- services/audioflinger/Threads.cpp | 3 ++- services/audioflinger/Tracks.cpp | 4 ++-- 6 files changed, 33 insertions(+), 16 deletions(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 40ff1bf..a2b8ae2 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -563,7 +563,7 @@ create_new_record: } } // read the server count again - start_loop_here: +start_loop_here: framesReady = mProxy->framesReady(); } cblk->lock.unlock(); diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 5ed8e3b..ff52b28 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -861,7 +861,9 @@ status_t AudioTrack::createTrack_l( // Ensure that buffer depth covers at least audio hardware latency uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate); - if (minBufCount < 2) minBufCount = 2; + if (minBufCount < 2) { + minBufCount = 2; + } size_t minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate; ALOGV("minFrameCount: %u, afFrameCount=%d, minBufCount=%d, sampleRate=%u, afSampleRate=%u" diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp index 9199db6..9db5b1b 100644 --- a/media/libmedia/IMediaDeathNotifier.cpp +++ b/media/libmedia/IMediaDeathNotifier.cpp @@ -49,10 +49,10 @@ IMediaDeathNotifier::getMediaPlayerService() } while (true); if (sDeathNotifier == NULL) { - sDeathNotifier = new DeathNotifier(); - } - binder->linkToDeath(sDeathNotifier); - sMediaPlayerService = interface_cast(binder); + sDeathNotifier = new DeathNotifier(); + } + binder->linkToDeath(sDeathNotifier); + sMediaPlayerService = interface_cast(binder); } ALOGE_IF(sMediaPlayerService == 0, "no media player service!?"); return sMediaPlayerService; diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 87eb6aa..a6edb77 100644 --- 
a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1453,10 +1453,18 @@ audio_io_handle_t AudioFlinger::openOutput(audio_module_handle_t module, } mPlaybackThreads.add(id, thread); - if (pSamplingRate != NULL) *pSamplingRate = config.sample_rate; - if (pFormat != NULL) *pFormat = config.format; - if (pChannelMask != NULL) *pChannelMask = config.channel_mask; - if (pLatencyMs != NULL) *pLatencyMs = thread->latency(); + if (pSamplingRate != NULL) { + *pSamplingRate = config.sample_rate; + } + if (pFormat != NULL) { + *pFormat = config.format; + } + if (pChannelMask != NULL) { + *pChannelMask = config.channel_mask; + } + if (pLatencyMs != NULL) { + *pLatencyMs = thread->latency(); + } // notify client processes of the new output creation thread->audioConfigChanged_l(AudioSystem::OUTPUT_OPENED); @@ -1698,9 +1706,15 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, ); mRecordThreads.add(id, thread); ALOGV("openInput() created record thread: ID %d thread %p", id, thread); - if (pSamplingRate != NULL) *pSamplingRate = reqSamplingRate; - if (pFormat != NULL) *pFormat = config.format; - if (pChannelMask != NULL) *pChannelMask = reqChannels; + if (pSamplingRate != NULL) { + *pSamplingRate = reqSamplingRate; + } + if (pFormat != NULL) { + *pFormat = config.format; + } + if (pChannelMask != NULL) { + *pChannelMask = reqChannels; + } // notify client processes of the new input creation thread->audioConfigChanged_l(AudioSystem::INPUT_OPENED); diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 7e9550c..213688e 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1716,7 +1716,7 @@ void AudioFlinger::PlaybackThread::cacheParameters_l() void AudioFlinger::PlaybackThread::invalidateTracks(audio_stream_type_t streamType) { - ALOGV ("MixerThread::invalidateTracks() mixer %p, streamType %d, mTracks.size %d", + ALOGV("MixerThread::invalidateTracks() mixer %p, streamType %d, mTracks.size %d", this, streamType, mTracks.size()); Mutex::Autolock _l(mLock); @@ -3970,6 +3970,7 @@ status_t AudioFlinger::RecordThread::start(RecordThread::RecordTrack* recordTrac ALOGV("Record started OK"); return status; } + startError: AudioSystem::stopInput(mId); clearSyncStartEvent(); diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 5ac3129..41a763d 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -1477,7 +1477,7 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr memset(pInBuffer->raw, 0, startFrames * channelCount * sizeof(int16_t)); mBufferQueue.add(pInBuffer); } else { - ALOGW ("OutputTrack::write() %p no more buffers in queue", this); + ALOGW("OutputTrack::write() %p no more buffers in queue", this); } } } @@ -1499,7 +1499,7 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr mOutBuffer.frameCount = pInBuffer->frameCount; nsecs_t startTime = systemTime(); if (obtainBuffer(&mOutBuffer, waitTimeLeftMs) == (status_t)NO_MORE_BUFFERS) { - ALOGV ("OutputTrack::write() %p thread %p no more output buffers", this, + ALOGV("OutputTrack::write() %p thread %p no more output buffers", this, mThread.unsafe_get()); outputBufferFull = true; break; -- cgit v1.1 From 1ad3eb9441eb509c792c61aa0181b0e74dbe9984 Mon Sep 17 00:00:00 2001 From: Chong Zhang Date: Thu, 30 May 2013 21:51:38 -0700 Subject: wifi-display: fix resolution list and keepalive interval - add all resolutions 
lower than 1280x720p30 - schedule next keepalive when sending M16 bug 9116665 Change-Id: I7b3fea2101d3d882c0af5c153af5c502b8ce98f6 --- media/libstagefright/wifi-display/VideoFormats.cpp | 23 ++++++++++++++++++++++ media/libstagefright/wifi-display/VideoFormats.h | 3 +++ .../wifi-display/source/WifiDisplaySource.cpp | 7 ++++--- 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp index c368c38..04e02c1 100644 --- a/media/libstagefright/wifi-display/VideoFormats.cpp +++ b/media/libstagefright/wifi-display/VideoFormats.cpp @@ -178,6 +178,29 @@ void VideoFormats::enableAll() { } } +void VideoFormats::enableResolutionUpto( + ResolutionType type, size_t index, + ProfileType profile, LevelType level) { + size_t width, height, fps, score; + bool interlaced; + if (!GetConfiguration(type, index, &width, &height, + &fps, &interlaced)) { + ALOGE("Maximum resolution not found!"); + return; + } + score = width * height * fps * (!interlaced + 1); + for (size_t i = 0; i < kNumResolutionTypes; ++i) { + for (size_t j = 0; j < 32; j++) { + if (GetConfiguration((ResolutionType)i, j, + &width, &height, &fps, &interlaced) + && score >= width * height * fps * (!interlaced + 1)) { + setResolutionEnabled((ResolutionType)i, j); + setProfileLevel((ResolutionType)i, j, profile, level); + } + } + } +} + void VideoFormats::setResolutionEnabled( ResolutionType type, size_t index, bool enabled) { CHECK_LT(type, kNumResolutionTypes); diff --git a/media/libstagefright/wifi-display/VideoFormats.h b/media/libstagefright/wifi-display/VideoFormats.h index b918652..fd38fd1 100644 --- a/media/libstagefright/wifi-display/VideoFormats.h +++ b/media/libstagefright/wifi-display/VideoFormats.h @@ -69,6 +69,9 @@ struct VideoFormats { void disableAll(); void enableAll(); + void enableResolutionUpto( + ResolutionType type, size_t index, + ProfileType profile, LevelType level); void setResolutionEnabled( ResolutionType type, size_t index, bool enabled = true); diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index dee95eb..b421b35 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -75,7 +75,8 @@ WifiDisplaySource::WifiDisplaySource( mSupportedSourceVideoFormats.setNativeResolution( VideoFormats::RESOLUTION_CEA, 5); // 1280x720 p30 - mSupportedSourceVideoFormats.setProfileLevel( + // Enable all resolutions up to 1280x720p30 + mSupportedSourceVideoFormats.enableResolutionUpto( VideoFormats::RESOLUTION_CEA, 5, VideoFormats::PROFILE_CHP, // Constrained High Profile VideoFormats::LEVEL_32); // Level 3.2 @@ -751,6 +752,8 @@ status_t WifiDisplaySource::sendM16(int32_t sessionID) { ++mNextCSeq; + scheduleKeepAlive(sessionID); + return OK; } @@ -1021,8 +1024,6 @@ status_t WifiDisplaySource::onReceiveM16Response( if (mClientInfo.mPlaybackSession != NULL) { mClientInfo.mPlaybackSession->updateLiveness(); - - scheduleKeepAlive(sessionID); } return OK; -- cgit v1.1 From 14f7672b5d450ed26a06fd3bb3ce045ea78b11b2 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 15 Jan 2013 09:04:18 -0800 Subject: New HLS implementation supporting independent stream sources, audio-only streams and more. 
Change-Id: Icfc45a0100243b2f7a14a9e65696be45b67d6495 --- cmds/stagefright/stagefright.cpp | 52 +- media/libmediaplayerservice/Android.mk | 1 + .../nuplayer/HTTPLiveSource.cpp | 140 +-- .../nuplayer/HTTPLiveSource.h | 8 +- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 88 +- media/libmediaplayerservice/nuplayer/NuPlayer.h | 7 +- .../nuplayer/NuPlayerRenderer.cpp | 6 +- .../nuplayer/NuPlayerSource.h | 1 + media/libstagefright/ACodec.cpp | 18 +- media/libstagefright/Android.mk | 2 - .../foundation/AHierarchicalStateMachine.cpp | 4 + media/libstagefright/httplive/Android.mk | 11 +- media/libstagefright/httplive/LiveSession.cpp | 1216 +++++++++++--------- media/libstagefright/httplive/LiveSession.h | 172 +++ media/libstagefright/httplive/M3UParser.cpp | 493 +++++++- media/libstagefright/httplive/M3UParser.h | 104 ++ media/libstagefright/httplive/PlaylistFetcher.cpp | 969 ++++++++++++++++ media/libstagefright/httplive/PlaylistFetcher.h | 155 +++ media/libstagefright/id3/ID3.cpp | 48 +- media/libstagefright/include/ID3.h | 7 + media/libstagefright/include/LiveSession.h | 165 --- media/libstagefright/include/M3UParser.h | 89 -- media/libstagefright/include/MPEG2TSExtractor.h | 5 - .../libstagefright/mpeg2ts/AnotherPacketSource.cpp | 29 +- media/libstagefright/mpeg2ts/AnotherPacketSource.h | 2 + media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp | 39 +- 26 files changed, 2769 insertions(+), 1062 deletions(-) create mode 100644 media/libstagefright/httplive/LiveSession.h create mode 100644 media/libstagefright/httplive/M3UParser.h create mode 100644 media/libstagefright/httplive/PlaylistFetcher.cpp create mode 100644 media/libstagefright/httplive/PlaylistFetcher.h delete mode 100644 media/libstagefright/include/LiveSession.h delete mode 100644 media/libstagefright/include/M3UParser.h diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 115b07c..924cf6d 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -30,8 +30,6 @@ #include #include #include -#include -#include "include/LiveSession.h" #include "include/NuCachedSource2.h" #include #include @@ -678,7 +676,6 @@ int main(int argc, char **argv) { gDisplayHistogram = false; sp looper; - sp liveSession; int res; while ((res = getopt(argc, argv, "han:lm:b:ptsrow:kxSTd:D:")) >= 0) { @@ -961,9 +958,7 @@ int main(int argc, char **argv) { sp dataSource = DataSource::CreateFromURI(filename); - if (strncasecmp(filename, "sine:", 5) - && strncasecmp(filename, "httplive://", 11) - && dataSource == NULL) { + if (strncasecmp(filename, "sine:", 5) && dataSource == NULL) { fprintf(stderr, "Unable to create data source.\n"); return 1; } @@ -995,44 +990,21 @@ int main(int argc, char **argv) { mediaSources.push(mediaSource); } } else { - sp extractor; + sp extractor = MediaExtractor::Create(dataSource); - if (!strncasecmp("httplive://", filename, 11)) { - String8 uri("http://"); - uri.append(filename + 11); - - if (looper == NULL) { - looper = new ALooper; - looper->start(); - } - liveSession = new LiveSession(NULL /* notify */); - looper->registerHandler(liveSession); - - liveSession->connect(uri.string()); - dataSource = liveSession->getDataSource(); - - extractor = - MediaExtractor::Create( - dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS); - - syncInfoPresent = false; - } else { - extractor = MediaExtractor::Create(dataSource); - - if (extractor == NULL) { - fprintf(stderr, "could not create extractor.\n"); - return -1; - } + if (extractor == NULL) { + fprintf(stderr, "could not create 
extractor.\n"); + return -1; + } - sp meta = extractor->getMetaData(); + sp meta = extractor->getMetaData(); - if (meta != NULL) { - const char *mime; - CHECK(meta->findCString(kKeyMIMEType, &mime)); + if (meta != NULL) { + const char *mime; + CHECK(meta->findCString(kKeyMIMEType, &mime)); - if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) { - syncInfoPresent = false; - } + if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) { + syncInfoPresent = false; } } diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk index d87bc7f..8f21632 100644 --- a/media/libmediaplayerservice/Android.mk +++ b/media/libmediaplayerservice/Android.mk @@ -34,6 +34,7 @@ LOCAL_SHARED_LIBRARIES := \ libsonivox \ libstagefright \ libstagefright_foundation \ + libstagefright_httplive \ libstagefright_omx \ libstagefright_wfd \ libutils \ diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp index 655ee55..c8901ce 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp @@ -20,7 +20,6 @@ #include "HTTPLiveSource.h" -#include "ATSParser.h" #include "AnotherPacketSource.h" #include "LiveDataSource.h" #include "LiveSession.h" @@ -62,7 +61,10 @@ NuPlayer::HTTPLiveSource::HTTPLiveSource( NuPlayer::HTTPLiveSource::~HTTPLiveSource() { if (mLiveSession != NULL) { mLiveSession->disconnect(); + mLiveSession.clear(); + mLiveLooper->stop(); + mLiveLooper.clear(); } } @@ -76,112 +78,42 @@ void NuPlayer::HTTPLiveSource::prepareAsync() { mLiveSession = new LiveSession( notify, (mFlags & kFlagIncognito) ? LiveSession::kFlagIncognito : 0, - mUIDValid, mUID); + mUIDValid, + mUID); mLiveLooper->registerHandler(mLiveSession); - mLiveSession->connect( + mLiveSession->connectAsync( mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders); - - mTSParser = new ATSParser; } void NuPlayer::HTTPLiveSource::start() { } -sp NuPlayer::HTTPLiveSource::getFormatMeta(bool audio) { - ATSParser::SourceType type = - audio ? ATSParser::AUDIO : ATSParser::VIDEO; - - sp source = - static_cast(mTSParser->getSource(type).get()); +sp NuPlayer::HTTPLiveSource::getFormat(bool audio) { + sp format; + status_t err = mLiveSession->getStreamFormat( + audio ? LiveSession::STREAMTYPE_AUDIO + : LiveSession::STREAMTYPE_VIDEO, + &format); - if (source == NULL) { + if (err != OK) { return NULL; } - return source->getFormat(); + return format; } status_t NuPlayer::HTTPLiveSource::feedMoreTSData() { - if (mFinalResult != OK) { - return mFinalResult; - } - - sp source = - static_cast(mLiveSession->getDataSource().get()); - - for (int32_t i = 0; i < 50; ++i) { - char buffer[188]; - ssize_t n = source->readAtNonBlocking(mOffset, buffer, sizeof(buffer)); - - if (n == -EWOULDBLOCK) { - break; - } else if (n < 0) { - if (n != ERROR_END_OF_STREAM) { - ALOGI("input data EOS reached, error %ld", n); - } else { - ALOGI("input data EOS reached."); - } - mTSParser->signalEOS(n); - mFinalResult = n; - break; - } else { - if (buffer[0] == 0x00) { - // XXX legacy - - uint8_t type = buffer[1]; - - sp extra = new AMessage; - - if (type & 2) { - int64_t mediaTimeUs; - memcpy(&mediaTimeUs, &buffer[2], sizeof(mediaTimeUs)); - - extra->setInt64(IStreamListener::kKeyMediaTimeUs, mediaTimeUs); - } - - mTSParser->signalDiscontinuity( - ((type & 1) == 0) - ? 
ATSParser::DISCONTINUITY_SEEK - : ATSParser::DISCONTINUITY_FORMATCHANGE, - extra); - } else { - status_t err = mTSParser->feedTSPacket(buffer, sizeof(buffer)); - - if (err != OK) { - ALOGE("TS Parser returned error %d", err); - mTSParser->signalEOS(err); - mFinalResult = err; - break; - } - } - - mOffset += n; - } - } - return OK; } status_t NuPlayer::HTTPLiveSource::dequeueAccessUnit( bool audio, sp *accessUnit) { - ATSParser::SourceType type = - audio ? ATSParser::AUDIO : ATSParser::VIDEO; - - sp source = - static_cast(mTSParser->getSource(type).get()); - - if (source == NULL) { - return -EWOULDBLOCK; - } - - status_t finalResult; - if (!source->hasBufferAvailable(&finalResult)) { - return finalResult == OK ? -EWOULDBLOCK : finalResult; - } - - return source->dequeueAccessUnit(accessUnit); + return mLiveSession->dequeueAccessUnit( + audio ? LiveSession::STREAMTYPE_AUDIO + : LiveSession::STREAMTYPE_VIDEO, + accessUnit); } status_t NuPlayer::HTTPLiveSource::getDuration(int64_t *durationUs) { @@ -189,15 +121,7 @@ status_t NuPlayer::HTTPLiveSource::getDuration(int64_t *durationUs) { } status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) { - // We need to make sure we're not seeking until we have seen the very first - // PTS timestamp in the whole stream (from the beginning of the stream). - while (!mTSParser->PTSTimeDeltaEstablished() && feedMoreTSData() == OK) { - usleep(100000); - } - - mLiveSession->seekTo(seekTimeUs); - - return OK; + return mLiveSession->seekTo(seekTimeUs); } void NuPlayer::HTTPLiveSource::onMessageReceived(const sp &msg) { @@ -249,6 +173,32 @@ void NuPlayer::HTTPLiveSource::onSessionNotify(const sp &msg) { break; } + case LiveSession::kWhatStreamsChanged: + { + uint32_t changedMask; + CHECK(msg->findInt32( + "changedMask", (int32_t *)&changedMask)); + + bool audio = changedMask & LiveSession::STREAMTYPE_AUDIO; + bool video = changedMask & LiveSession::STREAMTYPE_VIDEO; + + sp reply; + CHECK(msg->findMessage("reply", &reply)); + + sp notify = dupNotify(); + notify->setInt32("what", kWhatQueueDecoderShutdown); + notify->setInt32("audio", audio); + notify->setInt32("video", video); + notify->setMessage("reply", reply); + notify->post(); + break; + } + + case LiveSession::kWhatError: + { + break; + } + default: TRESPASS(); } diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h index 067d1da..aa9434b 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h @@ -23,7 +23,6 @@ namespace android { -struct ATSParser; struct LiveSession; struct NuPlayer::HTTPLiveSource : public NuPlayer::Source { @@ -37,18 +36,16 @@ struct NuPlayer::HTTPLiveSource : public NuPlayer::Source { virtual void prepareAsync(); virtual void start(); - virtual status_t feedMoreTSData(); - virtual status_t dequeueAccessUnit(bool audio, sp *accessUnit); + virtual sp getFormat(bool audio); + virtual status_t feedMoreTSData(); virtual status_t getDuration(int64_t *durationUs); virtual status_t seekTo(int64_t seekTimeUs); protected: virtual ~HTTPLiveSource(); - virtual sp getFormatMeta(bool audio); - virtual void onMessageReceived(const sp &msg); private: @@ -70,7 +67,6 @@ private: off64_t mOffset; sp mLiveLooper; sp mLiveSession; - sp mTSParser; void onSessionNotify(const sp &msg); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index b89b1c8..7e81035 100644 --- 
a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -89,6 +89,38 @@ private: DISALLOW_EVIL_CONSTRUCTORS(SetSurfaceAction); }; +struct NuPlayer::ShutdownDecoderAction : public Action { + ShutdownDecoderAction(bool audio, bool video) + : mAudio(audio), + mVideo(video) { + } + + virtual void execute(NuPlayer *player) { + player->performDecoderShutdown(mAudio, mVideo); + } + +private: + bool mAudio; + bool mVideo; + + DISALLOW_EVIL_CONSTRUCTORS(ShutdownDecoderAction); +}; + +struct NuPlayer::PostMessageAction : public Action { + PostMessageAction(const sp &msg) + : mMessage(msg) { + } + + virtual void execute(NuPlayer *) { + mMessage->post(); + } + +private: + sp mMessage; + + DISALLOW_EVIL_CONSTRUCTORS(PostMessageAction); +}; + // Use this if there's no state necessary to save in order to execute // the action. struct NuPlayer::SimpleAction : public Action { @@ -335,7 +367,8 @@ void NuPlayer::onMessageReceived(const sp &msg) { ALOGV("kWhatSetVideoNativeWindow"); mDeferredActions.push_back( - new SimpleAction(&NuPlayer::performDecoderShutdown)); + new ShutdownDecoderAction( + false /* audio */, true /* video */)); sp obj; CHECK(msg->findObject("native-window", &obj)); @@ -712,7 +745,8 @@ void NuPlayer::onMessageReceived(const sp &msg) { ALOGV("kWhatReset"); mDeferredActions.push_back( - new SimpleAction(&NuPlayer::performDecoderShutdown)); + new ShutdownDecoderAction( + true /* audio */, true /* video */)); mDeferredActions.push_back( new SimpleAction(&NuPlayer::performReset)); @@ -1023,6 +1057,9 @@ void NuPlayer::notifyListener(int msg, int ext1, int ext2) { } void NuPlayer::flushDecoder(bool audio, bool needShutdown) { + ALOGV("[%s] flushDecoder needShutdown=%d", + audio ? "audio" : "video", needShutdown); + if ((audio && mAudioDecoder == NULL) || (!audio && mVideoDecoder == NULL)) { ALOGI("flushDecoder %s without decoder present", audio ? 
"audio" : "video"); @@ -1173,20 +1210,29 @@ void NuPlayer::performDecoderFlush() { } } -void NuPlayer::performDecoderShutdown() { - ALOGV("performDecoderShutdown"); +void NuPlayer::performDecoderShutdown(bool audio, bool video) { + ALOGV("performDecoderShutdown audio=%d, video=%d", audio, video); - if (mAudioDecoder == NULL && mVideoDecoder == NULL) { + if ((!audio || mAudioDecoder == NULL) + && (!video || mVideoDecoder == NULL)) { return; } mTimeDiscontinuityPending = true; - if (mAudioDecoder != NULL) { + if (mFlushingAudio == NONE && (!audio || mAudioDecoder == NULL)) { + mFlushingAudio = FLUSHED; + } + + if (mFlushingVideo == NONE && (!video || mVideoDecoder == NULL)) { + mFlushingVideo = FLUSHED; + } + + if (audio && mAudioDecoder != NULL) { flushDecoder(true /* audio */, true /* needShutdown */); } - if (mVideoDecoder != NULL) { + if (video && mVideoDecoder != NULL) { flushDecoder(false /* audio */, true /* needShutdown */); } } @@ -1322,6 +1368,19 @@ void NuPlayer::onSourceNotify(const sp &msg) { break; } + case Source::kWhatQueueDecoderShutdown: + { + int32_t audio, video; + CHECK(msg->findInt32("audio", &audio)); + CHECK(msg->findInt32("video", &video)); + + sp reply; + CHECK(msg->findMessage("reply", &reply)); + + queueDecoderShutdown(audio, video, reply); + break; + } + default: TRESPASS(); } @@ -1355,4 +1414,19 @@ void NuPlayer::Source::onMessageReceived(const sp &msg) { TRESPASS(); } +void NuPlayer::queueDecoderShutdown( + bool audio, bool video, const sp &reply) { + ALOGI("queueDecoderShutdown audio=%d, video=%d", audio, video); + + mDeferredActions.push_back( + new ShutdownDecoderAction(audio, video)); + + mDeferredActions.push_back( + new SimpleAction(&NuPlayer::performScanSources)); + + mDeferredActions.push_back(new PostMessageAction(reply)); + + processDeferredActions(); +} + } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h index 50d0462..8b6c8c1 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h @@ -80,6 +80,8 @@ private: struct Action; struct SeekAction; struct SetSurfaceAction; + struct ShutdownDecoderAction; + struct PostMessageAction; struct SimpleAction; enum { @@ -172,13 +174,16 @@ private: void performSeek(int64_t seekTimeUs); void performDecoderFlush(); - void performDecoderShutdown(); + void performDecoderShutdown(bool audio, bool video); void performReset(); void performScanSources(); void performSetSurface(const sp &wrapper); void onSourceNotify(const sp &msg); + void queueDecoderShutdown( + bool audio, bool video, const sp &reply); + DISALLOW_EVIL_CONSTRUCTORS(NuPlayer); }; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp index 404b56f..b543d9d 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp @@ -95,11 +95,11 @@ void NuPlayer::Renderer::flush(bool audio) { } void NuPlayer::Renderer::signalTimeDiscontinuity() { - CHECK(mAudioQueue.empty()); - CHECK(mVideoQueue.empty()); + // CHECK(mAudioQueue.empty()); + // CHECK(mVideoQueue.empty()); mAnchorTimeMediaUs = -1; mAnchorTimeRealUs = -1; - mSyncQueues = mHasAudio && mHasVideo; + mSyncQueues = false; } void NuPlayer::Renderer::pause() { diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h index 1cbf575..81ffd21 100644 --- 
a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h @@ -42,6 +42,7 @@ struct NuPlayer::Source : public AHandler { kWhatVideoSizeChanged, kWhatBufferingStart, kWhatBufferingEnd, + kWhatQueueDecoderShutdown, }; // The provides message is used to notify the player about various diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index d3ac734..a60c320 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -2348,10 +2348,15 @@ void ACodec::sendFormatChange() { ¶ms, sizeof(params)), (status_t)OK); + CHECK_GT(params.nChannels, 0); CHECK(params.nChannels == 1 || params.bInterleaved); CHECK_EQ(params.nBitPerSample, 16u); - CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned); - CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear); + + CHECK_EQ((int)params.eNumData, + (int)OMX_NumericalDataSigned); + + CHECK_EQ((int)params.ePCMMode, + (int)OMX_AUDIO_PCMModeLinear); notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); notify->setInt32("channel-count", params.nChannels); @@ -2361,11 +2366,14 @@ void ACodec::sendFormatChange() { if (mSkipCutBuffer != NULL) { size_t prevbufsize = mSkipCutBuffer->size(); if (prevbufsize != 0) { - ALOGW("Replacing SkipCutBuffer holding %d bytes", prevbufsize); + ALOGW("Replacing SkipCutBuffer holding %d " + "bytes", + prevbufsize); } } - mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay * frameSize, - mEncoderPadding * frameSize); + mSkipCutBuffer = new SkipCutBuffer( + mEncoderDelay * frameSize, + mEncoderPadding * frameSize); } if (mChannelMaskPresent) { diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index acc3abf..9544dbc 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -69,7 +69,6 @@ LOCAL_C_INCLUDES:= \ LOCAL_SHARED_LIBRARIES := \ libbinder \ libcamera_client \ - libcrypto \ libcutils \ libdl \ libdrmframework \ @@ -97,7 +96,6 @@ LOCAL_STATIC_LIBRARIES := \ libvpx \ libwebm \ libstagefright_mpeg2ts \ - libstagefright_httplive \ libstagefright_id3 \ libFLAC \ diff --git a/media/libstagefright/foundation/AHierarchicalStateMachine.cpp b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp index 40c5a3c..f7a00d8 100644 --- a/media/libstagefright/foundation/AHierarchicalStateMachine.cpp +++ b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp @@ -14,6 +14,10 @@ * limitations under the License. 
*/ +//#define LOG_NDEBUG 0 +#define LOG_TAG "AHierarchicalStateMachine" +#include + #include #include diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk index a3fa7a3..85bd492 100644 --- a/media/libstagefright/httplive/Android.mk +++ b/media/libstagefright/httplive/Android.mk @@ -6,16 +6,25 @@ LOCAL_SRC_FILES:= \ LiveDataSource.cpp \ LiveSession.cpp \ M3UParser.cpp \ + PlaylistFetcher.cpp \ LOCAL_C_INCLUDES:= \ $(TOP)/frameworks/av/media/libstagefright \ $(TOP)/frameworks/native/include/media/openmax \ $(TOP)/external/openssl/include +LOCAL_SHARED_LIBRARIES := \ + libcrypto \ + libcutils \ + libmedia \ + libstagefright \ + libstagefright_foundation \ + libutils \ + LOCAL_MODULE:= libstagefright_httplive ifeq ($(TARGET_ARCH),arm) LOCAL_CFLAGS += -Wno-psabi endif -include $(BUILD_STATIC_LIBRARY) +include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp index 505bdb3..fff13eb 100644 --- a/media/libstagefright/httplive/LiveSession.cpp +++ b/media/libstagefright/httplive/LiveSession.cpp @@ -18,12 +18,13 @@ #define LOG_TAG "LiveSession" #include -#include "include/LiveSession.h" +#include "LiveSession.h" -#include "LiveDataSource.h" +#include "M3UParser.h" +#include "PlaylistFetcher.h" -#include "include/M3UParser.h" #include "include/HTTPBase.h" +#include "mpeg2ts/AnotherPacketSource.h" #include #include @@ -33,6 +34,8 @@ #include #include #include +#include +#include #include #include @@ -47,37 +50,107 @@ LiveSession::LiveSession( mUIDValid(uidValid), mUID(uid), mInPreparationPhase(true), - mDataSource(new LiveDataSource), mHTTPDataSource( HTTPBase::Create( (mFlags & kFlagIncognito) ? HTTPBase::kFlagIncognito : 0)), mPrevBandwidthIndex(-1), - mLastPlaylistFetchTimeUs(-1), - mSeqNumber(-1), - mSeekTimeUs(-1), - mNumRetries(0), - mStartOfPlayback(true), - mDurationUs(-1), - mDurationFixed(false), - mSeekDone(false), - mDisconnectPending(false), - mMonitorQueueGeneration(0), - mRefreshState(INITIAL_MINIMUM_RELOAD_DELAY) { + mStreamMask(0), + mCheckBandwidthGeneration(0), + mLastDequeuedTimeUs(0ll), + mReconfigurationInProgress(false), + mDisconnectReplyID(0) { if (mUIDValid) { mHTTPDataSource->setUID(mUID); } + + mPacketSources.add( + STREAMTYPE_AUDIO, new AnotherPacketSource(NULL /* meta */)); + + mPacketSources.add( + STREAMTYPE_VIDEO, new AnotherPacketSource(NULL /* meta */)); + + mPacketSources.add( + STREAMTYPE_SUBTITLES, new AnotherPacketSource(NULL /* meta */)); } LiveSession::~LiveSession() { } -sp LiveSession::getDataSource() { - return mDataSource; +status_t LiveSession::dequeueAccessUnit( + StreamType stream, sp *accessUnit) { + if (!(mStreamMask & stream)) { + return UNKNOWN_ERROR; + } + + sp packetSource = mPacketSources.valueFor(stream); + + status_t finalResult; + if (!packetSource->hasBufferAvailable(&finalResult)) { + return finalResult == OK ? 
-EAGAIN : finalResult; + } + + status_t err = packetSource->dequeueAccessUnit(accessUnit); + + const char *streamStr; + switch (stream) { + case STREAMTYPE_AUDIO: + streamStr = "audio"; + break; + case STREAMTYPE_VIDEO: + streamStr = "video"; + break; + case STREAMTYPE_SUBTITLES: + streamStr = "subs"; + break; + default: + TRESPASS(); + } + + if (err == INFO_DISCONTINUITY) { + int32_t type; + CHECK((*accessUnit)->meta()->findInt32("discontinuity", &type)); + + sp extra; + if (!(*accessUnit)->meta()->findMessage("extra", &extra)) { + extra.clear(); + } + + ALOGI("[%s] read discontinuity of type %d, extra = %s", + streamStr, + type, + extra == NULL ? "NULL" : extra->debugString().c_str()); + } else if (err == OK) { + int64_t timeUs; + CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs)); + ALOGV("[%s] read buffer at time %lld us", streamStr, timeUs); + + mLastDequeuedTimeUs = timeUs; + } else { + ALOGI("[%s] encountered error %d", streamStr, err); + } + + return err; +} + +status_t LiveSession::getStreamFormat(StreamType stream, sp *format) { + if (!(mStreamMask & stream)) { + return UNKNOWN_ERROR; + } + + sp packetSource = mPacketSources.valueFor(stream); + + sp meta = packetSource->getFormat(); + + if (meta == NULL) { + return -EAGAIN; + } + + return convertMetaDataToMessage(meta, format); } -void LiveSession::connect( +void LiveSession::connectAsync( const char *url, const KeyedVector *headers) { sp msg = new AMessage(kWhatConnect, id()); msg->setString("url", url); @@ -91,55 +164,184 @@ void LiveSession::connect( msg->post(); } -void LiveSession::disconnect() { - Mutex::Autolock autoLock(mLock); - mDisconnectPending = true; +status_t LiveSession::disconnect() { + sp msg = new AMessage(kWhatDisconnect, id()); - mHTTPDataSource->disconnect(); + sp response; + status_t err = msg->postAndAwaitResponse(&response); - (new AMessage(kWhatDisconnect, id()))->post(); + return err; } -void LiveSession::seekTo(int64_t timeUs) { - Mutex::Autolock autoLock(mLock); - mSeekDone = false; - +status_t LiveSession::seekTo(int64_t timeUs) { sp msg = new AMessage(kWhatSeek, id()); msg->setInt64("timeUs", timeUs); - msg->post(); - while (!mSeekDone) { - mCondition.wait(mLock); - } + sp response; + status_t err = msg->postAndAwaitResponse(&response); + + return err; } void LiveSession::onMessageReceived(const sp &msg) { switch (msg->what()) { case kWhatConnect: + { onConnect(msg); break; + } case kWhatDisconnect: - onDisconnect(); + { + CHECK(msg->senderAwaitsResponse(&mDisconnectReplyID)); + + if (mReconfigurationInProgress) { + break; + } + + finishDisconnect(); break; + } - case kWhatMonitorQueue: + case kWhatSeek: + { + uint32_t replyID; + CHECK(msg->senderAwaitsResponse(&replyID)); + + status_t err = onSeek(msg); + + sp response = new AMessage; + response->setInt32("err", err); + + response->postReply(replyID); + break; + } + + case kWhatFetcherNotify: + { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case PlaylistFetcher::kWhatStarted: + break; + case PlaylistFetcher::kWhatPaused: + case PlaylistFetcher::kWhatStopped: + { + if (what == PlaylistFetcher::kWhatStopped) { + AString uri; + CHECK(msg->findString("uri", &uri)); + mFetcherInfos.removeItem(uri); + } + + if (mContinuation != NULL) { + CHECK_GT(mContinuationCounter, 0); + if (--mContinuationCounter == 0) { + mContinuation->post(); + } + } + break; + } + + case PlaylistFetcher::kWhatDurationUpdate: + { + AString uri; + CHECK(msg->findString("uri", &uri)); + + int64_t durationUs; + 
CHECK(msg->findInt64("durationUs", &durationUs)); + + FetcherInfo *info = &mFetcherInfos.editValueFor(uri); + info->mDurationUs = durationUs; + break; + } + + case PlaylistFetcher::kWhatError: + { + status_t err; + CHECK(msg->findInt32("err", &err)); + + ALOGE("XXX Received error %d from PlaylistFetcher.", err); + + if (mInPreparationPhase) { + postPrepared(err); + } + + mPacketSources.valueFor(STREAMTYPE_AUDIO)->signalEOS(err); + + mPacketSources.valueFor(STREAMTYPE_VIDEO)->signalEOS(err); + + mPacketSources.valueFor( + STREAMTYPE_SUBTITLES)->signalEOS(err); + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); + break; + } + + case PlaylistFetcher::kWhatTemporarilyDoneFetching: + { + AString uri; + CHECK(msg->findString("uri", &uri)); + + FetcherInfo *info = &mFetcherInfos.editValueFor(uri); + info->mIsPrepared = true; + + if (mInPreparationPhase) { + bool allFetchersPrepared = true; + for (size_t i = 0; i < mFetcherInfos.size(); ++i) { + if (!mFetcherInfos.valueAt(i).mIsPrepared) { + allFetchersPrepared = false; + break; + } + } + + if (allFetchersPrepared) { + postPrepared(OK); + } + } + break; + } + + default: + TRESPASS(); + } + + break; + } + + case kWhatCheckBandwidth: { int32_t generation; CHECK(msg->findInt32("generation", &generation)); - if (generation != mMonitorQueueGeneration) { - // Stale event + if (generation != mCheckBandwidthGeneration) { break; } - onMonitorQueue(); + onCheckBandwidth(); break; } - case kWhatSeek: - onSeek(msg); + case kWhatChangeConfiguration2: + { + onChangeConfiguration2(msg); + break; + } + + case kWhatChangeConfiguration3: + { + onChangeConfiguration3(msg); + break; + } + + case kWhatFinishDisconnect2: + { + onFinishDisconnect2(); break; + } default: TRESPASS(); @@ -172,48 +374,127 @@ void LiveSession::onConnect(const sp &msg) { headers = NULL; } +#if 1 ALOGI("onConnect "); +#else + ALOGI("onConnect %s", url.c_str()); +#endif mMasterURL = url; bool dummy; - sp playlist = fetchPlaylist(url.c_str(), &dummy); + mPlaylist = fetchPlaylist(url.c_str(), NULL /* curPlaylistHash */, &dummy); - if (playlist == NULL) { + if (mPlaylist == NULL) { ALOGE("unable to fetch master playlist '%s'.", url.c_str()); - signalEOS(ERROR_IO); + postPrepared(ERROR_IO); return; } - if (playlist->isVariantPlaylist()) { - for (size_t i = 0; i < playlist->size(); ++i) { + // We trust the content provider to make a reasonable choice of preferred + // initial bandwidth by listing it first in the variant playlist. + // At startup we really don't have a good estimate on the available + // network bandwidth since we haven't tranferred any data yet. Once + // we have we can make a better informed choice. + size_t initialBandwidth = 0; + size_t initialBandwidthIndex = 0; + + if (mPlaylist->isVariantPlaylist()) { + for (size_t i = 0; i < mPlaylist->size(); ++i) { BandwidthItem item; + item.mPlaylistIndex = i; + sp meta; - playlist->itemAt(i, &item.mURI, &meta); + AString uri; + mPlaylist->itemAt(i, &uri, &meta); unsigned long bandwidth; CHECK(meta->findInt32("bandwidth", (int32_t *)&item.mBandwidth)); + if (initialBandwidth == 0) { + initialBandwidth = item.mBandwidth; + } + mBandwidthItems.push(item); } CHECK_GT(mBandwidthItems.size(), 0u); mBandwidthItems.sort(SortByBandwidth); + + for (size_t i = 0; i < mBandwidthItems.size(); ++i) { + if (mBandwidthItems.itemAt(i).mBandwidth == initialBandwidth) { + initialBandwidthIndex = i; + break; + } + } + } else { + // dummy item. 
+ BandwidthItem item; + item.mPlaylistIndex = 0; + item.mBandwidth = 0; + mBandwidthItems.push(item); } - postMonitorQueue(); + changeConfiguration(0ll /* timeUs */, initialBandwidthIndex); } -void LiveSession::onDisconnect() { - ALOGI("onDisconnect"); +void LiveSession::finishDisconnect() { + // No reconfiguration is currently pending, make sure none will trigger + // during disconnection either. + cancelCheckBandwidthEvent(); + + for (size_t i = 0; i < mFetcherInfos.size(); ++i) { + mFetcherInfos.valueAt(i).mFetcher->stopAsync(); + } + + sp msg = new AMessage(kWhatFinishDisconnect2, id()); - signalEOS(ERROR_END_OF_STREAM); + mContinuationCounter = mFetcherInfos.size(); + mContinuation = msg; - Mutex::Autolock autoLock(mLock); - mDisconnectPending = false; + if (mContinuationCounter == 0) { + msg->post(); + } +} + +void LiveSession::onFinishDisconnect2() { + mContinuation.clear(); + + mPacketSources.valueFor(STREAMTYPE_AUDIO)->signalEOS(ERROR_END_OF_STREAM); + mPacketSources.valueFor(STREAMTYPE_VIDEO)->signalEOS(ERROR_END_OF_STREAM); + + mPacketSources.valueFor( + STREAMTYPE_SUBTITLES)->signalEOS(ERROR_END_OF_STREAM); + + sp response = new AMessage; + response->setInt32("err", OK); + + response->postReply(mDisconnectReplyID); + mDisconnectReplyID = 0; +} + +sp LiveSession::addFetcher(const char *uri) { + ssize_t index = mFetcherInfos.indexOfKey(uri); + + if (index >= 0) { + return NULL; + } + + sp notify = new AMessage(kWhatFetcherNotify, id()); + notify->setString("uri", uri); + + FetcherInfo info; + info.mFetcher = new PlaylistFetcher(notify, this, uri); + info.mDurationUs = -1ll; + info.mIsPrepared = false; + looper()->registerHandler(info.mFetcher); + + mFetcherInfos.add(uri, info); + + return info.mFetcher; } status_t LiveSession::fetchFile( @@ -229,14 +510,6 @@ status_t LiveSession::fetchFile( && strncasecmp(url, "https://", 8)) { return ERROR_UNSUPPORTED; } else { - { - Mutex::Autolock autoLock(mLock); - - if (mDisconnectPending) { - return ERROR_IO; - } - } - KeyedVector headers = mExtraHeaders; if (range_offset > 0 || range_length >= 0) { headers.add( @@ -315,7 +588,8 @@ status_t LiveSession::fetchFile( return OK; } -sp LiveSession::fetchPlaylist(const char *url, bool *unchanged) { +sp LiveSession::fetchPlaylist( + const char *url, uint8_t *curPlaylistHash, bool *unchanged) { ALOGV("fetchPlaylist '%s'", url); *unchanged = false; @@ -339,13 +613,8 @@ sp LiveSession::fetchPlaylist(const char *url, bool *unchanged) { MD5_Final(hash, &m); - if (mPlaylist != NULL && !memcmp(hash, mPlaylistHash, 16)) { + if (curPlaylistHash != NULL && !memcmp(hash, curPlaylistHash, 16)) { // playlist unchanged - - if (mRefreshState != THIRD_UNCHANGED_RELOAD_ATTEMPT) { - mRefreshState = (RefreshState)(mRefreshState + 1); - } - *unchanged = true; ALOGV("Playlist unchanged, refresh state is now %d", @@ -354,9 +623,9 @@ sp LiveSession::fetchPlaylist(const char *url, bool *unchanged) { return NULL; } - memcpy(mPlaylistHash, hash, sizeof(hash)); - - mRefreshState = INITIAL_MINIMUM_RELOAD_DELAY; + if (curPlaylistHash != NULL) { + memcpy(curPlaylistHash, hash, sizeof(hash)); + } #endif sp playlist = @@ -371,37 +640,6 @@ sp LiveSession::fetchPlaylist(const char *url, bool *unchanged) { return playlist; } -int64_t LiveSession::getSegmentStartTimeUs(int32_t seqNumber) const { - CHECK(mPlaylist != NULL); - - int32_t firstSeqNumberInPlaylist; - if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( - "media-sequence", &firstSeqNumberInPlaylist)) { - firstSeqNumberInPlaylist = 0; - } - - int32_t 
lastSeqNumberInPlaylist = - firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1; - - CHECK_GE(seqNumber, firstSeqNumberInPlaylist); - CHECK_LE(seqNumber, lastSeqNumberInPlaylist); - - int64_t segmentStartUs = 0ll; - for (int32_t index = 0; - index < seqNumber - firstSeqNumberInPlaylist; ++index) { - sp itemMeta; - CHECK(mPlaylist->itemAt( - index, NULL /* uri */, &itemMeta)); - - int64_t itemDurationUs; - CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); - - segmentStartUs += itemDurationUs; - } - - return segmentStartUs; -} - static double uniformRand() { return (double)rand() / RAND_MAX; } @@ -412,36 +650,50 @@ size_t LiveSession::getBandwidthIndex() { } #if 1 - int32_t bandwidthBps; - if (mHTTPDataSource != NULL - && mHTTPDataSource->estimateBandwidth(&bandwidthBps)) { - ALOGV("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f); - } else { - ALOGV("no bandwidth estimate."); - return 0; // Pick the lowest bandwidth stream by default. - } - char value[PROPERTY_VALUE_MAX]; - if (property_get("media.httplive.max-bw", value, NULL)) { + ssize_t index; + if (property_get("media.httplive.bw-index", value, NULL)) { char *end; - long maxBw = strtoul(value, &end, 10); - if (end > value && *end == '\0') { - if (maxBw > 0 && bandwidthBps > maxBw) { - ALOGV("bandwidth capped to %ld bps", maxBw); - bandwidthBps = maxBw; - } + index = strtol(value, &end, 10); + CHECK(end > value && *end == '\0'); + + if (index >= 0 && (size_t)index >= mBandwidthItems.size()) { + index = mBandwidthItems.size() - 1; } } - // Consider only 80% of the available bandwidth usable. - bandwidthBps = (bandwidthBps * 8) / 10; + if (index < 0) { + int32_t bandwidthBps; + if (mHTTPDataSource != NULL + && mHTTPDataSource->estimateBandwidth(&bandwidthBps)) { + ALOGV("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f); + } else { + ALOGV("no bandwidth estimate."); + return 0; // Pick the lowest bandwidth stream by default. + } - // Pick the highest bandwidth stream below or equal to estimated bandwidth. + char value[PROPERTY_VALUE_MAX]; + if (property_get("media.httplive.max-bw", value, NULL)) { + char *end; + long maxBw = strtoul(value, &end, 10); + if (end > value && *end == '\0') { + if (maxBw > 0 && bandwidthBps > maxBw) { + ALOGV("bandwidth capped to %ld bps", maxBw); + bandwidthBps = maxBw; + } + } + } - size_t index = mBandwidthItems.size() - 1; - while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth - > (size_t)bandwidthBps) { - --index; + // Consider only 80% of the available bandwidth usable. + bandwidthBps = (bandwidthBps * 8) / 10; + + // Pick the highest bandwidth stream below or equal to estimated bandwidth. 
+ + index = mBandwidthItems.size() - 1; + while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth + > (size_t)bandwidthBps) { + --index; + } } #elif 0 // Change bandwidth at random() @@ -452,6 +704,8 @@ size_t LiveSession::getBandwidthIndex() { // to lowest) const size_t kMinIndex = 0; + static ssize_t mPrevBandwidthIndex = -1; + size_t index; if (mPrevBandwidthIndex < 0) { index = kMinIndex; @@ -463,6 +717,7 @@ size_t LiveSession::getBandwidthIndex() { index = kMinIndex; } } + mPrevBandwidthIndex = index; #elif 0 // Pick the highest bandwidth stream below or equal to 1.2 Mbit/sec @@ -470,570 +725,381 @@ size_t LiveSession::getBandwidthIndex() { while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth > 1200000) { --index; } +#elif 1 + char value[PROPERTY_VALUE_MAX]; + size_t index; + if (property_get("media.httplive.bw-index", value, NULL)) { + char *end; + index = strtoul(value, &end, 10); + CHECK(end > value && *end == '\0'); + + if (index >= mBandwidthItems.size()) { + index = mBandwidthItems.size() - 1; + } + } else { + index = 0; + } #else size_t index = mBandwidthItems.size() - 1; // Highest bandwidth stream #endif + CHECK_GE(index, 0); + return index; } -bool LiveSession::timeToRefreshPlaylist(int64_t nowUs) const { - if (mPlaylist == NULL) { - CHECK_EQ((int)mRefreshState, (int)INITIAL_MINIMUM_RELOAD_DELAY); - return true; - } - - int32_t targetDurationSecs; - CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)); - - int64_t targetDurationUs = targetDurationSecs * 1000000ll; - - int64_t minPlaylistAgeUs; - - switch (mRefreshState) { - case INITIAL_MINIMUM_RELOAD_DELAY: - { - size_t n = mPlaylist->size(); - if (n > 0) { - sp itemMeta; - CHECK(mPlaylist->itemAt(n - 1, NULL /* uri */, &itemMeta)); - - int64_t itemDurationUs; - CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); - - minPlaylistAgeUs = itemDurationUs; - break; - } - - // fall through - } - - case FIRST_UNCHANGED_RELOAD_ATTEMPT: - { - minPlaylistAgeUs = targetDurationUs / 2; - break; - } - - case SECOND_UNCHANGED_RELOAD_ATTEMPT: - { - minPlaylistAgeUs = (targetDurationUs * 3) / 2; - break; - } - - case THIRD_UNCHANGED_RELOAD_ATTEMPT: - { - minPlaylistAgeUs = targetDurationUs * 3; - break; - } +status_t LiveSession::onSeek(const sp &msg) { + int64_t timeUs; + CHECK(msg->findInt64("timeUs", &timeUs)); - default: - TRESPASS(); - break; + if (!mReconfigurationInProgress) { + changeConfiguration(timeUs, getBandwidthIndex()); } - return mLastPlaylistFetchTimeUs + minPlaylistAgeUs <= nowUs; + return OK; } -void LiveSession::onDownloadNext() { - size_t bandwidthIndex = getBandwidthIndex(); - -rinse_repeat: - int64_t nowUs = ALooper::GetNowUs(); - - if (mLastPlaylistFetchTimeUs < 0 - || (ssize_t)bandwidthIndex != mPrevBandwidthIndex - || (!mPlaylist->isComplete() && timeToRefreshPlaylist(nowUs))) { - AString url; - if (mBandwidthItems.size() > 0) { - url = mBandwidthItems.editItemAt(bandwidthIndex).mURI; - } else { - url = mMasterURL; - } - - if ((ssize_t)bandwidthIndex != mPrevBandwidthIndex) { - // If we switch bandwidths, do not pay any heed to whether - // playlists changed since the last time... - mPlaylist.clear(); - } - - bool unchanged; - sp playlist = fetchPlaylist(url.c_str(), &unchanged); - if (playlist == NULL) { - if (unchanged) { - // We succeeded in fetching the playlist, but it was - // unchanged from the last time we tried. 
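// Illustrative sketch, not part of the patch: the estimate-driven branch of
// getBandwidthIndex() above keeps ~20% headroom on the measured bandwidth and
// then picks the highest variant at or below that budget. Reduced to a
// self-contained form (variant list and numbers are made up):
#include <cstdio>
#include <vector>

static size_t pickBandwidthIndex(const std::vector<long> &sortedBps, long estimateBps) {
    long usableBps = (estimateBps * 8) / 10;   // consider only 80% usable, as above
    size_t index = sortedBps.size() - 1;       // start from the highest variant
    while (index > 0 && sortedBps[index] > usableBps) {
        --index;                               // step down until it fits or we hit the lowest
    }
    return index;
}

int main() {
    std::vector<long> variants = {200000, 600000, 1200000, 2500000};      // sorted ascending, bps
    printf("picked index %zu\n", pickBandwidthIndex(variants, 1500000));  // prints 2 (1.2 Mbps)
    return 0;
}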
- } else { - ALOGE("failed to load playlist at url '%s'", url.c_str()); - signalEOS(ERROR_IO); - - return; - } - } else { - mPlaylist = playlist; - } - - if (!mDurationFixed) { - Mutex::Autolock autoLock(mLock); - - if (!mPlaylist->isComplete() && !mPlaylist->isEvent()) { - mDurationUs = -1; - mDurationFixed = true; - } else { - mDurationUs = 0; - for (size_t i = 0; i < mPlaylist->size(); ++i) { - sp itemMeta; - CHECK(mPlaylist->itemAt( - i, NULL /* uri */, &itemMeta)); - - int64_t itemDurationUs; - CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); - - mDurationUs += itemDurationUs; - } +status_t LiveSession::getDuration(int64_t *durationUs) const { + int64_t maxDurationUs = 0ll; + for (size_t i = 0; i < mFetcherInfos.size(); ++i) { + int64_t fetcherDurationUs = mFetcherInfos.valueAt(i).mDurationUs; - mDurationFixed = mPlaylist->isComplete(); - } + if (fetcherDurationUs >= 0ll && fetcherDurationUs > maxDurationUs) { + maxDurationUs = fetcherDurationUs; } - - mLastPlaylistFetchTimeUs = ALooper::GetNowUs(); } - int32_t firstSeqNumberInPlaylist; - if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( - "media-sequence", &firstSeqNumberInPlaylist)) { - firstSeqNumberInPlaylist = 0; - } + *durationUs = maxDurationUs; - bool seekDiscontinuity = false; - bool explicitDiscontinuity = false; - bool bandwidthChanged = false; - - if (mSeekTimeUs >= 0) { - if (mPlaylist->isComplete() || mPlaylist->isEvent()) { - size_t index = 0; - int64_t segmentStartUs = 0; - while (index < mPlaylist->size()) { - sp itemMeta; - CHECK(mPlaylist->itemAt( - index, NULL /* uri */, &itemMeta)); - - int64_t itemDurationUs; - CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); + return OK; +} - if (mSeekTimeUs < segmentStartUs + itemDurationUs) { - break; - } +bool LiveSession::isSeekable() const { + int64_t durationUs; + return getDuration(&durationUs) == OK && durationUs >= 0; +} - segmentStartUs += itemDurationUs; - ++index; - } +bool LiveSession::hasDynamicDuration() const { + return false; +} - if (index < mPlaylist->size()) { - int32_t newSeqNumber = firstSeqNumberInPlaylist + index; +void LiveSession::changeConfiguration(int64_t timeUs, size_t bandwidthIndex) { + CHECK(!mReconfigurationInProgress); + mReconfigurationInProgress = true; - ALOGI("seeking to seq no %d", newSeqNumber); + mPrevBandwidthIndex = bandwidthIndex; - mSeqNumber = newSeqNumber; + ALOGV("changeConfiguration => timeUs:%lld us, bwIndex:%d", + timeUs, bandwidthIndex); - mDataSource->reset(); + mPlaylist->pickRandomMediaItems(); - // reseting the data source will have had the - // side effect of discarding any previously queued - // bandwidth change discontinuity. - // Therefore we'll need to treat these seek - // discontinuities as involving a bandwidth change - // even if they aren't directly. - seekDiscontinuity = true; - bandwidthChanged = true; - } - } + CHECK_LT(bandwidthIndex, mBandwidthItems.size()); + const BandwidthItem &item = mBandwidthItems.itemAt(bandwidthIndex); - mSeekTimeUs = -1; + uint32_t streamMask = 0; - Mutex::Autolock autoLock(mLock); - mSeekDone = true; - mCondition.broadcast(); + AString audioURI; + if (mPlaylist->getAudioURI(item.mPlaylistIndex, &audioURI)) { + streamMask |= STREAMTYPE_AUDIO; } - const int32_t lastSeqNumberInPlaylist = - firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1; - - if (mSeqNumber < 0) { - if (mPlaylist->isComplete()) { - mSeqNumber = firstSeqNumberInPlaylist; - } else { - // If this is a live session, start 3 segments from the end. 
- mSeqNumber = lastSeqNumberInPlaylist - 3; - if (mSeqNumber < firstSeqNumberInPlaylist) { - mSeqNumber = firstSeqNumberInPlaylist; - } - } + AString videoURI; + if (mPlaylist->getVideoURI(item.mPlaylistIndex, &videoURI)) { + streamMask |= STREAMTYPE_VIDEO; } - if (mSeqNumber < firstSeqNumberInPlaylist - || mSeqNumber > lastSeqNumberInPlaylist) { - if (mPrevBandwidthIndex != (ssize_t)bandwidthIndex) { - // Go back to the previous bandwidth. + AString subtitleURI; + if (mPlaylist->getSubtitleURI(item.mPlaylistIndex, &subtitleURI)) { + streamMask |= STREAMTYPE_SUBTITLES; + } - ALOGI("new bandwidth does not have the sequence number " - "we're looking for, switching back to previous bandwidth"); + // Step 1, stop and discard fetchers that are no longer needed. + // Pause those that we'll reuse. + for (size_t i = 0; i < mFetcherInfos.size(); ++i) { + const AString &uri = mFetcherInfos.keyAt(i); - mLastPlaylistFetchTimeUs = -1; - bandwidthIndex = mPrevBandwidthIndex; - goto rinse_repeat; - } + bool discardFetcher = true; - if (!mPlaylist->isComplete() && mNumRetries < kMaxNumRetries) { - ++mNumRetries; - - if (mSeqNumber > lastSeqNumberInPlaylist) { - mLastPlaylistFetchTimeUs = -1; - postMonitorQueue(3000000ll); - return; + // If we're seeking all current fetchers are discarded. + if (timeUs < 0ll) { + if (((streamMask & STREAMTYPE_AUDIO) && uri == audioURI) + || ((streamMask & STREAMTYPE_VIDEO) && uri == videoURI) + || ((streamMask & STREAMTYPE_SUBTITLES) && uri == subtitleURI)) { + discardFetcher = false; } + } - // we've missed the boat, let's start from the lowest sequence - // number available and signal a discontinuity. - - ALOGI("We've missed the boat, restarting playback."); - mSeqNumber = lastSeqNumberInPlaylist; - explicitDiscontinuity = true; - - // fall through + if (discardFetcher) { + mFetcherInfos.valueAt(i).mFetcher->stopAsync(); } else { - ALOGE("Cannot find sequence number %d in playlist " - "(contains %d - %d)", - mSeqNumber, firstSeqNumberInPlaylist, - firstSeqNumberInPlaylist + mPlaylist->size() - 1); - - signalEOS(ERROR_END_OF_STREAM); - return; + mFetcherInfos.valueAt(i).mFetcher->pauseAsync(); } } - mNumRetries = 0; - - AString uri; - sp itemMeta; - CHECK(mPlaylist->itemAt( - mSeqNumber - firstSeqNumberInPlaylist, - &uri, - &itemMeta)); - - int32_t val; - if (itemMeta->findInt32("discontinuity", &val) && val != 0) { - explicitDiscontinuity = true; + sp msg = new AMessage(kWhatChangeConfiguration2, id()); + msg->setInt32("streamMask", streamMask); + msg->setInt64("timeUs", timeUs); + if (streamMask & STREAMTYPE_AUDIO) { + msg->setString("audioURI", audioURI.c_str()); } - - int64_t range_offset, range_length; - if (!itemMeta->findInt64("range-offset", &range_offset) - || !itemMeta->findInt64("range-length", &range_length)) { - range_offset = 0; - range_length = -1; + if (streamMask & STREAMTYPE_VIDEO) { + msg->setString("videoURI", videoURI.c_str()); } - - ALOGV("fetching segment %d from (%d .. 
%d)", - mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist); - - sp buffer; - status_t err = fetchFile(uri.c_str(), &buffer, range_offset, range_length); - if (err != OK) { - ALOGE("failed to fetch .ts segment at url '%s'", uri.c_str()); - signalEOS(err); - return; + if (streamMask & STREAMTYPE_SUBTITLES) { + msg->setString("subtitleURI", subtitleURI.c_str()); } - CHECK(buffer != NULL); - - err = decryptBuffer(mSeqNumber - firstSeqNumberInPlaylist, buffer); + // Every time a fetcher acknowledges the stopAsync or pauseAsync request + // we'll decrement mContinuationCounter, once it reaches zero, i.e. all + // fetchers have completed their asynchronous operation, we'll post + // mContinuation, which then is handled below in onChangeConfiguration2. + mContinuationCounter = mFetcherInfos.size(); + mContinuation = msg; - if (err != OK) { - ALOGE("decryptBuffer failed w/ error %d", err); - - signalEOS(err); - return; + if (mContinuationCounter == 0) { + msg->post(); } +} - if (buffer->size() == 0 || buffer->data()[0] != 0x47) { - // Not a transport stream??? - - ALOGE("This doesn't look like a transport stream..."); - - mBandwidthItems.removeAt(bandwidthIndex); - - if (mBandwidthItems.isEmpty()) { - signalEOS(ERROR_UNSUPPORTED); - return; - } +void LiveSession::onChangeConfiguration2(const sp &msg) { + mContinuation.clear(); - ALOGI("Retrying with a different bandwidth stream."); + // All fetchers are either suspended or have been removed now. - mLastPlaylistFetchTimeUs = -1; - bandwidthIndex = getBandwidthIndex(); - mPrevBandwidthIndex = bandwidthIndex; - mSeqNumber = -1; + uint32_t streamMask; + CHECK(msg->findInt32("streamMask", (int32_t *)&streamMask)); - goto rinse_repeat; + AString audioURI, videoURI, subtitleURI; + if (streamMask & STREAMTYPE_AUDIO) { + CHECK(msg->findString("audioURI", &audioURI)); + ALOGV("audioURI = '%s'", audioURI.c_str()); } - - if ((size_t)mPrevBandwidthIndex != bandwidthIndex) { - bandwidthChanged = true; + if (streamMask & STREAMTYPE_VIDEO) { + CHECK(msg->findString("videoURI", &videoURI)); + ALOGV("videoURI = '%s'", videoURI.c_str()); } - - if (mPrevBandwidthIndex < 0) { - // Don't signal a bandwidth change at the very beginning of - // playback. - bandwidthChanged = false; + if (streamMask & STREAMTYPE_SUBTITLES) { + CHECK(msg->findString("subtitleURI", &subtitleURI)); + ALOGV("subtitleURI = '%s'", subtitleURI.c_str()); } - if (mStartOfPlayback) { - seekDiscontinuity = true; - mStartOfPlayback = false; + // Determine which decoders to shutdown on the player side, + // a decoder has to be shutdown if either + // 1) its streamtype was active before but now longer isn't. + // or + // 2) its streamtype was already active and still is but the URI + // has changed. + uint32_t changedMask = 0; + if (((mStreamMask & streamMask & STREAMTYPE_AUDIO) + && !(audioURI == mAudioURI)) + || (mStreamMask & ~streamMask & STREAMTYPE_AUDIO)) { + changedMask |= STREAMTYPE_AUDIO; + } + if (((mStreamMask & streamMask & STREAMTYPE_VIDEO) + && !(videoURI == mVideoURI)) + || (mStreamMask & ~streamMask & STREAMTYPE_VIDEO)) { + changedMask |= STREAMTYPE_VIDEO; } - if (seekDiscontinuity || explicitDiscontinuity || bandwidthChanged) { - // Signal discontinuity. - - ALOGI("queueing discontinuity (seek=%d, explicit=%d, bandwidthChanged=%d)", - seekDiscontinuity, explicitDiscontinuity, bandwidthChanged); + if (changedMask == 0) { + // If nothing changed as far as the audio/video decoders + // are concerned we can proceed. 
+ onChangeConfiguration3(msg); + return; + } - sp tmp = new ABuffer(188); - memset(tmp->data(), 0, tmp->size()); + // Something changed, inform the player which will shutdown the + // corresponding decoders and will post the reply once that's done. + // Handling the reply will continue executing below in + // onChangeConfiguration3. + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatStreamsChanged); + notify->setInt32("changedMask", changedMask); - // signal a 'hard' discontinuity for explicit or bandwidthChanged. - uint8_t type = (explicitDiscontinuity || bandwidthChanged) ? 1 : 0; + msg->setWhat(kWhatChangeConfiguration3); + msg->setTarget(id()); - if (mPlaylist->isComplete() || mPlaylist->isEvent()) { - // If this was a live event this made no sense since - // we don't have access to all the segment before the current - // one. - int64_t segmentStartTimeUs = getSegmentStartTimeUs(mSeqNumber); - memcpy(tmp->data() + 2, &segmentStartTimeUs, sizeof(segmentStartTimeUs)); + notify->setMessage("reply", msg); + notify->post(); +} - type |= 2; - } +void LiveSession::onChangeConfiguration3(const sp &msg) { + // All remaining fetchers are still suspended, the player has shutdown + // any decoders that needed it. - tmp->data()[1] = type; + uint32_t streamMask; + CHECK(msg->findInt32("streamMask", (int32_t *)&streamMask)); - mDataSource->queueBuffer(tmp); + AString audioURI, videoURI, subtitleURI; + if (streamMask & STREAMTYPE_AUDIO) { + CHECK(msg->findString("audioURI", &audioURI)); + } + if (streamMask & STREAMTYPE_VIDEO) { + CHECK(msg->findString("videoURI", &videoURI)); + } + if (streamMask & STREAMTYPE_SUBTITLES) { + CHECK(msg->findString("subtitleURI", &subtitleURI)); } - mDataSource->queueBuffer(buffer); + int64_t timeUs; + CHECK(msg->findInt64("timeUs", &timeUs)); - mPrevBandwidthIndex = bandwidthIndex; - ++mSeqNumber; + if (timeUs < 0ll) { + timeUs = mLastDequeuedTimeUs; + } - postMonitorQueue(); -} + mStreamMask = streamMask; + mAudioURI = audioURI; + mVideoURI = videoURI; + mSubtitleURI = subtitleURI; -void LiveSession::signalEOS(status_t err) { - if (mInPreparationPhase && mNotify != NULL) { - sp notify = mNotify->dup(); + // Resume all existing fetchers and assign them packet sources. + for (size_t i = 0; i < mFetcherInfos.size(); ++i) { + const AString &uri = mFetcherInfos.keyAt(i); - notify->setInt32( - "what", - err == ERROR_END_OF_STREAM - ? 
kWhatPrepared : kWhatPreparationFailed); + uint32_t resumeMask = 0; - if (err != ERROR_END_OF_STREAM) { - notify->setInt32("err", err); + sp audioSource; + if ((streamMask & STREAMTYPE_AUDIO) && uri == audioURI) { + audioSource = mPacketSources.valueFor(STREAMTYPE_AUDIO); + resumeMask |= STREAMTYPE_AUDIO; } - notify->post(); - - mInPreparationPhase = false; - } - - mDataSource->queueEOS(err); -} - -void LiveSession::onMonitorQueue() { - if (mSeekTimeUs >= 0 - || mDataSource->countQueuedBuffers() < kMaxNumQueuedFragments) { - onDownloadNext(); - } else { - if (mInPreparationPhase) { - if (mNotify != NULL) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatPrepared); - notify->post(); - } - - mInPreparationPhase = false; + sp videoSource; + if ((streamMask & STREAMTYPE_VIDEO) && uri == videoURI) { + videoSource = mPacketSources.valueFor(STREAMTYPE_VIDEO); + resumeMask |= STREAMTYPE_VIDEO; } - postMonitorQueue(1000000ll); - } -} + sp subtitleSource; + if ((streamMask & STREAMTYPE_SUBTITLES) && uri == subtitleURI) { + subtitleSource = mPacketSources.valueFor(STREAMTYPE_SUBTITLES); + resumeMask |= STREAMTYPE_SUBTITLES; + } -status_t LiveSession::decryptBuffer( - size_t playlistIndex, const sp &buffer) { - sp itemMeta; - bool found = false; - AString method; + CHECK_NE(resumeMask, 0u); - for (ssize_t i = playlistIndex; i >= 0; --i) { - AString uri; - CHECK(mPlaylist->itemAt(i, &uri, &itemMeta)); + ALOGV("resuming fetchers for mask 0x%08x", resumeMask); - if (itemMeta->findString("cipher-method", &method)) { - found = true; - break; - } - } + streamMask &= ~resumeMask; - if (!found) { - method = "NONE"; + mFetcherInfos.valueAt(i).mFetcher->startAsync( + audioSource, videoSource, subtitleSource); } - if (method == "NONE") { - return OK; - } else if (!(method == "AES-128")) { - ALOGE("Unsupported cipher method '%s'", method.c_str()); - return ERROR_UNSUPPORTED; - } + // streamMask now only contains the types that need a new fetcher created. - AString keyURI; - if (!itemMeta->findString("cipher-uri", &keyURI)) { - ALOGE("Missing key uri"); - return ERROR_MALFORMED; + if (streamMask != 0) { + ALOGV("creating new fetchers for mask 0x%08x", streamMask); } - ssize_t index = mAESKeyForURI.indexOfKey(keyURI); - - sp key; - if (index >= 0) { - key = mAESKeyForURI.valueAt(index); - } else { - key = new ABuffer(16); - - sp keySource = - HTTPBase::Create( - (mFlags & kFlagIncognito) - ? HTTPBase::kFlagIncognito - : 0); + while (streamMask != 0) { + StreamType streamType = (StreamType)(streamMask & ~(streamMask - 1)); - if (mUIDValid) { - keySource->setUID(mUID); + AString uri; + switch (streamType) { + case STREAMTYPE_AUDIO: + uri = audioURI; + break; + case STREAMTYPE_VIDEO: + uri = videoURI; + break; + case STREAMTYPE_SUBTITLES: + uri = subtitleURI; + break; + default: + TRESPASS(); } - status_t err = - keySource->connect( - keyURI.c_str(), - mExtraHeaders.isEmpty() ? 
NULL : &mExtraHeaders); - - if (err == OK) { - size_t offset = 0; - while (offset < 16) { - ssize_t n = keySource->readAt( - offset, key->data() + offset, 16 - offset); - if (n <= 0) { - err = ERROR_IO; - break; - } + sp fetcher = addFetcher(uri.c_str()); + CHECK(fetcher != NULL); - offset += n; - } - } + sp audioSource; + if ((streamMask & STREAMTYPE_AUDIO) && uri == audioURI) { + audioSource = mPacketSources.valueFor(STREAMTYPE_AUDIO); + audioSource->clear(); - if (err != OK) { - ALOGE("failed to fetch cipher key from '%s'.", keyURI.c_str()); - return ERROR_IO; + streamMask &= ~STREAMTYPE_AUDIO; } - mAESKeyForURI.add(keyURI, key); - } + sp videoSource; + if ((streamMask & STREAMTYPE_VIDEO) && uri == videoURI) { + videoSource = mPacketSources.valueFor(STREAMTYPE_VIDEO); + videoSource->clear(); - AES_KEY aes_key; - if (AES_set_decrypt_key(key->data(), 128, &aes_key) != 0) { - ALOGE("failed to set AES decryption key."); - return UNKNOWN_ERROR; - } - - unsigned char aes_ivec[16]; - - AString iv; - if (itemMeta->findString("cipher-iv", &iv)) { - if ((!iv.startsWith("0x") && !iv.startsWith("0X")) - || iv.size() != 16 * 2 + 2) { - ALOGE("malformed cipher IV '%s'.", iv.c_str()); - return ERROR_MALFORMED; + streamMask &= ~STREAMTYPE_VIDEO; } - memset(aes_ivec, 0, sizeof(aes_ivec)); - for (size_t i = 0; i < 16; ++i) { - char c1 = tolower(iv.c_str()[2 + 2 * i]); - char c2 = tolower(iv.c_str()[3 + 2 * i]); - if (!isxdigit(c1) || !isxdigit(c2)) { - ALOGE("malformed cipher IV '%s'.", iv.c_str()); - return ERROR_MALFORMED; - } - uint8_t nibble1 = isdigit(c1) ? c1 - '0' : c1 - 'a' + 10; - uint8_t nibble2 = isdigit(c2) ? c2 - '0' : c2 - 'a' + 10; + sp subtitleSource; + if ((streamMask & STREAMTYPE_SUBTITLES) && uri == subtitleURI) { + subtitleSource = mPacketSources.valueFor(STREAMTYPE_SUBTITLES); + subtitleSource->clear(); - aes_ivec[i] = nibble1 << 4 | nibble2; + streamMask &= ~STREAMTYPE_SUBTITLES; } - } else { - memset(aes_ivec, 0, sizeof(aes_ivec)); - aes_ivec[15] = mSeqNumber & 0xff; - aes_ivec[14] = (mSeqNumber >> 8) & 0xff; - aes_ivec[13] = (mSeqNumber >> 16) & 0xff; - aes_ivec[12] = (mSeqNumber >> 24) & 0xff; + + fetcher->startAsync(audioSource, videoSource, subtitleSource, timeUs); } - AES_cbc_encrypt( - buffer->data(), buffer->data(), buffer->size(), - &aes_key, aes_ivec, AES_DECRYPT); + // All fetchers have now been started, the configuration change + // has completed. 
- // hexdump(buffer->data(), buffer->size()); + scheduleCheckBandwidthEvent(); - size_t n = buffer->size(); - CHECK_GT(n, 0u); + ALOGV("XXX configuration change completed."); - size_t pad = buffer->data()[n - 1]; + mReconfigurationInProgress = false; - CHECK_GT(pad, 0u); - CHECK_LE(pad, 16u); - CHECK_GE((size_t)n, pad); - for (size_t i = 0; i < pad; ++i) { - CHECK_EQ((unsigned)buffer->data()[n - 1 - i], pad); + if (mDisconnectReplyID != 0) { + finishDisconnect(); } +} - n -= pad; - - buffer->setRange(buffer->offset(), n); - - return OK; +void LiveSession::scheduleCheckBandwidthEvent() { + sp msg = new AMessage(kWhatCheckBandwidth, id()); + msg->setInt32("generation", mCheckBandwidthGeneration); + msg->post(10000000ll); } -void LiveSession::postMonitorQueue(int64_t delayUs) { - sp msg = new AMessage(kWhatMonitorQueue, id()); - msg->setInt32("generation", ++mMonitorQueueGeneration); - msg->post(delayUs); +void LiveSession::cancelCheckBandwidthEvent() { + ++mCheckBandwidthGeneration; } -void LiveSession::onSeek(const sp &msg) { - int64_t timeUs; - CHECK(msg->findInt64("timeUs", &timeUs)); +void LiveSession::onCheckBandwidth() { + if (mReconfigurationInProgress) { + scheduleCheckBandwidthEvent(); + return; + } + + size_t bandwidthIndex = getBandwidthIndex(); + if (mPrevBandwidthIndex < 0 + || bandwidthIndex != (size_t)mPrevBandwidthIndex) { + changeConfiguration(-1ll /* timeUs */, bandwidthIndex); + } - mSeekTimeUs = timeUs; - postMonitorQueue(); + // Handling the kWhatCheckBandwidth even here does _not_ automatically + // schedule another one on return, only an explicit call to + // scheduleCheckBandwidthEvent will do that. + // This ensures that only one configuration change is ongoing at any + // one time, once that completes it'll schedule another check bandwidth + // event. } -status_t LiveSession::getDuration(int64_t *durationUs) const { - Mutex::Autolock autoLock(mLock); - *durationUs = mDurationUs; +void LiveSession::postPrepared(status_t err) { + CHECK(mInPreparationPhase); - return OK; -} + sp notify = mNotify->dup(); + if (err == OK || err == ERROR_END_OF_STREAM) { + notify->setInt32("what", kWhatPrepared); + } else { + notify->setInt32("what", kWhatPreparationFailed); + notify->setInt32("err", err); + } -bool LiveSession::isSeekable() const { - int64_t durationUs; - return getDuration(&durationUs) == OK && durationUs >= 0; -} + notify->post(); -bool LiveSession::hasDynamicDuration() const { - return !mDurationFixed; + mInPreparationPhase = false; } } // namespace android diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h new file mode 100644 index 0000000..b134725 --- /dev/null +++ b/media/libstagefright/httplive/LiveSession.h @@ -0,0 +1,172 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef LIVE_SESSION_H_ + +#define LIVE_SESSION_H_ + +#include + +#include + +namespace android { + +struct ABuffer; +struct AnotherPacketSource; +struct DataSource; +struct HTTPBase; +struct LiveDataSource; +struct M3UParser; +struct PlaylistFetcher; + +struct LiveSession : public AHandler { + enum Flags { + // Don't log any URLs. + kFlagIncognito = 1, + }; + LiveSession( + const sp ¬ify, + uint32_t flags = 0, bool uidValid = false, uid_t uid = 0); + + enum StreamType { + STREAMTYPE_AUDIO = 1, + STREAMTYPE_VIDEO = 2, + STREAMTYPE_SUBTITLES = 4, + }; + status_t dequeueAccessUnit(StreamType stream, sp *accessUnit); + + status_t getStreamFormat(StreamType stream, sp *format); + + void connectAsync( + const char *url, + const KeyedVector *headers = NULL); + + status_t disconnect(); + + // Blocks until seek is complete. + status_t seekTo(int64_t timeUs); + + status_t getDuration(int64_t *durationUs) const; + + bool isSeekable() const; + bool hasDynamicDuration() const; + + enum { + kWhatStreamsChanged, + kWhatError, + kWhatPrepared, + kWhatPreparationFailed, + }; + +protected: + virtual ~LiveSession(); + + virtual void onMessageReceived(const sp &msg); + +private: + friend struct PlaylistFetcher; + + enum { + kWhatConnect = 'conn', + kWhatDisconnect = 'disc', + kWhatSeek = 'seek', + kWhatFetcherNotify = 'notf', + kWhatCheckBandwidth = 'bndw', + kWhatChangeConfiguration2 = 'chC2', + kWhatChangeConfiguration3 = 'chC3', + kWhatFinishDisconnect2 = 'fin2', + }; + + struct BandwidthItem { + size_t mPlaylistIndex; + unsigned long mBandwidth; + }; + + struct FetcherInfo { + sp mFetcher; + int64_t mDurationUs; + bool mIsPrepared; + }; + + sp mNotify; + uint32_t mFlags; + bool mUIDValid; + uid_t mUID; + + bool mInPreparationPhase; + + sp mHTTPDataSource; + KeyedVector mExtraHeaders; + + AString mMasterURL; + + Vector mBandwidthItems; + ssize_t mPrevBandwidthIndex; + + sp mPlaylist; + + KeyedVector mFetcherInfos; + AString mAudioURI, mVideoURI, mSubtitleURI; + uint32_t mStreamMask; + + KeyedVector > mPacketSources; + + int32_t mCheckBandwidthGeneration; + + size_t mContinuationCounter; + sp mContinuation; + + int64_t mLastDequeuedTimeUs; + + bool mReconfigurationInProgress; + uint32_t mDisconnectReplyID; + + sp addFetcher(const char *uri); + + void onConnect(const sp &msg); + status_t onSeek(const sp &msg); + void onFinishDisconnect2(); + + status_t fetchFile( + const char *url, sp *out, + int64_t range_offset = 0, int64_t range_length = -1); + + sp fetchPlaylist( + const char *url, uint8_t *curPlaylistHash, bool *unchanged); + + size_t getBandwidthIndex(); + + static int SortByBandwidth(const BandwidthItem *, const BandwidthItem *); + + void changeConfiguration(int64_t timeUs, size_t bandwidthIndex); + void onChangeConfiguration2(const sp &msg); + void onChangeConfiguration3(const sp &msg); + + void scheduleCheckBandwidthEvent(); + void cancelCheckBandwidthEvent(); + + void onCheckBandwidth(); + + void finishDisconnect(); + + void postPrepared(status_t err); + + DISALLOW_EVIL_CONSTRUCTORS(LiveSession); +}; + +} // namespace android + +#endif // LIVE_SESSION_H_ diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp index 68bbca2..be66252 100644 --- a/media/libstagefright/httplive/M3UParser.cpp +++ b/media/libstagefright/httplive/M3UParser.cpp @@ -18,14 +18,153 @@ #define LOG_TAG "M3UParser" #include -#include "include/M3UParser.h" +#include "M3UParser.h" +#include #include #include #include namespace android { +struct M3UParser::MediaGroup : public 
RefBase { + enum Type { + TYPE_AUDIO, + TYPE_VIDEO, + TYPE_SUBS, + }; + + enum FlagBits { + FLAG_AUTOSELECT = 1, + FLAG_DEFAULT = 2, + FLAG_FORCED = 4, + FLAG_HAS_LANGUAGE = 8, + FLAG_HAS_URI = 16, + }; + + MediaGroup(Type type); + + Type type() const; + + status_t addMedia( + const char *name, + const char *uri, + const char *language, + uint32_t flags); + + bool getActiveURI(AString *uri) const; + + void pickRandomMediaItems(); + +protected: + virtual ~MediaGroup(); + +private: + struct Media { + AString mName; + AString mURI; + AString mLanguage; + uint32_t mFlags; + }; + + Type mType; + Vector mMediaItems; + + ssize_t mSelectedIndex; + + DISALLOW_EVIL_CONSTRUCTORS(MediaGroup); +}; + +M3UParser::MediaGroup::MediaGroup(Type type) + : mType(type), + mSelectedIndex(-1) { +} + +M3UParser::MediaGroup::~MediaGroup() { +} + +M3UParser::MediaGroup::Type M3UParser::MediaGroup::type() const { + return mType; +} + +status_t M3UParser::MediaGroup::addMedia( + const char *name, + const char *uri, + const char *language, + uint32_t flags) { + mMediaItems.push(); + Media &item = mMediaItems.editItemAt(mMediaItems.size() - 1); + + item.mName = name; + + if (uri) { + item.mURI = uri; + } + + if (language) { + item.mLanguage = language; + } + + item.mFlags = flags; + + return OK; +} + +void M3UParser::MediaGroup::pickRandomMediaItems() { +#if 1 + switch (mType) { + case TYPE_AUDIO: + { + char value[PROPERTY_VALUE_MAX]; + if (property_get("media.httplive.audio-index", value, NULL)) { + char *end; + mSelectedIndex = strtoul(value, &end, 10); + CHECK(end > value && *end == '\0'); + + if (mSelectedIndex >= mMediaItems.size()) { + mSelectedIndex = mMediaItems.size() - 1; + } + } else { + mSelectedIndex = 0; + } + break; + } + + case TYPE_VIDEO: + { + mSelectedIndex = 0; + break; + } + + case TYPE_SUBS: + { + mSelectedIndex = -1; + break; + } + + default: + TRESPASS(); + } +#else + mSelectedIndex = (rand() * mMediaItems.size()) / RAND_MAX; +#endif +} + +bool M3UParser::MediaGroup::getActiveURI(AString *uri) const { + for (size_t i = 0; i < mMediaItems.size(); ++i) { + if (mSelectedIndex >= 0 && i == (size_t)mSelectedIndex) { + const Media &item = mMediaItems.itemAt(i); + + *uri = item.mURI; + return true; + } + } + + return false; +} + +//////////////////////////////////////////////////////////////////////////////// + M3UParser::M3UParser( const char *baseURI, const void *data, size_t size) : mInitCheck(NO_INIT), @@ -92,6 +231,58 @@ bool M3UParser::itemAt(size_t index, AString *uri, sp *meta) { return true; } +void M3UParser::pickRandomMediaItems() { + for (size_t i = 0; i < mMediaGroups.size(); ++i) { + mMediaGroups.valueAt(i)->pickRandomMediaItems(); + } +} + +bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const { + if (!mIsVariantPlaylist) { + *uri = mBaseURI; + + // Assume media without any more specific attribute contains + // audio and video, but no subtitles. + return !strcmp("audio", key) || !strcmp("video", key); + } + + CHECK_LT(index, mItems.size()); + + sp meta = mItems.itemAt(index).mMeta; + + AString groupID; + if (!meta->findString(key, &groupID)) { + *uri = mItems.itemAt(index).mURI; + + // Assume media without any more specific attribute contains + // audio and video, but no subtitles. 
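        // (When the attribute is present it holds the quoted group ID that
        //  parseStreamInf() copies from a variant entry, e.g. a hypothetical
        //      #EXT-X-STREAM-INF:BANDWIDTH=1280000,AUDIO="aac"
        //  line whose "aac" value names an #EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="aac",...
        //  group parsed by parseMedia(); both playlist lines are illustrative only.)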
+ return !strcmp("audio", key) || !strcmp("video", key); + } + + sp group = mMediaGroups.valueFor(groupID); + if (!group->getActiveURI(uri)) { + return false; + } + + if ((*uri).empty()) { + *uri = mItems.itemAt(index).mURI; + } + + return true; +} + +bool M3UParser::getAudioURI(size_t index, AString *uri) const { + return getTypeURI(index, "audio", uri); +} + +bool M3UParser::getVideoURI(size_t index, AString *uri) const { + return getTypeURI(index, "video", uri); +} + +bool M3UParser::getSubtitleURI(size_t index, AString *uri) const { + return getTypeURI(index, "subtitles", uri); +} + static bool MakeURL(const char *baseURL, const char *url, AString *out) { out->clear(); @@ -241,6 +432,8 @@ status_t M3UParser::parse(const void *_data, size_t size) { segmentRangeOffset = offset + length; } + } else if (line.startsWith("#EXT-X-MEDIA")) { + err = parseMedia(line); } if (err != OK) { @@ -322,9 +515,31 @@ status_t M3UParser::parseMetaDataDuration( return OK; } -// static +// Find the next occurence of the character "what" at or after "offset", +// but ignore occurences between quotation marks. +// Return the index of the occurrence or -1 if not found. +static ssize_t FindNextUnquoted( + const AString &line, char what, size_t offset) { + CHECK_NE((int)what, (int)'"'); + + bool quoted = false; + while (offset < line.size()) { + char c = line.c_str()[offset]; + + if (c == '"') { + quoted = !quoted; + } else if (c == what && !quoted) { + return offset; + } + + ++offset; + } + + return -1; +} + status_t M3UParser::parseStreamInf( - const AString &line, sp *meta) { + const AString &line, sp *meta) const { ssize_t colonPos = line.find(":"); if (colonPos < 0) { @@ -334,7 +549,7 @@ status_t M3UParser::parseStreamInf( size_t offset = colonPos + 1; while (offset < line.size()) { - ssize_t end = line.find(",", offset); + ssize_t end = FindNextUnquoted(line, ',', offset); if (end < 0) { end = line.size(); } @@ -371,33 +586,35 @@ status_t M3UParser::parseStreamInf( *meta = new AMessage; } (*meta)->setInt32("bandwidth", x); - } - } + } else if (!strcasecmp("audio", key.c_str()) + || !strcasecmp("video", key.c_str()) + || !strcasecmp("subtitles", key.c_str())) { + if (val.size() < 2 + || val.c_str()[0] != '"' + || val.c_str()[val.size() - 1] != '"') { + ALOGE("Expected quoted string for %s attribute, " + "got '%s' instead.", + key.c_str(), val.c_str()); + + return ERROR_MALFORMED; + } - return OK; -} + AString groupID(val, 1, val.size() - 2); + ssize_t groupIndex = mMediaGroups.indexOfKey(groupID); -// Find the next occurence of the character "what" at or after "offset", -// but ignore occurences between quotation marks. -// Return the index of the occurrence or -1 if not found. 
-static ssize_t FindNextUnquoted( - const AString &line, char what, size_t offset) { - CHECK_NE((int)what, (int)'"'); + if (groupIndex < 0) { + ALOGE("Undefined media group '%s' referenced in stream info.", + groupID.c_str()); - bool quoted = false; - while (offset < line.size()) { - char c = line.c_str()[offset]; + return ERROR_MALFORMED; + } - if (c == '"') { - quoted = !quoted; - } else if (c == what && !quoted) { - return offset; + key.tolower(); + (*meta)->setString(key.c_str(), groupID.c_str()); } - - ++offset; } - return -1; + return OK; } // static @@ -515,6 +732,234 @@ status_t M3UParser::parseByteRange( return OK; } +status_t M3UParser::parseMedia(const AString &line) { + ssize_t colonPos = line.find(":"); + + if (colonPos < 0) { + return ERROR_MALFORMED; + } + + bool haveGroupType = false; + MediaGroup::Type groupType = MediaGroup::TYPE_AUDIO; + + bool haveGroupID = false; + AString groupID; + + bool haveGroupLanguage = false; + AString groupLanguage; + + bool haveGroupName = false; + AString groupName; + + bool haveGroupAutoselect = false; + bool groupAutoselect = false; + + bool haveGroupDefault = false; + bool groupDefault = false; + + bool haveGroupForced = false; + bool groupForced = false; + + bool haveGroupURI = false; + AString groupURI; + + size_t offset = colonPos + 1; + + while (offset < line.size()) { + ssize_t end = FindNextUnquoted(line, ',', offset); + if (end < 0) { + end = line.size(); + } + + AString attr(line, offset, end - offset); + attr.trim(); + + offset = end + 1; + + ssize_t equalPos = attr.find("="); + if (equalPos < 0) { + continue; + } + + AString key(attr, 0, equalPos); + key.trim(); + + AString val(attr, equalPos + 1, attr.size() - equalPos - 1); + val.trim(); + + ALOGV("key=%s value=%s", key.c_str(), val.c_str()); + + if (!strcasecmp("type", key.c_str())) { + if (!strcasecmp("subtitles", val.c_str())) { + groupType = MediaGroup::TYPE_SUBS; + } else if (!strcasecmp("audio", val.c_str())) { + groupType = MediaGroup::TYPE_AUDIO; + } else if (!strcasecmp("video", val.c_str())) { + groupType = MediaGroup::TYPE_VIDEO; + } else { + ALOGE("Invalid media group type '%s'", val.c_str()); + return ERROR_MALFORMED; + } + + haveGroupType = true; + } else if (!strcasecmp("group-id", key.c_str())) { + if (val.size() < 2 + || val.c_str()[0] != '"' + || val.c_str()[val.size() - 1] != '"') { + ALOGE("Expected quoted string for GROUP-ID, got '%s' instead.", + val.c_str()); + + return ERROR_MALFORMED; + } + + groupID.setTo(val, 1, val.size() - 2); + haveGroupID = true; + } else if (!strcasecmp("language", key.c_str())) { + if (val.size() < 2 + || val.c_str()[0] != '"' + || val.c_str()[val.size() - 1] != '"') { + ALOGE("Expected quoted string for LANGUAGE, got '%s' instead.", + val.c_str()); + + return ERROR_MALFORMED; + } + + groupLanguage.setTo(val, 1, val.size() - 2); + haveGroupLanguage = true; + } else if (!strcasecmp("name", key.c_str())) { + if (val.size() < 2 + || val.c_str()[0] != '"' + || val.c_str()[val.size() - 1] != '"') { + ALOGE("Expected quoted string for NAME, got '%s' instead.", + val.c_str()); + + return ERROR_MALFORMED; + } + + groupName.setTo(val, 1, val.size() - 2); + haveGroupName = true; + } else if (!strcasecmp("autoselect", key.c_str())) { + groupAutoselect = false; + if (!strcasecmp("YES", val.c_str())) { + groupAutoselect = true; + } else if (!strcasecmp("NO", val.c_str())) { + groupAutoselect = false; + } else { + ALOGE("Expected YES or NO for AUTOSELECT attribute, " + "got '%s' instead.", + val.c_str()); + + return ERROR_MALFORMED; + } + + 
haveGroupAutoselect = true; + } else if (!strcasecmp("default", key.c_str())) { + groupDefault = false; + if (!strcasecmp("YES", val.c_str())) { + groupDefault = true; + } else if (!strcasecmp("NO", val.c_str())) { + groupDefault = false; + } else { + ALOGE("Expected YES or NO for DEFAULT attribute, " + "got '%s' instead.", + val.c_str()); + + return ERROR_MALFORMED; + } + + haveGroupDefault = true; + } else if (!strcasecmp("forced", key.c_str())) { + groupForced = false; + if (!strcasecmp("YES", val.c_str())) { + groupForced = true; + } else if (!strcasecmp("NO", val.c_str())) { + groupForced = false; + } else { + ALOGE("Expected YES or NO for FORCED attribute, " + "got '%s' instead.", + val.c_str()); + + return ERROR_MALFORMED; + } + + haveGroupForced = true; + } else if (!strcasecmp("uri", key.c_str())) { + if (val.size() < 2 + || val.c_str()[0] != '"' + || val.c_str()[val.size() - 1] != '"') { + ALOGE("Expected quoted string for URI, got '%s' instead.", + val.c_str()); + + return ERROR_MALFORMED; + } + + AString tmp(val, 1, val.size() - 2); + + if (!MakeURL(mBaseURI.c_str(), tmp.c_str(), &groupURI)) { + ALOGI("Failed to make absolute URI from '%s'.", tmp.c_str()); + } + + haveGroupURI = true; + } + } + + if (!haveGroupType || !haveGroupID || !haveGroupName) { + ALOGE("Incomplete EXT-X-MEDIA element."); + return ERROR_MALFORMED; + } + + uint32_t flags = 0; + if (haveGroupAutoselect && groupAutoselect) { + flags |= MediaGroup::FLAG_AUTOSELECT; + } + if (haveGroupDefault && groupDefault) { + flags |= MediaGroup::FLAG_DEFAULT; + } + if (haveGroupForced) { + if (groupType != MediaGroup::TYPE_SUBS) { + ALOGE("The FORCED attribute MUST not be present on anything " + "but SUBS media."); + + return ERROR_MALFORMED; + } + + if (groupForced) { + flags |= MediaGroup::FLAG_FORCED; + } + } + if (haveGroupLanguage) { + flags |= MediaGroup::FLAG_HAS_LANGUAGE; + } + if (haveGroupURI) { + flags |= MediaGroup::FLAG_HAS_URI; + } + + ssize_t groupIndex = mMediaGroups.indexOfKey(groupID); + sp group; + + if (groupIndex < 0) { + group = new MediaGroup(groupType); + mMediaGroups.add(groupID, group); + } else { + group = mMediaGroups.valueAt(groupIndex); + + if (group->type() != groupType) { + ALOGE("Attempt to put media item under group of different type " + "(groupType = %d, item type = %d", + group->type(), + groupType); + + return ERROR_MALFORMED; + } + } + + return group->addMedia( + groupName.c_str(), + haveGroupURI ? groupURI.c_str() : NULL, + haveGroupLanguage ? groupLanguage.c_str() : NULL, + flags); +} + // static status_t M3UParser::ParseInt32(const char *s, int32_t *x) { char *end; diff --git a/media/libstagefright/httplive/M3UParser.h b/media/libstagefright/httplive/M3UParser.h new file mode 100644 index 0000000..abea286 --- /dev/null +++ b/media/libstagefright/httplive/M3UParser.h @@ -0,0 +1,104 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef M3U_PARSER_H_ + +#define M3U_PARSER_H_ + +#include +#include +#include +#include + +namespace android { + +struct M3UParser : public RefBase { + M3UParser(const char *baseURI, const void *data, size_t size); + + status_t initCheck() const; + + bool isExtM3U() const; + bool isVariantPlaylist() const; + bool isComplete() const; + bool isEvent() const; + + sp meta(); + + size_t size(); + bool itemAt(size_t index, AString *uri, sp *meta = NULL); + + void pickRandomMediaItems(); + + bool getAudioURI(size_t index, AString *uri) const; + bool getVideoURI(size_t index, AString *uri) const; + bool getSubtitleURI(size_t index, AString *uri) const; + +protected: + virtual ~M3UParser(); + +private: + struct MediaGroup; + + struct Item { + AString mURI; + sp mMeta; + }; + + status_t mInitCheck; + + AString mBaseURI; + bool mIsExtM3U; + bool mIsVariantPlaylist; + bool mIsComplete; + bool mIsEvent; + + sp mMeta; + Vector mItems; + + // Media groups keyed by group ID. + KeyedVector > mMediaGroups; + + status_t parse(const void *data, size_t size); + + static status_t parseMetaData( + const AString &line, sp *meta, const char *key); + + static status_t parseMetaDataDuration( + const AString &line, sp *meta, const char *key); + + status_t parseStreamInf( + const AString &line, sp *meta) const; + + static status_t parseCipherInfo( + const AString &line, sp *meta, const AString &baseURI); + + static status_t parseByteRange( + const AString &line, uint64_t curOffset, + uint64_t *length, uint64_t *offset); + + status_t parseMedia(const AString &line); + + bool getTypeURI(size_t index, const char *key, AString *uri) const; + + static status_t ParseInt32(const char *s, int32_t *x); + static status_t ParseDouble(const char *s, double *x); + + DISALLOW_EVIL_CONSTRUCTORS(M3UParser); +}; + +} // namespace android + +#endif // M3U_PARSER_H_ diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp new file mode 100644 index 0000000..8ae70b7 --- /dev/null +++ b/media/libstagefright/httplive/PlaylistFetcher.cpp @@ -0,0 +1,969 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "PlaylistFetcher" +#include + +#include "PlaylistFetcher.h" + +#include "LiveDataSource.h" +#include "LiveSession.h" +#include "M3UParser.h" + +#include "include/avc_utils.h" +#include "include/HTTPBase.h" +#include "include/ID3.h" +#include "mpeg2ts/AnotherPacketSource.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace android { + +// static +const int64_t PlaylistFetcher::kMinBufferedDurationUs = 10000000ll; + +PlaylistFetcher::PlaylistFetcher( + const sp ¬ify, + const sp &session, + const char *uri) + : mNotify(notify), + mSession(session), + mURI(uri), + mStreamTypeMask(0), + mStartTimeUs(-1ll), + mLastPlaylistFetchTimeUs(-1ll), + mSeqNumber(-1), + mNumRetries(0), + mStartup(true), + mNextPTSTimeUs(-1ll), + mMonitorQueueGeneration(0), + mRefreshState(INITIAL_MINIMUM_RELOAD_DELAY), + mFirstPTSValid(false), + mAbsoluteTimeAnchorUs(0ll) { + memset(mPlaylistHash, 0, sizeof(mPlaylistHash)); +} + +PlaylistFetcher::~PlaylistFetcher() { +} + +int64_t PlaylistFetcher::getSegmentStartTimeUs(int32_t seqNumber) const { + CHECK(mPlaylist != NULL); + + int32_t firstSeqNumberInPlaylist; + if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( + "media-sequence", &firstSeqNumberInPlaylist)) { + firstSeqNumberInPlaylist = 0; + } + + int32_t lastSeqNumberInPlaylist = + firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1; + + CHECK_GE(seqNumber, firstSeqNumberInPlaylist); + CHECK_LE(seqNumber, lastSeqNumberInPlaylist); + + int64_t segmentStartUs = 0ll; + for (int32_t index = 0; + index < seqNumber - firstSeqNumberInPlaylist; ++index) { + sp itemMeta; + CHECK(mPlaylist->itemAt( + index, NULL /* uri */, &itemMeta)); + + int64_t itemDurationUs; + CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); + + segmentStartUs += itemDurationUs; + } + + return segmentStartUs; +} + +bool PlaylistFetcher::timeToRefreshPlaylist(int64_t nowUs) const { + if (mPlaylist == NULL) { + CHECK_EQ((int)mRefreshState, (int)INITIAL_MINIMUM_RELOAD_DELAY); + return true; + } + + int32_t targetDurationSecs; + CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)); + + int64_t targetDurationUs = targetDurationSecs * 1000000ll; + + int64_t minPlaylistAgeUs; + + switch (mRefreshState) { + case INITIAL_MINIMUM_RELOAD_DELAY: + { + size_t n = mPlaylist->size(); + if (n > 0) { + sp itemMeta; + CHECK(mPlaylist->itemAt(n - 1, NULL /* uri */, &itemMeta)); + + int64_t itemDurationUs; + CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); + + minPlaylistAgeUs = itemDurationUs; + break; + } + + // fall through + } + + case FIRST_UNCHANGED_RELOAD_ATTEMPT: + { + minPlaylistAgeUs = targetDurationUs / 2; + break; + } + + case SECOND_UNCHANGED_RELOAD_ATTEMPT: + { + minPlaylistAgeUs = (targetDurationUs * 3) / 2; + break; + } + + case THIRD_UNCHANGED_RELOAD_ATTEMPT: + { + minPlaylistAgeUs = targetDurationUs * 3; + break; + } + + default: + TRESPASS(); + break; + } + + return mLastPlaylistFetchTimeUs + minPlaylistAgeUs <= nowUs; +} + +status_t PlaylistFetcher::decryptBuffer( + size_t playlistIndex, const sp &buffer) { + sp itemMeta; + bool found = false; + AString method; + + for (ssize_t i = playlistIndex; i >= 0; --i) { + AString uri; + CHECK(mPlaylist->itemAt(i, &uri, &itemMeta)); + + if (itemMeta->findString("cipher-method", &method)) { + found = true; + break; + } + } + + if (!found) { + method = "NONE"; + } + + if (method == "NONE") { + return OK; + } else if 
(!(method == "AES-128")) { + ALOGE("Unsupported cipher method '%s'", method.c_str()); + return ERROR_UNSUPPORTED; + } + + AString keyURI; + if (!itemMeta->findString("cipher-uri", &keyURI)) { + ALOGE("Missing key uri"); + return ERROR_MALFORMED; + } + + ssize_t index = mAESKeyForURI.indexOfKey(keyURI); + + sp key; + if (index >= 0) { + key = mAESKeyForURI.valueAt(index); + } else { + status_t err = mSession->fetchFile(keyURI.c_str(), &key); + + if (err != OK) { + ALOGE("failed to fetch cipher key from '%s'.", keyURI.c_str()); + return ERROR_IO; + } else if (key->size() != 16) { + ALOGE("key file '%s' wasn't 16 bytes in size.", keyURI.c_str()); + return ERROR_MALFORMED; + } + + mAESKeyForURI.add(keyURI, key); + } + + AES_KEY aes_key; + if (AES_set_decrypt_key(key->data(), 128, &aes_key) != 0) { + ALOGE("failed to set AES decryption key."); + return UNKNOWN_ERROR; + } + + unsigned char aes_ivec[16]; + + AString iv; + if (itemMeta->findString("cipher-iv", &iv)) { + if ((!iv.startsWith("0x") && !iv.startsWith("0X")) + || iv.size() != 16 * 2 + 2) { + ALOGE("malformed cipher IV '%s'.", iv.c_str()); + return ERROR_MALFORMED; + } + + memset(aes_ivec, 0, sizeof(aes_ivec)); + for (size_t i = 0; i < 16; ++i) { + char c1 = tolower(iv.c_str()[2 + 2 * i]); + char c2 = tolower(iv.c_str()[3 + 2 * i]); + if (!isxdigit(c1) || !isxdigit(c2)) { + ALOGE("malformed cipher IV '%s'.", iv.c_str()); + return ERROR_MALFORMED; + } + uint8_t nibble1 = isdigit(c1) ? c1 - '0' : c1 - 'a' + 10; + uint8_t nibble2 = isdigit(c2) ? c2 - '0' : c2 - 'a' + 10; + + aes_ivec[i] = nibble1 << 4 | nibble2; + } + } else { + memset(aes_ivec, 0, sizeof(aes_ivec)); + aes_ivec[15] = mSeqNumber & 0xff; + aes_ivec[14] = (mSeqNumber >> 8) & 0xff; + aes_ivec[13] = (mSeqNumber >> 16) & 0xff; + aes_ivec[12] = (mSeqNumber >> 24) & 0xff; + } + + AES_cbc_encrypt( + buffer->data(), buffer->data(), buffer->size(), + &aes_key, aes_ivec, AES_DECRYPT); + + // hexdump(buffer->data(), buffer->size()); + + size_t n = buffer->size(); + CHECK_GT(n, 0u); + + size_t pad = buffer->data()[n - 1]; + + CHECK_GT(pad, 0u); + CHECK_LE(pad, 16u); + CHECK_GE((size_t)n, pad); + for (size_t i = 0; i < pad; ++i) { + CHECK_EQ((unsigned)buffer->data()[n - 1 - i], pad); + } + + n -= pad; + + buffer->setRange(buffer->offset(), n); + + return OK; +} + +void PlaylistFetcher::postMonitorQueue(int64_t delayUs) { + sp msg = new AMessage(kWhatMonitorQueue, id()); + msg->setInt32("generation", mMonitorQueueGeneration); + msg->post(delayUs); +} + +void PlaylistFetcher::cancelMonitorQueue() { + ++mMonitorQueueGeneration; +} + +void PlaylistFetcher::startAsync( + const sp &audioSource, + const sp &videoSource, + const sp &subtitleSource, + int64_t startTimeUs) { + sp msg = new AMessage(kWhatStart, id()); + + uint32_t streamTypeMask = 0ul; + + if (audioSource != NULL) { + msg->setPointer("audioSource", audioSource.get()); + streamTypeMask |= LiveSession::STREAMTYPE_AUDIO; + } + + if (videoSource != NULL) { + msg->setPointer("videoSource", videoSource.get()); + streamTypeMask |= LiveSession::STREAMTYPE_VIDEO; + } + + if (subtitleSource != NULL) { + msg->setPointer("subtitleSource", subtitleSource.get()); + streamTypeMask |= LiveSession::STREAMTYPE_SUBTITLES; + } + + msg->setInt32("streamTypeMask", streamTypeMask); + msg->setInt64("startTimeUs", startTimeUs); + msg->post(); +} + +void PlaylistFetcher::pauseAsync() { + (new AMessage(kWhatPause, id()))->post(); +} + +void PlaylistFetcher::stopAsync() { + (new AMessage(kWhatStop, id()))->post(); +} + +void 
PlaylistFetcher::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatStart: + { + status_t err = onStart(msg); + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatStarted); + notify->setInt32("err", err); + notify->post(); + break; + } + + case kWhatPause: + { + onPause(); + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatPaused); + notify->post(); + break; + } + + case kWhatStop: + { + onStop(); + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatStopped); + notify->post(); + break; + } + + case kWhatMonitorQueue: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mMonitorQueueGeneration) { + // Stale event + break; + } + + onMonitorQueue(); + break; + } + + default: + TRESPASS(); + } +} + +status_t PlaylistFetcher::onStart(const sp &msg) { + mPacketSources.clear(); + + uint32_t streamTypeMask; + CHECK(msg->findInt32("streamTypeMask", (int32_t *)&streamTypeMask)); + + int64_t startTimeUs; + CHECK(msg->findInt64("startTimeUs", &startTimeUs)); + + if (streamTypeMask & LiveSession::STREAMTYPE_AUDIO) { + void *ptr; + CHECK(msg->findPointer("audioSource", &ptr)); + + mPacketSources.add( + LiveSession::STREAMTYPE_AUDIO, + static_cast(ptr)); + } + + if (streamTypeMask & LiveSession::STREAMTYPE_VIDEO) { + void *ptr; + CHECK(msg->findPointer("videoSource", &ptr)); + + mPacketSources.add( + LiveSession::STREAMTYPE_VIDEO, + static_cast(ptr)); + } + + if (streamTypeMask & LiveSession::STREAMTYPE_SUBTITLES) { + void *ptr; + CHECK(msg->findPointer("subtitleSource", &ptr)); + + mPacketSources.add( + LiveSession::STREAMTYPE_SUBTITLES, + static_cast(ptr)); + } + + mStreamTypeMask = streamTypeMask; + mStartTimeUs = startTimeUs; + + if (mStartTimeUs >= 0ll) { + mSeqNumber = -1; + mStartup = true; + } + + postMonitorQueue(); + + return OK; +} + +void PlaylistFetcher::onPause() { + cancelMonitorQueue(); + + mPacketSources.clear(); + mStreamTypeMask = 0; +} + +void PlaylistFetcher::onStop() { + cancelMonitorQueue(); + + for (size_t i = 0; i < mPacketSources.size(); ++i) { + mPacketSources.valueAt(i)->clear(); + } + + mPacketSources.clear(); + mStreamTypeMask = 0; +} + +void PlaylistFetcher::notifyError(status_t err) { + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); +} + +void PlaylistFetcher::queueDiscontinuity( + ATSParser::DiscontinuityType type, const sp &extra) { + for (size_t i = 0; i < mPacketSources.size(); ++i) { + mPacketSources.valueAt(i)->queueDiscontinuity(type, extra); + } +} + +void PlaylistFetcher::onMonitorQueue() { + bool downloadMore = false; + + status_t finalResult; + if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) { + sp packetSource = + mPacketSources.valueFor(LiveSession::STREAMTYPE_SUBTITLES); + + downloadMore = packetSource->hasBufferAvailable(&finalResult); + } else { + bool first = true; + int64_t minBufferedDurationUs = 0ll; + + for (size_t i = 0; i < mPacketSources.size(); ++i) { + if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0) { + continue; + } + + int64_t bufferedDurationUs = + mPacketSources.valueAt(i)->getBufferedDurationUs(&finalResult); + + if (first || bufferedDurationUs < minBufferedDurationUs) { + minBufferedDurationUs = bufferedDurationUs; + first = false; + } + } + + downloadMore = + !first && (minBufferedDurationUs < kMinBufferedDurationUs); + } + + if (finalResult == OK && downloadMore) { + onDownloadNext(); + } else { + // Nothing to do yet, try again in a second. 
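            // (kMinBufferedDurationUs is the 10 second watermark defined near the top
            //  of this file; for audio/video the loop above takes the minimum buffered
            //  duration across the packet sources selected by mStreamTypeMask, so a
            //  single starved stream is enough to keep downloading. Otherwise we land
            //  here, report that fetching is temporarily done, and poll again in a
            //  second.)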
+ + sp msg = mNotify->dup(); + msg->setInt32("what", kWhatTemporarilyDoneFetching); + msg->post(); + + postMonitorQueue(1000000ll); + } +} + +void PlaylistFetcher::onDownloadNext() { + int64_t nowUs = ALooper::GetNowUs(); + + if (mLastPlaylistFetchTimeUs < 0ll + || (!mPlaylist->isComplete() && timeToRefreshPlaylist(nowUs))) { + bool unchanged; + sp playlist = mSession->fetchPlaylist( + mURI.c_str(), mPlaylistHash, &unchanged); + + if (playlist == NULL) { + if (unchanged) { + // We succeeded in fetching the playlist, but it was + // unchanged from the last time we tried. + + if (mRefreshState != THIRD_UNCHANGED_RELOAD_ATTEMPT) { + mRefreshState = (RefreshState)(mRefreshState + 1); + } + } else { + ALOGE("failed to load playlist at url '%s'", mURI.c_str()); + notifyError(ERROR_IO); + return; + } + } else { + mRefreshState = INITIAL_MINIMUM_RELOAD_DELAY; + mPlaylist = playlist; + + if (mPlaylist->isComplete() || mPlaylist->isEvent()) { + updateDuration(); + } + } + + mLastPlaylistFetchTimeUs = ALooper::GetNowUs(); + } + + int32_t firstSeqNumberInPlaylist; + if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( + "media-sequence", &firstSeqNumberInPlaylist)) { + firstSeqNumberInPlaylist = 0; + } + + bool seekDiscontinuity = false; + bool explicitDiscontinuity = false; + + const int32_t lastSeqNumberInPlaylist = + firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1; + + if (mSeqNumber < 0) { + CHECK_GE(mStartTimeUs, 0ll); + + if (mPlaylist->isComplete() || mPlaylist->isEvent()) { + mSeqNumber = getSeqNumberForTime(mStartTimeUs); + } else { + // If this is a live session, start 3 segments from the end. + mSeqNumber = lastSeqNumberInPlaylist - 3; + if (mSeqNumber < firstSeqNumberInPlaylist) { + mSeqNumber = firstSeqNumberInPlaylist; + } + } + + mStartTimeUs = -1ll; + } + + if (mSeqNumber < firstSeqNumberInPlaylist + || mSeqNumber > lastSeqNumberInPlaylist) { + if (!mPlaylist->isComplete() && mNumRetries < kMaxNumRetries) { + ++mNumRetries; + + if (mSeqNumber > lastSeqNumberInPlaylist) { + mLastPlaylistFetchTimeUs = -1; + postMonitorQueue(3000000ll); + return; + } + + // we've missed the boat, let's start from the lowest sequence + // number available and signal a discontinuity. + + ALOGI("We've missed the boat, restarting playback."); + mSeqNumber = lastSeqNumberInPlaylist; + explicitDiscontinuity = true; + + // fall through + } else { + ALOGE("Cannot find sequence number %d in playlist " + "(contains %d - %d)", + mSeqNumber, firstSeqNumberInPlaylist, + firstSeqNumberInPlaylist + mPlaylist->size() - 1); + + notifyError(ERROR_END_OF_STREAM); + return; + } + } + + mNumRetries = 0; + + AString uri; + sp itemMeta; + CHECK(mPlaylist->itemAt( + mSeqNumber - firstSeqNumberInPlaylist, + &uri, + &itemMeta)); + + int32_t val; + if (itemMeta->findInt32("discontinuity", &val) && val != 0) { + explicitDiscontinuity = true; + } + + int64_t range_offset, range_length; + if (!itemMeta->findInt64("range-offset", &range_offset) + || !itemMeta->findInt64("range-length", &range_length)) { + range_offset = 0; + range_length = -1; + } + + ALOGV("fetching segment %d from (%d .. 
%d)", + mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist); + + ALOGV("fetching '%s'", uri.c_str()); + + sp buffer; + status_t err = mSession->fetchFile( + uri.c_str(), &buffer, range_offset, range_length); + + if (err != OK) { + ALOGE("failed to fetch .ts segment at url '%s'", uri.c_str()); + notifyError(err); + return; + } + + CHECK(buffer != NULL); + + err = decryptBuffer(mSeqNumber - firstSeqNumberInPlaylist, buffer); + + if (err != OK) { + ALOGE("decryptBuffer failed w/ error %d", err); + + notifyError(err); + return; + } + + if (mStartup || seekDiscontinuity || explicitDiscontinuity) { + // Signal discontinuity. + + if (mPlaylist->isComplete() || mPlaylist->isEvent()) { + // If this was a live event this made no sense since + // we don't have access to all the segment before the current + // one. + mNextPTSTimeUs = getSegmentStartTimeUs(mSeqNumber); + } + + if (seekDiscontinuity || explicitDiscontinuity) { + ALOGI("queueing discontinuity (seek=%d, explicit=%d)", + seekDiscontinuity, explicitDiscontinuity); + + queueDiscontinuity( + explicitDiscontinuity + ? ATSParser::DISCONTINUITY_FORMATCHANGE + : ATSParser::DISCONTINUITY_SEEK, + NULL /* extra */); + } + } + + err = extractAndQueueAccessUnits(buffer); + + if (err != OK) { + notifyError(err); + return; + } + + ++mSeqNumber; + + postMonitorQueue(); + + mStartup = false; +} + +int32_t PlaylistFetcher::getSeqNumberForTime(int64_t timeUs) const { + int32_t firstSeqNumberInPlaylist; + if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( + "media-sequence", &firstSeqNumberInPlaylist)) { + firstSeqNumberInPlaylist = 0; + } + + size_t index = 0; + int64_t segmentStartUs = 0; + while (index < mPlaylist->size()) { + sp itemMeta; + CHECK(mPlaylist->itemAt( + index, NULL /* uri */, &itemMeta)); + + int64_t itemDurationUs; + CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); + + if (timeUs < segmentStartUs + itemDurationUs) { + break; + } + + segmentStartUs += itemDurationUs; + ++index; + } + + if (index >= mPlaylist->size()) { + index = mPlaylist->size() - 1; + } + + return firstSeqNumberInPlaylist + index; +} + +status_t PlaylistFetcher::extractAndQueueAccessUnits( + const sp &buffer) { + if (buffer->size() > 0 && buffer->data()[0] == 0x47) { + // Let's assume this is an MPEG2 transport stream. 
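        // (An MPEG-2 transport stream is a sequence of fixed 188 byte packets, each
        //  beginning with the 0x47 sync byte that was just sniffed above, which is
        //  why the segment length must be an exact multiple of 188 and why the
        //  parser below is fed one 188 byte packet at a time.)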
+ + if ((buffer->size() % 188) != 0) { + ALOGE("MPEG2 transport stream is not an even multiple of 188 " + "bytes in length."); + return ERROR_MALFORMED; + } + + if (mTSParser == NULL) { + mTSParser = new ATSParser; + } + + if (mNextPTSTimeUs >= 0ll) { + sp extra = new AMessage; + extra->setInt64(IStreamListener::kKeyMediaTimeUs, mNextPTSTimeUs); + + mTSParser->signalDiscontinuity( + ATSParser::DISCONTINUITY_SEEK, extra); + + mNextPTSTimeUs = -1ll; + } + + size_t offset = 0; + while (offset < buffer->size()) { + status_t err = mTSParser->feedTSPacket(buffer->data() + offset, 188); + + if (err != OK) { + return err; + } + + offset += 188; + } + + for (size_t i = mPacketSources.size(); i-- > 0;) { + sp packetSource = mPacketSources.valueAt(i); + + ATSParser::SourceType type; + switch (mPacketSources.keyAt(i)) { + case LiveSession::STREAMTYPE_VIDEO: + type = ATSParser::VIDEO; + break; + + case LiveSession::STREAMTYPE_AUDIO: + type = ATSParser::AUDIO; + break; + + case LiveSession::STREAMTYPE_SUBTITLES: + { + ALOGE("MPEG2 Transport streams do not contain subtitles."); + return ERROR_MALFORMED; + break; + } + + default: + TRESPASS(); + } + + sp source = + static_cast( + mTSParser->getSource(type).get()); + + if (source == NULL) { + ALOGW("MPEG2 Transport stream does not contain %s data.", + type == ATSParser::VIDEO ? "video" : "audio"); + + mStreamTypeMask &= ~mPacketSources.keyAt(i); + mPacketSources.removeItemsAt(i); + continue; + } + + sp accessUnit; + status_t finalResult; + while (source->hasBufferAvailable(&finalResult) + && source->dequeueAccessUnit(&accessUnit) == OK) { + // Note that we do NOT dequeue any discontinuities. + + packetSource->queueAccessUnit(accessUnit); + } + + if (packetSource->getFormat() == NULL) { + packetSource->setFormat(source->getFormat()); + } + } + + return OK; + } else if (buffer->size() >= 7 && !memcmp("WEBVTT\n", buffer->data(), 7)) { + if (mStreamTypeMask != LiveSession::STREAMTYPE_SUBTITLES) { + ALOGE("This stream only contains subtitles."); + return ERROR_MALFORMED; + } + + const sp packetSource = + mPacketSources.valueFor(LiveSession::STREAMTYPE_SUBTITLES); + + buffer->meta()->setInt64("timeUs", 0ll); + + packetSource->queueAccessUnit(buffer); + return OK; + } + + if (mNextPTSTimeUs >= 0ll) { + mFirstPTSValid = false; + mAbsoluteTimeAnchorUs = mNextPTSTimeUs; + mNextPTSTimeUs = -1ll; + } + + // This better be an ISO 13818-7 (AAC) or ISO 13818-1 (MPEG) audio + // stream prefixed by an ID3 tag. + + bool firstID3Tag = true; + uint64_t PTS = 0; + + for (;;) { + // Make sure to skip all ID3 tags preceding the audio data. + // At least one must be present to provide the PTS timestamp. 
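        // (These elementary audio segments follow the HTTP Live Streaming convention
        //  of prepending an ID3v2 tag whose PRIV frame, owned by
        //  "com.apple.streaming.transportStreamTimestamp", carries the segment's
        //  starting MPEG-2 presentation timestamp as an 8 byte big-endian value on
        //  the 90 kHz clock; the loop below extracts that value and then strips the
        //  tags before the ADTS data is parsed.)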
+ + ID3 id3(buffer->data(), buffer->size(), true /* ignoreV1 */); + if (!id3.isValid()) { + if (firstID3Tag) { + ALOGE("Unable to parse ID3 tag."); + return ERROR_MALFORMED; + } else { + break; + } + } + + if (firstID3Tag) { + bool found = false; + + ID3::Iterator it(id3, "PRIV"); + while (!it.done()) { + size_t length; + const uint8_t *data = it.getData(&length); + + static const char *kMatchName = + "com.apple.streaming.transportStreamTimestamp"; + static const size_t kMatchNameLen = strlen(kMatchName); + + if (length == kMatchNameLen + 1 + 8 + && !strncmp((const char *)data, kMatchName, kMatchNameLen)) { + found = true; + PTS = U64_AT(&data[kMatchNameLen + 1]); + } + + it.next(); + } + + if (!found) { + ALOGE("Unable to extract transportStreamTimestamp from ID3 tag."); + return ERROR_MALFORMED; + } + } + + // skip the ID3 tag + buffer->setRange( + buffer->offset() + id3.rawSize(), buffer->size() - id3.rawSize()); + + firstID3Tag = false; + } + + if (!mFirstPTSValid) { + mFirstPTSValid = true; + mFirstPTS = PTS; + } + PTS -= mFirstPTS; + + int64_t timeUs = (PTS * 100ll) / 9ll + mAbsoluteTimeAnchorUs; + + if (mStreamTypeMask != LiveSession::STREAMTYPE_AUDIO) { + ALOGW("This stream only contains audio data!"); + + mStreamTypeMask &= LiveSession::STREAMTYPE_AUDIO; + + if (mStreamTypeMask == 0) { + return OK; + } + } + + sp packetSource = + mPacketSources.valueFor(LiveSession::STREAMTYPE_AUDIO); + + if (packetSource->getFormat() == NULL && buffer->size() >= 7) { + ABitReader bits(buffer->data(), buffer->size()); + + // adts_fixed_header + + CHECK_EQ(bits.getBits(12), 0xfffu); + bits.skipBits(3); // ID, layer + bool protection_absent = bits.getBits(1) != 0; + + unsigned profile = bits.getBits(2); + CHECK_NE(profile, 3u); + unsigned sampling_freq_index = bits.getBits(4); + bits.getBits(1); // private_bit + unsigned channel_configuration = bits.getBits(3); + CHECK_NE(channel_configuration, 0u); + bits.skipBits(2); // original_copy, home + + sp meta = MakeAACCodecSpecificData( + profile, sampling_freq_index, channel_configuration); + + meta->setInt32(kKeyIsADTS, true); + + packetSource->setFormat(meta); + } + + int64_t numSamples = 0ll; + int32_t sampleRate; + CHECK(packetSource->getFormat()->findInt32(kKeySampleRate, &sampleRate)); + + size_t offset = 0; + while (offset < buffer->size()) { + const uint8_t *adtsHeader = buffer->data() + offset; + CHECK_LT(offset + 5, buffer->size()); + + unsigned aac_frame_length = + ((adtsHeader[3] & 3) << 11) + | (adtsHeader[4] << 3) + | (adtsHeader[5] >> 5); + + CHECK_LE(offset + aac_frame_length, buffer->size()); + + sp unit = new ABuffer(aac_frame_length); + memcpy(unit->data(), adtsHeader, aac_frame_length); + + int64_t unitTimeUs = timeUs + numSamples * 1000000ll / sampleRate; + unit->meta()->setInt64("timeUs", unitTimeUs); + + // Each AAC frame encodes 1024 samples. 
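        // (So at a 48 kHz sample rate each frame advances unitTimeUs by
        //  1024 * 1000000 / 48000 ~= 21333 us, and at 44.1 kHz by roughly 23220 us.)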
+ numSamples += 1024; + + packetSource->queueAccessUnit(unit); + + offset += aac_frame_length; + } + + return OK; +} + +void PlaylistFetcher::updateDuration() { + int64_t durationUs = 0ll; + for (size_t index = 0; index < mPlaylist->size(); ++index) { + sp itemMeta; + CHECK(mPlaylist->itemAt( + index, NULL /* uri */, &itemMeta)); + + int64_t itemDurationUs; + CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); + + durationUs += itemDurationUs; + } + + sp msg = mNotify->dup(); + msg->setInt32("what", kWhatDurationUpdate); + msg->setInt64("durationUs", durationUs); + msg->post(); +} + +} // namespace android diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h new file mode 100644 index 0000000..5a2b901 --- /dev/null +++ b/media/libstagefright/httplive/PlaylistFetcher.h @@ -0,0 +1,155 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef PLAYLIST_FETCHER_H_ + +#define PLAYLIST_FETCHER_H_ + +#include + +#include "mpeg2ts/ATSParser.h" +#include "LiveSession.h" + +namespace android { + +struct ABuffer; +struct AnotherPacketSource; +struct DataSource; +struct HTTPBase; +struct LiveDataSource; +struct M3UParser; +struct String8; + +struct PlaylistFetcher : public AHandler { + enum { + kWhatStarted, + kWhatPaused, + kWhatStopped, + kWhatError, + kWhatDurationUpdate, + kWhatTemporarilyDoneFetching, + kWhatPrepared, + kWhatPreparationFailed, + }; + + PlaylistFetcher( + const sp ¬ify, + const sp &session, + const char *uri); + + sp getDataSource(); + + void startAsync( + const sp &audioSource, + const sp &videoSource, + const sp &subtitleSource, + int64_t startTimeUs = -1ll); + + void pauseAsync(); + + void stopAsync(); + +protected: + virtual ~PlaylistFetcher(); + virtual void onMessageReceived(const sp &msg); + +private: + enum { + kMaxNumRetries = 5, + }; + + enum { + kWhatStart = 'strt', + kWhatPause = 'paus', + kWhatStop = 'stop', + kWhatMonitorQueue = 'moni', + }; + + static const int64_t kMinBufferedDurationUs; + + sp mNotify; + sp mSession; + AString mURI; + + uint32_t mStreamTypeMask; + int64_t mStartTimeUs; + + KeyedVector > + mPacketSources; + + KeyedVector > mAESKeyForURI; + + int64_t mLastPlaylistFetchTimeUs; + sp mPlaylist; + int32_t mSeqNumber; + int32_t mNumRetries; + bool mStartup; + int64_t mNextPTSTimeUs; + + int32_t mMonitorQueueGeneration; + + enum RefreshState { + INITIAL_MINIMUM_RELOAD_DELAY, + FIRST_UNCHANGED_RELOAD_ATTEMPT, + SECOND_UNCHANGED_RELOAD_ATTEMPT, + THIRD_UNCHANGED_RELOAD_ATTEMPT + }; + RefreshState mRefreshState; + + uint8_t mPlaylistHash[16]; + + sp mTSParser; + + bool mFirstPTSValid; + uint64_t mFirstPTS; + int64_t mAbsoluteTimeAnchorUs; + + status_t decryptBuffer( + size_t playlistIndex, const sp &buffer); + + void postMonitorQueue(int64_t delayUs = 0); + void cancelMonitorQueue(); + + bool timeToRefreshPlaylist(int64_t nowUs) const; + + // Returns the media time in us of the segment specified by seqNumber. 
+ // This is computed by summing the durations of all segments before it. + int64_t getSegmentStartTimeUs(int32_t seqNumber) const; + + status_t onStart(const sp &msg); + void onPause(); + void onStop(); + void onMonitorQueue(); + void onDownloadNext(); + + status_t extractAndQueueAccessUnits(const sp &buffer); + + void notifyError(status_t err); + + void queueDiscontinuity( + ATSParser::DiscontinuityType type, const sp &extra); + + int32_t getSeqNumberForTime(int64_t timeUs) const; + + void updateDuration(); + + DISALLOW_EVIL_CONSTRUCTORS(PlaylistFetcher); +}; + +} // namespace android + +#endif // PLAYLIST_FETCHER_H_ + diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp index 22c2f5a..8d3013b 100644 --- a/media/libstagefright/id3/ID3.cpp +++ b/media/libstagefright/id3/ID3.cpp @@ -30,12 +30,55 @@ namespace android { static const size_t kMaxMetadataSize = 3 * 1024 * 1024; +struct MemorySource : public DataSource { + MemorySource(const uint8_t *data, size_t size) + : mData(data), + mSize(size) { + } + + virtual status_t initCheck() const { + return OK; + } + + virtual ssize_t readAt(off64_t offset, void *data, size_t size) { + off64_t available = (offset >= mSize) ? 0ll : mSize - offset; + + size_t copy = (available > size) ? size : available; + memcpy(data, mData + offset, copy); + + return copy; + } + +private: + const uint8_t *mData; + size_t mSize; + + DISALLOW_EVIL_CONSTRUCTORS(MemorySource); +}; + ID3::ID3(const sp &source, bool ignoreV1) : mIsValid(false), mData(NULL), mSize(0), mFirstFrameOffset(0), - mVersion(ID3_UNKNOWN) { + mVersion(ID3_UNKNOWN), + mRawSize(0) { + mIsValid = parseV2(source); + + if (!mIsValid && !ignoreV1) { + mIsValid = parseV1(source); + } +} + +ID3::ID3(const uint8_t *data, size_t size, bool ignoreV1) + : mIsValid(false), + mData(NULL), + mSize(0), + mFirstFrameOffset(0), + mVersion(ID3_UNKNOWN), + mRawSize(0) { + sp source = new MemorySource(data, size); + mIsValid = parseV2(source); if (!mIsValid && !ignoreV1) { @@ -140,6 +183,7 @@ struct id3_header { } mSize = size; + mRawSize = mSize + sizeof(header); if (source->readAt(sizeof(header), mData, mSize) != (ssize_t)mSize) { free(mData); @@ -505,7 +549,7 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const { int32_t i = n - 4; while(--i >= 0 && *++frameData != 0) ; int skipped = (frameData - mFrameData); - if (skipped >= n) { + if (skipped >= (int)n) { return; } n -= skipped; diff --git a/media/libstagefright/include/ID3.h b/media/libstagefright/include/ID3.h index 3028f56..cca83ab 100644 --- a/media/libstagefright/include/ID3.h +++ b/media/libstagefright/include/ID3.h @@ -36,6 +36,7 @@ struct ID3 { }; ID3(const sp &source, bool ignoreV1 = false); + ID3(const uint8_t *data, size_t size, bool ignoreV1 = false); ~ID3(); bool isValid() const; @@ -71,6 +72,8 @@ struct ID3 { Iterator &operator=(const Iterator &); }; + size_t rawSize() const { return mRawSize; } + private: bool mIsValid; uint8_t *mData; @@ -78,6 +81,10 @@ private: size_t mFirstFrameOffset; Version mVersion; + // size of the ID3 tag including header before any unsynchronization. 
+ // only valid for IDV2+ + size_t mRawSize; + bool parseV1(const sp &source); bool parseV2(const sp &source); void removeUnsynchronization(); diff --git a/media/libstagefright/include/LiveSession.h b/media/libstagefright/include/LiveSession.h deleted file mode 100644 index db44a33..0000000 --- a/media/libstagefright/include/LiveSession.h +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright (C) 2010 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef LIVE_SESSION_H_ - -#define LIVE_SESSION_H_ - -#include - -#include - -namespace android { - -struct ABuffer; -struct DataSource; -struct LiveDataSource; -struct M3UParser; -struct HTTPBase; - -struct LiveSession : public AHandler { - enum Flags { - // Don't log any URLs. - kFlagIncognito = 1, - }; - LiveSession( - const sp ¬ify, - uint32_t flags = 0, bool uidValid = false, uid_t uid = 0); - - sp getDataSource(); - - void connect( - const char *url, - const KeyedVector *headers = NULL); - - void disconnect(); - - // Blocks until seek is complete. - void seekTo(int64_t timeUs); - - status_t getDuration(int64_t *durationUs) const; - - bool isSeekable() const; - bool hasDynamicDuration() const; - - // Posted notification's "what" field will carry one of the following: - enum { - kWhatPrepared, - kWhatPreparationFailed, - }; - -protected: - virtual ~LiveSession(); - - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kMaxNumQueuedFragments = 3, - kMaxNumRetries = 5, - }; - - enum { - kWhatConnect = 'conn', - kWhatDisconnect = 'disc', - kWhatMonitorQueue = 'moni', - kWhatSeek = 'seek', - }; - - struct BandwidthItem { - AString mURI; - unsigned long mBandwidth; - }; - - sp mNotify; - uint32_t mFlags; - bool mUIDValid; - uid_t mUID; - - bool mInPreparationPhase; - - sp mDataSource; - - sp mHTTPDataSource; - - AString mMasterURL; - KeyedVector mExtraHeaders; - - Vector mBandwidthItems; - - KeyedVector > mAESKeyForURI; - - ssize_t mPrevBandwidthIndex; - int64_t mLastPlaylistFetchTimeUs; - sp mPlaylist; - int32_t mSeqNumber; - int64_t mSeekTimeUs; - int32_t mNumRetries; - bool mStartOfPlayback; - - mutable Mutex mLock; - Condition mCondition; - int64_t mDurationUs; - bool mDurationFixed; // Duration has been determined once and for all. 
- bool mSeekDone; - bool mDisconnectPending; - - int32_t mMonitorQueueGeneration; - - enum RefreshState { - INITIAL_MINIMUM_RELOAD_DELAY, - FIRST_UNCHANGED_RELOAD_ATTEMPT, - SECOND_UNCHANGED_RELOAD_ATTEMPT, - THIRD_UNCHANGED_RELOAD_ATTEMPT - }; - RefreshState mRefreshState; - - uint8_t mPlaylistHash[16]; - - void onConnect(const sp &msg); - void onDisconnect(); - void onDownloadNext(); - void onMonitorQueue(); - void onSeek(const sp &msg); - - status_t fetchFile( - const char *url, sp *out, - int64_t range_offset = 0, int64_t range_length = -1); - - sp fetchPlaylist(const char *url, bool *unchanged); - size_t getBandwidthIndex(); - - status_t decryptBuffer( - size_t playlistIndex, const sp &buffer); - - void postMonitorQueue(int64_t delayUs = 0); - - bool timeToRefreshPlaylist(int64_t nowUs) const; - - static int SortByBandwidth(const BandwidthItem *, const BandwidthItem *); - - // Returns the media time in us of the segment specified by seqNumber. - // This is computed by summing the durations of all segments before it. - int64_t getSegmentStartTimeUs(int32_t seqNumber) const; - - void signalEOS(status_t err); - - DISALLOW_EVIL_CONSTRUCTORS(LiveSession); -}; - -} // namespace android - -#endif // LIVE_SESSION_H_ diff --git a/media/libstagefright/include/M3UParser.h b/media/libstagefright/include/M3UParser.h deleted file mode 100644 index 2d2f50f..0000000 --- a/media/libstagefright/include/M3UParser.h +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (C) 2010 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef M3U_PARSER_H_ - -#define M3U_PARSER_H_ - -#include -#include -#include -#include - -namespace android { - -struct M3UParser : public RefBase { - M3UParser(const char *baseURI, const void *data, size_t size); - - status_t initCheck() const; - - bool isExtM3U() const; - bool isVariantPlaylist() const; - bool isComplete() const; - bool isEvent() const; - - sp meta(); - - size_t size(); - bool itemAt(size_t index, AString *uri, sp *meta = NULL); - -protected: - virtual ~M3UParser(); - -private: - struct Item { - AString mURI; - sp mMeta; - }; - - status_t mInitCheck; - - AString mBaseURI; - bool mIsExtM3U; - bool mIsVariantPlaylist; - bool mIsComplete; - bool mIsEvent; - - sp mMeta; - Vector mItems; - - status_t parse(const void *data, size_t size); - - static status_t parseMetaData( - const AString &line, sp *meta, const char *key); - - static status_t parseMetaDataDuration( - const AString &line, sp *meta, const char *key); - - static status_t parseStreamInf( - const AString &line, sp *meta); - - static status_t parseCipherInfo( - const AString &line, sp *meta, const AString &baseURI); - - static status_t parseByteRange( - const AString &line, uint64_t curOffset, - uint64_t *length, uint64_t *offset); - - static status_t ParseInt32(const char *s, int32_t *x); - static status_t ParseDouble(const char *s, double *x); - - DISALLOW_EVIL_CONSTRUCTORS(M3UParser); -}; - -} // namespace android - -#endif // M3U_PARSER_H_ diff --git a/media/libstagefright/include/MPEG2TSExtractor.h b/media/libstagefright/include/MPEG2TSExtractor.h index fe74a42..c5e86a6 100644 --- a/media/libstagefright/include/MPEG2TSExtractor.h +++ b/media/libstagefright/include/MPEG2TSExtractor.h @@ -31,7 +31,6 @@ struct ATSParser; struct DataSource; struct MPEG2TSSource; struct String8; -struct LiveSession; struct MPEG2TSExtractor : public MediaExtractor { MPEG2TSExtractor(const sp &source); @@ -44,16 +43,12 @@ struct MPEG2TSExtractor : public MediaExtractor { virtual uint32_t flags() const; - void setLiveSession(const sp &liveSession); - void seekTo(int64_t seekTimeUs); - private: friend struct MPEG2TSSource; mutable Mutex mLock; sp mDataSource; - sp mLiveSession; sp mParser; diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp index 3de3a61..3153c8b 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp @@ -32,9 +32,22 @@ const int64_t kNearEOSMarkUs = 2000000ll; // 2 secs AnotherPacketSource::AnotherPacketSource(const sp &meta) : mIsAudio(false), - mFormat(meta), + mFormat(NULL), mLastQueuedTimeUs(0), mEOSResult(OK) { + setFormat(meta); +} + +void AnotherPacketSource::setFormat(const sp &meta) { + CHECK(mFormat == NULL); + + mIsAudio = false; + + if (meta == NULL) { + return; + } + + mFormat = meta; const char *mime; CHECK(meta->findCString(kKeyMIMEType, &mime)); @@ -45,11 +58,6 @@ AnotherPacketSource::AnotherPacketSource(const sp &meta) } } -void AnotherPacketSource::setFormat(const sp &meta) { - CHECK(mFormat == NULL); - mFormat = meta; -} - AnotherPacketSource::~AnotherPacketSource() { } @@ -152,6 +160,15 @@ void AnotherPacketSource::queueAccessUnit(const sp &buffer) { mCondition.signal(); } +void AnotherPacketSource::clear() { + Mutex::Autolock autoLock(mLock); + + mBuffers.clear(); + mEOSResult = OK; + + mFormat = NULL; +} + void AnotherPacketSource::queueDiscontinuity( ATSParser::DiscontinuityType type, const sp &extra) { diff --git 
a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h index 1db4068..e16cf78 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h @@ -41,6 +41,8 @@ struct AnotherPacketSource : public MediaSource { virtual status_t read( MediaBuffer **buffer, const ReadOptions *options = NULL); + void clear(); + bool hasBufferAvailable(status_t *finalResult); // Returns the difference between the last and the first queued diff --git a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp index e1589b4..d449c34 100644 --- a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp +++ b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp @@ -19,7 +19,6 @@ #include #include "include/MPEG2TSExtractor.h" -#include "include/LiveSession.h" #include "include/NuCachedSource2.h" #include @@ -79,15 +78,7 @@ status_t MPEG2TSSource::stop() { } sp MPEG2TSSource::getFormat() { - sp meta = mImpl->getFormat(); - - int64_t durationUs; - if (mExtractor->mLiveSession != NULL - && mExtractor->mLiveSession->getDuration(&durationUs) == OK) { - meta->setInt64(kKeyDuration, durationUs); - } - - return meta; + return mImpl->getFormat(); } status_t MPEG2TSSource::read( @@ -97,7 +88,7 @@ status_t MPEG2TSSource::read( int64_t seekTimeUs; ReadOptions::SeekMode seekMode; if (mSeekable && options && options->getSeekTo(&seekTimeUs, &seekMode)) { - mExtractor->seekTo(seekTimeUs); + return ERROR_UNSUPPORTED; } status_t finalResult; @@ -216,32 +207,8 @@ status_t MPEG2TSExtractor::feedMore() { return mParser->feedTSPacket(packet, kTSPacketSize); } -void MPEG2TSExtractor::setLiveSession(const sp &liveSession) { - Mutex::Autolock autoLock(mLock); - - mLiveSession = liveSession; -} - -void MPEG2TSExtractor::seekTo(int64_t seekTimeUs) { - Mutex::Autolock autoLock(mLock); - - if (mLiveSession == NULL) { - return; - } - - mLiveSession->seekTo(seekTimeUs); -} - uint32_t MPEG2TSExtractor::flags() const { - Mutex::Autolock autoLock(mLock); - - uint32_t flags = CAN_PAUSE; - - if (mLiveSession != NULL && mLiveSession->isSeekable()) { - flags |= CAN_SEEK_FORWARD | CAN_SEEK_BACKWARD | CAN_SEEK; - } - - return flags; + return CAN_PAUSE; } //////////////////////////////////////////////////////////////////////////////// -- cgit v1.1 From 2799d743ee2ae5a25fe869a7f9c052acc029559f Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 30 May 2013 14:33:29 -0700 Subject: Use sp instead of raw AudioTrack * This change prepares for the new implementation of AudioTrack client, which will require clients to use only sp, not raw AudioTrack *. A raw delete will cause a race condition during AudioTrack destruction. AudioTrack was made a RefBase by commit b68a91a70bc8d0d18e7404e14443d4e4020b3635 on 2011/11/15, when it was needed by OpenSL ES (for the callback protector). At that time, the only other client that was also converted from AudioTrack * to sp was android.media.AudioTrack JNI in project frameworks/base (file android_media_AudioTrack.cpp). Details: * Use .clear() instead of delete followed by = NULL. * ALOG %p need .get(). * sp<> don't need to be listed in constructor initializer, if initially 0. * Use == 0 for sp<> vs == NULL for raw pointers. * Use if (sp != 0) instead of if (raw). 
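As a rough illustration of the conversion pattern the bullets above describe (schematic code only; the helper names and track variable are illustrative, not taken from this patch):

    #define LOG_TAG "AudioTrackSpExample"
    #include <media/AudioTrack.h>
    #include <utils/Log.h>

    using namespace android;

    // Before this change: a raw pointer that had to be deleted by hand.
    // (Once ~AudioTrack() becomes protected this no longer compiles, which is the point.)
    static void stopAndReleaseRaw(AudioTrack *&track) {
        if (track) {
            track->stop();
            delete track;           // a raw delete can race with an in-flight callback
            track = NULL;
        }
    }

    // After this change: a strong pointer; the object is freed when the last
    // reference goes away, so a callback holding its own sp<> stays safe.
    static void stopAndRelease(sp<AudioTrack> &track) {
        if (track != 0) {           // compare against 0 rather than NULL
            ALOGV("releasing track %p", track.get());   // %p needs .get() on an sp<>
            track->stop();
            track.clear();          // replaces "delete; track = NULL"
        }
    }
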
Change-Id: Ic7cad25795d6e862e112abdc227b6d33afdfce17 --- include/media/AudioTrack.h | 4 ++- include/media/JetPlayer.h | 2 +- include/media/SoundPool.h | 4 +-- include/media/ToneGenerator.h | 4 +-- include/media/stagefright/AudioPlayer.h | 2 +- libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp | 9 ++---- libvideoeditor/lvpp/VideoEditorAudioPlayer.h | 2 +- libvideoeditor/lvpp/VideoEditorPlayer.cpp | 21 ++++++------ libvideoeditor/lvpp/VideoEditorPlayer.h | 2 +- media/libmedia/JetPlayer.cpp | 6 ++-- media/libmedia/SoundPool.cpp | 14 ++++---- media/libmedia/ToneGenerator.cpp | 22 ++++--------- media/libmediaplayerservice/MediaPlayerService.cpp | 37 +++++++++------------- media/libmediaplayerservice/MediaPlayerService.h | 6 ++-- media/libstagefright/AudioPlayer.cpp | 11 +++---- 15 files changed, 60 insertions(+), 86 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 0b616e3..8dbc9ee 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -191,7 +191,9 @@ public: /* Terminates the AudioTrack and unregisters it from AudioFlinger. * Also destroys all resources associated with the AudioTrack. */ - ~AudioTrack(); +protected: + virtual ~AudioTrack(); +public: /* Initialize an uninitialized AudioTrack. * Returned status (from utils/Errors.h) can be: diff --git a/include/media/JetPlayer.h b/include/media/JetPlayer.h index 0616bf0..388f767 100644 --- a/include/media/JetPlayer.h +++ b/include/media/JetPlayer.h @@ -88,7 +88,7 @@ private: EAS_DATA_HANDLE mEasData; EAS_FILE_LOCATOR mEasJetFileLoc; EAS_PCM* mAudioBuffer;// EAS renders the MIDI data into this buffer, - AudioTrack* mAudioTrack; // and we play it in this audio track + sp mAudioTrack; // and we play it in this audio track int mTrackBufferSize; S_JET_STATUS mJetStatus; S_JET_STATUS mPreviousJetStatus; diff --git a/include/media/SoundPool.h b/include/media/SoundPool.h index 7bf3069..9e5654f 100644 --- a/include/media/SoundPool.h +++ b/include/media/SoundPool.h @@ -118,7 +118,7 @@ protected: class SoundChannel : public SoundEvent { public: enum state { IDLE, RESUMING, STOPPING, PAUSED, PLAYING }; - SoundChannel() : mAudioTrack(NULL), mState(IDLE), mNumChannels(1), + SoundChannel() : mState(IDLE), mNumChannels(1), mPos(0), mToggle(0), mAutoPaused(false) {} ~SoundChannel(); void init(SoundPool* soundPool); @@ -148,7 +148,7 @@ private: bool doStop_l(); SoundPool* mSoundPool; - AudioTrack* mAudioTrack; + sp mAudioTrack; SoundEvent mNextEvent; Mutex mLock; int mState; diff --git a/include/media/ToneGenerator.h b/include/media/ToneGenerator.h index 2183fbe..98c4332 100644 --- a/include/media/ToneGenerator.h +++ b/include/media/ToneGenerator.h @@ -160,7 +160,7 @@ public: bool isInited() { return (mState == TONE_IDLE)?false:true;} // returns the audio session this ToneGenerator belongs to or 0 if an error occured. - int getSessionId() { return (mpAudioTrack == NULL) ? 0 : mpAudioTrack->getSessionId(); } + int getSessionId() { return (mpAudioTrack == 0) ? 
0 : mpAudioTrack->getSessionId(); } private: @@ -264,7 +264,7 @@ private: unsigned short mLoopCounter; // Current tone loopback count uint32_t mSamplingRate; // AudioFlinger Sampling rate - AudioTrack *mpAudioTrack; // Pointer to audio track used for playback + sp mpAudioTrack; // Pointer to audio track used for playback Mutex mLock; // Mutex to control concurent access to ToneGenerator object from audio callback and application API Mutex mCbkCondLock; // Mutex associated to mWaitCbkCond Condition mWaitCbkCond; // condition enabling interface to wait for audio callback completion after a change is requested diff --git a/include/media/stagefright/AudioPlayer.h b/include/media/stagefright/AudioPlayer.h index 1dc408f..3bf046d 100644 --- a/include/media/stagefright/AudioPlayer.h +++ b/include/media/stagefright/AudioPlayer.h @@ -70,7 +70,7 @@ public: private: friend class VideoEditorAudioPlayer; sp mSource; - AudioTrack *mAudioTrack; + sp mAudioTrack; MediaBuffer *mInputBuffer; diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp index c111ba8..3fa8b87 100755 --- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp +++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp @@ -35,8 +35,7 @@ namespace android { VideoEditorAudioPlayer::VideoEditorAudioPlayer( const sp &audioSink, PreviewPlayer *observer) - : mAudioTrack(NULL), - mInputBuffer(NULL), + : mInputBuffer(NULL), mSampleRate(0), mLatencyUs(0), mFrameSize(0), @@ -111,8 +110,7 @@ void VideoEditorAudioPlayer::clear() { } else { mAudioTrack->stop(); - delete mAudioTrack; - mAudioTrack = NULL; + mAudioTrack.clear(); } // Make sure to release any buffer we hold onto so that the @@ -538,8 +536,7 @@ status_t VideoEditorAudioPlayer::start(bool sourceAlreadyStarted) { 0, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this, 0); if ((err = mAudioTrack->initCheck()) != OK) { - delete mAudioTrack; - mAudioTrack = NULL; + mAudioTrack.clear(); if (mFirstBuffer != NULL) { mFirstBuffer->release(); diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h index 626df39..a5616c1 100755 --- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h +++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h @@ -91,7 +91,7 @@ private: int64_t mBGAudioStoryBoardCurrentMediaVolumeVal; sp mSource; - AudioTrack *mAudioTrack; + sp mAudioTrack; MediaBuffer *mInputBuffer; diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp index 91a4415..4a14b40 100755 --- a/libvideoeditor/lvpp/VideoEditorPlayer.cpp +++ b/libvideoeditor/lvpp/VideoEditorPlayer.cpp @@ -310,7 +310,6 @@ bool VideoEditorPlayer::VeAudioOutput::mIsOnEmulator = false; VideoEditorPlayer::VeAudioOutput::VeAudioOutput() : mCallback(NULL), mCallbackCookie(NULL) { - mTrack = 0; mStreamType = AUDIO_STREAM_MUSIC; mLeftVolume = 1.0; mRightVolume = 1.0; @@ -405,7 +404,7 @@ status_t VideoEditorPlayer::VeAudioOutput::open( } ALOGV("open(%u, %d, %d, %d)", sampleRate, channelCount, format, bufferCount); - if (mTrack) close(); + if (mTrack != 0) close(); uint32_t afSampleRate; size_t afFrameCount; int frameCount; @@ -434,7 +433,7 @@ status_t VideoEditorPlayer::VeAudioOutput::open( } } - AudioTrack *t; + sp t; if (mCallback != NULL) { t = new AudioTrack( mStreamType, @@ -457,7 +456,6 @@ status_t VideoEditorPlayer::VeAudioOutput::open( if ((t == 0) || (t->initCheck() != NO_ERROR)) { ALOGE("Unable to create audio track"); - delete t; return NO_INIT; } @@ -472,7 +470,7 @@ status_t 
VideoEditorPlayer::VeAudioOutput::open( void VideoEditorPlayer::VeAudioOutput::start() { ALOGV("start"); - if (mTrack) { + if (mTrack != 0) { mTrack->setVolume(mLeftVolume, mRightVolume); mTrack->start(); mTrack->getPosition(&mNumFramesWritten); @@ -492,7 +490,7 @@ ssize_t VideoEditorPlayer::VeAudioOutput::write( LOG_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback."); //ALOGV("write(%p, %u)", buffer, size); - if (mTrack) { + if (mTrack != 0) { snoopWrite(buffer, size); ssize_t ret = mTrack->write(buffer, size); mNumFramesWritten += ret / 4; // assume 16 bit stereo @@ -504,26 +502,25 @@ ssize_t VideoEditorPlayer::VeAudioOutput::write( void VideoEditorPlayer::VeAudioOutput::stop() { ALOGV("stop"); - if (mTrack) mTrack->stop(); + if (mTrack != 0) mTrack->stop(); } void VideoEditorPlayer::VeAudioOutput::flush() { ALOGV("flush"); - if (mTrack) mTrack->flush(); + if (mTrack != 0) mTrack->flush(); } void VideoEditorPlayer::VeAudioOutput::pause() { ALOGV("VeAudioOutput::pause"); - if (mTrack) mTrack->pause(); + if (mTrack != 0) mTrack->pause(); } void VideoEditorPlayer::VeAudioOutput::close() { ALOGV("close"); - delete mTrack; - mTrack = 0; + mTrack.clear(); } void VideoEditorPlayer::VeAudioOutput::setVolume(float left, float right) { @@ -531,7 +528,7 @@ void VideoEditorPlayer::VeAudioOutput::setVolume(float left, float right) { ALOGV("setVolume(%f, %f)", left, right); mLeftVolume = left; mRightVolume = right; - if (mTrack) { + if (mTrack != 0) { mTrack->setVolume(left, right); } } diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.h b/libvideoeditor/lvpp/VideoEditorPlayer.h index 77194ab..defc90d 100755 --- a/libvideoeditor/lvpp/VideoEditorPlayer.h +++ b/libvideoeditor/lvpp/VideoEditorPlayer.h @@ -71,7 +71,7 @@ class VideoEditorPlayer : public MediaPlayerInterface { static void CallbackWrapper( int event, void *me, void *info); - AudioTrack* mTrack; + sp mTrack; AudioCallback mCallback; void * mCallbackCookie; audio_stream_type_t mStreamType; diff --git a/media/libmedia/JetPlayer.cpp b/media/libmedia/JetPlayer.cpp index 59e538f..8fe5bb3 100644 --- a/media/libmedia/JetPlayer.cpp +++ b/media/libmedia/JetPlayer.cpp @@ -39,7 +39,6 @@ JetPlayer::JetPlayer(void *javaJetPlayer, int maxTracks, int trackBufferSize) : mMaxTracks(maxTracks), mEasData(NULL), mEasJetFileLoc(NULL), - mAudioTrack(NULL), mTrackBufferSize(trackBufferSize) { ALOGV("JetPlayer constructor"); @@ -140,11 +139,10 @@ int JetPlayer::release() free(mEasJetFileLoc); mEasJetFileLoc = NULL; } - if (mAudioTrack) { + if (mAudioTrack != 0) { mAudioTrack->stop(); mAudioTrack->flush(); - delete mAudioTrack; - mAudioTrack = NULL; + mAudioTrack.clear(); } if (mAudioBuffer) { delete mAudioBuffer; diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index ee70ef7..e1e88ec 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -547,8 +547,8 @@ void SoundChannel::init(SoundPool* soundPool) void SoundChannel::play(const sp& sample, int nextChannelID, float leftVolume, float rightVolume, int priority, int loop, float rate) { - AudioTrack* oldTrack; - AudioTrack* newTrack; + sp oldTrack; + sp newTrack; status_t status; { // scope for the lock @@ -620,7 +620,7 @@ void SoundChannel::play(const sp& sample, int nextChannelID, float leftV ALOGE("Error creating AudioTrack"); goto exit; } - ALOGV("setVolume %p", newTrack); + ALOGV("setVolume %p", newTrack.get()); newTrack->setVolume(leftVolume, rightVolume); newTrack->setLoop(0, frameCount, loop); @@ -643,11 +643,9 @@ void 
SoundChannel::play(const sp& sample, int nextChannelID, float leftV } exit: - ALOGV("delete oldTrack %p", oldTrack); - delete oldTrack; + ALOGV("delete oldTrack %p", oldTrack.get()); if (status != NO_ERROR) { - delete newTrack; - mAudioTrack = NULL; + mAudioTrack.clear(); } } @@ -884,7 +882,7 @@ SoundChannel::~SoundChannel() } // do not call AudioTrack destructor with mLock held as it will wait for the AudioTrack // callback thread to exit which may need to execute process() and acquire the mLock. - delete mAudioTrack; + mAudioTrack.clear(); } void SoundChannel::dump() diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index f55b697..ebe1ba1 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -803,7 +803,6 @@ ToneGenerator::ToneGenerator(audio_stream_type_t streamType, float volume, bool ALOGV("ToneGenerator constructor: streamType=%d, volume=%f", streamType, volume); mState = TONE_IDLE; - mpAudioTrack = NULL; if (AudioSystem::getOutputSamplingRate(&mSamplingRate, streamType) != NO_ERROR) { ALOGE("Unable to marshal AudioFlinger"); @@ -855,10 +854,10 @@ ToneGenerator::ToneGenerator(audio_stream_type_t streamType, float volume, bool ToneGenerator::~ToneGenerator() { ALOGV("ToneGenerator destructor"); - if (mpAudioTrack != NULL) { + if (mpAudioTrack != 0) { stopTone(); - ALOGV("Delete Track: %p", mpAudioTrack); - delete mpAudioTrack; + ALOGV("Delete Track: %p", mpAudioTrack.get()); + mpAudioTrack.clear(); } } @@ -1047,14 +1046,9 @@ void ToneGenerator::stopTone() { //////////////////////////////////////////////////////////////////////////////// bool ToneGenerator::initAudioTrack() { - if (mpAudioTrack) { - delete mpAudioTrack; - mpAudioTrack = NULL; - } - // Open audio track in mono, PCM 16bit, default sampling rate, default buffer size mpAudioTrack = new AudioTrack(); - ALOGV("Create Track: %p", mpAudioTrack); + ALOGV("Create Track: %p", mpAudioTrack.get()); mpAudioTrack->set(mStreamType, 0, // sampleRate @@ -1081,12 +1075,10 @@ bool ToneGenerator::initAudioTrack() { initAudioTrack_exit: + ALOGV("Init failed: %p", mpAudioTrack.get()); + // Cleanup - if (mpAudioTrack != NULL) { - ALOGV("Delete Track I: %p", mpAudioTrack); - delete mpAudioTrack; - mpAudioTrack = NULL; - } + mpAudioTrack.clear(); return false; } diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index e600a3f..fa1ff36 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -1295,8 +1295,6 @@ MediaPlayerService::AudioOutput::AudioOutput(int sessionId) mSessionId(sessionId), mFlags(AUDIO_OUTPUT_FLAG_NONE) { ALOGV("AudioOutput(%d)", sessionId); - mTrack = 0; - mRecycledTrack = 0; mStreamType = AUDIO_STREAM_MUSIC; mLeftVolume = 1.0; mRightVolume = 1.0; @@ -1311,7 +1309,6 @@ MediaPlayerService::AudioOutput::AudioOutput(int sessionId) MediaPlayerService::AudioOutput::~AudioOutput() { close(); - delete mRecycledTrack; delete mCallbackData; } @@ -1422,7 +1419,7 @@ status_t MediaPlayerService::AudioOutput::open( } } - AudioTrack *t; + sp t; CallbackData *newcbd = NULL; if (mCallback != NULL) { newcbd = new CallbackData(this); @@ -1453,13 +1450,12 @@ status_t MediaPlayerService::AudioOutput::open( if ((t == 0) || (t->initCheck() != NO_ERROR)) { ALOGE("Unable to create audio track"); - delete t; delete newcbd; return NO_INIT; } - if (mRecycledTrack) { + if (mRecycledTrack != 0) { // check if the existing track can be reused as-is, or if a new 
track needs to be created. bool reuse = true; @@ -1484,11 +1480,10 @@ status_t MediaPlayerService::AudioOutput::open( ALOGV("chaining to next output"); close(); mTrack = mRecycledTrack; - mRecycledTrack = NULL; + mRecycledTrack.clear(); if (mCallbackData != NULL) { mCallbackData->setOutput(this); } - delete t; delete newcbd; return OK; } @@ -1499,8 +1494,7 @@ status_t MediaPlayerService::AudioOutput::open( mCallbackData->endTrackSwitch(); } mRecycledTrack->flush(); - delete mRecycledTrack; - mRecycledTrack = NULL; + mRecycledTrack.clear(); delete mCallbackData; mCallbackData = NULL; close(); @@ -1533,7 +1527,7 @@ void MediaPlayerService::AudioOutput::start() if (mCallbackData != NULL) { mCallbackData->endTrackSwitch(); } - if (mTrack) { + if (mTrack != 0) { mTrack->setVolume(mLeftVolume, mRightVolume); mTrack->setAuxEffectSendLevel(mSendLevel); mTrack->start(); @@ -1555,7 +1549,7 @@ void MediaPlayerService::AudioOutput::switchToNextOutput() { mNextOutput->mCallbackData = mCallbackData; mCallbackData = NULL; mNextOutput->mRecycledTrack = mTrack; - mTrack = NULL; + mTrack.clear(); mNextOutput->mSampleRateHz = mSampleRateHz; mNextOutput->mMsecsPerFrame = mMsecsPerFrame; mNextOutput->mBytesWritten = mBytesWritten; @@ -1568,7 +1562,7 @@ ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size) LOG_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback."); //ALOGV("write(%p, %u)", buffer, size); - if (mTrack) { + if (mTrack != 0) { ssize_t ret = mTrack->write(buffer, size); mBytesWritten += ret; return ret; @@ -1579,26 +1573,25 @@ ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size) void MediaPlayerService::AudioOutput::stop() { ALOGV("stop"); - if (mTrack) mTrack->stop(); + if (mTrack != 0) mTrack->stop(); } void MediaPlayerService::AudioOutput::flush() { ALOGV("flush"); - if (mTrack) mTrack->flush(); + if (mTrack != 0) mTrack->flush(); } void MediaPlayerService::AudioOutput::pause() { ALOGV("pause"); - if (mTrack) mTrack->pause(); + if (mTrack != 0) mTrack->pause(); } void MediaPlayerService::AudioOutput::close() { ALOGV("close"); - delete mTrack; - mTrack = 0; + mTrack.clear(); } void MediaPlayerService::AudioOutput::setVolume(float left, float right) @@ -1606,7 +1599,7 @@ void MediaPlayerService::AudioOutput::setVolume(float left, float right) ALOGV("setVolume(%f, %f)", left, right); mLeftVolume = left; mRightVolume = right; - if (mTrack) { + if (mTrack != 0) { mTrack->setVolume(left, right); } } @@ -1615,7 +1608,7 @@ status_t MediaPlayerService::AudioOutput::setPlaybackRatePermille(int32_t ratePe { ALOGV("setPlaybackRatePermille(%d)", ratePermille); status_t res = NO_ERROR; - if (mTrack) { + if (mTrack != 0) { res = mTrack->setSampleRate(ratePermille * mSampleRateHz / 1000); } else { res = NO_INIT; @@ -1631,7 +1624,7 @@ status_t MediaPlayerService::AudioOutput::setAuxEffectSendLevel(float level) { ALOGV("setAuxEffectSendLevel(%f)", level); mSendLevel = level; - if (mTrack) { + if (mTrack != 0) { return mTrack->setAuxEffectSendLevel(level); } return NO_ERROR; @@ -1641,7 +1634,7 @@ status_t MediaPlayerService::AudioOutput::attachAuxEffect(int effectId) { ALOGV("attachAuxEffect(%d)", effectId); mAuxEffectId = effectId; - if (mTrack) { + if (mTrack != 0) { return mTrack->attachAuxEffect(effectId); } return NO_ERROR; diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index b33805d..e586156 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ 
b/media/libmediaplayerservice/MediaPlayerService.h @@ -78,7 +78,7 @@ class MediaPlayerService : public BnMediaPlayerService AudioOutput(int sessionId); virtual ~AudioOutput(); - virtual bool ready() const { return mTrack != NULL; } + virtual bool ready() const { return mTrack != 0; } virtual bool realtime() const { return true; } virtual ssize_t bufferSize() const; virtual ssize_t frameCount() const; @@ -120,8 +120,8 @@ class MediaPlayerService : public BnMediaPlayerService static void CallbackWrapper( int event, void *me, void *info); - AudioTrack* mTrack; - AudioTrack* mRecycledTrack; + sp mTrack; + sp mRecycledTrack; sp mNextOutput; AudioCallback mCallback; void * mCallbackCookie; diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp index 4208019..92efae8 100644 --- a/media/libstagefright/AudioPlayer.cpp +++ b/media/libstagefright/AudioPlayer.cpp @@ -36,8 +36,7 @@ AudioPlayer::AudioPlayer( const sp &audioSink, bool allowDeepBuffering, AwesomePlayer *observer) - : mAudioTrack(NULL), - mInputBuffer(NULL), + : mInputBuffer(NULL), mSampleRate(0), mLatencyUs(0), mFrameSize(0), @@ -166,8 +165,7 @@ status_t AudioPlayer::start(bool sourceAlreadyStarted) { 0, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this, 0); if ((err = mAudioTrack->initCheck()) != OK) { - delete mAudioTrack; - mAudioTrack = NULL; + mAudioTrack.clear(); if (mFirstBuffer != NULL) { mFirstBuffer->release(); @@ -235,8 +233,7 @@ void AudioPlayer::reset() { } else { mAudioTrack->stop(); - delete mAudioTrack; - mAudioTrack = NULL; + mAudioTrack.clear(); } // Make sure to release any buffer we hold onto so that the @@ -297,7 +294,7 @@ bool AudioPlayer::reachedEOS(status_t *finalStatus) { status_t AudioPlayer::setPlaybackRatePermille(int32_t ratePermille) { if (mAudioSink.get() != NULL) { return mAudioSink->setPlaybackRatePermille(ratePermille); - } else if (mAudioTrack != NULL){ + } else if (mAudioTrack != 0){ return mAudioTrack->setSampleRate(ratePermille * mSampleRate / 1000); } else { return NO_INIT; -- cgit v1.1 From d94e716af0e49d775f0c0c4f36dd2c136ba5f2b2 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 3 Jun 2013 15:48:11 -0700 Subject: Fix our software decoders to reset (more of) their internal state properly on a transition from idle->loaded. Change-Id: I56ccfeef24c391e50e42b522194206e35c7ab700 related-to-bug: 9105408 --- media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 3 +++ media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp | 5 +++++ media/libstagefright/codecs/amrnb/dec/SoftAMR.h | 1 + media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp | 5 +++++ media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h | 1 + media/libstagefright/codecs/mp3dec/SoftMP3.cpp | 2 ++ media/libstagefright/codecs/on2/dec/SoftVPX.cpp | 4 ++++ media/libstagefright/codecs/on2/dec/SoftVPX.h | 1 + media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp | 5 +++++ media/libstagefright/codecs/on2/h264dec/SoftAVC.h | 1 + media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp | 2 ++ 11 files changed, 30 insertions(+) diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp index cf50dc9..1b20cbb 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp @@ -604,6 +604,9 @@ void SoftAAC2::onReset() { // To make the codec behave the same before and after a reset, we need to invalidate the // streaminfo struct. 
This does that: mStreamInfo->sampleRate = 0; + + mSignalledError = false; + mOutputPortSettingsChange = NONE; } void SoftAAC2::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { diff --git a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp index 4d4212f..3320688 100644 --- a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp +++ b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp @@ -457,6 +457,11 @@ void SoftAMR::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { } } +void SoftAMR::onReset() { + mSignalledError = false; + mOutputPortSettingsChange = NONE; +} + } // namespace android android::SoftOMXComponent *createSoftOMXComponent( diff --git a/media/libstagefright/codecs/amrnb/dec/SoftAMR.h b/media/libstagefright/codecs/amrnb/dec/SoftAMR.h index 9a596e5..758d6ac 100644 --- a/media/libstagefright/codecs/amrnb/dec/SoftAMR.h +++ b/media/libstagefright/codecs/amrnb/dec/SoftAMR.h @@ -40,6 +40,7 @@ protected: virtual void onQueueFilled(OMX_U32 portIndex); virtual void onPortFlushCompleted(OMX_U32 portIndex); virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); private: enum { diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp index 020cc0a..3c15adc 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp +++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp @@ -571,6 +571,11 @@ void SoftMPEG4::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { } } +void SoftMPEG4::onReset() { + mSignalledError = false; + mOutputPortSettingsChange = NONE; +} + void SoftMPEG4::updatePortDefinitions() { OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef; def->format.video.nFrameWidth = mWidth; diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h index dff08a7..6df4c92 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h +++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h @@ -44,6 +44,7 @@ protected: virtual void onQueueFilled(OMX_U32 portIndex); virtual void onPortFlushCompleted(OMX_U32 portIndex); virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); private: enum { diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp index 9f25536..7c382fb 100644 --- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp +++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp @@ -361,6 +361,8 @@ void SoftMP3::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { void SoftMP3::onReset() { pvmp3_InitDecoder(mConfig, mDecoderBuf); mIsFirst = true; + mSignalledError = false; + mOutputPortSettingsChange = NONE; } } // namespace android diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp index 866e5b0..fe76036 100644 --- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp @@ -358,6 +358,10 @@ void SoftVPX::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { } } +void SoftVPX::onReset() { + mOutputPortSettingsChange = NONE; +} + void SoftVPX::updatePortDefinitions() { OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef; def->format.video.nFrameWidth = mWidth; diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h index 3e814a2..4cb05cf 100644 --- 
a/media/libstagefright/codecs/on2/dec/SoftVPX.h +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h @@ -40,6 +40,7 @@ protected: virtual void onQueueFilled(OMX_U32 portIndex); virtual void onPortFlushCompleted(OMX_U32 portIndex); virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); private: enum { diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp index 6e36651..5e299d5 100644 --- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp +++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp @@ -530,6 +530,11 @@ void SoftAVC::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { } } +void SoftAVC::onReset() { + mSignalledError = false; + mOutputPortSettingsChange = NONE; +} + void SoftAVC::updatePortDefinitions() { OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef; def->format.video.nFrameWidth = mWidth; diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h index 879b014..8c104c5 100644 --- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h +++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h @@ -46,6 +46,7 @@ protected: virtual void onQueueFilled(OMX_U32 portIndex); virtual void onPortFlushCompleted(OMX_U32 portIndex); virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); private: enum { diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp index 4115324..51bb958 100644 --- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp +++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp @@ -424,6 +424,8 @@ void SoftVorbis::onReset() { delete mVi; mVi = NULL; } + + mOutputPortSettingsChange = NONE; } void SoftVorbis::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { -- cgit v1.1 From 673158582c9589cee1d5e4d7c79622609938b8f8 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 3 Jun 2013 16:00:13 -0700 Subject: Fix uninitialized variable error in HLS bandwidth determination. Change-Id: I647e79443f2a06e5b1b4f9068c02b424b0e57989 --- media/libstagefright/httplive/LiveSession.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp index fff13eb..e91c60b 100644 --- a/media/libstagefright/httplive/LiveSession.cpp +++ b/media/libstagefright/httplive/LiveSession.cpp @@ -651,7 +651,7 @@ size_t LiveSession::getBandwidthIndex() { #if 1 char value[PROPERTY_VALUE_MAX]; - ssize_t index; + ssize_t index = -1; if (property_get("media.httplive.bw-index", value, NULL)) { char *end; index = strtol(value, &end, 10); -- cgit v1.1 From bb6f0a0bb413817d707cfb4c4a30847fda520787 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 3 Jun 2013 15:00:29 -0700 Subject: Fix underruns when sample rate != native sample rate This forces a minimum of 3 application buffers when the sample rates don't match, using the normal mixer and low latency HAL. There is still an issue that the latency() varies depending on whether screen was off or on at the time of creating the AudioTrack. 
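As a sanity check, a small standalone sketch (not part of this change) of the
minFrameCount arithmetic from the AudioTrack.cpp hunk below, fed with the figures
quoted in the log lines that follow:

    #include <cstdio>

    int main() {
        const unsigned afFrameCount = 960, afSampleRate = 48000, sampleRate = 44100;
        const unsigned afLatencies[] = { 50 /* screen on */, 84 /* screen off */ };
        for (unsigned afLatency : afLatencies) {
            unsigned minBufCount = afLatency / ((1000 * afFrameCount) / afSampleRate);
            if (minBufCount <= 2) {
                minBufCount = (sampleRate == afSampleRate) ? 2 : 3;
            }
            unsigned minFrameCount = (afFrameCount * sampleRate * minBufCount) / afSampleRate;
            // prints minBufCount=3, minFrameCount=2646 for afLatency=50
            // and minBufCount=4, minFrameCount=3528 for afLatency=84
            printf("afLatency=%u -> minBufCount=%u, minFrameCount=%u\n",
                    afLatency, minBufCount, minFrameCount);
        }
        return 0;
    }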
With screen on: I/AudioTrack( 2028): afFrameCount=960, minBufCount=2, afSampleRate=48000, afLatency=50 I/AudioTrack( 2028): minFrameCount: 2646, afFrameCount=960, minBufCount=3, sampleRate=44100, afSampleRate=48000, afLatency=50 With screen off: I/AudioTrack( 2817): afFrameCount=960, minBufCount=4, afSampleRate=48000, afLatency=84 I/AudioTrack( 2817): minFrameCount: 3528, afFrameCount=960, minBufCount=4, sampleRate=44100, afSampleRate=48000, afLatency=84 Change-Id: Ib45515edff2afcd672dda34881b658c800ffc25a --- media/libmedia/AudioTrack.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index ff52b28..77fc6f6 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -861,8 +861,10 @@ status_t AudioTrack::createTrack_l( // Ensure that buffer depth covers at least audio hardware latency uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate); - if (minBufCount < 2) { - minBufCount = 2; + ALOGV("afFrameCount=%d, minBufCount=%d, afSampleRate=%u, afLatency=%d", + afFrameCount, minBufCount, afSampleRate, afLatency); + if (minBufCount <= 2) { + minBufCount = sampleRate == afSampleRate ? 2 : 3; } size_t minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate; -- cgit v1.1 From 53b0a2b1f9cb6b99b3f0d1a639921d1b24bc30b7 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Tue, 4 Jun 2013 18:43:08 -0700 Subject: Reset PV decoder on SoftMPEG4 decoder reset Otherwise, state may be undefined for subsequent frames. Change-Id: Icdc0126247e1422eba21f2008a04cf7867d93f5d Signed-off-by: Lajos Molnar Bug: 9284771 (cherry picked from commit 0f15875b8e80fb49a3b18d88964b063326f307b9) --- media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp index 3c15adc..875674b 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp +++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp @@ -574,6 +574,11 @@ void SoftMPEG4::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { void SoftMPEG4::onReset() { mSignalledError = false; mOutputPortSettingsChange = NONE; + mFramesConfigured = false; + if (mInitialized) { + PVCleanUpVideoDecoder(mHandle); + mInitialized = false; + } } void SoftMPEG4::updatePortDefinitions() { -- cgit v1.1 From 269a355679fce6a71523faeefc2ff575abbd1a8e Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Tue, 4 Jun 2013 19:35:03 -0700 Subject: Track exact timestamps in SoftMPEG4/H263 decoders Change-Id: I7772e3afec020f889dea80fd6372afbc36cd68d6 Signed-off-by: Lajos Molnar Bug: 9285553 (cherry picked from commit e113aa1f078cb3d5f8182058e144fd14ce945fca) --- media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp | 16 ++++++++++++---- media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h | 3 +++ 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp index 875674b..bb5625f 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp +++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp @@ -76,6 +76,7 @@ SoftMPEG4::SoftMPEG4( mInitialized(false), mFramesConfigured(false), mNumSamplesOutput(0), + mPvTime(0), mOutputPortSettingsChange(NONE) { if (!strcmp(name, "OMX.google.h263.decoder")) { mMode = MODE_H263; @@ -415,9 +416,14 @@ void SoftMPEG4::onQueueFilled(OMX_U32 
portIndex) { uint32_t useExtTimestamp = (inHeader->nOffset == 0); - // decoder deals in ms, OMX in us. - uint32_t timestamp = - useExtTimestamp ? (inHeader->nTimeStamp + 500) / 1000 : 0xFFFFFFFF; + // decoder deals in ms (int32_t), OMX in us (int64_t) + // so use fake timestamp instead + uint32_t timestamp = 0xFFFFFFFF; + if (useExtTimestamp) { + mPvToOmxTimeMap.add(mPvTime, inHeader->nTimeStamp); + timestamp = mPvTime; + mPvTime++; + } int32_t bufferSize = inHeader->nFilledLen; int32_t tmp = bufferSize; @@ -441,7 +447,8 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) { } // decoder deals in ms, OMX in us. - outHeader->nTimeStamp = timestamp * 1000; + outHeader->nTimeStamp = mPvToOmxTimeMap.valueFor(timestamp); + mPvToOmxTimeMap.removeItem(timestamp); inHeader->nOffset += bufferSize; inHeader->nFilledLen = 0; @@ -572,6 +579,7 @@ void SoftMPEG4::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { } void SoftMPEG4::onReset() { + mPvToOmxTimeMap.clear(); mSignalledError = false; mOutputPortSettingsChange = NONE; mFramesConfigured = false; diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h index 6df4c92..f71ccef 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h +++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h @@ -19,6 +19,7 @@ #define SOFT_MPEG4_H_ #include "SimpleSoftOMXComponent.h" +#include struct tagvideoDecControls; @@ -70,6 +71,8 @@ private: bool mFramesConfigured; int32_t mNumSamplesOutput; + int32_t mPvTime; + KeyedVector mPvToOmxTimeMap; enum { NONE, -- cgit v1.1 From 7f616d3cc5366a4b8af20d3d0c768e3de1df0666 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Tue, 4 Jun 2013 19:35:18 -0700 Subject: stagefright: created SoftVideoDecoderOMXComponent Created common base class for all software video decoders to make adding new features easier. 
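A rough sketch of the shared surface, inferred only from the constructor and
initPorts() call sites in the hunks below; the real declaration is the new header
media/libstagefright/include/SoftVideoDecoderOMXComponent.h added by this change:

    #include "SimpleSoftOMXComponent.h"

    namespace android {

    // Sketch only: names and types are taken from how the decoders below use
    // the base class, not copied from the new header itself.
    struct SoftVideoDecoderOMXComponent : public SimpleSoftOMXComponent {
        SoftVideoDecoderOMXComponent(
                const char *name, const char *componentRole,
                OMX_VIDEO_CODINGTYPE codingType,
                const CodecProfileLevel *profileLevels, size_t numProfileLevels,
                int32_t width, int32_t height,
                const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
                OMX_COMPONENTTYPE **component);

    protected:
        // Port setup plus the internalGetParameter/internalSetParameter/getConfig
        // and onPortEnableCompleted boilerplate each decoder used to duplicate.
        void initPorts(OMX_U32 numInputBuffers, OMX_U32 inputBufferSize,
                OMX_U32 numOutputBuffers, const char *mimeType);

        virtual void updatePortDefinitions();
        virtual void onReset();

        enum { kInputPortIndex = 0, kOutputPortIndex = 1 };
        uint32_t mWidth, mHeight;
        uint32_t mCropLeft, mCropTop, mCropWidth, mCropHeight;

        enum {
            NONE,
            AWAITING_DISABLED,
            AWAITING_ENABLED
        } mOutputPortSettingsChange;
    };

    }  // namespace android

Subclasses keep only their codec-specific onQueueFilled()/onPortFlushCompleted()/onReset()
logic, as the SoftMPEG4, SoftVPX and SoftAVC hunks below show.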
Change-Id: Id89964e572d5cc5add02662273e6ae96c6b7eb12 Signed-off-by: Lajos Molnar --- .../codecs/m4v_h263/dec/SoftMPEG4.cpp | 322 +++------------------ .../libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h | 31 +- media/libstagefright/codecs/on2/dec/SoftVPX.cpp | 211 +------------- media/libstagefright/codecs/on2/dec/SoftVPX.h | 25 +- .../libstagefright/codecs/on2/h264dec/SoftAVC.cpp | 249 +--------------- media/libstagefright/codecs/on2/h264dec/SoftAVC.h | 28 +- .../include/SoftVideoDecoderOMXComponent.h | 93 ++++++ media/libstagefright/omx/Android.mk | 1 + .../omx/SoftVideoDecoderOMXComponent.cpp | 290 +++++++++++++++++++ 9 files changed, 456 insertions(+), 794 deletions(-) create mode 100644 media/libstagefright/include/SoftVideoDecoderOMXComponent.h create mode 100644 media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp index bb5625f..fb2a430 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp +++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp @@ -48,43 +48,32 @@ static const CodecProfileLevel kH263ProfileLevels[] = { { OMX_VIDEO_H263ProfileISWV2, OMX_VIDEO_H263Level45 }, }; -template -static void InitOMXParams(T *params) { - params->nSize = sizeof(T); - params->nVersion.s.nVersionMajor = 1; - params->nVersion.s.nVersionMinor = 0; - params->nVersion.s.nRevision = 0; - params->nVersion.s.nStep = 0; -} - SoftMPEG4::SoftMPEG4( const char *name, + const char *componentRole, + OMX_VIDEO_CODINGTYPE codingType, + const CodecProfileLevel *profileLevels, + size_t numProfileLevels, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, OMX_COMPONENTTYPE **component) - : SimpleSoftOMXComponent(name, callbacks, appData, component), - mMode(MODE_MPEG4), + : SoftVideoDecoderOMXComponent( + name, componentRole, codingType, profileLevels, numProfileLevels, + 352 /* width */, 288 /* height */, callbacks, appData, component), + mMode(codingType == OMX_VIDEO_CodingH263 ? MODE_H263 : MODE_MPEG4), mHandle(new tagvideoDecControls), mInputBufferCount(0), - mWidth(352), - mHeight(288), - mCropLeft(0), - mCropTop(0), - mCropRight(mWidth - 1), - mCropBottom(mHeight - 1), mSignalledError(false), mInitialized(false), mFramesConfigured(false), mNumSamplesOutput(0), - mPvTime(0), - mOutputPortSettingsChange(NONE) { - if (!strcmp(name, "OMX.google.h263.decoder")) { - mMode = MODE_H263; - } else { - CHECK(!strcmp(name, "OMX.google.mpeg4.decoder")); - } - - initPorts(); + mPvTime(0) { + initPorts( + kNumInputBuffers, + 8192 /* inputBufferSize */, + kNumOutputBuffers, + (mMode == MODE_MPEG4) + ? MEDIA_MIMETYPE_VIDEO_MPEG4 : MEDIA_MIMETYPE_VIDEO_H263); CHECK_EQ(initDecoder(), (status_t)OK); } @@ -97,219 +86,11 @@ SoftMPEG4::~SoftMPEG4() { mHandle = NULL; } -void SoftMPEG4::initPorts() { - OMX_PARAM_PORTDEFINITIONTYPE def; - InitOMXParams(&def); - - def.nPortIndex = 0; - def.eDir = OMX_DirInput; - def.nBufferCountMin = kNumInputBuffers; - def.nBufferCountActual = def.nBufferCountMin; - def.nBufferSize = 8192; - def.bEnabled = OMX_TRUE; - def.bPopulated = OMX_FALSE; - def.eDomain = OMX_PortDomainVideo; - def.bBuffersContiguous = OMX_FALSE; - def.nBufferAlignment = 1; - - def.format.video.cMIMEType = - (mMode == MODE_MPEG4) - ? 
const_cast(MEDIA_MIMETYPE_VIDEO_MPEG4) - : const_cast(MEDIA_MIMETYPE_VIDEO_H263); - - def.format.video.pNativeRender = NULL; - def.format.video.nFrameWidth = mWidth; - def.format.video.nFrameHeight = mHeight; - def.format.video.nStride = def.format.video.nFrameWidth; - def.format.video.nSliceHeight = def.format.video.nFrameHeight; - def.format.video.nBitrate = 0; - def.format.video.xFramerate = 0; - def.format.video.bFlagErrorConcealment = OMX_FALSE; - - def.format.video.eCompressionFormat = - mMode == MODE_MPEG4 ? OMX_VIDEO_CodingMPEG4 : OMX_VIDEO_CodingH263; - - def.format.video.eColorFormat = OMX_COLOR_FormatUnused; - def.format.video.pNativeWindow = NULL; - - addPort(def); - - def.nPortIndex = 1; - def.eDir = OMX_DirOutput; - def.nBufferCountMin = kNumOutputBuffers; - def.nBufferCountActual = def.nBufferCountMin; - def.bEnabled = OMX_TRUE; - def.bPopulated = OMX_FALSE; - def.eDomain = OMX_PortDomainVideo; - def.bBuffersContiguous = OMX_FALSE; - def.nBufferAlignment = 2; - - def.format.video.cMIMEType = const_cast(MEDIA_MIMETYPE_VIDEO_RAW); - def.format.video.pNativeRender = NULL; - def.format.video.nFrameWidth = mWidth; - def.format.video.nFrameHeight = mHeight; - def.format.video.nStride = def.format.video.nFrameWidth; - def.format.video.nSliceHeight = def.format.video.nFrameHeight; - def.format.video.nBitrate = 0; - def.format.video.xFramerate = 0; - def.format.video.bFlagErrorConcealment = OMX_FALSE; - def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused; - def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar; - def.format.video.pNativeWindow = NULL; - - def.nBufferSize = - (def.format.video.nFrameWidth * def.format.video.nFrameHeight * 3) / 2; - - addPort(def); -} - status_t SoftMPEG4::initDecoder() { memset(mHandle, 0, sizeof(tagvideoDecControls)); return OK; } -OMX_ERRORTYPE SoftMPEG4::internalGetParameter( - OMX_INDEXTYPE index, OMX_PTR params) { - switch (index) { - case OMX_IndexParamVideoPortFormat: - { - OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams = - (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params; - - if (formatParams->nPortIndex > 1) { - return OMX_ErrorUndefined; - } - - if (formatParams->nIndex != 0) { - return OMX_ErrorNoMore; - } - - if (formatParams->nPortIndex == 0) { - formatParams->eCompressionFormat = - (mMode == MODE_MPEG4) - ? 
OMX_VIDEO_CodingMPEG4 : OMX_VIDEO_CodingH263; - - formatParams->eColorFormat = OMX_COLOR_FormatUnused; - formatParams->xFramerate = 0; - } else { - CHECK_EQ(formatParams->nPortIndex, 1u); - - formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused; - formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar; - formatParams->xFramerate = 0; - } - - return OMX_ErrorNone; - } - - case OMX_IndexParamVideoProfileLevelQuerySupported: - { - OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel = - (OMX_VIDEO_PARAM_PROFILELEVELTYPE *) params; - - if (profileLevel->nPortIndex != 0) { // Input port only - ALOGE("Invalid port index: %ld", profileLevel->nPortIndex); - return OMX_ErrorUnsupportedIndex; - } - - size_t index = profileLevel->nProfileIndex; - if (mMode == MODE_H263) { - size_t nProfileLevels = - sizeof(kH263ProfileLevels) / sizeof(kH263ProfileLevels[0]); - if (index >= nProfileLevels) { - return OMX_ErrorNoMore; - } - - profileLevel->eProfile = kH263ProfileLevels[index].mProfile; - profileLevel->eLevel = kH263ProfileLevels[index].mLevel; - } else { - size_t nProfileLevels = - sizeof(kM4VProfileLevels) / sizeof(kM4VProfileLevels[0]); - if (index >= nProfileLevels) { - return OMX_ErrorNoMore; - } - - profileLevel->eProfile = kM4VProfileLevels[index].mProfile; - profileLevel->eLevel = kM4VProfileLevels[index].mLevel; - } - return OMX_ErrorNone; - } - - default: - return SimpleSoftOMXComponent::internalGetParameter(index, params); - } -} - -OMX_ERRORTYPE SoftMPEG4::internalSetParameter( - OMX_INDEXTYPE index, const OMX_PTR params) { - switch (index) { - case OMX_IndexParamStandardComponentRole: - { - const OMX_PARAM_COMPONENTROLETYPE *roleParams = - (const OMX_PARAM_COMPONENTROLETYPE *)params; - - if (mMode == MODE_MPEG4) { - if (strncmp((const char *)roleParams->cRole, - "video_decoder.mpeg4", - OMX_MAX_STRINGNAME_SIZE - 1)) { - return OMX_ErrorUndefined; - } - } else { - if (strncmp((const char *)roleParams->cRole, - "video_decoder.h263", - OMX_MAX_STRINGNAME_SIZE - 1)) { - return OMX_ErrorUndefined; - } - } - - return OMX_ErrorNone; - } - - case OMX_IndexParamVideoPortFormat: - { - OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams = - (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params; - - if (formatParams->nPortIndex > 1) { - return OMX_ErrorUndefined; - } - - if (formatParams->nIndex != 0) { - return OMX_ErrorNoMore; - } - - return OMX_ErrorNone; - } - - default: - return SimpleSoftOMXComponent::internalSetParameter(index, params); - } -} - -OMX_ERRORTYPE SoftMPEG4::getConfig( - OMX_INDEXTYPE index, OMX_PTR params) { - switch (index) { - case OMX_IndexConfigCommonOutputCrop: - { - OMX_CONFIG_RECTTYPE *rectParams = (OMX_CONFIG_RECTTYPE *)params; - - if (rectParams->nPortIndex != 1) { - return OMX_ErrorUndefined; - } - - rectParams->nLeft = mCropLeft; - rectParams->nTop = mCropTop; - rectParams->nWidth = mCropRight - mCropLeft + 1; - rectParams->nHeight = mCropBottom - mCropTop + 1; - - return OMX_ErrorNone; - } - - default: - return OMX_ErrorUnsupportedIndex; - } -} - void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) { if (mSignalledError || mOutputPortSettingsChange != NONE) { return; @@ -489,11 +270,11 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) { } bool SoftMPEG4::portSettingsChanged() { - int32_t disp_width, disp_height; - PVGetVideoDimensions(mHandle, &disp_width, &disp_height); + uint32_t disp_width, disp_height; + PVGetVideoDimensions(mHandle, (int32 *)&disp_width, (int32 *)&disp_height); - int32_t buf_width, buf_height; - PVGetBufferDimensions(mHandle, &buf_width, &buf_height); + uint32_t 
buf_width, buf_height; + PVGetBufferDimensions(mHandle, (int32 *)&buf_width, (int32 *)&buf_height); CHECK_LE(disp_width, buf_width); CHECK_LE(disp_height, buf_height); @@ -501,12 +282,12 @@ bool SoftMPEG4::portSettingsChanged() { ALOGV("disp_width = %d, disp_height = %d, buf_width = %d, buf_height = %d", disp_width, disp_height, buf_width, buf_height); - if (mCropRight != disp_width - 1 - || mCropBottom != disp_height - 1) { + if (mCropWidth != disp_width + || mCropHeight != disp_height) { mCropLeft = 0; mCropTop = 0; - mCropRight = disp_width - 1; - mCropBottom = disp_height - 1; + mCropWidth = disp_width; + mCropHeight = disp_height; notify(OMX_EventPortSettingsChanged, 1, @@ -552,36 +333,10 @@ void SoftMPEG4::onPortFlushCompleted(OMX_U32 portIndex) { } } -void SoftMPEG4::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { - if (portIndex != 1) { - return; - } - - switch (mOutputPortSettingsChange) { - case NONE: - break; - - case AWAITING_DISABLED: - { - CHECK(!enabled); - mOutputPortSettingsChange = AWAITING_ENABLED; - break; - } - - default: - { - CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED); - CHECK(enabled); - mOutputPortSettingsChange = NONE; - break; - } - } -} - void SoftMPEG4::onReset() { + SoftVideoDecoderOMXComponent::onReset(); mPvToOmxTimeMap.clear(); mSignalledError = false; - mOutputPortSettingsChange = NONE; mFramesConfigured = false; if (mInitialized) { PVCleanUpVideoDecoder(mHandle); @@ -590,18 +345,10 @@ void SoftMPEG4::onReset() { } void SoftMPEG4::updatePortDefinitions() { - OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef; - def->format.video.nFrameWidth = mWidth; - def->format.video.nFrameHeight = mHeight; - def->format.video.nStride = def->format.video.nFrameWidth; - def->format.video.nSliceHeight = def->format.video.nFrameHeight; - - def = &editPortInfo(1)->mDef; - def->format.video.nFrameWidth = mWidth; - def->format.video.nFrameHeight = mHeight; - def->format.video.nStride = def->format.video.nFrameWidth; - def->format.video.nSliceHeight = def->format.video.nFrameHeight; + SoftVideoDecoderOMXComponent::updatePortDefinitions(); + /* We have to align our width and height - this should affect stride! 
*/ + OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef; def->nBufferSize = (((def->format.video.nFrameWidth + 15) & -16) * ((def->format.video.nFrameHeight + 15) & -16) * 3) / 2; @@ -612,6 +359,19 @@ void SoftMPEG4::updatePortDefinitions() { android::SoftOMXComponent *createSoftOMXComponent( const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, OMX_COMPONENTTYPE **component) { - return new android::SoftMPEG4(name, callbacks, appData, component); + using namespace android; + if (!strcmp(name, "OMX.google.h263.decoder")) { + return new android::SoftMPEG4( + name, "video_decoder.h263", OMX_VIDEO_CodingH263, + kH263ProfileLevels, ARRAY_SIZE(kH263ProfileLevels), + callbacks, appData, component); + } else if (!strcmp(name, "OMX.google.mpeg4.decoder")) { + return new android::SoftMPEG4( + name, "video_decoder.mpeg4", OMX_VIDEO_CodingMPEG4, + kM4VProfileLevels, ARRAY_SIZE(kM4VProfileLevels), + callbacks, appData, component); + } else { + CHECK(!"Unknown component"); + } } diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h index f71ccef..de14aaf 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h +++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h @@ -18,15 +18,18 @@ #define SOFT_MPEG4_H_ -#include "SimpleSoftOMXComponent.h" -#include +#include "SoftVideoDecoderOMXComponent.h" struct tagvideoDecControls; namespace android { -struct SoftMPEG4 : public SimpleSoftOMXComponent { +struct SoftMPEG4 : public SoftVideoDecoderOMXComponent { SoftMPEG4(const char *name, + const char *componentRole, + OMX_VIDEO_CODINGTYPE codingType, + const CodecProfileLevel *profileLevels, + size_t numProfileLevels, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, OMX_COMPONENTTYPE **component); @@ -34,17 +37,8 @@ struct SoftMPEG4 : public SimpleSoftOMXComponent { protected: virtual ~SoftMPEG4(); - virtual OMX_ERRORTYPE internalGetParameter( - OMX_INDEXTYPE index, OMX_PTR params); - - virtual OMX_ERRORTYPE internalSetParameter( - OMX_INDEXTYPE index, const OMX_PTR params); - - virtual OMX_ERRORTYPE getConfig(OMX_INDEXTYPE index, OMX_PTR params); - virtual void onQueueFilled(OMX_U32 portIndex); virtual void onPortFlushCompleted(OMX_U32 portIndex); - virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); virtual void onReset(); private: @@ -56,16 +50,12 @@ private: enum { MODE_MPEG4, MODE_H263, - } mMode; tagvideoDecControls *mHandle; size_t mInputBufferCount; - int32_t mWidth, mHeight; - int32_t mCropLeft, mCropTop, mCropRight, mCropBottom; - bool mSignalledError; bool mInitialized; bool mFramesConfigured; @@ -74,16 +64,9 @@ private: int32_t mPvTime; KeyedVector mPvToOmxTimeMap; - enum { - NONE, - AWAITING_DISABLED, - AWAITING_ENABLED - } mOutputPortSettingsChange; - - void initPorts(); status_t initDecoder(); - void updatePortDefinitions(); + virtual void updatePortDefinitions(); bool portSettingsChanged(); DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG4); diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp index fe76036..43d0263 100644 --- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp @@ -29,26 +29,19 @@ namespace android { -template -static void InitOMXParams(T *params) { - params->nSize = sizeof(T); - params->nVersion.s.nVersionMajor = 1; - params->nVersion.s.nVersionMinor = 0; - params->nVersion.s.nRevision = 0; - params->nVersion.s.nStep = 0; -} - SoftVPX::SoftVPX( const 
char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, OMX_COMPONENTTYPE **component) - : SimpleSoftOMXComponent(name, callbacks, appData, component), - mCtx(NULL), - mWidth(320), - mHeight(240), - mOutputPortSettingsChange(NONE) { - initPorts(); + : SoftVideoDecoderOMXComponent( + name, "video_decoder.vpx", OMX_VIDEO_CodingVPX, + NULL /* profileLevels */, 0 /* numProfileLevels */, + 320 /* width */, 240 /* height */, callbacks, appData, component), + mCtx(NULL) { + initPorts(kNumBuffers, 768 * 1024 /* inputBufferSize */, + kNumBuffers, MEDIA_MIMETYPE_VIDEO_VPX); + CHECK_EQ(initDecoder(), (status_t)OK); } @@ -58,65 +51,6 @@ SoftVPX::~SoftVPX() { mCtx = NULL; } -void SoftVPX::initPorts() { - OMX_PARAM_PORTDEFINITIONTYPE def; - InitOMXParams(&def); - - def.nPortIndex = 0; - def.eDir = OMX_DirInput; - def.nBufferCountMin = kNumBuffers; - def.nBufferCountActual = def.nBufferCountMin; - def.nBufferSize = 768 * 1024; - def.bEnabled = OMX_TRUE; - def.bPopulated = OMX_FALSE; - def.eDomain = OMX_PortDomainVideo; - def.bBuffersContiguous = OMX_FALSE; - def.nBufferAlignment = 1; - - def.format.video.cMIMEType = const_cast(MEDIA_MIMETYPE_VIDEO_VPX); - def.format.video.pNativeRender = NULL; - def.format.video.nFrameWidth = mWidth; - def.format.video.nFrameHeight = mHeight; - def.format.video.nStride = def.format.video.nFrameWidth; - def.format.video.nSliceHeight = def.format.video.nFrameHeight; - def.format.video.nBitrate = 0; - def.format.video.xFramerate = 0; - def.format.video.bFlagErrorConcealment = OMX_FALSE; - def.format.video.eCompressionFormat = OMX_VIDEO_CodingVPX; - def.format.video.eColorFormat = OMX_COLOR_FormatUnused; - def.format.video.pNativeWindow = NULL; - - addPort(def); - - def.nPortIndex = 1; - def.eDir = OMX_DirOutput; - def.nBufferCountMin = kNumBuffers; - def.nBufferCountActual = def.nBufferCountMin; - def.bEnabled = OMX_TRUE; - def.bPopulated = OMX_FALSE; - def.eDomain = OMX_PortDomainVideo; - def.bBuffersContiguous = OMX_FALSE; - def.nBufferAlignment = 2; - - def.format.video.cMIMEType = const_cast(MEDIA_MIMETYPE_VIDEO_RAW); - def.format.video.pNativeRender = NULL; - def.format.video.nFrameWidth = mWidth; - def.format.video.nFrameHeight = mHeight; - def.format.video.nStride = def.format.video.nFrameWidth; - def.format.video.nSliceHeight = def.format.video.nFrameHeight; - def.format.video.nBitrate = 0; - def.format.video.xFramerate = 0; - def.format.video.bFlagErrorConcealment = OMX_FALSE; - def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused; - def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar; - def.format.video.pNativeWindow = NULL; - - def.nBufferSize = - (def.format.video.nFrameWidth * def.format.video.nFrameHeight * 3) / 2; - - addPort(def); -} - static int GetCPUCoreCount() { int cpuCoreCount = 1; #if defined(_SC_NPROCESSORS_ONLN) @@ -145,80 +79,6 @@ status_t SoftVPX::initDecoder() { return OK; } -OMX_ERRORTYPE SoftVPX::internalGetParameter( - OMX_INDEXTYPE index, OMX_PTR params) { - switch (index) { - case OMX_IndexParamVideoPortFormat: - { - OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams = - (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params; - - if (formatParams->nPortIndex > 1) { - return OMX_ErrorUndefined; - } - - if (formatParams->nIndex != 0) { - return OMX_ErrorNoMore; - } - - if (formatParams->nPortIndex == 0) { - formatParams->eCompressionFormat = OMX_VIDEO_CodingVPX; - formatParams->eColorFormat = OMX_COLOR_FormatUnused; - formatParams->xFramerate = 0; - } else { - CHECK_EQ(formatParams->nPortIndex, 1u); - - formatParams->eCompressionFormat = 
OMX_VIDEO_CodingUnused; - formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar; - formatParams->xFramerate = 0; - } - - return OMX_ErrorNone; - } - - default: - return SimpleSoftOMXComponent::internalGetParameter(index, params); - } -} - -OMX_ERRORTYPE SoftVPX::internalSetParameter( - OMX_INDEXTYPE index, const OMX_PTR params) { - switch (index) { - case OMX_IndexParamStandardComponentRole: - { - const OMX_PARAM_COMPONENTROLETYPE *roleParams = - (const OMX_PARAM_COMPONENTROLETYPE *)params; - - if (strncmp((const char *)roleParams->cRole, - "video_decoder.vpx", - OMX_MAX_STRINGNAME_SIZE - 1)) { - return OMX_ErrorUndefined; - } - - return OMX_ErrorNone; - } - - case OMX_IndexParamVideoPortFormat: - { - OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams = - (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params; - - if (formatParams->nPortIndex > 1) { - return OMX_ErrorUndefined; - } - - if (formatParams->nIndex != 0) { - return OMX_ErrorNoMore; - } - - return OMX_ErrorNone; - } - - default: - return SimpleSoftOMXComponent::internalSetParameter(index, params); - } -} - void SoftVPX::onQueueFilled(OMX_U32 portIndex) { if (mOutputPortSettingsChange != NONE) { return; @@ -270,8 +130,8 @@ void SoftVPX::onQueueFilled(OMX_U32 portIndex) { if (img != NULL) { CHECK_EQ(img->fmt, IMG_FMT_I420); - int32_t width = img->d_w; - int32_t height = img->d_h; + uint32_t width = img->d_w; + uint32_t height = img->d_h; if (width != mWidth || height != mHeight) { mWidth = width; @@ -329,57 +189,6 @@ void SoftVPX::onQueueFilled(OMX_U32 portIndex) { } } -void SoftVPX::onPortFlushCompleted(OMX_U32 portIndex) { -} - -void SoftVPX::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { - if (portIndex != 1) { - return; - } - - switch (mOutputPortSettingsChange) { - case NONE: - break; - - case AWAITING_DISABLED: - { - CHECK(!enabled); - mOutputPortSettingsChange = AWAITING_ENABLED; - break; - } - - default: - { - CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED); - CHECK(enabled); - mOutputPortSettingsChange = NONE; - break; - } - } -} - -void SoftVPX::onReset() { - mOutputPortSettingsChange = NONE; -} - -void SoftVPX::updatePortDefinitions() { - OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef; - def->format.video.nFrameWidth = mWidth; - def->format.video.nFrameHeight = mHeight; - def->format.video.nStride = def->format.video.nFrameWidth; - def->format.video.nSliceHeight = def->format.video.nFrameHeight; - - def = &editPortInfo(1)->mDef; - def->format.video.nFrameWidth = mWidth; - def->format.video.nFrameHeight = mHeight; - def->format.video.nStride = def->format.video.nFrameWidth; - def->format.video.nSliceHeight = def->format.video.nFrameHeight; - - def->nBufferSize = - (def->format.video.nFrameWidth - * def->format.video.nFrameHeight * 3) / 2; -} - } // namespace android android::SoftOMXComponent *createSoftOMXComponent( diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h index 4cb05cf..626307b 100644 --- a/media/libstagefright/codecs/on2/dec/SoftVPX.h +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h @@ -18,11 +18,11 @@ #define SOFT_VPX_H_ -#include "SimpleSoftOMXComponent.h" +#include "SoftVideoDecoderOMXComponent.h" namespace android { -struct SoftVPX : public SimpleSoftOMXComponent { +struct SoftVPX : public SoftVideoDecoderOMXComponent { SoftVPX(const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, @@ -31,16 +31,7 @@ struct SoftVPX : public SimpleSoftOMXComponent { protected: virtual ~SoftVPX(); - virtual OMX_ERRORTYPE 
internalGetParameter( - OMX_INDEXTYPE index, OMX_PTR params); - - virtual OMX_ERRORTYPE internalSetParameter( - OMX_INDEXTYPE index, const OMX_PTR params); - virtual void onQueueFilled(OMX_U32 portIndex); - virtual void onPortFlushCompleted(OMX_U32 portIndex); - virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); - virtual void onReset(); private: enum { @@ -49,20 +40,8 @@ private: void *mCtx; - int32_t mWidth; - int32_t mHeight; - - enum { - NONE, - AWAITING_DISABLED, - AWAITING_ENABLED - } mOutputPortSettingsChange; - - void initPorts(); status_t initDecoder(); - void updatePortDefinitions(); - DISALLOW_EVIL_CONSTRUCTORS(SoftVPX); }; diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp index 5e299d5..3bd9f47 100644 --- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp +++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp @@ -47,38 +47,28 @@ static const CodecProfileLevel kProfileLevels[] = { { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel51 }, }; -template -static void InitOMXParams(T *params) { - params->nSize = sizeof(T); - params->nVersion.s.nVersionMajor = 1; - params->nVersion.s.nVersionMinor = 0; - params->nVersion.s.nRevision = 0; - params->nVersion.s.nStep = 0; -} - SoftAVC::SoftAVC( const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, OMX_COMPONENTTYPE **component) - : SimpleSoftOMXComponent(name, callbacks, appData, component), + : SoftVideoDecoderOMXComponent( + name, "video_decoder.avc", OMX_VIDEO_CodingAVC, + kProfileLevels, ARRAY_SIZE(kProfileLevels), + 320 /* width */, 240 /* height */, callbacks, appData, component), mHandle(NULL), mInputBufferCount(0), - mWidth(320), - mHeight(240), mPictureSize(mWidth * mHeight * 3 / 2), - mCropLeft(0), - mCropTop(0), - mCropWidth(mWidth), - mCropHeight(mHeight), mFirstPicture(NULL), mFirstPictureId(-1), mPicId(0), mHeadersDecoded(false), mEOSStatus(INPUT_DATA_AVAILABLE), - mOutputPortSettingsChange(NONE), mSignalledError(false) { - initPorts(); + initPorts( + kNumInputBuffers, 8192 /* inputBufferSize */, + kNumOutputBuffers, MEDIA_MIMETYPE_VIDEO_AVC); + CHECK_EQ(initDecoder(), (status_t)OK); } @@ -100,65 +90,6 @@ SoftAVC::~SoftAVC() { delete[] mFirstPicture; } -void SoftAVC::initPorts() { - OMX_PARAM_PORTDEFINITIONTYPE def; - InitOMXParams(&def); - - def.nPortIndex = kInputPortIndex; - def.eDir = OMX_DirInput; - def.nBufferCountMin = kNumInputBuffers; - def.nBufferCountActual = def.nBufferCountMin; - def.nBufferSize = 8192; - def.bEnabled = OMX_TRUE; - def.bPopulated = OMX_FALSE; - def.eDomain = OMX_PortDomainVideo; - def.bBuffersContiguous = OMX_FALSE; - def.nBufferAlignment = 1; - - def.format.video.cMIMEType = const_cast(MEDIA_MIMETYPE_VIDEO_AVC); - def.format.video.pNativeRender = NULL; - def.format.video.nFrameWidth = mWidth; - def.format.video.nFrameHeight = mHeight; - def.format.video.nStride = def.format.video.nFrameWidth; - def.format.video.nSliceHeight = def.format.video.nFrameHeight; - def.format.video.nBitrate = 0; - def.format.video.xFramerate = 0; - def.format.video.bFlagErrorConcealment = OMX_FALSE; - def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC; - def.format.video.eColorFormat = OMX_COLOR_FormatUnused; - def.format.video.pNativeWindow = NULL; - - addPort(def); - - def.nPortIndex = kOutputPortIndex; - def.eDir = OMX_DirOutput; - def.nBufferCountMin = kNumOutputBuffers; - def.nBufferCountActual = def.nBufferCountMin; - def.bEnabled = OMX_TRUE; - def.bPopulated = OMX_FALSE; - def.eDomain = 
OMX_PortDomainVideo; - def.bBuffersContiguous = OMX_FALSE; - def.nBufferAlignment = 2; - - def.format.video.cMIMEType = const_cast(MEDIA_MIMETYPE_VIDEO_RAW); - def.format.video.pNativeRender = NULL; - def.format.video.nFrameWidth = mWidth; - def.format.video.nFrameHeight = mHeight; - def.format.video.nStride = def.format.video.nFrameWidth; - def.format.video.nSliceHeight = def.format.video.nFrameHeight; - def.format.video.nBitrate = 0; - def.format.video.xFramerate = 0; - def.format.video.bFlagErrorConcealment = OMX_FALSE; - def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused; - def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar; - def.format.video.pNativeWindow = NULL; - - def.nBufferSize = - (def.format.video.nFrameWidth * def.format.video.nFrameHeight * 3) / 2; - - addPort(def); -} - status_t SoftAVC::initDecoder() { // Force decoder to output buffers in display order. if (H264SwDecInit(&mHandle, 0) == H264SWDEC_OK) { @@ -167,126 +98,6 @@ status_t SoftAVC::initDecoder() { return UNKNOWN_ERROR; } -OMX_ERRORTYPE SoftAVC::internalGetParameter( - OMX_INDEXTYPE index, OMX_PTR params) { - switch (index) { - case OMX_IndexParamVideoPortFormat: - { - OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams = - (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params; - - if (formatParams->nPortIndex > kOutputPortIndex) { - return OMX_ErrorUndefined; - } - - if (formatParams->nIndex != 0) { - return OMX_ErrorNoMore; - } - - if (formatParams->nPortIndex == kInputPortIndex) { - formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC; - formatParams->eColorFormat = OMX_COLOR_FormatUnused; - formatParams->xFramerate = 0; - } else { - CHECK(formatParams->nPortIndex == kOutputPortIndex); - - formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused; - formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar; - formatParams->xFramerate = 0; - } - - return OMX_ErrorNone; - } - - case OMX_IndexParamVideoProfileLevelQuerySupported: - { - OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel = - (OMX_VIDEO_PARAM_PROFILELEVELTYPE *) params; - - if (profileLevel->nPortIndex != kInputPortIndex) { - ALOGE("Invalid port index: %ld", profileLevel->nPortIndex); - return OMX_ErrorUnsupportedIndex; - } - - size_t index = profileLevel->nProfileIndex; - size_t nProfileLevels = - sizeof(kProfileLevels) / sizeof(kProfileLevels[0]); - if (index >= nProfileLevels) { - return OMX_ErrorNoMore; - } - - profileLevel->eProfile = kProfileLevels[index].mProfile; - profileLevel->eLevel = kProfileLevels[index].mLevel; - return OMX_ErrorNone; - } - - default: - return SimpleSoftOMXComponent::internalGetParameter(index, params); - } -} - -OMX_ERRORTYPE SoftAVC::internalSetParameter( - OMX_INDEXTYPE index, const OMX_PTR params) { - switch (index) { - case OMX_IndexParamStandardComponentRole: - { - const OMX_PARAM_COMPONENTROLETYPE *roleParams = - (const OMX_PARAM_COMPONENTROLETYPE *)params; - - if (strncmp((const char *)roleParams->cRole, - "video_decoder.avc", - OMX_MAX_STRINGNAME_SIZE - 1)) { - return OMX_ErrorUndefined; - } - - return OMX_ErrorNone; - } - - case OMX_IndexParamVideoPortFormat: - { - OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams = - (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params; - - if (formatParams->nPortIndex > kOutputPortIndex) { - return OMX_ErrorUndefined; - } - - if (formatParams->nIndex != 0) { - return OMX_ErrorNoMore; - } - - return OMX_ErrorNone; - } - - default: - return SimpleSoftOMXComponent::internalSetParameter(index, params); - } -} - -OMX_ERRORTYPE SoftAVC::getConfig( - OMX_INDEXTYPE index, OMX_PTR params) { - switch 
(index) { - case OMX_IndexConfigCommonOutputCrop: - { - OMX_CONFIG_RECTTYPE *rectParams = (OMX_CONFIG_RECTTYPE *)params; - - if (rectParams->nPortIndex != 1) { - return OMX_ErrorUndefined; - } - - rectParams->nLeft = mCropLeft; - rectParams->nTop = mCropTop; - rectParams->nWidth = mCropWidth; - rectParams->nHeight = mCropHeight; - - return OMX_ErrorNone; - } - - default: - return OMX_ErrorUnsupportedIndex; - } -} - void SoftAVC::onQueueFilled(OMX_U32 portIndex) { if (mSignalledError || mOutputPortSettingsChange != NONE) { return; @@ -409,8 +220,6 @@ bool SoftAVC::handlePortSettingChangeEvent(const H264SwDecInfo *info) { mWidth = info->picWidth; mHeight = info->picHeight; mPictureSize = mWidth * mHeight * 3 / 2; - mCropWidth = mWidth; - mCropHeight = mHeight; updatePortDefinitions(); notify(OMX_EventPortSettingsChanged, 1, 0, NULL); mOutputPortSettingsChange = AWAITING_DISABLED; @@ -508,49 +317,9 @@ void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) { } } -void SoftAVC::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { - switch (mOutputPortSettingsChange) { - case NONE: - break; - - case AWAITING_DISABLED: - { - CHECK(!enabled); - mOutputPortSettingsChange = AWAITING_ENABLED; - break; - } - - default: - { - CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED); - CHECK(enabled); - mOutputPortSettingsChange = NONE; - break; - } - } -} - void SoftAVC::onReset() { + SoftVideoDecoderOMXComponent::onReset(); mSignalledError = false; - mOutputPortSettingsChange = NONE; -} - -void SoftAVC::updatePortDefinitions() { - OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef; - def->format.video.nFrameWidth = mWidth; - def->format.video.nFrameHeight = mHeight; - def->format.video.nStride = def->format.video.nFrameWidth; - def->format.video.nSliceHeight = def->format.video.nFrameHeight; - - def = &editPortInfo(1)->mDef; - def->format.video.nFrameWidth = mWidth; - def->format.video.nFrameHeight = mHeight; - def->format.video.nStride = def->format.video.nFrameWidth; - def->format.video.nSliceHeight = def->format.video.nFrameHeight; - - def->nBufferSize = - (def->format.video.nFrameWidth - * def->format.video.nFrameHeight * 3) / 2; } } // namespace android diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h index 8c104c5..0ed7ebe 100644 --- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h +++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h @@ -18,7 +18,7 @@ #define SOFT_AVC_H_ -#include "SimpleSoftOMXComponent.h" +#include "SoftVideoDecoderOMXComponent.h" #include #include "H264SwDecApi.h" @@ -26,7 +26,7 @@ namespace android { -struct SoftAVC : public SimpleSoftOMXComponent { +struct SoftAVC : public SoftVideoDecoderOMXComponent { SoftAVC(const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, @@ -35,23 +35,12 @@ struct SoftAVC : public SimpleSoftOMXComponent { protected: virtual ~SoftAVC(); - virtual OMX_ERRORTYPE internalGetParameter( - OMX_INDEXTYPE index, OMX_PTR params); - - virtual OMX_ERRORTYPE internalSetParameter( - OMX_INDEXTYPE index, const OMX_PTR params); - - virtual OMX_ERRORTYPE getConfig(OMX_INDEXTYPE index, OMX_PTR params); - virtual void onQueueFilled(OMX_U32 portIndex); virtual void onPortFlushCompleted(OMX_U32 portIndex); - virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); virtual void onReset(); private: enum { - kInputPortIndex = 0, - kOutputPortIndex = 1, kNumInputBuffers = 8, kNumOutputBuffers = 2, }; @@ -66,9 +55,7 @@ private: size_t mInputBufferCount; - 
uint32_t mWidth, mHeight, mPictureSize; - uint32_t mCropLeft, mCropTop; - uint32_t mCropWidth, mCropHeight; + uint32_t mPictureSize; uint8_t *mFirstPicture; int32_t mFirstPictureId; @@ -82,18 +69,9 @@ private: EOSStatus mEOSStatus; - enum OutputPortSettingChange { - NONE, - AWAITING_DISABLED, - AWAITING_ENABLED - }; - OutputPortSettingChange mOutputPortSettingsChange; - bool mSignalledError; - void initPorts(); status_t initDecoder(); - void updatePortDefinitions(); bool drainAllOutputBuffers(); void drainOneOutputBuffer(int32_t picId, uint8_t *data); void saveFirstOutputBuffer(int32_t pidId, uint8_t *data); diff --git a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h b/media/libstagefright/include/SoftVideoDecoderOMXComponent.h new file mode 100644 index 0000000..d050fa6 --- /dev/null +++ b/media/libstagefright/include/SoftVideoDecoderOMXComponent.h @@ -0,0 +1,93 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SOFT_VIDEO_DECODER_OMX_COMPONENT_H_ + +#define SOFT_VIDEO_DECODER_OMX_COMPONENT_H_ + +#include "SimpleSoftOMXComponent.h" + +#include +#include + +#include +#include +#include + +#define ARRAY_SIZE(a) (sizeof(a) / sizeof(*(a))) + +namespace android { + +struct SoftVideoDecoderOMXComponent : public SimpleSoftOMXComponent { + SoftVideoDecoderOMXComponent( + const char *name, + const char *componentRole, + OMX_VIDEO_CODINGTYPE codingType, + const CodecProfileLevel *profileLevels, + size_t numProfileLevels, + int32_t width, + int32_t height, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component); + +protected: + virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); + + virtual OMX_ERRORTYPE internalGetParameter( + OMX_INDEXTYPE index, OMX_PTR params); + + virtual OMX_ERRORTYPE internalSetParameter( + OMX_INDEXTYPE index, const OMX_PTR params); + + virtual OMX_ERRORTYPE getConfig( + OMX_INDEXTYPE index, OMX_PTR params); + + void initPorts(OMX_U32 numInputBuffers, + OMX_U32 inputBufferSize, + OMX_U32 numOutputBuffers, + const char *mimeType); + + virtual void updatePortDefinitions(); + + enum { + kInputPortIndex = 0, + kOutputPortIndex = 1, + kMaxPortIndex = 1, + }; + + uint32_t mWidth, mHeight; + uint32_t mCropLeft, mCropTop, mCropWidth, mCropHeight; + + enum { + NONE, + AWAITING_DISABLED, + AWAITING_ENABLED + } mOutputPortSettingsChange; + +private: + const char *mComponentRole; + OMX_VIDEO_CODINGTYPE mCodingType; + const CodecProfileLevel *mProfileLevels; + size_t mNumProfileLevels; + + DISALLOW_EVIL_CONSTRUCTORS(SoftVideoDecoderOMXComponent); +}; + +} // namespace android + +#endif // SOFT_VIDEO_DECODER_OMX_COMPONENT_H_ diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk index a8b4939..cd912e7 100644 --- a/media/libstagefright/omx/Android.mk +++ b/media/libstagefright/omx/Android.mk @@ -9,6 +9,7 @@ LOCAL_SRC_FILES:= \ SimpleSoftOMXComponent.cpp \ SoftOMXComponent.cpp \ 
SoftOMXPlugin.cpp \ + SoftVideoDecoderOMXComponent.cpp \ LOCAL_C_INCLUDES += \ $(TOP)/frameworks/av/media/libstagefright \ diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp new file mode 100644 index 0000000..08a3d42 --- /dev/null +++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp @@ -0,0 +1,290 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SoftVideoDecoderOMXComponent" +#include + +#include "include/SoftVideoDecoderOMXComponent.h" + +#include +#include +#include +#include + +namespace android { + +template +static void InitOMXParams(T *params) { + params->nSize = sizeof(T); + params->nVersion.s.nVersionMajor = 1; + params->nVersion.s.nVersionMinor = 0; + params->nVersion.s.nRevision = 0; + params->nVersion.s.nStep = 0; +} + +SoftVideoDecoderOMXComponent::SoftVideoDecoderOMXComponent( + const char *name, + const char *componentRole, + OMX_VIDEO_CODINGTYPE codingType, + const CodecProfileLevel *profileLevels, + size_t numProfileLevels, + int32_t width, + int32_t height, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component) + : SimpleSoftOMXComponent(name, callbacks, appData, component), + mWidth(width), + mHeight(height), + mCropLeft(0), + mCropTop(0), + mCropWidth(width), + mCropHeight(height), + mOutputPortSettingsChange(NONE), + mComponentRole(componentRole), + mCodingType(codingType), + mProfileLevels(profileLevels), + mNumProfileLevels(numProfileLevels) { +} + +void SoftVideoDecoderOMXComponent::initPorts( + OMX_U32 numInputBuffers, + OMX_U32 inputBufferSize, + OMX_U32 numOutputBuffers, + const char *mimeType) { + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + + def.nPortIndex = kInputPortIndex; + def.eDir = OMX_DirInput; + def.nBufferCountMin = numInputBuffers; + def.nBufferCountActual = def.nBufferCountMin; + def.nBufferSize = inputBufferSize; + def.bEnabled = OMX_TRUE; + def.bPopulated = OMX_FALSE; + def.eDomain = OMX_PortDomainVideo; + def.bBuffersContiguous = OMX_FALSE; + def.nBufferAlignment = 1; + + def.format.video.cMIMEType = const_cast(mimeType); + def.format.video.pNativeRender = NULL; + /* size is initialized in updatePortDefinitions() */ + def.format.video.nBitrate = 0; + def.format.video.xFramerate = 0; + def.format.video.bFlagErrorConcealment = OMX_FALSE; + def.format.video.eCompressionFormat = mCodingType; + def.format.video.eColorFormat = OMX_COLOR_FormatUnused; + def.format.video.pNativeWindow = NULL; + + addPort(def); + + def.nPortIndex = kOutputPortIndex; + def.eDir = OMX_DirOutput; + def.nBufferCountMin = numOutputBuffers; + def.nBufferCountActual = def.nBufferCountMin; + def.bEnabled = OMX_TRUE; + def.bPopulated = OMX_FALSE; + def.eDomain = OMX_PortDomainVideo; + def.bBuffersContiguous = OMX_FALSE; + def.nBufferAlignment = 2; + + def.format.video.cMIMEType = const_cast("video/raw"); + 
def.format.video.pNativeRender = NULL; + /* size is initialized in updatePortDefinitions() */ + def.format.video.nBitrate = 0; + def.format.video.xFramerate = 0; + def.format.video.bFlagErrorConcealment = OMX_FALSE; + def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused; + def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar; + def.format.video.pNativeWindow = NULL; + + addPort(def); + + updatePortDefinitions(); +} + +void SoftVideoDecoderOMXComponent::updatePortDefinitions() { + OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef; + def->format.video.nFrameWidth = mWidth; + def->format.video.nFrameHeight = mHeight; + def->format.video.nStride = def->format.video.nFrameWidth; + def->format.video.nSliceHeight = def->format.video.nFrameHeight; + + def = &editPortInfo(kOutputPortIndex)->mDef; + def->format.video.nFrameWidth = mWidth; + def->format.video.nFrameHeight = mHeight; + def->format.video.nStride = def->format.video.nFrameWidth; + def->format.video.nSliceHeight = def->format.video.nFrameHeight; + + def->nBufferSize = + (def->format.video.nFrameWidth * + def->format.video.nFrameHeight * 3) / 2; + + mCropLeft = 0; + mCropTop = 0; + mCropWidth = mWidth; + mCropHeight = mHeight; +} + +OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalGetParameter( + OMX_INDEXTYPE index, OMX_PTR params) { + switch (index) { + case OMX_IndexParamVideoPortFormat: + { + OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams = + (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params; + + if (formatParams->nPortIndex > kMaxPortIndex) { + return OMX_ErrorUndefined; + } + + if (formatParams->nIndex != 0) { + return OMX_ErrorNoMore; + } + + if (formatParams->nPortIndex == kInputPortIndex) { + formatParams->eCompressionFormat = mCodingType; + formatParams->eColorFormat = OMX_COLOR_FormatUnused; + formatParams->xFramerate = 0; + } else { + CHECK_EQ(formatParams->nPortIndex, 1u); + + formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused; + formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar; + formatParams->xFramerate = 0; + } + + return OMX_ErrorNone; + } + + case OMX_IndexParamVideoProfileLevelQuerySupported: + { + OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel = + (OMX_VIDEO_PARAM_PROFILELEVELTYPE *) params; + + if (profileLevel->nPortIndex != kInputPortIndex) { + ALOGE("Invalid port index: %ld", profileLevel->nPortIndex); + return OMX_ErrorUnsupportedIndex; + } + + if (index >= mNumProfileLevels) { + return OMX_ErrorNoMore; + } + + profileLevel->eProfile = mProfileLevels[index].mProfile; + profileLevel->eLevel = mProfileLevels[index].mLevel; + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::internalGetParameter(index, params); + } +} + +OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalSetParameter( + OMX_INDEXTYPE index, const OMX_PTR params) { + switch (index) { + case OMX_IndexParamStandardComponentRole: + { + const OMX_PARAM_COMPONENTROLETYPE *roleParams = + (const OMX_PARAM_COMPONENTROLETYPE *)params; + + if (strncmp((const char *)roleParams->cRole, + mComponentRole, + OMX_MAX_STRINGNAME_SIZE - 1)) { + return OMX_ErrorUndefined; + } + + return OMX_ErrorNone; + } + + case OMX_IndexParamVideoPortFormat: + { + OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams = + (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params; + + if (formatParams->nPortIndex > kMaxPortIndex) { + return OMX_ErrorUndefined; + } + + if (formatParams->nIndex != 0) { + return OMX_ErrorNoMore; + } + + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::internalSetParameter(index, params); + 
} +} + +OMX_ERRORTYPE SoftVideoDecoderOMXComponent::getConfig( + OMX_INDEXTYPE index, OMX_PTR params) { + switch (index) { + case OMX_IndexConfigCommonOutputCrop: + { + OMX_CONFIG_RECTTYPE *rectParams = (OMX_CONFIG_RECTTYPE *)params; + + if (rectParams->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUndefined; + } + + rectParams->nLeft = mCropLeft; + rectParams->nTop = mCropTop; + rectParams->nWidth = mCropWidth; + rectParams->nHeight = mCropHeight; + + return OMX_ErrorNone; + } + + default: + return OMX_ErrorUnsupportedIndex; + } +} + +void SoftVideoDecoderOMXComponent::onReset() { + mOutputPortSettingsChange = NONE; +} + +void SoftVideoDecoderOMXComponent::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { + if (portIndex != kOutputPortIndex) { + return; + } + + switch (mOutputPortSettingsChange) { + case NONE: + break; + + case AWAITING_DISABLED: + { + CHECK(!enabled); + mOutputPortSettingsChange = AWAITING_ENABLED; + break; + } + + default: + { + CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED); + CHECK(enabled); + mOutputPortSettingsChange = NONE; + break; + } + } +} + +} // namespace android -- cgit v1.1 From d1fffa24d9b5d0d6f5ff9eda372befe114ceefb6 Mon Sep 17 00:00:00 2001 From: Mike Lockwood Date: Thu, 6 Jun 2013 15:00:14 -0700 Subject: Remove "LOCAL_MODULE_TAGS := debug" for stagefright tests Change-Id: I53815d2f6d7dfe7eebb26c3802eb3d195244aab1 --- cmds/stagefright/Android.mk | 4 ---- media/libstagefright/wifi-display/Android.mk | 8 -------- 2 files changed, 12 deletions(-) diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk index c45d70b..1060131 100644 --- a/cmds/stagefright/Android.mk +++ b/cmds/stagefright/Android.mk @@ -19,10 +19,6 @@ LOCAL_C_INCLUDES:= \ LOCAL_CFLAGS += -Wno-multichar -ifneq (true,$(ANDROID_BUILD_EMBEDDED)) -LOCAL_MODULE_TAGS := debug -endif - LOCAL_MODULE:= stagefright include $(BUILD_EXECUTABLE) diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index f99ef60..404b41e 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -64,8 +64,6 @@ LOCAL_SHARED_LIBRARIES:= \ LOCAL_MODULE:= wfd -LOCAL_MODULE_TAGS := debug - include $(BUILD_EXECUTABLE) ################################################################################ @@ -87,8 +85,6 @@ LOCAL_SHARED_LIBRARIES:= \ LOCAL_MODULE:= udptest -LOCAL_MODULE_TAGS := debug - include $(BUILD_EXECUTABLE) ################################################################################ @@ -110,8 +106,6 @@ LOCAL_SHARED_LIBRARIES:= \ LOCAL_MODULE:= rtptest -LOCAL_MODULE_TAGS := debug - include $(BUILD_EXECUTABLE) ################################################################################ @@ -133,6 +127,4 @@ LOCAL_SHARED_LIBRARIES:= \ LOCAL_MODULE:= nettest -LOCAL_MODULE_TAGS := debug - include $(BUILD_EXECUTABLE) -- cgit v1.1 From 17a61adbd7cc1abe432deeffc11660daa74f6496 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Mon, 3 Jun 2013 16:53:32 -0700 Subject: Camera3: Add more tracing Change-Id: I5f5d0d3f37244f19446c473db70373bdf16cce56 --- services/camera/libcameraservice/Camera3Device.cpp | 24 +++++++++++++++++++--- .../libcameraservice/camera3/Camera3Stream.cpp | 2 ++ 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp index 3f2287f..da77a2d 100644 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ 
b/services/camera/libcameraservice/Camera3Device.cpp @@ -128,7 +128,10 @@ status_t Camera3Device::initialize(camera_module_t *module) /** Initialize device with callback functions */ + ATRACE_BEGIN("camera3->initialize"); res = device->ops->initialize(device, this); + ATRACE_END(); + if (res != OK) { SET_ERR_L("Unable to initialize HAL device: %s (%d)", strerror(-res), res); @@ -140,7 +143,9 @@ status_t Camera3Device::initialize(camera_module_t *module) mVendorTagOps.get_camera_vendor_section_name = NULL; + ATRACE_BEGIN("camera3->get_metadata_vendor_tag_ops"); device->ops->get_metadata_vendor_tag_ops(device, &mVendorTagOps); + ATRACE_END(); if (mVendorTagOps.get_camera_vendor_section_name != NULL) { res = set_camera_metadata_vendor_tag_ops(&mVendorTagOps); @@ -743,8 +748,10 @@ status_t Camera3Device::createDefaultRequest(int templateId, } const camera_metadata_t *rawRequest; + ATRACE_BEGIN("camera3->construct_default_request_settings"); rawRequest = mHal3Device->ops->construct_default_request_settings( mHal3Device, templateId); + ATRACE_END(); if (rawRequest == NULL) { SET_ERR_L("HAL is unable to construct default settings for template %d", templateId); @@ -1049,8 +1056,9 @@ status_t Camera3Device::configureStreamsLocked() { // Do the HAL configuration; will potentially touch stream // max_buffers, usage, priv fields. - + ATRACE_BEGIN("camera3->configure_streams"); res = mHal3Device->ops->configure_streams(mHal3Device, &config); + ATRACE_END(); if (res != OK) { SET_ERR_L("Unable to configure streams with HAL: %s (%d)", @@ -1204,6 +1212,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { } if (request.haveResultMetadata && request.numBuffersLeft == 0) { + ATRACE_ASYNC_END("frame capture", frameNumber); mInFlightMap.removeItemsAt(idx, 1); } @@ -1353,6 +1362,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { } void Camera3Device::notify(const camera3_notify_msg *msg) { + ATRACE_CALL(); NotificationListener *listener; { Mutex::Autolock l(mOutputLock); @@ -1373,6 +1383,9 @@ void Camera3Device::notify(const camera3_notify_msg *msg) { msg->message.error.error_stream); streamId = stream->getId(); } + ALOGV("Camera %d: %s: HAL error, frame %d, stream %d: %d", + mId, __FUNCTION__, msg->message.error.frame_number, + streamId, msg->message.error.error_code); if (listener != NULL) { listener->notifyError(msg->message.error.error_code, msg->message.error.frame_number, streamId); @@ -1408,7 +1421,8 @@ void Camera3Device::notify(const camera3_notify_msg *msg) { frameNumber); break; } - + ALOGVV("Camera %d: %s: Shutter fired for frame %d at %lld", + mId, __FUNCTION__, frameNumber, timestamp); // Call listener, if any if (listener != NULL) { listener->notifyShutter(frameNumber, timestamp); @@ -1529,6 +1543,7 @@ void Camera3Device::RequestThread::setPaused(bool paused) { } status_t Camera3Device::RequestThread::waitUntilPaused(nsecs_t timeout) { + ATRACE_CALL(); status_t res; Mutex::Autolock l(mPauseLock); while (!mPaused) { @@ -1675,8 +1690,11 @@ bool Camera3Device::RequestThread::threadLoop() { } // Submit request and block until ready for next one - + ATRACE_ASYNC_BEGIN("frame capture", request.frame_number); + ATRACE_BEGIN("camera3->process_capture_request"); res = mHal3Device->ops->process_capture_request(mHal3Device, &request); + ATRACE_END(); + if (res != OK) { SET_ERR("RequestThread: Unable to submit capture request %d to HAL" " device: %s (%d)", request.frame_number, strerror(-res), res); diff --git 
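The tracing changes above follow a single pattern from <utils/Trace.h>: each blocking HAL entry point is bracketed with ATRACE_BEGIN("camera3->...")/ATRACE_END(), whole functions get ATRACE_CALL(), and per-frame latency is measured with an async slice that opens at request submission and closes when the result returns. A condensed sketch of that pattern (not part of the patch; doBlockingHalCall() is a hypothetical stand-in for the real camera3 ops such as process_capture_request):

#define ATRACE_TAG ATRACE_TAG_CAMERA   // must be defined before including <utils/Trace.h>
#include <utils/Trace.h>
#include <utils/Errors.h>

namespace android {

status_t doBlockingHalCall();          // hypothetical stand-in for a blocking camera3 HAL op

status_t submitFrame(uint32_t frameNumber) {
    ATRACE_CALL();                                     // slice covering this whole function
    ATRACE_ASYNC_BEGIN("frame capture", frameNumber);  // open a per-frame async slice

    ATRACE_BEGIN("camera3->process_capture_request");  // bracket only the blocking HAL call
    status_t res = doBlockingHalCall();
    ATRACE_END();

    return res;
}

void onFrameCompleted(uint32_t frameNumber) {
    ATRACE_ASYNC_END("frame capture", frameNumber);    // close the slice when the result arrives
}

}  // namespace android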
a/services/camera/libcameraservice/camera3/Camera3Stream.cpp b/services/camera/libcameraservice/camera3/Camera3Stream.cpp index f05658a..ab563df 100644 --- a/services/camera/libcameraservice/camera3/Camera3Stream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3Stream.cpp @@ -312,8 +312,10 @@ status_t Camera3Stream::registerBuffersLocked(camera3_device *hal3Device) { // Got all buffers, register with HAL ALOGV("%s: Registering %d buffers with camera HAL", __FUNCTION__, bufferCount); + ATRACE_BEGIN("camera3->register_stream_buffers"); res = hal3Device->ops->register_stream_buffers(hal3Device, &bufferSet); + ATRACE_END(); } // Return all valid buffers to stream, in ERROR state to indicate -- cgit v1.1 From bef3f23f16f2fc575b3f425febcfcc436a7db70f Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 30 May 2013 17:47:38 -0700 Subject: camera: Make some binder interfaces compatible with AIDL Done: * ICameraService * ICameraServiceListener Partial: * ICamera (disconnect only) * IProCameraUser (disconnect only) Bug: 9213377 Change-Id: I8e2e6e05cfd02ec36be1d5b2c551f10ffb43b5b4 --- camera/ICamera.cpp | 2 + camera/ICameraService.cpp | 87 ++++++++++++++++++++++++++++++--- camera/ICameraServiceListener.cpp | 3 ++ camera/IProCameraUser.cpp | 2 + include/camera/ICamera.h | 3 ++ include/camera/ICameraClient.h | 3 ++ include/camera/ICameraService.h | 3 ++ include/camera/ICameraServiceListener.h | 3 ++ include/camera/IProCameraCallbacks.h | 3 ++ include/camera/IProCameraUser.h | 3 ++ 10 files changed, 106 insertions(+), 6 deletions(-) diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp index 732c204..12356f0 100644 --- a/camera/ICamera.cpp +++ b/camera/ICamera.cpp @@ -66,6 +66,7 @@ public: Parcel data, reply; data.writeInterfaceToken(ICamera::getInterfaceDescriptor()); remote()->transact(DISCONNECT, data, &reply); + reply.readExceptionCode(); } // pass the buffered IGraphicBufferProducer to the camera service @@ -281,6 +282,7 @@ status_t BnCamera::onTransact( ALOGV("DISCONNECT"); CHECK_INTERFACE(ICamera, data, reply); disconnect(); + reply->writeNoException(); return NO_ERROR; } break; case SET_PREVIEW_TEXTURE: { diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp index 134f7f0..819e410 100644 --- a/camera/ICameraService.cpp +++ b/camera/ICameraService.cpp @@ -15,6 +15,9 @@ ** limitations under the License. */ +#define LOG_TAG "BpCameraService" +#include + #include #include @@ -31,6 +34,53 @@ namespace android { +namespace { + +enum { + EX_SECURITY = -1, + EX_BAD_PARCELABLE = -2, + EX_ILLEGAL_ARGUMENT = -3, + EX_NULL_POINTER = -4, + EX_ILLEGAL_STATE = -5, + EX_HAS_REPLY_HEADER = -128, // special; see below +}; + +static bool readExceptionCode(Parcel& reply) { + int32_t exceptionCode = reply.readExceptionCode(); + + if (exceptionCode != 0) { + const char* errorMsg; + switch(exceptionCode) { + case EX_SECURITY: + errorMsg = "Security"; + break; + case EX_BAD_PARCELABLE: + errorMsg = "BadParcelable"; + break; + case EX_NULL_POINTER: + errorMsg = "NullPointer"; + break; + case EX_ILLEGAL_STATE: + errorMsg = "IllegalState"; + break; + // Binder should be handling this code inside Parcel::readException + // but lets have a to-string here anyway just in case. 
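For reference, the wire pattern behind this AIDL-compatibility change is fixed: the server side prefixes every reply parcel with an exception header via Parcel::writeNoException() before the payload, and the client side consumes it with Parcel::readExceptionCode() before reading anything else. A condensed sketch of that pattern on a hypothetical IFoo interface (the DECLARE_META_INTERFACE/IMPLEMENT_META_INTERFACE scaffolding and the GET_VALUE transaction code are omitted; the real interfaces are the camera ones patched here):

#include <binder/IInterface.h>
#include <binder/Parcel.h>

// Server side: write the exception header AIDL-generated clients expect,
// then write the payload.
status_t BnFoo::onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) {
    switch (code) {
        case GET_VALUE: {
            CHECK_INTERFACE(IFoo, data, reply);
            reply->writeNoException();
            reply->writeInt32(getValue());
            return NO_ERROR;
        }
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}

// Client side: consume (and check) the header before the payload.
int32_t BpFoo::getValue() {
    Parcel data, reply;
    data.writeInterfaceToken(IFoo::getInterfaceDescriptor());
    remote()->transact(GET_VALUE, data, &reply);
    if (reply.readExceptionCode() != 0) {
        return -1;   // remote threw; surface it as an error value
    }
    return reply.readInt32();
}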
+ case EX_HAS_REPLY_HEADER: + errorMsg = "HasReplyHeader"; + break; + default: + errorMsg = "Unknown"; + } + + ALOGE("Binder transmission error %s (%d)", errorMsg, exceptionCode); + return true; + } + + return false; +} + +}; + class BpCameraService: public BpInterface { public: @@ -45,6 +95,8 @@ public: Parcel data, reply; data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); remote()->transact(BnCameraService::GET_NUMBER_OF_CAMERAS, data, &reply); + + if (readExceptionCode(reply)) return 0; return reply.readInt32(); } @@ -55,9 +107,14 @@ public: data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); data.writeInt32(cameraId); remote()->transact(BnCameraService::GET_CAMERA_INFO, data, &reply); - cameraInfo->facing = reply.readInt32(); - cameraInfo->orientation = reply.readInt32(); - return reply.readInt32(); + + if (readExceptionCode(reply)) return -EPROTO; + status_t result = reply.readInt32(); + if (reply.readInt32() != 0) { + cameraInfo->facing = reply.readInt32(); + cameraInfo->orientation = reply.readInt32(); + } + return result; } // connect to camera service @@ -71,6 +128,8 @@ public: data.writeString16(clientPackageName); data.writeInt32(clientUid); remote()->transact(BnCameraService::CONNECT, data, &reply); + + if (readExceptionCode(reply)) return NULL; return interface_cast(reply.readStrongBinder()); } @@ -85,6 +144,8 @@ public: data.writeString16(clientPackageName); data.writeInt32(clientUid); remote()->transact(BnCameraService::CONNECT_PRO, data, &reply); + + if (readExceptionCode(reply)) return NULL; return interface_cast(reply.readStrongBinder()); } @@ -94,6 +155,8 @@ public: data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); data.writeStrongBinder(listener->asBinder()); remote()->transact(BnCameraService::ADD_LISTENER, data, &reply); + + if (readExceptionCode(reply)) return -EPROTO; return reply.readInt32(); } @@ -103,6 +166,8 @@ public: data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); data.writeStrongBinder(listener->asBinder()); remote()->transact(BnCameraService::REMOVE_LISTENER, data, &reply); + + if (readExceptionCode(reply)) return -EPROTO; return reply.readInt32(); } }; @@ -117,17 +182,22 @@ status_t BnCameraService::onTransact( switch(code) { case GET_NUMBER_OF_CAMERAS: { CHECK_INTERFACE(ICameraService, data, reply); + reply->writeNoException(); reply->writeInt32(getNumberOfCameras()); return NO_ERROR; } break; case GET_CAMERA_INFO: { CHECK_INTERFACE(ICameraService, data, reply); - CameraInfo cameraInfo; + CameraInfo cameraInfo = CameraInfo(); memset(&cameraInfo, 0, sizeof(cameraInfo)); status_t result = getCameraInfo(data.readInt32(), &cameraInfo); + reply->writeNoException(); + reply->writeInt32(result); + + // Fake a parcelable object here + reply->writeInt32(1); // means the parcelable is included reply->writeInt32(cameraInfo.facing); reply->writeInt32(cameraInfo.orientation); - reply->writeInt32(result); return NO_ERROR; } break; case CONNECT: { @@ -139,17 +209,20 @@ status_t BnCameraService::onTransact( int32_t clientUid = data.readInt32(); sp camera = connect(cameraClient, cameraId, clientName, clientUid); + reply->writeNoException(); reply->writeStrongBinder(camera->asBinder()); return NO_ERROR; } break; case CONNECT_PRO: { CHECK_INTERFACE(ICameraService, data, reply); - sp cameraClient = interface_cast(data.readStrongBinder()); + sp cameraClient = + interface_cast(data.readStrongBinder()); int32_t cameraId = data.readInt32(); const String16 clientName = data.readString16(); int32_t clientUid = 
data.readInt32(); sp camera = connect(cameraClient, cameraId, clientName, clientUid); + reply->writeNoException(); reply->writeStrongBinder(camera->asBinder()); return NO_ERROR; } break; @@ -157,6 +230,7 @@ status_t BnCameraService::onTransact( CHECK_INTERFACE(ICameraService, data, reply); sp listener = interface_cast(data.readStrongBinder()); + reply->writeNoException(); reply->writeInt32(addListener(listener)); return NO_ERROR; } break; @@ -164,6 +238,7 @@ status_t BnCameraService::onTransact( CHECK_INTERFACE(ICameraService, data, reply); sp listener = interface_cast(data.readStrongBinder()); + reply->writeNoException(); reply->writeInt32(removeListener(listener)); return NO_ERROR; } break; diff --git a/camera/ICameraServiceListener.cpp b/camera/ICameraServiceListener.cpp index 640ee35..b2f1729 100644 --- a/camera/ICameraServiceListener.cpp +++ b/camera/ICameraServiceListener.cpp @@ -54,6 +54,8 @@ public: data, &reply, IBinder::FLAG_ONEWAY); + + reply.readExceptionCode(); } }; @@ -73,6 +75,7 @@ status_t BnCameraServiceListener::onTransact( int32_t cameraId = data.readInt32(); onStatusChanged(status, cameraId); + reply->writeNoException(); return NO_ERROR; } break; diff --git a/camera/IProCameraUser.cpp b/camera/IProCameraUser.cpp index 4c4dec3..015cb5c 100644 --- a/camera/IProCameraUser.cpp +++ b/camera/IProCameraUser.cpp @@ -162,6 +162,7 @@ public: Parcel data, reply; data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); remote()->transact(DISCONNECT, data, &reply); + reply.readExceptionCode(); } virtual status_t connect(const sp& cameraClient) @@ -307,6 +308,7 @@ status_t BnProCameraUser::onTransact( ALOGV("DISCONNECT"); CHECK_INTERFACE(IProCameraUser, data, reply); disconnect(); + reply->writeNoException(); return NO_ERROR; } break; case CONNECT: { diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h index b2125bd..f3a186e 100644 --- a/include/camera/ICamera.h +++ b/include/camera/ICamera.h @@ -32,6 +32,9 @@ class Surface; class ICamera: public IInterface { + /** + * Keep up-to-date with ICamera.aidl in frameworks/base + */ public: DECLARE_META_INTERFACE(Camera); diff --git a/include/camera/ICameraClient.h b/include/camera/ICameraClient.h index b30aa7a..1584dba 100644 --- a/include/camera/ICameraClient.h +++ b/include/camera/ICameraClient.h @@ -28,6 +28,9 @@ namespace android { class ICameraClient: public IInterface { + /** + * Keep up-to-date with ICameraClient.aidl in frameworks/base + */ public: DECLARE_META_INTERFACE(CameraClient); diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h index aaf6eb3..3c2e60a 100644 --- a/include/camera/ICameraService.h +++ b/include/camera/ICameraService.h @@ -32,6 +32,9 @@ class ICameraServiceListener; class ICameraService : public IInterface { public: + /** + * Keep up-to-date with ICameraService.aidl in frameworks/base + */ enum { GET_NUMBER_OF_CAMERAS = IBinder::FIRST_CALL_TRANSACTION, GET_CAMERA_INFO, diff --git a/include/camera/ICameraServiceListener.h b/include/camera/ICameraServiceListener.h index f2a11c2..0a0e43a 100644 --- a/include/camera/ICameraServiceListener.h +++ b/include/camera/ICameraServiceListener.h @@ -26,6 +26,9 @@ namespace android { class ICameraServiceListener : public IInterface { + /** + * Keep up-to-date with ICameraServiceListener.aidl in frameworks/base + */ public: /** diff --git a/include/camera/IProCameraCallbacks.h b/include/camera/IProCameraCallbacks.h index 563ec17..c774698 100644 --- a/include/camera/IProCameraCallbacks.h +++ 
b/include/camera/IProCameraCallbacks.h @@ -30,6 +30,9 @@ namespace android { class IProCameraCallbacks : public IInterface { + /** + * Keep up-to-date with IProCameraCallbacks.aidl in frameworks/base + */ public: DECLARE_META_INTERFACE(ProCameraCallbacks); diff --git a/include/camera/IProCameraUser.h b/include/camera/IProCameraUser.h index 45b818c..2ccc4d2 100644 --- a/include/camera/IProCameraUser.h +++ b/include/camera/IProCameraUser.h @@ -34,6 +34,9 @@ class Surface; class IProCameraUser: public IInterface { + /** + * Keep up-to-date with IProCameraUser.aidl in frameworks/base + */ public: DECLARE_META_INTERFACE(ProCameraUser); -- cgit v1.1 From 9fef8d453b15a91a2b748faac2bfaff713bcf1e1 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 12 Jun 2013 10:26:19 -0700 Subject: Converter now takes the desired _output_ format instead of the input format, allowing control over the type of encoding. Change-Id: Iaaa1a825f447ea130e373bbd8e5dc96f2762db18 --- .../wifi-display/source/Converter.cpp | 47 +++++++++------------- .../libstagefright/wifi-display/source/Converter.h | 11 ++--- .../wifi-display/source/PlaybackSession.cpp | 9 ++++- 3 files changed, 30 insertions(+), 37 deletions(-) diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index e62505d..0214520 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -40,14 +40,13 @@ namespace android { Converter::Converter( const sp ¬ify, const sp &codecLooper, - const sp &format, - bool usePCMAudio) + const sp &outputFormat) : mInitCheck(NO_INIT), mNotify(notify), mCodecLooper(codecLooper), - mInputFormat(format), + mOutputFormat(outputFormat), mIsVideo(false), - mIsPCMAudio(usePCMAudio), + mIsPCMAudio(false), mNeedToManuallyPrependSPSPPS(false), mDoMoreWorkPending(false) #if ENABLE_SILENCE_DETECTION @@ -58,14 +57,14 @@ Converter::Converter( ,mNumFramesToDrop(0) { AString mime; - CHECK(mInputFormat->findString("mime", &mime)); + CHECK(mOutputFormat->findString("mime", &mime)); if (!strncasecmp("video/", mime.c_str(), 6)) { mIsVideo = true; + } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime.c_str())) { + mIsPCMAudio = true; } - CHECK(!usePCMAudio || !mIsVideo); - mInitCheck = initEncoder(); if (mInitCheck != OK) { @@ -152,23 +151,10 @@ int32_t Converter::GetInt32Property( } status_t Converter::initEncoder() { - AString inputMIME; - CHECK(mInputFormat->findString("mime", &inputMIME)); - AString outputMIME; - bool isAudio = false; - if (!strcasecmp(inputMIME.c_str(), MEDIA_MIMETYPE_AUDIO_RAW)) { - if (mIsPCMAudio) { - outputMIME = MEDIA_MIMETYPE_AUDIO_RAW; - } else { - outputMIME = MEDIA_MIMETYPE_AUDIO_AAC; - } - isAudio = true; - } else if (!strcasecmp(inputMIME.c_str(), MEDIA_MIMETYPE_VIDEO_RAW)) { - outputMIME = MEDIA_MIMETYPE_VIDEO_AVC; - } else { - TRESPASS(); - } + CHECK(mOutputFormat->findString("mime", &outputMIME)); + + bool isAudio = !strncasecmp(outputMIME.c_str(), "audio/", 6); if (!mIsPCMAudio) { mEncoder = MediaCodec::CreateByType( @@ -179,14 +165,10 @@ status_t Converter::initEncoder() { } } - mOutputFormat = mInputFormat->dup(); - if (mIsPCMAudio) { return OK; } - mOutputFormat->setString("mime", outputMIME.c_str()); - int32_t audioBitrate = GetInt32Property("media.wfd.audio-bitrate", 128000); int32_t videoBitrate = GetInt32Property("media.wfd.video-bitrate", 5000000); mPrevVideoBitrate = videoBitrate; @@ -427,7 +409,7 @@ void Converter::onMessageReceived(const sp &msg) { 
releaseEncoder(); AString mime; - CHECK(mInputFormat->findString("mime", &mime)); + CHECK(mOutputFormat->findString("mime", &mime)); ALOGI("encoder (%s) shut down.", mime.c_str()); break; } @@ -679,6 +661,15 @@ status_t Converter::doMoreWork() { notify->setInt32("what", kWhatEOS); notify->post(); } else { +#if 0 + if (mIsVideo) { + int32_t videoBitrate = GetInt32Property( + "media.wfd.video-bitrate", 5000000); + + setVideoBitrate(videoBitrate); + } +#endif + sp buffer; sp outbuf = mEncoderOutputBuffers.itemAt(bufferIndex); diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h index fceef55..76c8b19 100644 --- a/media/libstagefright/wifi-display/source/Converter.h +++ b/media/libstagefright/wifi-display/source/Converter.h @@ -33,11 +33,9 @@ struct MediaCodec; // media access unit of a different format. // Right now this'll convert raw video into H.264 and raw audio into AAC. struct Converter : public AHandler { - Converter( - const sp ¬ify, - const sp &codecLooper, - const sp &format, - bool usePCMAudio); + Converter(const sp ¬ify, + const sp &codecLooper, + const sp &outputFormat); status_t initCheck() const; @@ -84,10 +82,9 @@ private: status_t mInitCheck; sp mNotify; sp mCodecLooper; - sp mInputFormat; + sp mOutputFormat; bool mIsVideo; bool mIsPCMAudio; - sp mOutputFormat; bool mNeedToManuallyPrependSPSPPS; sp mEncoder; diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index 7f0ba96..a15fbac 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -937,6 +937,7 @@ status_t WifiDisplaySource::PlaybackSession::addSource( CHECK_EQ(err, (status_t)OK); if (isVideo) { + format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC); format->setInt32("store-metadata-in-buffers", true); format->setInt32("store-metadata-in-buffers-output", (mHDCP != NULL)); format->setInt32( @@ -944,13 +945,17 @@ status_t WifiDisplaySource::PlaybackSession::addSource( format->setInt32("profile-idc", profileIdc); format->setInt32("level-idc", levelIdc); format->setInt32("constraint-set", constraintSet); + } else { + format->setString( + "mime", + usePCMAudio + ? 
MEDIA_MIMETYPE_AUDIO_RAW : MEDIA_MIMETYPE_AUDIO_AAC); } notify = new AMessage(kWhatConverterNotify, id()); notify->setSize("trackIndex", trackIndex); - sp converter = - new Converter(notify, codecLooper, format, usePCMAudio); + sp converter = new Converter(notify, codecLooper, format); err = converter->initCheck(); if (err != OK) { -- cgit v1.1 From 9f80dd223d83d9bb9077fb6baee056cee4eaf7e5 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 18 Dec 2012 15:57:32 -0800 Subject: New control block for AudioTrack and AudioRecord Main differences between old and new control block: - removes the mutex, which was a potential source of priority inversion - circular indices into shared buffer, which is now always a power-of-2 size Change-Id: I4e9b7fa99858b488ac98a441fa70e31dbba1b865 --- include/media/AudioBufferProvider.h | 15 + include/media/AudioRecord.h | 251 ++++-- include/media/AudioTrack.h | 274 ++++--- include/private/media/AudioTrackShared.h | 391 +++++++--- media/libmedia/AudioRecord.cpp | 832 +++++++++++--------- media/libmedia/AudioTrack.cpp | 1219 ++++++++++++++++-------------- media/libmedia/AudioTrackShared.cpp | 716 +++++++++++++++--- media/libmedia/ToneGenerator.cpp | 4 +- services/audioflinger/AudioFlinger.h | 1 + services/audioflinger/PlaybackTracks.h | 7 +- services/audioflinger/RecordTracks.h | 1 + services/audioflinger/Threads.cpp | 59 +- services/audioflinger/TrackBase.h | 2 +- services/audioflinger/Tracks.cpp | 274 +++---- 14 files changed, 2496 insertions(+), 1550 deletions(-) diff --git a/include/media/AudioBufferProvider.h b/include/media/AudioBufferProvider.h index 43e4de7..ef392f0 100644 --- a/include/media/AudioBufferProvider.h +++ b/include/media/AudioBufferProvider.h @@ -26,6 +26,8 @@ class AudioBufferProvider { public: + // FIXME merge with AudioTrackShared::Buffer, AudioTrack::Buffer, and AudioRecord::Buffer + // and rename getNextBuffer() to obtainBuffer() struct Buffer { Buffer() : raw(NULL), frameCount(0) { } union { @@ -44,6 +46,19 @@ public: // pts is the local time when the next sample yielded by getNextBuffer // will be rendered. // Pass kInvalidPTS if the PTS is unknown or not applicable. + // On entry: + // buffer != NULL + // buffer->raw unused + // buffer->frameCount maximum number of desired frames + // On successful return: + // status NO_ERROR + // buffer->raw non-NULL pointer to buffer->frameCount contiguous available frames + // buffer->frameCount number of contiguous available frames at buffer->raw, + // 0 < buffer->frameCount <= entry value + // On error return: + // status != NO_ERROR + // buffer->raw NULL + // buffer->frameCount 0 virtual status_t getNextBuffer(Buffer* buffer, int64_t pts = kInvalidPTS) = 0; virtual void releaseBuffer(Buffer* buffer) = 0; diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index 38c6548..81be803 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -14,26 +14,24 @@ * limitations under the License. */ -#ifndef AUDIORECORD_H_ -#define AUDIORECORD_H_ +#ifndef ANDROID_AUDIORECORD_H +#define ANDROID_AUDIORECORD_H -#include #include #include #include -#include -#include -#include #include namespace android { +// ---------------------------------------------------------------------------- + class audio_track_cblk_t; class AudioRecordClientProxy; // ---------------------------------------------------------------------------- -class AudioRecord : virtual public RefBase +class AudioRecord : public RefBase { public: @@ -49,6 +47,8 @@ public: // (See setMarkerPosition()). 
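The index scheme described in the control-block commit message above is worth spelling out: both sides keep free-running 32-bit counters that only ever increase (wrapping modulo 2^32), the fill level is the unsigned difference rear - front, and a position is mapped into the power-of-2 sized buffer by masking with size - 1. A small sketch of that arithmetic (a model only, assuming one producer and one consumer; the real AudioTrackShared code adds atomics, futex-based waiting and overrun/invalidate handling):

#include <cassert>
#include <cstddef>
#include <cstdint>

// Minimal model of the power-of-2 circular indexing described above.
struct CircularIndices {
    explicit CircularIndices(size_t frameCount)
        : mFrameCount(frameCount), mFront(0), mRear(0) {
        // A power-of-2 size lets "index % size" be computed with a cheap mask.
        assert(frameCount != 0 && (frameCount & (frameCount - 1)) == 0);
    }

    // Indices are free-running and wrap naturally in uint32_t arithmetic,
    // so the difference is always the number of frames currently queued.
    size_t filled() const    { return uint32_t(mRear - mFront); }
    size_t available() const { return mFrameCount - filled(); }

    // Producer: where to write next, and commit after writing.
    size_t writeOffset() const { return mRear & (mFrameCount - 1); }
    void   produce(size_t frames) { mRear += frames; }

    // Consumer: where to read next, and release after reading.
    size_t readOffset() const { return mFront & (mFrameCount - 1); }
    void   consume(size_t frames) { mFront += frames; }

    const size_t mFrameCount;  // must be a power of 2
    uint32_t     mFront;       // frames consumed so far (mod 2^32)
    uint32_t     mRear;        // frames produced so far (mod 2^32)
};

Because each counter has a single writer and the other side only needs a snapshot of both, the fill level stays consistent without a shared mutex, which is what removes the priority-inversion hazard called out in the commit message.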
EVENT_NEW_POS = 3, // Record head is at a new position // (See setPositionUpdatePeriod()). + EVENT_NEW_IAUDIORECORD = 4, // IAudioRecord was re-created, either due to re-routing and + // voluntary invalidation by mediaserver, or mediaserver crash. }; /* Client should declare Buffer on the stack and pass address to obtainBuffer() @@ -58,11 +58,16 @@ public: class Buffer { public: + // FIXME use m prefix size_t frameCount; // number of sample frames corresponding to size; // on input it is the number of frames available, // on output is the number of frames actually drained - size_t size; // total size in bytes == frameCount * frameSize + size_t size; // input/output in bytes == frameCount * frameSize + // FIXME this is redundant with respect to frameCount, + // and TRANSFER_OBTAIN mode is broken for 8-bit data + // since we don't define the frame format + union { void* raw; short* i16; // signed 16-bit @@ -84,6 +89,7 @@ public: * - EVENT_OVERRUN: unused. * - EVENT_MARKER: pointer to const uint32_t containing the marker position in frames. * - EVENT_NEW_POS: pointer to const uint32_t containing the new position in frames. + * - EVENT_NEW_IAUDIORECORD: unused. */ typedef void (*callback_t)(int event, void* user, void *info); @@ -101,20 +107,28 @@ public: audio_format_t format, audio_channel_mask_t channelMask); + /* How data is transferred from AudioRecord + */ + enum transfer_type { + TRANSFER_DEFAULT, // not specified explicitly; determine from other parameters + TRANSFER_CALLBACK, // callback EVENT_MORE_DATA + TRANSFER_OBTAIN, // FIXME deprecated: call obtainBuffer() and releaseBuffer() + TRANSFER_SYNC, // synchronous read() + }; + /* Constructs an uninitialized AudioRecord. No connection with - * AudioFlinger takes place. + * AudioFlinger takes place. Use set() after this. */ AudioRecord(); /* Creates an AudioRecord object and registers it with AudioFlinger. * Once created, the track needs to be started before it can be used. - * Unspecified values are set to the audio hardware's current - * values. + * Unspecified values are set to appropriate default values. * * Parameters: * - * inputSource: Select the audio input to record to (e.g. AUDIO_SOURCE_DEFAULT). - * sampleRate: Track sampling rate in Hz. + * inputSource: Select the audio input to record from (e.g. AUDIO_SOURCE_DEFAULT). + * sampleRate: Data sink sampling rate in Hz. * format: Audio format (e.g AUDIO_FORMAT_PCM_16_BIT for signed * 16 bits per sample). * channelMask: Channel mask. @@ -124,11 +138,13 @@ public: * be larger if the requested size is not compatible with current audio HAL * latency. Zero means to use a default value. * cbf: Callback function. If not null, this function is called periodically - * to consume new PCM data. + * to consume new PCM data and inform of marker, position updates, etc. * user: Context for use by the callback receiver. * notificationFrames: The callback function is called each time notificationFrames PCM * frames are ready in record track output buffer. * sessionId: Not yet supported. + * transferType: How data is transferred from AudioRecord. + * threadCanCallJava: Not present in parameter list, and so is fixed at false. */ AudioRecord(audio_source_t inputSource, @@ -139,22 +155,26 @@ public: callback_t cbf = NULL, void* user = NULL, int notificationFrames = 0, - int sessionId = 0); - + int sessionId = 0, + transfer_type transferType = TRANSFER_DEFAULT); /* Terminates the AudioRecord and unregisters it from AudioFlinger. * Also destroys all resources associated with the AudioRecord. 
*/ ~AudioRecord(); - - /* Initialize an uninitialized AudioRecord. + /* Initialize an AudioRecord that was created using the AudioRecord() constructor. + * Don't call set() more than once, or after an AudioRecord() constructor that takes parameters. * Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful intialization - * - INVALID_OPERATION: AudioRecord is already intitialized or record device is already in use + * - INVALID_OPERATION: AudioRecord is already initialized or record device is already in use * - BAD_VALUE: invalid parameter (channels, format, sampleRate...) * - NO_INIT: audio server or audio hardware not initialized * - PERMISSION_DENIED: recording is not allowed for the requesting process + * + * Parameters not listed in the AudioRecord constructors above: + * + * threadCanCallJava: Whether callbacks are made from an attached thread and thus can call JNI. */ status_t set(audio_source_t inputSource = AUDIO_SOURCE_DEFAULT, uint32_t sampleRate = 0, @@ -165,30 +185,29 @@ public: void* user = NULL, int notificationFrames = 0, bool threadCanCallJava = false, - int sessionId = 0); - + int sessionId = 0, + transfer_type transferType = TRANSFER_DEFAULT); /* Result of constructing the AudioRecord. This must be checked * before using any AudioRecord API (except for set()), because using * an uninitialized AudioRecord produces undefined results. * See set() method above for possible return codes. */ - status_t initCheck() const; + status_t initCheck() const { return mStatus; } /* Returns this track's estimated latency in milliseconds. * This includes the latency due to AudioRecord buffer size, * and audio hardware driver. */ - uint32_t latency() const; + uint32_t latency() const { return mLatency; } /* getters, see constructor and set() */ - audio_format_t format() const; - uint32_t channelCount() const; - size_t frameCount() const; - size_t frameSize() const { return mFrameSize; } - audio_source_t inputSource() const; - + audio_format_t format() const { return mFormat; } + uint32_t channelCount() const { return mChannelCount; } + size_t frameCount() const { return mFrameCount; } + size_t frameSize() const { return mFrameSize; } + audio_source_t inputSource() const { return mInputSource; } /* After it's created the track is not active. Call start() to * make it active. If set, the callback will start being called. @@ -198,26 +217,29 @@ public: status_t start(AudioSystem::sync_event_t event = AudioSystem::SYNC_EVENT_NONE, int triggerSession = 0); - /* Stop a track. If set, the callback will cease being called and - * obtainBuffer returns STOPPED. Note that obtainBuffer() still works - * and will drain buffers until the pool is exhausted. + /* Stop a track. If set, the callback will cease being called. Note that obtainBuffer() still + * works and will drain buffers until the pool is exhausted, and then will return WOULD_BLOCK. */ void stop(); bool stopped() const; - /* Get sample rate for this record track in Hz. + /* Return the sink sample rate for this record track in Hz. + * Unlike AudioTrack, the sample rate is const after initialization, so doesn't need a lock. */ - uint32_t getSampleRate() const; + uint32_t getSampleRate() const { return mSampleRate; } /* Sets marker position. When record reaches the number of frames specified, * a callback with event type EVENT_MARKER is called. Calling setMarkerPosition * with marker == 0 cancels marker notification callback. 
+ * To set a marker at a position which would compute as 0, + * a workaround is to the set the marker at a nearby position such as ~0 or 1. * If the AudioRecord has been opened with no callback function associated, * the operation will fail. * * Parameters: * - * marker: marker position expressed in frames. + * marker: marker position expressed in wrapping (overflow) frame units, + * like the return value of getPosition(). * * Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful operation @@ -226,13 +248,13 @@ public: status_t setMarkerPosition(uint32_t marker); status_t getMarkerPosition(uint32_t *marker) const; - /* Sets position update period. Every time the number of frames specified has been recorded, * a callback with event type EVENT_NEW_POS is called. * Calling setPositionUpdatePeriod with updatePeriod == 0 cancels new position notification * callback. * If the AudioRecord has been opened with no callback function associated, * the operation will fail. + * Extremely small values may be rounded up to a value the implementation can support. * * Parameters: * @@ -245,13 +267,13 @@ public: status_t setPositionUpdatePeriod(uint32_t updatePeriod); status_t getPositionUpdatePeriod(uint32_t *updatePeriod) const; - - /* Gets record head position. The position is the total number of frames - * recorded since record start. + /* Return the total number of frames recorded since recording started. + * The counter will wrap (overflow) periodically, e.g. every ~27 hours at 44.1 kHz. + * It is reset to zero by stop(). * * Parameters: * - * position: Address where to return record head position within AudioRecord buffer. + * position: Address where to return record head position. * * Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful operation @@ -276,38 +298,70 @@ public: * * Returned value: * AudioRecord session ID. + * + * No lock needed because session ID doesn't change after first set(). */ - int getSessionId() const; - - /* Obtains a buffer of "frameCount" frames. The buffer must be - * drained entirely, and then released with releaseBuffer(). - * If the track is stopped, obtainBuffer() returns - * STOPPED instead of NO_ERROR as long as there are buffers available, - * at which point NO_MORE_BUFFERS is returned. + int getSessionId() const { return mSessionId; } + + /* Obtains a buffer of up to "audioBuffer->frameCount" full frames. + * After draining these frames of data, the caller should release them with releaseBuffer(). + * If the track buffer is not empty, obtainBuffer() returns as many contiguous + * full frames as are available immediately. + * If the track buffer is empty and track is stopped, obtainBuffer() returns WOULD_BLOCK + * regardless of the value of waitCount. + * If the track buffer is empty and track is not stopped, obtainBuffer() blocks with a + * maximum timeout based on waitCount; see chart below. * Buffers will be returned until the pool * is exhausted, at which point obtainBuffer() will either block - * or return WOULD_BLOCK depending on the value of the "blocking" + * or return WOULD_BLOCK depending on the value of the "waitCount" * parameter. * + * obtainBuffer() and releaseBuffer() are deprecated for direct use by applications, + * which should use read() or callback EVENT_MORE_DATA instead. + * * Interpretation of waitCount: * +n limits wait time to n * WAIT_PERIOD_MS, * -1 causes an (almost) infinite wait time, * 0 non-blocking. 
+ * + * Buffer fields + * On entry: + * frameCount number of frames requested + * After error return: + * frameCount 0 + * size 0 + * raw undefined + * After successful return: + * frameCount actual number of frames available, <= number requested + * size actual number of bytes available + * raw pointer to the buffer */ - enum { - NO_MORE_BUFFERS = 0x80000001, // same name in AudioFlinger.h, ok to be different value - STOPPED = 1 - }; + /* FIXME Deprecated public API for TRANSFER_OBTAIN mode */ + status_t obtainBuffer(Buffer* audioBuffer, int32_t waitCount) + __attribute__((__deprecated__)); - status_t obtainBuffer(Buffer* audioBuffer, int32_t waitCount); +private: + /* New internal API. + * If nonContig is non-NULL, it is an output parameter that will be set to the number of + * additional non-contiguous frames that are available immediately. + * FIXME We could pass an array of Buffers instead of only one Buffer to obtainBuffer(), + * in case the requested amount of frames is in two or more non-contiguous regions. + * FIXME requested and elapsed are both relative times. Consider changing to absolute time. + */ + status_t obtainBuffer(Buffer* audioBuffer, const struct timespec *requested, + struct timespec *elapsed = NULL, size_t *nonContig = NULL); +public: - /* Release an emptied buffer of "frameCount" frames for AudioFlinger to re-fill. */ + /* Release an emptied buffer of "audioBuffer->frameCount" frames for AudioFlinger to re-fill. */ + // FIXME make private when obtainBuffer() for TRANSFER_OBTAIN is removed void releaseBuffer(Buffer* audioBuffer); - /* As a convenience we provide a read() interface to the audio buffer. - * This is implemented on top of obtainBuffer/releaseBuffer. + * Input parameter 'size' is in byte units. + * This is implemented on top of obtainBuffer/releaseBuffer. For best + * performance use callbacks. Returns actual number of bytes read >= 0, + * or a negative status code. 
*/ ssize_t read(void* buffer, size_t size); @@ -336,68 +390,113 @@ private: void pause(); // suspend thread from execution at next loop boundary void resume(); // allow thread to execute, if not requested to exit + void pauseConditional(); + // like pause(), but only if prior resume() wasn't latched private: friend class AudioRecord; virtual bool threadLoop(); - AudioRecord& mReceiver; + AudioRecord& mReceiver; virtual ~AudioRecordThread(); Mutex mMyLock; // Thread::mLock is private Condition mMyCond; // Thread::mThreadExitedCondition is private bool mPaused; // whether thread is currently paused + bool mResumeLatch; // whether next pauseConditional() will be a nop }; // body of AudioRecordThread::threadLoop() - bool processAudioBuffer(const sp& thread); - + // returns the maximum amount of time before we would like to run again, where: + // 0 immediately + // > 0 no later than this many nanoseconds from now + // NS_WHENEVER still active but no particular deadline + // NS_INACTIVE inactive so don't run again until re-started + // NS_NEVER never again + static const nsecs_t NS_WHENEVER = -1, NS_INACTIVE = -2, NS_NEVER = -3; + nsecs_t processAudioBuffer(const sp& thread); + + // caller must hold lock on mLock for all _l methods status_t openRecord_l(uint32_t sampleRate, audio_format_t format, size_t frameCount, - audio_io_handle_t input); + audio_io_handle_t input, + size_t epoch); + audio_io_handle_t getInput_l(); - status_t restoreRecord_l(audio_track_cblk_t*& cblk); + + // FIXME enum is faster than strcmp() for parameter 'from' + status_t restoreRecord_l(const char *from); sp mAudioRecordThread; mutable Mutex mLock; - bool mActive; // protected by mLock + // Current client state: false = stopped, true = active. Protected by mLock. If more states + // are added, consider changing this to enum State { ... } mState as in AudioTrack. 
+ bool mActive; // for client callback handler callback_t mCbf; // callback handler for events, or NULL - void* mUserData; + void* mUserData; // for client callback handler // for notification APIs - uint32_t mNotificationFrames; - uint32_t mRemainingFrames; - uint32_t mMarkerPosition; // in frames + uint32_t mNotificationFrames; // frames between each notification callback + bool mRefreshRemaining; // processAudioBuffer() should refresh next 2 + + // These are private to processAudioBuffer(), and are not protected by a lock + uint32_t mRemainingFrames; // number of frames to request in obtainBuffer() + bool mRetryOnPartialBuffer; // sleep and retry after partial obtainBuffer() + int mObservedSequence; // last observed value of mSequence + + uint32_t mMarkerPosition; // in wrapping (overflow) frame units bool mMarkerReached; uint32_t mNewPosition; // in frames - uint32_t mUpdatePeriod; // in ms + uint32_t mUpdatePeriod; // in frames, zero means no EVENT_NEW_POS + + status_t mStatus; // constant after constructor or set() uint32_t mSampleRate; size_t mFrameCount; audio_format_t mFormat; - uint8_t mChannelCount; + uint32_t mChannelCount; size_t mFrameSize; // app-level frame size == AudioFlinger frame size audio_source_t mInputSource; - status_t mStatus; - uint32_t mLatency; + uint32_t mLatency; // in ms audio_channel_mask_t mChannelMask; - audio_io_handle_t mInput; // returned by AudioSystem::getInput() int mSessionId; + transfer_type mTransfer; + + audio_io_handle_t mInput; // returned by AudioSystem::getInput() // may be changed if IAudioRecord object is re-created sp mAudioRecord; sp mCblkMemory; - audio_track_cblk_t* mCblk; - void* mBuffers; // starting address of buffers in shared memory + audio_track_cblk_t* mCblk; // re-load after mLock.unlock() - int mPreviousPriority; // before start() + int mPreviousPriority; // before start() SchedPolicy mPreviousSchedulingGroup; - AudioRecordClientProxy* mProxy; + + // The proxy should only be referenced while a lock is held because the proxy isn't + // multi-thread safe. 
+ // An exception is that a blocking ClientProxy::obtainBuffer() may be called without a lock, + // provided that the caller also holds an extra reference to the proxy and shared memory to keep + sp mProxy; + + bool mInOverrun; // whether recorder is currently in overrun state + +private: + class DeathNotifier : public IBinder::DeathRecipient { + public: + DeathNotifier(AudioRecord* audioRecord) : mAudioRecord(audioRecord) { } + protected: + virtual void binderDied(const wp& who); + private: + const wp mAudioRecord; + }; + + sp mDeathNotifier; + uint32_t mSequence; // incremented for each new IAudioRecord attempt }; }; // namespace android -#endif /*AUDIORECORD_H_*/ +#endif // ANDROID_AUDIORECORD_H diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 8dbc9ee..e9bb76a 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -17,18 +17,9 @@ #ifndef ANDROID_AUDIOTRACK_H #define ANDROID_AUDIOTRACK_H -#include -#include - -#include -#include -#include - -#include -#include -#include -#include #include +#include +#include #include namespace android { @@ -37,10 +28,11 @@ namespace android { class audio_track_cblk_t; class AudioTrackClientProxy; +class StaticAudioTrackClientProxy; // ---------------------------------------------------------------------------- -class AudioTrack : virtual public RefBase +class AudioTrack : public RefBase { public: enum channel_index { @@ -49,7 +41,7 @@ public: RIGHT = 1 }; - /* Events used by AudioTrack callback function (audio_track_cblk_t). + /* Events used by AudioTrack callback function (callback_t). * Keep in sync with frameworks/base/media/java/android/media/AudioTrack.java NATIVE_EVENT_*. */ enum event_type { @@ -64,7 +56,10 @@ public: // (See setMarkerPosition()). EVENT_NEW_POS = 4, // Playback head is at a new position // (See setPositionUpdatePeriod()). - EVENT_BUFFER_END = 5 // Playback head is at the end of the buffer. + EVENT_BUFFER_END = 5, // Playback head is at the end of the buffer. + // Not currently used by android.media.AudioTrack. + EVENT_NEW_IAUDIOTRACK = 6, // IAudioTrack was re-created, either due to re-routing and + // voluntary invalidation by mediaserver, or mediaserver crash. }; /* Client should declare Buffer on the stack and pass address to obtainBuffer() @@ -74,19 +69,23 @@ public: class Buffer { public: + // FIXME use m prefix size_t frameCount; // number of sample frames corresponding to size; // on input it is the number of frames desired, // on output is the number of frames actually filled - size_t size; // input/output in byte units + size_t size; // input/output in bytes == frameCount * frameSize + // FIXME this is redundant with respect to frameCount, + // and TRANSFER_OBTAIN mode is broken for 8-bit data + // since we don't define the frame format + union { void* raw; - short* i16; // signed 16-bit - int8_t* i8; // unsigned 8-bit, offset by 0x80 + short* i16; // signed 16-bit + int8_t* i8; // unsigned 8-bit, offset by 0x80 }; }; - /* As a convenience, if a callback is supplied, a handler thread * is automatically created with the appropriate priority. This thread * invokes the callback when a new buffer becomes available or various conditions occur. @@ -100,9 +99,10 @@ public: * written. * - EVENT_UNDERRUN: unused. * - EVENT_LOOP_END: pointer to an int indicating the number of loops remaining. - * - EVENT_MARKER: pointer to an uint32_t containing the marker position in frames. - * - EVENT_NEW_POS: pointer to an uint32_t containing the new position in frames. 
+ * - EVENT_MARKER: pointer to const uint32_t containing the marker position in frames. + * - EVENT_NEW_POS: pointer to const uint32_t containing the new position in frames. * - EVENT_BUFFER_END: unused. + * - EVENT_NEW_IAUDIOTRACK: unused. */ typedef void (*callback_t)(int event, void* user, void *info); @@ -114,9 +114,19 @@ public: * - NO_INIT: audio server or audio hardware not initialized */ - static status_t getMinFrameCount(size_t* frameCount, - audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT, - uint32_t sampleRate = 0); + static status_t getMinFrameCount(size_t* frameCount, + audio_stream_type_t streamType, + uint32_t sampleRate); + + /* How data is transferred to AudioTrack + */ + enum transfer_type { + TRANSFER_DEFAULT, // not specified explicitly; determine from the other parameters + TRANSFER_CALLBACK, // callback EVENT_MORE_DATA + TRANSFER_OBTAIN, // FIXME deprecated: call obtainBuffer() and releaseBuffer() + TRANSFER_SYNC, // synchronous write() + TRANSFER_SHARED, // shared memory + }; /* Constructs an uninitialized AudioTrack. No connection with * AudioFlinger takes place. Use set() after this. @@ -128,13 +138,13 @@ public: * Unspecified values are set to appropriate default values. * With this constructor, the track is configured for streaming mode. * Data to be rendered is supplied by write() or by the callback EVENT_MORE_DATA. - * Intermixing a combination of write() and non-ignored EVENT_MORE_DATA is deprecated. + * Intermixing a combination of write() and non-ignored EVENT_MORE_DATA is not allowed. * * Parameters: * * streamType: Select the type of audio stream this track is attached to * (e.g. AUDIO_STREAM_MUSIC). - * sampleRate: Track sampling rate in Hz. + * sampleRate: Data source sampling rate in Hz. * format: Audio format (e.g AUDIO_FORMAT_PCM_16_BIT for signed * 16 bits per sample). * channelMask: Channel mask. @@ -149,9 +159,10 @@ public: * user: Context for use by the callback receiver. * notificationFrames: The callback function is called each time notificationFrames PCM * frames have been consumed from track input buffer. + * This is expressed in units of frames at the initial source sample rate. * sessionId: Specific session ID, or zero to use default. - * threadCanCallJava: Whether callbacks are made from an attached thread and thus can call JNI. - * If not present in parameter list, then fixed at false. + * transferType: How data is transferred to AudioTrack. + * threadCanCallJava: Not present in parameter list, and so is fixed at false. */ AudioTrack( audio_stream_type_t streamType, @@ -163,7 +174,8 @@ public: callback_t cbf = NULL, void* user = NULL, int notificationFrames = 0, - int sessionId = 0); + int sessionId = 0, + transfer_type transferType = TRANSFER_DEFAULT); /* Creates an audio track and registers it with AudioFlinger. * With this constructor, the track is configured for static buffer mode. @@ -174,7 +186,6 @@ public: * The write() method is not supported in this case. * It is recommended to pass a callback function to be notified of playback end by an * EVENT_UNDERRUN event. - * FIXME EVENT_MORE_DATA still occurs; it must be ignored. */ AudioTrack( audio_stream_type_t streamType, @@ -186,7 +197,8 @@ public: callback_t cbf = NULL, void* user = NULL, int notificationFrames = 0, - int sessionId = 0); + int sessionId = 0, + transfer_type transferType = TRANSFER_DEFAULT); /* Terminates the AudioTrack and unregisters it from AudioFlinger. * Also destroys all resources associated with the AudioTrack. 
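A client that selects TRANSFER_CALLBACK with the streaming constructor above supplies a callback_t handler; the sketch below is illustrative only (it is not taken from this patch) and follows the per-event payload comments earlier in this header. Writing silence in EVENT_MORE_DATA is a placeholder for real rendering, and only a subset of events is handled.

#include <string.h>
#include <media/AudioTrack.h>

using namespace android;

static void sketchCallback(int event, void* user, void* info)
{
    (void) user;   // context pointer passed at construction; unused in this sketch
    switch (event) {
    case AudioTrack::EVENT_MORE_DATA: {
        AudioTrack::Buffer* b = static_cast<AudioTrack::Buffer*>(info);
        // fill up to b->size bytes at b->raw; leave b->size set to the bytes actually written
        memset(b->raw, 0, b->size);   // placeholder: write silence
        break;
    }
    case AudioTrack::EVENT_MARKER:
    case AudioTrack::EVENT_NEW_POS:
        // info points to a const uint32_t position in frames (see the comments above)
        break;
    case AudioTrack::EVENT_NEW_IAUDIOTRACK:
        // the IAudioTrack was re-created after re-routing or a mediaserver restart; info unused
        break;
    default:
        break;
    }
}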
@@ -195,7 +207,8 @@ protected: virtual ~AudioTrack(); public: - /* Initialize an uninitialized AudioTrack. + /* Initialize an AudioTrack that was created using the AudioTrack() constructor. + * Don't call set() more than once, or after the AudioTrack() constructors that take parameters. * Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful initialization * - INVALID_OPERATION: AudioTrack is already initialized @@ -203,6 +216,10 @@ public: * - NO_INIT: audio server or audio hardware not initialized * If sharedBuffer is non-0, the frameCount parameter is ignored and * replaced by the shared buffer's total allocated size in frame units. + * + * Parameters not listed in the AudioTrack constructors above: + * + * threadCanCallJava: Whether callbacks are made from an attached thread and thus can call JNI. */ status_t set(audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT, uint32_t sampleRate = 0, @@ -215,7 +232,8 @@ public: int notificationFrames = 0, const sp& sharedBuffer = 0, bool threadCanCallJava = false, - int sessionId = 0); + int sessionId = 0, + transfer_type transferType = TRANSFER_DEFAULT); /* Result of constructing the AudioTrack. This must be checked * before using any AudioTrack API (except for set()), because using @@ -235,14 +253,15 @@ public: audio_stream_type_t streamType() const { return mStreamType; } audio_format_t format() const { return mFormat; } - /* Return frame size in bytes, which for linear PCM is channelCount * (bit depth per channel / 8). + /* Return frame size in bytes, which for linear PCM is + * channelCount * (bit depth per channel / 8). * channelCount is determined from channelMask, and bit depth comes from format. * For non-linear formats, the frame size is typically 1 byte. */ - uint32_t channelCount() const { return mChannelCount; } + size_t frameSize() const { return mFrameSize; } + uint32_t channelCount() const { return mChannelCount; } uint32_t frameCount() const { return mFrameCount; } - size_t frameSize() const { return mFrameSize; } /* Return the static buffer specified in constructor or set(), or 0 for streaming mode */ sp sharedBuffer() const { return mSharedBuffer; } @@ -255,10 +274,9 @@ public: /* Stop a track. * In static buffer mode, the track is stopped immediately. - * In streaming mode, the callback will cease being called and - * obtainBuffer returns STOPPED. Note that obtainBuffer() still works - * and will fill up buffers until the pool is exhausted. - * The stop does not occur immediately: any data remaining in the buffer + * In streaming mode, the callback will cease being called. Note that obtainBuffer() still + * works and will fill up buffers until the pool is exhausted, and then will return WOULD_BLOCK. + * In streaming mode the stop does not occur immediately: any data remaining in the buffer * is first drained, mixed, and output, and only then is the track marked as stopped. */ void stop(); @@ -272,7 +290,7 @@ public: void flush(); /* Pause a track. After pause, the callback will cease being called and - * obtainBuffer returns STOPPED. Note that obtainBuffer() still works + * obtainBuffer returns WOULD_BLOCK. Note that obtainBuffer() still works * and will fill up buffers until the pool is exhausted. * Volume is ramped down over the next mix buffer following the pause request, * and then the track is marked as paused. It can be resumed with ramp up by start(). 
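The frameSize() comment above compresses a small amount of arithmetic; the helper below spells it out for the common PCM cases. It is a sketch rather than framework code: popcount() from cutils/bitops.h is assumed here as the way to turn a channel mask into a channel count.

#include <stdint.h>
#include <cutils/bitops.h>   // popcount()
#include <system/audio.h>

static size_t frameSizeSketch(audio_format_t format, audio_channel_mask_t channelMask)
{
    uint32_t channelCount = popcount(channelMask);   // one bit per channel in the mask
    switch (format) {
    case AUDIO_FORMAT_PCM_16_BIT:
        return channelCount * sizeof(int16_t);       // 2 bytes per channel per frame
    case AUDIO_FORMAT_PCM_8_BIT:
        return channelCount * sizeof(uint8_t);       // 1 byte per channel per frame
    default:
        return sizeof(uint8_t);                      // non-linear formats: typically 1 byte
    }
}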
@@ -296,11 +314,11 @@ public: status_t setAuxEffectSendLevel(float level); void getAuxEffectSendLevel(float* level) const; - /* Set sample rate for this track in Hz, mostly used for games' sound effects + /* Set source sample rate for this track in Hz, mostly used for games' sound effects */ status_t setSampleRate(uint32_t sampleRate); - /* Return current sample rate in Hz, or 0 if unknown */ + /* Return current source sample rate in Hz, or 0 if unknown */ uint32_t getSampleRate() const; /* Enables looping and sets the start and end points of looping. @@ -322,7 +340,7 @@ public: * loopCount != 0 implies 0 <= loopStart < loopEnd <= frameCount(). * * If the loop period (loopEnd - loopStart) is too small for the implementation to support, - * setLoop() will return BAD_VALUE. + * setLoop() will return BAD_VALUE. loopCount must be >= -1. * */ status_t setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount); @@ -330,7 +348,7 @@ public: /* Sets marker position. When playback reaches the number of frames specified, a callback with * event type EVENT_MARKER is called. Calling setMarkerPosition with marker == 0 cancels marker * notification callback. To set a marker at a position which would compute as 0, - * a workaround is to the set the marker at a nearby position such as -1 or 1. + * a workaround is to the set the marker at a nearby position such as ~0 or 1. * If the AudioTrack has been opened with no callback function associated, the operation will * fail. * @@ -390,16 +408,22 @@ public: /* Return the total number of frames played since playback start. * The counter will wrap (overflow) periodically, e.g. every ~27 hours at 44.1 kHz. * It is reset to zero by flush(), reload(), and stop(). + * + * Parameters: + * + * position: Address where to return play head position. + * + * Returned status (from utils/Errors.h) can be: + * - NO_ERROR: successful operation + * - BAD_VALUE: position is NULL */ - status_t getPosition(uint32_t *position); + status_t getPosition(uint32_t *position) const; -#if 0 /* For static buffer mode only, this returns the current playback position in frames * relative to start of buffer. It is analogous to the new API for * setLoop() and setPosition(). After underrun, the position will be at end of buffer. */ status_t getBufferPosition(uint32_t *position); -#endif /* Forces AudioTrack buffer full condition. When playing a static buffer, this method avoids * rewriting the buffer before restarting playback after a stop. @@ -446,15 +470,19 @@ public: */ status_t attachAuxEffect(int effectId); - /* Obtains a buffer of "frameCount" frames. The buffer must be - * filled entirely, and then released with releaseBuffer(). - * If the track is stopped, obtainBuffer() returns - * STOPPED instead of NO_ERROR as long as there are buffers available, - * at which point NO_MORE_BUFFERS is returned. + /* Obtains a buffer of up to "audioBuffer->frameCount" empty slots for frames. + * After filling these slots with data, the caller should release them with releaseBuffer(). + * If the track buffer is not full, obtainBuffer() returns as many contiguous + * [empty slots for] frames as are available immediately. + * If the track buffer is full and track is stopped, obtainBuffer() returns WOULD_BLOCK + * regardless of the value of waitCount. + * If the track buffer is full and track is not stopped, obtainBuffer() blocks with a + * maximum timeout based on waitCount; see chart below. 
* Buffers will be returned until the pool * is exhausted, at which point obtainBuffer() will either block - * or return WOULD_BLOCK depending on the value of the "blocking" + * or return WOULD_BLOCK depending on the value of the "waitCount" * parameter. + * Each sample is 16-bit signed PCM. * * obtainBuffer() and releaseBuffer() are deprecated for direct use by applications, * which should use write() or callback EVENT_MORE_DATA instead. @@ -477,24 +505,35 @@ public: * raw pointer to the buffer */ - enum { - NO_MORE_BUFFERS = 0x80000001, // same name in AudioFlinger.h, ok to be different value - STOPPED = 1 - }; + /* FIXME Deprecated public API for TRANSFER_OBTAIN mode */ + status_t obtainBuffer(Buffer* audioBuffer, int32_t waitCount) + __attribute__((__deprecated__)); - status_t obtainBuffer(Buffer* audioBuffer, int32_t waitCount); +private: + /* New internal API + * If nonContig is non-NULL, it is an output parameter that will be set to the number of + * additional non-contiguous frames that are available immediately. + * FIXME We could pass an array of Buffers instead of only one Buffer to obtainBuffer(), + * in case the requested amount of frames is in two or more non-contiguous regions. + * FIXME requested and elapsed are both relative times. Consider changing to absolute time. + */ + status_t obtainBuffer(Buffer* audioBuffer, const struct timespec *requested, + struct timespec *elapsed = NULL, size_t *nonContig = NULL); +public: - /* Release a filled buffer of "frameCount" frames for AudioFlinger to process. */ + /* Release a filled buffer of "audioBuffer->frameCount" frames for AudioFlinger to process. */ + // FIXME make private when obtainBuffer() for TRANSFER_OBTAIN is removed void releaseBuffer(Buffer* audioBuffer); /* As a convenience we provide a write() interface to the audio buffer. + * Input parameter 'size' is in byte units. * This is implemented on top of obtainBuffer/releaseBuffer. For best * performance use callbacks. Returns actual number of bytes written >= 0, * or one of the following negative status codes: * INVALID_OPERATION AudioTrack is configured for shared buffer mode * BAD_VALUE size is invalid - * STOPPED AudioTrack was stopped during the write - * NO_MORE_BUFFERS when obtainBuffer() returns same + * WOULD_BLOCK when obtainBuffer() returns same, or + * AudioTrack was stopped during the write * or any other error code returned by IAudioTrack::start() or restoreTrack_l(). * Not supported for static buffer mode. */ @@ -503,7 +542,13 @@ public: /* * Dumps the state of an audio track. */ - status_t dump(int fd, const Vector& args) const; + status_t dump(int fd, const Vector& args) const; + + /* + * Return the total number of frames which AudioFlinger desired but were unavailable, + * and thus which resulted in an underrun. Reset to zero by stop(). 
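The deprecated waitCount above and the timespec taken by the new internal obtainBuffer() are related by a fixed 10 ms unit. The helper below is condensed from the AudioRecord::obtainBuffer() implementation later in this patch; WAIT_PERIOD_MS is the constant defined there, and AudioTrack is expected to use the same mapping.

#include <stdint.h>
#include <time.h>
#include <private/media/AudioTrackShared.h>   // ClientProxy::kForever / kNonBlocking

using namespace android;

#define WAIT_PERIOD_MS 10   // per-count wait unit, as defined in AudioRecord.cpp below

static const struct timespec* waitCountToTimeout(int32_t waitCount, struct timespec* scratch)
{
    if (waitCount == -1) {
        return &ClientProxy::kForever;       // block with no time limit
    } else if (waitCount == 0) {
        return &ClientProxy::kNonBlocking;   // fail immediately with WOULD_BLOCK
    } else if (waitCount > 0) {
        long long ms = WAIT_PERIOD_MS * (long long) waitCount;
        scratch->tv_sec = ms / 1000;
        scratch->tv_nsec = (int) (ms % 1000) * 1000000;
        return scratch;
    }
    return NULL;                             // invalid waitCount
}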
+ */ + uint32_t getUnderrunFrames() const; protected: /* copying audio tracks is not allowed */ @@ -522,19 +567,29 @@ protected: void pause(); // suspend thread from execution at next loop boundary void resume(); // allow thread to execute, if not requested to exit + void pauseConditional(); + // like pause(), but only if prior resume() wasn't latched private: friend class AudioTrack; virtual bool threadLoop(); - AudioTrack& mReceiver; - ~AudioTrackThread(); + AudioTrack& mReceiver; + virtual ~AudioTrackThread(); Mutex mMyLock; // Thread::mLock is private Condition mMyCond; // Thread::mThreadExitedCondition is private bool mPaused; // whether thread is currently paused + bool mResumeLatch; // whether next pauseConditional() will be a nop }; // body of AudioTrackThread::threadLoop() - bool processAudioBuffer(const sp& thread); + // returns the maximum amount of time before we would like to run again, where: + // 0 immediately + // > 0 no later than this many nanoseconds from now + // NS_WHENEVER still active but no particular deadline + // NS_INACTIVE inactive so don't run again until re-started + // NS_NEVER never again + static const nsecs_t NS_WHENEVER = -1, NS_INACTIVE = -2, NS_NEVER = -3; + nsecs_t processAudioBuffer(const sp& thread); // caller must hold lock on mLock for all _l methods status_t createTrack_l(audio_stream_type_t streamType, @@ -543,20 +598,24 @@ protected: size_t frameCount, audio_output_flags_t flags, const sp& sharedBuffer, - audio_io_handle_t output); + audio_io_handle_t output, + size_t epoch); - // can only be called when !mActive + // can only be called when mState != STATE_ACTIVE void flush_l(); - status_t setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount); + void setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount); audio_io_handle_t getOutput_l(); - status_t restoreTrack_l(audio_track_cblk_t*& cblk, bool fromStart); - bool stopped_l() const { return !mActive; } + // FIXME enum is faster than strcmp() for parameter 'from' + status_t restoreTrack_l(const char *from); + + // may be changed if IAudioTrack is re-created sp mAudioTrack; sp mCblkMemory; - sp mAudioTrackThread; + audio_track_cblk_t* mCblk; // re-load after mLock.unlock() + sp mAudioTrackThread; float mVolume[2]; float mSendLevel; uint32_t mSampleRate; @@ -564,62 +623,89 @@ protected: size_t mReqFrameCount; // frame count to request the next time a new // IAudioTrack is needed - audio_track_cblk_t* mCblk; // re-load after mLock.unlock() - - // Starting address of buffers in shared memory. If there is a shared buffer, mBuffers - // is the value of pointer() for the shared buffer, otherwise mBuffers points - // immediately after the control block. This address is for the mapping within client - // address space. AudioFlinger::TrackBase::mBuffer is for the server address space. - void* mBuffers; + // constant after constructor or set() audio_format_t mFormat; // as requested by client, not forced to 16-bit audio_stream_type_t mStreamType; uint32_t mChannelCount; audio_channel_mask_t mChannelMask; + transfer_type mTransfer; - // mFrameSize is equal to mFrameSizeAF for non-PCM or 16-bit PCM data. - // For 8-bit PCM data, mFrameSizeAF is - // twice as large because data is expanded to 16-bit before being stored in buffer. + // mFrameSize is equal to mFrameSizeAF for non-PCM or 16-bit PCM data. For 8-bit PCM data, it's + // twice as large as mFrameSize because data is expanded to 16-bit before it's stored in buffer. 
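processAudioBuffer() now returns a scheduling hint rather than a bool, per the comment above. The loop below is a hypothetical illustration of how a servicing thread could honor those codes; it is not the real AudioTrackThread::threadLoop(), it substitutes a plain sleep for the thread's pause()/resume() machinery, and runOnce stands in for processAudioBuffer().

#include <unistd.h>
#include <utils/Timers.h>    // nsecs_t

static void serviceLoopSketch(nsecs_t (*runOnce)())
{
    // same values as the private constants declared above
    static const nsecs_t NS_WHENEVER = -1, NS_INACTIVE = -2, NS_NEVER = -3;
    for (;;) {
        nsecs_t ns = runOnce();
        if (ns == NS_NEVER) {
            return;                           // never call again
        }
        if (ns == NS_INACTIVE) {
            usleep(100 * 1000);               // real code blocks until resume(); just back off here
            continue;
        }
        if (ns == NS_WHENEVER) {
            ns = 1000000000LL;                // active but no deadline: poll about once a second
        }
        if (ns > 0) {
            usleep((useconds_t) (ns / 1000)); // sleep no longer than the requested deadline
        }
        // ns == 0: run again immediately
    }
}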
size_t mFrameSize; // app-level frame size size_t mFrameSizeAF; // AudioFlinger frame size status_t mStatus; - uint32_t mLatency; - bool mActive; // protected by mLock + // can change dynamically when IAudioTrack invalidated + uint32_t mLatency; // in ms + + // Indicates the current track state. Protected by mLock. + enum State { + STATE_ACTIVE, + STATE_STOPPED, + STATE_PAUSED, + STATE_FLUSHED, + } mState; callback_t mCbf; // callback handler for events, or NULL void* mUserData; // for client callback handler // for notification APIs uint32_t mNotificationFramesReq; // requested number of frames between each - // notification callback + // notification callback, + // at initial source sample rate uint32_t mNotificationFramesAct; // actual number of frames between each - // notification callback + // notification callback, + // at initial source sample rate + bool mRefreshRemaining; // processAudioBuffer() should refresh next 2 + + // These are private to processAudioBuffer(), and are not protected by a lock + uint32_t mRemainingFrames; // number of frames to request in obtainBuffer() + bool mRetryOnPartialBuffer; // sleep and retry after partial obtainBuffer() + int mObservedSequence; // last observed value of mSequence + sp mSharedBuffer; - int mLoopCount; - uint32_t mRemainingFrames; + uint32_t mLoopPeriod; // in frames, zero means looping is disabled uint32_t mMarkerPosition; // in wrapping (overflow) frame units bool mMarkerReached; uint32_t mNewPosition; // in frames - uint32_t mUpdatePeriod; // in frames + uint32_t mUpdatePeriod; // in frames, zero means no EVENT_NEW_POS - bool mFlushed; // FIXME will be made obsolete by making flush() synchronous audio_output_flags_t mFlags; int mSessionId; int mAuxEffectId; - // When locking both mLock and mCblk->lock, must lock in this order to avoid deadlock: - // 1. mLock - // 2. mCblk->lock - // It is OK to lock only mCblk->lock. mutable Mutex mLock; bool mIsTimed; int mPreviousPriority; // before start() SchedPolicy mPreviousSchedulingGroup; - AudioTrackClientProxy* mProxy; bool mAwaitBoost; // thread should wait for priority boost before running + + // The proxy should only be referenced while a lock is held because the proxy isn't + // multi-thread safe, especially the SingleStateQueue part of the proxy. + // An exception is that a blocking ClientProxy::obtainBuffer() may be called without a lock, + // provided that the caller also holds an extra reference to the proxy and shared memory to keep + // them around in case they are replaced during the obtainBuffer(). 
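The locking rule above is easiest to see in code. The sketch below condenses the pattern used by the AudioRecord::obtainBuffer() rewrite later in this patch: copy strong references to the proxy and its shared memory while holding the lock, then make the potentially blocking call with the lock released. The parameter names are stand-ins for the corresponding AudioTrack members.

#include <utils/threads.h>
#include <utils/Errors.h>
#include <binder/IMemory.h>
#include <private/media/AudioTrackShared.h>

using namespace android;

static status_t obtainOutsideLockSketch(Mutex& lock,
                                        const sp<AudioTrackClientProxy>& trackProxy,
                                        const sp<IMemory>& cblkMemory,
                                        Proxy::Buffer* buffer,
                                        const struct timespec* requested)
{
    sp<AudioTrackClientProxy> proxy;
    sp<IMemory> iMem;
    {
        AutoMutex _l(lock);   // members may only be touched while the lock is held
        proxy = trackProxy;   // extra references keep the proxy and shared memory alive
        iMem  = cblkMemory;   // even if another thread re-creates the track while we block
    }
    (void) iMem;              // reference intentionally held across the call
    // blocking call made with the lock released, as the comment above permits
    return proxy->obtainBuffer(buffer, requested, NULL /*elapsed*/);
}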
+ sp mStaticProxy; // for type safety only + sp mProxy; // primary owner of the memory + + bool mInUnderrun; // whether track is currently in underrun state + +private: + class DeathNotifier : public IBinder::DeathRecipient { + public: + DeathNotifier(AudioTrack* audioTrack) : mAudioTrack(audioTrack) { } + protected: + virtual void binderDied(const wp& who); + private: + const wp mAudioTrack; + }; + + sp mDeathNotifier; + uint32_t mSequence; // incremented for each new IAudioTrack attempt }; class TimedAudioTrack : public AudioTrack diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 41e20f8..681f557 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -22,32 +22,46 @@ #include #include +#include +#include +#include +#include namespace android { // ---------------------------------------------------------------------------- -// Maximum cumulated timeout milliseconds before restarting audioflinger thread -#define MAX_STARTUP_TIMEOUT_MS 3000 // Longer timeout period at startup to cope with A2DP - // init time -#define MAX_RUN_TIMEOUT_MS 1000 -#define WAIT_PERIOD_MS 10 - -#define CBLK_UNDERRUN 0x01 // set: underrun (out) or overrrun (in), clear: no underrun or overrun +#define CBLK_UNDERRUN 0x01 // set by server immediately on output underrun, cleared by client #define CBLK_FORCEREADY 0x02 // set: track is considered ready immediately by AudioFlinger, // clear: track is ready when buffer full #define CBLK_INVALID 0x04 // track buffer invalidated by AudioFlinger, need to re-create -#define CBLK_DISABLED 0x08 // track disabled by AudioFlinger due to underrun, need to re-start +#define CBLK_DISABLED 0x08 // output track disabled by AudioFlinger due to underrun, + // need to re-start. Unlike CBLK_UNDERRUN, this is not set + // immediately, but only after a long string of underruns. 
+// 0x10 unused +#define CBLK_LOOP_CYCLE 0x20 // set by server each time a loop cycle other than final one completes +#define CBLK_LOOP_FINAL 0x40 // set by server when the final loop cycle completes +#define CBLK_BUFFER_END 0x80 // set by server when the position reaches end of buffer if not looping +#define CBLK_OVERRUN 0x100 // set by server immediately on input overrun, cleared by client +#define CBLK_INTERRUPT 0x200 // set by client on interrupt(), cleared by client in obtainBuffer() struct AudioTrackSharedStreaming { // similar to NBAIO MonoPipe - volatile int32_t mFront; - volatile int32_t mRear; + // in continuously incrementing frame units, take modulo buffer size, which must be a power of 2 + volatile int32_t mFront; // read by server + volatile int32_t mRear; // write by client + volatile int32_t mFlush; // incremented by client to indicate a request to flush; + // server notices and discards all data between mFront and mRear + volatile uint32_t mUnderrunFrames; // server increments for each unavailable but desired frame }; -// future +typedef SingleStateQueue StaticAudioTrackSingleStateQueue; + struct AudioTrackSharedStatic { - int mReserved; + StaticAudioTrackSingleStateQueue::Shared + mSingleStateQueue; + size_t mBufferPosition; // updated asynchronously by server, + // "for entertainment purposes only" }; // ---------------------------------------------------------------------------- @@ -55,65 +69,61 @@ struct AudioTrackSharedStatic { // Important: do not add any virtual methods, including ~ struct audio_track_cblk_t { + // Since the control block is always located in shared memory, this constructor + // is only used for placement new(). It is never used for regular new() or stack. + audio_track_cblk_t(); + /*virtual*/ ~audio_track_cblk_t() { } + friend class Proxy; + friend class ClientProxy; friend class AudioTrackClientProxy; friend class AudioRecordClientProxy; friend class ServerProxy; + friend class AudioTrackServerProxy; + friend class AudioRecordServerProxy; // The data members are grouped so that members accessed frequently and in the same context // are in the same line of data cache. - Mutex lock; // sizeof(int) - Condition cv; // sizeof(int) - - // next 4 are offsets within "buffers" - volatile uint32_t user; - volatile uint32_t server; - uint32_t userBase; - uint32_t serverBase; - int mPad1; // unused, but preserves cache line alignment + volatile uint32_t server; // updated asynchronously by server, + // "for entertainment purposes only" size_t frameCount_; // used during creation to pass actual track buffer size // from AudioFlinger to client, and not referenced again - // FIXME remove here and replace by createTrack() in/out parameter + // FIXME remove here and replace by createTrack() in/out + // parameter // renamed to "_" to detect incorrect use - // Cache line boundary (32 bytes) + volatile int32_t mFutex; // semaphore: down (P) by client, + // up (V) by server or binderDied() or interrupt() + +private: - uint32_t loopStart; - uint32_t loopEnd; // read-only for server, read/write for client - int loopCount; // read/write for client + size_t mMinimum; // server wakes up client if available >= mMinimum // Channel volumes are fixed point U4.12, so 0x1000 means 1.0. // Left channel is in [0:15], right channel is in [16:31]. // Always read and write the combined pair atomically. // For AudioTrack only, not used by AudioRecord. -private: uint32_t mVolumeLR; uint32_t mSampleRate; // AudioTrack only: client's requested sample rate in Hz // or 0 == default. 
Write-only client, read-only server. + // client write-only, server read-only + uint16_t mSendLevel; // Fixed point U4.12 so 0x1000 means 1.0 + uint8_t mPad2; // unused public: // read-only for client, server writes once at initialization and is then read-only uint8_t mName; // normal tracks: track name, fast tracks: track index - // used by client only - uint16_t bufferTimeoutMs; // Maximum cumulated timeout before restarting - // audioflinger - - uint16_t waitTimeMs; // Cumulated wait time, used by client only -private: - // client write-only, server read-only - uint16_t mSendLevel; // Fixed point U4.12 so 0x1000 means 1.0 -public: volatile int32_t flags; // Cache line boundary (32 bytes) -#if 0 +public: union { AudioTrackSharedStreaming mStreaming; AudioTrackSharedStatic mStatic; @@ -121,25 +131,6 @@ public: } u; // Cache line boundary (32 bytes) -#endif - - // Since the control block is always located in shared memory, this constructor - // is only used for placement new(). It is never used for regular new() or stack. - audio_track_cblk_t(); - -private: - // if there is a shared buffer, "buffers" is the value of pointer() for the shared - // buffer, otherwise "buffers" points immediately after the control block - void* buffer(void *buffers, uint32_t frameSize, size_t offset) const; - - bool tryLock(); - - // isOut == true means AudioTrack, isOut == false means AudioRecord - bool stepServer(size_t stepCount, size_t frameCount, bool isOut); - uint32_t stepUser(size_t stepCount, size_t frameCount, bool isOut); - uint32_t framesAvailable(size_t frameCount, bool isOut); - uint32_t framesAvailable_l(size_t frameCount, bool isOut); - uint32_t framesReady(bool isOut); }; // ---------------------------------------------------------------------------- @@ -147,29 +138,31 @@ private: // Proxy for shared memory control block, to isolate callers from needing to know the details. // There is exactly one ClientProxy and one ServerProxy per shared memory control block. // The proxies are located in normal memory, and are not multi-thread safe within a given side. -class Proxy { +class Proxy : public RefBase { protected: - Proxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize) - : mCblk(cblk), mBuffers(buffers), mFrameCount(frameCount), mFrameSize(frameSize) { } + Proxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize, bool isOut, + bool clientInServer); virtual ~Proxy() { } public: - void* buffer(size_t offset) const { - return mCblk->buffer(mBuffers, mFrameSize, offset); - } + struct Buffer { + size_t mFrameCount; // number of frames available in this buffer + void* mRaw; // pointer to first frame + size_t mNonContig; // number of additional non-contiguous frames available + }; protected: // These refer to shared memory, and are virtual addresses with respect to the current process. // They may have different virtual addresses within the other process. 
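The placement-new note and the shared-memory comment above imply a specific layout: the control block sits at the start of the shared region and the data buffers begin immediately after it, which is the arithmetic the client repeats in openRecord_l() later in this patch. The sketch below shows the server-side half under the assumption that a MemoryDealer provides the region; the function and its parameters are illustrative, not taken from AudioFlinger.

#include <new>
#include <binder/MemoryDealer.h>
#include <private/media/AudioTrackShared.h>

using namespace android;

static sp<IMemory> allocateSharedCblkSketch(const sp<MemoryDealer>& dealer, size_t bufferBytes)
{
    sp<IMemory> mem = dealer->allocate(sizeof(audio_track_cblk_t) + bufferBytes);
    if (mem == 0 || mem->pointer() == NULL) {
        return 0;
    }
    // the control block is only ever constructed with placement new(), never regular new() or stack
    audio_track_cblk_t* cblk = new (mem->pointer()) audio_track_cblk_t();
    void* buffers = (char*) cblk + sizeof(audio_track_cblk_t);
    (void) buffers;   // handed to the proxies on both sides
    return mem;
}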
- audio_track_cblk_t* const mCblk; // the control block - void* const mBuffers; // starting address of buffers - - const size_t mFrameCount; // not necessarily a power of 2 - const size_t mFrameSize; // in bytes -#if 0 - const size_t mFrameCountP2; // mFrameCount rounded to power of 2, streaming mode -#endif - + audio_track_cblk_t* const mCblk; // the control block + void* const mBuffers; // starting address of buffers + + const size_t mFrameCount; // not necessarily a power of 2 + const size_t mFrameSize; // in bytes + const size_t mFrameCountP2; // mFrameCount rounded to power of 2, streaming mode + const bool mIsOut; // true for AudioTrack, false for AudioRecord + const bool mClientInServer; // true for OutputTrack, false for AudioTrack & AudioRecord + bool mIsShutdown; // latch set to true when shared memory corruption detected }; // ---------------------------------------------------------------------------- @@ -177,9 +170,86 @@ protected: // Proxy seen by AudioTrack client and AudioRecord client class ClientProxy : public Proxy { protected: - ClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize) - : Proxy(cblk, buffers, frameCount, frameSize) { } + ClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize, + bool isOut, bool clientInServer); virtual ~ClientProxy() { } + +public: + static const struct timespec kForever; + static const struct timespec kNonBlocking; + + // Obtain a buffer with filled frames (reading) or empty frames (writing). + // It is permitted to call obtainBuffer() multiple times in succession, without any intervening + // calls to releaseBuffer(). In that case, the final obtainBuffer() is the one that effectively + // sets or extends the unreleased frame count. + // On entry: + // buffer->mFrameCount should be initialized to maximum number of desired frames, + // which must be > 0. + // buffer->mNonContig is unused. + // buffer->mRaw is unused. + // requested is the requested timeout in local monotonic delta time units: + // NULL or &kNonBlocking means non-blocking (zero timeout). + // &kForever means block forever (infinite timeout). + // Other values mean a specific timeout in local monotonic delta time units. + // elapsed is a pointer to a location that will hold the total local monotonic time that + // elapsed while blocked, or NULL if not needed. + // On exit: + // buffer->mFrameCount has the actual number of contiguous available frames, + // which is always 0 when the return status != NO_ERROR. + // buffer->mNonContig is the number of additional non-contiguous available frames. + // buffer->mRaw is a pointer to the first available frame, + // or NULL when buffer->mFrameCount == 0. + // The return status is one of: + // NO_ERROR Success, buffer->mFrameCount > 0. + // WOULD_BLOCK Non-blocking mode and no frames are available. + // TIMED_OUT Timeout occurred before any frames became available. + // This can happen even for infinite timeout, due to a spurious wakeup. + // In this case, the caller should investigate and then re-try as appropriate. + // DEAD_OBJECT Server has died or invalidated, caller should destroy this proxy and re-create. + // -EINTR Call has been interrupted. Look around to see why, and then perhaps try again. + // NO_INIT Shared memory is corrupt. + // BAD_VALUE On entry buffer == NULL or buffer->mFrameCount == 0. 
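Putting the contract above together with the declarations that follow just below, a client-side fill loop might look like this sketch. It is illustrative only: 'proxy' is an already constructed AudioTrackClientProxy, frameSize is the track's frame size in bytes, and error handling is reduced to retry-or-bail.

#include <errno.h>
#include <string.h>
#include <sys/types.h>
#include <utils/Errors.h>
#include <private/media/AudioTrackShared.h>

using namespace android;

static ssize_t fillFromSourceSketch(const sp<AudioTrackClientProxy>& proxy,
                                    const void* src, size_t srcBytes, size_t frameSize)
{
    size_t written = 0;
    while (written < srcBytes) {
        Proxy::Buffer buffer;
        buffer.mFrameCount = (srcBytes - written) / frameSize;   // maximum we still want
        if (buffer.mFrameCount == 0) {
            break;                                               // less than one frame left
        }
        status_t status = proxy->obtainBuffer(&buffer, &ClientProxy::kForever);
        if (status == TIMED_OUT || status == -EINTR) {
            continue;       // spurious wakeup or interrupt(): look around, then retry
        }
        if (status != NO_ERROR) {
            break;          // WOULD_BLOCK, DEAD_OBJECT, NO_INIT, ...: caller must recover
        }
        size_t bytes = buffer.mFrameCount * frameSize;           // contiguous slots obtained
        memcpy(buffer.mRaw, (const char*) src + written, bytes);
        written += bytes;
        proxy->releaseBuffer(&buffer);                           // release exactly what was filled
    }
    return (ssize_t) written;
}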
+ status_t obtainBuffer(Buffer* buffer, const struct timespec *requested = NULL, + struct timespec *elapsed = NULL); + + // Release (some of) the frames last obtained. + // On entry, buffer->mFrameCount should have the number of frames to release, + // which must (cumulatively) be <= the number of frames last obtained but not yet released. + // buffer->mRaw is ignored, but is normally same pointer returned by last obtainBuffer(). + // It is permitted to call releaseBuffer() multiple times to release the frames in chunks. + // On exit: + // buffer->mFrameCount is zero. + // buffer->mRaw is NULL. + void releaseBuffer(Buffer* buffer); + + // Call after detecting server's death + void binderDied(); + + // Call to force an obtainBuffer() to return quickly with -EINTR + void interrupt(); + + size_t getPosition() { + return mEpoch + mCblk->server; + } + + void setEpoch(size_t epoch) { + mEpoch = epoch; + } + + void setMinimum(size_t minimum) { + mCblk->mMinimum = minimum; + } + + // Return the number of frames that would need to be obtained and released + // in order for the client to be aligned at start of buffer + virtual size_t getMisalignment(); + + size_t getEpoch() const { + return mEpoch; + } + +private: + size_t mEpoch; }; // ---------------------------------------------------------------------------- @@ -187,8 +257,10 @@ protected: // Proxy used by AudioTrack client, which also includes AudioFlinger::PlaybackThread::OutputTrack class AudioTrackClientProxy : public ClientProxy { public: - AudioTrackClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize) - : ClientProxy(cblk, buffers, frameCount, frameSize) { } + AudioTrackClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, + size_t frameSize, bool clientInServer = false) + : ClientProxy(cblk, buffers, frameCount, frameSize, true /*isOut*/, + clientInServer) { } virtual ~AudioTrackClientProxy() { } // No barriers on the following operations, so the ordering of loads/stores @@ -208,27 +280,36 @@ public: mCblk->mSampleRate = sampleRate; } - // called by: - // PlaybackThread::OutputTrack::write - // AudioTrack::createTrack_l - // AudioTrack::releaseBuffer - // AudioTrack::reload - // AudioTrack::restoreTrack_l (2 places) - size_t stepUser(size_t stepCount) { - return mCblk->stepUser(stepCount, mFrameCount, true /*isOut*/); + virtual void flush(); + + virtual uint32_t getUnderrunFrames() const { + return mCblk->u.mStreaming.mUnderrunFrames; } +}; + +class StaticAudioTrackClientProxy : public AudioTrackClientProxy { +public: + StaticAudioTrackClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, + size_t frameSize); + virtual ~StaticAudioTrackClientProxy() { } + + virtual void flush(); + +#define MIN_LOOP 16 // minimum length of each loop iteration in frames + void setLoop(size_t loopStart, size_t loopEnd, int loopCount); + size_t getBufferPosition(); - // called by AudioTrack::obtainBuffer and AudioTrack::processBuffer - size_t framesAvailable() { - return mCblk->framesAvailable(mFrameCount, true /*isOut*/); + virtual size_t getMisalignment() { + return 0; } - // called by AudioTrack::obtainBuffer and PlaybackThread::OutputTrack::obtainBuffer - // FIXME remove this API since it assumes a lock that should be invisible to caller - size_t framesAvailable_l() { - return mCblk->framesAvailable_l(mFrameCount, true /*isOut*/); + virtual uint32_t getUnderrunFrames() const { + return 0; } +private: + StaticAudioTrackSingleStateQueue::Mutator mMutator; + size_t mBufferPosition; // 
so that getBufferPosition() appears to be synchronous }; // ---------------------------------------------------------------------------- @@ -236,60 +317,122 @@ public: // Proxy used by AudioRecord client class AudioRecordClientProxy : public ClientProxy { public: - AudioRecordClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize) - : ClientProxy(cblk, buffers, frameCount, frameSize) { } + AudioRecordClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, + size_t frameSize) + : ClientProxy(cblk, buffers, frameCount, frameSize, + false /*isOut*/, false /*clientInServer*/) { } ~AudioRecordClientProxy() { } - - // called by AudioRecord::releaseBuffer - size_t stepUser(size_t stepCount) { - return mCblk->stepUser(stepCount, mFrameCount, false /*isOut*/); - } - - // called by AudioRecord::processBuffer - size_t framesAvailable() { - return mCblk->framesAvailable(mFrameCount, false /*isOut*/); - } - - // called by AudioRecord::obtainBuffer - size_t framesReady() { - return mCblk->framesReady(false /*isOut*/); - } - }; // ---------------------------------------------------------------------------- // Proxy used by AudioFlinger server class ServerProxy : public Proxy { +protected: + ServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize, + bool isOut, bool clientInServer); public: - ServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize, bool isOut) - : Proxy(cblk, buffers, frameCount, frameSize), mIsOut(isOut) { } virtual ~ServerProxy() { } - // for AudioTrack and AudioRecord - bool step(size_t stepCount) { return mCblk->stepServer(stepCount, mFrameCount, mIsOut); } + // Obtain a buffer with filled frames (writing) or empty frames (reading). + // It is permitted to call obtainBuffer() multiple times in succession, without any intervening + // calls to releaseBuffer(). In that case, the final obtainBuffer() is the one that effectively + // sets or extends the unreleased frame count. + // Always non-blocking. + // On entry: + // buffer->mFrameCount should be initialized to maximum number of desired frames, + // which must be > 0. + // buffer->mNonContig is unused. + // buffer->mRaw is unused. + // On exit: + // buffer->mFrameCount has the actual number of contiguous available frames, + // which is always 0 when the return status != NO_ERROR. + // buffer->mNonContig is the number of additional non-contiguous available frames. + // buffer->mRaw is a pointer to the first available frame, + // or NULL when buffer->mFrameCount == 0. + // The return status is one of: + // NO_ERROR Success, buffer->mFrameCount > 0. + // WOULD_BLOCK No frames are available. + // NO_INIT Shared memory is corrupt. + virtual status_t obtainBuffer(Buffer* buffer); + + // Release (some of) the frames last obtained. + // On entry, buffer->mFrameCount should have the number of frames to release, + // which must (cumulatively) be <= the number of frames last obtained but not yet released. + // It is permitted to call releaseBuffer() multiple times to release the frames in chunks. + // buffer->mRaw is ignored, but is normally same pointer returned by last obtainBuffer(). + // On exit: + // buffer->mFrameCount is zero. + // buffer->mRaw is NULL. 
+ virtual void releaseBuffer(Buffer* buffer); +protected: + size_t mUnreleased; // unreleased frames remaining from most recent obtainBuffer() + size_t mAvailToClient; // estimated frames available to client prior to releaseBuffer() +private: + int32_t mFlush; // our copy of cblk->u.mStreaming.mFlush, for streaming output only + bool mDeferWake; // whether another releaseBuffer() is expected soon +}; + +// Proxy used by AudioFlinger for servicing AudioTrack +class AudioTrackServerProxy : public ServerProxy { +public: + AudioTrackServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, + size_t frameSize, bool clientInServer = false) + : ServerProxy(cblk, buffers, frameCount, frameSize, true /*isOut*/, clientInServer) { } +protected: + virtual ~AudioTrackServerProxy() { } + +public: // return value of these methods must be validated by the caller uint32_t getSampleRate() const { return mCblk->mSampleRate; } uint16_t getSendLevel_U4_12() const { return mCblk->mSendLevel; } uint32_t getVolumeLR() const { return mCblk->mVolumeLR; } - // for AudioTrack only - size_t framesReady() { - ALOG_ASSERT(mIsOut); - return mCblk->framesReady(true); - } + // estimated total number of filled frames available to server to read, + // which may include non-contiguous frames + virtual size_t framesReady(); + + // Currently AudioFlinger will call framesReady() for a fast track from two threads: + // FastMixer thread, and normal mixer thread. This is dangerous, as the proxy is intended + // to be called from at most one thread of server, and one thread of client. + // As a temporary workaround, this method informs the proxy implementation that it + // should avoid doing a state queue poll from within framesReady(). + // FIXME Change AudioFlinger to not call framesReady() from normal mixer thread. 
+ virtual void framesReadyIsCalledByMultipleThreads() { } +}; - // for AudioRecord only, called by RecordThread::RecordTrack::getNextBuffer - // FIXME remove this API since it assumes a lock that should be invisible to caller - size_t framesAvailableIn_l() { - ALOG_ASSERT(!mIsOut); - return mCblk->framesAvailable_l(mFrameCount, false); - } +class StaticAudioTrackServerProxy : public AudioTrackServerProxy { +public: + StaticAudioTrackServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, + size_t frameSize); +protected: + virtual ~StaticAudioTrackServerProxy() { } + +public: + virtual size_t framesReady(); + virtual void framesReadyIsCalledByMultipleThreads(); + virtual status_t obtainBuffer(Buffer* buffer); + virtual void releaseBuffer(Buffer* buffer); private: - const bool mIsOut; // true for AudioTrack, false for AudioRecord + ssize_t pollPosition(); // poll for state queue update, and return current position + StaticAudioTrackSingleStateQueue::Observer mObserver; + size_t mPosition; // server's current play position in frames, relative to 0 + size_t mEnd; // cached value computed from mState, safe for asynchronous read + bool mFramesReadyIsCalledByMultipleThreads; + StaticAudioTrackState mState; +}; +// Proxy used by AudioFlinger for servicing AudioRecord +class AudioRecordServerProxy : public ServerProxy { +public: + AudioRecordServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, + size_t frameSize) + : ServerProxy(cblk, buffers, frameCount, frameSize, false /*isOut*/, + false /*clientInServer*/) { } +protected: + virtual ~AudioRecordServerProxy() { } }; // ---------------------------------------------------------------------------- diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index a2b8ae2..9faa497 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -19,18 +19,13 @@ #define LOG_TAG "AudioRecord" #include -#include - #include -#include -#include #include -#include -#include #include - #include +#define WAIT_PERIOD_MS 10 + namespace android { // --------------------------------------------------------------------------- @@ -41,7 +36,9 @@ status_t AudioRecord::getMinFrameCount( audio_format_t format, audio_channel_mask_t channelMask) { - if (frameCount == NULL) return BAD_VALUE; + if (frameCount == NULL) { + return BAD_VALUE; + } // default to 0 in case of error *frameCount = 0; @@ -75,8 +72,7 @@ status_t AudioRecord::getMinFrameCount( AudioRecord::AudioRecord() : mStatus(NO_INIT), mSessionId(0), - mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT), - mProxy(NULL) + mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT) { } @@ -89,14 +85,15 @@ AudioRecord::AudioRecord( callback_t cbf, void* user, int notificationFrames, - int sessionId) + int sessionId, + transfer_type transferType) : mStatus(NO_INIT), mSessionId(0), mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT), mProxy(NULL) { - mStatus = set(inputSource, sampleRate, format, channelMask, - frameCount, cbf, user, notificationFrames, false /*threadCanCallJava*/, sessionId); + mStatus = set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user, + notificationFrames, false /*threadCanCallJava*/, sessionId, transferType); } AudioRecord::~AudioRecord() @@ -111,11 +108,13 @@ AudioRecord::~AudioRecord() mAudioRecordThread->requestExitAndWait(); mAudioRecordThread.clear(); } - mAudioRecord.clear(); + if (mAudioRecord != 0) { + 
mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this); + mAudioRecord.clear(); + } IPCThreadState::self()->flushCommands(); AudioSystem::releaseAudioSessionId(mSessionId); } - delete mProxy; } status_t AudioRecord::set( @@ -128,8 +127,32 @@ status_t AudioRecord::set( void* user, int notificationFrames, bool threadCanCallJava, - int sessionId) + int sessionId, + transfer_type transferType) { + switch (transferType) { + case TRANSFER_DEFAULT: + if (cbf == NULL || threadCanCallJava) { + transferType = TRANSFER_SYNC; + } else { + transferType = TRANSFER_CALLBACK; + } + break; + case TRANSFER_CALLBACK: + if (cbf == NULL) { + ALOGE("Transfer type TRANSFER_CALLBACK but cbf == NULL"); + return BAD_VALUE; + } + break; + case TRANSFER_OBTAIN: + case TRANSFER_SYNC: + break; + default: + ALOGE("Invalid transfer type %d", transferType); + return BAD_VALUE; + } + mTransfer = transferType; + // FIXME "int" here is legacy and will be replaced by size_t later if (frameCountInt < 0) { ALOGE("Invalid frame count %d", frameCountInt); @@ -143,6 +166,7 @@ status_t AudioRecord::set( AutoMutex lock(mLock); if (mAudioRecord != 0) { + ALOGE("Track already in use"); return INVALID_OPERATION; } @@ -159,14 +183,16 @@ status_t AudioRecord::set( if (format == AUDIO_FORMAT_DEFAULT) { format = AUDIO_FORMAT_PCM_16_BIT; } + // validate parameters if (!audio_is_valid_format(format)) { - ALOGE("Invalid format"); + ALOGE("Invalid format %d", format); return BAD_VALUE; } mFormat = format; if (!audio_is_input_channel(channelMask)) { + ALOGE("Invalid channel mask %#x", channelMask); return BAD_VALUE; } mChannelMask = channelMask; @@ -200,6 +226,7 @@ status_t AudioRecord::set( size_t minFrameCount = 0; status_t status = getMinFrameCount(&minFrameCount, sampleRate, format, channelMask); if (status != NO_ERROR) { + ALOGE("getMinFrameCount() failed; status %d", status); return status; } ALOGV("AudioRecord::set() minFrameCount = %d", minFrameCount); @@ -207,6 +234,7 @@ status_t AudioRecord::set( if (frameCount == 0) { frameCount = minFrameCount; } else if (frameCount < minFrameCount) { + ALOGE("frameCount %u < minFrameCount %u", frameCount, minFrameCount); return BAD_VALUE; } @@ -215,7 +243,7 @@ status_t AudioRecord::set( } // create the IAudioRecord - status = openRecord_l(sampleRate, format, frameCount, input); + status = openRecord_l(sampleRate, format, frameCount, input, 0 /*epoch*/); if (status != NO_ERROR) { return status; } @@ -233,7 +261,7 @@ status_t AudioRecord::set( mActive = false; mCbf = cbf; mNotificationFrames = notificationFrames; - mRemainingFrames = notificationFrames; + mRefreshRemaining = true; mUserData = user; // TODO: add audio hardware input latency here mLatency = (1000*mFrameCount) / sampleRate; @@ -244,117 +272,78 @@ status_t AudioRecord::set( mInputSource = inputSource; mInput = input; AudioSystem::acquireAudioSessionId(mSessionId); + mSequence = 1; + mObservedSequence = mSequence; + mInOverrun = false; return NO_ERROR; } -status_t AudioRecord::initCheck() const -{ - return mStatus; -} - -// ------------------------------------------------------------------------- - -uint32_t AudioRecord::latency() const -{ - return mLatency; -} - -audio_format_t AudioRecord::format() const -{ - return mFormat; -} - -uint32_t AudioRecord::channelCount() const -{ - return mChannelCount; -} - -size_t AudioRecord::frameCount() const -{ - return mFrameCount; -} - -audio_source_t AudioRecord::inputSource() const -{ - return mInputSource; -} - // ------------------------------------------------------------------------- 
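Given the resolution rules just added to set(), a recorder constructed without a callback ends up in TRANSFER_SYNC and is driven by blocking read(), which is what the sketch below does. It is a usage illustration only; the source, rate, format and mask values are arbitrary, and the trailing constructor parameters are assumed to keep their existing defaults.

#include <media/AudioRecord.h>
#include <media/AudioSystem.h>

using namespace android;

static ssize_t captureOnceSketch(void* dst, size_t dstBytes)
{
    sp<AudioRecord> record = new AudioRecord(
            AUDIO_SOURCE_MIC,              // inputSource
            44100,                         // sampleRate in Hz
            AUDIO_FORMAT_PCM_16_BIT,       // format
            AUDIO_CHANNEL_IN_MONO,         // channelMask
            0);                            // frameCount: let the framework pick >= minimum
            // no cbf, so set() resolves TRANSFER_DEFAULT to TRANSFER_SYNC
    if (record->initCheck() != NO_ERROR) {
        return (ssize_t) record->initCheck();
    }
    status_t status = record->start(AudioSystem::SYNC_EVENT_NONE, 0 /*triggerSession*/);
    if (status != NO_ERROR) {
        return (ssize_t) status;
    }
    ssize_t bytesRead = record->read(dst, dstBytes);   // blocks; returns bytes read or an error
    record->stop();
    return bytesRead;
}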
status_t AudioRecord::start(AudioSystem::sync_event_t event, int triggerSession) { - status_t ret = NO_ERROR; - sp t = mAudioRecordThread; - ALOGV("start, sync event %d trigger session %d", event, triggerSession); AutoMutex lock(mLock); - // acquire a strong reference on the IAudioRecord and IMemory so that they cannot be destroyed - // while we are accessing the cblk - sp audioRecord = mAudioRecord; - sp iMem = mCblkMemory; - audio_track_cblk_t* cblk = mCblk; + if (mActive) { + return NO_ERROR; + } - if (!mActive) { - mActive = true; + // reset current position as seen by client to 0 + mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition()); - cblk->lock.lock(); - if (!(cblk->flags & CBLK_INVALID)) { - cblk->lock.unlock(); - ALOGV("mAudioRecord->start()"); - ret = mAudioRecord->start(event, triggerSession); - cblk->lock.lock(); - if (ret == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID, &cblk->flags); - } - } - if (cblk->flags & CBLK_INVALID) { - audio_track_cblk_t* temp = cblk; - ret = restoreRecord_l(temp); - cblk = temp; + mNewPosition = mProxy->getPosition() + mUpdatePeriod; + int32_t flags = android_atomic_acquire_load(&mCblk->flags); + + status_t status = NO_ERROR; + if (!(flags & CBLK_INVALID)) { + ALOGV("mAudioRecord->start()"); + status = mAudioRecord->start(event, triggerSession); + if (status == DEAD_OBJECT) { + flags |= CBLK_INVALID; } - cblk->lock.unlock(); - if (ret == NO_ERROR) { - mNewPosition = cblk->user + mUpdatePeriod; - cblk->bufferTimeoutMs = (event == AudioSystem::SYNC_EVENT_NONE) ? MAX_RUN_TIMEOUT_MS : - AudioSystem::kSyncRecordStartTimeOutMs; - cblk->waitTimeMs = 0; - if (t != 0) { - t->resume(); - } else { - mPreviousPriority = getpriority(PRIO_PROCESS, 0); - get_sched_policy(0, &mPreviousSchedulingGroup); - androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO); - } + } + if (flags & CBLK_INVALID) { + status = restoreRecord_l("start"); + } + + if (status != NO_ERROR) { + ALOGE("start() status %d", status); + } else { + mActive = true; + sp t = mAudioRecordThread; + if (t != 0) { + t->resume(); } else { - mActive = false; + mPreviousPriority = getpriority(PRIO_PROCESS, 0); + get_sched_policy(0, &mPreviousSchedulingGroup); + androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO); } } - return ret; + return status; } void AudioRecord::stop() { - sp t = mAudioRecordThread; - - ALOGV("stop"); - AutoMutex lock(mLock); - if (mActive) { - mActive = false; - mCblk->cv.signal(); - mAudioRecord->stop(); - // the record head position will reset to 0, so if a marker is set, we need - // to activate it again - mMarkerReached = false; - if (t != 0) { - t->pause(); - } else { - setpriority(PRIO_PROCESS, 0, mPreviousPriority); - set_sched_policy(0, mPreviousSchedulingGroup); - } + if (!mActive) { + return; + } + + mActive = false; + mProxy->interrupt(); + mAudioRecord->stop(); + // the record head position will reset to 0, so if a marker is set, we need + // to activate it again + mMarkerReached = false; + sp t = mAudioRecordThread; + if (t != 0) { + t->pause(); + } else { + setpriority(PRIO_PROCESS, 0, mPreviousPriority); + set_sched_policy(0, mPreviousSchedulingGroup); } } @@ -364,14 +353,11 @@ bool AudioRecord::stopped() const return !mActive; } -uint32_t AudioRecord::getSampleRate() const -{ - return mSampleRate; -} - status_t AudioRecord::setMarkerPosition(uint32_t marker) { - if (mCbf == NULL) return INVALID_OPERATION; + if (mCbf == NULL) { + return INVALID_OPERATION; + } AutoMutex lock(mLock); mMarkerPosition = marker; @@ -382,7 +368,9 @@ status_t 
AudioRecord::setMarkerPosition(uint32_t marker) status_t AudioRecord::getMarkerPosition(uint32_t *marker) const { - if (marker == NULL) return BAD_VALUE; + if (marker == NULL) { + return BAD_VALUE; + } AutoMutex lock(mLock); *marker = mMarkerPosition; @@ -392,13 +380,12 @@ status_t AudioRecord::getMarkerPosition(uint32_t *marker) const status_t AudioRecord::setPositionUpdatePeriod(uint32_t updatePeriod) { - if (mCbf == NULL) return INVALID_OPERATION; - - uint32_t curPosition; - getPosition(&curPosition); + if (mCbf == NULL) { + return INVALID_OPERATION; + } AutoMutex lock(mLock); - mNewPosition = curPosition + updatePeriod; + mNewPosition = mProxy->getPosition() + updatePeriod; mUpdatePeriod = updatePeriod; return NO_ERROR; @@ -406,7 +393,9 @@ status_t AudioRecord::setPositionUpdatePeriod(uint32_t updatePeriod) status_t AudioRecord::getPositionUpdatePeriod(uint32_t *updatePeriod) const { - if (updatePeriod == NULL) return BAD_VALUE; + if (updatePeriod == NULL) { + return BAD_VALUE; + } AutoMutex lock(mLock); *updatePeriod = mUpdatePeriod; @@ -416,10 +405,12 @@ status_t AudioRecord::getPositionUpdatePeriod(uint32_t *updatePeriod) const status_t AudioRecord::getPosition(uint32_t *position) const { - if (position == NULL) return BAD_VALUE; + if (position == NULL) { + return BAD_VALUE; + } AutoMutex lock(mLock); - *position = mCblk->user; + *position = mProxy->getPosition(); return NO_ERROR; } @@ -427,7 +418,7 @@ status_t AudioRecord::getPosition(uint32_t *position) const unsigned int AudioRecord::getInputFramesLost() const { // no need to check mActive, because if inactive this will return 0, which is what we want - return AudioSystem::getInputFramesLost(mInput); + return AudioSystem::getInputFramesLost(getInput()); } // ------------------------------------------------------------------------- @@ -437,7 +428,8 @@ status_t AudioRecord::openRecord_l( uint32_t sampleRate, audio_format_t format, size_t frameCount, - audio_io_handle_t input) + audio_io_handle_t input, + size_t epoch) { status_t status; const sp& audioFlinger = AudioSystem::get_audio_flinger(); @@ -447,7 +439,7 @@ status_t AudioRecord::openRecord_l( } pid_t tid = -1; - // FIXME see similar logic at AudioTrack + // FIXME see similar logic at AudioTrack for tid int originalSessionId = mSessionId; sp record = audioFlinger->openRecord(input, @@ -470,133 +462,138 @@ status_t AudioRecord::openRecord_l( ALOGE("Could not get control block"); return NO_INIT; } - mAudioRecord.clear(); + if (mAudioRecord != 0) { + mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this); + mDeathNotifier.clear(); + } mAudioRecord = record; - mCblkMemory.clear(); mCblkMemory = iMem; audio_track_cblk_t* cblk = static_cast(iMem->pointer()); mCblk = cblk; - mBuffers = (char*)cblk + sizeof(audio_track_cblk_t); - cblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; - cblk->waitTimeMs = 0; + + // starting address of buffers in shared memory + void *buffers = (char*)cblk + sizeof(audio_track_cblk_t); // update proxy - delete mProxy; - mProxy = new AudioRecordClientProxy(cblk, mBuffers, frameCount, mFrameSize); + mProxy = new AudioRecordClientProxy(cblk, buffers, frameCount, mFrameSize); + mProxy->setEpoch(epoch); + mProxy->setMinimum(mNotificationFrames); + + mDeathNotifier = new DeathNotifier(this); + mAudioRecord->asBinder()->linkToDeath(mDeathNotifier, this); return NO_ERROR; } status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) { - ALOG_ASSERT(mStatus == NO_ERROR && mProxy != NULL); + if (audioBuffer == NULL) { + return BAD_VALUE; + } + if 
(mTransfer != TRANSFER_OBTAIN) { + audioBuffer->frameCount = 0; + audioBuffer->size = 0; + audioBuffer->raw = NULL; + return INVALID_OPERATION; + } - AutoMutex lock(mLock); - bool active; - status_t result = NO_ERROR; - audio_track_cblk_t* cblk = mCblk; - uint32_t framesReq = audioBuffer->frameCount; - uint32_t waitTimeMs = (waitCount < 0) ? cblk->bufferTimeoutMs : WAIT_PERIOD_MS; - - audioBuffer->frameCount = 0; - audioBuffer->size = 0; - - size_t framesReady = mProxy->framesReady(); - - if (framesReady == 0) { - cblk->lock.lock(); - goto start_loop_here; - while (framesReady == 0) { - active = mActive; - if (CC_UNLIKELY(!active)) { - cblk->lock.unlock(); - return NO_MORE_BUFFERS; - } - if (CC_UNLIKELY(!waitCount)) { - cblk->lock.unlock(); - return WOULD_BLOCK; - } - if (!(cblk->flags & CBLK_INVALID)) { - mLock.unlock(); - // this condition is in shared memory, so if IAudioRecord and control block - // are replaced due to mediaserver death or IAudioRecord invalidation then - // cv won't be signalled, but fortunately the timeout will limit the wait - result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); - cblk->lock.unlock(); - mLock.lock(); - if (!mActive) { - return status_t(STOPPED); - } - // IAudioRecord may have been re-created while mLock was unlocked - cblk = mCblk; - cblk->lock.lock(); - } - if (cblk->flags & CBLK_INVALID) { - goto create_new_record; - } - if (CC_UNLIKELY(result != NO_ERROR)) { - cblk->waitTimeMs += waitTimeMs; - if (cblk->waitTimeMs >= cblk->bufferTimeoutMs) { - ALOGW( "obtainBuffer timed out (is the CPU pegged?) " - "user=%08x, server=%08x", cblk->user, cblk->server); - cblk->lock.unlock(); - // callback thread or sync event hasn't changed - result = mAudioRecord->start(AudioSystem::SYNC_EVENT_SAME, 0); - cblk->lock.lock(); - if (result == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID, &cblk->flags); -create_new_record: - audio_track_cblk_t* temp = cblk; - result = AudioRecord::restoreRecord_l(temp); - cblk = temp; - } - if (result != NO_ERROR) { - ALOGW("obtainBuffer create Track error %d", result); - cblk->lock.unlock(); - return result; + const struct timespec *requested; + if (waitCount == -1) { + requested = &ClientProxy::kForever; + } else if (waitCount == 0) { + requested = &ClientProxy::kNonBlocking; + } else if (waitCount > 0) { + long long ms = WAIT_PERIOD_MS * (long long) waitCount; + struct timespec timeout; + timeout.tv_sec = ms / 1000; + timeout.tv_nsec = (int) (ms % 1000) * 1000000; + requested = &timeout; + } else { + ALOGE("%s invalid waitCount %d", __func__, waitCount); + requested = NULL; + } + return obtainBuffer(audioBuffer, requested); +} + +status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, const struct timespec *requested, + struct timespec *elapsed, size_t *nonContig) +{ + // previous and new IAudioRecord sequence numbers are used to detect track re-creation + uint32_t oldSequence = 0; + uint32_t newSequence; + + Proxy::Buffer buffer; + status_t status = NO_ERROR; + + static const int32_t kMaxTries = 5; + int32_t tryCounter = kMaxTries; + + do { + // obtainBuffer() is called with mutex unlocked, so keep extra references to these fields to + // keep them from going away if another thread re-creates the track during obtainBuffer() + sp proxy; + sp iMem; + { + // start of lock scope + AutoMutex lock(mLock); + + newSequence = mSequence; + // did previous obtainBuffer() fail due to media server death or voluntary invalidation? 
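The new obtainBuffer() keeps the legacy waitCount entry point and maps it onto the timespec-based call, as shown a few lines above. A minimal standalone sketch of that mapping; WAIT_PERIOD_MS is the 10 ms value the patch uses, everything else (the enum, the figures in main()) is invented for illustration:

    #include <ctime>
    #include <cstdio>

    static const long long WAIT_PERIOD_MS = 10;   // the wait period the patch uses

    enum WaitKind { WAIT_FOREVER, WAIT_NONBLOCKING, WAIT_TIMED, WAIT_INVALID };

    // Maps the legacy waitCount (-1 = forever, 0 = non-blocking, >0 = that many wait
    // periods) onto a timespec, mirroring the conversion in the new obtainBuffer().
    WaitKind waitCountToTimespec(int waitCount, struct timespec *out) {
        if (waitCount == -1) return WAIT_FOREVER;
        if (waitCount == 0)  return WAIT_NONBLOCKING;
        if (waitCount > 0) {
            long long ms = WAIT_PERIOD_MS * (long long) waitCount;
            out->tv_sec  = ms / 1000;
            out->tv_nsec = (long) (ms % 1000) * 1000000;
            return WAIT_TIMED;
        }
        return WAIT_INVALID;   // negative values other than -1 are rejected
    }

    int main() {
        struct timespec ts;
        if (waitCountToTimespec(250, &ts) == WAIT_TIMED) {   // 250 * 10 ms = 2.5 s
            printf("timeout = %lld.%09ld s\n", (long long) ts.tv_sec, (long) ts.tv_nsec);
        }
        return 0;
    }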
+ if (status == DEAD_OBJECT) { + // re-create track, unless someone else has already done so + if (newSequence == oldSequence) { + status = restoreRecord_l("obtainBuffer"); + if (status != NO_ERROR) { + break; } - cblk->waitTimeMs = 0; - } - if (--waitCount == 0) { - cblk->lock.unlock(); - return TIMED_OUT; } } - // read the server count again -start_loop_here: - framesReady = mProxy->framesReady(); - } - cblk->lock.unlock(); - } + oldSequence = newSequence; - cblk->waitTimeMs = 0; - // reset time out to running value after obtaining a buffer - cblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; + // Keep the extra references + proxy = mProxy; + iMem = mCblkMemory; - if (framesReq > framesReady) { - framesReq = framesReady; - } + // Non-blocking if track is stopped + if (!mActive) { + requested = &ClientProxy::kNonBlocking; + } - uint32_t u = cblk->user; - uint32_t bufferEnd = cblk->userBase + mFrameCount; + } // end of lock scope - if (framesReq > bufferEnd - u) { - framesReq = bufferEnd - u; - } + buffer.mFrameCount = audioBuffer->frameCount; + // FIXME starts the requested timeout and elapsed over from scratch + status = proxy->obtainBuffer(&buffer, requested, elapsed); + + } while ((status == DEAD_OBJECT) && (tryCounter-- > 0)); - audioBuffer->frameCount = framesReq; - audioBuffer->size = framesReq * mFrameSize; - audioBuffer->raw = mProxy->buffer(u); - active = mActive; - return active ? status_t(NO_ERROR) : status_t(STOPPED); + audioBuffer->frameCount = buffer.mFrameCount; + audioBuffer->size = buffer.mFrameCount * mFrameSize; + audioBuffer->raw = buffer.mRaw; + if (nonContig != NULL) { + *nonContig = buffer.mNonContig; + } + return status; } void AudioRecord::releaseBuffer(Buffer* audioBuffer) { - ALOG_ASSERT(mStatus == NO_ERROR && mProxy != NULL); + // all TRANSFER_* are valid + + size_t stepCount = audioBuffer->size / mFrameSize; + if (stepCount == 0) { + return; + } + + Proxy::Buffer buffer; + buffer.mFrameCount = stepCount; + buffer.mRaw = audioBuffer->raw; AutoMutex lock(mLock); - (void) mProxy->stepUser(audioBuffer->frameCount); + mInOverrun = false; + mProxy->releaseBuffer(&buffer); + + // the server does not automatically disable recorder on overrun, so no need to restart } audio_io_handle_t AudioRecord::getInput() const @@ -616,215 +613,304 @@ audio_io_handle_t AudioRecord::getInput_l() return mInput; } -int AudioRecord::getSessionId() const -{ - // no lock needed because session ID doesn't change after first set() - return mSessionId; -} - // ------------------------------------------------------------------------- ssize_t AudioRecord::read(void* buffer, size_t userSize) { - ssize_t read = 0; - Buffer audioBuffer; - int8_t *dst = static_cast(buffer); + if (mTransfer != TRANSFER_SYNC) { + return INVALID_OPERATION; + } - if (ssize_t(userSize) < 0) { - // sanity-check. user is most-likely passing an error code. - ALOGE("AudioRecord::read(buffer=%p, size=%u (%d)", - buffer, userSize, userSize); + if (ssize_t(userSize) < 0 || (buffer == NULL && userSize != 0)) { + // sanity-check. user is most-likely passing an error code, and it would + // make the return value ambiguous (actualSize vs error). 
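The sequence counter compared in the loop above is how a caller that saw DEAD_OBJECT decides whether it still needs to re-create the track, or whether another thread already did so while the lock was released. A simplified, self-contained model of that handshake; the class, the sentinel value and the pretend restore are inventions of this sketch, not framework code:

    #include <mutex>
    #include <cstdint>
    #include <cstdio>

    static const int kDeadObject = -32;   // arbitrary sentinel standing in for DEAD_OBJECT

    struct RecordClientModel {
        std::mutex lock;
        uint32_t sequence = 1;            // bumped every time the "track" is re-created

        void restoreLocked() { ++sequence; }                          // pretend restore
        int tryObtain(bool dead) { return dead ? kDeadObject : 0; }   // pretend proxy call

        int obtainWithRetry(bool firstAttemptDies) {
            uint32_t oldSequence = 0;
            int status = 0;
            int tryCounter = 5;
            bool dead = firstAttemptDies;
            do {
                {   // lock scope: compare sequences and restore only if nobody else did
                    std::lock_guard<std::mutex> l(lock);
                    uint32_t newSequence = sequence;
                    if (status == kDeadObject && newSequence == oldSequence) {
                        restoreLocked();
                        dead = false;     // the re-created track works again in this model
                    }
                    oldSequence = newSequence;
                }
                status = tryObtain(dead);
            } while (status == kDeadObject && tryCounter-- > 0);
            return status;
        }
    };

    int main() {
        RecordClientModel c;
        printf("status=%d sequence=%u\n", c.obtainWithRetry(true), (unsigned) c.sequence);
        return 0;
    }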
+ ALOGE("AudioRecord::read(buffer=%p, size=%u (%d)", buffer, userSize, userSize); return BAD_VALUE; } - mLock.lock(); - // acquire a strong reference on the IAudioRecord and IMemory so that they cannot be destroyed - // while we are accessing the cblk - sp audioRecord = mAudioRecord; - sp iMem = mCblkMemory; - mLock.unlock(); - - do { + ssize_t read = 0; + Buffer audioBuffer; - audioBuffer.frameCount = userSize/frameSize(); + while (userSize >= mFrameSize) { + audioBuffer.frameCount = userSize / mFrameSize; - // By using a wait count corresponding to twice the timeout period in - // obtainBuffer() we give a chance to recover once for a read timeout - // (if media_server crashed for instance) before returning a length of - // 0 bytes read to the client - status_t err = obtainBuffer(&audioBuffer, ((2 * MAX_RUN_TIMEOUT_MS) / WAIT_PERIOD_MS)); + status_t err = obtainBuffer(&audioBuffer, &ClientProxy::kForever); if (err < 0) { - // out of buffers, return #bytes written - if (err == status_t(NO_MORE_BUFFERS)) { + if (read > 0) { break; } - if (err == status_t(TIMED_OUT)) { - // return partial transfer count - return read; - } return ssize_t(err); } size_t bytesRead = audioBuffer.size; - memcpy(dst, audioBuffer.i8, bytesRead); - - dst += bytesRead; + memcpy(buffer, audioBuffer.i8, bytesRead); + buffer = ((char *) buffer) + bytesRead; userSize -= bytesRead; read += bytesRead; releaseBuffer(&audioBuffer); - } while (userSize); + } return read; } // ------------------------------------------------------------------------- -bool AudioRecord::processAudioBuffer(const sp& thread) +nsecs_t AudioRecord::processAudioBuffer(const sp& thread) { - Buffer audioBuffer; - uint32_t frames = mRemainingFrames; - size_t readSize; - mLock.lock(); - // acquire a strong reference on the IAudioRecord and IMemory so that they cannot be destroyed - // while we are accessing the cblk - sp audioRecord = mAudioRecord; - sp iMem = mCblkMemory; - audio_track_cblk_t* cblk = mCblk; + + // Can only reference mCblk while locked + int32_t flags = android_atomic_and(~CBLK_OVERRUN, &mCblk->flags); + + // Check for track invalidation + if (flags & CBLK_INVALID) { + (void) restoreRecord_l("processAudioBuffer"); + mLock.unlock(); + // Run again immediately, but with a new IAudioRecord + return 0; + } + bool active = mActive; - uint32_t markerPosition = mMarkerPosition; - uint32_t newPosition = mNewPosition; - uint32_t user = cblk->user; - // determine whether a marker callback will be needed, while locked - bool needMarker = !mMarkerReached && (mMarkerPosition > 0) && (user >= mMarkerPosition); - if (needMarker) { - mMarkerReached = true; - } - // determine the number of new position callback(s) that will be needed, while locked + + // Manage overrun callback, must be done under lock to avoid race with releaseBuffer() + bool newOverrun = false; + if (flags & CBLK_OVERRUN) { + if (!mInOverrun) { + mInOverrun = true; + newOverrun = true; + } + } + + // Get current position of server + size_t position = mProxy->getPosition(); + + // Manage marker callback + bool markerReached = false; + size_t markerPosition = mMarkerPosition; + // FIXME fails for wraparound, need 64 bits + if (!mMarkerReached && (markerPosition > 0) && (position >= markerPosition)) { + mMarkerReached = markerReached = true; + } + + // Determine the number of new position callback(s) that will be needed, while locked + size_t newPosCount = 0; + size_t newPosition = mNewPosition; uint32_t updatePeriod = mUpdatePeriod; - uint32_t needNewPos = updatePeriod > 0 && user >= 
newPosition ? - ((user - newPosition) / updatePeriod) + 1 : 0; - mNewPosition = newPosition + updatePeriod * needNewPos; + // FIXME fails for wraparound, need 64 bits + if (updatePeriod > 0 && position >= newPosition) { + newPosCount = ((position - newPosition) / updatePeriod) + 1; + mNewPosition += updatePeriod * newPosCount; + } + + // Cache other fields that will be needed soon + size_t notificationFrames = mNotificationFrames; + if (mRefreshRemaining) { + mRefreshRemaining = false; + mRemainingFrames = notificationFrames; + mRetryOnPartialBuffer = false; + } + size_t misalignment = mProxy->getMisalignment(); + int32_t sequence = mSequence; + + // These fields don't need to be cached, because they are assigned only by set(): + // mTransfer, mCbf, mUserData, mSampleRate + mLock.unlock(); - // perform marker callback, while unlocked - if (needMarker) { + // perform callbacks while unlocked + if (newOverrun) { + mCbf(EVENT_OVERRUN, mUserData, NULL); + } + if (markerReached) { mCbf(EVENT_MARKER, mUserData, &markerPosition); } - - // perform new position callback(s), while unlocked - for (; needNewPos > 0; --needNewPos) { - uint32_t temp = newPosition; + while (newPosCount > 0) { + size_t temp = newPosition; mCbf(EVENT_NEW_POS, mUserData, &temp); newPosition += updatePeriod; + newPosCount--; + } + if (mObservedSequence != sequence) { + mObservedSequence = sequence; + mCbf(EVENT_NEW_IAUDIORECORD, mUserData, NULL); } - do { - audioBuffer.frameCount = frames; - // Calling obtainBuffer() with a wait count of 1 - // limits wait time to WAIT_PERIOD_MS. This prevents from being - // stuck here not being able to handle timed events (position, markers). - status_t err = obtainBuffer(&audioBuffer, 1); - if (err < NO_ERROR) { - if (err != TIMED_OUT) { - ALOGE_IF(err != status_t(NO_MORE_BUFFERS), - "Error obtaining an audio buffer, giving up."); - return false; + // if inactive, then don't run me again until re-started + if (!active) { + return NS_INACTIVE; + } + + // Compute the estimated time until the next timed event (position, markers) + uint32_t minFrames = ~0; + if (!markerReached && position < markerPosition) { + minFrames = markerPosition - position; + } + if (updatePeriod > 0 && updatePeriod < minFrames) { + minFrames = updatePeriod; + } + + // If > 0, poll periodically to recover from a stuck server. A good value is 2. 
+ static const uint32_t kPoll = 0; + if (kPoll > 0 && mTransfer == TRANSFER_CALLBACK && kPoll * notificationFrames < minFrames) { + minFrames = kPoll * notificationFrames; + } + + // Convert frame units to time units + nsecs_t ns = NS_WHENEVER; + if (minFrames != (uint32_t) ~0) { + // This "fudge factor" avoids soaking CPU, and compensates for late progress by server + static const nsecs_t kFudgeNs = 10000000LL; // 10 ms + ns = ((minFrames * 1000000000LL) / mSampleRate) + kFudgeNs; + } + + // If not supplying data by EVENT_MORE_DATA, then we're done + if (mTransfer != TRANSFER_CALLBACK) { + return ns; + } + + struct timespec timeout; + const struct timespec *requested = &ClientProxy::kForever; + if (ns != NS_WHENEVER) { + timeout.tv_sec = ns / 1000000000LL; + timeout.tv_nsec = ns % 1000000000LL; + ALOGV("timeout %ld.%03d", timeout.tv_sec, (int) timeout.tv_nsec / 1000000); + requested = &timeout; + } + + while (mRemainingFrames > 0) { + + Buffer audioBuffer; + audioBuffer.frameCount = mRemainingFrames; + size_t nonContig; + status_t err = obtainBuffer(&audioBuffer, requested, NULL, &nonContig); + LOG_ALWAYS_FATAL_IF((err != NO_ERROR) != (audioBuffer.frameCount == 0), + "obtainBuffer() err=%d frameCount=%u", err, audioBuffer.frameCount); + requested = &ClientProxy::kNonBlocking; + size_t avail = audioBuffer.frameCount + nonContig; + ALOGV("obtainBuffer(%u) returned %u = %u + %u", + mRemainingFrames, avail, audioBuffer.frameCount, nonContig); + if (err != NO_ERROR) { + if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR) { + break; + } + ALOGE("Error %d obtaining an audio buffer, giving up.", err); + return NS_NEVER; + } + + if (mRetryOnPartialBuffer) { + mRetryOnPartialBuffer = false; + if (avail < mRemainingFrames) { + int64_t myns = ((mRemainingFrames - avail) * + 1100000000LL) / mSampleRate; + if (ns < 0 || myns < ns) { + ns = myns; + } + return ns; } - break; } - if (err == status_t(STOPPED)) return false; size_t reqSize = audioBuffer.size; mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer); - readSize = audioBuffer.size; + size_t readSize = audioBuffer.size; // Sanity check on returned size - if (ssize_t(readSize) <= 0) { - // The callback is done filling buffers + if (ssize_t(readSize) < 0 || readSize > reqSize) { + ALOGE("EVENT_MORE_DATA requested %u bytes but callback returned %d bytes", + reqSize, (int) readSize); + return NS_NEVER; + } + + if (readSize == 0) { + // The callback is done consuming buffers // Keep this thread going to handle timed events and - // still try to get more data in intervals of WAIT_PERIOD_MS + // still try to provide more data in intervals of WAIT_PERIOD_MS // but don't just loop and block the CPU, so wait - usleep(WAIT_PERIOD_MS*1000); - break; + return WAIT_PERIOD_MS * 1000000LL; } - if (readSize > reqSize) readSize = reqSize; - audioBuffer.size = readSize; - audioBuffer.frameCount = readSize/frameSize(); - frames -= audioBuffer.frameCount; + size_t releasedFrames = readSize / mFrameSize; + audioBuffer.frameCount = releasedFrames; + mRemainingFrames -= releasedFrames; + if (misalignment >= releasedFrames) { + misalignment -= releasedFrames; + } else { + misalignment = 0; + } releaseBuffer(&audioBuffer); - } while (frames); + // FIXME here is where we would repeat EVENT_MORE_DATA again on same advanced buffer + // if callback doesn't like to accept the full chunk + if (readSize < reqSize) { + continue; + } + // There could be enough non-contiguous frames available to satisfy the remaining request + if (mRemainingFrames <= nonContig) { + continue; + } 
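The frames-to-nanoseconds conversion used above to size the callback thread's sleep, restated as a small standalone function. NS_WHENEVER here is only a stand-in sentinel for this sketch, and the 48 kHz / 2400-frame figures in main() are example values:

    #include <cstdint>
    #include <cstdio>

    typedef int64_t nsecs_t;
    static const nsecs_t NS_WHENEVER = -1;   // stand-in for "no deadline" in this sketch

    // minFrames is how far away the next timed event (marker, position update) is;
    // the 10 ms fudge compensates for the server making slightly late progress.
    nsecs_t framesToSleepNs(uint32_t minFrames, uint32_t sampleRate) {
        if (minFrames == (uint32_t) ~0) {
            return NS_WHENEVER;              // no timed event pending
        }
        static const nsecs_t kFudgeNs = 10000000LL;  // 10 ms
        return ((nsecs_t) minFrames * 1000000000LL) / sampleRate + kFudgeNs;
    }

    int main() {
        // 2400 frames at 48 kHz is 50 ms of audio; expect ~60 ms including the fudge
        printf("sleep for %lld ns\n", (long long) framesToSleepNs(2400, 48000));
        return 0;
    }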
- // Manage overrun callback - if (active && (mProxy->framesAvailable() == 0)) { - // The value of active is stale, but we are almost sure to be active here because - // otherwise we would have exited when obtainBuffer returned STOPPED earlier. - ALOGV("Overrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); - if (!(android_atomic_or(CBLK_UNDERRUN, &cblk->flags) & CBLK_UNDERRUN)) { - mCbf(EVENT_OVERRUN, mUserData, NULL); +#if 0 + // This heuristic tries to collapse a series of EVENT_MORE_DATA that would total to a + // sum <= notificationFrames. It replaces that series by at most two EVENT_MORE_DATA + // that total to a sum == notificationFrames. + if (0 < misalignment && misalignment <= mRemainingFrames) { + mRemainingFrames = misalignment; + return (mRemainingFrames * 1100000000LL) / mSampleRate; } - } +#endif - if (frames == 0) { - mRemainingFrames = mNotificationFrames; - } else { - mRemainingFrames = frames; } - return true; + mRemainingFrames = notificationFrames; + mRetryOnPartialBuffer = true; + + // A lot has transpired since ns was calculated, so run again immediately and re-calculate + return 0; } -// must be called with mLock and cblk.lock held. Callers must also hold strong references on -// the IAudioRecord and IMemory in case they are recreated here. -// If the IAudioRecord is successfully restored, the cblk pointer is updated -status_t AudioRecord::restoreRecord_l(audio_track_cblk_t*& refCblk) +status_t AudioRecord::restoreRecord_l(const char *from) { + ALOGW("dead IAudioRecord, creating a new one from %s()", from); + ++mSequence; status_t result; - audio_track_cblk_t* cblk = refCblk; - audio_track_cblk_t* newCblk = cblk; - ALOGW("dead IAudioRecord, creating a new one"); - - // signal old cblk condition so that other threads waiting for available buffers stop - // waiting now - cblk->cv.broadcast(); - cblk->lock.unlock(); - // if the new IAudioRecord is created, openRecord_l() will modify the // following member variables: mAudioRecord, mCblkMemory and mCblk. 
// It will also delete the strong references on previous IAudioRecord and IMemory - result = openRecord_l(mSampleRate, mFormat, mFrameCount, getInput_l()); + size_t position = mProxy->getPosition(); + mNewPosition = position + mUpdatePeriod; + result = openRecord_l(mSampleRate, mFormat, mFrameCount, getInput_l(), position); if (result == NO_ERROR) { - newCblk = mCblk; - // callback thread or sync event hasn't changed - result = mAudioRecord->start(AudioSystem::SYNC_EVENT_SAME, 0); + if (mActive) { + // callback thread or sync event hasn't changed + // FIXME this fails if we have a new AudioFlinger instance + result = mAudioRecord->start(AudioSystem::SYNC_EVENT_SAME, 0); + } } if (result != NO_ERROR) { + ALOGW("restoreRecord_l() failed status %d", result); mActive = false; } - ALOGV("restoreRecord_l() status %d mActive %d cblk %p, old cblk %p flags %08x old flags %08x", - result, mActive, newCblk, cblk, newCblk->flags, cblk->flags); - - if (result == NO_ERROR) { - // from now on we switch to the newly created cblk - refCblk = newCblk; - } - newCblk->lock.lock(); + return result; +} - ALOGW_IF(result != NO_ERROR, "restoreRecord_l() error %d", result); +// ========================================================================= - return result; +void AudioRecord::DeathNotifier::binderDied(const wp& who) +{ + sp audioRecord = mAudioRecord.promote(); + if (audioRecord != 0) { + AutoMutex lock(audioRecord->mLock); + audioRecord->mProxy->binderDied(); + } } // ========================================================================= AudioRecord::AudioRecordThread::AudioRecordThread(AudioRecord& receiver, bool bCanCallJava) - : Thread(bCanCallJava), mReceiver(receiver), mPaused(true) + : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mResumeLatch(false) { } @@ -842,10 +928,26 @@ bool AudioRecord::AudioRecordThread::threadLoop() return true; } } - if (!mReceiver.processAudioBuffer(this)) { - pause(); + nsecs_t ns = mReceiver.processAudioBuffer(this); + switch (ns) { + case 0: + return true; + case NS_WHENEVER: + sleep(1); + return true; + case NS_INACTIVE: + pauseConditional(); + return true; + case NS_NEVER: + return false; + default: + LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns); + struct timespec req; + req.tv_sec = ns / 1000000000LL; + req.tv_nsec = ns % 1000000000LL; + nanosleep(&req, NULL /*rem*/); + return true; } - return true; } void AudioRecord::AudioRecordThread::requestExit() @@ -859,6 +961,17 @@ void AudioRecord::AudioRecordThread::pause() { AutoMutex _l(mMyLock); mPaused = true; + mResumeLatch = false; +} + +void AudioRecord::AudioRecordThread::pauseConditional() +{ + AutoMutex _l(mMyLock); + if (mResumeLatch) { + mResumeLatch = false; + } else { + mPaused = true; + } } void AudioRecord::AudioRecordThread::resume() @@ -866,7 +979,10 @@ void AudioRecord::AudioRecordThread::resume() AutoMutex _l(mMyLock); if (mPaused) { mPaused = false; + mResumeLatch = false; mMyCond.signal(); + } else { + mResumeLatch = true; } } diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 77fc6f6..faca054 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -19,31 +19,14 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "AudioTrack" -#include -#include -#include - -#include #include - -#include - -#include +#include +#include #include - #include -#include -#include -#include -#include - -#include -#include +#include -#include -#include - -#include +#define WAIT_PERIOD_MS 10 namespace android { // 
--------------------------------------------------------------------------- @@ -82,7 +65,9 @@ status_t AudioTrack::getMinFrameCount( // Ensure that buffer depth covers at least audio hardware latency uint32_t minBufCount = afLatency / ((1000 * afFrameCount) / afSampleRate); - if (minBufCount < 2) minBufCount = 2; + if (minBufCount < 2) { + minBufCount = 2; + } *frameCount = (sampleRate == 0) ? afFrameCount * minBufCount : afFrameCount * minBufCount * sampleRate / afSampleRate; @@ -97,8 +82,7 @@ AudioTrack::AudioTrack() : mStatus(NO_INIT), mIsTimed(false), mPreviousPriority(ANDROID_PRIORITY_NORMAL), - mPreviousSchedulingGroup(SP_DEFAULT), - mProxy(NULL) + mPreviousSchedulingGroup(SP_DEFAULT) { } @@ -112,16 +96,16 @@ AudioTrack::AudioTrack( callback_t cbf, void* user, int notificationFrames, - int sessionId) + int sessionId, + transfer_type transferType) : mStatus(NO_INIT), mIsTimed(false), mPreviousPriority(ANDROID_PRIORITY_NORMAL), - mPreviousSchedulingGroup(SP_DEFAULT), - mProxy(NULL) + mPreviousSchedulingGroup(SP_DEFAULT) { mStatus = set(streamType, sampleRate, format, channelMask, frameCount, flags, cbf, user, notificationFrames, - 0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId); + 0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType); } AudioTrack::AudioTrack( @@ -134,27 +118,20 @@ AudioTrack::AudioTrack( callback_t cbf, void* user, int notificationFrames, - int sessionId) + int sessionId, + transfer_type transferType) : mStatus(NO_INIT), mIsTimed(false), mPreviousPriority(ANDROID_PRIORITY_NORMAL), - mPreviousSchedulingGroup(SP_DEFAULT), - mProxy(NULL) + mPreviousSchedulingGroup(SP_DEFAULT) { - if (sharedBuffer == 0) { - ALOGE("sharedBuffer must be non-0"); - mStatus = BAD_VALUE; - return; - } mStatus = set(streamType, sampleRate, format, channelMask, 0 /*frameCount*/, flags, cbf, user, notificationFrames, - sharedBuffer, false /*threadCanCallJava*/, sessionId); + sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType); } AudioTrack::~AudioTrack() { - ALOGV_IF(mSharedBuffer != 0, "Destructor sharedBuffer: %p", mSharedBuffer->pointer()); - if (mStatus == NO_ERROR) { // Make sure that callback function exits in the case where // it is looping on buffer full condition in obtainBuffer(). 
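For reference, the getMinFrameCount() arithmetic touched near the top of this file's diff, as a worked example: the client buffer must cover the hardware latency with at least double buffering. The mixer parameters in main() (75 ms latency, 1024-frame HAL buffers at 48 kHz, a 44.1 kHz client) are made up:

    #include <cstdint>
    #include <cstddef>
    #include <cstdio>

    size_t minFrameCount(uint32_t afLatencyMs, size_t afFrameCount, uint32_t afSampleRate,
                         uint32_t clientSampleRate) {
        // how many HAL buffers fit into one hardware latency period
        uint32_t minBufCount = afLatencyMs / ((1000 * afFrameCount) / afSampleRate);
        if (minBufCount < 2) {
            minBufCount = 2;
        }
        return (clientSampleRate == 0) ? afFrameCount * minBufCount :
                afFrameCount * minBufCount * clientSampleRate / afSampleRate;
    }

    int main() {
        // one HAL buffer is ~21 ms here, so minBufCount = 75 / 21 = 3 buffers
        printf("min frames = %zu\n", minFrameCount(75, 1024, 48000, 44100));
        return 0;
    }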
@@ -165,11 +142,13 @@ AudioTrack::~AudioTrack() mAudioTrackThread->requestExitAndWait(); mAudioTrackThread.clear(); } - mAudioTrack.clear(); + if (mAudioTrack != 0) { + mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this); + mAudioTrack.clear(); + } IPCThreadState::self()->flushCommands(); AudioSystem::releaseAudioSessionId(mSessionId); } - delete mProxy; } status_t AudioTrack::set( @@ -184,8 +163,44 @@ status_t AudioTrack::set( int notificationFrames, const sp& sharedBuffer, bool threadCanCallJava, - int sessionId) + int sessionId, + transfer_type transferType) { + switch (transferType) { + case TRANSFER_DEFAULT: + if (sharedBuffer != 0) { + transferType = TRANSFER_SHARED; + } else if (cbf == NULL || threadCanCallJava) { + transferType = TRANSFER_SYNC; + } else { + transferType = TRANSFER_CALLBACK; + } + break; + case TRANSFER_CALLBACK: + if (cbf == NULL || sharedBuffer != 0) { + ALOGE("Transfer type TRANSFER_CALLBACK but cbf == NULL || sharedBuffer != 0"); + return BAD_VALUE; + } + break; + case TRANSFER_OBTAIN: + case TRANSFER_SYNC: + if (sharedBuffer != 0) { + ALOGE("Transfer type TRANSFER_OBTAIN but sharedBuffer != 0"); + return BAD_VALUE; + } + break; + case TRANSFER_SHARED: + if (sharedBuffer == 0) { + ALOGE("Transfer type TRANSFER_SHARED but sharedBuffer == 0"); + return BAD_VALUE; + } + break; + default: + ALOGE("Invalid transfer type %d", transferType); + return BAD_VALUE; + } + mTransfer = transferType; + // FIXME "int" here is legacy and will be replaced by size_t later if (frameCountInt < 0) { ALOGE("Invalid frame count %d", frameCountInt); @@ -199,6 +214,7 @@ status_t AudioTrack::set( ALOGV("set() streamType %d frameCount %u flags %04x", streamType, frameCount, flags); AutoMutex lock(mLock); + if (mAudioTrack != 0) { ALOGE("Track already in use"); return INVALID_OPERATION; @@ -228,7 +244,7 @@ status_t AudioTrack::set( // validate parameters if (!audio_is_valid_format(format)) { - ALOGE("Invalid format"); + ALOGE("Invalid format %d", format); return BAD_VALUE; } @@ -281,6 +297,7 @@ status_t AudioTrack::set( mFrameCount = frameCount; mReqFrameCount = frameCount; mNotificationFramesReq = notificationFrames; + mNotificationFramesAct = 0; mSessionId = sessionId; mAuxEffectId = 0; mFlags = flags; @@ -298,7 +315,8 @@ status_t AudioTrack::set( frameCount, flags, sharedBuffer, - output); + output, + 0 /*epoch*/); if (status != NO_ERROR) { if (mAudioTrackThread != 0) { @@ -309,20 +327,21 @@ status_t AudioTrack::set( } mStatus = NO_ERROR; - mStreamType = streamType; mFormat = format; - mSharedBuffer = sharedBuffer; - mActive = false; + mState = STATE_STOPPED; mUserData = user; - mLoopCount = 0; + mLoopPeriod = 0; mMarkerPosition = 0; mMarkerReached = false; mNewPosition = 0; mUpdatePeriod = 0; - mFlushed = false; AudioSystem::acquireAudioSessionId(mSessionId); + mSequence = 1; + mObservedSequence = mSequence; + mInUnderrun = false; + return NO_ERROR; } @@ -330,87 +349,45 @@ status_t AudioTrack::set( void AudioTrack::start() { - sp t = mAudioTrackThread; - - ALOGV("start %p", this); - AutoMutex lock(mLock); - // acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed - // while we are accessing the cblk - sp audioTrack = mAudioTrack; - sp iMem = mCblkMemory; - audio_track_cblk_t* cblk = mCblk; + if (mState == STATE_ACTIVE) { + return; + } - if (!mActive) { - mFlushed = false; - mActive = true; - mNewPosition = cblk->server + mUpdatePeriod; - cblk->lock.lock(); - cblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; - cblk->waitTimeMs = 0; - 
android_atomic_and(~CBLK_DISABLED, &cblk->flags); - if (t != 0) { - t->resume(); - } else { - mPreviousPriority = getpriority(PRIO_PROCESS, 0); - get_sched_policy(0, &mPreviousSchedulingGroup); - androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO); - } + mInUnderrun = true; - ALOGV("start %p before lock cblk %p", this, cblk); - status_t status = NO_ERROR; - if (!(cblk->flags & CBLK_INVALID)) { - cblk->lock.unlock(); - ALOGV("mAudioTrack->start()"); - status = mAudioTrack->start(); - cblk->lock.lock(); - if (status == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID, &cblk->flags); - } - } - if (cblk->flags & CBLK_INVALID) { - audio_track_cblk_t* temp = cblk; - status = restoreTrack_l(temp, true /*fromStart*/); - cblk = temp; - } - cblk->lock.unlock(); - if (status != NO_ERROR) { - ALOGV("start() failed"); - mActive = false; - if (t != 0) { - t->pause(); - } else { - setpriority(PRIO_PROCESS, 0, mPreviousPriority); - set_sched_policy(0, mPreviousSchedulingGroup); - } - } + State previousState = mState; + mState = STATE_ACTIVE; + if (previousState == STATE_STOPPED || previousState == STATE_FLUSHED) { + // reset current position as seen by client to 0 + mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition()); } + mNewPosition = mProxy->getPosition() + mUpdatePeriod; + int32_t flags = android_atomic_and(~CBLK_DISABLED, &mCblk->flags); -} - -void AudioTrack::stop() -{ sp t = mAudioTrackThread; + if (t != 0) { + t->resume(); + } else { + mPreviousPriority = getpriority(PRIO_PROCESS, 0); + get_sched_policy(0, &mPreviousSchedulingGroup); + androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO); + } - ALOGV("stop %p", this); - - AutoMutex lock(mLock); - if (mActive) { - mActive = false; - mCblk->cv.signal(); - mAudioTrack->stop(); - // Cancel loops (If we are in the middle of a loop, playback - // would not stop until loopCount reaches 0). - setLoop_l(0, 0, 0); - // the playback head position will reset to 0, so if a marker is set, we need - // to activate it again - mMarkerReached = false; - // Force flush if a shared buffer is used otherwise audioflinger - // will not stop before end of buffer is reached. - // It may be needed to make sure that we stop playback, likely in case looping is on. - if (mSharedBuffer != 0) { - flush_l(); + status_t status = NO_ERROR; + if (!(flags & CBLK_INVALID)) { + status = mAudioTrack->start(); + if (status == DEAD_OBJECT) { + flags |= CBLK_INVALID; } + } + if (flags & CBLK_INVALID) { + status = restoreTrack_l("start"); + } + + if (status != NO_ERROR) { + ALOGE("start() status %d", status); + mState = previousState; if (t != 0) { t->pause(); } else { @@ -419,57 +396,85 @@ void AudioTrack::stop() } } + // FIXME discarding status +} + +void AudioTrack::stop() +{ + AutoMutex lock(mLock); + // FIXME pause then stop should not be a nop + if (mState != STATE_ACTIVE) { + return; + } + + mState = STATE_STOPPED; + mProxy->interrupt(); + mAudioTrack->stop(); + // the playback head position will reset to 0, so if a marker is set, we need + // to activate it again + mMarkerReached = false; +#if 0 + // Force flush if a shared buffer is used otherwise audioflinger + // will not stop before end of buffer is reached. + // It may be needed to make sure that we stop playback, likely in case looping is on. 
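start(), a few hunks above, rebases the client-visible position by moving the proxy epoch (setEpoch(getEpoch() - getPosition())) instead of clearing any server-side counter. A deliberately simplified stand-in for the proxy that shows why that identity makes the next reported position read zero, and why the unsigned wraparound involved is harmless; nothing below is real framework code:

    #include <cstdint>
    #include <cstdio>

    // Toy model: the reported position is just an epoch plus the frames the "server" has
    // consumed. Rebasing the epoch is what restarts the report at zero.
    struct ProxyModel {
        uint32_t serverFrames = 0;   // frames consumed since the track was created
        uint32_t epoch = 0;          // client-chosen origin

        uint32_t getPosition() const { return epoch + serverFrames; }  // wraps harmlessly
        uint32_t getEpoch() const { return epoch; }
        void setEpoch(uint32_t e) { epoch = e; }
    };

    int main() {
        ProxyModel proxy;
        proxy.serverFrames = 4800;                       // 100 ms consumed at 48 kHz
        printf("before rebase: %u\n", (unsigned) proxy.getPosition());   // 4800

        // what start() does after a stop/flush: make the next getPosition() read 0
        proxy.setEpoch(proxy.getEpoch() - proxy.getPosition());
        printf("after rebase : %u\n", (unsigned) proxy.getPosition());   // 0

        proxy.serverFrames += 240;                       // server consumes 5 ms more
        printf("a bit later  : %u\n", (unsigned) proxy.getPosition());   // 240
        return 0;
    }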
+ if (mSharedBuffer != 0) { + flush_l(); + } +#endif + sp t = mAudioTrackThread; + if (t != 0) { + t->pause(); + } else { + setpriority(PRIO_PROCESS, 0, mPreviousPriority); + set_sched_policy(0, mPreviousSchedulingGroup); + } } bool AudioTrack::stopped() const { AutoMutex lock(mLock); - return stopped_l(); + return mState != STATE_ACTIVE; } void AudioTrack::flush() { + if (mSharedBuffer != 0) { + return; + } AutoMutex lock(mLock); - if (!mActive && mSharedBuffer == 0) { - flush_l(); + if (mState == STATE_ACTIVE || mState == STATE_FLUSHED) { + return; } + flush_l(); } void AudioTrack::flush_l() { - ALOGV("flush"); - ALOG_ASSERT(!mActive); + ALOG_ASSERT(mState != STATE_ACTIVE); // clear playback marker and periodic update counter mMarkerPosition = 0; mMarkerReached = false; mUpdatePeriod = 0; - mFlushed = true; + mState = STATE_FLUSHED; + mProxy->flush(); mAudioTrack->flush(); - // Release AudioTrack callback thread in case it was waiting for new buffers - // in AudioTrack::obtainBuffer() - mCblk->cv.signal(); } void AudioTrack::pause() { - ALOGV("pause"); AutoMutex lock(mLock); - if (mActive) { - mActive = false; - mCblk->cv.signal(); - mAudioTrack->pause(); + if (mState != STATE_ACTIVE) { + return; } + mState = STATE_PAUSED; + mProxy->interrupt(); + mAudioTrack->pause(); } status_t AudioTrack::setVolume(float left, float right) { - if (mStatus != NO_ERROR) { - return mStatus; - } - ALOG_ASSERT(mProxy != NULL); - if (left < 0.0f || left > 1.0f || right < 0.0f || right > 1.0f) { return BAD_VALUE; } @@ -490,18 +495,11 @@ status_t AudioTrack::setVolume(float volume) status_t AudioTrack::setAuxEffectSendLevel(float level) { - ALOGV("setAuxEffectSendLevel(%f)", level); - - if (mStatus != NO_ERROR) { - return mStatus; - } - ALOG_ASSERT(mProxy != NULL); - if (level < 0.0f || level > 1.0f) { return BAD_VALUE; } - AutoMutex lock(mLock); + AutoMutex lock(mLock); mSendLevel = level; mProxy->setSendLevel(level); @@ -511,18 +509,17 @@ status_t AudioTrack::setAuxEffectSendLevel(float level) void AudioTrack::getAuxEffectSendLevel(float* level) const { if (level != NULL) { - *level = mSendLevel; + *level = mSendLevel; } } status_t AudioTrack::setSampleRate(uint32_t rate) { - uint32_t afSamplingRate; - if (mIsTimed) { return INVALID_OPERATION; } + uint32_t afSamplingRate; if (AudioSystem::getOutputSamplingRate(&afSamplingRate, mStreamType) != NO_ERROR) { return NO_INIT; } @@ -550,78 +547,44 @@ uint32_t AudioTrack::getSampleRate() const status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount) { - AutoMutex lock(mLock); - return setLoop_l(loopStart, loopEnd, loopCount); -} - -// must be called with mLock held -status_t AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount) -{ if (mSharedBuffer == 0 || mIsTimed) { return INVALID_OPERATION; } - if (loopCount < 0 && loopCount != -1) { - return BAD_VALUE; - } - -#if 0 - // This will be for the new interpretation of loopStart and loopEnd - - if (loopCount != 0) { - if (loopStart >= mFrameCount || loopEnd >= mFrameCount || loopStart >= loopEnd) { - return BAD_VALUE; - } - uint32_t periodFrames = loopEnd - loopStart; - if (periodFrames < PERIOD_FRAMES_MIN) { - return BAD_VALUE; - } - } - - // The remainder of this code still uses the old interpretation -#endif - - audio_track_cblk_t* cblk = mCblk; - - Mutex::Autolock _l(cblk->lock); - if (loopCount == 0) { - cblk->loopStart = UINT_MAX; - cblk->loopEnd = UINT_MAX; - cblk->loopCount = 0; - mLoopCount = 0; - return NO_ERROR; - } - - if (loopStart >= loopEnd || - loopEnd - 
loopStart > mFrameCount || - cblk->server > loopStart) { - ALOGE("setLoop invalid value: loopStart %d, loopEnd %d, loopCount %d, framecount %d, " - "user %d", loopStart, loopEnd, loopCount, mFrameCount, cblk->user); + ; + } else if (loopCount >= -1 && loopStart < loopEnd && loopEnd <= mFrameCount && + loopEnd - loopStart >= MIN_LOOP) { + ; + } else { return BAD_VALUE; } - if ((mSharedBuffer != 0) && (loopEnd > mFrameCount)) { - ALOGE("setLoop invalid value: loop markers beyond data: loopStart %d, loopEnd %d, " - "framecount %d", - loopStart, loopEnd, mFrameCount); - return BAD_VALUE; + AutoMutex lock(mLock); + // See setPosition() regarding setting parameters such as loop points or position while active + if (mState == STATE_ACTIVE) { + return INVALID_OPERATION; } - - cblk->loopStart = loopStart; - cblk->loopEnd = loopEnd; - cblk->loopCount = loopCount; - mLoopCount = loopCount; - + setLoop_l(loopStart, loopEnd, loopCount); return NO_ERROR; } +void AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount) +{ + // FIXME If setting a loop also sets position to start of loop, then + // this is correct. Otherwise it should be removed. + mNewPosition = mProxy->getPosition() + mUpdatePeriod; + mLoopPeriod = loopCount != 0 ? loopEnd - loopStart : 0; + mStaticProxy->setLoop(loopStart, loopEnd, loopCount); +} + status_t AudioTrack::setMarkerPosition(uint32_t marker) { if (mCbf == NULL) { return INVALID_OPERATION; } + AutoMutex lock(mLock); mMarkerPosition = marker; mMarkerReached = false; @@ -634,6 +597,7 @@ status_t AudioTrack::getMarkerPosition(uint32_t *marker) const return BAD_VALUE; } + AutoMutex lock(mLock); *marker = mMarkerPosition; return NO_ERROR; @@ -645,9 +609,8 @@ status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod) return INVALID_OPERATION; } - uint32_t curPosition; - getPosition(&curPosition); - mNewPosition = curPosition + updatePeriod; + AutoMutex lock(mLock); + mNewPosition = mProxy->getPosition() + updatePeriod; mUpdatePeriod = updatePeriod; return NO_ERROR; @@ -659,6 +622,7 @@ status_t AudioTrack::getPositionUpdatePeriod(uint32_t *updatePeriod) const return BAD_VALUE; } + AutoMutex lock(mLock); *updatePeriod = mUpdatePeriod; return NO_ERROR; @@ -669,49 +633,44 @@ status_t AudioTrack::setPosition(uint32_t position) if (mSharedBuffer == 0 || mIsTimed) { return INVALID_OPERATION; } - - AutoMutex lock(mLock); - - if (!stopped_l()) { - return INVALID_OPERATION; - } - -#if 0 - // This will be for the new interpretation of position - - if (position >= mFrameCount) { + if (position > mFrameCount) { return BAD_VALUE; } - // The remainder of this code still uses the old interpretation -#endif - - audio_track_cblk_t* cblk = mCblk; - Mutex::Autolock _l(cblk->lock); - - if (position > cblk->user) { - return BAD_VALUE; + AutoMutex lock(mLock); + // Currently we require that the player is inactive before setting parameters such as position + // or loop points. Otherwise, there could be a race condition: the application could read the + // current position, compute a new position or loop parameters, and then set that position or + // loop parameters but it would do the "wrong" thing since the position has continued to advance + // in the mean time. If we ever provide a sequencer in server, we could allow a way for the app + // to specify how it wants to handle such scenarios. 
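The reworked setLoop() above folds its parameter checks into one compound condition. Restated as a pure function for clarity; minLoop stands in for the MIN_LOOP constant from the real sources, and the values in main() are only examples:

    #include <cstdint>
    #include <cstddef>
    #include <cstdio>

    bool loopParamsValid(uint32_t loopStart, uint32_t loopEnd, int loopCount,
                         size_t frameCount, uint32_t minLoop) {
        if (loopCount == 0) {
            return true;                      // "no loop" is always acceptable
        }
        return loopCount >= -1 &&             // -1 means loop forever
               loopStart < loopEnd &&
               loopEnd <= frameCount &&
               loopEnd - loopStart >= minLoop;
    }

    int main() {
        // a 10000-frame static buffer, requiring loops of at least 16 frames
        printf("%d\n", loopParamsValid(1000, 9000, -1, 10000, 16));  // 1: loop forever, in range
        printf("%d\n", loopParamsValid(9000, 1000,  3, 10000, 16));  // 0: start >= end
        printf("%d\n", loopParamsValid(   0,    8,  3, 10000, 16));  // 0: shorter than minLoop
        return 0;
    }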
+ if (mState == STATE_ACTIVE) { + return INVALID_OPERATION; } - - cblk->server = position; - android_atomic_or(CBLK_FORCEREADY, &cblk->flags); + mNewPosition = mProxy->getPosition() + mUpdatePeriod; + mLoopPeriod = 0; + // FIXME Check whether loops and setting position are incompatible in old code. + // If we use setLoop for both purposes we lose the capability to set the position while looping. + mStaticProxy->setLoop(position, mFrameCount, 0); return NO_ERROR; } -status_t AudioTrack::getPosition(uint32_t *position) +status_t AudioTrack::getPosition(uint32_t *position) const { if (position == NULL) { return BAD_VALUE; } + AutoMutex lock(mLock); - *position = mFlushed ? 0 : mCblk->server; + // IAudioTrack::stop() isn't synchronous; we don't know when presentation completes + *position = (mState == STATE_STOPPED || mState == STATE_FLUSHED) ? 0 : + mProxy->getPosition(); return NO_ERROR; } -#if 0 -status_t AudioTrack::getBufferPosition(uint32_t *position) +status_t AudioTrack::getBufferPosition(size_t *position) { if (mSharedBuffer == 0 || mIsTimed) { return INVALID_OPERATION; @@ -719,33 +678,28 @@ status_t AudioTrack::getBufferPosition(uint32_t *position) if (position == NULL) { return BAD_VALUE; } - *position = 0; + AutoMutex lock(mLock); + *position = mStaticProxy->getBufferPosition(); return NO_ERROR; } -#endif status_t AudioTrack::reload() { - if (mStatus != NO_ERROR) { - return mStatus; - } - ALOG_ASSERT(mProxy != NULL); - if (mSharedBuffer == 0 || mIsTimed) { return INVALID_OPERATION; } AutoMutex lock(mLock); - - if (!stopped_l()) { + // See setPosition() regarding setting parameters such as loop points or position while active + if (mState == STATE_ACTIVE) { return INVALID_OPERATION; } - - flush_l(); - - (void) mProxy->stepUser(mFrameCount); - + mNewPosition = mUpdatePeriod; + mLoopPeriod = 0; + // FIXME The new code cannot reload while keeping a loop specified. + // Need to check how the old code handled this, and whether it's a significant change. 
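setPosition() and reload() above both express a static-track reposition as a call of the form mStaticProxy->setLoop(position, end, 0). A toy model of that convention; it assumes that installing a loop also moves the play position to loopStart, which is exactly the open question the FIXME comments raise, and none of the names below are real framework types:

    #include <cstdio>
    #include <cstddef>

    struct StaticTrackModel {
        size_t frameCount;     // total frames in the shared buffer
        size_t position = 0;   // next frame the "server" will render
        size_t loopStart = 0;
        size_t loopEnd;
        int    loopCount = 0;

        explicit StaticTrackModel(size_t frames) : frameCount(frames), loopEnd(frames) {}

        // loopCount == 0 simply moves the play position; a non-zero count also arms looping
        void setLoop(size_t start, size_t end, int count) {
            loopStart = start;
            loopEnd = end;
            loopCount = count;
            position = start;
        }

        // render 'frames' frames, honouring the loop state
        void render(size_t frames) {
            while (frames-- > 0) {
                if (++position == loopEnd && loopCount != 0) {
                    position = loopStart;
                    if (loopCount > 0) {
                        --loopCount;
                    }
                }
            }
        }
    };

    int main() {
        StaticTrackModel t(1000);
        t.setLoop(600, 1000, 0);   // what setPosition(600) does in this sketch: jump, no loop
        t.render(100);
        printf("position after reposition+render = %zu\n", t.position);   // 700
        t.setLoop(100, 200, 2);    // loop [100,200) twice before continuing
        t.render(250);
        printf("position after looped render     = %zu\n", t.position);   // 150
        return 0;
    }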
+ mStaticProxy->setLoop(0, mFrameCount, 0); return NO_ERROR; } @@ -764,7 +718,7 @@ audio_io_handle_t AudioTrack::getOutput_l() status_t AudioTrack::attachAuxEffect(int effectId) { - ALOGV("attachAuxEffect(%d)", effectId); + AutoMutex lock(mLock); status_t status = mAudioTrack->attachAuxEffect(effectId); if (status == NO_ERROR) { mAuxEffectId = effectId; @@ -782,7 +736,8 @@ status_t AudioTrack::createTrack_l( size_t frameCount, audio_output_flags_t flags, const sp& sharedBuffer, - audio_io_handle_t output) + audio_io_handle_t output, + size_t epoch) { status_t status; const sp& audioFlinger = AudioSystem::get_audio_flinger(); @@ -792,7 +747,8 @@ status_t AudioTrack::createTrack_l( } uint32_t afLatency; - if (AudioSystem::getLatency(output, streamType, &afLatency) != NO_ERROR) { + if ((status = AudioSystem::getLatency(output, streamType, &afLatency)) != NO_ERROR) { + ALOGE("getLatency(%d) failed status %d", output, status); return NO_INIT; } @@ -820,7 +776,10 @@ status_t AudioTrack::createTrack_l( frameCount = sharedBuffer->size(); } else if (frameCount == 0) { size_t afFrameCount; - if (AudioSystem::getFrameCount(output, streamType, &afFrameCount) != NO_ERROR) { + status = AudioSystem::getFrameCount(output, streamType, &afFrameCount); + if (status != NO_ERROR) { + ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType, + status); return NO_INIT; } frameCount = afFrameCount; @@ -851,11 +810,16 @@ status_t AudioTrack::createTrack_l( // FIXME move these calculations and associated checks to server uint32_t afSampleRate; - if (AudioSystem::getSamplingRate(output, streamType, &afSampleRate) != NO_ERROR) { + status = AudioSystem::getSamplingRate(output, streamType, &afSampleRate); + if (status != NO_ERROR) { + ALOGE("getSamplingRate(output=%d, streamType=%d) status %d", output, streamType, + status); return NO_INIT; } size_t afFrameCount; - if (AudioSystem::getFrameCount(output, streamType, &afFrameCount) != NO_ERROR) { + status = AudioSystem::getFrameCount(output, streamType, &afFrameCount); + if (status != NO_ERROR) { + ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType, status); return NO_INIT; } @@ -875,12 +839,9 @@ status_t AudioTrack::createTrack_l( if (frameCount == 0) { frameCount = minFrameCount; } - if (mNotificationFramesAct == 0) { - mNotificationFramesAct = frameCount/2; - } // Make sure that application is notified with sufficient margin // before underrun - if (mNotificationFramesAct > frameCount/2) { + if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) { mNotificationFramesAct = frameCount/2; } if (frameCount < minFrameCount) { @@ -930,6 +891,10 @@ status_t AudioTrack::createTrack_l( ALOGE("Could not get control block"); return NO_INIT; } + if (mAudioTrack != 0) { + mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this); + mDeathNotifier.clear(); + } mAudioTrack = track; mCblkMemory = iMem; audio_track_cblk_t* cblk = static_cast(iMem->pointer()); @@ -947,26 +912,38 @@ status_t AudioTrack::createTrack_l( if (trackFlags & IAudioFlinger::TRACK_FAST) { ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", frameCount); mAwaitBoost = true; + if (sharedBuffer == 0) { + // double-buffering is not required for fast tracks, due to tighter scheduling + if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount) { + mNotificationFramesAct = frameCount; + } + } } else { ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %u", frameCount); // once denied, do not request again if IAudioTrack is 
re-created flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST); mFlags = flags; - } - if (sharedBuffer == 0) { - mNotificationFramesAct = frameCount/2; + if (sharedBuffer == 0) { + if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) { + mNotificationFramesAct = frameCount/2; + } + } } } + mRefreshRemaining = true; + + // Starting address of buffers in shared memory. If there is a shared buffer, buffers + // is the value of pointer() for the shared buffer, otherwise buffers points + // immediately after the control block. This address is for the mapping within client + // address space. AudioFlinger::TrackBase::mBuffer is for the server address space. + void* buffers; if (sharedBuffer == 0) { - mBuffers = (char*)cblk + sizeof(audio_track_cblk_t); + buffers = (char*)cblk + sizeof(audio_track_cblk_t); } else { - mBuffers = sharedBuffer->pointer(); + buffers = sharedBuffer->pointer(); } mAudioTrack->attachAuxEffect(mAuxEffectId); - cblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; - cblk->waitTimeMs = 0; - mRemainingFrames = mNotificationFramesAct; // FIXME don't believe this lie mLatency = afLatency + (1000*frameCount) / sampleRate; mFrameCount = frameCount; @@ -977,147 +954,143 @@ status_t AudioTrack::createTrack_l( } // update proxy - delete mProxy; - mProxy = new AudioTrackClientProxy(cblk, mBuffers, frameCount, mFrameSizeAF); + if (sharedBuffer == 0) { + mStaticProxy.clear(); + mProxy = new AudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF); + } else { + mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF); + mProxy = mStaticProxy; + } mProxy->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) | uint16_t(mVolume[LEFT] * 0x1000)); mProxy->setSendLevel(mSendLevel); mProxy->setSampleRate(mSampleRate); - if (sharedBuffer != 0) { - // Force buffer full condition as data is already present in shared memory - mProxy->stepUser(frameCount); - } + mProxy->setEpoch(epoch); + mProxy->setMinimum(mNotificationFramesAct); + + mDeathNotifier = new DeathNotifier(this); + mAudioTrack->asBinder()->linkToDeath(mDeathNotifier, this); return NO_ERROR; } status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) { - ALOG_ASSERT(mStatus == NO_ERROR && mProxy != NULL); + if (audioBuffer == NULL) { + return BAD_VALUE; + } + if (mTransfer != TRANSFER_OBTAIN) { + audioBuffer->frameCount = 0; + audioBuffer->size = 0; + audioBuffer->raw = NULL; + return INVALID_OPERATION; + } + + const struct timespec *requested; + if (waitCount == -1) { + requested = &ClientProxy::kForever; + } else if (waitCount == 0) { + requested = &ClientProxy::kNonBlocking; + } else if (waitCount > 0) { + long long ms = WAIT_PERIOD_MS * (long long) waitCount; + struct timespec timeout; + timeout.tv_sec = ms / 1000; + timeout.tv_nsec = (int) (ms % 1000) * 1000000; + requested = &timeout; + } else { + ALOGE("%s invalid waitCount %d", __func__, waitCount); + requested = NULL; + } + return obtainBuffer(audioBuffer, requested); +} + +status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, const struct timespec *requested, + struct timespec *elapsed, size_t *nonContig) +{ + // previous and new IAudioTrack sequence numbers are used to detect track re-creation + uint32_t oldSequence = 0; + uint32_t newSequence; - AutoMutex lock(mLock); - bool active; - status_t result = NO_ERROR; - audio_track_cblk_t* cblk = mCblk; - uint32_t framesReq = audioBuffer->frameCount; - uint32_t waitTimeMs = (waitCount < 0) ? 
cblk->bufferTimeoutMs : WAIT_PERIOD_MS; + Proxy::Buffer buffer; + status_t status = NO_ERROR; - audioBuffer->frameCount = 0; - audioBuffer->size = 0; + static const int32_t kMaxTries = 5; + int32_t tryCounter = kMaxTries; - size_t framesAvail = mProxy->framesAvailable(); + do { + // obtainBuffer() is called with mutex unlocked, so keep extra references to these fields to + // keep them from going away if another thread re-creates the track during obtainBuffer() + sp proxy; + sp iMem; - cblk->lock.lock(); - if (cblk->flags & CBLK_INVALID) { - goto create_new_track; - } - cblk->lock.unlock(); - - if (framesAvail == 0) { - cblk->lock.lock(); - goto start_loop_here; - while (framesAvail == 0) { - active = mActive; - if (CC_UNLIKELY(!active)) { - ALOGV("Not active and NO_MORE_BUFFERS"); - cblk->lock.unlock(); - return NO_MORE_BUFFERS; - } - if (CC_UNLIKELY(!waitCount)) { - cblk->lock.unlock(); - return WOULD_BLOCK; - } - if (!(cblk->flags & CBLK_INVALID)) { - mLock.unlock(); - // this condition is in shared memory, so if IAudioTrack and control block - // are replaced due to mediaserver death or IAudioTrack invalidation then - // cv won't be signalled, but fortunately the timeout will limit the wait - result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); - cblk->lock.unlock(); - mLock.lock(); - if (!mActive) { - return status_t(STOPPED); - } - // IAudioTrack may have been re-created while mLock was unlocked - cblk = mCblk; - cblk->lock.lock(); - } + { // start of lock scope + AutoMutex lock(mLock); - if (cblk->flags & CBLK_INVALID) { - goto create_new_track; - } - if (CC_UNLIKELY(result != NO_ERROR)) { - cblk->waitTimeMs += waitTimeMs; - if (cblk->waitTimeMs >= cblk->bufferTimeoutMs) { - // timing out when a loop has been set and we have already written upto loop end - // is a normal condition: no need to wake AudioFlinger up. - if (cblk->user < cblk->loopEnd) { - ALOGW("obtainBuffer timed out (is the CPU pegged?) %p name=%#x user=%08x, " - "server=%08x", this, cblk->mName, cblk->user, cblk->server); - //unlock cblk mutex before calling mAudioTrack->start() (see issue #1617140) - cblk->lock.unlock(); - result = mAudioTrack->start(); - cblk->lock.lock(); - if (result == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID, &cblk->flags); -create_new_track: - audio_track_cblk_t* temp = cblk; - result = restoreTrack_l(temp, false /*fromStart*/); - cblk = temp; - } - if (result != NO_ERROR) { - ALOGW("obtainBuffer create Track error %d", result); - cblk->lock.unlock(); - return result; - } + newSequence = mSequence; + // did previous obtainBuffer() fail due to media server death or voluntary invalidation? 
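Earlier, in the createTrack_l() hunk, the left/right gains are packed into a single word for the proxy as 4.12 fixed point (unity gain = 0x1000), with the right channel in the high half-word. A standalone illustration of that packing; packVolumeLR() is a name invented here:

    #include <cstdint>
    #include <cstdio>

    // Pack two [0.0, 1.0] channel gains into one uint32_t as 4.12 fixed point.
    uint32_t packVolumeLR(float left, float right) {
        uint16_t l = (uint16_t)(left  * 0x1000);
        uint16_t r = (uint16_t)(right * 0x1000);
        return ((uint32_t) r << 16) | l;
    }

    int main() {
        // full volume on both channels packs to 0x1000 in each half-word
        printf("1.0/1.0 -> 0x%08x\n", packVolumeLR(1.0f, 1.0f));   // 0x10001000
        // half volume left, full volume right
        printf("0.5/1.0 -> 0x%08x\n", packVolumeLR(0.5f, 1.0f));   // 0x10000800
        return 0;
    }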
+ if (status == DEAD_OBJECT) { + // re-create track, unless someone else has already done so + if (newSequence == oldSequence) { + status = restoreTrack_l("obtainBuffer"); + if (status != NO_ERROR) { + break; } - cblk->waitTimeMs = 0; } + } + oldSequence = newSequence; - if (--waitCount == 0) { - cblk->lock.unlock(); - return TIMED_OUT; - } + // Keep the extra references + proxy = mProxy; + iMem = mCblkMemory; + + // Non-blocking if track is stopped or paused + if (mState != STATE_ACTIVE) { + requested = &ClientProxy::kNonBlocking; } - // read the server count again - start_loop_here: - framesAvail = mProxy->framesAvailable_l(); - } - cblk->lock.unlock(); - } - cblk->waitTimeMs = 0; + } // end of lock scope - if (framesReq > framesAvail) { - framesReq = framesAvail; - } + buffer.mFrameCount = audioBuffer->frameCount; + // FIXME starts the requested timeout and elapsed over from scratch + status = proxy->obtainBuffer(&buffer, requested, elapsed); - uint32_t u = cblk->user; - uint32_t bufferEnd = cblk->userBase + mFrameCount; + } while ((status == DEAD_OBJECT) && (tryCounter-- > 0)); - if (framesReq > bufferEnd - u) { - framesReq = bufferEnd - u; + audioBuffer->frameCount = buffer.mFrameCount; + audioBuffer->size = buffer.mFrameCount * mFrameSizeAF; + audioBuffer->raw = buffer.mRaw; + if (nonContig != NULL) { + *nonContig = buffer.mNonContig; } - - audioBuffer->frameCount = framesReq; - audioBuffer->size = framesReq * mFrameSizeAF; - audioBuffer->raw = mProxy->buffer(u); - active = mActive; - return active ? status_t(NO_ERROR) : status_t(STOPPED); + return status; } void AudioTrack::releaseBuffer(Buffer* audioBuffer) { - ALOG_ASSERT(mStatus == NO_ERROR && mProxy != NULL); + if (mTransfer == TRANSFER_SHARED) { + return; + } + + size_t stepCount = audioBuffer->size / mFrameSizeAF; + if (stepCount == 0) { + return; + } + + Proxy::Buffer buffer; + buffer.mFrameCount = stepCount; + buffer.mRaw = audioBuffer->raw; AutoMutex lock(mLock); - audio_track_cblk_t* cblk = mCblk; - (void) mProxy->stepUser(audioBuffer->frameCount); - if (audioBuffer->frameCount > 0) { - // restart track if it was disabled by audioflinger due to previous underrun - if (mActive && (cblk->flags & CBLK_DISABLED)) { - android_atomic_and(~CBLK_DISABLED, &cblk->flags); - ALOGW("releaseBuffer() track %p name=%#x disabled, restarting", this, cblk->mName); + mInUnderrun = false; + mProxy->releaseBuffer(&buffer); + + // restart track if it was disabled by audioflinger due to previous underrun + if (mState == STATE_ACTIVE) { + audio_track_cblk_t* cblk = mCblk; + if (android_atomic_and(~CBLK_DISABLED, &cblk->flags) & CBLK_DISABLED) { + ALOGW("releaseBuffer() track %p name=%#x disabled due to previous underrun, restarting", + this, cblk->mName); + // FIXME ignoring status mAudioTrack->start(); } } @@ -1127,68 +1100,46 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer) ssize_t AudioTrack::write(const void* buffer, size_t userSize) { - - if (mSharedBuffer != 0 || mIsTimed) { + if (mTransfer != TRANSFER_SYNC || mIsTimed) { return INVALID_OPERATION; } - if (ssize_t(userSize) < 0) { + if (ssize_t(userSize) < 0 || (buffer == NULL && userSize != 0)) { // Sanity-check: user is most-likely passing an error code, and it would // make the return value ambiguous (actualSize vs error). 
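The restart-on-underrun check in releaseBuffer() above depends on android_atomic_and() returning the flags value from before the bit was cleared, so clearing CBLK_DISABLED and testing it happen in one atomic step. The same pattern expressed with std::atomic; the flag value and the names are local to this sketch:

    #include <atomic>
    #include <cstdint>
    #include <cstdio>

    static const int32_t CBLK_DISABLED_SKETCH = 0x08;   // illustrative bit, not the real layout

    // Clears the disabled bit and reports whether it was set, i.e. whether a restart is needed.
    bool clearDisabledAndCheck(std::atomic<int32_t>& flags) {
        int32_t previous = flags.fetch_and(~CBLK_DISABLED_SKETCH);
        return (previous & CBLK_DISABLED_SKETCH) != 0;
    }

    int main() {
        std::atomic<int32_t> flags(CBLK_DISABLED_SKETCH);   // server disabled the track
        printf("restart needed: %d\n", clearDisabledAndCheck(flags));  // 1, and bit now clear
        printf("restart needed: %d\n", clearDisabledAndCheck(flags));  // 0 on the second call
        return 0;
    }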
- ALOGE("AudioTrack::write(buffer=%p, size=%u (%d)", - buffer, userSize, userSize); + ALOGE("AudioTrack::write(buffer=%p, size=%u (%d)", buffer, userSize, userSize); return BAD_VALUE; } - ALOGV("write %p: %d bytes, mActive=%d", this, userSize, mActive); - - if (userSize == 0) { - return 0; - } - - // acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed - // while we are accessing the cblk - mLock.lock(); - sp audioTrack = mAudioTrack; - sp iMem = mCblkMemory; - mLock.unlock(); - - // since mLock is unlocked the IAudioTrack and shared memory may be re-created, - // so all cblk references might still refer to old shared memory, but that should be benign - - ssize_t written = 0; - const int8_t *src = (const int8_t *)buffer; + size_t written = 0; Buffer audioBuffer; - size_t frameSz = frameSize(); - do { - audioBuffer.frameCount = userSize/frameSz; + while (userSize >= mFrameSize) { + audioBuffer.frameCount = userSize / mFrameSize; - status_t err = obtainBuffer(&audioBuffer, -1); + status_t err = obtainBuffer(&audioBuffer, &ClientProxy::kForever); if (err < 0) { - // out of buffers, return #bytes written - if (err == status_t(NO_MORE_BUFFERS)) { + if (written > 0) { break; } return ssize_t(err); } size_t toWrite; - if (mFormat == AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) { // Divide capacity by 2 to take expansion into account - toWrite = audioBuffer.size>>1; - memcpy_to_i16_from_u8(audioBuffer.i16, (const uint8_t *) src, toWrite); + toWrite = audioBuffer.size >> 1; + memcpy_to_i16_from_u8(audioBuffer.i16, (const uint8_t *) buffer, toWrite); } else { toWrite = audioBuffer.size; - memcpy(audioBuffer.i8, src, toWrite); + memcpy(audioBuffer.i8, buffer, toWrite); } - src += toWrite; + buffer = ((const char *) buffer) + toWrite; userSize -= toWrite; written += toWrite; releaseBuffer(&audioBuffer); - } while (userSize >= frameSz); + } return written; } @@ -1204,10 +1155,12 @@ status_t TimedAudioTrack::allocateTimedBuffer(size_t size, sp* buffer) AutoMutex lock(mLock); status_t result = UNKNOWN_ERROR; +#if 1 // acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed // while we are accessing the cblk sp audioTrack = mAudioTrack; sp iMem = mCblkMemory; +#endif // If the track is not invalid already, try to allocate a buffer. alloc // fails indicating that the server is dead, flag the track as invalid so @@ -1223,13 +1176,9 @@ status_t TimedAudioTrack::allocateTimedBuffer(size_t size, sp* buffer) // If the track is invalid at this point, attempt to restore it. and try the // allocation one more time. 
if (cblk->flags & CBLK_INVALID) { - cblk->lock.lock(); - audio_track_cblk_t* temp = cblk; - result = restoreTrack_l(temp, false /*fromStart*/); - cblk = temp; - cblk->lock.unlock(); + result = restoreTrack_l("allocateTimedBuffer"); - if (result == OK) { + if (result == NO_ERROR) { result = mAudioTrack->allocateTimedBuffer(size, buffer); } } @@ -1246,9 +1195,10 @@ status_t TimedAudioTrack::queueTimedBuffer(const sp& buffer, audio_track_cblk_t* cblk = mCblk; // restart track if it was disabled by audioflinger due to previous underrun if (buffer->size() != 0 && status == NO_ERROR && - mActive && (cblk->flags & CBLK_DISABLED)) { + (mState == STATE_ACTIVE) && (cblk->flags & CBLK_DISABLED)) { android_atomic_and(~CBLK_DISABLED, &cblk->flags); ALOGW("queueTimedBuffer() track %p disabled, restarting", this); + // FIXME ignoring status mAudioTrack->start(); } } @@ -1263,12 +1213,8 @@ status_t TimedAudioTrack::setMediaTimeTransform(const LinearTransform& xform, // ------------------------------------------------------------------------- -bool AudioTrack::processAudioBuffer(const sp& thread) +nsecs_t AudioTrack::processAudioBuffer(const sp& thread) { - Buffer audioBuffer; - uint32_t frames; - size_t writtenSize; - mLock.lock(); if (mAwaitBoost) { mAwaitBoost = false; @@ -1289,86 +1235,181 @@ bool AudioTrack::processAudioBuffer(const sp& thread) } return true; } - // acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed - // while we are accessing the cblk - sp audioTrack = mAudioTrack; - sp iMem = mCblkMemory; - audio_track_cblk_t* cblk = mCblk; - bool active = mActive; - mLock.unlock(); - // since mLock is unlocked the IAudioTrack and shared memory may be re-created, - // so all cblk references might still refer to old shared memory, but that should be benign + // Can only reference mCblk while locked + int32_t flags = android_atomic_and( + ~(CBLK_UNDERRUN | CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL | CBLK_BUFFER_END), &mCblk->flags); - // Manage underrun callback - if (active && (mProxy->framesAvailable() == mFrameCount)) { - ALOGV("Underrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); - if (!(android_atomic_or(CBLK_UNDERRUN, &cblk->flags) & CBLK_UNDERRUN)) { - mCbf(EVENT_UNDERRUN, mUserData, 0); - if (cblk->server == mFrameCount) { - mCbf(EVENT_BUFFER_END, mUserData, 0); - } - if (mSharedBuffer != 0) { - return false; - } - } + // Check for track invalidation + if (flags & CBLK_INVALID) { + (void) restoreTrack_l("processAudioBuffer"); + mLock.unlock(); + // Run again immediately, but with a new IAudioTrack + return 0; } - // Manage loop end callback - while (mLoopCount > cblk->loopCount) { - int loopCount = -1; - mLoopCount--; - if (mLoopCount >= 0) loopCount = mLoopCount; + bool active = mState == STATE_ACTIVE; - mCbf(EVENT_LOOP_END, mUserData, (void *)&loopCount); + // Manage underrun callback, must be done under lock to avoid race with releaseBuffer() + bool newUnderrun = false; + if (flags & CBLK_UNDERRUN) { +#if 0 + // Currently in shared buffer mode, when the server reaches the end of buffer, + // the track stays active in continuous underrun state. It's up to the application + // to pause or stop the track, or set the position to a new offset within buffer. + // This was some experimental code to auto-pause on underrun. Keeping it here + // in "if 0" so we can re-visit this if we add a real sequencer for shared memory content. 
+ if (mTransfer == TRANSFER_SHARED) { + mState = STATE_PAUSED; + active = false; + } +#endif + if (!mInUnderrun) { + mInUnderrun = true; + newUnderrun = true; + } } + // Get current position of server + size_t position = mProxy->getPosition(); + // Manage marker callback - if (!mMarkerReached && (mMarkerPosition > 0)) { - if (cblk->server >= mMarkerPosition) { - mCbf(EVENT_MARKER, mUserData, (void *)&mMarkerPosition); - mMarkerReached = true; - } + bool markerReached = false; + size_t markerPosition = mMarkerPosition; + // FIXME fails for wraparound, need 64 bits + if (!mMarkerReached && (markerPosition > 0) && (position >= markerPosition)) { + mMarkerReached = markerReached = true; + } + + // Determine number of new position callback(s) that will be needed, while locked + size_t newPosCount = 0; + size_t newPosition = mNewPosition; + size_t updatePeriod = mUpdatePeriod; + // FIXME fails for wraparound, need 64 bits + if (updatePeriod > 0 && position >= newPosition) { + newPosCount = ((position - newPosition) / updatePeriod) + 1; + mNewPosition += updatePeriod * newPosCount; + } + + // Cache other fields that will be needed soon + uint32_t loopPeriod = mLoopPeriod; + uint32_t sampleRate = mSampleRate; + size_t notificationFrames = mNotificationFramesAct; + if (mRefreshRemaining) { + mRefreshRemaining = false; + mRemainingFrames = notificationFrames; + mRetryOnPartialBuffer = false; + } + size_t misalignment = mProxy->getMisalignment(); + int32_t sequence = mSequence; + + // These fields don't need to be cached, because they are assigned only by set(): + // mTransfer, mCbf, mUserData, mFormat, mFrameSize, mFrameSizeAF, mFlags + // mFlags is also assigned by createTrack_l(), but not the bit we care about. + + mLock.unlock(); + + // perform callbacks while unlocked + if (newUnderrun) { + mCbf(EVENT_UNDERRUN, mUserData, NULL); + } + // FIXME we will miss loops if loop cycle was signaled several times since last call + // to processAudioBuffer() + if (flags & (CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL)) { + mCbf(EVENT_LOOP_END, mUserData, NULL); + } + if (flags & CBLK_BUFFER_END) { + mCbf(EVENT_BUFFER_END, mUserData, NULL); + } + if (markerReached) { + mCbf(EVENT_MARKER, mUserData, &markerPosition); + } + while (newPosCount > 0) { + size_t temp = newPosition; + mCbf(EVENT_NEW_POS, mUserData, &temp); + newPosition += updatePeriod; + newPosCount--; + } + if (mObservedSequence != sequence) { + mObservedSequence = sequence; + mCbf(EVENT_NEW_IAUDIOTRACK, mUserData, NULL); } - // Manage new position callback - if (mUpdatePeriod > 0) { - while (cblk->server >= mNewPosition) { - mCbf(EVENT_NEW_POS, mUserData, (void *)&mNewPosition); - mNewPosition += mUpdatePeriod; - } + // if inactive, then don't run me again until re-started + if (!active) { + return NS_INACTIVE; } - // If Shared buffer is used, no data is requested from client. - if (mSharedBuffer != 0) { - frames = 0; - } else { - frames = mRemainingFrames; + // Compute the estimated time until the next timed event (position, markers, loops) + // FIXME only for non-compressed audio + uint32_t minFrames = ~0; + if (!markerReached && position < markerPosition) { + minFrames = markerPosition - position; + } + if (loopPeriod > 0 && loopPeriod < minFrames) { + minFrames = loopPeriod; + } + if (updatePeriod > 0 && updatePeriod < minFrames) { + minFrames = updatePeriod; } - // See description of waitCount parameter at declaration of obtainBuffer(). 
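The new-position bookkeeping above computes, under the lock, how many EVENT_NEW_POS callbacks are due and advances the next target in one step; the callbacks themselves are issued after the lock is dropped. For example, with updatePeriod = 480 frames and a pending target of 960, a server position of 2000 yields ((2000 - 960) / 480) + 1 = 3 callbacks (for 960, 1440 and 1920) and a new target of 2400. A small check of the same arithmetic, with wraparound ignored as the FIXME in the hunk notes:

#include <cassert>
#include <cstddef>

// Number of position callbacks due once the server has reached 'position',
// given the next pending target and the update period (same arithmetic as the diff).
size_t dueCallbacks(size_t position, size_t nextTarget, size_t updatePeriod) {
    if (updatePeriod == 0 || position < nextTarget) {
        return 0;
    }
    return (position - nextTarget) / updatePeriod + 1;
}

int main() {
    assert(dueCallbacks(2000, 960, 480) == 3);   // targets 960, 1440, 1920 were passed
    assert(dueCallbacks(950, 960, 480) == 0);    // target not reached yet
    assert(dueCallbacks(960, 960, 480) == 1);    // exactly on target
    return 0;
}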
- // The logic below prevents us from being stuck below at obtainBuffer() - // not being able to handle timed events (position, markers, loops). - int32_t waitCount = -1; - if (mUpdatePeriod || (!mMarkerReached && mMarkerPosition) || mLoopCount) { - waitCount = 1; + // If > 0, poll periodically to recover from a stuck server. A good value is 2. + static const uint32_t kPoll = 0; + if (kPoll > 0 && mTransfer == TRANSFER_CALLBACK && kPoll * notificationFrames < minFrames) { + minFrames = kPoll * notificationFrames; } - do { + // Convert frame units to time units + nsecs_t ns = NS_WHENEVER; + if (minFrames != (uint32_t) ~0) { + // This "fudge factor" avoids soaking CPU, and compensates for late progress by server + static const nsecs_t kFudgeNs = 10000000LL; // 10 ms + ns = ((minFrames * 1000000000LL) / sampleRate) + kFudgeNs; + } + + // If not supplying data by EVENT_MORE_DATA, then we're done + if (mTransfer != TRANSFER_CALLBACK) { + return ns; + } - audioBuffer.frameCount = frames; + struct timespec timeout; + const struct timespec *requested = &ClientProxy::kForever; + if (ns != NS_WHENEVER) { + timeout.tv_sec = ns / 1000000000LL; + timeout.tv_nsec = ns % 1000000000LL; + ALOGV("timeout %ld.%03d", timeout.tv_sec, (int) timeout.tv_nsec / 1000000); + requested = &timeout; + } + + while (mRemainingFrames > 0) { - status_t err = obtainBuffer(&audioBuffer, waitCount); - if (err < NO_ERROR) { - if (err != TIMED_OUT) { - ALOGE_IF(err != status_t(NO_MORE_BUFFERS), - "Error obtaining an audio buffer, giving up."); - return false; + Buffer audioBuffer; + audioBuffer.frameCount = mRemainingFrames; + size_t nonContig; + status_t err = obtainBuffer(&audioBuffer, requested, NULL, &nonContig); + LOG_ALWAYS_FATAL_IF((err != NO_ERROR) != (audioBuffer.frameCount == 0), + "obtainBuffer() err=%d frameCount=%u", err, audioBuffer.frameCount); + requested = &ClientProxy::kNonBlocking; + size_t avail = audioBuffer.frameCount + nonContig; + ALOGV("obtainBuffer(%u) returned %u = %u + %u", + mRemainingFrames, avail, audioBuffer.frameCount, nonContig); + if (err != NO_ERROR) { + if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR) { + return 0; } - break; + ALOGE("Error %d obtaining an audio buffer, giving up.", err); + return NS_NEVER; } - if (err == status_t(STOPPED)) { - return false; + + if (mRetryOnPartialBuffer) { + mRetryOnPartialBuffer = false; + if (avail < mRemainingFrames) { + int64_t myns = ((mRemainingFrames - avail) * 1100000000LL) / sampleRate; + if (ns < 0 || myns < ns) { + ns = myns; + } + return ns; + } } // Divide buffer size by 2 to take into account the expansion @@ -1380,66 +1421,76 @@ bool AudioTrack::processAudioBuffer(const sp& thread) size_t reqSize = audioBuffer.size; mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer); - writtenSize = audioBuffer.size; + size_t writtenSize = audioBuffer.size; + size_t writtenFrames = writtenSize / mFrameSize; // Sanity check on returned size - if (ssize_t(writtenSize) <= 0) { + if (ssize_t(writtenSize) < 0 || writtenSize > reqSize) { + ALOGE("EVENT_MORE_DATA requested %u bytes but callback returned %d bytes", + reqSize, (int) writtenSize); + return NS_NEVER; + } + + if (writtenSize == 0) { // The callback is done filling buffers // Keep this thread going to handle timed events and // still try to get more data in intervals of WAIT_PERIOD_MS // but don't just loop and block the CPU, so wait - usleep(WAIT_PERIOD_MS*1000); - break; - } - - if (writtenSize > reqSize) { - writtenSize = reqSize; + return WAIT_PERIOD_MS * 1000000LL; } if (mFormat == 
AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) { // 8 to 16 bit conversion, note that source and destination are the same address memcpy_to_i16_from_u8(audioBuffer.i16, (const uint8_t *) audioBuffer.i8, writtenSize); - writtenSize <<= 1; + audioBuffer.size <<= 1; } - audioBuffer.size = writtenSize; - // NOTE: cblk->frameSize is not equal to AudioTrack::frameSize() for - // 8 bit PCM data: in this case, cblk->frameSize is based on a sample size of - // 16 bit. - audioBuffer.frameCount = writtenSize / mFrameSizeAF; - - frames -= audioBuffer.frameCount; + size_t releasedFrames = audioBuffer.size / mFrameSizeAF; + audioBuffer.frameCount = releasedFrames; + mRemainingFrames -= releasedFrames; + if (misalignment >= releasedFrames) { + misalignment -= releasedFrames; + } else { + misalignment = 0; + } releaseBuffer(&audioBuffer); - } - while (frames); - if (frames == 0) { - mRemainingFrames = mNotificationFramesAct; - } else { - mRemainingFrames = frames; + // FIXME here is where we would repeat EVENT_MORE_DATA again on same advanced buffer + // if callback doesn't like to accept the full chunk + if (writtenSize < reqSize) { + continue; + } + + // There could be enough non-contiguous frames available to satisfy the remaining request + if (mRemainingFrames <= nonContig) { + continue; + } + +#if 0 + // This heuristic tries to collapse a series of EVENT_MORE_DATA that would total to a + // sum <= notificationFrames. It replaces that series by at most two EVENT_MORE_DATA + // that total to a sum == notificationFrames. + if (0 < misalignment && misalignment <= mRemainingFrames) { + mRemainingFrames = misalignment; + return (mRemainingFrames * 1100000000LL) / sampleRate; + } +#endif + } - return true; + mRemainingFrames = notificationFrames; + mRetryOnPartialBuffer = true; + + // A lot has transpired since ns was calculated, so run again immediately and re-calculate + return 0; } -// must be called with mLock and refCblk.lock held. Callers must also hold strong references on -// the IAudioTrack and IMemory in case they are recreated here. -// If the IAudioTrack is successfully restored, the refCblk pointer is updated -// FIXME Don't depend on caller to hold strong references. -status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart) +status_t AudioTrack::restoreTrack_l(const char *from) { + ALOGW("dead IAudioTrack, creating a new one from %s()", from); + ++mSequence; status_t result; - audio_track_cblk_t* cblk = refCblk; - audio_track_cblk_t* newCblk = cblk; - ALOGW("dead IAudioTrack, creating a new one from %s", - fromStart ? "start()" : "obtainBuffer()"); - - // signal old cblk condition so that other threads waiting for available buffers stop - // waiting now - cblk->cv.broadcast(); - cblk->lock.unlock(); - // refresh the audio configuration cache in this process to make sure we get new // output parameters in getOutput_l() and createTrack_l() AudioSystem::clearAudioConfigCache(); @@ -1447,68 +1498,47 @@ status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart // if the new IAudioTrack is created, createTrack_l() will modify the // following member variables: mAudioTrack, mCblkMemory and mCblk. // It will also delete the strong references on previous IAudioTrack and IMemory + size_t position = mProxy->getPosition(); + mNewPosition = position + mUpdatePeriod; + size_t bufferPosition = mStaticProxy != NULL ? 
mStaticProxy->getBufferPosition() : 0; result = createTrack_l(mStreamType, mSampleRate, mFormat, mReqFrameCount, // so that frame count never goes down mFlags, mSharedBuffer, - getOutput_l()); + getOutput_l(), + position /*epoch*/); if (result == NO_ERROR) { - uint32_t user = cblk->user; - uint32_t server = cblk->server; + // continue playback from last known position, but + // don't attempt to restore loop after invalidation; it's difficult and not worthwhile + if (mStaticProxy != NULL) { + mLoopPeriod = 0; + mStaticProxy->setLoop(bufferPosition, mFrameCount, 0); + } + // FIXME How do we simulate the fact that all frames present in the buffer at the time of + // track destruction have been played? This is critical for SoundPool implementation + // This must be broken, and needs to be tested/debugged. +#if 0 // restore write index and set other indexes to reflect empty buffer status - newCblk = mCblk; - newCblk->user = user; - newCblk->server = user; - newCblk->userBase = user; - newCblk->serverBase = user; - // restore loop: this is not guaranteed to succeed if new frame count is not - // compatible with loop length - setLoop_l(cblk->loopStart, cblk->loopEnd, cblk->loopCount); - size_t frames = 0; - if (!fromStart) { - newCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; + if (!strcmp(from, "start")) { // Make sure that a client relying on callback events indicating underrun or // the actual amount of audio frames played (e.g SoundPool) receives them. if (mSharedBuffer == 0) { - if (user > server) { - frames = ((user - server) > mFrameCount) ? - mFrameCount : (user - server); - memset(mBuffers, 0, frames * mFrameSizeAF); - } // restart playback even if buffer is not completely filled. - android_atomic_or(CBLK_FORCEREADY, &newCblk->flags); + android_atomic_or(CBLK_FORCEREADY, &mCblk->flags); } } - if (mSharedBuffer != 0) { - frames = mFrameCount; - } - if (frames > 0) { - // stepUser() clears CBLK_UNDERRUN flag enabling underrun callbacks to - // the client - mProxy->stepUser(frames); - } - if (mActive) { +#endif + if (mState == STATE_ACTIVE) { result = mAudioTrack->start(); - ALOGW_IF(result != NO_ERROR, "restoreTrack_l() start() failed status %d", result); - } - if (fromStart && result == NO_ERROR) { - mNewPosition = newCblk->server + mUpdatePeriod; } } - ALOGW_IF(result != NO_ERROR, "restoreTrack_l() failed status %d", result); - ALOGV("restoreTrack_l() status %d mActive %d cblk %p, old cblk %p flags %08x old flags %08x", - result, mActive, newCblk, cblk, newCblk->flags, cblk->flags); - - if (result == NO_ERROR) { - // from now on we switch to the newly created cblk - refCblk = newCblk; + if (result != NO_ERROR) { + ALOGW("restoreTrack_l() failed status %d", result); + mState = STATE_STOPPED; } - newCblk->lock.lock(); - - ALOGW_IF(result != NO_ERROR, "restoreTrack_l() error %d", result); return result; } @@ -1529,16 +1559,33 @@ status_t AudioTrack::dump(int fd, const Vector& args) const result.append(buffer); snprintf(buffer, 255, " sample rate(%u), status(%d)\n", mSampleRate, mStatus); result.append(buffer); - snprintf(buffer, 255, " active(%d), latency (%d)\n", mActive, mLatency); + snprintf(buffer, 255, " state(%d), latency (%d)\n", mState, mLatency); result.append(buffer); ::write(fd, result.string(), result.size()); return NO_ERROR; } +uint32_t AudioTrack::getUnderrunFrames() const +{ + AutoMutex lock(mLock); + return mProxy->getUnderrunFrames(); +} + +// ========================================================================= + +void AudioTrack::DeathNotifier::binderDied(const wp& who) 
+{ + sp audioTrack = mAudioTrack.promote(); + if (audioTrack != 0) { + AutoMutex lock(audioTrack->mLock); + audioTrack->mProxy->binderDied(); + } +} + // ========================================================================= AudioTrack::AudioTrackThread::AudioTrackThread(AudioTrack& receiver, bool bCanCallJava) - : Thread(bCanCallJava), mReceiver(receiver), mPaused(true) + : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mResumeLatch(false) { } @@ -1556,10 +1603,26 @@ bool AudioTrack::AudioTrackThread::threadLoop() return true; } } - if (!mReceiver.processAudioBuffer(this)) { - pause(); + nsecs_t ns = mReceiver.processAudioBuffer(this); + switch (ns) { + case 0: + return true; + case NS_WHENEVER: + sleep(1); + return true; + case NS_INACTIVE: + pauseConditional(); + return true; + case NS_NEVER: + return false; + default: + LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns); + struct timespec req; + req.tv_sec = ns / 1000000000LL; + req.tv_nsec = ns % 1000000000LL; + nanosleep(&req, NULL /*rem*/); + return true; } - return true; } void AudioTrack::AudioTrackThread::requestExit() @@ -1573,6 +1636,17 @@ void AudioTrack::AudioTrackThread::pause() { AutoMutex _l(mMyLock); mPaused = true; + mResumeLatch = false; +} + +void AudioTrack::AudioTrackThread::pauseConditional() +{ + AutoMutex _l(mMyLock); + if (mResumeLatch) { + mResumeLatch = false; + } else { + mPaused = true; + } } void AudioTrack::AudioTrackThread::resume() @@ -1580,7 +1654,10 @@ void AudioTrack::AudioTrackThread::resume() AutoMutex _l(mMyLock); if (mPaused) { mPaused = false; + mResumeLatch = false; mMyCond.signal(); + } else { + mResumeLatch = true; } } diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index 13d47c9..f034164 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -19,178 +19,664 @@ #include #include +extern "C" { +#include "../private/bionic_futex.h" +} namespace android { audio_track_cblk_t::audio_track_cblk_t() - : lock(Mutex::SHARED), cv(Condition::SHARED), user(0), server(0), - userBase(0), serverBase(0), frameCount_(0), - loopStart(UINT_MAX), loopEnd(UINT_MAX), loopCount(0), mVolumeLR(0x10001000), - mSampleRate(0), mSendLevel(0), flags(0) + : server(0), frameCount_(0), mFutex(0), mMinimum(0), + mVolumeLR(0x10001000), mSampleRate(0), mSendLevel(0), mName(0), flags(0) { + memset(&u, 0, sizeof(u)); } -uint32_t audio_track_cblk_t::stepUser(size_t stepCount, size_t frameCount, bool isOut) +// --------------------------------------------------------------------------- + +Proxy::Proxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize, + bool isOut, bool clientInServer) + : mCblk(cblk), mBuffers(buffers), mFrameCount(frameCount), mFrameSize(frameSize), + mFrameCountP2(roundup(frameCount)), mIsOut(isOut), mClientInServer(clientInServer), + mIsShutdown(false) { - ALOGV("stepuser %08x %08x %d", user, server, stepCount); +} - uint32_t u = user; - u += stepCount; - // Ensure that user is never ahead of server for AudioRecord - if (isOut) { - // If stepServer() has been called once, switch to normal obtainBuffer() timeout period - if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS-1) { - bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; - } - } else if (u > server) { - ALOGW("stepUser occurred after track reset"); - u = server; +// --------------------------------------------------------------------------- + +ClientProxy::ClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, + size_t 
frameSize, bool isOut, bool clientInServer) + : Proxy(cblk, buffers, frameCount, frameSize, isOut, clientInServer), mEpoch(0) +{ +} + +const struct timespec ClientProxy::kForever = {INT_MAX /*tv_sec*/, 0 /*tv_nsec*/}; +const struct timespec ClientProxy::kNonBlocking = {0 /*tv_sec*/, 0 /*tv_nsec*/}; + +#define MEASURE_NS 10000000 // attempt to provide accurate timeouts if requested >= MEASURE_NS + +// To facilitate quicker recovery from server failure, this value limits the timeout per each futex +// wait. However it does not protect infinite timeouts. If defined to be zero, there is no limit. +// FIXME May not be compatible with audio tunneling requirements where timeout should be in the +// order of minutes. +#define MAX_SEC 5 + +status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *requested, + struct timespec *elapsed) +{ + if (buffer == NULL || buffer->mFrameCount == 0) { + ALOGE("%s BAD_VALUE", __func__); + return BAD_VALUE; } + struct timespec total; // total elapsed time spent waiting + total.tv_sec = 0; + total.tv_nsec = 0; + bool measure = elapsed != NULL; // whether to measure total elapsed time spent waiting - if (u >= frameCount) { - // common case, user didn't just wrap - if (u - frameCount >= userBase ) { - userBase += frameCount; + status_t status; + enum { + TIMEOUT_ZERO, // requested == NULL || *requested == 0 + TIMEOUT_INFINITE, // *requested == infinity + TIMEOUT_FINITE, // 0 < *requested < infinity + TIMEOUT_CONTINUE, // additional chances after TIMEOUT_FINITE + } timeout; + if (requested == NULL) { + timeout = TIMEOUT_ZERO; + } else if (requested->tv_sec == 0 && requested->tv_nsec == 0) { + timeout = TIMEOUT_ZERO; + } else if (requested->tv_sec == INT_MAX) { + timeout = TIMEOUT_INFINITE; + } else { + timeout = TIMEOUT_FINITE; + if (requested->tv_sec > 0 || requested->tv_nsec >= MEASURE_NS) { + measure = true; + } + } + struct timespec before; + bool beforeIsValid = false; + audio_track_cblk_t* cblk = mCblk; + bool ignoreInitialPendingInterrupt = true; + // check for shared memory corruption + if (mIsShutdown) { + status = NO_INIT; + goto end; + } + for (;;) { + int32_t flags = android_atomic_and(~CBLK_INTERRUPT, &cblk->flags); + // check for track invalidation by server, or server death detection + if (flags & CBLK_INVALID) { + ALOGV("Track invalidated"); + status = DEAD_OBJECT; + goto end; + } + // check for obtainBuffer interrupted by client + if (!ignoreInitialPendingInterrupt && (flags & CBLK_INTERRUPT)) { + ALOGV("obtainBuffer() interrupted by client"); + status = -EINTR; + goto end; + } + ignoreInitialPendingInterrupt = false; + // compute number of frames available to write (AudioTrack) or read (AudioRecord) + int32_t front; + int32_t rear; + if (mIsOut) { + // The barrier following the read of mFront is probably redundant. + // We're about to perform a conditional branch based on 'filled', + // which will force the processor to observe the read of mFront + // prior to allowing data writes starting at mRaw. + // However, the processor may support speculative execution, + // and be unable to undo speculative writes into shared memory. + // The barrier will prevent such speculative execution. + front = android_atomic_acquire_load(&cblk->u.mStreaming.mFront); + rear = cblk->u.mStreaming.mRear; + } else { + // On the other hand, this barrier is required. 
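The barrier comments above are the crux of the lock-free control block: the writer publishes its index with a release store only after the payload is written, and the reader observes the index with an acquire load before touching the payload. The miniature below shows that publication pattern with std::atomic and two threads; gPayload and gRear are stand-ins for the shared sample buffer and the mRear counter, not the proxy classes themselves.

#include <atomic>
#include <cstdint>
#include <cstdio>
#include <thread>

static int16_t gPayload[4];                     // stands in for the shared sample buffer
static std::atomic<int32_t> gRear{0};           // stands in for cblk->u.mStreaming.mRear

void producer() {
    for (int i = 0; i < 4; i++) {
        gPayload[i] = int16_t(100 + i);         // 1. write the frames
    }
    gRear.store(4, std::memory_order_release);  // 2. then publish the new rear index
}

void consumer() {
    while (gRear.load(std::memory_order_acquire) < 4) {
        // spin; the acquire pairs with the producer's release, so once the index is
        // visible the frame data written before it is visible too
    }
    printf("%d %d %d %d\n", gPayload[0], gPayload[1], gPayload[2], gPayload[3]);
}

int main() {
    std::thread c(consumer), p(producer);
    p.join();
    c.join();
}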
+ rear = android_atomic_acquire_load(&cblk->u.mStreaming.mRear); + front = cblk->u.mStreaming.mFront; + } + ssize_t filled = rear - front; + // pipe should not be overfull + if (!(0 <= filled && (size_t) filled <= mFrameCount)) { + ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled); + mIsShutdown = true; + status = NO_INIT; + goto end; + } + // don't allow filling pipe beyond the nominal size + size_t avail = mIsOut ? mFrameCount - filled : filled; + if (avail > 0) { + // 'avail' may be non-contiguous, so return only the first contiguous chunk + size_t part1; + if (mIsOut) { + rear &= mFrameCountP2 - 1; + part1 = mFrameCountP2 - rear; + } else { + front &= mFrameCountP2 - 1; + part1 = mFrameCountP2 - front; + } + if (part1 > avail) { + part1 = avail; + } + if (part1 > buffer->mFrameCount) { + part1 = buffer->mFrameCount; + } + buffer->mFrameCount = part1; + buffer->mRaw = part1 > 0 ? + &((char *) mBuffers)[(mIsOut ? rear : front) * mFrameSize] : NULL; + buffer->mNonContig = avail - part1; + // mUnreleased = part1; + status = NO_ERROR; + break; + } + struct timespec remaining; + const struct timespec *ts; + switch (timeout) { + case TIMEOUT_ZERO: + status = WOULD_BLOCK; + goto end; + case TIMEOUT_INFINITE: + ts = NULL; + break; + case TIMEOUT_FINITE: + timeout = TIMEOUT_CONTINUE; + if (MAX_SEC == 0) { + ts = requested; + break; + } + // fall through + case TIMEOUT_CONTINUE: + // FIXME we do not retry if requested < 10ms? needs documentation on this state machine + if (!measure || requested->tv_sec < total.tv_sec || + (requested->tv_sec == total.tv_sec && requested->tv_nsec <= total.tv_nsec)) { + status = TIMED_OUT; + goto end; + } + remaining.tv_sec = requested->tv_sec - total.tv_sec; + if ((remaining.tv_nsec = requested->tv_nsec - total.tv_nsec) < 0) { + remaining.tv_nsec += 1000000000; + remaining.tv_sec++; + } + if (0 < MAX_SEC && MAX_SEC < remaining.tv_sec) { + remaining.tv_sec = MAX_SEC; + remaining.tv_nsec = 0; + } + ts = &remaining; + break; + default: + LOG_FATAL("%s timeout=%d", timeout); + ts = NULL; + break; + } + int32_t old = android_atomic_dec(&cblk->mFutex); + if (old <= 0) { + int rc; + if (measure && !beforeIsValid) { + clock_gettime(CLOCK_MONOTONIC, &before); + beforeIsValid = true; + } + int ret = __futex_syscall4(&cblk->mFutex, + mClientInServer ? FUTEX_WAIT_PRIVATE : FUTEX_WAIT, old - 1, ts); + // update total elapsed time spent waiting + if (measure) { + struct timespec after; + clock_gettime(CLOCK_MONOTONIC, &after); + total.tv_sec += after.tv_sec - before.tv_sec; + long deltaNs = after.tv_nsec - before.tv_nsec; + if (deltaNs < 0) { + deltaNs += 1000000000; + total.tv_sec--; + } + if ((total.tv_nsec += deltaNs) >= 1000000000) { + total.tv_nsec -= 1000000000; + total.tv_sec++; + } + before = after; + beforeIsValid = true; + } + switch (ret) { + case 0: // normal wakeup by server, or by binderDied() + case -EWOULDBLOCK: // benign race condition with server + case -EINTR: // wait was interrupted by signal or other spurious wakeup + case -ETIMEDOUT: // time-out expired + break; + default: + ALOGE("%s unexpected error %d", __func__, ret); + status = -ret; + goto end; + } } - } else if (u >= userBase + frameCount) { - // user just wrapped - userBase += frameCount; } - user = u; - - // Clear flow control error condition as new data has been written/read to/from buffer. 
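The index arithmetic in obtainBuffer() above relies on free-running counters and a capacity rounded up to a power of two: the fill level is simply rear - front, wraparound of the counters cancels out in the subtraction, masking with (capacity - 1) recovers the array offset, and only the first contiguous run is handed out, with the remainder reported as mNonContig. A small self-contained check of that arithmetic (the struct below is an illustration, not the AOSP proxy classes):

#include <cassert>
#include <cstdint>
#include <cstddef>

// Free-running indices as used by the proxies (the real code keeps them as int32_t
// for the atomic helpers; unsigned is used here so the wraparound is well-defined).
struct RingIndices {
    uint32_t capacityP2;                        // power of two >= nominal frame count
    uint32_t front;                             // advanced by the consumer
    uint32_t rear;                              // advanced by the producer

    size_t filled() const { return rear - front; }
    size_t availToWrite(size_t nominal) const { return nominal - filled(); }
    // Length of the first contiguous run starting at 'rear' (producer side).
    size_t contiguousAtRear() const { return capacityP2 - (rear & (capacityP2 - 1)); }
};

int main() {
    RingIndices r = { 8, 3, 6 };
    assert(r.filled() == 3);                    // offsets 3, 4, 5 are queued
    assert(r.availToWrite(8) == 5);             // room for 5 more before the pipe is full
    assert(r.contiguousAtRear() == 2);          // offsets 6, 7, then the writer wraps to 0

    RingIndices w = { 8, 0xFFFFFFFEu, 0x00000001u };   // the counters themselves have wrapped
    assert(w.filled() == 3);                    // the difference is still correct
}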
- if (flags & CBLK_UNDERRUN) { - android_atomic_and(~CBLK_UNDERRUN, &flags); +end: + if (status != NO_ERROR) { + buffer->mFrameCount = 0; + buffer->mRaw = NULL; + buffer->mNonContig = 0; + } + if (elapsed != NULL) { + *elapsed = total; } + if (requested == NULL) { + requested = &kNonBlocking; + } + if (measure) { + ALOGV("requested %d.%03d elapsed %d.%03d", requested->tv_sec, requested->tv_nsec / 1000000, + total.tv_sec, total.tv_nsec / 1000000); + } + return status; +} - return u; +void ClientProxy::releaseBuffer(Buffer* buffer) +{ + size_t stepCount = buffer->mFrameCount; + // FIXME + // check mUnreleased + // verify that stepCount <= frameCount returned by the last obtainBuffer() + // verify stepCount not > total frame count of pipe + if (stepCount == 0) { + return; + } + audio_track_cblk_t* cblk = mCblk; + // Both of these barriers are required + if (mIsOut) { + int32_t rear = cblk->u.mStreaming.mRear; + android_atomic_release_store(stepCount + rear, &cblk->u.mStreaming.mRear); + } else { + int32_t front = cblk->u.mStreaming.mFront; + android_atomic_release_store(stepCount + front, &cblk->u.mStreaming.mFront); + } } -bool audio_track_cblk_t::stepServer(size_t stepCount, size_t frameCount, bool isOut) +void ClientProxy::binderDied() { - ALOGV("stepserver %08x %08x %d", user, server, stepCount); + audio_track_cblk_t* cblk = mCblk; + if (!(android_atomic_or(CBLK_INVALID, &cblk->flags) & CBLK_INVALID)) { + // it seems that a FUTEX_WAKE_PRIVATE will not wake a FUTEX_WAIT, even within same process + (void) __futex_syscall3(&cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, + 1); + } +} - if (!tryLock()) { - ALOGW("stepServer() could not lock cblk"); - return false; +void ClientProxy::interrupt() +{ + audio_track_cblk_t* cblk = mCblk; + if (!(android_atomic_or(CBLK_INTERRUPT, &cblk->flags) & CBLK_INTERRUPT)) { + (void) __futex_syscall3(&cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, + 1); } +} - uint32_t s = server; - bool flushed = (s == user); +size_t ClientProxy::getMisalignment() +{ + audio_track_cblk_t* cblk = mCblk; + return (mFrameCountP2 - (mIsOut ? 
cblk->u.mStreaming.mRear : cblk->u.mStreaming.mFront)) & + (mFrameCountP2 - 1); +} - s += stepCount; - if (isOut) { - // Mark that we have read the first buffer so that next time stepUser() is called - // we switch to normal obtainBuffer() timeout period - if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS) { - bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS - 1; - } - // It is possible that we receive a flush() - // while the mixer is processing a block: in this case, - // stepServer() is called After the flush() has reset u & s and - // we have s > u - if (flushed) { - ALOGW("stepServer occurred after track reset"); - s = user; +// --------------------------------------------------------------------------- + +void AudioTrackClientProxy::flush() +{ + mCblk->u.mStreaming.mFlush++; +} + +// --------------------------------------------------------------------------- + +StaticAudioTrackClientProxy::StaticAudioTrackClientProxy(audio_track_cblk_t* cblk, void *buffers, + size_t frameCount, size_t frameSize) + : AudioTrackClientProxy(cblk, buffers, frameCount, frameSize), + mMutator(&cblk->u.mStatic.mSingleStateQueue), mBufferPosition(0) +{ +} + +void StaticAudioTrackClientProxy::flush() +{ + LOG_FATAL("static flush"); +} + +void StaticAudioTrackClientProxy::setLoop(size_t loopStart, size_t loopEnd, int loopCount) +{ + StaticAudioTrackState newState; + newState.mLoopStart = loopStart; + newState.mLoopEnd = loopEnd; + newState.mLoopCount = loopCount; + mBufferPosition = loopStart; + (void) mMutator.push(newState); +} + +size_t StaticAudioTrackClientProxy::getBufferPosition() +{ + size_t bufferPosition; + if (mMutator.ack()) { + bufferPosition = mCblk->u.mStatic.mBufferPosition; + if (bufferPosition > mFrameCount) { + bufferPosition = mFrameCount; } + } else { + bufferPosition = mBufferPosition; } + return bufferPosition; +} + +// --------------------------------------------------------------------------- - if (s >= loopEnd) { - ALOGW_IF(s > loopEnd, "stepServer: s %u > loopEnd %u", s, loopEnd); - s = loopStart; - if (--loopCount == 0) { - loopEnd = UINT_MAX; - loopStart = UINT_MAX; +ServerProxy::ServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, + size_t frameSize, bool isOut, bool clientInServer) + : Proxy(cblk, buffers, frameCount, frameSize, isOut, clientInServer), mUnreleased(0), + mAvailToClient(0), mFlush(0), mDeferWake(false) +{ +} + +status_t ServerProxy::obtainBuffer(Buffer* buffer) +{ + if (mIsShutdown) { + buffer->mFrameCount = 0; + buffer->mRaw = NULL; + buffer->mNonContig = 0; + mUnreleased = 0; + return NO_INIT; + } + audio_track_cblk_t* cblk = mCblk; + // compute number of frames available to write (AudioTrack) or read (AudioRecord), + // or use previous cached value from framesReady(), with added barrier if it omits. 
+ int32_t front; + int32_t rear; + // See notes on barriers at ClientProxy::obtainBuffer() + if (mIsOut) { + int32_t flush = cblk->u.mStreaming.mFlush; + rear = android_atomic_acquire_load(&cblk->u.mStreaming.mRear); + if (flush != mFlush) { + front = rear; + mFlush = flush; + } else { + front = cblk->u.mStreaming.mFront; } + } else { + front = android_atomic_acquire_load(&cblk->u.mStreaming.mFront); + rear = cblk->u.mStreaming.mRear; + } + ssize_t filled = rear - front; + // pipe should not already be overfull + if (!(0 <= filled && (size_t) filled <= mFrameCount)) { + ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled); + mIsShutdown = true; + } + if (mIsShutdown) { + buffer->mFrameCount = 0; + buffer->mRaw = NULL; + buffer->mNonContig = 0; + mUnreleased = 0; + return NO_INIT; } + // don't allow filling pipe beyond the nominal size + size_t availToServer; + if (mIsOut) { + availToServer = filled; + mAvailToClient = mFrameCount - filled; + } else { + availToServer = mFrameCount - filled; + mAvailToClient = filled; + } + // 'availToServer' may be non-contiguous, so return only the first contiguous chunk + size_t part1; + if (mIsOut) { + front &= mFrameCountP2 - 1; + part1 = mFrameCountP2 - front; + } else { + rear &= mFrameCountP2 - 1; + part1 = mFrameCountP2 - rear; + } + if (part1 > availToServer) { + part1 = availToServer; + } + size_t ask = buffer->mFrameCount; + if (part1 > ask) { + part1 = ask; + } + // is assignment redundant in some cases? + buffer->mFrameCount = part1; + buffer->mRaw = part1 > 0 ? + &((char *) mBuffers)[(mIsOut ? front : rear) * mFrameSize] : NULL; + buffer->mNonContig = availToServer - part1; + mUnreleased = part1; + // optimization to avoid waking up the client too early + // FIXME need to test for recording + mDeferWake = part1 < ask && availToServer >= ask; + return part1 > 0 ? NO_ERROR : WOULD_BLOCK; +} - if (s >= frameCount) { - // common case, server didn't just wrap - if (s - frameCount >= serverBase ) { - serverBase += frameCount; - } - } else if (s >= serverBase + frameCount) { - // server just wrapped - serverBase += frameCount; +void ServerProxy::releaseBuffer(Buffer* buffer) +{ + if (mIsShutdown) { + buffer->mFrameCount = 0; + buffer->mRaw = NULL; + buffer->mNonContig = 0; + return; + } + size_t stepCount = buffer->mFrameCount; + LOG_ALWAYS_FATAL_IF(stepCount > mUnreleased); + if (stepCount == 0) { + buffer->mRaw = NULL; + buffer->mNonContig = 0; + return; + } + mUnreleased -= stepCount; + audio_track_cblk_t* cblk = mCblk; + if (mIsOut) { + int32_t front = cblk->u.mStreaming.mFront; + android_atomic_release_store(stepCount + front, &cblk->u.mStreaming.mFront); + } else { + int32_t rear = cblk->u.mStreaming.mRear; + android_atomic_release_store(stepCount + rear, &cblk->u.mStreaming.mRear); } - server = s; + mCblk->server += stepCount; - if (!(flags & CBLK_INVALID)) { - cv.signal(); + size_t half = mFrameCount / 2; + if (half == 0) { + half = 1; + } + size_t minimum = cblk->mMinimum; + if (minimum == 0) { + minimum = mIsOut ? half : 1; + } else if (minimum > half) { + minimum = half; + } + if (!mDeferWake && mAvailToClient + stepCount >= minimum) { + ALOGV("mAvailToClient=%u stepCount=%u minimum=%u", mAvailToClient, stepCount, minimum); + // could client be sleeping, or not need this increment and counter overflows? + int32_t old = android_atomic_inc(&cblk->mFutex); + if (old == -1) { + (void) __futex_syscall3(&cblk->mFutex, + mClientInServer ? 
FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1); + } } - lock.unlock(); - return true; + + buffer->mFrameCount = 0; + buffer->mRaw = NULL; + buffer->mNonContig = 0; } -void* audio_track_cblk_t::buffer(void *buffers, size_t frameSize, uint32_t offset) const +// --------------------------------------------------------------------------- + +size_t AudioTrackServerProxy::framesReady() { - return (int8_t *)buffers + (offset - userBase) * frameSize; + LOG_ALWAYS_FATAL_IF(!mIsOut); + + if (mIsShutdown) { + return 0; + } + audio_track_cblk_t* cblk = mCblk; + // the acquire might not be necessary since not doing a subsequent read + int32_t rear = android_atomic_acquire_load(&cblk->u.mStreaming.mRear); + ssize_t filled = rear - cblk->u.mStreaming.mFront; + // pipe should not already be overfull + if (!(0 <= filled && (size_t) filled <= mFrameCount)) { + ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled); + mIsShutdown = true; + return 0; + } + // cache this value for later use by obtainBuffer(), with added barrier + // and racy if called by normal mixer thread + // ignores flush(), so framesReady() may report a larger mFrameCount than obtainBuffer() + return filled; } -uint32_t audio_track_cblk_t::framesAvailable(size_t frameCount, bool isOut) +// --------------------------------------------------------------------------- + +StaticAudioTrackServerProxy::StaticAudioTrackServerProxy(audio_track_cblk_t* cblk, void *buffers, + size_t frameCount, size_t frameSize) + : AudioTrackServerProxy(cblk, buffers, frameCount, frameSize), + mObserver(&cblk->u.mStatic.mSingleStateQueue), mPosition(0), + mEnd(frameCount), mFramesReadyIsCalledByMultipleThreads(false) { - Mutex::Autolock _l(lock); - return framesAvailable_l(frameCount, isOut); + mState.mLoopStart = 0; + mState.mLoopEnd = 0; + mState.mLoopCount = 0; } -uint32_t audio_track_cblk_t::framesAvailable_l(size_t frameCount, bool isOut) +void StaticAudioTrackServerProxy::framesReadyIsCalledByMultipleThreads() { - uint32_t u = user; - uint32_t s = server; + mFramesReadyIsCalledByMultipleThreads = true; +} - if (isOut) { - uint32_t limit = (s < loopStart) ? s : loopStart; - return limit + frameCount - u; - } else { - return frameCount + u - s; +size_t StaticAudioTrackServerProxy::framesReady() +{ + // FIXME + // This is racy if called by normal mixer thread, + // as we're reading 2 independent variables without a lock. + // Can't call mObserver.poll(), as we might be called from wrong thread. + // If looping is enabled, should return a higher number (since includes non-contiguous). + size_t position = mPosition; + if (!mFramesReadyIsCalledByMultipleThreads) { + ssize_t positionOrStatus = pollPosition(); + if (positionOrStatus >= 0) { + position = (size_t) positionOrStatus; + } } + size_t end = mEnd; + return position < end ? 
end - position : 0; } -uint32_t audio_track_cblk_t::framesReady(bool isOut) +ssize_t StaticAudioTrackServerProxy::pollPosition() { - uint32_t u = user; - uint32_t s = server; - - if (isOut) { - if (u < loopEnd) { - return u - s; - } else { - // do not block on mutex shared with client on AudioFlinger side - if (!tryLock()) { - ALOGW("framesReady() could not lock cblk"); - return 0; + size_t position = mPosition; + StaticAudioTrackState state; + if (mObserver.poll(state)) { + bool valid = false; + size_t loopStart = state.mLoopStart; + size_t loopEnd = state.mLoopEnd; + if (state.mLoopCount == 0) { + if (loopStart > mFrameCount) { + loopStart = mFrameCount; } - uint32_t frames = UINT_MAX; - if (loopCount >= 0) { - frames = (loopEnd - loopStart)*loopCount + u - s; + // ignore loopEnd + mPosition = position = loopStart; + mEnd = mFrameCount; + mState.mLoopCount = 0; + valid = true; + } else { + if (loopStart < loopEnd && loopEnd <= mFrameCount && + loopEnd - loopStart >= MIN_LOOP) { + if (!(loopStart <= position && position < loopEnd)) { + mPosition = position = loopStart; + } + mEnd = loopEnd; + mState = state; + valid = true; } - lock.unlock(); - return frames; } - } else { - return s - u; + if (!valid) { + ALOGE("%s client pushed an invalid state, shutting down", __func__); + mIsShutdown = true; + return (ssize_t) NO_INIT; + } + mCblk->u.mStatic.mBufferPosition = position; } + return (ssize_t) position; } -bool audio_track_cblk_t::tryLock() +status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer) { - // the code below simulates lock-with-timeout - // we MUST do this to protect the AudioFlinger server - // as this lock is shared with the client. - status_t err; + if (mIsShutdown) { + buffer->mFrameCount = 0; + buffer->mRaw = NULL; + buffer->mNonContig = 0; + mUnreleased = 0; + return NO_INIT; + } + ssize_t positionOrStatus = pollPosition(); + if (positionOrStatus < 0) { + buffer->mFrameCount = 0; + buffer->mRaw = NULL; + buffer->mNonContig = 0; + mUnreleased = 0; + return (status_t) positionOrStatus; + } + size_t position = (size_t) positionOrStatus; + size_t avail; + if (position < mEnd) { + avail = mEnd - position; + size_t wanted = buffer->mFrameCount; + if (avail < wanted) { + buffer->mFrameCount = avail; + } else { + avail = wanted; + } + buffer->mRaw = &((char *) mBuffers)[position * mFrameSize]; + } else { + avail = 0; + buffer->mFrameCount = 0; + buffer->mRaw = NULL; + } + buffer->mNonContig = 0; // FIXME should be > 0 for looping + mUnreleased = avail; + return NO_ERROR; +} - err = lock.tryLock(); - if (err == -EBUSY) { // just wait a bit - usleep(1000); - err = lock.tryLock(); +void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer) +{ + size_t stepCount = buffer->mFrameCount; + LOG_ALWAYS_FATAL_IF(stepCount > mUnreleased); + if (stepCount == 0) { + buffer->mRaw = NULL; + buffer->mNonContig = 0; + return; } - if (err != NO_ERROR) { - // probably, the client just died. 
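Underlying the obtainBuffer()/releaseBuffer() pairs above is the cblk->mFutex handshake: the waiter decrements the word and sleeps only if it went non-positive, passing the post-decrement value to FUTEX_WAIT so a concurrent wake is never missed, while the waker increments it and issues FUTEX_WAKE only when the previous value was -1. Below is a Linux-only sketch of that protocol using raw futex syscalls; it assumes std::atomic<int32_t> shares the layout of a plain int32_t (true on Android/Linux ABIs), and spurious returns are harmless because callers re-check the ring state in a loop, as ClientProxy::obtainBuffer() does.

#include <atomic>
#include <cstdint>
#include <cstdio>
#include <ctime>
#include <linux/futex.h>
#include <sys/syscall.h>
#include <thread>
#include <unistd.h>

static long futex(std::atomic<int32_t>* addr, int op, int32_t val,
                  const struct timespec* ts = nullptr) {
    return syscall(SYS_futex, addr, op, val, ts, nullptr, 0);
}

// Waiter (e.g. client obtainBuffer): sleep only if nobody has signalled since the last wait.
void futexWait(std::atomic<int32_t>& word, const struct timespec* timeout) {
    int32_t old = word.fetch_sub(1);
    if (old <= 0) {
        futex(&word, FUTEX_WAIT, old - 1, timeout);   // no-op if the value changed meanwhile
    }
}

// Waker (e.g. server releaseBuffer): wake at most one sleeper.
void futexWake(std::atomic<int32_t>& word) {
    int32_t old = word.fetch_add(1);
    if (old == -1) {                                  // a waiter went (or is going) to sleep
        futex(&word, FUTEX_WAKE, 1);
    }
}

int main() {
    std::atomic<int32_t> word{0};
    std::thread waiter([&] { futexWait(word, nullptr); printf("woken\n"); });
    usleep(10000);                                    // give the waiter time to block
    futexWake(word);
    waiter.join();
}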
- return false; + mUnreleased -= stepCount; + audio_track_cblk_t* cblk = mCblk; + size_t position = mPosition; + size_t newPosition = position + stepCount; + int32_t setFlags = 0; + if (!(position <= newPosition && newPosition <= mFrameCount)) { + ALOGW("%s newPosition %u outside [%u, %u]", __func__, newPosition, position, mFrameCount); + newPosition = mFrameCount; + } else if (mState.mLoopCount != 0 && newPosition == mState.mLoopEnd) { + if (mState.mLoopCount == -1 || --mState.mLoopCount != 0) { + newPosition = mState.mLoopStart; + setFlags = CBLK_LOOP_CYCLE; + } else { + mEnd = mFrameCount; // this is what allows playback to continue after the loop + setFlags = CBLK_LOOP_FINAL; + } } - return true; + if (newPosition == mFrameCount) { + setFlags |= CBLK_BUFFER_END; + } + mPosition = newPosition; + + cblk->server += stepCount; + cblk->u.mStatic.mBufferPosition = newPosition; + if (setFlags != 0) { + (void) android_atomic_or(setFlags, &cblk->flags); + // this would be a good place to wake a futex + } + + buffer->mFrameCount = 0; + buffer->mRaw = NULL; + buffer->mNonContig = 0; } +// --------------------------------------------------------------------------- + } // namespace android diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index ebe1ba1..f9ad31d 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -1060,7 +1060,9 @@ bool ToneGenerator::initAudioTrack() { this, // user 0, // notificationFrames 0, // sharedBuffer - mThreadCanCallJava); + mThreadCanCallJava, + 0, // sessionId + AudioTrack::TRANSFER_CALLBACK); if (mpAudioTrack->initCheck() != NO_ERROR) { ALOGE("AudioTrack->initCheck failed"); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index cf68848..05dbab1 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -56,6 +56,7 @@ #include #include +#include namespace android { diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index a749d7a..b1286d3 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -46,6 +46,8 @@ public: void destroy(); int name() const { return mName; } + virtual uint32_t sampleRate() const; + audio_stream_type_t streamType() const { return mStreamType; } @@ -139,6 +141,7 @@ private: // 'volatile' means accessed without lock or // barrier, but is read/written atomically bool mIsInvalid; // non-resettable latch, set by invalidate() + AudioTrackServerProxy* mAudioTrackServerProxy; }; // end of Track class TimedTrack : public Track { @@ -255,10 +258,6 @@ public: private: - enum { - NO_MORE_BUFFERS = 0x80000001, // same in AudioTrack.h, ok to be different value - }; - status_t obtainBuffer(AudioBufferProvider::Buffer* buffer, uint32_t waitTimeMs); void clearBufferQueue(); diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h index 6c0d1d3..ffe3e9f 100644 --- a/services/audioflinger/RecordTracks.h +++ b/services/audioflinger/RecordTracks.h @@ -57,4 +57,5 @@ private: // releaseBuffer() not overridden bool mOverflow; // overflow on most recent attempt to fill client buffer + AudioRecordServerProxy* mAudioRecordServerProxy; }; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index ee52fcb..0773534 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -139,7 +139,7 @@ static const int kPriorityFastMixer = 3; // FIXME It would be better 
for client to tell AudioFlinger whether it wants double-buffering or // N-buffering, so AudioFlinger could allocate the right amount of memory. // See the client's minBufCount and mNotificationFramesAct calculations for details. -static const int kFastTrackMultiplier = 2; +static const int kFastTrackMultiplier = 1; // ---------------------------------------------------------------------------- @@ -1327,7 +1327,7 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // the track is newly added, make sure it fills up all its // buffers before playing. This is to ensure the client will // effectively get the latency it requested. - track->mFillingUpStatus = Track::FS_FILLING; + track->mFillingUpStatus = track->sharedBuffer() != 0 ? Track::FS_FILLED : Track::FS_FILLING; track->mResetDone = false; track->mPresentationCompleteFrames = 0; mActiveTracks.add(track); @@ -2596,24 +2596,35 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // app does not call stop() and relies on underrun to stop: // hence the test on (mMixerStatus == MIXER_TRACKS_READY) meaning the track was mixed // during last round + size_t desiredFrames; + if (t->sampleRate() == mSampleRate) { + desiredFrames = mNormalFrameCount; + } else { + // +1 for rounding and +1 for additional sample needed for interpolation + desiredFrames = (mNormalFrameCount * t->sampleRate()) / mSampleRate + 1 + 1; + // add frames already consumed but not yet released by the resampler + // because cblk->framesReady() will include these frames + desiredFrames += mAudioMixer->getUnreleasedFrames(track->name()); + // the minimum track buffer size is normally twice the number of frames necessary + // to fill one buffer and the resampler should not leave more than one buffer worth + // of unreleased frames after each pass, but just in case... + ALOG_ASSERT(desiredFrames <= cblk->frameCount_); + } uint32_t minFrames = 1; if ((track->sharedBuffer() == 0) && !track->isStopped() && !track->isPausing() && (mMixerStatusIgnoringFastTracks == MIXER_TRACKS_READY)) { - if (t->sampleRate() == mSampleRate) { - minFrames = mNormalFrameCount; - } else { - // +1 for rounding and +1 for additional sample needed for interpolation - minFrames = (mNormalFrameCount * t->sampleRate()) / mSampleRate + 1 + 1; - // add frames already consumed but not yet released by the resampler - // because cblk->framesReady() will include these frames - minFrames += mAudioMixer->getUnreleasedFrames(track->name()); - // the minimum track buffer size is normally twice the number of frames necessary - // to fill one buffer and the resampler should not leave more than one buffer worth - // of unreleased frames after each pass, but just in case... 
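The desiredFrames computation above sizes each mix pass for a resampled track: with mNormalFrameCount = 1024 and a 44.1 kHz track feeding a 48 kHz output, integer division gives 1024 * 44100 / 48000 = 940, plus 1 for rounding, plus 1 for the interpolator's extra sample, plus whatever frames the resampler is still holding. A worked check of that arithmetic, with the unreleased-frame count passed in as an assumed value rather than queried from a mixer:

#include <cassert>
#include <cstddef>
#include <cstdint>

// Frames a resampled track should have ready for one mix pass (same formula as the diff).
size_t desiredFrames(size_t normalFrameCount, uint32_t trackRate, uint32_t mixRate,
                     size_t unreleasedFrames /* assumed; normally from getUnreleasedFrames() */) {
    if (trackRate == mixRate) {
        return normalFrameCount;
    }
    // +1 for rounding and +1 for the additional sample needed for interpolation
    return (normalFrameCount * trackRate) / mixRate + 1 + 1 + unreleasedFrames;
}

int main() {
    assert(desiredFrames(1024, 48000, 48000, 0) == 1024);   // no resampling
    assert(desiredFrames(1024, 44100, 48000, 8) == 950);    // 940 + 1 + 1 + 8
    assert(desiredFrames(1024, 96000, 48000, 0) == 2050);   // upsampled source: 2048 + 2
    return 0;
}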
- ALOG_ASSERT(minFrames <= cblk->frameCount_); - } + minFrames = desiredFrames; } - if ((track->framesReady() >= minFrames) && track->isReady() && + // It's not safe to call framesReady() for a static buffer track, so assume it's ready + size_t framesReady; + if (track->sharedBuffer() == 0) { + framesReady = track->framesReady(); + } else if (track->isStopped()) { + framesReady = 0; + } else { + framesReady = 1; + } + if ((framesReady >= minFrames) && track->isReady() && !track->isPaused() && !track->isTerminated()) { ALOGVV("track %d u=%08x, s=%08x [OK] on thread %p", name, cblk->user, cblk->server, @@ -2664,7 +2675,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // read original volumes with volume control float typeVolume = mStreamTypes[track->streamType()].volume; float v = masterVolume * typeVolume; - ServerProxy *proxy = track->mServerProxy; + AudioTrackServerProxy *proxy = track->mAudioTrackServerProxy; uint32_t vlr = proxy->getVolumeLR(); vl = vlr & 0xFFFF; vr = vlr >> 16; @@ -2737,7 +2748,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac AudioMixer::CHANNEL_MASK, (void *)track->channelMask()); // limit track sample rate to 2 x output sample rate, which changes at re-configuration uint32_t maxSampleRate = mSampleRate * 2; - uint32_t reqSampleRate = track->mServerProxy->getSampleRate(); + uint32_t reqSampleRate = track->mAudioTrackServerProxy->getSampleRate(); if (reqSampleRate == 0) { reqSampleRate = mSampleRate; } else if (reqSampleRate > maxSampleRate) { @@ -2768,6 +2779,13 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac mixerStatus = MIXER_TRACKS_READY; } } else { + // only implemented for normal tracks, not fast tracks + if (framesReady < desiredFrames && !track->isStopped() && !track->isPaused()) { + // we missed desiredFrames whatever the actual number of frames missing was + cblk->u.mStreaming.mUnderrunFrames += desiredFrames; + // FIXME also wake futex so that underrun is noticed more quickly + (void) android_atomic_or(CBLK_UNDERRUN, &cblk->flags); + } // clear effect chain input buffer if an active track underruns to avoid sending // previous audio buffer again to effects chain = getEffectChain_l(track->sessionId()); @@ -3170,7 +3188,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep } else { float typeVolume = mStreamTypes[track->streamType()].volume; float v = mMasterVolume * typeVolume; - uint32_t vlr = track->mServerProxy->getVolumeLR(); + uint32_t vlr = track->mAudioTrackServerProxy->getVolumeLR(); float v_clamped = v * (vlr & 0xFFFF); if (v_clamped > MAX_GAIN) { v_clamped = MAX_GAIN; @@ -3696,7 +3714,8 @@ bool AudioFlinger::RecordThread::threadLoop() } buffer.frameCount = mFrameCount; - if (CC_LIKELY(mActiveTrack->getNextBuffer(&buffer) == NO_ERROR)) { + status_t status = mActiveTrack->getNextBuffer(&buffer); + if (CC_LIKELY(status == NO_ERROR)) { readOnce = true; size_t framesOut = buffer.frameCount; if (mResampler == NULL) { diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h index fac7071..55d96fa 100644 --- a/services/audioflinger/TrackBase.h +++ b/services/audioflinger/TrackBase.h @@ -74,7 +74,7 @@ protected: audio_channel_mask_t channelMask() const { return mChannelMask; } - uint32_t sampleRate() const; // FIXME inline after cblk sr moved + virtual uint32_t sampleRate() const { return mSampleRate; } // Return a pointer to the start of a contiguous slice of the track buffer. 
// Parameter 'offset' is the requested start position, expressed in diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 41a763d..bfc197c 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -98,7 +98,7 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( // ALOGD("Creating track with %d buffers @ %d bytes", bufferCount, bufferSize); size_t size = sizeof(audio_track_cblk_t); - size_t bufferSize = frameCount * mFrameSize; + size_t bufferSize = (sharedBuffer == 0 ? roundup(frameCount) : frameCount) * mFrameSize; if (sharedBuffer == 0) { size += bufferSize; } @@ -124,22 +124,16 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( new(mCblk) audio_track_cblk_t(); // clear all buffers mCblk->frameCount_ = frameCount; -// uncomment the following lines to quickly test 32-bit wraparound -// mCblk->user = 0xffff0000; -// mCblk->server = 0xffff0000; -// mCblk->userBase = 0xffff0000; -// mCblk->serverBase = 0xffff0000; if (sharedBuffer == 0) { mBuffer = (char*)mCblk + sizeof(audio_track_cblk_t); memset(mBuffer, 0, bufferSize); - // Force underrun condition to avoid false underrun callback until first data is - // written to buffer (other flags are cleared) - mCblk->flags = CBLK_UNDERRUN; } else { mBuffer = sharedBuffer->pointer(); +#if 0 + mCblk->flags = CBLK_FORCEREADY; // FIXME hack, need to fix the track ready logic +#endif } mBufferEnd = (uint8_t *)mBuffer + bufferSize; - mServerProxy = new ServerProxy(mCblk, mBuffer, frameCount, mFrameSize, isOut); #ifdef TEE_SINK if (mTeeSinkTrackEnabled) { @@ -199,51 +193,17 @@ void AudioFlinger::ThreadBase::TrackBase::releaseBuffer(AudioBufferProvider::Buf } #endif - buffer->raw = NULL; - mStepCount = buffer->frameCount; - // FIXME See note at getNextBuffer() - (void) step(); // ignore return value of step() + ServerProxy::Buffer buf; + buf.mFrameCount = buffer->frameCount; + buf.mRaw = buffer->raw; buffer->frameCount = 0; -} - -bool AudioFlinger::ThreadBase::TrackBase::step() { - bool result = mServerProxy->step(mStepCount); - if (!result) { - ALOGV("stepServer failed acquiring cblk mutex"); - mStepServerFailed = true; - } - return result; + buffer->raw = NULL; + mServerProxy->releaseBuffer(&buf); } void AudioFlinger::ThreadBase::TrackBase::reset() { - audio_track_cblk_t* cblk = this->cblk(); - - cblk->user = 0; - cblk->server = 0; - cblk->userBase = 0; - cblk->serverBase = 0; - mStepServerFailed = false; ALOGV("TrackBase::reset"); -} - -uint32_t AudioFlinger::ThreadBase::TrackBase::sampleRate() const { - return mServerProxy->getSampleRate(); -} - -void* AudioFlinger::ThreadBase::TrackBase::getBuffer(uint32_t offset, uint32_t frames) const { - audio_track_cblk_t* cblk = this->cblk(); - int8_t *bufferStart = (int8_t *)mBuffer + (offset-cblk->serverBase) * mFrameSize; - int8_t *bufferEnd = bufferStart + frames * mFrameSize; - - // Check validity of returned pointer in case the track control block would have been corrupted. - ALOG_ASSERT(!(bufferStart < mBuffer || bufferStart > bufferEnd || bufferEnd > mBufferEnd), - "TrackBase::getBuffer buffer out of range:\n" - " start: %p, end %p , mBuffer %p mBufferEnd %p\n" - " server %u, serverBase %u, user %u, userBase %u, frameSize %u", - bufferStart, bufferEnd, mBuffer, mBufferEnd, - cblk->server, cblk->serverBase, cblk->user, cblk->userBase, mFrameSize); - - return bufferStart; + // FIXME still needed? 
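The TrackBase constructor above now allocates roundup(frameCount) frames for streaming tracks so the proxies can mask indices with a power-of-two capacity, while shared (static) buffers keep their exact size since they are addressed by absolute position instead. A plausible roundup() under that assumption is sketched below: smallest power of two greater than or equal to n; the real helper may differ in how it treats degenerate inputs.

#include <cassert>
#include <cstddef>

// Smallest power of two that is >= n (n == 0 is mapped to 1 here).
size_t roundUpToPowerOfTwo(size_t n) {
    size_t p = 1;
    while (p < n) {
        p <<= 1;
    }
    return p;
}

int main() {
    assert(roundUpToPowerOfTwo(1) == 1);
    assert(roundUpToPowerOfTwo(960) == 1024);     // e.g. a 20 ms buffer at 48 kHz
    assert(roundUpToPowerOfTwo(1024) == 1024);    // already a power of two
    assert(roundUpToPowerOfTwo(1025) == 2048);
    return 0;
}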
} status_t AudioFlinger::ThreadBase::TrackBase::setSyncEvent(const sp& event) @@ -362,9 +322,18 @@ AudioFlinger::PlaybackThread::Track::Track( mFastIndex(-1), mUnderrunCount(0), mCachedVolume(1.0), - mIsInvalid(false) + mIsInvalid(false), + mAudioTrackServerProxy(NULL) { if (mCblk != NULL) { + if (sharedBuffer == 0) { + mAudioTrackServerProxy = new AudioTrackServerProxy(mCblk, mBuffer, frameCount, + mFrameSize); + } else { + mAudioTrackServerProxy = new StaticAudioTrackServerProxy(mCblk, mBuffer, frameCount, + mFrameSize); + } + mServerProxy = mAudioTrackServerProxy; // to avoid leaking a track name, do not allocate one unless there is an mCblk mName = thread->getTrackName_l(channelMask, sessionId); mCblk->mName = mName; @@ -374,6 +343,7 @@ AudioFlinger::PlaybackThread::Track::Track( } // only allocate a fast track index if we were able to allocate a normal track name if (flags & IAudioFlinger::TRACK_FAST) { + mAudioTrackServerProxy->framesReadyIsCalledByMultipleThreads(); ALOG_ASSERT(thread->mFastTrackAvailMask != 0); int i = __builtin_ctz(thread->mFastTrackAvailMask); ALOG_ASSERT(0 < i && i < (int)FastMixerState::kMaxFastTracks); @@ -432,12 +402,12 @@ void AudioFlinger::PlaybackThread::Track::destroy() /*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result) { result.append(" Name Client Type Fmt Chn mask Session StpCnt fCount S F SRate " - "L dB R dB Server User Main buf Aux Buf Flags Underruns\n"); + "L dB R dB Server Main buf Aux Buf Flags Underruns\n"); } void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) { - uint32_t vlr = mServerProxy->getVolumeLR(); + uint32_t vlr = mAudioTrackServerProxy->getVolumeLR(); if (isFastTrack()) { sprintf(buffer, " F %2d", mFastIndex); } else { @@ -496,7 +466,7 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) break; } snprintf(&buffer[7], size-7, " %6d %4u %3u 0x%08x %7u %6u %6u %1c %1d %5u %5.2g %5.2g " - "0x%08x 0x%08x 0x%08x 0x%08x %#5x %9u%c\n", + "0x%08x 0x%08x 0x%08x %#5x %9u%c\n", (mClient == 0) ? getpid_cached : mClient->pid(), mStreamType, mFormat, @@ -506,11 +476,10 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) mFrameCount, stateChar, mFillingUpStatus, - mServerProxy->getSampleRate(), + mAudioTrackServerProxy->getSampleRate(), 20.0 * log10((vlr & 0xFFFF) / 4096.0), 20.0 * log10((vlr >> 16) / 4096.0), mCblk->server, - mCblk->user, (int)mMainBuffer, (int)mAuxBuffer, mCblk->flags, @@ -518,53 +487,27 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) nowInUnderrun); } +uint32_t AudioFlinger::PlaybackThread::Track::sampleRate() const { + return mAudioTrackServerProxy->getSampleRate(); +} + // AudioBufferProvider interface status_t AudioFlinger::PlaybackThread::Track::getNextBuffer( AudioBufferProvider::Buffer* buffer, int64_t pts) { - audio_track_cblk_t* cblk = this->cblk(); - uint32_t framesReady; - uint32_t framesReq = buffer->frameCount; - - // Check if last stepServer failed, try to step now - if (mStepServerFailed) { - // FIXME When called by fast mixer, this takes a mutex with tryLock(). - // Since the fast mixer is higher priority than client callback thread, - // it does not result in priority inversion for client. - // But a non-blocking solution would be preferable to avoid - // fast mixer being unable to tryLock(), and - // to avoid the extra context switches if the client wakes up, - // discovers the mutex is locked, then has to wait for fast mixer to unlock. 
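The Track constructor above picks the proxy implementation from the buffer type (a streaming track gets an AudioTrackServerProxy, a static shared-buffer track gets a StaticAudioTrackServerProxy) and keeps both the derived pointer for playback-specific calls and the base pointer used by generic TrackBase code, so no downcast is needed later. A compact sketch of that selection pattern; the class names mirror the diff but the bodies and values are placeholders.

#include <cstddef>
#include <cstdio>
#include <memory>

// Placeholder interfaces mirroring the split in the diff.
struct ServerProxy {                                    // what generic TrackBase code needs
    virtual ~ServerProxy() {}
    virtual size_t obtainable() const = 0;
};
struct AudioTrackServerProxy : ServerProxy {            // playback-only extras live here
    virtual size_t framesReady() const { return obtainable(); }
    size_t obtainable() const override { return 256; }  // placeholder value
};
struct StaticAudioTrackServerProxy : AudioTrackServerProxy {
    size_t obtainable() const override { return 4096; } // placeholder value
};

struct Track {
    explicit Track(bool hasSharedBuffer) {
        if (hasSharedBuffer) {
            mAudioTrackServerProxy.reset(new StaticAudioTrackServerProxy());
        } else {
            mAudioTrackServerProxy.reset(new AudioTrackServerProxy());
        }
        mServerProxy = mAudioTrackServerProxy.get();    // base view, no downcast needed later
    }
    std::unique_ptr<AudioTrackServerProxy> mAudioTrackServerProxy;  // playback-specific calls
    ServerProxy* mServerProxy = nullptr;                            // generic calls
};

int main() {
    Track streaming(false), fromSharedBuffer(true);
    printf("%zu %zu\n", streaming.mAudioTrackServerProxy->framesReady(),
           fromSharedBuffer.mServerProxy->obtainable());
}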
- if (!step()) goto getNextBuffer_exit; - ALOGV("stepServer recovered"); - mStepServerFailed = false; + ServerProxy::Buffer buf; + size_t desiredFrames = buffer->frameCount; + buf.mFrameCount = desiredFrames; + status_t status = mServerProxy->obtainBuffer(&buf); + buffer->frameCount = buf.mFrameCount; + buffer->raw = buf.mRaw; + if (buf.mFrameCount == 0) { + // only implemented so far for normal tracks, not fast tracks + mCblk->u.mStreaming.mUnderrunFrames += desiredFrames; + // FIXME also wake futex so that underrun is noticed more quickly + (void) android_atomic_or(CBLK_UNDERRUN, &mCblk->flags); } - - // FIXME Same as above - framesReady = mServerProxy->framesReady(); - - if (CC_LIKELY(framesReady)) { - uint32_t s = cblk->server; - uint32_t bufferEnd = cblk->serverBase + mFrameCount; - - bufferEnd = (cblk->loopEnd < bufferEnd) ? cblk->loopEnd : bufferEnd; - if (framesReq > framesReady) { - framesReq = framesReady; - } - if (framesReq > bufferEnd - s) { - framesReq = bufferEnd - s; - } - - buffer->raw = getBuffer(s, framesReq); - buffer->frameCount = framesReq; - return NO_ERROR; - } - -getNextBuffer_exit: - buffer->raw = NULL; - buffer->frameCount = 0; - ALOGV("getNextBuffer() no more data for track %d on thread %p", mName, mThread.unsafe_get()); - return NOT_ENOUGH_DATA; + return status; } // Note that framesReady() takes a mutex on the control block using tryLock(). @@ -576,7 +519,7 @@ getNextBuffer_exit: // the tryLock() could block for up to 1 ms, and a sequence of these could delay fast mixer. // FIXME Replace AudioTrackShared control block implementation by a non-blocking FIFO queue. size_t AudioFlinger::PlaybackThread::Track::framesReady() const { - return mServerProxy->framesReady(); + return mAudioTrackServerProxy->framesReady(); } // Don't call for fast tracks; the framesReady() could result in priority inversion @@ -732,7 +675,6 @@ void AudioFlinger::PlaybackThread::Track::reset() // Force underrun condition to avoid false underrun callback until first data is // written to buffer android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags); - android_atomic_or(CBLK_UNDERRUN, &mCblk->flags); mFillingUpStatus = FS_FILLING; mResetDone = true; if (mState == FLUSHED) { @@ -833,7 +775,7 @@ uint32_t AudioFlinger::PlaybackThread::Track::getVolumeLR() { // called by FastMixer, so not allowed to take any locks, block, or do I/O including logs ALOG_ASSERT(isFastTrack() && (mCblk != NULL)); - uint32_t vlr = mServerProxy->getVolumeLR(); + uint32_t vlr = mAudioTrackServerProxy->getVolumeLR(); uint32_t vl = vlr & 0xFFFF; uint32_t vr = vlr >> 16; // track volumes come from shared memory, so can't be trusted and must be clamped @@ -870,9 +812,12 @@ status_t AudioFlinger::PlaybackThread::Track::setSyncEvent(const sp& void AudioFlinger::PlaybackThread::Track::invalidate() { - // FIXME should use proxy - android_atomic_or(CBLK_INVALID, &mCblk->flags); - mCblk->cv.signal(); + // FIXME should use proxy, and needs work + audio_track_cblk_t* cblk = mCblk; + android_atomic_or(CBLK_INVALID, &cblk->flags); + android_atomic_release_store(0x40000000, &cblk->mFutex); + // client is not in server, so FUTEX_WAKE is needed instead of FUTEX_WAKE_PRIVATE + (void) __futex_syscall3(&cblk->mFutex, FUTEX_WAKE, INT_MAX); mIsInvalid = true; } @@ -1418,6 +1363,8 @@ AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( mClientProxy->setVolumeLR((uint32_t(uint16_t(0x1000)) << 16) | uint16_t(0x1000)); mClientProxy->setSendLevel(0.0); mClientProxy->setSampleRate(sampleRate); + mClientProxy = new AudioTrackClientProxy(mCblk, 
mBuffer, mFrameCount, mFrameSize, + true /*clientInServer*/); } else { ALOGW("Error creating output track on thread %p", playbackThread); } @@ -1498,9 +1445,10 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr if (mOutBuffer.frameCount == 0) { mOutBuffer.frameCount = pInBuffer->frameCount; nsecs_t startTime = systemTime(); - if (obtainBuffer(&mOutBuffer, waitTimeLeftMs) == (status_t)NO_MORE_BUFFERS) { - ALOGV("OutputTrack::write() %p thread %p no more output buffers", this, - mThread.unsafe_get()); + status_t status = obtainBuffer(&mOutBuffer, waitTimeLeftMs); + if (status != NO_ERROR) { + ALOGV("OutputTrack::write() %p thread %p no more output buffers; status %d", this, + mThread.unsafe_get(), status); outputBufferFull = true; break; } @@ -1515,7 +1463,10 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr uint32_t outFrames = pInBuffer->frameCount > mOutBuffer.frameCount ? mOutBuffer.frameCount : pInBuffer->frameCount; memcpy(mOutBuffer.raw, pInBuffer->raw, outFrames * channelCount * sizeof(int16_t)); - mClientProxy->stepUser(outFrames); + Proxy::Buffer buf; + buf.mFrameCount = outFrames; + buf.mRaw = NULL; + mClientProxy->releaseBuffer(&buf); pInBuffer->frameCount -= outFrames; pInBuffer->i16 += outFrames * channelCount; mOutBuffer.frameCount -= outFrames; @@ -1559,8 +1510,10 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr // If no more buffers are pending, fill output track buffer to make sure it is started // by output mixer. if (frames == 0 && mBufferQueue.size() == 0) { - if (mCblk->user < mFrameCount) { - frames = mFrameCount - mCblk->user; + // FIXME borken, replace by getting framesReady() from proxy + size_t user = 0; // was mCblk->user + if (user < mFrameCount) { + frames = mFrameCount - user; pInBuffer = new Buffer; pInBuffer->mBuffer = new int16_t[frames * channelCount]; pInBuffer->frameCount = frames; @@ -1578,46 +1531,17 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr status_t AudioFlinger::PlaybackThread::OutputTrack::obtainBuffer( AudioBufferProvider::Buffer* buffer, uint32_t waitTimeMs) { - audio_track_cblk_t* cblk = mCblk; - uint32_t framesReq = buffer->frameCount; - - ALOGVV("OutputTrack::obtainBuffer user %d, server %d", cblk->user, cblk->server); - buffer->frameCount = 0; - - size_t framesAvail; - { - Mutex::Autolock _l(cblk->lock); - - // read the server count again - while (!(framesAvail = mClientProxy->framesAvailable_l())) { - if (CC_UNLIKELY(!mActive)) { - ALOGV("Not active and NO_MORE_BUFFERS"); - return NO_MORE_BUFFERS; - } - status_t result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); - if (result != NO_ERROR) { - return NO_MORE_BUFFERS; - } - } - } - - if (framesReq > framesAvail) { - framesReq = framesAvail; - } - - uint32_t u = cblk->user; - uint32_t bufferEnd = cblk->userBase + mFrameCount; - - if (framesReq > bufferEnd - u) { - framesReq = bufferEnd - u; - } - - buffer->frameCount = framesReq; - buffer->raw = mClientProxy->buffer(u); - return NO_ERROR; + ClientProxy::Buffer buf; + buf.mFrameCount = buffer->frameCount; + struct timespec timeout; + timeout.tv_sec = waitTimeMs / 1000; + timeout.tv_nsec = (int) (waitTimeMs % 1000) * 1000000; + status_t status = mClientProxy->obtainBuffer(&buf, &timeout); + buffer->frameCount = buf.mFrameCount; + buffer->raw = buf.mRaw; + return status; } - void AudioFlinger::PlaybackThread::OutputTrack::clearBufferQueue() { size_t size = mBufferQueue.size(); @@ -1688,6 
+1612,11 @@ AudioFlinger::RecordThread::RecordTrack::RecordTrack( mOverflow(false) { ALOGV("RecordTrack constructor, size %d", (int)mBufferEnd - (int)mBuffer); + if (mCblk != NULL) { + mAudioRecordServerProxy = new AudioRecordServerProxy(mCblk, mBuffer, frameCount, + mFrameSize); + mServerProxy = mAudioRecordServerProxy; + } } AudioFlinger::RecordThread::RecordTrack::~RecordTrack() @@ -1699,42 +1628,16 @@ AudioFlinger::RecordThread::RecordTrack::~RecordTrack() status_t AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvider::Buffer* buffer, int64_t pts) { - audio_track_cblk_t* cblk = this->cblk(); - uint32_t framesAvail; - uint32_t framesReq = buffer->frameCount; - - // Check if last stepServer failed, try to step now - if (mStepServerFailed) { - if (!step()) { - goto getNextBuffer_exit; - } - ALOGV("stepServer recovered"); - mStepServerFailed = false; + ServerProxy::Buffer buf; + buf.mFrameCount = buffer->frameCount; + status_t status = mServerProxy->obtainBuffer(&buf); + buffer->frameCount = buf.mFrameCount; + buffer->raw = buf.mRaw; + if (buf.mFrameCount == 0) { + // FIXME also wake futex so that overrun is noticed more quickly + (void) android_atomic_or(CBLK_OVERRUN, &mCblk->flags); } - - // FIXME lock is not actually held, so overrun is possible - framesAvail = mServerProxy->framesAvailableIn_l(); - - if (CC_LIKELY(framesAvail)) { - uint32_t s = cblk->server; - uint32_t bufferEnd = cblk->serverBase + mFrameCount; - - if (framesReq > framesAvail) { - framesReq = framesAvail; - } - if (framesReq > bufferEnd - s) { - framesReq = bufferEnd - s; - } - - buffer->raw = getBuffer(s, framesReq); - buffer->frameCount = framesReq; - return NO_ERROR; - } - -getNextBuffer_exit: - buffer->raw = NULL; - buffer->frameCount = 0; - return NOT_ENOUGH_DATA; + return status; } status_t AudioFlinger::RecordThread::RecordTrack::start(AudioSystem::sync_event_t event, @@ -1790,12 +1693,12 @@ void AudioFlinger::RecordThread::RecordTrack::destroy() /*static*/ void AudioFlinger::RecordThread::RecordTrack::appendDumpHeader(String8& result) { - result.append(" Clien Fmt Chn mask Session Step S Serv User FrameCount\n"); + result.append(" Clien Fmt Chn mask Session Step S Serv FrameCount\n"); } void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size) { - snprintf(buffer, size, " %05d %03u 0x%08x %05d %04u %01d %08x %08x %05d\n", + snprintf(buffer, size, " %05d %03u 0x%08x %05d %04u %01d %08x %05d\n", (mClient == 0) ? 
getpid_cached : mClient->pid(), mFormat, mChannelMask, @@ -1803,7 +1706,6 @@ void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size) mStepCount, mState, mCblk->server, - mCblk->user, mFrameCount); } -- cgit v1.1 From 03e6579fc8d853ab6dd6b8ece10ebf4d434b1e17 Mon Sep 17 00:00:00 2001 From: Alex Ray Date: Thu, 13 Jun 2013 11:22:47 -0700 Subject: camera2: accept 4-value active array size Change-Id: Ia320a022b201a938a025efc30a54c05fcfe5b02f --- services/camera/libcameraservice/camera2/Parameters.cpp | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index a567c15..e8f3f50 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -837,10 +837,17 @@ String8 Parameters::get() const { status_t Parameters::buildFastInfo() { camera_metadata_ro_entry_t activeArraySize = - staticInfo(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 2, 2); + staticInfo(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 2, 4); if (!activeArraySize.count) return NO_INIT; - int32_t arrayWidth = activeArraySize.data.i32[0]; - int32_t arrayHeight = activeArraySize.data.i32[1]; + int32_t arrayWidth; + int32_t arrayHeight; + if (activeArraySize.count == 2) { + arrayWidth = activeArraySize.data.i32[0]; + arrayHeight = activeArraySize.data.i32[1]; + } else if (activeArraySize.count == 4) { + arrayWidth = activeArraySize.data.i32[2]; + arrayHeight = activeArraySize.data.i32[3]; + } else return NO_INIT; camera_metadata_ro_entry_t availableFaceDetectModes = staticInfo(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 0, 0, -- cgit v1.1 From e2ffd5b583da9d30d96710b0e8879e90b2b51d30 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 13 Jun 2013 13:47:02 -0700 Subject: AudioRecord must be used as sp<> only Bug: 9423855 Change-Id: I78ba8228c60dff11fb466156bb632c5dda45cdaf --- include/media/AudioRecord.h | 4 +++- include/media/stagefright/AudioSource.h | 2 +- media/libstagefright/AudioSource.cpp | 9 ++------- 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index 81be803..7aa3c24 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -161,7 +161,9 @@ public: /* Terminates the AudioRecord and unregisters it from AudioFlinger. * Also destroys all resources associated with the AudioRecord. */ - ~AudioRecord(); +protected: + virtual ~AudioRecord(); +public: /* Initialize an AudioRecord that was created using the AudioRecord() constructor. * Don't call set() more than once, or after an AudioRecord() constructor that takes parameters. 
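
The change above is the standard RefBase idiom: once ~AudioRecord() is protected, an AudioRecord can no longer live on the stack or be destroyed with a bare delete, so its lifetime is always driven by the strong-pointer reference count (the callers below switch from delete/NULL to clearing an sp<> accordingly). A minimal sketch of the pattern with a made-up class Foo, assuming the usual utils/RefBase.h interface rather than the real AudioRecord declaration:

    #include <utils/RefBase.h>            // android::RefBase, android::sp<>

    class Foo : public android::RefBase {
    public:
        Foo() {}
        void doWork() {}
    protected:
        virtual ~Foo() {}                 // protected: only the ref count may destroy us
    };

    void example() {
        android::sp<Foo> foo = new Foo(); // heap-allocate, hold by strong pointer
        foo->doWork();
        foo.clear();                      // drop the reference; ~Foo() runs when the count hits 0
        // Foo onStack;                   // would not compile: ~Foo() is inaccessible here
        // delete foo.get();              // would not compile for the same reason
    }
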
diff --git a/include/media/stagefright/AudioSource.h b/include/media/stagefright/AudioSource.h index 99f3c3b..4c9aaad 100644 --- a/include/media/stagefright/AudioSource.h +++ b/include/media/stagefright/AudioSource.h @@ -73,7 +73,7 @@ private: Condition mFrameAvailableCondition; Condition mFrameEncodingCompletionCondition; - AudioRecord *mRecord; + sp mRecord; status_t mInitCheck; bool mStarted; int32_t mSampleRate; diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp index 3cf4d5c..bdd842f 100644 --- a/media/libstagefright/AudioSource.cpp +++ b/media/libstagefright/AudioSource.cpp @@ -49,8 +49,7 @@ static void AudioRecordCallbackFunction(int event, void *user, void *info) { AudioSource::AudioSource( audio_source_t inputSource, uint32_t sampleRate, uint32_t channelCount) - : mRecord(NULL), - mStarted(false), + : mStarted(false), mSampleRate(sampleRate), mPrevSampleTimeUs(0), mNumFramesReceived(0), @@ -91,9 +90,6 @@ AudioSource::~AudioSource() { if (mStarted) { reset(); } - - delete mRecord; - mRecord = NULL; } status_t AudioSource::initCheck() const { @@ -122,8 +118,7 @@ status_t AudioSource::start(MetaData *params) { if (err == OK) { mStarted = true; } else { - delete mRecord; - mRecord = NULL; + mRecord.clear(); } -- cgit v1.1 From 2309d1a1ff016a31d9aa68272bcb471e64a26cfa Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 14 Jun 2013 11:58:27 -0700 Subject: The software avc decoder would silently drop output frames if not enough room was available in the output queue at the time they were available. No more. Change-Id: I5957290d40ba31bda7944271ec7f2aa0f1f7043c --- .../libstagefright/codecs/on2/h264dec/SoftAVC.cpp | 65 ++++++++++------------ media/libstagefright/codecs/on2/h264dec/SoftAVC.h | 2 +- 2 files changed, 31 insertions(+), 36 deletions(-) diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp index 3bd9f47..7ddb13c 100644 --- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp +++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp @@ -109,13 +109,21 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) { List &inQueue = getPortQueue(kInputPortIndex); List &outQueue = getPortQueue(kOutputPortIndex); + + if (mHeadersDecoded) { + // Dequeue any already decoded output frames to free up space + // in the output queue. 
+ + drainAllOutputBuffers(false /* eos */); + } + H264SwDecRet ret = H264SWDEC_PIC_RDY; bool portSettingsChanged = false; while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty()) && outQueue.size() == kNumOutputBuffers) { if (mEOSStatus == INPUT_EOS_SEEN) { - drainAllOutputBuffers(); + drainAllOutputBuffers(true /* eos */); return; } @@ -203,15 +211,7 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) { mFirstPictureId = -1; } - while (!outQueue.empty() && - mHeadersDecoded && - H264SwDecNextPicture(mHandle, &decodedPicture, 0) - == H264SWDEC_PIC_RDY) { - - int32_t picId = decodedPicture.picId; - uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture; - drainOneOutputBuffer(picId, data); - } + drainAllOutputBuffers(false /* eos */); } } @@ -272,43 +272,38 @@ void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) { notifyFillBufferDone(outHeader); } -bool SoftAVC::drainAllOutputBuffers() { +void SoftAVC::drainAllOutputBuffers(bool eos) { List &outQueue = getPortQueue(kOutputPortIndex); H264SwDecPicture decodedPicture; + if (mHeadersDecoded) { + while (!outQueue.empty() + && H264SWDEC_PIC_RDY == H264SwDecNextPicture( + mHandle, &decodedPicture, eos /* flush */)) { + int32_t picId = decodedPicture.picId; + uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture; + drainOneOutputBuffer(picId, data); + } + } + + if (!eos) { + return; + } + while (!outQueue.empty()) { BufferInfo *outInfo = *outQueue.begin(); outQueue.erase(outQueue.begin()); OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; - if (mHeadersDecoded && - H264SWDEC_PIC_RDY == - H264SwDecNextPicture(mHandle, &decodedPicture, 1 /* flush */)) { - int32_t picId = decodedPicture.picId; - CHECK(mPicToHeaderMap.indexOfKey(picId) >= 0); - - memcpy(outHeader->pBuffer + outHeader->nOffset, - decodedPicture.pOutputPicture, - mPictureSize); - - OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId); - outHeader->nTimeStamp = header->nTimeStamp; - outHeader->nFlags = header->nFlags; - outHeader->nFilledLen = mPictureSize; - mPicToHeaderMap.removeItem(picId); - delete header; - } else { - outHeader->nTimeStamp = 0; - outHeader->nFilledLen = 0; - outHeader->nFlags = OMX_BUFFERFLAG_EOS; - mEOSStatus = OUTPUT_FRAMES_FLUSHED; - } + outHeader->nTimeStamp = 0; + outHeader->nFilledLen = 0; + outHeader->nFlags = OMX_BUFFERFLAG_EOS; outInfo->mOwnedByUs = false; notifyFillBufferDone(outHeader); - } - return true; + mEOSStatus = OUTPUT_FRAMES_FLUSHED; + } } void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) { diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h index 0ed7ebe..ee69926 100644 --- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h +++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h @@ -72,7 +72,7 @@ private: bool mSignalledError; status_t initDecoder(); - bool drainAllOutputBuffers(); + void drainAllOutputBuffers(bool eos); void drainOneOutputBuffer(int32_t picId, uint8_t *data); void saveFirstOutputBuffer(int32_t pidId, uint8_t *data); bool handleCropRectEvent(const CropParams* crop); -- cgit v1.1 From 72a43b68da48890273508cb1c9d646b7d75fc101 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Mon, 17 Jun 2013 16:14:39 -0700 Subject: Speed up id3v2 unsynchronization Instead of doing many overlapping memmoves, do a single copy pass that skips over the inserted unsynchronization bytes. For some files this reduces parsing time from minutes to milliseconds. 
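
The idea, shown in isolation: one linear pass with separate read and write cursors that drops the stuffed 0x00 after each 0xff, instead of a memmove over the remainder of the buffer for every match. A standalone sketch over a plain byte vector (removeUnsynchronization is a hypothetical helper, not the actual ID3.cpp code, which works in place on mData within each frame):

    #include <cstdint>
    #include <vector>

    // ID3v2 "unsynchronization" inserts a 0x00 after every 0xff; undo it in one pass.
    // write never overtakes read, so copying in place is safe.
    static size_t removeUnsynchronization(std::vector<uint8_t>& buf) {
        size_t write = 0;
        for (size_t read = 0; read < buf.size(); ++read) {
            buf[write++] = buf[read];                                 // keep the current byte
            if (buf[read] == 0xff && read + 1 < buf.size() && buf[read + 1] == 0x00) {
                ++read;                                               // skip the stuffed 0x00
            }
        }
        buf.resize(write);                                            // shrink to the real payload
        return write;
    }

The single pass is O(n) no matter how many 0xff 0x00 pairs occur, whereas shifting the tail of the buffer for each pair is worst-case quadratic in the frame size, which is where the minutes-to-milliseconds difference comes from.
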
b/9463262 Change-Id: I735b7051e77a093d86fb7a3e46209875946225ed --- media/libstagefright/id3/ID3.cpp | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp index 8d3013b..34d671a 100644 --- a/media/libstagefright/id3/ID3.cpp +++ b/media/libstagefright/id3/ID3.cpp @@ -357,17 +357,22 @@ bool ID3::removeUnsynchronizationV2_4(bool iTunesHack) { } if (flags & 2) { - // Unsynchronization added. + // This file has "unsynchronization", so we have to replace occurrences + // of 0xff 0x00 with just 0xff in order to get the real data. + size_t readOffset = offset + 11; + size_t writeOffset = offset + 11; for (size_t i = 0; i + 1 < dataSize; ++i) { - if (mData[offset + 10 + i] == 0xff - && mData[offset + 11 + i] == 0x00) { - memmove(&mData[offset + 11 + i], &mData[offset + 12 + i], - mSize - offset - 12 - i); + if (mData[readOffset - 1] == 0xff + && mData[readOffset] == 0x00) { + ++readOffset; --mSize; --dataSize; } + mData[writeOffset++] = mData[readOffset++]; } + // move the remaining data following this frame + memmove(&mData[writeOffset], &mData[readOffset], oldSize - readOffset); flags &= ~2; } -- cgit v1.1 From 921832327619f7852b16f73a19504702c5a28a31 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 18 Jun 2013 09:39:15 -0700 Subject: mv libcpustats from frameworks/native to frameworks/av OK to lose history Change-Id: Ieca78edc5dfe479dd7ea48fe7e0f3c164356cee3 --- include/cpustats/CentralTendencyStatistics.h | 75 +++++++ include/cpustats/README.txt | 6 + include/cpustats/ThreadCpuUsage.h | 140 +++++++++++++ media/libcpustats/Android.mk | 11 + media/libcpustats/CentralTendencyStatistics.cpp | 81 ++++++++ media/libcpustats/ThreadCpuUsage.cpp | 255 ++++++++++++++++++++++++ 6 files changed, 568 insertions(+) create mode 100644 include/cpustats/CentralTendencyStatistics.h create mode 100644 include/cpustats/README.txt create mode 100644 include/cpustats/ThreadCpuUsage.h create mode 100644 media/libcpustats/Android.mk create mode 100644 media/libcpustats/CentralTendencyStatistics.cpp create mode 100644 media/libcpustats/ThreadCpuUsage.cpp diff --git a/include/cpustats/CentralTendencyStatistics.h b/include/cpustats/CentralTendencyStatistics.h new file mode 100644 index 0000000..21b6981 --- /dev/null +++ b/include/cpustats/CentralTendencyStatistics.h @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _CENTRAL_TENDENCY_STATISTICS_H +#define _CENTRAL_TENDENCY_STATISTICS_H + +#include + +// Not multithread safe +class CentralTendencyStatistics { + +public: + + CentralTendencyStatistics() : + mMean(NAN), mMedian(NAN), mMinimum(INFINITY), mMaximum(-INFINITY), mN(0), mM2(0), + mVariance(NAN), mVarianceKnownForN(0), mStddev(NAN), mStddevKnownForN(0) { } + + ~CentralTendencyStatistics() { } + + // add x to the set of samples + void sample(double x); + + // return the arithmetic mean of all samples so far + double mean() const { return mMean; } + + // return the minimum of all samples so far + double minimum() const { return mMinimum; } + + // return the maximum of all samples so far + double maximum() const { return mMaximum; } + + // return the variance of all samples so far + double variance() const; + + // return the standard deviation of all samples so far + double stddev() const; + + // return the number of samples added so far + unsigned n() const { return mN; } + + // reset the set of samples to be empty + void reset(); + +private: + double mMean; + double mMedian; + double mMinimum; + double mMaximum; + unsigned mN; // number of samples so far + double mM2; + + // cached variance, and n at time of caching + mutable double mVariance; + mutable unsigned mVarianceKnownForN; + + // cached standard deviation, and n at time of caching + mutable double mStddev; + mutable unsigned mStddevKnownForN; + +}; + +#endif // _CENTRAL_TENDENCY_STATISTICS_H diff --git a/include/cpustats/README.txt b/include/cpustats/README.txt new file mode 100644 index 0000000..14439f0 --- /dev/null +++ b/include/cpustats/README.txt @@ -0,0 +1,6 @@ +This is a static library of CPU usage statistics, originally written +for audio but most are not actually specific to audio. + +Requirements to be here: + * should be related to CPU usage statistics + * should be portable to host; avoid Android OS dependencies without a conditional diff --git a/include/cpustats/ThreadCpuUsage.h b/include/cpustats/ThreadCpuUsage.h new file mode 100644 index 0000000..9756844 --- /dev/null +++ b/include/cpustats/ThreadCpuUsage.h @@ -0,0 +1,140 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _THREAD_CPU_USAGE_H +#define _THREAD_CPU_USAGE_H + +#include +#include + +namespace android { + +// Track CPU usage for the current thread. +// Units are in per-thread CPU ns, as reported by +// clock_gettime(CLOCK_THREAD_CPUTIME_ID). Simple usage: for cyclic +// threads where you want to measure the execution time of the whole +// cycle, just call sampleAndEnable() at the start of each cycle. +// For acyclic threads, or for cyclic threads where you want to measure/track +// only part of each cycle, call enable(), disable(), and/or setEnabled() +// to demarcate the region(s) of interest, and then call sample() periodically. 
+// This class is not thread-safe for concurrent calls from multiple threads; +// the methods of this class may only be called by the current thread +// which constructed the object. + +class ThreadCpuUsage +{ + +public: + ThreadCpuUsage() : + mIsEnabled(false), + mWasEverEnabled(false), + mAccumulator(0), + // mPreviousTs + // mMonotonicTs + mMonotonicKnown(false) + { + (void) pthread_once(&sOnceControl, &init); + for (int i = 0; i < sKernelMax; ++i) { + mCurrentkHz[i] = (uint32_t) ~0; // unknown + } + } + + ~ThreadCpuUsage() { } + + // Return whether currently tracking CPU usage by current thread + bool isEnabled() const { return mIsEnabled; } + + // Enable tracking of CPU usage by current thread; + // any CPU used from this point forward will be tracked. + // Returns the previous enabled status. + bool enable() { return setEnabled(true); } + + // Disable tracking of CPU usage by current thread; + // any CPU used from this point forward will be ignored. + // Returns the previous enabled status. + bool disable() { return setEnabled(false); } + + // Set the enabled status and return the previous enabled status. + // This method is intended to be used for safe nested enable/disabling. + bool setEnabled(bool isEnabled); + + // Add a sample point, and also enable tracking if needed. + // If tracking has never been enabled, then this call enables tracking but + // does _not_ add a sample -- it is not possible to add a sample the + // first time because there is no previous point to subtract from. + // Otherwise, if tracking is enabled, + // then adds a sample for tracked CPU ns since the previous + // sample, or since the first call to sampleAndEnable(), enable(), or + // setEnabled(true). If there was a previous sample but tracking is + // now disabled, then adds a sample for the tracked CPU ns accumulated + // up until the most recent disable(), resets this accumulator, and then + // enables tracking. Calling this method rather than enable() followed + // by sample() avoids a race condition for the first sample. + // Returns true if the sample 'ns' is valid, or false if invalid. + // Note that 'ns' is an output parameter passed by reference. + // The caller does not need to initialize this variable. + // The units are CPU nanoseconds consumed by current thread. + bool sampleAndEnable(double& ns); + + // Add a sample point, but do not + // change the tracking enabled status. If tracking has either never been + // enabled, or has never been enabled since the last sample, then log a warning + // and don't add sample. Otherwise, adds a sample for tracked CPU ns since + // the previous sample or since the first call to sampleAndEnable(), + // enable(), or setEnabled(true) if no previous sample. + // Returns true if the sample is valid, or false if invalid. + // Note that 'ns' is an output parameter passed by reference. + // The caller does not need to initialize this variable. + // The units are CPU nanoseconds consumed by current thread. + bool sample(double& ns); + + // Return the elapsed delta wall clock ns since initial enable or reset, + // as reported by clock_gettime(CLOCK_MONOTONIC). + long long elapsed() const; + + // Reset elapsed wall clock. Has no effect on tracking or accumulator. + void resetElapsed(); + + // Return current clock frequency for specified CPU, in kHz. + // You can get your CPU number using sched_getcpu(2). Note that, unless CPU affinity + // has been configured appropriately, the CPU number can change. 
+ // Also note that, unless the CPU governor has been configured appropriately, + // the CPU frequency can change. And even if the CPU frequency is locked down + // to a particular value, that the frequency might still be adjusted + // to prevent thermal overload. Therefore you should poll for your thread's + // current CPU number and clock frequency periodically. + uint32_t getCpukHz(int cpuNum); + +private: + bool mIsEnabled; // whether tracking is currently enabled + bool mWasEverEnabled; // whether tracking was ever enabled + long long mAccumulator; // accumulated thread CPU time since last sample, in ns + struct timespec mPreviousTs; // most recent thread CPU time, valid only if mIsEnabled is true + struct timespec mMonotonicTs; // most recent monotonic time + bool mMonotonicKnown; // whether mMonotonicTs has been set + + static const int MAX_CPU = 8; + static int sScalingFds[MAX_CPU];// file descriptor per CPU for reading scaling_cur_freq + uint32_t mCurrentkHz[MAX_CPU]; // current CPU frequency in kHz, not static to avoid a race + static pthread_once_t sOnceControl; + static int sKernelMax; // like MAX_CPU, but determined at runtime == cpu/kernel_max + 1 + static void init(); // called once at first ThreadCpuUsage construction + static pthread_mutex_t sMutex; // protects sScalingFds[] after initialization +}; + +} // namespace android + +#endif // _THREAD_CPU_USAGE_H diff --git a/media/libcpustats/Android.mk b/media/libcpustats/Android.mk new file mode 100644 index 0000000..b506353 --- /dev/null +++ b/media/libcpustats/Android.mk @@ -0,0 +1,11 @@ +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + CentralTendencyStatistics.cpp \ + ThreadCpuUsage.cpp + +LOCAL_MODULE := libcpustats + +include $(BUILD_STATIC_LIBRARY) diff --git a/media/libcpustats/CentralTendencyStatistics.cpp b/media/libcpustats/CentralTendencyStatistics.cpp new file mode 100644 index 0000000..42ab62b --- /dev/null +++ b/media/libcpustats/CentralTendencyStatistics.cpp @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include + +void CentralTendencyStatistics::sample(double x) +{ + // update min and max + if (x < mMinimum) + mMinimum = x; + if (x > mMaximum) + mMaximum = x; + // Knuth + if (mN == 0) { + mMean = 0; + } + ++mN; + double delta = x - mMean; + mMean += delta / mN; + mM2 += delta * (x - mMean); +} + +void CentralTendencyStatistics::reset() +{ + mMean = NAN; + mMedian = NAN; + mMinimum = INFINITY; + mMaximum = -INFINITY; + mN = 0; + mM2 = 0; + mVariance = NAN; + mVarianceKnownForN = 0; + mStddev = NAN; + mStddevKnownForN = 0; +} + +double CentralTendencyStatistics::variance() const +{ + double variance; + if (mVarianceKnownForN != mN) { + if (mN > 1) { + // double variance_n = M2/n; + variance = mM2 / (mN - 1); + } else { + variance = NAN; + } + mVariance = variance; + mVarianceKnownForN = mN; + } else { + variance = mVariance; + } + return variance; +} + +double CentralTendencyStatistics::stddev() const +{ + double stddev; + if (mStddevKnownForN != mN) { + stddev = sqrt(variance()); + mStddev = stddev; + mStddevKnownForN = mN; + } else { + stddev = mStddev; + } + return stddev; +} diff --git a/media/libcpustats/ThreadCpuUsage.cpp b/media/libcpustats/ThreadCpuUsage.cpp new file mode 100644 index 0000000..637402a --- /dev/null +++ b/media/libcpustats/ThreadCpuUsage.cpp @@ -0,0 +1,255 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "ThreadCpuUsage" +//#define LOG_NDEBUG 0 + +#include +#include +#include + +#include +#include + +#include + +namespace android { + +bool ThreadCpuUsage::setEnabled(bool isEnabled) +{ + bool wasEnabled = mIsEnabled; + // only do something if there is a change + if (isEnabled != wasEnabled) { + ALOGV("setEnabled(%d)", isEnabled); + int rc; + // enabling + if (isEnabled) { + rc = clock_gettime(CLOCK_THREAD_CPUTIME_ID, &mPreviousTs); + if (rc) { + ALOGE("clock_gettime(CLOCK_THREAD_CPUTIME_ID) errno=%d", errno); + isEnabled = false; + } else { + mWasEverEnabled = true; + // record wall clock time at first enable + if (!mMonotonicKnown) { + rc = clock_gettime(CLOCK_MONOTONIC, &mMonotonicTs); + if (rc) { + ALOGE("clock_gettime(CLOCK_MONOTONIC) errno=%d", errno); + } else { + mMonotonicKnown = true; + } + } + } + // disabling + } else { + struct timespec ts; + rc = clock_gettime(CLOCK_THREAD_CPUTIME_ID, &ts); + if (rc) { + ALOGE("clock_gettime(CLOCK_THREAD_CPUTIME_ID) errno=%d", errno); + } else { + long long delta = (ts.tv_sec - mPreviousTs.tv_sec) * 1000000000LL + + (ts.tv_nsec - mPreviousTs.tv_nsec); + mAccumulator += delta; +#if 0 + mPreviousTs = ts; +#endif + } + } + mIsEnabled = isEnabled; + } + return wasEnabled; +} + +bool ThreadCpuUsage::sampleAndEnable(double& ns) +{ + bool ret; + bool wasEverEnabled = mWasEverEnabled; + if (enable()) { + // already enabled, so add a new sample relative to previous + return sample(ns); + } else if (wasEverEnabled) { + // was disabled, but add sample for accumulated time while enabled + ns = (double) mAccumulator; + mAccumulator = 0; + ALOGV("sampleAndEnable %.0f", ns); + return true; + } else { + // first time called + ns = 0.0; + ALOGV("sampleAndEnable false"); + return false; + } +} + +bool ThreadCpuUsage::sample(double &ns) +{ + if (mWasEverEnabled) { + if (mIsEnabled) { + struct timespec ts; + int rc; + rc = clock_gettime(CLOCK_THREAD_CPUTIME_ID, &ts); + if (rc) { + ALOGE("clock_gettime(CLOCK_THREAD_CPUTIME_ID) errno=%d", errno); + ns = 0.0; + return false; + } else { + long long delta = (ts.tv_sec - mPreviousTs.tv_sec) * 1000000000LL + + (ts.tv_nsec - mPreviousTs.tv_nsec); + mAccumulator += delta; + mPreviousTs = ts; + } + } else { + mWasEverEnabled = false; + } + ns = (double) mAccumulator; + ALOGV("sample %.0f", ns); + mAccumulator = 0; + return true; + } else { + ALOGW("Can't add sample because measurements have never been enabled"); + ns = 0.0; + return false; + } +} + +long long ThreadCpuUsage::elapsed() const +{ + long long elapsed; + if (mMonotonicKnown) { + struct timespec ts; + int rc; + rc = clock_gettime(CLOCK_MONOTONIC, &ts); + if (rc) { + ALOGE("clock_gettime(CLOCK_MONOTONIC) errno=%d", errno); + elapsed = 0; + } else { + // mMonotonicTs is updated only at first enable and resetStatistics + elapsed = (ts.tv_sec - mMonotonicTs.tv_sec) * 1000000000LL + + (ts.tv_nsec - mMonotonicTs.tv_nsec); + } + } else { + ALOGW("Can't compute elapsed time because measurements have never been enabled"); + elapsed = 0; + } + ALOGV("elapsed %lld", elapsed); + return elapsed; +} + +void ThreadCpuUsage::resetElapsed() +{ + ALOGV("resetElapsed"); + if (mMonotonicKnown) { + int rc; + rc = clock_gettime(CLOCK_MONOTONIC, &mMonotonicTs); + if (rc) { + ALOGE("clock_gettime(CLOCK_MONOTONIC) errno=%d", errno); + mMonotonicKnown = false; + } + } +} + +/*static*/ +int ThreadCpuUsage::sScalingFds[ThreadCpuUsage::MAX_CPU]; +pthread_once_t ThreadCpuUsage::sOnceControl = PTHREAD_ONCE_INIT; +int ThreadCpuUsage::sKernelMax; +pthread_mutex_t 
ThreadCpuUsage::sMutex = PTHREAD_MUTEX_INITIALIZER; + +/*static*/ +void ThreadCpuUsage::init() +{ + // read the number of CPUs + sKernelMax = 1; + int fd = open("/sys/devices/system/cpu/kernel_max", O_RDONLY); + if (fd >= 0) { +#define KERNEL_MAX_SIZE 12 + char kernelMax[KERNEL_MAX_SIZE]; + ssize_t actual = read(fd, kernelMax, sizeof(kernelMax)); + if (actual >= 2 && kernelMax[actual-1] == '\n') { + sKernelMax = atoi(kernelMax); + if (sKernelMax >= MAX_CPU - 1) { + ALOGW("kernel_max %d but MAX_CPU %d", sKernelMax, MAX_CPU); + sKernelMax = MAX_CPU; + } else if (sKernelMax < 0) { + ALOGW("kernel_max invalid %d", sKernelMax); + sKernelMax = 1; + } else { + ++sKernelMax; + ALOGV("number of CPUs %d", sKernelMax); + } + } else { + ALOGW("Can't read number of CPUs"); + } + (void) close(fd); + } else { + ALOGW("Can't open number of CPUs"); + } + int i; + for (i = 0; i < MAX_CPU; ++i) { + sScalingFds[i] = -1; + } +} + +uint32_t ThreadCpuUsage::getCpukHz(int cpuNum) +{ + if (cpuNum < 0 || cpuNum >= MAX_CPU) { + ALOGW("getCpukHz called with invalid CPU %d", cpuNum); + return 0; + } + // double-checked locking idiom is not broken for atomic values such as fd + int fd = sScalingFds[cpuNum]; + if (fd < 0) { + // some kernels can't open a scaling file until hot plug complete + pthread_mutex_lock(&sMutex); + fd = sScalingFds[cpuNum]; + if (fd < 0) { +#define FREQ_SIZE 64 + char freq_path[FREQ_SIZE]; +#define FREQ_DIGIT 27 + COMPILE_TIME_ASSERT_FUNCTION_SCOPE(MAX_CPU <= 10); +#define FREQ_PATH "/sys/devices/system/cpu/cpu?/cpufreq/scaling_cur_freq" + strlcpy(freq_path, FREQ_PATH, sizeof(freq_path)); + freq_path[FREQ_DIGIT] = cpuNum + '0'; + fd = open(freq_path, O_RDONLY | O_CLOEXEC); + // keep this fd until process exit or exec + sScalingFds[cpuNum] = fd; + } + pthread_mutex_unlock(&sMutex); + if (fd < 0) { + ALOGW("getCpukHz can't open CPU %d", cpuNum); + return 0; + } + } +#define KHZ_SIZE 12 + char kHz[KHZ_SIZE]; // kHz base 10 + ssize_t actual = pread(fd, kHz, sizeof(kHz), (off_t) 0); + uint32_t ret; + if (actual >= 2 && kHz[actual-1] == '\n') { + ret = atoi(kHz); + } else { + ret = 0; + } + if (ret != mCurrentkHz[cpuNum]) { + if (ret > 0) { + ALOGV("CPU %d frequency %u kHz", cpuNum, ret); + } else { + ALOGW("Can't read CPU %d frequency", cpuNum); + } + mCurrentkHz[cpuNum] = ret; + } + return ret; +} + +} // namespace android -- cgit v1.1 From e8fdbae47bf5793124f244c684ed597ebb78d3f5 Mon Sep 17 00:00:00 2001 From: Alex Ray Date: Thu, 20 Jun 2013 17:55:00 -0700 Subject: camera2/3: Warn on missing active array size parameters Change-Id: I752659e1f0522392b902839cd7f4d997dcd85966 --- services/camera/libcameraservice/camera2/Parameters.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp index e8f3f50..0459866 100644 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ b/services/camera/libcameraservice/camera2/Parameters.cpp @@ -842,6 +842,8 @@ status_t Parameters::buildFastInfo() { int32_t arrayWidth; int32_t arrayHeight; if (activeArraySize.count == 2) { + ALOGW("%s: Camera %d: activeArraySize is missing xmin/ymin!", + __FUNCTION__, cameraId); arrayWidth = activeArraySize.data.i32[0]; arrayHeight = activeArraySize.data.i32[1]; } else if (activeArraySize.count == 4) { -- cgit v1.1 From e7ee7637747371635a85fedd24d2190bb1f38651 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 11 Jun 2013 18:10:18 -0700 Subject: Initial implementation of 
android.hardware.photography.CameraDevice (service) * Verified preview streaming requests * Other things *should* work but unverified / unimplemented in client side Missing: * CameraService needs to return static camera info metadata Bug: 9213377 Change-Id: I71568560fcf18d0e2b408ed1c4d0066647314868 --- camera/Android.mk | 3 + camera/CameraMetadata.cpp | 175 +++++++ camera/ICameraService.cpp | 36 +- camera/IProCameraCallbacks.cpp | 9 +- camera/IProCameraUser.cpp | 120 +---- camera/photography/CaptureRequest.cpp | 124 +++++ camera/photography/ICameraDeviceCallbacks.cpp | 110 +++++ camera/photography/ICameraDeviceUser.cpp | 307 ++++++++++++ include/camera/CameraMetadata.h | 32 ++ include/camera/ICameraService.h | 9 + include/camera/photography/CaptureRequest.h | 42 ++ .../camera/photography/ICameraDeviceCallbacks.h | 61 +++ include/camera/photography/ICameraDeviceUser.h | 80 ++++ services/camera/libcameraservice/Android.mk | 1 + .../camera/libcameraservice/Camera2ClientBase.cpp | 3 + .../camera/libcameraservice/Camera2ClientBase.h | 4 + services/camera/libcameraservice/CameraService.cpp | 125 ++++- services/camera/libcameraservice/CameraService.h | 32 +- .../photography/CameraDeviceClient.cpp | 517 +++++++++++++++++++++ .../photography/CameraDeviceClient.h | 141 ++++++ 20 files changed, 1786 insertions(+), 145 deletions(-) create mode 100644 camera/photography/CaptureRequest.cpp create mode 100644 camera/photography/ICameraDeviceCallbacks.cpp create mode 100644 camera/photography/ICameraDeviceUser.cpp create mode 100644 include/camera/photography/CaptureRequest.h create mode 100644 include/camera/photography/ICameraDeviceCallbacks.h create mode 100644 include/camera/photography/ICameraDeviceUser.h create mode 100644 services/camera/libcameraservice/photography/CameraDeviceClient.cpp create mode 100644 services/camera/libcameraservice/photography/CameraDeviceClient.h diff --git a/camera/Android.mk b/camera/Android.mk index fa518ff..8f58f87 100644 --- a/camera/Android.mk +++ b/camera/Android.mk @@ -16,6 +16,9 @@ LOCAL_SRC_FILES:= \ ICameraRecordingProxyListener.cpp \ IProCameraUser.cpp \ IProCameraCallbacks.cpp \ + photography/ICameraDeviceUser.cpp \ + photography/ICameraDeviceCallbacks.cpp \ + photography/CaptureRequest.cpp \ ProCamera.cpp \ CameraBase.cpp \ diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp index a8f9eff..f447c5b 100644 --- a/camera/CameraMetadata.cpp +++ b/camera/CameraMetadata.cpp @@ -21,9 +21,13 @@ #include #include +#include namespace android { +typedef Parcel::WritableBlob WritableBlob; +typedef Parcel::ReadableBlob ReadableBlob; + CameraMetadata::CameraMetadata() : mBuffer(NULL), mLocked(false) { } @@ -408,4 +412,175 @@ status_t CameraMetadata::resizeIfNeeded(size_t extraEntries, size_t extraData) { return OK; } +status_t CameraMetadata::readFromParcel(const Parcel& data, + camera_metadata_t** out) { + + status_t err = OK; + + camera_metadata_t* metadata = NULL; + + if (out) { + *out = NULL; + } + + // arg0 = metadataSize (int32) + int32_t metadataSizeTmp = -1; + if ((err = data.readInt32(&metadataSizeTmp)) != OK) { + ALOGE("%s: Failed to read metadata size (error %d %s)", + __FUNCTION__, err, strerror(-err)); + return err; + } + const size_t metadataSize = static_cast(metadataSizeTmp); + + if (metadataSize == 0) { + ALOGV("%s: Read 0-sized metadata", __FUNCTION__); + return OK; + } + + // NOTE: this doesn't make sense to me. shouldnt the blob + // know how big it is? why do we have to specify the size + // to Parcel::readBlob ? 
+ + ReadableBlob blob; + // arg1 = metadata (blob) + do { + if ((err = data.readBlob(metadataSize, &blob)) != OK) { + ALOGE("%s: Failed to read metadata blob (sized %d). Possible " + " serialization bug. Error %d %s", + __FUNCTION__, metadataSize, err, strerror(-err)); + break; + } + const camera_metadata_t* tmp = + reinterpret_cast(blob.data()); + + metadata = allocate_copy_camera_metadata_checked(tmp, metadataSize); + if (metadata == NULL) { + // We consider that allocation only fails if the validation + // also failed, therefore the readFromParcel was a failure. + err = BAD_VALUE; + } + } while(0); + blob.release(); + + if (out) { + ALOGV("%s: Set out metadata to %p", __FUNCTION__, metadata); + *out = metadata; + } else if (metadata != NULL) { + ALOGV("%s: Freed camera metadata at %p", __FUNCTION__, metadata); + free_camera_metadata(metadata); + } + + return err; +} + +status_t CameraMetadata::writeToParcel(Parcel& data, + const camera_metadata_t* metadata) { + status_t res = OK; + + // arg0 = metadataSize (int32) + + if (metadata == NULL) { + return data.writeInt32(0); + } + + const size_t metadataSize = get_camera_metadata_compact_size(metadata); + res = data.writeInt32(static_cast(metadataSize)); + if (res != OK) { + return res; + } + + // arg1 = metadata (blob) + WritableBlob blob; + do { + res = data.writeBlob(metadataSize, &blob); + if (res != OK) { + break; + } + copy_camera_metadata(blob.data(), metadataSize, metadata); + + IF_ALOGV() { + if (validate_camera_metadata_structure( + (const camera_metadata_t*)blob.data(), + &metadataSize) != OK) { + ALOGV("%s: Failed to validate metadata %p after writing blob", + __FUNCTION__, blob.data()); + } else { + ALOGV("%s: Metadata written to blob. Validation success", + __FUNCTION__); + } + } + + // Not too big of a problem since receiving side does hard validation + // Don't check the size since the compact size could be larger + if (validate_camera_metadata_structure(metadata, /*size*/NULL) != OK) { + ALOGW("%s: Failed to validate metadata %p before writing blob", + __FUNCTION__, metadata); + } + + } while(false); + blob.release(); + + return res; +} + +status_t CameraMetadata::readFromParcel(Parcel *parcel) { + + ALOGV("%s: parcel = %p", __FUNCTION__, parcel); + + status_t res = OK; + + if (parcel == NULL) { + ALOGE("%s: parcel is null", __FUNCTION__); + return BAD_VALUE; + } + + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + + camera_metadata *buffer = NULL; + // TODO: reading should return a status code, in case validation fails + res = CameraMetadata::readFromParcel(*parcel, &buffer); + + if (res != NO_ERROR) { + ALOGE("%s: Failed to read from parcel. 
Metadata is unchanged.", + __FUNCTION__); + return res; + } + + clear(); + mBuffer = buffer; + + return OK; +} + +status_t CameraMetadata::writeToParcel(Parcel *parcel) const { + + ALOGV("%s: parcel = %p", __FUNCTION__, parcel); + + if (parcel == NULL) { + ALOGE("%s: parcel is null", __FUNCTION__); + return BAD_VALUE; + } + + return CameraMetadata::writeToParcel(*parcel, mBuffer); +} + +void CameraMetadata::swap(CameraMetadata& other) { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return; + } else if (other.mLocked) { + ALOGE("%s: Other CameraMetadata is locked", __FUNCTION__); + return; + } + + camera_metadata* thisBuf = mBuffer; + camera_metadata* otherBuf = other.mBuffer; + + other.mBuffer = thisBuf; + mBuffer = otherBuf; +} + }; // namespace android diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp index 819e410..068fb0f 100644 --- a/camera/ICameraService.cpp +++ b/camera/ICameraService.cpp @@ -31,6 +31,8 @@ #include #include #include +#include +#include namespace android { @@ -117,7 +119,7 @@ public: return result; } - // connect to camera service + // connect to camera service (android.hardware.Camera) virtual sp connect(const sp& cameraClient, int cameraId, const String16 &clientPackageName, int clientUid) { @@ -149,6 +151,25 @@ public: return interface_cast(reply.readStrongBinder()); } + // connect to camera service (android.hardware.photography.CameraDevice) + virtual sp connect( + const sp& cameraCb, + int cameraId, + const String16& clientPackageName, + int clientUid) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); + data.writeStrongBinder(cameraCb->asBinder()); + data.writeInt32(cameraId); + data.writeString16(clientPackageName); + data.writeInt32(clientUid); + remote()->transact(BnCameraService::CONNECT_DEVICE, data, &reply); + + if (readExceptionCode(reply)) return NULL; + return interface_cast(reply.readStrongBinder()); + } + virtual status_t addListener(const sp& listener) { Parcel data, reply; @@ -226,6 +247,19 @@ status_t BnCameraService::onTransact( reply->writeStrongBinder(camera->asBinder()); return NO_ERROR; } break; + case CONNECT_DEVICE: { + CHECK_INTERFACE(ICameraService, data, reply); + sp cameraClient = + interface_cast(data.readStrongBinder()); + int32_t cameraId = data.readInt32(); + const String16 clientName = data.readString16(); + int32_t clientUid = data.readInt32(); + sp camera = connect(cameraClient, cameraId, + clientName, clientUid); + reply->writeNoException(); + reply->writeStrongBinder(camera->asBinder()); + return NO_ERROR; + } break; case ADD_LISTENER: { CHECK_INTERFACE(ICameraService, data, reply); sp listener = diff --git a/camera/IProCameraCallbacks.cpp b/camera/IProCameraCallbacks.cpp index b9cd14d..0fdb85a 100644 --- a/camera/IProCameraCallbacks.cpp +++ b/camera/IProCameraCallbacks.cpp @@ -28,7 +28,7 @@ #include -#include +#include "camera/CameraMetadata.h" namespace android { @@ -38,9 +38,6 @@ enum { RESULT_RECEIVED, }; -void readMetadata(const Parcel& data, camera_metadata_t** out); -void writeMetadata(Parcel& data, camera_metadata_t* metadata); - class BpProCameraCallbacks: public BpInterface { public: @@ -75,7 +72,7 @@ public: Parcel data, reply; data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor()); data.writeInt32(frameId); - writeMetadata(data, result); + CameraMetadata::writeToParcel(data, result); remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY); } }; @@ -112,7 +109,7 @@ status_t 
BnProCameraCallbacks::onTransact( CHECK_INTERFACE(IProCameraCallbacks, data, reply); int32_t frameId = data.readInt32(); camera_metadata_t *result = NULL; - readMetadata(data, &result); + CameraMetadata::readFromParcel(data, &result); onResultReceived(frameId, result); return NO_ERROR; break; diff --git a/camera/IProCameraUser.cpp b/camera/IProCameraUser.cpp index 015cb5c..8f22124 100644 --- a/camera/IProCameraUser.cpp +++ b/camera/IProCameraUser.cpp @@ -15,7 +15,7 @@ ** limitations under the License. */ -//#define LOG_NDEBUG 0 +// #define LOG_NDEBUG 0 #define LOG_TAG "IProCameraUser" #include #include @@ -24,13 +24,10 @@ #include #include #include -#include +#include "camera/CameraMetadata.h" namespace android { -typedef Parcel::WritableBlob WritableBlob; -typedef Parcel::ReadableBlob ReadableBlob; - enum { DISCONNECT = IBinder::FIRST_CALL_TRANSACTION, CONNECT, @@ -46,107 +43,6 @@ enum { GET_CAMERA_INFO, }; -/** - * Caller becomes the owner of the new metadata - * 'const Parcel' doesnt prevent us from calling the read functions. - * which is interesting since it changes the internal state - * - * NULL can be returned when no metadata was sent, OR if there was an issue - * unpacking the serialized data (i.e. bad parcel or invalid structure). - */ -void readMetadata(const Parcel& data, camera_metadata_t** out) { - - status_t err = OK; - - camera_metadata_t* metadata = NULL; - - if (out) { - *out = NULL; - } - - // arg0 = metadataSize (int32) - int32_t metadataSizeTmp = -1; - if ((err = data.readInt32(&metadataSizeTmp)) != OK) { - ALOGE("%s: Failed to read metadata size (error %d %s)", - __FUNCTION__, err, strerror(-err)); - return; - } - const size_t metadataSize = static_cast(metadataSizeTmp); - - if (metadataSize == 0) { - return; - } - - // NOTE: this doesn't make sense to me. shouldnt the blob - // know how big it is? why do we have to specify the size - // to Parcel::readBlob ? - - ReadableBlob blob; - // arg1 = metadata (blob) - do { - if ((err = data.readBlob(metadataSize, &blob)) != OK) { - ALOGE("%s: Failed to read metadata blob (sized %d). Possible " - " serialization bug. Error %d %s", - __FUNCTION__, metadataSize, err, strerror(-err)); - break; - } - const camera_metadata_t* tmp = - reinterpret_cast(blob.data()); - - metadata = allocate_copy_camera_metadata_checked(tmp, metadataSize); - } while(0); - blob.release(); - - if (out) { - *out = metadata; - } else if (metadata != NULL) { - free_camera_metadata(metadata); - } -} - -/** - * Caller retains ownership of metadata - * - Write 2 (int32 + blob) args in the current position - */ -void writeMetadata(Parcel& data, camera_metadata_t* metadata) { - // arg0 = metadataSize (int32) - - if (metadata == NULL) { - data.writeInt32(0); - return; - } - - const size_t metadataSize = get_camera_metadata_compact_size(metadata); - data.writeInt32(static_cast(metadataSize)); - - // arg1 = metadata (blob) - WritableBlob blob; - { - data.writeBlob(metadataSize, &blob); - copy_camera_metadata(blob.data(), metadataSize, metadata); - - IF_ALOGV() { - if (validate_camera_metadata_structure( - (const camera_metadata_t*)blob.data(), - &metadataSize) != OK) { - ALOGV("%s: Failed to validate metadata %p after writing blob", - __FUNCTION__, blob.data()); - } else { - ALOGV("%s: Metadata written to blob. 
Validation success", - __FUNCTION__); - } - } - - // Not too big of a problem since receiving side does hard validation - if (validate_camera_metadata_structure(metadata, &metadataSize) != OK) { - ALOGW("%s: Failed to validate metadata %p before writing blob", - __FUNCTION__, metadata); - } - - } - blob.release(); -} - class BpProCameraUser: public BpInterface { public: @@ -214,7 +110,7 @@ public: data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); // arg0+arg1 - writeMetadata(data, metadata); + CameraMetadata::writeToParcel(data, metadata); // arg2 = streaming (bool) data.writeInt32(streaming); @@ -275,7 +171,7 @@ public: data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); data.writeInt32(templateId); remote()->transact(CREATE_DEFAULT_REQUEST, data, &reply); - readMetadata(reply, /*out*/request); + CameraMetadata::readFromParcel(reply, /*out*/request); return reply.readInt32(); } @@ -286,7 +182,7 @@ public: data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); data.writeInt32(cameraId); remote()->transact(GET_CAMERA_INFO, data, &reply); - readMetadata(reply, /*out*/info); + CameraMetadata::readFromParcel(reply, /*out*/info); return reply.readInt32(); } @@ -343,7 +239,7 @@ status_t BnProCameraUser::onTransact( case SUBMIT_REQUEST: { CHECK_INTERFACE(IProCameraUser, data, reply); camera_metadata_t* metadata; - readMetadata(data, /*out*/&metadata); + CameraMetadata::readFromParcel(data, /*out*/&metadata); // arg2 = streaming (bool) bool streaming = data.readInt32(); @@ -395,7 +291,7 @@ status_t BnProCameraUser::onTransact( status_t ret; ret = createDefaultRequest(templateId, &request); - writeMetadata(*reply, request); + CameraMetadata::writeToParcel(*reply, request); reply->writeInt32(ret); free_camera_metadata(request); @@ -411,7 +307,7 @@ status_t BnProCameraUser::onTransact( status_t ret; ret = getCameraInfo(cameraId, &info); - writeMetadata(*reply, info); + CameraMetadata::writeToParcel(*reply, info); reply->writeInt32(ret); free_camera_metadata(info); diff --git a/camera/photography/CaptureRequest.cpp b/camera/photography/CaptureRequest.cpp new file mode 100644 index 0000000..b822fc9 --- /dev/null +++ b/camera/photography/CaptureRequest.cpp @@ -0,0 +1,124 @@ +/* +** +** Copyright 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +// #define LOG_NDEBUG 0 +#define LOG_TAG "CameraRequest" +#include + +#include + +#include +#include + +namespace android { + +status_t CaptureRequest::readFromParcel(Parcel* parcel) { + if (parcel == NULL) { + ALOGE("%s: Null parcel", __FUNCTION__); + return BAD_VALUE; + } + + mMetadata.clear(); + mSurfaceList.clear(); + + status_t err; + + if ((err = mMetadata.readFromParcel(parcel)) != OK) { + ALOGE("%s: Failed to read metadata from parcel", __FUNCTION__); + return err; + } + ALOGV("%s: Read metadata from parcel", __FUNCTION__); + + int32_t size; + if ((err = parcel->readInt32(&size)) != OK) { + ALOGE("%s: Failed to read surface list size from parcel", __FUNCTION__); + return err; + } + ALOGV("%s: Read surface list size = %d", __FUNCTION__, size); + + // Do not distinguish null arrays from 0-sized arrays. + for (int i = 0; i < size; ++i) { + // Parcel.writeParcelableArray + size_t len; + const char16_t* className = parcel->readString16Inplace(&len); + ALOGV("%s: Read surface class = %s", __FUNCTION__, + className != NULL ? String8(className).string() : ""); + + if (className == NULL) { + continue; + } + + // Surface.writeToParcel + String16 name = parcel->readString16(); + ALOGV("%s: Read surface name = %s", + __FUNCTION__, String8(name).string()); + sp binder(parcel->readStrongBinder()); + ALOGV("%s: Read surface binder = %p", + __FUNCTION__, binder.get()); + + sp surface; + + if (binder != NULL) { + sp gbp = + interface_cast(binder); + surface = new Surface(gbp); + } + + mSurfaceList.push_back(surface); + } + + return OK; +} + +status_t CaptureRequest::writeToParcel(Parcel* parcel) const { + if (parcel == NULL) { + ALOGE("%s: Null parcel", __FUNCTION__); + return BAD_VALUE; + } + + status_t err; + + if ((err = mMetadata.writeToParcel(parcel)) != OK) { + return err; + } + + int32_t size = static_cast(mSurfaceList.size()); + + // Send 0-sized arrays when it's empty. Do not send null arrays. + parcel->writeInt32(size); + + for (int32_t i = 0; i < size; ++i) { + sp surface = mSurfaceList[i]; + + sp binder; + if (surface != 0) { + binder = surface->getIGraphicBufferProducer()->asBinder(); + } + + // not sure if readParcelableArray does this, hard to tell from source + parcel->writeString16(String16("android.view.Surface")); + + // Surface.writeToParcel + parcel->writeString16(String16("unknown_name")); + // Surface.nativeWriteToParcel + parcel->writeStrongBinder(binder); + } + + return OK; +} + +}; // namespace android diff --git a/camera/photography/ICameraDeviceCallbacks.cpp b/camera/photography/ICameraDeviceCallbacks.cpp new file mode 100644 index 0000000..19763d7 --- /dev/null +++ b/camera/photography/ICameraDeviceCallbacks.cpp @@ -0,0 +1,110 @@ +/* +** +** Copyright 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "ICameraDeviceCallbacks" +#include +#include +#include + +#include +#include +#include +#include + +#include +#include "camera/CameraMetadata.h" + +namespace android { + +enum { + NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION, + RESULT_RECEIVED, +}; + +class BpCameraDeviceCallbacks: public BpInterface +{ +public: + BpCameraDeviceCallbacks(const sp& impl) + : BpInterface(impl) + { + } + + // generic callback from camera service to app + void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) + { + ALOGV("notifyCallback"); + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor()); + data.writeInt32(msgType); + data.writeInt32(ext1); + data.writeInt32(ext2); + remote()->transact(NOTIFY_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY); + data.writeNoException(); + } + + void onResultReceived(int32_t frameId, const CameraMetadata& result) { + ALOGV("onResultReceived"); + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor()); + data.writeInt32(frameId); + result.writeToParcel(&data); + remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY); + data.writeNoException(); + } +}; + +IMPLEMENT_META_INTERFACE(CameraDeviceCallbacks, + "android.hardware.photography.ICameraDeviceCallbacks"); + +// ---------------------------------------------------------------------- + +status_t BnCameraDeviceCallbacks::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + ALOGV("onTransact - code = %d", code); + switch(code) { + case NOTIFY_CALLBACK: { + ALOGV("NOTIFY_CALLBACK"); + CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply); + int32_t msgType = data.readInt32(); + int32_t ext1 = data.readInt32(); + int32_t ext2 = data.readInt32(); + notifyCallback(msgType, ext1, ext2); + data.readExceptionCode(); + return NO_ERROR; + } break; + case RESULT_RECEIVED: { + ALOGV("RESULT_RECEIVED"); + CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply); + int32_t frameId = data.readInt32(); + CameraMetadata result; + result.readFromParcel(const_cast(&data)); + onResultReceived(frameId, result); + data.readExceptionCode(); + return NO_ERROR; + break; + } + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android + diff --git a/camera/photography/ICameraDeviceUser.cpp b/camera/photography/ICameraDeviceUser.cpp new file mode 100644 index 0000000..0515bd7 --- /dev/null +++ b/camera/photography/ICameraDeviceUser.cpp @@ -0,0 +1,307 @@ +/* +** +** Copyright 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +// #define LOG_NDEBUG 0 +#define LOG_TAG "ICameraDeviceUser" +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { + +typedef Parcel::WritableBlob WritableBlob; +typedef Parcel::ReadableBlob ReadableBlob; + +enum { + DISCONNECT = IBinder::FIRST_CALL_TRANSACTION, + SUBMIT_REQUEST, + CANCEL_REQUEST, + DELETE_STREAM, + CREATE_STREAM, + CREATE_DEFAULT_REQUEST, + GET_CAMERA_INFO, +}; + +class BpCameraDeviceUser : public BpInterface +{ +public: + BpCameraDeviceUser(const sp& impl) + : BpInterface(impl) + { + } + + // disconnect from camera service + void disconnect() + { + ALOGV("disconnect"); + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + remote()->transact(DISCONNECT, data, &reply); + reply.readExceptionCode(); + } + + virtual int submitRequest(sp request, bool streaming) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + + // arg0 = CaptureRequest + if (request != 0) { + data.writeInt32(1); + request->writeToParcel(&data); + } else { + data.writeInt32(0); + } + + // arg1 = streaming (bool) + data.writeInt32(streaming); + + remote()->transact(SUBMIT_REQUEST, data, &reply); + + reply.readExceptionCode(); + return reply.readInt32(); + } + + virtual status_t cancelRequest(int requestId) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + data.writeInt32(requestId); + + remote()->transact(CANCEL_REQUEST, data, &reply); + + reply.readExceptionCode(); + return reply.readInt32(); + } + + virtual status_t deleteStream(int streamId) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + data.writeInt32(streamId); + + remote()->transact(DELETE_STREAM, data, &reply); + + reply.readExceptionCode(); + return reply.readInt32(); + } + + virtual status_t createStream(int width, int height, int format, + const sp& bufferProducer) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + data.writeInt32(width); + data.writeInt32(height); + data.writeInt32(format); + + data.writeInt32(1); // marker that bufferProducer is not null + data.writeString16(String16("unknown_name")); // name of surface + sp b(bufferProducer->asBinder()); + data.writeStrongBinder(b); + + remote()->transact(CREATE_STREAM, data, &reply); + + reply.readExceptionCode(); + return reply.readInt32(); + } + + // Create a request object from a template. 
+ virtual status_t createDefaultRequest(int templateId, + /*out*/ + CameraMetadata* request) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + data.writeInt32(templateId); + remote()->transact(CREATE_DEFAULT_REQUEST, data, &reply); + + reply.readExceptionCode(); + status_t result = reply.readInt32(); + + CameraMetadata out; + if (reply.readInt32() != 0) { + out.readFromParcel(&reply); + } + + if (request != NULL) { + request->swap(out); + } + return result; + } + + + virtual status_t getCameraInfo(int cameraId, camera_metadata** info) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + data.writeInt32(cameraId); + remote()->transact(GET_CAMERA_INFO, data, &reply); + + + reply.readExceptionCode(); + status_t result = reply.readInt32(); + + if (reply.readInt32() != 0) { + CameraMetadata::readFromParcel(reply, /*out*/info); + } else if (info) { + *info = NULL; + } + + return result; + } + + +private: + + +}; + +IMPLEMENT_META_INTERFACE(CameraDeviceUser, + "android.hardware.photography.ICameraDeviceUser"); + +// ---------------------------------------------------------------------- + +status_t BnCameraDeviceUser::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch(code) { + case DISCONNECT: { + ALOGV("DISCONNECT"); + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + disconnect(); + reply->writeNoException(); + return NO_ERROR; + } break; + case SUBMIT_REQUEST: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + + // arg0 = request + sp request; + if (data.readInt32() != 0) { + request = new CaptureRequest(); + request->readFromParcel(const_cast(&data)); + } + + // arg1 = streaming (bool) + bool streaming = data.readInt32(); + + // return code: requestId (int32) + reply->writeNoException(); + reply->writeInt32(submitRequest(request, streaming)); + + return NO_ERROR; + } break; + case CANCEL_REQUEST: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + int requestId = data.readInt32(); + reply->writeNoException(); + reply->writeInt32(cancelRequest(requestId)); + return NO_ERROR; + } break; + case DELETE_STREAM: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + int streamId = data.readInt32(); + reply->writeNoException(); + reply->writeInt32(deleteStream(streamId)); + return NO_ERROR; + } break; + case CREATE_STREAM: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + int width, height, format; + + width = data.readInt32(); + ALOGV("%s: CREATE_STREAM: width = %d", __FUNCTION__, width); + height = data.readInt32(); + ALOGV("%s: CREATE_STREAM: height = %d", __FUNCTION__, height); + format = data.readInt32(); + ALOGV("%s: CREATE_STREAM: format = %d", __FUNCTION__, format); + + sp bp; + if (data.readInt32() != 0) { + String16 name = data.readString16(); + bp = interface_cast( + data.readStrongBinder()); + + ALOGV("%s: CREATE_STREAM: bp = %p, name = %s", __FUNCTION__, + bp.get(), String8(name).string()); + } else { + ALOGV("%s: CREATE_STREAM: bp = unset, name = unset", + __FUNCTION__); + } + + status_t ret; + ret = createStream(width, height, format, bp); + + reply->writeNoException(); + ALOGV("%s: CREATE_STREAM: write noException", __FUNCTION__); + reply->writeInt32(ret); + ALOGV("%s: CREATE_STREAM: write ret = %d", __FUNCTION__, ret); + + return NO_ERROR; + } break; + + case CREATE_DEFAULT_REQUEST: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + + int templateId = data.readInt32(); + + CameraMetadata request; + status_t ret; + ret = 
createDefaultRequest(templateId, &request); + + reply->writeNoException(); + reply->writeInt32(ret); + + reply->writeInt32(1); // to mark presence of metadata object + request.writeToParcel(const_cast(reply)); + + return NO_ERROR; + } break; + case GET_CAMERA_INFO: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + + int cameraId = data.readInt32(); + + camera_metadata_t* info = NULL; + status_t ret; + ret = getCameraInfo(cameraId, &info); + + reply->writeInt32(1); // to mark presence of metadata object + CameraMetadata::writeToParcel(*reply, info); + + reply->writeNoException(); + reply->writeInt32(ret); + + free_camera_metadata(info); + + return NO_ERROR; + } break; + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/include/camera/CameraMetadata.h b/include/camera/CameraMetadata.h index 8eeb2e7..fe2bd19 100644 --- a/include/camera/CameraMetadata.h +++ b/include/camera/CameraMetadata.h @@ -22,6 +22,7 @@ #include namespace android { +class Parcel; /** * A convenience wrapper around the C-based camera_metadata_t library. @@ -159,6 +160,12 @@ class CameraMetadata { status_t erase(uint32_t tag); /** + * Swap the underlying camera metadata between this and the other + * metadata object. + */ + void swap(CameraMetadata &other); + + /** * Dump contents into FD for debugging. The verbosity levels are * 0: Tag entry information only, no data values * 1: Level 0 plus at most 16 data values per entry @@ -169,6 +176,31 @@ class CameraMetadata { */ void dump(int fd, int verbosity = 1, int indentation = 0) const; + /** + * Serialization over Binder + */ + + // Metadata object is unchanged when reading from parcel fails. + status_t readFromParcel(Parcel *parcel); + status_t writeToParcel(Parcel *parcel) const; + + /** + * Caller becomes the owner of the new metadata + * 'const Parcel' doesnt prevent us from calling the read functions. + * which is interesting since it changes the internal state + * + * NULL can be returned when no metadata was sent, OR if there was an issue + * unpacking the serialized data (i.e. bad parcel or invalid structure). 
+ */ + static status_t readFromParcel(const Parcel &parcel, + camera_metadata_t** out); + /** + * Caller retains ownership of metadata + * - Write 2 (int32 + blob) args in the current position + */ + static status_t writeToParcel(Parcel &parcel, + const camera_metadata_t* metadata); + private: camera_metadata_t *mBuffer; bool mLocked; diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h index 3c2e60a..fa715b7 100644 --- a/include/camera/ICameraService.h +++ b/include/camera/ICameraService.h @@ -28,6 +28,8 @@ class ICameraClient; class IProCameraUser; class IProCameraCallbacks; class ICameraServiceListener; +class ICameraDeviceUser; +class ICameraDeviceCallbacks; class ICameraService : public IInterface { @@ -40,6 +42,7 @@ public: GET_CAMERA_INFO, CONNECT, CONNECT_PRO, + CONNECT_DEVICE, ADD_LISTENER, REMOVE_LISTENER, }; @@ -77,6 +80,12 @@ public: int cameraId, const String16& clientPackageName, int clientUid) = 0; + + virtual sp connect( + const sp& cameraCb, + int cameraId, + const String16& clientPackageName, + int clientUid) = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/camera/photography/CaptureRequest.h b/include/camera/photography/CaptureRequest.h new file mode 100644 index 0000000..e56d61f --- /dev/null +++ b/include/camera/photography/CaptureRequest.h @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_PHOTOGRAPHY_CAPTUREREQUEST_H +#define ANDROID_HARDWARE_PHOTOGRAPHY_CAPTUREREQUEST_H + +#include +#include +#include + +namespace android { + +class Surface; + +struct CaptureRequest : public virtual RefBase { +public: + + CameraMetadata mMetadata; + Vector > mSurfaceList; + + /** + * Keep impl up-to-date with CaptureRequest.java in frameworks/base + */ + status_t readFromParcel(Parcel* parcel); + status_t writeToParcel(Parcel* parcel) const; +}; +}; // namespace android + +#endif diff --git a/include/camera/photography/ICameraDeviceCallbacks.h b/include/camera/photography/ICameraDeviceCallbacks.h new file mode 100644 index 0000000..041fa65 --- /dev/null +++ b/include/camera/photography/ICameraDeviceCallbacks.h @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
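
The "int32 + blob" convention described above can be pictured with a short sketch. This is not the patch's CameraMetadata.cpp, just one plausible way to write that layout using the camera_metadata C helpers; the real implementation may differ in detail:

    // Sketch of the static writeToParcel() layout: a size, then a blob.
    status_t writeMetadataBlob(Parcel& parcel, const camera_metadata_t* metadata) {
        if (metadata == NULL) {
            // No metadata: a zero size and no blob at all.
            return parcel.writeInt32(0);
        }
        size_t size = get_camera_metadata_compact_size(metadata);
        status_t err = parcel.writeInt32(size);
        if (err != OK) return err;
        Parcel::WritableBlob blob;
        err = parcel.writeBlob(size, &blob);
        if (err != OK) return err;
        copy_camera_metadata(blob.data(), size, metadata);  // flatten into the blob
        blob.release();
        return OK;
    }
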
+ */ + +#ifndef ANDROID_HARDWARE_PHOTOGRAPHY_CALLBACKS_H +#define ANDROID_HARDWARE_PHOTOGRAPHY_CALLBACKS_H + +#include +#include +#include +#include +#include +#include + +namespace android { +class CameraMetadata; + +class ICameraDeviceCallbacks : public IInterface +{ + /** + * Keep up-to-date with ICameraDeviceCallbacks.aidl in frameworks/base + */ +public: + DECLARE_META_INTERFACE(CameraDeviceCallbacks); + + // One way + virtual void notifyCallback(int32_t msgType, + int32_t ext1, + int32_t ext2) = 0; + + // One way + virtual void onResultReceived(int32_t frameId, + const CameraMetadata& result) = 0; +}; + +// ---------------------------------------------------------------------------- + +class BnCameraDeviceCallbacks : public BnInterface +{ +public: + virtual status_t onTransact( uint32_t code, + const Parcel& data, + Parcel* reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif diff --git a/include/camera/photography/ICameraDeviceUser.h b/include/camera/photography/ICameraDeviceUser.h new file mode 100644 index 0000000..1b8d666 --- /dev/null +++ b/include/camera/photography/ICameraDeviceUser.h @@ -0,0 +1,80 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_PHOTOGRAPHY_ICAMERADEVICEUSER_H +#define ANDROID_HARDWARE_PHOTOGRAPHY_ICAMERADEVICEUSER_H + +#include +#include + +struct camera_metadata; + +namespace android { + +class ICameraDeviceUserClient; +class IGraphicBufferProducer; +class Surface; +class CaptureRequest; +class CameraMetadata; + +class ICameraDeviceUser : public IInterface +{ + /** + * Keep up-to-date with ICameraDeviceUser.aidl in frameworks/base + */ +public: + DECLARE_META_INTERFACE(CameraDeviceUser); + + virtual void disconnect() = 0; + + /** + * Request Handling + **/ + + virtual int submitRequest(sp request, + bool streaming = false) = 0; + virtual status_t cancelRequest(int requestId) = 0; + + virtual status_t deleteStream(int streamId) = 0; + virtual status_t createStream( + int width, int height, int format, + const sp& bufferProducer) = 0; + + // Create a request object from a template. 
+ virtual status_t createDefaultRequest(int templateId, + /*out*/ + CameraMetadata* request) = 0; + // Get static camera metadata + virtual status_t getCameraInfo(int cameraId, + /*out*/ + camera_metadata** info) = 0; + +}; + +// ---------------------------------------------------------------------------- + +class BnCameraDeviceUser: public BnInterface +{ +public: + virtual status_t onTransact( uint32_t code, + const Parcel& data, + Parcel* reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index 83d9ccd..0eead1e 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -31,6 +31,7 @@ LOCAL_SRC_FILES:= \ camera3/Camera3InputStream.cpp \ camera3/Camera3OutputStream.cpp \ camera3/Camera3ZslStream.cpp \ + photography/CameraDeviceClient.cpp \ gui/RingBufferConsumer.cpp \ LOCAL_SHARED_LIBRARIES:= \ diff --git a/services/camera/libcameraservice/Camera2ClientBase.cpp b/services/camera/libcameraservice/Camera2ClientBase.cpp index 0623b89..5e4832c 100644 --- a/services/camera/libcameraservice/Camera2ClientBase.cpp +++ b/services/camera/libcameraservice/Camera2ClientBase.cpp @@ -28,6 +28,8 @@ #include "Camera2ClientBase.h" #include "camera2/ProFrameProcessor.h" +#include "photography/CameraDeviceClient.h" + #include "Camera2Device.h" namespace android { @@ -325,5 +327,6 @@ void Camera2ClientBase::SharedCameraCallbacks::clear() { template class Camera2ClientBase; template class Camera2ClientBase; +template class Camera2ClientBase; } // namespace android diff --git a/services/camera/libcameraservice/Camera2ClientBase.h b/services/camera/libcameraservice/Camera2ClientBase.h index 9001efb..c9a24d7 100644 --- a/services/camera/libcameraservice/Camera2ClientBase.h +++ b/services/camera/libcameraservice/Camera2ClientBase.h @@ -101,6 +101,10 @@ public: protected: + virtual sp asBinderWrapper() { + return IInterface::asBinder(); + } + virtual status_t dumpDevice(int fd, const Vector& args); /** Binder client interface-related private members */ diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 757a781..1b2204e 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -41,6 +41,7 @@ #include "CameraClient.h" #include "Camera2Client.h" #include "ProCamera2Client.h" +#include "photography/CameraDeviceClient.h" namespace android { @@ -164,7 +165,7 @@ void CameraService::onDeviceStatusChanged(int cameraId, Mutex::Autolock al(mServiceLock); /* Find all clients that we need to disconnect */ - sp client = mClient[cameraId].promote(); + sp client = mClient[cameraId].promote(); if (client.get() != NULL) { clientsToDisconnect.push_back(client); } @@ -313,14 +314,14 @@ bool CameraService::validateConnect(int cameraId, bool CameraService::canConnectUnsafe(int cameraId, const String16& clientPackageName, const sp& remoteCallback, - sp &client) { + sp &client) { String8 clientName8(clientPackageName); int callingPid = getCallingPid(); if (mClient[cameraId] != 0) { client = mClient[cameraId].promote(); if (client != 0) { - if (remoteCallback == client->getRemoteCallback()->asBinder()) { + if (remoteCallback == client->getRemote()) { LOG1("CameraService::connect X (pid %d) (the same client)", callingPid); return true; @@ -370,16 +371,17 @@ sp CameraService::connect( return NULL; } - sp client; + sp client; { 
Mutex::Autolock lock(mServiceLock); + sp clientTmp; if (!canConnectUnsafe(cameraId, clientPackageName, cameraClient->asBinder(), - /*out*/client)) { + /*out*/clientTmp)) { return NULL; } else if (client.get() != NULL) { - return client; + return static_cast(clientTmp.get()); } int facing = -1; @@ -415,7 +417,8 @@ sp CameraService::connect( return NULL; } - if (!connectFinishUnsafe(client, client->asBinder())) { + if (!connectFinishUnsafe(client, + client->getRemote())) { // this is probably not recoverable.. maybe the client can try again // OK: we can only get here if we were originally in PRESENT state updateStatus(ICameraServiceListener::STATUS_PRESENT, cameraId); @@ -434,12 +437,12 @@ sp CameraService::connect( } bool CameraService::connectFinishUnsafe(const sp& client, - const sp& clientBinder) { + const sp& remoteCallback) { if (client->initialize(mModule) != OK) { return false; } - clientBinder->linkToDeath(this); + remoteCallback->linkToDeath(this); return true; } @@ -464,7 +467,7 @@ sp CameraService::connect( { Mutex::Autolock lock(mServiceLock); { - sp client; + sp client; if (!canConnectUnsafe(cameraId, clientPackageName, cameraCb->asBinder(), /*out*/client)) { @@ -494,7 +497,7 @@ sp CameraService::connect( return NULL; } - if (!connectFinishUnsafe(client, client->asBinder())) { + if (!connectFinishUnsafe(client, client->getRemote())) { return NULL; } @@ -509,6 +512,88 @@ sp CameraService::connect( return client; } +sp CameraService::connect( + const sp& cameraCb, + int cameraId, + const String16& clientPackageName, + int clientUid) +{ + // TODO: this function needs to return status_t + // so that we have an error code when things go wrong and the client is NULL + + String8 clientName8(clientPackageName); + int callingPid = getCallingPid(); + + LOG1("CameraService::connectDevice E (pid %d \"%s\", id %d)", callingPid, + clientName8.string(), cameraId); + + if (!validateConnect(cameraId, /*inout*/clientUid)) { + return NULL; + } + + sp client; + { + Mutex::Autolock lock(mServiceLock); + { + sp client; + if (!canConnectUnsafe(cameraId, clientPackageName, + cameraCb->asBinder(), + /*out*/client)) { + return NULL; + } + } + + int facing = -1; + int deviceVersion = getDeviceVersion(cameraId, &facing); + + // If there are other non-exclusive users of the camera, + // this will tear them down before we can reuse the camera + if (isValidCameraId(cameraId)) { + // transition from PRESENT -> NOT_AVAILABLE + updateStatus(ICameraServiceListener::STATUS_NOT_AVAILABLE, + cameraId); + } + + switch(deviceVersion) { + case CAMERA_DEVICE_API_VERSION_1_0: + ALOGE("Camera id %d uses old HAL, doesn't support CameraDevice", + cameraId); + return NULL; + break; + // TODO: don't allow 2.0 Only allow 2.1 and higher + case CAMERA_DEVICE_API_VERSION_2_0: + case CAMERA_DEVICE_API_VERSION_2_1: + case CAMERA_DEVICE_API_VERSION_3_0: + client = new CameraDeviceClient(this, cameraCb, String16(), + cameraId, facing, callingPid, USE_CALLING_UID, getpid()); + break; + case -1: + ALOGE("Invalid camera id %d", cameraId); + return NULL; + default: + ALOGE("Unknown camera device HAL version: %d", deviceVersion); + return NULL; + } + + if (!connectFinishUnsafe(client, client->getRemote())) { + // this is probably not recoverable.. 
maybe the client can try again + // OK: we can only get here if we were originally in PRESENT state + updateStatus(ICameraServiceListener::STATUS_PRESENT, cameraId); + return NULL; + } + + LOG1("CameraService::connectDevice X (id %d, this pid is %d)", cameraId, + getpid()); + + mClient[cameraId] = client; + } + // important: release the mutex here so the client can call back + // into the service from its destructor (can be at the end of the call) + + return client; +} + + status_t CameraService::addListener( const sp& listener) { ALOGV("%s: Add listener %p", __FUNCTION__, listener.get()); @@ -566,14 +651,14 @@ void CameraService::removeClientByRemote(const wp& remoteBinder) { Mutex::Autolock lock(mServiceLock); int outIndex; - sp client = findClientUnsafe(remoteBinder, outIndex); + sp client = findClientUnsafe(remoteBinder, outIndex); if (client != 0) { // Found our camera, clear and leave. LOG1("removeClient: clear camera %d", outIndex); mClient[outIndex].clear(); - client->unlinkToDeath(this); + client->getRemote()->unlinkToDeath(this); } else { sp clientPro = findProClientUnsafe(remoteBinder); @@ -620,9 +705,9 @@ sp CameraService::findProClientUnsafe( return clientPro; } -sp CameraService::findClientUnsafe( +sp CameraService::findClientUnsafe( const wp& cameraClient, int& outIndex) { - sp client; + sp client; for (int i = 0; i < mNumberOfCameras; i++) { @@ -640,7 +725,7 @@ sp CameraService::findClientUnsafe( continue; } - if (cameraClient == client->getRemoteCallback()->asBinder()) { + if (cameraClient == client->getRemote()) { // Found our camera outIndex = i; return client; @@ -651,7 +736,7 @@ sp CameraService::findClientUnsafe( return NULL; } -CameraService::Client* CameraService::getClientByIdUnsafe(int cameraId) { +CameraService::BasicClient* CameraService::getClientByIdUnsafe(int cameraId) { if (cameraId < 0 || cameraId >= mNumberOfCameras) return NULL; return mClient[cameraId].unsafe_get(); } @@ -906,7 +991,9 @@ Mutex* CameraService::Client::getClientLockFromCookie(void* user) { // Provide client pointer for callbacks. Client lock returned from getClientLockFromCookie should // be acquired for this to be safe CameraService::Client* CameraService::Client::getClientFromCookie(void* user) { - Client* client = gCameraService->getClientByIdUnsafe((int) user); + BasicClient *basicClient = gCameraService->getClientByIdUnsafe((int) user); + // OK: only CameraClient calls this, and they already cast anyway. 
+ Client* client = static_cast(basicClient); // This could happen if the Client is in the process of shutting down (the // last strong reference is gone, but the destructor hasn't finished @@ -1058,7 +1145,7 @@ status_t CameraService::dump(int fd, const Vector& args) { } } - sp client = mClient[i].promote(); + sp client = mClient[i].promote(); if (client == 0) { result = String8::format(" Device is closed, no client instance\n"); write(fd, result.string(), result.size()); diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index eaa316a..cab804e 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -29,6 +29,8 @@ #include #include #include +#include +#include #include @@ -74,6 +76,11 @@ public: const String16& clientPackageName, int clientUid); virtual sp connect(const sp& cameraCb, int cameraId, const String16& clientPackageName, int clientUid); + virtual sp connect( + const sp& cameraCb, + int cameraId, + const String16& clientPackageName, + int clientUid); virtual status_t addListener(const sp& listener); virtual status_t removeListener( @@ -105,7 +112,7 @@ public: // returns plain pointer of client. Note that mClientLock should be acquired to // prevent the client from destruction. The result can be NULL. - virtual Client* getClientByIdUnsafe(int cameraId); + virtual BasicClient* getClientByIdUnsafe(int cameraId); virtual Mutex* getClientLockById(int cameraId); class BasicClient : public virtual RefBase { @@ -114,11 +121,17 @@ public: virtual void disconnect() = 0; + // because we can't virtually inherit IInterface, which breaks + // virtual inheritance + virtual sp asBinderWrapper() = 0; + // Return the remote callback binder object (e.g. IProCameraCallbacks) - wp getRemote() { + sp getRemote() { return mRemoteBinder; } + virtual status_t dump(int fd, const Vector& args) = 0; + protected: BasicClient(const sp& cameraService, const sp& remoteCallback, @@ -147,7 +160,7 @@ public: pid_t mServicePid; // immutable after constructor // - The app-side Binder interface to receive callbacks from us - wp mRemoteBinder; // immutable after constructor + sp mRemoteBinder; // immutable after constructor // permissions management status_t startCameraOps(); @@ -223,6 +236,10 @@ public: return mRemoteCallback; } + virtual sp asBinderWrapper() { + return asBinder(); + } + protected: static Mutex* getClientLockFromCookie(void* user); // convert client from cookie. Client lock should be acquired before getting Client. 
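
With the generalization to BasicClient in place, the new connect() overload earlier in this file is what hands an ICameraDeviceUser to a native caller. An illustrative end-to-end sketch; error handling is omitted, 'surface' is an existing sp<Surface>, MyDeviceListener is the listener sketch shown with ICameraDeviceCallbacks.cpp, and the uid value and template id are assumptions:

    sp<ICameraService> cs = interface_cast<ICameraService>(
            defaultServiceManager()->getService(String16("media.camera")));

    sp<MyDeviceListener> cb = new MyDeviceListener();
    sp<ICameraDeviceUser> dev = cs->connect(cb, /*cameraId*/ 0,
            String16("com.example.native"), /*clientUid*/ -1 /* assumed: use calling uid */);

    // One output stream; the service queries width/height/format from the
    // surface itself, so the values passed here are placeholders.
    int streamId = dev->createStream(0, 0, 0, surface->getIGraphicBufferProducer());

    // Build and submit a repeating request that targets that surface.
    sp<CaptureRequest> req = new CaptureRequest();
    dev->createDefaultRequest(/*templateId, e.g. preview*/ 1, &req->mMetadata);
    req->mSurfaceList.push_back(surface);
    int requestId = dev->submitRequest(req, /*streaming*/ true);
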
@@ -296,16 +313,17 @@ private: const String16& clientPackageName, const sp& remoteCallback, /*out*/ - sp &client); + sp &client); // When connection is successful, initialize client and track its death bool connectFinishUnsafe(const sp& client, - const sp& clientBinder); + const sp& remoteCallback); virtual sp getClientByRemote(const wp& cameraClient); Mutex mServiceLock; - wp mClient[MAX_CAMERAS]; // protected by mServiceLock + // either a Client or CameraDeviceClient + wp mClient[MAX_CAMERAS]; // protected by mServiceLock Mutex mClientLock[MAX_CAMERAS]; // prevent Client destruction inside callbacks int mNumberOfCameras; @@ -313,7 +331,7 @@ private: Vector mProClientList[MAX_CAMERAS]; // needs to be called with mServiceLock held - sp findClientUnsafe(const wp& cameraClient, int& outIndex); + sp findClientUnsafe(const wp& cameraClient, int& outIndex); sp findProClientUnsafe( const wp& cameraCallbacksRemote); diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp new file mode 100644 index 0000000..3209a56 --- /dev/null +++ b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp @@ -0,0 +1,517 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CameraDeviceClient" +#define ATRACE_TAG ATRACE_TAG_CAMERA +// #define LOG_NDEBUG 0 + +#include +#include + +#include +#include +#include "camera2/Parameters.h" +#include "CameraDeviceClient.h" +#include "camera2/ProFrameProcessor.h" +#include "CameraDeviceBase.h" +#include + +namespace android { +using namespace camera2; + +CameraDeviceClientBase::CameraDeviceClientBase( + const sp& cameraService, + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid) : + BasicClient(cameraService, remoteCallback->asBinder(), clientPackageName, + cameraId, cameraFacing, clientPid, clientUid, servicePid), + mRemoteCallback(remoteCallback) { +} +void CameraDeviceClientBase::notifyError() { + // Thread safe. Don't bother locking. 
+ sp remoteCb = mRemoteCallback; + + if (remoteCb != 0) { + remoteCb->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0); + } +} + +// Interface used by CameraService + +CameraDeviceClient::CameraDeviceClient(const sp& cameraService, + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid) : + Camera2ClientBase(cameraService, remoteCallback, clientPackageName, + cameraId, cameraFacing, clientPid, clientUid, servicePid), + mRequestIdCounter(0) { + + ATRACE_CALL(); + ALOGI("CameraDeviceClient %d: Opened", cameraId); +} + +status_t CameraDeviceClient::initialize(camera_module_t *module) +{ + ATRACE_CALL(); + status_t res; + + res = Camera2ClientBase::initialize(module); + if (res != OK) { + return res; + } + + String8 threadName; + mFrameProcessor = new ProFrameProcessor(mDevice); + threadName = String8::format("CDU-%d-FrameProc", mCameraId); + mFrameProcessor->run(threadName.string()); + + mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID, + FRAME_PROCESSOR_LISTENER_MAX_ID, + /*listener*/this); + + return OK; +} + +CameraDeviceClient::~CameraDeviceClient() { +} + +status_t CameraDeviceClient::submitRequest(sp request, + bool streaming) { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + status_t res; + + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + if (request == 0) { + ALOGE("%s: Camera %d: Sent null request. Rejecting request.", + __FUNCTION__, mCameraId); + return BAD_VALUE; + } + + CameraMetadata metadata(request->mMetadata); + + if (metadata.isEmpty()) { + ALOGE("%s: Camera %d: Sent empty metadata packet. Rejecting request.", + __FUNCTION__, mCameraId); + return BAD_VALUE; + } else if (request->mSurfaceList.size() == 0) { + ALOGE("%s: Camera %d: Requests must have at least one surface target. 
" + "Rejecting request.", __FUNCTION__, mCameraId); + return BAD_VALUE; + } + + if (!enforceRequestPermissions(metadata)) { + // Callee logs + return PERMISSION_DENIED; + } + + /** + * Write in the output stream IDs which we calculate from + * the capture request's list of surface targets + */ + Vector outputStreamIds; + outputStreamIds.setCapacity(request->mSurfaceList.size()); + for (size_t i = 0; i < request->mSurfaceList.size(); ++i) { + sp surface = request->mSurfaceList[i]; + + if (surface == 0) continue; + + sp gbp = surface->getIGraphicBufferProducer(); + int idx = mStreamMap.indexOfKey(gbp->asBinder()); + + // Trying to submit request with surface that wasn't created + if (idx == NAME_NOT_FOUND) { + ALOGE("%s: Camera %d: Tried to submit a request with a surface that" + " we have not called createStream on", + __FUNCTION__, mCameraId); + return BAD_VALUE; + } + + int streamId = mStreamMap.valueAt(idx); + outputStreamIds.push_back(streamId); + ALOGV("%s: Camera %d: Appending output stream %d to request", + __FUNCTION__, mCameraId, streamId); + } + + metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0], + outputStreamIds.size()); + + // TODO: @hide ANDROID_REQUEST_ID, or use another request token + int32_t requestId = mRequestIdCounter++; + metadata.update(ANDROID_REQUEST_ID, &requestId, /*size*/1); + ALOGV("%s: Camera %d: Submitting request with ID %d", + __FUNCTION__, mCameraId, requestId); + + if (streaming) { + res = mDevice->setStreamingRequest(metadata); + if (res != OK) { + ALOGE("%s: Camera %d: Got error %d after trying to set streaming " + "request", __FUNCTION__, mCameraId, res); + } else { + mStreamingRequestList.push_back(mRequestIdCounter); + } + } else { + res = mDevice->capture(metadata); + if (res != OK) { + ALOGE("%s: Camera %d: Got error %d after trying to set capture", + __FUNCTION__, mCameraId, res); + } + } + + ALOGV("%s: Camera %d: End of function", __FUNCTION__, mCameraId); + if (res == OK) { + return requestId; + } + + return res; +} + +status_t CameraDeviceClient::cancelRequest(int requestId) { + ATRACE_CALL(); + ALOGV("%s, requestId = %d", __FUNCTION__, requestId); + + status_t res; + + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + Vector::iterator it, end; + for (it = mStreamingRequestList.begin(), end = mStreamingRequestList.end(); + it != end; ++it) { + if (*it == requestId) { + break; + } + } + + if (it == end) { + ALOGE("%s: Camera%d: Did not find request id %d in list of streaming " + "requests", __FUNCTION__, mCameraId, requestId); + return BAD_VALUE; + } + + res = mDevice->clearStreamingRequest(); + + if (res == OK) { + ALOGV("%s: Camera %d: Successfully cleared streaming request", + __FUNCTION__, mCameraId); + mStreamingRequestList.erase(it); + } + + return res; +} + +status_t CameraDeviceClient::deleteStream(int streamId) { + ATRACE_CALL(); + ALOGV("%s (streamId = 0x%x)", __FUNCTION__, streamId); + + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + // Guard against trying to delete non-created streams + ssize_t index = NAME_NOT_FOUND; + for (size_t i = 0; i < mStreamMap.size(); ++i) { + if (streamId == mStreamMap.valueAt(i)) { + index = i; + break; + } + } + + if (index == NAME_NOT_FOUND) { + ALOGW("%s: Camera %d: Invalid stream ID (%d) specified, no stream " + "created yet", __FUNCTION__, mCameraId, 
streamId); + return BAD_VALUE; + } + + // Also returns BAD_VALUE if stream ID was not valid + res = mDevice->deleteStream(streamId); + + if (res == BAD_VALUE) { + ALOGE("%s: Camera %d: Unexpected BAD_VALUE when deleting stream, but we" + " already checked and the stream ID (%d) should be valid.", + __FUNCTION__, mCameraId, streamId); + } else if (res == OK) { + mStreamMap.removeItemsAt(index); + + ALOGV("%s: Camera %d: Successfully deleted stream ID (%d)", + __FUNCTION__, mCameraId, streamId); + } + + return res; +} + +status_t CameraDeviceClient::createStream(int width, int height, int format, + const sp& bufferProducer) +{ + ATRACE_CALL(); + ALOGV("%s (w = %d, h = %d, f = 0x%x)", __FUNCTION__, width, height, format); + + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + // Don't create multiple streams for the same target surface + { + ssize_t index = mStreamMap.indexOfKey(bufferProducer->asBinder()); + if (index != NAME_NOT_FOUND) { + ALOGW("%s: Camera %d: Buffer producer already has a stream for it " + "(ID %d)", + __FUNCTION__, mCameraId, index); + return ALREADY_EXISTS; + } + } + + sp binder; + sp anw; + if (bufferProducer != 0) { + binder = bufferProducer->asBinder(); + anw = new Surface(bufferProducer); + } + + // TODO: remove w,h,f since we are ignoring them + + if ((res = anw->query(anw.get(), NATIVE_WINDOW_WIDTH, &width)) != OK) { + ALOGE("%s: Camera %d: Failed to query Surface width", __FUNCTION__, + mCameraId); + return res; + } + if ((res = anw->query(anw.get(), NATIVE_WINDOW_HEIGHT, &height)) != OK) { + ALOGE("%s: Camera %d: Failed to query Surface height", __FUNCTION__, + mCameraId); + return res; + } + if ((res = anw->query(anw.get(), NATIVE_WINDOW_FORMAT, &format)) != OK) { + ALOGE("%s: Camera %d: Failed to query Surface format", __FUNCTION__, + mCameraId); + return res; + } + + // FIXME: remove this override since the default format should be + // IMPLEMENTATION_DEFINED. b/9487482 + if (format != HAL_PIXEL_FORMAT_BLOB && + format != HAL_PIXEL_FORMAT_YCbCr_420_888) { + ALOGW("%s: Camera %d: Overriding format 0x%x to IMPLEMENTATION_DEFINED", + __FUNCTION__, mCameraId, format); + format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; + } + + // TODO: add startConfigure/stopConfigure call to CameraDeviceBase + // this will make it so Camera3Device doesn't call configure_streams + // after each call, but only once we are done with all. + + int streamId = -1; + res = mDevice->createStream(anw, width, height, format, /*size*/1, + &streamId); + + if (res == OK) { + mStreamMap.add(bufferProducer->asBinder(), streamId); + + ALOGV("%s: Camera %d: Successfully created a new stream ID %d", + __FUNCTION__, mCameraId, streamId); + return streamId; + } + + return res; +} + +// Create a request object from a template. 
+status_t CameraDeviceClient::createDefaultRequest(int templateId, + /*out*/ + CameraMetadata* request) +{ + ATRACE_CALL(); + ALOGV("%s (templateId = 0x%x)", __FUNCTION__, templateId); + + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + CameraMetadata metadata; + if ( (res = mDevice->createDefaultRequest(templateId, &metadata) ) == OK && + request != NULL) { + + request->swap(metadata); + } + + return res; +} + +status_t CameraDeviceClient::getCameraInfo(int cameraId, + /*out*/ + camera_metadata** info) +{ + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + status_t res = OK; + + // TODO: remove cameraId. this should be device-specific info, not static. + if (cameraId != mCameraId) { + return INVALID_OPERATION; + } + + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + CameraMetadata deviceInfo = mDevice->info(); + *info = deviceInfo.release(); + + return res; +} + +status_t CameraDeviceClient::dump(int fd, const Vector& args) { + String8 result; + result.appendFormat("CameraDeviceClient[%d] (%p) PID: %d, dump:\n", + mCameraId, + getRemoteCallback()->asBinder().get(), + mClientPid); + result.append(" State: "); + + // TODO: print dynamic/request section from most recent requests + mFrameProcessor->dump(fd, args); + + return dumpDevice(fd, args); +} + +// TODO: refactor the code below this with IProCameraUser. +// it's 100% copy-pasted, so lets not change it right now to make it easier. + +void CameraDeviceClient::detachDevice() { + if (mDevice == 0) return; + + ALOGV("Camera %d: Stopping processors", mCameraId); + + mFrameProcessor->removeListener(FRAME_PROCESSOR_LISTENER_MIN_ID, + FRAME_PROCESSOR_LISTENER_MAX_ID, + /*listener*/this); + mFrameProcessor->requestExit(); + ALOGV("Camera %d: Waiting for threads", mCameraId); + mFrameProcessor->join(); + ALOGV("Camera %d: Disconnecting device", mCameraId); + + // WORKAROUND: HAL refuses to disconnect while there's streams in flight + { + mDevice->clearStreamingRequest(); + + status_t code; + if ((code = mDevice->waitUntilDrained()) != OK) { + ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__, + code); + } + } + + Camera2ClientBase::detachDevice(); +} + +/** Device-related methods */ +void CameraDeviceClient::onFrameAvailable(int32_t frameId, + const CameraMetadata& frame) { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mBinderSerializationLock); + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + + if (mRemoteCallback != NULL) { + ALOGV("%s: frame = %p ", __FUNCTION__, &frame); + mRemoteCallback->onResultReceived(frameId, frame); + } + +} + +// TODO: move to Camera2ClientBase +bool CameraDeviceClient::enforceRequestPermissions(CameraMetadata& metadata) { + + const int pid = IPCThreadState::self()->getCallingPid(); + const int selfPid = getpid(); + camera_metadata_entry_t entry; + + /** + * Mixin default important security values + * - android.led.transmit = defaulted ON + */ + CameraMetadata staticInfo = mDevice->info(); + entry = staticInfo.find(ANDROID_LED_AVAILABLE_LEDS); + for(size_t i = 0; i < entry.count; ++i) { + uint8_t led = entry.data.u8[i]; + + switch(led) { + case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: { + uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON; + if (!metadata.exists(ANDROID_LED_TRANSMIT)) { + metadata.update(ANDROID_LED_TRANSMIT, + &transmitDefault, 1); + 
} + break; + } + } + } + + // We can do anything! + if (pid == selfPid) { + return true; + } + + /** + * Permission check special fields in the request + * - android.led.transmit = android.permission.CAMERA_DISABLE_TRANSMIT + */ + entry = metadata.find(ANDROID_LED_TRANSMIT); + if (entry.count > 0 && entry.data.u8[0] != ANDROID_LED_TRANSMIT_ON) { + String16 permissionString = + String16("android.permission.CAMERA_DISABLE_TRANSMIT_LED"); + if (!checkCallingPermission(permissionString)) { + const int uid = IPCThreadState::self()->getCallingUid(); + ALOGE("Permission Denial: " + "can't disable transmit LED pid=%d, uid=%d", pid, uid); + return false; + } + } + + return true; +} + +} // namespace android diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.h b/services/camera/libcameraservice/photography/CameraDeviceClient.h new file mode 100644 index 0000000..806aa15 --- /dev/null +++ b/services/camera/libcameraservice/photography/CameraDeviceClient.h @@ -0,0 +1,141 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERADEVICECLIENT_H +#define ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERADEVICECLIENT_H + +#include "CameraDeviceBase.h" +#include "CameraService.h" +#include "camera2/ProFrameProcessor.h" +#include "Camera2ClientBase.h" +#include +#include + +namespace android { + +struct CameraDeviceClientBase : + public CameraService::BasicClient, public BnCameraDeviceUser +{ + typedef ICameraDeviceCallbacks TCamCallbacks; + + const sp& getRemoteCallback() { + return mRemoteCallback; + } + +protected: + CameraDeviceClientBase(const sp& cameraService, + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid); + + virtual void notifyError(); + + sp mRemoteCallback; +}; + +/** + * Implements the binder ICameraDeviceUser API, + * meant for HAL3-public implementation of + * android.hardware.photography.CameraDevice + */ +class CameraDeviceClient : + public Camera2ClientBase, + public camera2::ProFrameProcessor::FilteredListener +{ +public: + /** + * ICameraDeviceUser interface (see ICameraDeviceUser for details) + */ + + // Note that the callee gets a copy of the metadata. + virtual int submitRequest(sp request, + bool streaming = false); + virtual status_t cancelRequest(int requestId); + + // Returns -EBUSY if device is not idle + virtual status_t deleteStream(int streamId); + + virtual status_t createStream( + int width, + int height, + int format, + const sp& bufferProducer); + + // Create a request object from a template. 
+ virtual status_t createDefaultRequest(int templateId, + /*out*/ + CameraMetadata* request); + + // Get the static metadata for the camera + // -- Caller owns the newly allocated metadata + virtual status_t getCameraInfo(int cameraId, + /*out*/ + camera_metadata** info); + + /** + * Interface used by CameraService + */ + + CameraDeviceClient(const sp& cameraService, + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid); + virtual ~CameraDeviceClient(); + + virtual status_t initialize(camera_module_t *module); + + virtual status_t dump(int fd, const Vector& args); + + /** + * Interface used by independent components of CameraDeviceClient. + */ +protected: + /** FilteredListener implementation **/ + virtual void onFrameAvailable(int32_t frameId, + const CameraMetadata& frame); + virtual void detachDevice(); + +private: + /** ICameraDeviceUser interface-related private members */ + + /** Preview callback related members */ + sp mFrameProcessor; + static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0; + static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL; + + /** Utility members */ + bool enforceRequestPermissions(CameraMetadata& metadata); + + // IGraphicsBufferProducer binder -> Stream ID + KeyedVector, int> mStreamMap; + + // Stream ID + Vector mStreamingRequestList; + + int32_t mRequestIdCounter; +}; + +}; // namespace android + +#endif -- cgit v1.1 From 98e24724fb77445d4d015a5fec4ecbd5b49abc9b Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Wed, 19 Jun 2013 19:51:04 -0700 Subject: Make android.hardware.photography.Camera work on HAL3+ devices Bug: 9213377 Change-Id: I5b2eeab28985f53dfcb7b8e3029930f5adcd74f5 --- services/camera/libcameraservice/Android.mk | 1 + services/camera/libcameraservice/Camera2Client.cpp | 16 ----- .../camera/libcameraservice/Camera2ClientBase.cpp | 7 ++- .../libcameraservice/CameraDeviceFactory.cpp | 72 ++++++++++++++++++++++ .../camera/libcameraservice/CameraDeviceFactory.h | 44 +++++++++++++ services/camera/libcameraservice/CameraService.cpp | 3 + services/camera/libcameraservice/CameraService.h | 5 +- 7 files changed, 129 insertions(+), 19 deletions(-) create mode 100644 services/camera/libcameraservice/CameraDeviceFactory.cpp create mode 100644 services/camera/libcameraservice/CameraDeviceFactory.h diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index 0eead1e..0fede7e 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -15,6 +15,7 @@ LOCAL_SRC_FILES:= \ CameraDeviceBase.cpp \ Camera2Device.cpp \ Camera3Device.cpp \ + CameraDeviceFactory.cpp \ camera2/Parameters.cpp \ camera2/FrameProcessor.cpp \ camera2/StreamingProcessor.cpp \ diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp index d1ab7eb..a94c658 100644 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ b/services/camera/libcameraservice/Camera2Client.cpp @@ -58,22 +58,6 @@ Camera2Client::Camera2Client(const sp& cameraService, mDeviceVersion(deviceVersion) { ATRACE_CALL(); - ALOGI("Camera %d: Opened", cameraId); - - switch (mDeviceVersion) { - case CAMERA_DEVICE_API_VERSION_2_0: - mDevice = new Camera2Device(cameraId); - break; - case CAMERA_DEVICE_API_VERSION_3_0: - mDevice = new Camera3Device(cameraId); - break; - default: - ALOGE("Camera %d: Unknown HAL device version %d", - cameraId, 
mDeviceVersion); - mDevice = NULL; - break; - } - SharedParameters::Lock l(mParameters); l.mParameters.state = Parameters::DISCONNECTED; diff --git a/services/camera/libcameraservice/Camera2ClientBase.cpp b/services/camera/libcameraservice/Camera2ClientBase.cpp index 5e4832c..561dcfc 100644 --- a/services/camera/libcameraservice/Camera2ClientBase.cpp +++ b/services/camera/libcameraservice/Camera2ClientBase.cpp @@ -30,7 +30,8 @@ #include "photography/CameraDeviceClient.h" -#include "Camera2Device.h" +#include "CameraDeviceBase.h" +#include "CameraDeviceFactory.h" namespace android { using namespace camera2; @@ -56,7 +57,9 @@ Camera2ClientBase::Camera2ClientBase( mSharedCameraCallbacks(remoteCallback) { ALOGI("Camera %d: Opened", cameraId); - mDevice = new Camera2Device(cameraId); + + mDevice = CameraDeviceFactory::createDevice(cameraId); + LOG_ALWAYS_FATAL_IF(mDevice == 0, "Device should never be NULL here."); } template diff --git a/services/camera/libcameraservice/CameraDeviceFactory.cpp b/services/camera/libcameraservice/CameraDeviceFactory.cpp new file mode 100644 index 0000000..2acdb5e --- /dev/null +++ b/services/camera/libcameraservice/CameraDeviceFactory.cpp @@ -0,0 +1,72 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// #define LOG_NDEBUG 0 +#define LOG_TAG "CameraDeviceFactory" +#include + +#include "CameraDeviceBase.h" +#include "Camera2Device.h" +#include "Camera3Device.h" +#include "CameraService.h" +#include "CameraDeviceFactory.h" + +namespace android { + +wp CameraDeviceFactory::sService; + +sp CameraDeviceFactory::createDevice(int cameraId) { + + sp svc = sService.promote(); + if (svc == 0) { + ALOGE("%s: No service registered", __FUNCTION__); + return NULL; + } + + int deviceVersion = svc->getDeviceVersion(cameraId, /*facing*/NULL); + + sp device; + + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_2_0: + case CAMERA_DEVICE_API_VERSION_2_1: + device = new Camera2Device(cameraId); + break; + case CAMERA_DEVICE_API_VERSION_3_0: + device = new Camera3Device(cameraId); + break; + default: + ALOGE("%s: Camera %d: Unknown HAL device version %d", + __FUNCTION__, cameraId, deviceVersion); + device = NULL; + break; + } + + ALOGV_IF(device != 0, "Created a new camera device for version %d", + deviceVersion); + + return device; +} + +void CameraDeviceFactory::registerService(wp service) { + ALOGV("%s: Registered service %p", __FUNCTION__, + service.promote().get()); + + sService = service; +} + +}; // namespace android + diff --git a/services/camera/libcameraservice/CameraDeviceFactory.h b/services/camera/libcameraservice/CameraDeviceFactory.h new file mode 100644 index 0000000..93ffaf8 --- /dev/null +++ b/services/camera/libcameraservice/CameraDeviceFactory.h @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
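
The factory maps the HAL device version to a concrete device class, but only after a CameraService has registered itself. A sketch of the expected ordering, assuming it runs inside the service process:

    // At service start-up (the patch wires this up in CameraService::onFirstRef()):
    CameraDeviceFactory::registerService(this);

    // Later, from a client class that needs a device for a given camera id:
    sp<CameraDeviceBase> device = CameraDeviceFactory::createDevice(cameraId);
    if (device == 0) {
        // Either no service was registered yet or the HAL version is unknown.
        ALOGE("Camera %d: no device implementation available", cameraId);
    }
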
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERADEVICEFACTORY_H +#define ANDROID_SERVERS_CAMERA_CAMERADEVICEFACTORY_H + +#include + +namespace android { +class CameraDeviceBase; +class CameraService; + +/** + * Create the right instance of Camera2Device or Camera3Device + * automatically based on the device version. + */ +class CameraDeviceFactory : public virtual RefBase { + public: + static void registerService(wp service); + + // Prerequisite: Call registerService. + static sp createDevice(int cameraId); + private: + CameraDeviceFactory(wp service); + + static wp sService; +}; + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 1b2204e..c284a0d 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -42,6 +42,7 @@ #include "Camera2Client.h" #include "ProCamera2Client.h" #include "photography/CameraDeviceClient.h" +#include "CameraDeviceFactory.h" namespace android { @@ -127,6 +128,8 @@ void CameraService::onFirstRef() CAMERA_MODULE_API_VERSION_2_1) { mModule->set_callbacks(this); } + + CameraDeviceFactory::registerService(this); } } diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index cab804e..2bf7b49 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -106,6 +106,10 @@ public: void playSound(sound_kind kind); void releaseSound(); + ///////////////////////////////////////////////////////////////////// + // CameraDeviceFactory functionality + int getDeviceVersion(int cameraId, int* facing = NULL); + ///////////////////////////////////////////////////////////////////// // CameraClient functionality @@ -372,7 +376,6 @@ private: virtual void binderDied(const wp &who); // Helpers - int getDeviceVersion(int cameraId, int* facing); bool isValidCameraId(int cameraId); }; -- cgit v1.1 From 054e7347cc60ad4b9dd2e8f456406f122f9f5879 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Thu, 2 May 2013 16:37:36 -0700 Subject: stagefright: support for video decoder metadata mode Change-Id: Id360f29236798163f9f3a82135f601083a8a5058 Signed-off-by: Lajos Molnar Bug: 7093648 --- include/media/stagefright/ACodec.h | 9 ++ media/libstagefright/ACodec.cpp | 201 ++++++++++++++++++++++++++++++++++--- 2 files changed, 195 insertions(+), 15 deletions(-) diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h index 8876c9b..6bf83dd 100644 --- a/include/media/stagefright/ACodec.h +++ b/include/media/stagefright/ACodec.h @@ -138,6 +138,7 @@ private: IOMX::buffer_id mBufferID; Status mStatus; + unsigned mDequeuedAt; sp mData; sp mGraphicBuffer; @@ -194,12 +195,20 @@ private: bool mChannelMaskPresent; int32_t mChannelMask; + unsigned mDequeueCounter; + bool mStoreMetaDataInOutputBuffers; + int32_t mMetaDataBuffersToSubmit; status_t setCyclicIntraMacroblockRefresh(const sp &msg, int32_t mode); status_t allocateBuffersOnPort(OMX_U32 portIndex); status_t 
freeBuffersOnPort(OMX_U32 portIndex); status_t freeBuffer(OMX_U32 portIndex, size_t i); + status_t configureOutputBuffersFromNativeWindow( + OMX_U32 *nBufferCount, OMX_U32 *nBufferSize, + OMX_U32 *nMinUndequeuedBuffers); + status_t allocateOutputMetaDataBuffers(); + status_t submitOutputMetaDataBuffer(); status_t allocateOutputBuffersFromNativeWindow(); status_t cancelBufferToNativeWindow(BufferInfo *info); status_t freeOutputBuffersNotOwnedByComponent(); diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index bf650b4..1a4f069 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -255,6 +255,8 @@ private: struct ACodec::ExecutingState : public ACodec::BaseState { ExecutingState(ACodec *codec); + void submitRegularOutputBuffers(); + void submitOutputMetaBuffers(); void submitOutputBuffers(); // Submit output buffers to the decoder, submit input buffers to client @@ -364,7 +366,10 @@ ACodec::ACodec() mEncoderDelay(0), mEncoderPadding(0), mChannelMaskPresent(false), - mChannelMask(0) { + mChannelMask(0), + mDequeueCounter(0), + mStoreMetaDataInOutputBuffers(false), + mMetaDataBuffersToSubmit(0) { mUninitializedState = new UninitializedState(this); mLoadedState = new LoadedState(this); mLoadedToIdleState = new LoadedToIdleState(this); @@ -454,7 +459,11 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { status_t err; if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { - err = allocateOutputBuffersFromNativeWindow(); + if (mStoreMetaDataInOutputBuffers) { + err = allocateOutputMetaDataBuffers(); + } else { + err = allocateOutputBuffersFromNativeWindow(); + } } else { OMX_PARAM_PORTDEFINITIONTYPE def; InitOMXParams(&def); @@ -536,7 +545,9 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { return OK; } -status_t ACodec::allocateOutputBuffersFromNativeWindow() { +status_t ACodec::configureOutputBuffersFromNativeWindow( + OMX_U32 *bufferCount, OMX_U32 *bufferSize, + OMX_U32 *minUndequeuedBuffers) { OMX_PARAM_PORTDEFINITIONTYPE def; InitOMXParams(&def); def.nPortIndex = kPortIndexOutput; @@ -601,10 +612,10 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() { return err; } - int minUndequeuedBufs = 0; + *minUndequeuedBuffers = 0; err = mNativeWindow->query( mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, - &minUndequeuedBufs); + (int *)minUndequeuedBuffers); if (err != 0) { ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", @@ -615,8 +626,8 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() { // XXX: Is this the right logic to use? It's not clear to me what the OMX // buffer counts refer to - how do they account for the renderer holding on // to buffers? 
- if (def.nBufferCountActual < def.nBufferCountMin + minUndequeuedBufs) { - OMX_U32 newBufferCount = def.nBufferCountMin + minUndequeuedBufs; + if (def.nBufferCountActual < def.nBufferCountMin + *minUndequeuedBuffers) { + OMX_U32 newBufferCount = def.nBufferCountMin + *minUndequeuedBuffers; def.nBufferCountActual = newBufferCount; err = mOMX->setParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); @@ -637,12 +648,24 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() { return err; } + *bufferCount = def.nBufferCountActual; + *bufferSize = def.nBufferSize; + return err; +} + +status_t ACodec::allocateOutputBuffersFromNativeWindow() { + OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; + status_t err = configureOutputBuffersFromNativeWindow( + &bufferCount, &bufferSize, &minUndequeuedBuffers); + if (err != 0) + return err; + ALOGV("[%s] Allocating %lu buffers from a native window of size %lu on " "output port", - mComponentName.c_str(), def.nBufferCountActual, def.nBufferSize); + mComponentName.c_str(), bufferCount, bufferSize); // Dequeue buffers and send them to OMX - for (OMX_U32 i = 0; i < def.nBufferCountActual; i++) { + for (OMX_U32 i = 0; i < bufferCount; i++) { ANativeWindowBuffer *buf; err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf); if (err != 0) { @@ -653,7 +676,7 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() { sp graphicBuffer(new GraphicBuffer(buf, false)); BufferInfo info; info.mStatus = BufferInfo::OWNED_BY_US; - info.mData = new ABuffer(NULL /* data */, def.nBufferSize /* capacity */); + info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */); info.mGraphicBuffer = graphicBuffer; mBuffers[kPortIndexOutput].push(info); @@ -682,9 +705,9 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() { cancelStart = 0; cancelEnd = mBuffers[kPortIndexOutput].size(); } else { - // Return the last two buffers to the native window. - cancelStart = def.nBufferCountActual - minUndequeuedBufs; - cancelEnd = def.nBufferCountActual; + // Return the required minimum undequeued buffers to the native window. 
+ cancelStart = bufferCount - minUndequeuedBuffers; + cancelEnd = bufferCount; } for (OMX_U32 i = cancelStart; i < cancelEnd; i++) { @@ -695,6 +718,65 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() { return err; } +status_t ACodec::allocateOutputMetaDataBuffers() { + OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; + status_t err = configureOutputBuffersFromNativeWindow( + &bufferCount, &bufferSize, &minUndequeuedBuffers); + if (err != 0) + return err; + + ALOGV("[%s] Allocating %lu meta buffers on output port", + mComponentName.c_str(), bufferCount); + + size_t totalSize = bufferCount * 8; + mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec"); + + // Dequeue buffers and send them to OMX + for (OMX_U32 i = 0; i < bufferCount; i++) { + BufferInfo info; + info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; + info.mGraphicBuffer = NULL; + info.mDequeuedAt = mDequeueCounter; + + sp mem = mDealer[kPortIndexOutput]->allocate( + sizeof(struct VideoDecoderOutputMetaData)); + CHECK(mem.get() != NULL); + info.mData = new ABuffer(mem->pointer(), mem->size()); + + // we use useBuffer for metadata regardless of quirks + err = mOMX->useBuffer( + mNode, kPortIndexOutput, mem, &info.mBufferID); + + mBuffers[kPortIndexOutput].push(info); + + ALOGV("[%s] allocated meta buffer with ID %p (pointer = %p)", + mComponentName.c_str(), info.mBufferID, mem->pointer()); + } + + mMetaDataBuffersToSubmit = bufferCount - minUndequeuedBuffers; + return err; +} + +status_t ACodec::submitOutputMetaDataBuffer() { + CHECK(mStoreMetaDataInOutputBuffers); + if (mMetaDataBuffersToSubmit == 0) + return OK; + + BufferInfo *info = dequeueBufferFromNativeWindow(); + if (info == NULL) + return ERROR_IO; + + ALOGV("[%s] submitting output meta buffer ID %p for graphic buffer %p", + mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get()); + + --mMetaDataBuffersToSubmit; + CHECK_EQ(mOMX->fillBuffer(mNode, info->mBufferID), + (status_t)OK); + + info->mStatus = BufferInfo::OWNED_BY_COMPONENT; + return OK; +} + status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); @@ -714,16 +796,19 @@ status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { ANativeWindowBuffer *buf; int fenceFd = -1; + CHECK(mNativeWindow.get() != NULL); if (native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf) != 0) { ALOGE("dequeueBuffer failed."); return NULL; } + BufferInfo *oldest = NULL; for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) { BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); - if (info->mGraphicBuffer->handle == buf->handle) { + if (info->mGraphicBuffer != NULL && + info->mGraphicBuffer->handle == buf->handle) { CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_NATIVE_WINDOW); @@ -731,6 +816,34 @@ ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { return info; } + + if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW && + (oldest == NULL || + // avoid potential issues from counter rolling over + mDequeueCounter - info->mDequeuedAt > + mDequeueCounter - oldest->mDequeuedAt)) { + oldest = info; + } + } + + if (oldest) { + CHECK(mStoreMetaDataInOutputBuffers); + + // discard buffer in LRU info and replace with new buffer + oldest->mGraphicBuffer = new GraphicBuffer(buf, false); + oldest->mStatus = BufferInfo::OWNED_BY_US; + + struct VideoDecoderOutputMetaData metaData; + metaData.eType = kMetadataBufferTypeGrallocSource; 
+ metaData.pHandle = oldest->mGraphicBuffer->handle; + memcpy(oldest->mData->base(), &metaData, sizeof(metaData)); + + ALOGV("replaced oldest buffer #%u with age %u (%p stored in %p)", + oldest - &mBuffers[kPortIndexOutput][0], + mDequeueCounter - oldest->mDequeuedAt, + metaData.pHandle, oldest->mData->base()); + + return oldest; } TRESPASS(); @@ -971,6 +1084,24 @@ status_t ACodec::configureCodec( } } + // Always try to enable dynamic output buffers on native surface + sp obj; + int32_t haveNativeWindow = msg->findObject("native-window", &obj) && + obj != NULL; + mStoreMetaDataInOutputBuffers = false; + if (!encoder && video && haveNativeWindow) { + err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_TRUE); + if (err != OK) { + // allow failure + ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", + mComponentName.c_str(), err); + err = OK; + } else { + ALOGV("[%s] storeMetaDataInBuffers succeeded", mComponentName.c_str()); + mStoreMetaDataInOutputBuffers = true; + } + } + if (video) { if (encoder) { err = setupVideoEncoder(mime, msg); @@ -2949,6 +3080,20 @@ void ACodec::BaseState::onInputBufferFilled(const sp &msg) { mCodec->mBufferStats.add(timeUs, stats); #endif + if (mCodec->mStoreMetaDataInOutputBuffers) { + // try to submit an output buffer for each input buffer + PortMode outputMode = getPortMode(kPortIndexOutput); + + ALOGV("MetaDataBuffersToSubmit=%u portMode=%s", + mCodec->mMetaDataBuffersToSubmit, + (outputMode == FREE_BUFFERS ? "FREE" : + outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT")); + if (outputMode == RESUBMIT_BUFFERS) { + CHECK_EQ(mCodec->submitOutputMetaDataBuffer(), + (status_t)OK); + } + } + CHECK_EQ(mCodec->mOMX->emptyBuffer( mCodec->mNode, bufferID, @@ -3066,6 +3211,7 @@ bool ACodec::BaseState::onOMXFillBufferDone( CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT); + info->mDequeuedAt = ++mCodec->mDequeueCounter; info->mStatus = BufferInfo::OWNED_BY_US; PortMode mode = getPortMode(kPortIndexOutput); @@ -3447,6 +3593,9 @@ void ACodec::LoadedState::stateEntered() { mCodec->mInputEOSResult = OK; + mCodec->mDequeueCounter = 0; + mCodec->mMetaDataBuffersToSubmit = 0; + if (mCodec->mShutdownInProgress) { bool keepComponentAllocated = mCodec->mKeepComponentAllocated; @@ -3764,7 +3913,20 @@ ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( return RESUBMIT_BUFFERS; } -void ACodec::ExecutingState::submitOutputBuffers() { +void ACodec::ExecutingState::submitOutputMetaBuffers() { + // submit as many buffers as there are input buffers with the codec + // in case we are in port reconfiguring + for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { + BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); + + if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { + if (mCodec->submitOutputMetaDataBuffer() != OK) + break; + } + } +} + +void ACodec::ExecutingState::submitRegularOutputBuffers() { for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i); @@ -3789,6 +3951,14 @@ void ACodec::ExecutingState::submitOutputBuffers() { } } +void ACodec::ExecutingState::submitOutputBuffers() { + if (mCodec->mStoreMetaDataInOutputBuffers) { + submitOutputMetaBuffers(); + } else { + submitRegularOutputBuffers(); + } +} + void ACodec::ExecutingState::resume() { if (mActive) { ALOGV("[%s] We're already active, no need to resume.", @@ -3955,6 +4125,7 @@ bool ACodec::ExecutingState::onOMXEvent( CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 
if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { + mCodec->mMetaDataBuffersToSubmit = 0; CHECK_EQ(mCodec->mOMX->sendCommand( mCodec->mNode, OMX_CommandPortDisable, kPortIndexOutput), -- cgit v1.1 From 94705aff3c9eef58cbb72ec6fe5d2dcfd9481646 Mon Sep 17 00:00:00 2001 From: hkuang Date: Mon, 24 Jun 2013 11:21:17 -0700 Subject: Adds VP9 decoding support for stagefright. Also change the VP8 encoder role name from video_encoder.vpx to video_encoder.vp8 for future VP9 encoder support. Requires the change in frameworks/native and media_codecs.xml corresponding to the device. VP9 decoding test will be added to cts repo later. --- cmds/stagefright/stagefright.cpp | 2 +- include/media/stagefright/MediaDefs.h | 3 ++- media/libstagefright/ACodec.cpp | 9 +++++--- media/libstagefright/MediaDefs.cpp | 3 ++- media/libstagefright/OMXCodec.cpp | 12 +++++++---- media/libstagefright/codecs/on2/dec/SoftVPX.cpp | 25 +++++++++++++++++----- media/libstagefright/codecs/on2/dec/SoftVPX.h | 7 ++++++ .../codecs/on2/enc/SoftVPXEncoder.cpp | 10 ++++----- .../libstagefright/codecs/on2/enc/SoftVPXEncoder.h | 2 +- .../libstagefright/matroska/MatroskaExtractor.cpp | 4 +++- media/libstagefright/omx/SoftOMXPlugin.cpp | 5 +++-- media/libstagefright/omx/tests/OMXHarness.cpp | 3 ++- 12 files changed, 60 insertions(+), 25 deletions(-) diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 924cf6d..f8fc8ed 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -616,7 +616,7 @@ static void dumpCodecProfiles(const sp& omx, bool queryDecoders) { MEDIA_MIMETYPE_AUDIO_AMR_NB, MEDIA_MIMETYPE_AUDIO_AMR_WB, MEDIA_MIMETYPE_AUDIO_MPEG, MEDIA_MIMETYPE_AUDIO_G711_MLAW, MEDIA_MIMETYPE_AUDIO_G711_ALAW, MEDIA_MIMETYPE_AUDIO_VORBIS, - MEDIA_MIMETYPE_VIDEO_VPX + MEDIA_MIMETYPE_VIDEO_VP8, MEDIA_MIMETYPE_VIDEO_VP9 }; const char *codecType = queryDecoders? 
"decoder" : "encoder"; diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h index 81de6e4..85693d4 100644 --- a/include/media/stagefright/MediaDefs.h +++ b/include/media/stagefright/MediaDefs.h @@ -22,7 +22,8 @@ namespace android { extern const char *MEDIA_MIMETYPE_IMAGE_JPEG; -extern const char *MEDIA_MIMETYPE_VIDEO_VPX; +extern const char *MEDIA_MIMETYPE_VIDEO_VP8; +extern const char *MEDIA_MIMETYPE_VIDEO_VP9; extern const char *MEDIA_MIMETYPE_VIDEO_AVC; extern const char *MEDIA_MIMETYPE_VIDEO_MPEG4; extern const char *MEDIA_MIMETYPE_VIDEO_H263; diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index bf650b4..2466a6b 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -836,8 +836,10 @@ status_t ACodec::setComponentRole( "video_decoder.mpeg4", "video_encoder.mpeg4" }, { MEDIA_MIMETYPE_VIDEO_H263, "video_decoder.h263", "video_encoder.h263" }, - { MEDIA_MIMETYPE_VIDEO_VPX, - "video_decoder.vpx", "video_encoder.vpx" }, + { MEDIA_MIMETYPE_VIDEO_VP8, + "video_decoder.vp8", "video_encoder.vp8" }, + { MEDIA_MIMETYPE_VIDEO_VP9, + "video_decoder.vp9", "video_encoder.vp9" }, { MEDIA_MIMETYPE_AUDIO_RAW, "audio_decoder.raw", "audio_encoder.raw" }, { MEDIA_MIMETYPE_AUDIO_FLAC, @@ -1501,7 +1503,8 @@ static const struct VideoCodingMapEntry { { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, - { MEDIA_MIMETYPE_VIDEO_VPX, OMX_VIDEO_CodingVPX }, + { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, + { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, }; static status_t GetVideoCodingTypeFromMime( diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp index 5d8029c..b5d4e44 100644 --- a/media/libstagefright/MediaDefs.cpp +++ b/media/libstagefright/MediaDefs.cpp @@ -20,7 +20,8 @@ namespace android { const char *MEDIA_MIMETYPE_IMAGE_JPEG = "image/jpeg"; -const char *MEDIA_MIMETYPE_VIDEO_VPX = "video/x-vnd.on2.vp8"; +const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8"; +const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9"; const char *MEDIA_MIMETYPE_VIDEO_AVC = "video/avc"; const char *MEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es"; const char *MEDIA_MIMETYPE_VIDEO_H263 = "video/3gpp"; diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 9d349a1..3de3c28 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -1195,8 +1195,10 @@ status_t OMXCodec::setVideoOutputFormat( compressionFormat = OMX_VIDEO_CodingMPEG4; } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) { compressionFormat = OMX_VIDEO_CodingH263; - } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VPX, mime)) { - compressionFormat = OMX_VIDEO_CodingVPX; + } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VP8, mime)) { + compressionFormat = OMX_VIDEO_CodingVP8; + } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VP9, mime)) { + compressionFormat = OMX_VIDEO_CodingVP9; } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG2, mime)) { compressionFormat = OMX_VIDEO_CodingMPEG2; } else { @@ -1388,8 +1390,10 @@ void OMXCodec::setComponentRole( "video_decoder.mpeg4", "video_encoder.mpeg4" }, { MEDIA_MIMETYPE_VIDEO_H263, "video_decoder.h263", "video_encoder.h263" }, - { MEDIA_MIMETYPE_VIDEO_VPX, - "video_decoder.vpx", "video_encoder.vpx" }, + { MEDIA_MIMETYPE_VIDEO_VP8, + "video_decoder.vp8", "video_encoder.vp8" }, + { MEDIA_MIMETYPE_VIDEO_VP9, + 
"video_decoder.vp9", "video_encoder.vp9" }, { MEDIA_MIMETYPE_AUDIO_RAW, "audio_decoder.raw", "audio_encoder.raw" }, { MEDIA_MIMETYPE_AUDIO_FLAC, diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp index 43d0263..476e986 100644 --- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp @@ -31,16 +31,20 @@ namespace android { SoftVPX::SoftVPX( const char *name, + const char *componentRole, + OMX_VIDEO_CODINGTYPE codingType, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, OMX_COMPONENTTYPE **component) : SoftVideoDecoderOMXComponent( - name, "video_decoder.vpx", OMX_VIDEO_CodingVPX, + name, componentRole, codingType, NULL /* profileLevels */, 0 /* numProfileLevels */, 320 /* width */, 240 /* height */, callbacks, appData, component), + mMode(codingType == OMX_VIDEO_CodingVP8 ? MODE_VP8 : MODE_VP9), mCtx(NULL) { initPorts(kNumBuffers, 768 * 1024 /* inputBufferSize */, - kNumBuffers, MEDIA_MIMETYPE_VIDEO_VPX); + kNumBuffers, + codingType == OMX_VIDEO_CodingVP8 ? MEDIA_MIMETYPE_VIDEO_VP8 : MEDIA_MIMETYPE_VIDEO_VP9); CHECK_EQ(initDecoder(), (status_t)OK); } @@ -71,7 +75,9 @@ status_t SoftVPX::initDecoder() { memset(&cfg, 0, sizeof(vpx_codec_dec_cfg_t)); cfg.threads = GetCPUCoreCount(); if ((vpx_err = vpx_codec_dec_init( - (vpx_codec_ctx_t *)mCtx, &vpx_codec_vp8_dx_algo, &cfg, 0))) { + (vpx_codec_ctx_t *)mCtx, + mMode == MODE_VP8 ? &vpx_codec_vp8_dx_algo : &vpx_codec_vp9_dx_algo, + &cfg, 0))) { ALOGE("on2 decoder failed to initialize. (%d)", vpx_err); return UNKNOWN_ERROR; } @@ -194,6 +200,15 @@ void SoftVPX::onQueueFilled(OMX_U32 portIndex) { android::SoftOMXComponent *createSoftOMXComponent( const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, OMX_COMPONENTTYPE **component) { - return new android::SoftVPX(name, callbacks, appData, component); + if (!strcmp(name, "OMX.google.vp8.decoder")) { + return new android::SoftVPX( + name, "video_decoder.vp8", OMX_VIDEO_CodingVP8, + callbacks, appData, component); + } else if (!strcmp(name, "OMX.google.vp9.decoder")) { + return new android::SoftVPX( + name, "video_decoder.vp9", OMX_VIDEO_CodingVP9, + callbacks, appData, component); + } else { + CHECK(!"Unknown component"); + } } - diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h index 626307b..cd5eb28 100644 --- a/media/libstagefright/codecs/on2/dec/SoftVPX.h +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h @@ -24,6 +24,8 @@ namespace android { struct SoftVPX : public SoftVideoDecoderOMXComponent { SoftVPX(const char *name, + const char *componentRole, + OMX_VIDEO_CODINGTYPE codingType, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, OMX_COMPONENTTYPE **component); @@ -38,6 +40,11 @@ private: kNumBuffers = 4 }; + enum { + MODE_VP8, + MODE_VP9 + } mMode; + void *mCtx; status_t initDecoder(); diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp index e25637a..74d6df5 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp @@ -165,8 +165,8 @@ void SoftVPXEncoder::initPorts() { outputPort.eDir = OMX_DirOutput; outputPort.nBufferAlignment = kOutputBufferAlignment; outputPort.format.video.cMIMEType = - const_cast(MEDIA_MIMETYPE_VIDEO_VPX); - outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVPX; + const_cast(MEDIA_MIMETYPE_VIDEO_VP8); + 
outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVP8; outputPort.format.video.eColorFormat = OMX_COLOR_FormatUnused; outputPort.format.video.pNativeWindow = NULL; outputPort.nBufferSize = 256 * 1024; // arbitrary @@ -315,7 +315,7 @@ OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index, formatParams->xFramerate = (1000000/mFrameDurationUs) << 16; return OMX_ErrorNone; } else if (formatParams->nPortIndex == kOutputPortIndex) { - formatParams->eCompressionFormat = OMX_VIDEO_CodingVPX; + formatParams->eCompressionFormat = OMX_VIDEO_CodingVP8; formatParams->eColorFormat = OMX_COLOR_FormatUnused; formatParams->xFramerate = 0; return OMX_ErrorNone; @@ -513,7 +513,7 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams( return OMX_ErrorUnsupportedSetting; } } else if (format->nPortIndex == kOutputPortIndex) { - if (format->eCompressionFormat == OMX_VIDEO_CodingVPX) { + if (format->eCompressionFormat == OMX_VIDEO_CodingVP8) { return OMX_ErrorNone; } else { return OMX_ErrorUnsupportedSetting; @@ -529,7 +529,7 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetRoleParams( const char* roleText = (const char*)role->cRole; const size_t roleTextMaxSize = OMX_MAX_STRINGNAME_SIZE - 1; - if (strncmp(roleText, "video_encoder.vpx", roleTextMaxSize)) { + if (strncmp(roleText, "video_encoder.vp8", roleTextMaxSize)) { ALOGE("Unsupported component role"); return OMX_ErrorBadParameter; } diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h index 3bc05c0..a0a8ee6 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h @@ -175,7 +175,7 @@ class SoftVPXEncoder : public SimpleSoftOMXComponent { const OMX_VIDEO_PARAM_PORTFORMATTYPE* format); // Verifies the component role tried to be set to this OMX component is - // strictly video_encoder.vpx + // strictly video_encoder.vp8 OMX_ERRORTYPE internalSetRoleParams( const OMX_PARAM_COMPONENTROLETYPE* role); diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp index b304749..d260d0f 100644 --- a/media/libstagefright/matroska/MatroskaExtractor.cpp +++ b/media/libstagefright/matroska/MatroskaExtractor.cpp @@ -870,7 +870,9 @@ void MatroskaExtractor::addTracks() { continue; } } else if (!strcmp("V_VP8", codecID)) { - meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VPX); + meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VP8); + } else if (!strcmp("V_VP9", codecID)) { + meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VP9); } else { ALOGW("%s is not supported.", codecID); continue; diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp index b3fe98e..d6cde73 100644 --- a/media/libstagefright/omx/SoftOMXPlugin.cpp +++ b/media/libstagefright/omx/SoftOMXPlugin.cpp @@ -50,8 +50,9 @@ static const struct { { "OMX.google.mpeg4.encoder", "mpeg4enc", "video_encoder.mpeg4" }, { "OMX.google.mp3.decoder", "mp3dec", "audio_decoder.mp3" }, { "OMX.google.vorbis.decoder", "vorbisdec", "audio_decoder.vorbis" }, - { "OMX.google.vpx.decoder", "vpxdec", "video_decoder.vpx" }, - { "OMX.google.vpx.encoder", "vpxenc", "video_encoder.vpx" }, + { "OMX.google.vp8.decoder", "vpxdec", "video_decoder.vp8" }, + { "OMX.google.vp9.decoder", "vpxdec", "video_decoder.vp9" }, + { "OMX.google.vp8.encoder", "vpxenc", "video_encoder.vp8" }, { "OMX.google.raw.decoder", "rawdec", "audio_decoder.raw" }, { "OMX.google.flac.encoder", 
"flacenc", "audio_encoder.flac" }, { "OMX.google.gsm.decoder", "gsmdec", "audio_decoder.gsm" }, diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp index 6cca8da..4bee808 100644 --- a/media/libstagefright/omx/tests/OMXHarness.cpp +++ b/media/libstagefright/omx/tests/OMXHarness.cpp @@ -449,7 +449,8 @@ static const char *GetMimeFromComponentRole(const char *componentRole) { { "video_decoder.avc", "video/avc" }, { "video_decoder.mpeg4", "video/mp4v-es" }, { "video_decoder.h263", "video/3gpp" }, - { "video_decoder.vpx", "video/x-vnd.on2.vp8" }, + { "video_decoder.vp8", "video/x-vnd.on2.vp8" }, + { "video_decoder.vp9", "video/x-vnd.on2.vp9" }, // we appear to use this as a synonym to amrnb. { "audio_decoder.amr", "audio/3gpp" }, -- cgit v1.1 From 0d09a9bec07b3bec78bd473ff0bfcf0a261f3f25 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 24 Jun 2013 12:06:46 -0700 Subject: Use mFutex as an event flag rather than semaphore An event flag can be more fault-tolerant in case of loss of synchronization, as it cannot overflow. It also allows more bits to be used in the future. See http://en.wikipedia.org/wiki/Event_flag Change-Id: I01ca25d951eb263124da54bb4738f0d94ec4a48b --- include/private/media/AudioTrackShared.h | 3 ++- media/libmedia/AudioTrackShared.cpp | 11 +++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 681f557..ef5bb8d 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -94,8 +94,9 @@ struct audio_track_cblk_t // parameter // renamed to "_" to detect incorrect use - volatile int32_t mFutex; // semaphore: down (P) by client, + volatile int32_t mFutex; // event flag: down (P) by client, // up (V) by server or binderDied() or interrupt() +#define CBLK_FUTEX_WAKE 1 // if event flag bit is set, then a deferred wake is pending private: diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index f034164..4b7f368 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -207,15 +207,15 @@ status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *reques ts = NULL; break; } - int32_t old = android_atomic_dec(&cblk->mFutex); - if (old <= 0) { + int32_t old = android_atomic_and(~CBLK_FUTEX_WAKE, &cblk->mFutex); + if (!(old & CBLK_FUTEX_WAKE)) { int rc; if (measure && !beforeIsValid) { clock_gettime(CLOCK_MONOTONIC, &before); beforeIsValid = true; } int ret = __futex_syscall4(&cblk->mFutex, - mClientInServer ? FUTEX_WAIT_PRIVATE : FUTEX_WAIT, old - 1, ts); + mClientInServer ? FUTEX_WAIT_PRIVATE : FUTEX_WAIT, old & ~CBLK_FUTEX_WAKE, ts); // update total elapsed time spent waiting if (measure) { struct timespec after; @@ -484,9 +484,8 @@ void ServerProxy::releaseBuffer(Buffer* buffer) } if (!mDeferWake && mAvailToClient + stepCount >= minimum) { ALOGV("mAvailToClient=%u stepCount=%u minimum=%u", mAvailToClient, stepCount, minimum); - // could client be sleeping, or not need this increment and counter overflows? - int32_t old = android_atomic_inc(&cblk->mFutex); - if (old == -1) { + int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex); + if (!(old & CBLK_FUTEX_WAKE)) { (void) __futex_syscall3(&cblk->mFutex, mClientInServer ? 
FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1); } -- cgit v1.1 From 93bb77da5481ab75c2cd6e3aa681839273c6e43d Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 24 Jun 2013 12:10:45 -0700 Subject: Workaround AudioRecord bug for large buffer sizes Bug: 9556436 Change-Id: I92d1238b623d2cfd648e0a684d0e710fb0bd8b43 --- media/libmedia/AudioTrackShared.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index 4b7f368..5f8f292 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -482,7 +482,8 @@ void ServerProxy::releaseBuffer(Buffer* buffer) } else if (minimum > half) { minimum = half; } - if (!mDeferWake && mAvailToClient + stepCount >= minimum) { + // FIXME AudioRecord wakeup needs to be optimized; it currently wakes up client every time + if (!mIsOut || (!mDeferWake && mAvailToClient + stepCount >= minimum)) { ALOGV("mAvailToClient=%u stepCount=%u minimum=%u", mAvailToClient, stepCount, minimum); int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex); if (!(old & CBLK_FUTEX_WAKE)) { -- cgit v1.1 From 9fdcb0a9497ca290bcf364b10868587b6bde3a34 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 26 Jun 2013 16:11:36 -0700 Subject: Fix theoretical race using TrackBase::sampleRate() In two places we assumed that TrackBase::sampleRate() would return the same value when it is called twice in the same function. This is not guaranteed; sampleRate() reads from the control block so the return value could change. To fix this, only call sampleRate() once and cache the return value to get a consistent value. This was only a theoretical race. In MixerThread::prepareTracks_l() it would have no bad effect. In TimedTrack::getNextBuffer() it could cause a real problem, but we don't currently support dynamic sample rate ratios for timed tracks. 
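As an illustrative sketch only (not code from this change; t stands for an
active track promoted from mActiveTracks, and the member names follow the
hunks below), the single-read pattern applied in prepareTracks_l() amounts to:

    // Racy: each call to sampleRate() re-reads the shared control block,
    // so two calls made while computing desiredFrames may disagree.
    size_t desiredFrames;
    if (t->sampleRate() == mSampleRate) {
        desiredFrames = mNormalFrameCount;
    } else {
        desiredFrames = (mNormalFrameCount * t->sampleRate()) / mSampleRate + 1 + 1;
    }

    // Consistent: read once, cache the snapshot, and use it everywhere.
    const uint32_t sr = t->sampleRate();
    if (sr == mSampleRate) {
        desiredFrames = mNormalFrameCount;
    } else {
        desiredFrames = (mNormalFrameCount * sr) / mSampleRate + 1 + 1;
    }

The same rule, one call and one cached value, is what the
TimedTrack::getNextBuffer() hunk applies with its local sr.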
Change-Id: I8e5c33f0121fc058d1e70c2ab5e9135397d3e0b7 --- services/audioflinger/Threads.cpp | 7 ++++--- services/audioflinger/Tracks.cpp | 6 ++++-- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 0773534..3b5727b 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2427,7 +2427,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac } for (size_t i=0 ; i t = mActiveTracks[i].promote(); + const sp t = mActiveTracks[i].promote(); if (t == 0) { continue; } @@ -2597,11 +2597,12 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // hence the test on (mMixerStatus == MIXER_TRACKS_READY) meaning the track was mixed // during last round size_t desiredFrames; - if (t->sampleRate() == mSampleRate) { + uint32_t sr = track->sampleRate(); + if (sr == mSampleRate) { desiredFrames = mNormalFrameCount; } else { // +1 for rounding and +1 for additional sample needed for interpolation - desiredFrames = (mNormalFrameCount * t->sampleRate()) / mSampleRate + 1 + 1; + desiredFrames = (mNormalFrameCount * sr) / mSampleRate + 1 + 1; // add frames already consumed but not yet released by the resampler // because cblk->framesReady() will include these frames desiredFrames += mAudioMixer->getUnreleasedFrames(track->name()); diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index bfc197c..6aca95f 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -1130,10 +1130,12 @@ status_t AudioFlinger::PlaybackThread::TimedTrack::getNextBuffer( } } + uint32_t sr = sampleRate(); + // adjust the head buffer's PTS to reflect the portion of the head buffer // that has already been consumed int64_t effectivePTS = headLocalPTS + - ((head.position() / mFrameSize) * mLocalTimeFreq / sampleRate()); + ((head.position() / mFrameSize) * mLocalTimeFreq / sr); // Calculate the delta in samples between the head of the input buffer // queue and the start of the next output buffer that will be written. @@ -1165,7 +1167,7 @@ status_t AudioFlinger::PlaybackThread::TimedTrack::getNextBuffer( // the current output position is within this threshold, then we will // concatenate the next input samples to the previous output const int64_t kSampleContinuityThreshold = - (static_cast(sampleRate()) << 32) / 250; + (static_cast(sr) << 32) / 250; // if this is the first buffer of audio that we're emitting from this track // then it should be almost exactly on time. -- cgit v1.1 From ad3af3305f024bcbbd55c894a4995e449498e1ba Mon Sep 17 00:00:00 2001 From: Richard Fitzgerald Date: Mon, 25 Mar 2013 16:54:37 +0000 Subject: Public API changes for audio offload support. NOTE: this does _not_ include all private member variables added to classes as part of offload support. Only public/protected functions and stubs functions/variables needed to make the changes buildable. 
- isOffloadSupported() added to audio policy service A stub implementation is required to build, this always returns false - setParameters() added to IAudioTrack A stub implementation is required to build, this always returns INVALID_OPERATION - CBlk flag for stream end - Change AudioSystem::getRenderPosition() to take an audio_output_t so caller can specify which output to query - Add AudioSystem::isOffloadSupported() This is fully implemented down to the AudioFlinger function AudioPolicyServer::isOffloadSupported() which is just a stub that always returns false. - Add EVENT_STREAM_END to AudioTrack interface. STREAM_END is used to signal when the hardware has actually finished playing all the data it was sent. - Add event type enumeration to media player interface AudioSink callbacks so that the same callback can be used to handle multiple types of event. For offloaded tracks we also have to handle STREAM_END and TEAR_DOWN events - Pass audio_offload_info_t to various functions used for opening outputs, tracks and audio players. This passes additional information about the compressed stream down to the HAL when using offload. For publicly-available APIs this is an optional parameter (for some of the internal and low-level APIs around the HAL interface it is mandatory) - Add getParameters() and setParameters() API to AudioTrack Currently dummy implementations. - Change AudioPlayer contructor so that it takes a set of bitflags defining what options are required. This replaces the original bool which only specified whether to use deep buffering. - Changes to StageFright class definition related to handling tearing-down of an offloaded track when we need to switch back to software decode - Define new StageFright utility functions used for offloaded tracks Currently dummy implementations. - AudioFlinger changes to use extended audio_config_t. Fills in audio_offload_info_t member if this info is passed in when opening an output. 
- libvideoeditor changes required to add the new event type parameter to AudioSink callback functions - libmediaplayerservice changes required to add the new event type parameter to AudioSink callback functions Change-Id: I3ab41138aa1083d81fe83b886a9b1021ec7320f1 Signed-off-by: Richard Fitzgerald Signed-off-by: Eric Laurent --- include/media/AudioSystem.h | 13 ++++++-- include/media/AudioTrack.h | 34 ++++++++++++++++++-- include/media/IAudioFlinger.h | 4 ++- include/media/IAudioPolicyService.h | 6 +++- include/media/IAudioTrack.h | 4 +++ include/media/MediaPlayerInterface.h | 17 ++++++++-- include/media/stagefright/AudioPlayer.h | 13 ++++++-- include/media/stagefright/Utils.h | 11 +++++++ include/private/media/AudioTrackShared.h | 4 +++ libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp | 9 ++++-- libvideoeditor/lvpp/VideoEditorAudioPlayer.h | 3 +- libvideoeditor/lvpp/VideoEditorPlayer.cpp | 6 ++-- libvideoeditor/lvpp/VideoEditorPlayer.h | 3 +- media/libmedia/AudioSystem.cpp | 23 +++++++++++--- media/libmedia/AudioTrack.cpp | 31 ++++++++++++++---- media/libmedia/IAudioFlinger.cpp | 3 +- media/libmedia/IAudioPolicyService.cpp | 12 +++++-- media/libmedia/IAudioTrack.cpp | 18 +++++++++++ media/libmediaplayerservice/MediaPlayerService.cpp | 12 ++++--- media/libmediaplayerservice/MediaPlayerService.h | 6 ++-- media/libstagefright/AudioPlayer.cpp | 7 ++-- media/libstagefright/Utils.cpp | 19 +++++++++++ media/libstagefright/include/AwesomePlayer.h | 17 ++++++++-- media/libstagefright/include/ESDS.h | 6 ++++ services/audioflinger/AudioFlinger.cpp | 37 ++++++++++++---------- services/audioflinger/AudioFlinger.h | 5 ++- services/audioflinger/AudioPolicyService.cpp | 15 ++++++--- services/audioflinger/AudioPolicyService.h | 4 ++- services/audioflinger/Tracks.cpp | 4 +++ 29 files changed, 281 insertions(+), 65 deletions(-) diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index b11c812..09160cc 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -128,8 +128,10 @@ public: // - BAD_VALUE: invalid parameter // NOTE: this feature is not supported on all hardware platforms and it is // necessary to check returned status before using the returned values. 
- static status_t getRenderPosition(size_t *halFrames, size_t *dspFrames, - audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); + static status_t getRenderPosition(audio_io_handle_t output, + size_t *halFrames, + size_t *dspFrames, + audio_stream_type_t stream = AUDIO_STREAM_DEFAULT); // return the number of input frames lost by HAL implementation, or 0 if the handle is invalid static size_t getInputFramesLost(audio_io_handle_t ioHandle); @@ -197,7 +199,8 @@ public: uint32_t samplingRate = 0, audio_format_t format = AUDIO_FORMAT_DEFAULT, audio_channel_mask_t channelMask = AUDIO_CHANNEL_OUT_STEREO, - audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE); + audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, + const audio_offload_info_t *offloadInfo = NULL); static status_t startOutput(audio_io_handle_t output, audio_stream_type_t stream, int session = 0); @@ -245,6 +248,10 @@ public: static uint32_t getPrimaryOutputSamplingRate(); static size_t getPrimaryOutputFrameCount(); + // Check if hw offload is possible for given format, stream type, sample rate, + // bit rate, duration, video and streaming or offload property is enabled + static bool isOffloadSupported(const audio_offload_info_t& info); + // ---------------------------------------------------------------------------- private: diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index e9bb76a..6727601 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -60,6 +60,8 @@ public: // Not currently used by android.media.AudioTrack. EVENT_NEW_IAUDIOTRACK = 6, // IAudioTrack was re-created, either due to re-routing and // voluntary invalidation by mediaserver, or mediaserver crash. + EVENT_STREAM_END = 7, // Sent after all the buffers queued in AF and HW are played + // back (after stop is called) }; /* Client should declare Buffer on the stack and pass address to obtainBuffer() @@ -175,7 +177,8 @@ public: void* user = NULL, int notificationFrames = 0, int sessionId = 0, - transfer_type transferType = TRANSFER_DEFAULT); + transfer_type transferType = TRANSFER_DEFAULT, + const audio_offload_info_t *offloadInfo = NULL); /* Creates an audio track and registers it with AudioFlinger. * With this constructor, the track is configured for static buffer mode. @@ -198,7 +201,8 @@ public: void* user = NULL, int notificationFrames = 0, int sessionId = 0, - transfer_type transferType = TRANSFER_DEFAULT); + transfer_type transferType = TRANSFER_DEFAULT, + const audio_offload_info_t *offloadInfo = NULL); /* Terminates the AudioTrack and unregisters it from AudioFlinger. * Also destroys all resources associated with the AudioTrack. @@ -233,7 +237,8 @@ public: const sp& sharedBuffer = 0, bool threadCanCallJava = false, int sessionId = 0, - transfer_type transferType = TRANSFER_DEFAULT); + transfer_type transferType = TRANSFER_DEFAULT, + const audio_offload_info_t *offloadInfo = NULL); /* Result of constructing the AudioTrack. This must be checked * before using any AudioTrack API (except for set()), because using @@ -521,6 +526,15 @@ private: struct timespec *elapsed = NULL, size_t *nonContig = NULL); public: +//EL_FIXME to be reconciled with new obtainBuffer() return codes and control block proxy +// enum { +// NO_MORE_BUFFERS = 0x80000001, // same name in AudioFlinger.h, ok to be different value +// TEAR_DOWN = 0x80000002, +// STOPPED = 1, +// STREAM_END_WAIT, +// STREAM_END +// }; + /* Release a filled buffer of "audioBuffer->frameCount" frames for AudioFlinger to process. 
*/ // FIXME make private when obtainBuffer() for TRANSFER_OBTAIN is removed void releaseBuffer(Buffer* audioBuffer); @@ -550,6 +564,15 @@ public: */ uint32_t getUnderrunFrames() const; + /* Get the flags */ + audio_output_flags_t getFlags() const { return mFlags; } + + /* Set parameters - only possible when using direct output */ + status_t setParameters(const String8& keyValuePairs); + + /* Get parameters */ + String8 getParameters(const String8& keys); + protected: /* copying audio tracks is not allowed */ AudioTrack(const AudioTrack& other); @@ -590,8 +613,11 @@ protected: // NS_NEVER never again static const nsecs_t NS_WHENEVER = -1, NS_INACTIVE = -2, NS_NEVER = -3; nsecs_t processAudioBuffer(const sp& thread); + status_t processStreamEnd(int32_t waitCount); + // caller must hold lock on mLock for all _l methods + status_t createTrack_l(audio_stream_type_t streamType, uint32_t sampleRate, audio_format_t format, @@ -607,6 +633,8 @@ protected: void setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount); audio_io_handle_t getOutput_l(); + status_t getPosition_l(uint32_t *position); + // FIXME enum is faster than strcmp() for parameter 'from' status_t restoreTrack_l(const char *from); diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index 9c3067e..f8a9f2b 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -49,6 +49,7 @@ public: TRACK_DEFAULT = 0, // client requests a default AudioTrack TRACK_TIMED = 1, // client requests a TimedAudioTrack TRACK_FAST = 2, // client requests a fast AudioTrack or AudioRecord + TRACK_OFFLOAD = 4, // client requests offload to hw codec }; typedef uint32_t track_flags_t; @@ -137,7 +138,8 @@ public: audio_format_t *pFormat, audio_channel_mask_t *pChannelMask, uint32_t *pLatencyMs, - audio_output_flags_t flags) = 0; + audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo = NULL) = 0; virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1, audio_io_handle_t output2) = 0; virtual status_t closeOutput(audio_io_handle_t output) = 0; diff --git a/include/media/IAudioPolicyService.h b/include/media/IAudioPolicyService.h index b5ad4ef..09b9ea6 100644 --- a/include/media/IAudioPolicyService.h +++ b/include/media/IAudioPolicyService.h @@ -53,7 +53,8 @@ public: uint32_t samplingRate = 0, audio_format_t format = AUDIO_FORMAT_DEFAULT, audio_channel_mask_t channelMask = 0, - audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE) = 0; + audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, + const audio_offload_info_t *offloadInfo = NULL) = 0; virtual status_t startOutput(audio_io_handle_t output, audio_stream_type_t stream, int session = 0) = 0; @@ -95,6 +96,9 @@ public: virtual status_t queryDefaultPreProcessing(int audioSession, effect_descriptor_t *descriptors, uint32_t *count) = 0; + // Check if offload is possible for given format, stream type, sample rate, + // bit rate, duration, video and streaming or offload property is enabled + virtual bool isOffloadSupported(const audio_offload_info_t& info) = 0; }; diff --git a/include/media/IAudioTrack.h b/include/media/IAudioTrack.h index 144be0e..1014403 100644 --- a/include/media/IAudioTrack.h +++ b/include/media/IAudioTrack.h @@ -25,6 +25,7 @@ #include #include #include +#include namespace android { @@ -82,6 +83,9 @@ public: or Tungsten time. 
The values for target are defined in AudioTrack.h */ virtual status_t setMediaTimeTransform(const LinearTransform& xform, int target) = 0; + + /* Send parameters to the audio hardware */ + virtual status_t setParameters(const String8& keyValuePairs) = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h index 9a75f81..61f7dc7 100644 --- a/include/media/MediaPlayerInterface.h +++ b/include/media/MediaPlayerInterface.h @@ -74,9 +74,18 @@ public: // AudioSink: abstraction layer for audio output class AudioSink : public RefBase { public: + enum cb_event_t { + CB_EVENT_FILL_BUFFER, // Request to write more data to buffer. + CB_EVENT_STREAM_END, // Sent after all the buffers queued in AF and HW are played + // back (after stop is called) + CB_EVENT_TEAR_DOWN // The AudioTrack was invalidated due to use case change: + // Need to re-evaluate offloading options + }; + // Callback returns the number of bytes actually written to the buffer. typedef size_t (*AudioCallback)( - AudioSink *audioSink, void *buffer, size_t size, void *cookie); + AudioSink *audioSink, void *buffer, size_t size, void *cookie, + cb_event_t event); virtual ~AudioSink() {} virtual bool ready() const = 0; // audio output is open and ready @@ -99,7 +108,8 @@ public: int bufferCount=DEFAULT_AUDIOSINK_BUFFERCOUNT, AudioCallback cb = NULL, void *cookie = NULL, - audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE) = 0; + audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, + const audio_offload_info_t *offloadInfo = NULL) = 0; virtual void start() = 0; virtual ssize_t write(const void* buffer, size_t size) = 0; @@ -110,6 +120,9 @@ public: virtual status_t setPlaybackRatePermille(int32_t rate) { return INVALID_OPERATION; } virtual bool needsTrailingPadding() { return true; } + + virtual status_t setParameters(const String8& keyValuePairs) { return NO_ERROR; }; + virtual String8 getParameters(const String8& keys) { return String8::empty(); }; }; MediaPlayerBase() : mCookie(0), mNotify(0) {} diff --git a/include/media/stagefright/AudioPlayer.h b/include/media/stagefright/AudioPlayer.h index 3bf046d..ec9f2df 100644 --- a/include/media/stagefright/AudioPlayer.h +++ b/include/media/stagefright/AudioPlayer.h @@ -36,8 +36,13 @@ public: SEEK_COMPLETE }; + enum { + ALLOW_DEEP_BUFFERING = 0x01, + USE_OFFLOAD = 0x02 + }; + AudioPlayer(const sp &audioSink, - bool allowDeepBuffering = false, + uint32_t flags = 0, AwesomePlayer *audioObserver = NULL); virtual ~AudioPlayer(); @@ -67,6 +72,8 @@ public: status_t setPlaybackRatePermille(int32_t ratePermille); + void notifyAudioEOS(); + private: friend class VideoEditorAudioPlayer; sp mSource; @@ -107,7 +114,8 @@ private: static size_t AudioSinkCallback( MediaPlayerBase::AudioSink *audioSink, - void *data, size_t size, void *me); + void *data, size_t size, void *me, + MediaPlayerBase::AudioSink::cb_event_t event); size_t fillBuffer(void *data, size_t size); @@ -116,6 +124,7 @@ private: void reset(); uint32_t getNumFramesPendingPlayout() const; + int64_t getOutputPlayPositionUs_l() const; AudioPlayer(const AudioPlayer &); AudioPlayer &operator=(const AudioPlayer &); diff --git a/include/media/stagefright/Utils.h b/include/media/stagefright/Utils.h index 73940d3..c24f612 100644 --- a/include/media/stagefright/Utils.h +++ b/include/media/stagefright/Utils.h @@ -22,6 +22,8 @@ #include #include #include +#include +#include namespace android { @@ -48,6 +50,15 @@ void 
convertMessageToMetaData( AString MakeUserAgent(); +// Convert a MIME type to a AudioSystem::audio_format +status_t mapMimeToAudioFormat(audio_format_t& format, const char* mime); + +// Send information from MetaData to the HAL via AudioSink +status_t sendMetaDataToHal(sp& sink, const sp& meta); + +// Check whether the stream defined by meta can be offloaded to hardware +bool canOffloadStream(const sp& meta, bool hasVideo, bool isStreaming); + } // namespace android #endif // UTILS_H_ diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index ef5bb8d..b41684a 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -44,6 +44,10 @@ namespace android { #define CBLK_BUFFER_END 0x80 // set by server when the position reaches end of buffer if not looping #define CBLK_OVERRUN 0x100 // set by server immediately on input overrun, cleared by client #define CBLK_INTERRUPT 0x200 // set by client on interrupt(), cleared by client in obtainBuffer() +#define CBLK_STREAM_END_DONE 0x400 // set by server on render completion, cleared by client + +//EL_FIXME 20 seconds may not be enough and must be reconciled with new obtainBuffer implementation +#define MAX_RUN_OFFLOADED_TIMEOUT_MS 20000 //assuming upto a maximum of 20 seconds of offloaded struct AudioTrackSharedStreaming { // similar to NBAIO MonoPipe diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp index 3fa8b87..dc360a5 100755 --- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp +++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp @@ -575,10 +575,15 @@ void VideoEditorAudioPlayer::reset() { size_t VideoEditorAudioPlayer::AudioSinkCallback( MediaPlayerBase::AudioSink *audioSink, - void *buffer, size_t size, void *cookie) { + void *buffer, size_t size, void *cookie, + MediaPlayerBase::AudioSink::cb_event_t event) { VideoEditorAudioPlayer *me = (VideoEditorAudioPlayer *)cookie; - return me->fillBuffer(buffer, size); + if (event == MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER ) { + return me->fillBuffer(buffer, size); + } else { + return 0; + } } diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h index a5616c1..d2e652d 100755 --- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h +++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h @@ -124,7 +124,8 @@ private: size_t fillBuffer(void *data, size_t size); static size_t AudioSinkCallback( MediaPlayerBase::AudioSink *audioSink, - void *data, size_t size, void *me); + void *data, size_t size, void *me, + MediaPlayerBase::AudioSink::cb_event_t event); void reset(); void clear(); diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp index 4a14b40..3384e34 100755 --- a/libvideoeditor/lvpp/VideoEditorPlayer.cpp +++ b/libvideoeditor/lvpp/VideoEditorPlayer.cpp @@ -391,7 +391,8 @@ status_t VideoEditorPlayer::VeAudioOutput::getFramesWritten(uint32_t *written) c status_t VideoEditorPlayer::VeAudioOutput::open( uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask, audio_format_t format, int bufferCount, - AudioCallback cb, void *cookie, audio_output_flags_t flags) { + AudioCallback cb, void *cookie, audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo) { mCallback = cb; mCallbackCookie = cookie; @@ -545,7 +546,8 @@ void VideoEditorPlayer::VeAudioOutput::CallbackWrapper( AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info; size_t actualSize 
= (*me->mCallback)( - me, buffer->raw, buffer->size, me->mCallbackCookie); + me, buffer->raw, buffer->size, me->mCallbackCookie, + MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER); buffer->size = actualSize; diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.h b/libvideoeditor/lvpp/VideoEditorPlayer.h index defc90d..69323c3 100755 --- a/libvideoeditor/lvpp/VideoEditorPlayer.h +++ b/libvideoeditor/lvpp/VideoEditorPlayer.h @@ -52,7 +52,8 @@ class VideoEditorPlayer : public MediaPlayerInterface { virtual status_t open( uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask, audio_format_t format, int bufferCount, - AudioCallback cb, void *cookie, audio_output_flags_t flags); + AudioCallback cb, void *cookie, audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo); virtual void start(); virtual ssize_t write(const void* buffer, size_t size); diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 693df60..a6dedec 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -361,8 +361,8 @@ status_t AudioSystem::setVoiceVolume(float value) return af->setVoiceVolume(value); } -status_t AudioSystem::getRenderPosition(size_t *halFrames, size_t *dspFrames, - audio_stream_type_t stream) +status_t AudioSystem::getRenderPosition(audio_io_handle_t output, size_t *halFrames, + size_t *dspFrames, audio_stream_type_t stream) { const sp& af = AudioSystem::get_audio_flinger(); if (af == 0) return PERMISSION_DENIED; @@ -371,7 +371,11 @@ status_t AudioSystem::getRenderPosition(size_t *halFrames, size_t *dspFrames, stream = AUDIO_STREAM_MUSIC; } - return af->getRenderPosition(halFrames, dspFrames, getOutput(stream)); + if (output == 0) { + output = getOutput(stream); + } + + return af->getRenderPosition(halFrames, dspFrames, output); } size_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) { @@ -585,11 +589,12 @@ audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream, uint32_t samplingRate, audio_format_t format, audio_channel_mask_t channelMask, - audio_output_flags_t flags) + audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo) { const sp& aps = AudioSystem::get_audio_policy_service(); if (aps == 0) return 0; - return aps->getOutput(stream, samplingRate, format, channelMask, flags); + return aps->getOutput(stream, samplingRate, format, channelMask, flags, offloadInfo); } status_t AudioSystem::startOutput(audio_io_handle_t output, @@ -771,6 +776,14 @@ void AudioSystem::clearAudioConfigCache() gOutputs.clear(); } +bool AudioSystem::isOffloadSupported(const audio_offload_info_t& info) +{ + ALOGV("isOffloadSupported()"); + const sp& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return false; + return aps->isOffloadSupported(info); +} + // --------------------------------------------------------------------------- void AudioSystem::AudioPolicyServiceClient::binderDied(const wp& who) { diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index faca054..2af162c 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -97,7 +97,8 @@ AudioTrack::AudioTrack( void* user, int notificationFrames, int sessionId, - transfer_type transferType) + transfer_type transferType, + const audio_offload_info_t *offloadInfo) : mStatus(NO_INIT), mIsTimed(false), mPreviousPriority(ANDROID_PRIORITY_NORMAL), @@ -105,7 +106,7 @@ AudioTrack::AudioTrack( { mStatus = set(streamType, sampleRate, format, channelMask, frameCount, flags, cbf, user, 
notificationFrames, - 0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType); + 0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo); } AudioTrack::AudioTrack( @@ -119,7 +120,8 @@ AudioTrack::AudioTrack( void* user, int notificationFrames, int sessionId, - transfer_type transferType) + transfer_type transferType, + const audio_offload_info_t *offloadInfo) : mStatus(NO_INIT), mIsTimed(false), mPreviousPriority(ANDROID_PRIORITY_NORMAL), @@ -127,7 +129,7 @@ AudioTrack::AudioTrack( { mStatus = set(streamType, sampleRate, format, channelMask, 0 /*frameCount*/, flags, cbf, user, notificationFrames, - sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType); + sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo); } AudioTrack::~AudioTrack() @@ -164,7 +166,8 @@ status_t AudioTrack::set( const sp& sharedBuffer, bool threadCanCallJava, int sessionId, - transfer_type transferType) + transfer_type transferType, + const audio_offload_info_t *offloadInfo) { switch (transferType) { case TRANSFER_DEFAULT: @@ -284,7 +287,8 @@ status_t AudioTrack::set( audio_io_handle_t output = AudioSystem::getOutput( streamType, sampleRate, format, channelMask, - flags); + flags, + offloadInfo); if (output == 0) { ALOGE("Could not get audio output for stream type %d", streamType); @@ -1543,6 +1547,21 @@ status_t AudioTrack::restoreTrack_l(const char *from) return result; } +status_t AudioTrack::setParameters(const String8& keyValuePairs) +{ + AutoMutex lock(mLock); + if (mAudioTrack != 0) { + return mAudioTrack->setParameters(keyValuePairs); + } else { + return NO_INIT; + } +} + +String8 AudioTrack::getParameters(const String8& keys) +{ + return String8::empty(); +} + status_t AudioTrack::dump(int fd, const Vector& args) const { diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index 2f18680..e4df77d 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -361,7 +361,8 @@ public: audio_format_t *pFormat, audio_channel_mask_t *pChannelMask, uint32_t *pLatencyMs, - audio_output_flags_t flags) + audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo) { Parcel data, reply; audio_devices_t devices = pDevices ? 
*pDevices : (audio_devices_t)0; diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp index 386c351..57de58f 100644 --- a/media/libmedia/IAudioPolicyService.cpp +++ b/media/libmedia/IAudioPolicyService.cpp @@ -56,7 +56,8 @@ enum { GET_DEVICES_FOR_STREAM, QUERY_DEFAULT_PRE_PROCESSING, SET_EFFECT_ENABLED, - IS_STREAM_ACTIVE_REMOTELY + IS_STREAM_ACTIVE_REMOTELY, + IS_OFFLOAD_SUPPORTED }; class BpAudioPolicyService : public BpInterface @@ -126,7 +127,8 @@ public: uint32_t samplingRate, audio_format_t format, audio_channel_mask_t channelMask, - audio_output_flags_t flags) + audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo) { Parcel data, reply; data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); @@ -374,6 +376,12 @@ public: *count = retCount; return status; } + + virtual bool isOffloadSupported(const audio_offload_info_t& info) + { + // stub function + return false; + } }; IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService"); diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp index e92f8aa..a2b49a3 100644 --- a/media/libmedia/IAudioTrack.cpp +++ b/media/libmedia/IAudioTrack.cpp @@ -39,6 +39,7 @@ enum { ALLOCATE_TIMED_BUFFER, QUEUE_TIMED_BUFFER, SET_MEDIA_TIME_TRANSFORM, + SET_PARAMETERS }; class BpAudioTrack : public BpInterface @@ -154,6 +155,17 @@ public: } return status; } + + virtual status_t setParameters(const String8& keyValuePairs) { + Parcel data, reply; + data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor()); + data.writeString8(keyValuePairs); + status_t status = remote()->transact(SET_PARAMETERS, data, &reply); + if (status == NO_ERROR) { + status = reply.readInt32(); + } + return status; + } }; IMPLEMENT_META_INTERFACE(AudioTrack, "android.media.IAudioTrack"); @@ -223,6 +235,12 @@ status_t BnAudioTrack::onTransact( reply->writeInt32(setMediaTimeTransform(xform, target)); return NO_ERROR; } break; + case SET_PARAMETERS: { + CHECK_INTERFACE(IAudioTrack, data, reply); + String8 keyValuePairs(data.readString8()); + reply->writeInt32(setParameters(keyValuePairs)); + return NO_ERROR; + } break; default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index fa1ff36..53dce65 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -1385,7 +1385,8 @@ status_t MediaPlayerService::AudioOutput::open( uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask, audio_format_t format, int bufferCount, AudioCallback cb, void *cookie, - audio_output_flags_t flags) + audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo) { mCallback = cb; mCallbackCookie = cookie; @@ -1661,7 +1662,8 @@ void MediaPlayerService::AudioOutput::CallbackWrapper( } size_t actualSize = (*me->mCallback)( - me, buffer->raw, buffer->size, me->mCallbackCookie); + me, buffer->raw, buffer->size, me->mCallbackCookie, + CB_EVENT_FILL_BUFFER); if (actualSize == 0 && buffer->size > 0 && me->mNextOutput == NULL) { // We've reached EOS but the audio track is not stopped yet, @@ -1767,7 +1769,8 @@ bool CallbackThread::threadLoop() { } size_t actualSize = - (*mCallback)(sink.get(), mBuffer, mBufferSize, mCookie); + (*mCallback)(sink.get(), mBuffer, mBufferSize, mCookie, + MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER); if (actualSize > 0) { sink->write(mBuffer, actualSize); 
@@ -1781,7 +1784,8 @@ bool CallbackThread::threadLoop() { status_t MediaPlayerService::AudioCache::open( uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask, audio_format_t format, int bufferCount, - AudioCallback cb, void *cookie, audio_output_flags_t flags) + AudioCallback cb, void *cookie, audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo) { ALOGV("open(%u, %d, 0x%x, %d, %d)", sampleRate, channelCount, channelMask, format, bufferCount); if (mHeap->getHeapID() < 0) { diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index e586156..1f8bcc7 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -94,7 +94,8 @@ class MediaPlayerService : public BnMediaPlayerService uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask, audio_format_t format, int bufferCount, AudioCallback cb, void *cookie, - audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE); + audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, + const audio_offload_info_t *offloadInfo = NULL); virtual void start(); virtual ssize_t write(const void* buffer, size_t size); @@ -195,7 +196,8 @@ class MediaPlayerService : public BnMediaPlayerService uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask, audio_format_t format, int bufferCount = 1, AudioCallback cb = NULL, void *cookie = NULL, - audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE); + audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, + const audio_offload_info_t *offloadInfo = NULL); virtual void start(); virtual ssize_t write(const void* buffer, size_t size); diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp index 92efae8..61d6746 100644 --- a/media/libstagefright/AudioPlayer.cpp +++ b/media/libstagefright/AudioPlayer.cpp @@ -34,7 +34,7 @@ namespace android { AudioPlayer::AudioPlayer( const sp &audioSink, - bool allowDeepBuffering, + uint32_t flags, AwesomePlayer *observer) : mInputBuffer(NULL), mSampleRate(0), @@ -52,7 +52,7 @@ AudioPlayer::AudioPlayer( mFirstBufferResult(OK), mFirstBuffer(NULL), mAudioSink(audioSink), - mAllowDeepBuffering(allowDeepBuffering), + mAllowDeepBuffering((flags & ALLOW_DEEP_BUFFERING) != 0), mObserver(observer), mPinnedTimeUs(-1ll) { } @@ -304,7 +304,8 @@ status_t AudioPlayer::setPlaybackRatePermille(int32_t ratePermille) { // static size_t AudioPlayer::AudioSinkCallback( MediaPlayerBase::AudioSink *audioSink, - void *buffer, size_t size, void *cookie) { + void *buffer, size_t size, void *cookie, + MediaPlayerBase::AudioSink::cb_event_t event) { AudioPlayer *me = (AudioPlayer *)cookie; return me->fillBuffer(buffer, size); diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp index b0df379..e9789d3 100644 --- a/media/libstagefright/Utils.cpp +++ b/media/libstagefright/Utils.cpp @@ -471,5 +471,24 @@ AString MakeUserAgent() { return ua; } +status_t sendMetaDataToHal(sp& sink, + const sp& meta) +{ + // stub + return OK; +} + +status_t mapMimeToAudioFormat(audio_format_t& format, const char* mime) +{ + // stub + return BAD_VALUE; +} + +bool canOffloadStream(const sp& meta, bool hasVideo, bool isStreaming) +{ + // stub + return false; +} + } // namespace android diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h index 2306f31..0d17d65 100644 --- a/media/libstagefright/include/AwesomePlayer.h +++ 
b/media/libstagefright/include/AwesomePlayer.h @@ -25,6 +25,7 @@ #include #include #include +#include #include #include @@ -100,7 +101,7 @@ struct AwesomePlayer { void postAudioEOS(int64_t delayUs = 0ll); void postAudioSeekComplete(); - + void postAudioTearDown(); status_t dump(int fd, const Vector &args) const; private: @@ -171,6 +172,7 @@ private: ssize_t mActiveAudioTrackIndex; sp mAudioTrack; + sp mOmxSource; sp mAudioSource; AudioPlayer *mAudioPlayer; int64_t mDurationUs; @@ -211,7 +213,8 @@ private: bool mAudioStatusEventPending; sp mVideoLagEvent; bool mVideoLagEventPending; - + sp mAudioTearDownEvent; + bool mAudioTearDownEventPending; sp mAsyncPrepareEvent; Condition mPreparedCondition; bool mIsAsyncPrepare; @@ -223,6 +226,8 @@ private: void postStreamDoneEvent_l(status_t status); void postCheckAudioStatusEvent(int64_t delayUs); void postVideoLagEvent_l(); + void postAudioTearDownEvent(); + status_t play_l(); MediaBuffer *mVideoBuffer; @@ -257,6 +262,7 @@ private: void setAudioSource(sp source); status_t initAudioDecoder(); + void setVideoSource(sp source); status_t initVideoDecoder(uint32_t flags = 0); @@ -273,6 +279,9 @@ private: void abortPrepare(status_t err); void finishAsyncPrepare_l(); void onVideoLagUpdate(); + void onAudioTearDownEvent(); + + void beginPrepareAsync_l(); bool getCachedDuration_l(int64_t *durationUs, bool *eos); @@ -285,6 +294,7 @@ private: void finishSeekIfNecessary(int64_t videoTimeUs); void ensureCacheIsFetching_l(); + void createAudioPlayer_l(); status_t startAudioPlayer_l(bool sendErrorNotification = true); void shutdownVideoDecoder_l(); @@ -327,6 +337,9 @@ private: Vector mTracks; } mStats; + bool mOffloadAudio; + bool mAudioTearDown; + status_t setVideoScalingMode(int32_t mode); status_t setVideoScalingMode_l(int32_t mode); status_t getTrackInfo(Parcel* reply) const; diff --git a/media/libstagefright/include/ESDS.h b/media/libstagefright/include/ESDS.h index 3a79951..2f40dae 100644 --- a/media/libstagefright/include/ESDS.h +++ b/media/libstagefright/include/ESDS.h @@ -33,6 +33,9 @@ public: status_t getObjectTypeIndication(uint8_t *objectTypeIndication) const; status_t getCodecSpecificInfo(const void **data, size_t *size) const; + status_t getCodecSpecificOffset(size_t *offset, size_t *size) const; + status_t getBitRate(uint32_t *brateMax, uint32_t *brateAvg) const; + status_t getStreamType(uint8_t *streamType) const; private: enum { @@ -49,6 +52,9 @@ private: size_t mDecoderSpecificOffset; size_t mDecoderSpecificLength; uint8_t mObjectTypeIndication; + uint8_t mStreamType; + uint32_t mBitRateMax; + uint32_t mBitRateAvg; status_t skipDescriptorHeader( size_t offset, size_t size, diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index a6edb77..c8e8aba 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -981,11 +981,12 @@ size_t AudioFlinger::getInputBufferSize(uint32_t sampleRate, audio_format_t form AutoMutex lock(mHardwareLock); mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE; - struct audio_config config = { - sample_rate: sampleRate, - channel_mask: channelMask, - format: format, - }; + struct audio_config config; + memset(&config, 0, sizeof(config)); + config.sample_rate = sampleRate; + config.channel_mask = channelMask; + config.format = format; + audio_hw_device_t *dev = mPrimaryHardwareDev->hwDevice(); size_t size = dev->get_input_buffer_size(dev, &config); mHardwareStatus = AUDIO_HW_IDLE; @@ -1388,15 +1389,19 @@ audio_io_handle_t 
AudioFlinger::openOutput(audio_module_handle_t module, audio_format_t *pFormat, audio_channel_mask_t *pChannelMask, uint32_t *pLatencyMs, - audio_output_flags_t flags) + audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo) { status_t status; PlaybackThread *thread = NULL; - struct audio_config config = { - sample_rate: pSamplingRate ? *pSamplingRate : 0, - channel_mask: pChannelMask ? *pChannelMask : 0, - format: pFormat ? *pFormat : AUDIO_FORMAT_DEFAULT, - }; + struct audio_config config; + config.sample_rate = (pSamplingRate != NULL) ? *pSamplingRate : 0; + config.channel_mask = (pChannelMask != NULL) ? *pChannelMask : 0; + config.format = (pFormat != NULL) ? *pFormat : AUDIO_FORMAT_DEFAULT; + if (offloadInfo) { + config.offload_info = *offloadInfo; + } + audio_stream_out_t *outStream = NULL; AudioHwDevice *outHwDev; @@ -1591,11 +1596,11 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, { status_t status; RecordThread *thread = NULL; - struct audio_config config = { - sample_rate: pSamplingRate ? *pSamplingRate : 0, - channel_mask: pChannelMask ? *pChannelMask : 0, - format: pFormat ? *pFormat : AUDIO_FORMAT_DEFAULT, - }; + struct audio_config config; + config.sample_rate = (pSamplingRate != NULL) ? *pSamplingRate : 0; + config.channel_mask = (pChannelMask != NULL) ? *pChannelMask : 0; + config.format = (pFormat != NULL) ? *pFormat : AUDIO_FORMAT_DEFAULT; + uint32_t reqSamplingRate = config.sample_rate; audio_format_t reqFormat = config.format; audio_channel_mask_t reqChannels = config.channel_mask; diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 05dbab1..b640b31 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -160,7 +160,8 @@ public: audio_format_t *pFormat, audio_channel_mask_t *pChannelMask, uint32_t *pLatencyMs, - audio_output_flags_t flags); + audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo); virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1, audio_io_handle_t output2); @@ -406,6 +407,8 @@ private: int target); virtual status_t onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags); + + virtual status_t setParameters(const String8& keyValuePairs); private: const sp mTrack; }; diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp index 2706880..fd4431c 100644 --- a/services/audioflinger/AudioPolicyService.cpp +++ b/services/audioflinger/AudioPolicyService.cpp @@ -222,15 +222,16 @@ audio_io_handle_t AudioPolicyService::getOutput(audio_stream_type_t stream, uint32_t samplingRate, audio_format_t format, audio_channel_mask_t channelMask, - audio_output_flags_t flags) + audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo) { if (mpAudioPolicy == NULL) { return 0; } ALOGV("getOutput()"); Mutex::Autolock _l(mLock); - return mpAudioPolicy->get_output(mpAudioPolicy, stream, samplingRate, format, channelMask, - flags); + return mpAudioPolicy->get_output(mpAudioPolicy, stream, samplingRate, + format, channelMask, flags, offloadInfo); } status_t AudioPolicyService::startOutput(audio_io_handle_t output, @@ -1055,6 +1056,11 @@ int AudioPolicyService::setVoiceVolume(float volume, int delayMs) return (int)mAudioCommandThread->voiceVolumeCommand(volume, delayMs); } +bool AudioPolicyService::isOffloadSupported(const audio_offload_info_t& info) +{ + return false; // stub function +} + // 
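// Illustrative sketch (not part of this change): how a caller could use the new
// isOffloadSupported() entry point before requesting a compressed-offload output.
// The output flag name and the offload_info field names used below are assumptions,
// not taken from this patch.
audio_offload_info_t offloadInfo;
memset(&offloadInfo, 0, sizeof(offloadInfo));
offloadInfo.sample_rate = 44100;
offloadInfo.format = AUDIO_FORMAT_MP3;
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps != 0 && aps->isOffloadSupported(offloadInfo)) {
    flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
}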
---------------------------------------------------------------------------- // Audio pre-processing configuration // ---------------------------------------------------------------------------- @@ -1387,7 +1393,8 @@ static audio_io_handle_t aps_open_output_on_module(void *service, audio_format_t *pFormat, audio_channel_mask_t *pChannelMask, uint32_t *pLatencyMs, - audio_output_flags_t flags) + audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo) { sp af = AudioSystem::get_audio_flinger(); if (af == 0) { diff --git a/services/audioflinger/AudioPolicyService.h b/services/audioflinger/AudioPolicyService.h index 53238fa..e723c47 100644 --- a/services/audioflinger/AudioPolicyService.h +++ b/services/audioflinger/AudioPolicyService.h @@ -67,7 +67,8 @@ public: audio_format_t format = AUDIO_FORMAT_DEFAULT, audio_channel_mask_t channelMask = 0, audio_output_flags_t flags = - AUDIO_OUTPUT_FLAG_NONE); + AUDIO_OUTPUT_FLAG_NONE, + const audio_offload_info_t *offloadInfo = NULL); virtual status_t startOutput(audio_io_handle_t output, audio_stream_type_t stream, int session = 0); @@ -136,6 +137,7 @@ public: virtual status_t startTone(audio_policy_tone_t tone, audio_stream_type_t stream); virtual status_t stopTone(); virtual status_t setVoiceVolume(float volume, int delayMs = 0); + virtual bool isOffloadSupported(const audio_offload_info_t &config); private: AudioPolicyService() ANDROID_API; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index bfc197c..f0dbee3 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -250,6 +250,10 @@ void AudioFlinger::TrackHandle::pause() { mTrack->pause(); } +status_t AudioFlinger::TrackHandle::setParameters(const String8& keyValuePairs) { + return INVALID_OPERATION; // stub function +} + status_t AudioFlinger::TrackHandle::attachAuxEffect(int EffectId) { return mTrack->attachAuxEffect(EffectId); -- cgit v1.1 From c7ba4a5c938d191bf0e477fc9b9aa4f0cba986ef Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Mon, 1 Jul 2013 09:23:55 -0700 Subject: Camera2Api: Creating a JPEG stream needs to use the right size JPEG streams have variable size, so use the HAL-provided max size to size the stream buffers. Change-Id: Ie6900bc9ece6e972eae93fca6aca779224c9bfc6 --- .../photography/CameraDeviceClient.cpp | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp index 3209a56..bd6b60a 100644 --- a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp @@ -337,8 +337,23 @@ status_t CameraDeviceClient::createStream(int width, int height, int format, // after each call, but only once we are done with all. 
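// Illustrative sketch only (not part of this change): a hypothetical helper that
// resolves a BLOB (JPEG) stream's buffer size from static metadata, falling back to
// a conservative bound when the HAL does not publish ANDROID_JPEG_MAX_SIZE.
static ssize_t getJpegBufferSize(CameraMetadata& staticInfo, int width, int height) {
    camera_metadata_entry_t entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE);
    if (entry.count != 0) {
        return entry.data.i32[0];   // HAL-provided worst-case compressed size
    }
    // Fallback guess: uncompressed YUV420 footprint; real code would also reserve
    // room for any blob transport header appended by the HAL.
    return (ssize_t)width * height * 3 / 2;
}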
int streamId = -1; - res = mDevice->createStream(anw, width, height, format, /*size*/1, - &streamId); + if (format == HAL_PIXEL_FORMAT_BLOB) { + // JPEG buffers need to be sized for maximum possible compressed size + CameraMetadata staticInfo = mDevice->info(); + camera_metadata_entry_t entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE); + if (entry.count == 0) { + ALOGE("%s: Camera %d: Can't find maximum JPEG size in " + "static metadata!", __FUNCTION__, mCameraId); + return INVALID_OPERATION; + } + int32_t maxJpegSize = entry.data.i32[0]; + res = mDevice->createStream(anw, width, height, format, maxJpegSize, + &streamId); + } else { + // All other streams are a known size + res = mDevice->createStream(anw, width, height, format, /*size*/0, + &streamId); + } if (res == OK) { mStreamMap.add(bufferProducer->asBinder(), streamId); -- cgit v1.1 From 1ab85ec401801ef9a9184650d0f5a1639b45eeb9 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 31 May 2013 09:18:43 -0700 Subject: Include what is needed Remove old includes. Header files only include other header files that they directly need themselves. Change-Id: Ic471386808d9f42ea19ccbd59cb50a5f83a89dd0 --- include/media/AudioSystem.h | 12 +++++------- include/media/Visualizer.h | 2 +- media/libmedia/AudioRecord.cpp | 1 + media/libmedia/AudioSystem.cpp | 1 + media/libmedia/AudioTrack.cpp | 1 + media/libmedia/JetPlayer.cpp | 2 -- media/libmedia/SoundPool.cpp | 6 ------ media/libmedia/ToneGenerator.cpp | 4 ---- media/libmedia/Visualizer.cpp | 1 + media/libmediaplayerservice/MediaPlayerService.h | 3 --- media/libstagefright/wifi-display/sink/DirectRenderer.h | 2 -- services/audioflinger/AudioFlinger.cpp | 5 +---- services/audioflinger/AudioPolicyService.cpp | 1 + services/audioflinger/Threads.cpp | 1 + 14 files changed, 13 insertions(+), 29 deletions(-) diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index 09160cc..fb1d631 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -17,20 +17,18 @@ #ifndef ANDROID_AUDIOSYSTEM_H_ #define ANDROID_AUDIOSYSTEM_H_ -#include -#include -#include - +#include +#include #include #include - -/* XXX: Should be include by all the users instead */ -#include +#include +#include namespace android { typedef void (*audio_error_callback)(status_t err); +class IAudioFlinger; class IAudioPolicyService; class String8; diff --git a/include/media/Visualizer.h b/include/media/Visualizer.h index aa58905..e429263 100644 --- a/include/media/Visualizer.h +++ b/include/media/Visualizer.h @@ -19,7 +19,7 @@ #include #include -#include +#include /** * The Visualizer class enables application to retrieve part of the currently playing audio for diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 9faa497..8ae0908 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -23,6 +23,7 @@ #include #include #include +#include #define WAIT_PERIOD_MS 10 diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index a6dedec..22d6763 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -20,6 +20,7 @@ #include #include #include +#include #include #include diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 2af162c..33c4462 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -25,6 +25,7 @@ #include #include #include +#include #define WAIT_PERIOD_MS 10 diff --git a/media/libmedia/JetPlayer.cpp b/media/libmedia/JetPlayer.cpp index 
8fe5bb3..e914b34 100644 --- a/media/libmedia/JetPlayer.cpp +++ b/media/libmedia/JetPlayer.cpp @@ -18,8 +18,6 @@ #define LOG_TAG "JetPlayer-C" #include -#include - #include diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index e1e88ec..7f10e05 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -20,14 +20,8 @@ //#define USE_SHARED_MEM_BUFFER -// XXX needed for timing latency -#include - #include #include - -#include - #include #include "SoundPoolThread.h" diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index f9ad31d..adef3be 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -16,13 +16,9 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "ToneGenerator" -#include -#include #include #include -#include -#include #include #include "media/ToneGenerator.h" diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp index 5b4071b..e519f13 100644 --- a/media/libmedia/Visualizer.cpp +++ b/media/libmedia/Visualizer.cpp @@ -28,6 +28,7 @@ #include #include +#include namespace android { diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index 1f8bcc7..f7076cc 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -20,15 +20,12 @@ #include -#include #include -#include #include #include #include #include -#include #include #include #include diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h index c5a4a83..1e7dc34 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.h +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.h @@ -23,9 +23,7 @@ namespace android { struct ABuffer; -struct AudioTrack; struct IGraphicBufferProducer; -struct MediaCodec; // Renders audio and video data queued by calls to "queueAccessUnit". 
struct DirectRenderer : public AHandler { diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index c8e8aba..17a69fa 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -38,9 +38,6 @@ #include #include -//#include -//#include - #include #include @@ -58,12 +55,12 @@ #include #include -//#include #include #include #include +#include // ---------------------------------------------------------------------------- diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp index fd4431c..eacecf0 100644 --- a/services/audioflinger/AudioPolicyService.cpp +++ b/services/audioflinger/AudioPolicyService.cpp @@ -40,6 +40,7 @@ #include #include #include +#include namespace android { diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 0773534..ef109af 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -25,6 +25,7 @@ #include #include #include +#include #include #include -- cgit v1.1 From 85007a9bd3c310f96fed47208dfee566fd00351f Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 13 Nov 2012 15:06:37 -0800 Subject: Fix typo in logs Change-Id: I889e31ea3a45a3d8d34fdfb54ebc3947de51d2be --- media/libmedia/AudioSystem.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index a6dedec..fb19357 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -453,7 +453,7 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle } break; case OUTPUT_CLOSED: { if (gOutputs.indexOfKey(ioHandle) < 0) { - ALOGW("ioConfigChanged() closing unknow output! %d", ioHandle); + ALOGW("ioConfigChanged() closing unknown output! %d", ioHandle); break; } ALOGV("ioConfigChanged() output %d closed", ioHandle); @@ -464,7 +464,7 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle case OUTPUT_CONFIG_CHANGED: { int index = gOutputs.indexOfKey(ioHandle); if (index < 0) { - ALOGW("ioConfigChanged() modifying unknow output! %d", ioHandle); + ALOGW("ioConfigChanged() modifying unknown output! %d", ioHandle); break; } if (param2 == NULL) break; -- cgit v1.1 From 7db7df0e8d9d7cee8ba374468cdbfa0108e3337c Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 25 Jun 2013 16:13:23 -0700 Subject: AudioTrackShared cleanup Maintain unreleased frame count on client side also (was already there on server side). Assertion failure instead of BAD_VALUE status for incorrect usage of APIs. Clean up error handling code. 
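A minimal client-side sketch of the contract the proxies now assert (types and calls
as in AudioTrackShared.h; the surrounding proxy setup and frameSize are assumed):

    ClientProxy::Buffer buf;
    buf.mFrameCount = framesToWrite;   // must be non-zero: a zero count now asserts
    status_t status = proxy->obtainBuffer(&buf);
    if (status == NO_ERROR) {
        // obtainBuffer() may return fewer frames than requested
        memcpy(buf.mRaw, source, buf.mFrameCount * frameSize);
        // release no more than was just obtained; mUnreleased tracks this on both
        // client and server side, and misuse is now a hard assertion, not BAD_VALUE
        proxy->releaseBuffer(&buf);
    }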
Change-Id: I23ca2f6f8a7c18645309ee5d64fbc844429bcba8 --- include/private/media/AudioTrackShared.h | 4 +- media/libmedia/AudioTrackShared.cpp | 63 ++++++++++++++++---------------- 2 files changed, 34 insertions(+), 33 deletions(-) diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index b41684a..0592683 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -168,6 +168,7 @@ protected: const bool mIsOut; // true for AudioTrack, false for AudioRecord const bool mClientInServer; // true for OutputTrack, false for AudioTrack & AudioRecord bool mIsShutdown; // latch set to true when shared memory corruption detected + size_t mUnreleased; // unreleased frames remaining from most recent obtainBuffer }; // ---------------------------------------------------------------------------- @@ -213,7 +214,7 @@ public: // DEAD_OBJECT Server has died or invalidated, caller should destroy this proxy and re-create. // -EINTR Call has been interrupted. Look around to see why, and then perhaps try again. // NO_INIT Shared memory is corrupt. - // BAD_VALUE On entry buffer == NULL or buffer->mFrameCount == 0. + // Assertion failure on entry, if buffer == NULL or buffer->mFrameCount == 0. status_t obtainBuffer(Buffer* buffer, const struct timespec *requested = NULL, struct timespec *elapsed = NULL); @@ -372,7 +373,6 @@ public: virtual void releaseBuffer(Buffer* buffer); protected: - size_t mUnreleased; // unreleased frames remaining from most recent obtainBuffer() size_t mAvailToClient; // estimated frames available to client prior to releaseBuffer() private: int32_t mFlush; // our copy of cblk->u.mStreaming.mFlush, for streaming output only diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index 5f8f292..554802d 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -38,7 +38,7 @@ Proxy::Proxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t bool isOut, bool clientInServer) : mCblk(cblk), mBuffers(buffers), mFrameCount(frameCount), mFrameSize(frameSize), mFrameCountP2(roundup(frameCount)), mIsOut(isOut), mClientInServer(clientInServer), - mIsShutdown(false) + mIsShutdown(false), mUnreleased(0) { } @@ -64,10 +64,7 @@ const struct timespec ClientProxy::kNonBlocking = {0 /*tv_sec*/, 0 /*tv_nsec*/}; status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *requested, struct timespec *elapsed) { - if (buffer == NULL || buffer->mFrameCount == 0) { - ALOGE("%s BAD_VALUE", __func__); - return BAD_VALUE; - } + LOG_ALWAYS_FATAL_IF(buffer == NULL || buffer->mFrameCount == 0); struct timespec total; // total elapsed time spent waiting total.tv_sec = 0; total.tv_nsec = 0; @@ -164,7 +161,7 @@ status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *reques buffer->mRaw = part1 > 0 ? &((char *) mBuffers)[(mIsOut ? 
rear : front) * mFrameSize] : NULL; buffer->mNonContig = avail - part1; - // mUnreleased = part1; + mUnreleased = part1; status = NO_ERROR; break; } @@ -238,6 +235,7 @@ status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *reques case -EWOULDBLOCK: // benign race condition with server case -EINTR: // wait was interrupted by signal or other spurious wakeup case -ETIMEDOUT: // time-out expired + // FIXME these error/non-0 status are being dropped break; default: ALOGE("%s unexpected error %d", __func__, ret); @@ -252,6 +250,7 @@ end: buffer->mFrameCount = 0; buffer->mRaw = NULL; buffer->mNonContig = 0; + mUnreleased = 0; } if (elapsed != NULL) { *elapsed = total; @@ -268,14 +267,17 @@ end: void ClientProxy::releaseBuffer(Buffer* buffer) { + LOG_ALWAYS_FATAL_IF(buffer == NULL); size_t stepCount = buffer->mFrameCount; - // FIXME - // check mUnreleased - // verify that stepCount <= frameCount returned by the last obtainBuffer() - // verify stepCount not > total frame count of pipe - if (stepCount == 0) { + if (stepCount == 0 || mIsShutdown) { + // prevent accidental re-use of buffer + buffer->mFrameCount = 0; + buffer->mRaw = NULL; + buffer->mNonContig = 0; return; } + LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased && mUnreleased <= mFrameCount)); + mUnreleased -= stepCount; audio_track_cblk_t* cblk = mCblk; // Both of these barriers are required if (mIsOut) { @@ -362,20 +364,18 @@ size_t StaticAudioTrackClientProxy::getBufferPosition() ServerProxy::ServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize, bool isOut, bool clientInServer) - : Proxy(cblk, buffers, frameCount, frameSize, isOut, clientInServer), mUnreleased(0), + : Proxy(cblk, buffers, frameCount, frameSize, isOut, clientInServer), mAvailToClient(0), mFlush(0), mDeferWake(false) { } status_t ServerProxy::obtainBuffer(Buffer* buffer) { + LOG_ALWAYS_FATAL_IF(buffer == NULL || buffer->mFrameCount == 0); if (mIsShutdown) { - buffer->mFrameCount = 0; - buffer->mRaw = NULL; - buffer->mNonContig = 0; - mUnreleased = 0; - return NO_INIT; + goto no_init; } + { audio_track_cblk_t* cblk = mCblk; // compute number of frames available to write (AudioTrack) or read (AudioRecord), // or use previous cached value from framesReady(), with added barrier if it omits. @@ -402,11 +402,7 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer) mIsShutdown = true; } if (mIsShutdown) { - buffer->mFrameCount = 0; - buffer->mRaw = NULL; - buffer->mNonContig = 0; - mUnreleased = 0; - return NO_INIT; + goto no_init; } // don't allow filling pipe beyond the nominal size size_t availToServer; @@ -443,23 +439,27 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer) // FIXME need to test for recording mDeferWake = part1 < ask && availToServer >= ask; return part1 > 0 ? 
NO_ERROR : WOULD_BLOCK; + } +no_init: + buffer->mFrameCount = 0; + buffer->mRaw = NULL; + buffer->mNonContig = 0; + mUnreleased = 0; + return NO_INIT; } void ServerProxy::releaseBuffer(Buffer* buffer) { - if (mIsShutdown) { - buffer->mFrameCount = 0; - buffer->mRaw = NULL; - buffer->mNonContig = 0; - return; - } + LOG_ALWAYS_FATAL_IF(buffer == NULL); size_t stepCount = buffer->mFrameCount; - LOG_ALWAYS_FATAL_IF(stepCount > mUnreleased); - if (stepCount == 0) { + if (stepCount == 0 || mIsShutdown) { + // prevent accidental re-use of buffer + buffer->mFrameCount = 0; buffer->mRaw = NULL; buffer->mNonContig = 0; return; } + LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased && mUnreleased <= mFrameCount)); mUnreleased -= stepCount; audio_track_cblk_t* cblk = mCblk; if (mIsOut) { @@ -637,8 +637,9 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer) void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer) { size_t stepCount = buffer->mFrameCount; - LOG_ALWAYS_FATAL_IF(stepCount > mUnreleased); + LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased)); if (stepCount == 0) { + // prevent accidental re-use of buffer buffer->mRaw = NULL; buffer->mNonContig = 0; return; -- cgit v1.1 From 7c5977f0322204240b3d1874a44c1f3911275ae5 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 2 Jul 2013 14:17:22 -0700 Subject: Explicitly compare raw pointers to NULL Change-Id: Id2c7828a36a6912333465475b21fa87e294c83c7 --- media/libmedia/IAudioFlinger.cpp | 38 ++++++++++++++++++++------------------ 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index e4df77d..6bb7df6 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -365,11 +365,12 @@ public: const audio_offload_info_t *offloadInfo) { Parcel data, reply; - audio_devices_t devices = pDevices ? *pDevices : (audio_devices_t)0; - uint32_t samplingRate = pSamplingRate ? *pSamplingRate : 0; - audio_format_t format = pFormat ? *pFormat : AUDIO_FORMAT_DEFAULT; - audio_channel_mask_t channelMask = pChannelMask ? *pChannelMask : (audio_channel_mask_t)0; - uint32_t latency = pLatencyMs ? *pLatencyMs : 0; + audio_devices_t devices = pDevices != NULL ? *pDevices : (audio_devices_t)0; + uint32_t samplingRate = pSamplingRate != NULL ? *pSamplingRate : 0; + audio_format_t format = pFormat != NULL ? *pFormat : AUDIO_FORMAT_DEFAULT; + audio_channel_mask_t channelMask = pChannelMask != NULL ? + *pChannelMask : (audio_channel_mask_t)0; + uint32_t latency = pLatencyMs != NULL ? 
*pLatencyMs : 0; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); data.writeInt32(module); @@ -383,15 +384,15 @@ public: audio_io_handle_t output = (audio_io_handle_t) reply.readInt32(); ALOGV("openOutput() returned output, %d", output); devices = (audio_devices_t)reply.readInt32(); - if (pDevices) *pDevices = devices; + if (pDevices != NULL) *pDevices = devices; samplingRate = reply.readInt32(); - if (pSamplingRate) *pSamplingRate = samplingRate; + if (pSamplingRate != NULL) *pSamplingRate = samplingRate; format = (audio_format_t) reply.readInt32(); - if (pFormat) *pFormat = format; + if (pFormat != NULL) *pFormat = format; channelMask = (audio_channel_mask_t)reply.readInt32(); - if (pChannelMask) *pChannelMask = channelMask; + if (pChannelMask != NULL) *pChannelMask = channelMask; latency = reply.readInt32(); - if (pLatencyMs) *pLatencyMs = latency; + if (pLatencyMs != NULL) *pLatencyMs = latency; return output; } @@ -440,10 +441,11 @@ public: audio_channel_mask_t *pChannelMask) { Parcel data, reply; - audio_devices_t devices = pDevices ? *pDevices : (audio_devices_t)0; - uint32_t samplingRate = pSamplingRate ? *pSamplingRate : 0; - audio_format_t format = pFormat ? *pFormat : AUDIO_FORMAT_DEFAULT; - audio_channel_mask_t channelMask = pChannelMask ? *pChannelMask : (audio_channel_mask_t)0; + audio_devices_t devices = pDevices != NULL ? *pDevices : (audio_devices_t)0; + uint32_t samplingRate = pSamplingRate != NULL ? *pSamplingRate : 0; + audio_format_t format = pFormat != NULL ? *pFormat : AUDIO_FORMAT_DEFAULT; + audio_channel_mask_t channelMask = pChannelMask != NULL ? + *pChannelMask : (audio_channel_mask_t)0; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); data.writeInt32(module); @@ -454,13 +456,13 @@ public: remote()->transact(OPEN_INPUT, data, &reply); audio_io_handle_t input = (audio_io_handle_t) reply.readInt32(); devices = (audio_devices_t)reply.readInt32(); - if (pDevices) *pDevices = devices; + if (pDevices != NULL) *pDevices = devices; samplingRate = reply.readInt32(); - if (pSamplingRate) *pSamplingRate = samplingRate; + if (pSamplingRate != NULL) *pSamplingRate = samplingRate; format = (audio_format_t) reply.readInt32(); - if (pFormat) *pFormat = format; + if (pFormat != NULL) *pFormat = format; channelMask = (audio_channel_mask_t)reply.readInt32(); - if (pChannelMask) *pChannelMask = channelMask; + if (pChannelMask != NULL) *pChannelMask = channelMask; return input; } -- cgit v1.1 From 656e86250cd68f7f362c50a4bc92a865e9deacbe Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Fri, 28 Jun 2013 14:03:03 -0700 Subject: Pass additional arg to acquireBuffer calls. 
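A sketch of the consumer-side pattern after this change (the second acquireBuffer()
argument is assumed to be a present-time hint, with 0 meaning no constraint):

    BufferQueue::BufferItem item;
    status_t err = mBufferQueue->acquireBuffer(&item, 0);
    if (err == OK) {
        // ... consume the buffer ...
        mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber,
                EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
    } else if (err != BufferQueue::NO_BUFFER_AVAILABLE) {
        ALOGE("acquireBuffer failed: %d", err);
    }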
Bug 7900302 Change-Id: I30b9cca783e0a48f77035b745b7d5e20edf10f27 --- media/libstagefright/SurfaceMediaSource.cpp | 2 +- media/libstagefright/omx/GraphicBufferSource.cpp | 4 ++-- services/camera/libcameraservice/camera2/StreamingProcessor.cpp | 4 ++-- services/camera/libcameraservice/camera2/ZslProcessor.cpp | 2 +- services/camera/libcameraservice/gui/RingBufferConsumer.cpp | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp index 71b6569..305e7e0 100644 --- a/media/libstagefright/SurfaceMediaSource.cpp +++ b/media/libstagefright/SurfaceMediaSource.cpp @@ -293,7 +293,7 @@ status_t SurfaceMediaSource::read( MediaBuffer **buffer, // wait here till the frames come in from the client side while (mStarted) { - status_t err = mBufferQueue->acquireBuffer(&item); + status_t err = mBufferQueue->acquireBuffer(&item, 0); if (err == BufferQueue::NO_BUFFER_AVAILABLE) { // wait for a buffer to be queued mFrameAvailableCondition.wait(mMutex); diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp index b3a8463..b3167b5 100644 --- a/media/libstagefright/omx/GraphicBufferSource.cpp +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -251,7 +251,7 @@ bool GraphicBufferSource::fillCodecBuffer_l() { ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%d", mNumFramesAvailable); BufferQueue::BufferItem item; - status_t err = mBufferQueue->acquireBuffer(&item); + status_t err = mBufferQueue->acquireBuffer(&item, 0); if (err == BufferQueue::NO_BUFFER_AVAILABLE) { // shouldn't happen ALOGW("fillCodecBuffer_l: frame was not available"); @@ -422,7 +422,7 @@ void GraphicBufferSource::onFrameAvailable() { ALOGW("onFrameAvailable: EOS is set, ignoring frame"); BufferQueue::BufferItem item; - status_t err = mBufferQueue->acquireBuffer(&item); + status_t err = mBufferQueue->acquireBuffer(&item, 0); if (err == OK) { mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence); diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp index fed05a6..6fa58be 100644 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp @@ -576,7 +576,7 @@ status_t StreamingProcessor::processRecordingFrame() { if (client == 0) { // Discard frames during shutdown BufferItemConsumer::BufferItem imgBuffer; - res = mRecordingConsumer->acquireBuffer(&imgBuffer); + res = mRecordingConsumer->acquireBuffer(&imgBuffer, 0); if (res != OK) { if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { ALOGE("%s: Camera %d: Can't acquire recording buffer: %s (%d)", @@ -594,7 +594,7 @@ status_t StreamingProcessor::processRecordingFrame() { SharedParameters::Lock l(client->getParameters()); Mutex::Autolock m(mMutex); BufferItemConsumer::BufferItem imgBuffer; - res = mRecordingConsumer->acquireBuffer(&imgBuffer); + res = mRecordingConsumer->acquireBuffer(&imgBuffer, 0); if (res != OK) { if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { ALOGE("%s: Camera %d: Can't acquire recording buffer: %s (%d)", diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp index 94059cd..8af8276 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp @@ 
-426,7 +426,7 @@ status_t ZslProcessor::processNewZslBuffer() { } ALOGVV("Trying to get next buffer"); BufferItemConsumer::BufferItem item; - res = zslConsumer->acquireBuffer(&item); + res = zslConsumer->acquireBuffer(&item, 0); if (res != OK) { if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { ALOGE("%s: Camera %d: Error receiving ZSL image buffer: " diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp index dfa1066..7625735 100644 --- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp +++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp @@ -284,7 +284,7 @@ void RingBufferConsumer::onFrameAvailable() { /** * Acquire new frame */ - err = acquireBufferLocked(&item); + err = acquireBufferLocked(&item, 0); if (err != OK) { if (err != NO_BUFFER_AVAILABLE) { BI_LOGE("Error acquiring buffer: %s (%d)", strerror(err), err); -- cgit v1.1 From f90b123a3a67316284ba4b48a4fb0c5a36158545 Mon Sep 17 00:00:00 2001 From: Sungsoo Lim Date: Wed, 10 Jul 2013 15:09:38 +0900 Subject: Fix typo in AwesomePlayer Change-Id: I32113e382a3033c9a1b038dc06e4ccddc2a97d7f --- media/libstagefright/AwesomePlayer.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index b505518..a4b0194 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -206,7 +206,7 @@ AwesomePlayer::AwesomePlayer() mBufferingEvent = new AwesomeEvent(this, &AwesomePlayer::onBufferingUpdate); mBufferingEventPending = false; mVideoLagEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoLagUpdate); - mVideoEventPending = false; + mVideoLagEventPending = false; mCheckAudioStatusEvent = new AwesomeEvent( this, &AwesomePlayer::onCheckAudioStatus); -- cgit v1.1 From b7f08d386f2bddb8f3c87858f9204754b7fdb857 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 18 Jun 2013 11:46:28 -0700 Subject: Clean up references to AUDIO_FORMAT_PCM_8_24_BIT Change-Id: I08771eb2664b7082561a40937218c7f4414e2cce --- media/libeffects/testlibs/AudioFormatAdapter.h | 1 + media/libeffects/testlibs/EffectEqualizer.cpp | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libeffects/testlibs/AudioFormatAdapter.h b/media/libeffects/testlibs/AudioFormatAdapter.h index 41f1810..dea2734 100644 --- a/media/libeffects/testlibs/AudioFormatAdapter.h +++ b/media/libeffects/testlibs/AudioFormatAdapter.h @@ -75,6 +75,7 @@ public: while (numSamples > 0) { uint32_t numSamplesIter = min(numSamples, mMaxSamplesPerCall); uint32_t nSamplesChannels = numSamplesIter * mNumChannels; + // This branch of "if" is untested if (mPcmFormat == AUDIO_FORMAT_PCM_8_24_BIT) { if (mBehavior == EFFECT_BUFFER_ACCESS_WRITE) { mpProcessor->process( diff --git a/media/libeffects/testlibs/EffectEqualizer.cpp b/media/libeffects/testlibs/EffectEqualizer.cpp index c35453b..8d00206 100644 --- a/media/libeffects/testlibs/EffectEqualizer.cpp +++ b/media/libeffects/testlibs/EffectEqualizer.cpp @@ -234,8 +234,7 @@ int Equalizer_setConfig(EqualizerContext *pContext, effect_config_t *pConfig) (pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO)); CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE); - CHECK_ARG(pConfig->inputCfg.format == AUDIO_FORMAT_PCM_8_24_BIT - || pConfig->inputCfg.format == AUDIO_FORMAT_PCM_16_BIT); + CHECK_ARG(pConfig->inputCfg.format == 
AUDIO_FORMAT_PCM_16_BIT); int channelCount; if (pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_MONO) { -- cgit v1.1 From b0dfd4613225a3b2a17bdf8d85e89a4b04d65ef3 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 10 Jul 2013 16:52:47 -0700 Subject: Fix type error in AudioTrack::processAudioBuffer It returned a bool instead of nsecs_t Change-Id: If0c096dac411afc0a4142ec1e59c1fdd36d4867c --- media/libmedia/AudioTrack.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 33c4462..00f4640 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1238,7 +1238,8 @@ nsecs_t AudioTrack::processAudioBuffer(const sp& thread) if (tryCounter < 0) { ALOGE("did not receive expected priority boost on time"); } - return true; + // Run again immediately + return 0; } // Can only reference mCblk while locked -- cgit v1.1 From fb1fdc9d6603aa228362e7349451f6455c9849c2 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 10 Jul 2013 17:03:19 -0700 Subject: Add comments Change-Id: Ifbf3a46a4183c8abc0feee1c588953ab10303cc1 --- include/media/AudioTrack.h | 2 ++ include/media/IAudioFlinger.h | 4 +++- include/media/nbaio/NBLog.h | 10 ++++++++++ media/libmedia/AudioSystem.cpp | 2 ++ media/libmedia/AudioTrack.cpp | 7 +++++++ media/libmediaplayerservice/MidiFile.cpp | 2 +- services/audioflinger/StateQueue.h | 8 +++++++- services/audioflinger/Threads.h | 2 ++ 8 files changed, 34 insertions(+), 3 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 6727601..58e0deb 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -75,8 +75,10 @@ public: size_t frameCount; // number of sample frames corresponding to size; // on input it is the number of frames desired, // on output is the number of frames actually filled + // (currently ignored, but will make the primary field in future) size_t size; // input/output in bytes == frameCount * frameSize + // on output is the number of bytes actually filled // FIXME this is redundant with respect to frameCount, // and TRANSFER_OBTAIN mode is broken for 8-bit data // since we don't define the frame format diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index f8a9f2b..0aa5870 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -125,7 +125,9 @@ public: virtual String8 getParameters(audio_io_handle_t ioHandle, const String8& keys) const = 0; - // register a current process for audio output change notifications + // Register an object to receive audio input/output change and track notifications. + // For a given calling pid, AudioFlinger disregards any registrations after the first. + // Thus the IAudioFlingerClient must be a singleton per process. virtual void registerClient(const sp& client) = 0; // retrieve the audio recording buffer size diff --git a/include/media/nbaio/NBLog.h b/include/media/nbaio/NBLog.h index 107ba66..6d59ea7 100644 --- a/include/media/nbaio/NBLog.h +++ b/include/media/nbaio/NBLog.h @@ -90,6 +90,8 @@ public: virtual ~Timeline(); #endif + // Input parameter 'size' is the desired size of the timeline in byte units. + // Returns the size rounded up to a power-of-2, plus the constant size overhead for indices. 
static size_t sharedSize(size_t size); #if 0 @@ -110,8 +112,12 @@ private: class Writer : public RefBase { public: Writer(); // dummy nop implementation without shared memory + + // Input parameter 'size' is the desired size of the timeline in byte units. + // The size of the shared memory must be at least Timeline::sharedSize(size). Writer(size_t size, void *shared); Writer(size_t size, const sp& iMemory); + virtual ~Writer() { } virtual void log(const char *string); @@ -165,8 +171,12 @@ private: class Reader : public RefBase { public: + + // Input parameter 'size' is the desired size of the timeline in byte units. + // The size of the shared memory must be at least Timeline::sharedSize(size). Reader(size_t size, const void *shared); Reader(size_t size, const sp& iMemory); + virtual ~Reader() { } void dump(int fd, size_t indent = 0); diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 22d6763..7ceffd3 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -537,6 +537,8 @@ const sp& AudioSystem::get_audio_policy_service() return gAudioPolicyService; } +// --------------------------------------------------------------------------- + status_t AudioSystem::setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state, const char *device_address) diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 33c4462..3f4cbc2 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -585,6 +585,7 @@ void AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount) status_t AudioTrack::setMarkerPosition(uint32_t marker) { + // The only purpose of setting marker position is to get a callback if (mCbf == NULL) { return INVALID_OPERATION; } @@ -610,6 +611,7 @@ status_t AudioTrack::getMarkerPosition(uint32_t *marker) const status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod) { + // The only purpose of setting position update period is to get a callback if (mCbf == NULL) { return INVALID_OPERATION; } @@ -1220,6 +1222,11 @@ status_t TimedAudioTrack::setMediaTimeTransform(const LinearTransform& xform, nsecs_t AudioTrack::processAudioBuffer(const sp& thread) { + // Currently the AudioTrack thread is not created if there are no callbacks. + // Would it ever make sense to run the thread, even without callbacks? + // If so, then replace this by checks at each use for mCbf != NULL. 
+ LOG_ALWAYS_FATAL_IF(mCblk == NULL); + mLock.lock(); if (mAwaitBoost) { mAwaitBoost = false; diff --git a/media/libmediaplayerservice/MidiFile.cpp b/media/libmediaplayerservice/MidiFile.cpp index 8db5b9b..270b872 100644 --- a/media/libmediaplayerservice/MidiFile.cpp +++ b/media/libmediaplayerservice/MidiFile.cpp @@ -422,7 +422,7 @@ status_t MidiFile::setLooping(int loop) status_t MidiFile::createOutputTrack() { if (mAudioSink->open(pLibConfig->sampleRate, pLibConfig->numChannels, - CHANNEL_MASK_USE_CHANNEL_ORDER, AUDIO_FORMAT_PCM_16_BIT, 2) != NO_ERROR) { + CHANNEL_MASK_USE_CHANNEL_ORDER, AUDIO_FORMAT_PCM_16_BIT, 2 /*bufferCount*/) != NO_ERROR) { ALOGE("mAudioSink open failed"); return ERROR_OPEN_FAILED; } diff --git a/services/audioflinger/StateQueue.h b/services/audioflinger/StateQueue.h index e33b3c6..9cde642 100644 --- a/services/audioflinger/StateQueue.h +++ b/services/audioflinger/StateQueue.h @@ -31,8 +31,14 @@ // and this may result in an audible artifact // needs read-only access to a recent stable state, // but not necessarily the most current one +// only allocate and free memory when configuration changes +// avoid conventional logging, as this is a form of I/O and could block +// defer computation to other threads when feasible; for example +// cycle times are collected by fast mixer thread but the floating-point +// statistical calculations on these cycle times are computed by normal mixer +// these requirements also apply to callouts such as AudioBufferProvider and VolumeProvider // Normal mixer thread: -// periodic with typical period ~40 ms +// periodic with typical period ~20 ms // SCHED_OTHER scheduling policy and nice priority == urgent audio // ok to block, but prefer to avoid as much as possible // needs read/write access to state diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 7de6872..e15d98a 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -429,6 +429,8 @@ public: virtual status_t setSyncEvent(const sp& event); virtual bool isValidSyncEvent(const sp& event) const; + + // called with AudioFlinger lock held void invalidateTracks(audio_stream_type_t streamType); -- cgit v1.1 From 3d1982595cb9cb0856841f1f57fbdb4581189e99 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 10 Jul 2013 17:20:54 -0700 Subject: Fix indentation Change-Id: Ia28720a7d0fad8cf110c2448b967d5648d42e017 --- services/audioflinger/FastMixer.cpp | 148 ++++++++++++++++++------------------ 1 file changed, 74 insertions(+), 74 deletions(-) diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 21df1d7..12e4683 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -498,91 +498,91 @@ bool FastMixer::threadLoop() } } sleepNs = -1; - if (isWarm) { - if (sec > 0 || nsec > underrunNs) { - ATRACE_NAME("underrun"); - // FIXME only log occasionally - ALOGV("underrun: time since last cycle %d.%03ld sec", - (int) sec, nsec / 1000000L); - dumpState->mUnderruns++; - ignoreNextOverrun = true; - } else if (nsec < overrunNs) { - if (ignoreNextOverrun) { - ignoreNextOverrun = false; - } else { + if (isWarm) { + if (sec > 0 || nsec > underrunNs) { + ATRACE_NAME("underrun"); // FIXME only log occasionally - ALOGV("overrun: time since last cycle %d.%03ld sec", + ALOGV("underrun: time since last cycle %d.%03ld sec", (int) sec, nsec / 1000000L); - dumpState->mOverruns++; + dumpState->mUnderruns++; + ignoreNextOverrun = true; + } else if (nsec < overrunNs) { + if 
(ignoreNextOverrun) { + ignoreNextOverrun = false; + } else { + // FIXME only log occasionally + ALOGV("overrun: time since last cycle %d.%03ld sec", + (int) sec, nsec / 1000000L); + dumpState->mOverruns++; + } + // This forces a minimum cycle time. It: + // - compensates for an audio HAL with jitter due to sample rate conversion + // - works with a variable buffer depth audio HAL that never pulls at a + // rate < than overrunNs per buffer. + // - recovers from overrun immediately after underrun + // It doesn't work with a non-blocking audio HAL. + sleepNs = forceNs - nsec; + } else { + ignoreNextOverrun = false; } - // This forces a minimum cycle time. It: - // - compensates for an audio HAL with jitter due to sample rate conversion - // - works with a variable buffer depth audio HAL that never pulls at a rate - // < than overrunNs per buffer. - // - recovers from overrun immediately after underrun - // It doesn't work with a non-blocking audio HAL. - sleepNs = forceNs - nsec; - } else { - ignoreNextOverrun = false; } - } #ifdef FAST_MIXER_STATISTICS - if (isWarm) { - // advance the FIFO queue bounds - size_t i = bounds & (FastMixerDumpState::kSamplingN - 1); - bounds = (bounds & 0xFFFF0000) | ((bounds + 1) & 0xFFFF); - if (full) { - bounds += 0x10000; - } else if (!(bounds & (FastMixerDumpState::kSamplingN - 1))) { - full = true; - } - // compute the delta value of clock_gettime(CLOCK_MONOTONIC) - uint32_t monotonicNs = nsec; - if (sec > 0 && sec < 4) { - monotonicNs += sec * 1000000000; - } - // compute the raw CPU load = delta value of clock_gettime(CLOCK_THREAD_CPUTIME_ID) - uint32_t loadNs = 0; - struct timespec newLoad; - rc = clock_gettime(CLOCK_THREAD_CPUTIME_ID, &newLoad); - if (rc == 0) { - if (oldLoadValid) { - sec = newLoad.tv_sec - oldLoad.tv_sec; - nsec = newLoad.tv_nsec - oldLoad.tv_nsec; - if (nsec < 0) { - --sec; - nsec += 1000000000; - } - loadNs = nsec; - if (sec > 0 && sec < 4) { - loadNs += sec * 1000000000; + if (isWarm) { + // advance the FIFO queue bounds + size_t i = bounds & (FastMixerDumpState::kSamplingN - 1); + bounds = (bounds & 0xFFFF0000) | ((bounds + 1) & 0xFFFF); + if (full) { + bounds += 0x10000; + } else if (!(bounds & (FastMixerDumpState::kSamplingN - 1))) { + full = true; + } + // compute the delta value of clock_gettime(CLOCK_MONOTONIC) + uint32_t monotonicNs = nsec; + if (sec > 0 && sec < 4) { + monotonicNs += sec * 1000000000; + } + // compute raw CPU load = delta value of clock_gettime(CLOCK_THREAD_CPUTIME_ID) + uint32_t loadNs = 0; + struct timespec newLoad; + rc = clock_gettime(CLOCK_THREAD_CPUTIME_ID, &newLoad); + if (rc == 0) { + if (oldLoadValid) { + sec = newLoad.tv_sec - oldLoad.tv_sec; + nsec = newLoad.tv_nsec - oldLoad.tv_nsec; + if (nsec < 0) { + --sec; + nsec += 1000000000; + } + loadNs = nsec; + if (sec > 0 && sec < 4) { + loadNs += sec * 1000000000; + } + } else { + // first time through the loop + oldLoadValid = true; } - } else { - // first time through the loop - oldLoadValid = true; + oldLoad = newLoad; } - oldLoad = newLoad; - } #ifdef CPU_FREQUENCY_STATISTICS - // get the absolute value of CPU clock frequency in kHz - int cpuNum = sched_getcpu(); - uint32_t kHz = tcu.getCpukHz(cpuNum); - kHz = (kHz << 4) | (cpuNum & 0xF); + // get the absolute value of CPU clock frequency in kHz + int cpuNum = sched_getcpu(); + uint32_t kHz = tcu.getCpukHz(cpuNum); + kHz = (kHz << 4) | (cpuNum & 0xF); #endif - // save values in FIFO queues for dumpsys - // these stores #1, #2, #3 are not atomic with respect to each other, - // or with respect to 
store #4 below - dumpState->mMonotonicNs[i] = monotonicNs; - dumpState->mLoadNs[i] = loadNs; + // save values in FIFO queues for dumpsys + // these stores #1, #2, #3 are not atomic with respect to each other, + // or with respect to store #4 below + dumpState->mMonotonicNs[i] = monotonicNs; + dumpState->mLoadNs[i] = loadNs; #ifdef CPU_FREQUENCY_STATISTICS - dumpState->mCpukHz[i] = kHz; + dumpState->mCpukHz[i] = kHz; #endif - // this store #4 is not atomic with respect to stores #1, #2, #3 above, but - // the newest open and oldest closed halves are atomic with respect to each other - dumpState->mBounds = bounds; - ATRACE_INT("cycle_ms", monotonicNs / 1000000); - ATRACE_INT("load_us", loadNs / 1000); - } + // this store #4 is not atomic with respect to stores #1, #2, #3 above, but + // the newest open & oldest closed halves are atomic with respect to each other + dumpState->mBounds = bounds; + ATRACE_INT("cycle_ms", monotonicNs / 1000000); + ATRACE_INT("load_us", loadNs / 1000); + } #endif } else { // first time through the loop -- cgit v1.1 From 050501d11d944dcb256d37d3b86bd658d94f6a7f Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 11 Jul 2013 10:35:38 -0700 Subject: Fix AudioTrack::flush() It was only flushing at a surface level, and even then only the first time the server observed the client's flush request. Now it flushes at a deeper level, but there may be even deeper device-specific flushing. Bug: 9770947 Change-Id: I687cc3410ff9e5e5d4a5dcb9e3b129501e53d247 --- media/libmedia/AudioTrackShared.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index 5f8f292..6bb4ff7 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -388,6 +388,8 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer) if (flush != mFlush) { front = rear; mFlush = flush; + // effectively obtain then release whatever is in the buffer + android_atomic_release_store(rear, &cblk->u.mStreaming.mFront); } else { front = cblk->u.mStreaming.mFront; } -- cgit v1.1 From 8d764bfc74c40641f018a0aa87d6f484aec92eae Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Fri, 12 Jul 2013 22:06:20 -0700 Subject: always pass the BufferQueue explicitely to consumers Change-Id: Ic2d9a9cf184a482b3c78a481ed693ee32df5ca13 --- camera/ProCamera.cpp | 3 ++- cmds/stagefright/stagefright.cpp | 3 ++- libvideoeditor/lvpp/NativeWindowRenderer.cpp | 3 ++- services/camera/libcameraservice/camera2/CallbackProcessor.cpp | 3 ++- services/camera/libcameraservice/camera2/JpegProcessor.cpp | 3 ++- services/camera/libcameraservice/camera2/StreamingProcessor.cpp | 3 ++- services/camera/libcameraservice/camera2/ZslProcessor.cpp | 3 ++- services/camera/libcameraservice/camera3/Camera3InputStream.cpp | 3 ++- 8 files changed, 16 insertions(+), 8 deletions(-) diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index fec5461..190402e 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -247,7 +247,8 @@ status_t ProCamera::createStreamCpu(int width, int height, int format, sp c = mCamera; if (c == 0) return NO_INIT; - sp cc = new CpuConsumer(heapCount, synchronousMode); + sp bq = new BufferQueue(); + sp cc = new CpuConsumer(bq, heapCount, synchronousMode); cc->setName(String8("ProCamera::mCpuConsumer")); sp stc = new Surface( diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index f8fc8ed..529b96c 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -937,7 +937,8 
@@ int main(int argc, char **argv) { } else { CHECK(useSurfaceTexAlloc); - sp texture = new GLConsumer(0 /* tex */); + sp bq = new BufferQueue(); + sp texture = new GLConsumer(bq, 0 /* tex */); gSurface = new Surface(texture->getBufferQueue()); } diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.cpp b/libvideoeditor/lvpp/NativeWindowRenderer.cpp index 702900b..84a8e15 100755 --- a/libvideoeditor/lvpp/NativeWindowRenderer.cpp +++ b/libvideoeditor/lvpp/NativeWindowRenderer.cpp @@ -568,7 +568,8 @@ void NativeWindowRenderer::destroyRenderInput(RenderInput* input) { RenderInput::RenderInput(NativeWindowRenderer* renderer, GLuint textureId) : mRenderer(renderer) , mTextureId(textureId) { - mST = new GLConsumer(mTextureId); + sp bq = new BufferQueue(); + mST = new GLConsumer(bq, mTextureId); mSTC = new Surface(mST->getBufferQueue()); native_window_connect(mSTC.get(), NATIVE_WINDOW_API_MEDIA); } diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp index aae2504..efbbe57 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp @@ -110,7 +110,8 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { if (!mCallbackToApp && mCallbackConsumer == 0) { // Create CPU buffer queue endpoint, since app hasn't given us one // Make it async to avoid disconnect deadlocks - mCallbackConsumer = new CpuConsumer(kCallbackHeapCount, + sp bq = new BufferQueue(); + mCallbackConsumer = new CpuConsumer(bq, kCallbackHeapCount, /*synchronized*/ false); mCallbackConsumer->setFrameAvailableListener(this); mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer")); diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.cpp b/services/camera/libcameraservice/camera2/JpegProcessor.cpp index f0a13ca..1d739cd 100644 --- a/services/camera/libcameraservice/camera2/JpegProcessor.cpp +++ b/services/camera/libcameraservice/camera2/JpegProcessor.cpp @@ -82,7 +82,8 @@ status_t JpegProcessor::updateStream(const Parameters ¶ms) { if (mCaptureConsumer == 0) { // Create CPU buffer queue endpoint - mCaptureConsumer = new CpuConsumer(1); + sp bq = new BufferQueue(); + mCaptureConsumer = new CpuConsumer(bq, 1); mCaptureConsumer->setFrameAvailableListener(this); mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer")); mCaptureWindow = new Surface( diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp index 35eb433..76fa46c 100644 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp @@ -319,7 +319,8 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters ¶ms) { // Create CPU buffer queue endpoint. We need one more buffer here so that we can // always acquire and free a buffer when the heap is full; otherwise the consumer // will have buffers in flight we'll never clear out. 
- mRecordingConsumer = new BufferItemConsumer( + sp bq = new BufferQueue(); + mRecordingConsumer = new BufferItemConsumer(bq, GRALLOC_USAGE_HW_VIDEO_ENCODER, mRecordingHeapCount + 1, true); diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp index 8af8276..3c575f6 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp @@ -128,7 +128,8 @@ status_t ZslProcessor::updateStream(const Parameters ¶ms) { if (mZslConsumer == 0) { // Create CPU buffer queue endpoint - mZslConsumer = new BufferItemConsumer( + sp bq = new BufferQueue(); + mZslConsumer = new BufferItemConsumer(bq, GRALLOC_USAGE_HW_CAMERA_ZSL, kZslBufferDepth, true); diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp index 13e9c83..6d9acc3 100644 --- a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp @@ -211,7 +211,8 @@ status_t Camera3InputStream::configureQueueLocked() { mFrameCount = 0; if (mConsumer.get() == 0) { - mConsumer = new BufferItemConsumer(camera3_stream::usage, + sp bq = new BufferQueue(); + mConsumer = new BufferItemConsumer(bq, camera3_stream::usage, mTotalBufferCount, /*synchronousMode*/true); mConsumer->setName(String8::format("Camera3-InputStream-%d", mId)); -- cgit v1.1 From 099b457f3203fa51387e21bd450495abb973ab31 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Fri, 12 Jul 2013 17:52:16 -0700 Subject: camera2: Implement ICameraDeviceUser::getCameraInfo Bug: 9529161 Change-Id: I927e39c124cd5fb19e38423506f4463acc381cb0 --- camera/photography/ICameraDeviceUser.cpp | 27 +++++++++++----------- include/camera/photography/ICameraDeviceUser.h | 5 ++-- .../photography/CameraDeviceClient.cpp | 15 ++++-------- .../photography/CameraDeviceClient.h | 4 +--- 4 files changed, 21 insertions(+), 30 deletions(-) diff --git a/camera/photography/ICameraDeviceUser.cpp b/camera/photography/ICameraDeviceUser.cpp index 0515bd7..325f94d 100644 --- a/camera/photography/ICameraDeviceUser.cpp +++ b/camera/photography/ICameraDeviceUser.cpp @@ -151,21 +151,22 @@ public: } - virtual status_t getCameraInfo(int cameraId, camera_metadata** info) + virtual status_t getCameraInfo(CameraMetadata* info) { Parcel data, reply; data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); - data.writeInt32(cameraId); remote()->transact(GET_CAMERA_INFO, data, &reply); - reply.readExceptionCode(); status_t result = reply.readInt32(); + CameraMetadata out; if (reply.readInt32() != 0) { - CameraMetadata::readFromParcel(reply, /*out*/info); - } else if (info) { - *info = NULL; + out.readFromParcel(&reply); + } + + if (info != NULL) { + info->swap(out); } return result; @@ -273,6 +274,7 @@ status_t BnCameraDeviceUser::onTransact( reply->writeNoException(); reply->writeInt32(ret); + // out-variables are after exception and return value reply->writeInt32(1); // to mark presence of metadata object request.writeToParcel(const_cast(reply)); @@ -281,19 +283,16 @@ status_t BnCameraDeviceUser::onTransact( case GET_CAMERA_INFO: { CHECK_INTERFACE(ICameraDeviceUser, data, reply); - int cameraId = data.readInt32(); - - camera_metadata_t* info = NULL; + CameraMetadata info; status_t ret; - ret = getCameraInfo(cameraId, &info); - - reply->writeInt32(1); // to mark presence of metadata object - CameraMetadata::writeToParcel(*reply, 
info); + ret = getCameraInfo(&info); reply->writeNoException(); reply->writeInt32(ret); - free_camera_metadata(info); + // out-variables are after exception and return value + reply->writeInt32(1); // to mark presence of metadata object + info.writeToParcel(reply); return NO_ERROR; } break; diff --git a/include/camera/photography/ICameraDeviceUser.h b/include/camera/photography/ICameraDeviceUser.h index 1b8d666..3ea49f4 100644 --- a/include/camera/photography/ICameraDeviceUser.h +++ b/include/camera/photography/ICameraDeviceUser.h @@ -58,9 +58,8 @@ public: /*out*/ CameraMetadata* request) = 0; // Get static camera metadata - virtual status_t getCameraInfo(int cameraId, - /*out*/ - camera_metadata** info) = 0; + virtual status_t getCameraInfo(/*out*/ + CameraMetadata* info) = 0; }; diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp index bd6b60a..a6a2dc1 100644 --- a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp @@ -391,28 +391,23 @@ status_t CameraDeviceClient::createDefaultRequest(int templateId, return res; } -status_t CameraDeviceClient::getCameraInfo(int cameraId, - /*out*/ - camera_metadata** info) +status_t CameraDeviceClient::getCameraInfo(/*out*/CameraMetadata* info) { ATRACE_CALL(); ALOGV("%s", __FUNCTION__); status_t res = OK; - // TODO: remove cameraId. this should be device-specific info, not static. - if (cameraId != mCameraId) { - return INVALID_OPERATION; - } - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; Mutex::Autolock icl(mBinderSerializationLock); if (!mDevice.get()) return DEAD_OBJECT; - CameraMetadata deviceInfo = mDevice->info(); - *info = deviceInfo.release(); + if (info != NULL) { + *info = mDevice->info(); // static camera metadata + // TODO: merge with device-specific camera metadata + } return res; } diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.h b/services/camera/libcameraservice/photography/CameraDeviceClient.h index 806aa15..c6c241a 100644 --- a/services/camera/libcameraservice/photography/CameraDeviceClient.h +++ b/services/camera/libcameraservice/photography/CameraDeviceClient.h @@ -85,9 +85,7 @@ public: // Get the static metadata for the camera // -- Caller owns the newly allocated metadata - virtual status_t getCameraInfo(int cameraId, - /*out*/ - camera_metadata** info); + virtual status_t getCameraInfo(/*out*/CameraMetadata* info); /** * Interface used by CameraService -- cgit v1.1 From 5e1f08b3917ac7900f8a11118afb7e8bf3e61c64 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Tue, 16 Jul 2013 22:54:39 -0700 Subject: update to new Consumer APIs Change-Id: I3c5d4be2a2e8783fbf98b3e268fd02658f71dc7d --- camera/ProCamera.cpp | 2 +- media/libstagefright/SurfaceMediaSource.cpp | 7 +++---- media/libstagefright/omx/GraphicBufferSource.cpp | 9 ++++----- services/camera/libcameraservice/camera2/CallbackProcessor.cpp | 3 +-- services/camera/libcameraservice/camera2/StreamingProcessor.cpp | 3 +-- services/camera/libcameraservice/camera2/ZslProcessor.cpp | 3 +-- services/camera/libcameraservice/camera3/Camera3InputStream.cpp | 3 +-- services/camera/libcameraservice/gui/RingBufferConsumer.cpp | 3 +-- 8 files changed, 13 insertions(+), 20 deletions(-) diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 190402e..1040415 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -248,7 +248,7 @@ status_t 
ProCamera::createStreamCpu(int width, int height, int format, if (c == 0) return NO_INIT; sp bq = new BufferQueue(); - sp cc = new CpuConsumer(bq, heapCount, synchronousMode); + sp cc = new CpuConsumer(bq, heapCount/*, synchronousMode*/); cc->setName(String8("ProCamera::mCpuConsumer")); sp stc = new Surface( diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp index 305e7e0..befd4cc 100644 --- a/media/libstagefright/SurfaceMediaSource.cpp +++ b/media/libstagefright/SurfaceMediaSource.cpp @@ -21,7 +21,7 @@ #include #include #include -#include +#include #include #include @@ -54,9 +54,8 @@ SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeig ALOGE("Invalid dimensions %dx%d", bufferWidth, bufferHeight); } - mBufferQueue = new BufferQueue(true); + mBufferQueue = new BufferQueue(); mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight); - mBufferQueue->setSynchronousMode(true); mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_HW_TEXTURE); @@ -71,7 +70,7 @@ SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeig listener = static_cast(this); proxy = new BufferQueue::ProxyConsumerListener(listener); - status_t err = mBufferQueue->consumerConnect(proxy); + status_t err = mBufferQueue->consumerConnect(proxy, false); if (err != NO_ERROR) { ALOGE("SurfaceMediaSource: error connecting to BufferQueue: %s (%d)", strerror(-err), err); diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp index b3167b5..5f7c26a 100644 --- a/media/libstagefright/omx/GraphicBufferSource.cpp +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -18,12 +18,12 @@ //#define LOG_NDEBUG 0 #include -#include +#include "GraphicBufferSource.h" #include #include -#include +#include #include namespace android { @@ -51,10 +51,9 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, String8 name("GraphicBufferSource"); - mBufferQueue = new BufferQueue(true); + mBufferQueue = new BufferQueue(); mBufferQueue->setConsumerName(name); mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight); - mBufferQueue->setSynchronousMode(true); mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_HW_TEXTURE); @@ -75,7 +74,7 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, sp proxy; proxy = new BufferQueue::ProxyConsumerListener(listener); - mInitCheck = mBufferQueue->consumerConnect(proxy); + mInitCheck = mBufferQueue->consumerConnect(proxy, false); if (mInitCheck != NO_ERROR) { ALOGE("Error connecting to BufferQueue: %s (%d)", strerror(-mInitCheck), mInitCheck); diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp index efbbe57..d7bafda 100644 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp @@ -111,8 +111,7 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { // Create CPU buffer queue endpoint, since app hasn't given us one // Make it async to avoid disconnect deadlocks sp bq = new BufferQueue(); - mCallbackConsumer = new CpuConsumer(bq, kCallbackHeapCount, - /*synchronized*/ false); + mCallbackConsumer = new CpuConsumer(bq, kCallbackHeapCount); mCallbackConsumer->setFrameAvailableListener(this); mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer")); mCallbackWindow 
= new Surface( diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp index 76fa46c..5981be7 100644 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp @@ -322,8 +322,7 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters ¶ms) { sp bq = new BufferQueue(); mRecordingConsumer = new BufferItemConsumer(bq, GRALLOC_USAGE_HW_VIDEO_ENCODER, - mRecordingHeapCount + 1, - true); + mRecordingHeapCount + 1); mRecordingConsumer->setFrameAvailableListener(this); mRecordingConsumer->setName(String8("Camera2-RecordingConsumer")); mRecordingWindow = new Surface( diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp index 3c575f6..0094992 100644 --- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp @@ -131,8 +131,7 @@ status_t ZslProcessor::updateStream(const Parameters ¶ms) { sp bq = new BufferQueue(); mZslConsumer = new BufferItemConsumer(bq, GRALLOC_USAGE_HW_CAMERA_ZSL, - kZslBufferDepth, - true); + kZslBufferDepth); mZslConsumer->setFrameAvailableListener(this); mZslConsumer->setName(String8("Camera2Client::ZslConsumer")); mZslWindow = new Surface( diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp index 6d9acc3..e9a9c2b 100644 --- a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp @@ -213,8 +213,7 @@ status_t Camera3InputStream::configureQueueLocked() { if (mConsumer.get() == 0) { sp bq = new BufferQueue(); mConsumer = new BufferItemConsumer(bq, camera3_stream::usage, - mTotalBufferCount, - /*synchronousMode*/true); + mTotalBufferCount); mConsumer->setName(String8::format("Camera3-InputStream-%d", mId)); } diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp index 7625735..8141f4e 100644 --- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp +++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp @@ -36,11 +36,10 @@ namespace android { RingBufferConsumer::RingBufferConsumer(uint32_t consumerUsage, int bufferCount) : - ConsumerBase(new BufferQueue(true)), + ConsumerBase(new BufferQueue()), mBufferCount(bufferCount) { mBufferQueue->setConsumerUsageBits(consumerUsage); - mBufferQueue->setSynchronousMode(true); mBufferQueue->setMaxAcquiredBufferCount(bufferCount); assert(bufferCount > 0); -- cgit v1.1 From d1eff5718510228503958e8fafa698c9e6a4a230 Mon Sep 17 00:00:00 2001 From: "leozwang@google.com" Date: Sat, 13 Jul 2013 21:52:50 -0700 Subject: Output more detailed error message if loading library fails Bug: 9805979 Change-Id: I77b19d6a65ff9fb72e7428ce79b117628e4c8658 --- media/libmediaplayerservice/Crypto.cpp | 3 ++- media/libmediaplayerservice/SharedLibrary.cpp | 6 ++++++ media/libmediaplayerservice/SharedLibrary.h | 1 + 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/media/libmediaplayerservice/Crypto.cpp b/media/libmediaplayerservice/Crypto.cpp index ae4d845..62593b2 100644 --- a/media/libmediaplayerservice/Crypto.cpp +++ b/media/libmediaplayerservice/Crypto.cpp @@ -134,7 +134,6 @@ void Crypto::findFactoryForScheme(const uint8_t uuid[16]) { return; } - ALOGE("Failed to find 
crypto plugin"); mInitCheck = ERROR_UNSUPPORTED; } @@ -151,6 +150,7 @@ bool Crypto::loadLibraryForScheme(const String8 &path, const uint8_t uuid[16]) { if (!mLibrary.get()) { mLibrary = new SharedLibrary(path); if (!*mLibrary) { + ALOGE("loadLibraryForScheme failed:%s", mLibrary->lastError()); return false; } @@ -165,6 +165,7 @@ bool Crypto::loadLibraryForScheme(const String8 &path, const uint8_t uuid[16]) { if (createCryptoFactory == NULL || (mFactory = createCryptoFactory()) == NULL || !mFactory->isCryptoSchemeSupported(uuid)) { + ALOGE("createCryptoFactory failed:%s", mLibrary->lastError()); closeFactory(); return false; } diff --git a/media/libmediaplayerservice/SharedLibrary.cpp b/media/libmediaplayerservice/SharedLibrary.cpp index 178e15d..34db761 100644 --- a/media/libmediaplayerservice/SharedLibrary.cpp +++ b/media/libmediaplayerservice/SharedLibrary.cpp @@ -46,4 +46,10 @@ namespace android { } return dlsym(mLibHandle, symbol); } + + const char *SharedLibrary::lastError() const { + const char *error = dlerror(); + return error ? error : "No errors or unknown error"; + } + }; diff --git a/media/libmediaplayerservice/SharedLibrary.h b/media/libmediaplayerservice/SharedLibrary.h index 5353642..88451a0 100644 --- a/media/libmediaplayerservice/SharedLibrary.h +++ b/media/libmediaplayerservice/SharedLibrary.h @@ -29,6 +29,7 @@ namespace android { bool operator!() const; void *lookup(const char *symbol) const; + const char *lastError() const; private: void *mLibHandle; -- cgit v1.1 From a0a63e13788a77bc502da0c72269d82c4779ac91 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 17 Jul 2013 14:02:31 -0700 Subject: Experimental support for enabling the use of "surface input" mode even with the software VP8 encoder. This relies heavily on the fact that the "Nexus" devices use ARGB32 as the colorspace for the data underlying a surface provided by SurfaceFlinger (mirroring). Generally there are no such guarantees. 
Change-Id: I1de32f591a3bb935ca76151816b3a02665bec40b --- media/libstagefright/codecs/on2/enc/Android.mk | 5 + .../codecs/on2/enc/SoftVPXEncoder.cpp | 159 ++++++++++++++++++--- .../libstagefright/codecs/on2/enc/SoftVPXEncoder.h | 15 +- 3 files changed, 158 insertions(+), 21 deletions(-) diff --git a/media/libstagefright/codecs/on2/enc/Android.mk b/media/libstagefright/codecs/on2/enc/Android.mk index a92d376..4060a0a 100644 --- a/media/libstagefright/codecs/on2/enc/Android.mk +++ b/media/libstagefright/codecs/on2/enc/Android.mk @@ -12,11 +12,16 @@ LOCAL_C_INCLUDES := \ frameworks/av/media/libstagefright/include \ frameworks/native/include/media/openmax \ +ifeq ($(TARGET_DEVICE), manta) + LOCAL_CFLAGS += -DSURFACE_IS_BGR32 +endif + LOCAL_STATIC_LIBRARIES := \ libvpx LOCAL_SHARED_LIBRARIES := \ libstagefright libstagefright_omx libstagefright_foundation libutils liblog \ + libhardware \ LOCAL_MODULE := libstagefright_soft_vpxenc LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp index 74d6df5..d8456fe 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp @@ -20,6 +20,8 @@ #include +#include +#include #include #include @@ -81,6 +83,52 @@ inline static void ConvertSemiPlanarToPlanar(uint8_t *inyuv, } } +static void ConvertRGB32ToPlanar( + const uint8_t *src, uint8_t *dstY, int32_t width, int32_t height) { + CHECK((width & 1) == 0); + CHECK((height & 1) == 0); + + uint8_t *dstU = dstY + width * height; + uint8_t *dstV = dstU + (width / 2) * (height / 2); + + for (int32_t y = 0; y < height; ++y) { + for (int32_t x = 0; x < width; ++x) { +#ifdef SURFACE_IS_BGR32 + unsigned blue = src[4 * x]; + unsigned green = src[4 * x + 1]; + unsigned red= src[4 * x + 2]; +#else + unsigned red= src[4 * x]; + unsigned green = src[4 * x + 1]; + unsigned blue = src[4 * x + 2]; +#endif + + unsigned luma = + ((red * 66 + green * 129 + blue * 25) >> 8) + 16; + + dstY[x] = luma; + + if ((x & 1) == 0 && (y & 1) == 0) { + unsigned U = + ((-red * 38 - green * 74 + blue * 112) >> 8) + 128; + + unsigned V = + ((red * 112 - green * 94 - blue * 18) >> 8) + 128; + + dstU[x / 2] = U; + dstV[x / 2] = V; + } + } + + if ((y & 1) == 0) { + dstU += width / 2; + dstV += width / 2; + } + + src += 4 * width; + dstY += width; + } +} SoftVPXEncoder::SoftVPXEncoder(const char *name, const OMX_CALLBACKTYPE *callbacks, @@ -99,8 +147,9 @@ SoftVPXEncoder::SoftVPXEncoder(const char *name, mErrorResilience(OMX_FALSE), mColorFormat(OMX_COLOR_FormatYUV420Planar), mLevel(OMX_VIDEO_VP8Level_Version0), - mConversionBuffer(NULL) { - + mConversionBuffer(NULL), + mInputDataIsMeta(false), + mGrallocModule(NULL) { initPorts(); } @@ -247,7 +296,7 @@ status_t SoftVPXEncoder::initEncoder() { return UNKNOWN_ERROR; } - if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || mInputDataIsMeta) { if (mConversionBuffer == NULL) { mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2); if (mConversionBuffer == NULL) { @@ -427,9 +476,17 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index, (const OMX_VIDEO_PARAM_BITRATETYPE *)param); case OMX_IndexParamPortDefinition: - return internalSetPortParams( + { + OMX_ERRORTYPE err = internalSetPortParams( (const OMX_PARAM_PORTDEFINITIONTYPE *)param); + if (err != OMX_ErrorNone) { + return err; + } + + return SimpleSoftOMXComponent::internalSetParameter(index, 
param); + } + case OMX_IndexParamVideoPortFormat: return internalSetFormatParams( (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param); @@ -442,6 +499,21 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index, return internalSetProfileLevel( (const OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param); + case OMX_IndexVendorStartUnused: + { + // storeMetaDataInBuffers + const StoreMetaDataInBuffersParams *storeParam = + (const StoreMetaDataInBuffersParams *)param; + + if (storeParam->nPortIndex != kInputPortIndex) { + return OMX_ErrorBadPortIndex; + } + + mInputDataIsMeta = (storeParam->bStoreMetaData == OMX_TRUE); + + return OMX_ErrorNone; + } + default: return SimpleSoftOMXComponent::internalSetParameter(index, param); } @@ -507,6 +579,10 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams( format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) { mColorFormat = format->eColorFormat; + + OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef; + def->format.video.eColorFormat = mColorFormat; + return OMX_ErrorNone; } else { ALOGE("Unsupported color format %i", format->eColorFormat); @@ -552,11 +628,17 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams( if (port->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar || port->format.video.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || port->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) { - mColorFormat = port->format.video.eColorFormat; + mColorFormat = port->format.video.eColorFormat; } else { return OMX_ErrorUnsupportedSetting; } + OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef; + def->format.video.nFrameWidth = mWidth; + def->format.video.nFrameHeight = mHeight; + def->format.video.xFramerate = port->format.video.xFramerate; + def->format.video.eColorFormat = mColorFormat; + return OMX_ErrorNone; } else if (port->nPortIndex == kOutputPortIndex) { mBitrate = port->format.video.nBitrate; @@ -625,24 +707,56 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { return; } - uint8_t* source = inputBufferHeader->pBuffer + inputBufferHeader->nOffset; + uint8_t *source = + inputBufferHeader->pBuffer + inputBufferHeader->nOffset; + + if (mInputDataIsMeta) { + CHECK_GE(inputBufferHeader->nFilledLen, + 4 + sizeof(buffer_handle_t)); + + uint32_t bufferType = *(uint32_t *)source; + CHECK_EQ(bufferType, kMetadataBufferTypeGrallocSource); + + if (mGrallocModule == NULL) { + CHECK_EQ(0, hw_get_module( + GRALLOC_HARDWARE_MODULE_ID, &mGrallocModule)); + } + + const gralloc_module_t *grmodule = + (const gralloc_module_t *)mGrallocModule; + + buffer_handle_t handle = *(buffer_handle_t *)(source + 4); + + void *bits; + CHECK_EQ(0, + grmodule->lock( + grmodule, handle, + GRALLOC_USAGE_SW_READ_OFTEN + | GRALLOC_USAGE_SW_WRITE_NEVER, + 0, 0, mWidth, mHeight, &bits)); + + ConvertRGB32ToPlanar( + (const uint8_t *)bits, mConversionBuffer, mWidth, mHeight); + + source = mConversionBuffer; + + CHECK_EQ(0, grmodule->unlock(grmodule, handle)); + } else if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + ConvertSemiPlanarToPlanar( + source, mConversionBuffer, mWidth, mHeight); - // NOTE: As much as nothing is known about color format - // when it is denoted as AndroidOpaque, it is at least - // assumed to be planar. 
- if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { - ConvertSemiPlanarToPlanar(source, mConversionBuffer, mWidth, mHeight); source = mConversionBuffer; } vpx_image_t raw_frame; vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, mWidth, mHeight, kInputBufferAlignment, source); - codec_return = vpx_codec_encode(mCodecContext, - &raw_frame, - inputBufferHeader->nTimeStamp, // in timebase units - mFrameDurationUs, // frame duration in timebase units - 0, // frame flags - VPX_DL_REALTIME); // encoding deadline + codec_return = vpx_codec_encode( + mCodecContext, + &raw_frame, + inputBufferHeader->nTimeStamp, // in timebase units + mFrameDurationUs, // frame duration in timebase units + 0, // frame flags + VPX_DL_REALTIME); // encoding deadline if (codec_return != VPX_CODEC_OK) { ALOGE("vpx encoder failed to encode frame"); notify(OMX_EventError, @@ -676,6 +790,17 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { notifyEmptyBufferDone(inputBufferHeader); } } + +OMX_ERRORTYPE SoftVPXEncoder::getExtensionIndex( + const char *name, OMX_INDEXTYPE *index) { + if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) { + *index = OMX_IndexVendorStartUnused; + return OMX_ErrorNone; + } + + return SimpleSoftOMXComponent::getExtensionIndex(name, index); +} + } // namespace android diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h index a0a8ee6..d570154 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h @@ -23,6 +23,8 @@ #include #include +#include + #include "vpx/vpx_encoder.h" #include "vpx/vpx_codec.h" #include "vpx/vp8cx.h" @@ -57,14 +59,13 @@ namespace android { // - OMX timestamps are in microseconds, therefore // encoder timebase is fixed to 1/1000000 -class SoftVPXEncoder : public SimpleSoftOMXComponent { - public: +struct SoftVPXEncoder : public SimpleSoftOMXComponent { SoftVPXEncoder(const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, OMX_COMPONENTTYPE **component); - protected: +protected: virtual ~SoftVPXEncoder(); // Returns current values for requested OMX @@ -83,7 +84,10 @@ class SoftVPXEncoder : public SimpleSoftOMXComponent { // encoding of the frame virtual void onQueueFilled(OMX_U32 portIndex); - private: + virtual OMX_ERRORTYPE getExtensionIndex( + const char *name, OMX_INDEXTYPE *index); + +private: // number of buffers allocated per port static const uint32_t kNumBuffers = 4; @@ -156,6 +160,9 @@ class SoftVPXEncoder : public SimpleSoftOMXComponent { // indeed YUV420SemiPlanar. uint8_t* mConversionBuffer; + bool mInputDataIsMeta; + const hw_module_t *mGrallocModule; + // Initializes input and output OMX ports with sensible // default values. void initPorts(); -- cgit v1.1 From e40cda70eec141fa05cbcca1de420fdb22b98be6 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 17 Jul 2013 13:55:26 -0700 Subject: Support "suspension" of a video encoder in "surface-input" mode. i.e. feed no more input frames to the encoder while suspended. 
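A note on how this is driven from above: ACodec::setParameters() maps the
"drop-input-frames" key onto the new IOMX::INTERNAL_OPTION_SUSPEND, so a rough
sketch of toggling suspension at runtime looks like the following (the codec
handle and the exact entry point, e.g. MediaCodec::setParameters(), are
assumed here for illustration):

    // Hypothetical sketch (C++): suspend / resume a surface-input encoder.
    sp<AMessage> params = new AMessage;
    params->setInt32("drop-input-frames", 1);  // non-zero = suspend, 0 = resume
    status_t err = codec->setParameters(params);
    // Internally ACodec forwards this as
    //   mOMX->setInternalOption(mNode, kPortIndexInput,
    //                           IOMX::INTERNAL_OPTION_SUSPEND,
    //                           &suspend, sizeof(suspend));
    // and GraphicBufferSource::suspend(true) discards any queued frames.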
Change-Id: I51391e18c1517548e869f8ddece19f4af37e78f9 --- include/media/IOMX.h | 10 +++++ media/libmedia/IOMX.cpp | 29 ++++++++++++ media/libstagefright/ACodec.cpp | 13 ++++++ media/libstagefright/OMXClient.cpp | 16 +++++++ media/libstagefright/include/OMX.h | 7 +++ media/libstagefright/include/OMXNodeInstance.h | 6 +++ media/libstagefright/omx/GraphicBufferSource.cpp | 48 ++++++++++++++++++-- media/libstagefright/omx/GraphicBufferSource.h | 6 +++ media/libstagefright/omx/OMX.cpp | 9 ++++ media/libstagefright/omx/OMXNodeInstance.cpp | 57 +++++++++++++++++++----- 10 files changed, 187 insertions(+), 14 deletions(-) diff --git a/include/media/IOMX.h b/include/media/IOMX.h index 0b1d1e4..38f9d11 100644 --- a/include/media/IOMX.h +++ b/include/media/IOMX.h @@ -130,6 +130,16 @@ public: node_id node, const char *parameter_name, OMX_INDEXTYPE *index) = 0; + + enum InternalOptionType { + INTERNAL_OPTION_SUSPEND, // data is a bool + }; + virtual status_t setInternalOption( + node_id node, + OMX_U32 port_index, + InternalOptionType type, + const void *data, + size_t size) = 0; }; struct omx_message { diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp index d6cd43a..5bbb2f0 100644 --- a/media/libmedia/IOMX.cpp +++ b/media/libmedia/IOMX.cpp @@ -51,6 +51,7 @@ enum { GET_EXTENSION_INDEX, OBSERVER_ON_MSG, GET_GRAPHIC_BUFFER_USAGE, + SET_INTERNAL_OPTION, }; class BpOMX : public BpInterface { @@ -439,6 +440,24 @@ public: return err; } + + virtual status_t setInternalOption( + node_id node, + OMX_U32 port_index, + InternalOptionType type, + const void *optionData, + size_t size) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(port_index); + data.writeInt32(size); + data.write(optionData, size); + data.writeInt32(type); + remote()->transact(SET_INTERNAL_OPTION, data, &reply); + + return reply.readInt32(); + } }; IMPLEMENT_META_INTERFACE(OMX, "android.hardware.IOMX"); @@ -537,6 +556,7 @@ status_t BnOMX::onTransact( case SET_PARAMETER: case GET_CONFIG: case SET_CONFIG: + case SET_INTERNAL_OPTION: { CHECK_OMX_INTERFACE(IOMX, data, reply); @@ -562,6 +582,15 @@ status_t BnOMX::onTransact( case SET_CONFIG: err = setConfig(node, index, params, size); break; + case SET_INTERNAL_OPTION: + { + InternalOptionType type = + (InternalOptionType)data.readInt32(); + + err = setInternalOption(node, index, type, params, size); + break; + } + default: TRESPASS(); } diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 6bc7718..8d1020e 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -4106,6 +4106,19 @@ status_t ACodec::setParameters(const sp ¶ms) { } } + int32_t dropInputFrames; + if (params->findInt32("drop-input-frames", &dropInputFrames)) { + bool suspend = dropInputFrames != 0; + + CHECK_EQ((status_t)OK, + mOMX->setInternalOption( + mNode, + kPortIndexInput, + IOMX::INTERNAL_OPTION_SUSPEND, + &suspend, + sizeof(suspend))); + } + return OK; } diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp index 1822f07..810d88f 100644 --- a/media/libstagefright/OMXClient.cpp +++ b/media/libstagefright/OMXClient.cpp @@ -113,6 +113,13 @@ struct MuxOMX : public IOMX { const char *parameter_name, OMX_INDEXTYPE *index); + virtual status_t setInternalOption( + node_id node, + OMX_U32 port_index, + InternalOptionType type, + const void *data, + size_t size); + private: mutable Mutex mLock; @@ -331,6 +338,15 @@ status_t 
MuxOMX::getExtensionIndex( return getOMX(node)->getExtensionIndex(node, parameter_name, index); } +status_t MuxOMX::setInternalOption( + node_id node, + OMX_U32 port_index, + InternalOptionType type, + const void *data, + size_t size) { + return getOMX(node)->setInternalOption(node, port_index, type, data, size); +} + OMXClient::OMXClient() { } diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h index 24b8d98..7fed7d4 100644 --- a/media/libstagefright/include/OMX.h +++ b/media/libstagefright/include/OMX.h @@ -109,6 +109,13 @@ public: const char *parameter_name, OMX_INDEXTYPE *index); + virtual status_t setInternalOption( + node_id node, + OMX_U32 port_index, + InternalOptionType type, + const void *data, + size_t size); + virtual void binderDied(const wp &the_late_who); OMX_ERRORTYPE OnEvent( diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h index 67aba6b..f6ae376 100644 --- a/media/libstagefright/include/OMXNodeInstance.h +++ b/media/libstagefright/include/OMXNodeInstance.h @@ -96,6 +96,12 @@ struct OMXNodeInstance { status_t getExtensionIndex( const char *parameterName, OMX_INDEXTYPE *index); + status_t setInternalOption( + OMX_U32 portIndex, + IOMX::InternalOptionType type, + const void *data, + size_t size); + void onMessage(const omx_message &msg); void onObserverDied(OMXMaster *master); void onGetHandleFailed(); diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp index b3167b5..6f3ed0d 100644 --- a/media/libstagefright/omx/GraphicBufferSource.cpp +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -36,6 +36,7 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, mInitCheck(UNKNOWN_ERROR), mNodeInstance(nodeInstance), mExecuting(false), + mSuspended(false), mNumFramesAvailable(0), mEndOfStream(false), mEndOfStreamSent(false) { @@ -237,9 +238,43 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { return; } +void GraphicBufferSource::suspend(bool suspend) { + Mutex::Autolock autoLock(mMutex); + + if (suspend) { + mSuspended = true; + + while (mNumFramesAvailable > 0) { + BufferQueue::BufferItem item; + status_t err = mBufferQueue->acquireBuffer(&item, 0); + + if (err == BufferQueue::NO_BUFFER_AVAILABLE) { + // shouldn't happen. + ALOGW("suspend: frame was not available"); + break; + } else if (err != OK) { + ALOGW("suspend: acquireBuffer returned err=%d", err); + break; + } + + --mNumFramesAvailable; + + mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber, + EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence); + } + return; + } + + mSuspended = false; +} + bool GraphicBufferSource::fillCodecBuffer_l() { CHECK(mExecuting && mNumFramesAvailable > 0); + if (mSuspended) { + return false; + } + int cbi = findAvailableCodecBuffer_l(); if (cbi < 0) { // No buffers available, bail. @@ -416,10 +451,15 @@ void GraphicBufferSource::onFrameAvailable() { ALOGV("onFrameAvailable exec=%d avail=%d", mExecuting, mNumFramesAvailable); - if (mEndOfStream) { - // This should only be possible if a new buffer was queued after - // EOS was signaled, i.e. the app is misbehaving. - ALOGW("onFrameAvailable: EOS is set, ignoring frame"); + if (mEndOfStream || mSuspended) { + if (mEndOfStream) { + // This should only be possible if a new buffer was queued after + // EOS was signaled, i.e. the app is misbehaving. 
+ + ALOGW("onFrameAvailable: EOS is set, ignoring frame"); + } else { + ALOGV("onFrameAvailable: suspended, ignoring frame"); + } BufferQueue::BufferItem item; status_t err = mBufferQueue->acquireBuffer(&item, 0); diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h index 8c6b470..ac73770 100644 --- a/media/libstagefright/omx/GraphicBufferSource.h +++ b/media/libstagefright/omx/GraphicBufferSource.h @@ -85,6 +85,10 @@ public: // have a codec buffer ready, we just set the mEndOfStream flag. status_t signalEndOfInputStream(); + // If suspend is true, all incoming buffers (including those currently + // in the BufferQueue) will be discarded until the suspension is lifted. + void suspend(bool suspend); + protected: // BufferQueue::ConsumerListener interface, called when a new frame of // data is available. If we're executing and a codec buffer is @@ -155,6 +159,8 @@ private: // Set by omxExecuting() / omxIdling(). bool mExecuting; + bool mSuspended; + // We consume graphic buffers from this. sp mBufferQueue; diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp index 3987ead..4b1dbe6 100644 --- a/media/libstagefright/omx/OMX.cpp +++ b/media/libstagefright/omx/OMX.cpp @@ -396,6 +396,15 @@ status_t OMX::getExtensionIndex( parameter_name, index); } +status_t OMX::setInternalOption( + node_id node, + OMX_U32 port_index, + InternalOptionType type, + const void *data, + size_t size) { + return findInstance(node)->setInternalOption(port_index, type, data, size); +} + OMX_ERRORTYPE OMX::OnEvent( node_id node, OMX_IN OMX_EVENTTYPE eEvent, diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index a9eb94f..61a866f 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -238,6 +238,18 @@ status_t OMXNodeInstance::freeNode(OMXMaster *master) { status_t OMXNodeInstance::sendCommand( OMX_COMMANDTYPE cmd, OMX_S32 param) { + const sp& bufferSource(getGraphicBufferSource()); + if (bufferSource != NULL + && cmd == OMX_CommandStateSet + && param == OMX_StateLoaded) { + // Initiating transition from Executing -> Loaded + // Buffers are about to be freed. 
+ bufferSource->omxLoaded(); + setGraphicBufferSource(NULL); + + // fall through + } + Mutex::Autolock autoLock(mLock); OMX_ERRORTYPE err = OMX_SendCommand(mHandle, cmd, param, NULL); @@ -769,6 +781,36 @@ status_t OMXNodeInstance::getExtensionIndex( return StatusFromOMXError(err); } +status_t OMXNodeInstance::setInternalOption( + OMX_U32 portIndex, + IOMX::InternalOptionType type, + const void *data, + size_t size) { + switch (type) { + case IOMX::INTERNAL_OPTION_SUSPEND: + { + const sp &bufferSource = + getGraphicBufferSource(); + + if (bufferSource == NULL || portIndex != kPortIndexInput) { + return ERROR_UNSUPPORTED; + } + + if (size != sizeof(bool)) { + return INVALID_OPERATION; + } + + bool suspend = *(bool *)data; + bufferSource->suspend(suspend); + + return OK; + } + + default: + return ERROR_UNSUPPORTED; + } +} + void OMXNodeInstance::onMessage(const omx_message &msg) { if (msg.type == omx_message::FILL_BUFFER_DONE) { OMX_BUFFERHEADERTYPE *buffer = @@ -818,16 +860,11 @@ void OMXNodeInstance::onEvent( OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2) { const sp& bufferSource(getGraphicBufferSource()); - if (bufferSource != NULL && event == OMX_EventCmdComplete && - arg1 == OMX_CommandStateSet) { - if (arg2 == OMX_StateExecuting) { - bufferSource->omxExecuting(); - } else if (arg2 == OMX_StateLoaded) { - // Must be shutting down -- won't have a GraphicBufferSource - // on the way up. - bufferSource->omxLoaded(); - setGraphicBufferSource(NULL); - } + if (bufferSource != NULL + && event == OMX_EventCmdComplete + && arg1 == OMX_CommandStateSet + && arg2 == OMX_StateExecuting) { + bufferSource->omxExecuting(); } } -- cgit v1.1 From c6ae3c8a261794fd4445e4e152d1ada074a3f92f Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 17 Jul 2013 09:08:51 -0700 Subject: Fix theoretical memory leak in mConfigEvents Change-Id: I137f70676c8919661e716c33e0dd9c25c2b6285c --- services/audioflinger/Threads.cpp | 6 ++++++ services/audioflinger/Threads.h | 1 + 2 files changed, 7 insertions(+) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index ef109af..d4cd0ea 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -282,6 +282,12 @@ AudioFlinger::ThreadBase::ThreadBase(const sp& audioFlinger, audio AudioFlinger::ThreadBase::~ThreadBase() { + // mConfigEvents should be empty, but just in case it isn't, free the memory it owns + for (size_t i = 0; i < mConfigEvents.size(); i++) { + delete mConfigEvents[i]; + } + mConfigEvents.clear(); + mParamCond.broadcast(); // do not lock the mutex in destructor releaseWakeLock_l(); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index e15d98a..365c790 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -290,6 +290,7 @@ protected: Vector mNewParameters; status_t mParamStatus; + // vector owns each ConfigEvent *, so must delete after removing Vector mConfigEvents; // These fields are written and read by thread itself without lock or barrier, -- cgit v1.1 From 87eb285dca94b20dc5f0ff8e60a0d395a4ca3be9 Mon Sep 17 00:00:00 2001 From: Dima Zavin Date: Thu, 18 Jul 2013 11:43:39 -0700 Subject: stagefright: set scaling mode for blank frames in ACodec Analogous to 1d5ac80d0c6d3deabcc9e9b4abc9e3ef536aeb27 (by jgennis), this sets the scaling mode for the ANativeWindow to SCALE_TO_WINDOW prior to pushing the blank frames during decoder tear down. Without this, the window defaults to FREEZE and SF ignores the new frames. 
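For reference, the ordering is what matters here: the scaling mode has to be
set before the 1x1 blank buffers are queued, otherwise SurfaceFlinger keeps the
FREEZE default and drops them. A trimmed sketch of the sequence (the window is
assumed to be already connected for CPU use; the dequeue/fill/queue loop and
error handling are omitted):

    // Sketch (C++): order of the blank-frame push; `win` is an ANativeWindow*
    // already connected with NATIVE_WINDOW_API_CPU.
    native_window_set_buffers_geometry(
            win, 1 /* w */, 1 /* h */, HAL_PIXEL_FORMAT_RGBX_8888);
    native_window_set_scaling_mode(          // <-- the call added below
            win, NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
    native_window_set_usage(win, GRALLOC_USAGE_SW_WRITE_OFTEN);
    // ...then dequeue each buffer, memset() it to black, and queue it.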
Bug: 9516405 Change-Id: I39ef30922d733034bf01100d7ff24ac9c0c33b7d Signed-off-by: Dima Zavin --- media/libstagefright/ACodec.cpp | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 6bc7718..9b24d44 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -2630,6 +2630,14 @@ status_t ACodec::pushBlankBuffersToNativeWindow() { goto error; } + err = native_window_set_scaling_mode(mNativeWindow.get(), + NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); + if (err != NO_ERROR) { + ALOGE("error pushing blank_frames: set_scaling_mode failed: %s (%d)", + strerror(-err), -err); + goto error; + } + err = native_window_set_usage(mNativeWindow.get(), GRALLOC_USAGE_SW_WRITE_OFTEN); if (err != NO_ERROR) { -- cgit v1.1 From 8060060217ff16cd67c8f6a15c649f44c343acf0 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 18 Jul 2013 14:36:18 -0700 Subject: ANetworkSession and ParsedMessage are now part of stagefright foundation. Also fixes some bugs in ParseMessage and adds "WebSocket" mode to ANetworkSession, something that's opt-in and should not affect existing clients of the API. Change-Id: I86d5748e0d818231d85d3590d86c2b41d4f8b1f1 --- .../media/stagefright/foundation/ANetworkSession.h | 135 ++ .../media/stagefright/foundation/ParsedMessage.h | 60 + media/libmediaplayerservice/RemoteDisplay.cpp | 13 +- media/libmediaplayerservice/RemoteDisplay.h | 5 +- .../libstagefright/foundation/ANetworkSession.cpp | 1412 ++++++++++++++++++++ media/libstagefright/foundation/Android.mk | 2 + media/libstagefright/foundation/ParsedMessage.cpp | 302 +++++ .../wifi-display/ANetworkSession.cpp | 1255 ----------------- .../libstagefright/wifi-display/ANetworkSession.h | 132 -- media/libstagefright/wifi-display/Android.mk | 2 - .../libstagefright/wifi-display/MediaReceiver.cpp | 2 +- media/libstagefright/wifi-display/MediaSender.cpp | 2 +- .../libstagefright/wifi-display/ParsedMessage.cpp | 284 ---- media/libstagefright/wifi-display/ParsedMessage.h | 60 - media/libstagefright/wifi-display/TimeSyncer.cpp | 3 +- media/libstagefright/wifi-display/nettest.cpp | 2 +- .../wifi-display/rtp/RTPReceiver.cpp | 3 +- .../libstagefright/wifi-display/rtp/RTPSender.cpp | 3 +- media/libstagefright/wifi-display/rtptest.cpp | 2 +- .../wifi-display/sink/WifiDisplaySink.cpp | 2 +- .../wifi-display/sink/WifiDisplaySink.h | 3 +- .../wifi-display/source/MediaPuller.cpp | 3 + .../wifi-display/source/WifiDisplaySource.cpp | 2 +- .../wifi-display/source/WifiDisplaySource.h | 2 +- media/libstagefright/wifi-display/udptest.cpp | 2 +- media/libstagefright/wifi-display/wfd.cpp | 3 +- 26 files changed, 1941 insertions(+), 1755 deletions(-) create mode 100644 include/media/stagefright/foundation/ANetworkSession.h create mode 100644 include/media/stagefright/foundation/ParsedMessage.h create mode 100644 media/libstagefright/foundation/ANetworkSession.cpp create mode 100644 media/libstagefright/foundation/ParsedMessage.cpp delete mode 100644 media/libstagefright/wifi-display/ANetworkSession.cpp delete mode 100644 media/libstagefright/wifi-display/ANetworkSession.h delete mode 100644 media/libstagefright/wifi-display/ParsedMessage.cpp delete mode 100644 media/libstagefright/wifi-display/ParsedMessage.h diff --git a/include/media/stagefright/foundation/ANetworkSession.h b/include/media/stagefright/foundation/ANetworkSession.h new file mode 100644 index 0000000..fd3ebaa --- /dev/null +++ b/include/media/stagefright/foundation/ANetworkSession.h @@ -0,0 +1,135 @@ 
+/* + * Copyright 2012, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef A_NETWORK_SESSION_H_ + +#define A_NETWORK_SESSION_H_ + +#include +#include +#include +#include + +#include + +namespace android { + +struct AMessage; + +// Helper class to manage a number of live sockets (datagram and stream-based) +// on a single thread. Clients are notified about activity through AMessages. +struct ANetworkSession : public RefBase { + ANetworkSession(); + + status_t start(); + status_t stop(); + + status_t createRTSPClient( + const char *host, unsigned port, const sp ¬ify, + int32_t *sessionID); + + status_t createRTSPServer( + const struct in_addr &addr, unsigned port, + const sp ¬ify, int32_t *sessionID); + + status_t createUDPSession( + unsigned localPort, const sp ¬ify, int32_t *sessionID); + + status_t createUDPSession( + unsigned localPort, + const char *remoteHost, + unsigned remotePort, + const sp ¬ify, + int32_t *sessionID); + + status_t connectUDPSession( + int32_t sessionID, const char *remoteHost, unsigned remotePort); + + // passive + status_t createTCPDatagramSession( + const struct in_addr &addr, unsigned port, + const sp ¬ify, int32_t *sessionID); + + // active + status_t createTCPDatagramSession( + unsigned localPort, + const char *remoteHost, + unsigned remotePort, + const sp ¬ify, + int32_t *sessionID); + + status_t destroySession(int32_t sessionID); + + status_t sendRequest( + int32_t sessionID, const void *data, ssize_t size = -1, + bool timeValid = false, int64_t timeUs = -1ll); + + status_t switchToWebSocketMode(int32_t sessionID); + + enum NotificationReason { + kWhatError, + kWhatConnected, + kWhatClientConnected, + kWhatData, + kWhatDatagram, + kWhatBinaryData, + kWhatWebSocketMessage, + kWhatNetworkStall, + }; + +protected: + virtual ~ANetworkSession(); + +private: + struct NetworkThread; + struct Session; + + Mutex mLock; + sp mThread; + + int32_t mNextSessionID; + + int mPipeFd[2]; + + KeyedVector > mSessions; + + enum Mode { + kModeCreateUDPSession, + kModeCreateTCPDatagramSessionPassive, + kModeCreateTCPDatagramSessionActive, + kModeCreateRTSPServer, + kModeCreateRTSPClient, + }; + status_t createClientOrServer( + Mode mode, + const struct in_addr *addr, + unsigned port, + const char *remoteHost, + unsigned remotePort, + const sp ¬ify, + int32_t *sessionID); + + void threadLoop(); + void interrupt(); + + static status_t MakeSocketNonBlocking(int s); + + DISALLOW_EVIL_CONSTRUCTORS(ANetworkSession); +}; + +} // namespace android + +#endif // A_NETWORK_SESSION_H_ diff --git a/include/media/stagefright/foundation/ParsedMessage.h b/include/media/stagefright/foundation/ParsedMessage.h new file mode 100644 index 0000000..9d43a93 --- /dev/null +++ b/include/media/stagefright/foundation/ParsedMessage.h @@ -0,0 +1,60 @@ +/* + * Copyright 2012, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include + +namespace android { + +// Encapsulates an "HTTP/RTSP style" response, i.e. a status line, +// key/value pairs making up the headers and an optional body/content. +struct ParsedMessage : public RefBase { + static sp Parse( + const char *data, size_t size, bool noMoreData, size_t *length); + + bool findString(const char *name, AString *value) const; + bool findInt32(const char *name, int32_t *value) const; + + const char *getContent() const; + + bool getRequestField(size_t index, AString *field) const; + bool getStatusCode(int32_t *statusCode) const; + + AString debugString() const; + + static bool GetAttribute(const char *s, const char *key, AString *value); + + static bool GetInt32Attribute( + const char *s, const char *key, int32_t *value); + + +protected: + virtual ~ParsedMessage(); + +private: + KeyedVector mDict; + AString mContent; + + ParsedMessage(); + + ssize_t parse(const char *data, size_t size, bool noMoreData); + + DISALLOW_EVIL_CONSTRUCTORS(ParsedMessage); +}; + +} // namespace android diff --git a/media/libmediaplayerservice/RemoteDisplay.cpp b/media/libmediaplayerservice/RemoteDisplay.cpp index 20e6513..eb959b4 100644 --- a/media/libmediaplayerservice/RemoteDisplay.cpp +++ b/media/libmediaplayerservice/RemoteDisplay.cpp @@ -16,19 +16,23 @@ #include "RemoteDisplay.h" -#include "ANetworkSession.h" #include "source/WifiDisplaySource.h" #include +#include +#include +#include namespace android { RemoteDisplay::RemoteDisplay( - const sp &client, const char *iface) + const sp &client, + const char *iface) : mLooper(new ALooper), - mNetSession(new ANetworkSession), - mSource(new WifiDisplaySource(mNetSession, client)) { + mNetSession(new ANetworkSession) { mLooper->setName("wfd_looper"); + + mSource = new WifiDisplaySource(mNetSession, client); mLooper->registerHandler(mSource); mNetSession->start(); @@ -50,6 +54,7 @@ status_t RemoteDisplay::resume() { status_t RemoteDisplay::dispose() { mSource->stop(); + mSource.clear(); mLooper->stop(); mNetSession->stop(); diff --git a/media/libmediaplayerservice/RemoteDisplay.h b/media/libmediaplayerservice/RemoteDisplay.h index bd8b684..82a0116 100644 --- a/media/libmediaplayerservice/RemoteDisplay.h +++ b/media/libmediaplayerservice/RemoteDisplay.h @@ -18,6 +18,7 @@ #define REMOTE_DISPLAY_H_ +#include #include #include #include @@ -31,7 +32,9 @@ struct IRemoteDisplayClient; struct WifiDisplaySource; struct RemoteDisplay : public BnRemoteDisplay { - RemoteDisplay(const sp &client, const char *iface); + RemoteDisplay( + const sp &client, + const char *iface); virtual status_t pause(); virtual status_t resume(); diff --git a/media/libstagefright/foundation/ANetworkSession.cpp b/media/libstagefright/foundation/ANetworkSession.cpp new file mode 100644 index 0000000..e629588 --- /dev/null +++ b/media/libstagefright/foundation/ANetworkSession.cpp @@ -0,0 +1,1412 @@ +/* + * Copyright 2012, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "NetworkSession" +#include + +#include "ANetworkSession.h" +#include "ParsedMessage.h" + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +namespace android { + +static uint16_t U16_AT(const uint8_t *ptr) { + return ptr[0] << 8 | ptr[1]; +} + +static uint32_t U32_AT(const uint8_t *ptr) { + return ptr[0] << 24 | ptr[1] << 16 | ptr[2] << 8 | ptr[3]; +} + +static uint64_t U64_AT(const uint8_t *ptr) { + return ((uint64_t)U32_AT(ptr)) << 32 | U32_AT(ptr + 4); +} + +static const size_t kMaxUDPSize = 1500; +static const int32_t kMaxUDPRetries = 200; + +struct ANetworkSession::NetworkThread : public Thread { + NetworkThread(ANetworkSession *session); + +protected: + virtual ~NetworkThread(); + +private: + ANetworkSession *mSession; + + virtual bool threadLoop(); + + DISALLOW_EVIL_CONSTRUCTORS(NetworkThread); +}; + +struct ANetworkSession::Session : public RefBase { + enum Mode { + MODE_RTSP, + MODE_DATAGRAM, + MODE_WEBSOCKET, + }; + + enum State { + CONNECTING, + CONNECTED, + LISTENING_RTSP, + LISTENING_TCP_DGRAMS, + DATAGRAM, + }; + + Session(int32_t sessionID, + State state, + int s, + const sp ¬ify); + + int32_t sessionID() const; + int socket() const; + sp getNotificationMessage() const; + + bool isRTSPServer() const; + bool isTCPDatagramServer() const; + + bool wantsToRead(); + bool wantsToWrite(); + + status_t readMore(); + status_t writeMore(); + + status_t sendRequest( + const void *data, ssize_t size, bool timeValid, int64_t timeUs); + + void setMode(Mode mode); + + status_t switchToWebSocketMode(); + +protected: + virtual ~Session(); + +private: + enum { + FRAGMENT_FLAG_TIME_VALID = 1, + }; + struct Fragment { + uint32_t mFlags; + int64_t mTimeUs; + sp mBuffer; + }; + + int32_t mSessionID; + State mState; + Mode mMode; + int mSocket; + sp mNotify; + bool mSawReceiveFailure, mSawSendFailure; + int32_t mUDPRetries; + + List mOutFragments; + + AString mInBuffer; + + int64_t mLastStallReportUs; + + void notifyError(bool send, status_t err, const char *detail); + void notify(NotificationReason reason); + + void dumpFragmentStats(const Fragment &frag); + + DISALLOW_EVIL_CONSTRUCTORS(Session); +}; +//////////////////////////////////////////////////////////////////////////////// + +ANetworkSession::NetworkThread::NetworkThread(ANetworkSession *session) + : mSession(session) { +} + +ANetworkSession::NetworkThread::~NetworkThread() { +} + +bool ANetworkSession::NetworkThread::threadLoop() { + mSession->threadLoop(); + + return true; +} + +//////////////////////////////////////////////////////////////////////////////// + +ANetworkSession::Session::Session( + int32_t sessionID, + State state, + int s, + const sp ¬ify) + : mSessionID(sessionID), + mState(state), + mMode(MODE_DATAGRAM), + mSocket(s), + mNotify(notify), + mSawReceiveFailure(false), + mSawSendFailure(false), + mUDPRetries(kMaxUDPRetries), + mLastStallReportUs(-1ll) { + if (mState == CONNECTED) { + struct sockaddr_in localAddr; + socklen_t localAddrLen = sizeof(localAddr); + + int res = getsockname( 
+ mSocket, (struct sockaddr *)&localAddr, &localAddrLen); + CHECK_GE(res, 0); + + struct sockaddr_in remoteAddr; + socklen_t remoteAddrLen = sizeof(remoteAddr); + + res = getpeername( + mSocket, (struct sockaddr *)&remoteAddr, &remoteAddrLen); + CHECK_GE(res, 0); + + in_addr_t addr = ntohl(localAddr.sin_addr.s_addr); + AString localAddrString = StringPrintf( + "%d.%d.%d.%d", + (addr >> 24), + (addr >> 16) & 0xff, + (addr >> 8) & 0xff, + addr & 0xff); + + addr = ntohl(remoteAddr.sin_addr.s_addr); + AString remoteAddrString = StringPrintf( + "%d.%d.%d.%d", + (addr >> 24), + (addr >> 16) & 0xff, + (addr >> 8) & 0xff, + addr & 0xff); + + sp msg = mNotify->dup(); + msg->setInt32("sessionID", mSessionID); + msg->setInt32("reason", kWhatClientConnected); + msg->setString("server-ip", localAddrString.c_str()); + msg->setInt32("server-port", ntohs(localAddr.sin_port)); + msg->setString("client-ip", remoteAddrString.c_str()); + msg->setInt32("client-port", ntohs(remoteAddr.sin_port)); + msg->post(); + } +} + +ANetworkSession::Session::~Session() { + ALOGV("Session %d gone", mSessionID); + + close(mSocket); + mSocket = -1; +} + +int32_t ANetworkSession::Session::sessionID() const { + return mSessionID; +} + +int ANetworkSession::Session::socket() const { + return mSocket; +} + +void ANetworkSession::Session::setMode(Mode mode) { + mMode = mode; +} + +status_t ANetworkSession::Session::switchToWebSocketMode() { + if (mState != CONNECTED || mMode != MODE_RTSP) { + return INVALID_OPERATION; + } + + mMode = MODE_WEBSOCKET; + + return OK; +} + +sp ANetworkSession::Session::getNotificationMessage() const { + return mNotify; +} + +bool ANetworkSession::Session::isRTSPServer() const { + return mState == LISTENING_RTSP; +} + +bool ANetworkSession::Session::isTCPDatagramServer() const { + return mState == LISTENING_TCP_DGRAMS; +} + +bool ANetworkSession::Session::wantsToRead() { + return !mSawReceiveFailure && mState != CONNECTING; +} + +bool ANetworkSession::Session::wantsToWrite() { + return !mSawSendFailure + && (mState == CONNECTING + || (mState == CONNECTED && !mOutFragments.empty()) + || (mState == DATAGRAM && !mOutFragments.empty())); +} + +status_t ANetworkSession::Session::readMore() { + if (mState == DATAGRAM) { + CHECK_EQ(mMode, MODE_DATAGRAM); + + status_t err; + do { + sp buf = new ABuffer(kMaxUDPSize); + + struct sockaddr_in remoteAddr; + socklen_t remoteAddrLen = sizeof(remoteAddr); + + ssize_t n; + do { + n = recvfrom( + mSocket, buf->data(), buf->capacity(), 0, + (struct sockaddr *)&remoteAddr, &remoteAddrLen); + } while (n < 0 && errno == EINTR); + + err = OK; + if (n < 0) { + err = -errno; + } else if (n == 0) { + err = -ECONNRESET; + } else { + buf->setRange(0, n); + + int64_t nowUs = ALooper::GetNowUs(); + buf->meta()->setInt64("arrivalTimeUs", nowUs); + + sp notify = mNotify->dup(); + notify->setInt32("sessionID", mSessionID); + notify->setInt32("reason", kWhatDatagram); + + uint32_t ip = ntohl(remoteAddr.sin_addr.s_addr); + notify->setString( + "fromAddr", + StringPrintf( + "%u.%u.%u.%u", + ip >> 24, + (ip >> 16) & 0xff, + (ip >> 8) & 0xff, + ip & 0xff).c_str()); + + notify->setInt32("fromPort", ntohs(remoteAddr.sin_port)); + + notify->setBuffer("data", buf); + notify->post(); + } + } while (err == OK); + + if (err == -EAGAIN) { + err = OK; + } + + if (err != OK) { + if (!mUDPRetries) { + notifyError(false /* send */, err, "Recvfrom failed."); + mSawReceiveFailure = true; + } else { + mUDPRetries--; + ALOGE("Recvfrom failed, %d/%d retries left", + mUDPRetries, kMaxUDPRetries); + err = 
OK; + } + } else { + mUDPRetries = kMaxUDPRetries; + } + + return err; + } + + char tmp[512]; + ssize_t n; + do { + n = recv(mSocket, tmp, sizeof(tmp), 0); + } while (n < 0 && errno == EINTR); + + status_t err = OK; + + if (n > 0) { + mInBuffer.append(tmp, n); + +#if 0 + ALOGI("in:"); + hexdump(tmp, n); +#endif + } else if (n < 0) { + err = -errno; + } else { + err = -ECONNRESET; + } + + if (mMode == MODE_DATAGRAM) { + // TCP stream carrying 16-bit length-prefixed datagrams. + + while (mInBuffer.size() >= 2) { + size_t packetSize = U16_AT((const uint8_t *)mInBuffer.c_str()); + + if (mInBuffer.size() < packetSize + 2) { + break; + } + + sp packet = new ABuffer(packetSize); + memcpy(packet->data(), mInBuffer.c_str() + 2, packetSize); + + int64_t nowUs = ALooper::GetNowUs(); + packet->meta()->setInt64("arrivalTimeUs", nowUs); + + sp notify = mNotify->dup(); + notify->setInt32("sessionID", mSessionID); + notify->setInt32("reason", kWhatDatagram); + notify->setBuffer("data", packet); + notify->post(); + + mInBuffer.erase(0, packetSize + 2); + } + } else if (mMode == MODE_RTSP) { + for (;;) { + size_t length; + + if (mInBuffer.size() > 0 && mInBuffer.c_str()[0] == '$') { + if (mInBuffer.size() < 4) { + break; + } + + length = U16_AT((const uint8_t *)mInBuffer.c_str() + 2); + + if (mInBuffer.size() < 4 + length) { + break; + } + + sp notify = mNotify->dup(); + notify->setInt32("sessionID", mSessionID); + notify->setInt32("reason", kWhatBinaryData); + notify->setInt32("channel", mInBuffer.c_str()[1]); + + sp data = new ABuffer(length); + memcpy(data->data(), mInBuffer.c_str() + 4, length); + + int64_t nowUs = ALooper::GetNowUs(); + data->meta()->setInt64("arrivalTimeUs", nowUs); + + notify->setBuffer("data", data); + notify->post(); + + mInBuffer.erase(0, 4 + length); + continue; + } + + sp msg = + ParsedMessage::Parse( + mInBuffer.c_str(), mInBuffer.size(), err != OK, &length); + + if (msg == NULL) { + break; + } + + sp notify = mNotify->dup(); + notify->setInt32("sessionID", mSessionID); + notify->setInt32("reason", kWhatData); + notify->setObject("data", msg); + notify->post(); + +#if 1 + // XXX The (old) dongle sends the wrong content length header on a + // SET_PARAMETER request that signals a "wfd_idr_request". + // (17 instead of 19). + const char *content = msg->getContent(); + if (content + && !memcmp(content, "wfd_idr_request\r\n", 17) + && length >= 19 + && mInBuffer.c_str()[length] == '\r' + && mInBuffer.c_str()[length + 1] == '\n') { + length += 2; + } +#endif + + mInBuffer.erase(0, length); + + if (err != OK) { + break; + } + } + } else { + CHECK_EQ(mMode, MODE_WEBSOCKET); + + const uint8_t *data = (const uint8_t *)mInBuffer.c_str(); + // hexdump(data, mInBuffer.size()); + + while (mInBuffer.size() >= 2) { + size_t offset = 2; + + unsigned payloadLen = data[1] & 0x7f; + if (payloadLen == 126) { + if (offset + 2 > mInBuffer.size()) { + break; + } + + payloadLen = U16_AT(&data[offset]); + offset += 2; + } else if (payloadLen == 127) { + if (offset + 8 > mInBuffer.size()) { + break; + } + + payloadLen = U64_AT(&data[offset]); + offset += 8; + } + + uint32_t mask = 0; + if (data[1] & 0x80) { + // MASK==1 + if (offset + 4 > mInBuffer.size()) { + break; + } + + mask = U32_AT(&data[offset]); + offset += 4; + } + + if (offset + payloadLen > mInBuffer.size()) { + break; + } + + // We have the full message. 
+ + sp packet = new ABuffer(payloadLen); + memcpy(packet->data(), &data[offset], payloadLen); + + if (mask != 0) { + for (size_t i = 0; i < payloadLen; ++i) { + packet->data()[i] = + data[offset + i] + ^ ((mask >> (8 * (3 - (i % 4)))) & 0xff); + } + } + + sp notify = mNotify->dup(); + notify->setInt32("sessionID", mSessionID); + notify->setInt32("reason", kWhatWebSocketMessage); + notify->setBuffer("data", packet); + notify->setInt32("headerByte", data[0]); + notify->post(); + + mInBuffer.erase(0, offset + payloadLen); + } + } + + if (err != OK) { + notifyError(false /* send */, err, "Recv failed."); + mSawReceiveFailure = true; + } + + return err; +} + +void ANetworkSession::Session::dumpFragmentStats(const Fragment &frag) { +#if 0 + int64_t nowUs = ALooper::GetNowUs(); + int64_t delayMs = (nowUs - frag.mTimeUs) / 1000ll; + + static const int64_t kMinDelayMs = 0; + static const int64_t kMaxDelayMs = 300; + + const char *kPattern = "########################################"; + size_t kPatternSize = strlen(kPattern); + + int n = (kPatternSize * (delayMs - kMinDelayMs)) + / (kMaxDelayMs - kMinDelayMs); + + if (n < 0) { + n = 0; + } else if ((size_t)n > kPatternSize) { + n = kPatternSize; + } + + ALOGI("[%lld]: (%4lld ms) %s\n", + frag.mTimeUs / 1000, + delayMs, + kPattern + kPatternSize - n); +#endif +} + +status_t ANetworkSession::Session::writeMore() { + if (mState == DATAGRAM) { + CHECK(!mOutFragments.empty()); + + status_t err; + do { + const Fragment &frag = *mOutFragments.begin(); + const sp &datagram = frag.mBuffer; + + int n; + do { + n = send(mSocket, datagram->data(), datagram->size(), 0); + } while (n < 0 && errno == EINTR); + + err = OK; + + if (n > 0) { + if (frag.mFlags & FRAGMENT_FLAG_TIME_VALID) { + dumpFragmentStats(frag); + } + + mOutFragments.erase(mOutFragments.begin()); + } else if (n < 0) { + err = -errno; + } else if (n == 0) { + err = -ECONNRESET; + } + } while (err == OK && !mOutFragments.empty()); + + if (err == -EAGAIN) { + if (!mOutFragments.empty()) { + ALOGI("%d datagrams remain queued.", mOutFragments.size()); + } + err = OK; + } + + if (err != OK) { + if (!mUDPRetries) { + notifyError(true /* send */, err, "Send datagram failed."); + mSawSendFailure = true; + } else { + mUDPRetries--; + ALOGE("Send datagram failed, %d/%d retries left", + mUDPRetries, kMaxUDPRetries); + err = OK; + } + } else { + mUDPRetries = kMaxUDPRetries; + } + + return err; + } + + if (mState == CONNECTING) { + int err; + socklen_t optionLen = sizeof(err); + CHECK_EQ(getsockopt(mSocket, SOL_SOCKET, SO_ERROR, &err, &optionLen), 0); + CHECK_EQ(optionLen, (socklen_t)sizeof(err)); + + if (err != 0) { + notifyError(kWhatError, -err, "Connection failed"); + mSawSendFailure = true; + + return -err; + } + + mState = CONNECTED; + notify(kWhatConnected); + + return OK; + } + + CHECK_EQ(mState, CONNECTED); + CHECK(!mOutFragments.empty()); + + ssize_t n; + while (!mOutFragments.empty()) { + const Fragment &frag = *mOutFragments.begin(); + + do { + n = send(mSocket, frag.mBuffer->data(), frag.mBuffer->size(), 0); + } while (n < 0 && errno == EINTR); + + if (n <= 0) { + break; + } + + frag.mBuffer->setRange( + frag.mBuffer->offset() + n, frag.mBuffer->size() - n); + + if (frag.mBuffer->size() > 0) { + break; + } + + if (frag.mFlags & FRAGMENT_FLAG_TIME_VALID) { + dumpFragmentStats(frag); + } + + mOutFragments.erase(mOutFragments.begin()); + } + + status_t err = OK; + + if (n < 0) { + err = -errno; + } else if (n == 0) { + err = -ECONNRESET; + } + + if (err != OK) { + notifyError(true /* send */, err, 
"Send failed."); + mSawSendFailure = true; + } + +#if 0 + int numBytesQueued; + int res = ioctl(mSocket, SIOCOUTQ, &numBytesQueued); + if (res == 0 && numBytesQueued > 50 * 1024) { + if (numBytesQueued > 409600) { + ALOGW("!!! numBytesQueued = %d", numBytesQueued); + } + + int64_t nowUs = ALooper::GetNowUs(); + + if (mLastStallReportUs < 0ll + || nowUs > mLastStallReportUs + 100000ll) { + sp msg = mNotify->dup(); + msg->setInt32("sessionID", mSessionID); + msg->setInt32("reason", kWhatNetworkStall); + msg->setSize("numBytesQueued", numBytesQueued); + msg->post(); + + mLastStallReportUs = nowUs; + } + } +#endif + + return err; +} + +status_t ANetworkSession::Session::sendRequest( + const void *data, ssize_t size, bool timeValid, int64_t timeUs) { + CHECK(mState == CONNECTED || mState == DATAGRAM); + + if (size < 0) { + size = strlen((const char *)data); + } + + if (size == 0) { + return OK; + } + + sp buffer; + + if (mState == CONNECTED && mMode == MODE_DATAGRAM) { + CHECK_LE(size, 65535); + + buffer = new ABuffer(size + 2); + buffer->data()[0] = size >> 8; + buffer->data()[1] = size & 0xff; + memcpy(buffer->data() + 2, data, size); + } else if (mState == CONNECTED && mMode == MODE_WEBSOCKET) { + static const bool kUseMask = false; // Chromium doesn't like it. + + size_t numHeaderBytes = 2 + (kUseMask ? 4 : 0); + if (size > 65535) { + numHeaderBytes += 8; + } else if (size > 125) { + numHeaderBytes += 2; + } + + buffer = new ABuffer(numHeaderBytes + size); + buffer->data()[0] = 0x81; // FIN==1 | opcode=1 (text) + buffer->data()[1] = kUseMask ? 0x80 : 0x00; + + if (size > 65535) { + buffer->data()[1] |= 127; + buffer->data()[2] = 0x00; + buffer->data()[3] = 0x00; + buffer->data()[4] = 0x00; + buffer->data()[5] = 0x00; + buffer->data()[6] = (size >> 24) & 0xff; + buffer->data()[7] = (size >> 16) & 0xff; + buffer->data()[8] = (size >> 8) & 0xff; + buffer->data()[9] = size & 0xff; + } else if (size > 125) { + buffer->data()[1] |= 126; + buffer->data()[2] = (size >> 8) & 0xff; + buffer->data()[3] = size & 0xff; + } else { + buffer->data()[1] |= size; + } + + if (kUseMask) { + uint32_t mask = rand(); + + buffer->data()[numHeaderBytes - 4] = (mask >> 24) & 0xff; + buffer->data()[numHeaderBytes - 3] = (mask >> 16) & 0xff; + buffer->data()[numHeaderBytes - 2] = (mask >> 8) & 0xff; + buffer->data()[numHeaderBytes - 1] = mask & 0xff; + + for (size_t i = 0; i < (size_t)size; ++i) { + buffer->data()[numHeaderBytes + i] = + ((const uint8_t *)data)[i] + ^ ((mask >> (8 * (3 - (i % 4)))) & 0xff); + } + } else { + memcpy(buffer->data() + numHeaderBytes, data, size); + } + } else { + buffer = new ABuffer(size); + memcpy(buffer->data(), data, size); + } + + Fragment frag; + + frag.mFlags = 0; + if (timeValid) { + frag.mFlags = FRAGMENT_FLAG_TIME_VALID; + frag.mTimeUs = timeUs; + } + + frag.mBuffer = buffer; + + mOutFragments.push_back(frag); + + return OK; +} + +void ANetworkSession::Session::notifyError( + bool send, status_t err, const char *detail) { + sp msg = mNotify->dup(); + msg->setInt32("sessionID", mSessionID); + msg->setInt32("reason", kWhatError); + msg->setInt32("send", send); + msg->setInt32("err", err); + msg->setString("detail", detail); + msg->post(); +} + +void ANetworkSession::Session::notify(NotificationReason reason) { + sp msg = mNotify->dup(); + msg->setInt32("sessionID", mSessionID); + msg->setInt32("reason", reason); + msg->post(); +} + +//////////////////////////////////////////////////////////////////////////////// + +ANetworkSession::ANetworkSession() + : mNextSessionID(1) { + 
mPipeFd[0] = mPipeFd[1] = -1; +} + +ANetworkSession::~ANetworkSession() { + stop(); +} + +status_t ANetworkSession::start() { + if (mThread != NULL) { + return INVALID_OPERATION; + } + + int res = pipe(mPipeFd); + if (res != 0) { + mPipeFd[0] = mPipeFd[1] = -1; + return -errno; + } + + mThread = new NetworkThread(this); + + status_t err = mThread->run("ANetworkSession", ANDROID_PRIORITY_AUDIO); + + if (err != OK) { + mThread.clear(); + + close(mPipeFd[0]); + close(mPipeFd[1]); + mPipeFd[0] = mPipeFd[1] = -1; + + return err; + } + + return OK; +} + +status_t ANetworkSession::stop() { + if (mThread == NULL) { + return INVALID_OPERATION; + } + + mThread->requestExit(); + interrupt(); + mThread->requestExitAndWait(); + + mThread.clear(); + + close(mPipeFd[0]); + close(mPipeFd[1]); + mPipeFd[0] = mPipeFd[1] = -1; + + return OK; +} + +status_t ANetworkSession::createRTSPClient( + const char *host, unsigned port, const sp ¬ify, + int32_t *sessionID) { + return createClientOrServer( + kModeCreateRTSPClient, + NULL /* addr */, + 0 /* port */, + host, + port, + notify, + sessionID); +} + +status_t ANetworkSession::createRTSPServer( + const struct in_addr &addr, unsigned port, + const sp ¬ify, int32_t *sessionID) { + return createClientOrServer( + kModeCreateRTSPServer, + &addr, + port, + NULL /* remoteHost */, + 0 /* remotePort */, + notify, + sessionID); +} + +status_t ANetworkSession::createUDPSession( + unsigned localPort, const sp ¬ify, int32_t *sessionID) { + return createUDPSession(localPort, NULL, 0, notify, sessionID); +} + +status_t ANetworkSession::createUDPSession( + unsigned localPort, + const char *remoteHost, + unsigned remotePort, + const sp ¬ify, + int32_t *sessionID) { + return createClientOrServer( + kModeCreateUDPSession, + NULL /* addr */, + localPort, + remoteHost, + remotePort, + notify, + sessionID); +} + +status_t ANetworkSession::createTCPDatagramSession( + const struct in_addr &addr, unsigned port, + const sp ¬ify, int32_t *sessionID) { + return createClientOrServer( + kModeCreateTCPDatagramSessionPassive, + &addr, + port, + NULL /* remoteHost */, + 0 /* remotePort */, + notify, + sessionID); +} + +status_t ANetworkSession::createTCPDatagramSession( + unsigned localPort, + const char *remoteHost, + unsigned remotePort, + const sp ¬ify, + int32_t *sessionID) { + return createClientOrServer( + kModeCreateTCPDatagramSessionActive, + NULL /* addr */, + localPort, + remoteHost, + remotePort, + notify, + sessionID); +} + +status_t ANetworkSession::destroySession(int32_t sessionID) { + Mutex::Autolock autoLock(mLock); + + ssize_t index = mSessions.indexOfKey(sessionID); + + if (index < 0) { + return -ENOENT; + } + + mSessions.removeItemsAt(index); + + interrupt(); + + return OK; +} + +// static +status_t ANetworkSession::MakeSocketNonBlocking(int s) { + int flags = fcntl(s, F_GETFL, 0); + if (flags < 0) { + flags = 0; + } + + int res = fcntl(s, F_SETFL, flags | O_NONBLOCK); + if (res < 0) { + return -errno; + } + + return OK; +} + +status_t ANetworkSession::createClientOrServer( + Mode mode, + const struct in_addr *localAddr, + unsigned port, + const char *remoteHost, + unsigned remotePort, + const sp ¬ify, + int32_t *sessionID) { + Mutex::Autolock autoLock(mLock); + + *sessionID = 0; + status_t err = OK; + int s, res; + sp session; + + s = socket( + AF_INET, + (mode == kModeCreateUDPSession) ? 
SOCK_DGRAM : SOCK_STREAM, + 0); + + if (s < 0) { + err = -errno; + goto bail; + } + + if (mode == kModeCreateRTSPServer + || mode == kModeCreateTCPDatagramSessionPassive) { + const int yes = 1; + res = setsockopt(s, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(yes)); + + if (res < 0) { + err = -errno; + goto bail2; + } + } + + if (mode == kModeCreateUDPSession) { + int size = 256 * 1024; + + res = setsockopt(s, SOL_SOCKET, SO_RCVBUF, &size, sizeof(size)); + + if (res < 0) { + err = -errno; + goto bail2; + } + + res = setsockopt(s, SOL_SOCKET, SO_SNDBUF, &size, sizeof(size)); + + if (res < 0) { + err = -errno; + goto bail2; + } + } else if (mode == kModeCreateTCPDatagramSessionActive) { + int flag = 1; + res = setsockopt(s, IPPROTO_TCP, TCP_NODELAY, &flag, sizeof(flag)); + + if (res < 0) { + err = -errno; + goto bail2; + } + + int tos = 224; // VOICE + res = setsockopt(s, IPPROTO_IP, IP_TOS, &tos, sizeof(tos)); + + if (res < 0) { + err = -errno; + goto bail2; + } + } + + err = MakeSocketNonBlocking(s); + + if (err != OK) { + goto bail2; + } + + struct sockaddr_in addr; + memset(addr.sin_zero, 0, sizeof(addr.sin_zero)); + addr.sin_family = AF_INET; + + if (mode == kModeCreateRTSPClient + || mode == kModeCreateTCPDatagramSessionActive) { + struct hostent *ent= gethostbyname(remoteHost); + if (ent == NULL) { + err = -h_errno; + goto bail2; + } + + addr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr; + addr.sin_port = htons(remotePort); + } else if (localAddr != NULL) { + addr.sin_addr = *localAddr; + addr.sin_port = htons(port); + } else { + addr.sin_addr.s_addr = htonl(INADDR_ANY); + addr.sin_port = htons(port); + } + + if (mode == kModeCreateRTSPClient + || mode == kModeCreateTCPDatagramSessionActive) { + in_addr_t x = ntohl(addr.sin_addr.s_addr); + ALOGI("connecting socket %d to %d.%d.%d.%d:%d", + s, + (x >> 24), + (x >> 16) & 0xff, + (x >> 8) & 0xff, + x & 0xff, + ntohs(addr.sin_port)); + + res = connect(s, (const struct sockaddr *)&addr, sizeof(addr)); + + CHECK_LT(res, 0); + if (errno == EINPROGRESS) { + res = 0; + } + } else { + res = bind(s, (const struct sockaddr *)&addr, sizeof(addr)); + + if (res == 0) { + if (mode == kModeCreateRTSPServer + || mode == kModeCreateTCPDatagramSessionPassive) { + res = listen(s, 4); + } else { + CHECK_EQ(mode, kModeCreateUDPSession); + + if (remoteHost != NULL) { + struct sockaddr_in remoteAddr; + memset(remoteAddr.sin_zero, 0, sizeof(remoteAddr.sin_zero)); + remoteAddr.sin_family = AF_INET; + remoteAddr.sin_port = htons(remotePort); + + struct hostent *ent= gethostbyname(remoteHost); + if (ent == NULL) { + err = -h_errno; + goto bail2; + } + + remoteAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr; + + res = connect( + s, + (const struct sockaddr *)&remoteAddr, + sizeof(remoteAddr)); + } + } + } + } + + if (res < 0) { + err = -errno; + goto bail2; + } + + Session::State state; + switch (mode) { + case kModeCreateRTSPClient: + state = Session::CONNECTING; + break; + + case kModeCreateTCPDatagramSessionActive: + state = Session::CONNECTING; + break; + + case kModeCreateTCPDatagramSessionPassive: + state = Session::LISTENING_TCP_DGRAMS; + break; + + case kModeCreateRTSPServer: + state = Session::LISTENING_RTSP; + break; + + default: + CHECK_EQ(mode, kModeCreateUDPSession); + state = Session::DATAGRAM; + break; + } + + session = new Session( + mNextSessionID++, + state, + s, + notify); + + if (mode == kModeCreateTCPDatagramSessionActive) { + session->setMode(Session::MODE_DATAGRAM); + } else if (mode == kModeCreateRTSPClient) { + 
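Note: the connect() call above follows the standard non-blocking connect sequence. The socket is already O_NONBLOCK, so connect() is expected to return -1 with errno == EINPROGRESS; the socket is then watched for writability by the select() loop, and the CONNECTING branch of writeMore() earlier in this file completes the handshake by reading SO_ERROR. A self-contained sketch of the same pattern with hypothetical helper names:

    #include <errno.h>
    #include <fcntl.h>
    #include <sys/socket.h>

    // Kick off a non-blocking connect. Returns 0 if connected or in progress,
    // -errno on immediate failure. Completion is detected later by waiting for
    // the socket to become writable and then checking SO_ERROR.
    static int startNonBlockingConnect(int s, const struct sockaddr *addr, socklen_t addrLen) {
        int flags = fcntl(s, F_GETFL, 0);
        if (flags < 0 || fcntl(s, F_SETFL, flags | O_NONBLOCK) < 0) {
            return -errno;
        }
        if (connect(s, addr, addrLen) == 0) {
            return 0;                                 // connected immediately
        }
        return (errno == EINPROGRESS) ? 0 : -errno;
    }

    // Call once select()/poll() reports the socket writable.
    static int finishNonBlockingConnect(int s) {
        int err = 0;
        socklen_t len = sizeof(err);
        if (getsockopt(s, SOL_SOCKET, SO_ERROR, &err, &len) < 0) {
            return -errno;
        }
        return -err;                                  // 0 on success
    }

This is also why wantsToWrite() reports true while a session is still in the CONNECTING state.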
session->setMode(Session::MODE_RTSP); + } + + mSessions.add(session->sessionID(), session); + + interrupt(); + + *sessionID = session->sessionID(); + + goto bail; + +bail2: + close(s); + s = -1; + +bail: + return err; +} + +status_t ANetworkSession::connectUDPSession( + int32_t sessionID, const char *remoteHost, unsigned remotePort) { + Mutex::Autolock autoLock(mLock); + + ssize_t index = mSessions.indexOfKey(sessionID); + + if (index < 0) { + return -ENOENT; + } + + const sp session = mSessions.valueAt(index); + int s = session->socket(); + + struct sockaddr_in remoteAddr; + memset(remoteAddr.sin_zero, 0, sizeof(remoteAddr.sin_zero)); + remoteAddr.sin_family = AF_INET; + remoteAddr.sin_port = htons(remotePort); + + status_t err = OK; + struct hostent *ent = gethostbyname(remoteHost); + if (ent == NULL) { + err = -h_errno; + } else { + remoteAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr; + + int res = connect( + s, + (const struct sockaddr *)&remoteAddr, + sizeof(remoteAddr)); + + if (res < 0) { + err = -errno; + } + } + + return err; +} + +status_t ANetworkSession::sendRequest( + int32_t sessionID, const void *data, ssize_t size, + bool timeValid, int64_t timeUs) { + Mutex::Autolock autoLock(mLock); + + ssize_t index = mSessions.indexOfKey(sessionID); + + if (index < 0) { + return -ENOENT; + } + + const sp session = mSessions.valueAt(index); + + status_t err = session->sendRequest(data, size, timeValid, timeUs); + + interrupt(); + + return err; +} + +status_t ANetworkSession::switchToWebSocketMode(int32_t sessionID) { + Mutex::Autolock autoLock(mLock); + + ssize_t index = mSessions.indexOfKey(sessionID); + + if (index < 0) { + return -ENOENT; + } + + const sp session = mSessions.valueAt(index); + return session->switchToWebSocketMode(); +} + +void ANetworkSession::interrupt() { + static const char dummy = 0; + + ssize_t n; + do { + n = write(mPipeFd[1], &dummy, 1); + } while (n < 0 && errno == EINTR); + + if (n < 0) { + ALOGW("Error writing to pipe (%s)", strerror(errno)); + } +} + +void ANetworkSession::threadLoop() { + fd_set rs, ws; + FD_ZERO(&rs); + FD_ZERO(&ws); + + FD_SET(mPipeFd[0], &rs); + int maxFd = mPipeFd[0]; + + { + Mutex::Autolock autoLock(mLock); + + for (size_t i = 0; i < mSessions.size(); ++i) { + const sp &session = mSessions.valueAt(i); + + int s = session->socket(); + + if (s < 0) { + continue; + } + + if (session->wantsToRead()) { + FD_SET(s, &rs); + if (s > maxFd) { + maxFd = s; + } + } + + if (session->wantsToWrite()) { + FD_SET(s, &ws); + if (s > maxFd) { + maxFd = s; + } + } + } + } + + int res = select(maxFd + 1, &rs, &ws, NULL, NULL /* tv */); + + if (res == 0) { + return; + } + + if (res < 0) { + if (errno == EINTR) { + return; + } + + ALOGE("select failed w/ error %d (%s)", errno, strerror(errno)); + return; + } + + if (FD_ISSET(mPipeFd[0], &rs)) { + char c; + ssize_t n; + do { + n = read(mPipeFd[0], &c, 1); + } while (n < 0 && errno == EINTR); + + if (n < 0) { + ALOGW("Error reading from pipe (%s)", strerror(errno)); + } + + --res; + } + + { + Mutex::Autolock autoLock(mLock); + + List > sessionsToAdd; + + for (size_t i = mSessions.size(); res > 0 && i-- > 0;) { + const sp &session = mSessions.valueAt(i); + + int s = session->socket(); + + if (s < 0) { + continue; + } + + if (FD_ISSET(s, &rs) || FD_ISSET(s, &ws)) { + --res; + } + + if (FD_ISSET(s, &rs)) { + if (session->isRTSPServer() || session->isTCPDatagramServer()) { + struct sockaddr_in remoteAddr; + socklen_t remoteAddrLen = sizeof(remoteAddr); + + int clientSocket = accept( + s, (struct sockaddr 
*)&remoteAddr, &remoteAddrLen); + + if (clientSocket >= 0) { + status_t err = MakeSocketNonBlocking(clientSocket); + + if (err != OK) { + ALOGE("Unable to make client socket non blocking, " + "failed w/ error %d (%s)", + err, strerror(-err)); + + close(clientSocket); + clientSocket = -1; + } else { + in_addr_t addr = ntohl(remoteAddr.sin_addr.s_addr); + + ALOGI("incoming connection from %d.%d.%d.%d:%d " + "(socket %d)", + (addr >> 24), + (addr >> 16) & 0xff, + (addr >> 8) & 0xff, + addr & 0xff, + ntohs(remoteAddr.sin_port), + clientSocket); + + sp clientSession = + new Session( + mNextSessionID++, + Session::CONNECTED, + clientSocket, + session->getNotificationMessage()); + + clientSession->setMode( + session->isRTSPServer() + ? Session::MODE_RTSP + : Session::MODE_DATAGRAM); + + sessionsToAdd.push_back(clientSession); + } + } else { + ALOGE("accept returned error %d (%s)", + errno, strerror(errno)); + } + } else { + status_t err = session->readMore(); + if (err != OK) { + ALOGE("readMore on socket %d failed w/ error %d (%s)", + s, err, strerror(-err)); + } + } + } + + if (FD_ISSET(s, &ws)) { + status_t err = session->writeMore(); + if (err != OK) { + ALOGE("writeMore on socket %d failed w/ error %d (%s)", + s, err, strerror(-err)); + } + } + } + + while (!sessionsToAdd.empty()) { + sp session = *sessionsToAdd.begin(); + sessionsToAdd.erase(sessionsToAdd.begin()); + + mSessions.add(session->sessionID(), session); + + ALOGI("added clientSession %d", session->sessionID()); + } + } +} + +} // namespace android + diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk index d65e213..ad2dab5 100644 --- a/media/libstagefright/foundation/Android.mk +++ b/media/libstagefright/foundation/Android.mk @@ -10,7 +10,9 @@ LOCAL_SRC_FILES:= \ ALooper.cpp \ ALooperRoster.cpp \ AMessage.cpp \ + ANetworkSession.cpp \ AString.cpp \ + ParsedMessage.cpp \ base64.cpp \ hexdump.cpp diff --git a/media/libstagefright/foundation/ParsedMessage.cpp b/media/libstagefright/foundation/ParsedMessage.cpp new file mode 100644 index 0000000..049c9ad --- /dev/null +++ b/media/libstagefright/foundation/ParsedMessage.cpp @@ -0,0 +1,302 @@ +/* + * Copyright 2012, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
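Note: interrupt() and threadLoop() above use the self-pipe trick. The worker thread selects on every session socket plus the read end of mPipeFd, and any thread that adds, removes, or feeds a session writes a single byte to the write end so select() wakes up and recomputes its fd sets. A bare-bones sketch of the two halves, with made-up names:

    #include <errno.h>
    #include <unistd.h>

    // Writer side: wake a thread blocked in select() on the pipe's read end.
    static void wakeSelectLoop(int pipeWriteFd) {
        const char dummy = 0;
        ssize_t n;
        do {
            n = write(pipeWriteFd, &dummy, 1);
        } while (n < 0 && errno == EINTR);
    }

    // Select-loop side: once the read end is reported readable, consume the
    // wake-up byte so it does not stay readable forever.
    static void consumeWakeByte(int pipeReadFd) {
        char c;
        ssize_t n;
        do {
            n = read(pipeReadFd, &c, 1);
        } while (n < 0 && errno == EINTR);
    }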
+ */ + +#include "ParsedMessage.h" + +#include +#include +#include +#include + +namespace android { + +// static +sp ParsedMessage::Parse( + const char *data, size_t size, bool noMoreData, size_t *length) { + sp msg = new ParsedMessage; + ssize_t res = msg->parse(data, size, noMoreData); + + if (res < 0) { + *length = 0; + return NULL; + } + + *length = res; + return msg; +} + +ParsedMessage::ParsedMessage() { +} + +ParsedMessage::~ParsedMessage() { +} + +bool ParsedMessage::findString(const char *name, AString *value) const { + AString key = name; + key.tolower(); + + ssize_t index = mDict.indexOfKey(key); + + if (index < 0) { + value->clear(); + + return false; + } + + *value = mDict.valueAt(index); + return true; +} + +bool ParsedMessage::findInt32(const char *name, int32_t *value) const { + AString stringValue; + + if (!findString(name, &stringValue)) { + return false; + } + + char *end; + *value = strtol(stringValue.c_str(), &end, 10); + + if (end == stringValue.c_str() || *end != '\0') { + *value = 0; + return false; + } + + return true; +} + +const char *ParsedMessage::getContent() const { + return mContent.c_str(); +} + +ssize_t ParsedMessage::parse(const char *data, size_t size, bool noMoreData) { + if (size == 0) { + return -1; + } + + ssize_t lastDictIndex = -1; + + size_t offset = 0; + bool headersComplete = false; + while (offset < size) { + size_t lineEndOffset = offset; + while (lineEndOffset + 1 < size + && (data[lineEndOffset] != '\r' + || data[lineEndOffset + 1] != '\n')) { + ++lineEndOffset; + } + + if (lineEndOffset + 1 >= size) { + return -1; + } + + AString line(&data[offset], lineEndOffset - offset); + + if (offset == 0) { + // Special handling for the request/status line. + + mDict.add(AString("_"), line); + offset = lineEndOffset + 2; + + continue; + } + + if (lineEndOffset == offset) { + // An empty line separates headers from body. + headersComplete = true; + offset += 2; + break; + } + + if (line.c_str()[0] == ' ' || line.c_str()[0] == '\t') { + // Support for folded header values. + + if (lastDictIndex >= 0) { + // Otherwise it's malformed since the first header line + // cannot continue anything... + + AString &value = mDict.editValueAt(lastDictIndex); + value.append(line); + } + + offset = lineEndOffset + 2; + continue; + } + + ssize_t colonPos = line.find(":"); + if (colonPos >= 0) { + AString key(line, 0, colonPos); + key.trim(); + key.tolower(); + + line.erase(0, colonPos + 1); + + lastDictIndex = mDict.add(key, line); + } + + offset = lineEndOffset + 2; + } + + if (!headersComplete && (!noMoreData || offset == 0)) { + // We either saw the empty line separating headers from body + // or we saw at least the status line and know that no more data + // is going to follow. 
+ return -1; + } + + for (size_t i = 0; i < mDict.size(); ++i) { + mDict.editValueAt(i).trim(); + } + + int32_t contentLength; + if (!findInt32("content-length", &contentLength) || contentLength < 0) { + contentLength = 0; + } + + size_t totalLength = offset + contentLength; + + if (size < totalLength) { + return -1; + } + + mContent.setTo(&data[offset], contentLength); + + return totalLength; +} + +bool ParsedMessage::getRequestField(size_t index, AString *field) const { + AString line; + CHECK(findString("_", &line)); + + size_t prevOffset = 0; + size_t offset = 0; + for (size_t i = 0; i <= index; ++i) { + if (offset >= line.size()) { + return false; + } + + ssize_t spacePos = line.find(" ", offset); + + if (spacePos < 0) { + spacePos = line.size(); + } + + prevOffset = offset; + offset = spacePos + 1; + } + + field->setTo(line, prevOffset, offset - prevOffset - 1); + + return true; +} + +bool ParsedMessage::getStatusCode(int32_t *statusCode) const { + AString statusCodeString; + if (!getRequestField(1, &statusCodeString)) { + *statusCode = 0; + return false; + } + + char *end; + *statusCode = strtol(statusCodeString.c_str(), &end, 10); + + if (*end != '\0' || end == statusCodeString.c_str() + || (*statusCode) < 100 || (*statusCode) > 999) { + *statusCode = 0; + return false; + } + + return true; +} + +AString ParsedMessage::debugString() const { + AString line; + CHECK(findString("_", &line)); + + line.append("\n"); + + for (size_t i = 0; i < mDict.size(); ++i) { + const AString &key = mDict.keyAt(i); + const AString &value = mDict.valueAt(i); + + if (key == AString("_")) { + continue; + } + + line.append(key); + line.append(": "); + line.append(value); + line.append("\n"); + } + + line.append("\n"); + line.append(mContent); + + return line; +} + +// static +bool ParsedMessage::GetAttribute( + const char *s, const char *key, AString *value) { + value->clear(); + + size_t keyLen = strlen(key); + + for (;;) { + while (isspace(*s)) { + ++s; + } + + const char *colonPos = strchr(s, ';'); + + size_t len = + (colonPos == NULL) ? strlen(s) : colonPos - s; + + if (len >= keyLen + 1 && s[keyLen] == '=' && !strncmp(s, key, keyLen)) { + value->setTo(&s[keyLen + 1], len - keyLen - 1); + return true; + } + + if (colonPos == NULL) { + return false; + } + + s = colonPos + 1; + } +} + +// static +bool ParsedMessage::GetInt32Attribute( + const char *s, const char *key, int32_t *value) { + AString stringValue; + if (!GetAttribute(s, key, &stringValue)) { + *value = 0; + return false; + } + + char *end; + *value = strtol(stringValue.c_str(), &end, 10); + + if (end == stringValue.c_str() || *end != '\0') { + *value = 0; + return false; + } + + return true; +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/ANetworkSession.cpp b/media/libstagefright/wifi-display/ANetworkSession.cpp deleted file mode 100644 index 938d601..0000000 --- a/media/libstagefright/wifi-display/ANetworkSession.cpp +++ /dev/null @@ -1,1255 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
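Note: the ParsedMessage implementation above lower-cases header names, files the request or status line under the reserved key "_", and offers typed lookups plus "key=value;key=value" attribute helpers. A short usage sketch against that API; the message text and names are invented for illustration, and the ParsedMessage/AString headers plus <string.h> are assumed to be on the include path:

    static void parsedMessageExample() {
        const char *data =
                "RTSP/1.0 200 OK\r\n"
                "CSeq: 2\r\n"
                "Content-Length: 0\r\n"
                "\r\n";

        size_t length;
        sp<ParsedMessage> msg =
                ParsedMessage::Parse(data, strlen(data), true /* noMoreData */, &length);

        if (msg != NULL) {
            int32_t cseq = 0, statusCode = 0;
            msg->findInt32("cseq", &cseq);      // header names match case-insensitively
            msg->getStatusCode(&statusCode);    // taken from the "_" request/status line

            AString value;
            // Attribute lists of the form "key=value;key=value":
            ParsedMessage::GetAttribute("a=1;b=2", "b", &value);   // value becomes "2"
        }
    }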
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "NetworkSession" -#include - -#include "ANetworkSession.h" -#include "ParsedMessage.h" - -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include - -namespace android { - -static const size_t kMaxUDPSize = 1500; -static const int32_t kMaxUDPRetries = 200; - -struct ANetworkSession::NetworkThread : public Thread { - NetworkThread(ANetworkSession *session); - -protected: - virtual ~NetworkThread(); - -private: - ANetworkSession *mSession; - - virtual bool threadLoop(); - - DISALLOW_EVIL_CONSTRUCTORS(NetworkThread); -}; - -struct ANetworkSession::Session : public RefBase { - enum State { - CONNECTING, - CONNECTED, - LISTENING_RTSP, - LISTENING_TCP_DGRAMS, - DATAGRAM, - }; - - Session(int32_t sessionID, - State state, - int s, - const sp ¬ify); - - int32_t sessionID() const; - int socket() const; - sp getNotificationMessage() const; - - bool isRTSPServer() const; - bool isTCPDatagramServer() const; - - bool wantsToRead(); - bool wantsToWrite(); - - status_t readMore(); - status_t writeMore(); - - status_t sendRequest( - const void *data, ssize_t size, bool timeValid, int64_t timeUs); - - void setIsRTSPConnection(bool yesno); - -protected: - virtual ~Session(); - -private: - enum { - FRAGMENT_FLAG_TIME_VALID = 1, - }; - struct Fragment { - uint32_t mFlags; - int64_t mTimeUs; - sp mBuffer; - }; - - int32_t mSessionID; - State mState; - bool mIsRTSPConnection; - int mSocket; - sp mNotify; - bool mSawReceiveFailure, mSawSendFailure; - int32_t mUDPRetries; - - List mOutFragments; - - AString mInBuffer; - - int64_t mLastStallReportUs; - - void notifyError(bool send, status_t err, const char *detail); - void notify(NotificationReason reason); - - void dumpFragmentStats(const Fragment &frag); - - DISALLOW_EVIL_CONSTRUCTORS(Session); -}; -//////////////////////////////////////////////////////////////////////////////// - -ANetworkSession::NetworkThread::NetworkThread(ANetworkSession *session) - : mSession(session) { -} - -ANetworkSession::NetworkThread::~NetworkThread() { -} - -bool ANetworkSession::NetworkThread::threadLoop() { - mSession->threadLoop(); - - return true; -} - -//////////////////////////////////////////////////////////////////////////////// - -ANetworkSession::Session::Session( - int32_t sessionID, - State state, - int s, - const sp ¬ify) - : mSessionID(sessionID), - mState(state), - mIsRTSPConnection(false), - mSocket(s), - mNotify(notify), - mSawReceiveFailure(false), - mSawSendFailure(false), - mUDPRetries(kMaxUDPRetries), - mLastStallReportUs(-1ll) { - if (mState == CONNECTED) { - struct sockaddr_in localAddr; - socklen_t localAddrLen = sizeof(localAddr); - - int res = getsockname( - mSocket, (struct sockaddr *)&localAddr, &localAddrLen); - CHECK_GE(res, 0); - - struct sockaddr_in remoteAddr; - socklen_t remoteAddrLen = sizeof(remoteAddr); - - res = getpeername( - mSocket, (struct sockaddr *)&remoteAddr, &remoteAddrLen); - CHECK_GE(res, 0); - - in_addr_t addr = ntohl(localAddr.sin_addr.s_addr); - AString localAddrString = StringPrintf( - "%d.%d.%d.%d", - (addr >> 24), - (addr >> 16) & 0xff, - (addr >> 8) & 0xff, - addr & 0xff); - - addr = ntohl(remoteAddr.sin_addr.s_addr); - AString remoteAddrString = StringPrintf( - "%d.%d.%d.%d", - (addr >> 24), - (addr >> 16) & 0xff, - (addr >> 8) & 0xff, - addr & 0xff); - - sp msg = mNotify->dup(); - 
msg->setInt32("sessionID", mSessionID); - msg->setInt32("reason", kWhatClientConnected); - msg->setString("server-ip", localAddrString.c_str()); - msg->setInt32("server-port", ntohs(localAddr.sin_port)); - msg->setString("client-ip", remoteAddrString.c_str()); - msg->setInt32("client-port", ntohs(remoteAddr.sin_port)); - msg->post(); - } -} - -ANetworkSession::Session::~Session() { - ALOGV("Session %d gone", mSessionID); - - close(mSocket); - mSocket = -1; -} - -int32_t ANetworkSession::Session::sessionID() const { - return mSessionID; -} - -int ANetworkSession::Session::socket() const { - return mSocket; -} - -void ANetworkSession::Session::setIsRTSPConnection(bool yesno) { - mIsRTSPConnection = yesno; -} - -sp ANetworkSession::Session::getNotificationMessage() const { - return mNotify; -} - -bool ANetworkSession::Session::isRTSPServer() const { - return mState == LISTENING_RTSP; -} - -bool ANetworkSession::Session::isTCPDatagramServer() const { - return mState == LISTENING_TCP_DGRAMS; -} - -bool ANetworkSession::Session::wantsToRead() { - return !mSawReceiveFailure && mState != CONNECTING; -} - -bool ANetworkSession::Session::wantsToWrite() { - return !mSawSendFailure - && (mState == CONNECTING - || (mState == CONNECTED && !mOutFragments.empty()) - || (mState == DATAGRAM && !mOutFragments.empty())); -} - -status_t ANetworkSession::Session::readMore() { - if (mState == DATAGRAM) { - status_t err; - do { - sp buf = new ABuffer(kMaxUDPSize); - - struct sockaddr_in remoteAddr; - socklen_t remoteAddrLen = sizeof(remoteAddr); - - ssize_t n; - do { - n = recvfrom( - mSocket, buf->data(), buf->capacity(), 0, - (struct sockaddr *)&remoteAddr, &remoteAddrLen); - } while (n < 0 && errno == EINTR); - - err = OK; - if (n < 0) { - err = -errno; - } else if (n == 0) { - err = -ECONNRESET; - } else { - buf->setRange(0, n); - - int64_t nowUs = ALooper::GetNowUs(); - buf->meta()->setInt64("arrivalTimeUs", nowUs); - - sp notify = mNotify->dup(); - notify->setInt32("sessionID", mSessionID); - notify->setInt32("reason", kWhatDatagram); - - uint32_t ip = ntohl(remoteAddr.sin_addr.s_addr); - notify->setString( - "fromAddr", - StringPrintf( - "%u.%u.%u.%u", - ip >> 24, - (ip >> 16) & 0xff, - (ip >> 8) & 0xff, - ip & 0xff).c_str()); - - notify->setInt32("fromPort", ntohs(remoteAddr.sin_port)); - - notify->setBuffer("data", buf); - notify->post(); - } - } while (err == OK); - - if (err == -EAGAIN) { - err = OK; - } - - if (err != OK) { - if (!mUDPRetries) { - notifyError(false /* send */, err, "Recvfrom failed."); - mSawReceiveFailure = true; - } else { - mUDPRetries--; - ALOGE("Recvfrom failed, %d/%d retries left", - mUDPRetries, kMaxUDPRetries); - err = OK; - } - } else { - mUDPRetries = kMaxUDPRetries; - } - - return err; - } - - char tmp[512]; - ssize_t n; - do { - n = recv(mSocket, tmp, sizeof(tmp), 0); - } while (n < 0 && errno == EINTR); - - status_t err = OK; - - if (n > 0) { - mInBuffer.append(tmp, n); - -#if 0 - ALOGI("in:"); - hexdump(tmp, n); -#endif - } else if (n < 0) { - err = -errno; - } else { - err = -ECONNRESET; - } - - if (!mIsRTSPConnection) { - // TCP stream carrying 16-bit length-prefixed datagrams. 
- - while (mInBuffer.size() >= 2) { - size_t packetSize = U16_AT((const uint8_t *)mInBuffer.c_str()); - - if (mInBuffer.size() < packetSize + 2) { - break; - } - - sp packet = new ABuffer(packetSize); - memcpy(packet->data(), mInBuffer.c_str() + 2, packetSize); - - int64_t nowUs = ALooper::GetNowUs(); - packet->meta()->setInt64("arrivalTimeUs", nowUs); - - sp notify = mNotify->dup(); - notify->setInt32("sessionID", mSessionID); - notify->setInt32("reason", kWhatDatagram); - notify->setBuffer("data", packet); - notify->post(); - - mInBuffer.erase(0, packetSize + 2); - } - } else { - for (;;) { - size_t length; - - if (mInBuffer.size() > 0 && mInBuffer.c_str()[0] == '$') { - if (mInBuffer.size() < 4) { - break; - } - - length = U16_AT((const uint8_t *)mInBuffer.c_str() + 2); - - if (mInBuffer.size() < 4 + length) { - break; - } - - sp notify = mNotify->dup(); - notify->setInt32("sessionID", mSessionID); - notify->setInt32("reason", kWhatBinaryData); - notify->setInt32("channel", mInBuffer.c_str()[1]); - - sp data = new ABuffer(length); - memcpy(data->data(), mInBuffer.c_str() + 4, length); - - int64_t nowUs = ALooper::GetNowUs(); - data->meta()->setInt64("arrivalTimeUs", nowUs); - - notify->setBuffer("data", data); - notify->post(); - - mInBuffer.erase(0, 4 + length); - continue; - } - - sp msg = - ParsedMessage::Parse( - mInBuffer.c_str(), mInBuffer.size(), err != OK, &length); - - if (msg == NULL) { - break; - } - - sp notify = mNotify->dup(); - notify->setInt32("sessionID", mSessionID); - notify->setInt32("reason", kWhatData); - notify->setObject("data", msg); - notify->post(); - -#if 1 - // XXX The (old) dongle sends the wrong content length header on a - // SET_PARAMETER request that signals a "wfd_idr_request". - // (17 instead of 19). - const char *content = msg->getContent(); - if (content - && !memcmp(content, "wfd_idr_request\r\n", 17) - && length >= 19 - && mInBuffer.c_str()[length] == '\r' - && mInBuffer.c_str()[length + 1] == '\n') { - length += 2; - } -#endif - - mInBuffer.erase(0, length); - - if (err != OK) { - break; - } - } - } - - if (err != OK) { - notifyError(false /* send */, err, "Recv failed."); - mSawReceiveFailure = true; - } - - return err; -} - -void ANetworkSession::Session::dumpFragmentStats(const Fragment &frag) { -#if 0 - int64_t nowUs = ALooper::GetNowUs(); - int64_t delayMs = (nowUs - frag.mTimeUs) / 1000ll; - - static const int64_t kMinDelayMs = 0; - static const int64_t kMaxDelayMs = 300; - - const char *kPattern = "########################################"; - size_t kPatternSize = strlen(kPattern); - - int n = (kPatternSize * (delayMs - kMinDelayMs)) - / (kMaxDelayMs - kMinDelayMs); - - if (n < 0) { - n = 0; - } else if ((size_t)n > kPatternSize) { - n = kPatternSize; - } - - ALOGI("[%lld]: (%4lld ms) %s\n", - frag.mTimeUs / 1000, - delayMs, - kPattern + kPatternSize - n); -#endif -} - -status_t ANetworkSession::Session::writeMore() { - if (mState == DATAGRAM) { - CHECK(!mOutFragments.empty()); - - status_t err; - do { - const Fragment &frag = *mOutFragments.begin(); - const sp &datagram = frag.mBuffer; - - int n; - do { - n = send(mSocket, datagram->data(), datagram->size(), 0); - } while (n < 0 && errno == EINTR); - - err = OK; - - if (n > 0) { - if (frag.mFlags & FRAGMENT_FLAG_TIME_VALID) { - dumpFragmentStats(frag); - } - - mOutFragments.erase(mOutFragments.begin()); - } else if (n < 0) { - err = -errno; - } else if (n == 0) { - err = -ECONNRESET; - } - } while (err == OK && !mOutFragments.empty()); - - if (err == -EAGAIN) { - if 
(!mOutFragments.empty()) { - ALOGI("%d datagrams remain queued.", mOutFragments.size()); - } - err = OK; - } - - if (err != OK) { - if (!mUDPRetries) { - notifyError(true /* send */, err, "Send datagram failed."); - mSawSendFailure = true; - } else { - mUDPRetries--; - ALOGE("Send datagram failed, %d/%d retries left", - mUDPRetries, kMaxUDPRetries); - err = OK; - } - } else { - mUDPRetries = kMaxUDPRetries; - } - - return err; - } - - if (mState == CONNECTING) { - int err; - socklen_t optionLen = sizeof(err); - CHECK_EQ(getsockopt(mSocket, SOL_SOCKET, SO_ERROR, &err, &optionLen), 0); - CHECK_EQ(optionLen, (socklen_t)sizeof(err)); - - if (err != 0) { - notifyError(kWhatError, -err, "Connection failed"); - mSawSendFailure = true; - - return -err; - } - - mState = CONNECTED; - notify(kWhatConnected); - - return OK; - } - - CHECK_EQ(mState, CONNECTED); - CHECK(!mOutFragments.empty()); - - ssize_t n; - while (!mOutFragments.empty()) { - const Fragment &frag = *mOutFragments.begin(); - - do { - n = send(mSocket, frag.mBuffer->data(), frag.mBuffer->size(), 0); - } while (n < 0 && errno == EINTR); - - if (n <= 0) { - break; - } - - frag.mBuffer->setRange( - frag.mBuffer->offset() + n, frag.mBuffer->size() - n); - - if (frag.mBuffer->size() > 0) { - break; - } - - if (frag.mFlags & FRAGMENT_FLAG_TIME_VALID) { - dumpFragmentStats(frag); - } - - mOutFragments.erase(mOutFragments.begin()); - } - - status_t err = OK; - - if (n < 0) { - err = -errno; - } else if (n == 0) { - err = -ECONNRESET; - } - - if (err != OK) { - notifyError(true /* send */, err, "Send failed."); - mSawSendFailure = true; - } - -#if 0 - int numBytesQueued; - int res = ioctl(mSocket, SIOCOUTQ, &numBytesQueued); - if (res == 0 && numBytesQueued > 50 * 1024) { - if (numBytesQueued > 409600) { - ALOGW("!!! 
numBytesQueued = %d", numBytesQueued); - } - - int64_t nowUs = ALooper::GetNowUs(); - - if (mLastStallReportUs < 0ll - || nowUs > mLastStallReportUs + 100000ll) { - sp msg = mNotify->dup(); - msg->setInt32("sessionID", mSessionID); - msg->setInt32("reason", kWhatNetworkStall); - msg->setSize("numBytesQueued", numBytesQueued); - msg->post(); - - mLastStallReportUs = nowUs; - } - } -#endif - - return err; -} - -status_t ANetworkSession::Session::sendRequest( - const void *data, ssize_t size, bool timeValid, int64_t timeUs) { - CHECK(mState == CONNECTED || mState == DATAGRAM); - - if (size < 0) { - size = strlen((const char *)data); - } - - if (size == 0) { - return OK; - } - - sp buffer; - - if (mState == CONNECTED && !mIsRTSPConnection) { - CHECK_LE(size, 65535); - - buffer = new ABuffer(size + 2); - buffer->data()[0] = size >> 8; - buffer->data()[1] = size & 0xff; - memcpy(buffer->data() + 2, data, size); - } else { - buffer = new ABuffer(size); - memcpy(buffer->data(), data, size); - } - - Fragment frag; - - frag.mFlags = 0; - if (timeValid) { - frag.mFlags = FRAGMENT_FLAG_TIME_VALID; - frag.mTimeUs = timeUs; - } - - frag.mBuffer = buffer; - - mOutFragments.push_back(frag); - - return OK; -} - -void ANetworkSession::Session::notifyError( - bool send, status_t err, const char *detail) { - sp msg = mNotify->dup(); - msg->setInt32("sessionID", mSessionID); - msg->setInt32("reason", kWhatError); - msg->setInt32("send", send); - msg->setInt32("err", err); - msg->setString("detail", detail); - msg->post(); -} - -void ANetworkSession::Session::notify(NotificationReason reason) { - sp msg = mNotify->dup(); - msg->setInt32("sessionID", mSessionID); - msg->setInt32("reason", reason); - msg->post(); -} - -//////////////////////////////////////////////////////////////////////////////// - -ANetworkSession::ANetworkSession() - : mNextSessionID(1) { - mPipeFd[0] = mPipeFd[1] = -1; -} - -ANetworkSession::~ANetworkSession() { - stop(); -} - -status_t ANetworkSession::start() { - if (mThread != NULL) { - return INVALID_OPERATION; - } - - int res = pipe(mPipeFd); - if (res != 0) { - mPipeFd[0] = mPipeFd[1] = -1; - return -errno; - } - - mThread = new NetworkThread(this); - - status_t err = mThread->run("ANetworkSession", ANDROID_PRIORITY_AUDIO); - - if (err != OK) { - mThread.clear(); - - close(mPipeFd[0]); - close(mPipeFd[1]); - mPipeFd[0] = mPipeFd[1] = -1; - - return err; - } - - return OK; -} - -status_t ANetworkSession::stop() { - if (mThread == NULL) { - return INVALID_OPERATION; - } - - mThread->requestExit(); - interrupt(); - mThread->requestExitAndWait(); - - mThread.clear(); - - close(mPipeFd[0]); - close(mPipeFd[1]); - mPipeFd[0] = mPipeFd[1] = -1; - - return OK; -} - -status_t ANetworkSession::createRTSPClient( - const char *host, unsigned port, const sp ¬ify, - int32_t *sessionID) { - return createClientOrServer( - kModeCreateRTSPClient, - NULL /* addr */, - 0 /* port */, - host, - port, - notify, - sessionID); -} - -status_t ANetworkSession::createRTSPServer( - const struct in_addr &addr, unsigned port, - const sp ¬ify, int32_t *sessionID) { - return createClientOrServer( - kModeCreateRTSPServer, - &addr, - port, - NULL /* remoteHost */, - 0 /* remotePort */, - notify, - sessionID); -} - -status_t ANetworkSession::createUDPSession( - unsigned localPort, const sp ¬ify, int32_t *sessionID) { - return createUDPSession(localPort, NULL, 0, notify, sessionID); -} - -status_t ANetworkSession::createUDPSession( - unsigned localPort, - const char *remoteHost, - unsigned remotePort, - const sp ¬ify, - 
int32_t *sessionID) { - return createClientOrServer( - kModeCreateUDPSession, - NULL /* addr */, - localPort, - remoteHost, - remotePort, - notify, - sessionID); -} - -status_t ANetworkSession::createTCPDatagramSession( - const struct in_addr &addr, unsigned port, - const sp ¬ify, int32_t *sessionID) { - return createClientOrServer( - kModeCreateTCPDatagramSessionPassive, - &addr, - port, - NULL /* remoteHost */, - 0 /* remotePort */, - notify, - sessionID); -} - -status_t ANetworkSession::createTCPDatagramSession( - unsigned localPort, - const char *remoteHost, - unsigned remotePort, - const sp ¬ify, - int32_t *sessionID) { - return createClientOrServer( - kModeCreateTCPDatagramSessionActive, - NULL /* addr */, - localPort, - remoteHost, - remotePort, - notify, - sessionID); -} - -status_t ANetworkSession::destroySession(int32_t sessionID) { - Mutex::Autolock autoLock(mLock); - - ssize_t index = mSessions.indexOfKey(sessionID); - - if (index < 0) { - return -ENOENT; - } - - mSessions.removeItemsAt(index); - - interrupt(); - - return OK; -} - -// static -status_t ANetworkSession::MakeSocketNonBlocking(int s) { - int flags = fcntl(s, F_GETFL, 0); - if (flags < 0) { - flags = 0; - } - - int res = fcntl(s, F_SETFL, flags | O_NONBLOCK); - if (res < 0) { - return -errno; - } - - return OK; -} - -status_t ANetworkSession::createClientOrServer( - Mode mode, - const struct in_addr *localAddr, - unsigned port, - const char *remoteHost, - unsigned remotePort, - const sp ¬ify, - int32_t *sessionID) { - Mutex::Autolock autoLock(mLock); - - *sessionID = 0; - status_t err = OK; - int s, res; - sp session; - - s = socket( - AF_INET, - (mode == kModeCreateUDPSession) ? SOCK_DGRAM : SOCK_STREAM, - 0); - - if (s < 0) { - err = -errno; - goto bail; - } - - if (mode == kModeCreateRTSPServer - || mode == kModeCreateTCPDatagramSessionPassive) { - const int yes = 1; - res = setsockopt(s, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(yes)); - - if (res < 0) { - err = -errno; - goto bail2; - } - } - - if (mode == kModeCreateUDPSession) { - int size = 256 * 1024; - - res = setsockopt(s, SOL_SOCKET, SO_RCVBUF, &size, sizeof(size)); - - if (res < 0) { - err = -errno; - goto bail2; - } - - res = setsockopt(s, SOL_SOCKET, SO_SNDBUF, &size, sizeof(size)); - - if (res < 0) { - err = -errno; - goto bail2; - } - } else if (mode == kModeCreateTCPDatagramSessionActive) { - int flag = 1; - res = setsockopt(s, IPPROTO_TCP, TCP_NODELAY, &flag, sizeof(flag)); - - if (res < 0) { - err = -errno; - goto bail2; - } - - int tos = 224; // VOICE - res = setsockopt(s, IPPROTO_IP, IP_TOS, &tos, sizeof(tos)); - - if (res < 0) { - err = -errno; - goto bail2; - } - } - - err = MakeSocketNonBlocking(s); - - if (err != OK) { - goto bail2; - } - - struct sockaddr_in addr; - memset(addr.sin_zero, 0, sizeof(addr.sin_zero)); - addr.sin_family = AF_INET; - - if (mode == kModeCreateRTSPClient - || mode == kModeCreateTCPDatagramSessionActive) { - struct hostent *ent= gethostbyname(remoteHost); - if (ent == NULL) { - err = -h_errno; - goto bail2; - } - - addr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr; - addr.sin_port = htons(remotePort); - } else if (localAddr != NULL) { - addr.sin_addr = *localAddr; - addr.sin_port = htons(port); - } else { - addr.sin_addr.s_addr = htonl(INADDR_ANY); - addr.sin_port = htons(port); - } - - if (mode == kModeCreateRTSPClient - || mode == kModeCreateTCPDatagramSessionActive) { - in_addr_t x = ntohl(addr.sin_addr.s_addr); - ALOGI("connecting socket %d to %d.%d.%d.%d:%d", - s, - (x >> 24), - (x >> 16) & 0xff, - (x >> 8) & 
0xff, - x & 0xff, - ntohs(addr.sin_port)); - - res = connect(s, (const struct sockaddr *)&addr, sizeof(addr)); - - CHECK_LT(res, 0); - if (errno == EINPROGRESS) { - res = 0; - } - } else { - res = bind(s, (const struct sockaddr *)&addr, sizeof(addr)); - - if (res == 0) { - if (mode == kModeCreateRTSPServer - || mode == kModeCreateTCPDatagramSessionPassive) { - res = listen(s, 4); - } else { - CHECK_EQ(mode, kModeCreateUDPSession); - - if (remoteHost != NULL) { - struct sockaddr_in remoteAddr; - memset(remoteAddr.sin_zero, 0, sizeof(remoteAddr.sin_zero)); - remoteAddr.sin_family = AF_INET; - remoteAddr.sin_port = htons(remotePort); - - struct hostent *ent= gethostbyname(remoteHost); - if (ent == NULL) { - err = -h_errno; - goto bail2; - } - - remoteAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr; - - res = connect( - s, - (const struct sockaddr *)&remoteAddr, - sizeof(remoteAddr)); - } - } - } - } - - if (res < 0) { - err = -errno; - goto bail2; - } - - Session::State state; - switch (mode) { - case kModeCreateRTSPClient: - state = Session::CONNECTING; - break; - - case kModeCreateTCPDatagramSessionActive: - state = Session::CONNECTING; - break; - - case kModeCreateTCPDatagramSessionPassive: - state = Session::LISTENING_TCP_DGRAMS; - break; - - case kModeCreateRTSPServer: - state = Session::LISTENING_RTSP; - break; - - default: - CHECK_EQ(mode, kModeCreateUDPSession); - state = Session::DATAGRAM; - break; - } - - session = new Session( - mNextSessionID++, - state, - s, - notify); - - if (mode == kModeCreateTCPDatagramSessionActive) { - session->setIsRTSPConnection(false); - } else if (mode == kModeCreateRTSPClient) { - session->setIsRTSPConnection(true); - } - - mSessions.add(session->sessionID(), session); - - interrupt(); - - *sessionID = session->sessionID(); - - goto bail; - -bail2: - close(s); - s = -1; - -bail: - return err; -} - -status_t ANetworkSession::connectUDPSession( - int32_t sessionID, const char *remoteHost, unsigned remotePort) { - Mutex::Autolock autoLock(mLock); - - ssize_t index = mSessions.indexOfKey(sessionID); - - if (index < 0) { - return -ENOENT; - } - - const sp session = mSessions.valueAt(index); - int s = session->socket(); - - struct sockaddr_in remoteAddr; - memset(remoteAddr.sin_zero, 0, sizeof(remoteAddr.sin_zero)); - remoteAddr.sin_family = AF_INET; - remoteAddr.sin_port = htons(remotePort); - - status_t err = OK; - struct hostent *ent = gethostbyname(remoteHost); - if (ent == NULL) { - err = -h_errno; - } else { - remoteAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr; - - int res = connect( - s, - (const struct sockaddr *)&remoteAddr, - sizeof(remoteAddr)); - - if (res < 0) { - err = -errno; - } - } - - return err; -} - -status_t ANetworkSession::sendRequest( - int32_t sessionID, const void *data, ssize_t size, - bool timeValid, int64_t timeUs) { - Mutex::Autolock autoLock(mLock); - - ssize_t index = mSessions.indexOfKey(sessionID); - - if (index < 0) { - return -ENOENT; - } - - const sp session = mSessions.valueAt(index); - - status_t err = session->sendRequest(data, size, timeValid, timeUs); - - interrupt(); - - return err; -} - -void ANetworkSession::interrupt() { - static const char dummy = 0; - - ssize_t n; - do { - n = write(mPipeFd[1], &dummy, 1); - } while (n < 0 && errno == EINTR); - - if (n < 0) { - ALOGW("Error writing to pipe (%s)", strerror(errno)); - } -} - -void ANetworkSession::threadLoop() { - fd_set rs, ws; - FD_ZERO(&rs); - FD_ZERO(&ws); - - FD_SET(mPipeFd[0], &rs); - int maxFd = mPipeFd[0]; - - { - Mutex::Autolock autoLock(mLock); - - 
for (size_t i = 0; i < mSessions.size(); ++i) { - const sp &session = mSessions.valueAt(i); - - int s = session->socket(); - - if (s < 0) { - continue; - } - - if (session->wantsToRead()) { - FD_SET(s, &rs); - if (s > maxFd) { - maxFd = s; - } - } - - if (session->wantsToWrite()) { - FD_SET(s, &ws); - if (s > maxFd) { - maxFd = s; - } - } - } - } - - int res = select(maxFd + 1, &rs, &ws, NULL, NULL /* tv */); - - if (res == 0) { - return; - } - - if (res < 0) { - if (errno == EINTR) { - return; - } - - ALOGE("select failed w/ error %d (%s)", errno, strerror(errno)); - return; - } - - if (FD_ISSET(mPipeFd[0], &rs)) { - char c; - ssize_t n; - do { - n = read(mPipeFd[0], &c, 1); - } while (n < 0 && errno == EINTR); - - if (n < 0) { - ALOGW("Error reading from pipe (%s)", strerror(errno)); - } - - --res; - } - - { - Mutex::Autolock autoLock(mLock); - - List > sessionsToAdd; - - for (size_t i = mSessions.size(); res > 0 && i-- > 0;) { - const sp &session = mSessions.valueAt(i); - - int s = session->socket(); - - if (s < 0) { - continue; - } - - if (FD_ISSET(s, &rs) || FD_ISSET(s, &ws)) { - --res; - } - - if (FD_ISSET(s, &rs)) { - if (session->isRTSPServer() || session->isTCPDatagramServer()) { - struct sockaddr_in remoteAddr; - socklen_t remoteAddrLen = sizeof(remoteAddr); - - int clientSocket = accept( - s, (struct sockaddr *)&remoteAddr, &remoteAddrLen); - - if (clientSocket >= 0) { - status_t err = MakeSocketNonBlocking(clientSocket); - - if (err != OK) { - ALOGE("Unable to make client socket non blocking, " - "failed w/ error %d (%s)", - err, strerror(-err)); - - close(clientSocket); - clientSocket = -1; - } else { - in_addr_t addr = ntohl(remoteAddr.sin_addr.s_addr); - - ALOGI("incoming connection from %d.%d.%d.%d:%d " - "(socket %d)", - (addr >> 24), - (addr >> 16) & 0xff, - (addr >> 8) & 0xff, - addr & 0xff, - ntohs(remoteAddr.sin_port), - clientSocket); - - sp clientSession = - new Session( - mNextSessionID++, - Session::CONNECTED, - clientSocket, - session->getNotificationMessage()); - - clientSession->setIsRTSPConnection( - session->isRTSPServer()); - - sessionsToAdd.push_back(clientSession); - } - } else { - ALOGE("accept returned error %d (%s)", - errno, strerror(errno)); - } - } else { - status_t err = session->readMore(); - if (err != OK) { - ALOGE("readMore on socket %d failed w/ error %d (%s)", - s, err, strerror(-err)); - } - } - } - - if (FD_ISSET(s, &ws)) { - status_t err = session->writeMore(); - if (err != OK) { - ALOGE("writeMore on socket %d failed w/ error %d (%s)", - s, err, strerror(-err)); - } - } - } - - while (!sessionsToAdd.empty()) { - sp session = *sessionsToAdd.begin(); - sessionsToAdd.erase(sessionsToAdd.begin()); - - mSessions.add(session->sessionID(), session); - - ALOGI("added clientSession %d", session->sessionID()); - } - } -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/ANetworkSession.h b/media/libstagefright/wifi-display/ANetworkSession.h deleted file mode 100644 index 7c62b29..0000000 --- a/media/libstagefright/wifi-display/ANetworkSession.h +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
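Note: throughout both copies of this file, a Session reports activity by dup()ing the caller-supplied notify message, stamping it with "sessionID" and a "reason" code, attaching any payload, and posting it. A hypothetical consumer of those notifications might look like the sketch below; the function name is invented, and the foundation headers (AMessage, ABuffer, ADebug) plus the ANetworkSession header are assumed to be included:

    static void handleNetworkNotification(const sp<AMessage> &msg) {
        int32_t sessionID, reason;
        CHECK(msg->findInt32("sessionID", &sessionID));
        CHECK(msg->findInt32("reason", &reason));

        switch (reason) {
            case ANetworkSession::kWhatDatagram:
            {
                sp<ABuffer> packet;
                CHECK(msg->findBuffer("data", &packet));
                // packet->data() / packet->size() hold one datagram; for UDP
                // sessions "fromAddr" and "fromPort" identify the sender.
                break;
            }
            case ANetworkSession::kWhatError:
            {
                int32_t err, isSendError;
                CHECK(msg->findInt32("err", &err));
                CHECK(msg->findInt32("send", &isSendError));
                break;
            }
            default:
                break;
        }
    }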
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef A_NETWORK_SESSION_H_ - -#define A_NETWORK_SESSION_H_ - -#include -#include -#include -#include - -#include - -namespace android { - -struct AMessage; - -// Helper class to manage a number of live sockets (datagram and stream-based) -// on a single thread. Clients are notified about activity through AMessages. -struct ANetworkSession : public RefBase { - ANetworkSession(); - - status_t start(); - status_t stop(); - - status_t createRTSPClient( - const char *host, unsigned port, const sp ¬ify, - int32_t *sessionID); - - status_t createRTSPServer( - const struct in_addr &addr, unsigned port, - const sp ¬ify, int32_t *sessionID); - - status_t createUDPSession( - unsigned localPort, const sp ¬ify, int32_t *sessionID); - - status_t createUDPSession( - unsigned localPort, - const char *remoteHost, - unsigned remotePort, - const sp ¬ify, - int32_t *sessionID); - - status_t connectUDPSession( - int32_t sessionID, const char *remoteHost, unsigned remotePort); - - // passive - status_t createTCPDatagramSession( - const struct in_addr &addr, unsigned port, - const sp ¬ify, int32_t *sessionID); - - // active - status_t createTCPDatagramSession( - unsigned localPort, - const char *remoteHost, - unsigned remotePort, - const sp ¬ify, - int32_t *sessionID); - - status_t destroySession(int32_t sessionID); - - status_t sendRequest( - int32_t sessionID, const void *data, ssize_t size = -1, - bool timeValid = false, int64_t timeUs = -1ll); - - enum NotificationReason { - kWhatError, - kWhatConnected, - kWhatClientConnected, - kWhatData, - kWhatDatagram, - kWhatBinaryData, - kWhatNetworkStall, - }; - -protected: - virtual ~ANetworkSession(); - -private: - struct NetworkThread; - struct Session; - - Mutex mLock; - sp mThread; - - int32_t mNextSessionID; - - int mPipeFd[2]; - - KeyedVector > mSessions; - - enum Mode { - kModeCreateUDPSession, - kModeCreateTCPDatagramSessionPassive, - kModeCreateTCPDatagramSessionActive, - kModeCreateRTSPServer, - kModeCreateRTSPClient, - }; - status_t createClientOrServer( - Mode mode, - const struct in_addr *addr, - unsigned port, - const char *remoteHost, - unsigned remotePort, - const sp ¬ify, - int32_t *sessionID); - - void threadLoop(); - void interrupt(); - - static status_t MakeSocketNonBlocking(int s); - - DISALLOW_EVIL_CONSTRUCTORS(ANetworkSession); -}; - -} // namespace android - -#endif // A_NETWORK_SESSION_H_ diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index 404b41e..c7d107e 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -3,11 +3,9 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ - ANetworkSession.cpp \ MediaReceiver.cpp \ MediaSender.cpp \ Parameters.cpp \ - ParsedMessage.cpp \ rtp/RTPAssembler.cpp \ rtp/RTPReceiver.cpp \ rtp/RTPSender.cpp \ diff --git a/media/libstagefright/wifi-display/MediaReceiver.cpp b/media/libstagefright/wifi-display/MediaReceiver.cpp index 364acb9..5524235 100644 --- a/media/libstagefright/wifi-display/MediaReceiver.cpp +++ 
b/media/libstagefright/wifi-display/MediaReceiver.cpp @@ -20,13 +20,13 @@ #include "MediaReceiver.h" -#include "ANetworkSession.h" #include "AnotherPacketSource.h" #include "rtp/RTPReceiver.h" #include #include #include +#include #include #include diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp index a230cd8..b1cdec0 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -20,7 +20,6 @@ #include "MediaSender.h" -#include "ANetworkSession.h" #include "rtp/RTPSender.h" #include "source/TSPacketizer.h" @@ -31,6 +30,7 @@ #include #include #include +#include #include namespace android { diff --git a/media/libstagefright/wifi-display/ParsedMessage.cpp b/media/libstagefright/wifi-display/ParsedMessage.cpp deleted file mode 100644 index c0e60c3..0000000 --- a/media/libstagefright/wifi-display/ParsedMessage.cpp +++ /dev/null @@ -1,284 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include "ParsedMessage.h" - -#include -#include -#include - -namespace android { - -// static -sp ParsedMessage::Parse( - const char *data, size_t size, bool noMoreData, size_t *length) { - sp msg = new ParsedMessage; - ssize_t res = msg->parse(data, size, noMoreData); - - if (res < 0) { - *length = 0; - return NULL; - } - - *length = res; - return msg; -} - -ParsedMessage::ParsedMessage() { -} - -ParsedMessage::~ParsedMessage() { -} - -bool ParsedMessage::findString(const char *name, AString *value) const { - AString key = name; - key.tolower(); - - ssize_t index = mDict.indexOfKey(key); - - if (index < 0) { - value->clear(); - - return false; - } - - *value = mDict.valueAt(index); - return true; -} - -bool ParsedMessage::findInt32(const char *name, int32_t *value) const { - AString stringValue; - - if (!findString(name, &stringValue)) { - return false; - } - - char *end; - *value = strtol(stringValue.c_str(), &end, 10); - - if (end == stringValue.c_str() || *end != '\0') { - *value = 0; - return false; - } - - return true; -} - -const char *ParsedMessage::getContent() const { - return mContent.c_str(); -} - -ssize_t ParsedMessage::parse(const char *data, size_t size, bool noMoreData) { - if (size == 0) { - return -1; - } - - ssize_t lastDictIndex = -1; - - size_t offset = 0; - while (offset < size) { - size_t lineEndOffset = offset; - while (lineEndOffset + 1 < size - && (data[lineEndOffset] != '\r' - || data[lineEndOffset + 1] != '\n')) { - ++lineEndOffset; - } - - if (lineEndOffset + 1 >= size) { - return -1; - } - - AString line(&data[offset], lineEndOffset - offset); - - if (offset == 0) { - // Special handling for the request/status line. - - mDict.add(AString("_"), line); - offset = lineEndOffset + 2; - - continue; - } - - if (lineEndOffset == offset) { - offset += 2; - break; - } - - if (line.c_str()[0] == ' ' || line.c_str()[0] == '\t') { - // Support for folded header values. 
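Note: folded header values, handled just below, are continuation lines that begin with a space or tab and are appended to the previous header's value. For example, the two physical lines

    Session: 12345678;
      timeout=30

end up as a single "session" entry whose value is "12345678;  timeout=30": only leading and trailing whitespace is trimmed afterwards, while interior whitespace from the fold is kept.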
- - if (lastDictIndex >= 0) { - // Otherwise it's malformed since the first header line - // cannot continue anything... - - AString &value = mDict.editValueAt(lastDictIndex); - value.append(line); - } - - offset = lineEndOffset + 2; - continue; - } - - ssize_t colonPos = line.find(":"); - if (colonPos >= 0) { - AString key(line, 0, colonPos); - key.trim(); - key.tolower(); - - line.erase(0, colonPos + 1); - - lastDictIndex = mDict.add(key, line); - } - - offset = lineEndOffset + 2; - } - - for (size_t i = 0; i < mDict.size(); ++i) { - mDict.editValueAt(i).trim(); - } - - // Found the end of headers. - - int32_t contentLength; - if (!findInt32("content-length", &contentLength) || contentLength < 0) { - contentLength = 0; - } - - size_t totalLength = offset + contentLength; - - if (size < totalLength) { - return -1; - } - - mContent.setTo(&data[offset], contentLength); - - return totalLength; -} - -void ParsedMessage::getRequestField(size_t index, AString *field) const { - AString line; - CHECK(findString("_", &line)); - - size_t prevOffset = 0; - size_t offset = 0; - for (size_t i = 0; i <= index; ++i) { - ssize_t spacePos = line.find(" ", offset); - - if (spacePos < 0) { - spacePos = line.size(); - } - - prevOffset = offset; - offset = spacePos + 1; - } - - field->setTo(line, prevOffset, offset - prevOffset - 1); -} - -bool ParsedMessage::getStatusCode(int32_t *statusCode) const { - AString statusCodeString; - getRequestField(1, &statusCodeString); - - char *end; - *statusCode = strtol(statusCodeString.c_str(), &end, 10); - - if (*end != '\0' || end == statusCodeString.c_str() - || (*statusCode) < 100 || (*statusCode) > 999) { - *statusCode = 0; - return false; - } - - return true; -} - -AString ParsedMessage::debugString() const { - AString line; - CHECK(findString("_", &line)); - - line.append("\n"); - - for (size_t i = 0; i < mDict.size(); ++i) { - const AString &key = mDict.keyAt(i); - const AString &value = mDict.valueAt(i); - - if (key == AString("_")) { - continue; - } - - line.append(key); - line.append(": "); - line.append(value); - line.append("\n"); - } - - line.append("\n"); - line.append(mContent); - - return line; -} - -// static -bool ParsedMessage::GetAttribute( - const char *s, const char *key, AString *value) { - value->clear(); - - size_t keyLen = strlen(key); - - for (;;) { - while (isspace(*s)) { - ++s; - } - - const char *colonPos = strchr(s, ';'); - - size_t len = - (colonPos == NULL) ? 
strlen(s) : colonPos - s; - - if (len >= keyLen + 1 && s[keyLen] == '=' && !strncmp(s, key, keyLen)) { - value->setTo(&s[keyLen + 1], len - keyLen - 1); - return true; - } - - if (colonPos == NULL) { - return false; - } - - s = colonPos + 1; - } -} - -// static -bool ParsedMessage::GetInt32Attribute( - const char *s, const char *key, int32_t *value) { - AString stringValue; - if (!GetAttribute(s, key, &stringValue)) { - *value = 0; - return false; - } - - char *end; - *value = strtol(stringValue.c_str(), &end, 10); - - if (end == stringValue.c_str() || *end != '\0') { - *value = 0; - return false; - } - - return true; -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/ParsedMessage.h b/media/libstagefright/wifi-display/ParsedMessage.h deleted file mode 100644 index e9a1859..0000000 --- a/media/libstagefright/wifi-display/ParsedMessage.h +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include -#include -#include -#include - -namespace android { - -// Encapsulates an "HTTP/RTSP style" response, i.e. a status line, -// key/value pairs making up the headers and an optional body/content. -struct ParsedMessage : public RefBase { - static sp Parse( - const char *data, size_t size, bool noMoreData, size_t *length); - - bool findString(const char *name, AString *value) const; - bool findInt32(const char *name, int32_t *value) const; - - const char *getContent() const; - - void getRequestField(size_t index, AString *field) const; - bool getStatusCode(int32_t *statusCode) const; - - AString debugString() const; - - static bool GetAttribute(const char *s, const char *key, AString *value); - - static bool GetInt32Attribute( - const char *s, const char *key, int32_t *value); - - -protected: - virtual ~ParsedMessage(); - -private: - KeyedVector mDict; - AString mContent; - - ParsedMessage(); - - ssize_t parse(const char *data, size_t size, bool noMoreData); - - DISALLOW_EVIL_CONSTRUCTORS(ParsedMessage); -}; - -} // namespace android diff --git a/media/libstagefright/wifi-display/TimeSyncer.cpp b/media/libstagefright/wifi-display/TimeSyncer.cpp index cb429bc..0f4d93a 100644 --- a/media/libstagefright/wifi-display/TimeSyncer.cpp +++ b/media/libstagefright/wifi-display/TimeSyncer.cpp @@ -20,13 +20,12 @@ #include "TimeSyncer.h" -#include "ANetworkSession.h" - #include #include #include #include #include +#include #include namespace android { diff --git a/media/libstagefright/wifi-display/nettest.cpp b/media/libstagefright/wifi-display/nettest.cpp index 0779bf5..73c0d80 100644 --- a/media/libstagefright/wifi-display/nettest.cpp +++ b/media/libstagefright/wifi-display/nettest.cpp @@ -18,7 +18,6 @@ #define LOG_TAG "nettest" #include -#include "ANetworkSession.h" #include "TimeSyncer.h" #include @@ -27,6 +26,7 @@ #include #include #include +#include #include #include #include diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp 
b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp index 2d22e79..3b3bd63 100644 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp @@ -21,11 +21,10 @@ #include "RTPAssembler.h" #include "RTPReceiver.h" -#include "ANetworkSession.h" - #include #include #include +#include #include #include #include diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp index 6bbe650..1887b8b 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -20,11 +20,10 @@ #include "RTPSender.h" -#include "ANetworkSession.h" - #include #include #include +#include #include #include #include diff --git a/media/libstagefright/wifi-display/rtptest.cpp b/media/libstagefright/wifi-display/rtptest.cpp index 764a38b..b902f29 100644 --- a/media/libstagefright/wifi-display/rtptest.cpp +++ b/media/libstagefright/wifi-display/rtptest.cpp @@ -18,7 +18,6 @@ #define LOG_TAG "rtptest" #include -#include "ANetworkSession.h" #include "rtp/RTPSender.h" #include "rtp/RTPReceiver.h" #include "TimeSyncer.h" @@ -29,6 +28,7 @@ #include #include #include +#include #include #include #include diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp index 5db2099..bc88f1e 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp @@ -22,13 +22,13 @@ #include "DirectRenderer.h" #include "MediaReceiver.h" -#include "ParsedMessage.h" #include "TimeSyncer.h" #include #include #include #include +#include #include #include diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h index adb9d89..dc1fc32 100644 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h @@ -18,12 +18,11 @@ #define WIFI_DISPLAY_SINK_H_ -#include "ANetworkSession.h" - #include "VideoFormats.h" #include #include +#include namespace android { diff --git a/media/libstagefright/wifi-display/source/MediaPuller.cpp b/media/libstagefright/wifi-display/source/MediaPuller.cpp index 189bea3..7e8891d 100644 --- a/media/libstagefright/wifi-display/source/MediaPuller.cpp +++ b/media/libstagefright/wifi-display/source/MediaPuller.cpp @@ -93,6 +93,9 @@ void MediaPuller::onMessageReceived(const sp &msg) { err = mSource->start(params.get()); } else { err = mSource->start(); + if (err != OK) { + ALOGE("source failed to start w/ err %d", err); + } } if (err == OK) { diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index b421b35..4b59e62 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -21,7 +21,6 @@ #include "WifiDisplaySource.h" #include "PlaybackSession.h" #include "Parameters.h" -#include "ParsedMessage.h" #include "rtp/RTPSender.h" #include "TimeSyncer.h" @@ -33,6 +32,7 @@ #include #include #include +#include #include #include diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index 64186fc..4f11712 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ 
b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ -18,10 +18,10 @@ #define WIFI_DISPLAY_SOURCE_H_ -#include "ANetworkSession.h" #include "VideoFormats.h" #include +#include #include diff --git a/media/libstagefright/wifi-display/udptest.cpp b/media/libstagefright/wifi-display/udptest.cpp index 111846d..61eb9f9 100644 --- a/media/libstagefright/wifi-display/udptest.cpp +++ b/media/libstagefright/wifi-display/udptest.cpp @@ -18,11 +18,11 @@ #define LOG_TAG "udptest" #include -#include "ANetworkSession.h" #include "TimeSyncer.h" #include #include +#include namespace android { diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index 9fee4d0..4607606 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -175,7 +175,8 @@ static void createSource(const AString &addr, int32_t port) { iface.append(StringPrintf(":%d", port).c_str()); sp client = new RemoteDisplayClient; - sp display = service->listenForRemoteDisplay(client, iface); + sp display = + service->listenForRemoteDisplay(client, iface); client->waitUntilDone(); -- cgit v1.1 From 153b9fe667e6e78e0218ff0159353097428c7657 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 15 Jul 2013 11:23:36 -0700 Subject: Make AudioFlinger::instantiate() more resilient when called from separate module Bug: 8834855 Change-Id: I4cd842cdfb09d2aaaaab9df9ac3bec6179709bd3 --- services/audioflinger/Android.mk | 19 +-------- services/audioflinger/AudioFlinger.cpp | 1 + services/audioflinger/AudioMixer.cpp | 1 + services/audioflinger/AudioPolicyService.cpp | 1 + services/audioflinger/AudioWatchdog.cpp | 5 +++ services/audioflinger/Configuration.h | 47 ++++++++++++++++++++++ services/audioflinger/Effects.cpp | 1 + services/audioflinger/FastMixer.cpp | 7 ++++ services/audioflinger/FastMixerState.cpp | 1 + services/audioflinger/StateQueue.cpp | 1 + services/audioflinger/StateQueueInstantiations.cpp | 1 + services/audioflinger/Threads.cpp | 4 +- services/audioflinger/Tracks.cpp | 1 + 13 files changed, 69 insertions(+), 21 deletions(-) create mode 100644 services/audioflinger/Configuration.h diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index 714854e..54377f1 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -27,9 +27,6 @@ LOCAL_SRC_FILES:= \ LOCAL_SRC_FILES += StateQueue.cpp -# uncomment for debugging timing problems related to StateQueue::push() -LOCAL_CFLAGS += -DSTATE_QUEUE_DUMP - LOCAL_C_INCLUDES := \ $(call include-path-for, audio-effects) \ $(call include-path-for, audio-utils) @@ -56,24 +53,10 @@ LOCAL_STATIC_LIBRARIES := \ LOCAL_MODULE:= libaudioflinger -LOCAL_SRC_FILES += FastMixer.cpp FastMixerState.cpp - -LOCAL_CFLAGS += -DFAST_MIXER_STATISTICS - -# uncomment to display CPU load adjusted for CPU frequency -# LOCAL_CFLAGS += -DCPU_FREQUENCY_STATISTICS +LOCAL_SRC_FILES += FastMixer.cpp FastMixerState.cpp AudioWatchdog.cpp LOCAL_CFLAGS += -DSTATE_QUEUE_INSTANTIATIONS='"StateQueueInstantiations.cpp"' -LOCAL_CFLAGS += -UFAST_TRACKS_AT_NON_NATIVE_SAMPLE_RATE - -# uncomment to allow tee sink debugging to be enabled by property -# LOCAL_CFLAGS += -DTEE_SINK - -# uncomment to enable the audio watchdog -# LOCAL_SRC_FILES += AudioWatchdog.cpp -# LOCAL_CFLAGS += -DAUDIO_WATCHDOG - # Define ANDROID_SMP appropriately. Used to get inline tracing fast-path. 
 ifeq ($(TARGET_CPU_SMP),true)
     LOCAL_CFLAGS += -DANDROID_SMP=1
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 17a69fa..6a3007b 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -19,6 +19,7 @@
 #define LOG_TAG "AudioFlinger"
 //#define LOG_NDEBUG 0
 
+#include "Configuration.h"
 #include
 #include
 #include
diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp
index 7d38f80..df4e029 100644
--- a/services/audioflinger/AudioMixer.cpp
+++ b/services/audioflinger/AudioMixer.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "AudioMixer"
 //#define LOG_NDEBUG 0
 
+#include "Configuration.h"
 #include
 #include
 #include
diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp
index eacecf0..d192787 100644
--- a/services/audioflinger/AudioPolicyService.cpp
+++ b/services/audioflinger/AudioPolicyService.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "AudioPolicyService"
 //#define LOG_NDEBUG 0
 
+#include "Configuration.h"
 #undef __STRICT_ANSI__
 #define __STDINT_LIMITS
 #define __STDC_LIMIT_MACROS
diff --git a/services/audioflinger/AudioWatchdog.cpp b/services/audioflinger/AudioWatchdog.cpp
index 8f328ee..93d185e 100644
--- a/services/audioflinger/AudioWatchdog.cpp
+++ b/services/audioflinger/AudioWatchdog.cpp
@@ -17,9 +17,12 @@
 #define LOG_TAG "AudioWatchdog"
 //#define LOG_NDEBUG 0
 
+#include "Configuration.h"
 #include
 #include "AudioWatchdog.h"
 
+#ifdef AUDIO_WATCHDOG
+
 namespace android {
 
 void AudioWatchdogDump::dump(int fd)
@@ -132,3 +135,5 @@ void AudioWatchdog::setDump(AudioWatchdogDump *dump)
 }
 
 }   // namespace android
+
+#endif  // AUDIO_WATCHDOG
diff --git a/services/audioflinger/Configuration.h b/services/audioflinger/Configuration.h
new file mode 100644
index 0000000..bc2038a
--- /dev/null
+++ b/services/audioflinger/Configuration.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Put build-time configuration options here rather than Android.mk,
+// so that the instantiate for AudioFlinger service will pick up the same options.
+ +#ifndef ANDROID_AUDIOFLINGER_CONFIGURATION_H +#define ANDROID_AUDIOFLINGER_CONFIGURATION_H + +// uncomment to enable detailed battery usage reporting (not debugged) +//#define ADD_BATTERY_DATA + +// uncomment to enable the audio watchdog +//#define AUDIO_WATCHDOG + +// uncomment to display CPU load adjusted for CPU frequency +//#define CPU_FREQUENCY_STATISTICS + +// uncomment to enable fast mixer to take performance samples for later statistical analysis +#define FAST_MIXER_STATISTICS + +// uncomment to allow fast tracks at non-native sample rate +//#define FAST_TRACKS_AT_NON_NATIVE_SAMPLE_RATE + +// uncomment for debugging timing problems related to StateQueue::push() +//#define STATE_QUEUE_DUMP + +// uncomment to allow tee sink debugging to be enabled by property +//#define TEE_SINK + +// uncomment to log CPU statistics every n wall clock seconds +//#define DEBUG_CPU_USAGE 10 + +#endif // ANDROID_AUDIOFLINGER_CONFIGURATION_H diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp index 942ea35..1c7a64b 100644 --- a/services/audioflinger/Effects.cpp +++ b/services/audioflinger/Effects.cpp @@ -19,6 +19,7 @@ #define LOG_TAG "AudioFlinger" //#define LOG_NDEBUG 0 +#include "Configuration.h" #include #include #include diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 12e4683..819e8ec 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -25,6 +25,7 @@ #define ATRACE_TAG ATRACE_TAG_AUDIO +#include "Configuration.h" #include #include #include @@ -142,7 +143,9 @@ bool FastMixer::threadLoop() preIdle = *current; current = &preIdle; oldTsValid = false; +#ifdef FAST_MIXER_STATISTICS oldLoadValid = false; +#endif ignoreNextOverrun = true; } previous = current; @@ -182,8 +185,10 @@ bool FastMixer::threadLoop() warmupCycles = 0; sleepNs = -1; coldGen = current->mColdGen; +#ifdef FAST_MIXER_STATISTICS bounds = 0; full = false; +#endif oldTsValid = !clock_gettime(CLOCK_MONOTONIC, &oldTs); } else { sleepNs = FAST_HOT_IDLE_NS; @@ -614,6 +619,7 @@ FastMixerDumpState::FastMixerDumpState() : { mMeasuredWarmupTs.tv_sec = 0; mMeasuredWarmupTs.tv_nsec = 0; +#ifdef FAST_MIXER_STATISTICS // sample arrays aren't accessed atomically with respect to the bounds, // so clearing reduces chance for dumpsys to read random uninitialized samples memset(&mMonotonicNs, 0, sizeof(mMonotonicNs)); @@ -621,6 +627,7 @@ FastMixerDumpState::FastMixerDumpState() : #ifdef CPU_FREQUENCY_STATISTICS memset(&mCpukHz, 0, sizeof(mCpukHz)); #endif +#endif } FastMixerDumpState::~FastMixerDumpState() diff --git a/services/audioflinger/FastMixerState.cpp b/services/audioflinger/FastMixerState.cpp index c45c81b..737de97 100644 --- a/services/audioflinger/FastMixerState.cpp +++ b/services/audioflinger/FastMixerState.cpp @@ -14,6 +14,7 @@ * limitations under the License. 
*/ +#include "Configuration.h" #include "FastMixerState.h" namespace android { diff --git a/services/audioflinger/StateQueue.cpp b/services/audioflinger/StateQueue.cpp index 3e891a5..c2d3bbd 100644 --- a/services/audioflinger/StateQueue.cpp +++ b/services/audioflinger/StateQueue.cpp @@ -17,6 +17,7 @@ #define LOG_TAG "StateQueue" //#define LOG_NDEBUG 0 +#include "Configuration.h" #include #include #include diff --git a/services/audioflinger/StateQueueInstantiations.cpp b/services/audioflinger/StateQueueInstantiations.cpp index 077582f..0d5cd0c 100644 --- a/services/audioflinger/StateQueueInstantiations.cpp +++ b/services/audioflinger/StateQueueInstantiations.cpp @@ -14,6 +14,7 @@ * limitations under the License. */ +#include "Configuration.h" #include "FastMixerState.h" #include "StateQueue.h" diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index d4cd0ea..97a1e43 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -20,6 +20,7 @@ //#define LOG_NDEBUG 0 #define ATRACE_TAG ATRACE_TAG_AUDIO +#include "Configuration.h" #include #include #include @@ -54,14 +55,11 @@ #include "ServiceUtilities.h" #include "SchedulingPolicyService.h" -#undef ADD_BATTERY_DATA - #ifdef ADD_BATTERY_DATA #include #include #endif -// #define DEBUG_CPU_USAGE 10 // log statistics every n wall clock seconds #ifdef DEBUG_CPU_USAGE #include #include diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index f0dbee3..c45daae 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -19,6 +19,7 @@ #define LOG_TAG "AudioFlinger" //#define LOG_NDEBUG 0 +#include "Configuration.h" #include #include #include -- cgit v1.1 From 0d61251648b5110bfc33ef5b3d19bbf65db0a7b5 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 15 Jul 2013 16:09:26 -0700 Subject: Revert "Fix Audioflinger crash when TeeSink is enabled" This reverts commit 84e391686d7eced293913d1d7993721224ee0ba1. 
Bug: 8834855 Change-Id: I8211ef5ea5d87d97ada115723df31c8057f38ca8 --- services/audioflinger/AudioFlinger.h | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index b640b31..2df9173 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -594,12 +594,11 @@ private: status_t closeOutput_nonvirtual(audio_io_handle_t output); status_t closeInput_nonvirtual(audio_io_handle_t input); -// do not use #ifdef here, since AudioFlinger.h is included by more than one module -//#ifdef TEE_SINK +#ifdef TEE_SINK // all record threads serially share a common tee sink, which is re-created on format change sp mRecordTeeSink; sp mRecordTeeSource; -//#endif +#endif public: -- cgit v1.1 From 4182c4e2a07e2441fcd5c22eaff0ddfe7f826f61 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 15 Jul 2013 14:45:07 -0700 Subject: Use AudioSystem::setLowRamDevice() to configure memory Bug: 9798886 Change-Id: I9321e3f369f1ed9429ae222e3926ebdeb012b8b0 --- include/media/AudioSystem.h | 2 ++ include/media/IAudioFlinger.h | 4 ++++ media/libmedia/AudioSystem.cpp | 7 ++++++ media/libmedia/IAudioFlinger.cpp | 16 +++++++++++++ services/audioflinger/AudioFlinger.cpp | 22 +++++++++++++++++- services/audioflinger/AudioFlinger.h | 11 +++++++++ services/audioflinger/FastMixer.cpp | 42 +++++++++++++++++++++++----------- services/audioflinger/FastMixer.h | 19 ++++++++++++--- services/audioflinger/Threads.cpp | 4 +++- 9 files changed, 109 insertions(+), 18 deletions(-) diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index fb1d631..e7b85c0 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -246,6 +246,8 @@ public: static uint32_t getPrimaryOutputSamplingRate(); static size_t getPrimaryOutputFrameCount(); + static status_t setLowRamDevice(bool isLowRamDevice); + // Check if hw offload is possible for given format, stream type, sample rate, // bit rate, duration, video and streaming or offload property is enabled static bool isOffloadSupported(const audio_offload_info_t& info); diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index 0aa5870..de45aa8 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -197,6 +197,10 @@ public: virtual uint32_t getPrimaryOutputSamplingRate() = 0; virtual size_t getPrimaryOutputFrameCount() = 0; + // Intended for AudioService to inform AudioFlinger of device's low RAM attribute, + // and should be called at most once. For a definition of what "low RAM" means, see + // android.app.ActivityManager.isLowRamDevice(). 
+ virtual status_t setLowRamDevice(bool isLowRamDevice) = 0; }; diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 6b9b3be..0d59af0 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -772,6 +772,13 @@ size_t AudioSystem::getPrimaryOutputFrameCount() return af->getPrimaryOutputFrameCount(); } +status_t AudioSystem::setLowRamDevice(bool isLowRamDevice) +{ + const sp& af = AudioSystem::get_audio_flinger(); + if (af == 0) return PERMISSION_DENIED; + return af->setLowRamDevice(isLowRamDevice); +} + void AudioSystem::clearAudioConfigCache() { Mutex::Autolock _l(gLock); diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index 6bb7df6..2e2c0cc 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -73,6 +73,7 @@ enum { LOAD_HW_MODULE, GET_PRIMARY_OUTPUT_SAMPLING_RATE, GET_PRIMARY_OUTPUT_FRAME_COUNT, + SET_LOW_RAM_DEVICE, }; class BpAudioFlinger : public BpInterface @@ -698,6 +699,15 @@ public: return reply.readInt32(); } + virtual status_t setLowRamDevice(bool isLowRamDevice) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); + data.writeInt32((int) isLowRamDevice); + remote()->transact(SET_LOW_RAM_DEVICE, data, &reply); + return reply.readInt32(); + } + }; IMPLEMENT_META_INTERFACE(AudioFlinger, "android.media.IAudioFlinger"); @@ -1059,6 +1069,12 @@ status_t BnAudioFlinger::onTransact( reply->writeInt32(getPrimaryOutputFrameCount()); return NO_ERROR; } break; + case SET_LOW_RAM_DEVICE: { + CHECK_INTERFACE(IAudioFlinger, data, reply); + bool isLowRamDevice = data.readInt32() != 0; + reply->writeInt32(setLowRamDevice(isLowRamDevice)); + return NO_ERROR; + } break; default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 6a3007b..99e077c 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -62,6 +62,7 @@ #include #include #include +#include // ---------------------------------------------------------------------------- @@ -139,7 +140,9 @@ AudioFlinger::AudioFlinger() mMasterMute(false), mNextUniqueId(1), mMode(AUDIO_MODE_INVALID), - mBtNrecIsOff(false) + mBtNrecIsOff(false), + mIsLowRamDevice(true), + mIsDeviceTypeKnown(false) { getpid_cached = getpid(); char value[PROPERTY_VALUE_MAX]; @@ -1381,6 +1384,23 @@ size_t AudioFlinger::getPrimaryOutputFrameCount() // ---------------------------------------------------------------------------- +status_t AudioFlinger::setLowRamDevice(bool isLowRamDevice) +{ + uid_t uid = IPCThreadState::self()->getCallingUid(); + if (uid != AID_SYSTEM) { + return PERMISSION_DENIED; + } + Mutex::Autolock _l(mLock); + if (mIsDeviceTypeKnown) { + return INVALID_OPERATION; + } + mIsLowRamDevice = isLowRamDevice; + mIsDeviceTypeKnown = true; + return NO_ERROR; +} + +// ---------------------------------------------------------------------------- + audio_io_handle_t AudioFlinger::openOutput(audio_module_handle_t module, audio_devices_t *pDevices, uint32_t *pSamplingRate, diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 2df9173..f31619b 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -220,6 +220,8 @@ public: virtual uint32_t getPrimaryOutputSamplingRate(); virtual size_t getPrimaryOutputFrameCount(); + virtual status_t setLowRamDevice(bool isLowRamDevice); + virtual 
status_t onTransact( uint32_t code, const Parcel& data, @@ -623,6 +625,15 @@ public: static const size_t kTeeSinkTrackFramesDefault = 0x1000; #endif + // This method reads from a variable without mLock, but the variable is updated under mLock. So + // we might read a stale value, or a value that's inconsistent with respect to other variables. + // In this case, it's safe because the return value isn't used for making an important decision. + // The reason we don't want to take mLock is because it could block the caller for a long time. + bool isLowRamDevice() const { return mIsLowRamDevice; } + +private: + bool mIsLowRamDevice; + bool mIsDeviceTypeKnown; }; #undef INCLUDING_FROM_AUDIOFLINGER_H diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 819e8ec..5350e2c 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -83,7 +83,7 @@ bool FastMixer::threadLoop() struct timespec oldLoad = {0, 0}; // previous value of clock_gettime(CLOCK_THREAD_CPUTIME_ID) bool oldLoadValid = false; // whether oldLoad is valid uint32_t bounds = 0; - bool full = false; // whether we have collected at least kSamplingN samples + bool full = false; // whether we have collected at least mSamplingN samples #ifdef CPU_FREQUENCY_STATISTICS ThreadCpuUsage tcu; // for reading the current CPU clock frequency in kHz #endif @@ -534,11 +534,11 @@ bool FastMixer::threadLoop() #ifdef FAST_MIXER_STATISTICS if (isWarm) { // advance the FIFO queue bounds - size_t i = bounds & (FastMixerDumpState::kSamplingN - 1); + size_t i = bounds & (dumpState->mSamplingN - 1); bounds = (bounds & 0xFFFF0000) | ((bounds + 1) & 0xFFFF); if (full) { bounds += 0x10000; - } else if (!(bounds & (FastMixerDumpState::kSamplingN - 1))) { + } else if (!(bounds & (dumpState->mSamplingN - 1))) { full = true; } // compute the delta value of clock_gettime(CLOCK_MONOTONIC) @@ -608,27 +608,43 @@ bool FastMixer::threadLoop() // never return 'true'; Thread::_threadLoop() locks mutex which can result in priority inversion } -FastMixerDumpState::FastMixerDumpState() : +FastMixerDumpState::FastMixerDumpState( +#ifdef FAST_MIXER_STATISTICS + uint32_t samplingN +#endif + ) : mCommand(FastMixerState::INITIAL), mWriteSequence(0), mFramesWritten(0), mNumTracks(0), mWriteErrors(0), mUnderruns(0), mOverruns(0), mSampleRate(0), mFrameCount(0), /* mMeasuredWarmupTs({0, 0}), */ mWarmupCycles(0), mTrackMask(0) #ifdef FAST_MIXER_STATISTICS - , mBounds(0) + , mSamplingN(0), mBounds(0) #endif { mMeasuredWarmupTs.tv_sec = 0; mMeasuredWarmupTs.tv_nsec = 0; #ifdef FAST_MIXER_STATISTICS + increaseSamplingN(samplingN); +#endif +} + +#ifdef FAST_MIXER_STATISTICS +void FastMixerDumpState::increaseSamplingN(uint32_t samplingN) +{ + if (samplingN <= mSamplingN || samplingN > kSamplingN || roundup(samplingN) != samplingN) { + return; + } + uint32_t additional = samplingN - mSamplingN; // sample arrays aren't accessed atomically with respect to the bounds, // so clearing reduces chance for dumpsys to read random uninitialized samples - memset(&mMonotonicNs, 0, sizeof(mMonotonicNs)); - memset(&mLoadNs, 0, sizeof(mLoadNs)); + memset(&mMonotonicNs[mSamplingN], 0, sizeof(mMonotonicNs[0]) * additional); + memset(&mLoadNs[mSamplingN], 0, sizeof(mLoadNs[0]) * additional); #ifdef CPU_FREQUENCY_STATISTICS - memset(&mCpukHz, 0, sizeof(mCpukHz)); -#endif + memset(&mCpukHz[mSamplingN], 0, sizeof(mCpukHz[0]) * additional); #endif + mSamplingN = samplingN; } +#endif FastMixerDumpState::~FastMixerDumpState() { @@ -648,7 +664,7 
@@ static int compare_uint32_t(const void *pa, const void *pb) } } -void FastMixerDumpState::dump(int fd) +void FastMixerDumpState::dump(int fd) const { if (mCommand == FastMixerState::INITIAL) { fdprintf(fd, "FastMixer not initialized\n"); @@ -699,9 +715,9 @@ void FastMixerDumpState::dump(int fd) uint32_t newestOpen = bounds & 0xFFFF; uint32_t oldestClosed = bounds >> 16; uint32_t n = (newestOpen - oldestClosed) & 0xFFFF; - if (n > kSamplingN) { + if (n > mSamplingN) { ALOGE("too many samples %u", n); - n = kSamplingN; + n = mSamplingN; } // statistics for monotonic (wall clock) time, thread raw CPU load in time, CPU clock frequency, // and adjusted CPU load in MHz normalized for CPU clock frequency @@ -717,7 +733,7 @@ void FastMixerDumpState::dump(int fd) uint32_t *tail = n >= kTailDenominator ? new uint32_t[n] : NULL; // loop over all the samples for (uint32_t j = 0; j < n; ++j) { - size_t i = oldestClosed++ & (kSamplingN - 1); + size_t i = oldestClosed++ & (mSamplingN - 1); uint32_t wallNs = mMonotonicNs[i]; if (tail != NULL) { tail[j] = wallNs; diff --git a/services/audioflinger/FastMixer.h b/services/audioflinger/FastMixer.h index 2ab1d04..6158925 100644 --- a/services/audioflinger/FastMixer.h +++ b/services/audioflinger/FastMixer.h @@ -85,10 +85,14 @@ struct FastTrackDump { // Only POD types are permitted, and the contents shouldn't be trusted (i.e. do range checks). // It has a different lifetime than the FastMixer, and so it can't be a member of FastMixer. struct FastMixerDumpState { - FastMixerDumpState(); + FastMixerDumpState( +#ifdef FAST_MIXER_STATISTICS + uint32_t samplingN = kSamplingNforLowRamDevice +#endif + ); /*virtual*/ ~FastMixerDumpState(); - void dump(int fd); // should only be called on a stable copy, not the original + void dump(int fd) const; // should only be called on a stable copy, not the original FastMixerState::Command mCommand; // current command uint32_t mWriteSequence; // incremented before and after each write() @@ -106,8 +110,15 @@ struct FastMixerDumpState { #ifdef FAST_MIXER_STATISTICS // Recently collected samples of per-cycle monotonic time, thread CPU time, and CPU frequency. - // kSamplingN is the size of the sampling frame, and must be a power of 2 <= 0x8000. + // kSamplingN is max size of sampling frame (statistics), and must be a power of 2 <= 0x8000. + // The sample arrays are virtually allocated based on this compile-time constant, + // but are only initialized and used based on the runtime parameter mSamplingN. static const uint32_t kSamplingN = 0x8000; + // Compile-time constant for a "low RAM device", must be a power of 2 <= kSamplingN. + // This value was chosen such that each array uses 1 small page (4 Kbytes). + static const uint32_t kSamplingNforLowRamDevice = 0x400; + // Corresponding runtime maximum size of sample arrays, must be a power of 2 <= kSamplingN. 
+ uint32_t mSamplingN; // The bounds define the interval of valid samples, and are represented as follows: // newest open (excluded) endpoint = lower 16 bits of bounds, modulo N // oldest closed (included) endpoint = upper 16 bits of bounds, modulo N @@ -119,6 +130,8 @@ struct FastMixerDumpState { #ifdef CPU_FREQUENCY_STATISTICS uint32_t mCpukHz[kSamplingN]; // absolute CPU clock frequency in kHz, bits 0-3 are CPU# #endif + // Increase sampling window after construction, must be a power of 2 <= kSamplingN + void increaseSamplingN(uint32_t samplingN); #endif }; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 97a1e43..f27d908 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2285,6 +2285,8 @@ void AudioFlinger::MixerThread::threadLoop_write() #endif } state->mCommand = FastMixerState::MIX_WRITE; + mFastMixerDumpState.increaseSamplingN(mAudioFlinger->isLowRamDevice() ? + FastMixerDumpState::kSamplingNforLowRamDevice : FastMixerDumpState::kSamplingN); sq->end(); sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED); if (kUseFastMixer == FastMixer_Dynamic) { @@ -3085,7 +3087,7 @@ void AudioFlinger::MixerThread::dumpInternals(int fd, const Vector& ar write(fd, result.string(), result.size()); // Make a non-atomic copy of fast mixer dump state so it won't change underneath us - FastMixerDumpState copy = mFastMixerDumpState; + const FastMixerDumpState copy(mFastMixerDumpState); copy.dump(fd); #ifdef STATE_QUEUE_DUMP -- cgit v1.1 From 1b8ae3d9f3605fab7d14c323f2118ba3c769a42a Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 19 Jul 2013 10:32:41 -0700 Subject: Fix compile warning Change-Id: I80de4a013dc65eb7c532561438fd10e005354c03 --- services/audioflinger/AudioPolicyService.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp index eacecf0..07607e1 100644 --- a/services/audioflinger/AudioPolicyService.cpp +++ b/services/audioflinger/AudioPolicyService.cpp @@ -50,7 +50,7 @@ static const char kCmdDeadlockedString[] = "AudioPolicyService command thread ma static const int kDumpLockRetries = 50; static const int kDumpLockSleepUs = 20000; -static const nsecs_t kAudioCommandTimeout = 3000000000; // 3 seconds +static const nsecs_t kAudioCommandTimeout = 3000000000LL; // 3 seconds namespace { extern struct audio_policy_service_ops aps_ops; -- cgit v1.1 From a05822a368dfc8c220b413c3d23dcc1af58b4b5f Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 19 Jul 2013 11:32:07 -0700 Subject: Minor tweaks to DirectRenderer and Converter Converter now supports automatic prepending of SPS/PPS to IDR frames (h264) as well as using the encoder in "surface-input" mode. The new features are all opt-in and should not affect existing clients. 
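
For reference, the prepend operation is a plain concatenation of the codec-config buffer (csd-0, which carries the SPS/PPS NAL units for h264) with the IDR access unit. A minimal standalone sketch of that idea, using std::vector<uint8_t> instead of the sp<ABuffer> type used in this tree, with an illustrative function name that does not exist in the source:

    #include <cstdint>
    #include <vector>

    // Illustrative sketch only: mirrors the shape of the patch's prependCSD(),
    // not the actual implementation. csd0 holds the SPS/PPS emitted by the
    // encoder as codec config; idrAccessUnit is the encoded IDR frame.
    static std::vector<uint8_t> prependCodecConfig(
            const std::vector<uint8_t> &csd0,
            const std::vector<uint8_t> &idrAccessUnit) {
        std::vector<uint8_t> out;
        out.reserve(csd0.size() + idrAccessUnit.size());
        out.insert(out.end(), csd0.begin(), csd0.end());                    // SPS/PPS first
        out.insert(out.end(), idrAccessUnit.begin(), idrAccessUnit.end());  // then the IDR frame
        return out;  // the caller carries over the original access unit's timestamp
    }
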
Change-Id: I543cf1d31ba068c1a01ab4e6814ac8d817b63faa --- .../wifi-display/sink/DirectRenderer.cpp | 50 ++++++-- .../wifi-display/sink/DirectRenderer.h | 7 ++ .../wifi-display/source/Converter.cpp | 139 +++++++++++++++++---- .../libstagefright/wifi-display/source/Converter.h | 63 ++++++---- .../wifi-display/source/PlaybackSession.cpp | 10 +- 5 files changed, 209 insertions(+), 60 deletions(-) diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp index 15f9c88..cdb2267 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp @@ -29,9 +29,8 @@ #include #include #include +#include #include -#include -#include namespace android { @@ -488,12 +487,38 @@ void DirectRenderer::onMessageReceived(const sp &msg) { break; } + case kWhatQueueAccessUnit: + onQueueAccessUnit(msg); + break; + + case kWhatSetFormat: + onSetFormat(msg); + break; + default: TRESPASS(); } } void DirectRenderer::setFormat(size_t trackIndex, const sp &format) { + sp msg = new AMessage(kWhatSetFormat, id()); + msg->setSize("trackIndex", trackIndex); + msg->setMessage("format", format); + msg->post(); +} + +void DirectRenderer::onSetFormat(const sp &msg) { + size_t trackIndex; + CHECK(msg->findSize("trackIndex", &trackIndex)); + + sp format; + CHECK(msg->findMessage("format", &format)); + + internalSetFormat(trackIndex, format); +} + +void DirectRenderer::internalSetFormat( + size_t trackIndex, const sp &format) { CHECK_LT(trackIndex, 2u); CHECK(mDecoderContext[trackIndex] == NULL); @@ -517,18 +542,21 @@ void DirectRenderer::setFormat(size_t trackIndex, const sp &format) { void DirectRenderer::queueAccessUnit( size_t trackIndex, const sp &accessUnit) { - CHECK_LT(trackIndex, 2u); + sp msg = new AMessage(kWhatQueueAccessUnit, id()); + msg->setSize("trackIndex", trackIndex); + msg->setBuffer("accessUnit", accessUnit); + msg->post(); +} - if (mDecoderContext[trackIndex] == NULL) { - CHECK_EQ(trackIndex, 0u); +void DirectRenderer::onQueueAccessUnit(const sp &msg) { + size_t trackIndex; + CHECK(msg->findSize("trackIndex", &trackIndex)); - sp format = new AMessage; - format->setString("mime", "video/avc"); - format->setInt32("width", 640); - format->setInt32("height", 360); + sp accessUnit; + CHECK(msg->findBuffer("accessUnit", &accessUnit)); - setFormat(trackIndex, format); - } + CHECK_LT(trackIndex, 2u); + CHECK(mDecoderContext[trackIndex] != NULL); mDecoderContext[trackIndex]->queueInputBuffer(accessUnit); } diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h index 1e7dc34..07c2170 100644 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.h +++ b/media/libstagefright/wifi-display/sink/DirectRenderer.h @@ -43,6 +43,8 @@ private: enum { kWhatDecoderNotify, kWhatRenderVideo, + kWhatQueueAccessUnit, + kWhatSetFormat, }; struct OutputInfo { @@ -72,6 +74,11 @@ private: void scheduleVideoRenderIfNecessary(); void onRenderVideo(); + void onSetFormat(const sp &msg); + void onQueueAccessUnit(const sp &msg); + + void internalSetFormat(size_t trackIndex, const sp &format); + DISALLOW_EVIL_CONSTRUCTORS(DirectRenderer); }; diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index 0214520..6f23854 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -21,6 +21,7 @@ 
#include "Converter.h" #include "MediaPuller.h" +#include "include/avc_utils.h" #include #include @@ -33,6 +34,8 @@ #include #include +#include + #include namespace android { @@ -40,12 +43,14 @@ namespace android { Converter::Converter( const sp ¬ify, const sp &codecLooper, - const sp &outputFormat) - : mInitCheck(NO_INIT), - mNotify(notify), + const sp &outputFormat, + uint32_t flags) + : mNotify(notify), mCodecLooper(codecLooper), mOutputFormat(outputFormat), + mFlags(flags), mIsVideo(false), + mIsH264(false), mIsPCMAudio(false), mNeedToManuallyPrependSPSPPS(false), mDoMoreWorkPending(false) @@ -55,21 +60,18 @@ Converter::Converter( #endif ,mPrevVideoBitrate(-1) ,mNumFramesToDrop(0) + ,mEncodingSuspended(false) { AString mime; CHECK(mOutputFormat->findString("mime", &mime)); if (!strncasecmp("video/", mime.c_str(), 6)) { mIsVideo = true; + + mIsH264 = !strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC); } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime.c_str())) { mIsPCMAudio = true; } - - mInitCheck = initEncoder(); - - if (mInitCheck != OK) { - releaseEncoder(); - } } static void ReleaseMediaBufferReference(const sp &accessUnit) { @@ -118,8 +120,19 @@ void Converter::shutdownAsync() { (new AMessage(kWhatShutdown, id()))->post(); } -status_t Converter::initCheck() const { - return mInitCheck; +status_t Converter::init() { + status_t err = initEncoder(); + + if (err != OK) { + releaseEncoder(); + } + + return err; +} + +sp Converter::getGraphicBufferProducer() { + CHECK(mFlags & FLAG_USE_SURFACE_INPUT); + return mGraphicBufferProducer; } size_t Converter::getInputBufferCount() const { @@ -244,6 +257,16 @@ status_t Converter::initEncoder() { return err; } + if (mFlags & FLAG_USE_SURFACE_INPUT) { + CHECK(mIsVideo); + + err = mEncoder->createInputSurface(&mGraphicBufferProducer); + + if (err != OK) { + return err; + } + } + err = mEncoder->start(); if (err != OK) { @@ -256,7 +279,17 @@ status_t Converter::initEncoder() { return err; } - return mEncoder->getOutputBuffers(&mEncoderOutputBuffers); + err = mEncoder->getOutputBuffers(&mEncoderOutputBuffers); + + if (err != OK) { + return err; + } + + if (mFlags & FLAG_USE_SURFACE_INPUT) { + scheduleDoMoreWork(); + } + + return OK; } void Converter::notifyError(status_t err) { @@ -312,9 +345,12 @@ void Converter::onMessageReceived(const sp &msg) { sp accessUnit; CHECK(msg->findBuffer("accessUnit", &accessUnit)); - if (mIsVideo && mNumFramesToDrop) { - --mNumFramesToDrop; - ALOGI("dropping frame."); + if (mNumFramesToDrop > 0 || mEncodingSuspended) { + if (mNumFramesToDrop > 0) { + --mNumFramesToDrop; + ALOGI("dropping frame."); + } + ReleaseMediaBufferReference(accessUnit); break; } @@ -396,7 +432,7 @@ void Converter::onMessageReceived(const sp &msg) { } if (mIsVideo) { - ALOGI("requesting IDR frame"); + ALOGV("requesting IDR frame"); mEncoder->requestIDRFrame(); } break; @@ -411,6 +447,10 @@ void Converter::onMessageReceived(const sp &msg) { AString mime; CHECK(mOutputFormat->findString("mime", &mime)); ALOGI("encoder (%s) shut down.", mime.c_str()); + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatShutdownCompleted); + notify->post(); break; } @@ -431,6 +471,21 @@ void Converter::onMessageReceived(const sp &msg) { break; } + case kWhatSuspendEncoding: + { + int32_t suspend; + CHECK(msg->findInt32("suspend", &suspend)); + + mEncodingSuspended = suspend; + + if (mFlags & FLAG_USE_SURFACE_INPUT) { + sp params = new AMessage; + params->setInt32("drop-input-frames",suspend); + mEncoder->setParameters(params); + } + break; + } + 
default: TRESPASS(); } @@ -616,22 +671,39 @@ status_t Converter::feedEncoderInputBuffers() { return OK; } +sp Converter::prependCSD(const sp &accessUnit) const { + CHECK(mCSD0 != NULL); + + sp dup = new ABuffer(accessUnit->size() + mCSD0->size()); + memcpy(dup->data(), mCSD0->data(), mCSD0->size()); + memcpy(dup->data() + mCSD0->size(), accessUnit->data(), accessUnit->size()); + + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + + dup->meta()->setInt64("timeUs", timeUs); + + return dup; +} + status_t Converter::doMoreWork() { status_t err; - for (;;) { - size_t bufferIndex; - err = mEncoder->dequeueInputBuffer(&bufferIndex); + if (!(mFlags & FLAG_USE_SURFACE_INPUT)) { + for (;;) { + size_t bufferIndex; + err = mEncoder->dequeueInputBuffer(&bufferIndex); - if (err != OK) { - break; + if (err != OK) { + break; + } + + mAvailEncoderInputIndices.push_back(bufferIndex); } - mAvailEncoderInputIndices.push_back(bufferIndex); + feedEncoderInputBuffers(); } - feedEncoderInputBuffers(); - for (;;) { size_t bufferIndex; size_t offset; @@ -705,9 +777,19 @@ status_t Converter::doMoreWork() { if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) { if (!handle) { + if (mIsH264) { + mCSD0 = buffer; + } mOutputFormat->setBuffer("csd-0", buffer); } } else { + if (mNeedToManuallyPrependSPSPPS + && mIsH264 + && (mFlags & FLAG_PREPEND_CSD_IF_NECESSARY) + && IsIDR(buffer)) { + buffer = prependCSD(buffer); + } + sp notify = mNotify->dup(); notify->setInt32("what", kWhatAccessUnit); notify->setBuffer("accessUnit", buffer); @@ -732,9 +814,18 @@ void Converter::requestIDRFrame() { } void Converter::dropAFrame() { + // Unsupported in surface input mode. + CHECK(!(mFlags & FLAG_USE_SURFACE_INPUT)); + (new AMessage(kWhatDropAFrame, id()))->post(); } +void Converter::suspendEncoding(bool suspend) { + sp msg = new AMessage(kWhatSuspendEncoding, id()); + msg->setInt32("suspend", suspend); + msg->post(); +} + int32_t Converter::getVideoBitrate() const { return mPrevVideoBitrate; } diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h index 76c8b19..5876e07 100644 --- a/media/libstagefright/wifi-display/source/Converter.h +++ b/media/libstagefright/wifi-display/source/Converter.h @@ -18,13 +18,12 @@ #define CONVERTER_H_ -#include "WifiDisplaySource.h" - #include namespace android { struct ABuffer; +struct IGraphicBufferProducer; struct MediaCodec; #define ENABLE_SILENCE_DETECTION 0 @@ -33,11 +32,25 @@ struct MediaCodec; // media access unit of a different format. // Right now this'll convert raw video into H.264 and raw audio into AAC. 
struct Converter : public AHandler { + enum { + kWhatAccessUnit, + kWhatEOS, + kWhatError, + kWhatShutdownCompleted, + }; + + enum FlagBits { + FLAG_USE_SURFACE_INPUT = 1, + FLAG_PREPEND_CSD_IF_NECESSARY = 2, + }; Converter(const sp ¬ify, const sp &codecLooper, - const sp &outputFormat); + const sp &outputFormat, + uint32_t flags = 0); - status_t initCheck() const; + status_t init(); + + sp getGraphicBufferProducer(); size_t getInputBufferCount() const; @@ -50,22 +63,7 @@ struct Converter : public AHandler { void requestIDRFrame(); void dropAFrame(); - - enum { - kWhatAccessUnit, - kWhatEOS, - kWhatError, - }; - - enum { - kWhatDoMoreWork, - kWhatRequestIDRFrame, - kWhatShutdown, - kWhatMediaPullerNotify, - kWhatEncoderActivity, - kWhatDropAFrame, - kWhatReleaseOutputBuffer, - }; + void suspendEncoding(bool suspend); void shutdownAsync(); @@ -74,22 +72,40 @@ struct Converter : public AHandler { static int32_t GetInt32Property(const char *propName, int32_t defaultValue); + enum { + // MUST not conflict with private enums below. + kWhatMediaPullerNotify = 'pulN', + }; + protected: virtual ~Converter(); virtual void onMessageReceived(const sp &msg); private: - status_t mInitCheck; + enum { + kWhatDoMoreWork, + kWhatRequestIDRFrame, + kWhatSuspendEncoding, + kWhatShutdown, + kWhatEncoderActivity, + kWhatDropAFrame, + kWhatReleaseOutputBuffer, + }; + sp mNotify; sp mCodecLooper; sp mOutputFormat; + uint32_t mFlags; bool mIsVideo; + bool mIsH264; bool mIsPCMAudio; bool mNeedToManuallyPrependSPSPPS; sp mEncoder; sp mEncoderActivityNotify; + sp mGraphicBufferProducer; + Vector > mEncoderInputBuffers; Vector > mEncoderOutputBuffers; @@ -97,6 +113,8 @@ private: List > mInputBufferQueue; + sp mCSD0; + bool mDoMoreWorkPending; #if ENABLE_SILENCE_DETECTION @@ -109,6 +127,7 @@ private: int32_t mPrevVideoBitrate; int32_t mNumFramesToDrop; + bool mEncodingSuspended; status_t initEncoder(); void releaseEncoder(); @@ -127,6 +146,8 @@ private: static bool IsSilence(const sp &accessUnit); + sp prependCSD(const sp &accessUnit) const; + DISALLOW_EVIL_CONSTRUCTORS(Converter); }; diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index a15fbac..0aa4ee5 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -521,7 +521,7 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( if (mTracks.isEmpty()) { ALOGI("Reached EOS"); } - } else { + } else if (what != Converter::kWhatShutdownCompleted) { CHECK_EQ(what, Converter::kWhatError); status_t err; @@ -957,14 +957,16 @@ status_t WifiDisplaySource::PlaybackSession::addSource( sp converter = new Converter(notify, codecLooper, format); - err = converter->initCheck(); + looper()->registerHandler(converter); + + err = converter->init(); if (err != OK) { ALOGE("%s converter returned err %d", isVideo ? "video" : "audio", err); + + looper()->unregisterHandler(converter->id()); return err; } - looper()->registerHandler(converter); - notify = new AMessage(Converter::kWhatMediaPullerNotify, converter->id()); notify->setSize("trackIndex", trackIndex); -- cgit v1.1 From fe0799e8f1c5db22df3bafdfb9ec995f5494d260 Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Fri, 19 Jul 2013 13:18:43 -0700 Subject: camera2: Fix request ID bug Wrong request id was pushed in request list. 
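
The fix amounts to recording the id of the request that was actually submitted rather than a separately advancing counter. A minimal sketch of the corrected bookkeeping, with hypothetical names rather than the real CameraDeviceClient interface:

    #include <cstdint>
    #include <list>

    // Hypothetical, simplified version of the bookkeeping this patch corrects:
    // remember the id of the request being submitted, not the counter, which
    // may already have advanced past it.
    struct RequestTracker {
        std::list<int32_t> streamingRequestList;
        int32_t requestIdCounter = 0;

        int32_t submitRequest(bool streaming) {
            int32_t requestId = requestIdCounter++;
            if (streaming) {
                streamingRequestList.push_back(requestId);  // not requestIdCounter
            }
            return requestId;
        }
    };
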
Change-Id: I55bf7ae84ca41eec79db1a81d2cde35beaa2a6b9 --- services/camera/libcameraservice/photography/CameraDeviceClient.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp index a6a2dc1..e1c7e79 100644 --- a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp @@ -176,7 +176,7 @@ status_t CameraDeviceClient::submitRequest(sp request, ALOGE("%s: Camera %d: Got error %d after trying to set streaming " "request", __FUNCTION__, mCameraId, res); } else { - mStreamingRequestList.push_back(mRequestIdCounter); + mStreamingRequestList.push_back(requestId); } } else { res = mDevice->capture(metadata); -- cgit v1.1 From 336da16a12423c496efba6ca9813d5d42a1f70f6 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 19 Jul 2013 11:00:43 -0700 Subject: Enable support for explicitly requesting an encoded keyframe from the vp8 encoder. Change-Id: I370d5831f7d6037faf361a92521390f19f179cbe --- .../codecs/on2/enc/SoftVPXEncoder.cpp | 33 ++++++++++++++++++++-- .../libstagefright/codecs/on2/enc/SoftVPXEncoder.h | 5 ++++ 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp index d8456fe..5f2b5c8 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp @@ -149,7 +149,8 @@ SoftVPXEncoder::SoftVPXEncoder(const char *name, mLevel(OMX_VIDEO_VP8Level_Version0), mConversionBuffer(NULL), mInputDataIsMeta(false), - mGrallocModule(NULL) { + mGrallocModule(NULL), + mKeyFrameRequested(false) { initPorts(); } @@ -519,6 +520,27 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index, } } +OMX_ERRORTYPE SoftVPXEncoder::setConfig( + OMX_INDEXTYPE index, const OMX_PTR _params) { + switch (index) { + case OMX_IndexConfigVideoIntraVOPRefresh: + { + OMX_CONFIG_INTRAREFRESHVOPTYPE *params = + (OMX_CONFIG_INTRAREFRESHVOPTYPE *)_params; + + if (params->nPortIndex != kOutputPortIndex) { + return OMX_ErrorBadPortIndex; + } + + mKeyFrameRequested = params->IntraRefreshVOP; + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::setConfig(index, _params); + } +} + OMX_ERRORTYPE SoftVPXEncoder::internalSetProfileLevel( const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel) { if (profileAndLevel->nPortIndex != kOutputPortIndex) { @@ -750,12 +772,19 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { vpx_image_t raw_frame; vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, mWidth, mHeight, kInputBufferAlignment, source); + + vpx_enc_frame_flags_t flags = 0; + if (mKeyFrameRequested) { + flags |= VPX_EFLAG_FORCE_KF; + mKeyFrameRequested = false; + } + codec_return = vpx_codec_encode( mCodecContext, &raw_frame, inputBufferHeader->nTimeStamp, // in timebase units mFrameDurationUs, // frame duration in timebase units - 0, // frame flags + flags, // frame flags VPX_DL_REALTIME); // encoding deadline if (codec_return != VPX_CODEC_OK) { ALOGE("vpx encoder failed to encode frame"); diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h index d570154..4ee5e51 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h @@ -78,6 +78,9 @@ protected: 
virtual OMX_ERRORTYPE internalSetParameter( OMX_INDEXTYPE index, const OMX_PTR param); + virtual OMX_ERRORTYPE setConfig( + OMX_INDEXTYPE index, const OMX_PTR params); + // OMX callback when buffers available // Note that both an input and output buffer // is expected to be available to carry out @@ -163,6 +166,8 @@ private: bool mInputDataIsMeta; const hw_module_t *mGrallocModule; + bool mKeyFrameRequested; + // Initializes input and output OMX ports with sensible // default values. void initPorts(); -- cgit v1.1 From 5908f88a7e45380a9b0d71a3b1ea535d76c420b3 Mon Sep 17 00:00:00 2001 From: Chad Brubaker Date: Mon, 15 Jul 2013 21:17:03 -0700 Subject: Add routing sockets for the requesting user Mediaserver sockets are now routed as if the connection was in the requesting app in per user routing. Change-Id: I60f4649c3c4145a65264b54c1aa2c6c7741efaba --- media/libstagefright/Android.mk | 2 ++ media/libstagefright/HTTPBase.cpp | 12 ++++++++++++ media/libstagefright/include/HTTPBase.h | 3 +++ media/libstagefright/rtsp/ARTSPConnection.cpp | 6 ++++++ media/libstagefright/rtsp/MyHandler.h | 6 ++++++ 5 files changed, 29 insertions(+) diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index 9544dbc..90bf324 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -62,6 +62,7 @@ LOCAL_C_INCLUDES:= \ $(TOP)/frameworks/av/include/media/stagefright/timedtext \ $(TOP)/frameworks/native/include/media/hardware \ $(TOP)/frameworks/native/include/media/openmax \ + $(TOP)/frameworks/native/services/connectivitymanager \ $(TOP)/external/flac/include \ $(TOP)/external/tremolo \ $(TOP)/external/openssl/include \ @@ -69,6 +70,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_SHARED_LIBRARIES := \ libbinder \ libcamera_client \ + libconnectivitymanager \ libcutils \ libdl \ libdrmframework \ diff --git a/media/libstagefright/HTTPBase.cpp b/media/libstagefright/HTTPBase.cpp index d2cc6c2..5fa4b6f 100644 --- a/media/libstagefright/HTTPBase.cpp +++ b/media/libstagefright/HTTPBase.cpp @@ -30,6 +30,8 @@ #include #include +#include + namespace android { HTTPBase::HTTPBase() @@ -164,4 +166,14 @@ void HTTPBase::UnRegisterSocketUserTag(int sockfd) { } } +// static +void HTTPBase::RegisterSocketUserMark(int sockfd, uid_t uid) { + ConnectivityManager::markSocketAsUser(sockfd, uid); +} + +// static +void HTTPBase::UnRegisterSocketUserMark(int sockfd) { + RegisterSocketUserMark(sockfd, geteuid()); +} + } // namespace android diff --git a/media/libstagefright/include/HTTPBase.h b/media/libstagefright/include/HTTPBase.h index c2dc351..d4b7f9f 100644 --- a/media/libstagefright/include/HTTPBase.h +++ b/media/libstagefright/include/HTTPBase.h @@ -59,6 +59,9 @@ struct HTTPBase : public DataSource { static void RegisterSocketUserTag(int sockfd, uid_t uid, uint32_t kTag); static void UnRegisterSocketUserTag(int sockfd); + static void RegisterSocketUserMark(int sockfd, uid_t uid); + static void UnRegisterSocketUserMark(int sockfd); + protected: void addBandwidthMeasurement(size_t numBytes, int64_t delayUs); diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp index 3068541..906aef3 100644 --- a/media/libstagefright/rtsp/ARTSPConnection.cpp +++ b/media/libstagefright/rtsp/ARTSPConnection.cpp @@ -60,6 +60,7 @@ ARTSPConnection::~ARTSPConnection() { ALOGE("Connection is still open, closing the socket."); if (mUIDValid) { HTTPBase::UnRegisterSocketUserTag(mSocket); + HTTPBase::UnRegisterSocketUserMark(mSocket); } close(mSocket); mSocket = -1; @@ 
-214,6 +215,7 @@ void ARTSPConnection::onConnect(const sp &msg) { if (mState != DISCONNECTED) { if (mUIDValid) { HTTPBase::UnRegisterSocketUserTag(mSocket); + HTTPBase::UnRegisterSocketUserMark(mSocket); } close(mSocket); mSocket = -1; @@ -266,6 +268,7 @@ void ARTSPConnection::onConnect(const sp &msg) { if (mUIDValid) { HTTPBase::RegisterSocketUserTag(mSocket, mUID, (uint32_t)*(uint32_t*) "RTSP"); + HTTPBase::RegisterSocketUserMark(mSocket, mUID); } MakeSocketBlocking(mSocket, false); @@ -295,6 +298,7 @@ void ARTSPConnection::onConnect(const sp &msg) { if (mUIDValid) { HTTPBase::UnRegisterSocketUserTag(mSocket); + HTTPBase::UnRegisterSocketUserMark(mSocket); } close(mSocket); mSocket = -1; @@ -312,6 +316,7 @@ void ARTSPConnection::onConnect(const sp &msg) { void ARTSPConnection::performDisconnect() { if (mUIDValid) { HTTPBase::UnRegisterSocketUserTag(mSocket); + HTTPBase::UnRegisterSocketUserMark(mSocket); } close(mSocket); mSocket = -1; @@ -385,6 +390,7 @@ void ARTSPConnection::onCompleteConnection(const sp &msg) { mState = DISCONNECTED; if (mUIDValid) { HTTPBase::UnRegisterSocketUserTag(mSocket); + HTTPBase::UnRegisterSocketUserMark(mSocket); } close(mSocket); mSocket = -1; diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index e51d9e3..5e9ace2 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -712,7 +712,9 @@ struct MyHandler : public AHandler { // Clear the tag if (mUIDValid) { HTTPBase::UnRegisterSocketUserTag(track->mRTPSocket); + HTTPBase::UnRegisterSocketUserMark(track->mRTPSocket); HTTPBase::UnRegisterSocketUserTag(track->mRTCPSocket); + HTTPBase::UnRegisterSocketUserMark(track->mRTCPSocket); } close(track->mRTPSocket); @@ -843,7 +845,9 @@ struct MyHandler : public AHandler { // Clear the tag if (mUIDValid) { HTTPBase::UnRegisterSocketUserTag(info->mRTPSocket); + HTTPBase::UnRegisterSocketUserMark(info->mRTPSocket); HTTPBase::UnRegisterSocketUserTag(info->mRTCPSocket); + HTTPBase::UnRegisterSocketUserMark(info->mRTPCSocket); } close(info->mRTPSocket); @@ -1599,6 +1603,8 @@ private: (uint32_t)*(uint32_t*) "RTP_"); HTTPBase::RegisterSocketUserTag(info->mRTCPSocket, mUID, (uint32_t)*(uint32_t*) "RTP_"); + HTTPBase::RegisterSocketUserMark(info->mRTPSocket, mUID); + HTTPBase::RegisterSocketUserMark(info->mRTCPSocket, mUID); } request.append("Transport: RTP/AVP/UDP;unicast;client_port="); -- cgit v1.1 From 47110057829ba7d5c7b41fd2aa381b5f5ed811ca Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Mon, 22 Jul 2013 17:34:34 -0700 Subject: Camera: ProCamera connect need support HAL3 device Change-Id: Iae7e5f9e6387f61f1c4bbb91f75803fc1fdf2668 --- services/camera/libcameraservice/CameraService.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index c284a0d..0eb3e32 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -489,6 +489,7 @@ sp CameraService::connect( break; case CAMERA_DEVICE_API_VERSION_2_0: case CAMERA_DEVICE_API_VERSION_2_1: + case CAMERA_DEVICE_API_VERSION_3_0: client = new ProCamera2Client(this, cameraCb, String16(), cameraId, facing, callingPid, USE_CALLING_UID, getpid()); break; -- cgit v1.1 From 2ab500c632569e2f131a1a2288459933da70c4ee Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Tue, 23 Jul 2013 08:02:53 -0700 Subject: camera2: Implement ICameraDeviceUser::waitUntilIdle Also fixed some logging typo Change-Id: 
Ib254bdb137dca10b12595c23aeb1c53097423425 --- camera/photography/ICameraDeviceUser.cpp | 16 +++++++++++++++ include/camera/photography/ICameraDeviceUser.h | 2 ++ .../camera3/Camera3OutputStream.cpp | 2 +- .../photography/CameraDeviceClient.cpp | 24 ++++++++++++++++++++++ .../photography/CameraDeviceClient.h | 2 ++ 5 files changed, 45 insertions(+), 1 deletion(-) diff --git a/camera/photography/ICameraDeviceUser.cpp b/camera/photography/ICameraDeviceUser.cpp index 325f94d..95609da 100644 --- a/camera/photography/ICameraDeviceUser.cpp +++ b/camera/photography/ICameraDeviceUser.cpp @@ -40,6 +40,7 @@ enum { CREATE_STREAM, CREATE_DEFAULT_REQUEST, GET_CAMERA_INFO, + WAIT_UNTIL_IDLE, }; class BpCameraDeviceUser : public BpInterface @@ -172,6 +173,15 @@ public: return result; } + virtual status_t waitUntilIdle() + { + ALOGV("waitUntilIdle"); + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + remote()->transact(WAIT_UNTIL_IDLE, data, &reply); + reply.readExceptionCode(); + return reply.readInt32(); + } private: @@ -296,6 +306,12 @@ status_t BnCameraDeviceUser::onTransact( return NO_ERROR; } break; + case WAIT_UNTIL_IDLE: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + reply->writeNoException(); + reply->writeInt32(waitUntilIdle()); + return NO_ERROR; + } break; default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/include/camera/photography/ICameraDeviceUser.h b/include/camera/photography/ICameraDeviceUser.h index 3ea49f4..45988d0 100644 --- a/include/camera/photography/ICameraDeviceUser.h +++ b/include/camera/photography/ICameraDeviceUser.h @@ -61,6 +61,8 @@ public: virtual status_t getCameraInfo(/*out*/ CameraMetadata* info) = 0; + // Wait until all the submitted requests have finished processing + virtual status_t waitUntilIdle() = 0; }; // ---------------------------------------------------------------------------- diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp index f085443..0ec2b05 100644 --- a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp +++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp @@ -304,7 +304,7 @@ status_t Camera3OutputStream::configureQueueLocked() { ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__, maxConsumerBuffers, camera3_stream::max_buffers); if (camera3_stream::max_buffers == 0) { - ALOGE("%s: Camera HAL requested no max_buffers, requires at least 1", + ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1", __FUNCTION__, camera3_stream::max_buffers); return INVALID_OPERATION; } diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp index e1c7e79..dd845f6 100644 --- a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp @@ -412,6 +412,30 @@ status_t CameraDeviceClient::getCameraInfo(/*out*/CameraMetadata* info) return res; } +status_t CameraDeviceClient::waitUntilIdle() +{ + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + status_t res = OK; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + // FIXME: Also need check repeating burst. 
+ if (!mStreamingRequestList.isEmpty()) { + ALOGE("%s: Camera %d: Try to waitUntilIdle when there are active streaming requests", + __FUNCTION__, mCameraId); + return INVALID_OPERATION; + } + res = mDevice->waitUntilDrained(); + ALOGV("%s Done", __FUNCTION__); + + return res; +} + status_t CameraDeviceClient::dump(int fd, const Vector& args) { String8 result; result.appendFormat("CameraDeviceClient[%d] (%p) PID: %d, dump:\n", diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.h b/services/camera/libcameraservice/photography/CameraDeviceClient.h index c6c241a..bb2949c 100644 --- a/services/camera/libcameraservice/photography/CameraDeviceClient.h +++ b/services/camera/libcameraservice/photography/CameraDeviceClient.h @@ -87,6 +87,8 @@ public: // -- Caller owns the newly allocated metadata virtual status_t getCameraInfo(/*out*/CameraMetadata* info); + // Wait until all the submitted requests have finished processing + virtual status_t waitUntilIdle(); /** * Interface used by CameraService */ -- cgit v1.1 From 59d3f809024ae5b5a7ea35dcfdd056f1c7ca42b2 Mon Sep 17 00:00:00 2001 From: Chad Brubaker Date: Tue, 23 Jul 2013 11:09:19 -0700 Subject: Fix typo in socket name Change-Id: I29171368f1b69333ef7eae53ada2fab94e3e28b9 --- media/libstagefright/rtsp/MyHandler.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index 5e9ace2..946f602 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -847,7 +847,7 @@ struct MyHandler : public AHandler { HTTPBase::UnRegisterSocketUserTag(info->mRTPSocket); HTTPBase::UnRegisterSocketUserMark(info->mRTPSocket); HTTPBase::UnRegisterSocketUserTag(info->mRTCPSocket); - HTTPBase::UnRegisterSocketUserMark(info->mRTPCSocket); + HTTPBase::UnRegisterSocketUserMark(info->mRTCPSocket); } close(info->mRTPSocket); -- cgit v1.1 From f355f18aa2cc1706761e373fe19298a9ccc9c75a Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Mon, 22 Jul 2013 15:54:42 -0700 Subject: Remove obsolete drm manager code bug: 9545965 Change-Id: Ia6e09efa826b1349d027045e782980daeb7d7596 --- drm/common/IDrmManagerService.cpp | 25 ---------------------- drm/drmserver/DrmManager.cpp | 15 ------------- drm/drmserver/DrmManagerService.cpp | 5 ----- drm/libdrmframework/DrmManagerClientImpl.cpp | 9 -------- drm/libdrmframework/include/DrmManager.h | 2 -- drm/libdrmframework/include/DrmManagerClientImpl.h | 11 ---------- drm/libdrmframework/include/DrmManagerService.h | 2 -- drm/libdrmframework/include/IDrmManagerService.h | 4 ---- 8 files changed, 73 deletions(-) diff --git a/drm/common/IDrmManagerService.cpp b/drm/common/IDrmManagerService.cpp index 91fd91e..db41e0b 100644 --- a/drm/common/IDrmManagerService.cpp +++ b/drm/common/IDrmManagerService.cpp @@ -153,18 +153,6 @@ status_t BpDrmManagerService::setDrmServiceListener( return reply.readInt32(); } -status_t BpDrmManagerService::installDrmEngine(int uniqueId, const String8& drmEngineFile) { - ALOGV("Install DRM Engine"); - Parcel data, reply; - - data.writeInterfaceToken(IDrmManagerService::getInterfaceDescriptor()); - data.writeInt32(uniqueId); - data.writeString8(drmEngineFile); - - remote()->transact(INSTALL_DRM_ENGINE, data, &reply); - return reply.readInt32(); -} - DrmConstraints* BpDrmManagerService::getConstraints( int uniqueId, const String8* path, const int action) { ALOGV("Get Constraints"); @@ -855,19 +843,6 @@ status_t BnDrmManagerService::onTransact( return DRM_NO_ERROR; } - case 
INSTALL_DRM_ENGINE: - { - ALOGV("BnDrmManagerService::onTransact :INSTALL_DRM_ENGINE"); - CHECK_INTERFACE(IDrmManagerService, data, reply); - - const int uniqueId = data.readInt32(); - const String8 engineFile = data.readString8(); - status_t status = installDrmEngine(uniqueId, engineFile); - - reply->writeInt32(status); - return DRM_NO_ERROR; - } - case GET_CONSTRAINTS_FROM_CONTENT: { ALOGV("BnDrmManagerService::onTransact :GET_CONSTRAINTS_FROM_CONTENT"); diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp index bfaf4bc..dccd23d 100644 --- a/drm/drmserver/DrmManager.cpp +++ b/drm/drmserver/DrmManager.cpp @@ -175,21 +175,6 @@ DrmMetadata* DrmManager::getMetadata(int uniqueId, const String8* path) { return NULL; } -status_t DrmManager::installDrmEngine(int uniqueId, const String8& absolutePath) { - Mutex::Autolock _l(mLock); - mPlugInManager.loadPlugIn(absolutePath); - - IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(absolutePath); - rDrmEngine.initialize(uniqueId); - rDrmEngine.setOnInfoListener(uniqueId, this); - - DrmSupportInfo* info = rDrmEngine.getSupportInfo(0); - mSupportInfoToPlugInIdMap.add(*info, absolutePath); - delete info; - - return DRM_NO_ERROR; -} - bool DrmManager::canHandle(int uniqueId, const String8& path, const String8& mimeType) { Mutex::Autolock _l(mLock); const String8 plugInId = getSupportedPlugInId(mimeType); diff --git a/drm/drmserver/DrmManagerService.cpp b/drm/drmserver/DrmManagerService.cpp index bbd3b7f..2b71904 100644 --- a/drm/drmserver/DrmManagerService.cpp +++ b/drm/drmserver/DrmManagerService.cpp @@ -87,11 +87,6 @@ status_t DrmManagerService::setDrmServiceListener( return DRM_NO_ERROR; } -status_t DrmManagerService::installDrmEngine(int uniqueId, const String8& drmEngineFile) { - ALOGV("Entering installDrmEngine"); - return mDrmManager->installDrmEngine(uniqueId, drmEngineFile); -} - DrmConstraints* DrmManagerService::getConstraints( int uniqueId, const String8* path, const int action) { ALOGV("Entering getConstraints from content"); diff --git a/drm/libdrmframework/DrmManagerClientImpl.cpp b/drm/libdrmframework/DrmManagerClientImpl.cpp index a970035..ffefd74 100644 --- a/drm/libdrmframework/DrmManagerClientImpl.cpp +++ b/drm/libdrmframework/DrmManagerClientImpl.cpp @@ -86,15 +86,6 @@ status_t DrmManagerClientImpl::setOnInfoListener( (NULL != infoListener.get()) ? 
this : NULL); } -status_t DrmManagerClientImpl::installDrmEngine( - int uniqueId, const String8& drmEngineFile) { - status_t status = DRM_ERROR_UNKNOWN; - if (EMPTY_STRING != drmEngineFile) { - status = getDrmManagerService()->installDrmEngine(uniqueId, drmEngineFile); - } - return status; -} - DrmConstraints* DrmManagerClientImpl::getConstraints( int uniqueId, const String8* path, const int action) { DrmConstraints *drmConstraints = NULL; diff --git a/drm/libdrmframework/include/DrmManager.h b/drm/libdrmframework/include/DrmManager.h index 8ab693f..e7cdd36 100644 --- a/drm/libdrmframework/include/DrmManager.h +++ b/drm/libdrmframework/include/DrmManager.h @@ -70,8 +70,6 @@ public: status_t setDrmServiceListener( int uniqueId, const sp& drmServiceListener); - status_t installDrmEngine(int uniqueId, const String8& drmEngineFile); - DrmConstraints* getConstraints(int uniqueId, const String8* path, const int action); DrmMetadata* getMetadata(int uniqueId, const String8* path); diff --git a/drm/libdrmframework/include/DrmManagerClientImpl.h b/drm/libdrmframework/include/DrmManagerClientImpl.h index 9b4c9ae..3400cb1 100644 --- a/drm/libdrmframework/include/DrmManagerClientImpl.h +++ b/drm/libdrmframework/include/DrmManagerClientImpl.h @@ -410,17 +410,6 @@ public: status_t notify(const DrmInfoEvent& event); private: - /** - * Install new DRM Engine Plug-in at the runtime - * - * @param[in] uniqueId Unique identifier for a session - * @param[in] drmEngine Shared Object(so) File in which DRM Engine defined - * @return status_t - * Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure - */ - status_t installDrmEngine(int uniqueId, const String8& drmEngineFile); - -private: Mutex mLock; sp mOnInfoListener; diff --git a/drm/libdrmframework/include/DrmManagerService.h b/drm/libdrmframework/include/DrmManagerService.h index 0dfdca6..8bc59b4 100644 --- a/drm/libdrmframework/include/DrmManagerService.h +++ b/drm/libdrmframework/include/DrmManagerService.h @@ -57,8 +57,6 @@ public: status_t setDrmServiceListener( int uniqueId, const sp& drmServiceListener); - status_t installDrmEngine(int uniqueId, const String8& drmEngineFile); - DrmConstraints* getConstraints(int uniqueId, const String8* path, const int action); DrmMetadata* getMetadata(int uniqueId, const String8* path); diff --git a/drm/libdrmframework/include/IDrmManagerService.h b/drm/libdrmframework/include/IDrmManagerService.h index 5a4d70a..fe55650 100644 --- a/drm/libdrmframework/include/IDrmManagerService.h +++ b/drm/libdrmframework/include/IDrmManagerService.h @@ -93,8 +93,6 @@ public: virtual status_t setDrmServiceListener( int uniqueId, const sp& infoListener) = 0; - virtual status_t installDrmEngine(int uniqueId, const String8& drmEngineFile) = 0; - virtual DrmConstraints* getConstraints( int uniqueId, const String8* path, const int action) = 0; @@ -185,8 +183,6 @@ public: virtual status_t setDrmServiceListener( int uniqueId, const sp& infoListener); - virtual status_t installDrmEngine(int uniqueId, const String8& drmEngineFile); - virtual DrmConstraints* getConstraints(int uniqueId, const String8* path, const int action); virtual DrmMetadata* getMetadata(int uniqueId, const String8* path); -- cgit v1.1 From ba812e3b3ca0a0c9459fe29bbc211c9a73313b8b Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Tue, 23 Jul 2013 13:05:29 -0700 Subject: Don't abort on unusual state transition The state transition check was too strict, and we were crashing mediaserver inappropriately. 
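
The pattern applied here is to downgrade a fatal assertion on an unexpected-but-survivable state into a warning so that cleanup can still run. A minimal standalone sketch of that pattern, with hypothetical names (the real change is the removal of CHECK(mExecuting) in GraphicBufferSource::omxLoaded shown below):

#include <cstdio>
#include <mutex>

struct Component {
    std::mutex mMutex;
    bool mExecuting = false;

    void onLoaded() {
        std::lock_guard<std::mutex> lock(mMutex);
        if (!mExecuting) {
            // Something may have failed very early, before the component ever
            // reached Executing; warn and keep going so cleanup still runs.
            std::fprintf(stderr, "warning: dropped to Loaded without Executing\n");
        }
        // ... discard codec-related state as usual ...
        mExecuting = false;
    }
};
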
Bug 9819944 Change-Id: I1482ed1cfee37088d4893ee81cf1b2b950d2e930 --- media/libstagefright/omx/GraphicBufferSource.cpp | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp index bbd71be..d6fd95b 100644 --- a/media/libstagefright/omx/GraphicBufferSource.cpp +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -130,10 +130,12 @@ void GraphicBufferSource::omxExecuting() { void GraphicBufferSource::omxLoaded(){ Mutex::Autolock autoLock(mMutex); - ALOGV("--> loaded"); - CHECK(mExecuting); + if (!mExecuting) { + // This can happen if something failed very early. + ALOGW("Dropped back down to Loaded without Executing"); + } - ALOGV("Dropped down to loaded, avail=%d eos=%d eosSent=%d", + ALOGV("--> loaded; avail=%d eos=%d eosSent=%d", mNumFramesAvailable, mEndOfStream, mEndOfStreamSent); // Codec is no longer executing. Discard all codec-related state. -- cgit v1.1 From 1f5a90bc795475896044fcb1f74816c102851f06 Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Mon, 22 Jul 2013 12:23:07 -0700 Subject: Prototype screen recording command This records the screen contents as a movie. It works by feeding the output of a virtual display to the video/avc encoder. Recording continues until Ctrl-C is hit. Video only, no sound. Does not track screen rotations. Change-Id: I91d5c4e781792c740699b7a83590e846295b3617 --- cmds/screenrecord/Android.mk | 38 +++ cmds/screenrecord/screenrecord.cpp | 568 +++++++++++++++++++++++++++++++++++++ 2 files changed, 606 insertions(+) create mode 100644 cmds/screenrecord/Android.mk create mode 100644 cmds/screenrecord/screenrecord.cpp diff --git a/cmds/screenrecord/Android.mk b/cmds/screenrecord/Android.mk new file mode 100644 index 0000000..b4a5947 --- /dev/null +++ b/cmds/screenrecord/Android.mk @@ -0,0 +1,38 @@ +# Copyright 2013 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + screenrecord.cpp \ + +LOCAL_SHARED_LIBRARIES := \ + libstagefright libmedia libutils libbinder libstagefright_foundation \ + libjpeg libgui libcutils liblog + +LOCAL_C_INCLUDES := \ + frameworks/av/media/libstagefright \ + frameworks/av/media/libstagefright/include \ + $(TOP)/frameworks/native/include/media/openmax \ + external/jpeg + +LOCAL_CFLAGS += -Wno-multichar + +LOCAL_MODULE_TAGS := optional + +LOCAL_MODULE:= screenrecord + +include $(BUILD_EXECUTABLE) diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp new file mode 100644 index 0000000..3e79ee0 --- /dev/null +++ b/cmds/screenrecord/screenrecord.cpp @@ -0,0 +1,568 @@ +/* + * Copyright 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "ScreenRecord" +//#define LOG_NDEBUG 0 +#include + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +using namespace android; + +// Command-line parameters. +static bool gVerbose = false; // chatty on stdout +static bool gRotate = false; // rotate 90 degrees +static uint32_t gVideoWidth = 1280; // 720p +static uint32_t gVideoHeight = 720; +static uint32_t gBitRate = 4000000; // 4Mbps + +// Set by signal handler to stop recording. +static bool gStopRequested; + +// Previous signal handler state, restored after first hit. +static struct sigaction gOrigSigactionINT; +static struct sigaction gOrigSigactionHUP; + +static const uint32_t kMinBitRate = 100000; // 0.1Mbps +static const uint32_t kMaxBitRate = 100 * 1000000; // 100Mbps + +/* + * Catch keyboard interrupt signals. On receipt, the "stop requested" + * flag is raised, and the original handler is restored (so that, if + * we get stuck finishing, a second Ctrl-C will kill the process). + */ +static void signalCatcher(int signum) +{ + gStopRequested = true; + switch (signum) { + case SIGINT: + sigaction(SIGINT, &gOrigSigactionINT, NULL); + break; + case SIGHUP: + sigaction(SIGHUP, &gOrigSigactionHUP, NULL); + break; + default: + abort(); + break; + } +} + +/* + * Configures signal handlers. The previous handlers are saved. + * + * If the command is run from an interactive adb shell, we get SIGINT + * when Ctrl-C is hit. If we're run from the host, the local adb process + * gets the signal, and we get a SIGHUP when the terminal disconnects. + */ +static status_t configureSignals() +{ + struct sigaction act; + memset(&act, 0, sizeof(act)); + act.sa_handler = signalCatcher; + if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) { + status_t err = -errno; + fprintf(stderr, "Unable to configure SIGINT handler: %s\n", + strerror(errno)); + return err; + } + if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) { + status_t err = -errno; + fprintf(stderr, "Unable to configure SIGHUP handler: %s\n", + strerror(errno)); + return err; + } + return NO_ERROR; +} + +/* + * Configures and starts the MediaCodec encoder. Obtains an input surface + * from the codec. 
+ */ +static status_t prepareEncoder(float displayFps, sp* pCodec, + sp* pBufferProducer) { + status_t err; + + sp format = new AMessage; + format->setInt32("width", gVideoWidth); + format->setInt32("height", gVideoHeight); + format->setString("mime", "video/avc"); + format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque); + format->setInt32("bitrate", gBitRate); + format->setFloat("frame-rate", displayFps); + format->setInt32("i-frame-interval", 10); + + /// MediaCodec + sp looper = new ALooper; + looper->setName("screenrecord_looper"); + looper->start(); + ALOGV("Creating codec"); + sp codec = MediaCodec::CreateByType(looper, "video/avc", true); + err = codec->configure(format, NULL, NULL, + MediaCodec::CONFIGURE_FLAG_ENCODE); + if (err != NO_ERROR) { + fprintf(stderr, "ERROR: unable to configure codec (err=%d)\n", err); + return err; + } + + ALOGV("Creating buffer producer"); + sp bufferProducer; + err = codec->createInputSurface(&bufferProducer); + if (err != NO_ERROR) { + fprintf(stderr, + "ERROR: unable to create encoder input surface (err=%d)\n", err); + return err; + } + + ALOGV("Starting codec"); + err = codec->start(); + if (err != NO_ERROR) { + fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err); + return err; + } + + *pCodec = codec; + *pBufferProducer = bufferProducer; + return 0; +} + +/* + * Configures the virtual display. When this completes, virtual display + * frames will start being sent to the encoder's surface. + */ +static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo, + const sp& bufferProducer, + sp* pDisplayHandle) { + status_t err; + + // Set the region of the layer stack we're interested in, which in our + // case is "all of it". If the app is rotated (so that the width of the + // app is based on the height of the display), reverse width/height. + bool deviceRotated = mainDpyInfo.orientation != DISPLAY_ORIENTATION_0 && + mainDpyInfo.orientation != DISPLAY_ORIENTATION_180; + uint32_t sourceWidth, sourceHeight; + if (!deviceRotated) { + sourceWidth = mainDpyInfo.w; + sourceHeight = mainDpyInfo.h; + } else { + ALOGV("using rotated width/height"); + sourceHeight = mainDpyInfo.w; + sourceWidth = mainDpyInfo.h; + } + Rect layerStackRect(sourceWidth, sourceHeight); + + // We need to preserve the aspect ratio of the display. + float displayAspect = (float) sourceHeight / (float) sourceWidth; + + + // Set the way we map the output onto the display surface (which will + // be e.g. 1280x720 for a 720p video). The rect is interpreted + // post-rotation, so if the display is rotated 90 degrees we need to + // "pre-rotate" it by flipping width/height, so that the orientation + // adjustment changes it back. + // + // We might want to encode a portrait display as landscape to use more + // of the screen real estate. (If players respect a 90-degree rotation + // hint, we can essentially get a 720x1280 video instead of 1280x720.) + // In that case, we swap the configured video width/height and then + // supply a rotation value to the display projection. 
+ uint32_t videoWidth, videoHeight; + uint32_t outWidth, outHeight; + if (!gRotate) { + videoWidth = gVideoWidth; + videoHeight = gVideoHeight; + } else { + videoWidth = gVideoHeight; + videoHeight = gVideoWidth; + } + if (videoHeight > (uint32_t)(videoWidth * displayAspect)) { + // limited by narrow width; reduce height + outWidth = videoWidth; + outHeight = (uint32_t)(videoWidth * displayAspect); + } else { + // limited by short height; restrict width + outHeight = videoHeight; + outWidth = (uint32_t)(videoHeight / displayAspect); + } + uint32_t offX, offY; + offX = (videoWidth - outWidth) / 2; + offY = (videoHeight - outHeight) / 2; + Rect displayRect(offX, offY, offX + outWidth, offY + outHeight); + + if (gVerbose) { + if (gRotate) { + printf("Rotated content area is %ux%u at offset x=%d y=%d\n", + outHeight, outWidth, offY, offX); + } else { + printf("Content area is %ux%u at offset x=%d y=%d\n", + outWidth, outHeight, offX, offY); + } + } + + + sp dpy = SurfaceComposerClient::createDisplay( + String8("ScreenRecorder"), false /* secure */); + + SurfaceComposerClient::openGlobalTransaction(); + SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer); + SurfaceComposerClient::setDisplayProjection(dpy, + gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0, + layerStackRect, displayRect); + SurfaceComposerClient::setDisplayLayerStack(dpy, 0); // default stack + SurfaceComposerClient::closeGlobalTransaction(); + + *pDisplayHandle = dpy; + + return NO_ERROR; +} + +/* + * Runs the MediaCodec encoder, sending the output to the MediaMuxer. The + * input frames are coming from the virtual display as fast as SurfaceFlinger + * wants to send them. + * + * The muxer must *not* have been started before calling. + */ +static status_t runEncoder(const sp& encoder, + const sp& muxer) { + static int kTimeout = 250000; // be responsive on signal + status_t err; + ssize_t trackIdx = -1; + uint32_t debugNumFrames = 0; + time_t debugStartWhen = time(NULL); + + Vector > buffers; + err = encoder->getOutputBuffers(&buffers); + if (err != NO_ERROR) { + fprintf(stderr, "Unable to get output buffers (err=%d)\n", err); + return err; + } + + // This is set by the signal handler. + gStopRequested = false; + + // Run until we're signaled. + while (!gStopRequested) { + size_t bufIndex, offset, size; + int64_t ptsUsec; + uint32_t flags; + ALOGV("Calling dequeueOutputBuffer"); + err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec, + &flags, kTimeout); + ALOGV("dequeueOutputBuffer returned %d", err); + switch (err) { + case NO_ERROR: + // got a buffer + if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) { + // ignore this -- we passed the CSD into MediaMuxer when + // we got the format change notification + ALOGV("Got codec config buffer (%u bytes); ignoring", size); + size = 0; + } + if (size != 0) { + ALOGV("Got data in buffer %d, size=%d, pts=%lld", + bufIndex, size, ptsUsec); + CHECK(trackIdx != -1); + + // The MediaMuxer docs are unclear, but it appears that we + // need to pass either the full set of BufferInfo flags, or + // (flags & BUFFER_FLAG_SYNCFRAME). 
+ err = muxer->writeSampleData(buffers[bufIndex], trackIdx, + ptsUsec, flags); + if (err != NO_ERROR) { + fprintf(stderr, "Failed writing data to muxer (err=%d)\n", + err); + return err; + } + debugNumFrames++; + } + err = encoder->releaseOutputBuffer(bufIndex); + if (err != NO_ERROR) { + fprintf(stderr, "Unable to release output buffer (err=%d)\n", + err); + return err; + } + if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) { + // Not expecting EOS from SurfaceFlinger. Go with it. + ALOGD("Received end-of-stream"); + gStopRequested = false; + } + break; + case -EAGAIN: // INFO_TRY_AGAIN_LATER + // not expected with infinite timeout + ALOGV("Got -EAGAIN, looping"); + break; + case INFO_FORMAT_CHANGED: // INFO_OUTPUT_FORMAT_CHANGED + { + // format includes CSD, which we must provide to muxer + ALOGV("Encoder format changed"); + sp newFormat; + encoder->getOutputFormat(&newFormat); + trackIdx = muxer->addTrack(newFormat); + ALOGV("Starting muxer"); + err = muxer->start(); + if (err != NO_ERROR) { + fprintf(stderr, "Unable to start muxer (err=%d)\n", err); + return err; + } + } + break; + case INFO_OUTPUT_BUFFERS_CHANGED: // INFO_OUTPUT_BUFFERS_CHANGED + // not expected for an encoder; handle it anyway + ALOGV("Encoder buffers changed"); + err = encoder->getOutputBuffers(&buffers); + if (err != NO_ERROR) { + fprintf(stderr, + "Unable to get new output buffers (err=%d)\n", err); + } + break; + default: + ALOGW("Got weird result %d from dequeueOutputBuffer", err); + return err; + } + } + + ALOGV("Encoder stopping (req=%d)", gStopRequested); + if (gVerbose) { + printf("Encoder stopping; recorded %u frames in %ld seconds\n", + debugNumFrames, time(NULL) - debugStartWhen); + } + return NO_ERROR; +} + +/* + * Main "do work" method. + * + * Configures codec, muxer, and virtual display, then starts moving bits + * around. + */ +static status_t recordScreen(const char* fileName) { + status_t err; + + if (gVerbose) { + printf("Recording %dx%d video at %.2fMbps\n", + gVideoWidth, gVideoHeight, gBitRate / 1000000.0); + } + + // Configure signal handler. + err = configureSignals(); + if (err != NO_ERROR) return err; + + // Start Binder thread pool. MediaCodec needs to be able to receive + // messages from mediaserver. + sp self = ProcessState::self(); + self->startThreadPool(); + + // Get main display parameters. + sp mainDpy = SurfaceComposerClient::getBuiltInDisplay( + ISurfaceComposer::eDisplayIdMain); + DisplayInfo mainDpyInfo; + err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo); + if (err != NO_ERROR) { + fprintf(stderr, "ERROR: unable to get display characteristics\n"); + return err; + } + if (gVerbose) { + printf("Main display is %dx%d @%.2ffps (orientation=%u)\n", + mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps, + mainDpyInfo.orientation); + } + + // Configure and start the encoder. + sp encoder; + sp bufferProducer; + err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer); + if (err != NO_ERROR) return err; + + // Configure virtual display. + sp dpy; + err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy); + if (err != NO_ERROR) return err; + + // Configure, but do not start, muxer. + sp muxer = new MediaMuxer(fileName, + MediaMuxer::OUTPUT_FORMAT_MPEG_4); + if (gRotate) { + muxer->setOrientationHint(90); + } + + // Main encoder loop. + err = runEncoder(encoder, muxer); + if (err != NO_ERROR) return err; + + if (gVerbose) { + printf("Stopping encoder and muxer\n"); + } + + // Shut everything down. 
+ // + // The virtual display will continue to produce frames until "dpy" + // goes out of scope (and something causes the Binder traffic to transmit; + // can be forced with IPCThreadState::self()->flushCommands()). This + // could cause SurfaceFlinger to get stuck trying to feed us, so we want + // to set a NULL Surface to make the virtual display "dormant". + bufferProducer = NULL; + SurfaceComposerClient::openGlobalTransaction(); + SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer); + SurfaceComposerClient::closeGlobalTransaction(); + + encoder->stop(); + muxer->stop(); + encoder->release(); + + return 0; +} + +/* + * Parses a string of the form "1280x720". + * + * Returns true on success. + */ +static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth, + uint32_t* pHeight) { + long width, height; + char* end; + + // Must specify base 10, or "0x0" gets parsed differently. + width = strtol(widthHeight, &end, 10); + if (end == widthHeight || *end != 'x' || *(end+1) == '\0') { + // invalid chars in width, or missing 'x', or missing height + return false; + } + height = strtol(end + 1, &end, 10); + if (*end != '\0') { + // invalid chars in height + return false; + } + + *pWidth = width; + *pHeight = height; + return true; +} + +/* + * Dumps usage on stderr. + */ +static void usage() { + fprintf(stderr, + "Usage: screenrecord [options] \n" + "\n" + "Options:\n" + "--size WIDTHxHEIGHT\n" + " Set the video size, e.g. \"1280x720\". For best results, use\n" + " a size supported by the AVC encoder.\n" + "--bit-rate RATE\n" + " Set the video bit rate, in megabits per second. Default 4Mbps.\n" + "--rotate\n" + " Rotate the output 90 degrees. Useful for filling the frame\n" + " when in portrait mode.\n" + "--verbose\n" + " Display interesting information on stdout.\n" + "--help\n" + " Show this message.\n" + "\n" + "Recording continues until Ctrl-C is hit.\n" + "\n" + ); +} + +/* + * Parses args and kicks things off. + */ +int main(int argc, char* const argv[]) { + static const struct option longOptions[] = { + { "help", no_argument, NULL, 'h' }, + { "verbose", no_argument, NULL, 'v' }, + { "size", required_argument, NULL, 's' }, + { "bit-rate", required_argument, NULL, 'b' }, + { "rotate", no_argument, NULL, 'r' }, + { NULL, 0, NULL, 0 } + }; + + while (true) { + int optionIndex = 0; + int ic = getopt_long(argc, argv, "", longOptions, &optionIndex); + if (ic == -1) { + break; + } + + switch (ic) { + case 'h': + usage(); + return 0; + case 'v': + gVerbose = true; + break; + case 's': + if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) { + fprintf(stderr, "Invalid size '%s', must be width x height\n", + optarg); + return 2; + } + if (gVideoWidth == 0 || gVideoHeight == 0) { + fprintf(stderr, + "Invalid size %ux%u, width and height may not be zero\n", + gVideoWidth, gVideoHeight); + return 2; + } + break; + case 'b': + gBitRate = atoi(optarg); + if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) { + fprintf(stderr, + "Bit rate %dbps outside acceptable range [%d,%d]\n", + gBitRate, kMinBitRate, kMaxBitRate); + return 2; + } + break; + case 'r': + gRotate = true; + break; + default: + if (ic != '?') { + fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic); + } + return 2; + } + } + + if (optind != argc - 1) { + fprintf(stderr, "Must specify output file (see --help).\n"); + return 2; + } + + status_t err = recordScreen(argv[optind]); + ALOGD(err == NO_ERROR ? 
"success" : "failed"); + return (int) err; +} -- cgit v1.1 From 291bb6d8947c5b0c062f0895d623c529259bfa39 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 16 Jul 2013 17:23:39 -0700 Subject: AudioRecord and HAL input stream must be 16-bit PCM only Currently there are 16-bit PCM assumptions in several places for capture: - resampler API - mRsmpInBuffer and mRsmpOutBuffer - RecordThread::threadLoop upmix, downmix, and resampling - possibly other places Until those assumptions are removed, this CL enforces 16-bit PCM in both client and server at all places where a format is checked. Change-Id: I08b0570bff626ad0d341804825a72c14e61b4233 --- media/libmedia/AudioRecord.cpp | 5 +++++ services/audioflinger/AudioFlinger.cpp | 6 ++++++ services/audioflinger/Threads.cpp | 18 +++++++++++------- 3 files changed, 22 insertions(+), 7 deletions(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 8ae0908..603c16e 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -190,6 +190,11 @@ status_t AudioRecord::set( ALOGE("Invalid format %d", format); return BAD_VALUE; } + // Temporary restriction: AudioFlinger currently supports 16-bit PCM only + if (format != AUDIO_FORMAT_PCM_16_BIT) { + ALOGE("Format %d is not supported", format); + return BAD_VALUE; + } mFormat = format; if (!audio_is_input_channel(channelMask)) { diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 99e077c..bfd03b2 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1228,6 +1228,12 @@ sp AudioFlinger::openRecord( goto Exit; } + if (format != AUDIO_FORMAT_PCM_16_BIT) { + ALOGE("openRecord() invalid format %d", format); + lStatus = BAD_VALUE; + goto Exit; + } + // add client to list { // scope for mLock Mutex::Autolock _l(mLock); diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index f27d908..c3a922a 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -3737,8 +3737,7 @@ bool AudioFlinger::RecordThread::threadLoop() framesIn = framesOut; mRsmpInIndex += framesIn; framesOut -= framesIn; - if (mChannelCount == mReqChannelCount || - mFormat != AUDIO_FORMAT_PCM_16_BIT) { + if (mChannelCount == mReqChannelCount) { memcpy(dst, src, framesIn * mFrameSize); } else { if (mChannelCount == 1) { @@ -3752,9 +3751,7 @@ bool AudioFlinger::RecordThread::threadLoop() } if (framesOut && mFrameCount == mRsmpInIndex) { void *readInto; - if (framesOut == mFrameCount && - (mChannelCount == mReqChannelCount || - mFormat != AUDIO_FORMAT_PCM_16_BIT)) { + if (framesOut == mFrameCount && mChannelCount == mReqChannelCount) { readInto = buffer.raw; framesOut = 0; } else { @@ -4224,8 +4221,12 @@ bool AudioFlinger::RecordThread::checkForNewParameters_l() reconfig = true; } if (param.getInt(String8(AudioParameter::keyFormat), value) == NO_ERROR) { - reqFormat = (audio_format_t) value; - reconfig = true; + if ((audio_format_t) value != AUDIO_FORMAT_PCM_16_BIT) { + status = BAD_VALUE; + } else { + reqFormat = (audio_format_t) value; + reconfig = true; + } } if (param.getInt(String8(AudioParameter::keyChannels), value) == NO_ERROR) { reqChannelCount = popcount(value); @@ -4366,6 +4367,9 @@ void AudioFlinger::RecordThread::readInputParameters() mChannelMask = mInput->stream->common.get_channels(&mInput->stream->common); mChannelCount = (uint16_t)popcount(mChannelMask); mFormat = mInput->stream->common.get_format(&mInput->stream->common); + if 
(mFormat != AUDIO_FORMAT_PCM_16_BIT) { + ALOGE("HAL format %d not supported; must be AUDIO_FORMAT_PCM_16_BIT", mFormat); + } mFrameSize = audio_stream_frame_size(&mInput->stream->common); mInputBytes = mInput->stream->common.get_buffer_size(&mInput->stream->common); mFrameCount = mInputBytes / mFrameSize; -- cgit v1.1 From 92cb8f928dc9e237c356c942d10b5c0c1e04b2ae Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Wed, 24 Jul 2013 16:35:12 -0700 Subject: Update error message The color format used for surfaces has two different names. The one in the error message is the "native" name, which doesn't mean anything to external developers. Change-Id: Ic0561f4ad12970b0e0a60bd17b4e3997af1a9f0e --- media/libstagefright/omx/OMXNodeInstance.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index 61a866f..525e18d 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -596,7 +596,8 @@ status_t OMXNodeInstance::createInputSurface( CHECK(oerr == OMX_ErrorNone); if (def.format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque) { - ALOGE("createInputSurface requires AndroidOpaque color format"); + ALOGE("createInputSurface requires COLOR_FormatSurface " + "(AndroidOpaque) color format"); return INVALID_OPERATION; } -- cgit v1.1 From fad226abd12435dbcd232f7de396f1a097b2bd5f Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 16 Jul 2013 17:19:58 -0700 Subject: Use standard name and type for channel mask Former name 'channels' was ambiguous with respect to channel count. Change-Id: I716f792d95a7e0c787d27514ad6e93dbcef8a415 --- include/media/AudioSystem.h | 4 ++-- media/libmedia/AudioSystem.cpp | 8 ++++---- media/libmedia/IAudioFlingerClient.cpp | 4 ++-- services/audioflinger/Threads.cpp | 6 +++--- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index e7b85c0..f9e625e 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -155,11 +155,11 @@ public: class OutputDescriptor { public: OutputDescriptor() - : samplingRate(0), format(AUDIO_FORMAT_DEFAULT), channels(0), frameCount(0), latency(0) {} + : samplingRate(0), format(AUDIO_FORMAT_DEFAULT), channelMask(0), frameCount(0), latency(0) {} uint32_t samplingRate; int32_t format; - int32_t channels; + audio_channel_mask_t channelMask; size_t frameCount; uint32_t latency; }; diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 0d59af0..a571fe4 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -447,9 +447,9 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle OutputDescriptor *outputDesc = new OutputDescriptor(*desc); gOutputs.add(ioHandle, outputDesc); - ALOGV("ioConfigChanged() new output samplingRate %u, format %d channels %#x frameCount %u " + ALOGV("ioConfigChanged() new output samplingRate %u, format %d channel mask %#x frameCount %u " "latency %d", - outputDesc->samplingRate, outputDesc->format, outputDesc->channels, + outputDesc->samplingRate, outputDesc->format, outputDesc->channelMask, outputDesc->frameCount, outputDesc->latency); } break; case OUTPUT_CLOSED: { @@ -471,10 +471,10 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle if (param2 == NULL) break; desc = (const OutputDescriptor *)param2; - ALOGV("ioConfigChanged() new config for output %d samplingRate %u, 
format %d channels %#x " + ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %d channel mask %#x " "frameCount %d latency %d", ioHandle, desc->samplingRate, desc->format, - desc->channels, desc->frameCount, desc->latency); + desc->channelMask, desc->frameCount, desc->latency); OutputDescriptor *outputDesc = gOutputs.valueAt(index); delete outputDesc; outputDesc = new OutputDescriptor(*desc); diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp index 2d1e0f8..84a589a 100644 --- a/media/libmedia/IAudioFlingerClient.cpp +++ b/media/libmedia/IAudioFlingerClient.cpp @@ -54,7 +54,7 @@ public: (const AudioSystem::OutputDescriptor *)param2; data.writeInt32(desc->samplingRate); data.writeInt32(desc->format); - data.writeInt32(desc->channels); + data.writeInt32(desc->channelMask); data.writeInt32(desc->frameCount); data.writeInt32(desc->latency); } @@ -84,7 +84,7 @@ status_t BnAudioFlingerClient::onTransact( } else if (event != AudioSystem::OUTPUT_CLOSED && event != AudioSystem::INPUT_CLOSED) { desc.samplingRate = data.readInt32(); desc.format = data.readInt32(); - desc.channels = data.readInt32(); + desc.channelMask = (audio_channel_mask_t) data.readInt32(); desc.frameCount = data.readInt32(); desc.latency = data.readInt32(); param2 = &desc; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index f27d908..b6c8531 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1410,7 +1410,7 @@ void AudioFlinger::PlaybackThread::audioConfigChanged_l(int event, int param) { switch (event) { case AudioSystem::OUTPUT_OPENED: case AudioSystem::OUTPUT_CONFIG_CHANGED: - desc.channels = mChannelMask; + desc.channelMask = mChannelMask; desc.samplingRate = mSampleRate; desc.format = mFormat; desc.frameCount = mNormalFrameCount; // FIXME see @@ -2974,7 +2974,7 @@ bool AudioFlinger::MixerThread::checkForNewParameters_l() } } if (param.getInt(String8(AudioParameter::keyChannels), value) == NO_ERROR) { - if (value != AUDIO_CHANNEL_OUT_STEREO) { + if ((audio_channel_mask_t) value != AUDIO_CHANNEL_OUT_STEREO) { status = BAD_VALUE; } else { reconfig = true; @@ -4338,7 +4338,7 @@ void AudioFlinger::RecordThread::audioConfigChanged_l(int event, int param) { switch (event) { case AudioSystem::INPUT_OPENED: case AudioSystem::INPUT_CONFIG_CHANGED: - desc.channels = mChannelMask; + desc.channelMask = mChannelMask; desc.samplingRate = mSampleRate; desc.format = mFormat; desc.frameCount = mFrameCount; -- cgit v1.1 From 34542acfa25c6413c87a94b6f7cc315a0c496277 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 26 Jun 2013 11:29:02 -0700 Subject: Move local variable declarations to point of first use Change-Id: Ideb83dea2c3002651c34fa646753cba598e29e93 --- services/audioflinger/AudioFlinger.cpp | 3 +-- services/audioflinger/Threads.cpp | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 99e077c..0c11f3b 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1410,7 +1410,6 @@ audio_io_handle_t AudioFlinger::openOutput(audio_module_handle_t module, audio_output_flags_t flags, const audio_offload_info_t *offloadInfo) { - status_t status; PlaybackThread *thread = NULL; struct audio_config config; config.sample_rate = (pSamplingRate != NULL) ? 
*pSamplingRate : 0; @@ -1446,7 +1445,7 @@ audio_io_handle_t AudioFlinger::openOutput(audio_module_handle_t module, mHardwareStatus = AUDIO_HW_OUTPUT_OPEN; - status = hwDevHal->open_output_stream(hwDevHal, + status_t status = hwDevHal->open_output_stream(hwDevHal, id, *pDevices, (audio_output_flags_t)flags, diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index f27d908..4bcc058 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -3294,11 +3294,11 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep void AudioFlinger::DirectOutputThread::threadLoop_mix() { - AudioBufferProvider::Buffer buffer; size_t frameCount = mFrameCount; int8_t *curBuf = (int8_t *)mMixBuffer; // output audio to hardware while (frameCount) { + AudioBufferProvider::Buffer buffer; buffer.frameCount = frameCount; mActiveTrack->getNextBuffer(&buffer); if (CC_UNLIKELY(buffer.raw == NULL)) { -- cgit v1.1 From d8ea699dc8e7dac58bb32e9cdb31b0758da25817 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 16 Jul 2013 14:17:15 -0700 Subject: Simplify getParameters() Change-Id: Iedfeca3cd477d023c350d6d4e6eed874ee467a32 --- services/audioflinger/Threads.cpp | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index f27d908..941307b 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1385,16 +1385,13 @@ void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) String8 AudioFlinger::PlaybackThread::getParameters(const String8& keys) { - String8 out_s8 = String8(""); - char *s; - Mutex::Autolock _l(mLock); if (initCheck() != NO_ERROR) { - return out_s8; + return String8(); } - s = mOutput->stream->common.get_parameters(&mOutput->stream->common, keys.string()); - out_s8 = String8(s); + char *s = mOutput->stream->common.get_parameters(&mOutput->stream->common, keys.string()); + const String8 out_s8(s); free(s); return out_s8; } @@ -4317,16 +4314,13 @@ bool AudioFlinger::RecordThread::checkForNewParameters_l() String8 AudioFlinger::RecordThread::getParameters(const String8& keys) { - char *s; - String8 out_s8 = String8(); - Mutex::Autolock _l(mLock); if (initCheck() != NO_ERROR) { - return out_s8; + return String8(); } - s = mInput->stream->common.get_parameters(&mInput->stream->common, keys.string()); - out_s8 = String8(s); + char *s = mInput->stream->common.get_parameters(&mInput->stream->common, keys.string()); + const String8 out_s8(s); free(s); return out_s8; } -- cgit v1.1 From 7fc97ba08e2850f3f16db704b78ce78e3dbe1ff0 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 16 Jul 2013 17:18:58 -0700 Subject: HAL stream format for mixer output threads must be stereo 16-bit PCM Direct and tunnel output threads can support various HAL stream formats, included encoded. But currently there are stereo 16-bit PCM assumptions in several places for mixer and duplicating output threads: - mMixBuffer and mixBuffer() - AudioMixer including resampler - FastMixer's mixBuffer - effects - NBAIO_Format - anywhere FCC_2 is used - possibly other places Until those assumptions are removed, this CL enforces stereo 16-bit PCM in mixer and duplicating threads at the place where the HAL format is read. It was already being checked in checkForNewParameters_l(), but not in readOutputParameters(). 
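
A standalone sketch of the enforcement point, with hypothetical names and a plain abort standing in for the LOG_FATAL calls this patch adds to readOutputParameters:

#include <cstdint>
#include <cstdio>
#include <cstdlib>

enum class Format { PCM_16_BIT, PCM_8_BIT, ENCODED };

struct HalStreamParams {
    uint32_t channelMask;   // as reported by the HAL
    Format   format;
};

void readOutputParameters(const HalStreamParams& hal, bool isMixerOrDuplicating) {
    const uint32_t kStereoMask = 0x3;   // illustrative stereo channel mask
    // Check right where the HAL values are read, so a bad configuration fails
    // immediately instead of corrupting the fixed stereo/16-bit mix path later.
    if (isMixerOrDuplicating &&
        (hal.channelMask != kStereoMask || hal.format != Format::PCM_16_BIT)) {
        std::fprintf(stderr, "fatal: mixed output must be stereo 16-bit PCM\n");
        std::abort();   // the real code uses LOG_FATAL here
    }
    // ... frame size, frame count, etc. would be derived from hal here ...
}
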
Change-Id: Ibe344cc922743da234299097aa1bb1f54795cc9b --- services/audioflinger/FastMixer.cpp | 8 +++++--- services/audioflinger/Threads.cpp | 15 +++++++++++++++ 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 5350e2c..ad9f4f2 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -45,6 +45,8 @@ #define MIN_WARMUP_CYCLES 2 // minimum number of loop cycles to wait for warmup #define MAX_WARMUP_CYCLES 10 // maximum number of loop cycles to wait for warmup +#define FCC_2 2 // fixed channel count assumption + namespace android { // Fast mixer thread @@ -225,7 +227,7 @@ bool FastMixer::threadLoop() } else { format = outputSink->format(); sampleRate = Format_sampleRate(format); - ALOG_ASSERT(Format_channelCount(format) == 2); + ALOG_ASSERT(Format_channelCount(format) == FCC_2); } dumpState->mSampleRate = sampleRate; } @@ -241,7 +243,7 @@ bool FastMixer::threadLoop() // implementation; it would be better to have normal mixer allocate for us // to avoid blocking here and to prevent possible priority inversion mixer = new AudioMixer(frameCount, sampleRate, FastMixerState::kMaxFastTracks); - mixBuffer = new short[frameCount * 2]; + mixBuffer = new short[frameCount * FCC_2]; periodNs = (frameCount * 1000000000LL) / sampleRate; // 1.00 underrunNs = (frameCount * 1750000000LL) / sampleRate; // 1.75 overrunNs = (frameCount * 500000000LL) / sampleRate; // 0.50 @@ -438,7 +440,7 @@ bool FastMixer::threadLoop() //bool didFullWrite = false; // dumpsys could display a count of partial writes if ((command & FastMixerState::WRITE) && (outputSink != NULL) && (mixBuffer != NULL)) { if (mixBufferState == UNDEFINED) { - memset(mixBuffer, 0, frameCount * 2 * sizeof(short)); + memset(mixBuffer, 0, frameCount * FCC_2 * sizeof(short)); mixBufferState = ZEROED; } if (teeSink != NULL) { diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index f27d908..0928923 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1430,10 +1430,25 @@ void AudioFlinger::PlaybackThread::audioConfigChanged_l(int event, int param) { void AudioFlinger::PlaybackThread::readOutputParameters() { + // unfortunately we have no way of recovering from errors here, hence the LOG_FATAL mSampleRate = mOutput->stream->common.get_sample_rate(&mOutput->stream->common); mChannelMask = mOutput->stream->common.get_channels(&mOutput->stream->common); + if (!audio_is_output_channel(mChannelMask)) { + LOG_FATAL("HAL channel mask %#x not valid for output", mChannelMask); + } + if ((mType == MIXER || mType == DUPLICATING) && mChannelMask != AUDIO_CHANNEL_OUT_STEREO) { + LOG_FATAL("HAL channel mask %#x not supported for mixed output; " + "must be AUDIO_CHANNEL_OUT_STEREO", mChannelMask); + } mChannelCount = (uint16_t)popcount(mChannelMask); mFormat = mOutput->stream->common.get_format(&mOutput->stream->common); + if (!audio_is_valid_format(mFormat)) { + LOG_FATAL("HAL format %d not valid for output", mFormat); + } + if ((mType == MIXER || mType == DUPLICATING) && mFormat != AUDIO_FORMAT_PCM_16_BIT) { + LOG_FATAL("HAL format %d not supported for mixed output; must be AUDIO_FORMAT_PCM_16_BIT", + mFormat); + } mFrameSize = audio_stream_frame_size(&mOutput->stream->common); mFrameCount = mOutput->stream->common.get_buffer_size(&mOutput->stream->common) / mFrameSize; if (mFrameCount & 15) { -- cgit v1.1 From 9e8fcbcd8efa51d70d1207ff57bfbfe31324287a Mon Sep 17 00:00:00 2001 
From: Glenn Kasten Date: Thu, 25 Jul 2013 10:09:11 -0700 Subject: Move delete AudioMixer closer to point of re-allocation No need to delete AudioMixer before readOutputParameters Change-Id: Icafa785c9021bbe8d985e4f9527f8fc8c5e62622 --- services/audioflinger/Threads.cpp | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index f27d908..6451cce 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -3035,10 +3035,8 @@ bool AudioFlinger::MixerThread::checkForNewParameters_l() keyValuePair.string()); } if (status == NO_ERROR && reconfig) { - delete mAudioMixer; - // for safety in case readOutputParameters() accesses mAudioMixer (it doesn't) - mAudioMixer = NULL; readOutputParameters(); + delete mAudioMixer; mAudioMixer = new AudioMixer(mNormalFrameCount, mSampleRate); for (size_t i = 0; i < mTracks.size() ; i++) { int name = getTrackName_l(mTracks[i]->mChannelMask, mTracks[i]->mSessionId); -- cgit v1.1 From bfb1b832079bbb9426f72f3863199a54aefd02da Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Mon, 7 Jan 2013 09:53:42 -0800 Subject: AudioFlinger: offload playback, non-blocking write - Added specialized playback thread class for offload playback, derived from directoutput thread. This thread type handles specific state transitions for offloaded tracks and offloading commands (pause/resume/drain/flush..) to audio HAL. As opposed to other threads, does not go to standby if the track is paused. - Added support for asynchronous write and drain operations at audio HAL. Use a thread to handle async callback events from HAL: this avoids locking playback thread mutex when executing the callback and cause deadlocks when calling audio HAL functions with the playback thread mutex locked. - Better accouting for track activity: call start/stop and release Output methods in audio policy manager when tracks are actually added and removed from the active tracks list. Added a command thread in audio policy service to handle stop/release commands asynchronously and avoid deadlocks with playback thread. - Track terminated status is not a state anymore. This condition is othogonal to state to permitted state transitions while terminated. 
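
One piece of this change, visible in the AudioTrackShared hunks below, is a stream-end handshake on the shared control block. A simplified standalone sketch of that set-and-wake pattern, with std::atomic and a condition variable standing in for the actual android_atomic ops and futex wake (names follow the patch, types are illustrative):

#include <atomic>
#include <condition_variable>
#include <cstdint>
#include <mutex>

constexpr uint32_t CBLK_STREAM_END_DONE = 1u << 0;

struct SharedCblk {
    std::atomic<uint32_t> flags{0};
    std::mutex mutex;               // a waiting client would block on cv with this held
    std::condition_variable cv;
};

// Server side: mark stream end done and wake a waiting client; returns the
// previous value, like AudioTrackServerProxy::setStreamEndDone() below.
bool setStreamEndDone(SharedCblk& cblk) {
    bool old = (cblk.flags.fetch_or(CBLK_STREAM_END_DONE) & CBLK_STREAM_END_DONE) != 0;
    if (!old) {
        cblk.cv.notify_one();       // the real code issues a futex wake
    }
    return old;
}

// Client side: consume the flag and report whether it had been set, like
// AudioTrackClientProxy::clearStreamEndDone() below.
bool clearStreamEndDone(SharedCblk& cblk) {
    return (cblk.flags.fetch_and(~CBLK_STREAM_END_DONE) & CBLK_STREAM_END_DONE) != 0;
}
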
Change-Id: Id157f4b3277620568d8eace7535d9186602564de --- include/private/media/AudioTrackShared.h | 7 + media/libmedia/AudioTrackShared.cpp | 18 + services/audioflinger/AudioFlinger.cpp | 32 +- services/audioflinger/AudioFlinger.h | 8 +- services/audioflinger/AudioPolicyService.cpp | 110 +++- services/audioflinger/AudioPolicyService.h | 30 +- services/audioflinger/Effects.cpp | 45 +- services/audioflinger/Effects.h | 1 + services/audioflinger/PlaybackTracks.h | 4 + services/audioflinger/Threads.cpp | 859 ++++++++++++++++++++++----- services/audioflinger/Threads.h | 111 +++- services/audioflinger/TrackBase.h | 13 +- services/audioflinger/Tracks.cpp | 261 ++++---- 13 files changed, 1178 insertions(+), 321 deletions(-) diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 0592683..6129c80 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -291,6 +291,11 @@ public: virtual uint32_t getUnderrunFrames() const { return mCblk->u.mStreaming.mUnderrunFrames; } + + bool clearStreamEndDone(); // and return previous value + + bool getStreamEndDone() const; + }; class StaticAudioTrackClientProxy : public AudioTrackClientProxy { @@ -405,6 +410,8 @@ public: // should avoid doing a state queue poll from within framesReady(). // FIXME Change AudioFlinger to not call framesReady() from normal mixer thread. virtual void framesReadyIsCalledByMultipleThreads() { } + + bool setStreamEndDone(); // and return previous value }; class StaticAudioTrackServerProxy : public AudioTrackServerProxy { diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index 55bf175..bd43ad2 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -322,6 +322,14 @@ void AudioTrackClientProxy::flush() mCblk->u.mStreaming.mFlush++; } +bool AudioTrackClientProxy::clearStreamEndDone() { + return android_atomic_and(~CBLK_STREAM_END_DONE, &mCblk->flags) & CBLK_STREAM_END_DONE; +} + +bool AudioTrackClientProxy::getStreamEndDone() const { + return (mCblk->flags & CBLK_STREAM_END_DONE) != 0; +} + // --------------------------------------------------------------------------- StaticAudioTrackClientProxy::StaticAudioTrackClientProxy(audio_track_cblk_t* cblk, void *buffers, @@ -524,6 +532,16 @@ size_t AudioTrackServerProxy::framesReady() return filled; } +bool AudioTrackServerProxy::setStreamEndDone() { + bool old = + (android_atomic_or(CBLK_STREAM_END_DONE, &mCblk->flags) & CBLK_STREAM_END_DONE) != 0; + if (!old) { + (void) __futex_syscall3(&mCblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, + 1); + } + return old; +} + // --------------------------------------------------------------------------- StaticAudioTrackServerProxy::StaticAudioTrackServerProxy(audio_track_cblk_t* cblk, void *buffers, diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 99e077c..712772b 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1423,13 +1423,15 @@ audio_io_handle_t AudioFlinger::openOutput(audio_module_handle_t module, audio_stream_out_t *outStream = NULL; AudioHwDevice *outHwDev; - ALOGV("openOutput(), module %d Device %x, SamplingRate %d, Format %d, Channels %x, flags %x", + ALOGV("openOutput(), module %d Device %x, SamplingRate %d, Format %#08x, Channels %x, flags %x", module, (pDevices != NULL) ? 
*pDevices : 0, config.sample_rate, config.format, config.channel_mask, flags); + ALOGV("openOutput(), offloadInfo %p version 0x%04x", + offloadInfo, offloadInfo == NULL ? -1 : offloadInfo->version ); if (pDevices == NULL || *pDevices == 0) { return 0; @@ -1454,7 +1456,7 @@ audio_io_handle_t AudioFlinger::openOutput(audio_module_handle_t module, &outStream); mHardwareStatus = AUDIO_HW_IDLE; - ALOGV("openOutput() openOutputStream returned output %p, SamplingRate %d, Format %d, " + ALOGV("openOutput() openOutputStream returned output %p, SamplingRate %d, Format %#08x, " "Channels %x, status %d", outStream, config.sample_rate, @@ -1463,9 +1465,12 @@ audio_io_handle_t AudioFlinger::openOutput(audio_module_handle_t module, status); if (status == NO_ERROR && outStream != NULL) { - AudioStreamOut *output = new AudioStreamOut(outHwDev, outStream); + AudioStreamOut *output = new AudioStreamOut(outHwDev, outStream, flags); - if ((flags & AUDIO_OUTPUT_FLAG_DIRECT) || + if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) { + thread = new OffloadThread(this, output, id, *pDevices); + ALOGV("openOutput() created offload output: ID %d thread %p", id, thread); + } else if ((flags & AUDIO_OUTPUT_FLAG_DIRECT) || (config.format != AUDIO_FORMAT_PCM_16_BIT) || (config.channel_mask != AUDIO_CHANNEL_OUT_STEREO)) { thread = new DirectOutputThread(this, output, id, *pDevices); @@ -1555,11 +1560,28 @@ status_t AudioFlinger::closeOutput_nonvirtual(audio_io_handle_t output) DuplicatingThread *dupThread = (DuplicatingThread *)mPlaybackThreads.valueAt(i).get(); dupThread->removeOutputTrack((MixerThread *)thread.get()); + } } } - audioConfigChanged_l(AudioSystem::OUTPUT_CLOSED, output, NULL); + + mPlaybackThreads.removeItem(output); + // save all effects to the default thread + if (mPlaybackThreads.size()) { + PlaybackThread *dstThread = checkPlaybackThread_l(mPlaybackThreads.keyAt(0)); + if (dstThread != NULL) { + // audioflinger lock is held here so the acquisition order of thread locks does not + // matter + Mutex::Autolock _dl(dstThread->mLock); + Mutex::Autolock _sl(thread->mLock); + Vector< sp > effectChains = thread->getEffectChains_l(); + for (size_t i = 0; i < effectChains.size(); i ++) { + moveEffectChain_l(effectChains[i]->sessionId(), thread.get(), dstThread, true); + } + } + } + audioConfigChanged_l(AudioSystem::OUTPUT_CLOSED, output, NULL); } thread->exit(); // The thread entity (active unit of execution) is no longer running here, diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index f31619b..262d194 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -365,7 +365,9 @@ private: class PlaybackThread; class MixerThread; class DirectOutputThread; + class OffloadThread; class DuplicatingThread; + class AsyncCallbackThread; class Track; class RecordTrack; class EffectModule; @@ -432,6 +434,7 @@ private: void stop_nonvirtual(); }; + PlaybackThread *checkPlaybackThread_l(audio_io_handle_t output) const; MixerThread *checkMixerThread_l(audio_io_handle_t output) const; RecordThread *checkRecordThread_l(audio_io_handle_t input) const; @@ -498,11 +501,12 @@ private: struct AudioStreamOut { AudioHwDevice* const audioHwDev; audio_stream_out_t* const stream; + audio_output_flags_t flags; audio_hw_device_t* hwDev() const { return audioHwDev->hwDevice(); } - AudioStreamOut(AudioHwDevice *dev, audio_stream_out_t *out) : - audioHwDev(dev), stream(out) {} + AudioStreamOut(AudioHwDevice *dev, audio_stream_out_t *out, audio_output_flags_t flags) : 
+ audioHwDev(dev), stream(out), flags(flags) {} }; struct AudioStreamIn { diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp index fa1e405..900b411 100644 --- a/services/audioflinger/AudioPolicyService.cpp +++ b/services/audioflinger/AudioPolicyService.cpp @@ -70,10 +70,11 @@ AudioPolicyService::AudioPolicyService() Mutex::Autolock _l(mLock); // start tone playback thread - mTonePlaybackThread = new AudioCommandThread(String8("")); + mTonePlaybackThread = new AudioCommandThread(String8("ApmTone"), this); // start audio commands thread - mAudioCommandThread = new AudioCommandThread(String8("ApmCommand")); - + mAudioCommandThread = new AudioCommandThread(String8("ApmAudio"), this); + // start output activity command thread + mOutputCommandThread = new AudioCommandThread(String8("ApmOutput"), this); /* instantiate the audio policy manager */ rc = hw_get_module(AUDIO_POLICY_HARDWARE_MODULE_ID, &module); if (rc) @@ -256,6 +257,15 @@ status_t AudioPolicyService::stopOutput(audio_io_handle_t output, return NO_INIT; } ALOGV("stopOutput()"); + mOutputCommandThread->stopOutputCommand(output, stream, session); + return NO_ERROR; +} + +status_t AudioPolicyService::doStopOutput(audio_io_handle_t output, + audio_stream_type_t stream, + int session) +{ + ALOGV("doStopOutput from tid %d", gettid()); Mutex::Autolock _l(mLock); return mpAudioPolicy->stop_output(mpAudioPolicy, output, stream, session); } @@ -266,6 +276,12 @@ void AudioPolicyService::releaseOutput(audio_io_handle_t output) return; } ALOGV("releaseOutput()"); + mOutputCommandThread->releaseOutputCommand(output); +} + +void AudioPolicyService::doReleaseOutput(audio_io_handle_t output) +{ + ALOGV("doReleaseOutput from tid %d", gettid()); Mutex::Autolock _l(mLock); mpAudioPolicy->release_output(mpAudioPolicy, output); } @@ -641,8 +657,9 @@ status_t AudioPolicyService::onTransact( // ----------- AudioPolicyService::AudioCommandThread implementation ---------- -AudioPolicyService::AudioCommandThread::AudioCommandThread(String8 name) - : Thread(false), mName(name) +AudioPolicyService::AudioCommandThread::AudioCommandThread(String8 name, + const wp& service) + : Thread(false), mName(name), mService(service) { mpToneGenerator = NULL; } @@ -650,7 +667,7 @@ AudioPolicyService::AudioCommandThread::AudioCommandThread(String8 name) AudioPolicyService::AudioCommandThread::~AudioCommandThread() { - if (mName != "" && !mAudioCommands.isEmpty()) { + if (!mAudioCommands.isEmpty()) { release_wake_lock(mName.string()); } mAudioCommands.clear(); @@ -659,11 +676,7 @@ AudioPolicyService::AudioCommandThread::~AudioCommandThread() void AudioPolicyService::AudioCommandThread::onFirstRef() { - if (mName != "") { - run(mName.string(), ANDROID_PRIORITY_AUDIO); - } else { - run("AudioCommand", ANDROID_PRIORITY_AUDIO); - } + run(mName.string(), ANDROID_PRIORITY_AUDIO); } bool AudioPolicyService::AudioCommandThread::threadLoop() @@ -738,6 +751,32 @@ bool AudioPolicyService::AudioCommandThread::threadLoop() } delete data; }break; + case STOP_OUTPUT: { + StopOutputData *data = (StopOutputData *)command->mParam; + ALOGV("AudioCommandThread() processing stop output %d", + data->mIO); + sp svc = mService.promote(); + if (svc == 0) { + break; + } + mLock.unlock(); + svc->doStopOutput(data->mIO, data->mStream, data->mSession); + mLock.lock(); + delete data; + }break; + case RELEASE_OUTPUT: { + ReleaseOutputData *data = (ReleaseOutputData *)command->mParam; + ALOGV("AudioCommandThread() processing release output %d", + 
data->mIO); + sp svc = mService.promote(); + if (svc == 0) { + break; + } + mLock.unlock(); + svc->doReleaseOutput(data->mIO); + mLock.lock(); + delete data; + }break; default: ALOGW("AudioCommandThread() unknown command %d", command->mCommand); } @@ -749,7 +788,7 @@ bool AudioPolicyService::AudioCommandThread::threadLoop() } } // release delayed commands wake lock - if (mName != "" && mAudioCommands.isEmpty()) { + if (mAudioCommands.isEmpty()) { release_wake_lock(mName.string()); } ALOGV("AudioCommandThread() going to sleep"); @@ -893,17 +932,46 @@ status_t AudioPolicyService::AudioCommandThread::voiceVolumeCommand(float volume return status; } +void AudioPolicyService::AudioCommandThread::stopOutputCommand(audio_io_handle_t output, + audio_stream_type_t stream, + int session) +{ + AudioCommand *command = new AudioCommand(); + command->mCommand = STOP_OUTPUT; + StopOutputData *data = new StopOutputData(); + data->mIO = output; + data->mStream = stream; + data->mSession = session; + command->mParam = (void *)data; + Mutex::Autolock _l(mLock); + insertCommand_l(command); + ALOGV("AudioCommandThread() adding stop output %d", output); + mWaitWorkCV.signal(); +} + +void AudioPolicyService::AudioCommandThread::releaseOutputCommand(audio_io_handle_t output) +{ + AudioCommand *command = new AudioCommand(); + command->mCommand = RELEASE_OUTPUT; + ReleaseOutputData *data = new ReleaseOutputData(); + data->mIO = output; + command->mParam = (void *)data; + Mutex::Autolock _l(mLock); + insertCommand_l(command); + ALOGV("AudioCommandThread() adding release output %d", output); + mWaitWorkCV.signal(); +} + // insertCommand_l() must be called with mLock held void AudioPolicyService::AudioCommandThread::insertCommand_l(AudioCommand *command, int delayMs) { ssize_t i; // not size_t because i will count down to -1 Vector removedCommands; - nsecs_t time = 0; command->mTime = systemTime() + milliseconds(delayMs); // acquire wake lock to make sure delayed commands are processed - if (mName != "" && mAudioCommands.isEmpty()) { + if (mAudioCommands.isEmpty()) { acquire_wake_lock(PARTIAL_WAKE_LOCK, mName.string()); } @@ -1060,7 +1128,17 @@ int AudioPolicyService::setVoiceVolume(float volume, int delayMs) bool AudioPolicyService::isOffloadSupported(const audio_offload_info_t& info) { - return false; // stub function + if (mpAudioPolicy == NULL) { + ALOGV("mpAudioPolicy == NULL"); + return false; + } + + if (mpAudioPolicy->is_offload_supported == NULL) { + ALOGV("HAL does not implement is_offload_supported"); + return false; + } + + return mpAudioPolicy->is_offload_supported(mpAudioPolicy, &info); } // ---------------------------------------------------------------------------- @@ -1404,7 +1482,7 @@ static audio_io_handle_t aps_open_output_on_module(void *service, return 0; } return af->openOutput(module, pDevices, pSamplingRate, pFormat, pChannelMask, - pLatencyMs, flags); + pLatencyMs, flags, offloadInfo); } static audio_io_handle_t aps_open_dup_output(void *service, diff --git a/services/audioflinger/AudioPolicyService.h b/services/audioflinger/AudioPolicyService.h index e723c47..ae053a9 100644 --- a/services/audioflinger/AudioPolicyService.h +++ b/services/audioflinger/AudioPolicyService.h @@ -139,6 +139,11 @@ public: virtual status_t setVoiceVolume(float volume, int delayMs = 0); virtual bool isOffloadSupported(const audio_offload_info_t &config); + status_t doStopOutput(audio_io_handle_t output, + audio_stream_type_t stream, + int session = 0); + void doReleaseOutput(audio_io_handle_t output); + private: 
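
For context on the AudioCommandThread changes above: stopOutput() and releaseOutput() now only enqueue a STOP_OUTPUT or RELEASE_OUTPUT command and return, and the command thread releases its own lock (the mLock.unlock()/lock() pair around svc->doStopOutput() and svc->doReleaseOutput()) while the deferred call runs, so that call can take other locks without holding the queue lock. The sketch below shows the general shape of that pattern using standard C++ threading in place of the Android Thread/Mutex/Condition classes; it is illustrative only and every name in it is hypothetical, not part of this patch.

#include <condition_variable>
#include <deque>
#include <functional>
#include <mutex>
#include <thread>

class CommandThread {
public:
    CommandThread() : mWorker([this] { loop(); }) {}
    ~CommandThread() {
        {
            std::lock_guard<std::mutex> l(mLock);
            mExit = true;
        }
        mCond.notify_one();
        mWorker.join();
    }
    // Caller side (e.g. a binder thread): enqueue and return immediately.
    void post(std::function<void()> cmd) {
        {
            std::lock_guard<std::mutex> l(mLock);
            mQueue.push_back(std::move(cmd));
        }
        mCond.notify_one();
    }
private:
    void loop() {
        std::unique_lock<std::mutex> l(mLock);
        for (;;) {
            mCond.wait(l, [this] { return mExit || !mQueue.empty(); });
            if (mExit) return;
            std::function<void()> cmd = std::move(mQueue.front());
            mQueue.pop_front();
            l.unlock();   // drop the queue lock while the command executes,
            cmd();        // mirroring mLock.unlock()/lock() around doStopOutput()
            l.lock();
        }
    }
    std::mutex mLock;
    std::condition_variable mCond;
    std::deque<std::function<void()>> mQueue;
    bool mExit = false;
    std::thread mWorker;
};

Routing stop and release onto their own "ApmOutput" worker, rather than the existing "ApmAudio" thread, presumably keeps output teardown from waiting behind queued tone and volume commands.
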
AudioPolicyService() ANDROID_API; virtual ~AudioPolicyService(); @@ -161,10 +166,12 @@ private: STOP_TONE, SET_VOLUME, SET_PARAMETERS, - SET_VOICE_VOLUME + SET_VOICE_VOLUME, + STOP_OUTPUT, + RELEASE_OUTPUT }; - AudioCommandThread (String8 name); + AudioCommandThread (String8 name, const wp& service); virtual ~AudioCommandThread(); status_t dump(int fd); @@ -182,6 +189,11 @@ private: status_t parametersCommand(audio_io_handle_t ioHandle, const char *keyValuePairs, int delayMs = 0); status_t voiceVolumeCommand(float volume, int delayMs = 0); + void stopOutputCommand(audio_io_handle_t output, + audio_stream_type_t stream, + int session); + void releaseOutputCommand(audio_io_handle_t output); + void insertCommand_l(AudioCommand *command, int delayMs = 0); private: @@ -226,12 +238,25 @@ private: float mVolume; }; + class StopOutputData { + public: + audio_io_handle_t mIO; + audio_stream_type_t mStream; + int mSession; + }; + + class ReleaseOutputData { + public: + audio_io_handle_t mIO; + }; + Mutex mLock; Condition mWaitWorkCV; Vector mAudioCommands; // list of pending commands ToneGenerator *mpToneGenerator; // the tone generator AudioCommand mLastCommand; // last processed command (used by dump) String8 mName; // string used by wake lock fo delayed commands + wp mService; }; class EffectDesc { @@ -316,6 +341,7 @@ private: // device connection state or routing sp mAudioCommandThread; // audio commands thread sp mTonePlaybackThread; // tone playback thread + sp mOutputCommandThread; // process stop and release output struct audio_policy_device *mpAudioPolicyDev; struct audio_policy *mpAudioPolicy; KeyedVector< audio_source_t, InputSourceDesc* > mInputSources; diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp index 1c7a64b..d5a21a7 100644 --- a/services/audioflinger/Effects.cpp +++ b/services/audioflinger/Effects.cpp @@ -95,16 +95,7 @@ AudioFlinger::EffectModule::~EffectModule() { ALOGV("Destructor %p", this); if (mEffectInterface != NULL) { - if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC || - (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) { - sp thread = mThread.promote(); - if (thread != 0) { - audio_stream_t *stream = thread->stream(); - if (stream != NULL) { - stream->remove_audio_effect(stream, mEffectInterface); - } - } - } + remove_effect_from_hal_l(); // release effect engine EffectRelease(mEffectInterface); } @@ -488,7 +479,7 @@ status_t AudioFlinger::EffectModule::stop_l() if (mStatus != NO_ERROR) { return mStatus; } - status_t cmdStatus; + status_t cmdStatus = NO_ERROR; uint32_t size = sizeof(status_t); status_t status = (*mEffectInterface)->command(mEffectInterface, EFFECT_CMD_DISABLE, @@ -496,12 +487,19 @@ status_t AudioFlinger::EffectModule::stop_l() NULL, &size, &cmdStatus); - if (status == 0) { + if (status == NO_ERROR) { status = cmdStatus; } - if (status == 0 && - ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC || - (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC)) { + if (status == NO_ERROR) { + status = remove_effect_from_hal_l(); + } + return status; +} + +status_t AudioFlinger::EffectModule::remove_effect_from_hal_l() +{ + if ((mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC || + (mDescriptor.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_POST_PROC) { sp thread = mThread.promote(); if (thread != 0) { audio_stream_t *stream = thread->stream(); @@ -510,7 +508,7 @@ status_t 
AudioFlinger::EffectModule::stop_l() } } } - return status; + return NO_ERROR; } status_t AudioFlinger::EffectModule::command(uint32_t cmdCode, @@ -595,6 +593,17 @@ status_t AudioFlinger::EffectModule::setEnabled_l(bool enabled) h->setEnabled(enabled); } } +//EL_FIXME not sure why this is needed? +// sp thread = mThread.promote(); +// if (thread == 0) { +// return NO_ERROR; +// } +// +// if ((thread->type() == ThreadBase::OFFLOAD) && (enabled)) { +// PlaybackThread *p = (PlaybackThread *)thread.get(); +// ALOGV("setEnabled: Offload, invalidate tracks"); +// p->invalidateTracks(AUDIO_STREAM_MUSIC); +// } } return NO_ERROR; } @@ -1218,9 +1227,7 @@ void AudioFlinger::EffectChain::clearInputBuffer() // Must be called with EffectChain::mLock locked void AudioFlinger::EffectChain::clearInputBuffer_l(sp thread) { - size_t numSamples = thread->frameCount() * thread->channelCount(); - memset(mInBuffer, 0, numSamples * sizeof(int16_t)); - + memset(mInBuffer, 0, thread->frameCount() * thread->frameSize()); } // Must be called with EffectChain::mLock locked diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h index 91303ee..0b7fb83 100644 --- a/services/audioflinger/Effects.h +++ b/services/audioflinger/Effects.h @@ -126,6 +126,7 @@ protected: status_t start_l(); status_t stop_l(); + status_t remove_effect_from_hal_l(); mutable Mutex mLock; // mutex for process, commands and handles list protection wp mThread; // parent thread diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index b1286d3..8b7433c 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -51,6 +51,8 @@ public: audio_stream_type_t streamType() const { return mStreamType; } + bool isOffloaded() const { return (mFlags & IAudioFlinger::TRACK_OFFLOAD) != 0; } + status_t setParameters(const String8& keyValuePairs); status_t attachAuxEffect(int EffectId); void setAuxBuffer(int EffectId, int32_t *buffer); int32_t *auxBuffer() const { return mAuxBuffer; } @@ -68,6 +70,7 @@ protected: friend class PlaybackThread; friend class MixerThread; friend class DirectOutputThread; + friend class OffloadThread; Track(const Track&); Track& operator = (const Track&); @@ -142,6 +145,7 @@ private: // barrier, but is read/written atomically bool mIsInvalid; // non-resettable latch, set by invalidate() AudioTrackServerProxy* mAudioTrackServerProxy; + bool mResumeToStopping; // track was paused in stopping state. 
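
For context on mResumeToStopping and the widened use of STOPPING_1/STOPPING_2 just above: an offloaded track cannot be declared stopped as soon as the client stops it, because the HAL still holds a large amount of buffered audio. Later hunks in this patch (OffloadThread::prepareTracks_l() and Track::start()) keep a stopping track in STOPPING_1 until the mix buffer has been written, move it to STOPPING_2 while the HAL drains, and only then mark it STOPPED; a pause that lands during that window sets mResumeToStopping so a later start() resumes the stop (back into STOPPING_1) rather than playback. The sketch below is a simplified, hypothetical model of those transitions, not the actual Track code; the point at which the flag is cleared is an assumption.

#include <cassert>

enum class TrackState { Active, Paused, Stopping1, Stopping2, Stopped };

struct TrackSketch {
    TrackState state = TrackState::Active;
    bool resumeToStopping = false;          // analogous to mResumeToStopping
};

void stop(TrackSketch& t)  { t.state = TrackState::Stopping1; }   // keep playing out buffered data

void pause(TrackSketch& t) {
    if (t.state == TrackState::Stopping1 || t.state == TrackState::Stopping2) {
        t.resumeToStopping = true;          // remember the track was paused while stopping
    }
    t.state = TrackState::Paused;
}

void start(TrackSketch& t) {
    if (t.state == TrackState::Paused && t.resumeToStopping) {
        t.state = TrackState::Stopping1;    // resume the interrupted stop, not playback
        t.resumeToStopping = false;         // assumption: flag cleared once consumed
    } else {
        t.state = TrackState::Active;
    }
}

// Driven by the playback thread in the real code:
void mixBufferWritten(TrackSketch& t) { if (t.state == TrackState::Stopping1) t.state = TrackState::Stopping2; }
void drainComplete(TrackSketch& t)    { if (t.state == TrackState::Stopping2) t.state = TrackState::Stopped; }

int main() {
    TrackSketch t;
    stop(t);
    pause(t);
    start(t);
    assert(t.state == TrackState::Stopping1);   // paused-while-stopping resumes into STOPPING_1
    mixBufferWritten(t);
    drainComplete(t);
    assert(t.state == TrackState::Stopped);
    return 0;
}
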
}; // end of Track class TimedTrack : public Track { diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index f27d908..296a485 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -932,13 +932,18 @@ AudioFlinger::PlaybackThread::PlaybackThread(const sp& audioFlinge audio_devices_t device, type_t type) : ThreadBase(audioFlinger, id, device, AUDIO_DEVICE_NONE, type), - mMixBuffer(NULL), mSuspended(0), mBytesWritten(0), + mAllocMixBuffer(NULL), mSuspended(0), mBytesWritten(0), // mStreamTypes[] initialized in constructor body mOutput(output), mLastWriteTime(0), mNumWrites(0), mNumDelayedWrites(0), mInWrite(false), mMixerStatus(MIXER_IDLE), mMixerStatusIgnoringFastTracks(MIXER_IDLE), standbyDelay(AudioFlinger::mStandbyTimeInNsecs), + mBytesRemaining(0), + mCurrentWriteLength(0), + mUseAsyncWrite(false), + mWriteBlocked(false), + mDraining(false), mScreenState(AudioFlinger::mScreenState), // index 0 is reserved for normal mixer's submix mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1) @@ -981,7 +986,7 @@ AudioFlinger::PlaybackThread::PlaybackThread(const sp& audioFlinge AudioFlinger::PlaybackThread::~PlaybackThread() { mAudioFlinger->unregisterWriter(mNBLogWriter); - delete [] mMixBuffer; + delete [] mAllocMixBuffer; } void AudioFlinger::PlaybackThread::dump(int fd, const Vector& args) @@ -1187,7 +1192,22 @@ sp AudioFlinger::PlaybackThread::createTrac goto Exit; } } + } else if (mType == OFFLOAD) { + if (sampleRate != mSampleRate || format != mFormat || channelMask != mChannelMask) { + ALOGE("createTrack_l() Bad parameter: sampleRate %d format %d, channelMask 0x%08x \"" + "for output %p with format %d", + sampleRate, format, channelMask, mOutput, mFormat); + lStatus = BAD_VALUE; + goto Exit; + } } else { + if ((format & AUDIO_FORMAT_MAIN_MASK) != AUDIO_FORMAT_PCM) { + ALOGE("createTrack_l() Bad parameter: format %d \"" + "for output %p with format %d", + format, mOutput, mFormat); + lStatus = BAD_VALUE; + goto Exit; + } // Resampler implementation limits input sampling rate to 2 x output sampling rate. if (sampleRate > mSampleRate*2) { ALOGE("Sample rate out of range: %u mSampleRate %u", sampleRate, mSampleRate); @@ -1233,6 +1253,7 @@ sp AudioFlinger::PlaybackThread::createTrac lStatus = NO_MEMORY; goto Exit; } + mTracks.add(track); sp chain = getEffectChain_l(sessionId); @@ -1307,12 +1328,14 @@ void AudioFlinger::PlaybackThread::setStreamVolume(audio_stream_type_t stream, f { Mutex::Autolock _l(mLock); mStreamTypes[stream].volume = value; + signal_l(); } void AudioFlinger::PlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted) { Mutex::Autolock _l(mLock); mStreamTypes[stream].mute = muted; + signal_l(); } float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) const @@ -1332,6 +1355,30 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) // the track is newly added, make sure it fills up all its // buffers before playing. This is to ensure the client will // effectively get the latency it requested. 
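
The hunk that continues below moves the AudioSystem::startOutput() call into PlaybackThread::addTrack_l(): the thread lock is dropped around the call, the track state is snapshotted first, and if the state changed while the lock was released the successful start is unwound with stopOutput() and the add is rejected. A minimal sketch of that unlock/call/relock-and-revalidate pattern follows, with hypothetical names and std::mutex standing in for the Android mutex; it is not the AudioFlinger implementation.

#include <mutex>

enum class State { Active, Paused, Stopped };

struct Guarded {
    std::mutex lock;                // stands in for ThreadBase::mLock
    State state = State::Active;
};

// Returns false if the external start was rejected or the state changed while unlocked.
bool startWhileUnlocked(Guarded& g, bool (*externalStart)(), void (*externalStop)()) {
    std::unique_lock<std::mutex> l(g.lock);   // in addTrack_l() the lock is already held on entry
    const State snapshot = g.state;

    l.unlock();                     // never hold the thread lock across the policy-manager call
    const bool ok = externalStart();          // e.g. AudioSystem::startOutput(...)
    l.lock();

    if (g.state != snapshot) {      // track was stopped/paused while the lock was released
        if (ok) {
            l.unlock();
            externalStop();         // undo the successful start, e.g. AudioSystem::stopOutput(...)
            l.lock();
        }
        return false;               // caller reports INVALID_OPERATION
    }
    return ok;                      // a rejected start becomes PERMISSION_DENIED in the caller
}

The revalidation matters because stop() or pause() on a binder thread can run in the window where the lock is released; acting on the stale snapshot would leave the policy manager's start/stop accounting unbalanced.
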
+ if (!track->isOutputTrack()) { + TrackBase::track_state state = track->mState; + mLock.unlock(); + status = AudioSystem::startOutput(mId, track->streamType(), track->sessionId()); + mLock.lock(); + // abort track was stopped/paused while we released the lock + if (state != track->mState) { + if (status == NO_ERROR) { + mLock.unlock(); + AudioSystem::stopOutput(mId, track->streamType(), track->sessionId()); + mLock.lock(); + } + return INVALID_OPERATION; + } + // abort if start is rejected by audio policy manager + if (status != NO_ERROR) { + return PERMISSION_DENIED; + } +#ifdef ADD_BATTERY_DATA + // to track the speaker usage + addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStart); +#endif + } + track->mFillingUpStatus = track->sharedBuffer() != 0 ? Track::FS_FILLED : Track::FS_FILLING; track->mResetDone = false; track->mPresentationCompleteFrames = 0; @@ -1352,14 +1399,19 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) return status; } -// destroyTrack_l() must be called with ThreadBase::mLock held -void AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) +bool AudioFlinger::PlaybackThread::destroyTrack_l(const sp& track) { - track->mState = TrackBase::TERMINATED; + track->terminate(); // active tracks are removed by threadLoop() - if (mActiveTracks.indexOf(track) < 0) { + bool trackActive = (mActiveTracks.indexOf(track) >= 0); + track->mState = TrackBase::STOPPED; + if (!trackActive) { removeTrack_l(track); + } else if (track->isFastTrack() || track->isOffloaded()) { + track->mState = TrackBase::STOPPING_1; } + + return trackActive; } void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) @@ -1383,6 +1435,16 @@ void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) } } +void AudioFlinger::PlaybackThread::signal_l() +{ + // Thread could be blocked waiting for async + // so signal it to handle state changes immediately + // If threadLoop is currently unlocked a signal of mWaitWorkCV will + // be lost so we also flag to prevent it blocking on mWaitWorkCV + mSignalPending = true; + mWaitWorkCV.signal(); +} + String8 AudioFlinger::PlaybackThread::getParameters(const String8& keys) { String8 out_s8 = String8(""); @@ -1428,6 +1490,57 @@ void AudioFlinger::PlaybackThread::audioConfigChanged_l(int event, int param) { mAudioFlinger->audioConfigChanged_l(event, mId, param2); } +void AudioFlinger::PlaybackThread::writeCallback() +{ + ALOG_ASSERT(mCallbackThread != 0); + mCallbackThread->setWriteBlocked(false); +} + +void AudioFlinger::PlaybackThread::drainCallback() +{ + ALOG_ASSERT(mCallbackThread != 0); + mCallbackThread->setDraining(false); +} + +void AudioFlinger::PlaybackThread::setWriteBlocked(bool value) +{ + Mutex::Autolock _l(mLock); + mWriteBlocked = value; + if (!value) { + mWaitWorkCV.signal(); + } +} + +void AudioFlinger::PlaybackThread::setDraining(bool value) +{ + Mutex::Autolock _l(mLock); + mDraining = value; + if (!value) { + mWaitWorkCV.signal(); + } +} + +// static +int AudioFlinger::PlaybackThread::asyncCallback(stream_callback_event_t event, + void *param, + void *cookie) +{ + AudioFlinger::PlaybackThread *me = (AudioFlinger::PlaybackThread *)cookie; + ALOGV("asyncCallback() event %d", event); + switch (event) { + case STREAM_CBK_EVENT_WRITE_READY: + me->writeCallback(); + break; + case STREAM_CBK_EVENT_DRAIN_READY: + me->drainCallback(); + break; + default: + ALOGW("asyncCallback() unknown event %d", event); + break; + } + return 0; +} + void AudioFlinger::PlaybackThread::readOutputParameters() { mSampleRate 
= mOutput->stream->common.get_sample_rate(&mOutput->stream->common); @@ -1441,6 +1554,14 @@ void AudioFlinger::PlaybackThread::readOutputParameters() mFrameCount); } + if ((mOutput->flags & AUDIO_OUTPUT_FLAG_NON_BLOCKING) && + (mOutput->stream->set_callback != NULL)) { + if (mOutput->stream->set_callback(mOutput->stream, + AudioFlinger::PlaybackThread::asyncCallback, this) == 0) { + mUseAsyncWrite = true; + } + } + // Calculate size of normal mix buffer relative to the HAL output buffer size double multiplier = 1.0; if (mType == MIXER && (kUseFastMixer == FastMixer_Static || @@ -1483,9 +1604,11 @@ void AudioFlinger::PlaybackThread::readOutputParameters() ALOGI("HAL output buffer size %u frames, normal mix buffer size %u frames", mFrameCount, mNormalFrameCount); - delete[] mMixBuffer; - mMixBuffer = new int16_t[mNormalFrameCount * mChannelCount]; - memset(mMixBuffer, 0, mNormalFrameCount * mChannelCount * sizeof(int16_t)); + delete[] mAllocMixBuffer; + size_t align = (mFrameSize < sizeof(int16_t)) ? sizeof(int16_t) : mFrameSize; + mAllocMixBuffer = new int8_t[mNormalFrameCount * mFrameSize + align - 1]; + mMixBuffer = (int16_t *) ((((size_t)mAllocMixBuffer + align - 1) / align) * align); + memset(mMixBuffer, 0, mNormalFrameCount * mFrameSize); // force reconfiguration of effect chains and engines to take new buffer size and audio // parameters into account @@ -1622,13 +1745,18 @@ void AudioFlinger::PlaybackThread::threadLoop_removeTracks( if (CC_UNLIKELY(count)) { for (size_t i = 0 ; i < count ; i++) { const sp& track = tracksToRemove.itemAt(i); - if ((track->sharedBuffer() != 0) && - (track->mState == TrackBase::ACTIVE || track->mState == TrackBase::RESUMING)) { + if (!track->isOutputTrack()) { AudioSystem::stopOutput(mId, track->streamType(), track->sessionId()); +#ifdef ADD_BATTERY_DATA + // to track the speaker usage + addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop); +#endif + if (track->isTerminated()) { + AudioSystem::releaseOutput(mId); + } } } } - } void AudioFlinger::PlaybackThread::checkSilentMode_l() @@ -1649,17 +1777,18 @@ void AudioFlinger::PlaybackThread::checkSilentMode_l() } // shared by MIXER and DIRECT, overridden by DUPLICATING -void AudioFlinger::PlaybackThread::threadLoop_write() +ssize_t AudioFlinger::PlaybackThread::threadLoop_write() { // FIXME rewrite to reduce number of system calls mLastWriteTime = systemTime(); mInWrite = true; - int bytesWritten; + ssize_t bytesWritten; // If an NBAIO sink is present, use it to write the normal mixer's submix if (mNormalSink != 0) { #define mBitShift 2 // FIXME - size_t count = mixBufferSize >> mBitShift; + size_t count = mBytesRemaining >> mBitShift; + size_t offset = (mCurrentWriteLength - mBytesRemaining) >> 1; ATRACE_BEGIN("write"); // update the setpoint when AudioFlinger::mScreenState changes uint32_t screenState = AudioFlinger::mScreenState; @@ -1671,7 +1800,7 @@ void AudioFlinger::PlaybackThread::threadLoop_write() (pipe->maxFrames() * 7) / 8 : mNormalFrameCount * 2); } } - ssize_t framesWritten = mNormalSink->write(mMixBuffer, count); + ssize_t framesWritten = mNormalSink->write(mMixBuffer + offset, count); ATRACE_END(); if (framesWritten > 0) { bytesWritten = framesWritten << mBitShift; @@ -1680,15 +1809,48 @@ void AudioFlinger::PlaybackThread::threadLoop_write() } // otherwise use the HAL / AudioStreamOut directly } else { - // Direct output thread. 
- bytesWritten = (int)mOutput->stream->write(mOutput->stream, mMixBuffer, mixBufferSize); + // Direct output and offload threads + size_t offset = (mCurrentWriteLength - mBytesRemaining) / sizeof(int16_t); + if (mUseAsyncWrite) { + mWriteBlocked = true; + ALOG_ASSERT(mCallbackThread != 0); + mCallbackThread->setWriteBlocked(true); + } + bytesWritten = mOutput->stream->write(mOutput->stream, + mMixBuffer + offset, mBytesRemaining); + if (mUseAsyncWrite && + ((bytesWritten < 0) || (bytesWritten == (ssize_t)mBytesRemaining))) { + // do not wait for async callback in case of error of full write + mWriteBlocked = false; + ALOG_ASSERT(mCallbackThread != 0); + mCallbackThread->setWriteBlocked(false); + } } - if (bytesWritten > 0) { - mBytesWritten += mixBufferSize; - } mNumWrites++; mInWrite = false; + + return bytesWritten; +} + +void AudioFlinger::PlaybackThread::threadLoop_drain() +{ + if (mOutput->stream->drain) { + ALOGV("draining %s", (mMixerStatus == MIXER_DRAIN_TRACK) ? "early" : "full"); + if (mUseAsyncWrite) { + mDraining = true; + ALOG_ASSERT(mCallbackThread != 0); + mCallbackThread->setDraining(true); + } + mOutput->stream->drain(mOutput->stream, + (mMixerStatus == MIXER_DRAIN_TRACK) ? AUDIO_DRAIN_EARLY_NOTIFY + : AUDIO_DRAIN_ALL); + } +} + +void AudioFlinger::PlaybackThread::threadLoop_exit() +{ + // Default implementation has nothing to do } /* @@ -1929,10 +2091,29 @@ bool AudioFlinger::PlaybackThread::threadLoop() saveOutputTracks(); - // put audio hardware into standby after short delay - if (CC_UNLIKELY((!mActiveTracks.size() && systemTime() > standbyTime) || - isSuspended())) { - if (!mStandby) { + if (mSignalPending) { + // A signal was raised while we were unlocked + mSignalPending = false; + } else if (waitingAsyncCallback_l()) { + if (exitPending()) { + break; + } + releaseWakeLock_l(); + ALOGV("wait async completion"); + mWaitWorkCV.wait(mLock); + ALOGV("async completion/wake"); + acquireWakeLock_l(); + if (exitPending()) { + break; + } + if (!mActiveTracks.size() && (systemTime() > standbyTime)) { + continue; + } + sleepTime = 0; + } else if ((!mActiveTracks.size() && systemTime() > standbyTime) || + isSuspended()) { + // put audio hardware into standby after short delay + if (shouldStandby_l()) { threadLoop_standby(); @@ -1959,7 +2140,7 @@ bool AudioFlinger::PlaybackThread::threadLoop() mMixerStatus = MIXER_IDLE; mMixerStatusIgnoringFastTracks = MIXER_IDLE; mBytesWritten = 0; - + mBytesRemaining = 0; checkSilentMode_l(); standbyTime = systemTime() + standbyDelay; @@ -1981,50 +2162,73 @@ bool AudioFlinger::PlaybackThread::threadLoop() lockEffectChains_l(effectChains); } - if (CC_LIKELY(mMixerStatus == MIXER_TRACKS_READY)) { - threadLoop_mix(); - } else { - threadLoop_sleepTime(); - } - - if (isSuspended()) { - sleepTime = suspendSleepTimeUs(); - mBytesWritten += mixBufferSize; - } + if (mBytesRemaining == 0) { + mCurrentWriteLength = 0; + if (mMixerStatus == MIXER_TRACKS_READY) { + // threadLoop_mix() sets mCurrentWriteLength + threadLoop_mix(); + } else if ((mMixerStatus != MIXER_DRAIN_TRACK) + && (mMixerStatus != MIXER_DRAIN_ALL)) { + // threadLoop_sleepTime sets sleepTime to 0 if data + // must be written to HAL + threadLoop_sleepTime(); + if (sleepTime == 0) { + mCurrentWriteLength = mixBufferSize; + } + } + mBytesRemaining = mCurrentWriteLength; + if (isSuspended()) { + sleepTime = suspendSleepTimeUs(); + // simulate write to HAL when suspended + mBytesWritten += mixBufferSize; + mBytesRemaining = 0; + } - // only process effects if we're going to write - if 
(sleepTime == 0) { - for (size_t i = 0; i < effectChains.size(); i ++) { - effectChains[i]->process_l(); + // only process effects if we're going to write + if (sleepTime == 0) { + for (size_t i = 0; i < effectChains.size(); i ++) { + effectChains[i]->process_l(); + } } } // enable changes in effect chain unlockEffectChains(effectChains); - // sleepTime == 0 means we must write to audio hardware - if (sleepTime == 0) { - - threadLoop_write(); - + if (!waitingAsyncCallback()) { + // sleepTime == 0 means we must write to audio hardware + if (sleepTime == 0) { + if (mBytesRemaining) { + ssize_t ret = threadLoop_write(); + if (ret < 0) { + mBytesRemaining = 0; + } else { + mBytesWritten += ret; + mBytesRemaining -= ret; + } + } else if ((mMixerStatus == MIXER_DRAIN_TRACK) || + (mMixerStatus == MIXER_DRAIN_ALL)) { + threadLoop_drain(); + } if (mType == MIXER) { - // write blocked detection - nsecs_t now = systemTime(); - nsecs_t delta = now - mLastWriteTime; - if (!mStandby && delta > maxPeriod) { - mNumDelayedWrites++; - if ((now - lastWarning) > kWarningThrottleNs) { - ATRACE_NAME("underrun"); - ALOGW("write blocked for %llu msecs, %d delayed writes, thread %p", - ns2ms(delta), mNumDelayedWrites, this); - lastWarning = now; + // write blocked detection + nsecs_t now = systemTime(); + nsecs_t delta = now - mLastWriteTime; + if (!mStandby && delta > maxPeriod) { + mNumDelayedWrites++; + if ((now - lastWarning) > kWarningThrottleNs) { + ATRACE_NAME("underrun"); + ALOGW("write blocked for %llu msecs, %d delayed writes, thread %p", + ns2ms(delta), mNumDelayedWrites, this); + lastWarning = now; + } } - } } - mStandby = false; - } else { - usleep(sleepTime); + mStandby = false; + } else { + usleep(sleepTime); + } } // Finally let go of removed track(s), without the lock held @@ -2046,8 +2250,10 @@ if (mType == MIXER) { // is now local to this block, but will keep it for now (at least until merge done). } + threadLoop_exit(); + // for DuplicatingThread, standby mode is handled by the outputTracks, otherwise ... 
- if (mType == MIXER || mType == DIRECT) { + if (mType == MIXER || mType == DIRECT || mType == OFFLOAD) { // put output stream into standby mode if (!mStandby) { mOutput->stream->common.standby(&mOutput->stream->common); @@ -2060,6 +2266,28 @@ if (mType == MIXER) { return false; } +// removeTracks_l() must be called with ThreadBase::mLock held +void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp >& tracksToRemove) +{ + size_t count = tracksToRemove.size(); + if (CC_UNLIKELY(count)) { + for (size_t i=0 ; i& track = tracksToRemove.itemAt(i); + mActiveTracks.remove(track); + ALOGV("removeTracks_l removing track on session %d", track->sessionId()); + sp chain = getEffectChain_l(track->sessionId()); + if (chain != 0) { + ALOGV("stopping track on chain %p for session Id: %d", chain.get(), + track->sessionId()); + chain->decActiveTrackCnt(); + } + if (track->isTerminated()) { + removeTrack_l(track); + } + } + } + +} // ---------------------------------------------------------------------------- @@ -2264,7 +2492,7 @@ void AudioFlinger::MixerThread::threadLoop_removeTracks(const Vector< sp PlaybackThread::threadLoop_removeTracks(tracksToRemove); } -void AudioFlinger::MixerThread::threadLoop_write() +ssize_t AudioFlinger::MixerThread::threadLoop_write() { // FIXME we should only do one push per cycle; confirm this is true // Start the fast mixer if it's not already running @@ -2296,7 +2524,7 @@ void AudioFlinger::MixerThread::threadLoop_write() sq->end(false /*didModify*/); } } - PlaybackThread::threadLoop_write(); + return PlaybackThread::threadLoop_write(); } void AudioFlinger::MixerThread::threadLoop_standby() @@ -2328,11 +2556,40 @@ void AudioFlinger::MixerThread::threadLoop_standby() PlaybackThread::threadLoop_standby(); } +// Empty implementation for standard mixer +// Overridden for offloaded playback +void AudioFlinger::PlaybackThread::flushOutput_l() +{ +} + +bool AudioFlinger::PlaybackThread::waitingAsyncCallback_l() +{ + return false; +} + +bool AudioFlinger::PlaybackThread::shouldStandby_l() +{ + return !mStandby; +} + +bool AudioFlinger::PlaybackThread::waitingAsyncCallback() +{ + Mutex::Autolock _l(mLock); + return waitingAsyncCallback_l(); +} + // shared by MIXER and DIRECT, overridden by DUPLICATING void AudioFlinger::PlaybackThread::threadLoop_standby() { ALOGV("Audio hardware entering standby, mixer %p, suspend count %d", this, mSuspended); mOutput->stream->common.standby(&mOutput->stream->common); + if (mUseAsyncWrite != 0) { + mWriteBlocked = false; + mDraining = false; + ALOG_ASSERT(mCallbackThread != 0); + mCallbackThread->setWriteBlocked(false); + mCallbackThread->setDraining(false); + } } void AudioFlinger::MixerThread::threadLoop_mix() @@ -2353,6 +2610,7 @@ void AudioFlinger::MixerThread::threadLoop_mix() // mix buffers... mAudioMixer->process(pts); + mCurrentWriteLength = mixBufferSize; // increase sleep time progressively when application underrun condition clears. 
// Only increase sleep time if the mixer is ready for two consecutive times to avoid // that a steady state of alternating ready/not ready conditions keeps the sleep time @@ -2480,7 +2738,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac switch (track->mState) { case TrackBase::STOPPING_1: // track stays active in STOPPING_1 state until first underrun - if (recentUnderruns > 0) { + if (recentUnderruns > 0 || track->isTerminated()) { track->mState = TrackBase::STOPPING_2; } break; @@ -2522,7 +2780,6 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // fall through case TrackBase::STOPPING_2: case TrackBase::PAUSED: - case TrackBase::TERMINATED: case TrackBase::STOPPED: case TrackBase::FLUSHED: // flush() while active // Check for presentation complete if track is inactive @@ -2634,8 +2891,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac if ((framesReady >= minFrames) && track->isReady() && !track->isPaused() && !track->isTerminated()) { - ALOGVV("track %d u=%08x, s=%08x [OK] on thread %p", name, cblk->user, cblk->server, - this); + ALOGVV("track %d s=%08x [OK] on thread %p", name, cblk->server, this); mixedTracks++; @@ -2709,6 +2965,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac } va = (uint32_t)(v * sendLevel); } + // Delegate volume control to effect in track effect chain if needed if (chain != 0 && chain->setVolume_l(&vl, &vr)) { // Do not ramp volume if volume is controlled by effect @@ -2800,8 +3057,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac chain->clearInputBuffer(); } - ALOGVV("track %d u=%08x, s=%08x [NOT READY] on thread %p", name, cblk->user, - cblk->server, this); + ALOGVV("track %d s=%08x [NOT READY] on thread %p", name, cblk->server, this); if ((track->sharedBuffer() != 0) || track->isTerminated() || track->isStopped() || track->isPaused()) { // We have consumed all the buffers of this track. @@ -2887,30 +3143,13 @@ track_is_ready: ; } // remove all the tracks that need to be... 
- count = tracksToRemove->size(); - if (CC_UNLIKELY(count)) { - for (size_t i=0 ; i& track = tracksToRemove->itemAt(i); - mActiveTracks.remove(track); - if (track->mainBuffer() != mMixBuffer) { - chain = getEffectChain_l(track->sessionId()); - if (chain != 0) { - ALOGV("stopping track on chain %p for session Id: %d", chain.get(), - track->sessionId()); - chain->decActiveTrackCnt(); - } - } - if (track->isTerminated()) { - removeTrack_l(track); - } - } - } + removeTracks_l(*tracksToRemove); // mix buffer must be cleared if all tracks are connected to an // effect chain as in this case the mixer will not write to // mix buffer and track effects will accumulate into it - if ((mixedTracks != 0 && mixedTracks == tracksWithEffect) || - (mixedTracks == 0 && fastTracks > 0)) { + if ((mBytesRemaining == 0) && ((mixedTracks != 0 && mixedTracks == tracksWithEffect) || + (mixedTracks == 0 && fastTracks > 0))) { // FIXME as a performance optimization, should remember previous zero status memset(mMixBuffer, 0, mNormalFrameCount * mChannelCount * sizeof(int16_t)); } @@ -3142,10 +3381,63 @@ AudioFlinger::DirectOutputThread::DirectOutputThread(const sp& aud { } +AudioFlinger::DirectOutputThread::DirectOutputThread(const sp& audioFlinger, + AudioStreamOut* output, audio_io_handle_t id, uint32_t device, + ThreadBase::type_t type) + : PlaybackThread(audioFlinger, output, id, device, type) + // mLeftVolFloat, mRightVolFloat +{ +} + AudioFlinger::DirectOutputThread::~DirectOutputThread() { } +void AudioFlinger::DirectOutputThread::processVolume_l(Track *track, bool lastTrack) +{ + audio_track_cblk_t* cblk = track->cblk(); + float left, right; + + if (mMasterMute || mStreamTypes[track->streamType()].mute) { + left = right = 0; + } else { + float typeVolume = mStreamTypes[track->streamType()].volume; + float v = mMasterVolume * typeVolume; + AudioTrackServerProxy *proxy = track->mAudioTrackServerProxy; + uint32_t vlr = proxy->getVolumeLR(); + float v_clamped = v * (vlr & 0xFFFF); + if (v_clamped > MAX_GAIN) v_clamped = MAX_GAIN; + left = v_clamped/MAX_GAIN; + v_clamped = v * (vlr >> 16); + if (v_clamped > MAX_GAIN) v_clamped = MAX_GAIN; + right = v_clamped/MAX_GAIN; + } + + if (lastTrack) { + if (left != mLeftVolFloat || right != mRightVolFloat) { + mLeftVolFloat = left; + mRightVolFloat = right; + + // Convert volumes from float to 8.24 + uint32_t vl = (uint32_t)(left * (1 << 24)); + uint32_t vr = (uint32_t)(right * (1 << 24)); + + // Delegate volume control to effect in track effect chain if needed + // only one effect chain can be present on DirectOutputThread, so if + // there is one, the track is connected to it + if (!mEffectChains.isEmpty()) { + mEffectChains[0]->setVolume_l(&vl, &vr); + left = (float)vl / (1 << 24); + right = (float)vr / (1 << 24); + } + if (mOutput->stream->set_volume) { + mOutput->stream->set_volume(mOutput->stream, left, right); + } + } + } +} + + AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prepareTracks_l( Vector< sp > *tracksToRemove ) @@ -3172,6 +3464,12 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep } else { minFrames = 1; } + // Only consider last track started for volume and mixer state control. + // This is the last entry in mActiveTracks unless a track underruns. + // As we only care about the transition phase between two tracks on a + // direct output, it is not a problem to ignore the underrun case. 
+ bool last = (i == (count - 1)); + if ((track->framesReady() >= minFrames) && track->isReady() && !track->isPaused() && !track->isTerminated()) { @@ -3186,52 +3484,8 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep } // compute volume for this track - float left, right; - if (mMasterMute || track->isPausing() || mStreamTypes[track->streamType()].mute) { - left = right = 0; - if (track->isPausing()) { - track->setPaused(); - } - } else { - float typeVolume = mStreamTypes[track->streamType()].volume; - float v = mMasterVolume * typeVolume; - uint32_t vlr = track->mAudioTrackServerProxy->getVolumeLR(); - float v_clamped = v * (vlr & 0xFFFF); - if (v_clamped > MAX_GAIN) { - v_clamped = MAX_GAIN; - } - left = v_clamped/MAX_GAIN; - v_clamped = v * (vlr >> 16); - if (v_clamped > MAX_GAIN) { - v_clamped = MAX_GAIN; - } - right = v_clamped/MAX_GAIN; - } - // Only consider last track started for volume and mixer state control. - // This is the last entry in mActiveTracks unless a track underruns. - // As we only care about the transition phase between two tracks on a - // direct output, it is not a problem to ignore the underrun case. - if (i == (count - 1)) { - if (left != mLeftVolFloat || right != mRightVolFloat) { - mLeftVolFloat = left; - mRightVolFloat = right; - - // Convert volumes from float to 8.24 - uint32_t vl = (uint32_t)(left * (1 << 24)); - uint32_t vr = (uint32_t)(right * (1 << 24)); - - // Delegate volume control to effect in track effect chain if needed - // only one effect chain can be present on DirectOutputThread, so if - // there is one, the track is connected to it - if (!mEffectChains.isEmpty()) { - // Do not ramp volume if volume is controlled by effect - mEffectChains[0]->setVolume_l(&vl, &vr); - left = (float)vl / (1 << 24); - right = (float)vr / (1 << 24); - } - mOutput->stream->set_volume(mOutput->stream, left, right); - } - + processVolume_l(track, last); + if (last) { // reset retry count track->mRetryCount = kMaxTrackRetriesDirect; mActiveTrack = t; @@ -3265,7 +3519,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep if (--(track->mRetryCount) <= 0) { ALOGV("BUFFER TIMEOUT: remove(%d) from active list", track->name()); tracksToRemove->add(track); - } else if (i == (count -1)){ + } else if (last) { mixerStatus = MIXER_TRACKS_ENABLED; } } @@ -3273,21 +3527,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep } // remove all the tracks that need to be... 
- count = tracksToRemove->size(); - if (CC_UNLIKELY(count)) { - for (size_t i = 0 ; i < count ; i++) { - const sp& track = tracksToRemove->itemAt(i); - mActiveTracks.remove(track); - if (!mEffectChains.isEmpty()) { - ALOGV("stopping track on chain %p for session Id: %d", mEffectChains[0].get(), - track->sessionId()); - mEffectChains[0]->decActiveTrackCnt(); - } - if (track->isTerminated()) { - removeTrack_l(track); - } - } - } + removeTracks_l(*tracksToRemove); return mixerStatus; } @@ -3310,10 +3550,10 @@ void AudioFlinger::DirectOutputThread::threadLoop_mix() curBuf += buffer.frameCount * mFrameSize; mActiveTrack->releaseBuffer(&buffer); } + mCurrentWriteLength = curBuf - (int8_t *)mMixBuffer; sleepTime = 0; standbyTime = systemTime() + standbyDelay; mActiveTrack.clear(); - } void AudioFlinger::DirectOutputThread::threadLoop_sleepTime() @@ -3434,6 +3674,307 @@ void AudioFlinger::DirectOutputThread::cacheParameters_l() // ---------------------------------------------------------------------------- +AudioFlinger::AsyncCallbackThread::AsyncCallbackThread( + const sp& offloadThread) + : Thread(false /*canCallJava*/), + mOffloadThread(offloadThread), + mWriteBlocked(false), + mDraining(false) +{ +} + +AudioFlinger::AsyncCallbackThread::~AsyncCallbackThread() +{ +} + +void AudioFlinger::AsyncCallbackThread::onFirstRef() +{ + run("Offload Cbk", ANDROID_PRIORITY_URGENT_AUDIO); +} + +bool AudioFlinger::AsyncCallbackThread::threadLoop() +{ + while (!exitPending()) { + bool writeBlocked; + bool draining; + + { + Mutex::Autolock _l(mLock); + mWaitWorkCV.wait(mLock); + if (exitPending()) { + break; + } + writeBlocked = mWriteBlocked; + draining = mDraining; + ALOGV("AsyncCallbackThread mWriteBlocked %d mDraining %d", mWriteBlocked, mDraining); + } + { + sp offloadThread = mOffloadThread.promote(); + if (offloadThread != 0) { + if (writeBlocked == false) { + offloadThread->setWriteBlocked(false); + } + if (draining == false) { + offloadThread->setDraining(false); + } + } + } + } + return false; +} + +void AudioFlinger::AsyncCallbackThread::exit() +{ + ALOGV("AsyncCallbackThread::exit"); + Mutex::Autolock _l(mLock); + requestExit(); + mWaitWorkCV.broadcast(); +} + +void AudioFlinger::AsyncCallbackThread::setWriteBlocked(bool value) +{ + Mutex::Autolock _l(mLock); + mWriteBlocked = value; + if (!value) { + mWaitWorkCV.signal(); + } +} + +void AudioFlinger::AsyncCallbackThread::setDraining(bool value) +{ + Mutex::Autolock _l(mLock); + mDraining = value; + if (!value) { + mWaitWorkCV.signal(); + } +} + + +// ---------------------------------------------------------------------------- +AudioFlinger::OffloadThread::OffloadThread(const sp& audioFlinger, + AudioStreamOut* output, audio_io_handle_t id, uint32_t device) + : DirectOutputThread(audioFlinger, output, id, device, OFFLOAD), + mHwPaused(false), + mPausedBytesRemaining(0) +{ + mCallbackThread = new AudioFlinger::AsyncCallbackThread(this); +} + +AudioFlinger::OffloadThread::~OffloadThread() +{ + mPreviousTrack.clear(); +} + +void AudioFlinger::OffloadThread::threadLoop_exit() +{ + if (mFlushPending || mHwPaused) { + // If a flush is pending or track was paused, just discard buffered data + flushHw_l(); + } else { + mMixerStatus = MIXER_DRAIN_ALL; + threadLoop_drain(); + } + mCallbackThread->exit(); + PlaybackThread::threadLoop_exit(); +} + +AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTracks_l( + Vector< sp > *tracksToRemove +) +{ + ALOGV("OffloadThread::prepareTracks_l"); + size_t count = mActiveTracks.size(); + + 
mixer_state mixerStatus = MIXER_IDLE; + if (mFlushPending) { + flushHw_l(); + mFlushPending = false; + } + // find out which tracks need to be processed + for (size_t i = 0; i < count; i++) { + sp t = mActiveTracks[i].promote(); + // The track died recently + if (t == 0) { + continue; + } + Track* const track = t.get(); + audio_track_cblk_t* cblk = track->cblk(); + if (mPreviousTrack != NULL) { + if (t != mPreviousTrack) { + // Flush any data still being written from last track + mBytesRemaining = 0; + if (mPausedBytesRemaining) { + // Last track was paused so we also need to flush saved + // mixbuffer state and invalidate track so that it will + // re-submit that unwritten data when it is next resumed + mPausedBytesRemaining = 0; + // Invalidate is a bit drastic - would be more efficient + // to have a flag to tell client that some of the + // previously written data was lost + mPreviousTrack->invalidate(); + } + } + } + mPreviousTrack = t; + bool last = (i == (count - 1)); + if (track->isPausing()) { + track->setPaused(); + if (last) { + if (!mHwPaused) { + mOutput->stream->pause(mOutput->stream); + mHwPaused = true; + } + // If we were part way through writing the mixbuffer to + // the HAL we must save this until we resume + // BUG - this will be wrong if a different track is made active, + // in that case we want to discard the pending data in the + // mixbuffer and tell the client to present it again when the + // track is resumed + mPausedWriteLength = mCurrentWriteLength; + mPausedBytesRemaining = mBytesRemaining; + mBytesRemaining = 0; // stop writing + } + tracksToRemove->add(track); + } else if (track->framesReady() && track->isReady() && + !track->isPaused() && !track->isTerminated()) { + ALOGVV("OffloadThread: track %d s=%08x [OK]", track->name(), cblk->server); + if (track->mFillingUpStatus == Track::FS_FILLED) { + track->mFillingUpStatus = Track::FS_ACTIVE; + mLeftVolFloat = mRightVolFloat = 0; + if (track->mState == TrackBase::RESUMING) { + if (CC_UNLIKELY(mPausedBytesRemaining)) { + // Need to continue write that was interrupted + mCurrentWriteLength = mPausedWriteLength; + mBytesRemaining = mPausedBytesRemaining; + mPausedBytesRemaining = 0; + } + track->mState = TrackBase::ACTIVE; + } + } + + if (last) { + if (mHwPaused) { + mOutput->stream->resume(mOutput->stream); + mHwPaused = false; + // threadLoop_mix() will handle the case that we need to + // resume an interrupted write + } + // reset retry count + track->mRetryCount = kMaxTrackRetriesOffload; + mActiveTrack = t; + mixerStatus = MIXER_TRACKS_READY; + } + } else { + ALOGVV("OffloadThread: track %d s=%08x [NOT READY]", track->name(), cblk->server); + if (track->isStopping_1()) { + // Hardware buffer can hold a large amount of audio so we must + // wait for all current track's data to drain before we say + // that the track is stopped. 
+ if (mBytesRemaining == 0) { + // Only start draining when all data in mixbuffer + // has been written + ALOGV("OffloadThread: underrun and STOPPING_1 -> draining, STOPPING_2"); + track->mState = TrackBase::STOPPING_2; // so presentation completes after drain + sleepTime = 0; + standbyTime = systemTime() + standbyDelay; + if (last) { + mixerStatus = MIXER_DRAIN_TRACK; + if (mHwPaused) { + // It is possible to move from PAUSED to STOPPING_1 without + // a resume so we must ensure hardware is running + mOutput->stream->resume(mOutput->stream); + mHwPaused = false; + } + } + } + } else if (track->isStopping_2()) { + // Drain has completed, signal presentation complete + if (!mDraining || !last) { + track->mState = TrackBase::STOPPED; + size_t audioHALFrames = + (mOutput->stream->get_latency(mOutput->stream)*mSampleRate) / 1000; + size_t framesWritten = + mBytesWritten / audio_stream_frame_size(&mOutput->stream->common); + track->presentationComplete(framesWritten, audioHALFrames); + track->reset(); + tracksToRemove->add(track); + } + } else { + // No buffers for this track. Give it a few chances to + // fill a buffer, then remove it from active list. + if (--(track->mRetryCount) <= 0) { + ALOGV("OffloadThread: BUFFER TIMEOUT: remove(%d) from active list", + track->name()); + tracksToRemove->add(track); + } else if (last){ + mixerStatus = MIXER_TRACKS_ENABLED; + } + } + } + // compute volume for this track + processVolume_l(track, last); + } + // remove all the tracks that need to be... + removeTracks_l(*tracksToRemove); + + return mixerStatus; +} + +void AudioFlinger::OffloadThread::flushOutput_l() +{ + mFlushPending = true; +} + +// must be called with thread mutex locked +bool AudioFlinger::OffloadThread::waitingAsyncCallback_l() +{ + ALOGV("waitingAsyncCallback_l mWriteBlocked %d mDraining %d", mWriteBlocked, mDraining); + if (mUseAsyncWrite && (mWriteBlocked || mDraining)) { + return true; + } + return false; +} + +// must be called with thread mutex locked +bool AudioFlinger::OffloadThread::shouldStandby_l() +{ + bool TrackPaused = false; + + // do not put the HAL in standby when paused. AwesomePlayer clear the offloaded AudioTrack + // after a timeout and we will enter standby then. 
+ if (mTracks.size() > 0) { + TrackPaused = mTracks[mTracks.size() - 1]->isPaused(); + } + + return !mStandby && !TrackPaused; +} + + +bool AudioFlinger::OffloadThread::waitingAsyncCallback() +{ + Mutex::Autolock _l(mLock); + return waitingAsyncCallback_l(); +} + +void AudioFlinger::OffloadThread::flushHw_l() +{ + mOutput->stream->flush(mOutput->stream); + // Flush anything still waiting in the mixbuffer + mCurrentWriteLength = 0; + mBytesRemaining = 0; + mPausedWriteLength = 0; + mPausedBytesRemaining = 0; + if (mUseAsyncWrite) { + mWriteBlocked = false; + mDraining = false; + ALOG_ASSERT(mCallbackThread != 0); + mCallbackThread->setWriteBlocked(false); + mCallbackThread->setDraining(false); + } +} + +// ---------------------------------------------------------------------------- + AudioFlinger::DuplicatingThread::DuplicatingThread(const sp& audioFlinger, AudioFlinger::MixerThread* mainThread, audio_io_handle_t id) : MixerThread(audioFlinger, mainThread->getOutput(), id, mainThread->outDevice(), @@ -3460,6 +4001,7 @@ void AudioFlinger::DuplicatingThread::threadLoop_mix() } sleepTime = 0; writeFrames = mNormalFrameCount; + mCurrentWriteLength = mixBufferSize; standbyTime = systemTime() + standbyDelay; } @@ -3483,12 +4025,12 @@ void AudioFlinger::DuplicatingThread::threadLoop_sleepTime() } } -void AudioFlinger::DuplicatingThread::threadLoop_write() +ssize_t AudioFlinger::DuplicatingThread::threadLoop_write() { for (size_t i = 0; i < outputTracks.size(); i++) { outputTracks[i]->write(mMixBuffer, writeFrames); } - mBytesWritten += mixBufferSize; + return (ssize_t)mixBufferSize; } void AudioFlinger::DuplicatingThread::threadLoop_standby() @@ -3682,7 +4224,10 @@ bool AudioFlinger::RecordThread::threadLoop() continue; } if (mActiveTrack != 0) { - if (mActiveTrack->mState == TrackBase::PAUSING) { + if (mActiveTrack->isTerminated()) { + removeTrack_l(mActiveTrack); + mActiveTrack.clear(); + } else if (mActiveTrack->mState == TrackBase::PAUSING) { standby(); mActiveTrack.clear(); mStartStopCond.broadcast(); @@ -3701,9 +4246,6 @@ bool AudioFlinger::RecordThread::threadLoop() mStartStopCond.broadcast(); } mStandby = false; - } else if (mActiveTrack->mState == TrackBase::TERMINATED) { - removeTrack_l(mActiveTrack); - mActiveTrack.clear(); } } lockEffectChains_l(effectChains); @@ -4083,7 +4625,8 @@ status_t AudioFlinger::RecordThread::setSyncEvent(const sp& event) // destroyTrack_l() must be called with ThreadBase::mLock held void AudioFlinger::RecordThread::destroyTrack_l(const sp& track) { - track->mState = TrackBase::TERMINATED; + track->terminate(); + track->mState = TrackBase::STOPPED; // active tracks are removed by threadLoop() if (mActiveTrack != track) { removeTrack_l(track); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 365c790..7c7c6f0 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -28,7 +28,8 @@ public: MIXER, // Thread class is MixerThread DIRECT, // Thread class is DirectOutputThread DUPLICATING, // Thread class is DuplicatingThread - RECORD // Thread class is RecordThread + RECORD, // Thread class is RecordThread + OFFLOAD // Thread class is OffloadThread }; ThreadBase(const sp& audioFlinger, audio_io_handle_t id, @@ -129,6 +130,7 @@ public: size_t frameCount() const { return mNormalFrameCount; } // Return's the HAL's frame count i.e. fast mixer buffer size. 
size_t frameCountHAL() const { return mFrameCount; } + size_t frameSize() const { return mFrameSize; } // Should be "virtual status_t requestExitAndWait()" and override same // method in Thread, but Thread::requestExitAndWait() is not yet virtual. @@ -184,6 +186,8 @@ public: void lockEffectChains_l(Vector< sp >& effectChains); // unlock effect chains after process void unlockEffectChains(const Vector< sp >& effectChains); + // get a copy of mEffectChains vector + Vector< sp > getEffectChains_l() const { return mEffectChains; }; // set audio mode to all effect chains void setMode(audio_mode_t mode); // get effect module with corresponding ID on specified audio session @@ -329,11 +333,19 @@ public: enum mixer_state { MIXER_IDLE, // no active tracks MIXER_TRACKS_ENABLED, // at least one active track, but no track has any data ready - MIXER_TRACKS_READY // at least one active track, and at least one track has data + MIXER_TRACKS_READY, // at least one active track, and at least one track has data + MIXER_DRAIN_TRACK, // drain currently playing track + MIXER_DRAIN_ALL, // fully drain the hardware // standby mode does not have an enum value // suspend by audio policy manager is orthogonal to mixer state }; + // retry count before removing active track in case of underrun on offloaded thread: + // we need to make sure that AudioTrack client has enough time to send large buffers +//FIXME may be more appropriate if expressed in time units. Need to revise how underrun is handled + // for offloaded tracks + static const int8_t kMaxTrackRetriesOffload = 20; + PlaybackThread(const sp& audioFlinger, AudioStreamOut* output, audio_io_handle_t id, audio_devices_t device, type_t type); virtual ~PlaybackThread(); @@ -351,8 +363,10 @@ protected: // Code snippets that were lifted up out of threadLoop() virtual void threadLoop_mix() = 0; virtual void threadLoop_sleepTime() = 0; - virtual void threadLoop_write(); + virtual ssize_t threadLoop_write(); + virtual void threadLoop_drain(); virtual void threadLoop_standby(); + virtual void threadLoop_exit(); virtual void threadLoop_removeTracks(const Vector< sp >& tracksToRemove); // prepareTracks_l reads and writes mActiveTracks, and returns @@ -360,6 +374,19 @@ protected: // is responsible for clearing or destroying this Vector later on, when it // is safe to do so. That will drop the final ref count and destroy the tracks. virtual mixer_state prepareTracks_l(Vector< sp > *tracksToRemove) = 0; + void removeTracks_l(const Vector< sp >& tracksToRemove); + + void writeCallback(); + void setWriteBlocked(bool value); + void drainCallback(); + void setDraining(bool value); + + static int asyncCallback(stream_callback_event_t event, void *param, void *cookie); + + virtual bool waitingAsyncCallback(); + virtual bool waitingAsyncCallback_l(); + virtual bool shouldStandby_l(); + // ThreadBase virtuals virtual void preExit(); @@ -436,7 +463,8 @@ public: protected: - int16_t* mMixBuffer; + int16_t* mMixBuffer; // frame size aligned mix buffer + int8_t* mAllocMixBuffer; // mixer buffer allocation address // suspend count, > 0 means suspended. While suspended, the thread continues to pull from // tracks and mix, but doesn't write to HAL. 
A2DP and SCO HAL implementations can't handle @@ -489,8 +517,9 @@ private: PlaybackThread& operator = (const PlaybackThread&); status_t addTrack_l(const sp& track); - void destroyTrack_l(const sp& track); + bool destroyTrack_l(const sp& track); void removeTrack_l(const sp& track); + void signal_l(); void readOutputParameters(); @@ -538,6 +567,14 @@ private: // DUPLICATING only uint32_t writeFrames; + size_t mBytesRemaining; + size_t mCurrentWriteLength; + bool mUseAsyncWrite; + bool mWriteBlocked; + bool mDraining; + bool mSignalPending; + sp mCallbackThread; + private: // The HAL output sink is treated as non-blocking, but current implementation is blocking sp mOutputSink; @@ -561,7 +598,7 @@ public: protected: // accessed by both binder threads and within threadLoop(), lock on mutex needed unsigned mFastTrackAvailMask; // bit i set if fast track [i] is available - + virtual void flushOutput_l(); }; class MixerThread : public PlaybackThread { @@ -587,7 +624,7 @@ protected: virtual void cacheParameters_l(); // threadLoop snippets - virtual void threadLoop_write(); + virtual ssize_t threadLoop_write(); virtual void threadLoop_standby(); virtual void threadLoop_mix(); virtual void threadLoop_sleepTime(); @@ -644,17 +681,73 @@ protected: virtual void threadLoop_mix(); virtual void threadLoop_sleepTime(); -private: // volumes last sent to audio HAL with stream->set_volume() float mLeftVolFloat; float mRightVolFloat; + DirectOutputThread(const sp& audioFlinger, AudioStreamOut* output, + audio_io_handle_t id, uint32_t device, ThreadBase::type_t type); + void processVolume_l(Track *track, bool lastTrack); + // prepareTracks_l() tells threadLoop_mix() the name of the single active track sp mActiveTrack; public: virtual bool hasFastMixer() const { return false; } }; +class OffloadThread : public DirectOutputThread { +public: + + OffloadThread(const sp& audioFlinger, AudioStreamOut* output, + audio_io_handle_t id, uint32_t device); + virtual ~OffloadThread(); + +protected: + // threadLoop snippets + virtual mixer_state prepareTracks_l(Vector< sp > *tracksToRemove); + virtual void threadLoop_exit(); + virtual void flushOutput_l(); + + virtual bool waitingAsyncCallback(); + virtual bool waitingAsyncCallback_l(); + virtual bool shouldStandby_l(); + +private: + void flushHw_l(); + +private: + bool mHwPaused; + bool mFlushPending; + size_t mPausedWriteLength; // length in bytes of write interrupted by pause + size_t mPausedBytesRemaining; // bytes still waiting in mixbuffer after resume + sp mPreviousTrack; // used to detect track switch +}; + +class AsyncCallbackThread : public Thread { +public: + + AsyncCallbackThread(const sp& offloadThread); + + virtual ~AsyncCallbackThread(); + + // Thread virtuals + virtual bool threadLoop(); + + // RefBase + virtual void onFirstRef(); + + void exit(); + void setWriteBlocked(bool value); + void setDraining(bool value); + +private: + wp mOffloadThread; + bool mWriteBlocked; + bool mDraining; + Condition mWaitWorkCV; + Mutex mLock; +}; + class DuplicatingThread : public MixerThread { public: DuplicatingThread(const sp& audioFlinger, MixerThread* mainThread, @@ -674,7 +767,7 @@ protected: // threadLoop snippets virtual void threadLoop_mix(); virtual void threadLoop_sleepTime(); - virtual void threadLoop_write(); + virtual ssize_t threadLoop_write(); virtual void threadLoop_standby(); virtual void cacheParameters_l(); diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h index 55d96fa..e69d1d7 100644 --- a/services/audioflinger/TrackBase.h 
+++ b/services/audioflinger/TrackBase.h @@ -25,10 +25,10 @@ class TrackBase : public ExtendedAudioBufferProvider, public RefBase { public: enum track_state { IDLE, - TERMINATED, FLUSHED, STOPPED, - // next 2 states are currently used for fast tracks only + // next 2 states are currently used for fast tracks + // and offloaded tracks only STOPPING_1, // waiting for first underrun STOPPING_2, // waiting for presentation complete RESUMING, @@ -89,7 +89,7 @@ protected: return (mState == STOPPED || mState == FLUSHED); } - // for fast tracks only + // for fast tracks and offloaded tracks only bool isStopping() const { return mState == STOPPING_1 || mState == STOPPING_2; } @@ -101,7 +101,11 @@ protected: } bool isTerminated() const { - return mState == TERMINATED; + return mTerminated; + } + + void terminate() { + mTerminated = true; } bool step(); // mStepCount is an implicit input @@ -142,4 +146,5 @@ protected: const int mId; sp mTeeSink; sp mTeeSource; + bool mTerminated; }; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index c45daae..e674a50 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -89,7 +89,8 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mSessionId(sessionId), mIsOut(isOut), mServerProxy(NULL), - mId(android_atomic_inc(&nextTrackId)) + mId(android_atomic_inc(&nextTrackId)), + mTerminated(false) { // client == 0 implies sharedBuffer == 0 ALOG_ASSERT(!(client == 0 && sharedBuffer != 0)); @@ -252,7 +253,7 @@ void AudioFlinger::TrackHandle::pause() { } status_t AudioFlinger::TrackHandle::setParameters(const String8& keyValuePairs) { - return INVALID_OPERATION; // stub function + return mTrack->setParameters(keyValuePairs); } status_t AudioFlinger::TrackHandle::attachAuxEffect(int EffectId) @@ -328,7 +329,8 @@ AudioFlinger::PlaybackThread::Track::Track( mUnderrunCount(0), mCachedVolume(1.0), mIsInvalid(false), - mAudioTrackServerProxy(NULL) + mAudioTrackServerProxy(NULL), + mResumeToStopping(false) { if (mCblk != NULL) { if (sharedBuffer == 0) { @@ -386,27 +388,19 @@ void AudioFlinger::PlaybackThread::Track::destroy() { // scope for mLock sp thread = mThread.promote(); if (thread != 0) { - if (!isOutputTrack()) { - if (mState == ACTIVE || mState == RESUMING) { - AudioSystem::stopOutput(thread->id(), mStreamType, mSessionId); - -#ifdef ADD_BATTERY_DATA - // to track the speaker usage - addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop); -#endif - } - AudioSystem::releaseOutput(thread->id()); - } Mutex::Autolock _l(thread->mLock); PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); - playbackThread->destroyTrack_l(this); + bool wasActive = playbackThread->destroyTrack_l(this); + if (!isOutputTrack() && !wasActive) { + AudioSystem::releaseOutput(thread->id()); + } } } } /*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result) { - result.append(" Name Client Type Fmt Chn mask Session StpCnt fCount S F SRate " + result.append(" Name Client Type Fmt Chn mask Session StpCnt fCount S F SRate " "L dB R dB Server Main buf Aux Buf Flags Underruns\n"); } @@ -420,40 +414,41 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) } track_state state = mState; char stateChar; - switch (state) { - case IDLE: - stateChar = 'I'; - break; - case TERMINATED: + if (isTerminated()) { stateChar = 'T'; - break; - case STOPPING_1: - stateChar = 's'; - break; - case STOPPING_2: - stateChar = '5'; - break; - case STOPPED: - stateChar = 'S'; - break; - case 
RESUMING: - stateChar = 'R'; - break; - case ACTIVE: - stateChar = 'A'; - break; - case PAUSING: - stateChar = 'p'; - break; - case PAUSED: - stateChar = 'P'; - break; - case FLUSHED: - stateChar = 'F'; - break; - default: - stateChar = '?'; - break; + } else { + switch (state) { + case IDLE: + stateChar = 'I'; + break; + case STOPPING_1: + stateChar = 's'; + break; + case STOPPING_2: + stateChar = '5'; + break; + case STOPPED: + stateChar = 'S'; + break; + case RESUMING: + stateChar = 'R'; + break; + case ACTIVE: + stateChar = 'A'; + break; + case PAUSING: + stateChar = 'p'; + break; + case PAUSED: + stateChar = 'P'; + break; + case FLUSHED: + stateChar = 'F'; + break; + default: + stateChar = '?'; + break; + } } char nowInUnderrun; switch (mObservedUnderruns.mBitFields.mMostRecent) { @@ -470,7 +465,7 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) nowInUnderrun = '?'; break; } - snprintf(&buffer[7], size-7, " %6d %4u %3u 0x%08x %7u %6u %6u %1c %1d %5u %5.2g %5.2g " + snprintf(&buffer[7], size-7, " %6d %4u 0x%08x 0x%08x %7u %6u %6u %1c %1d %5u %5.2g %5.2g " "0x%08x 0x%08x 0x%08x %#5x %9u%c\n", (mClient == 0) ? getpid_cached : mClient->pid(), mStreamType, @@ -555,32 +550,33 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track + if (state == PAUSED) { - mState = TrackBase::RESUMING; - ALOGV("PAUSED => RESUMING (%d) on thread %p", mName, this); + if (mResumeToStopping) { + // happened we need to resume to STOPPING_1 + mState = TrackBase::STOPPING_1; + ALOGV("PAUSED => STOPPING_1 (%d) on thread %p", mName, this); + } else { + mState = TrackBase::RESUMING; + ALOGV("PAUSED => RESUMING (%d) on thread %p", mName, this); + } } else { mState = TrackBase::ACTIVE; ALOGV("? 
=> ACTIVE (%d) on thread %p", mName, this); } - if (!isOutputTrack() && state != ACTIVE && state != RESUMING) { - thread->mLock.unlock(); - status = AudioSystem::startOutput(thread->id(), mStreamType, mSessionId); - thread->mLock.lock(); - -#ifdef ADD_BATTERY_DATA - // to track the speaker usage - if (status == NO_ERROR) { - addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStart); + PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); + status = playbackThread->addTrack_l(this); + if (status == INVALID_OPERATION || status == PERMISSION_DENIED) { + triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); + // restore previous state if start was rejected by policy manager + if (status == PERMISSION_DENIED) { + mState = state; } -#endif } - if (status == NO_ERROR) { - PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); - playbackThread->addTrack_l(this); - } else { - mState = state; - triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); + // track was already in the active list, not a problem + if (status == ALREADY_EXISTS) { + status = NO_ERROR; } } else { status = BAD_VALUE; @@ -601,26 +597,18 @@ void AudioFlinger::PlaybackThread::Track::stop() if (playbackThread->mActiveTracks.indexOf(this) < 0) { reset(); mState = STOPPED; - } else if (!isFastTrack()) { + } else if (!isFastTrack() && !isOffloaded()) { mState = STOPPED; } else { - // prepareTracks_l() will set state to STOPPING_2 after next underrun, - // and then to STOPPED and reset() when presentation is complete + // For fast tracks prepareTracks_l() will set state to STOPPING_2 + // presentation is complete + // For an offloaded track this starts a drain and state will + // move to STOPPING_2 when drain completes and then STOPPED mState = STOPPING_1; } ALOGV("not stopping/stopped => stopping/stopped (%d) on thread %p", mName, playbackThread); } - if (!isOutputTrack() && (state == ACTIVE || state == RESUMING)) { - thread->mLock.unlock(); - AudioSystem::stopOutput(thread->id(), mStreamType, mSessionId); - thread->mLock.lock(); - -#ifdef ADD_BATTERY_DATA - // to track the speaker usage - addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop); -#endif - } } } @@ -630,19 +618,27 @@ void AudioFlinger::PlaybackThread::Track::pause() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - if (mState == ACTIVE || mState == RESUMING) { + PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); + switch (mState) { + case STOPPING_1: + case STOPPING_2: + if (!isOffloaded()) { + /* nothing to do if track is not offloaded */ + break; + } + + // Offloaded track was draining, we need to carry on draining when resumed + mResumeToStopping = true; + // fall through... 
+ case ACTIVE: + case RESUMING: mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); - if (!isOutputTrack()) { - thread->mLock.unlock(); - AudioSystem::stopOutput(thread->id(), mStreamType, mSessionId); - thread->mLock.lock(); - -#ifdef ADD_BATTERY_DATA - // to track the speaker usage - addBatteryData(IMediaPlayerService::kBatteryDataAudioFlingerStop); -#endif - } + playbackThread->signal_l(); + break; + + default: + break; } } } @@ -653,21 +649,52 @@ void AudioFlinger::PlaybackThread::Track::flush() sp thread = mThread.promote(); if (thread != 0) { Mutex::Autolock _l(thread->mLock); - if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && mState != PAUSED && - mState != PAUSING && mState != IDLE && mState != FLUSHED) { - return; - } - // No point remaining in PAUSED state after a flush => go to - // FLUSHED state - mState = FLUSHED; - // do not reset the track if it is still in the process of being stopped or paused. - // this will be done by prepareTracks_l() when the track is stopped. - // prepareTracks_l() will see mState == FLUSHED, then - // remove from active track list, reset(), and trigger presentation complete PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); - if (playbackThread->mActiveTracks.indexOf(this) < 0) { + + if (isOffloaded()) { + // If offloaded we allow flush during any state except terminated + // and keep the track active to avoid problems if user is seeking + // rapidly and underlying hardware has a significant delay handling + // a pause + if (isTerminated()) { + return; + } + + ALOGV("flush: offload flush"); reset(); + + if (mState == STOPPING_1 || mState == STOPPING_2) { + ALOGV("flushed in STOPPING_1 or 2 state, change state to ACTIVE"); + mState = ACTIVE; + } + + if (mState == ACTIVE) { + ALOGV("flush called in active state, resetting buffer time out retry count"); + mRetryCount = PlaybackThread::kMaxTrackRetriesOffload; + } + + mResumeToStopping = false; + } else { + if (mState != STOPPING_1 && mState != STOPPING_2 && mState != STOPPED && + mState != PAUSED && mState != PAUSING && mState != IDLE && mState != FLUSHED) { + return; + } + // No point remaining in PAUSED state after a flush => go to + // FLUSHED state + mState = FLUSHED; + // do not reset the track if it is still in the process of being stopped or paused. + // this will be done by prepareTracks_l() when the track is stopped. + // prepareTracks_l() will see mState == FLUSHED, then + // remove from active track list, reset(), and trigger presentation complete + if (playbackThread->mActiveTracks.indexOf(this) < 0) { + reset(); + } } + // Prevent flush being lost if the track is flushed and then resumed + // before mixer thread can run. 
This is important when offloading + // because the hardware buffer could hold a large amount of audio + playbackThread->flushOutput_l(); + playbackThread->signal_l(); } } @@ -688,6 +715,20 @@ void AudioFlinger::PlaybackThread::Track::reset() } } +status_t AudioFlinger::PlaybackThread::Track::setParameters(const String8& keyValuePairs) +{ + sp thread = mThread.promote(); + if (thread == 0) { + ALOGE("thread is dead"); + return FAILED_TRANSACTION; + } else if ((thread->type() == ThreadBase::DIRECT) || + (thread->type() == ThreadBase::OFFLOAD)) { + return thread->setParameters(keyValuePairs); + } else { + return PERMISSION_DENIED; + } +} + status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId) { status_t status = DEAD_OBJECT; @@ -749,15 +790,23 @@ bool AudioFlinger::PlaybackThread::Track::presentationComplete(size_t framesWrit // a track is considered presented when the total number of frames written to audio HAL // corresponds to the number of frames written when presentationComplete() is called for the // first time (mPresentationCompleteFrames == 0) plus the buffer filling status at that time. + // For an offloaded track the HAL+h/w delay is variable so a HAL drain() is used + // to detect when all frames have been played. In this case framesWritten isn't + // useful because it doesn't always reflect whether there is data in the h/w + // buffers, particularly if a track has been paused and resumed during draining + ALOGV("presentationComplete() mPresentationCompleteFrames %d framesWritten %d", + mPresentationCompleteFrames, framesWritten); if (mPresentationCompleteFrames == 0) { mPresentationCompleteFrames = framesWritten + audioHalFrames; ALOGV("presentationComplete() reset: mPresentationCompleteFrames %d audioHalFrames %d", mPresentationCompleteFrames, audioHalFrames); } - if (framesWritten >= mPresentationCompleteFrames) { + + if (framesWritten >= mPresentationCompleteFrames || isOffloaded()) { ALOGV("presentationComplete() session %d complete: framesWritten %d", mSessionId, framesWritten); triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); + mAudioTrackServerProxy->setStreamEndDone(); return true; } return false; @@ -803,7 +852,7 @@ uint32_t AudioFlinger::PlaybackThread::Track::getVolumeLR() status_t AudioFlinger::PlaybackThread::Track::setSyncEvent(const sp& event) { - if (mState == TERMINATED || mState == PAUSED || + if (isTerminated() || mState == PAUSED || ((framesReady() == 0) && ((mSharedBuffer != 0) || (mState == STOPPED)))) { ALOGW("Track::setSyncEvent() in invalid state %d on session %d %s mode, framesReady %d ", -- cgit v1.1 From b1a270d1e926fb9a01b4265a7675ed0c2c8f4868 Mon Sep 17 00:00:00 2001 From: Richard Fitzgerald Date: Tue, 14 May 2013 12:12:21 +0100 Subject: libmedia: offloaded playback support - start() returns a status so that upper layers can recreate a non offloaded track in case of error. - Added states to handle offloaded tracks specific: - waiting for stream end (drain) notification by audio flinger - allow pause while waiting for stream end notification - getPosition() queries the render position directly from audio HAL. - disable APIs not applicable to offloaded tracks - Modified track restoring behavior for invalidated offloaded tracks: just send the callback and wait for upper layers to create a new track. - Added wait for stream end management in audio track client proxy. Similar to obtainBuffer and should be factored in. 
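Editorial sketch of how the additions described above fit together on the client side. This is not code from the patch; the event and status names are taken from the diffs below, while MyPlayer, onDrainComplete(), recreateTrack() and fallbackToPcmTrack() are purely illustrative:

    // Hypothetical AudioTrack callback: EVENT_STREAM_END signals that an offloaded
    // drain finished, EVENT_NEW_IAUDIOTRACK that the track was invalidated and the
    // upper layer must recreate it (see processAudioBuffer() below).
    static void myCallback(int event, void* user, void* info) {
        MyPlayer* me = static_cast<MyPlayer*>(user);
        switch (event) {
        case AudioTrack::EVENT_MORE_DATA:
            // fill static_cast<AudioTrack::Buffer*>(info) with audio data
            break;
        case AudioTrack::EVENT_STREAM_END:
            me->onDrainComplete();      // all queued offloaded data was rendered
            break;
        case AudioTrack::EVENT_NEW_IAUDIOTRACK:
            me->recreateTrack();        // offloaded track cannot be restored in place
            break;
        default:
            break;
        }
    }

    // start() now returns a status so callers can fall back to a non-offloaded path.
    status_t err = track->start();
    if (err != NO_ERROR) {
        fallbackToPcmTrack();           // hypothetical recovery path
    }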
Change-Id: I0fc48117946364cb255afd653195498891f622bd Signed-off-by: Eric Laurent --- include/media/AudioTrack.h | 12 +- include/private/media/AudioTrackShared.h | 3 +- media/libmedia/AudioTrack.cpp | 225 +++++++++++++++++++++++++------ media/libmedia/AudioTrackShared.cpp | 133 +++++++++++++++++- media/libmedia/IAudioFlinger.cpp | 15 ++- media/libmedia/IAudioPolicyService.cpp | 32 ++++- 6 files changed, 363 insertions(+), 57 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 58e0deb..da13a7f 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -277,7 +277,7 @@ public: * make it active. If set, the callback will start being called. * If the track was previously paused, volume is ramped up over the first mix buffer. */ - void start(); + status_t start(); /* Stop a track. * In static buffer mode, the track is stopped immediately. @@ -635,11 +635,12 @@ protected: void setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount); audio_io_handle_t getOutput_l(); - status_t getPosition_l(uint32_t *position); - // FIXME enum is faster than strcmp() for parameter 'from' status_t restoreTrack_l(const char *from); + bool isOffloaded() const + { return (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0; } + // may be changed if IAudioTrack is re-created sp mAudioTrack; sp mCblkMemory; @@ -676,7 +677,9 @@ protected: STATE_ACTIVE, STATE_STOPPED, STATE_PAUSED, + STATE_PAUSED_STOPPING, STATE_FLUSHED, + STATE_STOPPING, } mState; callback_t mCbf; // callback handler for events, or NULL @@ -694,7 +697,7 @@ protected: // These are private to processAudioBuffer(), and are not protected by a lock uint32_t mRemainingFrames; // number of frames to request in obtainBuffer() bool mRetryOnPartialBuffer; // sleep and retry after partial obtainBuffer() - int mObservedSequence; // last observed value of mSequence + uint32_t mObservedSequence; // last observed value of mSequence sp mSharedBuffer; uint32_t mLoopPeriod; // in frames, zero means looping is disabled @@ -736,6 +739,7 @@ private: sp mDeathNotifier; uint32_t mSequence; // incremented for each new IAudioTrack attempt + audio_io_handle_t mOutput; // cached output io handle }; class TimedAudioTrack : public AudioTrack diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 6129c80..b890180 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -296,6 +296,7 @@ public: bool getStreamEndDone() const; + status_t waitStreamEndDone(const struct timespec *requested); }; class StaticAudioTrackClientProxy : public AudioTrackClientProxy { @@ -379,8 +380,8 @@ public: protected: size_t mAvailToClient; // estimated frames available to client prior to releaseBuffer() -private: int32_t mFlush; // our copy of cblk->u.mStreaming.mFlush, for streaming output only +private: bool mDeferWake; // whether another releaseBuffer() is expected soon }; diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 7b6b38d..3653b7f 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -27,7 +27,9 @@ #include #include -#define WAIT_PERIOD_MS 10 +#define WAIT_PERIOD_MS 10 +#define WAIT_STREAM_END_TIMEOUT_SEC 120 + namespace android { // --------------------------------------------------------------------------- @@ -141,6 +143,7 @@ AudioTrack::~AudioTrack() // Otherwise the callback thread will never exit. 
stop(); if (mAudioTrackThread != 0) { + mProxy->interrupt(); mAudioTrackThread->requestExit(); // see comment in AudioTrack.h mAudioTrackThread->requestExitAndWait(); mAudioTrackThread.clear(); @@ -224,6 +227,8 @@ status_t AudioTrack::set( return INVALID_OPERATION; } + mOutput = 0; + // handle default values first. if (streamType == AUDIO_STREAM_DEFAULT) { streamType = AUDIO_STREAM_MUSIC; @@ -259,7 +264,12 @@ status_t AudioTrack::set( } // force direct flag if format is not linear PCM - if (!audio_is_linear_pcm(format)) { + // or offload was requested + if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) + || !audio_is_linear_pcm(format)) { + ALOGV( (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) + ? "Offload request, forcing to Direct Output" + : "Not linear PCM, forcing to Direct Output"); flags = (audio_output_flags_t) // FIXME why can't we allow direct AND fast? ((flags | AUDIO_OUTPUT_FLAG_DIRECT) & ~AUDIO_OUTPUT_FLAG_FAST); @@ -325,9 +335,14 @@ status_t AudioTrack::set( if (status != NO_ERROR) { if (mAudioTrackThread != 0) { - mAudioTrackThread->requestExit(); + mAudioTrackThread->requestExit(); // see comment in AudioTrack.h + mAudioTrackThread->requestExitAndWait(); mAudioTrackThread.clear(); } + //Use of direct and offloaded output streams is ref counted by audio policy manager. + // As getOutput was called above and resulted in an output stream to be opened, + // we need to release it. + AudioSystem::releaseOutput(output); return status; } @@ -346,23 +361,29 @@ status_t AudioTrack::set( mSequence = 1; mObservedSequence = mSequence; mInUnderrun = false; + mOutput = output; return NO_ERROR; } // ------------------------------------------------------------------------- -void AudioTrack::start() +status_t AudioTrack::start() { AutoMutex lock(mLock); + if (mState == STATE_ACTIVE) { - return; + return INVALID_OPERATION; } mInUnderrun = true; State previousState = mState; - mState = STATE_ACTIVE; + if (previousState == STATE_PAUSED_STOPPING) { + mState = STATE_STOPPING; + } else { + mState = STATE_ACTIVE; + } if (previousState == STATE_STOPPED || previousState == STATE_FLUSHED) { // reset current position as seen by client to 0 mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition()); @@ -372,7 +393,11 @@ void AudioTrack::start() sp t = mAudioTrackThread; if (t != 0) { - t->resume(); + if (previousState == STATE_STOPPING) { + mProxy->interrupt(); + } else { + t->resume(); + } } else { mPreviousPriority = getpriority(PRIO_PROCESS, 0); get_sched_policy(0, &mPreviousSchedulingGroup); @@ -394,14 +419,16 @@ void AudioTrack::start() ALOGE("start() status %d", status); mState = previousState; if (t != 0) { - t->pause(); + if (previousState != STATE_STOPPING) { + t->pause(); + } } else { setpriority(PRIO_PROCESS, 0, mPreviousPriority); set_sched_policy(0, mPreviousSchedulingGroup); } } - // FIXME discarding status + return status; } void AudioTrack::stop() @@ -412,7 +439,12 @@ void AudioTrack::stop() return; } - mState = STATE_STOPPED; + if (isOffloaded()) { + mState = STATE_STOPPING; + } else { + mState = STATE_STOPPED; + } + mProxy->interrupt(); mAudioTrack->stop(); // the playback head position will reset to 0, so if a marker is set, we need @@ -426,9 +458,12 @@ void AudioTrack::stop() flush_l(); } #endif + sp t = mAudioTrackThread; if (t != 0) { - t->pause(); + if (!isOffloaded()) { + t->pause(); + } } else { setpriority(PRIO_PROCESS, 0, mPreviousPriority); set_sched_policy(0, mPreviousSchedulingGroup); @@ -461,8 +496,12 @@ void AudioTrack::flush_l() mMarkerPosition = 0; mMarkerReached = 
false; mUpdatePeriod = 0; + mRefreshRemaining = true; mState = STATE_FLUSHED; + if (isOffloaded()) { + mProxy->interrupt(); + } mProxy->flush(); mAudioTrack->flush(); } @@ -470,10 +509,13 @@ void AudioTrack::flush_l() void AudioTrack::pause() { AutoMutex lock(mLock); - if (mState != STATE_ACTIVE) { + if (mState == STATE_ACTIVE) { + mState = STATE_PAUSED; + } else if (mState == STATE_STOPPING) { + mState = STATE_PAUSED_STOPPING; + } else { return; } - mState = STATE_PAUSED; mProxy->interrupt(); mAudioTrack->pause(); } @@ -520,7 +562,7 @@ void AudioTrack::getAuxEffectSendLevel(float* level) const status_t AudioTrack::setSampleRate(uint32_t rate) { - if (mIsTimed) { + if (mIsTimed || isOffloaded()) { return INVALID_OPERATION; } @@ -552,7 +594,7 @@ uint32_t AudioTrack::getSampleRate() const status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount) { - if (mSharedBuffer == 0 || mIsTimed) { + if (mSharedBuffer == 0 || mIsTimed || isOffloaded()) { return INVALID_OPERATION; } @@ -586,7 +628,7 @@ void AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount) status_t AudioTrack::setMarkerPosition(uint32_t marker) { // The only purpose of setting marker position is to get a callback - if (mCbf == NULL) { + if (mCbf == NULL || isOffloaded()) { return INVALID_OPERATION; } @@ -599,6 +641,9 @@ status_t AudioTrack::setMarkerPosition(uint32_t marker) status_t AudioTrack::getMarkerPosition(uint32_t *marker) const { + if (isOffloaded()) { + return INVALID_OPERATION; + } if (marker == NULL) { return BAD_VALUE; } @@ -612,19 +657,21 @@ status_t AudioTrack::getMarkerPosition(uint32_t *marker) const status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod) { // The only purpose of setting position update period is to get a callback - if (mCbf == NULL) { + if (mCbf == NULL || isOffloaded()) { return INVALID_OPERATION; } AutoMutex lock(mLock); mNewPosition = mProxy->getPosition() + updatePeriod; mUpdatePeriod = updatePeriod; - return NO_ERROR; } status_t AudioTrack::getPositionUpdatePeriod(uint32_t *updatePeriod) const { + if (isOffloaded()) { + return INVALID_OPERATION; + } if (updatePeriod == NULL) { return BAD_VALUE; } @@ -637,7 +684,7 @@ status_t AudioTrack::getPositionUpdatePeriod(uint32_t *updatePeriod) const status_t AudioTrack::setPosition(uint32_t position) { - if (mSharedBuffer == 0 || mIsTimed) { + if (mSharedBuffer == 0 || mIsTimed || isOffloaded()) { return INVALID_OPERATION; } if (position > mFrameCount) { @@ -670,10 +717,19 @@ status_t AudioTrack::getPosition(uint32_t *position) const } AutoMutex lock(mLock); - // IAudioTrack::stop() isn't synchronous; we don't know when presentation completes - *position = (mState == STATE_STOPPED || mState == STATE_FLUSHED) ? 0 : - mProxy->getPosition(); + if (isOffloaded()) { + uint32_t dspFrames = 0; + if (mOutput != 0) { + uint32_t halFrames; + AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames); + } + *position = dspFrames; + } else { + // IAudioTrack::stop() isn't synchronous; we don't know when presentation completes + *position = (mState == STATE_STOPPED || mState == STATE_FLUSHED) ? 
0 : + mProxy->getPosition(); + } return NO_ERROR; } @@ -693,7 +749,7 @@ status_t AudioTrack::getBufferPosition(size_t *position) status_t AudioTrack::reload() { - if (mSharedBuffer == 0 || mIsTimed) { + if (mSharedBuffer == 0 || mIsTimed || isOffloaded()) { return INVALID_OPERATION; } @@ -713,14 +769,18 @@ status_t AudioTrack::reload() audio_io_handle_t AudioTrack::getOutput() { AutoMutex lock(mLock); - return getOutput_l(); + return mOutput; } // must be called with mLock held audio_io_handle_t AudioTrack::getOutput_l() { - return AudioSystem::getOutput(mStreamType, - mSampleRate, mFormat, mChannelMask, mFlags); + if (mOutput) { + return mOutput; + } else { + return AudioSystem::getOutput(mStreamType, + mSampleRate, mFormat, mChannelMask, mFlags); + } } status_t AudioTrack::attachAuxEffect(int effectId) @@ -791,7 +851,9 @@ status_t AudioTrack::createTrack_l( } frameCount = afFrameCount; } - + if (mNotificationFramesAct != frameCount) { + mNotificationFramesAct = frameCount; + } } else if (sharedBuffer != 0) { // Ensure that buffer alignment matches channel count @@ -875,6 +937,10 @@ status_t AudioTrack::createTrack_l( } } + if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) { + trackFlags |= IAudioFlinger::TRACK_OFFLOAD; + } + sp track = audioFlinger->createTrack(streamType, sampleRate, // AudioFlinger only sees 16-bit PCM @@ -937,6 +1003,17 @@ status_t AudioTrack::createTrack_l( } } } + if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) { + if (trackFlags & IAudioFlinger::TRACK_OFFLOAD) { + ALOGV("AUDIO_OUTPUT_FLAG_OFFLOAD successful"); + } else { + ALOGW("AUDIO_OUTPUT_FLAG_OFFLOAD denied by server"); + flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD); + mFlags = flags; + return NO_INIT; + } + } + mRefreshRemaining = true; // Starting address of buffers in shared memory. If there is a shared buffer, buffers @@ -1040,6 +1117,9 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, const struct timespec *re if (newSequence == oldSequence) { status = restoreTrack_l("obtainBuffer"); if (status != NO_ERROR) { + buffer.mFrameCount = 0; + buffer.mRaw = NULL; + buffer.mNonContig = 0; break; } } @@ -1050,6 +1130,14 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, const struct timespec *re proxy = mProxy; iMem = mCblkMemory; + if (mState == STATE_STOPPING) { + status = -EINTR; + buffer.mFrameCount = 0; + buffer.mRaw = NULL; + buffer.mNonContig = 0; + break; + } + // Non-blocking if track is stopped or paused if (mState != STATE_ACTIVE) { requested = &ClientProxy::kNonBlocking; @@ -1255,12 +1343,18 @@ nsecs_t AudioTrack::processAudioBuffer(const sp& thread) // Check for track invalidation if (flags & CBLK_INVALID) { - (void) restoreTrack_l("processAudioBuffer"); - mLock.unlock(); - // Run again immediately, but with a new IAudioTrack - return 0; + // for offloaded tracks restoreTrack_l() will just update the sequence and clear + // AudioSystem cache. 
We should not exit here but after calling the callback so + // that the upper layers can recreate the track + if (!isOffloaded() || (mSequence == mObservedSequence)) { + status_t status = restoreTrack_l("processAudioBuffer"); + mLock.unlock(); + // Run again immediately, but with a new IAudioTrack + return 0; + } } + bool waitStreamEnd = mState == STATE_STOPPING; bool active = mState == STATE_ACTIVE; // Manage underrun callback, must be done under lock to avoid race with releaseBuffer() @@ -1314,7 +1408,7 @@ nsecs_t AudioTrack::processAudioBuffer(const sp& thread) mRetryOnPartialBuffer = false; } size_t misalignment = mProxy->getMisalignment(); - int32_t sequence = mSequence; + uint32_t sequence = mSequence; // These fields don't need to be cached, because they are assigned only by set(): // mTransfer, mCbf, mUserData, mFormat, mFrameSize, mFrameSizeAF, mFlags @@ -1322,6 +1416,38 @@ nsecs_t AudioTrack::processAudioBuffer(const sp& thread) mLock.unlock(); + if (waitStreamEnd) { + AutoMutex lock(mLock); + + sp proxy = mProxy; + sp iMem = mCblkMemory; + + struct timespec timeout; + timeout.tv_sec = WAIT_STREAM_END_TIMEOUT_SEC; + timeout.tv_nsec = 0; + + mLock.unlock(); + status_t status = mProxy->waitStreamEndDone(&timeout); + mLock.lock(); + switch (status) { + case NO_ERROR: + case DEAD_OBJECT: + case TIMED_OUT: + mLock.unlock(); + mCbf(EVENT_STREAM_END, mUserData, NULL); + mLock.lock(); + if (mState == STATE_STOPPING) { + mState = STATE_STOPPED; + if (status != DEAD_OBJECT) { + return NS_INACTIVE; + } + } + return 0; + default: + return 0; + } + } + // perform callbacks while unlocked if (newUnderrun) { mCbf(EVENT_UNDERRUN, mUserData, NULL); @@ -1343,9 +1469,14 @@ nsecs_t AudioTrack::processAudioBuffer(const sp& thread) newPosition += updatePeriod; newPosCount--; } + if (mObservedSequence != sequence) { mObservedSequence = sequence; mCbf(EVENT_NEW_IAUDIOTRACK, mUserData, NULL); + // for offloaded tracks, just wait for the upper layers to recreate the track + if (isOffloaded()) { + return NS_INACTIVE; + } } // if inactive, then don't run me again until re-started @@ -1404,10 +1535,11 @@ nsecs_t AudioTrack::processAudioBuffer(const sp& thread) "obtainBuffer() err=%d frameCount=%u", err, audioBuffer.frameCount); requested = &ClientProxy::kNonBlocking; size_t avail = audioBuffer.frameCount + nonContig; - ALOGV("obtainBuffer(%u) returned %u = %u + %u", - mRemainingFrames, avail, audioBuffer.frameCount, nonContig); + ALOGV("obtainBuffer(%u) returned %u = %u + %u err %d", + mRemainingFrames, avail, audioBuffer.frameCount, nonContig, err); if (err != NO_ERROR) { - if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR) { + if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR || + (isOffloaded() && (err == DEAD_OBJECT))) { return 0; } ALOGE("Error %d obtaining an audio buffer, giving up.", err); @@ -1500,7 +1632,8 @@ nsecs_t AudioTrack::processAudioBuffer(const sp& thread) status_t AudioTrack::restoreTrack_l(const char *from) { - ALOGW("dead IAudioTrack, creating a new one from %s()", from); + ALOGW("dead IAudioTrack, %s, creating a new one from %s()", + isOffloaded() ? 
"Offloaded" : "PCM", from); ++mSequence; status_t result; @@ -1508,6 +1641,14 @@ status_t AudioTrack::restoreTrack_l(const char *from) // output parameters in getOutput_l() and createTrack_l() AudioSystem::clearAudioConfigCache(); + if (isOffloaded()) { + return DEAD_OBJECT; + } + + // force new output query from audio policy manager; + mOutput = 0; + audio_io_handle_t output = getOutput_l(); + // if the new IAudioTrack is created, createTrack_l() will modify the // following member variables: mAudioTrack, mCblkMemory and mCblk. // It will also delete the strong references on previous IAudioTrack and IMemory @@ -1520,7 +1661,7 @@ status_t AudioTrack::restoreTrack_l(const char *from) mReqFrameCount, // so that frame count never goes down mFlags, mSharedBuffer, - getOutput_l(), + output, position /*epoch*/); if (result == NO_ERROR) { @@ -1549,6 +1690,10 @@ status_t AudioTrack::restoreTrack_l(const char *from) } } if (result != NO_ERROR) { + //Use of direct and offloaded output streams is ref counted by audio policy manager. + // As getOutput was called above and resulted in an output stream to be opened, + // we need to release it. + AudioSystem::releaseOutput(output); ALOGW("restoreTrack_l() failed status %d", result); mState = STATE_STOPPED; } @@ -1568,7 +1713,11 @@ status_t AudioTrack::setParameters(const String8& keyValuePairs) String8 AudioTrack::getParameters(const String8& keys) { - return String8::empty(); + if (mOutput) { + return AudioSystem::getParameters(mOutput, keys); + } else { + return String8::empty(); + } } status_t AudioTrack::dump(int fd, const Vector& args) const diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index bd43ad2..aa45a2f 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -200,7 +200,7 @@ status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *reques ts = &remaining; break; default: - LOG_FATAL("%s timeout=%d", timeout); + LOG_FATAL("obtainBuffer() timeout=%d", timeout); ts = NULL; break; } @@ -259,8 +259,9 @@ end: requested = &kNonBlocking; } if (measure) { - ALOGV("requested %d.%03d elapsed %d.%03d", requested->tv_sec, requested->tv_nsec / 1000000, - total.tv_sec, total.tv_nsec / 1000000); + ALOGV("requested %ld.%03ld elapsed %ld.%03ld", + requested->tv_sec, requested->tv_nsec / 1000000, + total.tv_sec, total.tv_nsec / 1000000); } return status; } @@ -323,13 +324,120 @@ void AudioTrackClientProxy::flush() } bool AudioTrackClientProxy::clearStreamEndDone() { - return android_atomic_and(~CBLK_STREAM_END_DONE, &mCblk->flags) & CBLK_STREAM_END_DONE; + return (android_atomic_and(~CBLK_STREAM_END_DONE, &mCblk->flags) & CBLK_STREAM_END_DONE) != 0; } bool AudioTrackClientProxy::getStreamEndDone() const { return (mCblk->flags & CBLK_STREAM_END_DONE) != 0; } +status_t AudioTrackClientProxy::waitStreamEndDone(const struct timespec *requested) +{ + struct timespec total; // total elapsed time spent waiting + total.tv_sec = 0; + total.tv_nsec = 0; + audio_track_cblk_t* cblk = mCblk; + status_t status; + enum { + TIMEOUT_ZERO, // requested == NULL || *requested == 0 + TIMEOUT_INFINITE, // *requested == infinity + TIMEOUT_FINITE, // 0 < *requested < infinity + TIMEOUT_CONTINUE, // additional chances after TIMEOUT_FINITE + } timeout; + if (requested == NULL) { + timeout = TIMEOUT_ZERO; + } else if (requested->tv_sec == 0 && requested->tv_nsec == 0) { + timeout = TIMEOUT_ZERO; + } else if (requested->tv_sec == INT_MAX) { + timeout = TIMEOUT_INFINITE; + } else { + timeout = 
TIMEOUT_FINITE; + } + for (;;) { + int32_t flags = android_atomic_and(~(CBLK_INTERRUPT|CBLK_STREAM_END_DONE), &cblk->flags); + // check for track invalidation by server, or server death detection + if (flags & CBLK_INVALID) { + ALOGV("Track invalidated"); + status = DEAD_OBJECT; + goto end; + } + if (flags & CBLK_STREAM_END_DONE) { + ALOGV("stream end received"); + status = NO_ERROR; + goto end; + } + // check for obtainBuffer interrupted by client + // check for obtainBuffer interrupted by client + if (flags & CBLK_INTERRUPT) { + ALOGV("waitStreamEndDone() interrupted by client"); + status = -EINTR; + goto end; + } + struct timespec remaining; + const struct timespec *ts; + switch (timeout) { + case TIMEOUT_ZERO: + status = WOULD_BLOCK; + goto end; + case TIMEOUT_INFINITE: + ts = NULL; + break; + case TIMEOUT_FINITE: + timeout = TIMEOUT_CONTINUE; + if (MAX_SEC == 0) { + ts = requested; + break; + } + // fall through + case TIMEOUT_CONTINUE: + // FIXME we do not retry if requested < 10ms? needs documentation on this state machine + if (requested->tv_sec < total.tv_sec || + (requested->tv_sec == total.tv_sec && requested->tv_nsec <= total.tv_nsec)) { + status = TIMED_OUT; + goto end; + } + remaining.tv_sec = requested->tv_sec - total.tv_sec; + if ((remaining.tv_nsec = requested->tv_nsec - total.tv_nsec) < 0) { + remaining.tv_nsec += 1000000000; + remaining.tv_sec++; + } + if (0 < MAX_SEC && MAX_SEC < remaining.tv_sec) { + remaining.tv_sec = MAX_SEC; + remaining.tv_nsec = 0; + } + ts = &remaining; + break; + default: + LOG_FATAL("waitStreamEndDone() timeout=%d", timeout); + ts = NULL; + break; + } + int32_t old = android_atomic_and(~CBLK_FUTEX_WAKE, &cblk->mFutex); + if (!(old & CBLK_FUTEX_WAKE)) { + int rc; + int ret = __futex_syscall4(&cblk->mFutex, + mClientInServer ? FUTEX_WAIT_PRIVATE : FUTEX_WAIT, old & ~CBLK_FUTEX_WAKE, ts); + switch (ret) { + case 0: // normal wakeup by server, or by binderDied() + case -EWOULDBLOCK: // benign race condition with server + case -EINTR: // wait was interrupted by signal or other spurious wakeup + case -ETIMEDOUT: // time-out expired + break; + default: + ALOGE("%s unexpected error %d", __func__, ret); + status = -ret; + goto end; + } + } + } + +end: + if (requested == NULL) { + requested = &kNonBlocking; + } + return status; +} + // --------------------------------------------------------------------------- StaticAudioTrackClientProxy::StaticAudioTrackClientProxy(audio_track_cblk_t* cblk, void *buffers, @@ -393,13 +501,19 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer) if (mIsOut) { int32_t flush = cblk->u.mStreaming.mFlush; rear = android_atomic_acquire_load(&cblk->u.mStreaming.mRear); + front = cblk->u.mStreaming.mFront; if (flush != mFlush) { - front = rear; mFlush = flush; // effectively obtain then release whatever is in the buffer android_atomic_release_store(rear, &cblk->u.mStreaming.mFront); - } else { - front = cblk->u.mStreaming.mFront; + if (front != rear) { + int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex); + if (!(old & CBLK_FUTEX_WAKE)) { + (void) __futex_syscall3(&cblk->mFutex, + mClientInServer ? 
FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1); + } + } + front = rear; } } else { front = android_atomic_acquire_load(&cblk->u.mStreaming.mFront); @@ -517,6 +631,11 @@ size_t AudioTrackServerProxy::framesReady() return 0; } audio_track_cblk_t* cblk = mCblk; + + int32_t flush = cblk->u.mStreaming.mFlush; + if (flush != mFlush) { + return mFrameCount; + } // the acquire might not be necessary since not doing a subsequent read int32_t rear = android_atomic_acquire_load(&cblk->u.mStreaming.mRear); ssize_t filled = rear - cblk->u.mStreaming.mFront; diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index 2e2c0cc..c670936 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -372,7 +372,6 @@ public: audio_channel_mask_t channelMask = pChannelMask != NULL ? *pChannelMask : (audio_channel_mask_t)0; uint32_t latency = pLatencyMs != NULL ? *pLatencyMs : 0; - data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); data.writeInt32(module); data.writeInt32(devices); @@ -381,6 +380,12 @@ public: data.writeInt32(channelMask); data.writeInt32(latency); data.writeInt32((int32_t) flags); + if (offloadInfo == NULL) { + data.writeInt32(0); + } else { + data.writeInt32(1); + data.write(offloadInfo, sizeof(audio_offload_info_t)); + } remote()->transact(OPEN_OUTPUT, data, &reply); audio_io_handle_t output = (audio_io_handle_t) reply.readInt32(); ALOGV("openOutput() returned output, %d", output); @@ -881,13 +886,19 @@ status_t BnAudioFlinger::onTransact( audio_channel_mask_t channelMask = (audio_channel_mask_t)data.readInt32(); uint32_t latency = data.readInt32(); audio_output_flags_t flags = (audio_output_flags_t) data.readInt32(); + bool hasOffloadInfo = data.readInt32() != 0; + audio_offload_info_t offloadInfo; + if (hasOffloadInfo) { + data.read(&offloadInfo, sizeof(audio_offload_info_t)); + } audio_io_handle_t output = openOutput(module, &devices, &samplingRate, &format, &channelMask, &latency, - flags); + flags, + hasOffloadInfo ? 
&offloadInfo : NULL); ALOGV("OPEN_OUTPUT output, %p", output); reply->writeInt32((int32_t) output); reply->writeInt32(devices); diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp index 57de58f..4be3c09 100644 --- a/media/libmedia/IAudioPolicyService.cpp +++ b/media/libmedia/IAudioPolicyService.cpp @@ -137,6 +137,12 @@ public: data.writeInt32(static_cast (format)); data.writeInt32(channelMask); data.writeInt32(static_cast (flags)); + if (offloadInfo == NULL) { + data.writeInt32(0); + } else { + data.writeInt32(1); + data.write(offloadInfo, sizeof(audio_offload_info_t)); + } remote()->transact(GET_OUTPUT, data, &reply); return static_cast (reply.readInt32()); } @@ -379,9 +385,11 @@ public: virtual bool isOffloadSupported(const audio_offload_info_t& info) { - // stub function - return false; - } + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.write(&info, sizeof(audio_offload_info_t)); + remote()->transact(IS_OFFLOAD_SUPPORTED, data, &reply); + return reply.readInt32(); } }; IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService"); @@ -450,12 +458,17 @@ status_t BnAudioPolicyService::onTransact( audio_channel_mask_t channelMask = data.readInt32(); audio_output_flags_t flags = static_cast (data.readInt32()); - + bool hasOffloadInfo = data.readInt32() != 0; + audio_offload_info_t offloadInfo; + if (hasOffloadInfo) { + data.read(&offloadInfo, sizeof(audio_offload_info_t)); + } audio_io_handle_t output = getOutput(stream, samplingRate, format, channelMask, - flags); + flags, + hasOffloadInfo ? &offloadInfo : NULL); reply->writeInt32(static_cast (output)); return NO_ERROR; } break; @@ -662,6 +675,15 @@ status_t BnAudioPolicyService::onTransact( return status; } + case IS_OFFLOAD_SUPPORTED: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + audio_offload_info_t info; + data.read(&info, sizeof(audio_offload_info_t)); + bool isSupported = isOffloadSupported(info); + reply->writeInt32(isSupported); + return NO_ERROR; + } + default: return BBinder::onTransact(code, data, reply, flags); } -- cgit v1.1 From 0fea74cdbc09c1259e08215e2ea90e7988d62df8 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 25 Jul 2013 14:34:57 -0700 Subject: Remove obsolete TrackBase::reset() Change-Id: I38100b7e28a12d7af8cb40ae3f4d9cb4a0ebe701 --- services/audioflinger/TrackBase.h | 1 - services/audioflinger/Tracks.cpp | 7 ------- 2 files changed, 8 deletions(-) diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h index 55d96fa..f0c32b2 100644 --- a/services/audioflinger/TrackBase.h +++ b/services/audioflinger/TrackBase.h @@ -105,7 +105,6 @@ protected: } bool step(); // mStepCount is an implicit input - void reset(); bool isOut() const { return mIsOut; } // true for Track and TimedTrack, false for RecordTrack, diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index c45daae..f41cb22 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -202,11 +202,6 @@ void AudioFlinger::ThreadBase::TrackBase::releaseBuffer(AudioBufferProvider::Buf mServerProxy->releaseBuffer(&buf); } -void AudioFlinger::ThreadBase::TrackBase::reset() { - ALOGV("TrackBase::reset"); - // FIXME still needed? 
-} - status_t AudioFlinger::ThreadBase::TrackBase::setSyncEvent(const sp& event) { mSyncEvents.add(event); @@ -676,7 +671,6 @@ void AudioFlinger::PlaybackThread::Track::reset() // Do not reset twice to avoid discarding data written just after a flush and before // the audioflinger thread detects the track is stopped. if (!mResetDone) { - TrackBase::reset(); // Force underrun condition to avoid false underrun callback until first data is // written to buffer android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags); @@ -1665,7 +1659,6 @@ void AudioFlinger::RecordThread::RecordTrack::stop() recordThread->mLock.lock(); bool doStop = recordThread->stop_l(this); if (doStop) { - TrackBase::reset(); // Force overrun condition to avoid false overrun callback until first data is // read from buffer android_atomic_or(CBLK_UNDERRUN, &mCblk->flags); -- cgit v1.1 From fc38a2e0268b5e531db2975c3a81462a3593c861 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 25 Jul 2013 14:41:19 -0700 Subject: CBLK_OVERRUN, not CBLK_UNDERRUN, is for record threads Change-Id: I8948f76ef4717a423c37cd6ea7db4381636af612 --- services/audioflinger/Tracks.cpp | 3 --- 1 file changed, 3 deletions(-) diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index f41cb22..d424e16 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -1659,9 +1659,6 @@ void AudioFlinger::RecordThread::RecordTrack::stop() recordThread->mLock.lock(); bool doStop = recordThread->stop_l(this); if (doStop) { - // Force overrun condition to avoid false overrun callback until first data is - // read from buffer - android_atomic_or(CBLK_UNDERRUN, &mCblk->flags); } recordThread->mLock.unlock(); if (doStop) { -- cgit v1.1 From a8356f663014e7d4c27869629af83d8bb3441e19 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 25 Jul 2013 14:37:52 -0700 Subject: Simplify RecordTrack::stop() Change-Id: Ib959c1e9dc9544d12277ce11bea445118b2e0521 --- services/audioflinger/Threads.cpp | 3 ++- services/audioflinger/Threads.h | 2 +- services/audioflinger/Tracks.cpp | 7 +------ 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index f27d908..e2e023a 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -4031,8 +4031,9 @@ void AudioFlinger::RecordThread::handleSyncStartEvent(const sp& event } } -bool AudioFlinger::RecordThread::stop_l(RecordThread::RecordTrack* recordTrack) { +bool AudioFlinger::RecordThread::stop(RecordThread::RecordTrack* recordTrack) { ALOGV("RecordThread::stop"); + AutoMutex _l(mLock); if (recordTrack != mActiveTrack.get() || recordTrack->mState == TrackBase::PAUSING) { return false; } diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 365c790..4c969d8 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -747,7 +747,7 @@ public: // ask the thread to stop the specified track, and // return true if the caller should then do it's part of the stopping process - bool stop_l(RecordTrack* recordTrack); + bool stop(RecordTrack* recordTrack); void dump(int fd, const Vector& args); AudioStreamIn* clearInput(); diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index d424e16..1df333f 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -1656,12 +1656,7 @@ void AudioFlinger::RecordThread::RecordTrack::stop() sp thread = mThread.promote(); if (thread != 0) { 
RecordThread *recordThread = (RecordThread *)thread.get(); - recordThread->mLock.lock(); - bool doStop = recordThread->stop_l(this); - if (doStop) { - } - recordThread->mLock.unlock(); - if (doStop) { + if (recordThread->stop(this)) { AudioSystem::stopInput(recordThread->id()); } } -- cgit v1.1 From d89532e133b881c7e0dac089333ad7642fc510f1 Mon Sep 17 00:00:00 2001 From: Richard Fitzgerald Date: Tue, 14 May 2013 13:18:21 +0100 Subject: libmediaplayerservice: offload playback support Main change is to how recycled tracks are used for gapless playback. If we are playing offloaded tracks that can't be recycled we don't open a new offloaded output until we have closed the previous one. This is because offloaded tracks are a limited resource so we don't want to spuriously create unnecessary instances. If the tracks cannot be recycled this means that the formats are incompatible and so the hardware most likely will also be unable to use the existing output channel for the new track. If we already have the maximum number of hardware offload channels open (which could be only one) then creation of the next output would fail if we attempted it while the previous output was still open. Change-Id: I4f5958074e7ffd2e17108157fee86329506730ea Signed-off-by: Eric Laurent --- include/media/MediaPlayerInterface.h | 2 +- libvideoeditor/lvpp/VideoEditorPlayer.cpp | 10 +- libvideoeditor/lvpp/VideoEditorPlayer.h | 2 +- media/libmediaplayerservice/MediaPlayerService.cpp | 284 +++++++++++++++------ media/libmediaplayerservice/MediaPlayerService.h | 7 +- 5 files changed, 216 insertions(+), 89 deletions(-) diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h index 61f7dc7..3b151ef 100644 --- a/include/media/MediaPlayerInterface.h +++ b/include/media/MediaPlayerInterface.h @@ -111,7 +111,7 @@ public: audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, const audio_offload_info_t *offloadInfo = NULL) = 0; - virtual void start() = 0; + virtual status_t start() = 0; virtual ssize_t write(const void* buffer, size_t size) = 0; virtual void stop() = 0; virtual void flush() = 0; diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp index 3384e34..5aeba4f 100755 --- a/libvideoeditor/lvpp/VideoEditorPlayer.cpp +++ b/libvideoeditor/lvpp/VideoEditorPlayer.cpp @@ -468,14 +468,18 @@ status_t VideoEditorPlayer::VeAudioOutput::open( return NO_ERROR; } -void VideoEditorPlayer::VeAudioOutput::start() { +status_t VideoEditorPlayer::VeAudioOutput::start() { ALOGV("start"); if (mTrack != 0) { mTrack->setVolume(mLeftVolume, mRightVolume); - mTrack->start(); - mTrack->getPosition(&mNumFramesWritten); + status_t status = mTrack->start(); + if (status == NO_ERROR) { + mTrack->getPosition(&mNumFramesWritten); + } + return status; } + return NO_INIT; } void VideoEditorPlayer::VeAudioOutput::snoopWrite( diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.h b/libvideoeditor/lvpp/VideoEditorPlayer.h index 69323c3..ab6d731 100755 --- a/libvideoeditor/lvpp/VideoEditorPlayer.h +++ b/libvideoeditor/lvpp/VideoEditorPlayer.h @@ -55,7 +55,7 @@ class VideoEditorPlayer : public MediaPlayerInterface { AudioCallback cb, void *cookie, audio_output_flags_t flags, const audio_offload_info_t *offloadInfo); - virtual void start(); + virtual status_t start(); virtual ssize_t write(const void* buffer, size_t size); virtual void stop(); virtual void flush(); diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index 
afde373..8833bd7 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -53,6 +53,8 @@ #include #include #include +#include +#include #include @@ -1381,6 +1383,45 @@ status_t MediaPlayerService::AudioOutput::getFramesWritten(uint32_t *frameswritt return OK; } +status_t MediaPlayerService::AudioOutput::setParameters(const String8& keyValuePairs) +{ + if (mTrack == 0) return NO_INIT; + return mTrack->setParameters(keyValuePairs); +} + +String8 MediaPlayerService::AudioOutput::getParameters(const String8& keys) +{ + if (mTrack == 0) return String8::empty(); + return mTrack->getParameters(keys); +} + +void MediaPlayerService::AudioOutput::deleteRecycledTrack() +{ + ALOGV("deleteRecycledTrack"); + + if (mRecycledTrack != 0) { + + if (mCallbackData != NULL) { + mCallbackData->setOutput(NULL); + mCallbackData->endTrackSwitch(); + } + + if ((mRecycledTrack->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0) { + mRecycledTrack->flush(); + } + // An offloaded track isn't flushed because the STREAM_END is reported + // slightly prematurely to allow time for the gapless track switch + // but this means that if we decide not to recycle the track there + // could be a small amount of residual data still playing. We leave + // AudioFlinger to drain the track. + + mRecycledTrack.clear(); + delete mCallbackData; + mCallbackData = NULL; + close(); + } +} + status_t MediaPlayerService::AudioOutput::open( uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask, audio_format_t format, int bufferCount, @@ -1397,20 +1438,34 @@ status_t MediaPlayerService::AudioOutput::open( bufferCount = mMinBufferCount; } - ALOGV("open(%u, %d, 0x%x, %d, %d, %d)", sampleRate, channelCount, channelMask, - format, bufferCount, mSessionId); + ALOGV("open(%u, %d, 0x%x, 0x%x, %d, %d 0x%x)", sampleRate, channelCount, channelMask, + format, bufferCount, mSessionId, flags); uint32_t afSampleRate; size_t afFrameCount; uint32_t frameCount; - if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) != NO_ERROR) { - return NO_INIT; - } - if (AudioSystem::getOutputSamplingRate(&afSampleRate, mStreamType) != NO_ERROR) { - return NO_INIT; + // offloading is only supported in callback mode for now. 
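    // [Editorial note, not part of the patch] Net effect of the check below: a caller
    // requesting AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD must supply both an AudioCallback
    // and an audio_offload_info_t, otherwise open() fails early with BAD_VALUE.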
+ // offloadInfo must be present if offload flag is set + if (((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) && + ((cb == NULL) || (offloadInfo == NULL))) { + return BAD_VALUE; } - frameCount = (sampleRate*afFrameCount*bufferCount)/afSampleRate; + if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) { + frameCount = 0; // AudioTrack will get frame count from AudioFlinger + } else { + uint32_t afSampleRate; + size_t afFrameCount; + + if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) != NO_ERROR) { + return NO_INIT; + } + if (AudioSystem::getOutputSamplingRate(&afSampleRate, mStreamType) != NO_ERROR) { + return NO_INIT; + } + + frameCount = (sampleRate*afFrameCount*bufferCount)/afSampleRate; + } if (channelMask == CHANNEL_MASK_USE_CHANNEL_ORDER) { channelMask = audio_channel_out_mask_from_count(channelCount); @@ -1420,65 +1475,108 @@ status_t MediaPlayerService::AudioOutput::open( } } - sp t; - CallbackData *newcbd = NULL; - if (mCallback != NULL) { - newcbd = new CallbackData(this); - t = new AudioTrack( - mStreamType, - sampleRate, - format, - channelMask, - frameCount, - flags, - CallbackWrapper, - newcbd, - 0, // notification frames - mSessionId); - } else { - t = new AudioTrack( - mStreamType, - sampleRate, - format, - channelMask, - frameCount, - flags, - NULL, - NULL, - 0, - mSessionId); - } - - if ((t == 0) || (t->initCheck() != NO_ERROR)) { - ALOGE("Unable to create audio track"); - delete newcbd; - return NO_INIT; - } - + // Check whether we can recycle the track + bool reuse = false; + bool bothOffloaded = false; if (mRecycledTrack != 0) { + // check whether we are switching between two offloaded tracks + bothOffloaded = (flags & mRecycledTrack->getFlags() + & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0; + // check if the existing track can be reused as-is, or if a new track needs to be created. + reuse = true; - bool reuse = true; if ((mCallbackData == NULL && mCallback != NULL) || (mCallbackData != NULL && mCallback == NULL)) { // recycled track uses callbacks but the caller wants to use writes, or vice versa ALOGV("can't chain callback and write"); reuse = false; } else if ((mRecycledTrack->getSampleRate() != sampleRate) || - (mRecycledTrack->channelCount() != channelCount) || - (mRecycledTrack->frameCount() != t->frameCount())) { - ALOGV("samplerate, channelcount or framecount differ: %d/%d Hz, %d/%d ch, %d/%d frames", + (mRecycledTrack->channelCount() != (uint32_t)channelCount) ) { + ALOGV("samplerate, channelcount differ: %u/%u Hz, %u/%d ch", mRecycledTrack->getSampleRate(), sampleRate, - mRecycledTrack->channelCount(), channelCount, - mRecycledTrack->frameCount(), t->frameCount()); + mRecycledTrack->channelCount(), channelCount); reuse = false; } else if (flags != mFlags) { ALOGV("output flags differ %08x/%08x", flags, mFlags); reuse = false; + } else if (mRecycledTrack->format() != format) { + reuse = false; + } + } else { + ALOGV("no track available to recycle"); + } + + ALOGV_IF(bothOffloaded, "both tracks offloaded"); + + // If we can't recycle and both tracks are offloaded + // we must close the previous output before opening a new one + if (bothOffloaded && !reuse) { + ALOGV("both offloaded and not recycling"); + deleteRecycledTrack(); + } + + sp t; + CallbackData *newcbd = NULL; + + // We don't attempt to create a new track if we are recycling an + // offloaded track. 
But, if we are recycling a non-offloaded or we + // are switching where one is offloaded and one isn't then we create + // the new track in advance so that we can read additional stream info + + if (!(reuse && bothOffloaded)) { + ALOGV("creating new AudioTrack"); + + if (mCallback != NULL) { + newcbd = new CallbackData(this); + t = new AudioTrack( + mStreamType, + sampleRate, + format, + channelMask, + frameCount, + flags, + CallbackWrapper, + newcbd, + 0, // notification frames + mSessionId, + AudioTrack::TRANSFER_CALLBACK, + offloadInfo); + } else { + t = new AudioTrack( + mStreamType, + sampleRate, + format, + channelMask, + frameCount, + flags, + NULL, + NULL, + 0, + mSessionId); + } + + if ((t == 0) || (t->initCheck() != NO_ERROR)) { + ALOGE("Unable to create audio track"); + delete newcbd; + return NO_INIT; + } + } + + if (reuse) { + CHECK(mRecycledTrack != NULL); + + if (!bothOffloaded) { + if (mRecycledTrack->frameCount() != t->frameCount()) { + ALOGV("framecount differs: %u/%u frames", + mRecycledTrack->frameCount(), t->frameCount()); + reuse = false; + } } + if (reuse) { - ALOGV("chaining to next output"); + ALOGV("chaining to next output and recycling track"); close(); mTrack = mRecycledTrack; mRecycledTrack.clear(); @@ -1488,19 +1586,16 @@ status_t MediaPlayerService::AudioOutput::open( delete newcbd; return OK; } + } - // if we're not going to reuse the track, unblock and flush it - if (mCallbackData != NULL) { - mCallbackData->setOutput(NULL); - mCallbackData->endTrackSwitch(); - } - mRecycledTrack->flush(); - mRecycledTrack.clear(); - delete mCallbackData; - mCallbackData = NULL; - close(); + // we're not going to reuse the track, unblock and flush it + // this was done earlier if both tracks are offloaded + if (!bothOffloaded) { + deleteRecycledTrack(); } + CHECK((t != NULL) && ((mCallback == NULL) || (newcbd != NULL))); + mCallbackData = newcbd; ALOGV("setVolume"); t->setVolume(mLeftVolume, mRightVolume); @@ -1514,15 +1609,19 @@ status_t MediaPlayerService::AudioOutput::open( } mTrack = t; - status_t res = t->setSampleRate(mPlaybackRatePermille * mSampleRateHz / 1000); - if (res != NO_ERROR) { - return res; + status_t res = NO_ERROR; + if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0) { + res = t->setSampleRate(mPlaybackRatePermille * mSampleRateHz / 1000); + if (res == NO_ERROR) { + t->setAuxEffectSendLevel(mSendLevel); + res = t->attachAuxEffect(mAuxEffectId); + } } - t->setAuxEffectSendLevel(mSendLevel); - return t->attachAuxEffect(mAuxEffectId);; + ALOGV("open() DONE status %d", res); + return res; } -void MediaPlayerService::AudioOutput::start() +status_t MediaPlayerService::AudioOutput::start() { ALOGV("start"); if (mCallbackData != NULL) { @@ -1531,8 +1630,9 @@ void MediaPlayerService::AudioOutput::start() if (mTrack != 0) { mTrack->setVolume(mLeftVolume, mRightVolume); mTrack->setAuxEffectSendLevel(mSendLevel); - mTrack->start(); + return mTrack->start(); } + return NO_INIT; } void MediaPlayerService::AudioOutput::setNextOutput(const sp& nextOutput) { @@ -1645,10 +1745,6 @@ status_t MediaPlayerService::AudioOutput::attachAuxEffect(int effectId) void MediaPlayerService::AudioOutput::CallbackWrapper( int event, void *cookie, void *info) { //ALOGV("callbackwrapper"); - if (event != AudioTrack::EVENT_MORE_DATA) { - return; - } - CallbackData *data = (CallbackData*)cookie; data->lock(); AudioOutput *me = data->getOutput(); @@ -1657,23 +1753,46 @@ void MediaPlayerService::AudioOutput::CallbackWrapper( // no output set, likely because the track was scheduled to be 
reused // by another player, but the format turned out to be incompatible. data->unlock(); - buffer->size = 0; + if (buffer != NULL) { + buffer->size = 0; + } return; } - size_t actualSize = (*me->mCallback)( - me, buffer->raw, buffer->size, me->mCallbackCookie, - CB_EVENT_FILL_BUFFER); + switch(event) { + case AudioTrack::EVENT_MORE_DATA: { + size_t actualSize = (*me->mCallback)( + me, buffer->raw, buffer->size, me->mCallbackCookie, + CB_EVENT_FILL_BUFFER); + + if (actualSize == 0 && buffer->size > 0 && me->mNextOutput == NULL) { + // We've reached EOS but the audio track is not stopped yet, + // keep playing silence. + + memset(buffer->raw, 0, buffer->size); + actualSize = buffer->size; + } + + buffer->size = actualSize; + } break; - if (actualSize == 0 && buffer->size > 0 && me->mNextOutput == NULL) { - // We've reached EOS but the audio track is not stopped yet, - // keep playing silence. - memset(buffer->raw, 0, buffer->size); - actualSize = buffer->size; + case AudioTrack::EVENT_STREAM_END: + ALOGV("callbackwrapper: deliver EVENT_STREAM_END"); + (*me->mCallback)(me, NULL /* buffer */, 0 /* size */, + me->mCallbackCookie, CB_EVENT_STREAM_END); + break; + + case AudioTrack::EVENT_NEW_IAUDIOTRACK : + ALOGV("callbackwrapper: deliver EVENT_TEAR_DOWN"); + (*me->mCallback)(me, NULL /* buffer */, 0 /* size */, + me->mCallbackCookie, CB_EVENT_TEAR_DOWN); + break; + + default: + ALOGE("received unknown event type: %d inside CallbackWrapper !", event); } - buffer->size = actualSize; data->unlock(); } @@ -1803,10 +1922,11 @@ status_t MediaPlayerService::AudioCache::open( return NO_ERROR; } -void MediaPlayerService::AudioCache::start() { +status_t MediaPlayerService::AudioCache::start() { if (mCallbackThread != NULL) { mCallbackThread->run("AudioCache callback"); } + return NO_ERROR; } void MediaPlayerService::AudioCache::stop() { diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index f7076cc..7d27944 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -94,7 +94,7 @@ class MediaPlayerService : public BnMediaPlayerService audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, const audio_offload_info_t *offloadInfo = NULL); - virtual void start(); + virtual status_t start(); virtual ssize_t write(const void* buffer, size_t size); virtual void stop(); virtual void flush(); @@ -112,11 +112,14 @@ class MediaPlayerService : public BnMediaPlayerService void setNextOutput(const sp& nextOutput); void switchToNextOutput(); virtual bool needsTrailingPadding() { return mNextOutput == NULL; } + virtual status_t setParameters(const String8& keyValuePairs); + virtual String8 getParameters(const String8& keys); private: static void setMinBufferCount(); static void CallbackWrapper( int event, void *me, void *info); + void deleteRecycledTrack(); sp mTrack; sp mRecycledTrack; @@ -196,7 +199,7 @@ class MediaPlayerService : public BnMediaPlayerService audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, const audio_offload_info_t *offloadInfo = NULL); - virtual void start(); + virtual status_t start(); virtual ssize_t write(const void* buffer, size_t size); virtual void stop(); virtual void flush() {} -- cgit v1.1 From 94ea60f975c3eb7ce6d2a4430538a42a5fc3babd Mon Sep 17 00:00:00 2001 From: Richard Fitzgerald Date: Tue, 14 May 2013 15:52:03 +0100 Subject: stagefright: offload playback support Offloading of compressed audio decoding to audio DSP is implemented for audio only, non streamed 
content. When the datasource is AudioPlayer:
- Create an offloaded sink when playing a compressed source
- Send metadata to audio HAL
- Return sink start error to AwesomePlayer so that a new player for PCM audio can be
  created in case of a problem.
- Forward stream end and tear down callback events to AwesomePlayer
- Stop the sink and wait for stream end callback when EOS is reached.
- Pause and restart the sink if needed before flushing when seeking (otherwise flush
  is a no-op).
- For current media time, directly query the render position from the sink and offset
  by the start position (seek to time)

AwesomePlayer:
- When initializing the audio decoder, check with audio policy manager if offloading
  is supported. If yes, create the software decoder in case a reconfiguration is needed
  but connect the audio track directly to the AudioPlayer.
- In case of error when starting the AudioPlayer, reconnect the software decoder
  (OMXSource) and recreate a PCM AudioPlayer.
- Handle AudioPlayer tear down event by destroying and recreating the AudioPlayer to
  allow transitions between situations where offloading is supported or not.
- Force tear down of offloaded AudioPlayer when paused for a certain time: this will
  close the sink and allow the DSP to power down.

Utils:
- Added helper methods:
  - send meta data to audio HAL via sink setParameters
  - query audio policy manager if offloading is supported for a given audio content

Change-Id: I115842ce424f947b966d45e253a74d3fd5df9aae
Signed-off-by: Eric Laurent
---
 include/media/stagefright/AudioPlayer.h | 16 +-
 libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp | 3 +-
 libvideoeditor/lvpp/VideoEditorAudioPlayer.h | 2 +-
 media/libstagefright/Android.mk | 1 +
 media/libstagefright/AudioPlayer.cpp | 348 ++++++++++++++++++++-----
 media/libstagefright/AwesomePlayer.cpp | 244 ++++++++++++++---
 media/libstagefright/Utils.cpp | 123 ++++++++-
 media/libstagefright/include/AwesomePlayer.h | 2 +-
 8 files changed, 618 insertions(+), 121 deletions(-)
diff --git a/include/media/stagefright/AudioPlayer.h b/include/media/stagefright/AudioPlayer.h
index ec9f2df..912a43c 100644
--- a/include/media/stagefright/AudioPlayer.h
+++ b/include/media/stagefright/AudioPlayer.h
@@ -38,7 +38,10 @@ public: enum { ALLOW_DEEP_BUFFERING = 0x01, - USE_OFFLOAD = 0x02 + USE_OFFLOAD = 0x02, + HAS_VIDEO = 0x1000, + IS_STREAMING = 0x2000 + }; AudioPlayer(const sp &audioSink,
@@ -56,7 +59,7 @@ public: status_t start(bool sourceAlreadyStarted = false); void pause(bool playPendingSamples = false); - void resume(); + status_t resume(); // Returns the timestamp of the last buffer played (in us). int64_t getMediaTimeUs();
@@ -104,11 +107,13 @@ private: MediaBuffer *mFirstBuffer; sp mAudioSink; - bool mAllowDeepBuffering; // allow audio deep audio buffers.
Helps with low power audio - // playback but implies high latency AwesomePlayer *mObserver; int64_t mPinnedTimeUs; + bool mPlaying; + int64_t mStartPosUs; + const uint32_t mCreateFlags; + static void AudioCallback(int event, void *user, void *info); void AudioCallback(int event, void *info); @@ -126,6 +131,9 @@ private: uint32_t getNumFramesPendingPlayout() const; int64_t getOutputPlayPositionUs_l() const; + bool allowDeepBuffering() const { return (mCreateFlags & ALLOW_DEEP_BUFFERING) != 0; } + bool useOffload() const { return (mCreateFlags & USE_OFFLOAD) != 0; } + AudioPlayer(const AudioPlayer &); AudioPlayer &operator=(const AudioPlayer &); }; diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp index dc360a5..176f8e9 100755 --- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp +++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp @@ -149,7 +149,7 @@ void VideoEditorAudioPlayer::clear() { mStarted = false; } -void VideoEditorAudioPlayer::resume() { +status_t VideoEditorAudioPlayer::resume() { ALOGV("resume"); AudioMixSettings audioMixSettings; @@ -180,6 +180,7 @@ void VideoEditorAudioPlayer::resume() { } else { mAudioTrack->start(); } + return OK; } status_t VideoEditorAudioPlayer::seekTo(int64_t time_us) { diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h index d2e652d..2caf5e8 100755 --- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h +++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h @@ -58,7 +58,7 @@ public: status_t start(bool sourceAlreadyStarted = false); void pause(bool playPendingSamples = false); - void resume(); + status_t resume(); status_t seekTo(int64_t time_us); bool isSeeking(); bool reachedEOS(status_t *finalStatus); diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index 90bf324..1f68b51 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -100,6 +100,7 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_mpeg2ts \ libstagefright_id3 \ libFLAC \ + libmedia_helper LOCAL_SRC_FILES += \ chromium_http_stub.cpp diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp index 61d6746..2418aab 100644 --- a/media/libstagefright/AudioPlayer.cpp +++ b/media/libstagefright/AudioPlayer.cpp @@ -17,6 +17,7 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "AudioPlayer" #include +#include #include #include @@ -27,6 +28,7 @@ #include #include #include +#include #include "include/AwesomePlayer.h" @@ -47,14 +49,17 @@ AudioPlayer::AudioPlayer( mSeeking(false), mReachedEOS(false), mFinalStatus(OK), + mSeekTimeUs(0), mStarted(false), mIsFirstBuffer(false), mFirstBufferResult(OK), mFirstBuffer(NULL), mAudioSink(audioSink), - mAllowDeepBuffering((flags & ALLOW_DEEP_BUFFERING) != 0), mObserver(observer), - mPinnedTimeUs(-1ll) { + mPinnedTimeUs(-1ll), + mPlaying(false), + mStartPosUs(0), + mCreateFlags(flags) { } AudioPlayer::~AudioPlayer() { @@ -109,7 +114,7 @@ status_t AudioPlayer::start(bool sourceAlreadyStarted) { const char *mime; bool success = format->findCString(kKeyMIMEType, &mime); CHECK(success); - CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)); + CHECK(useOffload() || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)); success = format->findInt32(kKeySampleRate, &mSampleRate); CHECK(success); @@ -125,16 +130,74 @@ status_t AudioPlayer::start(bool sourceAlreadyStarted) { channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER; } + audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT; + + if (useOffload()) { + if 
(mapMimeToAudioFormat(audioFormat, mime) != OK) { + ALOGE("Couldn't map mime type \"%s\" to a valid AudioSystem::audio_format", mime); + audioFormat = AUDIO_FORMAT_INVALID; + } else { + ALOGV("Mime type \"%s\" mapped to audio_format 0x%x", mime, audioFormat); + } + } + + int avgBitRate = -1; + format->findInt32(kKeyBitRate, &avgBitRate); + if (mAudioSink.get() != NULL) { + uint32_t flags = AUDIO_OUTPUT_FLAG_NONE; + audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER; + + if (allowDeepBuffering()) { + flags |= AUDIO_OUTPUT_FLAG_DEEP_BUFFER; + } + if (useOffload()) { + flags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD; + + int64_t durationUs; + if (format->findInt64(kKeyDuration, &durationUs)) { + offloadInfo.duration_us = durationUs; + } else { + offloadInfo.duration_us = -1; + } + + offloadInfo.sample_rate = mSampleRate; + offloadInfo.channel_mask = channelMask; + offloadInfo.format = audioFormat; + offloadInfo.stream_type = AUDIO_STREAM_MUSIC; + offloadInfo.bit_rate = avgBitRate; + offloadInfo.has_video = ((mCreateFlags & HAS_VIDEO) != 0); + offloadInfo.is_streaming = ((mCreateFlags & IS_STREAMING) != 0); + } + status_t err = mAudioSink->open( - mSampleRate, numChannels, channelMask, AUDIO_FORMAT_PCM_16_BIT, + mSampleRate, numChannels, channelMask, audioFormat, DEFAULT_AUDIOSINK_BUFFERCOUNT, &AudioPlayer::AudioSinkCallback, this, - (mAllowDeepBuffering ? - AUDIO_OUTPUT_FLAG_DEEP_BUFFER : - AUDIO_OUTPUT_FLAG_NONE)); + (audio_output_flags_t)flags, + useOffload() ? &offloadInfo : NULL); + + if (err == OK) { + mLatencyUs = (int64_t)mAudioSink->latency() * 1000; + mFrameSize = mAudioSink->frameSize(); + + if (useOffload()) { + // If the playback is offloaded to h/w we pass the + // HAL some metadata information + // We don't want to do this for PCM because it will be going + // through the AudioFlinger mixer before reaching the hardware + sendMetaDataToHal(mAudioSink, format); + } + + err = mAudioSink->start(); + // do not alter behavior for non offloaded tracks: ignore start status. + if (!useOffload()) { + err = OK; + } + } + if (err != OK) { if (mFirstBuffer != NULL) { mFirstBuffer->release(); @@ -148,10 +211,6 @@ status_t AudioPlayer::start(bool sourceAlreadyStarted) { return err; } - mLatencyUs = (int64_t)mAudioSink->latency() * 1000; - mFrameSize = mAudioSink->frameSize(); - - mAudioSink->start(); } else { // playing to an AudioTrack, set up mask if necessary audio_channel_mask_t audioMask = channelMask == CHANNEL_MASK_USE_CHANNEL_ORDER ? @@ -186,6 +245,7 @@ status_t AudioPlayer::start(bool sourceAlreadyStarted) { } mStarted = true; + mPlaying = true; mPinnedTimeUs = -1ll; return OK; @@ -212,27 +272,56 @@ void AudioPlayer::pause(bool playPendingSamples) { mPinnedTimeUs = ALooper::GetNowUs(); } + + mPlaying = false; } -void AudioPlayer::resume() { +status_t AudioPlayer::resume() { CHECK(mStarted); + status_t err; if (mAudioSink.get() != NULL) { - mAudioSink->start(); + err = mAudioSink->start(); } else { - mAudioTrack->start(); + err = mAudioTrack->start(); } + + if (err == OK) { + mPlaying = true; + } + + return err; } void AudioPlayer::reset() { CHECK(mStarted); + ALOGV("reset: mPlaying=%d mReachedEOS=%d useOffload=%d", + mPlaying, mReachedEOS, useOffload() ); + if (mAudioSink.get() != NULL) { mAudioSink->stop(); + // If we're closing and have reached EOS, we don't want to flush + // the track because if it is offloaded there could be a small + // amount of residual data in the hardware buffer which we must + // play to give gapless playback. 
+ // But if we're resetting when paused or before we've reached EOS + // we can't be doing a gapless playback and there could be a large + // amount of data queued in the hardware if the track is offloaded, + // so we must flush to prevent a track switch being delayed playing + // the buffered data that we don't want now + if (!mPlaying || !mReachedEOS) { + mAudioSink->flush(); + } + mAudioSink->close(); } else { mAudioTrack->stop(); + if (!mPlaying || !mReachedEOS) { + mAudioTrack->flush(); + } + mAudioTrack.clear(); } @@ -256,10 +345,16 @@ void AudioPlayer::reset() { // The following hack is necessary to ensure that the OMX // component is completely released by the time we may try // to instantiate it again. - wp tmp = mSource; - mSource.clear(); - while (tmp.promote() != NULL) { - usleep(1000); + // When offloading, the OMX component is not used so this hack + // is not needed + if (!useOffload()) { + wp tmp = mSource; + mSource.clear(); + while (tmp.promote() != NULL) { + usleep(1000); + } + } else { + mSource.clear(); } IPCThreadState::self()->flushCommands(); @@ -271,6 +366,8 @@ void AudioPlayer::reset() { mReachedEOS = false; mFinalStatus = OK; mStarted = false; + mPlaying = false; + mStartPosUs = 0; } // static @@ -291,6 +388,15 @@ bool AudioPlayer::reachedEOS(status_t *finalStatus) { return mReachedEOS; } +void AudioPlayer::notifyAudioEOS() { + ALOGV("AudioPlayer@0x%p notifyAudioEOS", this); + + if (mObserver != NULL) { + mObserver->postAudioEOS(0); + ALOGV("Notified observer of EOS!"); + } +} + status_t AudioPlayer::setPlaybackRatePermille(int32_t ratePermille) { if (mAudioSink.get() != NULL) { return mAudioSink->setPlaybackRatePermille(ratePermille); @@ -308,18 +414,40 @@ size_t AudioPlayer::AudioSinkCallback( MediaPlayerBase::AudioSink::cb_event_t event) { AudioPlayer *me = (AudioPlayer *)cookie; - return me->fillBuffer(buffer, size); -} + switch(event) { + case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER: + return me->fillBuffer(buffer, size); -void AudioPlayer::AudioCallback(int event, void *info) { - if (event != AudioTrack::EVENT_MORE_DATA) { - return; + case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END: + ALOGV("AudioSinkCallback: stream end"); + me->mReachedEOS = true; + me->notifyAudioEOS(); + break; + + case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN: + ALOGV("AudioSinkCallback: Tear down event"); + me->mObserver->postAudioTearDown(); + break; } - AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info; - size_t numBytesWritten = fillBuffer(buffer->raw, buffer->size); + return 0; +} + +void AudioPlayer::AudioCallback(int event, void *info) { + switch (event) { + case AudioTrack::EVENT_MORE_DATA: + { + AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info; + size_t numBytesWritten = fillBuffer(buffer->raw, buffer->size); + buffer->size = numBytesWritten; + } + break; - buffer->size = numBytesWritten; + case AudioTrack::EVENT_STREAM_END: + mReachedEOS = true; + notifyAudioEOS(); + break; + } } uint32_t AudioPlayer::getNumFramesPendingPlayout() const { @@ -359,6 +487,7 @@ size_t AudioPlayer::fillBuffer(void *data, size_t size) { size_t size_remaining = size; while (size_remaining > 0) { MediaSource::ReadOptions options; + bool refreshSeekTime = false; { Mutex::Autolock autoLock(mLock); @@ -373,6 +502,7 @@ size_t AudioPlayer::fillBuffer(void *data, size_t size) { } options.setSeekTo(mSeekTimeUs); + refreshSeekTime = true; if (mInputBuffer != NULL) { mInputBuffer->release(); @@ -405,43 +535,56 @@ size_t AudioPlayer::fillBuffer(void *data, size_t size) { 
Mutex::Autolock autoLock(mLock); if (err != OK) { - if (mObserver && !mReachedEOS) { - // We don't want to post EOS right away but only - // after all frames have actually been played out. - - // These are the number of frames submitted to the - // AudioTrack that you haven't heard yet. - uint32_t numFramesPendingPlayout = - getNumFramesPendingPlayout(); - - // These are the number of frames we're going to - // submit to the AudioTrack by returning from this - // callback. - uint32_t numAdditionalFrames = size_done / mFrameSize; - - numFramesPendingPlayout += numAdditionalFrames; - - int64_t timeToCompletionUs = - (1000000ll * numFramesPendingPlayout) / mSampleRate; - - ALOGV("total number of frames played: %lld (%lld us)", - (mNumFramesPlayed + numAdditionalFrames), - 1000000ll * (mNumFramesPlayed + numAdditionalFrames) - / mSampleRate); - - ALOGV("%d frames left to play, %lld us (%.2f secs)", - numFramesPendingPlayout, - timeToCompletionUs, timeToCompletionUs / 1E6); - - postEOS = true; - if (mAudioSink->needsTrailingPadding()) { - postEOSDelayUs = timeToCompletionUs + mLatencyUs; + if (!mReachedEOS) { + if (useOffload()) { + // no more buffers to push - stop() and wait for STREAM_END + // don't set mReachedEOS until stream end received + if (mAudioSink != NULL) { + mAudioSink->stop(); + } else { + mAudioTrack->stop(); + } } else { - postEOSDelayUs = 0; + if (mObserver) { + // We don't want to post EOS right away but only + // after all frames have actually been played out. + + // These are the number of frames submitted to the + // AudioTrack that you haven't heard yet. + uint32_t numFramesPendingPlayout = + getNumFramesPendingPlayout(); + + // These are the number of frames we're going to + // submit to the AudioTrack by returning from this + // callback. + uint32_t numAdditionalFrames = size_done / mFrameSize; + + numFramesPendingPlayout += numAdditionalFrames; + + int64_t timeToCompletionUs = + (1000000ll * numFramesPendingPlayout) / mSampleRate; + + ALOGV("total number of frames played: %lld (%lld us)", + (mNumFramesPlayed + numAdditionalFrames), + 1000000ll * (mNumFramesPlayed + numAdditionalFrames) + / mSampleRate); + + ALOGV("%d frames left to play, %lld us (%.2f secs)", + numFramesPendingPlayout, + timeToCompletionUs, timeToCompletionUs / 1E6); + + postEOS = true; + if (mAudioSink->needsTrailingPadding()) { + postEOSDelayUs = timeToCompletionUs + mLatencyUs; + } else { + postEOSDelayUs = 0; + } + } + + mReachedEOS = true; } } - mReachedEOS = true; mFinalStatus = err; break; } @@ -452,17 +595,34 @@ size_t AudioPlayer::fillBuffer(void *data, size_t size) { mLatencyUs = (int64_t)mAudioTrack->latency() * 1000; } - CHECK(mInputBuffer->meta_data()->findInt64( + if(mInputBuffer->range_length() != 0) { + CHECK(mInputBuffer->meta_data()->findInt64( kKeyTime, &mPositionTimeMediaUs)); + } + + // need to adjust the mStartPosUs for offload decoding since parser + // might not be able to get the exact seek time requested. 
+ if (refreshSeekTime && useOffload()) { + if (postSeekComplete) { + ALOGV("fillBuffer is going to post SEEK_COMPLETE"); + mObserver->postAudioSeekComplete(); + postSeekComplete = false; + } + + mStartPosUs = mPositionTimeMediaUs; + ALOGV("adjust seek time to: %.2f", mStartPosUs/ 1E6); + } - mPositionTimeRealUs = - ((mNumFramesPlayed + size_done / mFrameSize) * 1000000) - / mSampleRate; + if (!useOffload()) { + mPositionTimeRealUs = + ((mNumFramesPlayed + size_done / mFrameSize) * 1000000) + / mSampleRate; + ALOGV("buffer->size() = %d, " + "mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f", + mInputBuffer->range_length(), + mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6); + } - ALOGV("buffer->size() = %d, " - "mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f", - mInputBuffer->range_length(), - mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6); } if (mInputBuffer->range_length() == 0) { @@ -488,6 +648,13 @@ size_t AudioPlayer::fillBuffer(void *data, size_t size) { size_remaining -= copy; } + if (useOffload()) { + // We must ask the hardware what it has played + mPositionTimeRealUs = getOutputPlayPositionUs_l(); + ALOGV("mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f", + mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6); + } + { Mutex::Autolock autoLock(mLock); mNumFramesPlayed += size_done / mFrameSize; @@ -536,9 +703,36 @@ int64_t AudioPlayer::getRealTimeUsLocked() const { return result + diffUs; } +int64_t AudioPlayer::getOutputPlayPositionUs_l() const +{ + uint32_t playedSamples = 0; + if (mAudioSink != NULL) { + mAudioSink->getPosition(&playedSamples); + } else { + mAudioTrack->getPosition(&playedSamples); + } + + const int64_t playedUs = (static_cast(playedSamples) * 1000000 ) / mSampleRate; + + // HAL position is relative to the first buffer we sent at mStartPosUs + const int64_t renderedDuration = mStartPosUs + playedUs; + ALOGV("getOutputPlayPositionUs_l %lld", renderedDuration); + return renderedDuration; +} + int64_t AudioPlayer::getMediaTimeUs() { Mutex::Autolock autoLock(mLock); + if (useOffload()) { + if (mSeeking) { + return mSeekTimeUs; + } + mPositionTimeRealUs = getOutputPlayPositionUs_l(); + ALOGV("getMediaTimeUs getOutputPlayPositionUs_l() mPositionTimeRealUs %lld", + mPositionTimeRealUs); + return mPositionTimeRealUs; + } + if (mPositionTimeMediaUs < 0 || mPositionTimeRealUs < 0) { if (mSeeking) { return mSeekTimeUs; @@ -547,6 +741,11 @@ int64_t AudioPlayer::getMediaTimeUs() { return 0; } + if (useOffload()) { + mPositionTimeRealUs = getOutputPlayPositionUs_l(); + return mPositionTimeRealUs; + } + int64_t realTimeOffset = getRealTimeUsLocked() - mPositionTimeRealUs; if (realTimeOffset < 0) { realTimeOffset = 0; @@ -568,19 +767,34 @@ bool AudioPlayer::getMediaTimeMapping( status_t AudioPlayer::seekTo(int64_t time_us) { Mutex::Autolock autoLock(mLock); + ALOGV("seekTo( %lld )", time_us); + mSeeking = true; mPositionTimeRealUs = mPositionTimeMediaUs = -1; mReachedEOS = false; mSeekTimeUs = time_us; + mStartPosUs = time_us; // Flush resets the number of played frames mNumFramesPlayed = 0; mNumFramesPlayedSysTimeUs = ALooper::GetNowUs(); if (mAudioSink != NULL) { + if (mPlaying) { + mAudioSink->pause(); + } mAudioSink->flush(); + if (mPlaying) { + mAudioSink->start(); + } } else { + if (mPlaying) { + mAudioTrack->pause(); + } mAudioTrack->flush(); + if (mPlaying) { + mAudioTrack->start(); + } } return OK; diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index b505518..3e70dd7 100644 --- 
a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -47,6 +47,7 @@ #include #include #include +#include #include #include @@ -65,6 +66,11 @@ static int64_t kHighWaterMarkUs = 5000000ll; // 5secs static const size_t kLowWaterMarkBytes = 40000; static const size_t kHighWaterMarkBytes = 200000; +// maximum time in paused state when offloading audio decompression. When elapsed, the AudioPlayer +// is destroyed to allow the audio DSP to power down. +static int64_t kOffloadPauseMaxUs = 60000000ll; + + struct AwesomeEvent : public TimedEventQueue::Event { AwesomeEvent( AwesomePlayer *player, @@ -194,7 +200,9 @@ AwesomePlayer::AwesomePlayer() mVideoBuffer(NULL), mDecryptHandle(NULL), mLastVideoTimeUs(-1), - mTextDriver(NULL) { + mTextDriver(NULL), + mOffloadAudio(false), + mAudioTearDown(false) { CHECK_EQ(mClient.connect(), (status_t)OK); DataSource::RegisterDefaultSniffers(); @@ -213,6 +221,10 @@ AwesomePlayer::AwesomePlayer() mAudioStatusEventPending = false; + mAudioTearDownEvent = new AwesomeEvent(this, + &AwesomePlayer::onAudioTearDownEvent); + mAudioTearDownEventPending = false; + reset(); } @@ -232,6 +244,11 @@ void AwesomePlayer::cancelPlayerEvents(bool keepNotifications) { mQueue.cancelEvent(mVideoLagEvent->eventID()); mVideoLagEventPending = false; + if (mOffloadAudio) { + mQueue.cancelEvent(mAudioTearDownEvent->eventID()); + mAudioTearDownEventPending = false; + } + if (!keepNotifications) { mQueue.cancelEvent(mStreamDoneEvent->eventID()); mStreamDoneEventPending = false; @@ -518,7 +535,7 @@ void AwesomePlayer::reset_l() { mVideoTrack.clear(); mExtractor.clear(); - // Shutdown audio first, so that the respone to the reset request + // Shutdown audio first, so that the response to the reset request // appears to happen instantaneously as far as the user is concerned // If we did this later, audio would continue playing while we // shutdown the video-related resources and the player appear to @@ -531,6 +548,7 @@ void AwesomePlayer::reset_l() { mAudioSource->stop(); } mAudioSource.clear(); + mOmxSource.clear(); mTimeSource = NULL; @@ -586,7 +604,7 @@ void AwesomePlayer::reset_l() { } void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) { - if (mListener != NULL) { + if ((mListener != NULL) && !mAudioTearDown) { sp listener = mListener.promote(); if (listener != NULL) { @@ -842,6 +860,13 @@ void AwesomePlayer::onStreamDone() { pause_l(true /* at eos */); + // If audio hasn't completed MEDIA_SEEK_COMPLETE yet, + // notify MEDIA_SEEK_COMPLETE to observer immediately for state persistence. + if (mWatchForAudioSeekComplete) { + notifyListener_l(MEDIA_SEEK_COMPLETE); + mWatchForAudioSeekComplete = false; + } + modifyFlags(AT_EOS, SET); } } @@ -883,41 +908,42 @@ status_t AwesomePlayer::play_l() { if (mAudioSource != NULL) { if (mAudioPlayer == NULL) { - if (mAudioSink != NULL) { - bool allowDeepBuffering; - int64_t cachedDurationUs; - bool eos; - if (mVideoSource == NULL - && (mDurationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US || - (getCachedDuration_l(&cachedDurationUs, &eos) && - cachedDurationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US))) { - allowDeepBuffering = true; - } else { - allowDeepBuffering = false; - } - - mAudioPlayer = new AudioPlayer(mAudioSink, allowDeepBuffering, this); - mAudioPlayer->setSource(mAudioSource); - - mTimeSource = mAudioPlayer; - - // If there was a seek request before we ever started, - // honor the request now. - // Make sure to do this before starting the audio player - // to avoid a race condition. 
- seekAudioIfNecessary_l(); - } + createAudioPlayer_l(); } CHECK(!(mFlags & AUDIO_RUNNING)); if (mVideoSource == NULL) { + // We don't want to post an error notification at this point, // the error returned from MediaPlayer::start() will suffice. status_t err = startAudioPlayer_l( false /* sendErrorNotification */); + if ((err != OK) && mOffloadAudio) { + ALOGI("play_l() cannot create offload output, fallback to sw decode"); + delete mAudioPlayer; + mAudioPlayer = NULL; + // if the player was started it will take care of stopping the source when destroyed + if (!(mFlags & AUDIOPLAYER_STARTED)) { + mAudioSource->stop(); + } + modifyFlags((AUDIO_RUNNING | AUDIOPLAYER_STARTED), CLEAR); + mOffloadAudio = false; + mAudioSource = mOmxSource; + if (mAudioSource != NULL) { + err = mAudioSource->start(); + + if (err != OK) { + mAudioSource.clear(); + } else { + createAudioPlayer_l(); + err = startAudioPlayer_l(false); + } + } + } + if (err != OK) { delete mAudioPlayer; mAudioPlayer = NULL; @@ -966,19 +992,58 @@ status_t AwesomePlayer::play_l() { return OK; } +void AwesomePlayer::createAudioPlayer_l() +{ + uint32_t flags = 0; + int64_t cachedDurationUs; + bool eos; + + if (mOffloadAudio) { + flags |= AudioPlayer::USE_OFFLOAD; + } else if (mVideoSource == NULL + && (mDurationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US || + (getCachedDuration_l(&cachedDurationUs, &eos) && + cachedDurationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US))) { + flags |= AudioPlayer::ALLOW_DEEP_BUFFERING; + } + if (isStreamingHTTP()) { + flags |= AudioPlayer::IS_STREAMING; + } + if (mVideoSource != NULL) { + flags |= AudioPlayer::HAS_VIDEO; + } + + mAudioPlayer = new AudioPlayer(mAudioSink, flags, this); + mAudioPlayer->setSource(mAudioSource); + + mTimeSource = mAudioPlayer; + + // If there was a seek request before we ever started, + // honor the request now. + // Make sure to do this before starting the audio player + // to avoid a race condition. + seekAudioIfNecessary_l(); +} + status_t AwesomePlayer::startAudioPlayer_l(bool sendErrorNotification) { CHECK(!(mFlags & AUDIO_RUNNING)); + status_t err = OK; if (mAudioSource == NULL || mAudioPlayer == NULL) { return OK; } + if (mOffloadAudio) { + mQueue.cancelEvent(mAudioTearDownEvent->eventID()); + mAudioTearDownEventPending = false; + } + if (!(mFlags & AUDIOPLAYER_STARTED)) { bool wasSeeking = mAudioPlayer->isSeeking(); // We've already started the MediaSource in order to enable // the prefetcher to read its data. - status_t err = mAudioPlayer->start( + err = mAudioPlayer->start( true /* sourceAlreadyStarted */); if (err != OK) { @@ -998,14 +1063,16 @@ status_t AwesomePlayer::startAudioPlayer_l(bool sendErrorNotification) { postAudioSeekComplete(); } } else { - mAudioPlayer->resume(); + err = mAudioPlayer->resume(); } - modifyFlags(AUDIO_RUNNING, SET); + if (err == OK) { + modifyFlags(AUDIO_RUNNING, SET); - mWatchForAudioEOS = true; + mWatchForAudioEOS = true; + } - return OK; + return err; } void AwesomePlayer::notifyVideoSize_l() { @@ -1137,15 +1204,14 @@ status_t AwesomePlayer::pause_l(bool at_eos) { cancelPlayerEvents(true /* keepNotifications */); if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) { - if (at_eos) { - // If we played the audio stream to completion we - // want to make sure that all samples remaining in the audio - // track's queue are played out. 
- mAudioPlayer->pause(true /* playPendingSamples */); - } else { - mAudioPlayer->pause(); + // If we played the audio stream to completion we + // want to make sure that all samples remaining in the audio + // track's queue are played out. + mAudioPlayer->pause(at_eos /* playPendingSamples */); + // send us a reminder to tear down the AudioPlayer if paused for too long. + if (mOffloadAudio) { + postAudioTearDownEvent(kOffloadPauseMaxUs); } - modifyFlags(AUDIO_RUNNING, CLEAR); } @@ -1290,7 +1356,6 @@ status_t AwesomePlayer::getPosition(int64_t *positionUs) { } else { *positionUs = 0; } - return OK; } @@ -1385,14 +1450,29 @@ status_t AwesomePlayer::initAudioDecoder() { const char *mime; CHECK(meta->findCString(kKeyMIMEType, &mime)); + // Check whether there is a hardware codec for this stream + // This doesn't guarantee that the hardware has a free stream + // but it avoids us attempting to open (and re-open) an offload + // stream to hardware that doesn't have the necessary codec + mOffloadAudio = canOffloadStream(meta, (mVideoSource != NULL), isStreamingHTTP()); if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { + ALOGV("createAudioPlayer: bypass OMX (raw)"); mAudioSource = mAudioTrack; } else { - mAudioSource = OMXCodec::Create( + // If offloading we still create a OMX decoder as a fall-back + // but we don't start it + mOmxSource = OMXCodec::Create( mClient.interface(), mAudioTrack->getFormat(), false, // createEncoder mAudioTrack); + + if (mOffloadAudio) { + ALOGV("createAudioPlayer: bypass OMX (offload)"); + mAudioSource = mAudioTrack; + } else { + mAudioSource = mOmxSource; + } } if (mAudioSource != NULL) { @@ -1408,6 +1488,7 @@ status_t AwesomePlayer::initAudioDecoder() { if (err != OK) { mAudioSource.clear(); + mOmxSource.clear(); return err; } } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) { @@ -1885,6 +1966,15 @@ void AwesomePlayer::postCheckAudioStatusEvent(int64_t delayUs) { mQueue.postEventWithDelay(mCheckAudioStatusEvent, delayUs); } +void AwesomePlayer::postAudioTearDownEvent(int64_t delayUs) { + Mutex::Autolock autoLock(mAudioLock); + if (mAudioTearDownEventPending) { + return; + } + mAudioTearDownEventPending = true; + mQueue.postEventWithDelay(mAudioTearDownEvent, delayUs); +} + void AwesomePlayer::onCheckAudioStatus() { { Mutex::Autolock autoLock(mAudioLock); @@ -2200,7 +2290,10 @@ bool AwesomePlayer::ContinuePreparation(void *cookie) { void AwesomePlayer::onPrepareAsyncEvent() { Mutex::Autolock autoLock(mLock); + beginPrepareAsync_l(); +} +void AwesomePlayer::beginPrepareAsync_l() { if (mFlags & PREPARE_CANCELLED) { ALOGI("prepare was cancelled before doing anything"); abortPrepare(UNKNOWN_ERROR); @@ -2273,6 +2366,10 @@ void AwesomePlayer::postAudioSeekComplete() { postCheckAudioStatusEvent(0); } +void AwesomePlayer::postAudioTearDown() { + postAudioTearDownEvent(0); +} + status_t AwesomePlayer::setParameter(int key, const Parcel &request) { switch (key) { case KEY_PARAMETER_CACHE_STAT_COLLECT_FREQ_MS: @@ -2404,6 +2501,7 @@ status_t AwesomePlayer::selectAudioTrack_l( mAudioSource->stop(); } mAudioSource.clear(); + mOmxSource.clear(); mTimeSource = NULL; @@ -2660,4 +2758,66 @@ void AwesomePlayer::modifyFlags(unsigned value, FlagMode mode) { } } +void AwesomePlayer::onAudioTearDownEvent() { + + Mutex::Autolock autoLock(mLock); + if (!mAudioTearDownEventPending) { + return; + } + mAudioTearDownEventPending = false; + + ALOGV("onAudioTearDownEvent"); + + // stream info is cleared by reset_l() so copy what we need + const bool wasPlaying = (mFlags & PLAYING); + 
KeyedVector uriHeaders(mUriHeaders); + sp fileSource(mFileSource); + + mStatsLock.lock(); + String8 uri(mStats.mURI); + mStatsLock.unlock(); + + // get current position so we can start recreated stream from here + int64_t position = 0; + getPosition(&position); + + // Reset and recreate + reset_l(); + mFlags |= PREPARING; + + status_t err; + + if (fileSource != NULL) { + mFileSource = fileSource; + err = setDataSource_l(fileSource); + } else { + err = setDataSource_l(uri, &uriHeaders); + } + + if ( err != OK ) { + // This will force beingPrepareAsync_l() to notify + // a MEDIA_ERROR to the client and abort the prepare + mFlags |= PREPARE_CANCELLED; + } + + mAudioTearDown = true; + mIsAsyncPrepare = true; + + // Call parepare for the host decoding + beginPrepareAsync_l(); + + if (mPrepareResult == OK) { + if (mExtractorFlags & MediaExtractor::CAN_SEEK) { + seekTo_l(position); + } + + if (wasPlaying) { + modifyFlags(CACHE_UNDERRUN, CLEAR); + play_l(); + } + } + + mAudioTearDown = false; +} + } // namespace android diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp index e9789d3..4db8e80 100644 --- a/media/libstagefright/Utils.cpp +++ b/media/libstagefright/Utils.cpp @@ -26,7 +26,12 @@ #include #include #include +#include +#include +#include +#include #include +#include namespace android { @@ -474,20 +479,128 @@ AString MakeUserAgent() { status_t sendMetaDataToHal(sp& sink, const sp& meta) { - // stub + int32_t sampleRate = 0; + int32_t bitRate = 0; + int32_t channelMask = 0; + int32_t delaySamples = 0; + int32_t paddingSamples = 0; + + AudioParameter param = AudioParameter(); + + if (meta->findInt32(kKeySampleRate, &sampleRate)) { + param.addInt(String8(AUDIO_OFFLOAD_CODEC_SAMPLE_RATE), sampleRate); + } + if (meta->findInt32(kKeyChannelMask, &channelMask)) { + param.addInt(String8(AUDIO_OFFLOAD_CODEC_NUM_CHANNEL), channelMask); + } + if (meta->findInt32(kKeyBitRate, &bitRate)) { + param.addInt(String8(AUDIO_OFFLOAD_CODEC_AVG_BIT_RATE), bitRate); + } + if (meta->findInt32(kKeyEncoderDelay, &delaySamples)) { + param.addInt(String8(AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES), delaySamples); + } + if (meta->findInt32(kKeyEncoderPadding, &paddingSamples)) { + param.addInt(String8(AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES), paddingSamples); + } + + ALOGV("sendMetaDataToHal: bitRate %d, sampleRate %d, chanMask %d," + "delaySample %d, paddingSample %d", bitRate, sampleRate, + channelMask, delaySamples, paddingSamples); + + sink->setParameters(param.toString()); return OK; } -status_t mapMimeToAudioFormat(audio_format_t& format, const char* mime) +struct mime_conv_t { + const char* mime; + audio_format_t format; +}; + +static const struct mime_conv_t mimeLookup[] = { + { MEDIA_MIMETYPE_AUDIO_MPEG, AUDIO_FORMAT_MP3 }, + { MEDIA_MIMETYPE_AUDIO_RAW, AUDIO_FORMAT_PCM_16_BIT }, + { MEDIA_MIMETYPE_AUDIO_AMR_NB, AUDIO_FORMAT_AMR_NB }, + { MEDIA_MIMETYPE_AUDIO_AMR_WB, AUDIO_FORMAT_AMR_WB }, + { MEDIA_MIMETYPE_AUDIO_AAC, AUDIO_FORMAT_AAC }, + { MEDIA_MIMETYPE_AUDIO_VORBIS, AUDIO_FORMAT_VORBIS }, + { 0, AUDIO_FORMAT_INVALID } +}; + +status_t mapMimeToAudioFormat( audio_format_t& format, const char* mime ) { - // stub +const struct mime_conv_t* p = &mimeLookup[0]; + while (p->mime != NULL) { + if (0 == strcasecmp(mime, p->mime)) { + format = p->format; + return OK; + } + ++p; + } + return BAD_VALUE; } bool canOffloadStream(const sp& meta, bool hasVideo, bool isStreaming) { - // stub - return false; + const char *mime; + CHECK(meta->findCString(kKeyMIMEType, &mime)); + + audio_offload_info_t info = 
AUDIO_INFO_INITIALIZER; + + info.format = AUDIO_FORMAT_INVALID; + if (mapMimeToAudioFormat(info.format, mime) != OK) { + ALOGE(" Couldn't map mime type \"%s\" to a valid AudioSystem::audio_format !", mime); + return false; + } else { + ALOGV("Mime type \"%s\" mapped to audio_format %d", mime, info.format); + } + + if (AUDIO_FORMAT_INVALID == info.format) { + // can't offload if we don't know what the source format is + ALOGE("mime type \"%s\" not a known audio format", mime); + return false; + } + + int32_t srate = -1; + if (!meta->findInt32(kKeySampleRate, &srate)) { + ALOGV("track of type '%s' does not publish sample rate", mime); + } + info.sample_rate = srate; + + int32_t cmask = 0; + if (!meta->findInt32(kKeyChannelMask, &cmask)) { + ALOGV("track of type '%s' does not publish channel mask", mime); + + // Try a channel count instead + int32_t channelCount; + if (!meta->findInt32(kKeyChannelCount, &channelCount)) { + ALOGV("track of type '%s' does not publish channel count", mime); + } else { + cmask = audio_channel_out_mask_from_count(channelCount); + } + } + info.channel_mask = cmask; + + int64_t duration = 0; + if (!meta->findInt64(kKeyDuration, &duration)) { + ALOGV("track of type '%s' does not publish duration", mime); + } + info.duration_us = duration; + + int32_t brate = -1; + if (!meta->findInt32(kKeyBitRate, &brate)) { + ALOGV("track of type '%s' does not publish bitrate", mime); + } + info.bit_rate = brate; + + + info.stream_type = AUDIO_STREAM_MUSIC; + info.has_video = hasVideo; + info.is_streaming = isStreaming; + + // Check if offload is possible for given format, stream type, sample rate, + // bit rate, duration, video and streaming + return AudioSystem::isOffloadSupported(info); } } // namespace android diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h index 0d17d65..d3c74e2 100644 --- a/media/libstagefright/include/AwesomePlayer.h +++ b/media/libstagefright/include/AwesomePlayer.h @@ -226,7 +226,7 @@ private: void postStreamDoneEvent_l(status_t status); void postCheckAudioStatusEvent(int64_t delayUs); void postVideoLagEvent_l(); - void postAudioTearDownEvent(); + void postAudioTearDownEvent(int64_t delayUs); status_t play_l(); -- cgit v1.1 From bd4c4fbb3b073e48963185d11a15da1fa18d2e54 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 25 Jul 2013 14:21:14 -0700 Subject: Remove obsolete TrackBase::step(), mStepCount, mStepServerFailed Change-Id: I6347096f066b8b19451c6472db7b0671f0cf7702 --- services/audioflinger/TrackBase.h | 5 ----- services/audioflinger/Tracks.cpp | 16 ++++++---------- 2 files changed, 6 insertions(+), 15 deletions(-) diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h index a243563..44a63c3 100644 --- a/services/audioflinger/TrackBase.h +++ b/services/audioflinger/TrackBase.h @@ -108,8 +108,6 @@ protected: mTerminated = true; } - bool step(); // mStepCount is an implicit input - bool isOut() const { return mIsOut; } // true for Track and TimedTrack, false for RecordTrack, // this could be a track type if needed later @@ -122,8 +120,6 @@ protected: // except for OutputTrack when it is in local memory void* mBufferEnd; // &mBuffer[mFrameCount * frameSize], where frameSize // is based on mChannelCount and 16-bit samples - uint32_t mStepCount; // saves AudioBufferProvider::Buffer::frameCount as of - // time of releaseBuffer() for later use by step() // we don't really need a lock for these track_state mState; const uint32_t mSampleRate; // initial sample rate only; 
for tracks which @@ -137,7 +133,6 @@ protected: const size_t mFrameCount;// size of track buffer given at createTrack() or // openRecord(), and then adjusted as needed - bool mStepServerFailed; const int mSessionId; Vector < sp >mSyncEvents; const bool mIsOut; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 52518ae..ad4db98 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -76,7 +76,6 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mCblk(NULL), // mBuffer // mBufferEnd - mStepCount(0), mState(IDLE), mSampleRate(sampleRate), mFormat(format), @@ -85,7 +84,6 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mFrameSize(audio_is_linear_pcm(format) ? mChannelCount * audio_bytes_per_sample(format) : sizeof(int8_t)), mFrameCount(frameCount), - mStepServerFailed(false), mSessionId(sessionId), mIsOut(isOut), mServerProxy(NULL), @@ -395,8 +393,8 @@ void AudioFlinger::PlaybackThread::Track::destroy() /*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result) { - result.append(" Name Client Type Fmt Chn mask Session StpCnt fCount S F SRate " - "L dB R dB Server Main buf Aux Buf Flags Underruns\n"); + result.append(" Name Client Type Fmt Chn mask Session fCount S F SRate " + "L dB R dB Server Main buf Aux Buf Flags Underruns\n"); } void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) @@ -460,14 +458,13 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) nowInUnderrun = '?'; break; } - snprintf(&buffer[7], size-7, " %6d %4u 0x%08x 0x%08x %7u %6u %6u %1c %1d %5u %5.2g %5.2g " - "0x%08x 0x%08x 0x%08x %#5x %9u%c\n", + snprintf(&buffer[7], size-7, " %6u %4u %3u %08X %7u %6u %1c %1d %5u %5.2g %5.2g " + "%08X %08X %08X 0x%03X %9u%c\n", (mClient == 0) ? getpid_cached : mClient->pid(), mStreamType, mFormat, mChannelMask, mSessionId, - mStepCount, mFrameCount, stateChar, mFillingUpStatus, @@ -1732,17 +1729,16 @@ void AudioFlinger::RecordThread::RecordTrack::destroy() /*static*/ void AudioFlinger::RecordThread::RecordTrack::appendDumpHeader(String8& result) { - result.append(" Clien Fmt Chn mask Session Step S Serv FrameCount\n"); + result.append("Client Fmt Chn mask Session S Server fCount\n"); } void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size) { - snprintf(buffer, size, " %05d %03u 0x%08x %05d %04u %01d %08x %05d\n", + snprintf(buffer, size, "%6u %3u %08X %7u %1d %08X %6u\n", (mClient == 0) ? 
getpid_cached : mClient->pid(), mFormat, mChannelMask, mSessionId, - mStepCount, mState, mCblk->server, mFrameCount); -- cgit v1.1 From 35cc4f3127322ad3e3dd1e15e8ae29ff4b4a3af6 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 25 Jul 2013 14:21:35 -0700 Subject: Remove obsolete mBufferEnd Change-Id: I507c6109d66000bb30933ca23b912a1316f55e5e --- services/audioflinger/TrackBase.h | 2 -- services/audioflinger/Tracks.cpp | 8 +++----- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h index 44a63c3..7052a0f 100644 --- a/services/audioflinger/TrackBase.h +++ b/services/audioflinger/TrackBase.h @@ -118,8 +118,6 @@ protected: audio_track_cblk_t* mCblk; void* mBuffer; // start of track buffer, typically in shared memory // except for OutputTrack when it is in local memory - void* mBufferEnd; // &mBuffer[mFrameCount * frameSize], where frameSize - // is based on mChannelCount and 16-bit samples // we don't really need a lock for these track_state mState; const uint32_t mSampleRate; // initial sample rate only; for tracks which diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index ad4db98..aa1bcc2 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -75,7 +75,6 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mClient(client), mCblk(NULL), // mBuffer - // mBufferEnd mState(IDLE), mSampleRate(sampleRate), mFormat(format), @@ -133,7 +132,6 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( mCblk->flags = CBLK_FORCEREADY; // FIXME hack, need to fix the track ready logic #endif } - mBufferEnd = (uint8_t *)mBuffer + bufferSize; #ifdef TEE_SINK if (mTeeSinkTrackEnabled) { @@ -1399,9 +1397,9 @@ AudioFlinger::PlaybackThread::OutputTrack::OutputTrack( mOutBuffer.frameCount = 0; playbackThread->mTracks.add(this); ALOGV("OutputTrack constructor mCblk %p, mBuffer %p, " - "mCblk->frameCount_ %u, mChannelMask 0x%08x mBufferEnd %p", + "mCblk->frameCount_ %u, mChannelMask 0x%08x", mCblk, mBuffer, - mCblk->frameCount_, mChannelMask, mBufferEnd); + mCblk->frameCount_, mChannelMask); // since client and server are in the same process, // the buffer has the same virtual address on both sides mClientProxy = new AudioTrackClientProxy(mCblk, mBuffer, mFrameCount, mFrameSize); @@ -1656,7 +1654,7 @@ AudioFlinger::RecordThread::RecordTrack::RecordTrack( channelMask, frameCount, 0 /*sharedBuffer*/, sessionId, false /*isOut*/), mOverflow(false) { - ALOGV("RecordTrack constructor, size %d", (int)mBufferEnd - (int)mBuffer); + ALOGV("RecordTrack constructor"); if (mCblk != NULL) { mAudioRecordServerProxy = new AudioRecordServerProxy(mCblk, mBuffer, frameCount, mFrameSize); -- cgit v1.1 From 3dcd00dddec86a1c5133083ad7ba2265d49c048c Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 17 Jul 2013 10:10:23 -0700 Subject: Declare methods in binder opcode order Change-Id: I5f624b7a51ffe1a17a67c056cf984f74e4c56eac --- include/media/IAudioRecord.h | 6 +++--- media/libmedia/IAudioRecord.cpp | 23 ++++++++++++----------- services/audioflinger/AudioFlinger.h | 3 ++- services/audioflinger/Tracks.cpp | 8 ++++---- 4 files changed, 21 insertions(+), 19 deletions(-) diff --git a/include/media/IAudioRecord.h b/include/media/IAudioRecord.h index d6e3141..eccc2ca 100644 --- a/include/media/IAudioRecord.h +++ b/include/media/IAudioRecord.h @@ -34,6 +34,9 @@ class IAudioRecord : public IInterface public: DECLARE_META_INTERFACE(AudioRecord); + /* get this tracks control block */ + virtual sp 
getCblk() const = 0; + /* After it's created the track is not active. Call start() to * make it active. */ @@ -44,9 +47,6 @@ public: * will be processed, unless flush() is called. */ virtual void stop() = 0; - - /* get this tracks control block */ - virtual sp getCblk() const = 0; }; // ---------------------------------------------------------------------------- diff --git a/media/libmedia/IAudioRecord.cpp b/media/libmedia/IAudioRecord.cpp index 0d06e98..4a7de65 100644 --- a/media/libmedia/IAudioRecord.cpp +++ b/media/libmedia/IAudioRecord.cpp @@ -42,6 +42,18 @@ public: { } + virtual sp getCblk() const + { + Parcel data, reply; + sp cblk; + data.writeInterfaceToken(IAudioRecord::getInterfaceDescriptor()); + status_t status = remote()->transact(GET_CBLK, data, &reply); + if (status == NO_ERROR) { + cblk = interface_cast(reply.readStrongBinder()); + } + return cblk; + } + virtual status_t start(int /*AudioSystem::sync_event_t*/ event, int triggerSession) { Parcel data, reply; @@ -64,17 +76,6 @@ public: remote()->transact(STOP, data, &reply); } - virtual sp getCblk() const - { - Parcel data, reply; - sp cblk; - data.writeInterfaceToken(IAudioRecord::getInterfaceDescriptor()); - status_t status = remote()->transact(GET_CBLK, data, &reply); - if (status == NO_ERROR) { - cblk = interface_cast(reply.readStrongBinder()); - } - return cblk; - } }; IMPLEMENT_META_INTERFACE(AudioRecord, "android.media.IAudioRecord"); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 262d194..eee5da5 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -409,10 +409,11 @@ private: int64_t pts); virtual status_t setMediaTimeTransform(const LinearTransform& xform, int target); + virtual status_t setParameters(const String8& keyValuePairs); + virtual status_t onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags); - virtual status_t setParameters(const String8& keyValuePairs); private: const sp mTrack; }; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index ad4db98..f87689d 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -245,10 +245,6 @@ void AudioFlinger::TrackHandle::pause() { mTrack->pause(); } -status_t AudioFlinger::TrackHandle::setParameters(const String8& keyValuePairs) { - return mTrack->setParameters(keyValuePairs); -} - status_t AudioFlinger::TrackHandle::attachAuxEffect(int EffectId) { return mTrack->attachAuxEffect(EffectId); @@ -286,6 +282,10 @@ status_t AudioFlinger::TrackHandle::setMediaTimeTransform( xform, static_cast(target)); } +status_t AudioFlinger::TrackHandle::setParameters(const String8& keyValuePairs) { + return mTrack->setParameters(keyValuePairs); +} + status_t AudioFlinger::TrackHandle::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { -- cgit v1.1 From 04022b34d2b97938b0926ab62e6c283418da3bba Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Mon, 29 Jul 2013 11:22:27 -0700 Subject: Fail more gracefully on version mismatch b/9900647 Change-Id: I9ea508a2685ff8adc780edd5ecec30dd1a9b0997 --- media/libstagefright/MPEG4Extractor.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index 42a9c7a..ad985ee 100644 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -1924,13 +1924,13 @@ status_t MPEG4Extractor::parseTrackHeader( mtime = 
U64_AT(&buffer[12]); id = U32_AT(&buffer[20]); duration = U64_AT(&buffer[28]); - } else { - CHECK_EQ((unsigned)version, 0u); - + } else if (version == 0) { ctime = U32_AT(&buffer[4]); mtime = U32_AT(&buffer[8]); id = U32_AT(&buffer[12]); duration = U32_AT(&buffer[20]); + } else { + return ERROR_UNSUPPORTED; } mLastTrack->meta->setInt32(kKeyTrackID, id); -- cgit v1.1 From 1581101ce2a8c1b8d0b07b643ad891595221d781 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Mon, 29 Jul 2013 12:25:59 -0700 Subject: camera2 api: Override default RGBx formats to IMPLEMENTATION_DEFINED Bug: 9487482 Change-Id: I09d3b4e41454d350c4bc7b1f8e893c3dad655e73 --- services/camera/libcameraservice/photography/CameraDeviceClient.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp index e1c7e79..485c843 100644 --- a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp @@ -325,8 +325,8 @@ status_t CameraDeviceClient::createStream(int width, int height, int format, // FIXME: remove this override since the default format should be // IMPLEMENTATION_DEFINED. b/9487482 - if (format != HAL_PIXEL_FORMAT_BLOB && - format != HAL_PIXEL_FORMAT_YCbCr_420_888) { + if (format >= HAL_PIXEL_FORMAT_RGBA_8888 && + format <= HAL_PIXEL_FORMAT_BGRA_8888) { ALOGW("%s: Camera %d: Overriding format 0x%x to IMPLEMENTATION_DEFINED", __FUNCTION__, mCameraId, format); format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; -- cgit v1.1 From f6ed423af92a56ef54bba23eba883b1f21448b54 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 16 Jul 2013 11:16:27 -0700 Subject: Treat mChannelCount as uint32_t consistently mChannelCount was 8-, 16-, or 32-bits Change-Id: I2cc2fedf3e33144e5c8bbd9894763282d9217f63 --- services/audioflinger/Threads.cpp | 8 ++++---- services/audioflinger/Threads.h | 2 +- services/audioflinger/TrackBase.h | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 0fc31f6..3574aea 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -427,7 +427,7 @@ void AudioFlinger::ThreadBase::dumpBase(int fd, const Vector& args) result.append(buffer); snprintf(buffer, SIZE, "Normal frame count: %d\n", mNormalFrameCount); result.append(buffer); - snprintf(buffer, SIZE, "Channel Count: %d\n", mChannelCount); + snprintf(buffer, SIZE, "Channel Count: %u\n", mChannelCount); result.append(buffer); snprintf(buffer, SIZE, "Channel Mask: 0x%08x\n", mChannelMask); result.append(buffer); @@ -1553,7 +1553,7 @@ void AudioFlinger::PlaybackThread::readOutputParameters() LOG_FATAL("HAL channel mask %#x not supported for mixed output; " "must be AUDIO_CHANNEL_OUT_STEREO", mChannelMask); } - mChannelCount = (uint16_t)popcount(mChannelMask); + mChannelCount = popcount(mChannelMask); mFormat = mOutput->stream->common.get_format(&mOutput->stream->common); if (!audio_is_valid_format(mFormat)) { LOG_FATAL("HAL format %d not valid for output", mFormat); @@ -2317,7 +2317,7 @@ AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, Aud // mNormalSink below { ALOGV("MixerThread() id=%d device=%#x type=%d", id, device, type); - ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%d, mFormat=%d, mFrameSize=%u, " + ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%u, mFormat=%d, mFrameSize=%u, 
" "mFrameCount=%d, mNormalFrameCount=%d", mSampleRate, mChannelMask, mChannelCount, mFormat, mFrameSize, mFrameCount, mNormalFrameCount); @@ -4921,7 +4921,7 @@ void AudioFlinger::RecordThread::readInputParameters() mSampleRate = mInput->stream->common.get_sample_rate(&mInput->stream->common); mChannelMask = mInput->stream->common.get_channels(&mInput->stream->common); - mChannelCount = (uint16_t)popcount(mChannelMask); + mChannelCount = popcount(mChannelMask); mFormat = mInput->stream->common.get_format(&mInput->stream->common); mFrameSize = audio_stream_frame_size(&mInput->stream->common); mInputBytes = mInput->stream->common.get_buffer_size(&mInput->stream->common); diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index c5818ae..a17c279 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -267,7 +267,7 @@ protected: size_t mFrameCount; // output HAL, direct output, record size_t mNormalFrameCount; // normal mixer and effects audio_channel_mask_t mChannelMask; - uint16_t mChannelCount; + uint32_t mChannelCount; size_t mFrameSize; audio_format_t mFormat; diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h index 7052a0f..523e4b2 100644 --- a/services/audioflinger/TrackBase.h +++ b/services/audioflinger/TrackBase.h @@ -124,7 +124,7 @@ protected: // support dynamic rates, the current value is in control block const audio_format_t mFormat; const audio_channel_mask_t mChannelMask; - const uint8_t mChannelCount; + const uint32_t mChannelCount; const size_t mFrameSize; // AudioFlinger's view of frame size in shared memory, // where for AudioTrack (but not AudioRecord), // 8-bit PCM samples are stored as 16-bit -- cgit v1.1 From 9b58f63e45ef2fdfb839b9b9bb3411d81eb96128 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 16 Jul 2013 11:37:48 -0700 Subject: Move members from ThreadBase to PlaybackThread Move mNormalFrameCount and frameCountHAL(), since they're not used by record threads. Also comment which fields are updated by readParameters(). 
Change-Id: I5fc0a8a89cc637976f22d49271a5a3e136dab4e1 --- services/audioflinger/Threads.cpp | 13 ++++++------- services/audioflinger/Threads.h | 21 ++++++++++++++++----- 2 files changed, 22 insertions(+), 12 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 1a513c4..6a224ac 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -266,10 +266,9 @@ AudioFlinger::ThreadBase::ThreadBase(const sp& audioFlinger, audio audio_devices_t outDevice, audio_devices_t inDevice, type_t type) : Thread(false /*canCallJava*/), mType(type), - mAudioFlinger(audioFlinger), mSampleRate(0), mFrameCount(0), mNormalFrameCount(0), - // mChannelMask - mChannelCount(0), - mFrameSize(1), mFormat(AUDIO_FORMAT_INVALID), + mAudioFlinger(audioFlinger), + // mSampleRate, mFrameCount, mChannelMask, mChannelCount, mFrameSize, and mFormat are + // set by PlaybackThread::readOutputParameters() or RecordThread::readInputParameters() mParamStatus(NO_ERROR), mStandby(false), mOutDevice(outDevice), mInDevice(inDevice), mAudioSource(AUDIO_SOURCE_DEFAULT), mId(id), @@ -425,8 +424,6 @@ void AudioFlinger::ThreadBase::dumpBase(int fd, const Vector& args) result.append(buffer); snprintf(buffer, SIZE, "HAL frame count: %d\n", mFrameCount); result.append(buffer); - snprintf(buffer, SIZE, "Normal frame count: %d\n", mNormalFrameCount); - result.append(buffer); snprintf(buffer, SIZE, "Channel Count: %u\n", mChannelCount); result.append(buffer); snprintf(buffer, SIZE, "Channel Mask: 0x%08x\n", mChannelMask); @@ -932,6 +929,7 @@ AudioFlinger::PlaybackThread::PlaybackThread(const sp& audioFlinge audio_devices_t device, type_t type) : ThreadBase(audioFlinger, id, device, AUDIO_DEVICE_NONE, type), + mNormalFrameCount(0), mMixBuffer(NULL), mAllocMixBuffer(NULL), mSuspended(0), mBytesWritten(0), // mStreamTypes[] initialized in constructor body mOutput(output), @@ -1054,6 +1052,8 @@ void AudioFlinger::PlaybackThread::dumpInternals(int fd, const Vector& snprintf(buffer, SIZE, "\nOutput thread %p internals\n", this); result.append(buffer); + snprintf(buffer, SIZE, "Normal frame count: %d\n", mNormalFrameCount); + result.append(buffer); snprintf(buffer, SIZE, "last write occurred (msecs): %llu\n", ns2ms(systemTime() - mLastWriteTime)); result.append(buffer); @@ -4920,7 +4920,6 @@ void AudioFlinger::RecordThread::readInputParameters() mFrameSize = audio_stream_frame_size(&mInput->stream->common); mInputBytes = mInput->stream->common.get_buffer_size(&mInput->stream->common); mFrameCount = mInputBytes / mFrameSize; - mNormalFrameCount = mFrameCount; // not used by record, but used by input effects mRsmpInBuffer = new int16_t[mFrameCount * mChannelCount]; if (mSampleRate != mReqSampleRate && mChannelCount <= FCC_2 && mReqChannelCount <= FCC_2) diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index a17c279..b1b33b0 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -126,10 +126,8 @@ public: audio_channel_mask_t channelMask() const { return mChannelMask; } audio_format_t format() const { return mFormat; } // Called by AudioFlinger::frameCount(audio_io_handle_t output) and effects, - // and returns the normal mix buffer's frame count. - size_t frameCount() const { return mNormalFrameCount; } - // Return's the HAL's frame count i.e. fast mixer buffer size. - size_t frameCountHAL() const { return mFrameCount; } + // and returns the [normal mix] buffer's frame count. 
+ virtual size_t frameCount() const = 0; size_t frameSize() const { return mFrameSize; } // Should be "virtual status_t requestExitAndWait()" and override same @@ -263,9 +261,11 @@ protected: Condition mWaitWorkCV; const sp mAudioFlinger; + + // updated by PlaybackThread::readOutputParameters() or + // RecordThread::readInputParameters() uint32_t mSampleRate; size_t mFrameCount; // output HAL, direct output, record - size_t mNormalFrameCount; // normal mixer and effects audio_channel_mask_t mChannelMask; uint32_t mChannelCount; size_t mFrameSize; @@ -461,8 +461,15 @@ public: // called with AudioFlinger lock held void invalidateTracks(audio_stream_type_t streamType); + virtual size_t frameCount() const { return mNormalFrameCount; } + + // Return's the HAL's frame count i.e. fast mixer buffer size. + size_t frameCountHAL() const { return mFrameCount; } protected: + // updated by readOutputParameters() + size_t mNormalFrameCount; // normal mixer and effects + int16_t* mMixBuffer; // frame size aligned mix buffer int8_t* mAllocMixBuffer; // mixer buffer allocation address @@ -871,6 +878,8 @@ public: static void syncStartEventCallback(const wp& event); void handleSyncStartEvent(const sp& event); + virtual size_t frameCount() const { return mFrameCount; } + private: void clearSyncStartEvent(); @@ -886,6 +895,8 @@ private: // is used together with mStartStopCond to indicate start()/stop() progress sp mActiveTrack; Condition mStartStopCond; + + // updated by RecordThread::readInputParameters() AudioResampler *mResampler; int32_t *mRsmpOutBuffer; int16_t *mRsmpInBuffer; -- cgit v1.1 From 548efc94813c1dec6e8cf6c085ae41ccb04827f1 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 29 Nov 2012 08:48:51 -0800 Subject: Rename RecordThread::mInputBytes to the more generic mBufferSize This prepares for using it in PlaybackThreads later Change-Id: Id90a92aa6372e4b69914b0008cef07296ca5d6a3 --- services/audioflinger/Threads.cpp | 12 ++++++------ services/audioflinger/Threads.h | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 6a224ac..3d8fd45 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -4161,7 +4161,7 @@ AudioFlinger::RecordThread::RecordThread(const sp& audioFlinger, ) : ThreadBase(audioFlinger, id, outDevice, inDevice, RECORD), mInput(input), mResampler(NULL), mRsmpOutBuffer(NULL), mRsmpInBuffer(NULL), - // mRsmpInIndex and mInputBytes set by readInputParameters() + // mRsmpInIndex and mBufferSize set by readInputParameters() mReqChannelCount(popcount(channelMask)), mReqSampleRate(sampleRate) // mBytesRead is only meaningful while active, and so is cleared in start() @@ -4314,7 +4314,7 @@ bool AudioFlinger::RecordThread::threadLoop() mRsmpInIndex = 0; } mBytesRead = mInput->stream->read(mInput->stream, readInto, - mInputBytes); + mBufferSize); if (mBytesRead <= 0) { if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE)) { @@ -4669,7 +4669,7 @@ void AudioFlinger::RecordThread::dumpInternals(int fd, const Vector& a if (mActiveTrack != 0) { snprintf(buffer, SIZE, "In index: %d\n", mRsmpInIndex); result.append(buffer); - snprintf(buffer, SIZE, "In size: %d\n", mInputBytes); + snprintf(buffer, SIZE, "Buffer size: %u bytes\n", mBufferSize); result.append(buffer); snprintf(buffer, SIZE, "Resampling: %d\n", (mResampler != NULL)); result.append(buffer); @@ -4722,7 +4722,7 @@ status_t AudioFlinger::RecordThread::getNextBuffer(AudioBufferProvider::Buffer* int 
channelCount; if (framesReady == 0) { - mBytesRead = mInput->stream->read(mInput->stream, mRsmpInBuffer, mInputBytes); + mBytesRead = mInput->stream->read(mInput->stream, mRsmpInBuffer, mBufferSize); if (mBytesRead <= 0) { if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE)) { ALOGE("RecordThread::getNextBuffer() Error reading audio input"); @@ -4918,8 +4918,8 @@ void AudioFlinger::RecordThread::readInputParameters() mChannelCount = popcount(mChannelMask); mFormat = mInput->stream->common.get_format(&mInput->stream->common); mFrameSize = audio_stream_frame_size(&mInput->stream->common); - mInputBytes = mInput->stream->common.get_buffer_size(&mInput->stream->common); - mFrameCount = mInputBytes / mFrameSize; + mBufferSize = mInput->stream->common.get_buffer_size(&mInput->stream->common); + mFrameCount = mBufferSize / mFrameSize; mRsmpInBuffer = new int16_t[mFrameCount * mChannelCount]; if (mSampleRate != mReqSampleRate && mChannelCount <= FCC_2 && mReqChannelCount <= FCC_2) diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index b1b33b0..7be6043 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -901,7 +901,7 @@ private: int32_t *mRsmpOutBuffer; int16_t *mRsmpInBuffer; size_t mRsmpInIndex; - size_t mInputBytes; + size_t mBufferSize; // stream buffer size for read() const uint32_t mReqChannelCount; const uint32_t mReqSampleRate; ssize_t mBytesRead; -- cgit v1.1 From fa319e6d918b84f93fb5457af5d1cca6421ac517 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 29 Jul 2013 17:17:38 -0700 Subject: Remove CC_LIKELY and CC_UNLIKELY where not needed Only keep them in performance-sensitive code Change-Id: Ib257ddd7bc39ce9896997ffae008c524ac743d01 --- services/audioflinger/AudioFlinger.cpp | 1 - services/audioflinger/Threads.cpp | 11 +++++------ services/audioflinger/Tracks.cpp | 1 - 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index cc5af87..b30e2cf 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -37,7 +37,6 @@ #include #include -#include #include #include diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 3d8fd45..e71c66e 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -25,7 +25,6 @@ #include #include #include -#include #include #include #include @@ -1754,7 +1753,7 @@ void AudioFlinger::PlaybackThread::threadLoop_removeTracks( const Vector< sp >& tracksToRemove) { size_t count = tracksToRemove.size(); - if (CC_UNLIKELY(count)) { + if (count) { for (size_t i = 0 ; i < count ; i++) { const sp& track = tracksToRemove.itemAt(i); if (!track->isOutputTrack()) { @@ -2282,7 +2281,7 @@ if (mType == MIXER) { void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp >& tracksToRemove) { size_t count = tracksToRemove.size(); - if (CC_UNLIKELY(count)) { + if (count) { for (size_t i=0 ; i& track = tracksToRemove.itemAt(i); mActiveTracks.remove(track); @@ -3551,7 +3550,7 @@ void AudioFlinger::DirectOutputThread::threadLoop_mix() AudioBufferProvider::Buffer buffer; buffer.frameCount = frameCount; mActiveTrack->getNextBuffer(&buffer); - if (CC_UNLIKELY(buffer.raw == NULL)) { + if (buffer.raw == NULL) { memset(curBuf, 0, frameCount * mFrameSize); break; } @@ -3852,7 +3851,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr track->mFillingUpStatus = Track::FS_ACTIVE; 
mLeftVolFloat = mRightVolFloat = 0; if (track->mState == TrackBase::RESUMING) { - if (CC_UNLIKELY(mPausedBytesRemaining)) { + if (mPausedBytesRemaining) { // Need to continue write that was interrupted mCurrentWriteLength = mPausedWriteLength; mBytesRemaining = mPausedBytesRemaining; @@ -4274,7 +4273,7 @@ bool AudioFlinger::RecordThread::threadLoop() buffer.frameCount = mFrameCount; status_t status = mActiveTrack->getNextBuffer(&buffer); - if (CC_LIKELY(status == NO_ERROR)) { + if (status == NO_ERROR) { readOnce = true; size_t framesOut = buffer.frameCount; if (mResampler == NULL) { diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index a6c4bda..4ea1355 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -21,7 +21,6 @@ #include "Configuration.h" #include -#include #include #include -- cgit v1.1 From eced2daaa6c91a3731eef978ce65c6ec319c5e6a Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 16 Jul 2013 17:17:28 -0700 Subject: Use correct type for OutputDescriptor::format Change-Id: Ide608ef452d57da29b708180d90470361c123d1d --- include/media/AudioSystem.h | 2 +- media/libmedia/IAudioFlingerClient.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index f9e625e..006af08 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -158,7 +158,7 @@ public: : samplingRate(0), format(AUDIO_FORMAT_DEFAULT), channelMask(0), frameCount(0), latency(0) {} uint32_t samplingRate; - int32_t format; + audio_format_t format; audio_channel_mask_t channelMask; size_t frameCount; uint32_t latency; diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp index 84a589a..3c0d4cf 100644 --- a/media/libmedia/IAudioFlingerClient.cpp +++ b/media/libmedia/IAudioFlingerClient.cpp @@ -83,7 +83,7 @@ status_t BnAudioFlingerClient::onTransact( ALOGV("STREAM_CONFIG_CHANGED stream %d", stream); } else if (event != AudioSystem::OUTPUT_CLOSED && event != AudioSystem::INPUT_CLOSED) { desc.samplingRate = data.readInt32(); - desc.format = data.readInt32(); + desc.format = (audio_format_t) data.readInt32(); desc.channelMask = (audio_channel_mask_t) data.readInt32(); desc.frameCount = data.readInt32(); desc.latency = data.readInt32(); -- cgit v1.1 From f20e1d8df84c5fbeeace0052d100982ae39bb7a4 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 12 Jul 2013 09:45:18 -0700 Subject: Rename control block server to mServer and add comments Change-Id: Ieabd91acee92d0e84e66fbd358df5282b856306e --- include/private/media/AudioTrackShared.h | 9 ++++++--- media/libmedia/AudioTrackShared.cpp | 6 +++--- services/audioflinger/Threads.cpp | 15 ++++++++------- services/audioflinger/Tracks.cpp | 4 ++-- 4 files changed, 19 insertions(+), 15 deletions(-) diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index b890180..e950b9e 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -89,8 +89,11 @@ struct audio_track_cblk_t // The data members are grouped so that members accessed frequently and in the same context // are in the same line of data cache. - volatile uint32_t server; // updated asynchronously by server, - // "for entertainment purposes only" + uint32_t mServer; // Number of filled frames consumed by server (mIsOut), + // or filled frames provided by server (!mIsOut). + // It is updated asynchronously by server without a barrier. 
+ // The value should be used "for entertainment purposes only", + // which means don't make important decisions based on it. size_t frameCount_; // used during creation to pass actual track buffer size // from AudioFlinger to client, and not referenced again @@ -235,7 +238,7 @@ public: void interrupt(); size_t getPosition() { - return mEpoch + mCblk->server; + return mEpoch + mCblk->mServer; } void setEpoch(size_t epoch) { diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index aa45a2f..e5f7fcd 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -26,7 +26,7 @@ extern "C" { namespace android { audio_track_cblk_t::audio_track_cblk_t() - : server(0), frameCount_(0), mFutex(0), mMinimum(0), + : mServer(0), frameCount_(0), mFutex(0), mMinimum(0), mVolumeLR(0x10001000), mSampleRate(0), mSendLevel(0), mName(0), flags(0) { memset(&u, 0, sizeof(u)); @@ -594,7 +594,7 @@ void ServerProxy::releaseBuffer(Buffer* buffer) android_atomic_release_store(stepCount + rear, &cblk->u.mStreaming.mRear); } - mCblk->server += stepCount; + mCblk->mServer += stepCount; size_t half = mFrameCount / 2; if (half == 0) { @@ -805,7 +805,7 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer) } mPosition = newPosition; - cblk->server += stepCount; + cblk->mServer += stepCount; cblk->u.mStatic.mBufferPosition = newPosition; if (setFlags != 0) { (void) android_atomic_or(setFlags, &cblk->flags); diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 62e2e1e..2bb6495 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2903,7 +2903,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac if ((framesReady >= minFrames) && track->isReady() && !track->isPaused() && !track->isTerminated()) { - ALOGVV("track %d s=%08x [OK] on thread %p", name, cblk->server, this); + ALOGVV("track %d s=%08x [OK] on thread %p", name, cblk->mServer, this); mixedTracks++; @@ -2932,7 +2932,8 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac param = AudioMixer::RAMP_VOLUME; } mAudioMixer->setParameter(name, AudioMixer::RESAMPLE, AudioMixer::RESET, NULL); - } else if (cblk->server != 0) { + // FIXME should not make a decision based on mServer + } else if (cblk->mServer != 0) { // If the track is stopped before the first frame was mixed, // do not apply ramp param = AudioMixer::RAMP_VOLUME; @@ -3069,7 +3070,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac chain->clearInputBuffer(); } - ALOGVV("track %d s=%08x [NOT READY] on thread %p", name, cblk->server, this); + ALOGVV("track %d s=%08x [NOT READY] on thread %p", name, cblk->mServer, this); if ((track->sharedBuffer() != 0) || track->isTerminated() || track->isStopped() || track->isPaused()) { // We have consumed all the buffers of this track. 
@@ -3483,7 +3484,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep if ((track->framesReady() >= minFrames) && track->isReady() && !track->isPaused() && !track->isTerminated()) { - ALOGVV("track %d u=%08x, s=%08x [OK]", track->name(), cblk->user, cblk->server); + ALOGVV("track %d s=%08x [OK]", track->name(), cblk->mServer); if (track->mFillingUpStatus == Track::FS_FILLED) { track->mFillingUpStatus = Track::FS_ACTIVE; @@ -3508,7 +3509,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep mEffectChains[0]->clearInputBuffer(); } - ALOGVV("track %d u=%08x, s=%08x [NOT READY]", track->name(), cblk->user, cblk->server); + ALOGVV("track %d s=%08x [NOT READY]", track->name(), cblk->mServer); if ((track->sharedBuffer() != 0) || track->isTerminated() || track->isStopped() || track->isPaused()) { // We have consumed all the buffers of this track. @@ -3847,7 +3848,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr tracksToRemove->add(track); } else if (track->framesReady() && track->isReady() && !track->isPaused() && !track->isTerminated()) { - ALOGVV("OffloadThread: track %d s=%08x [OK]", track->name(), cblk->server); + ALOGVV("OffloadThread: track %d s=%08x [OK]", track->name(), cblk->mServer); if (track->mFillingUpStatus == Track::FS_FILLED) { track->mFillingUpStatus = Track::FS_ACTIVE; mLeftVolFloat = mRightVolFloat = 0; @@ -3875,7 +3876,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr mixerStatus = MIXER_TRACKS_READY; } } else { - ALOGVV("OffloadThread: track %d s=%08x [NOT READY]", track->name(), cblk->server); + ALOGVV("OffloadThread: track %d s=%08x [NOT READY]", track->name(), cblk->mServer); if (track->isStopping_1()) { // Hardware buffer can hold a large amount of audio so we must // wait for all current track's data to drain before we say diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 3e184b4..4f2e372 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -468,7 +468,7 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) mAudioTrackServerProxy->getSampleRate(), 20.0 * log10((vlr & 0xFFFF) / 4096.0), 20.0 * log10((vlr >> 16) / 4096.0), - mCblk->server, + mCblk->mServer, (int)mMainBuffer, (int)mAuxBuffer, mCblk->flags, @@ -1739,7 +1739,7 @@ void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size) mChannelMask, mSessionId, mState, - mCblk->server, + mCblk->mServer, mFrameCount); } -- cgit v1.1 From 96f60d8f04432a1ed503b3e24d5736d28c63c9a2 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 12 Jul 2013 10:21:18 -0700 Subject: Rename control block flags to mFlags Change-Id: I7b6d31e24531954ab1ecdf3ed56c19433700bd89 --- include/private/media/AudioTrackShared.h | 3 ++- media/libmedia/AudioRecord.cpp | 4 ++-- media/libmedia/AudioTrack.cpp | 18 +++++++++--------- media/libmedia/AudioTrackShared.cpp | 18 +++++++++--------- services/audioflinger/Threads.cpp | 6 +++--- services/audioflinger/Tracks.cpp | 16 ++++++++-------- 6 files changed, 33 insertions(+), 32 deletions(-) diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index e950b9e..c5d8145 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -31,6 +31,7 @@ namespace android { // ---------------------------------------------------------------------------- +// for audio_track_cblk_t::mFlags 
#define CBLK_UNDERRUN 0x01 // set by server immediately on output underrun, cleared by client #define CBLK_FORCEREADY 0x02 // set: track is considered ready immediately by AudioFlinger, // clear: track is ready when buffer full @@ -127,7 +128,7 @@ public: // read-only for client, server writes once at initialization and is then read-only uint8_t mName; // normal tracks: track name, fast tracks: track index - volatile int32_t flags; + volatile int32_t mFlags; // combinations of CBLK_* // Cache line boundary (32 bytes) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 603c16e..0e7e17f 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -300,7 +300,7 @@ status_t AudioRecord::start(AudioSystem::sync_event_t event, int triggerSession) mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition()); mNewPosition = mProxy->getPosition() + mUpdatePeriod; - int32_t flags = android_atomic_acquire_load(&mCblk->flags); + int32_t flags = android_atomic_acquire_load(&mCblk->mFlags); status_t status = NO_ERROR; if (!(flags & CBLK_INVALID)) { @@ -667,7 +667,7 @@ nsecs_t AudioRecord::processAudioBuffer(const sp& thread) mLock.lock(); // Can only reference mCblk while locked - int32_t flags = android_atomic_and(~CBLK_OVERRUN, &mCblk->flags); + int32_t flags = android_atomic_and(~CBLK_OVERRUN, &mCblk->mFlags); // Check for track invalidation if (flags & CBLK_INVALID) { diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 3653b7f..64a59be 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -389,7 +389,7 @@ status_t AudioTrack::start() mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition()); } mNewPosition = mProxy->getPosition() + mUpdatePeriod; - int32_t flags = android_atomic_and(~CBLK_DISABLED, &mCblk->flags); + int32_t flags = android_atomic_and(~CBLK_DISABLED, &mCblk->mFlags); sp t = mAudioTrackThread; if (t != 0) { @@ -1182,7 +1182,7 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer) // restart track if it was disabled by audioflinger due to previous underrun if (mState == STATE_ACTIVE) { audio_track_cblk_t* cblk = mCblk; - if (android_atomic_and(~CBLK_DISABLED, &cblk->flags) & CBLK_DISABLED) { + if (android_atomic_and(~CBLK_DISABLED, &cblk->mFlags) & CBLK_DISABLED) { ALOGW("releaseBuffer() track %p name=%#x disabled due to previous underrun, restarting", this, cblk->mName); // FIXME ignoring status @@ -1261,16 +1261,16 @@ status_t TimedAudioTrack::allocateTimedBuffer(size_t size, sp* buffer) // fails indicating that the server is dead, flag the track as invalid so // we can attempt to restore in just a bit. audio_track_cblk_t* cblk = mCblk; - if (!(cblk->flags & CBLK_INVALID)) { + if (!(cblk->mFlags & CBLK_INVALID)) { result = mAudioTrack->allocateTimedBuffer(size, buffer); if (result == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID, &cblk->flags); + android_atomic_or(CBLK_INVALID, &cblk->mFlags); } } // If the track is invalid at this point, attempt to restore it. and try the // allocation one more time. 
- if (cblk->flags & CBLK_INVALID) { + if (cblk->mFlags & CBLK_INVALID) { result = restoreTrack_l("allocateTimedBuffer"); if (result == NO_ERROR) { @@ -1290,8 +1290,8 @@ status_t TimedAudioTrack::queueTimedBuffer(const sp& buffer, audio_track_cblk_t* cblk = mCblk; // restart track if it was disabled by audioflinger due to previous underrun if (buffer->size() != 0 && status == NO_ERROR && - (mState == STATE_ACTIVE) && (cblk->flags & CBLK_DISABLED)) { - android_atomic_and(~CBLK_DISABLED, &cblk->flags); + (mState == STATE_ACTIVE) && (cblk->mFlags & CBLK_DISABLED)) { + android_atomic_and(~CBLK_DISABLED, &cblk->mFlags); ALOGW("queueTimedBuffer() track %p disabled, restarting", this); // FIXME ignoring status mAudioTrack->start(); @@ -1339,7 +1339,7 @@ nsecs_t AudioTrack::processAudioBuffer(const sp& thread) // Can only reference mCblk while locked int32_t flags = android_atomic_and( - ~(CBLK_UNDERRUN | CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL | CBLK_BUFFER_END), &mCblk->flags); + ~(CBLK_UNDERRUN | CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL | CBLK_BUFFER_END), &mCblk->mFlags); // Check for track invalidation if (flags & CBLK_INVALID) { @@ -1681,7 +1681,7 @@ status_t AudioTrack::restoreTrack_l(const char *from) // the actual amount of audio frames played (e.g SoundPool) receives them. if (mSharedBuffer == 0) { // restart playback even if buffer is not completely filled. - android_atomic_or(CBLK_FORCEREADY, &mCblk->flags); + android_atomic_or(CBLK_FORCEREADY, &mCblk->mFlags); } } #endif diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index e5f7fcd..5015b8d 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -27,7 +27,7 @@ namespace android { audio_track_cblk_t::audio_track_cblk_t() : mServer(0), frameCount_(0), mFutex(0), mMinimum(0), - mVolumeLR(0x10001000), mSampleRate(0), mSendLevel(0), mName(0), flags(0) + mVolumeLR(0x10001000), mSampleRate(0), mSendLevel(0), mName(0), mFlags(0) { memset(&u, 0, sizeof(u)); } @@ -99,7 +99,7 @@ status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *reques goto end; } for (;;) { - int32_t flags = android_atomic_and(~CBLK_INTERRUPT, &cblk->flags); + int32_t flags = android_atomic_and(~CBLK_INTERRUPT, &cblk->mFlags); // check for track invalidation by server, or server death detection if (flags & CBLK_INVALID) { ALOGV("Track invalidated"); @@ -293,7 +293,7 @@ void ClientProxy::releaseBuffer(Buffer* buffer) void ClientProxy::binderDied() { audio_track_cblk_t* cblk = mCblk; - if (!(android_atomic_or(CBLK_INVALID, &cblk->flags) & CBLK_INVALID)) { + if (!(android_atomic_or(CBLK_INVALID, &cblk->mFlags) & CBLK_INVALID)) { // it seems that a FUTEX_WAKE_PRIVATE will not wake a FUTEX_WAIT, even within same process (void) __futex_syscall3(&cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1); @@ -303,7 +303,7 @@ void ClientProxy::binderDied() void ClientProxy::interrupt() { audio_track_cblk_t* cblk = mCblk; - if (!(android_atomic_or(CBLK_INTERRUPT, &cblk->flags) & CBLK_INTERRUPT)) { + if (!(android_atomic_or(CBLK_INTERRUPT, &cblk->mFlags) & CBLK_INTERRUPT)) { (void) __futex_syscall3(&cblk->mFutex, mClientInServer ? 
FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1); } @@ -324,11 +324,11 @@ void AudioTrackClientProxy::flush() } bool AudioTrackClientProxy::clearStreamEndDone() { - return (android_atomic_and(~CBLK_STREAM_END_DONE, &mCblk->flags) & CBLK_STREAM_END_DONE) != 0; + return (android_atomic_and(~CBLK_STREAM_END_DONE, &mCblk->mFlags) & CBLK_STREAM_END_DONE) != 0; } bool AudioTrackClientProxy::getStreamEndDone() const { - return (mCblk->flags & CBLK_STREAM_END_DONE) != 0; + return (mCblk->mFlags & CBLK_STREAM_END_DONE) != 0; } status_t AudioTrackClientProxy::waitStreamEndDone(const struct timespec *requested) @@ -354,7 +354,7 @@ status_t AudioTrackClientProxy::waitStreamEndDone(const struct timespec *request timeout = TIMEOUT_FINITE; } for (;;) { - int32_t flags = android_atomic_and(~(CBLK_INTERRUPT|CBLK_STREAM_END_DONE), &cblk->flags); + int32_t flags = android_atomic_and(~(CBLK_INTERRUPT|CBLK_STREAM_END_DONE), &cblk->mFlags); // check for track invalidation by server, or server death detection if (flags & CBLK_INVALID) { ALOGV("Track invalidated"); @@ -653,7 +653,7 @@ size_t AudioTrackServerProxy::framesReady() bool AudioTrackServerProxy::setStreamEndDone() { bool old = - (android_atomic_or(CBLK_STREAM_END_DONE, &mCblk->flags) & CBLK_STREAM_END_DONE) != 0; + (android_atomic_or(CBLK_STREAM_END_DONE, &mCblk->mFlags) & CBLK_STREAM_END_DONE) != 0; if (!old) { (void) __futex_syscall3(&mCblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1); @@ -808,7 +808,7 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer) cblk->mServer += stepCount; cblk->u.mStatic.mBufferPosition = newPosition; if (setFlags != 0) { - (void) android_atomic_or(setFlags, &cblk->flags); + (void) android_atomic_or(setFlags, &cblk->mFlags); // this would be a good place to wake a futex } diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 5ba64d5..e6a9d65c 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2783,7 +2783,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac } // indicate to client process that the track was disabled because of underrun; // it will then automatically call start() when data is available - android_atomic_or(CBLK_DISABLED, &track->mCblk->flags); + android_atomic_or(CBLK_DISABLED, &track->mCblk->mFlags); // remove from active list, but state remains ACTIVE [confusing but true] isActive = false; break; @@ -3061,7 +3061,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac // we missed desiredFrames whatever the actual number of frames missing was cblk->u.mStreaming.mUnderrunFrames += desiredFrames; // FIXME also wake futex so that underrun is noticed more quickly - (void) android_atomic_or(CBLK_UNDERRUN, &cblk->flags); + (void) android_atomic_or(CBLK_UNDERRUN, &cblk->mFlags); } // clear effect chain input buffer if an active track underruns to avoid sending // previous audio buffer again to effects @@ -3094,7 +3094,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac tracksToRemove->add(track); // indicate to client process that the track was disabled because of underrun; // it will then automatically call start() when data is available - android_atomic_or(CBLK_DISABLED, &cblk->flags); + android_atomic_or(CBLK_DISABLED, &cblk->mFlags); // If one track is not ready, mark the mixer also not ready if: // - the mixer was ready during previous round OR // - no other track is ready diff --git a/services/audioflinger/Tracks.cpp 
b/services/audioflinger/Tracks.cpp index 4f2e372..58af204 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -128,7 +128,7 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase( } else { mBuffer = sharedBuffer->pointer(); #if 0 - mCblk->flags = CBLK_FORCEREADY; // FIXME hack, need to fix the track ready logic + mCblk->mFlags = CBLK_FORCEREADY; // FIXME hack, need to fix the track ready logic #endif } @@ -471,7 +471,7 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) mCblk->mServer, (int)mMainBuffer, (int)mAuxBuffer, - mCblk->flags, + mCblk->mFlags, mUnderrunCount, nowInUnderrun); } @@ -494,7 +494,7 @@ status_t AudioFlinger::PlaybackThread::Track::getNextBuffer( // only implemented so far for normal tracks, not fast tracks mCblk->u.mStreaming.mUnderrunFrames += desiredFrames; // FIXME also wake futex so that underrun is noticed more quickly - (void) android_atomic_or(CBLK_UNDERRUN, &mCblk->flags); + (void) android_atomic_or(CBLK_UNDERRUN, &mCblk->mFlags); } return status; } @@ -518,9 +518,9 @@ bool AudioFlinger::PlaybackThread::Track::isReady() const { } if (framesReady() >= mFrameCount || - (mCblk->flags & CBLK_FORCEREADY)) { + (mCblk->mFlags & CBLK_FORCEREADY)) { mFillingUpStatus = FS_FILLED; - android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags); + android_atomic_and(~CBLK_FORCEREADY, &mCblk->mFlags); return true; } return false; @@ -694,7 +694,7 @@ void AudioFlinger::PlaybackThread::Track::reset() if (!mResetDone) { // Force underrun condition to avoid false underrun callback until first data is // written to buffer - android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags); + android_atomic_and(~CBLK_FORCEREADY, &mCblk->mFlags); mFillingUpStatus = FS_FILLING; mResetDone = true; if (mState == FLUSHED) { @@ -856,7 +856,7 @@ void AudioFlinger::PlaybackThread::Track::invalidate() { // FIXME should use proxy, and needs work audio_track_cblk_t* cblk = mCblk; - android_atomic_or(CBLK_INVALID, &cblk->flags); + android_atomic_or(CBLK_INVALID, &cblk->mFlags); android_atomic_release_store(0x40000000, &cblk->mFutex); // client is not in server, so FUTEX_WAKE is needed instead of FUTEX_WAKE_PRIVATE (void) __futex_syscall3(&cblk->mFutex, FUTEX_WAKE, INT_MAX); @@ -1679,7 +1679,7 @@ status_t AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvi buffer->raw = buf.mRaw; if (buf.mFrameCount == 0) { // FIXME also wake futex so that overrun is noticed more quickly - (void) android_atomic_or(CBLK_OVERRUN, &mCblk->flags); + (void) android_atomic_or(CBLK_OVERRUN, &mCblk->mFlags); } return status; } -- cgit v1.1 From d054c32443a493513ab63529b0c8b1aca290278c Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 12 Jul 2013 12:59:20 -0700 Subject: Move control block mName to createTrack() output This is part of a series of CLs to clean up the shared memory control block, by removing any fields that don't have to be there. 
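Concretely, the diff below threads a String8 output parameter through IAudioFlinger::createTrack() and drops the mName byte from the shared-memory control block; the server composes the string purely for logging ("s:" server pid, "n:" normal mixer name, "f:" fast track index). A hedged, self-contained sketch of that round trip follows; the helper names writeTrackName() and readTrackName() are illustrative only and do not appear in the patch:

    #include <sys/types.h>
    #include <binder/Parcel.h>
    #include <utils/String8.h>

    using android::Parcel;
    using android::String8;

    // Server side: format a log-only description and append it to the binder reply.
    static void writeTrackName(Parcel& reply, pid_t serverPid, int mixerName, int fastIndex) {
        String8 name = String8::format("s:%d;n:%d;f:%d", serverPid, mixerName, fastIndex);
        reply.writeString8(name);     // clients should not parse this format
    }

    // Client side: read it back and keep it only for log messages.
    static String8 readTrackName(const Parcel& reply) {
        return reply.readString8();
    }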
Change-Id: I6e51003a1293b6800258c31b22cff2eba42162e7 --- include/media/AudioTrack.h | 1 + include/media/IAudioFlinger.h | 4 ++++ include/private/media/AudioTrackShared.h | 4 +--- media/libmedia/AudioTrack.cpp | 5 +++-- media/libmedia/AudioTrackShared.cpp | 2 +- media/libmedia/IAudioFlinger.cpp | 6 +++++- services/audioflinger/AudioFlinger.cpp | 4 ++++ services/audioflinger/AudioFlinger.h | 1 + services/audioflinger/PlaybackTracks.h | 1 + services/audioflinger/Tracks.cpp | 2 -- 10 files changed, 21 insertions(+), 9 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index da13a7f..523bd32 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -726,6 +726,7 @@ protected: sp mProxy; // primary owner of the memory bool mInUnderrun; // whether track is currently in underrun state + String8 mName; // server's name for this IAudioTrack private: class DeathNotifier : public IBinder::DeathRecipient { diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index de45aa8..82aae62 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -67,6 +67,10 @@ public: audio_io_handle_t output, pid_t tid, // -1 means unused, otherwise must be valid non-0 int *sessionId, + // input: ignored + // output: server's description of IAudioTrack for display in logs. + // Don't attempt to parse, as the format could change. + String8& name, status_t *status) = 0; virtual sp openRecord( diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index c5d8145..6d778dd 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -122,11 +122,9 @@ private: // client write-only, server read-only uint16_t mSendLevel; // Fixed point U4.12 so 0x1000 means 1.0 - uint8_t mPad2; // unused + uint16_t mPad2; // unused public: - // read-only for client, server writes once at initialization and is then read-only - uint8_t mName; // normal tracks: track name, fast tracks: track index volatile int32_t mFlags; // combinations of CBLK_* diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 64a59be..dd0ec73 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -953,6 +953,7 @@ status_t AudioTrack::createTrack_l( output, tid, &mSessionId, + mName, &status); if (track == 0) { @@ -1183,8 +1184,8 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer) if (mState == STATE_ACTIVE) { audio_track_cblk_t* cblk = mCblk; if (android_atomic_and(~CBLK_DISABLED, &cblk->mFlags) & CBLK_DISABLED) { - ALOGW("releaseBuffer() track %p name=%#x disabled due to previous underrun, restarting", - this, cblk->mName); + ALOGW("releaseBuffer() track %p name=%s disabled due to previous underrun, restarting", + this, mName.string()); // FIXME ignoring status mAudioTrack->start(); } diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index 5015b8d..3b7616f 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -27,7 +27,7 @@ namespace android { audio_track_cblk_t::audio_track_cblk_t() : mServer(0), frameCount_(0), mFutex(0), mMinimum(0), - mVolumeLR(0x10001000), mSampleRate(0), mSendLevel(0), mName(0), mFlags(0) + mVolumeLR(0x10001000), mSampleRate(0), mSendLevel(0), mFlags(0) { memset(&u, 0, sizeof(u)); } diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index c670936..c6e43e7 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ 
b/media/libmedia/IAudioFlinger.cpp @@ -95,6 +95,7 @@ public: audio_io_handle_t output, pid_t tid, int *sessionId, + String8& name, status_t *status) { Parcel data, reply; @@ -127,6 +128,7 @@ public: if (sessionId != NULL) { *sessionId = lSessionId; } + name = reply.readString8(); lStatus = reply.readInt32(); track = interface_cast(reply.readStrongBinder()); } @@ -735,12 +737,14 @@ status_t BnAudioFlinger::onTransact( audio_io_handle_t output = (audio_io_handle_t) data.readInt32(); pid_t tid = (pid_t) data.readInt32(); int sessionId = data.readInt32(); + String8 name; status_t status; sp track = createTrack( (audio_stream_type_t) streamType, sampleRate, format, - channelMask, frameCount, &flags, buffer, output, tid, &sessionId, &status); + channelMask, frameCount, &flags, buffer, output, tid, &sessionId, name, &status); reply->writeInt32(flags); reply->writeInt32(sessionId); + reply->writeString8(name); reply->writeInt32(status); reply->writeStrongBinder(track->asBinder()); return NO_ERROR; diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index d510641..1ae51ca 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -436,6 +436,7 @@ sp AudioFlinger::createTrack( audio_io_handle_t output, pid_t tid, int *sessionId, + String8& name, status_t *status) { sp track; @@ -524,6 +525,9 @@ sp AudioFlinger::createTrack( } } if (lStatus == NO_ERROR) { + // s for server's pid, n for normal mixer name, f for fast index + name = String8::format("s:%d;n:%d;f:%d", getpid_cached, track->name() - AudioMixer::TRACK0, + track->fastIndex()); trackHandle = new TrackHandle(track); } else { // remove local strong reference to Client before deleting the Track so that the Client diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index eee5da5..d99b779 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -108,6 +108,7 @@ public: audio_io_handle_t output, pid_t tid, int *sessionId, + String8& name, status_t *status); virtual sp openRecord( diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index 8b7433c..628f5af 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -106,6 +106,7 @@ public: bool isInvalid() const { return mIsInvalid; } virtual bool isTimedTrack() const { return false; } bool isFastTrack() const { return (mFlags & IAudioFlinger::TRACK_FAST) != 0; } + int fastIndex() const { return mFastIndex; } protected: diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 58af204..1f75468 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -333,7 +333,6 @@ AudioFlinger::PlaybackThread::Track::Track( mServerProxy = mAudioTrackServerProxy; // to avoid leaking a track name, do not allocate one unless there is an mCblk mName = thread->getTrackName_l(channelMask, sessionId); - mCblk->mName = mName; if (mName < 0) { ALOGE("no more track names available"); return; @@ -349,7 +348,6 @@ AudioFlinger::PlaybackThread::Track::Track( // this means we are potentially denying other more important fast tracks from // being created. It would be better to allocate the index dynamically. 
mFastIndex = i; - mCblk->mName = i; // Read the initial underruns because this field is never cleared by the fast mixer mObservedUnderruns = thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); -- cgit v1.1 From 7b82efe7a376c882f8f938e1c41b8311a8cdda4a Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Thu, 25 Jul 2013 17:12:35 -0700 Subject: Camera: Rename new API to camera2, rearrange camera service - Support API rename from photography to camera2 - Reorganize camera service files - API support files to api1/, api2/, api_pro/ - HAL device support files into device{1,2,3}/ - Common files into common/ - Camera service remains at top-level Change-Id: Ie474c12536f543832fba0a2dc936ac4fd39fe6a9 --- camera/Android.mk | 6 +- camera/ICameraService.cpp | 6 +- camera/camera2/CaptureRequest.cpp | 124 + camera/camera2/ICameraDeviceCallbacks.cpp | 109 + camera/camera2/ICameraDeviceUser.cpp | 322 +++ camera/photography/CaptureRequest.cpp | 124 - camera/photography/ICameraDeviceCallbacks.cpp | 110 - camera/photography/ICameraDeviceUser.cpp | 322 --- include/camera/camera2/CaptureRequest.h | 42 + include/camera/camera2/ICameraDeviceCallbacks.h | 61 + include/camera/camera2/ICameraDeviceUser.h | 81 + include/camera/photography/CaptureRequest.h | 42 - .../camera/photography/ICameraDeviceCallbacks.h | 61 - include/camera/photography/ICameraDeviceUser.h | 81 - services/camera/libcameraservice/Android.mk | 48 +- services/camera/libcameraservice/Camera2Client.cpp | 1777 ------------- services/camera/libcameraservice/Camera2Client.h | 201 -- .../camera/libcameraservice/Camera2ClientBase.cpp | 335 --- .../camera/libcameraservice/Camera2ClientBase.h | 132 - services/camera/libcameraservice/Camera2Device.cpp | 1515 ----------- services/camera/libcameraservice/Camera2Device.h | 345 --- services/camera/libcameraservice/Camera3Device.cpp | 1972 --------------- services/camera/libcameraservice/Camera3Device.h | 413 --- services/camera/libcameraservice/CameraClient.cpp | 971 ------- services/camera/libcameraservice/CameraClient.h | 165 -- .../camera/libcameraservice/CameraDeviceBase.cpp | 30 - .../camera/libcameraservice/CameraDeviceBase.h | 216 -- .../libcameraservice/CameraDeviceFactory.cpp | 7 +- .../camera/libcameraservice/CameraDeviceFactory.h | 1 + .../libcameraservice/CameraHardwareInterface.h | 691 ----- services/camera/libcameraservice/CameraService.cpp | 8 +- services/camera/libcameraservice/CameraService.h | 4 +- .../camera/libcameraservice/ProCamera2Client.cpp | 446 ---- .../camera/libcameraservice/ProCamera2Client.h | 123 - .../camera/libcameraservice/api1/Camera2Client.cpp | 1779 +++++++++++++ .../camera/libcameraservice/api1/Camera2Client.h | 211 ++ .../camera/libcameraservice/api1/CameraClient.cpp | 972 +++++++ .../camera/libcameraservice/api1/CameraClient.h | 165 ++ .../libcameraservice/api1/client2/BurstCapture.cpp | 113 + .../libcameraservice/api1/client2/BurstCapture.h | 72 + .../api1/client2/CallbackProcessor.cpp | 539 ++++ .../api1/client2/CallbackProcessor.h | 99 + .../libcameraservice/api1/client2/Camera2Heap.h | 55 + .../api1/client2/CaptureSequencer.cpp | 710 ++++++ .../api1/client2/CaptureSequencer.h | 177 ++ .../api1/client2/FrameProcessor.cpp | 315 +++ .../libcameraservice/api1/client2/FrameProcessor.h | 85 + .../api1/client2/JpegCompressor.cpp | 221 ++ .../libcameraservice/api1/client2/JpegCompressor.h | 107 + .../api1/client2/JpegProcessor.cpp | 388 +++ .../libcameraservice/api1/client2/JpegProcessor.h | 87 + .../libcameraservice/api1/client2/Parameters.cpp | 
2645 ++++++++++++++++++++ .../libcameraservice/api1/client2/Parameters.h | 372 +++ .../api1/client2/StreamingProcessor.cpp | 880 +++++++ .../api1/client2/StreamingProcessor.h | 143 ++ .../libcameraservice/api1/client2/ZslProcessor.cpp | 556 ++++ .../libcameraservice/api1/client2/ZslProcessor.h | 135 + .../api1/client2/ZslProcessor3.cpp | 482 ++++ .../libcameraservice/api1/client2/ZslProcessor3.h | 136 + .../api1/client2/ZslProcessorInterface.h | 59 + .../libcameraservice/api2/CameraDeviceClient.cpp | 551 ++++ .../libcameraservice/api2/CameraDeviceClient.h | 141 ++ .../libcameraservice/api_pro/ProCamera2Client.cpp | 446 ++++ .../libcameraservice/api_pro/ProCamera2Client.h | 123 + .../libcameraservice/camera2/BurstCapture.cpp | 113 - .../camera/libcameraservice/camera2/BurstCapture.h | 71 - .../libcameraservice/camera2/CallbackProcessor.cpp | 539 ---- .../libcameraservice/camera2/CallbackProcessor.h | 98 - .../camera/libcameraservice/camera2/Camera2Heap.h | 55 - .../libcameraservice/camera2/CaptureSequencer.cpp | 711 ------ .../libcameraservice/camera2/CaptureSequencer.h | 177 -- .../libcameraservice/camera2/FrameProcessor.cpp | 315 --- .../libcameraservice/camera2/FrameProcessor.h | 85 - .../libcameraservice/camera2/JpegCompressor.cpp | 221 -- .../libcameraservice/camera2/JpegCompressor.h | 107 - .../libcameraservice/camera2/JpegProcessor.cpp | 387 --- .../libcameraservice/camera2/JpegProcessor.h | 86 - .../camera/libcameraservice/camera2/Parameters.cpp | 2645 -------------------- .../camera/libcameraservice/camera2/Parameters.h | 372 --- .../libcameraservice/camera2/ProFrameProcessor.cpp | 176 -- .../libcameraservice/camera2/ProFrameProcessor.h | 84 - .../camera2/StreamingProcessor.cpp | 880 ------- .../libcameraservice/camera2/StreamingProcessor.h | 143 -- .../libcameraservice/camera2/ZslProcessor.cpp | 556 ---- .../camera/libcameraservice/camera2/ZslProcessor.h | 135 - .../libcameraservice/camera2/ZslProcessor3.cpp | 482 ---- .../libcameraservice/camera2/ZslProcessor3.h | 137 - .../camera2/ZslProcessorInterface.h | 59 - .../camera3/Camera3IOStreamBase.cpp | 275 -- .../libcameraservice/camera3/Camera3IOStreamBase.h | 102 - .../camera3/Camera3InputStream.cpp | 239 -- .../libcameraservice/camera3/Camera3InputStream.h | 88 - .../camera3/Camera3OutputStream.cpp | 369 --- .../libcameraservice/camera3/Camera3OutputStream.h | 101 - .../camera3/Camera3OutputStreamInterface.h | 43 - .../libcameraservice/camera3/Camera3Stream.cpp | 383 --- .../libcameraservice/camera3/Camera3Stream.h | 283 --- .../camera3/Camera3StreamBufferListener.h | 48 - .../camera3/Camera3StreamInterface.h | 162 -- .../libcameraservice/camera3/Camera3ZslStream.cpp | 328 --- .../libcameraservice/camera3/Camera3ZslStream.h | 105 - .../libcameraservice/common/Camera2ClientBase.cpp | 333 +++ .../libcameraservice/common/Camera2ClientBase.h | 133 + .../libcameraservice/common/CameraDeviceBase.cpp | 30 + .../libcameraservice/common/CameraDeviceBase.h | 216 ++ .../libcameraservice/common/FrameProcessorBase.cpp | 176 ++ .../libcameraservice/common/FrameProcessorBase.h | 84 + .../device1/CameraHardwareInterface.h | 691 +++++ .../libcameraservice/device2/Camera2Device.cpp | 1515 +++++++++++ .../libcameraservice/device2/Camera2Device.h | 345 +++ .../libcameraservice/device3/Camera3Device.cpp | 1974 +++++++++++++++ .../libcameraservice/device3/Camera3Device.h | 419 ++++ .../device3/Camera3IOStreamBase.cpp | 275 ++ .../libcameraservice/device3/Camera3IOStreamBase.h | 102 + .../device3/Camera3InputStream.cpp | 239 ++ 
.../libcameraservice/device3/Camera3InputStream.h | 88 + .../device3/Camera3OutputStream.cpp | 369 +++ .../libcameraservice/device3/Camera3OutputStream.h | 101 + .../device3/Camera3OutputStreamInterface.h | 43 + .../libcameraservice/device3/Camera3Stream.cpp | 383 +++ .../libcameraservice/device3/Camera3Stream.h | 283 +++ .../device3/Camera3StreamBufferListener.h | 48 + .../device3/Camera3StreamInterface.h | 162 ++ .../libcameraservice/device3/Camera3ZslStream.cpp | 328 +++ .../libcameraservice/device3/Camera3ZslStream.h | 105 + .../photography/CameraDeviceClient.cpp | 551 ---- .../photography/CameraDeviceClient.h | 141 -- 127 files changed, 21985 insertions(+), 21964 deletions(-) create mode 100644 camera/camera2/CaptureRequest.cpp create mode 100644 camera/camera2/ICameraDeviceCallbacks.cpp create mode 100644 camera/camera2/ICameraDeviceUser.cpp delete mode 100644 camera/photography/CaptureRequest.cpp delete mode 100644 camera/photography/ICameraDeviceCallbacks.cpp delete mode 100644 camera/photography/ICameraDeviceUser.cpp create mode 100644 include/camera/camera2/CaptureRequest.h create mode 100644 include/camera/camera2/ICameraDeviceCallbacks.h create mode 100644 include/camera/camera2/ICameraDeviceUser.h delete mode 100644 include/camera/photography/CaptureRequest.h delete mode 100644 include/camera/photography/ICameraDeviceCallbacks.h delete mode 100644 include/camera/photography/ICameraDeviceUser.h delete mode 100644 services/camera/libcameraservice/Camera2Client.cpp delete mode 100644 services/camera/libcameraservice/Camera2Client.h delete mode 100644 services/camera/libcameraservice/Camera2ClientBase.cpp delete mode 100644 services/camera/libcameraservice/Camera2ClientBase.h delete mode 100644 services/camera/libcameraservice/Camera2Device.cpp delete mode 100644 services/camera/libcameraservice/Camera2Device.h delete mode 100644 services/camera/libcameraservice/Camera3Device.cpp delete mode 100644 services/camera/libcameraservice/Camera3Device.h delete mode 100644 services/camera/libcameraservice/CameraClient.cpp delete mode 100644 services/camera/libcameraservice/CameraClient.h delete mode 100644 services/camera/libcameraservice/CameraDeviceBase.cpp delete mode 100644 services/camera/libcameraservice/CameraDeviceBase.h delete mode 100644 services/camera/libcameraservice/CameraHardwareInterface.h delete mode 100644 services/camera/libcameraservice/ProCamera2Client.cpp delete mode 100644 services/camera/libcameraservice/ProCamera2Client.h create mode 100644 services/camera/libcameraservice/api1/Camera2Client.cpp create mode 100644 services/camera/libcameraservice/api1/Camera2Client.h create mode 100644 services/camera/libcameraservice/api1/CameraClient.cpp create mode 100644 services/camera/libcameraservice/api1/CameraClient.h create mode 100644 services/camera/libcameraservice/api1/client2/BurstCapture.cpp create mode 100644 services/camera/libcameraservice/api1/client2/BurstCapture.h create mode 100644 services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp create mode 100644 services/camera/libcameraservice/api1/client2/CallbackProcessor.h create mode 100644 services/camera/libcameraservice/api1/client2/Camera2Heap.h create mode 100644 services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp create mode 100644 services/camera/libcameraservice/api1/client2/CaptureSequencer.h create mode 100644 services/camera/libcameraservice/api1/client2/FrameProcessor.cpp create mode 100644 services/camera/libcameraservice/api1/client2/FrameProcessor.h create mode 100644 
services/camera/libcameraservice/api1/client2/JpegCompressor.cpp create mode 100644 services/camera/libcameraservice/api1/client2/JpegCompressor.h create mode 100644 services/camera/libcameraservice/api1/client2/JpegProcessor.cpp create mode 100644 services/camera/libcameraservice/api1/client2/JpegProcessor.h create mode 100644 services/camera/libcameraservice/api1/client2/Parameters.cpp create mode 100644 services/camera/libcameraservice/api1/client2/Parameters.h create mode 100644 services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp create mode 100644 services/camera/libcameraservice/api1/client2/StreamingProcessor.h create mode 100644 services/camera/libcameraservice/api1/client2/ZslProcessor.cpp create mode 100644 services/camera/libcameraservice/api1/client2/ZslProcessor.h create mode 100644 services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp create mode 100644 services/camera/libcameraservice/api1/client2/ZslProcessor3.h create mode 100644 services/camera/libcameraservice/api1/client2/ZslProcessorInterface.h create mode 100644 services/camera/libcameraservice/api2/CameraDeviceClient.cpp create mode 100644 services/camera/libcameraservice/api2/CameraDeviceClient.h create mode 100644 services/camera/libcameraservice/api_pro/ProCamera2Client.cpp create mode 100644 services/camera/libcameraservice/api_pro/ProCamera2Client.h delete mode 100644 services/camera/libcameraservice/camera2/BurstCapture.cpp delete mode 100644 services/camera/libcameraservice/camera2/BurstCapture.h delete mode 100644 services/camera/libcameraservice/camera2/CallbackProcessor.cpp delete mode 100644 services/camera/libcameraservice/camera2/CallbackProcessor.h delete mode 100644 services/camera/libcameraservice/camera2/Camera2Heap.h delete mode 100644 services/camera/libcameraservice/camera2/CaptureSequencer.cpp delete mode 100644 services/camera/libcameraservice/camera2/CaptureSequencer.h delete mode 100644 services/camera/libcameraservice/camera2/FrameProcessor.cpp delete mode 100644 services/camera/libcameraservice/camera2/FrameProcessor.h delete mode 100644 services/camera/libcameraservice/camera2/JpegCompressor.cpp delete mode 100644 services/camera/libcameraservice/camera2/JpegCompressor.h delete mode 100644 services/camera/libcameraservice/camera2/JpegProcessor.cpp delete mode 100644 services/camera/libcameraservice/camera2/JpegProcessor.h delete mode 100644 services/camera/libcameraservice/camera2/Parameters.cpp delete mode 100644 services/camera/libcameraservice/camera2/Parameters.h delete mode 100644 services/camera/libcameraservice/camera2/ProFrameProcessor.cpp delete mode 100644 services/camera/libcameraservice/camera2/ProFrameProcessor.h delete mode 100644 services/camera/libcameraservice/camera2/StreamingProcessor.cpp delete mode 100644 services/camera/libcameraservice/camera2/StreamingProcessor.h delete mode 100644 services/camera/libcameraservice/camera2/ZslProcessor.cpp delete mode 100644 services/camera/libcameraservice/camera2/ZslProcessor.h delete mode 100644 services/camera/libcameraservice/camera2/ZslProcessor3.cpp delete mode 100644 services/camera/libcameraservice/camera2/ZslProcessor3.h delete mode 100644 services/camera/libcameraservice/camera2/ZslProcessorInterface.h delete mode 100644 services/camera/libcameraservice/camera3/Camera3IOStreamBase.cpp delete mode 100644 services/camera/libcameraservice/camera3/Camera3IOStreamBase.h delete mode 100644 services/camera/libcameraservice/camera3/Camera3InputStream.cpp delete mode 100644 
services/camera/libcameraservice/camera3/Camera3InputStream.h delete mode 100644 services/camera/libcameraservice/camera3/Camera3OutputStream.cpp delete mode 100644 services/camera/libcameraservice/camera3/Camera3OutputStream.h delete mode 100644 services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h delete mode 100644 services/camera/libcameraservice/camera3/Camera3Stream.cpp delete mode 100644 services/camera/libcameraservice/camera3/Camera3Stream.h delete mode 100644 services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h delete mode 100644 services/camera/libcameraservice/camera3/Camera3StreamInterface.h delete mode 100644 services/camera/libcameraservice/camera3/Camera3ZslStream.cpp delete mode 100644 services/camera/libcameraservice/camera3/Camera3ZslStream.h create mode 100644 services/camera/libcameraservice/common/Camera2ClientBase.cpp create mode 100644 services/camera/libcameraservice/common/Camera2ClientBase.h create mode 100644 services/camera/libcameraservice/common/CameraDeviceBase.cpp create mode 100644 services/camera/libcameraservice/common/CameraDeviceBase.h create mode 100644 services/camera/libcameraservice/common/FrameProcessorBase.cpp create mode 100644 services/camera/libcameraservice/common/FrameProcessorBase.h create mode 100644 services/camera/libcameraservice/device1/CameraHardwareInterface.h create mode 100644 services/camera/libcameraservice/device2/Camera2Device.cpp create mode 100644 services/camera/libcameraservice/device2/Camera2Device.h create mode 100644 services/camera/libcameraservice/device3/Camera3Device.cpp create mode 100644 services/camera/libcameraservice/device3/Camera3Device.h create mode 100644 services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp create mode 100644 services/camera/libcameraservice/device3/Camera3IOStreamBase.h create mode 100644 services/camera/libcameraservice/device3/Camera3InputStream.cpp create mode 100644 services/camera/libcameraservice/device3/Camera3InputStream.h create mode 100644 services/camera/libcameraservice/device3/Camera3OutputStream.cpp create mode 100644 services/camera/libcameraservice/device3/Camera3OutputStream.h create mode 100644 services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h create mode 100644 services/camera/libcameraservice/device3/Camera3Stream.cpp create mode 100644 services/camera/libcameraservice/device3/Camera3Stream.h create mode 100644 services/camera/libcameraservice/device3/Camera3StreamBufferListener.h create mode 100644 services/camera/libcameraservice/device3/Camera3StreamInterface.h create mode 100644 services/camera/libcameraservice/device3/Camera3ZslStream.cpp create mode 100644 services/camera/libcameraservice/device3/Camera3ZslStream.h delete mode 100644 services/camera/libcameraservice/photography/CameraDeviceClient.cpp delete mode 100644 services/camera/libcameraservice/photography/CameraDeviceClient.h diff --git a/camera/Android.mk b/camera/Android.mk index 8f58f87..e633450 100644 --- a/camera/Android.mk +++ b/camera/Android.mk @@ -16,9 +16,9 @@ LOCAL_SRC_FILES:= \ ICameraRecordingProxyListener.cpp \ IProCameraUser.cpp \ IProCameraCallbacks.cpp \ - photography/ICameraDeviceUser.cpp \ - photography/ICameraDeviceCallbacks.cpp \ - photography/CaptureRequest.cpp \ + camera2/ICameraDeviceUser.cpp \ + camera2/ICameraDeviceCallbacks.cpp \ + camera2/CaptureRequest.cpp \ ProCamera.cpp \ CameraBase.cpp \ diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp index 068fb0f..876a2df 100644 --- 
a/camera/ICameraService.cpp +++ b/camera/ICameraService.cpp @@ -31,8 +31,8 @@ #include #include #include -#include -#include +#include +#include namespace android { @@ -151,7 +151,7 @@ public: return interface_cast(reply.readStrongBinder()); } - // connect to camera service (android.hardware.photography.CameraDevice) + // connect to camera service (android.hardware.camera2.CameraDevice) virtual sp connect( const sp& cameraCb, int cameraId, diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp new file mode 100644 index 0000000..57e5319 --- /dev/null +++ b/camera/camera2/CaptureRequest.cpp @@ -0,0 +1,124 @@ +/* +** +** Copyright 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +// #define LOG_NDEBUG 0 +#define LOG_TAG "CameraRequest" +#include + +#include + +#include +#include + +namespace android { + +status_t CaptureRequest::readFromParcel(Parcel* parcel) { + if (parcel == NULL) { + ALOGE("%s: Null parcel", __FUNCTION__); + return BAD_VALUE; + } + + mMetadata.clear(); + mSurfaceList.clear(); + + status_t err; + + if ((err = mMetadata.readFromParcel(parcel)) != OK) { + ALOGE("%s: Failed to read metadata from parcel", __FUNCTION__); + return err; + } + ALOGV("%s: Read metadata from parcel", __FUNCTION__); + + int32_t size; + if ((err = parcel->readInt32(&size)) != OK) { + ALOGE("%s: Failed to read surface list size from parcel", __FUNCTION__); + return err; + } + ALOGV("%s: Read surface list size = %d", __FUNCTION__, size); + + // Do not distinguish null arrays from 0-sized arrays. + for (int i = 0; i < size; ++i) { + // Parcel.writeParcelableArray + size_t len; + const char16_t* className = parcel->readString16Inplace(&len); + ALOGV("%s: Read surface class = %s", __FUNCTION__, + className != NULL ? String8(className).string() : ""); + + if (className == NULL) { + continue; + } + + // Surface.writeToParcel + String16 name = parcel->readString16(); + ALOGV("%s: Read surface name = %s", + __FUNCTION__, String8(name).string()); + sp binder(parcel->readStrongBinder()); + ALOGV("%s: Read surface binder = %p", + __FUNCTION__, binder.get()); + + sp surface; + + if (binder != NULL) { + sp gbp = + interface_cast(binder); + surface = new Surface(gbp); + } + + mSurfaceList.push_back(surface); + } + + return OK; +} + +status_t CaptureRequest::writeToParcel(Parcel* parcel) const { + if (parcel == NULL) { + ALOGE("%s: Null parcel", __FUNCTION__); + return BAD_VALUE; + } + + status_t err; + + if ((err = mMetadata.writeToParcel(parcel)) != OK) { + return err; + } + + int32_t size = static_cast(mSurfaceList.size()); + + // Send 0-sized arrays when it's empty. Do not send null arrays. 
+ parcel->writeInt32(size); + + for (int32_t i = 0; i < size; ++i) { + sp surface = mSurfaceList[i]; + + sp binder; + if (surface != 0) { + binder = surface->getIGraphicBufferProducer()->asBinder(); + } + + // not sure if readParcelableArray does this, hard to tell from source + parcel->writeString16(String16("android.view.Surface")); + + // Surface.writeToParcel + parcel->writeString16(String16("unknown_name")); + // Surface.nativeWriteToParcel + parcel->writeStrongBinder(binder); + } + + return OK; +} + +}; // namespace android diff --git a/camera/camera2/ICameraDeviceCallbacks.cpp b/camera/camera2/ICameraDeviceCallbacks.cpp new file mode 100644 index 0000000..188bd8e --- /dev/null +++ b/camera/camera2/ICameraDeviceCallbacks.cpp @@ -0,0 +1,109 @@ +/* +** +** Copyright 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "ICameraDeviceCallbacks" +#include +#include +#include + +#include +#include +#include +#include + +#include +#include "camera/CameraMetadata.h" + +namespace android { + +enum { + NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION, + RESULT_RECEIVED, +}; + +class BpCameraDeviceCallbacks: public BpInterface +{ +public: + BpCameraDeviceCallbacks(const sp& impl) + : BpInterface(impl) + { + } + + // generic callback from camera service to app + void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) + { + ALOGV("notifyCallback"); + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor()); + data.writeInt32(msgType); + data.writeInt32(ext1); + data.writeInt32(ext2); + remote()->transact(NOTIFY_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY); + data.writeNoException(); + } + + void onResultReceived(int32_t frameId, const CameraMetadata& result) { + ALOGV("onResultReceived"); + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor()); + data.writeInt32(frameId); + result.writeToParcel(&data); + remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY); + data.writeNoException(); + } +}; + +IMPLEMENT_META_INTERFACE(CameraDeviceCallbacks, + "android.hardware.camera2.ICameraDeviceCallbacks"); + +// ---------------------------------------------------------------------- + +status_t BnCameraDeviceCallbacks::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + ALOGV("onTransact - code = %d", code); + switch(code) { + case NOTIFY_CALLBACK: { + ALOGV("NOTIFY_CALLBACK"); + CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply); + int32_t msgType = data.readInt32(); + int32_t ext1 = data.readInt32(); + int32_t ext2 = data.readInt32(); + notifyCallback(msgType, ext1, ext2); + data.readExceptionCode(); + return NO_ERROR; + } break; + case RESULT_RECEIVED: { + ALOGV("RESULT_RECEIVED"); + CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply); + int32_t frameId = data.readInt32(); + CameraMetadata result; + result.readFromParcel(const_cast(&data)); + onResultReceived(frameId, 
result); + data.readExceptionCode(); + return NO_ERROR; + break; + } + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/camera/camera2/ICameraDeviceUser.cpp b/camera/camera2/ICameraDeviceUser.cpp new file mode 100644 index 0000000..923f487 --- /dev/null +++ b/camera/camera2/ICameraDeviceUser.cpp @@ -0,0 +1,322 @@ +/* +** +** Copyright 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +// #define LOG_NDEBUG 0 +#define LOG_TAG "ICameraDeviceUser" +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { + +typedef Parcel::WritableBlob WritableBlob; +typedef Parcel::ReadableBlob ReadableBlob; + +enum { + DISCONNECT = IBinder::FIRST_CALL_TRANSACTION, + SUBMIT_REQUEST, + CANCEL_REQUEST, + DELETE_STREAM, + CREATE_STREAM, + CREATE_DEFAULT_REQUEST, + GET_CAMERA_INFO, + WAIT_UNTIL_IDLE, +}; + +class BpCameraDeviceUser : public BpInterface +{ +public: + BpCameraDeviceUser(const sp& impl) + : BpInterface(impl) + { + } + + // disconnect from camera service + void disconnect() + { + ALOGV("disconnect"); + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + remote()->transact(DISCONNECT, data, &reply); + reply.readExceptionCode(); + } + + virtual int submitRequest(sp request, bool streaming) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + + // arg0 = CaptureRequest + if (request != 0) { + data.writeInt32(1); + request->writeToParcel(&data); + } else { + data.writeInt32(0); + } + + // arg1 = streaming (bool) + data.writeInt32(streaming); + + remote()->transact(SUBMIT_REQUEST, data, &reply); + + reply.readExceptionCode(); + return reply.readInt32(); + } + + virtual status_t cancelRequest(int requestId) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + data.writeInt32(requestId); + + remote()->transact(CANCEL_REQUEST, data, &reply); + + reply.readExceptionCode(); + return reply.readInt32(); + } + + virtual status_t deleteStream(int streamId) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + data.writeInt32(streamId); + + remote()->transact(DELETE_STREAM, data, &reply); + + reply.readExceptionCode(); + return reply.readInt32(); + } + + virtual status_t createStream(int width, int height, int format, + const sp& bufferProducer) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + data.writeInt32(width); + data.writeInt32(height); + data.writeInt32(format); + + data.writeInt32(1); // marker that bufferProducer is not null + data.writeString16(String16("unknown_name")); // name of surface + sp b(bufferProducer->asBinder()); + data.writeStrongBinder(b); + + remote()->transact(CREATE_STREAM, data, &reply); + + 
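Two conventions recur through this proxy. Nullable parcelable arguments (the CaptureRequest in submitRequest(), the IGraphicBufferProducer here) are preceded by an int32 presence marker, and every reply is unpacked the way createStream() finishes just below: a Java-style exception header first, then the int32 status or return value, matching the exception-plus-value layout of the ICameraDeviceUser.aidl counterpart these files are documented to track. A hypothetical helper condensing that reply pattern (illustrative only, not part of this patch):

    #include <binder/IBinder.h>
    #include <binder/Parcel.h>

    using namespace android;

    // Sends a prepared Parcel, skips the exception header in the reply and
    // returns the int32 status / return value that follows it.
    static int32_t transactForInt32(const sp<IBinder>& remote, uint32_t code,
                                    const Parcel& data) {
        Parcel reply;
        status_t err = remote->transact(code, data, &reply);
        if (err != NO_ERROR) {
            return err;                 // binder transport failure
        }
        reply.readExceptionCode();      // 0 means "no exception was thrown"
        return reply.readInt32();       // status or return value
    }
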
reply.readExceptionCode(); + return reply.readInt32(); + } + + // Create a request object from a template. + virtual status_t createDefaultRequest(int templateId, + /*out*/ + CameraMetadata* request) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + data.writeInt32(templateId); + remote()->transact(CREATE_DEFAULT_REQUEST, data, &reply); + + reply.readExceptionCode(); + status_t result = reply.readInt32(); + + CameraMetadata out; + if (reply.readInt32() != 0) { + out.readFromParcel(&reply); + } + + if (request != NULL) { + request->swap(out); + } + return result; + } + + + virtual status_t getCameraInfo(CameraMetadata* info) + { + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + remote()->transact(GET_CAMERA_INFO, data, &reply); + + reply.readExceptionCode(); + status_t result = reply.readInt32(); + + CameraMetadata out; + if (reply.readInt32() != 0) { + out.readFromParcel(&reply); + } + + if (info != NULL) { + info->swap(out); + } + + return result; + } + + virtual status_t waitUntilIdle() + { + ALOGV("waitUntilIdle"); + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + remote()->transact(WAIT_UNTIL_IDLE, data, &reply); + reply.readExceptionCode(); + return reply.readInt32(); + } + +private: + + +}; + +IMPLEMENT_META_INTERFACE(CameraDeviceUser, + "android.hardware.camera2.ICameraDeviceUser"); + +// ---------------------------------------------------------------------- + +status_t BnCameraDeviceUser::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch(code) { + case DISCONNECT: { + ALOGV("DISCONNECT"); + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + disconnect(); + reply->writeNoException(); + return NO_ERROR; + } break; + case SUBMIT_REQUEST: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + + // arg0 = request + sp request; + if (data.readInt32() != 0) { + request = new CaptureRequest(); + request->readFromParcel(const_cast(&data)); + } + + // arg1 = streaming (bool) + bool streaming = data.readInt32(); + + // return code: requestId (int32) + reply->writeNoException(); + reply->writeInt32(submitRequest(request, streaming)); + + return NO_ERROR; + } break; + case CANCEL_REQUEST: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + int requestId = data.readInt32(); + reply->writeNoException(); + reply->writeInt32(cancelRequest(requestId)); + return NO_ERROR; + } break; + case DELETE_STREAM: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + int streamId = data.readInt32(); + reply->writeNoException(); + reply->writeInt32(deleteStream(streamId)); + return NO_ERROR; + } break; + case CREATE_STREAM: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + int width, height, format; + + width = data.readInt32(); + ALOGV("%s: CREATE_STREAM: width = %d", __FUNCTION__, width); + height = data.readInt32(); + ALOGV("%s: CREATE_STREAM: height = %d", __FUNCTION__, height); + format = data.readInt32(); + ALOGV("%s: CREATE_STREAM: format = %d", __FUNCTION__, format); + + sp bp; + if (data.readInt32() != 0) { + String16 name = data.readString16(); + bp = interface_cast( + data.readStrongBinder()); + + ALOGV("%s: CREATE_STREAM: bp = %p, name = %s", __FUNCTION__, + bp.get(), String8(name).string()); + } else { + ALOGV("%s: CREATE_STREAM: bp = unset, name = unset", + __FUNCTION__); + } + + status_t ret; + ret = createStream(width, height, format, bp); + + reply->writeNoException(); + ALOGV("%s: CREATE_STREAM: write 
noException", __FUNCTION__); + reply->writeInt32(ret); + ALOGV("%s: CREATE_STREAM: write ret = %d", __FUNCTION__, ret); + + return NO_ERROR; + } break; + + case CREATE_DEFAULT_REQUEST: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + + int templateId = data.readInt32(); + + CameraMetadata request; + status_t ret; + ret = createDefaultRequest(templateId, &request); + + reply->writeNoException(); + reply->writeInt32(ret); + + // out-variables are after exception and return value + reply->writeInt32(1); // to mark presence of metadata object + request.writeToParcel(const_cast(reply)); + + return NO_ERROR; + } break; + case GET_CAMERA_INFO: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + + CameraMetadata info; + status_t ret; + ret = getCameraInfo(&info); + + reply->writeNoException(); + reply->writeInt32(ret); + + // out-variables are after exception and return value + reply->writeInt32(1); // to mark presence of metadata object + info.writeToParcel(reply); + + return NO_ERROR; + } break; + case WAIT_UNTIL_IDLE: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + reply->writeNoException(); + reply->writeInt32(waitUntilIdle()); + return NO_ERROR; + } break; + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/camera/photography/CaptureRequest.cpp b/camera/photography/CaptureRequest.cpp deleted file mode 100644 index b822fc9..0000000 --- a/camera/photography/CaptureRequest.cpp +++ /dev/null @@ -1,124 +0,0 @@ -/* -** -** Copyright 2013, The Android Open Source Project -** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. -*/ - -// #define LOG_NDEBUG 0 -#define LOG_TAG "CameraRequest" -#include - -#include - -#include -#include - -namespace android { - -status_t CaptureRequest::readFromParcel(Parcel* parcel) { - if (parcel == NULL) { - ALOGE("%s: Null parcel", __FUNCTION__); - return BAD_VALUE; - } - - mMetadata.clear(); - mSurfaceList.clear(); - - status_t err; - - if ((err = mMetadata.readFromParcel(parcel)) != OK) { - ALOGE("%s: Failed to read metadata from parcel", __FUNCTION__); - return err; - } - ALOGV("%s: Read metadata from parcel", __FUNCTION__); - - int32_t size; - if ((err = parcel->readInt32(&size)) != OK) { - ALOGE("%s: Failed to read surface list size from parcel", __FUNCTION__); - return err; - } - ALOGV("%s: Read surface list size = %d", __FUNCTION__, size); - - // Do not distinguish null arrays from 0-sized arrays. - for (int i = 0; i < size; ++i) { - // Parcel.writeParcelableArray - size_t len; - const char16_t* className = parcel->readString16Inplace(&len); - ALOGV("%s: Read surface class = %s", __FUNCTION__, - className != NULL ? 
String8(className).string() : ""); - - if (className == NULL) { - continue; - } - - // Surface.writeToParcel - String16 name = parcel->readString16(); - ALOGV("%s: Read surface name = %s", - __FUNCTION__, String8(name).string()); - sp binder(parcel->readStrongBinder()); - ALOGV("%s: Read surface binder = %p", - __FUNCTION__, binder.get()); - - sp surface; - - if (binder != NULL) { - sp gbp = - interface_cast(binder); - surface = new Surface(gbp); - } - - mSurfaceList.push_back(surface); - } - - return OK; -} - -status_t CaptureRequest::writeToParcel(Parcel* parcel) const { - if (parcel == NULL) { - ALOGE("%s: Null parcel", __FUNCTION__); - return BAD_VALUE; - } - - status_t err; - - if ((err = mMetadata.writeToParcel(parcel)) != OK) { - return err; - } - - int32_t size = static_cast(mSurfaceList.size()); - - // Send 0-sized arrays when it's empty. Do not send null arrays. - parcel->writeInt32(size); - - for (int32_t i = 0; i < size; ++i) { - sp surface = mSurfaceList[i]; - - sp binder; - if (surface != 0) { - binder = surface->getIGraphicBufferProducer()->asBinder(); - } - - // not sure if readParcelableArray does this, hard to tell from source - parcel->writeString16(String16("android.view.Surface")); - - // Surface.writeToParcel - parcel->writeString16(String16("unknown_name")); - // Surface.nativeWriteToParcel - parcel->writeStrongBinder(binder); - } - - return OK; -} - -}; // namespace android diff --git a/camera/photography/ICameraDeviceCallbacks.cpp b/camera/photography/ICameraDeviceCallbacks.cpp deleted file mode 100644 index 19763d7..0000000 --- a/camera/photography/ICameraDeviceCallbacks.cpp +++ /dev/null @@ -1,110 +0,0 @@ -/* -** -** Copyright 2013, The Android Open Source Project -** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. 
-*/ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "ICameraDeviceCallbacks" -#include -#include -#include - -#include -#include -#include -#include - -#include -#include "camera/CameraMetadata.h" - -namespace android { - -enum { - NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION, - RESULT_RECEIVED, -}; - -class BpCameraDeviceCallbacks: public BpInterface -{ -public: - BpCameraDeviceCallbacks(const sp& impl) - : BpInterface(impl) - { - } - - // generic callback from camera service to app - void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) - { - ALOGV("notifyCallback"); - Parcel data, reply; - data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor()); - data.writeInt32(msgType); - data.writeInt32(ext1); - data.writeInt32(ext2); - remote()->transact(NOTIFY_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY); - data.writeNoException(); - } - - void onResultReceived(int32_t frameId, const CameraMetadata& result) { - ALOGV("onResultReceived"); - Parcel data, reply; - data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor()); - data.writeInt32(frameId); - result.writeToParcel(&data); - remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY); - data.writeNoException(); - } -}; - -IMPLEMENT_META_INTERFACE(CameraDeviceCallbacks, - "android.hardware.photography.ICameraDeviceCallbacks"); - -// ---------------------------------------------------------------------- - -status_t BnCameraDeviceCallbacks::onTransact( - uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) -{ - ALOGV("onTransact - code = %d", code); - switch(code) { - case NOTIFY_CALLBACK: { - ALOGV("NOTIFY_CALLBACK"); - CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply); - int32_t msgType = data.readInt32(); - int32_t ext1 = data.readInt32(); - int32_t ext2 = data.readInt32(); - notifyCallback(msgType, ext1, ext2); - data.readExceptionCode(); - return NO_ERROR; - } break; - case RESULT_RECEIVED: { - ALOGV("RESULT_RECEIVED"); - CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply); - int32_t frameId = data.readInt32(); - CameraMetadata result; - result.readFromParcel(const_cast(&data)); - onResultReceived(frameId, result); - data.readExceptionCode(); - return NO_ERROR; - break; - } - default: - return BBinder::onTransact(code, data, reply, flags); - } -} - -// ---------------------------------------------------------------------------- - -}; // namespace android - diff --git a/camera/photography/ICameraDeviceUser.cpp b/camera/photography/ICameraDeviceUser.cpp deleted file mode 100644 index 95609da..0000000 --- a/camera/photography/ICameraDeviceUser.cpp +++ /dev/null @@ -1,322 +0,0 @@ -/* -** -** Copyright 2013, The Android Open Source Project -** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. 
-*/ - -// #define LOG_NDEBUG 0 -#define LOG_TAG "ICameraDeviceUser" -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace android { - -typedef Parcel::WritableBlob WritableBlob; -typedef Parcel::ReadableBlob ReadableBlob; - -enum { - DISCONNECT = IBinder::FIRST_CALL_TRANSACTION, - SUBMIT_REQUEST, - CANCEL_REQUEST, - DELETE_STREAM, - CREATE_STREAM, - CREATE_DEFAULT_REQUEST, - GET_CAMERA_INFO, - WAIT_UNTIL_IDLE, -}; - -class BpCameraDeviceUser : public BpInterface -{ -public: - BpCameraDeviceUser(const sp& impl) - : BpInterface(impl) - { - } - - // disconnect from camera service - void disconnect() - { - ALOGV("disconnect"); - Parcel data, reply; - data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); - remote()->transact(DISCONNECT, data, &reply); - reply.readExceptionCode(); - } - - virtual int submitRequest(sp request, bool streaming) - { - Parcel data, reply; - data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); - - // arg0 = CaptureRequest - if (request != 0) { - data.writeInt32(1); - request->writeToParcel(&data); - } else { - data.writeInt32(0); - } - - // arg1 = streaming (bool) - data.writeInt32(streaming); - - remote()->transact(SUBMIT_REQUEST, data, &reply); - - reply.readExceptionCode(); - return reply.readInt32(); - } - - virtual status_t cancelRequest(int requestId) - { - Parcel data, reply; - data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); - data.writeInt32(requestId); - - remote()->transact(CANCEL_REQUEST, data, &reply); - - reply.readExceptionCode(); - return reply.readInt32(); - } - - virtual status_t deleteStream(int streamId) - { - Parcel data, reply; - data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); - data.writeInt32(streamId); - - remote()->transact(DELETE_STREAM, data, &reply); - - reply.readExceptionCode(); - return reply.readInt32(); - } - - virtual status_t createStream(int width, int height, int format, - const sp& bufferProducer) - { - Parcel data, reply; - data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); - data.writeInt32(width); - data.writeInt32(height); - data.writeInt32(format); - - data.writeInt32(1); // marker that bufferProducer is not null - data.writeString16(String16("unknown_name")); // name of surface - sp b(bufferProducer->asBinder()); - data.writeStrongBinder(b); - - remote()->transact(CREATE_STREAM, data, &reply); - - reply.readExceptionCode(); - return reply.readInt32(); - } - - // Create a request object from a template. 
- virtual status_t createDefaultRequest(int templateId, - /*out*/ - CameraMetadata* request) - { - Parcel data, reply; - data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); - data.writeInt32(templateId); - remote()->transact(CREATE_DEFAULT_REQUEST, data, &reply); - - reply.readExceptionCode(); - status_t result = reply.readInt32(); - - CameraMetadata out; - if (reply.readInt32() != 0) { - out.readFromParcel(&reply); - } - - if (request != NULL) { - request->swap(out); - } - return result; - } - - - virtual status_t getCameraInfo(CameraMetadata* info) - { - Parcel data, reply; - data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); - remote()->transact(GET_CAMERA_INFO, data, &reply); - - reply.readExceptionCode(); - status_t result = reply.readInt32(); - - CameraMetadata out; - if (reply.readInt32() != 0) { - out.readFromParcel(&reply); - } - - if (info != NULL) { - info->swap(out); - } - - return result; - } - - virtual status_t waitUntilIdle() - { - ALOGV("waitUntilIdle"); - Parcel data, reply; - data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); - remote()->transact(WAIT_UNTIL_IDLE, data, &reply); - reply.readExceptionCode(); - return reply.readInt32(); - } - -private: - - -}; - -IMPLEMENT_META_INTERFACE(CameraDeviceUser, - "android.hardware.photography.ICameraDeviceUser"); - -// ---------------------------------------------------------------------- - -status_t BnCameraDeviceUser::onTransact( - uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) -{ - switch(code) { - case DISCONNECT: { - ALOGV("DISCONNECT"); - CHECK_INTERFACE(ICameraDeviceUser, data, reply); - disconnect(); - reply->writeNoException(); - return NO_ERROR; - } break; - case SUBMIT_REQUEST: { - CHECK_INTERFACE(ICameraDeviceUser, data, reply); - - // arg0 = request - sp request; - if (data.readInt32() != 0) { - request = new CaptureRequest(); - request->readFromParcel(const_cast(&data)); - } - - // arg1 = streaming (bool) - bool streaming = data.readInt32(); - - // return code: requestId (int32) - reply->writeNoException(); - reply->writeInt32(submitRequest(request, streaming)); - - return NO_ERROR; - } break; - case CANCEL_REQUEST: { - CHECK_INTERFACE(ICameraDeviceUser, data, reply); - int requestId = data.readInt32(); - reply->writeNoException(); - reply->writeInt32(cancelRequest(requestId)); - return NO_ERROR; - } break; - case DELETE_STREAM: { - CHECK_INTERFACE(ICameraDeviceUser, data, reply); - int streamId = data.readInt32(); - reply->writeNoException(); - reply->writeInt32(deleteStream(streamId)); - return NO_ERROR; - } break; - case CREATE_STREAM: { - CHECK_INTERFACE(ICameraDeviceUser, data, reply); - int width, height, format; - - width = data.readInt32(); - ALOGV("%s: CREATE_STREAM: width = %d", __FUNCTION__, width); - height = data.readInt32(); - ALOGV("%s: CREATE_STREAM: height = %d", __FUNCTION__, height); - format = data.readInt32(); - ALOGV("%s: CREATE_STREAM: format = %d", __FUNCTION__, format); - - sp bp; - if (data.readInt32() != 0) { - String16 name = data.readString16(); - bp = interface_cast( - data.readStrongBinder()); - - ALOGV("%s: CREATE_STREAM: bp = %p, name = %s", __FUNCTION__, - bp.get(), String8(name).string()); - } else { - ALOGV("%s: CREATE_STREAM: bp = unset, name = unset", - __FUNCTION__); - } - - status_t ret; - ret = createStream(width, height, format, bp); - - reply->writeNoException(); - ALOGV("%s: CREATE_STREAM: write noException", __FUNCTION__); - reply->writeInt32(ret); - ALOGV("%s: CREATE_STREAM: write ret = %d", 
__FUNCTION__, ret); - - return NO_ERROR; - } break; - - case CREATE_DEFAULT_REQUEST: { - CHECK_INTERFACE(ICameraDeviceUser, data, reply); - - int templateId = data.readInt32(); - - CameraMetadata request; - status_t ret; - ret = createDefaultRequest(templateId, &request); - - reply->writeNoException(); - reply->writeInt32(ret); - - // out-variables are after exception and return value - reply->writeInt32(1); // to mark presence of metadata object - request.writeToParcel(const_cast(reply)); - - return NO_ERROR; - } break; - case GET_CAMERA_INFO: { - CHECK_INTERFACE(ICameraDeviceUser, data, reply); - - CameraMetadata info; - status_t ret; - ret = getCameraInfo(&info); - - reply->writeNoException(); - reply->writeInt32(ret); - - // out-variables are after exception and return value - reply->writeInt32(1); // to mark presence of metadata object - info.writeToParcel(reply); - - return NO_ERROR; - } break; - case WAIT_UNTIL_IDLE: { - CHECK_INTERFACE(ICameraDeviceUser, data, reply); - reply->writeNoException(); - reply->writeInt32(waitUntilIdle()); - return NO_ERROR; - } break; - default: - return BBinder::onTransact(code, data, reply, flags); - } -} - -// ---------------------------------------------------------------------------- - -}; // namespace android diff --git a/include/camera/camera2/CaptureRequest.h b/include/camera/camera2/CaptureRequest.h new file mode 100644 index 0000000..e56d61f --- /dev/null +++ b/include/camera/camera2/CaptureRequest.h @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_PHOTOGRAPHY_CAPTUREREQUEST_H +#define ANDROID_HARDWARE_PHOTOGRAPHY_CAPTUREREQUEST_H + +#include +#include +#include + +namespace android { + +class Surface; + +struct CaptureRequest : public virtual RefBase { +public: + + CameraMetadata mMetadata; + Vector > mSurfaceList; + + /** + * Keep impl up-to-date with CaptureRequest.java in frameworks/base + */ + status_t readFromParcel(Parcel* parcel); + status_t writeToParcel(Parcel* parcel) const; +}; +}; // namespace android + +#endif diff --git a/include/camera/camera2/ICameraDeviceCallbacks.h b/include/camera/camera2/ICameraDeviceCallbacks.h new file mode 100644 index 0000000..041fa65 --- /dev/null +++ b/include/camera/camera2/ICameraDeviceCallbacks.h @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_PHOTOGRAPHY_CALLBACKS_H +#define ANDROID_HARDWARE_PHOTOGRAPHY_CALLBACKS_H + +#include +#include +#include +#include +#include +#include + +namespace android { +class CameraMetadata; + +class ICameraDeviceCallbacks : public IInterface +{ + /** + * Keep up-to-date with ICameraDeviceCallbacks.aidl in frameworks/base + */ +public: + DECLARE_META_INTERFACE(CameraDeviceCallbacks); + + // One way + virtual void notifyCallback(int32_t msgType, + int32_t ext1, + int32_t ext2) = 0; + + // One way + virtual void onResultReceived(int32_t frameId, + const CameraMetadata& result) = 0; +}; + +// ---------------------------------------------------------------------------- + +class BnCameraDeviceCallbacks : public BnInterface +{ +public: + virtual status_t onTransact( uint32_t code, + const Parcel& data, + Parcel* reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif diff --git a/include/camera/camera2/ICameraDeviceUser.h b/include/camera/camera2/ICameraDeviceUser.h new file mode 100644 index 0000000..45988d0 --- /dev/null +++ b/include/camera/camera2/ICameraDeviceUser.h @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_PHOTOGRAPHY_ICAMERADEVICEUSER_H +#define ANDROID_HARDWARE_PHOTOGRAPHY_ICAMERADEVICEUSER_H + +#include +#include + +struct camera_metadata; + +namespace android { + +class ICameraDeviceUserClient; +class IGraphicBufferProducer; +class Surface; +class CaptureRequest; +class CameraMetadata; + +class ICameraDeviceUser : public IInterface +{ + /** + * Keep up-to-date with ICameraDeviceUser.aidl in frameworks/base + */ +public: + DECLARE_META_INTERFACE(CameraDeviceUser); + + virtual void disconnect() = 0; + + /** + * Request Handling + **/ + + virtual int submitRequest(sp request, + bool streaming = false) = 0; + virtual status_t cancelRequest(int requestId) = 0; + + virtual status_t deleteStream(int streamId) = 0; + virtual status_t createStream( + int width, int height, int format, + const sp& bufferProducer) = 0; + + // Create a request object from a template. 
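Taken together, the calls in this interface are meant to be driven in a fixed order: create a stream for an output surface, build a request from one of the HAL templates, point the request at that surface, and submit it as a one-shot or repeating capture. A hypothetical client-side sequence, assuming `device` came from ICameraService::connect() and `producer` from an application Surface (sizes, format and template id are caller-supplied placeholders, not values defined by this patch):

    #include <camera/CameraMetadata.h>
    #include <camera/camera2/CaptureRequest.h>
    #include <camera/camera2/ICameraDeviceUser.h>
    #include <gui/IGraphicBufferProducer.h>
    #include <gui/Surface.h>

    using namespace android;

    static status_t startRepeatingCapture(const sp<ICameraDeviceUser>& device,
                                          const sp<IGraphicBufferProducer>& producer,
                                          int width, int height, int format,
                                          int templateId) {
        // 1. Register an output stream backed by the app's buffer queue.
        //    Negative values indicate failure.
        status_t err = device->createStream(width, height, format, producer);
        if (err < 0) return err;

        // 2. Fill a request's metadata from the chosen template.
        sp<CaptureRequest> request = new CaptureRequest();
        err = device->createDefaultRequest(templateId, &request->mMetadata);
        if (err != OK) return err;

        // 3. Target the request at the stream's Surface and keep it repeating;
        //    cancelRequest(requestId) would stop it again.
        sp<Surface> surface = new Surface(producer);
        request->mSurfaceList.push_back(surface);
        int requestId = device->submitRequest(request, /*streaming*/ true);
        return requestId < 0 ? requestId : OK;
    }
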
+ virtual status_t createDefaultRequest(int templateId, + /*out*/ + CameraMetadata* request) = 0; + // Get static camera metadata + virtual status_t getCameraInfo(/*out*/ + CameraMetadata* info) = 0; + + // Wait until all the submitted requests have finished processing + virtual status_t waitUntilIdle() = 0; +}; + +// ---------------------------------------------------------------------------- + +class BnCameraDeviceUser: public BnInterface +{ +public: + virtual status_t onTransact( uint32_t code, + const Parcel& data, + Parcel* reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif diff --git a/include/camera/photography/CaptureRequest.h b/include/camera/photography/CaptureRequest.h deleted file mode 100644 index e56d61f..0000000 --- a/include/camera/photography/CaptureRequest.h +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_HARDWARE_PHOTOGRAPHY_CAPTUREREQUEST_H -#define ANDROID_HARDWARE_PHOTOGRAPHY_CAPTUREREQUEST_H - -#include -#include -#include - -namespace android { - -class Surface; - -struct CaptureRequest : public virtual RefBase { -public: - - CameraMetadata mMetadata; - Vector > mSurfaceList; - - /** - * Keep impl up-to-date with CaptureRequest.java in frameworks/base - */ - status_t readFromParcel(Parcel* parcel); - status_t writeToParcel(Parcel* parcel) const; -}; -}; // namespace android - -#endif diff --git a/include/camera/photography/ICameraDeviceCallbacks.h b/include/camera/photography/ICameraDeviceCallbacks.h deleted file mode 100644 index 041fa65..0000000 --- a/include/camera/photography/ICameraDeviceCallbacks.h +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
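On the receiving side, a client implements BnCameraDeviceCallbacks (declared in the new camera2/ICameraDeviceCallbacks.h above, and removed from the photography/ copy here). Both callbacks are dispatched FLAG_ONEWAY, so an implementation should return quickly and cannot hand anything back to the service. A minimal hypothetical listener (illustrative only, not part of this patch):

    #define LOG_TAG "DeviceListenerSketch"
    #include <utils/Log.h>

    #include <camera/CameraMetadata.h>
    #include <camera/camera2/ICameraDeviceCallbacks.h>

    using namespace android;

    // Logs incoming notifications; a real client would hand them off to its
    // own frame-tracking state instead of doing work on the binder thread.
    class DeviceListenerSketch : public BnCameraDeviceCallbacks {
    public:
        virtual void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) {
            ALOGV("notify: type=%d ext1=%d ext2=%d", msgType, ext1, ext2);
        }
        virtual void onResultReceived(int32_t frameId, const CameraMetadata& result) {
            (void) result;  // copy out what is needed; the reference is not retained
            ALOGV("capture result received for frame %d", frameId);
        }
    };

An instance of such a class is what would be passed as the camera callback argument of the camera2 ICameraService::connect() overload shown earlier when opening a device.
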
- */ - -#ifndef ANDROID_HARDWARE_PHOTOGRAPHY_CALLBACKS_H -#define ANDROID_HARDWARE_PHOTOGRAPHY_CALLBACKS_H - -#include -#include -#include -#include -#include -#include - -namespace android { -class CameraMetadata; - -class ICameraDeviceCallbacks : public IInterface -{ - /** - * Keep up-to-date with ICameraDeviceCallbacks.aidl in frameworks/base - */ -public: - DECLARE_META_INTERFACE(CameraDeviceCallbacks); - - // One way - virtual void notifyCallback(int32_t msgType, - int32_t ext1, - int32_t ext2) = 0; - - // One way - virtual void onResultReceived(int32_t frameId, - const CameraMetadata& result) = 0; -}; - -// ---------------------------------------------------------------------------- - -class BnCameraDeviceCallbacks : public BnInterface -{ -public: - virtual status_t onTransact( uint32_t code, - const Parcel& data, - Parcel* reply, - uint32_t flags = 0); -}; - -}; // namespace android - -#endif diff --git a/include/camera/photography/ICameraDeviceUser.h b/include/camera/photography/ICameraDeviceUser.h deleted file mode 100644 index 45988d0..0000000 --- a/include/camera/photography/ICameraDeviceUser.h +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_HARDWARE_PHOTOGRAPHY_ICAMERADEVICEUSER_H -#define ANDROID_HARDWARE_PHOTOGRAPHY_ICAMERADEVICEUSER_H - -#include -#include - -struct camera_metadata; - -namespace android { - -class ICameraDeviceUserClient; -class IGraphicBufferProducer; -class Surface; -class CaptureRequest; -class CameraMetadata; - -class ICameraDeviceUser : public IInterface -{ - /** - * Keep up-to-date with ICameraDeviceUser.aidl in frameworks/base - */ -public: - DECLARE_META_INTERFACE(CameraDeviceUser); - - virtual void disconnect() = 0; - - /** - * Request Handling - **/ - - virtual int submitRequest(sp request, - bool streaming = false) = 0; - virtual status_t cancelRequest(int requestId) = 0; - - virtual status_t deleteStream(int streamId) = 0; - virtual status_t createStream( - int width, int height, int format, - const sp& bufferProducer) = 0; - - // Create a request object from a template. 
- virtual status_t createDefaultRequest(int templateId, - /*out*/ - CameraMetadata* request) = 0; - // Get static camera metadata - virtual status_t getCameraInfo(/*out*/ - CameraMetadata* info) = 0; - - // Wait until all the submitted requests have finished processing - virtual status_t waitUntilIdle() = 0; -}; - -// ---------------------------------------------------------------------------- - -class BnCameraDeviceUser: public BnInterface -{ -public: - virtual status_t onTransact( uint32_t code, - const Parcel& data, - Parcel* reply, - uint32_t flags = 0); -}; - -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index 0fede7e..d659ebb 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -8,31 +8,31 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ CameraService.cpp \ - CameraClient.cpp \ - Camera2Client.cpp \ - ProCamera2Client.cpp \ - Camera2ClientBase.cpp \ - CameraDeviceBase.cpp \ - Camera2Device.cpp \ - Camera3Device.cpp \ CameraDeviceFactory.cpp \ - camera2/Parameters.cpp \ - camera2/FrameProcessor.cpp \ - camera2/StreamingProcessor.cpp \ - camera2/JpegProcessor.cpp \ - camera2/CallbackProcessor.cpp \ - camera2/ZslProcessor.cpp \ - camera2/BurstCapture.cpp \ - camera2/JpegCompressor.cpp \ - camera2/CaptureSequencer.cpp \ - camera2/ProFrameProcessor.cpp \ - camera2/ZslProcessor3.cpp \ - camera3/Camera3Stream.cpp \ - camera3/Camera3IOStreamBase.cpp \ - camera3/Camera3InputStream.cpp \ - camera3/Camera3OutputStream.cpp \ - camera3/Camera3ZslStream.cpp \ - photography/CameraDeviceClient.cpp \ + common/Camera2ClientBase.cpp \ + common/CameraDeviceBase.cpp \ + common/FrameProcessorBase.cpp \ + api1/CameraClient.cpp \ + api1/Camera2Client.cpp \ + api1/client2/Parameters.cpp \ + api1/client2/FrameProcessor.cpp \ + api1/client2/StreamingProcessor.cpp \ + api1/client2/JpegProcessor.cpp \ + api1/client2/CallbackProcessor.cpp \ + api1/client2/ZslProcessor.cpp \ + api1/client2/BurstCapture.cpp \ + api1/client2/JpegCompressor.cpp \ + api1/client2/CaptureSequencer.cpp \ + api1/client2/ZslProcessor3.cpp \ + api2/CameraDeviceClient.cpp \ + api_pro/ProCamera2Client.cpp \ + device2/Camera2Device.cpp \ + device3/Camera3Device.cpp \ + device3/Camera3Stream.cpp \ + device3/Camera3IOStreamBase.cpp \ + device3/Camera3InputStream.cpp \ + device3/Camera3OutputStream.cpp \ + device3/Camera3ZslStream.cpp \ gui/RingBufferConsumer.cpp \ LOCAL_SHARED_LIBRARIES:= \ diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp deleted file mode 100644 index 203d7c0..0000000 --- a/services/camera/libcameraservice/Camera2Client.cpp +++ /dev/null @@ -1,1777 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#define LOG_TAG "Camera2" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include - -#include -#include -#include "camera2/Parameters.h" -#include "Camera2Client.h" -#include "Camera2Device.h" -#include "Camera3Device.h" - -#include "camera2/ZslProcessor.h" -#include "camera2/ZslProcessor3.h" - -#define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__); -#define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__); - -namespace android { -using namespace camera2; - -static int getCallingPid() { - return IPCThreadState::self()->getCallingPid(); -} - -// Interface used by CameraService - -Camera2Client::Camera2Client(const sp& cameraService, - const sp& cameraClient, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid, - int deviceVersion): - Camera2ClientBase(cameraService, cameraClient, clientPackageName, - cameraId, cameraFacing, clientPid, clientUid, servicePid), - mParameters(cameraId, cameraFacing), - mDeviceVersion(deviceVersion) -{ - ATRACE_CALL(); - - SharedParameters::Lock l(mParameters); - l.mParameters.state = Parameters::DISCONNECTED; -} - -status_t Camera2Client::initialize(camera_module_t *module) -{ - ATRACE_CALL(); - ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId); - status_t res; - - res = Camera2ClientBase::initialize(module); - if (res != OK) { - return res; - } - - SharedParameters::Lock l(mParameters); - - res = l.mParameters.initialize(&(mDevice->info())); - if (res != OK) { - ALOGE("%s: Camera %d: unable to build defaults: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return NO_INIT; - } - - String8 threadName; - - mStreamingProcessor = new StreamingProcessor(this); - threadName = String8::format("C2-%d-StreamProc", - mCameraId); - mStreamingProcessor->run(threadName.string()); - - mFrameProcessor = new FrameProcessor(mDevice, this); - threadName = String8::format("C2-%d-FrameProc", - mCameraId); - mFrameProcessor->run(threadName.string()); - - mCaptureSequencer = new CaptureSequencer(this); - threadName = String8::format("C2-%d-CaptureSeq", - mCameraId); - mCaptureSequencer->run(threadName.string()); - - mJpegProcessor = new JpegProcessor(this, mCaptureSequencer); - threadName = String8::format("C2-%d-JpegProc", - mCameraId); - mJpegProcessor->run(threadName.string()); - - switch (mDeviceVersion) { - case CAMERA_DEVICE_API_VERSION_2_0: { - sp zslProc = - new ZslProcessor(this, mCaptureSequencer); - mZslProcessor = zslProc; - mZslProcessorThread = zslProc; - break; - } - case CAMERA_DEVICE_API_VERSION_3_0:{ - sp zslProc = - new ZslProcessor3(this, mCaptureSequencer); - mZslProcessor = zslProc; - mZslProcessorThread = zslProc; - break; - } - default: - break; - } - threadName = String8::format("C2-%d-ZslProc", - mCameraId); - mZslProcessorThread->run(threadName.string()); - - mCallbackProcessor = new CallbackProcessor(this); - threadName = String8::format("C2-%d-CallbkProc", - mCameraId); - mCallbackProcessor->run(threadName.string()); - - if (gLogLevel >= 1) { - ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__, - mCameraId); - ALOGD("%s", l.mParameters.paramsFlattened.string()); - } - - return OK; -} - -Camera2Client::~Camera2Client() { - ATRACE_CALL(); - ALOGV("~Camera2Client"); - - mDestructionStarted = true; - - disconnect(); - - ALOGI("Camera %d: Closed", mCameraId); -} - -status_t Camera2Client::dump(int fd, const Vector& args) { - String8 result; - result.appendFormat("Client2[%d] (%p) Client: %s 
PID: %d, dump:\n", - mCameraId, - getRemoteCallback()->asBinder().get(), - String8(mClientPackageName).string(), - mClientPid); - result.append(" State: "); -#define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break; - - const Parameters& p = mParameters.unsafeAccess(); - - result.append(Parameters::getStateName(p.state)); - - result.append("\n Current parameters:\n"); - result.appendFormat(" Preview size: %d x %d\n", - p.previewWidth, p.previewHeight); - result.appendFormat(" Preview FPS range: %d - %d\n", - p.previewFpsRange[0], p.previewFpsRange[1]); - result.appendFormat(" Preview HAL pixel format: 0x%x\n", - p.previewFormat); - result.appendFormat(" Preview transform: %x\n", - p.previewTransform); - result.appendFormat(" Picture size: %d x %d\n", - p.pictureWidth, p.pictureHeight); - result.appendFormat(" Jpeg thumbnail size: %d x %d\n", - p.jpegThumbSize[0], p.jpegThumbSize[1]); - result.appendFormat(" Jpeg quality: %d, thumbnail quality: %d\n", - p.jpegQuality, p.jpegThumbQuality); - result.appendFormat(" Jpeg rotation: %d\n", p.jpegRotation); - result.appendFormat(" GPS tags %s\n", - p.gpsEnabled ? "enabled" : "disabled"); - if (p.gpsEnabled) { - result.appendFormat(" GPS lat x long x alt: %f x %f x %f\n", - p.gpsCoordinates[0], p.gpsCoordinates[1], - p.gpsCoordinates[2]); - result.appendFormat(" GPS timestamp: %lld\n", - p.gpsTimestamp); - result.appendFormat(" GPS processing method: %s\n", - p.gpsProcessingMethod.string()); - } - - result.append(" White balance mode: "); - switch (p.wbMode) { - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT) - CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE) - default: result.append("UNKNOWN\n"); - } - - result.append(" Effect mode: "); - switch (p.effectMode) { - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD) - CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA) - default: result.append("UNKNOWN\n"); - } - - result.append(" Antibanding mode: "); - switch (p.antibandingMode) { - CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO) - CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF) - CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ) - CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ) - default: result.append("UNKNOWN\n"); - } - - result.append(" Scene mode: "); - switch (p.sceneMode) { - case ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED: - result.append("AUTO\n"); break; - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_THEATRE) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BEACH) - 
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SNOW) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SUNSET) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FIREWORKS) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SPORTS) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT) - CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE) - default: result.append("UNKNOWN\n"); - } - - result.append(" Flash mode: "); - switch (p.flashMode) { - CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF) - CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO) - CASE_APPEND_ENUM(Parameters::FLASH_MODE_ON) - CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH) - CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE) - CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID) - default: result.append("UNKNOWN\n"); - } - - result.append(" Focus mode: "); - switch (p.focusMode) { - CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO) - CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO) - CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) - CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_PICTURE) - CASE_APPEND_ENUM(Parameters::FOCUS_MODE_EDOF) - CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY) - CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED) - CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID) - default: result.append("UNKNOWN\n"); - } - - result.append(" Focus state: "); - switch (p.focusState) { - CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE) - CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN) - CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED) - CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN) - CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED) - CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) - default: result.append("UNKNOWN\n"); - } - - result.append(" Focusing areas:\n"); - for (size_t i = 0; i < p.focusingAreas.size(); i++) { - result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n", - p.focusingAreas[i].left, - p.focusingAreas[i].top, - p.focusingAreas[i].right, - p.focusingAreas[i].bottom, - p.focusingAreas[i].weight); - } - - result.appendFormat(" Exposure compensation index: %d\n", - p.exposureCompensation); - - result.appendFormat(" AE lock %s, AWB lock %s\n", - p.autoExposureLock ? "enabled" : "disabled", - p.autoWhiteBalanceLock ? "enabled" : "disabled" ); - - result.appendFormat(" Metering areas:\n"); - for (size_t i = 0; i < p.meteringAreas.size(); i++) { - result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n", - p.meteringAreas[i].left, - p.meteringAreas[i].top, - p.meteringAreas[i].right, - p.meteringAreas[i].bottom, - p.meteringAreas[i].weight); - } - - result.appendFormat(" Zoom index: %d\n", p.zoom); - result.appendFormat(" Video size: %d x %d\n", p.videoWidth, - p.videoHeight); - - result.appendFormat(" Recording hint is %s\n", - p.recordingHint ? "set" : "not set"); - - result.appendFormat(" Video stabilization is %s\n", - p.videoStabilization ? 
"enabled" : "disabled"); - - result.append(" Current streams:\n"); - result.appendFormat(" Preview stream ID: %d\n", - getPreviewStreamId()); - result.appendFormat(" Capture stream ID: %d\n", - getCaptureStreamId()); - result.appendFormat(" Recording stream ID: %d\n", - getRecordingStreamId()); - - result.append(" Quirks for this camera:\n"); - bool haveQuirk = false; - if (p.quirks.triggerAfWithAuto) { - result.appendFormat(" triggerAfWithAuto\n"); - haveQuirk = true; - } - if (p.quirks.useZslFormat) { - result.appendFormat(" useZslFormat\n"); - haveQuirk = true; - } - if (p.quirks.meteringCropRegion) { - result.appendFormat(" meteringCropRegion\n"); - haveQuirk = true; - } - if (!haveQuirk) { - result.appendFormat(" none\n"); - } - - write(fd, result.string(), result.size()); - - mStreamingProcessor->dump(fd, args); - - mCaptureSequencer->dump(fd, args); - - mFrameProcessor->dump(fd, args); - - mZslProcessor->dump(fd, args); - - return dumpDevice(fd, args); -#undef CASE_APPEND_ENUM -} - -// ICamera interface - -void Camera2Client::disconnect() { - ATRACE_CALL(); - Mutex::Autolock icl(mBinderSerializationLock); - - // Allow both client and the media server to disconnect at all times - int callingPid = getCallingPid(); - if (callingPid != mClientPid && callingPid != mServicePid) return; - - if (mDevice == 0) return; - - ALOGV("Camera %d: Shutting down", mCameraId); - - /** - * disconnect() cannot call any methods that might need to promote a - * wp, since disconnect can be called from the destructor, at - * which point all such promotions will fail. - */ - - stopPreviewL(); - - { - SharedParameters::Lock l(mParameters); - if (l.mParameters.state == Parameters::DISCONNECTED) return; - l.mParameters.state = Parameters::DISCONNECTED; - } - - mStreamingProcessor->deletePreviewStream(); - mStreamingProcessor->deleteRecordingStream(); - mJpegProcessor->deleteStream(); - mCallbackProcessor->deleteStream(); - mZslProcessor->deleteStream(); - - mStreamingProcessor->requestExit(); - mFrameProcessor->requestExit(); - mCaptureSequencer->requestExit(); - mJpegProcessor->requestExit(); - mZslProcessorThread->requestExit(); - mCallbackProcessor->requestExit(); - - ALOGV("Camera %d: Waiting for threads", mCameraId); - - mStreamingProcessor->join(); - mFrameProcessor->join(); - mCaptureSequencer->join(); - mJpegProcessor->join(); - mZslProcessorThread->join(); - mCallbackProcessor->join(); - - ALOGV("Camera %d: Disconnecting device", mCameraId); - - mDevice->disconnect(); - - mDevice.clear(); - - CameraService::Client::disconnect(); -} - -status_t Camera2Client::connect(const sp& client) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - - if (mClientPid != 0 && getCallingPid() != mClientPid) { - ALOGE("%s: Camera %d: Connection attempt from pid %d; " - "current locked to pid %d", __FUNCTION__, - mCameraId, getCallingPid(), mClientPid); - return BAD_VALUE; - } - - mClientPid = getCallingPid(); - - mRemoteCallback = client; - mSharedCameraCallbacks = client; - - return OK; -} - -status_t Camera2Client::lock() { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d", - __FUNCTION__, mCameraId, getCallingPid(), mClientPid); - - if (mClientPid == 0) { - mClientPid = getCallingPid(); - return OK; - } - - if (mClientPid != getCallingPid()) { - ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d", - __FUNCTION__, mCameraId, 
getCallingPid(), mClientPid); - return EBUSY; - } - - return OK; -} - -status_t Camera2Client::unlock() { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d", - __FUNCTION__, mCameraId, getCallingPid(), mClientPid); - - if (mClientPid == getCallingPid()) { - SharedParameters::Lock l(mParameters); - if (l.mParameters.state == Parameters::RECORD || - l.mParameters.state == Parameters::VIDEO_SNAPSHOT) { - ALOGD("Not allowed to unlock camera during recording."); - return INVALID_OPERATION; - } - mClientPid = 0; - mRemoteCallback.clear(); - mSharedCameraCallbacks.clear(); - return OK; - } - - ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d", - __FUNCTION__, mCameraId, getCallingPid(), mClientPid); - return EBUSY; -} - -status_t Camera2Client::setPreviewDisplay( - const sp& surface) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - sp binder; - sp window; - if (surface != 0) { - binder = surface->getIGraphicBufferProducer()->asBinder(); - window = surface; - } - - return setPreviewWindowL(binder,window); -} - -status_t Camera2Client::setPreviewTexture( - const sp& bufferProducer) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - sp binder; - sp window; - if (bufferProducer != 0) { - binder = bufferProducer->asBinder(); - window = new Surface(bufferProducer); - } - return setPreviewWindowL(binder, window); -} - -status_t Camera2Client::setPreviewWindowL(const sp& binder, - sp window) { - ATRACE_CALL(); - status_t res; - - if (binder == mPreviewSurface) { - ALOGV("%s: Camera %d: New window is same as old window", - __FUNCTION__, mCameraId); - return NO_ERROR; - } - - Parameters::State state; - { - SharedParameters::Lock l(mParameters); - state = l.mParameters.state; - } - switch (state) { - case Parameters::DISCONNECTED: - case Parameters::RECORD: - case Parameters::STILL_CAPTURE: - case Parameters::VIDEO_SNAPSHOT: - ALOGE("%s: Camera %d: Cannot set preview display while in state %s", - __FUNCTION__, mCameraId, - Parameters::getStateName(state)); - return INVALID_OPERATION; - case Parameters::STOPPED: - case Parameters::WAITING_FOR_PREVIEW_WINDOW: - // OK - break; - case Parameters::PREVIEW: - // Already running preview - need to stop and create a new stream - res = stopStream(); - if (res != OK) { - ALOGE("%s: Unable to stop preview to swap windows: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - state = Parameters::WAITING_FOR_PREVIEW_WINDOW; - break; - } - - mPreviewSurface = binder; - res = mStreamingProcessor->setPreviewWindow(window); - if (res != OK) { - ALOGE("%s: Unable to set new preview window: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - - if (state == Parameters::WAITING_FOR_PREVIEW_WINDOW) { - SharedParameters::Lock l(mParameters); - l.mParameters.state = state; - return startPreviewL(l.mParameters, false); - } - - return OK; -} - -void Camera2Client::setPreviewCallbackFlag(int flag) { - ATRACE_CALL(); - ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag); - Mutex::Autolock icl(mBinderSerializationLock); - - if ( checkPid(__FUNCTION__) != OK) return; - - SharedParameters::Lock l(mParameters); - setPreviewCallbackFlagL(l.mParameters, 
flag); -} - -void Camera2Client::setPreviewCallbackFlagL(Parameters ¶ms, int flag) { - status_t res = OK; - - switch(params.state) { - case Parameters::STOPPED: - case Parameters::WAITING_FOR_PREVIEW_WINDOW: - case Parameters::PREVIEW: - case Parameters::STILL_CAPTURE: - // OK - break; - default: - if (flag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) { - ALOGE("%s: Camera %d: Can't use preview callbacks " - "in state %d", __FUNCTION__, mCameraId, params.state); - return; - } - } - - if (flag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) { - ALOGV("%s: setting oneshot", __FUNCTION__); - params.previewCallbackOneShot = true; - } - if (params.previewCallbackFlags != (uint32_t)flag) { - - if (flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) { - // Disable any existing preview callback window when enabling - // preview callback flags - res = mCallbackProcessor->setCallbackWindow(NULL); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to clear preview callback surface:" - " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); - return; - } - params.previewCallbackSurface = false; - } - - params.previewCallbackFlags = flag; - - if (params.state == Parameters::PREVIEW) { - res = startPreviewL(params, true); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to refresh request in state %s", - __FUNCTION__, mCameraId, - Parameters::getStateName(params.state)); - } - } - } -} - -status_t Camera2Client::setPreviewCallbackTarget( - const sp& callbackProducer) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - sp window; - if (callbackProducer != 0) { - window = new Surface(callbackProducer); - } - - res = mCallbackProcessor->setCallbackWindow(window); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - - SharedParameters::Lock l(mParameters); - - if (window != NULL) { - // Disable traditional callbacks when a valid callback target is given - l.mParameters.previewCallbackFlags = CAMERA_FRAME_CALLBACK_FLAG_NOOP; - l.mParameters.previewCallbackOneShot = false; - l.mParameters.previewCallbackSurface = true; - } else { - // Disable callback target if given a NULL interface. 
- l.mParameters.previewCallbackSurface = false; - } - - switch(l.mParameters.state) { - case Parameters::PREVIEW: - res = startPreviewL(l.mParameters, true); - break; - case Parameters::RECORD: - case Parameters::VIDEO_SNAPSHOT: - res = startRecordingL(l.mParameters, true); - break; - default: - break; - } - if (res != OK) { - ALOGE("%s: Camera %d: Unable to refresh request in state %s", - __FUNCTION__, mCameraId, - Parameters::getStateName(l.mParameters.state)); - } - - return OK; -} - - -status_t Camera2Client::startPreview() { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - SharedParameters::Lock l(mParameters); - return startPreviewL(l.mParameters, false); -} - -status_t Camera2Client::startPreviewL(Parameters ¶ms, bool restart) { - ATRACE_CALL(); - status_t res; - - ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart); - - if ( (params.state == Parameters::PREVIEW || - params.state == Parameters::RECORD || - params.state == Parameters::VIDEO_SNAPSHOT) - && !restart) { - // Succeed attempt to re-enter a streaming state - ALOGI("%s: Camera %d: Preview already active, ignoring restart", - __FUNCTION__, mCameraId); - return OK; - } - if (params.state > Parameters::PREVIEW && !restart) { - ALOGE("%s: Can't start preview in state %s", - __FUNCTION__, - Parameters::getStateName(params.state)); - return INVALID_OPERATION; - } - - if (!mStreamingProcessor->haveValidPreviewWindow()) { - params.state = Parameters::WAITING_FOR_PREVIEW_WINDOW; - return OK; - } - params.state = Parameters::STOPPED; - - res = mStreamingProcessor->updatePreviewStream(params); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update preview stream: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - - // We could wait to create the JPEG output stream until first actual use - // (first takePicture call). However, this would substantially increase the - // first capture latency on HAL3 devices, and potentially on some HAL2 - // devices. So create it unconditionally at preview start. As a drawback, - // this increases gralloc memory consumption for applications that don't - // ever take a picture. - // TODO: Find a better compromise, though this likely would involve HAL - // changes. - res = updateProcessorStream(mJpegProcessor, params); - if (res != OK) { - ALOGE("%s: Camera %d: Can't pre-configure still image " - "stream: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - - Vector outputStreams; - bool callbacksEnabled = (params.previewCallbackFlags & - CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) || - params.previewCallbackSurface; - - if (callbacksEnabled) { - // Can't have recording stream hanging around when enabling callbacks, - // since it exceeds the max stream count on some devices. 
- if (mStreamingProcessor->getRecordingStreamId() != NO_STREAM) { - ALOGV("%s: Camera %d: Clearing out recording stream before " - "creating callback stream", __FUNCTION__, mCameraId); - res = mStreamingProcessor->stopStream(); - if (res != OK) { - ALOGE("%s: Camera %d: Can't stop streaming to delete " - "recording stream", __FUNCTION__, mCameraId); - return res; - } - res = mStreamingProcessor->deleteRecordingStream(); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to delete recording stream before " - "enabling callbacks: %s (%d)", __FUNCTION__, mCameraId, - strerror(-res), res); - return res; - } - } - - res = mCallbackProcessor->updateStream(params); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - outputStreams.push(getCallbackStreamId()); - } - if (params.zslMode && !params.recordingHint) { - res = updateProcessorStream(mZslProcessor, params); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - outputStreams.push(getZslStreamId()); - } - - outputStreams.push(getPreviewStreamId()); - - if (!params.recordingHint) { - if (!restart) { - res = mStreamingProcessor->updatePreviewRequest(params); - if (res != OK) { - ALOGE("%s: Camera %d: Can't set up preview request: " - "%s (%d)", __FUNCTION__, mCameraId, - strerror(-res), res); - return res; - } - } - res = mStreamingProcessor->startStream(StreamingProcessor::PREVIEW, - outputStreams); - } else { - if (!restart) { - res = mStreamingProcessor->updateRecordingRequest(params); - if (res != OK) { - ALOGE("%s: Camera %d: Can't set up preview request with " - "record hint: %s (%d)", __FUNCTION__, mCameraId, - strerror(-res), res); - return res; - } - } - res = mStreamingProcessor->startStream(StreamingProcessor::RECORD, - outputStreams); - } - if (res != OK) { - ALOGE("%s: Camera %d: Unable to start streaming preview: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - - params.state = Parameters::PREVIEW; - return OK; -} - -void Camera2Client::stopPreview() { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return; - stopPreviewL(); -} - -void Camera2Client::stopPreviewL() { - ATRACE_CALL(); - status_t res; - const nsecs_t kStopCaptureTimeout = 3000000000LL; // 3 seconds - Parameters::State state; - { - SharedParameters::Lock l(mParameters); - state = l.mParameters.state; - } - - switch (state) { - case Parameters::DISCONNECTED: - // Nothing to do. 
- break; - case Parameters::STOPPED: - case Parameters::VIDEO_SNAPSHOT: - case Parameters::STILL_CAPTURE: - mCaptureSequencer->waitUntilIdle(kStopCaptureTimeout); - // no break - case Parameters::RECORD: - case Parameters::PREVIEW: - res = stopStream(); - if (res != OK) { - ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - } - res = mDevice->waitUntilDrained(); - if (res != OK) { - ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - } - // no break - case Parameters::WAITING_FOR_PREVIEW_WINDOW: { - SharedParameters::Lock l(mParameters); - l.mParameters.state = Parameters::STOPPED; - commandStopFaceDetectionL(l.mParameters); - break; - } - default: - ALOGE("%s: Camera %d: Unknown state %d", __FUNCTION__, mCameraId, - state); - } -} - -bool Camera2Client::previewEnabled() { - ATRACE_CALL(); - Mutex::Autolock icl(mBinderSerializationLock); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return false; - - SharedParameters::Lock l(mParameters); - return l.mParameters.state == Parameters::PREVIEW; -} - -status_t Camera2Client::storeMetaDataInBuffers(bool enabled) { - ATRACE_CALL(); - Mutex::Autolock icl(mBinderSerializationLock); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - SharedParameters::Lock l(mParameters); - switch (l.mParameters.state) { - case Parameters::RECORD: - case Parameters::VIDEO_SNAPSHOT: - ALOGE("%s: Camera %d: Can't be called in state %s", - __FUNCTION__, mCameraId, - Parameters::getStateName(l.mParameters.state)); - return INVALID_OPERATION; - default: - // OK - break; - } - - l.mParameters.storeMetadataInBuffers = enabled; - - return OK; -} - -status_t Camera2Client::startRecording() { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - SharedParameters::Lock l(mParameters); - - return startRecordingL(l.mParameters, false); -} - -status_t Camera2Client::startRecordingL(Parameters ¶ms, bool restart) { - status_t res; - - ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart); - - switch (params.state) { - case Parameters::STOPPED: - res = startPreviewL(params, false); - if (res != OK) return res; - break; - case Parameters::PREVIEW: - // Ready to go - break; - case Parameters::RECORD: - case Parameters::VIDEO_SNAPSHOT: - // OK to call this when recording is already on, just skip unless - // we're looking to restart - if (!restart) return OK; - break; - default: - ALOGE("%s: Camera %d: Can't start recording in state %s", - __FUNCTION__, mCameraId, - Parameters::getStateName(params.state)); - return INVALID_OPERATION; - }; - - if (!params.storeMetadataInBuffers) { - ALOGE("%s: Camera %d: Recording only supported in metadata mode, but " - "non-metadata recording mode requested!", __FUNCTION__, - mCameraId); - return INVALID_OPERATION; - } - - if (!restart) { - mCameraService->playSound(CameraService::SOUND_RECORDING); - mStreamingProcessor->updateRecordingRequest(params); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - } - - // Not all devices can support a preview callback stream and a recording - // stream at the same time, so assume none of them can. 
- if (mCallbackProcessor->getStreamId() != NO_STREAM) { - ALOGV("%s: Camera %d: Clearing out callback stream before " - "creating recording stream", __FUNCTION__, mCameraId); - res = mStreamingProcessor->stopStream(); - if (res != OK) { - ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream", - __FUNCTION__, mCameraId); - return res; - } - res = mCallbackProcessor->deleteStream(); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to delete callback stream before " - "record: %s (%d)", __FUNCTION__, mCameraId, - strerror(-res), res); - return res; - } - } - // Disable callbacks if they're enabled; can't record and use callbacks, - // and we can't fail record start without stagefright asserting. - params.previewCallbackFlags = 0; - - res = updateProcessorStream< - StreamingProcessor, - &StreamingProcessor::updateRecordingStream>(mStreamingProcessor, - params); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update recording stream: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - - Vector outputStreams; - outputStreams.push(getPreviewStreamId()); - outputStreams.push(getRecordingStreamId()); - - res = mStreamingProcessor->startStream(StreamingProcessor::RECORD, - outputStreams); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to start recording stream: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - - if (params.state < Parameters::RECORD) { - params.state = Parameters::RECORD; - } - - return OK; -} - -void Camera2Client::stopRecording() { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - SharedParameters::Lock l(mParameters); - - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return; - - switch (l.mParameters.state) { - case Parameters::RECORD: - // OK to stop - break; - case Parameters::STOPPED: - case Parameters::PREVIEW: - case Parameters::STILL_CAPTURE: - case Parameters::VIDEO_SNAPSHOT: - default: - ALOGE("%s: Camera %d: Can't stop recording in state %s", - __FUNCTION__, mCameraId, - Parameters::getStateName(l.mParameters.state)); - return; - }; - - mCameraService->playSound(CameraService::SOUND_RECORDING); - - res = startPreviewL(l.mParameters, true); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to return to preview", - __FUNCTION__, mCameraId); - } -} - -bool Camera2Client::recordingEnabled() { - ATRACE_CALL(); - Mutex::Autolock icl(mBinderSerializationLock); - - if ( checkPid(__FUNCTION__) != OK) return false; - - return recordingEnabledL(); -} - -bool Camera2Client::recordingEnabledL() { - ATRACE_CALL(); - SharedParameters::Lock l(mParameters); - - return (l.mParameters.state == Parameters::RECORD - || l.mParameters.state == Parameters::VIDEO_SNAPSHOT); -} - -void Camera2Client::releaseRecordingFrame(const sp& mem) { - ATRACE_CALL(); - Mutex::Autolock icl(mBinderSerializationLock); - if ( checkPid(__FUNCTION__) != OK) return; - - mStreamingProcessor->releaseRecordingFrame(mem); -} - -status_t Camera2Client::autoFocus() { - ATRACE_CALL(); - Mutex::Autolock icl(mBinderSerializationLock); - ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - int triggerId; - bool notifyImmediately = false; - bool notifySuccess = false; - { - SharedParameters::Lock l(mParameters); - if (l.mParameters.state < Parameters::PREVIEW) { - return INVALID_OPERATION; - } - - /** - * If the camera does not support auto-focus, it is a no-op and - * onAutoFocus(boolean, Camera) callback will 
be called immediately - * with a fake value of success set to true. - * - * Similarly, if focus mode is set to INFINITY, there's no reason to - * bother the HAL. - */ - if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED || - l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) { - notifyImmediately = true; - notifySuccess = true; - } - /** - * If we're in CAF mode, and AF has already been locked, just fire back - * the callback right away; the HAL would not send a notification since - * no state change would happen on a AF trigger. - */ - if ( (l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_PICTURE || - l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) && - l.mParameters.focusState == ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED ) { - notifyImmediately = true; - notifySuccess = true; - } - /** - * Send immediate notification back to client - */ - if (notifyImmediately) { - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - if (l.mRemoteCallback != 0) { - l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, - notifySuccess ? 1 : 0, 0); - } - return OK; - } - /** - * Handle quirk mode for AF in scene modes - */ - if (l.mParameters.quirks.triggerAfWithAuto && - l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED && - l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO && - !l.mParameters.focusingAreas[0].isEmpty()) { - ALOGV("%s: Quirk: Switching from focusMode %d to AUTO", - __FUNCTION__, l.mParameters.focusMode); - l.mParameters.shadowFocusMode = l.mParameters.focusMode; - l.mParameters.focusMode = Parameters::FOCUS_MODE_AUTO; - updateRequests(l.mParameters); - } - - l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter; - triggerId = l.mParameters.currentAfTriggerId; - } - syncWithDevice(); - - mDevice->triggerAutofocus(triggerId); - - return OK; -} - -status_t Camera2Client::cancelAutoFocus() { - ATRACE_CALL(); - Mutex::Autolock icl(mBinderSerializationLock); - ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - int triggerId; - { - SharedParameters::Lock l(mParameters); - // Canceling does nothing in FIXED or INFINITY modes - if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED || - l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) { - return OK; - } - triggerId = ++l.mParameters.afTriggerCounter; - - // When using triggerAfWithAuto quirk, may need to reset focus mode to - // the real state at this point. No need to cancel explicitly if - // changing the AF mode. 
- if (l.mParameters.shadowFocusMode != Parameters::FOCUS_MODE_INVALID) { - ALOGV("%s: Quirk: Restoring focus mode to %d", __FUNCTION__, - l.mParameters.shadowFocusMode); - l.mParameters.focusMode = l.mParameters.shadowFocusMode; - l.mParameters.shadowFocusMode = Parameters::FOCUS_MODE_INVALID; - updateRequests(l.mParameters); - - return OK; - } - } - syncWithDevice(); - - mDevice->triggerCancelAutofocus(triggerId); - - return OK; -} - -status_t Camera2Client::takePicture(int msgType) { - ATRACE_CALL(); - Mutex::Autolock icl(mBinderSerializationLock); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - { - SharedParameters::Lock l(mParameters); - switch (l.mParameters.state) { - case Parameters::DISCONNECTED: - case Parameters::STOPPED: - case Parameters::WAITING_FOR_PREVIEW_WINDOW: - ALOGE("%s: Camera %d: Cannot take picture without preview enabled", - __FUNCTION__, mCameraId); - return INVALID_OPERATION; - case Parameters::PREVIEW: - // Good to go for takePicture - res = commandStopFaceDetectionL(l.mParameters); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to stop face detection for still capture", - __FUNCTION__, mCameraId); - return res; - } - l.mParameters.state = Parameters::STILL_CAPTURE; - break; - case Parameters::RECORD: - // Good to go for video snapshot - l.mParameters.state = Parameters::VIDEO_SNAPSHOT; - break; - case Parameters::STILL_CAPTURE: - case Parameters::VIDEO_SNAPSHOT: - ALOGE("%s: Camera %d: Already taking a picture", - __FUNCTION__, mCameraId); - return INVALID_OPERATION; - } - - ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId); - - res = updateProcessorStream(mJpegProcessor, l.mParameters); - if (res != OK) { - ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - } - - // Need HAL to have correct settings before (possibly) triggering precapture - syncWithDevice(); - - res = mCaptureSequencer->startCapture(msgType); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to start capture: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - } - - return res; -} - -status_t Camera2Client::setParameters(const String8& params) { - ATRACE_CALL(); - ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); - Mutex::Autolock icl(mBinderSerializationLock); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - SharedParameters::Lock l(mParameters); - - res = l.mParameters.set(params); - if (res != OK) return res; - - res = updateRequests(l.mParameters); - - return res; -} - -String8 Camera2Client::getParameters() const { - ATRACE_CALL(); - ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); - Mutex::Autolock icl(mBinderSerializationLock); - if ( checkPid(__FUNCTION__) != OK) return String8(); - - SharedParameters::ReadLock l(mParameters); - - return l.mParameters.get(); -} - -status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) { - ATRACE_CALL(); - Mutex::Autolock icl(mBinderSerializationLock); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - ALOGV("%s: Camera %d: Command %d (%d, %d)", __FUNCTION__, mCameraId, - cmd, arg1, arg2); - - switch (cmd) { - case CAMERA_CMD_START_SMOOTH_ZOOM: - return commandStartSmoothZoomL(); - case CAMERA_CMD_STOP_SMOOTH_ZOOM: - return commandStopSmoothZoomL(); - case CAMERA_CMD_SET_DISPLAY_ORIENTATION: - return commandSetDisplayOrientationL(arg1); - case CAMERA_CMD_ENABLE_SHUTTER_SOUND: - return commandEnableShutterSoundL(arg1 == 1); - case 
CAMERA_CMD_PLAY_RECORDING_SOUND: - return commandPlayRecordingSoundL(); - case CAMERA_CMD_START_FACE_DETECTION: - return commandStartFaceDetectionL(arg1); - case CAMERA_CMD_STOP_FACE_DETECTION: { - SharedParameters::Lock l(mParameters); - return commandStopFaceDetectionL(l.mParameters); - } - case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG: - return commandEnableFocusMoveMsgL(arg1 == 1); - case CAMERA_CMD_PING: - return commandPingL(); - case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT: - return commandSetVideoBufferCountL(arg1); - default: - ALOGE("%s: Unknown command %d (arguments %d, %d)", - __FUNCTION__, cmd, arg1, arg2); - return BAD_VALUE; - } -} - -status_t Camera2Client::commandStartSmoothZoomL() { - ALOGE("%s: Unimplemented!", __FUNCTION__); - return OK; -} - -status_t Camera2Client::commandStopSmoothZoomL() { - ALOGE("%s: Unimplemented!", __FUNCTION__); - return OK; -} - -status_t Camera2Client::commandSetDisplayOrientationL(int degrees) { - int transform = Parameters::degToTransform(degrees, - mCameraFacing == CAMERA_FACING_FRONT); - if (transform == -1) { - ALOGE("%s: Camera %d: Error setting %d as display orientation value", - __FUNCTION__, mCameraId, degrees); - return BAD_VALUE; - } - SharedParameters::Lock l(mParameters); - if (transform != l.mParameters.previewTransform && - getPreviewStreamId() != NO_STREAM) { - mDevice->setStreamTransform(getPreviewStreamId(), transform); - } - l.mParameters.previewTransform = transform; - return OK; -} - -status_t Camera2Client::commandEnableShutterSoundL(bool enable) { - SharedParameters::Lock l(mParameters); - if (enable) { - l.mParameters.playShutterSound = true; - return OK; - } - - // Disabling shutter sound may not be allowed. In that case only - // allow the mediaserver process to disable the sound. - char value[PROPERTY_VALUE_MAX]; - property_get("ro.camera.sound.forced", value, "0"); - if (strncmp(value, "0", 2) != 0) { - // Disabling shutter sound is not allowed. Deny if the current - // process is not mediaserver. - if (getCallingPid() != getpid()) { - ALOGE("Failed to disable shutter sound. 
Permission denied (pid %d)", - getCallingPid()); - return PERMISSION_DENIED; - } - } - - l.mParameters.playShutterSound = false; - return OK; -} - -status_t Camera2Client::commandPlayRecordingSoundL() { - mCameraService->playSound(CameraService::SOUND_RECORDING); - return OK; -} - -status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) { - ALOGV("%s: Camera %d: Starting face detection", - __FUNCTION__, mCameraId); - status_t res; - SharedParameters::Lock l(mParameters); - switch (l.mParameters.state) { - case Parameters::DISCONNECTED: - case Parameters::STOPPED: - case Parameters::WAITING_FOR_PREVIEW_WINDOW: - case Parameters::STILL_CAPTURE: - ALOGE("%s: Camera %d: Cannot start face detection without preview active", - __FUNCTION__, mCameraId); - return INVALID_OPERATION; - case Parameters::PREVIEW: - case Parameters::RECORD: - case Parameters::VIDEO_SNAPSHOT: - // Good to go for starting face detect - break; - } - // Ignoring type - if (l.mParameters.fastInfo.bestFaceDetectMode == - ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) { - ALOGE("%s: Camera %d: Face detection not supported", - __FUNCTION__, mCameraId); - return BAD_VALUE; - } - if (l.mParameters.enableFaceDetect) return OK; - - l.mParameters.enableFaceDetect = true; - - res = updateRequests(l.mParameters); - - return res; -} - -status_t Camera2Client::commandStopFaceDetectionL(Parameters ¶ms) { - status_t res = OK; - ALOGV("%s: Camera %d: Stopping face detection", - __FUNCTION__, mCameraId); - - if (!params.enableFaceDetect) return OK; - - params.enableFaceDetect = false; - - if (params.state == Parameters::PREVIEW - || params.state == Parameters::RECORD - || params.state == Parameters::VIDEO_SNAPSHOT) { - res = updateRequests(params); - } - - return res; -} - -status_t Camera2Client::commandEnableFocusMoveMsgL(bool enable) { - SharedParameters::Lock l(mParameters); - l.mParameters.enableFocusMoveMessages = enable; - - return OK; -} - -status_t Camera2Client::commandPingL() { - // Always ping back if access is proper and device is alive - SharedParameters::Lock l(mParameters); - if (l.mParameters.state != Parameters::DISCONNECTED) { - return OK; - } else { - return NO_INIT; - } -} - -status_t Camera2Client::commandSetVideoBufferCountL(size_t count) { - if (recordingEnabledL()) { - ALOGE("%s: Camera %d: Error setting video buffer count after " - "recording was started", __FUNCTION__, mCameraId); - return INVALID_OPERATION; - } - - return mStreamingProcessor->setRecordingBufferCount(count); -} - -/** Device-related methods */ -void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) { - ALOGV("%s: Autofocus state now %d, last trigger %d", - __FUNCTION__, newState, triggerId); - bool sendCompletedMessage = false; - bool sendMovingMessage = false; - - bool success = false; - bool afInMotion = false; - { - SharedParameters::Lock l(mParameters); - l.mParameters.focusState = newState; - switch (l.mParameters.focusMode) { - case Parameters::FOCUS_MODE_AUTO: - case Parameters::FOCUS_MODE_MACRO: - // Don't send notifications upstream if they're not for the current AF - // trigger. For example, if cancel was called in between, or if we - // already sent a notification about this AF call. 
- if (triggerId != l.mParameters.currentAfTriggerId) break; - switch (newState) { - case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED: - success = true; - // no break - case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: - sendCompletedMessage = true; - l.mParameters.currentAfTriggerId = -1; - break; - case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN: - // Just starting focusing, ignore - break; - case ANDROID_CONTROL_AF_STATE_INACTIVE: - case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN: - case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED: - default: - // Unexpected in AUTO/MACRO mode - ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d", - __FUNCTION__, newState); - break; - } - break; - case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO: - case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE: - switch (newState) { - case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED: - success = true; - // no break - case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: - // Don't send notifications upstream if they're not for - // the current AF trigger. For example, if cancel was - // called in between, or if we already sent a - // notification about this AF call. - // Send both a 'AF done' callback and a 'AF move' callback - if (triggerId != l.mParameters.currentAfTriggerId) break; - sendCompletedMessage = true; - afInMotion = false; - if (l.mParameters.enableFocusMoveMessages && - l.mParameters.afInMotion) { - sendMovingMessage = true; - } - l.mParameters.currentAfTriggerId = -1; - break; - case ANDROID_CONTROL_AF_STATE_INACTIVE: - // Cancel was called, or we switched state; care if - // currently moving - afInMotion = false; - if (l.mParameters.enableFocusMoveMessages && - l.mParameters.afInMotion) { - sendMovingMessage = true; - } - break; - case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN: - // Start passive scan, inform upstream - afInMotion = true; - // no break - case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED: - // Stop passive scan, inform upstream - if (l.mParameters.enableFocusMoveMessages) { - sendMovingMessage = true; - } - break; - } - l.mParameters.afInMotion = afInMotion; - break; - case Parameters::FOCUS_MODE_EDOF: - case Parameters::FOCUS_MODE_INFINITY: - case Parameters::FOCUS_MODE_FIXED: - default: - if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) { - ALOGE("%s: Unexpected AF state change %d " - "(ID %d) in focus mode %d", - __FUNCTION__, newState, triggerId, - l.mParameters.focusMode); - } - } - } - if (sendMovingMessage) { - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - if (l.mRemoteCallback != 0) { - l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE, - afInMotion ? 1 : 0, 0); - } - } - if (sendCompletedMessage) { - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - if (l.mRemoteCallback != 0) { - l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, - success ? 
1 : 0, 0); - } - } -} - -void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) { - ALOGV("%s: Autoexposure state now %d, last trigger %d", - __FUNCTION__, newState, triggerId); - mCaptureSequencer->notifyAutoExposure(newState, triggerId); -} - -camera2::SharedParameters& Camera2Client::getParameters() { - return mParameters; -} - -int Camera2Client::getPreviewStreamId() const { - return mStreamingProcessor->getPreviewStreamId(); -} - -int Camera2Client::getCaptureStreamId() const { - return mJpegProcessor->getStreamId(); -} - -int Camera2Client::getCallbackStreamId() const { - return mCallbackProcessor->getStreamId(); -} - -int Camera2Client::getRecordingStreamId() const { - return mStreamingProcessor->getRecordingStreamId(); -} - -int Camera2Client::getZslStreamId() const { - return mZslProcessor->getStreamId(); -} - -status_t Camera2Client::registerFrameListener(int32_t minId, int32_t maxId, - wp listener) { - return mFrameProcessor->registerListener(minId, maxId, listener); -} - -status_t Camera2Client::removeFrameListener(int32_t minId, int32_t maxId, - wp listener) { - return mFrameProcessor->removeListener(minId, maxId, listener); -} - -status_t Camera2Client::stopStream() { - return mStreamingProcessor->stopStream(); -} - -const int32_t Camera2Client::kPreviewRequestIdStart; -const int32_t Camera2Client::kPreviewRequestIdEnd; -const int32_t Camera2Client::kRecordingRequestIdStart; -const int32_t Camera2Client::kRecordingRequestIdEnd; -const int32_t Camera2Client::kCaptureRequestIdStart; -const int32_t Camera2Client::kCaptureRequestIdEnd; - -/** Utility methods */ - -status_t Camera2Client::updateRequests(Parameters ¶ms) { - status_t res; - - ALOGV("%s: Camera %d: state = %d", __FUNCTION__, getCameraId(), params.state); - - res = mStreamingProcessor->incrementStreamingIds(); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to increment request IDs: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - - res = mStreamingProcessor->updatePreviewRequest(params); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update preview request: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - res = mStreamingProcessor->updateRecordingRequest(params); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - - if (params.state == Parameters::PREVIEW) { - res = startPreviewL(params, true); - if (res != OK) { - ALOGE("%s: Camera %d: Error streaming new preview request: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - } else if (params.state == Parameters::RECORD || - params.state == Parameters::VIDEO_SNAPSHOT) { - res = startRecordingL(params, true); - if (res != OK) { - ALOGE("%s: Camera %d: Error streaming new record request: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - return res; - } - } - return res; -} - - -size_t Camera2Client::calculateBufferSize(int width, int height, - int format, int stride) { - switch (format) { - case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16 - return width * height * 2; - case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21 - return width * height * 3 / 2; - case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2 - return width * height * 2; - case HAL_PIXEL_FORMAT_YV12: { // YV12 - size_t ySize = stride * height; - size_t uvStride = (stride / 2 + 0xF) & ~0xF; - size_t uvSize = uvStride * height / 2; - return ySize + uvSize * 2; - } - case HAL_PIXEL_FORMAT_RGB_565: - return 
width * height * 2; - case HAL_PIXEL_FORMAT_RGBA_8888: - return width * height * 4; - case HAL_PIXEL_FORMAT_RAW_SENSOR: - return width * height * 2; - default: - ALOGE("%s: Unknown preview format: %x", - __FUNCTION__, format); - return 0; - } -} - -status_t Camera2Client::syncWithDevice() { - ATRACE_CALL(); - const nsecs_t kMaxSyncTimeout = 500000000; // 500 ms - status_t res; - - int32_t activeRequestId = mStreamingProcessor->getActiveRequestId(); - if (activeRequestId == 0) return OK; - - res = mDevice->waitUntilRequestReceived(activeRequestId, kMaxSyncTimeout); - if (res == TIMED_OUT) { - ALOGE("%s: Camera %d: Timed out waiting sync with HAL", - __FUNCTION__, mCameraId); - } else if (res != OK) { - ALOGE("%s: Camera %d: Error while waiting to sync with HAL", - __FUNCTION__, mCameraId); - } - return res; -} - -template -status_t Camera2Client::updateProcessorStream(sp processor, - camera2::Parameters params) { - // No default template arguments until C++11, so we need this overload - return updateProcessorStream( - processor, params); -} - -template -status_t Camera2Client::updateProcessorStream(sp processor, - Parameters params) { - status_t res; - - // Get raw pointer since sp doesn't have operator->* - ProcessorT *processorPtr = processor.get(); - res = (processorPtr->*updateStreamF)(params); - - /** - * Can't update the stream if it's busy? - * - * Then we need to stop the device (by temporarily clearing the request - * queue) and then try again. Resume streaming once we're done. - */ - if (res == -EBUSY) { - ALOGV("%s: Camera %d: Pausing to update stream", __FUNCTION__, - mCameraId); - res = mStreamingProcessor->togglePauseStream(/*pause*/true); - if (res != OK) { - ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - } - - res = mDevice->waitUntilDrained(); - if (res != OK) { - ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - } - - res = (processorPtr->*updateStreamF)(params); - if (res != OK) { - ALOGE("%s: Camera %d: Failed to update processing stream " - " despite having halted streaming first: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - } - - res = mStreamingProcessor->togglePauseStream(/*pause*/false); - if (res != OK) { - ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - } - } - - return res; -} - -} // namespace android diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h deleted file mode 100644 index 078e3a3..0000000 --- a/services/camera/libcameraservice/Camera2Client.h +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_H -#define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_H - -#include "CameraDeviceBase.h" -#include "CameraService.h" -#include "camera2/Parameters.h" -#include "camera2/FrameProcessor.h" -#include "camera2/StreamingProcessor.h" -#include "camera2/JpegProcessor.h" -#include "camera2/ZslProcessorInterface.h" -#include "camera2/CaptureSequencer.h" -#include "camera2/CallbackProcessor.h" -#include "Camera2ClientBase.h" - -namespace android { - -class IMemory; -/** - * Interface between android.hardware.Camera API and Camera HAL device for versions - * CAMERA_DEVICE_API_VERSION_2_0 and 3_0. - */ -class Camera2Client : - public Camera2ClientBase -{ -public: - /** - * ICamera interface (see ICamera for details) - */ - - virtual void disconnect(); - virtual status_t connect(const sp& client); - virtual status_t lock(); - virtual status_t unlock(); - virtual status_t setPreviewDisplay(const sp& surface); - virtual status_t setPreviewTexture( - const sp& bufferProducer); - virtual void setPreviewCallbackFlag(int flag); - virtual status_t setPreviewCallbackTarget( - const sp& callbackProducer); - - virtual status_t startPreview(); - virtual void stopPreview(); - virtual bool previewEnabled(); - virtual status_t storeMetaDataInBuffers(bool enabled); - virtual status_t startRecording(); - virtual void stopRecording(); - virtual bool recordingEnabled(); - virtual void releaseRecordingFrame(const sp& mem); - virtual status_t autoFocus(); - virtual status_t cancelAutoFocus(); - virtual status_t takePicture(int msgType); - virtual status_t setParameters(const String8& params); - virtual String8 getParameters() const; - virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2); - - /** - * Interface used by CameraService - */ - - Camera2Client(const sp& cameraService, - const sp& cameraClient, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid, - int deviceVersion); - - virtual ~Camera2Client(); - - status_t initialize(camera_module_t *module); - - virtual status_t dump(int fd, const Vector& args); - - /** - * Interface used by CameraDeviceBase - */ - - virtual void notifyAutoFocus(uint8_t newState, int triggerId); - virtual void notifyAutoExposure(uint8_t newState, int triggerId); - - /** - * Interface used by independent components of Camera2Client. 
- */ - - camera2::SharedParameters& getParameters(); - - int getPreviewStreamId() const; - int getCaptureStreamId() const; - int getCallbackStreamId() const; - int getRecordingStreamId() const; - int getZslStreamId() const; - - status_t registerFrameListener(int32_t minId, int32_t maxId, - wp listener); - status_t removeFrameListener(int32_t minId, int32_t maxId, - wp listener); - - status_t stopStream(); - - static size_t calculateBufferSize(int width, int height, - int format, int stride); - - static const int32_t kPreviewRequestIdStart = 10000000; - static const int32_t kPreviewRequestIdEnd = 20000000; - - static const int32_t kRecordingRequestIdStart = 20000000; - static const int32_t kRecordingRequestIdEnd = 30000000; - - static const int32_t kCaptureRequestIdStart = 30000000; - static const int32_t kCaptureRequestIdEnd = 40000000; - -private: - /** ICamera interface-related private members */ - typedef camera2::Parameters Parameters; - - status_t setPreviewWindowL(const sp& binder, - sp window); - status_t startPreviewL(Parameters ¶ms, bool restart); - void stopPreviewL(); - status_t startRecordingL(Parameters ¶ms, bool restart); - bool recordingEnabledL(); - - // Individual commands for sendCommand() - status_t commandStartSmoothZoomL(); - status_t commandStopSmoothZoomL(); - status_t commandSetDisplayOrientationL(int degrees); - status_t commandEnableShutterSoundL(bool enable); - status_t commandPlayRecordingSoundL(); - status_t commandStartFaceDetectionL(int type); - status_t commandStopFaceDetectionL(Parameters ¶ms); - status_t commandEnableFocusMoveMsgL(bool enable); - status_t commandPingL(); - status_t commandSetVideoBufferCountL(size_t count); - - // Current camera device configuration - camera2::SharedParameters mParameters; - - /** Camera device-related private members */ - - void setPreviewCallbackFlagL(Parameters ¶ms, int flag); - status_t updateRequests(Parameters ¶ms); - int mDeviceVersion; - - // Used with stream IDs - static const int NO_STREAM = -1; - - template - status_t updateProcessorStream(sp processor, Parameters params); - template - status_t updateProcessorStream(sp processor, Parameters params); - - sp mFrameProcessor; - - /* Preview/Recording related members */ - - sp mPreviewSurface; - sp mStreamingProcessor; - - /** Preview callback related members */ - - sp mCallbackProcessor; - - /* Still image capture related members */ - - sp mCaptureSequencer; - sp mJpegProcessor; - sp mZslProcessor; - sp mZslProcessorThread; - - /** Notification-related members */ - - bool mAfInMotion; - - /** Utility members */ - - // Wait until the camera device has received the latest control settings - status_t syncWithDevice(); -}; - -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/Camera2ClientBase.cpp b/services/camera/libcameraservice/Camera2ClientBase.cpp deleted file mode 100644 index 561dcfc..0000000 --- a/services/camera/libcameraservice/Camera2ClientBase.cpp +++ /dev/null @@ -1,335 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "Camera2ClientBase" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include - -#include -#include -#include -#include "camera2/Parameters.h" -#include "Camera2ClientBase.h" -#include "camera2/ProFrameProcessor.h" - -#include "photography/CameraDeviceClient.h" - -#include "CameraDeviceBase.h" -#include "CameraDeviceFactory.h" - -namespace android { -using namespace camera2; - -static int getCallingPid() { - return IPCThreadState::self()->getCallingPid(); -} - -// Interface used by CameraService - -template -Camera2ClientBase::Camera2ClientBase( - const sp& cameraService, - const sp& remoteCallback, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid): - TClientBase(cameraService, remoteCallback, clientPackageName, - cameraId, cameraFacing, clientPid, clientUid, servicePid), - mSharedCameraCallbacks(remoteCallback) -{ - ALOGI("Camera %d: Opened", cameraId); - - mDevice = CameraDeviceFactory::createDevice(cameraId); - LOG_ALWAYS_FATAL_IF(mDevice == 0, "Device should never be NULL here."); -} - -template -status_t Camera2ClientBase::checkPid(const char* checkLocation) - const { - - int callingPid = getCallingPid(); - if (callingPid == TClientBase::mClientPid) return NO_ERROR; - - ALOGE("%s: attempt to use a locked camera from a different process" - " (old pid %d, new pid %d)", checkLocation, TClientBase::mClientPid, callingPid); - return PERMISSION_DENIED; -} - -template -status_t Camera2ClientBase::initialize(camera_module_t *module) { - ATRACE_CALL(); - ALOGV("%s: Initializing client for camera %d", __FUNCTION__, - TClientBase::mCameraId); - status_t res; - - // Verify ops permissions - res = TClientBase::startCameraOps(); - if (res != OK) { - return res; - } - - if (mDevice == NULL) { - ALOGE("%s: Camera %d: No device connected", - __FUNCTION__, TClientBase::mCameraId); - return NO_INIT; - } - - res = mDevice->initialize(module); - if (res != OK) { - ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", - __FUNCTION__, TClientBase::mCameraId, strerror(-res), res); - return NO_INIT; - } - - res = mDevice->setNotifyCallback(this); - - return OK; -} - -template -Camera2ClientBase::~Camera2ClientBase() { - ATRACE_CALL(); - - TClientBase::mDestructionStarted = true; - - TClientBase::finishCameraOps(); - - disconnect(); - - ALOGI("Closed Camera %d", TClientBase::mCameraId); -} - -template -status_t Camera2ClientBase::dump(int fd, - const Vector& args) { - String8 result; - result.appendFormat("Camera2ClientBase[%d] (%p) PID: %d, dump:\n", - TClientBase::mCameraId, - TClientBase::getRemoteCallback()->asBinder().get(), - TClientBase::mClientPid); - result.append(" State: "); - - write(fd, result.string(), result.size()); - // TODO: print dynamic/request section from most recent requests - - return dumpDevice(fd, args); -} - -template -status_t Camera2ClientBase::dumpDevice( - int fd, - const Vector& args) { - String8 result; - - result = " Device dump:\n"; - write(fd, result.string(), result.size()); - - if (!mDevice.get()) { - result = " *** Device is detached\n"; - write(fd, result.string(), result.size()); - return NO_ERROR; - } - - status_t res = mDevice->dump(fd, args); - if (res != OK) { - result = String8::format(" Error dumping device: %s (%d)", - strerror(-res), res); - write(fd, result.string(), result.size()); - } - - return NO_ERROR; -} - -// 
ICameraClient2BaseUser interface - - -template -void Camera2ClientBase::disconnect() { - ATRACE_CALL(); - Mutex::Autolock icl(mBinderSerializationLock); - - // Allow both client and the media server to disconnect at all times - int callingPid = getCallingPid(); - if (callingPid != TClientBase::mClientPid && - callingPid != TClientBase::mServicePid) return; - - ALOGV("Camera %d: Shutting down", TClientBase::mCameraId); - - detachDevice(); - - CameraService::BasicClient::disconnect(); - - ALOGV("Camera %d: Shut down complete complete", TClientBase::mCameraId); -} - -template -void Camera2ClientBase::detachDevice() { - if (mDevice == 0) return; - mDevice->disconnect(); - - mDevice.clear(); - - ALOGV("Camera %d: Detach complete", TClientBase::mCameraId); -} - -template -status_t Camera2ClientBase::connect( - const sp& client) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - - if (TClientBase::mClientPid != 0 && - getCallingPid() != TClientBase::mClientPid) { - - ALOGE("%s: Camera %d: Connection attempt from pid %d; " - "current locked to pid %d", - __FUNCTION__, - TClientBase::mCameraId, - getCallingPid(), - TClientBase::mClientPid); - return BAD_VALUE; - } - - TClientBase::mClientPid = getCallingPid(); - - TClientBase::mRemoteCallback = client; - mSharedCameraCallbacks = client; - - return OK; -} - -/** Device-related methods */ - -template -void Camera2ClientBase::notifyError(int errorCode, int arg1, - int arg2) { - ALOGE("Error condition %d reported by HAL, arguments %d, %d", errorCode, - arg1, arg2); -} - -template -void Camera2ClientBase::notifyShutter(int frameNumber, - nsecs_t timestamp) { - (void)frameNumber; - (void)timestamp; - - ALOGV("%s: Shutter notification for frame %d at time %lld", __FUNCTION__, - frameNumber, timestamp); -} - -template -void Camera2ClientBase::notifyAutoFocus(uint8_t newState, - int triggerId) { - (void)newState; - (void)triggerId; - - ALOGV("%s: Autofocus state now %d, last trigger %d", - __FUNCTION__, newState, triggerId); - - typename SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - if (l.mRemoteCallback != 0) { - l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE, 1, 0); - } - if (l.mRemoteCallback != 0) { - l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, 1, 0); - } -} - -template -void Camera2ClientBase::notifyAutoExposure(uint8_t newState, - int triggerId) { - (void)newState; - (void)triggerId; - - ALOGV("%s: Autoexposure state now %d, last trigger %d", - __FUNCTION__, newState, triggerId); -} - -template -void Camera2ClientBase::notifyAutoWhitebalance(uint8_t newState, - int triggerId) { - (void)newState; - (void)triggerId; - - ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", - __FUNCTION__, newState, triggerId); -} - -template -int Camera2ClientBase::getCameraId() const { - return TClientBase::mCameraId; -} - -template -const sp& Camera2ClientBase::getCameraDevice() { - return mDevice; -} - -template -const sp& Camera2ClientBase::getCameraService() { - return TClientBase::mCameraService; -} - -template -Camera2ClientBase::SharedCameraCallbacks::Lock::Lock( - SharedCameraCallbacks &client) : - - mRemoteCallback(client.mRemoteCallback), - mSharedClient(client) { - - mSharedClient.mRemoteCallbackLock.lock(); -} - -template -Camera2ClientBase::SharedCameraCallbacks::Lock::~Lock() { - mSharedClient.mRemoteCallbackLock.unlock(); -} - -template -Camera2ClientBase::SharedCameraCallbacks::SharedCameraCallbacks( - const sp&client) : - - mRemoteCallback(client) { -} - -template 
-typename Camera2ClientBase::SharedCameraCallbacks& -Camera2ClientBase::SharedCameraCallbacks::operator=( - const sp&client) { - - Mutex::Autolock l(mRemoteCallbackLock); - mRemoteCallback = client; - return *this; -} - -template -void Camera2ClientBase::SharedCameraCallbacks::clear() { - Mutex::Autolock l(mRemoteCallbackLock); - mRemoteCallback.clear(); -} - -template class Camera2ClientBase; -template class Camera2ClientBase; -template class Camera2ClientBase; - -} // namespace android diff --git a/services/camera/libcameraservice/Camera2ClientBase.h b/services/camera/libcameraservice/Camera2ClientBase.h deleted file mode 100644 index c9a24d7..0000000 --- a/services/camera/libcameraservice/Camera2ClientBase.h +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_BASE_H -#define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_BASE_H - -#include "CameraDeviceBase.h" -#include "CameraService.h" - -namespace android { - -class IMemory; - -template -class Camera2ClientBase : - public TClientBase, - public CameraDeviceBase::NotificationListener -{ -public: - typedef typename TClientBase::TCamCallbacks TCamCallbacks; - - /** - * Base binder interface (see ICamera/IProCameraUser for details) - */ - virtual status_t connect(const sp& callbacks); - virtual void disconnect(); - - /** - * Interface used by CameraService - */ - - // TODO: too many params, move into a ClientArgs - Camera2ClientBase(const sp& cameraService, - const sp& remoteCallback, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid); - virtual ~Camera2ClientBase(); - - virtual status_t initialize(camera_module_t *module); - virtual status_t dump(int fd, const Vector& args); - - /** - * CameraDeviceBase::NotificationListener implementation - */ - - virtual void notifyError(int errorCode, int arg1, int arg2); - virtual void notifyShutter(int frameNumber, nsecs_t timestamp); - virtual void notifyAutoFocus(uint8_t newState, int triggerId); - virtual void notifyAutoExposure(uint8_t newState, int triggerId); - virtual void notifyAutoWhitebalance(uint8_t newState, - int triggerId); - - - int getCameraId() const; - const sp& - getCameraDevice(); - const sp& - getCameraService(); - - /** - * Interface used by independent components of CameraClient2Base. - */ - - // Simple class to ensure that access to TCamCallbacks is serialized - // by requiring mRemoteCallbackLock to be locked before access to - // mRemoteCallback is possible. 
-    class SharedCameraCallbacks {
-      public:
-        class Lock {
-          public:
-            Lock(SharedCameraCallbacks &client);
-            ~Lock();
-            sp<TCamCallbacks> &mRemoteCallback;
-          private:
-            SharedCameraCallbacks &mSharedClient;
-        };
-        SharedCameraCallbacks(const sp<TCamCallbacks>& client);
-        SharedCameraCallbacks& operator=(const sp<TCamCallbacks>& client);
-        void clear();
-      private:
-        sp<TCamCallbacks> mRemoteCallback;
-        mutable Mutex mRemoteCallbackLock;
-    } mSharedCameraCallbacks;
-
-protected:
-
-    virtual sp<IBinder> asBinderWrapper() {
-        return IInterface::asBinder();
-    }
-
-    virtual status_t dumpDevice(int fd, const Vector<String16>& args);
-
-    /** Binder client interface-related private members */
-
-    // Mutex that must be locked by methods implementing the binder client
-    // interface. Ensures serialization between incoming client calls.
-    // All methods in this class hierarchy that append 'L' to the name assume
-    // that mBinderSerializationLock is locked when they're called
-    mutable Mutex mBinderSerializationLock;
-
-    /** CameraDeviceBase instance wrapping HAL2+ entry */
-
-    sp<CameraDeviceBase> mDevice;
-
-    /** Utility members */
-
-    // Verify that caller is the owner of the camera
-    status_t checkPid(const char *checkLocation) const;
-
-    virtual void detachDevice();
-};
-
-}; // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/Camera2Device.cpp
deleted file mode 100644
index 710d0e9..0000000
--- a/services/camera/libcameraservice/Camera2Device.cpp
+++ /dev/null
@@ -1,1515 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "Camera2-Device"
-#define ATRACE_TAG ATRACE_TAG_CAMERA
-//#define LOG_NDEBUG 0
-//#define LOG_NNDEBUG 0 // Per-frame verbose logging
-
-#ifdef LOG_NNDEBUG
-#define ALOGVV(...) ALOGV(__VA_ARGS__)
-#else
-#define ALOGVV(...)
((void)0) -#endif - -#include -#include -#include -#include "Camera2Device.h" - -namespace android { - -Camera2Device::Camera2Device(int id): - mId(id), - mHal2Device(NULL) -{ - ATRACE_CALL(); - ALOGV("%s: Created device for camera %d", __FUNCTION__, id); -} - -Camera2Device::~Camera2Device() -{ - ATRACE_CALL(); - ALOGV("%s: Tearing down for camera id %d", __FUNCTION__, mId); - disconnect(); -} - -int Camera2Device::getId() const { - return mId; -} - -status_t Camera2Device::initialize(camera_module_t *module) -{ - ATRACE_CALL(); - ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId); - if (mHal2Device != NULL) { - ALOGE("%s: Already initialized!", __FUNCTION__); - return INVALID_OPERATION; - } - - status_t res; - char name[10]; - snprintf(name, sizeof(name), "%d", mId); - - camera2_device_t *device; - - res = module->common.methods->open(&module->common, name, - reinterpret_cast(&device)); - - if (res != OK) { - ALOGE("%s: Could not open camera %d: %s (%d)", __FUNCTION__, - mId, strerror(-res), res); - return res; - } - - if (device->common.version != CAMERA_DEVICE_API_VERSION_2_0) { - ALOGE("%s: Could not open camera %d: " - "Camera device is not version %x, reports %x instead", - __FUNCTION__, mId, CAMERA_DEVICE_API_VERSION_2_0, - device->common.version); - device->common.close(&device->common); - return BAD_VALUE; - } - - camera_info info; - res = module->get_camera_info(mId, &info); - if (res != OK ) return res; - - if (info.device_version != device->common.version) { - ALOGE("%s: HAL reporting mismatched camera_info version (%x)" - " and device version (%x).", __FUNCTION__, - device->common.version, info.device_version); - device->common.close(&device->common); - return BAD_VALUE; - } - - res = mRequestQueue.setConsumerDevice(device); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to connect request queue to device: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - device->common.close(&device->common); - return res; - } - res = mFrameQueue.setProducerDevice(device); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to connect frame queue to device: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - device->common.close(&device->common); - return res; - } - - res = device->ops->get_metadata_vendor_tag_ops(device, &mVendorTagOps); - if (res != OK ) { - ALOGE("%s: Camera %d: Unable to retrieve tag ops from device: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - device->common.close(&device->common); - return res; - } - res = set_camera_metadata_vendor_tag_ops(mVendorTagOps); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to set tag ops: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - device->common.close(&device->common); - return res; - } - res = device->ops->set_notify_callback(device, notificationCallback, - NULL); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to initialize notification callback!", - __FUNCTION__, mId); - device->common.close(&device->common); - return res; - } - - mDeviceInfo = info.static_camera_characteristics; - mHal2Device = device; - - return OK; -} - -status_t Camera2Device::disconnect() { - ATRACE_CALL(); - status_t res = OK; - if (mHal2Device) { - ALOGV("%s: Closing device for camera %d", __FUNCTION__, mId); - - int inProgressCount = mHal2Device->ops->get_in_progress_count(mHal2Device); - if (inProgressCount > 0) { - ALOGW("%s: Closing camera device %d with %d requests in flight!", - __FUNCTION__, mId, inProgressCount); - } - mReprocessStreams.clear(); - mStreams.clear(); - res = 
mHal2Device->common.close(&mHal2Device->common); - if (res != OK) { - ALOGE("%s: Could not close camera %d: %s (%d)", - __FUNCTION__, - mId, strerror(-res), res); - } - mHal2Device = NULL; - ALOGV("%s: Shutdown complete", __FUNCTION__); - } - return res; -} - -status_t Camera2Device::dump(int fd, const Vector& args) { - ATRACE_CALL(); - String8 result; - int detailLevel = 0; - int n = args.size(); - String16 detailOption("-d"); - for (int i = 0; i + 1 < n; i++) { - if (args[i] == detailOption) { - String8 levelStr(args[i+1]); - detailLevel = atoi(levelStr.string()); - } - } - - result.appendFormat(" Camera2Device[%d] dump (detail level %d):\n", - mId, detailLevel); - - if (detailLevel > 0) { - result = " Request queue contents:\n"; - write(fd, result.string(), result.size()); - mRequestQueue.dump(fd, args); - - result = " Frame queue contents:\n"; - write(fd, result.string(), result.size()); - mFrameQueue.dump(fd, args); - } - - result = " Active streams:\n"; - write(fd, result.string(), result.size()); - for (StreamList::iterator s = mStreams.begin(); s != mStreams.end(); s++) { - (*s)->dump(fd, args); - } - - result = " HAL device dump:\n"; - write(fd, result.string(), result.size()); - - status_t res; - res = mHal2Device->ops->dump(mHal2Device, fd); - - return res; -} - -const CameraMetadata& Camera2Device::info() const { - ALOGVV("%s: E", __FUNCTION__); - - return mDeviceInfo; -} - -status_t Camera2Device::capture(CameraMetadata &request) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - - mRequestQueue.enqueue(request.release()); - return OK; -} - - -status_t Camera2Device::setStreamingRequest(const CameraMetadata &request) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - CameraMetadata streamRequest(request); - return mRequestQueue.setStreamSlot(streamRequest.release()); -} - -status_t Camera2Device::clearStreamingRequest() { - ATRACE_CALL(); - return mRequestQueue.setStreamSlot(NULL); -} - -status_t Camera2Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) { - ATRACE_CALL(); - return mRequestQueue.waitForDequeue(requestId, timeout); -} - -status_t Camera2Device::createStream(sp consumer, - uint32_t width, uint32_t height, int format, size_t size, int *id) { - ATRACE_CALL(); - status_t res; - ALOGV("%s: E", __FUNCTION__); - - sp stream = new StreamAdapter(mHal2Device); - - res = stream->connectToDevice(consumer, width, height, format, size); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to create stream (%d x %d, format %x):" - "%s (%d)", - __FUNCTION__, mId, width, height, format, strerror(-res), res); - return res; - } - - *id = stream->getId(); - - mStreams.push_back(stream); - return OK; -} - -status_t Camera2Device::createReprocessStreamFromStream(int outputId, int *id) { - ATRACE_CALL(); - status_t res; - ALOGV("%s: E", __FUNCTION__); - - bool found = false; - StreamList::iterator streamI; - for (streamI = mStreams.begin(); - streamI != mStreams.end(); streamI++) { - if ((*streamI)->getId() == outputId) { - found = true; - break; - } - } - if (!found) { - ALOGE("%s: Camera %d: Output stream %d doesn't exist; can't create " - "reprocess stream from it!", __FUNCTION__, mId, outputId); - return BAD_VALUE; - } - - sp stream = new ReprocessStreamAdapter(mHal2Device); - - res = stream->connectToDevice((*streamI)); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to create reprocessing stream from "\ - "stream %d: %s (%d)", __FUNCTION__, mId, outputId, - strerror(-res), res); - return res; - } - - *id = stream->getId(); - - 
mReprocessStreams.push_back(stream); - return OK; -} - - -status_t Camera2Device::getStreamInfo(int id, - uint32_t *width, uint32_t *height, uint32_t *format) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - bool found = false; - StreamList::iterator streamI; - for (streamI = mStreams.begin(); - streamI != mStreams.end(); streamI++) { - if ((*streamI)->getId() == id) { - found = true; - break; - } - } - if (!found) { - ALOGE("%s: Camera %d: Stream %d does not exist", - __FUNCTION__, mId, id); - return BAD_VALUE; - } - - if (width) *width = (*streamI)->getWidth(); - if (height) *height = (*streamI)->getHeight(); - if (format) *format = (*streamI)->getFormat(); - - return OK; -} - -status_t Camera2Device::setStreamTransform(int id, - int transform) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - bool found = false; - StreamList::iterator streamI; - for (streamI = mStreams.begin(); - streamI != mStreams.end(); streamI++) { - if ((*streamI)->getId() == id) { - found = true; - break; - } - } - if (!found) { - ALOGE("%s: Camera %d: Stream %d does not exist", - __FUNCTION__, mId, id); - return BAD_VALUE; - } - - return (*streamI)->setTransform(transform); -} - -status_t Camera2Device::deleteStream(int id) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - bool found = false; - for (StreamList::iterator streamI = mStreams.begin(); - streamI != mStreams.end(); streamI++) { - if ((*streamI)->getId() == id) { - status_t res = (*streamI)->release(); - if (res != OK) { - ALOGE("%s: Unable to release stream %d from HAL device: " - "%s (%d)", __FUNCTION__, id, strerror(-res), res); - return res; - } - mStreams.erase(streamI); - found = true; - break; - } - } - if (!found) { - ALOGE("%s: Camera %d: Unable to find stream %d to delete", - __FUNCTION__, mId, id); - return BAD_VALUE; - } - return OK; -} - -status_t Camera2Device::deleteReprocessStream(int id) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - bool found = false; - for (ReprocessStreamList::iterator streamI = mReprocessStreams.begin(); - streamI != mReprocessStreams.end(); streamI++) { - if ((*streamI)->getId() == id) { - status_t res = (*streamI)->release(); - if (res != OK) { - ALOGE("%s: Unable to release reprocess stream %d from " - "HAL device: %s (%d)", __FUNCTION__, id, - strerror(-res), res); - return res; - } - mReprocessStreams.erase(streamI); - found = true; - break; - } - } - if (!found) { - ALOGE("%s: Camera %d: Unable to find stream %d to delete", - __FUNCTION__, mId, id); - return BAD_VALUE; - } - return OK; -} - - -status_t Camera2Device::createDefaultRequest(int templateId, - CameraMetadata *request) { - ATRACE_CALL(); - status_t err; - ALOGV("%s: E", __FUNCTION__); - camera_metadata_t *rawRequest; - err = mHal2Device->ops->construct_default_request( - mHal2Device, templateId, &rawRequest); - request->acquire(rawRequest); - return err; -} - -status_t Camera2Device::waitUntilDrained() { - ATRACE_CALL(); - static const uint32_t kSleepTime = 50000; // 50 ms - static const uint32_t kMaxSleepTime = 10000000; // 10 s - ALOGV("%s: Camera %d: Starting wait", __FUNCTION__, mId); - if (mRequestQueue.getBufferCount() == - CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS) return INVALID_OPERATION; - - // TODO: Set up notifications from HAL, instead of sleeping here - uint32_t totalTime = 0; - while (mHal2Device->ops->get_in_progress_count(mHal2Device) > 0) { - usleep(kSleepTime); - totalTime += kSleepTime; - if (totalTime > kMaxSleepTime) { - ALOGE("%s: Waited %d us, %d requests still in flight", __FUNCTION__, - totalTime, 
mHal2Device->ops->get_in_progress_count(mHal2Device));
-            return TIMED_OUT;
-        }
-    }
-    ALOGV("%s: Camera %d: HAL is idle", __FUNCTION__, mId);
-    return OK;
-}
-
-status_t Camera2Device::setNotifyCallback(NotificationListener *listener) {
-    ATRACE_CALL();
-    status_t res;
-    res = mHal2Device->ops->set_notify_callback(mHal2Device, notificationCallback,
-            reinterpret_cast<void*>(listener) );
-    if (res != OK) {
-        ALOGE("%s: Unable to set notification callback!", __FUNCTION__);
-    }
-    return res;
-}
-
-bool Camera2Device::willNotify3A() {
-    return true;
-}
-
-void Camera2Device::notificationCallback(int32_t msg_type,
-        int32_t ext1,
-        int32_t ext2,
-        int32_t ext3,
-        void *user) {
-    ATRACE_CALL();
-    NotificationListener *listener = reinterpret_cast<NotificationListener*>(user);
-    ALOGV("%s: Notification %d, arguments %d, %d, %d", __FUNCTION__, msg_type,
-            ext1, ext2, ext3);
-    if (listener != NULL) {
-        switch (msg_type) {
-            case CAMERA2_MSG_ERROR:
-                listener->notifyError(ext1, ext2, ext3);
-                break;
-            case CAMERA2_MSG_SHUTTER: {
-                nsecs_t timestamp = (nsecs_t)ext2 | ((nsecs_t)(ext3) << 32 );
-                listener->notifyShutter(ext1, timestamp);
-                break;
-            }
-            case CAMERA2_MSG_AUTOFOCUS:
-                listener->notifyAutoFocus(ext1, ext2);
-                break;
-            case CAMERA2_MSG_AUTOEXPOSURE:
-                listener->notifyAutoExposure(ext1, ext2);
-                break;
-            case CAMERA2_MSG_AUTOWB:
-                listener->notifyAutoWhitebalance(ext1, ext2);
-                break;
-            default:
-                ALOGE("%s: Unknown notification %d (arguments %d, %d, %d)!",
-                        __FUNCTION__, msg_type, ext1, ext2, ext3);
-        }
-    }
-}
-
-status_t Camera2Device::waitForNextFrame(nsecs_t timeout) {
-    return mFrameQueue.waitForBuffer(timeout);
-}
-
-status_t Camera2Device::getNextFrame(CameraMetadata *frame) {
-    ATRACE_CALL();
-    status_t res;
-    camera_metadata_t *rawFrame;
-    res = mFrameQueue.dequeue(&rawFrame);
-    if (rawFrame == NULL) {
-        return NOT_ENOUGH_DATA;
-    } else if (res == OK) {
-        frame->acquire(rawFrame);
-    }
-    return res;
-}
-
-status_t Camera2Device::triggerAutofocus(uint32_t id) {
-    ATRACE_CALL();
-    status_t res;
-    ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id);
-    res = mHal2Device->ops->trigger_action(mHal2Device,
-            CAMERA2_TRIGGER_AUTOFOCUS, id, 0);
-    if (res != OK) {
-        ALOGE("%s: Error triggering autofocus (id %d)",
-                __FUNCTION__, id);
-    }
-    return res;
-}
-
-status_t Camera2Device::triggerCancelAutofocus(uint32_t id) {
-    ATRACE_CALL();
-    status_t res;
-    ALOGV("%s: Canceling autofocus, id %d", __FUNCTION__, id);
-    res = mHal2Device->ops->trigger_action(mHal2Device,
-            CAMERA2_TRIGGER_CANCEL_AUTOFOCUS, id, 0);
-    if (res != OK) {
-        ALOGE("%s: Error canceling autofocus (id %d)",
-                __FUNCTION__, id);
-    }
-    return res;
-}
-
-status_t Camera2Device::triggerPrecaptureMetering(uint32_t id) {
-    ATRACE_CALL();
-    status_t res;
-    ALOGV("%s: Triggering precapture metering, id %d", __FUNCTION__, id);
-    res = mHal2Device->ops->trigger_action(mHal2Device,
-            CAMERA2_TRIGGER_PRECAPTURE_METERING, id, 0);
-    if (res != OK) {
-        ALOGE("%s: Error triggering precapture metering (id %d)",
-                __FUNCTION__, id);
-    }
-    return res;
-}
-
-status_t Camera2Device::pushReprocessBuffer(int reprocessStreamId,
-        buffer_handle_t *buffer, wp<BufferReleasedListener> listener) {
-    ATRACE_CALL();
-    ALOGV("%s: E", __FUNCTION__);
-    bool found = false;
-    status_t res = OK;
-    for (ReprocessStreamList::iterator streamI = mReprocessStreams.begin();
-            streamI != mReprocessStreams.end(); streamI++) {
-        if ((*streamI)->getId() == reprocessStreamId) {
-            res = (*streamI)->pushIntoStream(buffer, listener);
-            if (res != OK) {
-                ALOGE("%s: Unable to push buffer to reprocess stream %d: %s (%d)",
-
__FUNCTION__, reprocessStreamId, strerror(-res), res); - return res; - } - found = true; - break; - } - } - if (!found) { - ALOGE("%s: Camera %d: Unable to find reprocess stream %d", - __FUNCTION__, mId, reprocessStreamId); - res = BAD_VALUE; - } - return res; -} - -/** - * Camera2Device::MetadataQueue - */ - -Camera2Device::MetadataQueue::MetadataQueue(): - mHal2Device(NULL), - mFrameCount(0), - mLatestRequestId(0), - mCount(0), - mStreamSlotCount(0), - mSignalConsumer(true) -{ - ATRACE_CALL(); - camera2_request_queue_src_ops::dequeue_request = consumer_dequeue; - camera2_request_queue_src_ops::request_count = consumer_buffer_count; - camera2_request_queue_src_ops::free_request = consumer_free; - - camera2_frame_queue_dst_ops::dequeue_frame = producer_dequeue; - camera2_frame_queue_dst_ops::cancel_frame = producer_cancel; - camera2_frame_queue_dst_ops::enqueue_frame = producer_enqueue; -} - -Camera2Device::MetadataQueue::~MetadataQueue() { - ATRACE_CALL(); - Mutex::Autolock l(mMutex); - freeBuffers(mEntries.begin(), mEntries.end()); - freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); -} - -// Connect to camera2 HAL as consumer (input requests/reprocessing) -status_t Camera2Device::MetadataQueue::setConsumerDevice(camera2_device_t *d) { - ATRACE_CALL(); - status_t res; - res = d->ops->set_request_queue_src_ops(d, - this); - if (res != OK) return res; - mHal2Device = d; - return OK; -} - -status_t Camera2Device::MetadataQueue::setProducerDevice(camera2_device_t *d) { - ATRACE_CALL(); - status_t res; - res = d->ops->set_frame_queue_dst_ops(d, - this); - return res; -} - -// Real interfaces -status_t Camera2Device::MetadataQueue::enqueue(camera_metadata_t *buf) { - ATRACE_CALL(); - ALOGVV("%s: E", __FUNCTION__); - Mutex::Autolock l(mMutex); - - mCount++; - mEntries.push_back(buf); - - return signalConsumerLocked(); -} - -int Camera2Device::MetadataQueue::getBufferCount() { - ATRACE_CALL(); - Mutex::Autolock l(mMutex); - if (mStreamSlotCount > 0) { - return CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS; - } - return mCount; -} - -status_t Camera2Device::MetadataQueue::dequeue(camera_metadata_t **buf, - bool incrementCount) -{ - ATRACE_CALL(); - ALOGVV("%s: E", __FUNCTION__); - status_t res; - Mutex::Autolock l(mMutex); - - if (mCount == 0) { - if (mStreamSlotCount == 0) { - ALOGVV("%s: Empty", __FUNCTION__); - *buf = NULL; - mSignalConsumer = true; - return OK; - } - ALOGVV("%s: Streaming %d frames to queue", __FUNCTION__, - mStreamSlotCount); - - for (List::iterator slotEntry = mStreamSlot.begin(); - slotEntry != mStreamSlot.end(); - slotEntry++ ) { - size_t entries = get_camera_metadata_entry_count(*slotEntry); - size_t dataBytes = get_camera_metadata_data_count(*slotEntry); - - camera_metadata_t *copy = - allocate_camera_metadata(entries, dataBytes); - append_camera_metadata(copy, *slotEntry); - mEntries.push_back(copy); - } - mCount = mStreamSlotCount; - } - ALOGVV("MetadataQueue: deque (%d buffers)", mCount); - camera_metadata_t *b = *(mEntries.begin()); - mEntries.erase(mEntries.begin()); - - if (incrementCount) { - ATRACE_INT("cam2_request", mFrameCount); - camera_metadata_entry_t frameCount; - res = find_camera_metadata_entry(b, - ANDROID_REQUEST_FRAME_COUNT, - &frameCount); - if (res != OK) { - ALOGE("%s: Unable to add frame count: %s (%d)", - __FUNCTION__, strerror(-res), res); - } else { - *frameCount.data.i32 = mFrameCount; - } - mFrameCount++; - } - - // Check for request ID, and if present, signal waiters. 
- camera_metadata_entry_t requestId; - res = find_camera_metadata_entry(b, - ANDROID_REQUEST_ID, - &requestId); - if (res == OK) { - mLatestRequestId = requestId.data.i32[0]; - mNewRequestId.signal(); - } - - *buf = b; - mCount--; - - return OK; -} - -status_t Camera2Device::MetadataQueue::waitForBuffer(nsecs_t timeout) -{ - Mutex::Autolock l(mMutex); - status_t res; - while (mCount == 0) { - res = notEmpty.waitRelative(mMutex,timeout); - if (res != OK) return res; - } - return OK; -} - -status_t Camera2Device::MetadataQueue::waitForDequeue(int32_t id, - nsecs_t timeout) { - Mutex::Autolock l(mMutex); - status_t res; - while (mLatestRequestId != id) { - nsecs_t startTime = systemTime(); - - res = mNewRequestId.waitRelative(mMutex, timeout); - if (res != OK) return res; - - timeout -= (systemTime() - startTime); - } - - return OK; -} - -status_t Camera2Device::MetadataQueue::setStreamSlot(camera_metadata_t *buf) -{ - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock l(mMutex); - if (buf == NULL) { - freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); - mStreamSlotCount = 0; - return OK; - } - camera_metadata_t *buf2 = clone_camera_metadata(buf); - if (!buf2) { - ALOGE("%s: Unable to clone metadata buffer!", __FUNCTION__); - return NO_MEMORY; - } - - if (mStreamSlotCount > 1) { - List::iterator deleter = ++mStreamSlot.begin(); - freeBuffers(++mStreamSlot.begin(), mStreamSlot.end()); - mStreamSlotCount = 1; - } - if (mStreamSlotCount == 1) { - free_camera_metadata( *(mStreamSlot.begin()) ); - *(mStreamSlot.begin()) = buf2; - } else { - mStreamSlot.push_front(buf2); - mStreamSlotCount = 1; - } - return signalConsumerLocked(); -} - -status_t Camera2Device::MetadataQueue::setStreamSlot( - const List &bufs) -{ - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock l(mMutex); - - if (mStreamSlotCount > 0) { - freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); - } - mStreamSlotCount = 0; - for (List::const_iterator r = bufs.begin(); - r != bufs.end(); r++) { - camera_metadata_t *r2 = clone_camera_metadata(*r); - if (!r2) { - ALOGE("%s: Unable to clone metadata buffer!", __FUNCTION__); - return NO_MEMORY; - } - mStreamSlot.push_back(r2); - mStreamSlotCount++; - } - return signalConsumerLocked(); -} - -status_t Camera2Device::MetadataQueue::dump(int fd, - const Vector& /*args*/) { - ATRACE_CALL(); - String8 result; - status_t notLocked; - notLocked = mMutex.tryLock(); - if (notLocked) { - result.append(" (Unable to lock queue mutex)\n"); - } - result.appendFormat(" Current frame number: %d\n", mFrameCount); - if (mStreamSlotCount == 0) { - result.append(" Stream slot: Empty\n"); - write(fd, result.string(), result.size()); - } else { - result.appendFormat(" Stream slot: %d entries\n", - mStreamSlot.size()); - int i = 0; - for (List::iterator r = mStreamSlot.begin(); - r != mStreamSlot.end(); r++) { - result = String8::format(" Stream slot buffer %d:\n", i); - write(fd, result.string(), result.size()); - dump_indented_camera_metadata(*r, fd, 2, 10); - i++; - } - } - if (mEntries.size() == 0) { - result = " Main queue is empty\n"; - write(fd, result.string(), result.size()); - } else { - result = String8::format(" Main queue has %d entries:\n", - mEntries.size()); - int i = 0; - for (List::iterator r = mEntries.begin(); - r != mEntries.end(); r++) { - result = String8::format(" Queue entry %d:\n", i); - write(fd, result.string(), result.size()); - dump_indented_camera_metadata(*r, fd, 2, 10); - i++; - } - } - - if (notLocked == 0) { - mMutex.unlock(); - } - - return OK; -} 
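
The stream-slot behaviour removed above — an empty request queue refills itself with copies of the repeating request(s) before dequeuing, so a streaming request never runs dry — can be summarised by a minimal standalone sketch. The RepeatingQueue type and the std::string payload standing in for camera_metadata_t* are hypothetical, not part of the original sources:

#include <cstdio>
#include <list>
#include <mutex>
#include <string>
#include <utility>

// Hypothetical sketch of the MetadataQueue "stream slot" idea.
struct RepeatingQueue {
    std::mutex lock;
    std::list<std::string> entries;     // one-shot requests (the "main queue")
    std::list<std::string> streamSlot;  // repeating request template(s)

    void enqueue(std::string e) {
        std::lock_guard<std::mutex> l(lock);
        entries.push_back(std::move(e));
    }
    void setStreamSlot(std::list<std::string> repeating) {
        std::lock_guard<std::mutex> l(lock);
        streamSlot = std::move(repeating);
    }
    // On an empty main queue, copy the stream slot in before handing one out.
    bool dequeue(std::string *out) {
        std::lock_guard<std::mutex> l(lock);
        if (entries.empty()) {
            if (streamSlot.empty()) return false;   // nothing repeating either
            entries.insert(entries.end(), streamSlot.begin(), streamSlot.end());
        }
        *out = entries.front();
        entries.pop_front();
        return true;
    }
};

int main() {
    RepeatingQueue q;
    q.setStreamSlot({"preview-request"});
    q.enqueue("capture-request");
    std::string r;
    for (int i = 0; i < 3 && q.dequeue(&r); i++)
        std::printf("dequeued: %s\n", r.c_str());   // capture once, then preview repeats
    return 0;
}

Copying rather than moving the slot entries mirrors the deleted code, which clones each stream-slot buffer into the main queue on every refill.
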
- -status_t Camera2Device::MetadataQueue::signalConsumerLocked() { - ATRACE_CALL(); - status_t res = OK; - notEmpty.signal(); - if (mSignalConsumer && mHal2Device != NULL) { - mSignalConsumer = false; - - mMutex.unlock(); - ALOGV("%s: Signaling consumer", __FUNCTION__); - res = mHal2Device->ops->notify_request_queue_not_empty(mHal2Device); - mMutex.lock(); - } - return res; -} - -status_t Camera2Device::MetadataQueue::freeBuffers( - List::iterator start, - List::iterator end) -{ - ATRACE_CALL(); - while (start != end) { - free_camera_metadata(*start); - start = mStreamSlot.erase(start); - } - return OK; -} - -Camera2Device::MetadataQueue* Camera2Device::MetadataQueue::getInstance( - const camera2_request_queue_src_ops_t *q) -{ - const MetadataQueue* cmq = static_cast(q); - return const_cast(cmq); -} - -Camera2Device::MetadataQueue* Camera2Device::MetadataQueue::getInstance( - const camera2_frame_queue_dst_ops_t *q) -{ - const MetadataQueue* cmq = static_cast(q); - return const_cast(cmq); -} - -int Camera2Device::MetadataQueue::consumer_buffer_count( - const camera2_request_queue_src_ops_t *q) -{ - MetadataQueue *queue = getInstance(q); - return queue->getBufferCount(); -} - -int Camera2Device::MetadataQueue::consumer_dequeue( - const camera2_request_queue_src_ops_t *q, - camera_metadata_t **buffer) -{ - MetadataQueue *queue = getInstance(q); - return queue->dequeue(buffer, true); -} - -int Camera2Device::MetadataQueue::consumer_free( - const camera2_request_queue_src_ops_t *q, - camera_metadata_t *old_buffer) -{ - ATRACE_CALL(); - MetadataQueue *queue = getInstance(q); - (void)queue; - free_camera_metadata(old_buffer); - return OK; -} - -int Camera2Device::MetadataQueue::producer_dequeue( - const camera2_frame_queue_dst_ops_t * /*q*/, - size_t entries, size_t bytes, - camera_metadata_t **buffer) -{ - ATRACE_CALL(); - camera_metadata_t *new_buffer = - allocate_camera_metadata(entries, bytes); - if (new_buffer == NULL) return NO_MEMORY; - *buffer = new_buffer; - return OK; -} - -int Camera2Device::MetadataQueue::producer_cancel( - const camera2_frame_queue_dst_ops_t * /*q*/, - camera_metadata_t *old_buffer) -{ - ATRACE_CALL(); - free_camera_metadata(old_buffer); - return OK; -} - -int Camera2Device::MetadataQueue::producer_enqueue( - const camera2_frame_queue_dst_ops_t *q, - camera_metadata_t *filled_buffer) -{ - MetadataQueue *queue = getInstance(q); - return queue->enqueue(filled_buffer); -} - -/** - * Camera2Device::StreamAdapter - */ - -#ifndef container_of -#define container_of(ptr, type, member) \ - (type *)((char*)(ptr) - offsetof(type, member)) -#endif - -Camera2Device::StreamAdapter::StreamAdapter(camera2_device_t *d): - mState(RELEASED), - mHal2Device(d), - mId(-1), - mWidth(0), mHeight(0), mFormat(0), mSize(0), mUsage(0), - mMaxProducerBuffers(0), mMaxConsumerBuffers(0), - mTotalBuffers(0), - mFormatRequested(0), - mActiveBuffers(0), - mFrameCount(0), - mLastTimestamp(0) -{ - camera2_stream_ops::dequeue_buffer = dequeue_buffer; - camera2_stream_ops::enqueue_buffer = enqueue_buffer; - camera2_stream_ops::cancel_buffer = cancel_buffer; - camera2_stream_ops::set_crop = set_crop; -} - -Camera2Device::StreamAdapter::~StreamAdapter() { - ATRACE_CALL(); - if (mState != RELEASED) { - release(); - } -} - -status_t Camera2Device::StreamAdapter::connectToDevice( - sp consumer, - uint32_t width, uint32_t height, int format, size_t size) { - ATRACE_CALL(); - status_t res; - ALOGV("%s: E", __FUNCTION__); - - if (mState != RELEASED) return INVALID_OPERATION; - if (consumer == NULL) { - 
ALOGE("%s: Null consumer passed to stream adapter", __FUNCTION__); - return BAD_VALUE; - } - - ALOGV("%s: New stream parameters %d x %d, format 0x%x, size %d", - __FUNCTION__, width, height, format, size); - - mConsumerInterface = consumer; - mWidth = width; - mHeight = height; - mSize = (format == HAL_PIXEL_FORMAT_BLOB) ? size : 0; - mFormatRequested = format; - - // Allocate device-side stream interface - - uint32_t id; - uint32_t formatActual; - uint32_t usage; - uint32_t maxBuffers = 2; - res = mHal2Device->ops->allocate_stream(mHal2Device, - mWidth, mHeight, mFormatRequested, getStreamOps(), - &id, &formatActual, &usage, &maxBuffers); - if (res != OK) { - ALOGE("%s: Device stream allocation failed: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - - ALOGV("%s: Allocated stream id %d, actual format 0x%x, " - "usage 0x%x, producer wants %d buffers", __FUNCTION__, - id, formatActual, usage, maxBuffers); - - mId = id; - mFormat = formatActual; - mUsage = usage; - mMaxProducerBuffers = maxBuffers; - - mState = ALLOCATED; - - // Configure consumer-side ANativeWindow interface - res = native_window_api_connect(mConsumerInterface.get(), - NATIVE_WINDOW_API_CAMERA); - if (res != OK) { - ALOGE("%s: Unable to connect to native window for stream %d", - __FUNCTION__, mId); - - return res; - } - - mState = CONNECTED; - - res = native_window_set_usage(mConsumerInterface.get(), mUsage); - if (res != OK) { - ALOGE("%s: Unable to configure usage %08x for stream %d", - __FUNCTION__, mUsage, mId); - return res; - } - - res = native_window_set_scaling_mode(mConsumerInterface.get(), - NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); - if (res != OK) { - ALOGE("%s: Unable to configure stream scaling: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - - res = setTransform(0); - if (res != OK) { - return res; - } - - if (mFormat == HAL_PIXEL_FORMAT_BLOB) { - res = native_window_set_buffers_geometry(mConsumerInterface.get(), - mSize, 1, mFormat); - if (res != OK) { - ALOGE("%s: Unable to configure compressed stream buffer geometry" - " %d x %d, size %d for stream %d", - __FUNCTION__, mWidth, mHeight, mSize, mId); - return res; - } - } else { - res = native_window_set_buffers_geometry(mConsumerInterface.get(), - mWidth, mHeight, mFormat); - if (res != OK) { - ALOGE("%s: Unable to configure stream buffer geometry" - " %d x %d, format 0x%x for stream %d", - __FUNCTION__, mWidth, mHeight, mFormat, mId); - return res; - } - } - - int maxConsumerBuffers; - res = mConsumerInterface->query(mConsumerInterface.get(), - NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers); - if (res != OK) { - ALOGE("%s: Unable to query consumer undequeued" - " buffer count for stream %d", __FUNCTION__, mId); - return res; - } - mMaxConsumerBuffers = maxConsumerBuffers; - - ALOGV("%s: Consumer wants %d buffers", __FUNCTION__, - mMaxConsumerBuffers); - - mTotalBuffers = mMaxConsumerBuffers + mMaxProducerBuffers; - mActiveBuffers = 0; - mFrameCount = 0; - mLastTimestamp = 0; - - res = native_window_set_buffer_count(mConsumerInterface.get(), - mTotalBuffers); - if (res != OK) { - ALOGE("%s: Unable to set buffer count for stream %d", - __FUNCTION__, mId); - return res; - } - - // Register allocated buffers with HAL device - buffer_handle_t *buffers = new buffer_handle_t[mTotalBuffers]; - ANativeWindowBuffer **anwBuffers = new ANativeWindowBuffer*[mTotalBuffers]; - uint32_t bufferIdx = 0; - for (; bufferIdx < mTotalBuffers; bufferIdx++) { - res = 
native_window_dequeue_buffer_and_wait(mConsumerInterface.get(), - &anwBuffers[bufferIdx]); - if (res != OK) { - ALOGE("%s: Unable to dequeue buffer %d for initial registration for " - "stream %d", __FUNCTION__, bufferIdx, mId); - goto cleanUpBuffers; - } - - buffers[bufferIdx] = anwBuffers[bufferIdx]->handle; - ALOGV("%s: Buffer %p allocated", __FUNCTION__, (void*)buffers[bufferIdx]); - } - - ALOGV("%s: Registering %d buffers with camera HAL", __FUNCTION__, mTotalBuffers); - res = mHal2Device->ops->register_stream_buffers(mHal2Device, - mId, - mTotalBuffers, - buffers); - if (res != OK) { - ALOGE("%s: Unable to register buffers with HAL device for stream %d", - __FUNCTION__, mId); - } else { - mState = ACTIVE; - } - -cleanUpBuffers: - ALOGV("%s: Cleaning up %d buffers", __FUNCTION__, bufferIdx); - for (uint32_t i = 0; i < bufferIdx; i++) { - res = mConsumerInterface->cancelBuffer(mConsumerInterface.get(), - anwBuffers[i], -1); - if (res != OK) { - ALOGE("%s: Unable to cancel buffer %d after registration", - __FUNCTION__, i); - } - } - delete[] anwBuffers; - delete[] buffers; - - return res; -} - -status_t Camera2Device::StreamAdapter::release() { - ATRACE_CALL(); - status_t res; - ALOGV("%s: Releasing stream %d (%d x %d, format %d)", __FUNCTION__, mId, - mWidth, mHeight, mFormat); - if (mState >= ALLOCATED) { - res = mHal2Device->ops->release_stream(mHal2Device, mId); - if (res != OK) { - ALOGE("%s: Unable to release stream %d", - __FUNCTION__, mId); - return res; - } - } - if (mState >= CONNECTED) { - res = native_window_api_disconnect(mConsumerInterface.get(), - NATIVE_WINDOW_API_CAMERA); - - /* this is not an error. if client calling process dies, - the window will also die and all calls to it will return - DEAD_OBJECT, thus it's already "disconnected" */ - if (res == DEAD_OBJECT) { - ALOGW("%s: While disconnecting stream %d from native window, the" - " native window died from under us", __FUNCTION__, mId); - } - else if (res != OK) { - ALOGE("%s: Unable to disconnect stream %d from native window (error %d %s)", - __FUNCTION__, mId, res, strerror(-res)); - return res; - } - } - mId = -1; - mState = RELEASED; - return OK; -} - -status_t Camera2Device::StreamAdapter::setTransform(int transform) { - ATRACE_CALL(); - status_t res; - if (mState < CONNECTED) { - ALOGE("%s: Cannot set transform on unconnected stream", __FUNCTION__); - return INVALID_OPERATION; - } - res = native_window_set_buffers_transform(mConsumerInterface.get(), - transform); - if (res != OK) { - ALOGE("%s: Unable to configure stream transform to %x: %s (%d)", - __FUNCTION__, transform, strerror(-res), res); - } - return res; -} - -status_t Camera2Device::StreamAdapter::dump(int fd, - const Vector& /*args*/) { - ATRACE_CALL(); - String8 result = String8::format(" Stream %d: %d x %d, format 0x%x\n", - mId, mWidth, mHeight, mFormat); - result.appendFormat(" size %d, usage 0x%x, requested format 0x%x\n", - mSize, mUsage, mFormatRequested); - result.appendFormat(" total buffers: %d, dequeued buffers: %d\n", - mTotalBuffers, mActiveBuffers); - result.appendFormat(" frame count: %d, last timestamp %lld\n", - mFrameCount, mLastTimestamp); - write(fd, result.string(), result.size()); - return OK; -} - -const camera2_stream_ops *Camera2Device::StreamAdapter::getStreamOps() { - return static_cast(this); -} - -ANativeWindow* Camera2Device::StreamAdapter::toANW( - const camera2_stream_ops_t *w) { - return static_cast(w)->mConsumerInterface.get(); -} - -int Camera2Device::StreamAdapter::dequeue_buffer(const camera2_stream_ops_t *w, - 
buffer_handle_t** buffer) { - ATRACE_CALL(); - int res; - StreamAdapter* stream = - const_cast(static_cast(w)); - if (stream->mState != ACTIVE) { - ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState); - return INVALID_OPERATION; - } - - ANativeWindow *a = toANW(w); - ANativeWindowBuffer* anb; - res = native_window_dequeue_buffer_and_wait(a, &anb); - if (res != OK) { - ALOGE("Stream %d dequeue: Error from native_window: %s (%d)", stream->mId, - strerror(-res), res); - return res; - } - - *buffer = &(anb->handle); - stream->mActiveBuffers++; - - ALOGVV("Stream %d dequeue: Buffer %p dequeued", stream->mId, (void*)(**buffer)); - return res; -} - -int Camera2Device::StreamAdapter::enqueue_buffer(const camera2_stream_ops_t* w, - int64_t timestamp, - buffer_handle_t* buffer) { - ATRACE_CALL(); - StreamAdapter *stream = - const_cast(static_cast(w)); - stream->mFrameCount++; - ALOGVV("Stream %d enqueue: Frame %d (%p) captured at %lld ns", - stream->mId, stream->mFrameCount, (void*)(*buffer), timestamp); - int state = stream->mState; - if (state != ACTIVE) { - ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state); - return INVALID_OPERATION; - } - ANativeWindow *a = toANW(w); - status_t err; - - err = native_window_set_buffers_timestamp(a, timestamp); - if (err != OK) { - ALOGE("%s: Error setting timestamp on native window: %s (%d)", - __FUNCTION__, strerror(-err), err); - return err; - } - err = a->queueBuffer(a, - container_of(buffer, ANativeWindowBuffer, handle), -1); - if (err != OK) { - ALOGE("%s: Error queueing buffer to native window: %s (%d)", - __FUNCTION__, strerror(-err), err); - return err; - } - - stream->mActiveBuffers--; - stream->mLastTimestamp = timestamp; - return OK; -} - -int Camera2Device::StreamAdapter::cancel_buffer(const camera2_stream_ops_t* w, - buffer_handle_t* buffer) { - ATRACE_CALL(); - StreamAdapter *stream = - const_cast(static_cast(w)); - ALOGVV("Stream %d cancel: Buffer %p", - stream->mId, (void*)(*buffer)); - if (stream->mState != ACTIVE) { - ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState); - return INVALID_OPERATION; - } - - ANativeWindow *a = toANW(w); - int err = a->cancelBuffer(a, - container_of(buffer, ANativeWindowBuffer, handle), -1); - if (err != OK) { - ALOGE("%s: Error canceling buffer to native window: %s (%d)", - __FUNCTION__, strerror(-err), err); - return err; - } - - stream->mActiveBuffers--; - return OK; -} - -int Camera2Device::StreamAdapter::set_crop(const camera2_stream_ops_t* w, - int left, int top, int right, int bottom) { - ATRACE_CALL(); - int state = static_cast(w)->mState; - if (state != ACTIVE) { - ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state); - return INVALID_OPERATION; - } - ANativeWindow *a = toANW(w); - android_native_rect_t crop = { left, top, right, bottom }; - return native_window_set_crop(a, &crop); -} - -/** - * Camera2Device::ReprocessStreamAdapter - */ - -#ifndef container_of -#define container_of(ptr, type, member) \ - (type *)((char*)(ptr) - offsetof(type, member)) -#endif - -Camera2Device::ReprocessStreamAdapter::ReprocessStreamAdapter(camera2_device_t *d): - mState(RELEASED), - mHal2Device(d), - mId(-1), - mWidth(0), mHeight(0), mFormat(0), - mActiveBuffers(0), - mFrameCount(0) -{ - ATRACE_CALL(); - camera2_stream_in_ops::acquire_buffer = acquire_buffer; - camera2_stream_in_ops::release_buffer = release_buffer; -} - -Camera2Device::ReprocessStreamAdapter::~ReprocessStreamAdapter() { - ATRACE_CALL(); - if (mState != RELEASED) { - release(); - } -} - 
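
The buffer round-trip in the stream callbacks above leans on container_of: the HAL hands back a pointer to the handle member, and the adapter recovers the enclosing ANativeWindowBuffer from it before queueing or cancelling. A minimal self-contained illustration of that pointer arithmetic, using a hypothetical Buffer struct rather than the real ANativeWindowBuffer:

#include <cstddef>
#include <cstdio>

// Same idea as the macro in the deleted sources; Buffer is a stand-in type.
#ifndef container_of
#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))
#endif

struct Buffer {
    int id;
    void *handle;   // stands in for buffer_handle_t
};

int main() {
    Buffer b = { 42, nullptr };
    void **handlePtr = &b.handle;   // what a callback would receive
    Buffer *owner = container_of(handlePtr, Buffer, handle);
    std::printf("recovered buffer id: %d\n", owner->id);   // prints 42
    return 0;
}

The offsetof subtraction is only well defined when the pointer really does address that member of such an object, which is exactly the invariant the dequeue/enqueue/cancel callbacks maintain.
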
-status_t Camera2Device::ReprocessStreamAdapter::connectToDevice( - const sp &outputStream) { - ATRACE_CALL(); - status_t res; - ALOGV("%s: E", __FUNCTION__); - - if (mState != RELEASED) return INVALID_OPERATION; - if (outputStream == NULL) { - ALOGE("%s: Null base stream passed to reprocess stream adapter", - __FUNCTION__); - return BAD_VALUE; - } - - mBaseStream = outputStream; - mWidth = outputStream->getWidth(); - mHeight = outputStream->getHeight(); - mFormat = outputStream->getFormat(); - - ALOGV("%s: New reprocess stream parameters %d x %d, format 0x%x", - __FUNCTION__, mWidth, mHeight, mFormat); - - // Allocate device-side stream interface - - uint32_t id; - res = mHal2Device->ops->allocate_reprocess_stream_from_stream(mHal2Device, - outputStream->getId(), getStreamOps(), - &id); - if (res != OK) { - ALOGE("%s: Device reprocess stream allocation failed: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - - ALOGV("%s: Allocated reprocess stream id %d based on stream %d", - __FUNCTION__, id, outputStream->getId()); - - mId = id; - - mState = ACTIVE; - - return OK; -} - -status_t Camera2Device::ReprocessStreamAdapter::release() { - ATRACE_CALL(); - status_t res; - ALOGV("%s: Releasing stream %d", __FUNCTION__, mId); - if (mState >= ACTIVE) { - res = mHal2Device->ops->release_reprocess_stream(mHal2Device, mId); - if (res != OK) { - ALOGE("%s: Unable to release stream %d", - __FUNCTION__, mId); - return res; - } - } - - List::iterator s; - for (s = mQueue.begin(); s != mQueue.end(); s++) { - sp listener = s->releaseListener.promote(); - if (listener != 0) listener->onBufferReleased(s->handle); - } - for (s = mInFlightQueue.begin(); s != mInFlightQueue.end(); s++) { - sp listener = s->releaseListener.promote(); - if (listener != 0) listener->onBufferReleased(s->handle); - } - mQueue.clear(); - mInFlightQueue.clear(); - - mState = RELEASED; - return OK; -} - -status_t Camera2Device::ReprocessStreamAdapter::pushIntoStream( - buffer_handle_t *handle, const wp &releaseListener) { - ATRACE_CALL(); - // TODO: Some error checking here would be nice - ALOGV("%s: Pushing buffer %p to stream", __FUNCTION__, (void*)(*handle)); - - QueueEntry entry; - entry.handle = handle; - entry.releaseListener = releaseListener; - mQueue.push_back(entry); - return OK; -} - -status_t Camera2Device::ReprocessStreamAdapter::dump(int fd, - const Vector& /*args*/) { - ATRACE_CALL(); - String8 result = - String8::format(" Reprocess stream %d: %d x %d, fmt 0x%x\n", - mId, mWidth, mHeight, mFormat); - result.appendFormat(" acquired buffers: %d\n", - mActiveBuffers); - result.appendFormat(" frame count: %d\n", - mFrameCount); - write(fd, result.string(), result.size()); - return OK; -} - -const camera2_stream_in_ops *Camera2Device::ReprocessStreamAdapter::getStreamOps() { - return static_cast(this); -} - -int Camera2Device::ReprocessStreamAdapter::acquire_buffer( - const camera2_stream_in_ops_t *w, - buffer_handle_t** buffer) { - ATRACE_CALL(); - - ReprocessStreamAdapter* stream = - const_cast( - static_cast(w)); - if (stream->mState != ACTIVE) { - ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState); - return INVALID_OPERATION; - } - - if (stream->mQueue.empty()) { - *buffer = NULL; - return OK; - } - - QueueEntry &entry = *(stream->mQueue.begin()); - - *buffer = entry.handle; - - stream->mInFlightQueue.push_back(entry); - stream->mQueue.erase(stream->mQueue.begin()); - - stream->mActiveBuffers++; - - ALOGV("Stream %d acquire: Buffer %p acquired", stream->mId, - (void*)(**buffer)); - 
return OK; -} - -int Camera2Device::ReprocessStreamAdapter::release_buffer( - const camera2_stream_in_ops_t* w, - buffer_handle_t* buffer) { - ATRACE_CALL(); - ReprocessStreamAdapter *stream = - const_cast( - static_cast(w) ); - stream->mFrameCount++; - ALOGV("Reprocess stream %d release: Frame %d (%p)", - stream->mId, stream->mFrameCount, (void*)*buffer); - int state = stream->mState; - if (state != ACTIVE) { - ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state); - return INVALID_OPERATION; - } - stream->mActiveBuffers--; - - List::iterator s; - for (s = stream->mInFlightQueue.begin(); s != stream->mInFlightQueue.end(); s++) { - if ( s->handle == buffer ) break; - } - if (s == stream->mInFlightQueue.end()) { - ALOGE("%s: Can't find buffer %p in in-flight list!", __FUNCTION__, - buffer); - return INVALID_OPERATION; - } - - sp listener = s->releaseListener.promote(); - if (listener != 0) { - listener->onBufferReleased(s->handle); - } else { - ALOGE("%s: Can't free buffer - missing listener", __FUNCTION__); - } - stream->mInFlightQueue.erase(s); - - return OK; -} - -}; // namespace android diff --git a/services/camera/libcameraservice/Camera2Device.h b/services/camera/libcameraservice/Camera2Device.h deleted file mode 100644 index 372ce9f..0000000 --- a/services/camera/libcameraservice/Camera2Device.h +++ /dev/null @@ -1,345 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2DEVICE_H -#define ANDROID_SERVERS_CAMERA_CAMERA2DEVICE_H - -#include -#include -#include -#include - -#include "CameraDeviceBase.h" - -namespace android { - -/** - * CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_2_0 - */ -class Camera2Device: public CameraDeviceBase { - public: - Camera2Device(int id); - - virtual ~Camera2Device(); - - /** - * CameraDevice interface - */ - virtual int getId() const; - virtual status_t initialize(camera_module_t *module); - virtual status_t disconnect(); - virtual status_t dump(int fd, const Vector& args); - virtual const CameraMetadata& info() const; - virtual status_t capture(CameraMetadata &request); - virtual status_t setStreamingRequest(const CameraMetadata &request); - virtual status_t clearStreamingRequest(); - virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout); - virtual status_t createStream(sp consumer, - uint32_t width, uint32_t height, int format, size_t size, - int *id); - virtual status_t createReprocessStreamFromStream(int outputId, int *id); - virtual status_t getStreamInfo(int id, - uint32_t *width, uint32_t *height, uint32_t *format); - virtual status_t setStreamTransform(int id, int transform); - virtual status_t deleteStream(int id); - virtual status_t deleteReprocessStream(int id); - virtual status_t createDefaultRequest(int templateId, CameraMetadata *request); - virtual status_t waitUntilDrained(); - virtual status_t setNotifyCallback(NotificationListener *listener); - virtual bool willNotify3A(); - virtual status_t waitForNextFrame(nsecs_t timeout); - virtual status_t getNextFrame(CameraMetadata *frame); - virtual status_t triggerAutofocus(uint32_t id); - virtual status_t triggerCancelAutofocus(uint32_t id); - virtual status_t triggerPrecaptureMetering(uint32_t id); - virtual status_t pushReprocessBuffer(int reprocessStreamId, - buffer_handle_t *buffer, wp listener); - private: - const int mId; - camera2_device_t *mHal2Device; - - CameraMetadata mDeviceInfo; - vendor_tag_query_ops_t *mVendorTagOps; - - /** - * Queue class for both sending requests to a camera2 device, and for - * receiving frames from a camera2 device. - */ - class MetadataQueue: public camera2_request_queue_src_ops_t, - public camera2_frame_queue_dst_ops_t { - public: - MetadataQueue(); - ~MetadataQueue(); - - // Interface to camera2 HAL device, either for requests (device is - // consumer) or for frames (device is producer) - const camera2_request_queue_src_ops_t* getToConsumerInterface(); - void setFromConsumerInterface(camera2_device_t *d); - - // Connect queue consumer endpoint to a camera2 device - status_t setConsumerDevice(camera2_device_t *d); - // Connect queue producer endpoint to a camera2 device - status_t setProducerDevice(camera2_device_t *d); - - const camera2_frame_queue_dst_ops_t* getToProducerInterface(); - - // Real interfaces. On enqueue, queue takes ownership of buffer pointer - // On dequeue, user takes ownership of buffer pointer. - status_t enqueue(camera_metadata_t *buf); - status_t dequeue(camera_metadata_t **buf, bool incrementCount = false); - int getBufferCount(); - status_t waitForBuffer(nsecs_t timeout); - // Wait until a buffer with the given ID is dequeued. Will return - // immediately if the latest buffer dequeued has that ID. 
- status_t waitForDequeue(int32_t id, nsecs_t timeout); - - // Set repeating buffer(s); if the queue is empty on a dequeue call, the - // queue copies the contents of the stream slot into the queue, and then - // dequeues the first new entry. The metadata buffers passed in are - // copied. - status_t setStreamSlot(camera_metadata_t *buf); - status_t setStreamSlot(const List &bufs); - - status_t dump(int fd, const Vector& args); - - private: - status_t signalConsumerLocked(); - status_t freeBuffers(List::iterator start, - List::iterator end); - - camera2_device_t *mHal2Device; - - Mutex mMutex; - Condition notEmpty; - - int mFrameCount; - int32_t mLatestRequestId; - Condition mNewRequestId; - - int mCount; - List mEntries; - int mStreamSlotCount; - List mStreamSlot; - - bool mSignalConsumer; - - static MetadataQueue* getInstance( - const camera2_frame_queue_dst_ops_t *q); - static MetadataQueue* getInstance( - const camera2_request_queue_src_ops_t *q); - - static int consumer_buffer_count( - const camera2_request_queue_src_ops_t *q); - - static int consumer_dequeue(const camera2_request_queue_src_ops_t *q, - camera_metadata_t **buffer); - - static int consumer_free(const camera2_request_queue_src_ops_t *q, - camera_metadata_t *old_buffer); - - static int producer_dequeue(const camera2_frame_queue_dst_ops_t *q, - size_t entries, size_t bytes, - camera_metadata_t **buffer); - - static int producer_cancel(const camera2_frame_queue_dst_ops_t *q, - camera_metadata_t *old_buffer); - - static int producer_enqueue(const camera2_frame_queue_dst_ops_t *q, - camera_metadata_t *filled_buffer); - - }; // class MetadataQueue - - MetadataQueue mRequestQueue; - MetadataQueue mFrameQueue; - - /** - * Adapter from an ANativeWindow interface to camera2 device stream ops. - * Also takes care of allocating/deallocating stream in device interface - */ - class StreamAdapter: public camera2_stream_ops, public virtual RefBase { - public: - StreamAdapter(camera2_device_t *d); - - ~StreamAdapter(); - - /** - * Create a HAL device stream of the requested size and format. - * - * If format is CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, then the HAL device - * selects an appropriate format; it can be queried with getFormat. - * - * If format is HAL_PIXEL_FORMAT_COMPRESSED, the size parameter must - * be equal to the size in bytes of the buffers to allocate for the - * stream. For other formats, the size parameter is ignored. - */ - status_t connectToDevice(sp consumer, - uint32_t width, uint32_t height, int format, size_t size); - - status_t release(); - - status_t setTransform(int transform); - - // Get stream parameters. - // Only valid after a successful connectToDevice call. 
- int getId() const { return mId; } - uint32_t getWidth() const { return mWidth; } - uint32_t getHeight() const { return mHeight; } - uint32_t getFormat() const { return mFormat; } - - // Dump stream information - status_t dump(int fd, const Vector& args); - - private: - enum { - ERROR = -1, - RELEASED = 0, - ALLOCATED, - CONNECTED, - ACTIVE - } mState; - - sp mConsumerInterface; - camera2_device_t *mHal2Device; - - uint32_t mId; - uint32_t mWidth; - uint32_t mHeight; - uint32_t mFormat; - size_t mSize; - uint32_t mUsage; - uint32_t mMaxProducerBuffers; - uint32_t mMaxConsumerBuffers; - uint32_t mTotalBuffers; - int mFormatRequested; - - /** Debugging information */ - uint32_t mActiveBuffers; - uint32_t mFrameCount; - int64_t mLastTimestamp; - - const camera2_stream_ops *getStreamOps(); - - static ANativeWindow* toANW(const camera2_stream_ops_t *w); - - static int dequeue_buffer(const camera2_stream_ops_t *w, - buffer_handle_t** buffer); - - static int enqueue_buffer(const camera2_stream_ops_t* w, - int64_t timestamp, - buffer_handle_t* buffer); - - static int cancel_buffer(const camera2_stream_ops_t* w, - buffer_handle_t* buffer); - - static int set_crop(const camera2_stream_ops_t* w, - int left, int top, int right, int bottom); - }; // class StreamAdapter - - typedef List > StreamList; - StreamList mStreams; - - /** - * Adapter from an ANativeWindow interface to camera2 device stream ops. - * Also takes care of allocating/deallocating stream in device interface - */ - class ReprocessStreamAdapter: public camera2_stream_in_ops, public virtual RefBase { - public: - ReprocessStreamAdapter(camera2_device_t *d); - - ~ReprocessStreamAdapter(); - - /** - * Create a HAL device reprocess stream based on an existing output stream. - */ - status_t connectToDevice(const sp &outputStream); - - status_t release(); - - /** - * Push buffer into stream for reprocessing. Takes ownership until it notifies - * that the buffer has been released - */ - status_t pushIntoStream(buffer_handle_t *handle, - const wp &releaseListener); - - /** - * Get stream parameters. - * Only valid after a successful connectToDevice call. 
- */ - int getId() const { return mId; } - uint32_t getWidth() const { return mWidth; } - uint32_t getHeight() const { return mHeight; } - uint32_t getFormat() const { return mFormat; } - - // Dump stream information - status_t dump(int fd, const Vector& args); - - private: - enum { - ERROR = -1, - RELEASED = 0, - ACTIVE - } mState; - - sp mConsumerInterface; - wp mBaseStream; - - struct QueueEntry { - buffer_handle_t *handle; - wp releaseListener; - }; - - List mQueue; - - List mInFlightQueue; - - camera2_device_t *mHal2Device; - - uint32_t mId; - uint32_t mWidth; - uint32_t mHeight; - uint32_t mFormat; - - /** Debugging information */ - uint32_t mActiveBuffers; - uint32_t mFrameCount; - int64_t mLastTimestamp; - - const camera2_stream_in_ops *getStreamOps(); - - static int acquire_buffer(const camera2_stream_in_ops_t *w, - buffer_handle_t** buffer); - - static int release_buffer(const camera2_stream_in_ops_t* w, - buffer_handle_t* buffer); - - }; // class ReprocessStreamAdapter - - typedef List > ReprocessStreamList; - ReprocessStreamList mReprocessStreams; - - // Receives HAL notifications and routes them to the NotificationListener - static void notificationCallback(int32_t msg_type, - int32_t ext1, - int32_t ext2, - int32_t ext3, - void *user); - -}; // class Camera2Device - -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp deleted file mode 100644 index 9d0f392..0000000 --- a/services/camera/libcameraservice/Camera3Device.cpp +++ /dev/null @@ -1,1972 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "Camera3-Device" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 -//#define LOG_NNDEBUG 0 // Per-frame verbose logging - -#ifdef LOG_NNDEBUG -#define ALOGVV(...) ALOGV(__VA_ARGS__) -#else -#define ALOGVV(...) ((void)0) -#endif - -// Convenience macro for transient errors -#define CLOGE(fmt, ...) ALOGE("Camera %d: %s: " fmt, mId, __FUNCTION__, \ - ##__VA_ARGS__) - -// Convenience macros for transitioning to the error state -#define SET_ERR(fmt, ...) setErrorState( \ - "%s: " fmt, __FUNCTION__, \ - ##__VA_ARGS__) -#define SET_ERR_L(fmt, ...) 
setErrorStateLocked( \ - "%s: " fmt, __FUNCTION__, \ - ##__VA_ARGS__) - -#include -#include -#include -#include "Camera3Device.h" -#include "camera3/Camera3OutputStream.h" -#include "camera3/Camera3InputStream.h" - -using namespace android::camera3; - -namespace android { - -Camera3Device::Camera3Device(int id): - mId(id), - mHal3Device(NULL), - mStatus(STATUS_UNINITIALIZED), - mNextResultFrameNumber(0), - mNextShutterFrameNumber(0), - mListener(NULL) -{ - ATRACE_CALL(); - camera3_callback_ops::notify = &sNotify; - camera3_callback_ops::process_capture_result = &sProcessCaptureResult; - ALOGV("%s: Created device for camera %d", __FUNCTION__, id); -} - -Camera3Device::~Camera3Device() -{ - ATRACE_CALL(); - ALOGV("%s: Tearing down for camera id %d", __FUNCTION__, mId); - disconnect(); -} - -int Camera3Device::getId() const { - return mId; -} - -/** - * CameraDeviceBase interface - */ - -status_t Camera3Device::initialize(camera_module_t *module) -{ - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId); - if (mStatus != STATUS_UNINITIALIZED) { - CLOGE("Already initialized!"); - return INVALID_OPERATION; - } - - /** Open HAL device */ - - status_t res; - String8 deviceName = String8::format("%d", mId); - - camera3_device_t *device; - - res = module->common.methods->open(&module->common, deviceName.string(), - reinterpret_cast(&device)); - - if (res != OK) { - SET_ERR_L("Could not open camera: %s (%d)", strerror(-res), res); - return res; - } - - /** Cross-check device version */ - - if (device->common.version != CAMERA_DEVICE_API_VERSION_3_0) { - SET_ERR_L("Could not open camera: " - "Camera device is not version %x, reports %x instead", - CAMERA_DEVICE_API_VERSION_3_0, - device->common.version); - device->common.close(&device->common); - return BAD_VALUE; - } - - camera_info info; - res = module->get_camera_info(mId, &info); - if (res != OK) return res; - - if (info.device_version != device->common.version) { - SET_ERR_L("HAL reporting mismatched camera_info version (%x)" - " and device version (%x).", - device->common.version, info.device_version); - device->common.close(&device->common); - return BAD_VALUE; - } - - /** Initialize device with callback functions */ - - ATRACE_BEGIN("camera3->initialize"); - res = device->ops->initialize(device, this); - ATRACE_END(); - - if (res != OK) { - SET_ERR_L("Unable to initialize HAL device: %s (%d)", - strerror(-res), res); - device->common.close(&device->common); - return BAD_VALUE; - } - - /** Get vendor metadata tags */ - - mVendorTagOps.get_camera_vendor_section_name = NULL; - - ATRACE_BEGIN("camera3->get_metadata_vendor_tag_ops"); - device->ops->get_metadata_vendor_tag_ops(device, &mVendorTagOps); - ATRACE_END(); - - if (mVendorTagOps.get_camera_vendor_section_name != NULL) { - res = set_camera_metadata_vendor_tag_ops(&mVendorTagOps); - if (res != OK) { - SET_ERR_L("Unable to set tag ops: %s (%d)", - strerror(-res), res); - device->common.close(&device->common); - return res; - } - } - - /** Start up request queue thread */ - - mRequestThread = new RequestThread(this, device); - res = mRequestThread->run(String8::format("C3Dev-%d-ReqQueue", mId).string()); - if (res != OK) { - SET_ERR_L("Unable to start request queue thread: %s (%d)", - strerror(-res), res); - device->common.close(&device->common); - mRequestThread.clear(); - return res; - } - - /** Everything is good to go */ - - mDeviceInfo = info.static_camera_characteristics; - mHal3Device = device; - mStatus = STATUS_IDLE; - 
mNextStreamId = 0; - mNeedConfig = true; - - return OK; -} - -status_t Camera3Device::disconnect() { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - ALOGV("%s: E", __FUNCTION__); - - status_t res = OK; - if (mStatus == STATUS_UNINITIALIZED) return res; - - if (mStatus == STATUS_ACTIVE || - (mStatus == STATUS_ERROR && mRequestThread != NULL)) { - res = mRequestThread->clearRepeatingRequests(); - if (res != OK) { - SET_ERR_L("Can't stop streaming"); - // Continue to close device even in case of error - } else { - res = waitUntilDrainedLocked(); - if (res != OK) { - SET_ERR_L("Timeout waiting for HAL to drain"); - // Continue to close device even in case of error - } - } - } - assert(mStatus == STATUS_IDLE || mStatus == STATUS_ERROR); - - if (mStatus == STATUS_ERROR) { - CLOGE("Shutting down in an error state"); - } - - if (mRequestThread != NULL) { - mRequestThread->requestExit(); - } - - mOutputStreams.clear(); - mInputStream.clear(); - - if (mRequestThread != NULL) { - if (mStatus != STATUS_ERROR) { - // HAL may be in a bad state, so waiting for request thread - // (which may be stuck in the HAL processCaptureRequest call) - // could be dangerous. - mRequestThread->join(); - } - mRequestThread.clear(); - } - - if (mHal3Device != NULL) { - mHal3Device->common.close(&mHal3Device->common); - mHal3Device = NULL; - } - - mStatus = STATUS_UNINITIALIZED; - - ALOGV("%s: X", __FUNCTION__); - return res; -} - -status_t Camera3Device::dump(int fd, const Vector &args) { - ATRACE_CALL(); - (void)args; - String8 lines; - - const char *status = - mStatus == STATUS_ERROR ? "ERROR" : - mStatus == STATUS_UNINITIALIZED ? "UNINITIALIZED" : - mStatus == STATUS_IDLE ? "IDLE" : - mStatus == STATUS_ACTIVE ? "ACTIVE" : - "Unknown"; - lines.appendFormat(" Device status: %s\n", status); - if (mStatus == STATUS_ERROR) { - lines.appendFormat(" Error cause: %s\n", mErrorCause.string()); - } - lines.appendFormat(" Stream configuration:\n"); - - if (mInputStream != NULL) { - write(fd, lines.string(), lines.size()); - mInputStream->dump(fd, args); - } else { - lines.appendFormat(" No input stream.\n"); - write(fd, lines.string(), lines.size()); - } - for (size_t i = 0; i < mOutputStreams.size(); i++) { - mOutputStreams[i]->dump(fd,args); - } - - lines = String8(" In-flight requests:\n"); - if (mInFlightMap.size() == 0) { - lines.append(" None\n"); - } else { - for (size_t i = 0; i < mInFlightMap.size(); i++) { - InFlightRequest r = mInFlightMap.valueAt(i); - lines.appendFormat(" Frame %d | Timestamp: %lld, metadata" - " arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i), - r.captureTimestamp, r.haveResultMetadata ? "true" : "false", - r.numBuffersLeft); - } - } - write(fd, lines.string(), lines.size()); - - if (mHal3Device != NULL) { - lines = String8(" HAL device dump:\n"); - write(fd, lines.string(), lines.size()); - mHal3Device->ops->dump(mHal3Device, fd); - } - - return OK; -} - -const CameraMetadata& Camera3Device::info() const { - ALOGVV("%s: E", __FUNCTION__); - if (CC_UNLIKELY(mStatus == STATUS_UNINITIALIZED || - mStatus == STATUS_ERROR)) { - ALOGW("%s: Access to static info %s!", __FUNCTION__, - mStatus == STATUS_ERROR ? 
- "when in error state" : "before init"); - } - return mDeviceInfo; -} - -status_t Camera3Device::capture(CameraMetadata &request) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - // TODO: take ownership of the request - - switch (mStatus) { - case STATUS_ERROR: - CLOGE("Device has encountered a serious error"); - return INVALID_OPERATION; - case STATUS_UNINITIALIZED: - CLOGE("Device not initialized"); - return INVALID_OPERATION; - case STATUS_IDLE: - case STATUS_ACTIVE: - // OK - break; - default: - SET_ERR_L("Unexpected status: %d", mStatus); - return INVALID_OPERATION; - } - - sp newRequest = setUpRequestLocked(request); - if (newRequest == NULL) { - CLOGE("Can't create capture request"); - return BAD_VALUE; - } - - return mRequestThread->queueRequest(newRequest); -} - - -status_t Camera3Device::setStreamingRequest(const CameraMetadata &request) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - switch (mStatus) { - case STATUS_ERROR: - CLOGE("Device has encountered a serious error"); - return INVALID_OPERATION; - case STATUS_UNINITIALIZED: - CLOGE("Device not initialized"); - return INVALID_OPERATION; - case STATUS_IDLE: - case STATUS_ACTIVE: - // OK - break; - default: - SET_ERR_L("Unexpected status: %d", mStatus); - return INVALID_OPERATION; - } - - sp newRepeatingRequest = setUpRequestLocked(request); - if (newRepeatingRequest == NULL) { - CLOGE("Can't create repeating request"); - return BAD_VALUE; - } - - RequestList newRepeatingRequests; - newRepeatingRequests.push_back(newRepeatingRequest); - - return mRequestThread->setRepeatingRequests(newRepeatingRequests); -} - - -sp Camera3Device::setUpRequestLocked( - const CameraMetadata &request) { - status_t res; - - if (mStatus == STATUS_IDLE) { - res = configureStreamsLocked(); - if (res != OK) { - SET_ERR_L("Can't set up streams: %s (%d)", strerror(-res), res); - return NULL; - } - } - - sp newRequest = createCaptureRequest(request); - return newRequest; -} - -status_t Camera3Device::clearStreamingRequest() { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - switch (mStatus) { - case STATUS_ERROR: - CLOGE("Device has encountered a serious error"); - return INVALID_OPERATION; - case STATUS_UNINITIALIZED: - CLOGE("Device not initialized"); - return INVALID_OPERATION; - case STATUS_IDLE: - case STATUS_ACTIVE: - // OK - break; - default: - SET_ERR_L("Unexpected status: %d", mStatus); - return INVALID_OPERATION; - } - - return mRequestThread->clearRepeatingRequests(); -} - -status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) { - ATRACE_CALL(); - - return mRequestThread->waitUntilRequestProcessed(requestId, timeout); -} - -status_t Camera3Device::createInputStream( - uint32_t width, uint32_t height, int format, int *id) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - status_t res; - bool wasActive = false; - - switch (mStatus) { - case STATUS_ERROR: - ALOGE("%s: Device has encountered a serious error", __FUNCTION__); - return INVALID_OPERATION; - case STATUS_UNINITIALIZED: - ALOGE("%s: Device not initialized", __FUNCTION__); - return INVALID_OPERATION; - case STATUS_IDLE: - // OK - break; - case STATUS_ACTIVE: - ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); - mRequestThread->setPaused(true); - res = waitUntilDrainedLocked(); - if (res != OK) { - ALOGE("%s: Can't pause captures to reconfigure streams!", - __FUNCTION__); - mStatus = STATUS_ERROR; - return res; - } - wasActive = true; - break; - default: - ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); - return 
INVALID_OPERATION; - } - assert(mStatus == STATUS_IDLE); - - if (mInputStream != 0) { - ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__); - return INVALID_OPERATION; - } - - sp newStream = new Camera3InputStream(mNextStreamId, - width, height, format); - - mInputStream = newStream; - - *id = mNextStreamId++; - - // Continue captures if active at start - if (wasActive) { - ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__); - res = configureStreamsLocked(); - if (res != OK) { - ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)", - __FUNCTION__, mNextStreamId, strerror(-res), res); - return res; - } - mRequestThread->setPaused(false); - } - - return OK; -} - - -status_t Camera3Device::createZslStream( - uint32_t width, uint32_t height, - int depth, - /*out*/ - int *id, - sp* zslStream) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - status_t res; - bool wasActive = false; - - switch (mStatus) { - case STATUS_ERROR: - ALOGE("%s: Device has encountered a serious error", __FUNCTION__); - return INVALID_OPERATION; - case STATUS_UNINITIALIZED: - ALOGE("%s: Device not initialized", __FUNCTION__); - return INVALID_OPERATION; - case STATUS_IDLE: - // OK - break; - case STATUS_ACTIVE: - ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); - mRequestThread->setPaused(true); - res = waitUntilDrainedLocked(); - if (res != OK) { - ALOGE("%s: Can't pause captures to reconfigure streams!", - __FUNCTION__); - mStatus = STATUS_ERROR; - return res; - } - wasActive = true; - break; - default: - ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); - return INVALID_OPERATION; - } - assert(mStatus == STATUS_IDLE); - - if (mInputStream != 0) { - ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__); - return INVALID_OPERATION; - } - - sp newStream = new Camera3ZslStream(mNextStreamId, - width, height, depth); - - res = mOutputStreams.add(mNextStreamId, newStream); - if (res < 0) { - ALOGE("%s: Can't add new stream to set: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - mInputStream = newStream; - - *id = mNextStreamId++; - *zslStream = newStream; - - // Continue captures if active at start - if (wasActive) { - ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__); - res = configureStreamsLocked(); - if (res != OK) { - ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)", - __FUNCTION__, mNextStreamId, strerror(-res), res); - return res; - } - mRequestThread->setPaused(false); - } - - return OK; -} - -status_t Camera3Device::createStream(sp consumer, - uint32_t width, uint32_t height, int format, size_t size, int *id) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - status_t res; - bool wasActive = false; - - switch (mStatus) { - case STATUS_ERROR: - CLOGE("Device has encountered a serious error"); - return INVALID_OPERATION; - case STATUS_UNINITIALIZED: - CLOGE("Device not initialized"); - return INVALID_OPERATION; - case STATUS_IDLE: - // OK - break; - case STATUS_ACTIVE: - ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); - mRequestThread->setPaused(true); - res = waitUntilDrainedLocked(); - if (res != OK) { - ALOGE("%s: Can't pause captures to reconfigure streams!", - __FUNCTION__); - return res; - } - wasActive = true; - break; - default: - SET_ERR_L("Unexpected status: %d", mStatus); - return INVALID_OPERATION; - } - assert(mStatus == STATUS_IDLE); - - sp newStream; - if (format == HAL_PIXEL_FORMAT_BLOB) { - newStream = new Camera3OutputStream(mNextStreamId, 
consumer, - width, height, size, format); - } else { - newStream = new Camera3OutputStream(mNextStreamId, consumer, - width, height, format); - } - - res = mOutputStreams.add(mNextStreamId, newStream); - if (res < 0) { - SET_ERR_L("Can't add new stream to set: %s (%d)", strerror(-res), res); - return res; - } - - *id = mNextStreamId++; - mNeedConfig = true; - - // Continue captures if active at start - if (wasActive) { - ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__); - res = configureStreamsLocked(); - if (res != OK) { - CLOGE("Can't reconfigure device for new stream %d: %s (%d)", - mNextStreamId, strerror(-res), res); - return res; - } - mRequestThread->setPaused(false); - } - - return OK; -} - -status_t Camera3Device::createReprocessStreamFromStream(int outputId, int *id) { - ATRACE_CALL(); - (void)outputId; (void)id; - - CLOGE("Unimplemented"); - return INVALID_OPERATION; -} - - -status_t Camera3Device::getStreamInfo(int id, - uint32_t *width, uint32_t *height, uint32_t *format) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - switch (mStatus) { - case STATUS_ERROR: - CLOGE("Device has encountered a serious error"); - return INVALID_OPERATION; - case STATUS_UNINITIALIZED: - CLOGE("Device not initialized!"); - return INVALID_OPERATION; - case STATUS_IDLE: - case STATUS_ACTIVE: - // OK - break; - default: - SET_ERR_L("Unexpected status: %d", mStatus); - return INVALID_OPERATION; - } - - ssize_t idx = mOutputStreams.indexOfKey(id); - if (idx == NAME_NOT_FOUND) { - CLOGE("Stream %d is unknown", id); - return idx; - } - - if (width) *width = mOutputStreams[idx]->getWidth(); - if (height) *height = mOutputStreams[idx]->getHeight(); - if (format) *format = mOutputStreams[idx]->getFormat(); - - return OK; -} - -status_t Camera3Device::setStreamTransform(int id, - int transform) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - switch (mStatus) { - case STATUS_ERROR: - CLOGE("Device has encountered a serious error"); - return INVALID_OPERATION; - case STATUS_UNINITIALIZED: - CLOGE("Device not initialized"); - return INVALID_OPERATION; - case STATUS_IDLE: - case STATUS_ACTIVE: - // OK - break; - default: - SET_ERR_L("Unexpected status: %d", mStatus); - return INVALID_OPERATION; - } - - ssize_t idx = mOutputStreams.indexOfKey(id); - if (idx == NAME_NOT_FOUND) { - CLOGE("Stream %d does not exist", - id); - return BAD_VALUE; - } - - return mOutputStreams.editValueAt(idx)->setTransform(transform); -} - -status_t Camera3Device::deleteStream(int id) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - status_t res; - - ALOGV("%s: Camera %d: Deleting stream %d", __FUNCTION__, mId, id); - - // CameraDevice semantics require device to already be idle before - // deleteStream is called, unlike for createStream. - if (mStatus != STATUS_IDLE) { - ALOGV("%s: Camera %d: Device not idle", __FUNCTION__, mId); - return -EBUSY; - } - - sp deletedStream; - if (mInputStream != NULL && id == mInputStream->getId()) { - deletedStream = mInputStream; - mInputStream.clear(); - } else { - ssize_t idx = mOutputStreams.indexOfKey(id); - if (idx == NAME_NOT_FOUND) { - CLOGE("Stream %d does not exist", id); - return BAD_VALUE; - } - deletedStream = mOutputStreams.editValueAt(idx); - mOutputStreams.removeItem(id); - } - - // Free up the stream endpoint so that it can be used by some other stream - res = deletedStream->disconnect(); - if (res != OK) { - SET_ERR_L("Can't disconnect deleted stream %d", id); - // fall through since we want to still list the stream as deleted. 
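The createStream() implementation being removed here treats HAL_PIXEL_FORMAT_BLOB specially: only for BLOB (JPEG) output does the extra size argument carry a maximum buffer size; for processed formats it is ignored. A minimal caller-side sketch of that distinction follows. It is illustrative only: the device handle, window handles, dimensions, and JPEG size are assumptions, and the consumer's template argument (elided by the formatting of this hunk) is assumed here to be ANativeWindow.

    // Illustrative only: one processed output stream plus one JPEG (BLOB)
    // stream on an already-initialized Camera3Device.
    status_t setUpExampleStreams(const sp<Camera3Device>& device,
                                 const sp<ANativeWindow>& previewWindow,
                                 const sp<ANativeWindow>& jpegWindow) {
        int previewId = -1;
        int jpegId = -1;

        // Non-BLOB formats take the else-branch above; size is unused.
        status_t res = device->createStream(previewWindow, 1280, 720,
                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, /*size*/ 0,
                &previewId);
        if (res != OK) return res;

        // For HAL_PIXEL_FORMAT_BLOB the size argument is the maximum
        // compressed buffer size, which the special-cased
        // Camera3OutputStream constructor above consumes.
        res = device->createStream(jpegWindow, 2592, 1944,
                HAL_PIXEL_FORMAT_BLOB, /*maxJpegSize*/ 3 * 1024 * 1024,
                &jpegId);
        if (res != OK) return res;

        // deleteStream() requires an idle device and returns -EBUSY
        // otherwise, as enforced in the implementation above.
        return device->deleteStream(jpegId);
    }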
- } - mDeletedStreams.add(deletedStream); - mNeedConfig = true; - - return res; -} - -status_t Camera3Device::deleteReprocessStream(int id) { - ATRACE_CALL(); - (void)id; - - CLOGE("Unimplemented"); - return INVALID_OPERATION; -} - - -status_t Camera3Device::createDefaultRequest(int templateId, - CameraMetadata *request) { - ATRACE_CALL(); - ALOGV("%s: for template %d", __FUNCTION__, templateId); - Mutex::Autolock l(mLock); - - switch (mStatus) { - case STATUS_ERROR: - CLOGE("Device has encountered a serious error"); - return INVALID_OPERATION; - case STATUS_UNINITIALIZED: - CLOGE("Device is not initialized!"); - return INVALID_OPERATION; - case STATUS_IDLE: - case STATUS_ACTIVE: - // OK - break; - default: - SET_ERR_L("Unexpected status: %d", mStatus); - return INVALID_OPERATION; - } - - const camera_metadata_t *rawRequest; - ATRACE_BEGIN("camera3->construct_default_request_settings"); - rawRequest = mHal3Device->ops->construct_default_request_settings( - mHal3Device, templateId); - ATRACE_END(); - if (rawRequest == NULL) { - SET_ERR_L("HAL is unable to construct default settings for template %d", - templateId); - return DEAD_OBJECT; - } - *request = rawRequest; - - return OK; -} - -status_t Camera3Device::waitUntilDrained() { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - return waitUntilDrainedLocked(); -} - -status_t Camera3Device::waitUntilDrainedLocked() { - ATRACE_CALL(); - status_t res; - - switch (mStatus) { - case STATUS_UNINITIALIZED: - case STATUS_IDLE: - ALOGV("%s: Already idle", __FUNCTION__); - return OK; - case STATUS_ERROR: - case STATUS_ACTIVE: - // Need to shut down - break; - default: - SET_ERR_L("Unexpected status: %d",mStatus); - return INVALID_OPERATION; - } - - if (mRequestThread != NULL) { - res = mRequestThread->waitUntilPaused(kShutdownTimeout); - if (res != OK) { - SET_ERR_L("Can't stop request thread in %f seconds!", - kShutdownTimeout/1e9); - return res; - } - } - if (mInputStream != NULL) { - res = mInputStream->waitUntilIdle(kShutdownTimeout); - if (res != OK) { - SET_ERR_L("Can't idle input stream %d in %f seconds!", - mInputStream->getId(), kShutdownTimeout/1e9); - return res; - } - } - for (size_t i = 0; i < mOutputStreams.size(); i++) { - res = mOutputStreams.editValueAt(i)->waitUntilIdle(kShutdownTimeout); - if (res != OK) { - SET_ERR_L("Can't idle output stream %d in %f seconds!", - mOutputStreams.keyAt(i), kShutdownTimeout/1e9); - return res; - } - } - - if (mStatus != STATUS_ERROR) { - mStatus = STATUS_IDLE; - } - - return OK; -} - -status_t Camera3Device::setNotifyCallback(NotificationListener *listener) { - ATRACE_CALL(); - Mutex::Autolock l(mOutputLock); - - if (listener != NULL && mListener != NULL) { - ALOGW("%s: Replacing old callback listener", __FUNCTION__); - } - mListener = listener; - - return OK; -} - -bool Camera3Device::willNotify3A() { - return false; -} - -status_t Camera3Device::waitForNextFrame(nsecs_t timeout) { - ATRACE_CALL(); - status_t res; - Mutex::Autolock l(mOutputLock); - - while (mResultQueue.empty()) { - res = mResultSignal.waitRelative(mOutputLock, timeout); - if (res == TIMED_OUT) { - return res; - } else if (res != OK) { - ALOGW("%s: Camera %d: No frame in %lld ns: %s (%d)", - __FUNCTION__, mId, timeout, strerror(-res), res); - return res; - } - } - return OK; -} - -status_t Camera3Device::getNextFrame(CameraMetadata *frame) { - ATRACE_CALL(); - Mutex::Autolock l(mOutputLock); - - if (mResultQueue.empty()) { - return NOT_ENOUGH_DATA; - } - - CameraMetadata &result = *(mResultQueue.begin()); - frame->acquire(result); 
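waitForNextFrame() and getNextFrame() above form a small blocking consumer interface over mResultQueue: the first waits on mResultSignal with a relative timeout, the second hands out one queued CameraMetadata. As a caller-side illustration (not part of the original sources), a metadata drain loop could look like the sketch below; the device handle and the 100 ms timeout are assumptions.

    // Illustrative consumer of the result queue managed above.
    void drainResultMetadata(const sp<Camera3Device>& device) {
        const nsecs_t kWaitTimeout = 100 * 1000 * 1000; // 100 ms in ns
        for (;;) {
            status_t res = device->waitForNextFrame(kWaitTimeout);
            if (res == TIMED_OUT) continue;   // nothing arrived yet, retry
            if (res != OK) break;             // device error, stop draining

            CameraMetadata frame;
            if (device->getNextFrame(&frame) != OK) continue;

            // processCaptureResult() stamps ANDROID_REQUEST_FRAME_COUNT into
            // each result and treats a missing ANDROID_SENSOR_TIMESTAMP as an
            // error, so both tags are normally present here.
            camera_metadata_entry_t ts = frame.find(ANDROID_SENSOR_TIMESTAMP);
            if (ts.count > 0) {
                ALOGV("Result metadata with timestamp %lld", ts.data.i64[0]);
            }
        }
    }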
- mResultQueue.erase(mResultQueue.begin()); - - return OK; -} - -status_t Camera3Device::triggerAutofocus(uint32_t id) { - ATRACE_CALL(); - - ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id); - // Mix-in this trigger into the next request and only the next request. - RequestTrigger trigger[] = { - { - ANDROID_CONTROL_AF_TRIGGER, - ANDROID_CONTROL_AF_TRIGGER_START - }, - { - ANDROID_CONTROL_AF_TRIGGER_ID, - static_cast(id) - }, - }; - - return mRequestThread->queueTrigger(trigger, - sizeof(trigger)/sizeof(trigger[0])); -} - -status_t Camera3Device::triggerCancelAutofocus(uint32_t id) { - ATRACE_CALL(); - - ALOGV("%s: Triggering cancel autofocus, id %d", __FUNCTION__, id); - // Mix-in this trigger into the next request and only the next request. - RequestTrigger trigger[] = { - { - ANDROID_CONTROL_AF_TRIGGER, - ANDROID_CONTROL_AF_TRIGGER_CANCEL - }, - { - ANDROID_CONTROL_AF_TRIGGER_ID, - static_cast(id) - }, - }; - - return mRequestThread->queueTrigger(trigger, - sizeof(trigger)/sizeof(trigger[0])); -} - -status_t Camera3Device::triggerPrecaptureMetering(uint32_t id) { - ATRACE_CALL(); - - ALOGV("%s: Triggering precapture metering, id %d", __FUNCTION__, id); - // Mix-in this trigger into the next request and only the next request. - RequestTrigger trigger[] = { - { - ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, - ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START - }, - { - ANDROID_CONTROL_AE_PRECAPTURE_ID, - static_cast(id) - }, - }; - - return mRequestThread->queueTrigger(trigger, - sizeof(trigger)/sizeof(trigger[0])); -} - -status_t Camera3Device::pushReprocessBuffer(int reprocessStreamId, - buffer_handle_t *buffer, wp listener) { - ATRACE_CALL(); - (void)reprocessStreamId; (void)buffer; (void)listener; - - CLOGE("Unimplemented"); - return INVALID_OPERATION; -} - -/** - * Camera3Device private methods - */ - -sp Camera3Device::createCaptureRequest( - const CameraMetadata &request) { - ATRACE_CALL(); - status_t res; - - sp newRequest = new CaptureRequest; - newRequest->mSettings = request; - - camera_metadata_entry_t inputStreams = - newRequest->mSettings.find(ANDROID_REQUEST_INPUT_STREAMS); - if (inputStreams.count > 0) { - if (mInputStream == NULL || - mInputStream->getId() != inputStreams.data.u8[0]) { - CLOGE("Request references unknown input stream %d", - inputStreams.data.u8[0]); - return NULL; - } - // Lazy completion of stream configuration (allocation/registration) - // on first use - if (mInputStream->isConfiguring()) { - res = mInputStream->finishConfiguration(mHal3Device); - if (res != OK) { - SET_ERR_L("Unable to finish configuring input stream %d:" - " %s (%d)", - mInputStream->getId(), strerror(-res), res); - return NULL; - } - } - - newRequest->mInputStream = mInputStream; - newRequest->mSettings.erase(ANDROID_REQUEST_INPUT_STREAMS); - } - - camera_metadata_entry_t streams = - newRequest->mSettings.find(ANDROID_REQUEST_OUTPUT_STREAMS); - if (streams.count == 0) { - CLOGE("Zero output streams specified!"); - return NULL; - } - - for (size_t i = 0; i < streams.count; i++) { - int idx = mOutputStreams.indexOfKey(streams.data.u8[i]); - if (idx == NAME_NOT_FOUND) { - CLOGE("Request references unknown stream %d", - streams.data.u8[i]); - return NULL; - } - sp stream = - mOutputStreams.editValueAt(idx); - - // Lazy completion of stream configuration (allocation/registration) - // on first use - if (stream->isConfiguring()) { - res = stream->finishConfiguration(mHal3Device); - if (res != OK) { - SET_ERR_L("Unable to finish configuring stream %d: %s (%d)", - stream->getId(), 
strerror(-res), res); - return NULL; - } - } - - newRequest->mOutputStreams.push(stream); - } - newRequest->mSettings.erase(ANDROID_REQUEST_OUTPUT_STREAMS); - - return newRequest; -} - -status_t Camera3Device::configureStreamsLocked() { - ATRACE_CALL(); - status_t res; - - if (mStatus != STATUS_IDLE) { - CLOGE("Not idle"); - return INVALID_OPERATION; - } - - if (!mNeedConfig) { - ALOGV("%s: Skipping config, no stream changes", __FUNCTION__); - mStatus = STATUS_ACTIVE; - return OK; - } - - // Start configuring the streams - - camera3_stream_configuration config; - - config.num_streams = (mInputStream != NULL) + mOutputStreams.size(); - - Vector streams; - streams.setCapacity(config.num_streams); - - if (mInputStream != NULL) { - camera3_stream_t *inputStream; - inputStream = mInputStream->startConfiguration(); - if (inputStream == NULL) { - SET_ERR_L("Can't start input stream configuration"); - return INVALID_OPERATION; - } - streams.add(inputStream); - } - - for (size_t i = 0; i < mOutputStreams.size(); i++) { - - // Don't configure bidi streams twice, nor add them twice to the list - if (mOutputStreams[i].get() == - static_cast(mInputStream.get())) { - - config.num_streams--; - continue; - } - - camera3_stream_t *outputStream; - outputStream = mOutputStreams.editValueAt(i)->startConfiguration(); - if (outputStream == NULL) { - SET_ERR_L("Can't start output stream configuration"); - return INVALID_OPERATION; - } - streams.add(outputStream); - } - - config.streams = streams.editArray(); - - // Do the HAL configuration; will potentially touch stream - // max_buffers, usage, priv fields. - ATRACE_BEGIN("camera3->configure_streams"); - res = mHal3Device->ops->configure_streams(mHal3Device, &config); - ATRACE_END(); - - if (res != OK) { - SET_ERR_L("Unable to configure streams with HAL: %s (%d)", - strerror(-res), res); - return res; - } - - // Finish all stream configuration immediately. - // TODO: Try to relax this later back to lazy completion, which should be - // faster - - if (mInputStream != NULL && mInputStream->isConfiguring()) { - res = mInputStream->finishConfiguration(mHal3Device); - if (res != OK) { - SET_ERR_L("Can't finish configuring input stream %d: %s (%d)", - mInputStream->getId(), strerror(-res), res); - return res; - } - } - - for (size_t i = 0; i < mOutputStreams.size(); i++) { - sp outputStream = - mOutputStreams.editValueAt(i); - if (outputStream->isConfiguring()) { - res = outputStream->finishConfiguration(mHal3Device); - if (res != OK) { - SET_ERR_L("Can't finish configuring output stream %d: %s (%d)", - outputStream->getId(), strerror(-res), res); - return res; - } - } - } - - // Request thread needs to know to avoid using repeat-last-settings protocol - // across configure_streams() calls - mRequestThread->configurationComplete(); - - // Finish configuring the streams lazily on first reference - - mStatus = STATUS_ACTIVE; - mNeedConfig = false; - - return OK; -} - -void Camera3Device::setErrorState(const char *fmt, ...) { - Mutex::Autolock l(mLock); - va_list args; - va_start(args, fmt); - - setErrorStateLockedV(fmt, args); - - va_end(args); -} - -void Camera3Device::setErrorStateV(const char *fmt, va_list args) { - Mutex::Autolock l(mLock); - setErrorStateLockedV(fmt, args); -} - -void Camera3Device::setErrorStateLocked(const char *fmt, ...) 
{ - va_list args; - va_start(args, fmt); - - setErrorStateLockedV(fmt, args); - - va_end(args); -} - -void Camera3Device::setErrorStateLockedV(const char *fmt, va_list args) { - // Print out all error messages to log - String8 errorCause = String8::formatV(fmt, args); - ALOGE("Camera %d: %s", mId, errorCause.string()); - - // But only do error state transition steps for the first error - if (mStatus == STATUS_ERROR || mStatus == STATUS_UNINITIALIZED) return; - - mErrorCause = errorCause; - - mRequestThread->setPaused(true); - mStatus = STATUS_ERROR; -} - -/** - * In-flight request management - */ - -status_t Camera3Device::registerInFlight(int32_t frameNumber, - int32_t numBuffers) { - ATRACE_CALL(); - Mutex::Autolock l(mInFlightLock); - - ssize_t res; - res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers)); - if (res < 0) return res; - - return OK; -} - -/** - * Camera HAL device callback methods - */ - -void Camera3Device::processCaptureResult(const camera3_capture_result *result) { - ATRACE_CALL(); - - status_t res; - - uint32_t frameNumber = result->frame_number; - if (result->result == NULL && result->num_output_buffers == 0) { - SET_ERR("No result data provided by HAL for frame %d", - frameNumber); - return; - } - - // Get capture timestamp from list of in-flight requests, where it was added - // by the shutter notification for this frame. Then update the in-flight - // status and remove the in-flight entry if all result data has been - // received. - nsecs_t timestamp = 0; - { - Mutex::Autolock l(mInFlightLock); - ssize_t idx = mInFlightMap.indexOfKey(frameNumber); - if (idx == NAME_NOT_FOUND) { - SET_ERR("Unknown frame number for capture result: %d", - frameNumber); - return; - } - InFlightRequest &request = mInFlightMap.editValueAt(idx); - timestamp = request.captureTimestamp; - if (timestamp == 0) { - SET_ERR("Called before shutter notify for frame %d", - frameNumber); - return; - } - - if (result->result != NULL) { - if (request.haveResultMetadata) { - SET_ERR("Called multiple times with metadata for frame %d", - frameNumber); - return; - } - request.haveResultMetadata = true; - } - - request.numBuffersLeft -= result->num_output_buffers; - - if (request.numBuffersLeft < 0) { - SET_ERR("Too many buffers returned for frame %d", - frameNumber); - return; - } - - if (request.haveResultMetadata && request.numBuffersLeft == 0) { - ATRACE_ASYNC_END("frame capture", frameNumber); - mInFlightMap.removeItemsAt(idx, 1); - } - - // Sanity check - if we have too many in-flight frames, something has - // likely gone wrong - if (mInFlightMap.size() > kInFlightWarnLimit) { - CLOGE("In-flight list too large: %d", mInFlightMap.size()); - } - - } - - // Process the result metadata, if provided - if (result->result != NULL) { - Mutex::Autolock l(mOutputLock); - - if (frameNumber != mNextResultFrameNumber) { - SET_ERR("Out-of-order capture result metadata submitted! 
" - "(got frame number %d, expecting %d)", - frameNumber, mNextResultFrameNumber); - return; - } - mNextResultFrameNumber++; - - CameraMetadata &captureResult = - *mResultQueue.insert(mResultQueue.end(), CameraMetadata()); - - captureResult = result->result; - if (captureResult.update(ANDROID_REQUEST_FRAME_COUNT, - (int32_t*)&frameNumber, 1) != OK) { - SET_ERR("Failed to set frame# in metadata (%d)", - frameNumber); - } else { - ALOGVV("%s: Camera %d: Set frame# in metadata (%d)", - __FUNCTION__, mId, frameNumber); - } - - // Check that there's a timestamp in the result metadata - - camera_metadata_entry entry = - captureResult.find(ANDROID_SENSOR_TIMESTAMP); - if (entry.count == 0) { - SET_ERR("No timestamp provided by HAL for frame %d!", - frameNumber); - } else if (timestamp != entry.data.i64[0]) { - SET_ERR("Timestamp mismatch between shutter notify and result" - " metadata for frame %d (%lld vs %lld respectively)", - frameNumber, timestamp, entry.data.i64[0]); - } - } // scope for mOutputLock - - // Return completed buffers to their streams with the timestamp - - for (size_t i = 0; i < result->num_output_buffers; i++) { - Camera3Stream *stream = - Camera3Stream::cast(result->output_buffers[i].stream); - res = stream->returnBuffer(result->output_buffers[i], timestamp); - // Note: stream may be deallocated at this point, if this buffer was the - // last reference to it. - if (res != OK) { - SET_ERR("Can't return buffer %d for frame %d to its stream: " - " %s (%d)", i, frameNumber, strerror(-res), res); - } - } - - // Finally, signal any waiters for new frames - - if (result->result != NULL) { - mResultSignal.signal(); - } - -} - - - -void Camera3Device::notify(const camera3_notify_msg *msg) { - ATRACE_CALL(); - NotificationListener *listener; - { - Mutex::Autolock l(mOutputLock); - listener = mListener; - } - - if (msg == NULL) { - SET_ERR("HAL sent NULL notify message!"); - return; - } - - switch (msg->type) { - case CAMERA3_MSG_ERROR: { - int streamId = 0; - if (msg->message.error.error_stream != NULL) { - Camera3Stream *stream = - Camera3Stream::cast( - msg->message.error.error_stream); - streamId = stream->getId(); - } - ALOGV("Camera %d: %s: HAL error, frame %d, stream %d: %d", - mId, __FUNCTION__, msg->message.error.frame_number, - streamId, msg->message.error.error_code); - if (listener != NULL) { - listener->notifyError(msg->message.error.error_code, - msg->message.error.frame_number, streamId); - } - break; - } - case CAMERA3_MSG_SHUTTER: { - ssize_t idx; - uint32_t frameNumber = msg->message.shutter.frame_number; - nsecs_t timestamp = msg->message.shutter.timestamp; - // Verify ordering of shutter notifications - { - Mutex::Autolock l(mOutputLock); - if (frameNumber != mNextShutterFrameNumber) { - SET_ERR("Shutter notification out-of-order. 
Expected " - "notification for frame %d, got frame %d", - mNextShutterFrameNumber, frameNumber); - break; - } - mNextShutterFrameNumber++; - } - - // Set timestamp for the request in the in-flight tracking - { - Mutex::Autolock l(mInFlightLock); - idx = mInFlightMap.indexOfKey(frameNumber); - if (idx >= 0) { - mInFlightMap.editValueAt(idx).captureTimestamp = timestamp; - } - } - if (idx < 0) { - SET_ERR("Shutter notification for non-existent frame number %d", - frameNumber); - break; - } - ALOGVV("Camera %d: %s: Shutter fired for frame %d at %lld", - mId, __FUNCTION__, frameNumber, timestamp); - // Call listener, if any - if (listener != NULL) { - listener->notifyShutter(frameNumber, timestamp); - } - break; - } - default: - SET_ERR("Unknown notify message from HAL: %d", - msg->type); - } -} - -/** - * RequestThread inner class methods - */ - -Camera3Device::RequestThread::RequestThread(wp parent, - camera3_device_t *hal3Device) : - Thread(false), - mParent(parent), - mHal3Device(hal3Device), - mId(getId(parent)), - mReconfigured(false), - mDoPause(false), - mPaused(true), - mFrameNumber(0), - mLatestRequestId(NAME_NOT_FOUND) { -} - -void Camera3Device::RequestThread::configurationComplete() { - Mutex::Autolock l(mRequestLock); - mReconfigured = true; -} - -status_t Camera3Device::RequestThread::queueRequest( - sp request) { - Mutex::Autolock l(mRequestLock); - mRequestQueue.push_back(request); - - return OK; -} - - -status_t Camera3Device::RequestThread::queueTrigger( - RequestTrigger trigger[], - size_t count) { - - Mutex::Autolock l(mTriggerMutex); - status_t ret; - - for (size_t i = 0; i < count; ++i) { - ret = queueTriggerLocked(trigger[i]); - - if (ret != OK) { - return ret; - } - } - - return OK; -} - -int Camera3Device::RequestThread::getId(const wp &device) { - sp d = device.promote(); - if (d != NULL) return d->mId; - return 0; -} - -status_t Camera3Device::RequestThread::queueTriggerLocked( - RequestTrigger trigger) { - - uint32_t tag = trigger.metadataTag; - ssize_t index = mTriggerMap.indexOfKey(tag); - - switch (trigger.getTagType()) { - case TYPE_BYTE: - // fall-through - case TYPE_INT32: - break; - default: - ALOGE("%s: Type not supported: 0x%x", __FUNCTION__, - trigger.getTagType()); - return INVALID_OPERATION; - } - - /** - * Collect only the latest trigger, since we only have 1 field - * in the request settings per trigger tag, and can't send more than 1 - * trigger per request. 
- */ - if (index != NAME_NOT_FOUND) { - mTriggerMap.editValueAt(index) = trigger; - } else { - mTriggerMap.add(tag, trigger); - } - - return OK; -} - -status_t Camera3Device::RequestThread::setRepeatingRequests( - const RequestList &requests) { - Mutex::Autolock l(mRequestLock); - mRepeatingRequests.clear(); - mRepeatingRequests.insert(mRepeatingRequests.begin(), - requests.begin(), requests.end()); - return OK; -} - -status_t Camera3Device::RequestThread::clearRepeatingRequests() { - Mutex::Autolock l(mRequestLock); - mRepeatingRequests.clear(); - return OK; -} - -void Camera3Device::RequestThread::setPaused(bool paused) { - Mutex::Autolock l(mPauseLock); - mDoPause = paused; - mDoPauseSignal.signal(); -} - -status_t Camera3Device::RequestThread::waitUntilPaused(nsecs_t timeout) { - ATRACE_CALL(); - status_t res; - Mutex::Autolock l(mPauseLock); - while (!mPaused) { - res = mPausedSignal.waitRelative(mPauseLock, timeout); - if (res == TIMED_OUT) { - return res; - } - } - return OK; -} - -status_t Camera3Device::RequestThread::waitUntilRequestProcessed( - int32_t requestId, nsecs_t timeout) { - Mutex::Autolock l(mLatestRequestMutex); - status_t res; - while (mLatestRequestId != requestId) { - nsecs_t startTime = systemTime(); - - res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout); - if (res != OK) return res; - - timeout -= (systemTime() - startTime); - } - - return OK; -} - - - -bool Camera3Device::RequestThread::threadLoop() { - - status_t res; - - // Handle paused state. - if (waitIfPaused()) { - return true; - } - - // Get work to do - - sp nextRequest = waitForNextRequest(); - if (nextRequest == NULL) { - return true; - } - - // Create request to HAL - camera3_capture_request_t request = camera3_capture_request_t(); - Vector outputBuffers; - - // Insert any queued triggers (before metadata is locked) - int32_t triggerCount; - res = insertTriggers(nextRequest); - if (res < 0) { - SET_ERR("RequestThread: Unable to insert triggers " - "(capture request %d, HAL device: %s (%d)", - (mFrameNumber+1), strerror(-res), res); - cleanUpFailedRequest(request, nextRequest, outputBuffers); - return false; - } - triggerCount = res; - - bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0); - - // If the request is the same as last, or we had triggers last time - if (mPrevRequest != nextRequest || triggersMixedIn) { - /** - * The request should be presorted so accesses in HAL - * are O(logn). Sidenote, sorting a sorted metadata is nop. 
- */ - nextRequest->mSettings.sort(); - request.settings = nextRequest->mSettings.getAndLock(); - mPrevRequest = nextRequest; - ALOGVV("%s: Request settings are NEW", __FUNCTION__); - - IF_ALOGV() { - camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t(); - find_camera_metadata_ro_entry( - request.settings, - ANDROID_CONTROL_AF_TRIGGER, - &e - ); - if (e.count > 0) { - ALOGV("%s: Request (frame num %d) had AF trigger 0x%x", - __FUNCTION__, - mFrameNumber+1, - e.data.u8[0]); - } - } - } else { - // leave request.settings NULL to indicate 'reuse latest given' - ALOGVV("%s: Request settings are REUSED", - __FUNCTION__); - } - - camera3_stream_buffer_t inputBuffer; - - // Fill in buffers - - if (nextRequest->mInputStream != NULL) { - request.input_buffer = &inputBuffer; - res = nextRequest->mInputStream->getInputBuffer(&inputBuffer); - if (res != OK) { - SET_ERR("RequestThread: Can't get input buffer, skipping request:" - " %s (%d)", strerror(-res), res); - cleanUpFailedRequest(request, nextRequest, outputBuffers); - return true; - } - } else { - request.input_buffer = NULL; - } - - outputBuffers.insertAt(camera3_stream_buffer_t(), 0, - nextRequest->mOutputStreams.size()); - request.output_buffers = outputBuffers.array(); - for (size_t i = 0; i < nextRequest->mOutputStreams.size(); i++) { - res = nextRequest->mOutputStreams.editItemAt(i)-> - getBuffer(&outputBuffers.editItemAt(i)); - if (res != OK) { - SET_ERR("RequestThread: Can't get output buffer, skipping request:" - "%s (%d)", strerror(-res), res); - cleanUpFailedRequest(request, nextRequest, outputBuffers); - return true; - } - request.num_output_buffers++; - } - - request.frame_number = mFrameNumber++; - - // Log request in the in-flight queue - sp parent = mParent.promote(); - if (parent == NULL) { - CLOGE("RequestThread: Parent is gone"); - cleanUpFailedRequest(request, nextRequest, outputBuffers); - return false; - } - - res = parent->registerInFlight(request.frame_number, - request.num_output_buffers); - if (res != OK) { - SET_ERR("RequestThread: Unable to register new in-flight request:" - " %s (%d)", strerror(-res), res); - cleanUpFailedRequest(request, nextRequest, outputBuffers); - return false; - } - - // Submit request and block until ready for next one - ATRACE_ASYNC_BEGIN("frame capture", request.frame_number); - ATRACE_BEGIN("camera3->process_capture_request"); - res = mHal3Device->ops->process_capture_request(mHal3Device, &request); - ATRACE_END(); - - if (res != OK) { - SET_ERR("RequestThread: Unable to submit capture request %d to HAL" - " device: %s (%d)", request.frame_number, strerror(-res), res); - cleanUpFailedRequest(request, nextRequest, outputBuffers); - return false; - } - - if (request.settings != NULL) { - nextRequest->mSettings.unlock(request.settings); - } - - // Remove any previously queued triggers (after unlock) - res = removeTriggers(mPrevRequest); - if (res != OK) { - SET_ERR("RequestThread: Unable to remove triggers " - "(capture request %d, HAL device: %s (%d)", - request.frame_number, strerror(-res), res); - return false; - } - mPrevTriggers = triggerCount; - - // Read android.request.id from the request settings metadata - // - inform waitUntilRequestProcessed thread of a new request ID - { - Mutex::Autolock al(mLatestRequestMutex); - - camera_metadata_entry_t requestIdEntry = - nextRequest->mSettings.find(ANDROID_REQUEST_ID); - if (requestIdEntry.count > 0) { - mLatestRequestId = requestIdEntry.data.i32[0]; - } else { - ALOGW("%s: Did not have android.request.id set in the request", - 
__FUNCTION__); - mLatestRequestId = NAME_NOT_FOUND; - } - - mLatestRequestSignal.signal(); - } - - // Return input buffer back to framework - if (request.input_buffer != NULL) { - Camera3Stream *stream = - Camera3Stream::cast(request.input_buffer->stream); - res = stream->returnInputBuffer(*(request.input_buffer)); - // Note: stream may be deallocated at this point, if this buffer was the - // last reference to it. - if (res != OK) { - ALOGE("%s: RequestThread: Can't return input buffer for frame %d to" - " its stream:%s (%d)", __FUNCTION__, - request.frame_number, strerror(-res), res); - // TODO: Report error upstream - } - } - - - - return true; -} - -void Camera3Device::RequestThread::cleanUpFailedRequest( - camera3_capture_request_t &request, - sp &nextRequest, - Vector &outputBuffers) { - - if (request.settings != NULL) { - nextRequest->mSettings.unlock(request.settings); - } - if (request.input_buffer != NULL) { - request.input_buffer->status = CAMERA3_BUFFER_STATUS_ERROR; - nextRequest->mInputStream->returnInputBuffer(*(request.input_buffer)); - } - for (size_t i = 0; i < request.num_output_buffers; i++) { - outputBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR; - nextRequest->mOutputStreams.editItemAt(i)->returnBuffer( - outputBuffers[i], 0); - } -} - -sp - Camera3Device::RequestThread::waitForNextRequest() { - status_t res; - sp nextRequest; - - // Optimized a bit for the simple steady-state case (single repeating - // request), to avoid putting that request in the queue temporarily. - Mutex::Autolock l(mRequestLock); - - while (mRequestQueue.empty()) { - if (!mRepeatingRequests.empty()) { - // Always atomically enqueue all requests in a repeating request - // list. Guarantees a complete in-sequence set of captures to - // application. - const RequestList &requests = mRepeatingRequests; - RequestList::const_iterator firstRequest = - requests.begin(); - nextRequest = *firstRequest; - mRequestQueue.insert(mRequestQueue.end(), - ++firstRequest, - requests.end()); - // No need to wait any longer - break; - } - - res = mRequestSignal.waitRelative(mRequestLock, kRequestTimeout); - - if (res == TIMED_OUT) { - // Signal that we're paused by starvation - Mutex::Autolock pl(mPauseLock); - if (mPaused == false) { - mPaused = true; - mPausedSignal.signal(); - } - // Stop waiting for now and let thread management happen - return NULL; - } - } - - if (nextRequest == NULL) { - // Don't have a repeating request already in hand, so queue - // must have an entry now. - RequestList::iterator firstRequest = - mRequestQueue.begin(); - nextRequest = *firstRequest; - mRequestQueue.erase(firstRequest); - } - - // Not paused - Mutex::Autolock pl(mPauseLock); - mPaused = false; - - // Check if we've reconfigured since last time, and reset the preview - // request if so. Can't use 'NULL request == repeat' across configure calls. - if (mReconfigured) { - mPrevRequest.clear(); - mReconfigured = false; - } - - return nextRequest; -} - -bool Camera3Device::RequestThread::waitIfPaused() { - status_t res; - Mutex::Autolock l(mPauseLock); - while (mDoPause) { - // Signal that we're paused by request - if (mPaused == false) { - mPaused = true; - mPausedSignal.signal(); - } - res = mDoPauseSignal.waitRelative(mPauseLock, kRequestTimeout); - if (res == TIMED_OUT) { - return true; - } - } - // We don't set mPaused to false here, because waitForNextRequest needs - // to further manage the paused state in case of starvation. 
- return false; -} - -void Camera3Device::RequestThread::setErrorState(const char *fmt, ...) { - sp parent = mParent.promote(); - if (parent != NULL) { - va_list args; - va_start(args, fmt); - - parent->setErrorStateV(fmt, args); - - va_end(args); - } -} - -status_t Camera3Device::RequestThread::insertTriggers( - const sp &request) { - - Mutex::Autolock al(mTriggerMutex); - - CameraMetadata &metadata = request->mSettings; - size_t count = mTriggerMap.size(); - - for (size_t i = 0; i < count; ++i) { - RequestTrigger trigger = mTriggerMap.valueAt(i); - - uint32_t tag = trigger.metadataTag; - camera_metadata_entry entry = metadata.find(tag); - - if (entry.count > 0) { - /** - * Already has an entry for this trigger in the request. - * Rewrite it with our requested trigger value. - */ - RequestTrigger oldTrigger = trigger; - - oldTrigger.entryValue = entry.data.u8[0]; - - mTriggerReplacedMap.add(tag, oldTrigger); - } else { - /** - * More typical, no trigger entry, so we just add it - */ - mTriggerRemovedMap.add(tag, trigger); - } - - status_t res; - - switch (trigger.getTagType()) { - case TYPE_BYTE: { - uint8_t entryValue = static_cast(trigger.entryValue); - res = metadata.update(tag, - &entryValue, - /*count*/1); - break; - } - case TYPE_INT32: - res = metadata.update(tag, - &trigger.entryValue, - /*count*/1); - break; - default: - ALOGE("%s: Type not supported: 0x%x", - __FUNCTION__, - trigger.getTagType()); - return INVALID_OPERATION; - } - - if (res != OK) { - ALOGE("%s: Failed to update request metadata with trigger tag %s" - ", value %d", __FUNCTION__, trigger.getTagName(), - trigger.entryValue); - return res; - } - - ALOGV("%s: Mixed in trigger %s, value %d", __FUNCTION__, - trigger.getTagName(), - trigger.entryValue); - } - - mTriggerMap.clear(); - - return count; -} - -status_t Camera3Device::RequestThread::removeTriggers( - const sp &request) { - Mutex::Autolock al(mTriggerMutex); - - CameraMetadata &metadata = request->mSettings; - - /** - * Replace all old entries with their old values. - */ - for (size_t i = 0; i < mTriggerReplacedMap.size(); ++i) { - RequestTrigger trigger = mTriggerReplacedMap.valueAt(i); - - status_t res; - - uint32_t tag = trigger.metadataTag; - switch (trigger.getTagType()) { - case TYPE_BYTE: { - uint8_t entryValue = static_cast(trigger.entryValue); - res = metadata.update(tag, - &entryValue, - /*count*/1); - break; - } - case TYPE_INT32: - res = metadata.update(tag, - &trigger.entryValue, - /*count*/1); - break; - default: - ALOGE("%s: Type not supported: 0x%x", - __FUNCTION__, - trigger.getTagType()); - return INVALID_OPERATION; - } - - if (res != OK) { - ALOGE("%s: Failed to restore request metadata with trigger tag %s" - ", trigger value %d", __FUNCTION__, - trigger.getTagName(), trigger.entryValue); - return res; - } - } - mTriggerReplacedMap.clear(); - - /** - * Remove all new entries. 
- */ - for (size_t i = 0; i < mTriggerRemovedMap.size(); ++i) { - RequestTrigger trigger = mTriggerRemovedMap.valueAt(i); - status_t res = metadata.erase(trigger.metadataTag); - - if (res != OK) { - ALOGE("%s: Failed to erase metadata with trigger tag %s" - ", trigger value %d", __FUNCTION__, - trigger.getTagName(), trigger.entryValue); - return res; - } - } - mTriggerRemovedMap.clear(); - - return OK; -} - - - -/** - * Static callback forwarding methods from HAL to instance - */ - -void Camera3Device::sProcessCaptureResult(const camera3_callback_ops *cb, - const camera3_capture_result *result) { - Camera3Device *d = - const_cast(static_cast(cb)); - d->processCaptureResult(result); -} - -void Camera3Device::sNotify(const camera3_callback_ops *cb, - const camera3_notify_msg *msg) { - Camera3Device *d = - const_cast(static_cast(cb)); - d->notify(msg); -} - -}; // namespace android diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/Camera3Device.h deleted file mode 100644 index 2328f89..0000000 --- a/services/camera/libcameraservice/Camera3Device.h +++ /dev/null @@ -1,413 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA3DEVICE_H -#define ANDROID_SERVERS_CAMERA3DEVICE_H - -#include -#include -#include -#include -#include - -#include "CameraDeviceBase.h" -#include "camera3/Camera3Stream.h" -#include "camera3/Camera3OutputStream.h" -#include "camera3/Camera3ZslStream.h" - -#include "hardware/camera3.h" - -/** - * Function pointer types with C calling convention to - * use for HAL callback functions. - */ -extern "C" { - typedef void (callbacks_process_capture_result_t)( - const struct camera3_callback_ops *, - const camera3_capture_result_t *); - - typedef void (callbacks_notify_t)( - const struct camera3_callback_ops *, - const camera3_notify_msg_t *); -} - -namespace android { - -/** - * CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_3_0 - */ -class Camera3Device : - public CameraDeviceBase, - private camera3_callback_ops { - public: - Camera3Device(int id); - - virtual ~Camera3Device(); - - /** - * CameraDeviceBase interface - */ - - virtual int getId() const; - - // Transitions to idle state on success. 
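The static forwarding methods sProcessCaptureResult() and sNotify() shown above work because Camera3Device privately inherits camera3_callback_ops: the callback-table pointer the HAL calls back with is a base sub-object of the device, so the thunk can cast it straight back to the owning instance. The same idiom, reduced to a self-contained sketch with hypothetical names (Widget, widget_callback_ops) rather than the real HAL types:

    // Hypothetical illustration of the static-thunk idiom used above; only
    // the shape of the pattern matches the deleted Camera3Device code.
    struct widget_callback_ops {
        void (*on_event)(const widget_callback_ops *ops, int code);
    };

    class Widget : private widget_callback_ops {
      public:
        Widget() : lastCode_(0) { on_event = &Widget::sOnEvent; }

        // The table handed to C code is the object itself, so the thunk can
        // recover the instance from the table pointer.
        const widget_callback_ops *callbackTable() const { return this; }

      private:
        void onEvent(int code) { lastCode_ = code; }

        static void sOnEvent(const widget_callback_ops *ops, int code) {
            const_cast<Widget *>(static_cast<const Widget *>(ops))->onEvent(code);
        }

        int lastCode_;
    };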
- virtual status_t initialize(camera_module_t *module); - virtual status_t disconnect(); - virtual status_t dump(int fd, const Vector &args); - virtual const CameraMetadata& info() const; - - // Capture and setStreamingRequest will configure streams if currently in - // idle state - virtual status_t capture(CameraMetadata &request); - virtual status_t setStreamingRequest(const CameraMetadata &request); - virtual status_t clearStreamingRequest(); - - virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout); - - // Actual stream creation/deletion is delayed until first request is submitted - // If adding streams while actively capturing, will pause device before adding - // stream, reconfiguring device, and unpausing. - virtual status_t createStream(sp consumer, - uint32_t width, uint32_t height, int format, size_t size, - int *id); - virtual status_t createInputStream( - uint32_t width, uint32_t height, int format, - int *id); - virtual status_t createZslStream( - uint32_t width, uint32_t height, - int depth, - /*out*/ - int *id, - sp* zslStream); - virtual status_t createReprocessStreamFromStream(int outputId, int *id); - - virtual status_t getStreamInfo(int id, - uint32_t *width, uint32_t *height, uint32_t *format); - virtual status_t setStreamTransform(int id, int transform); - - virtual status_t deleteStream(int id); - virtual status_t deleteReprocessStream(int id); - - virtual status_t createDefaultRequest(int templateId, CameraMetadata *request); - - // Transitions to the idle state on success - virtual status_t waitUntilDrained(); - - virtual status_t setNotifyCallback(NotificationListener *listener); - virtual bool willNotify3A(); - virtual status_t waitForNextFrame(nsecs_t timeout); - virtual status_t getNextFrame(CameraMetadata *frame); - - virtual status_t triggerAutofocus(uint32_t id); - virtual status_t triggerCancelAutofocus(uint32_t id); - virtual status_t triggerPrecaptureMetering(uint32_t id); - - virtual status_t pushReprocessBuffer(int reprocessStreamId, - buffer_handle_t *buffer, wp listener); - - private: - static const size_t kInFlightWarnLimit = 20; - static const nsecs_t kShutdownTimeout = 5000000000; // 5 sec - struct RequestTrigger; - - Mutex mLock; - - /**** Scope for mLock ****/ - - const int mId; - camera3_device_t *mHal3Device; - - CameraMetadata mDeviceInfo; - vendor_tag_query_ops_t mVendorTagOps; - - enum { - STATUS_ERROR, - STATUS_UNINITIALIZED, - STATUS_IDLE, - STATUS_ACTIVE - } mStatus; - - // Tracking cause of fatal errors when in STATUS_ERROR - String8 mErrorCause; - - // Mapping of stream IDs to stream instances - typedef KeyedVector > - StreamSet; - - StreamSet mOutputStreams; - sp mInputStream; - int mNextStreamId; - bool mNeedConfig; - - // Need to hold on to stream references until configure completes. - Vector > mDeletedStreams; - - /**** End scope for mLock ****/ - - class CaptureRequest : public LightRefBase { - public: - CameraMetadata mSettings; - sp mInputStream; - Vector > - mOutputStreams; - }; - typedef List > RequestList; - - /** - * Lock-held version of waitUntilDrained. Will transition to IDLE on - * success. - */ - status_t waitUntilDrainedLocked(); - - /** - * Do common work for setting up a streaming or single capture request. - * On success, will transition to ACTIVE if in IDLE. - */ - sp setUpRequestLocked(const CameraMetadata &request); - - /** - * Build a CaptureRequest request from the CameraDeviceBase request - * settings. 
- */ - sp createCaptureRequest(const CameraMetadata &request); - - /** - * Take the currently-defined set of streams and configure the HAL to use - * them. This is a long-running operation (may be several hundered ms). - */ - status_t configureStreamsLocked(); - - /** - * Set device into an error state due to some fatal failure, and set an - * error message to indicate why. Only the first call's message will be - * used. The message is also sent to the log. - */ - void setErrorState(const char *fmt, ...); - void setErrorStateV(const char *fmt, va_list args); - void setErrorStateLocked(const char *fmt, ...); - void setErrorStateLockedV(const char *fmt, va_list args); - - struct RequestTrigger { - // Metadata tag number, e.g. android.control.aePrecaptureTrigger - uint32_t metadataTag; - // Metadata value, e.g. 'START' or the trigger ID - int32_t entryValue; - - // The last part of the fully qualified path, e.g. afTrigger - const char *getTagName() const { - return get_camera_metadata_tag_name(metadataTag) ?: "NULL"; - } - - // e.g. TYPE_BYTE, TYPE_INT32, etc. - int getTagType() const { - return get_camera_metadata_tag_type(metadataTag); - } - }; - - /** - * Thread for managing capture request submission to HAL device. - */ - class RequestThread : public Thread { - - public: - - RequestThread(wp parent, - camera3_device_t *hal3Device); - - /** - * Call after stream (re)-configuration is completed. - */ - void configurationComplete(); - - /** - * Set or clear the list of repeating requests. Does not block - * on either. Use waitUntilPaused to wait until request queue - * has emptied out. - */ - status_t setRepeatingRequests(const RequestList& requests); - status_t clearRepeatingRequests(); - - status_t queueRequest(sp request); - - /** - * Queue a trigger to be dispatched with the next outgoing - * process_capture_request. The settings for that request only - * will be temporarily rewritten to add the trigger tag/value. - * Subsequent requests will not be rewritten (for this tag). - */ - status_t queueTrigger(RequestTrigger trigger[], size_t count); - - /** - * Pause/unpause the capture thread. Doesn't block, so use - * waitUntilPaused to wait until the thread is paused. - */ - void setPaused(bool paused); - - /** - * Wait until thread is paused, either due to setPaused(true) - * or due to lack of input requests. Returns TIMED_OUT in case - * the thread does not pause within the timeout. - */ - status_t waitUntilPaused(nsecs_t timeout); - - /** - * Wait until thread processes the capture request with settings' - * android.request.id == requestId. - * - * Returns TIMED_OUT in case the thread does not process the request - * within the timeout. - */ - status_t waitUntilRequestProcessed(int32_t requestId, nsecs_t timeout); - - protected: - - virtual bool threadLoop(); - - private: - static int getId(const wp &device); - - status_t queueTriggerLocked(RequestTrigger trigger); - // Mix-in queued triggers into this request - int32_t insertTriggers(const sp &request); - // Purge the queued triggers from this request, - // restoring the old field values for those tags. - status_t removeTriggers(const sp &request); - - static const nsecs_t kRequestTimeout = 50e6; // 50 ms - - // Waits for a request, or returns NULL if times out. - sp waitForNextRequest(); - - // Return buffers, etc, for a request that couldn't be fully - // constructed. The buffers will be returned in the ERROR state - // to mark them as not having valid data. - // All arguments will be modified. 
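waitUntilRequestProcessed(), declared above in this RequestThread block and implemented earlier in the hunk, pairs with the android.request.id value that RequestThread::threadLoop() reads out of each submitted request's settings. A caller-side sketch of that handshake follows; it is illustrative only: the template choice, request ID, and 1-second timeout are assumptions, and output-stream setup is omitted.

    // Tag a capture with android.request.id, submit it, and block until the
    // request thread has picked it up.
    status_t captureAndWait(const sp<Camera3Device>& device) {
        CameraMetadata settings;
        status_t res = device->createDefaultRequest(
                CAMERA3_TEMPLATE_STILL_CAPTURE, &settings);
        if (res != OK) return res;

        int32_t requestId = 1;
        settings.update(ANDROID_REQUEST_ID, &requestId, 1);
        // ANDROID_REQUEST_OUTPUT_STREAMS must also name valid stream IDs, as
        // createCaptureRequest() requires; omitted here.

        res = device->capture(settings);
        if (res != OK) return res;

        // Forwarded to RequestThread::waitUntilRequestProcessed() internally.
        return device->waitUntilRequestReceived(requestId,
                1000000000LL /* 1 s */);
    }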
- void cleanUpFailedRequest(camera3_capture_request_t &request, - sp &nextRequest, - Vector &outputBuffers); - - // Pause handling - bool waitIfPaused(); - - // Relay error to parent device object setErrorState - void setErrorState(const char *fmt, ...); - - wp mParent; - camera3_device_t *mHal3Device; - - const int mId; - - Mutex mRequestLock; - Condition mRequestSignal; - RequestList mRequestQueue; - RequestList mRepeatingRequests; - - bool mReconfigured; - - // Used by waitIfPaused, waitForNextRequest, and waitUntilPaused - Mutex mPauseLock; - bool mDoPause; - Condition mDoPauseSignal; - bool mPaused; - Condition mPausedSignal; - - sp mPrevRequest; - int32_t mPrevTriggers; - - uint32_t mFrameNumber; - - Mutex mLatestRequestMutex; - Condition mLatestRequestSignal; - // android.request.id for latest process_capture_request - int32_t mLatestRequestId; - - typedef KeyedVector TriggerMap; - Mutex mTriggerMutex; - TriggerMap mTriggerMap; - TriggerMap mTriggerRemovedMap; - TriggerMap mTriggerReplacedMap; - }; - sp mRequestThread; - - /** - * In-flight queue for tracking completion of capture requests. - */ - - struct InFlightRequest { - // Set by notify() SHUTTER call. - nsecs_t captureTimestamp; - // Set by process_capture_result call with valid metadata - bool haveResultMetadata; - // Decremented by calls to process_capture_result with valid output - // buffers - int numBuffersLeft; - - InFlightRequest() : - captureTimestamp(0), - haveResultMetadata(false), - numBuffersLeft(0) { - } - - explicit InFlightRequest(int numBuffers) : - captureTimestamp(0), - haveResultMetadata(false), - numBuffersLeft(numBuffers) { - } - }; - // Map from frame number to the in-flight request state - typedef KeyedVector InFlightMap; - - Mutex mInFlightLock; // Protects mInFlightMap - InFlightMap mInFlightMap; - - status_t registerInFlight(int32_t frameNumber, int32_t numBuffers); - - /** - * Output result queue and current HAL device 3A state - */ - - // Lock for output side of device - Mutex mOutputLock; - - /**** Scope for mOutputLock ****/ - - uint32_t mNextResultFrameNumber; - uint32_t mNextShutterFrameNumber; - List mResultQueue; - Condition mResultSignal; - NotificationListener *mListener; - - /**** End scope for mOutputLock ****/ - - /** - * Callback functions from HAL device - */ - void processCaptureResult(const camera3_capture_result *result); - - void notify(const camera3_notify_msg *msg); - - /** - * Static callback forwarding methods from HAL to instance - */ - static callbacks_process_capture_result_t sProcessCaptureResult; - - static callbacks_notify_t sNotify; - -}; // class Camera3Device - -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/CameraClient.cpp b/services/camera/libcameraservice/CameraClient.cpp deleted file mode 100644 index be78f69..0000000 --- a/services/camera/libcameraservice/CameraClient.cpp +++ /dev/null @@ -1,971 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
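The InFlightRequest structure above is the hub of three code paths removed earlier in this patch: registerInFlight() seeds an entry keyed by frame number, the SHUTTER notification fills in captureTimestamp, and processCaptureResult() records the metadata and retires buffers until the entry can be dropped. A condensed, self-contained restatement of that bookkeeping, using std::map instead of KeyedVector purely for illustration:

    #include <cstdint>
    #include <map>

    // Hypothetical stand-ins; field names mirror InFlightRequest above.
    struct InFlight {
        int64_t captureTimestamp;
        bool    haveResultMetadata;
        int     numBuffersLeft;
    };

    static std::map<uint32_t, InFlight> gInFlight;

    // registerInFlight(): one entry per submitted frame number.
    void registerFrame(uint32_t frameNumber, int numBuffers) {
        InFlight r = { 0, false, numBuffers };
        gInFlight[frameNumber] = r;
    }

    // notify(SHUTTER): record the capture timestamp.
    void onShutter(uint32_t frameNumber, int64_t timestamp) {
        gInFlight[frameNumber].captureTimestamp = timestamp;
    }

    // processCaptureResult(): drop the entry once the metadata and the last
    // output buffer have both arrived, as the removed implementation does.
    void onCaptureResult(uint32_t frameNumber, bool hasMetadata,
                         int buffersReturned) {
        std::map<uint32_t, InFlight>::iterator it = gInFlight.find(frameNumber);
        if (it == gInFlight.end()) return;   // unknown frame: error path
        if (hasMetadata) it->second.haveResultMetadata = true;
        it->second.numBuffersLeft -= buffersReturned;
        if (it->second.haveResultMetadata && it->second.numBuffersLeft <= 0) {
            gInFlight.erase(it);
        }
    }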
- */ - -#define LOG_TAG "CameraClient" -//#define LOG_NDEBUG 0 - -#include -#include - -#include "CameraClient.h" -#include "CameraHardwareInterface.h" -#include "CameraService.h" - -namespace android { - -#define LOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__); -#define LOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__); - -static int getCallingPid() { - return IPCThreadState::self()->getCallingPid(); -} - -CameraClient::CameraClient(const sp& cameraService, - const sp& cameraClient, - const String16& clientPackageName, - int cameraId, int cameraFacing, - int clientPid, int clientUid, - int servicePid): - Client(cameraService, cameraClient, clientPackageName, - cameraId, cameraFacing, clientPid, clientUid, servicePid) -{ - int callingPid = getCallingPid(); - LOG1("CameraClient::CameraClient E (pid %d, id %d)", callingPid, cameraId); - - mHardware = NULL; - mMsgEnabled = 0; - mSurface = 0; - mPreviewWindow = 0; - mDestructionStarted = false; - - // Callback is disabled by default - mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP; - mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT); - mPlayShutterSound = true; - LOG1("CameraClient::CameraClient X (pid %d, id %d)", callingPid, cameraId); -} - -status_t CameraClient::initialize(camera_module_t *module) { - int callingPid = getCallingPid(); - status_t res; - - LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId); - - // Verify ops permissions - res = startCameraOps(); - if (res != OK) { - return res; - } - - char camera_device_name[10]; - snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId); - - mHardware = new CameraHardwareInterface(camera_device_name); - res = mHardware->initialize(&module->common); - if (res != OK) { - ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", - __FUNCTION__, mCameraId, strerror(-res), res); - mHardware.clear(); - return NO_INIT; - } - - mHardware->setCallbacks(notifyCallback, - dataCallback, - dataCallbackTimestamp, - (void *)mCameraId); - - // Enable zoom, error, focus, and metadata messages by default - enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS | - CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE); - - LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId); - return OK; -} - - -// tear down the client -CameraClient::~CameraClient() { - // this lock should never be NULL - Mutex* lock = mCameraService->getClientLockById(mCameraId); - lock->lock(); - mDestructionStarted = true; - // client will not be accessed from callback. should unlock to prevent dead-lock in disconnect - lock->unlock(); - int callingPid = getCallingPid(); - LOG1("CameraClient::~CameraClient E (pid %d, this %p)", callingPid, this); - - disconnect(); - LOG1("CameraClient::~CameraClient X (pid %d, this %p)", callingPid, this); -} - -status_t CameraClient::dump(int fd, const Vector& args) { - const size_t SIZE = 256; - char buffer[SIZE]; - - size_t len = snprintf(buffer, SIZE, "Client[%d] (%p) PID: %d\n", - mCameraId, - getRemoteCallback()->asBinder().get(), - mClientPid); - len = (len > SIZE - 1) ? 
SIZE - 1 : len; - write(fd, buffer, len); - return mHardware->dump(fd, args); -} - -// ---------------------------------------------------------------------------- - -status_t CameraClient::checkPid() const { - int callingPid = getCallingPid(); - if (callingPid == mClientPid) return NO_ERROR; - - ALOGW("attempt to use a locked camera from a different process" - " (old pid %d, new pid %d)", mClientPid, callingPid); - return EBUSY; -} - -status_t CameraClient::checkPidAndHardware() const { - status_t result = checkPid(); - if (result != NO_ERROR) return result; - if (mHardware == 0) { - ALOGE("attempt to use a camera after disconnect() (pid %d)", getCallingPid()); - return INVALID_OPERATION; - } - return NO_ERROR; -} - -status_t CameraClient::lock() { - int callingPid = getCallingPid(); - LOG1("lock (pid %d)", callingPid); - Mutex::Autolock lock(mLock); - - // lock camera to this client if the the camera is unlocked - if (mClientPid == 0) { - mClientPid = callingPid; - return NO_ERROR; - } - - // returns NO_ERROR if the client already owns the camera, EBUSY otherwise - return checkPid(); -} - -status_t CameraClient::unlock() { - int callingPid = getCallingPid(); - LOG1("unlock (pid %d)", callingPid); - Mutex::Autolock lock(mLock); - - // allow anyone to use camera (after they lock the camera) - status_t result = checkPid(); - if (result == NO_ERROR) { - if (mHardware->recordingEnabled()) { - ALOGE("Not allowed to unlock camera during recording."); - return INVALID_OPERATION; - } - mClientPid = 0; - LOG1("clear mRemoteCallback (pid %d)", callingPid); - // we need to remove the reference to ICameraClient so that when the app - // goes away, the reference count goes to 0. - mRemoteCallback.clear(); - } - return result; -} - -// connect a new client to the camera -status_t CameraClient::connect(const sp& client) { - int callingPid = getCallingPid(); - LOG1("connect E (pid %d)", callingPid); - Mutex::Autolock lock(mLock); - - if (mClientPid != 0 && checkPid() != NO_ERROR) { - ALOGW("Tried to connect to a locked camera (old pid %d, new pid %d)", - mClientPid, callingPid); - return EBUSY; - } - - if (mRemoteCallback != 0 && - (client->asBinder() == mRemoteCallback->asBinder())) { - LOG1("Connect to the same client"); - return NO_ERROR; - } - - mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP; - mClientPid = callingPid; - mRemoteCallback = client; - - LOG1("connect X (pid %d)", callingPid); - return NO_ERROR; -} - -static void disconnectWindow(const sp& window) { - if (window != 0) { - status_t result = native_window_api_disconnect(window.get(), - NATIVE_WINDOW_API_CAMERA); - if (result != NO_ERROR) { - ALOGW("native_window_api_disconnect failed: %s (%d)", strerror(-result), - result); - } - } -} - -void CameraClient::disconnect() { - int callingPid = getCallingPid(); - LOG1("disconnect E (pid %d)", callingPid); - Mutex::Autolock lock(mLock); - - // Allow both client and the media server to disconnect at all times - if (callingPid != mClientPid && callingPid != mServicePid) { - ALOGW("different client - don't disconnect"); - return; - } - - if (mClientPid <= 0) { - LOG1("camera is unlocked (mClientPid = %d), don't tear down hardware", mClientPid); - return; - } - - // Make sure disconnect() is done once and once only, whether it is called - // from the user directly, or called by the destructor. - if (mHardware == 0) return; - - LOG1("hardware teardown"); - // Before destroying mHardware, we must make sure it's in the - // idle state. - // Turn off all messages. 
- disableMsgType(CAMERA_MSG_ALL_MSGS); - mHardware->stopPreview(); - mHardware->cancelPicture(); - // Release the hardware resources. - mHardware->release(); - - // Release the held ANativeWindow resources. - if (mPreviewWindow != 0) { - disconnectWindow(mPreviewWindow); - mPreviewWindow = 0; - mHardware->setPreviewWindow(mPreviewWindow); - } - mHardware.clear(); - - CameraService::Client::disconnect(); - - LOG1("disconnect X (pid %d)", callingPid); -} - -// ---------------------------------------------------------------------------- - -status_t CameraClient::setPreviewWindow(const sp& binder, - const sp& window) { - Mutex::Autolock lock(mLock); - status_t result = checkPidAndHardware(); - if (result != NO_ERROR) return result; - - // return if no change in surface. - if (binder == mSurface) { - return NO_ERROR; - } - - if (window != 0) { - result = native_window_api_connect(window.get(), NATIVE_WINDOW_API_CAMERA); - if (result != NO_ERROR) { - ALOGE("native_window_api_connect failed: %s (%d)", strerror(-result), - result); - return result; - } - } - - // If preview has been already started, register preview buffers now. - if (mHardware->previewEnabled()) { - if (window != 0) { - native_window_set_scaling_mode(window.get(), - NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); - native_window_set_buffers_transform(window.get(), mOrientation); - result = mHardware->setPreviewWindow(window); - } - } - - if (result == NO_ERROR) { - // Everything has succeeded. Disconnect the old window and remember the - // new window. - disconnectWindow(mPreviewWindow); - mSurface = binder; - mPreviewWindow = window; - } else { - // Something went wrong after we connected to the new window, so - // disconnect here. - disconnectWindow(window); - } - - return result; -} - -// set the Surface that the preview will use -status_t CameraClient::setPreviewDisplay(const sp& surface) { - LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid()); - - sp binder(surface != 0 ? surface->getIGraphicBufferProducer()->asBinder() : 0); - sp window(surface); - return setPreviewWindow(binder, window); -} - -// set the SurfaceTextureClient that the preview will use -status_t CameraClient::setPreviewTexture( - const sp& bufferProducer) { - LOG1("setPreviewTexture(%p) (pid %d)", bufferProducer.get(), - getCallingPid()); - - sp binder; - sp window; - if (bufferProducer != 0) { - binder = bufferProducer->asBinder(); - window = new Surface(bufferProducer); - } - return setPreviewWindow(binder, window); -} - -// set the preview callback flag to affect how the received frames from -// preview are handled. 
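// Illustration only: a minimal, self-contained sketch of how the ENABLE,
// ONE_SHOT and COPY_OUT bits of the preview callback flag gate frame
// delivery; the real setPreviewCallbackFlag()/handlePreviewData() pair
// follows below. The constants and names here are hypothetical stand-ins
// for the CAMERA_FRAME_CALLBACK_FLAG_* masks, not the framework values.
#include <cstdint>
#include <cstdio>

namespace sketch {

constexpr uint32_t kFlagEnable  = 1u << 0;  // deliver preview frames at all
constexpr uint32_t kFlagOneShot = 1u << 1;  // auto-disable after one frame
constexpr uint32_t kFlagCopyOut = 1u << 2;  // copy the frame before delivery

// Returns true if a frame should be delivered, clearing the flags afterwards
// when one-shot mode is set, the same way handlePreviewData() does.
bool deliverPreviewFrame(uint32_t& flags, bool& copyOut) {
    if (!(flags & kFlagEnable)) return false;        // callback disabled
    copyOut = (flags & kFlagCopyOut) != 0;
    if (flags & kFlagOneShot) {
        flags &= ~(kFlagEnable | kFlagOneShot | kFlagCopyOut);
    }
    return true;
}

}  // namespace sketch

int main() {
    uint32_t flags = sketch::kFlagEnable | sketch::kFlagOneShot | sketch::kFlagCopyOut;
    bool copyOut = false;
    printf("first: %d\n", sketch::deliverPreviewFrame(flags, copyOut));   // 1, copyOut set
    printf("second: %d\n", sketch::deliverPreviewFrame(flags, copyOut));  // 0, one-shot consumed
    return 0;
}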
-void CameraClient::setPreviewCallbackFlag(int callback_flag) { - LOG1("setPreviewCallbackFlag(%d) (pid %d)", callback_flag, getCallingPid()); - Mutex::Autolock lock(mLock); - if (checkPidAndHardware() != NO_ERROR) return; - - mPreviewCallbackFlag = callback_flag; - if (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) { - enableMsgType(CAMERA_MSG_PREVIEW_FRAME); - } else { - disableMsgType(CAMERA_MSG_PREVIEW_FRAME); - } -} - -status_t CameraClient::setPreviewCallbackTarget( - const sp& callbackProducer) { - ALOGE("%s: Unimplemented!", __FUNCTION__); - return INVALID_OPERATION; -} - -// start preview mode -status_t CameraClient::startPreview() { - LOG1("startPreview (pid %d)", getCallingPid()); - return startCameraMode(CAMERA_PREVIEW_MODE); -} - -// start recording mode -status_t CameraClient::startRecording() { - LOG1("startRecording (pid %d)", getCallingPid()); - return startCameraMode(CAMERA_RECORDING_MODE); -} - -// start preview or recording -status_t CameraClient::startCameraMode(camera_mode mode) { - LOG1("startCameraMode(%d)", mode); - Mutex::Autolock lock(mLock); - status_t result = checkPidAndHardware(); - if (result != NO_ERROR) return result; - - switch(mode) { - case CAMERA_PREVIEW_MODE: - if (mSurface == 0 && mPreviewWindow == 0) { - LOG1("mSurface is not set yet."); - // still able to start preview in this case. - } - return startPreviewMode(); - case CAMERA_RECORDING_MODE: - if (mSurface == 0 && mPreviewWindow == 0) { - ALOGE("mSurface or mPreviewWindow must be set before startRecordingMode."); - return INVALID_OPERATION; - } - return startRecordingMode(); - default: - return UNKNOWN_ERROR; - } -} - -status_t CameraClient::startPreviewMode() { - LOG1("startPreviewMode"); - status_t result = NO_ERROR; - - // if preview has been enabled, nothing needs to be done - if (mHardware->previewEnabled()) { - return NO_ERROR; - } - - if (mPreviewWindow != 0) { - native_window_set_scaling_mode(mPreviewWindow.get(), - NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); - native_window_set_buffers_transform(mPreviewWindow.get(), - mOrientation); - } - mHardware->setPreviewWindow(mPreviewWindow); - result = mHardware->startPreview(); - - return result; -} - -status_t CameraClient::startRecordingMode() { - LOG1("startRecordingMode"); - status_t result = NO_ERROR; - - // if recording has been enabled, nothing needs to be done - if (mHardware->recordingEnabled()) { - return NO_ERROR; - } - - // if preview has not been started, start preview first - if (!mHardware->previewEnabled()) { - result = startPreviewMode(); - if (result != NO_ERROR) { - return result; - } - } - - // start recording mode - enableMsgType(CAMERA_MSG_VIDEO_FRAME); - mCameraService->playSound(CameraService::SOUND_RECORDING); - result = mHardware->startRecording(); - if (result != NO_ERROR) { - ALOGE("mHardware->startRecording() failed with status %d", result); - } - return result; -} - -// stop preview mode -void CameraClient::stopPreview() { - LOG1("stopPreview (pid %d)", getCallingPid()); - Mutex::Autolock lock(mLock); - if (checkPidAndHardware() != NO_ERROR) return; - - - disableMsgType(CAMERA_MSG_PREVIEW_FRAME); - mHardware->stopPreview(); - - mPreviewBuffer.clear(); -} - -// stop recording mode -void CameraClient::stopRecording() { - LOG1("stopRecording (pid %d)", getCallingPid()); - Mutex::Autolock lock(mLock); - if (checkPidAndHardware() != NO_ERROR) return; - - disableMsgType(CAMERA_MSG_VIDEO_FRAME); - mHardware->stopRecording(); - mCameraService->playSound(CameraService::SOUND_RECORDING); - - 
mPreviewBuffer.clear(); -} - -// release a recording frame -void CameraClient::releaseRecordingFrame(const sp& mem) { - Mutex::Autolock lock(mLock); - if (checkPidAndHardware() != NO_ERROR) return; - mHardware->releaseRecordingFrame(mem); -} - -status_t CameraClient::storeMetaDataInBuffers(bool enabled) -{ - LOG1("storeMetaDataInBuffers: %s", enabled? "true": "false"); - Mutex::Autolock lock(mLock); - if (checkPidAndHardware() != NO_ERROR) { - return UNKNOWN_ERROR; - } - return mHardware->storeMetaDataInBuffers(enabled); -} - -bool CameraClient::previewEnabled() { - LOG1("previewEnabled (pid %d)", getCallingPid()); - - Mutex::Autolock lock(mLock); - if (checkPidAndHardware() != NO_ERROR) return false; - return mHardware->previewEnabled(); -} - -bool CameraClient::recordingEnabled() { - LOG1("recordingEnabled (pid %d)", getCallingPid()); - - Mutex::Autolock lock(mLock); - if (checkPidAndHardware() != NO_ERROR) return false; - return mHardware->recordingEnabled(); -} - -status_t CameraClient::autoFocus() { - LOG1("autoFocus (pid %d)", getCallingPid()); - - Mutex::Autolock lock(mLock); - status_t result = checkPidAndHardware(); - if (result != NO_ERROR) return result; - - return mHardware->autoFocus(); -} - -status_t CameraClient::cancelAutoFocus() { - LOG1("cancelAutoFocus (pid %d)", getCallingPid()); - - Mutex::Autolock lock(mLock); - status_t result = checkPidAndHardware(); - if (result != NO_ERROR) return result; - - return mHardware->cancelAutoFocus(); -} - -// take a picture - image is returned in callback -status_t CameraClient::takePicture(int msgType) { - LOG1("takePicture (pid %d): 0x%x", getCallingPid(), msgType); - - Mutex::Autolock lock(mLock); - status_t result = checkPidAndHardware(); - if (result != NO_ERROR) return result; - - if ((msgType & CAMERA_MSG_RAW_IMAGE) && - (msgType & CAMERA_MSG_RAW_IMAGE_NOTIFY)) { - ALOGE("CAMERA_MSG_RAW_IMAGE and CAMERA_MSG_RAW_IMAGE_NOTIFY" - " cannot be both enabled"); - return BAD_VALUE; - } - - // We only accept picture related message types - // and ignore other types of messages for takePicture(). - int picMsgType = msgType - & (CAMERA_MSG_SHUTTER | - CAMERA_MSG_POSTVIEW_FRAME | - CAMERA_MSG_RAW_IMAGE | - CAMERA_MSG_RAW_IMAGE_NOTIFY | - CAMERA_MSG_COMPRESSED_IMAGE); - - enableMsgType(picMsgType); - - return mHardware->takePicture(); -} - -// set preview/capture parameters - key/value pairs -status_t CameraClient::setParameters(const String8& params) { - LOG1("setParameters (pid %d) (%s)", getCallingPid(), params.string()); - - Mutex::Autolock lock(mLock); - status_t result = checkPidAndHardware(); - if (result != NO_ERROR) return result; - - CameraParameters p(params); - return mHardware->setParameters(p); -} - -// get preview/capture parameters - key/value pairs -String8 CameraClient::getParameters() const { - Mutex::Autolock lock(mLock); - if (checkPidAndHardware() != NO_ERROR) return String8(); - - String8 params(mHardware->getParameters().flatten()); - LOG1("getParameters (pid %d) (%s)", getCallingPid(), params.string()); - return params; -} - -// enable shutter sound -status_t CameraClient::enableShutterSound(bool enable) { - LOG1("enableShutterSound (pid %d)", getCallingPid()); - - status_t result = checkPidAndHardware(); - if (result != NO_ERROR) return result; - - if (enable) { - mPlayShutterSound = true; - return OK; - } - - // Disabling shutter sound may not be allowed. In that case only - // allow the mediaserver process to disable the sound. 
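// Illustration only: the decision implemented just below, factored into a
// standalone predicate. "forcedByProperty" stands for the value of the
// ro.camera.sound.forced property and "callerIsMediaServer" for the
// getCallingPid() == getpid() check; both names are hypothetical.
#include <cstdio>

static bool mayDisableShutterSound(bool forcedByProperty, bool callerIsMediaServer) {
    // When the device forces the shutter sound, only mediaserver itself may
    // turn it off; any other caller is denied (PERMISSION_DENIED below).
    return !forcedByProperty || callerIsMediaServer;
}

int main() {
    printf("%d %d %d\n",
           mayDisableShutterSound(false, false),  // 1: sound not forced
           mayDisableShutterSound(true,  false),  // 0: forced, caller denied
           mayDisableShutterSound(true,  true));  // 1: forced, mediaserver allowed
    return 0;
}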
- char value[PROPERTY_VALUE_MAX]; - property_get("ro.camera.sound.forced", value, "0"); - if (strcmp(value, "0") != 0) { - // Disabling shutter sound is not allowed. Deny if the current - // process is not mediaserver. - if (getCallingPid() != getpid()) { - ALOGE("Failed to disable shutter sound. Permission denied (pid %d)", getCallingPid()); - return PERMISSION_DENIED; - } - } - - mPlayShutterSound = false; - return OK; -} - -status_t CameraClient::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) { - LOG1("sendCommand (pid %d)", getCallingPid()); - int orientation; - Mutex::Autolock lock(mLock); - status_t result = checkPidAndHardware(); - if (result != NO_ERROR) return result; - - if (cmd == CAMERA_CMD_SET_DISPLAY_ORIENTATION) { - // Mirror the preview if the camera is front-facing. - orientation = getOrientation(arg1, mCameraFacing == CAMERA_FACING_FRONT); - if (orientation == -1) return BAD_VALUE; - - if (mOrientation != orientation) { - mOrientation = orientation; - if (mPreviewWindow != 0) { - native_window_set_buffers_transform(mPreviewWindow.get(), - mOrientation); - } - } - return OK; - } else if (cmd == CAMERA_CMD_ENABLE_SHUTTER_SOUND) { - switch (arg1) { - case 0: - return enableShutterSound(false); - case 1: - return enableShutterSound(true); - default: - return BAD_VALUE; - } - return OK; - } else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) { - mCameraService->playSound(CameraService::SOUND_RECORDING); - } else if (cmd == CAMERA_CMD_SET_VIDEO_BUFFER_COUNT) { - // Silently ignore this command - return INVALID_OPERATION; - } else if (cmd == CAMERA_CMD_PING) { - // If mHardware is 0, checkPidAndHardware will return error. - return OK; - } - - return mHardware->sendCommand(cmd, arg1, arg2); -} - -// ---------------------------------------------------------------------------- - -void CameraClient::enableMsgType(int32_t msgType) { - android_atomic_or(msgType, &mMsgEnabled); - mHardware->enableMsgType(msgType); -} - -void CameraClient::disableMsgType(int32_t msgType) { - android_atomic_and(~msgType, &mMsgEnabled); - mHardware->disableMsgType(msgType); -} - -#define CHECK_MESSAGE_INTERVAL 10 // 10ms -bool CameraClient::lockIfMessageWanted(int32_t msgType) { - int sleepCount = 0; - while (mMsgEnabled & msgType) { - if (mLock.tryLock() == NO_ERROR) { - if (sleepCount > 0) { - LOG1("lockIfMessageWanted(%d): waited for %d ms", - msgType, sleepCount * CHECK_MESSAGE_INTERVAL); - } - return true; - } - if (sleepCount++ == 0) { - LOG1("lockIfMessageWanted(%d): enter sleep", msgType); - } - usleep(CHECK_MESSAGE_INTERVAL * 1000); - } - ALOGW("lockIfMessageWanted(%d): dropped unwanted message", msgType); - return false; -} - -// Callback messages can be dispatched to internal handlers or pass to our -// client's callback functions, depending on the message type. -// -// notifyCallback: -// CAMERA_MSG_SHUTTER handleShutter -// (others) c->notifyCallback -// dataCallback: -// CAMERA_MSG_PREVIEW_FRAME handlePreviewData -// CAMERA_MSG_POSTVIEW_FRAME handlePostview -// CAMERA_MSG_RAW_IMAGE handleRawPicture -// CAMERA_MSG_COMPRESSED_IMAGE handleCompressedPicture -// (others) c->dataCallback -// dataCallbackTimestamp -// (others) c->dataCallbackTimestamp -// -// NOTE: the *Callback functions grab mLock of the client before passing -// control to handle* functions. 
So the handle* functions must release the -// lock before calling the ICameraClient's callbacks, so those callbacks can -// invoke methods in the Client class again (For example, the preview frame -// callback may want to releaseRecordingFrame). The handle* functions must -// release the lock after all accesses to member variables, so it must be -// handled very carefully. - -void CameraClient::notifyCallback(int32_t msgType, int32_t ext1, - int32_t ext2, void* user) { - LOG2("notifyCallback(%d)", msgType); - - Mutex* lock = getClientLockFromCookie(user); - if (lock == NULL) return; - Mutex::Autolock alock(*lock); - - CameraClient* client = - static_cast(getClientFromCookie(user)); - if (client == NULL) return; - - if (!client->lockIfMessageWanted(msgType)) return; - - switch (msgType) { - case CAMERA_MSG_SHUTTER: - // ext1 is the dimension of the yuv picture. - client->handleShutter(); - break; - default: - client->handleGenericNotify(msgType, ext1, ext2); - break; - } -} - -void CameraClient::dataCallback(int32_t msgType, - const sp& dataPtr, camera_frame_metadata_t *metadata, void* user) { - LOG2("dataCallback(%d)", msgType); - - Mutex* lock = getClientLockFromCookie(user); - if (lock == NULL) return; - Mutex::Autolock alock(*lock); - - CameraClient* client = - static_cast(getClientFromCookie(user)); - if (client == NULL) return; - - if (!client->lockIfMessageWanted(msgType)) return; - if (dataPtr == 0 && metadata == NULL) { - ALOGE("Null data returned in data callback"); - client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0); - return; - } - - switch (msgType & ~CAMERA_MSG_PREVIEW_METADATA) { - case CAMERA_MSG_PREVIEW_FRAME: - client->handlePreviewData(msgType, dataPtr, metadata); - break; - case CAMERA_MSG_POSTVIEW_FRAME: - client->handlePostview(dataPtr); - break; - case CAMERA_MSG_RAW_IMAGE: - client->handleRawPicture(dataPtr); - break; - case CAMERA_MSG_COMPRESSED_IMAGE: - client->handleCompressedPicture(dataPtr); - break; - default: - client->handleGenericData(msgType, dataPtr, metadata); - break; - } -} - -void CameraClient::dataCallbackTimestamp(nsecs_t timestamp, - int32_t msgType, const sp& dataPtr, void* user) { - LOG2("dataCallbackTimestamp(%d)", msgType); - - Mutex* lock = getClientLockFromCookie(user); - if (lock == NULL) return; - Mutex::Autolock alock(*lock); - - CameraClient* client = - static_cast(getClientFromCookie(user)); - if (client == NULL) return; - - if (!client->lockIfMessageWanted(msgType)) return; - - if (dataPtr == 0) { - ALOGE("Null data returned in data with timestamp callback"); - client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0); - return; - } - - client->handleGenericDataTimestamp(timestamp, msgType, dataPtr); -} - -// snapshot taken callback -void CameraClient::handleShutter(void) { - if (mPlayShutterSound) { - mCameraService->playSound(CameraService::SOUND_SHUTTER); - } - - sp c = mRemoteCallback; - if (c != 0) { - mLock.unlock(); - c->notifyCallback(CAMERA_MSG_SHUTTER, 0, 0); - if (!lockIfMessageWanted(CAMERA_MSG_SHUTTER)) return; - } - disableMsgType(CAMERA_MSG_SHUTTER); - - mLock.unlock(); -} - -// preview callback - frame buffer update -void CameraClient::handlePreviewData(int32_t msgType, - const sp& mem, - camera_frame_metadata_t *metadata) { - ssize_t offset; - size_t size; - sp heap = mem->getMemory(&offset, &size); - - // local copy of the callback flags - int flags = mPreviewCallbackFlag; - - // is callback enabled? 
- if (!(flags & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)) { - // If the enable bit is off, the copy-out and one-shot bits are ignored - LOG2("frame callback is disabled"); - mLock.unlock(); - return; - } - - // hold a strong pointer to the client - sp c = mRemoteCallback; - - // clear callback flags if no client or one-shot mode - if (c == 0 || (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) { - LOG2("Disable preview callback"); - mPreviewCallbackFlag &= ~(CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK | - CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK | - CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK); - disableMsgType(CAMERA_MSG_PREVIEW_FRAME); - } - - if (c != 0) { - // Is the received frame copied out or not? - if (flags & CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK) { - LOG2("frame is copied"); - copyFrameAndPostCopiedFrame(msgType, c, heap, offset, size, metadata); - } else { - LOG2("frame is forwarded"); - mLock.unlock(); - c->dataCallback(msgType, mem, metadata); - } - } else { - mLock.unlock(); - } -} - -// picture callback - postview image ready -void CameraClient::handlePostview(const sp& mem) { - disableMsgType(CAMERA_MSG_POSTVIEW_FRAME); - - sp c = mRemoteCallback; - mLock.unlock(); - if (c != 0) { - c->dataCallback(CAMERA_MSG_POSTVIEW_FRAME, mem, NULL); - } -} - -// picture callback - raw image ready -void CameraClient::handleRawPicture(const sp& mem) { - disableMsgType(CAMERA_MSG_RAW_IMAGE); - - ssize_t offset; - size_t size; - sp heap = mem->getMemory(&offset, &size); - - sp c = mRemoteCallback; - mLock.unlock(); - if (c != 0) { - c->dataCallback(CAMERA_MSG_RAW_IMAGE, mem, NULL); - } -} - -// picture callback - compressed picture ready -void CameraClient::handleCompressedPicture(const sp& mem) { - disableMsgType(CAMERA_MSG_COMPRESSED_IMAGE); - - sp c = mRemoteCallback; - mLock.unlock(); - if (c != 0) { - c->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, mem, NULL); - } -} - - -void CameraClient::handleGenericNotify(int32_t msgType, - int32_t ext1, int32_t ext2) { - sp c = mRemoteCallback; - mLock.unlock(); - if (c != 0) { - c->notifyCallback(msgType, ext1, ext2); - } -} - -void CameraClient::handleGenericData(int32_t msgType, - const sp& dataPtr, camera_frame_metadata_t *metadata) { - sp c = mRemoteCallback; - mLock.unlock(); - if (c != 0) { - c->dataCallback(msgType, dataPtr, metadata); - } -} - -void CameraClient::handleGenericDataTimestamp(nsecs_t timestamp, - int32_t msgType, const sp& dataPtr) { - sp c = mRemoteCallback; - mLock.unlock(); - if (c != 0) { - c->dataCallbackTimestamp(timestamp, msgType, dataPtr); - } -} - -void CameraClient::copyFrameAndPostCopiedFrame( - int32_t msgType, const sp& client, - const sp& heap, size_t offset, size_t size, - camera_frame_metadata_t *metadata) { - LOG2("copyFrameAndPostCopiedFrame"); - // It is necessary to copy out of pmem before sending this to - // the callback. For efficiency, reuse the same MemoryHeapBase - // provided it's big enough. Don't allocate the memory or - // perform the copy if there's no callback. 
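// Illustration only: the reuse-if-big-enough pattern that the code below
// applies to mPreviewBuffer, shown with a plain byte vector instead of
// MemoryHeapBase/MemoryBase. All names here are hypothetical.
#include <cstddef>
#include <cstring>
#include <vector>

class GrowOnlyBuffer {
public:
    // Returns a destination of at least 'size' bytes, reallocating only when
    // the current buffer is too small (mirrors the virtualSize() check).
    void* prepare(size_t size) {
        if (mStorage.size() < size) {
            mStorage.assign(size, 0);  // drop the old buffer and allocate a bigger one
        }
        return mStorage.data();
    }
private:
    std::vector<unsigned char> mStorage;
};

int main() {
    GrowOnlyBuffer previewCopy;
    const char frame[] = "frame bytes";
    std::memcpy(previewCopy.prepare(sizeof(frame)), frame, sizeof(frame)); // first use: allocate
    previewCopy.prepare(4);  // smaller request: existing buffer is reused as-is
    return 0;
}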
- // hold the preview lock while we grab a reference to the preview buffer - sp previewBuffer; - - if (mPreviewBuffer == 0) { - mPreviewBuffer = new MemoryHeapBase(size, 0, NULL); - } else if (size > mPreviewBuffer->virtualSize()) { - mPreviewBuffer.clear(); - mPreviewBuffer = new MemoryHeapBase(size, 0, NULL); - } - if (mPreviewBuffer == 0) { - ALOGE("failed to allocate space for preview buffer"); - mLock.unlock(); - return; - } - previewBuffer = mPreviewBuffer; - - memcpy(previewBuffer->base(), (uint8_t *)heap->base() + offset, size); - - sp frame = new MemoryBase(previewBuffer, 0, size); - if (frame == 0) { - ALOGE("failed to allocate space for frame callback"); - mLock.unlock(); - return; - } - - mLock.unlock(); - client->dataCallback(msgType, frame, metadata); -} - -int CameraClient::getOrientation(int degrees, bool mirror) { - if (!mirror) { - if (degrees == 0) return 0; - else if (degrees == 90) return HAL_TRANSFORM_ROT_90; - else if (degrees == 180) return HAL_TRANSFORM_ROT_180; - else if (degrees == 270) return HAL_TRANSFORM_ROT_270; - } else { // Do mirror (horizontal flip) - if (degrees == 0) { // FLIP_H and ROT_0 - return HAL_TRANSFORM_FLIP_H; - } else if (degrees == 90) { // FLIP_H and ROT_90 - return HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90; - } else if (degrees == 180) { // FLIP_H and ROT_180 - return HAL_TRANSFORM_FLIP_V; - } else if (degrees == 270) { // FLIP_H and ROT_270 - return HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90; - } - } - ALOGE("Invalid setDisplayOrientation degrees=%d", degrees); - return -1; -} - -}; // namespace android diff --git a/services/camera/libcameraservice/CameraClient.h b/services/camera/libcameraservice/CameraClient.h deleted file mode 100644 index abde75a..0000000 --- a/services/camera/libcameraservice/CameraClient.h +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERACLIENT_H -#define ANDROID_SERVERS_CAMERA_CAMERACLIENT_H - -#include "CameraService.h" - -namespace android { - -class MemoryHeapBase; -class CameraHardwareInterface; - -/** - * Interface between android.hardware.Camera API and Camera HAL device for version - * CAMERA_DEVICE_API_VERSION_1_0. 
- */ - -class CameraClient : public CameraService::Client -{ -public: - // ICamera interface (see ICamera for details) - virtual void disconnect(); - virtual status_t connect(const sp& client); - virtual status_t lock(); - virtual status_t unlock(); - virtual status_t setPreviewDisplay(const sp& surface); - virtual status_t setPreviewTexture(const sp& bufferProducer); - virtual void setPreviewCallbackFlag(int flag); - virtual status_t setPreviewCallbackTarget( - const sp& callbackProducer); - virtual status_t startPreview(); - virtual void stopPreview(); - virtual bool previewEnabled(); - virtual status_t storeMetaDataInBuffers(bool enabled); - virtual status_t startRecording(); - virtual void stopRecording(); - virtual bool recordingEnabled(); - virtual void releaseRecordingFrame(const sp& mem); - virtual status_t autoFocus(); - virtual status_t cancelAutoFocus(); - virtual status_t takePicture(int msgType); - virtual status_t setParameters(const String8& params); - virtual String8 getParameters() const; - virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2); - - // Interface used by CameraService - CameraClient(const sp& cameraService, - const sp& cameraClient, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - int clientUid, - int servicePid); - ~CameraClient(); - - status_t initialize(camera_module_t *module); - - status_t dump(int fd, const Vector& args); - -private: - - // check whether the calling process matches mClientPid. - status_t checkPid() const; - status_t checkPidAndHardware() const; // also check mHardware != 0 - - // these are internal functions used to set up preview buffers - status_t registerPreviewBuffers(); - - // camera operation mode - enum camera_mode { - CAMERA_PREVIEW_MODE = 0, // frame automatically released - CAMERA_RECORDING_MODE = 1, // frame has to be explicitly released by releaseRecordingFrame() - }; - // these are internal functions used for preview/recording - status_t startCameraMode(camera_mode mode); - status_t startPreviewMode(); - status_t startRecordingMode(); - - // internal function used by sendCommand to enable/disable shutter sound. - status_t enableShutterSound(bool enable); - - // these are static callback functions - static void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user); - static void dataCallback(int32_t msgType, const sp& dataPtr, - camera_frame_metadata_t *metadata, void* user); - static void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp& dataPtr, void* user); - // handlers for messages - void handleShutter(void); - void handlePreviewData(int32_t msgType, const sp& mem, - camera_frame_metadata_t *metadata); - void handlePostview(const sp& mem); - void handleRawPicture(const sp& mem); - void handleCompressedPicture(const sp& mem); - void handleGenericNotify(int32_t msgType, int32_t ext1, int32_t ext2); - void handleGenericData(int32_t msgType, const sp& dataPtr, - camera_frame_metadata_t *metadata); - void handleGenericDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp& dataPtr); - - void copyFrameAndPostCopiedFrame( - int32_t msgType, - const sp& client, - const sp& heap, - size_t offset, size_t size, - camera_frame_metadata_t *metadata); - - int getOrientation(int orientation, bool mirror); - - status_t setPreviewWindow( - const sp& binder, - const sp& window); - - - // these are initialized in the constructor. 
- sp mHardware; // cleared after disconnect() - int mPreviewCallbackFlag; - int mOrientation; // Current display orientation - bool mPlayShutterSound; - - // Ensures atomicity among the public methods - mutable Mutex mLock; - // This is a binder of Surface or Surface. - sp mSurface; - sp mPreviewWindow; - - // If the user want us to return a copy of the preview frame (instead - // of the original one), we allocate mPreviewBuffer and reuse it if possible. - sp mPreviewBuffer; - - // We need to avoid the deadlock when the incoming command thread and - // the CameraHardwareInterface callback thread both want to grab mLock. - // An extra flag is used to tell the callback thread that it should stop - // trying to deliver the callback messages if the client is not - // interested in it anymore. For example, if the client is calling - // stopPreview(), the preview frame messages do not need to be delivered - // anymore. - - // This function takes the same parameter as the enableMsgType() and - // disableMsgType() functions in CameraHardwareInterface. - void enableMsgType(int32_t msgType); - void disableMsgType(int32_t msgType); - volatile int32_t mMsgEnabled; - - // This function keeps trying to grab mLock, or give up if the message - // is found to be disabled. It returns true if mLock is grabbed. - bool lockIfMessageWanted(int32_t msgType); -}; - -} - -#endif diff --git a/services/camera/libcameraservice/CameraDeviceBase.cpp b/services/camera/libcameraservice/CameraDeviceBase.cpp deleted file mode 100644 index 6c4e87f..0000000 --- a/services/camera/libcameraservice/CameraDeviceBase.cpp +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include "CameraDeviceBase.h" - -namespace android { - -/** - * Base class destructors - */ -CameraDeviceBase::~CameraDeviceBase() { -} - -CameraDeviceBase::NotificationListener::~NotificationListener() { -} - -} // namespace android diff --git a/services/camera/libcameraservice/CameraDeviceBase.h b/services/camera/libcameraservice/CameraDeviceBase.h deleted file mode 100644 index aa92bec..0000000 --- a/services/camera/libcameraservice/CameraDeviceBase.h +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERADEVICEBASE_H -#define ANDROID_SERVERS_CAMERA_CAMERADEVICEBASE_H - -#include -#include -#include -#include -#include - -#include "hardware/camera2.h" -#include "camera/CameraMetadata.h" - -namespace android { - -/** - * Base interface for version >= 2 camera device classes, which interface to - * camera HAL device versions >= 2. - */ -class CameraDeviceBase : public virtual RefBase { - public: - virtual ~CameraDeviceBase(); - - /** - * The device's camera ID - */ - virtual int getId() const = 0; - - virtual status_t initialize(camera_module_t *module) = 0; - virtual status_t disconnect() = 0; - - virtual status_t dump(int fd, const Vector& args) = 0; - - /** - * The device's static characteristics metadata buffer - */ - virtual const CameraMetadata& info() const = 0; - - /** - * Submit request for capture. The CameraDevice takes ownership of the - * passed-in buffer. - */ - virtual status_t capture(CameraMetadata &request) = 0; - - /** - * Submit request for streaming. The CameraDevice makes a copy of the - * passed-in buffer and the caller retains ownership. - */ - virtual status_t setStreamingRequest(const CameraMetadata &request) = 0; - - /** - * Clear the streaming request slot. - */ - virtual status_t clearStreamingRequest() = 0; - - /** - * Wait until a request with the given ID has been dequeued by the - * HAL. Returns TIMED_OUT if the timeout duration is reached. Returns - * immediately if the latest request received by the HAL has this id. - */ - virtual status_t waitUntilRequestReceived(int32_t requestId, - nsecs_t timeout) = 0; - - /** - * Create an output stream of the requested size and format. - * - * If format is CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, then the HAL device selects - * an appropriate format; it can be queried with getStreamInfo. - * - * If format is HAL_PIXEL_FORMAT_COMPRESSED, the size parameter must be - * equal to the size in bytes of the buffers to allocate for the stream. For - * other formats, the size parameter is ignored. - */ - virtual status_t createStream(sp consumer, - uint32_t width, uint32_t height, int format, size_t size, - int *id) = 0; - - /** - * Create an input reprocess stream that uses buffers from an existing - * output stream. - */ - virtual status_t createReprocessStreamFromStream(int outputId, int *id) = 0; - - /** - * Get information about a given stream. - */ - virtual status_t getStreamInfo(int id, - uint32_t *width, uint32_t *height, uint32_t *format) = 0; - - /** - * Set stream gralloc buffer transform - */ - virtual status_t setStreamTransform(int id, int transform) = 0; - - /** - * Delete stream. Must not be called if there are requests in flight which - * reference that stream. - */ - virtual status_t deleteStream(int id) = 0; - - /** - * Delete reprocess stream. Must not be called if there are requests in - * flight which reference that stream. - */ - virtual status_t deleteReprocessStream(int id) = 0; - - /** - * Create a metadata buffer with fields that the HAL device believes are - * best for the given use case - */ - virtual status_t createDefaultRequest(int templateId, - CameraMetadata *request) = 0; - - /** - * Wait until all requests have been processed. Returns INVALID_OPERATION if - * the streaming slot is not empty, or TIMED_OUT if the requests haven't - * finished processing in 10 seconds. 
- */ - virtual status_t waitUntilDrained() = 0; - - /** - * Abstract class for HAL notification listeners - */ - class NotificationListener { - public: - // Refer to the Camera2 HAL definition for notification definitions - virtual void notifyError(int errorCode, int arg1, int arg2) = 0; - virtual void notifyShutter(int frameNumber, nsecs_t timestamp) = 0; - virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0; - virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0; - virtual void notifyAutoWhitebalance(uint8_t newState, - int triggerId) = 0; - protected: - virtual ~NotificationListener(); - }; - - /** - * Connect HAL notifications to a listener. Overwrites previous - * listener. Set to NULL to stop receiving notifications. - */ - virtual status_t setNotifyCallback(NotificationListener *listener) = 0; - - /** - * Whether the device supports calling notifyAutofocus, notifyAutoExposure, - * and notifyAutoWhitebalance; if this returns false, the client must - * synthesize these notifications from received frame metadata. - */ - virtual bool willNotify3A() = 0; - - /** - * Wait for a new frame to be produced, with timeout in nanoseconds. - * Returns TIMED_OUT when no frame produced within the specified duration - */ - virtual status_t waitForNextFrame(nsecs_t timeout) = 0; - - /** - * Get next metadata frame from the frame queue. Returns NULL if the queue - * is empty; caller takes ownership of the metadata buffer. - */ - virtual status_t getNextFrame(CameraMetadata *frame) = 0; - - /** - * Trigger auto-focus. The latest ID used in a trigger autofocus or cancel - * autofocus call will be returned by the HAL in all subsequent AF - * notifications. - */ - virtual status_t triggerAutofocus(uint32_t id) = 0; - - /** - * Cancel auto-focus. The latest ID used in a trigger autofocus/cancel - * autofocus call will be returned by the HAL in all subsequent AF - * notifications. - */ - virtual status_t triggerCancelAutofocus(uint32_t id) = 0; - - /** - * Trigger pre-capture metering. The latest ID used in a trigger pre-capture - * call will be returned by the HAL in all subsequent AE and AWB - * notifications. 
- */ - virtual status_t triggerPrecaptureMetering(uint32_t id) = 0; - - /** - * Abstract interface for clients that want to listen to reprocess buffer - * release events - */ - struct BufferReleasedListener : public virtual RefBase { - virtual void onBufferReleased(buffer_handle_t *handle) = 0; - }; - - /** - * Push a buffer to be reprocessed into a reprocessing stream, and - * provide a listener to call once the buffer is returned by the HAL - */ - virtual status_t pushReprocessBuffer(int reprocessStreamId, - buffer_handle_t *buffer, wp listener) = 0; -}; - -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/CameraDeviceFactory.cpp b/services/camera/libcameraservice/CameraDeviceFactory.cpp index 2acdb5e..7fdf304 100644 --- a/services/camera/libcameraservice/CameraDeviceFactory.cpp +++ b/services/camera/libcameraservice/CameraDeviceFactory.cpp @@ -18,11 +18,11 @@ #define LOG_TAG "CameraDeviceFactory" #include -#include "CameraDeviceBase.h" -#include "Camera2Device.h" -#include "Camera3Device.h" #include "CameraService.h" #include "CameraDeviceFactory.h" +#include "common/CameraDeviceBase.h" +#include "device2/Camera2Device.h" +#include "device3/Camera3Device.h" namespace android { @@ -69,4 +69,3 @@ void CameraDeviceFactory::registerService(wp service) { } }; // namespace android - diff --git a/services/camera/libcameraservice/CameraDeviceFactory.h b/services/camera/libcameraservice/CameraDeviceFactory.h index 93ffaf8..236dc56 100644 --- a/services/camera/libcameraservice/CameraDeviceFactory.h +++ b/services/camera/libcameraservice/CameraDeviceFactory.h @@ -20,6 +20,7 @@ #include namespace android { + class CameraDeviceBase; class CameraService; diff --git a/services/camera/libcameraservice/CameraHardwareInterface.h b/services/camera/libcameraservice/CameraHardwareInterface.h deleted file mode 100644 index 87b2807..0000000 --- a/services/camera/libcameraservice/CameraHardwareInterface.h +++ /dev/null @@ -1,691 +0,0 @@ -/* - * Copyright (C) 2008 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H -#define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H - -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace android { - -typedef void (*notify_callback)(int32_t msgType, - int32_t ext1, - int32_t ext2, - void* user); - -typedef void (*data_callback)(int32_t msgType, - const sp &dataPtr, - camera_frame_metadata_t *metadata, - void* user); - -typedef void (*data_callback_timestamp)(nsecs_t timestamp, - int32_t msgType, - const sp &dataPtr, - void *user); - -/** - * CameraHardwareInterface.h defines the interface to the - * camera hardware abstraction layer, used for setting and getting - * parameters, live previewing, and taking pictures. It is used for - * HAL devices with version CAMERA_DEVICE_API_VERSION_1_0 only. - * - * It is a referenced counted interface with RefBase as its base class. 
- * CameraService calls openCameraHardware() to retrieve a strong pointer to the - * instance of this interface and may be called multiple times. The - * following steps describe a typical sequence: - * - * -# After CameraService calls openCameraHardware(), getParameters() and - * setParameters() are used to initialize the camera instance. - * -# startPreview() is called. - * - * Prior to taking a picture, CameraService often calls autofocus(). When auto - * focusing has completed, the camera instance sends a CAMERA_MSG_FOCUS notification, - * which informs the application whether focusing was successful. The camera instance - * only sends this message once and it is up to the application to call autoFocus() - * again if refocusing is desired. - * - * CameraService calls takePicture() to request the camera instance take a - * picture. At this point, if a shutter, postview, raw, and/or compressed - * callback is desired, the corresponding message must be enabled. Any memory - * provided in a data callback must be copied if it's needed after returning. - */ - -class CameraHardwareInterface : public virtual RefBase { -public: - CameraHardwareInterface(const char *name) - { - mDevice = 0; - mName = name; - } - - ~CameraHardwareInterface() - { - ALOGI("Destroying camera %s", mName.string()); - if(mDevice) { - int rc = mDevice->common.close(&mDevice->common); - if (rc != OK) - ALOGE("Could not close camera %s: %d", mName.string(), rc); - } - } - - status_t initialize(hw_module_t *module) - { - ALOGI("Opening camera %s", mName.string()); - int rc = module->methods->open(module, mName.string(), - (hw_device_t **)&mDevice); - if (rc != OK) { - ALOGE("Could not open camera %s: %d", mName.string(), rc); - return rc; - } - initHalPreviewWindow(); - return rc; - } - - /** Set the ANativeWindow to which preview frames are sent */ - status_t setPreviewWindow(const sp& buf) - { - ALOGV("%s(%s) buf %p", __FUNCTION__, mName.string(), buf.get()); - - if (mDevice->ops->set_preview_window) { - mPreviewWindow = buf; - mHalPreviewWindow.user = this; - ALOGV("%s &mHalPreviewWindow %p mHalPreviewWindow.user %p", __FUNCTION__, - &mHalPreviewWindow, mHalPreviewWindow.user); - return mDevice->ops->set_preview_window(mDevice, - buf.get() ? &mHalPreviewWindow.nw : 0); - } - return INVALID_OPERATION; - } - - /** Set the notification and data callbacks */ - void setCallbacks(notify_callback notify_cb, - data_callback data_cb, - data_callback_timestamp data_cb_timestamp, - void* user) - { - mNotifyCb = notify_cb; - mDataCb = data_cb; - mDataCbTimestamp = data_cb_timestamp; - mCbUser = user; - - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - - if (mDevice->ops->set_callbacks) { - mDevice->ops->set_callbacks(mDevice, - __notify_cb, - __data_cb, - __data_cb_timestamp, - __get_memory, - this); - } - } - - /** - * The following three functions all take a msgtype, - * which is a bitmask of the messages defined in - * include/ui/Camera.h - */ - - /** - * Enable a message, or set of messages. - */ - void enableMsgType(int32_t msgType) - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->enable_msg_type) - mDevice->ops->enable_msg_type(mDevice, msgType); - } - - /** - * Disable a message, or a set of messages. - * - * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera hal - * should not rely on its client to call releaseRecordingFrame() to release - * video recording frames sent out by the cameral hal before and after the - * disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. 
Camera hal clients must not - * modify/access any video recording frame after calling - * disableMsgType(CAMERA_MSG_VIDEO_FRAME). - */ - void disableMsgType(int32_t msgType) - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->disable_msg_type) - mDevice->ops->disable_msg_type(mDevice, msgType); - } - - /** - * Query whether a message, or a set of messages, is enabled. - * Note that this is operates as an AND, if any of the messages - * queried are off, this will return false. - */ - int msgTypeEnabled(int32_t msgType) - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->msg_type_enabled) - return mDevice->ops->msg_type_enabled(mDevice, msgType); - return false; - } - - /** - * Start preview mode. - */ - status_t startPreview() - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->start_preview) - return mDevice->ops->start_preview(mDevice); - return INVALID_OPERATION; - } - - /** - * Stop a previously started preview. - */ - void stopPreview() - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->stop_preview) - mDevice->ops->stop_preview(mDevice); - } - - /** - * Returns true if preview is enabled. - */ - int previewEnabled() - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->preview_enabled) - return mDevice->ops->preview_enabled(mDevice); - return false; - } - - /** - * Request the camera hal to store meta data or real YUV data in - * the video buffers send out via CAMERA_MSG_VIDEO_FRRAME for a - * recording session. If it is not called, the default camera - * hal behavior is to store real YUV data in the video buffers. - * - * This method should be called before startRecording() in order - * to be effective. - * - * If meta data is stored in the video buffers, it is up to the - * receiver of the video buffers to interpret the contents and - * to find the actual frame data with the help of the meta data - * in the buffer. How this is done is outside of the scope of - * this method. - * - * Some camera hal may not support storing meta data in the video - * buffers, but all camera hal should support storing real YUV data - * in the video buffers. If the camera hal does not support storing - * the meta data in the video buffers when it is requested to do - * do, INVALID_OPERATION must be returned. It is very useful for - * the camera hal to pass meta data rather than the actual frame - * data directly to the video encoder, since the amount of the - * uncompressed frame data can be very large if video size is large. - * - * @param enable if true to instruct the camera hal to store - * meta data in the video buffers; false to instruct - * the camera hal to store real YUV data in the video - * buffers. - * - * @return OK on success. - */ - - status_t storeMetaDataInBuffers(int enable) - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->store_meta_data_in_buffers) - return mDevice->ops->store_meta_data_in_buffers(mDevice, enable); - return enable ? INVALID_OPERATION: OK; - } - - /** - * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME - * message is sent with the corresponding frame. Every record frame must be released - * by a cameral hal client via releaseRecordingFrame() before the client calls - * disableMsgType(CAMERA_MSG_VIDEO_FRAME). 
After the client calls - * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's responsibility - * to manage the life-cycle of the video recording frames, and the client must - * not modify/access any video recording frames. - */ - status_t startRecording() - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->start_recording) - return mDevice->ops->start_recording(mDevice); - return INVALID_OPERATION; - } - - /** - * Stop a previously started recording. - */ - void stopRecording() - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->stop_recording) - mDevice->ops->stop_recording(mDevice); - } - - /** - * Returns true if recording is enabled. - */ - int recordingEnabled() - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->recording_enabled) - return mDevice->ops->recording_enabled(mDevice); - return false; - } - - /** - * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME. - * - * It is camera hal client's responsibility to release video recording - * frames sent out by the camera hal before the camera hal receives - * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives - * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's - * responsibility of managing the life-cycle of the video recording - * frames. - */ - void releaseRecordingFrame(const sp& mem) - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->release_recording_frame) { - ssize_t offset; - size_t size; - sp heap = mem->getMemory(&offset, &size); - void *data = ((uint8_t *)heap->base()) + offset; - return mDevice->ops->release_recording_frame(mDevice, data); - } - } - - /** - * Start auto focus, the notification callback routine is called - * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() - * will be called again if another auto focus is needed. - */ - status_t autoFocus() - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->auto_focus) - return mDevice->ops->auto_focus(mDevice); - return INVALID_OPERATION; - } - - /** - * Cancels auto-focus function. If the auto-focus is still in progress, - * this function will cancel it. Whether the auto-focus is in progress - * or not, this function will return the focus position to the default. - * If the camera does not support auto-focus, this is a no-op. - */ - status_t cancelAutoFocus() - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->cancel_auto_focus) - return mDevice->ops->cancel_auto_focus(mDevice); - return INVALID_OPERATION; - } - - /** - * Take a picture. - */ - status_t takePicture() - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->take_picture) - return mDevice->ops->take_picture(mDevice); - return INVALID_OPERATION; - } - - /** - * Cancel a picture that was started with takePicture. Calling this - * method when no picture is being taken is a no-op. - */ - status_t cancelPicture() - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->cancel_picture) - return mDevice->ops->cancel_picture(mDevice); - return INVALID_OPERATION; - } - - /** - * Set the camera parameters. This returns BAD_VALUE if any parameter is - * invalid or not supported. */ - status_t setParameters(const CameraParameters ¶ms) - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->set_parameters) - return mDevice->ops->set_parameters(mDevice, - params.flatten().string()); - return INVALID_OPERATION; - } - - /** Return the camera parameters. 
*/ - CameraParameters getParameters() const - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - CameraParameters parms; - if (mDevice->ops->get_parameters) { - char *temp = mDevice->ops->get_parameters(mDevice); - String8 str_parms(temp); - if (mDevice->ops->put_parameters) - mDevice->ops->put_parameters(mDevice, temp); - else - free(temp); - parms.unflatten(str_parms); - } - return parms; - } - - /** - * Send command to camera driver. - */ - status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->send_command) - return mDevice->ops->send_command(mDevice, cmd, arg1, arg2); - return INVALID_OPERATION; - } - - /** - * Release the hardware resources owned by this object. Note that this is - * *not* done in the destructor. - */ - void release() { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->release) - mDevice->ops->release(mDevice); - } - - /** - * Dump state of the camera hardware - */ - status_t dump(int fd, const Vector& /*args*/) const - { - ALOGV("%s(%s)", __FUNCTION__, mName.string()); - if (mDevice->ops->dump) - return mDevice->ops->dump(mDevice, fd); - return OK; // It's fine if the HAL doesn't implement dump() - } - -private: - camera_device_t *mDevice; - String8 mName; - - static void __notify_cb(int32_t msg_type, int32_t ext1, - int32_t ext2, void *user) - { - ALOGV("%s", __FUNCTION__); - CameraHardwareInterface *__this = - static_cast(user); - __this->mNotifyCb(msg_type, ext1, ext2, __this->mCbUser); - } - - static void __data_cb(int32_t msg_type, - const camera_memory_t *data, unsigned int index, - camera_frame_metadata_t *metadata, - void *user) - { - ALOGV("%s", __FUNCTION__); - CameraHardwareInterface *__this = - static_cast(user); - sp mem(static_cast(data->handle)); - if (index >= mem->mNumBufs) { - ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__, - index, mem->mNumBufs); - return; - } - __this->mDataCb(msg_type, mem->mBuffers[index], metadata, __this->mCbUser); - } - - static void __data_cb_timestamp(nsecs_t timestamp, int32_t msg_type, - const camera_memory_t *data, unsigned index, - void *user) - { - ALOGV("%s", __FUNCTION__); - CameraHardwareInterface *__this = - static_cast(user); - // Start refcounting the heap object from here on. When the clients - // drop all references, it will be destroyed (as well as the enclosed - // MemoryHeapBase. - sp mem(static_cast(data->handle)); - if (index >= mem->mNumBufs) { - ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__, - index, mem->mNumBufs); - return; - } - __this->mDataCbTimestamp(timestamp, msg_type, mem->mBuffers[index], __this->mCbUser); - } - - // This is a utility class that combines a MemoryHeapBase and a MemoryBase - // in one. Since we tend to use them in a one-to-one relationship, this is - // handy. 
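// Illustration only: the one-heap/many-buffers layout that CameraHeapMemory
// (below) builds out of MemoryHeapBase + MemoryBase, reduced to one byte
// vector carved into equally sized views. All names here are hypothetical.
#include <cstddef>
#include <vector>

struct BufferView {
    unsigned char* data;
    size_t size;
};

class ChunkedHeap {
public:
    ChunkedHeap(size_t bufSize, size_t numBufs) : mHeap(bufSize * numBufs) {
        // One view per chunk, the same way commonInitialization() wraps each
        // offset of the single heap in its own MemoryBase.
        for (size_t i = 0; i < numBufs; ++i) {
            mBuffers.push_back({mHeap.data() + i * bufSize, bufSize});
        }
    }
    BufferView buffer(size_t index) const { return mBuffers.at(index); }
private:
    std::vector<unsigned char> mHeap;     // stands in for MemoryHeapBase
    std::vector<BufferView> mBuffers;     // stands in for the per-chunk MemoryBase array
};

int main() {
    ChunkedHeap heap(4096, 4);      // e.g. four 4 KiB callback buffers
    BufferView third = heap.buffer(2);
    third.data[0] = 0x7F;           // writes land at offset 2 * 4096 of the heap
    return 0;
}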
- - class CameraHeapMemory : public RefBase { - public: - CameraHeapMemory(int fd, size_t buf_size, uint_t num_buffers = 1) : - mBufSize(buf_size), - mNumBufs(num_buffers) - { - mHeap = new MemoryHeapBase(fd, buf_size * num_buffers); - commonInitialization(); - } - - CameraHeapMemory(size_t buf_size, uint_t num_buffers = 1) : - mBufSize(buf_size), - mNumBufs(num_buffers) - { - mHeap = new MemoryHeapBase(buf_size * num_buffers); - commonInitialization(); - } - - void commonInitialization() - { - handle.data = mHeap->base(); - handle.size = mBufSize * mNumBufs; - handle.handle = this; - - mBuffers = new sp[mNumBufs]; - for (uint_t i = 0; i < mNumBufs; i++) - mBuffers[i] = new MemoryBase(mHeap, - i * mBufSize, - mBufSize); - - handle.release = __put_memory; - } - - virtual ~CameraHeapMemory() - { - delete [] mBuffers; - } - - size_t mBufSize; - uint_t mNumBufs; - sp mHeap; - sp *mBuffers; - - camera_memory_t handle; - }; - - static camera_memory_t* __get_memory(int fd, size_t buf_size, uint_t num_bufs, - void *user __attribute__((unused))) - { - CameraHeapMemory *mem; - if (fd < 0) - mem = new CameraHeapMemory(buf_size, num_bufs); - else - mem = new CameraHeapMemory(fd, buf_size, num_bufs); - mem->incStrong(mem); - return &mem->handle; - } - - static void __put_memory(camera_memory_t *data) - { - if (!data) - return; - - CameraHeapMemory *mem = static_cast(data->handle); - mem->decStrong(mem); - } - - static ANativeWindow *__to_anw(void *user) - { - CameraHardwareInterface *__this = - reinterpret_cast(user); - return __this->mPreviewWindow.get(); - } -#define anw(n) __to_anw(((struct camera_preview_window *)n)->user) - - static int __dequeue_buffer(struct preview_stream_ops* w, - buffer_handle_t** buffer, int *stride) - { - int rc; - ANativeWindow *a = anw(w); - ANativeWindowBuffer* anb; - rc = native_window_dequeue_buffer_and_wait(a, &anb); - if (!rc) { - *buffer = &anb->handle; - *stride = anb->stride; - } - return rc; - } - -#ifndef container_of -#define container_of(ptr, type, member) ({ \ - const typeof(((type *) 0)->member) *__mptr = (ptr); \ - (type *) ((char *) __mptr - (char *)(&((type *)0)->member)); }) -#endif - - static int __lock_buffer(struct preview_stream_ops* w, - buffer_handle_t* /*buffer*/) - { - ANativeWindow *a = anw(w); - (void)a; - return 0; - } - - static int __enqueue_buffer(struct preview_stream_ops* w, - buffer_handle_t* buffer) - { - ANativeWindow *a = anw(w); - return a->queueBuffer(a, - container_of(buffer, ANativeWindowBuffer, handle), -1); - } - - static int __cancel_buffer(struct preview_stream_ops* w, - buffer_handle_t* buffer) - { - ANativeWindow *a = anw(w); - return a->cancelBuffer(a, - container_of(buffer, ANativeWindowBuffer, handle), -1); - } - - static int __set_buffer_count(struct preview_stream_ops* w, int count) - { - ANativeWindow *a = anw(w); - return native_window_set_buffer_count(a, count); - } - - static int __set_buffers_geometry(struct preview_stream_ops* w, - int width, int height, int format) - { - ANativeWindow *a = anw(w); - return native_window_set_buffers_geometry(a, - width, height, format); - } - - static int __set_crop(struct preview_stream_ops *w, - int left, int top, int right, int bottom) - { - ANativeWindow *a = anw(w); - android_native_rect_t crop; - crop.left = left; - crop.top = top; - crop.right = right; - crop.bottom = bottom; - return native_window_set_crop(a, &crop); - } - - static int __set_timestamp(struct preview_stream_ops *w, - int64_t timestamp) { - ANativeWindow *a = anw(w); - return 
native_window_set_buffers_timestamp(a, timestamp); - } - - static int __set_usage(struct preview_stream_ops* w, int usage) - { - ANativeWindow *a = anw(w); - return native_window_set_usage(a, usage); - } - - static int __set_swap_interval(struct preview_stream_ops *w, int interval) - { - ANativeWindow *a = anw(w); - return a->setSwapInterval(a, interval); - } - - static int __get_min_undequeued_buffer_count( - const struct preview_stream_ops *w, - int *count) - { - ANativeWindow *a = anw(w); - return a->query(a, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, count); - } - - void initHalPreviewWindow() - { - mHalPreviewWindow.nw.cancel_buffer = __cancel_buffer; - mHalPreviewWindow.nw.lock_buffer = __lock_buffer; - mHalPreviewWindow.nw.dequeue_buffer = __dequeue_buffer; - mHalPreviewWindow.nw.enqueue_buffer = __enqueue_buffer; - mHalPreviewWindow.nw.set_buffer_count = __set_buffer_count; - mHalPreviewWindow.nw.set_buffers_geometry = __set_buffers_geometry; - mHalPreviewWindow.nw.set_crop = __set_crop; - mHalPreviewWindow.nw.set_timestamp = __set_timestamp; - mHalPreviewWindow.nw.set_usage = __set_usage; - mHalPreviewWindow.nw.set_swap_interval = __set_swap_interval; - - mHalPreviewWindow.nw.get_min_undequeued_buffer_count = - __get_min_undequeued_buffer_count; - } - - sp mPreviewWindow; - - struct camera_preview_window { - struct preview_stream_ops nw; - void *user; - }; - - struct camera_preview_window mHalPreviewWindow; - - notify_callback mNotifyCb; - data_callback mDataCb; - data_callback_timestamp mDataCbTimestamp; - void *mCbUser; -}; - -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 0eb3e32..359b3ca 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -38,10 +38,10 @@ #include #include "CameraService.h" -#include "CameraClient.h" -#include "Camera2Client.h" -#include "ProCamera2Client.h" -#include "photography/CameraDeviceClient.h" +#include "api1/CameraClient.h" +#include "api1/Camera2Client.h" +#include "api_pro/ProCamera2Client.h" +#include "api2/CameraDeviceClient.h" #include "CameraDeviceFactory.h" namespace android { diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index 2bf7b49..980eb97 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -29,8 +29,8 @@ #include #include #include -#include -#include +#include +#include #include diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp deleted file mode 100644 index 251fdab..0000000 --- a/services/camera/libcameraservice/ProCamera2Client.cpp +++ /dev/null @@ -1,446 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#define LOG_TAG "ProCamera2Client" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include - -#include -#include -#include -#include "camera2/Parameters.h" -#include "ProCamera2Client.h" -#include "camera2/ProFrameProcessor.h" -#include "CameraDeviceBase.h" - -namespace android { -using namespace camera2; - -// Interface used by CameraService - -ProCamera2Client::ProCamera2Client(const sp& cameraService, - const sp& remoteCallback, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid) : - Camera2ClientBase(cameraService, remoteCallback, clientPackageName, - cameraId, cameraFacing, clientPid, clientUid, servicePid) -{ - ATRACE_CALL(); - ALOGI("ProCamera %d: Opened", cameraId); - - mExclusiveLock = false; -} - -status_t ProCamera2Client::initialize(camera_module_t *module) -{ - ATRACE_CALL(); - status_t res; - - res = Camera2ClientBase::initialize(module); - if (res != OK) { - return res; - } - - String8 threadName; - mFrameProcessor = new ProFrameProcessor(mDevice); - threadName = String8::format("PC2-%d-FrameProc", mCameraId); - mFrameProcessor->run(threadName.string()); - - mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID, - FRAME_PROCESSOR_LISTENER_MAX_ID, - /*listener*/this); - - return OK; -} - -ProCamera2Client::~ProCamera2Client() { -} - -status_t ProCamera2Client::exclusiveTryLock() { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - - if (!mDevice.get()) return PERMISSION_DENIED; - - if (!mExclusiveLock) { - mExclusiveLock = true; - - if (mRemoteCallback != NULL) { - mRemoteCallback->onLockStatusChanged( - IProCameraCallbacks::LOCK_ACQUIRED); - } - - ALOGV("%s: exclusive lock acquired", __FUNCTION__); - - return OK; - } - - // TODO: have a PERMISSION_DENIED case for when someone else owns the lock - - // don't allow recursive locking - ALOGW("%s: exclusive lock already exists - recursive locking is not" - "allowed", __FUNCTION__); - - return ALREADY_EXISTS; -} - -status_t ProCamera2Client::exclusiveLock() { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - - if (!mDevice.get()) return PERMISSION_DENIED; - - /** - * TODO: this should asynchronously 'wait' until the lock becomes available - * if another client already has an exclusive lock. 
- * - * once we have proper sharing support this will need to do - * more than just return immediately - */ - if (!mExclusiveLock) { - mExclusiveLock = true; - - if (mRemoteCallback != NULL) { - mRemoteCallback->onLockStatusChanged(IProCameraCallbacks::LOCK_ACQUIRED); - } - - ALOGV("%s: exclusive lock acquired", __FUNCTION__); - - return OK; - } - - // don't allow recursive locking - ALOGW("%s: exclusive lock already exists - recursive locking is not allowed" - , __FUNCTION__); - return ALREADY_EXISTS; -} - -status_t ProCamera2Client::exclusiveUnlock() { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - - // don't allow unlocking if we have no lock - if (!mExclusiveLock) { - ALOGW("%s: cannot unlock, no lock was held in the first place", - __FUNCTION__); - return BAD_VALUE; - } - - mExclusiveLock = false; - if (mRemoteCallback != NULL ) { - mRemoteCallback->onLockStatusChanged( - IProCameraCallbacks::LOCK_RELEASED); - } - ALOGV("%s: exclusive lock released", __FUNCTION__); - - return OK; -} - -bool ProCamera2Client::hasExclusiveLock() { - Mutex::Autolock icl(mBinderSerializationLock); - return mExclusiveLock; -} - -void ProCamera2Client::onExclusiveLockStolen() { - ALOGV("%s: ProClient lost exclusivity (id %d)", - __FUNCTION__, mCameraId); - - Mutex::Autolock icl(mBinderSerializationLock); - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - - if (mExclusiveLock && mRemoteCallback.get() != NULL) { - mRemoteCallback->onLockStatusChanged( - IProCameraCallbacks::LOCK_STOLEN); - } - - mExclusiveLock = false; - - //TODO: we should not need to detach the device, merely reset it. - detachDevice(); -} - -status_t ProCamera2Client::submitRequest(camera_metadata_t* request, - bool streaming) { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - if (!mExclusiveLock) { - return PERMISSION_DENIED; - } - - CameraMetadata metadata(request); - - if (!enforceRequestPermissions(metadata)) { - return PERMISSION_DENIED; - } - - if (streaming) { - return mDevice->setStreamingRequest(metadata); - } else { - return mDevice->capture(metadata); - } - - // unreachable. 
thx gcc for a useless warning - return OK; -} - -status_t ProCamera2Client::cancelRequest(int requestId) { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - if (!mExclusiveLock) { - return PERMISSION_DENIED; - } - - // TODO: implement - ALOGE("%s: not fully implemented yet", __FUNCTION__); - return INVALID_OPERATION; -} - -status_t ProCamera2Client::deleteStream(int streamId) { - ATRACE_CALL(); - ALOGV("%s (streamId = 0x%x)", __FUNCTION__, streamId); - - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - mDevice->clearStreamingRequest(); - - status_t code; - if ((code = mDevice->waitUntilDrained()) != OK) { - ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__, code); - } - - return mDevice->deleteStream(streamId); -} - -status_t ProCamera2Client::createStream(int width, int height, int format, - const sp& bufferProducer, - /*out*/ - int* streamId) -{ - if (streamId) { - *streamId = -1; - } - - ATRACE_CALL(); - ALOGV("%s (w = %d, h = %d, f = 0x%x)", __FUNCTION__, width, height, format); - - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - sp binder; - sp window; - if (bufferProducer != 0) { - binder = bufferProducer->asBinder(); - window = new Surface(bufferProducer); - } - - return mDevice->createStream(window, width, height, format, /*size*/1, - streamId); -} - -// Create a request object from a template. -// -- Caller owns the newly allocated metadata -status_t ProCamera2Client::createDefaultRequest(int templateId, - /*out*/ - camera_metadata** request) -{ - ATRACE_CALL(); - ALOGV("%s (templateId = 0x%x)", __FUNCTION__, templateId); - - if (request) { - *request = NULL; - } - - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - CameraMetadata metadata; - if ( (res = mDevice->createDefaultRequest(templateId, &metadata) ) == OK) { - *request = metadata.release(); - } - - return res; -} - -status_t ProCamera2Client::getCameraInfo(int cameraId, - /*out*/ - camera_metadata** info) -{ - if (cameraId != mCameraId) { - return INVALID_OPERATION; - } - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - CameraMetadata deviceInfo = mDevice->info(); - *info = deviceInfo.release(); - - return OK; -} - -status_t ProCamera2Client::dump(int fd, const Vector& args) { - String8 result; - result.appendFormat("ProCamera2Client[%d] (%p) PID: %d, dump:\n", - mCameraId, - getRemoteCallback()->asBinder().get(), - mClientPid); - result.append(" State: "); - - // TODO: print dynamic/request section from most recent requests - mFrameProcessor->dump(fd, args); - - return dumpDevice(fd, args); -} - -// IProCameraUser interface - -void ProCamera2Client::detachDevice() { - if (mDevice == 0) return; - - ALOGV("Camera %d: Stopping processors", mCameraId); - - mFrameProcessor->removeListener(FRAME_PROCESSOR_LISTENER_MIN_ID, - FRAME_PROCESSOR_LISTENER_MAX_ID, - /*listener*/this); - mFrameProcessor->requestExit(); - ALOGV("Camera %d: Waiting for threads", mCameraId); - mFrameProcessor->join(); - ALOGV("Camera %d: Disconnecting device", mCameraId); - - // WORKAROUND: HAL refuses to disconnect while there's streams in 
flight - { - mDevice->clearStreamingRequest(); - - status_t code; - if ((code = mDevice->waitUntilDrained()) != OK) { - ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__, - code); - } - } - - Camera2ClientBase::detachDevice(); -} - -/** Device-related methods */ -void ProCamera2Client::onFrameAvailable(int32_t frameId, - const CameraMetadata& frame) { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - - if (mRemoteCallback != NULL) { - CameraMetadata tmp(frame); - camera_metadata_t* meta = tmp.release(); - ALOGV("%s: meta = %p ", __FUNCTION__, meta); - mRemoteCallback->onResultReceived(frameId, meta); - tmp.acquire(meta); - } - -} - -bool ProCamera2Client::enforceRequestPermissions(CameraMetadata& metadata) { - - const int pid = IPCThreadState::self()->getCallingPid(); - const int selfPid = getpid(); - camera_metadata_entry_t entry; - - /** - * Mixin default important security values - * - android.led.transmit = defaulted ON - */ - CameraMetadata staticInfo = mDevice->info(); - entry = staticInfo.find(ANDROID_LED_AVAILABLE_LEDS); - for(size_t i = 0; i < entry.count; ++i) { - uint8_t led = entry.data.u8[i]; - - switch(led) { - case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: { - uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON; - if (!metadata.exists(ANDROID_LED_TRANSMIT)) { - metadata.update(ANDROID_LED_TRANSMIT, - &transmitDefault, 1); - } - break; - } - } - } - - // We can do anything! - if (pid == selfPid) { - return true; - } - - /** - * Permission check special fields in the request - * - android.led.transmit = android.permission.CAMERA_DISABLE_TRANSMIT - */ - entry = metadata.find(ANDROID_LED_TRANSMIT); - if (entry.count > 0 && entry.data.u8[0] != ANDROID_LED_TRANSMIT_ON) { - String16 permissionString = - String16("android.permission.CAMERA_DISABLE_TRANSMIT_LED"); - if (!checkCallingPermission(permissionString)) { - const int uid = IPCThreadState::self()->getCallingUid(); - ALOGE("Permission Denial: " - "can't disable transmit LED pid=%d, uid=%d", pid, uid); - return false; - } - } - - return true; -} - -} // namespace android diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/ProCamera2Client.h deleted file mode 100644 index faee9f9..0000000 --- a/services/camera/libcameraservice/ProCamera2Client.h +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA_PROCAMERA2CLIENT_H -#define ANDROID_SERVERS_CAMERA_PROCAMERA2CLIENT_H - -#include "Camera2Device.h" -#include "CameraService.h" -#include "camera2/ProFrameProcessor.h" -#include "Camera2ClientBase.h" - -namespace android { - -class IMemory; -/** - * Implements the binder IProCameraUser API, - * meant for HAL2-level private API access. 
- */ -class ProCamera2Client : - public Camera2ClientBase, - public camera2::ProFrameProcessor::FilteredListener -{ -public: - /** - * IProCameraUser interface (see IProCameraUser for details) - */ - virtual status_t exclusiveTryLock(); - virtual status_t exclusiveLock(); - virtual status_t exclusiveUnlock(); - - virtual bool hasExclusiveLock(); - - // Note that the callee gets a copy of the metadata. - virtual int submitRequest(camera_metadata_t* metadata, - bool streaming = false); - virtual status_t cancelRequest(int requestId); - - virtual status_t deleteStream(int streamId); - - virtual status_t createStream( - int width, - int height, - int format, - const sp& bufferProducer, - /*out*/ - int* streamId); - - // Create a request object from a template. - // -- Caller owns the newly allocated metadata - virtual status_t createDefaultRequest(int templateId, - /*out*/ - camera_metadata** request); - - // Get the static metadata for the camera - // -- Caller owns the newly allocated metadata - virtual status_t getCameraInfo(int cameraId, - /*out*/ - camera_metadata** info); - - /** - * Interface used by CameraService - */ - - ProCamera2Client(const sp& cameraService, - const sp& remoteCallback, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid); - virtual ~ProCamera2Client(); - - virtual status_t initialize(camera_module_t *module); - - virtual status_t dump(int fd, const Vector& args); - - // Callbacks from camera service - virtual void onExclusiveLockStolen(); - - /** - * Interface used by independent components of ProCamera2Client. - */ - -protected: - /** FilteredListener implementation **/ - virtual void onFrameAvailable(int32_t frameId, - const CameraMetadata& frame); - virtual void detachDevice(); - -private: - /** IProCameraUser interface-related private members */ - - /** Preview callback related members */ - sp mFrameProcessor; - static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0; - static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL; - - /** Utility members */ - bool enforceRequestPermissions(CameraMetadata& metadata); - - // Whether or not we have an exclusive lock on the device - // - if no we can't modify the request queue. - // note that creating/deleting streams we own is still OK - bool mExclusiveLock; -}; - -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp new file mode 100644 index 0000000..46aa60c --- /dev/null +++ b/services/camera/libcameraservice/api1/Camera2Client.cpp @@ -0,0 +1,1779 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera2" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include + +#include +#include + +#include "api1/Camera2Client.h" + +#include "api1/client2/StreamingProcessor.h" +#include "api1/client2/JpegProcessor.h" +#include "api1/client2/CaptureSequencer.h" +#include "api1/client2/CallbackProcessor.h" +#include "api1/client2/ZslProcessor.h" +#include "api1/client2/ZslProcessor3.h" + +#define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__); +#define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__); + +namespace android { +using namespace camera2; + +static int getCallingPid() { + return IPCThreadState::self()->getCallingPid(); +} + +// Interface used by CameraService + +Camera2Client::Camera2Client(const sp& cameraService, + const sp& cameraClient, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid, + int deviceVersion): + Camera2ClientBase(cameraService, cameraClient, clientPackageName, + cameraId, cameraFacing, clientPid, clientUid, servicePid), + mParameters(cameraId, cameraFacing), + mDeviceVersion(deviceVersion) +{ + ATRACE_CALL(); + + SharedParameters::Lock l(mParameters); + l.mParameters.state = Parameters::DISCONNECTED; +} + +status_t Camera2Client::initialize(camera_module_t *module) +{ + ATRACE_CALL(); + ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId); + status_t res; + + res = Camera2ClientBase::initialize(module); + if (res != OK) { + return res; + } + + SharedParameters::Lock l(mParameters); + + res = l.mParameters.initialize(&(mDevice->info())); + if (res != OK) { + ALOGE("%s: Camera %d: unable to build defaults: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return NO_INIT; + } + + String8 threadName; + + mStreamingProcessor = new StreamingProcessor(this); + threadName = String8::format("C2-%d-StreamProc", + mCameraId); + mStreamingProcessor->run(threadName.string()); + + mFrameProcessor = new FrameProcessor(mDevice, this); + threadName = String8::format("C2-%d-FrameProc", + mCameraId); + mFrameProcessor->run(threadName.string()); + + mCaptureSequencer = new CaptureSequencer(this); + threadName = String8::format("C2-%d-CaptureSeq", + mCameraId); + mCaptureSequencer->run(threadName.string()); + + mJpegProcessor = new JpegProcessor(this, mCaptureSequencer); + threadName = String8::format("C2-%d-JpegProc", + mCameraId); + mJpegProcessor->run(threadName.string()); + + switch (mDeviceVersion) { + case CAMERA_DEVICE_API_VERSION_2_0: { + sp zslProc = + new ZslProcessor(this, mCaptureSequencer); + mZslProcessor = zslProc; + mZslProcessorThread = zslProc; + break; + } + case CAMERA_DEVICE_API_VERSION_3_0:{ + sp zslProc = + new ZslProcessor3(this, mCaptureSequencer); + mZslProcessor = zslProc; + mZslProcessorThread = zslProc; + break; + } + default: + break; + } + threadName = String8::format("C2-%d-ZslProc", + mCameraId); + mZslProcessorThread->run(threadName.string()); + + mCallbackProcessor = new CallbackProcessor(this); + threadName = String8::format("C2-%d-CallbkProc", + mCameraId); + mCallbackProcessor->run(threadName.string()); + + if (gLogLevel >= 1) { + ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__, + mCameraId); + ALOGD("%s", l.mParameters.paramsFlattened.string()); + } + + return OK; +} + +Camera2Client::~Camera2Client() { + ATRACE_CALL(); + ALOGV("~Camera2Client"); + + mDestructionStarted = true; + + disconnect(); + + ALOGI("Camera %d: Closed", mCameraId); +} + +status_t 
Camera2Client::dump(int fd, const Vector& args) { + String8 result; + result.appendFormat("Client2[%d] (%p) Client: %s PID: %d, dump:\n", + mCameraId, + getRemoteCallback()->asBinder().get(), + String8(mClientPackageName).string(), + mClientPid); + result.append(" State: "); +#define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break; + + const Parameters& p = mParameters.unsafeAccess(); + + result.append(Parameters::getStateName(p.state)); + + result.append("\n Current parameters:\n"); + result.appendFormat(" Preview size: %d x %d\n", + p.previewWidth, p.previewHeight); + result.appendFormat(" Preview FPS range: %d - %d\n", + p.previewFpsRange[0], p.previewFpsRange[1]); + result.appendFormat(" Preview HAL pixel format: 0x%x\n", + p.previewFormat); + result.appendFormat(" Preview transform: %x\n", + p.previewTransform); + result.appendFormat(" Picture size: %d x %d\n", + p.pictureWidth, p.pictureHeight); + result.appendFormat(" Jpeg thumbnail size: %d x %d\n", + p.jpegThumbSize[0], p.jpegThumbSize[1]); + result.appendFormat(" Jpeg quality: %d, thumbnail quality: %d\n", + p.jpegQuality, p.jpegThumbQuality); + result.appendFormat(" Jpeg rotation: %d\n", p.jpegRotation); + result.appendFormat(" GPS tags %s\n", + p.gpsEnabled ? "enabled" : "disabled"); + if (p.gpsEnabled) { + result.appendFormat(" GPS lat x long x alt: %f x %f x %f\n", + p.gpsCoordinates[0], p.gpsCoordinates[1], + p.gpsCoordinates[2]); + result.appendFormat(" GPS timestamp: %lld\n", + p.gpsTimestamp); + result.appendFormat(" GPS processing method: %s\n", + p.gpsProcessingMethod.string()); + } + + result.append(" White balance mode: "); + switch (p.wbMode) { + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT) + CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE) + default: result.append("UNKNOWN\n"); + } + + result.append(" Effect mode: "); + switch (p.effectMode) { + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD) + CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA) + default: result.append("UNKNOWN\n"); + } + + result.append(" Antibanding mode: "); + switch (p.antibandingMode) { + CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO) + CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF) + CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ) + CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ) + default: result.append("UNKNOWN\n"); + } + + result.append(" Scene mode: "); + switch (p.sceneMode) { + case ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED: + result.append("AUTO\n"); break; + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT) + 
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_THEATRE) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BEACH) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SNOW) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SUNSET) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FIREWORKS) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SPORTS) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT) + CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE) + default: result.append("UNKNOWN\n"); + } + + result.append(" Flash mode: "); + switch (p.flashMode) { + CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF) + CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO) + CASE_APPEND_ENUM(Parameters::FLASH_MODE_ON) + CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH) + CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE) + CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID) + default: result.append("UNKNOWN\n"); + } + + result.append(" Focus mode: "); + switch (p.focusMode) { + CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO) + CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO) + CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) + CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_PICTURE) + CASE_APPEND_ENUM(Parameters::FOCUS_MODE_EDOF) + CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY) + CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED) + CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID) + default: result.append("UNKNOWN\n"); + } + + result.append(" Focus state: "); + switch (p.focusState) { + CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE) + CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN) + CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED) + CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN) + CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED) + CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) + default: result.append("UNKNOWN\n"); + } + + result.append(" Focusing areas:\n"); + for (size_t i = 0; i < p.focusingAreas.size(); i++) { + result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n", + p.focusingAreas[i].left, + p.focusingAreas[i].top, + p.focusingAreas[i].right, + p.focusingAreas[i].bottom, + p.focusingAreas[i].weight); + } + + result.appendFormat(" Exposure compensation index: %d\n", + p.exposureCompensation); + + result.appendFormat(" AE lock %s, AWB lock %s\n", + p.autoExposureLock ? "enabled" : "disabled", + p.autoWhiteBalanceLock ? "enabled" : "disabled" ); + + result.appendFormat(" Metering areas:\n"); + for (size_t i = 0; i < p.meteringAreas.size(); i++) { + result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n", + p.meteringAreas[i].left, + p.meteringAreas[i].top, + p.meteringAreas[i].right, + p.meteringAreas[i].bottom, + p.meteringAreas[i].weight); + } + + result.appendFormat(" Zoom index: %d\n", p.zoom); + result.appendFormat(" Video size: %d x %d\n", p.videoWidth, + p.videoHeight); + + result.appendFormat(" Recording hint is %s\n", + p.recordingHint ? "set" : "not set"); + + result.appendFormat(" Video stabilization is %s\n", + p.videoStabilization ? 
"enabled" : "disabled"); + + result.append(" Current streams:\n"); + result.appendFormat(" Preview stream ID: %d\n", + getPreviewStreamId()); + result.appendFormat(" Capture stream ID: %d\n", + getCaptureStreamId()); + result.appendFormat(" Recording stream ID: %d\n", + getRecordingStreamId()); + + result.append(" Quirks for this camera:\n"); + bool haveQuirk = false; + if (p.quirks.triggerAfWithAuto) { + result.appendFormat(" triggerAfWithAuto\n"); + haveQuirk = true; + } + if (p.quirks.useZslFormat) { + result.appendFormat(" useZslFormat\n"); + haveQuirk = true; + } + if (p.quirks.meteringCropRegion) { + result.appendFormat(" meteringCropRegion\n"); + haveQuirk = true; + } + if (!haveQuirk) { + result.appendFormat(" none\n"); + } + + write(fd, result.string(), result.size()); + + mStreamingProcessor->dump(fd, args); + + mCaptureSequencer->dump(fd, args); + + mFrameProcessor->dump(fd, args); + + mZslProcessor->dump(fd, args); + + return dumpDevice(fd, args); +#undef CASE_APPEND_ENUM +} + +// ICamera interface + +void Camera2Client::disconnect() { + ATRACE_CALL(); + Mutex::Autolock icl(mBinderSerializationLock); + + // Allow both client and the media server to disconnect at all times + int callingPid = getCallingPid(); + if (callingPid != mClientPid && callingPid != mServicePid) return; + + if (mDevice == 0) return; + + ALOGV("Camera %d: Shutting down", mCameraId); + + /** + * disconnect() cannot call any methods that might need to promote a + * wp, since disconnect can be called from the destructor, at + * which point all such promotions will fail. + */ + + stopPreviewL(); + + { + SharedParameters::Lock l(mParameters); + if (l.mParameters.state == Parameters::DISCONNECTED) return; + l.mParameters.state = Parameters::DISCONNECTED; + } + + mStreamingProcessor->deletePreviewStream(); + mStreamingProcessor->deleteRecordingStream(); + mJpegProcessor->deleteStream(); + mCallbackProcessor->deleteStream(); + mZslProcessor->deleteStream(); + + mStreamingProcessor->requestExit(); + mFrameProcessor->requestExit(); + mCaptureSequencer->requestExit(); + mJpegProcessor->requestExit(); + mZslProcessorThread->requestExit(); + mCallbackProcessor->requestExit(); + + ALOGV("Camera %d: Waiting for threads", mCameraId); + + mStreamingProcessor->join(); + mFrameProcessor->join(); + mCaptureSequencer->join(); + mJpegProcessor->join(); + mZslProcessorThread->join(); + mCallbackProcessor->join(); + + ALOGV("Camera %d: Disconnecting device", mCameraId); + + mDevice->disconnect(); + + mDevice.clear(); + + CameraService::Client::disconnect(); +} + +status_t Camera2Client::connect(const sp& client) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + + if (mClientPid != 0 && getCallingPid() != mClientPid) { + ALOGE("%s: Camera %d: Connection attempt from pid %d; " + "current locked to pid %d", __FUNCTION__, + mCameraId, getCallingPid(), mClientPid); + return BAD_VALUE; + } + + mClientPid = getCallingPid(); + + mRemoteCallback = client; + mSharedCameraCallbacks = client; + + return OK; +} + +status_t Camera2Client::lock() { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d", + __FUNCTION__, mCameraId, getCallingPid(), mClientPid); + + if (mClientPid == 0) { + mClientPid = getCallingPid(); + return OK; + } + + if (mClientPid != getCallingPid()) { + ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d", + __FUNCTION__, mCameraId, 
getCallingPid(), mClientPid); + return EBUSY; + } + + return OK; +} + +status_t Camera2Client::unlock() { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d", + __FUNCTION__, mCameraId, getCallingPid(), mClientPid); + + if (mClientPid == getCallingPid()) { + SharedParameters::Lock l(mParameters); + if (l.mParameters.state == Parameters::RECORD || + l.mParameters.state == Parameters::VIDEO_SNAPSHOT) { + ALOGD("Not allowed to unlock camera during recording."); + return INVALID_OPERATION; + } + mClientPid = 0; + mRemoteCallback.clear(); + mSharedCameraCallbacks.clear(); + return OK; + } + + ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d", + __FUNCTION__, mCameraId, getCallingPid(), mClientPid); + return EBUSY; +} + +status_t Camera2Client::setPreviewDisplay( + const sp& surface) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + sp binder; + sp window; + if (surface != 0) { + binder = surface->getIGraphicBufferProducer()->asBinder(); + window = surface; + } + + return setPreviewWindowL(binder,window); +} + +status_t Camera2Client::setPreviewTexture( + const sp& bufferProducer) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + sp binder; + sp window; + if (bufferProducer != 0) { + binder = bufferProducer->asBinder(); + window = new Surface(bufferProducer); + } + return setPreviewWindowL(binder, window); +} + +status_t Camera2Client::setPreviewWindowL(const sp& binder, + sp window) { + ATRACE_CALL(); + status_t res; + + if (binder == mPreviewSurface) { + ALOGV("%s: Camera %d: New window is same as old window", + __FUNCTION__, mCameraId); + return NO_ERROR; + } + + Parameters::State state; + { + SharedParameters::Lock l(mParameters); + state = l.mParameters.state; + } + switch (state) { + case Parameters::DISCONNECTED: + case Parameters::RECORD: + case Parameters::STILL_CAPTURE: + case Parameters::VIDEO_SNAPSHOT: + ALOGE("%s: Camera %d: Cannot set preview display while in state %s", + __FUNCTION__, mCameraId, + Parameters::getStateName(state)); + return INVALID_OPERATION; + case Parameters::STOPPED: + case Parameters::WAITING_FOR_PREVIEW_WINDOW: + // OK + break; + case Parameters::PREVIEW: + // Already running preview - need to stop and create a new stream + res = stopStream(); + if (res != OK) { + ALOGE("%s: Unable to stop preview to swap windows: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + state = Parameters::WAITING_FOR_PREVIEW_WINDOW; + break; + } + + mPreviewSurface = binder; + res = mStreamingProcessor->setPreviewWindow(window); + if (res != OK) { + ALOGE("%s: Unable to set new preview window: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + if (state == Parameters::WAITING_FOR_PREVIEW_WINDOW) { + SharedParameters::Lock l(mParameters); + l.mParameters.state = state; + return startPreviewL(l.mParameters, false); + } + + return OK; +} + +void Camera2Client::setPreviewCallbackFlag(int flag) { + ATRACE_CALL(); + ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag); + Mutex::Autolock icl(mBinderSerializationLock); + + if ( checkPid(__FUNCTION__) != OK) return; + + SharedParameters::Lock l(mParameters); + setPreviewCallbackFlagL(l.mParameters, 
flag); +} + +void Camera2Client::setPreviewCallbackFlagL(Parameters ¶ms, int flag) { + status_t res = OK; + + switch(params.state) { + case Parameters::STOPPED: + case Parameters::WAITING_FOR_PREVIEW_WINDOW: + case Parameters::PREVIEW: + case Parameters::STILL_CAPTURE: + // OK + break; + default: + if (flag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) { + ALOGE("%s: Camera %d: Can't use preview callbacks " + "in state %d", __FUNCTION__, mCameraId, params.state); + return; + } + } + + if (flag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) { + ALOGV("%s: setting oneshot", __FUNCTION__); + params.previewCallbackOneShot = true; + } + if (params.previewCallbackFlags != (uint32_t)flag) { + + if (flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) { + // Disable any existing preview callback window when enabling + // preview callback flags + res = mCallbackProcessor->setCallbackWindow(NULL); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to clear preview callback surface:" + " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); + return; + } + params.previewCallbackSurface = false; + } + + params.previewCallbackFlags = flag; + + if (params.state == Parameters::PREVIEW) { + res = startPreviewL(params, true); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to refresh request in state %s", + __FUNCTION__, mCameraId, + Parameters::getStateName(params.state)); + } + } + } +} + +status_t Camera2Client::setPreviewCallbackTarget( + const sp& callbackProducer) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + sp window; + if (callbackProducer != 0) { + window = new Surface(callbackProducer); + } + + res = mCallbackProcessor->setCallbackWindow(window); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + + SharedParameters::Lock l(mParameters); + + if (window != NULL) { + // Disable traditional callbacks when a valid callback target is given + l.mParameters.previewCallbackFlags = CAMERA_FRAME_CALLBACK_FLAG_NOOP; + l.mParameters.previewCallbackOneShot = false; + l.mParameters.previewCallbackSurface = true; + } else { + // Disable callback target if given a NULL interface. 
+ l.mParameters.previewCallbackSurface = false; + } + + switch(l.mParameters.state) { + case Parameters::PREVIEW: + res = startPreviewL(l.mParameters, true); + break; + case Parameters::RECORD: + case Parameters::VIDEO_SNAPSHOT: + res = startRecordingL(l.mParameters, true); + break; + default: + break; + } + if (res != OK) { + ALOGE("%s: Camera %d: Unable to refresh request in state %s", + __FUNCTION__, mCameraId, + Parameters::getStateName(l.mParameters.state)); + } + + return OK; +} + + +status_t Camera2Client::startPreview() { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + SharedParameters::Lock l(mParameters); + return startPreviewL(l.mParameters, false); +} + +status_t Camera2Client::startPreviewL(Parameters ¶ms, bool restart) { + ATRACE_CALL(); + status_t res; + + ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart); + + if ( (params.state == Parameters::PREVIEW || + params.state == Parameters::RECORD || + params.state == Parameters::VIDEO_SNAPSHOT) + && !restart) { + // Succeed attempt to re-enter a streaming state + ALOGI("%s: Camera %d: Preview already active, ignoring restart", + __FUNCTION__, mCameraId); + return OK; + } + if (params.state > Parameters::PREVIEW && !restart) { + ALOGE("%s: Can't start preview in state %s", + __FUNCTION__, + Parameters::getStateName(params.state)); + return INVALID_OPERATION; + } + + if (!mStreamingProcessor->haveValidPreviewWindow()) { + params.state = Parameters::WAITING_FOR_PREVIEW_WINDOW; + return OK; + } + params.state = Parameters::STOPPED; + + res = mStreamingProcessor->updatePreviewStream(params); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update preview stream: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + + // We could wait to create the JPEG output stream until first actual use + // (first takePicture call). However, this would substantially increase the + // first capture latency on HAL3 devices, and potentially on some HAL2 + // devices. So create it unconditionally at preview start. As a drawback, + // this increases gralloc memory consumption for applications that don't + // ever take a picture. + // TODO: Find a better compromise, though this likely would involve HAL + // changes. + res = updateProcessorStream(mJpegProcessor, params); + if (res != OK) { + ALOGE("%s: Camera %d: Can't pre-configure still image " + "stream: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + + Vector outputStreams; + bool callbacksEnabled = (params.previewCallbackFlags & + CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) || + params.previewCallbackSurface; + + if (callbacksEnabled) { + // Can't have recording stream hanging around when enabling callbacks, + // since it exceeds the max stream count on some devices. 
+ if (mStreamingProcessor->getRecordingStreamId() != NO_STREAM) { + ALOGV("%s: Camera %d: Clearing out recording stream before " + "creating callback stream", __FUNCTION__, mCameraId); + res = mStreamingProcessor->stopStream(); + if (res != OK) { + ALOGE("%s: Camera %d: Can't stop streaming to delete " + "recording stream", __FUNCTION__, mCameraId); + return res; + } + res = mStreamingProcessor->deleteRecordingStream(); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete recording stream before " + "enabling callbacks: %s (%d)", __FUNCTION__, mCameraId, + strerror(-res), res); + return res; + } + } + + res = mCallbackProcessor->updateStream(params); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + outputStreams.push(getCallbackStreamId()); + } + if (params.zslMode && !params.recordingHint) { + res = updateProcessorStream(mZslProcessor, params); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + outputStreams.push(getZslStreamId()); + } + + outputStreams.push(getPreviewStreamId()); + + if (!params.recordingHint) { + if (!restart) { + res = mStreamingProcessor->updatePreviewRequest(params); + if (res != OK) { + ALOGE("%s: Camera %d: Can't set up preview request: " + "%s (%d)", __FUNCTION__, mCameraId, + strerror(-res), res); + return res; + } + } + res = mStreamingProcessor->startStream(StreamingProcessor::PREVIEW, + outputStreams); + } else { + if (!restart) { + res = mStreamingProcessor->updateRecordingRequest(params); + if (res != OK) { + ALOGE("%s: Camera %d: Can't set up preview request with " + "record hint: %s (%d)", __FUNCTION__, mCameraId, + strerror(-res), res); + return res; + } + } + res = mStreamingProcessor->startStream(StreamingProcessor::RECORD, + outputStreams); + } + if (res != OK) { + ALOGE("%s: Camera %d: Unable to start streaming preview: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + + params.state = Parameters::PREVIEW; + return OK; +} + +void Camera2Client::stopPreview() { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return; + stopPreviewL(); +} + +void Camera2Client::stopPreviewL() { + ATRACE_CALL(); + status_t res; + const nsecs_t kStopCaptureTimeout = 3000000000LL; // 3 seconds + Parameters::State state; + { + SharedParameters::Lock l(mParameters); + state = l.mParameters.state; + } + + switch (state) { + case Parameters::DISCONNECTED: + // Nothing to do. 
+ break; + case Parameters::STOPPED: + case Parameters::VIDEO_SNAPSHOT: + case Parameters::STILL_CAPTURE: + mCaptureSequencer->waitUntilIdle(kStopCaptureTimeout); + // no break + case Parameters::RECORD: + case Parameters::PREVIEW: + res = stopStream(); + if (res != OK) { + ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + } + res = mDevice->waitUntilDrained(); + if (res != OK) { + ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + } + // no break + case Parameters::WAITING_FOR_PREVIEW_WINDOW: { + SharedParameters::Lock l(mParameters); + l.mParameters.state = Parameters::STOPPED; + commandStopFaceDetectionL(l.mParameters); + break; + } + default: + ALOGE("%s: Camera %d: Unknown state %d", __FUNCTION__, mCameraId, + state); + } +} + +bool Camera2Client::previewEnabled() { + ATRACE_CALL(); + Mutex::Autolock icl(mBinderSerializationLock); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return false; + + SharedParameters::Lock l(mParameters); + return l.mParameters.state == Parameters::PREVIEW; +} + +status_t Camera2Client::storeMetaDataInBuffers(bool enabled) { + ATRACE_CALL(); + Mutex::Autolock icl(mBinderSerializationLock); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + SharedParameters::Lock l(mParameters); + switch (l.mParameters.state) { + case Parameters::RECORD: + case Parameters::VIDEO_SNAPSHOT: + ALOGE("%s: Camera %d: Can't be called in state %s", + __FUNCTION__, mCameraId, + Parameters::getStateName(l.mParameters.state)); + return INVALID_OPERATION; + default: + // OK + break; + } + + l.mParameters.storeMetadataInBuffers = enabled; + + return OK; +} + +status_t Camera2Client::startRecording() { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + SharedParameters::Lock l(mParameters); + + return startRecordingL(l.mParameters, false); +} + +status_t Camera2Client::startRecordingL(Parameters ¶ms, bool restart) { + status_t res; + + ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart); + + switch (params.state) { + case Parameters::STOPPED: + res = startPreviewL(params, false); + if (res != OK) return res; + break; + case Parameters::PREVIEW: + // Ready to go + break; + case Parameters::RECORD: + case Parameters::VIDEO_SNAPSHOT: + // OK to call this when recording is already on, just skip unless + // we're looking to restart + if (!restart) return OK; + break; + default: + ALOGE("%s: Camera %d: Can't start recording in state %s", + __FUNCTION__, mCameraId, + Parameters::getStateName(params.state)); + return INVALID_OPERATION; + }; + + if (!params.storeMetadataInBuffers) { + ALOGE("%s: Camera %d: Recording only supported in metadata mode, but " + "non-metadata recording mode requested!", __FUNCTION__, + mCameraId); + return INVALID_OPERATION; + } + + if (!restart) { + mCameraService->playSound(CameraService::SOUND_RECORDING); + mStreamingProcessor->updateRecordingRequest(params); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + } + + // Not all devices can support a preview callback stream and a recording + // stream at the same time, so assume none of them can. 
+ if (mCallbackProcessor->getStreamId() != NO_STREAM) { + ALOGV("%s: Camera %d: Clearing out callback stream before " + "creating recording stream", __FUNCTION__, mCameraId); + res = mStreamingProcessor->stopStream(); + if (res != OK) { + ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream", + __FUNCTION__, mCameraId); + return res; + } + res = mCallbackProcessor->deleteStream(); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete callback stream before " + "record: %s (%d)", __FUNCTION__, mCameraId, + strerror(-res), res); + return res; + } + } + // Disable callbacks if they're enabled; can't record and use callbacks, + // and we can't fail record start without stagefright asserting. + params.previewCallbackFlags = 0; + + res = updateProcessorStream< + StreamingProcessor, + &StreamingProcessor::updateRecordingStream>(mStreamingProcessor, + params); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update recording stream: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + + Vector outputStreams; + outputStreams.push(getPreviewStreamId()); + outputStreams.push(getRecordingStreamId()); + + res = mStreamingProcessor->startStream(StreamingProcessor::RECORD, + outputStreams); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to start recording stream: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + + if (params.state < Parameters::RECORD) { + params.state = Parameters::RECORD; + } + + return OK; +} + +void Camera2Client::stopRecording() { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + SharedParameters::Lock l(mParameters); + + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return; + + switch (l.mParameters.state) { + case Parameters::RECORD: + // OK to stop + break; + case Parameters::STOPPED: + case Parameters::PREVIEW: + case Parameters::STILL_CAPTURE: + case Parameters::VIDEO_SNAPSHOT: + default: + ALOGE("%s: Camera %d: Can't stop recording in state %s", + __FUNCTION__, mCameraId, + Parameters::getStateName(l.mParameters.state)); + return; + }; + + mCameraService->playSound(CameraService::SOUND_RECORDING); + + res = startPreviewL(l.mParameters, true); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to return to preview", + __FUNCTION__, mCameraId); + } +} + +bool Camera2Client::recordingEnabled() { + ATRACE_CALL(); + Mutex::Autolock icl(mBinderSerializationLock); + + if ( checkPid(__FUNCTION__) != OK) return false; + + return recordingEnabledL(); +} + +bool Camera2Client::recordingEnabledL() { + ATRACE_CALL(); + SharedParameters::Lock l(mParameters); + + return (l.mParameters.state == Parameters::RECORD + || l.mParameters.state == Parameters::VIDEO_SNAPSHOT); +} + +void Camera2Client::releaseRecordingFrame(const sp& mem) { + ATRACE_CALL(); + Mutex::Autolock icl(mBinderSerializationLock); + if ( checkPid(__FUNCTION__) != OK) return; + + mStreamingProcessor->releaseRecordingFrame(mem); +} + +status_t Camera2Client::autoFocus() { + ATRACE_CALL(); + Mutex::Autolock icl(mBinderSerializationLock); + ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + int triggerId; + bool notifyImmediately = false; + bool notifySuccess = false; + { + SharedParameters::Lock l(mParameters); + if (l.mParameters.state < Parameters::PREVIEW) { + return INVALID_OPERATION; + } + + /** + * If the camera does not support auto-focus, it is a no-op and + * onAutoFocus(boolean, Camera) callback will 
be called immediately + * with a fake value of success set to true. + * + * Similarly, if focus mode is set to INFINITY, there's no reason to + * bother the HAL. + */ + if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED || + l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) { + notifyImmediately = true; + notifySuccess = true; + } + /** + * If we're in CAF mode, and AF has already been locked, just fire back + * the callback right away; the HAL would not send a notification since + * no state change would happen on a AF trigger. + */ + if ( (l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_PICTURE || + l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) && + l.mParameters.focusState == ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED ) { + notifyImmediately = true; + notifySuccess = true; + } + /** + * Send immediate notification back to client + */ + if (notifyImmediately) { + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, + notifySuccess ? 1 : 0, 0); + } + return OK; + } + /** + * Handle quirk mode for AF in scene modes + */ + if (l.mParameters.quirks.triggerAfWithAuto && + l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED && + l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO && + !l.mParameters.focusingAreas[0].isEmpty()) { + ALOGV("%s: Quirk: Switching from focusMode %d to AUTO", + __FUNCTION__, l.mParameters.focusMode); + l.mParameters.shadowFocusMode = l.mParameters.focusMode; + l.mParameters.focusMode = Parameters::FOCUS_MODE_AUTO; + updateRequests(l.mParameters); + } + + l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter; + triggerId = l.mParameters.currentAfTriggerId; + } + syncWithDevice(); + + mDevice->triggerAutofocus(triggerId); + + return OK; +} + +status_t Camera2Client::cancelAutoFocus() { + ATRACE_CALL(); + Mutex::Autolock icl(mBinderSerializationLock); + ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + int triggerId; + { + SharedParameters::Lock l(mParameters); + // Canceling does nothing in FIXED or INFINITY modes + if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED || + l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) { + return OK; + } + triggerId = ++l.mParameters.afTriggerCounter; + + // When using triggerAfWithAuto quirk, may need to reset focus mode to + // the real state at this point. No need to cancel explicitly if + // changing the AF mode. 
+ if (l.mParameters.shadowFocusMode != Parameters::FOCUS_MODE_INVALID) { + ALOGV("%s: Quirk: Restoring focus mode to %d", __FUNCTION__, + l.mParameters.shadowFocusMode); + l.mParameters.focusMode = l.mParameters.shadowFocusMode; + l.mParameters.shadowFocusMode = Parameters::FOCUS_MODE_INVALID; + updateRequests(l.mParameters); + + return OK; + } + } + syncWithDevice(); + + mDevice->triggerCancelAutofocus(triggerId); + + return OK; +} + +status_t Camera2Client::takePicture(int msgType) { + ATRACE_CALL(); + Mutex::Autolock icl(mBinderSerializationLock); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + { + SharedParameters::Lock l(mParameters); + switch (l.mParameters.state) { + case Parameters::DISCONNECTED: + case Parameters::STOPPED: + case Parameters::WAITING_FOR_PREVIEW_WINDOW: + ALOGE("%s: Camera %d: Cannot take picture without preview enabled", + __FUNCTION__, mCameraId); + return INVALID_OPERATION; + case Parameters::PREVIEW: + // Good to go for takePicture + res = commandStopFaceDetectionL(l.mParameters); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to stop face detection for still capture", + __FUNCTION__, mCameraId); + return res; + } + l.mParameters.state = Parameters::STILL_CAPTURE; + break; + case Parameters::RECORD: + // Good to go for video snapshot + l.mParameters.state = Parameters::VIDEO_SNAPSHOT; + break; + case Parameters::STILL_CAPTURE: + case Parameters::VIDEO_SNAPSHOT: + ALOGE("%s: Camera %d: Already taking a picture", + __FUNCTION__, mCameraId); + return INVALID_OPERATION; + } + + ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId); + + res = updateProcessorStream(mJpegProcessor, l.mParameters); + if (res != OK) { + ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + } + + // Need HAL to have correct settings before (possibly) triggering precapture + syncWithDevice(); + + res = mCaptureSequencer->startCapture(msgType); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to start capture: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + } + + return res; +} + +status_t Camera2Client::setParameters(const String8& params) { + ATRACE_CALL(); + ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); + Mutex::Autolock icl(mBinderSerializationLock); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + SharedParameters::Lock l(mParameters); + + res = l.mParameters.set(params); + if (res != OK) return res; + + res = updateRequests(l.mParameters); + + return res; +} + +String8 Camera2Client::getParameters() const { + ATRACE_CALL(); + ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); + Mutex::Autolock icl(mBinderSerializationLock); + if ( checkPid(__FUNCTION__) != OK) return String8(); + + SharedParameters::ReadLock l(mParameters); + + return l.mParameters.get(); +} + +status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) { + ATRACE_CALL(); + Mutex::Autolock icl(mBinderSerializationLock); + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + ALOGV("%s: Camera %d: Command %d (%d, %d)", __FUNCTION__, mCameraId, + cmd, arg1, arg2); + + switch (cmd) { + case CAMERA_CMD_START_SMOOTH_ZOOM: + return commandStartSmoothZoomL(); + case CAMERA_CMD_STOP_SMOOTH_ZOOM: + return commandStopSmoothZoomL(); + case CAMERA_CMD_SET_DISPLAY_ORIENTATION: + return commandSetDisplayOrientationL(arg1); + case CAMERA_CMD_ENABLE_SHUTTER_SOUND: + return commandEnableShutterSoundL(arg1 == 1); + case 
CAMERA_CMD_PLAY_RECORDING_SOUND: + return commandPlayRecordingSoundL(); + case CAMERA_CMD_START_FACE_DETECTION: + return commandStartFaceDetectionL(arg1); + case CAMERA_CMD_STOP_FACE_DETECTION: { + SharedParameters::Lock l(mParameters); + return commandStopFaceDetectionL(l.mParameters); + } + case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG: + return commandEnableFocusMoveMsgL(arg1 == 1); + case CAMERA_CMD_PING: + return commandPingL(); + case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT: + return commandSetVideoBufferCountL(arg1); + default: + ALOGE("%s: Unknown command %d (arguments %d, %d)", + __FUNCTION__, cmd, arg1, arg2); + return BAD_VALUE; + } +} + +status_t Camera2Client::commandStartSmoothZoomL() { + ALOGE("%s: Unimplemented!", __FUNCTION__); + return OK; +} + +status_t Camera2Client::commandStopSmoothZoomL() { + ALOGE("%s: Unimplemented!", __FUNCTION__); + return OK; +} + +status_t Camera2Client::commandSetDisplayOrientationL(int degrees) { + int transform = Parameters::degToTransform(degrees, + mCameraFacing == CAMERA_FACING_FRONT); + if (transform == -1) { + ALOGE("%s: Camera %d: Error setting %d as display orientation value", + __FUNCTION__, mCameraId, degrees); + return BAD_VALUE; + } + SharedParameters::Lock l(mParameters); + if (transform != l.mParameters.previewTransform && + getPreviewStreamId() != NO_STREAM) { + mDevice->setStreamTransform(getPreviewStreamId(), transform); + } + l.mParameters.previewTransform = transform; + return OK; +} + +status_t Camera2Client::commandEnableShutterSoundL(bool enable) { + SharedParameters::Lock l(mParameters); + if (enable) { + l.mParameters.playShutterSound = true; + return OK; + } + + // Disabling shutter sound may not be allowed. In that case only + // allow the mediaserver process to disable the sound. + char value[PROPERTY_VALUE_MAX]; + property_get("ro.camera.sound.forced", value, "0"); + if (strncmp(value, "0", 2) != 0) { + // Disabling shutter sound is not allowed. Deny if the current + // process is not mediaserver. + if (getCallingPid() != getpid()) { + ALOGE("Failed to disable shutter sound. 
Permission denied (pid %d)", + getCallingPid()); + return PERMISSION_DENIED; + } + } + + l.mParameters.playShutterSound = false; + return OK; +} + +status_t Camera2Client::commandPlayRecordingSoundL() { + mCameraService->playSound(CameraService::SOUND_RECORDING); + return OK; +} + +status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) { + ALOGV("%s: Camera %d: Starting face detection", + __FUNCTION__, mCameraId); + status_t res; + SharedParameters::Lock l(mParameters); + switch (l.mParameters.state) { + case Parameters::DISCONNECTED: + case Parameters::STOPPED: + case Parameters::WAITING_FOR_PREVIEW_WINDOW: + case Parameters::STILL_CAPTURE: + ALOGE("%s: Camera %d: Cannot start face detection without preview active", + __FUNCTION__, mCameraId); + return INVALID_OPERATION; + case Parameters::PREVIEW: + case Parameters::RECORD: + case Parameters::VIDEO_SNAPSHOT: + // Good to go for starting face detect + break; + } + // Ignoring type + if (l.mParameters.fastInfo.bestFaceDetectMode == + ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) { + ALOGE("%s: Camera %d: Face detection not supported", + __FUNCTION__, mCameraId); + return BAD_VALUE; + } + if (l.mParameters.enableFaceDetect) return OK; + + l.mParameters.enableFaceDetect = true; + + res = updateRequests(l.mParameters); + + return res; +} + +status_t Camera2Client::commandStopFaceDetectionL(Parameters ¶ms) { + status_t res = OK; + ALOGV("%s: Camera %d: Stopping face detection", + __FUNCTION__, mCameraId); + + if (!params.enableFaceDetect) return OK; + + params.enableFaceDetect = false; + + if (params.state == Parameters::PREVIEW + || params.state == Parameters::RECORD + || params.state == Parameters::VIDEO_SNAPSHOT) { + res = updateRequests(params); + } + + return res; +} + +status_t Camera2Client::commandEnableFocusMoveMsgL(bool enable) { + SharedParameters::Lock l(mParameters); + l.mParameters.enableFocusMoveMessages = enable; + + return OK; +} + +status_t Camera2Client::commandPingL() { + // Always ping back if access is proper and device is alive + SharedParameters::Lock l(mParameters); + if (l.mParameters.state != Parameters::DISCONNECTED) { + return OK; + } else { + return NO_INIT; + } +} + +status_t Camera2Client::commandSetVideoBufferCountL(size_t count) { + if (recordingEnabledL()) { + ALOGE("%s: Camera %d: Error setting video buffer count after " + "recording was started", __FUNCTION__, mCameraId); + return INVALID_OPERATION; + } + + return mStreamingProcessor->setRecordingBufferCount(count); +} + +/** Device-related methods */ +void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) { + ALOGV("%s: Autofocus state now %d, last trigger %d", + __FUNCTION__, newState, triggerId); + bool sendCompletedMessage = false; + bool sendMovingMessage = false; + + bool success = false; + bool afInMotion = false; + { + SharedParameters::Lock l(mParameters); + l.mParameters.focusState = newState; + switch (l.mParameters.focusMode) { + case Parameters::FOCUS_MODE_AUTO: + case Parameters::FOCUS_MODE_MACRO: + // Don't send notifications upstream if they're not for the current AF + // trigger. For example, if cancel was called in between, or if we + // already sent a notification about this AF call. 
+ if (triggerId != l.mParameters.currentAfTriggerId) break; + switch (newState) { + case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED: + success = true; + // no break + case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: + sendCompletedMessage = true; + l.mParameters.currentAfTriggerId = -1; + break; + case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN: + // Just starting focusing, ignore + break; + case ANDROID_CONTROL_AF_STATE_INACTIVE: + case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN: + case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED: + default: + // Unexpected in AUTO/MACRO mode + ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d", + __FUNCTION__, newState); + break; + } + break; + case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO: + case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE: + switch (newState) { + case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED: + success = true; + // no break + case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: + // Don't send notifications upstream if they're not for + // the current AF trigger. For example, if cancel was + // called in between, or if we already sent a + // notification about this AF call. + // Send both a 'AF done' callback and a 'AF move' callback + if (triggerId != l.mParameters.currentAfTriggerId) break; + sendCompletedMessage = true; + afInMotion = false; + if (l.mParameters.enableFocusMoveMessages && + l.mParameters.afInMotion) { + sendMovingMessage = true; + } + l.mParameters.currentAfTriggerId = -1; + break; + case ANDROID_CONTROL_AF_STATE_INACTIVE: + // Cancel was called, or we switched state; care if + // currently moving + afInMotion = false; + if (l.mParameters.enableFocusMoveMessages && + l.mParameters.afInMotion) { + sendMovingMessage = true; + } + break; + case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN: + // Start passive scan, inform upstream + afInMotion = true; + // no break + case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED: + // Stop passive scan, inform upstream + if (l.mParameters.enableFocusMoveMessages) { + sendMovingMessage = true; + } + break; + } + l.mParameters.afInMotion = afInMotion; + break; + case Parameters::FOCUS_MODE_EDOF: + case Parameters::FOCUS_MODE_INFINITY: + case Parameters::FOCUS_MODE_FIXED: + default: + if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) { + ALOGE("%s: Unexpected AF state change %d " + "(ID %d) in focus mode %d", + __FUNCTION__, newState, triggerId, + l.mParameters.focusMode); + } + } + } + if (sendMovingMessage) { + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE, + afInMotion ? 1 : 0, 0); + } + } + if (sendCompletedMessage) { + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, + success ? 
1 : 0, 0); + } + } +} + +void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) { + ALOGV("%s: Autoexposure state now %d, last trigger %d", + __FUNCTION__, newState, triggerId); + mCaptureSequencer->notifyAutoExposure(newState, triggerId); +} + +camera2::SharedParameters& Camera2Client::getParameters() { + return mParameters; +} + +int Camera2Client::getPreviewStreamId() const { + return mStreamingProcessor->getPreviewStreamId(); +} + +int Camera2Client::getCaptureStreamId() const { + return mJpegProcessor->getStreamId(); +} + +int Camera2Client::getCallbackStreamId() const { + return mCallbackProcessor->getStreamId(); +} + +int Camera2Client::getRecordingStreamId() const { + return mStreamingProcessor->getRecordingStreamId(); +} + +int Camera2Client::getZslStreamId() const { + return mZslProcessor->getStreamId(); +} + +status_t Camera2Client::registerFrameListener(int32_t minId, int32_t maxId, + wp listener) { + return mFrameProcessor->registerListener(minId, maxId, listener); +} + +status_t Camera2Client::removeFrameListener(int32_t minId, int32_t maxId, + wp listener) { + return mFrameProcessor->removeListener(minId, maxId, listener); +} + +status_t Camera2Client::stopStream() { + return mStreamingProcessor->stopStream(); +} + +const int32_t Camera2Client::kPreviewRequestIdStart; +const int32_t Camera2Client::kPreviewRequestIdEnd; +const int32_t Camera2Client::kRecordingRequestIdStart; +const int32_t Camera2Client::kRecordingRequestIdEnd; +const int32_t Camera2Client::kCaptureRequestIdStart; +const int32_t Camera2Client::kCaptureRequestIdEnd; + +/** Utility methods */ + +status_t Camera2Client::updateRequests(Parameters ¶ms) { + status_t res; + + ALOGV("%s: Camera %d: state = %d", __FUNCTION__, getCameraId(), params.state); + + res = mStreamingProcessor->incrementStreamingIds(); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to increment request IDs: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + + res = mStreamingProcessor->updatePreviewRequest(params); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update preview request: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + res = mStreamingProcessor->updateRecordingRequest(params); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + + if (params.state == Parameters::PREVIEW) { + res = startPreviewL(params, true); + if (res != OK) { + ALOGE("%s: Camera %d: Error streaming new preview request: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + } else if (params.state == Parameters::RECORD || + params.state == Parameters::VIDEO_SNAPSHOT) { + res = startRecordingL(params, true); + if (res != OK) { + ALOGE("%s: Camera %d: Error streaming new record request: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + return res; + } + } + return res; +} + + +size_t Camera2Client::calculateBufferSize(int width, int height, + int format, int stride) { + switch (format) { + case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16 + return width * height * 2; + case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21 + return width * height * 3 / 2; + case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2 + return width * height * 2; + case HAL_PIXEL_FORMAT_YV12: { // YV12 + size_t ySize = stride * height; + size_t uvStride = (stride / 2 + 0xF) & ~0xF; + size_t uvSize = uvStride * height / 2; + return ySize + uvSize * 2; + } + case HAL_PIXEL_FORMAT_RGB_565: + return 
width * height * 2; + case HAL_PIXEL_FORMAT_RGBA_8888: + return width * height * 4; + case HAL_PIXEL_FORMAT_RAW_SENSOR: + return width * height * 2; + default: + ALOGE("%s: Unknown preview format: %x", + __FUNCTION__, format); + return 0; + } +} + +status_t Camera2Client::syncWithDevice() { + ATRACE_CALL(); + const nsecs_t kMaxSyncTimeout = 500000000; // 500 ms + status_t res; + + int32_t activeRequestId = mStreamingProcessor->getActiveRequestId(); + if (activeRequestId == 0) return OK; + + res = mDevice->waitUntilRequestReceived(activeRequestId, kMaxSyncTimeout); + if (res == TIMED_OUT) { + ALOGE("%s: Camera %d: Timed out waiting sync with HAL", + __FUNCTION__, mCameraId); + } else if (res != OK) { + ALOGE("%s: Camera %d: Error while waiting to sync with HAL", + __FUNCTION__, mCameraId); + } + return res; +} + +template +status_t Camera2Client::updateProcessorStream(sp processor, + camera2::Parameters params) { + // No default template arguments until C++11, so we need this overload + return updateProcessorStream( + processor, params); +} + +template +status_t Camera2Client::updateProcessorStream(sp processor, + Parameters params) { + status_t res; + + // Get raw pointer since sp doesn't have operator->* + ProcessorT *processorPtr = processor.get(); + res = (processorPtr->*updateStreamF)(params); + + /** + * Can't update the stream if it's busy? + * + * Then we need to stop the device (by temporarily clearing the request + * queue) and then try again. Resume streaming once we're done. + */ + if (res == -EBUSY) { + ALOGV("%s: Camera %d: Pausing to update stream", __FUNCTION__, + mCameraId); + res = mStreamingProcessor->togglePauseStream(/*pause*/true); + if (res != OK) { + ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + } + + res = mDevice->waitUntilDrained(); + if (res != OK) { + ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + } + + res = (processorPtr->*updateStreamF)(params); + if (res != OK) { + ALOGE("%s: Camera %d: Failed to update processing stream " + " despite having halted streaming first: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + } + + res = mStreamingProcessor->togglePauseStream(/*pause*/false); + if (res != OK) { + ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + } + } + + return res; +} + +} // namespace android diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h new file mode 100644 index 0000000..ed448f3 --- /dev/null +++ b/services/camera/libcameraservice/api1/Camera2Client.h @@ -0,0 +1,211 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_H +#define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_H + +#include "CameraService.h" +#include "common/CameraDeviceBase.h" +#include "common/Camera2ClientBase.h" +#include "api1/client2/Parameters.h" +#include "api1/client2/FrameProcessor.h" +//#include "api1/client2/StreamingProcessor.h" +//#include "api1/client2/JpegProcessor.h" +//#include "api1/client2/ZslProcessorInterface.h" +//#include "api1/client2/CaptureSequencer.h" +//#include "api1/client2/CallbackProcessor.h" + +namespace android { + +namespace camera2 { + +class StreamingProcessor; +class JpegProcessor; +class ZslProcessorInterface; +class CaptureSequencer; +class CallbackProcessor; + +} + +class IMemory; +/** + * Interface between android.hardware.Camera API and Camera HAL device for versions + * CAMERA_DEVICE_API_VERSION_2_0 and 3_0. + */ +class Camera2Client : + public Camera2ClientBase +{ +public: + /** + * ICamera interface (see ICamera for details) + */ + + virtual void disconnect(); + virtual status_t connect(const sp& client); + virtual status_t lock(); + virtual status_t unlock(); + virtual status_t setPreviewDisplay(const sp& surface); + virtual status_t setPreviewTexture( + const sp& bufferProducer); + virtual void setPreviewCallbackFlag(int flag); + virtual status_t setPreviewCallbackTarget( + const sp& callbackProducer); + + virtual status_t startPreview(); + virtual void stopPreview(); + virtual bool previewEnabled(); + virtual status_t storeMetaDataInBuffers(bool enabled); + virtual status_t startRecording(); + virtual void stopRecording(); + virtual bool recordingEnabled(); + virtual void releaseRecordingFrame(const sp& mem); + virtual status_t autoFocus(); + virtual status_t cancelAutoFocus(); + virtual status_t takePicture(int msgType); + virtual status_t setParameters(const String8& params); + virtual String8 getParameters() const; + virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2); + + /** + * Interface used by CameraService + */ + + Camera2Client(const sp& cameraService, + const sp& cameraClient, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid, + int deviceVersion); + + virtual ~Camera2Client(); + + status_t initialize(camera_module_t *module); + + virtual status_t dump(int fd, const Vector& args); + + /** + * Interface used by CameraDeviceBase + */ + + virtual void notifyAutoFocus(uint8_t newState, int triggerId); + virtual void notifyAutoExposure(uint8_t newState, int triggerId); + + /** + * Interface used by independent components of Camera2Client. 
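+     * These expose shared parameters, stream IDs, frame-listener registration
+     * and stream control to the processor classes owned by this client.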
+ */ + + camera2::SharedParameters& getParameters(); + + int getPreviewStreamId() const; + int getCaptureStreamId() const; + int getCallbackStreamId() const; + int getRecordingStreamId() const; + int getZslStreamId() const; + + status_t registerFrameListener(int32_t minId, int32_t maxId, + wp listener); + status_t removeFrameListener(int32_t minId, int32_t maxId, + wp listener); + + status_t stopStream(); + + static size_t calculateBufferSize(int width, int height, + int format, int stride); + + static const int32_t kPreviewRequestIdStart = 10000000; + static const int32_t kPreviewRequestIdEnd = 20000000; + + static const int32_t kRecordingRequestIdStart = 20000000; + static const int32_t kRecordingRequestIdEnd = 30000000; + + static const int32_t kCaptureRequestIdStart = 30000000; + static const int32_t kCaptureRequestIdEnd = 40000000; + +private: + /** ICamera interface-related private members */ + typedef camera2::Parameters Parameters; + + status_t setPreviewWindowL(const sp& binder, + sp window); + status_t startPreviewL(Parameters ¶ms, bool restart); + void stopPreviewL(); + status_t startRecordingL(Parameters ¶ms, bool restart); + bool recordingEnabledL(); + + // Individual commands for sendCommand() + status_t commandStartSmoothZoomL(); + status_t commandStopSmoothZoomL(); + status_t commandSetDisplayOrientationL(int degrees); + status_t commandEnableShutterSoundL(bool enable); + status_t commandPlayRecordingSoundL(); + status_t commandStartFaceDetectionL(int type); + status_t commandStopFaceDetectionL(Parameters ¶ms); + status_t commandEnableFocusMoveMsgL(bool enable); + status_t commandPingL(); + status_t commandSetVideoBufferCountL(size_t count); + + // Current camera device configuration + camera2::SharedParameters mParameters; + + /** Camera device-related private members */ + + void setPreviewCallbackFlagL(Parameters ¶ms, int flag); + status_t updateRequests(Parameters ¶ms); + int mDeviceVersion; + + // Used with stream IDs + static const int NO_STREAM = -1; + + template + status_t updateProcessorStream(sp processor, Parameters params); + template + status_t updateProcessorStream(sp processor, Parameters params); + + sp mFrameProcessor; + + /* Preview/Recording related members */ + + sp mPreviewSurface; + sp mStreamingProcessor; + + /** Preview callback related members */ + + sp mCallbackProcessor; + + /* Still image capture related members */ + + sp mCaptureSequencer; + sp mJpegProcessor; + sp mZslProcessor; + sp mZslProcessorThread; + + /** Notification-related members */ + + bool mAfInMotion; + + /** Utility members */ + + // Wait until the camera device has received the latest control settings + status_t syncWithDevice(); +}; + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp new file mode 100644 index 0000000..ad8856b --- /dev/null +++ b/services/camera/libcameraservice/api1/CameraClient.cpp @@ -0,0 +1,972 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CameraClient" +//#define LOG_NDEBUG 0 + +#include +#include + +#include "api1/CameraClient.h" +#include "device1/CameraHardwareInterface.h" +#include "CameraService.h" + +namespace android { + +#define LOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__); +#define LOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__); + +static int getCallingPid() { + return IPCThreadState::self()->getCallingPid(); +} + +CameraClient::CameraClient(const sp& cameraService, + const sp& cameraClient, + const String16& clientPackageName, + int cameraId, int cameraFacing, + int clientPid, int clientUid, + int servicePid): + Client(cameraService, cameraClient, clientPackageName, + cameraId, cameraFacing, clientPid, clientUid, servicePid) +{ + int callingPid = getCallingPid(); + LOG1("CameraClient::CameraClient E (pid %d, id %d)", callingPid, cameraId); + + mHardware = NULL; + mMsgEnabled = 0; + mSurface = 0; + mPreviewWindow = 0; + mDestructionStarted = false; + + // Callback is disabled by default + mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP; + mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT); + mPlayShutterSound = true; + LOG1("CameraClient::CameraClient X (pid %d, id %d)", callingPid, cameraId); +} + +status_t CameraClient::initialize(camera_module_t *module) { + int callingPid = getCallingPid(); + status_t res; + + LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId); + + // Verify ops permissions + res = startCameraOps(); + if (res != OK) { + return res; + } + + char camera_device_name[10]; + snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId); + + mHardware = new CameraHardwareInterface(camera_device_name); + res = mHardware->initialize(&module->common); + if (res != OK) { + ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", + __FUNCTION__, mCameraId, strerror(-res), res); + mHardware.clear(); + return NO_INIT; + } + + mHardware->setCallbacks(notifyCallback, + dataCallback, + dataCallbackTimestamp, + (void *)mCameraId); + + // Enable zoom, error, focus, and metadata messages by default + enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS | + CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE); + + LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId); + return OK; +} + + +// tear down the client +CameraClient::~CameraClient() { + // this lock should never be NULL + Mutex* lock = mCameraService->getClientLockById(mCameraId); + lock->lock(); + mDestructionStarted = true; + // client will not be accessed from callback. should unlock to prevent dead-lock in disconnect + lock->unlock(); + int callingPid = getCallingPid(); + LOG1("CameraClient::~CameraClient E (pid %d, this %p)", callingPid, this); + + disconnect(); + LOG1("CameraClient::~CameraClient X (pid %d, this %p)", callingPid, this); +} + +status_t CameraClient::dump(int fd, const Vector& args) { + const size_t SIZE = 256; + char buffer[SIZE]; + + size_t len = snprintf(buffer, SIZE, "Client[%d] (%p) PID: %d\n", + mCameraId, + getRemoteCallback()->asBinder().get(), + mClientPid); + len = (len > SIZE - 1) ? 
SIZE - 1 : len; + write(fd, buffer, len); + return mHardware->dump(fd, args); +} + +// ---------------------------------------------------------------------------- + +status_t CameraClient::checkPid() const { + int callingPid = getCallingPid(); + if (callingPid == mClientPid) return NO_ERROR; + + ALOGW("attempt to use a locked camera from a different process" + " (old pid %d, new pid %d)", mClientPid, callingPid); + return EBUSY; +} + +status_t CameraClient::checkPidAndHardware() const { + status_t result = checkPid(); + if (result != NO_ERROR) return result; + if (mHardware == 0) { + ALOGE("attempt to use a camera after disconnect() (pid %d)", getCallingPid()); + return INVALID_OPERATION; + } + return NO_ERROR; +} + +status_t CameraClient::lock() { + int callingPid = getCallingPid(); + LOG1("lock (pid %d)", callingPid); + Mutex::Autolock lock(mLock); + + // lock camera to this client if the the camera is unlocked + if (mClientPid == 0) { + mClientPid = callingPid; + return NO_ERROR; + } + + // returns NO_ERROR if the client already owns the camera, EBUSY otherwise + return checkPid(); +} + +status_t CameraClient::unlock() { + int callingPid = getCallingPid(); + LOG1("unlock (pid %d)", callingPid); + Mutex::Autolock lock(mLock); + + // allow anyone to use camera (after they lock the camera) + status_t result = checkPid(); + if (result == NO_ERROR) { + if (mHardware->recordingEnabled()) { + ALOGE("Not allowed to unlock camera during recording."); + return INVALID_OPERATION; + } + mClientPid = 0; + LOG1("clear mRemoteCallback (pid %d)", callingPid); + // we need to remove the reference to ICameraClient so that when the app + // goes away, the reference count goes to 0. + mRemoteCallback.clear(); + } + return result; +} + +// connect a new client to the camera +status_t CameraClient::connect(const sp& client) { + int callingPid = getCallingPid(); + LOG1("connect E (pid %d)", callingPid); + Mutex::Autolock lock(mLock); + + if (mClientPid != 0 && checkPid() != NO_ERROR) { + ALOGW("Tried to connect to a locked camera (old pid %d, new pid %d)", + mClientPid, callingPid); + return EBUSY; + } + + if (mRemoteCallback != 0 && + (client->asBinder() == mRemoteCallback->asBinder())) { + LOG1("Connect to the same client"); + return NO_ERROR; + } + + mPreviewCallbackFlag = CAMERA_FRAME_CALLBACK_FLAG_NOOP; + mClientPid = callingPid; + mRemoteCallback = client; + + LOG1("connect X (pid %d)", callingPid); + return NO_ERROR; +} + +static void disconnectWindow(const sp& window) { + if (window != 0) { + status_t result = native_window_api_disconnect(window.get(), + NATIVE_WINDOW_API_CAMERA); + if (result != NO_ERROR) { + ALOGW("native_window_api_disconnect failed: %s (%d)", strerror(-result), + result); + } + } +} + +void CameraClient::disconnect() { + int callingPid = getCallingPid(); + LOG1("disconnect E (pid %d)", callingPid); + Mutex::Autolock lock(mLock); + + // Allow both client and the media server to disconnect at all times + if (callingPid != mClientPid && callingPid != mServicePid) { + ALOGW("different client - don't disconnect"); + return; + } + + if (mClientPid <= 0) { + LOG1("camera is unlocked (mClientPid = %d), don't tear down hardware", mClientPid); + return; + } + + // Make sure disconnect() is done once and once only, whether it is called + // from the user directly, or called by the destructor. + if (mHardware == 0) return; + + LOG1("hardware teardown"); + // Before destroying mHardware, we must make sure it's in the + // idle state. + // Turn off all messages. 
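+    // Teardown order matters: silence every callback first, stop any active
+    // preview or capture, release the HAL, and only then drop the preview
+    // window reference.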
+ disableMsgType(CAMERA_MSG_ALL_MSGS); + mHardware->stopPreview(); + mHardware->cancelPicture(); + // Release the hardware resources. + mHardware->release(); + + // Release the held ANativeWindow resources. + if (mPreviewWindow != 0) { + disconnectWindow(mPreviewWindow); + mPreviewWindow = 0; + mHardware->setPreviewWindow(mPreviewWindow); + } + mHardware.clear(); + + CameraService::Client::disconnect(); + + LOG1("disconnect X (pid %d)", callingPid); +} + +// ---------------------------------------------------------------------------- + +status_t CameraClient::setPreviewWindow(const sp& binder, + const sp& window) { + Mutex::Autolock lock(mLock); + status_t result = checkPidAndHardware(); + if (result != NO_ERROR) return result; + + // return if no change in surface. + if (binder == mSurface) { + return NO_ERROR; + } + + if (window != 0) { + result = native_window_api_connect(window.get(), NATIVE_WINDOW_API_CAMERA); + if (result != NO_ERROR) { + ALOGE("native_window_api_connect failed: %s (%d)", strerror(-result), + result); + return result; + } + } + + // If preview has been already started, register preview buffers now. + if (mHardware->previewEnabled()) { + if (window != 0) { + native_window_set_scaling_mode(window.get(), + NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); + native_window_set_buffers_transform(window.get(), mOrientation); + result = mHardware->setPreviewWindow(window); + } + } + + if (result == NO_ERROR) { + // Everything has succeeded. Disconnect the old window and remember the + // new window. + disconnectWindow(mPreviewWindow); + mSurface = binder; + mPreviewWindow = window; + } else { + // Something went wrong after we connected to the new window, so + // disconnect here. + disconnectWindow(window); + } + + return result; +} + +// set the Surface that the preview will use +status_t CameraClient::setPreviewDisplay(const sp& surface) { + LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid()); + + sp binder(surface != 0 ? surface->getIGraphicBufferProducer()->asBinder() : 0); + sp window(surface); + return setPreviewWindow(binder, window); +} + +// set the SurfaceTextureClient that the preview will use +status_t CameraClient::setPreviewTexture( + const sp& bufferProducer) { + LOG1("setPreviewTexture(%p) (pid %d)", bufferProducer.get(), + getCallingPid()); + + sp binder; + sp window; + if (bufferProducer != 0) { + binder = bufferProducer->asBinder(); + window = new Surface(bufferProducer); + } + return setPreviewWindow(binder, window); +} + +// set the preview callback flag to affect how the received frames from +// preview are handled. 
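+// Only the ENABLE bit is acted on immediately (by toggling the
+// CAMERA_MSG_PREVIEW_FRAME message); the COPY_OUT and ONE_SHOT bits are
+// consumed later in handlePreviewData() when a frame arrives.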
+void CameraClient::setPreviewCallbackFlag(int callback_flag) { + LOG1("setPreviewCallbackFlag(%d) (pid %d)", callback_flag, getCallingPid()); + Mutex::Autolock lock(mLock); + if (checkPidAndHardware() != NO_ERROR) return; + + mPreviewCallbackFlag = callback_flag; + if (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) { + enableMsgType(CAMERA_MSG_PREVIEW_FRAME); + } else { + disableMsgType(CAMERA_MSG_PREVIEW_FRAME); + } +} + +status_t CameraClient::setPreviewCallbackTarget( + const sp& callbackProducer) { + (void)callbackProducer; + ALOGE("%s: Unimplemented!", __FUNCTION__); + return INVALID_OPERATION; +} + +// start preview mode +status_t CameraClient::startPreview() { + LOG1("startPreview (pid %d)", getCallingPid()); + return startCameraMode(CAMERA_PREVIEW_MODE); +} + +// start recording mode +status_t CameraClient::startRecording() { + LOG1("startRecording (pid %d)", getCallingPid()); + return startCameraMode(CAMERA_RECORDING_MODE); +} + +// start preview or recording +status_t CameraClient::startCameraMode(camera_mode mode) { + LOG1("startCameraMode(%d)", mode); + Mutex::Autolock lock(mLock); + status_t result = checkPidAndHardware(); + if (result != NO_ERROR) return result; + + switch(mode) { + case CAMERA_PREVIEW_MODE: + if (mSurface == 0 && mPreviewWindow == 0) { + LOG1("mSurface is not set yet."); + // still able to start preview in this case. + } + return startPreviewMode(); + case CAMERA_RECORDING_MODE: + if (mSurface == 0 && mPreviewWindow == 0) { + ALOGE("mSurface or mPreviewWindow must be set before startRecordingMode."); + return INVALID_OPERATION; + } + return startRecordingMode(); + default: + return UNKNOWN_ERROR; + } +} + +status_t CameraClient::startPreviewMode() { + LOG1("startPreviewMode"); + status_t result = NO_ERROR; + + // if preview has been enabled, nothing needs to be done + if (mHardware->previewEnabled()) { + return NO_ERROR; + } + + if (mPreviewWindow != 0) { + native_window_set_scaling_mode(mPreviewWindow.get(), + NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); + native_window_set_buffers_transform(mPreviewWindow.get(), + mOrientation); + } + mHardware->setPreviewWindow(mPreviewWindow); + result = mHardware->startPreview(); + + return result; +} + +status_t CameraClient::startRecordingMode() { + LOG1("startRecordingMode"); + status_t result = NO_ERROR; + + // if recording has been enabled, nothing needs to be done + if (mHardware->recordingEnabled()) { + return NO_ERROR; + } + + // if preview has not been started, start preview first + if (!mHardware->previewEnabled()) { + result = startPreviewMode(); + if (result != NO_ERROR) { + return result; + } + } + + // start recording mode + enableMsgType(CAMERA_MSG_VIDEO_FRAME); + mCameraService->playSound(CameraService::SOUND_RECORDING); + result = mHardware->startRecording(); + if (result != NO_ERROR) { + ALOGE("mHardware->startRecording() failed with status %d", result); + } + return result; +} + +// stop preview mode +void CameraClient::stopPreview() { + LOG1("stopPreview (pid %d)", getCallingPid()); + Mutex::Autolock lock(mLock); + if (checkPidAndHardware() != NO_ERROR) return; + + + disableMsgType(CAMERA_MSG_PREVIEW_FRAME); + mHardware->stopPreview(); + + mPreviewBuffer.clear(); +} + +// stop recording mode +void CameraClient::stopRecording() { + LOG1("stopRecording (pid %d)", getCallingPid()); + Mutex::Autolock lock(mLock); + if (checkPidAndHardware() != NO_ERROR) return; + + disableMsgType(CAMERA_MSG_VIDEO_FRAME); + mHardware->stopRecording(); + 
mCameraService->playSound(CameraService::SOUND_RECORDING); + + mPreviewBuffer.clear(); +} + +// release a recording frame +void CameraClient::releaseRecordingFrame(const sp& mem) { + Mutex::Autolock lock(mLock); + if (checkPidAndHardware() != NO_ERROR) return; + mHardware->releaseRecordingFrame(mem); +} + +status_t CameraClient::storeMetaDataInBuffers(bool enabled) +{ + LOG1("storeMetaDataInBuffers: %s", enabled? "true": "false"); + Mutex::Autolock lock(mLock); + if (checkPidAndHardware() != NO_ERROR) { + return UNKNOWN_ERROR; + } + return mHardware->storeMetaDataInBuffers(enabled); +} + +bool CameraClient::previewEnabled() { + LOG1("previewEnabled (pid %d)", getCallingPid()); + + Mutex::Autolock lock(mLock); + if (checkPidAndHardware() != NO_ERROR) return false; + return mHardware->previewEnabled(); +} + +bool CameraClient::recordingEnabled() { + LOG1("recordingEnabled (pid %d)", getCallingPid()); + + Mutex::Autolock lock(mLock); + if (checkPidAndHardware() != NO_ERROR) return false; + return mHardware->recordingEnabled(); +} + +status_t CameraClient::autoFocus() { + LOG1("autoFocus (pid %d)", getCallingPid()); + + Mutex::Autolock lock(mLock); + status_t result = checkPidAndHardware(); + if (result != NO_ERROR) return result; + + return mHardware->autoFocus(); +} + +status_t CameraClient::cancelAutoFocus() { + LOG1("cancelAutoFocus (pid %d)", getCallingPid()); + + Mutex::Autolock lock(mLock); + status_t result = checkPidAndHardware(); + if (result != NO_ERROR) return result; + + return mHardware->cancelAutoFocus(); +} + +// take a picture - image is returned in callback +status_t CameraClient::takePicture(int msgType) { + LOG1("takePicture (pid %d): 0x%x", getCallingPid(), msgType); + + Mutex::Autolock lock(mLock); + status_t result = checkPidAndHardware(); + if (result != NO_ERROR) return result; + + if ((msgType & CAMERA_MSG_RAW_IMAGE) && + (msgType & CAMERA_MSG_RAW_IMAGE_NOTIFY)) { + ALOGE("CAMERA_MSG_RAW_IMAGE and CAMERA_MSG_RAW_IMAGE_NOTIFY" + " cannot be both enabled"); + return BAD_VALUE; + } + + // We only accept picture related message types + // and ignore other types of messages for takePicture(). + int picMsgType = msgType + & (CAMERA_MSG_SHUTTER | + CAMERA_MSG_POSTVIEW_FRAME | + CAMERA_MSG_RAW_IMAGE | + CAMERA_MSG_RAW_IMAGE_NOTIFY | + CAMERA_MSG_COMPRESSED_IMAGE); + + enableMsgType(picMsgType); + + return mHardware->takePicture(); +} + +// set preview/capture parameters - key/value pairs +status_t CameraClient::setParameters(const String8& params) { + LOG1("setParameters (pid %d) (%s)", getCallingPid(), params.string()); + + Mutex::Autolock lock(mLock); + status_t result = checkPidAndHardware(); + if (result != NO_ERROR) return result; + + CameraParameters p(params); + return mHardware->setParameters(p); +} + +// get preview/capture parameters - key/value pairs +String8 CameraClient::getParameters() const { + Mutex::Autolock lock(mLock); + if (checkPidAndHardware() != NO_ERROR) return String8(); + + String8 params(mHardware->getParameters().flatten()); + LOG1("getParameters (pid %d) (%s)", getCallingPid(), params.string()); + return params; +} + +// enable shutter sound +status_t CameraClient::enableShutterSound(bool enable) { + LOG1("enableShutterSound (pid %d)", getCallingPid()); + + status_t result = checkPidAndHardware(); + if (result != NO_ERROR) return result; + + if (enable) { + mPlayShutterSound = true; + return OK; + } + + // Disabling shutter sound may not be allowed. In that case only + // allow the mediaserver process to disable the sound. 
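+    // ro.camera.sound.forced != "0" means the shutter sound is mandated by
+    // the device; in that case only a caller running inside this
+    // (mediaserver) process may still turn it off.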
+ char value[PROPERTY_VALUE_MAX]; + property_get("ro.camera.sound.forced", value, "0"); + if (strcmp(value, "0") != 0) { + // Disabling shutter sound is not allowed. Deny if the current + // process is not mediaserver. + if (getCallingPid() != getpid()) { + ALOGE("Failed to disable shutter sound. Permission denied (pid %d)", getCallingPid()); + return PERMISSION_DENIED; + } + } + + mPlayShutterSound = false; + return OK; +} + +status_t CameraClient::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) { + LOG1("sendCommand (pid %d)", getCallingPid()); + int orientation; + Mutex::Autolock lock(mLock); + status_t result = checkPidAndHardware(); + if (result != NO_ERROR) return result; + + if (cmd == CAMERA_CMD_SET_DISPLAY_ORIENTATION) { + // Mirror the preview if the camera is front-facing. + orientation = getOrientation(arg1, mCameraFacing == CAMERA_FACING_FRONT); + if (orientation == -1) return BAD_VALUE; + + if (mOrientation != orientation) { + mOrientation = orientation; + if (mPreviewWindow != 0) { + native_window_set_buffers_transform(mPreviewWindow.get(), + mOrientation); + } + } + return OK; + } else if (cmd == CAMERA_CMD_ENABLE_SHUTTER_SOUND) { + switch (arg1) { + case 0: + return enableShutterSound(false); + case 1: + return enableShutterSound(true); + default: + return BAD_VALUE; + } + return OK; + } else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) { + mCameraService->playSound(CameraService::SOUND_RECORDING); + } else if (cmd == CAMERA_CMD_SET_VIDEO_BUFFER_COUNT) { + // Silently ignore this command + return INVALID_OPERATION; + } else if (cmd == CAMERA_CMD_PING) { + // If mHardware is 0, checkPidAndHardware will return error. + return OK; + } + + return mHardware->sendCommand(cmd, arg1, arg2); +} + +// ---------------------------------------------------------------------------- + +void CameraClient::enableMsgType(int32_t msgType) { + android_atomic_or(msgType, &mMsgEnabled); + mHardware->enableMsgType(msgType); +} + +void CameraClient::disableMsgType(int32_t msgType) { + android_atomic_and(~msgType, &mMsgEnabled); + mHardware->disableMsgType(msgType); +} + +#define CHECK_MESSAGE_INTERVAL 10 // 10ms +bool CameraClient::lockIfMessageWanted(int32_t msgType) { + int sleepCount = 0; + while (mMsgEnabled & msgType) { + if (mLock.tryLock() == NO_ERROR) { + if (sleepCount > 0) { + LOG1("lockIfMessageWanted(%d): waited for %d ms", + msgType, sleepCount * CHECK_MESSAGE_INTERVAL); + } + return true; + } + if (sleepCount++ == 0) { + LOG1("lockIfMessageWanted(%d): enter sleep", msgType); + } + usleep(CHECK_MESSAGE_INTERVAL * 1000); + } + ALOGW("lockIfMessageWanted(%d): dropped unwanted message", msgType); + return false; +} + +// Callback messages can be dispatched to internal handlers or pass to our +// client's callback functions, depending on the message type. +// +// notifyCallback: +// CAMERA_MSG_SHUTTER handleShutter +// (others) c->notifyCallback +// dataCallback: +// CAMERA_MSG_PREVIEW_FRAME handlePreviewData +// CAMERA_MSG_POSTVIEW_FRAME handlePostview +// CAMERA_MSG_RAW_IMAGE handleRawPicture +// CAMERA_MSG_COMPRESSED_IMAGE handleCompressedPicture +// (others) c->dataCallback +// dataCallbackTimestamp +// (others) c->dataCallbackTimestamp +// +// NOTE: the *Callback functions grab mLock of the client before passing +// control to handle* functions. 
So the handle* functions must release the +// lock before calling the ICameraClient's callbacks, so those callbacks can +// invoke methods in the Client class again (For example, the preview frame +// callback may want to releaseRecordingFrame). The handle* functions must +// release the lock after all accesses to member variables, so it must be +// handled very carefully. + +void CameraClient::notifyCallback(int32_t msgType, int32_t ext1, + int32_t ext2, void* user) { + LOG2("notifyCallback(%d)", msgType); + + Mutex* lock = getClientLockFromCookie(user); + if (lock == NULL) return; + Mutex::Autolock alock(*lock); + + CameraClient* client = + static_cast(getClientFromCookie(user)); + if (client == NULL) return; + + if (!client->lockIfMessageWanted(msgType)) return; + + switch (msgType) { + case CAMERA_MSG_SHUTTER: + // ext1 is the dimension of the yuv picture. + client->handleShutter(); + break; + default: + client->handleGenericNotify(msgType, ext1, ext2); + break; + } +} + +void CameraClient::dataCallback(int32_t msgType, + const sp& dataPtr, camera_frame_metadata_t *metadata, void* user) { + LOG2("dataCallback(%d)", msgType); + + Mutex* lock = getClientLockFromCookie(user); + if (lock == NULL) return; + Mutex::Autolock alock(*lock); + + CameraClient* client = + static_cast(getClientFromCookie(user)); + if (client == NULL) return; + + if (!client->lockIfMessageWanted(msgType)) return; + if (dataPtr == 0 && metadata == NULL) { + ALOGE("Null data returned in data callback"); + client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0); + return; + } + + switch (msgType & ~CAMERA_MSG_PREVIEW_METADATA) { + case CAMERA_MSG_PREVIEW_FRAME: + client->handlePreviewData(msgType, dataPtr, metadata); + break; + case CAMERA_MSG_POSTVIEW_FRAME: + client->handlePostview(dataPtr); + break; + case CAMERA_MSG_RAW_IMAGE: + client->handleRawPicture(dataPtr); + break; + case CAMERA_MSG_COMPRESSED_IMAGE: + client->handleCompressedPicture(dataPtr); + break; + default: + client->handleGenericData(msgType, dataPtr, metadata); + break; + } +} + +void CameraClient::dataCallbackTimestamp(nsecs_t timestamp, + int32_t msgType, const sp& dataPtr, void* user) { + LOG2("dataCallbackTimestamp(%d)", msgType); + + Mutex* lock = getClientLockFromCookie(user); + if (lock == NULL) return; + Mutex::Autolock alock(*lock); + + CameraClient* client = + static_cast(getClientFromCookie(user)); + if (client == NULL) return; + + if (!client->lockIfMessageWanted(msgType)) return; + + if (dataPtr == 0) { + ALOGE("Null data returned in data with timestamp callback"); + client->handleGenericNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0); + return; + } + + client->handleGenericDataTimestamp(timestamp, msgType, dataPtr); +} + +// snapshot taken callback +void CameraClient::handleShutter(void) { + if (mPlayShutterSound) { + mCameraService->playSound(CameraService::SOUND_SHUTTER); + } + + sp c = mRemoteCallback; + if (c != 0) { + mLock.unlock(); + c->notifyCallback(CAMERA_MSG_SHUTTER, 0, 0); + if (!lockIfMessageWanted(CAMERA_MSG_SHUTTER)) return; + } + disableMsgType(CAMERA_MSG_SHUTTER); + + mLock.unlock(); +} + +// preview callback - frame buffer update +void CameraClient::handlePreviewData(int32_t msgType, + const sp& mem, + camera_frame_metadata_t *metadata) { + ssize_t offset; + size_t size; + sp heap = mem->getMemory(&offset, &size); + + // local copy of the callback flags + int flags = mPreviewCallbackFlag; + + // is callback enabled? 
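+    // 'flags' is the snapshot taken above; mPreviewCallbackFlag itself may be
+    // cleared below for one-shot callbacks, but this frame is still delivered
+    // according to the snapshot.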
+ if (!(flags & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)) { + // If the enable bit is off, the copy-out and one-shot bits are ignored + LOG2("frame callback is disabled"); + mLock.unlock(); + return; + } + + // hold a strong pointer to the client + sp c = mRemoteCallback; + + // clear callback flags if no client or one-shot mode + if (c == 0 || (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) { + LOG2("Disable preview callback"); + mPreviewCallbackFlag &= ~(CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK | + CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK | + CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK); + disableMsgType(CAMERA_MSG_PREVIEW_FRAME); + } + + if (c != 0) { + // Is the received frame copied out or not? + if (flags & CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK) { + LOG2("frame is copied"); + copyFrameAndPostCopiedFrame(msgType, c, heap, offset, size, metadata); + } else { + LOG2("frame is forwarded"); + mLock.unlock(); + c->dataCallback(msgType, mem, metadata); + } + } else { + mLock.unlock(); + } +} + +// picture callback - postview image ready +void CameraClient::handlePostview(const sp& mem) { + disableMsgType(CAMERA_MSG_POSTVIEW_FRAME); + + sp c = mRemoteCallback; + mLock.unlock(); + if (c != 0) { + c->dataCallback(CAMERA_MSG_POSTVIEW_FRAME, mem, NULL); + } +} + +// picture callback - raw image ready +void CameraClient::handleRawPicture(const sp& mem) { + disableMsgType(CAMERA_MSG_RAW_IMAGE); + + ssize_t offset; + size_t size; + sp heap = mem->getMemory(&offset, &size); + + sp c = mRemoteCallback; + mLock.unlock(); + if (c != 0) { + c->dataCallback(CAMERA_MSG_RAW_IMAGE, mem, NULL); + } +} + +// picture callback - compressed picture ready +void CameraClient::handleCompressedPicture(const sp& mem) { + disableMsgType(CAMERA_MSG_COMPRESSED_IMAGE); + + sp c = mRemoteCallback; + mLock.unlock(); + if (c != 0) { + c->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, mem, NULL); + } +} + + +void CameraClient::handleGenericNotify(int32_t msgType, + int32_t ext1, int32_t ext2) { + sp c = mRemoteCallback; + mLock.unlock(); + if (c != 0) { + c->notifyCallback(msgType, ext1, ext2); + } +} + +void CameraClient::handleGenericData(int32_t msgType, + const sp& dataPtr, camera_frame_metadata_t *metadata) { + sp c = mRemoteCallback; + mLock.unlock(); + if (c != 0) { + c->dataCallback(msgType, dataPtr, metadata); + } +} + +void CameraClient::handleGenericDataTimestamp(nsecs_t timestamp, + int32_t msgType, const sp& dataPtr) { + sp c = mRemoteCallback; + mLock.unlock(); + if (c != 0) { + c->dataCallbackTimestamp(timestamp, msgType, dataPtr); + } +} + +void CameraClient::copyFrameAndPostCopiedFrame( + int32_t msgType, const sp& client, + const sp& heap, size_t offset, size_t size, + camera_frame_metadata_t *metadata) { + LOG2("copyFrameAndPostCopiedFrame"); + // It is necessary to copy out of pmem before sending this to + // the callback. For efficiency, reuse the same MemoryHeapBase + // provided it's big enough. Don't allocate the memory or + // perform the copy if there's no callback. 
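+    // mPreviewBuffer is only (re)allocated when the incoming frame no longer
+    // fits in the existing heap; otherwise the previous allocation is reused.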
+ // hold the preview lock while we grab a reference to the preview buffer + sp previewBuffer; + + if (mPreviewBuffer == 0) { + mPreviewBuffer = new MemoryHeapBase(size, 0, NULL); + } else if (size > mPreviewBuffer->virtualSize()) { + mPreviewBuffer.clear(); + mPreviewBuffer = new MemoryHeapBase(size, 0, NULL); + } + if (mPreviewBuffer == 0) { + ALOGE("failed to allocate space for preview buffer"); + mLock.unlock(); + return; + } + previewBuffer = mPreviewBuffer; + + memcpy(previewBuffer->base(), (uint8_t *)heap->base() + offset, size); + + sp frame = new MemoryBase(previewBuffer, 0, size); + if (frame == 0) { + ALOGE("failed to allocate space for frame callback"); + mLock.unlock(); + return; + } + + mLock.unlock(); + client->dataCallback(msgType, frame, metadata); +} + +int CameraClient::getOrientation(int degrees, bool mirror) { + if (!mirror) { + if (degrees == 0) return 0; + else if (degrees == 90) return HAL_TRANSFORM_ROT_90; + else if (degrees == 180) return HAL_TRANSFORM_ROT_180; + else if (degrees == 270) return HAL_TRANSFORM_ROT_270; + } else { // Do mirror (horizontal flip) + if (degrees == 0) { // FLIP_H and ROT_0 + return HAL_TRANSFORM_FLIP_H; + } else if (degrees == 90) { // FLIP_H and ROT_90 + return HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90; + } else if (degrees == 180) { // FLIP_H and ROT_180 + return HAL_TRANSFORM_FLIP_V; + } else if (degrees == 270) { // FLIP_H and ROT_270 + return HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90; + } + } + ALOGE("Invalid setDisplayOrientation degrees=%d", degrees); + return -1; +} + +}; // namespace android diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h new file mode 100644 index 0000000..abde75a --- /dev/null +++ b/services/camera/libcameraservice/api1/CameraClient.h @@ -0,0 +1,165 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERACLIENT_H +#define ANDROID_SERVERS_CAMERA_CAMERACLIENT_H + +#include "CameraService.h" + +namespace android { + +class MemoryHeapBase; +class CameraHardwareInterface; + +/** + * Interface between android.hardware.Camera API and Camera HAL device for version + * CAMERA_DEVICE_API_VERSION_1_0. 
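+ * Calls are checked against the owning client PID and then forwarded to a
+ * CameraHardwareInterface instance (mHardware).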
+ */ + +class CameraClient : public CameraService::Client +{ +public: + // ICamera interface (see ICamera for details) + virtual void disconnect(); + virtual status_t connect(const sp& client); + virtual status_t lock(); + virtual status_t unlock(); + virtual status_t setPreviewDisplay(const sp& surface); + virtual status_t setPreviewTexture(const sp& bufferProducer); + virtual void setPreviewCallbackFlag(int flag); + virtual status_t setPreviewCallbackTarget( + const sp& callbackProducer); + virtual status_t startPreview(); + virtual void stopPreview(); + virtual bool previewEnabled(); + virtual status_t storeMetaDataInBuffers(bool enabled); + virtual status_t startRecording(); + virtual void stopRecording(); + virtual bool recordingEnabled(); + virtual void releaseRecordingFrame(const sp& mem); + virtual status_t autoFocus(); + virtual status_t cancelAutoFocus(); + virtual status_t takePicture(int msgType); + virtual status_t setParameters(const String8& params); + virtual String8 getParameters() const; + virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2); + + // Interface used by CameraService + CameraClient(const sp& cameraService, + const sp& cameraClient, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + int clientUid, + int servicePid); + ~CameraClient(); + + status_t initialize(camera_module_t *module); + + status_t dump(int fd, const Vector& args); + +private: + + // check whether the calling process matches mClientPid. + status_t checkPid() const; + status_t checkPidAndHardware() const; // also check mHardware != 0 + + // these are internal functions used to set up preview buffers + status_t registerPreviewBuffers(); + + // camera operation mode + enum camera_mode { + CAMERA_PREVIEW_MODE = 0, // frame automatically released + CAMERA_RECORDING_MODE = 1, // frame has to be explicitly released by releaseRecordingFrame() + }; + // these are internal functions used for preview/recording + status_t startCameraMode(camera_mode mode); + status_t startPreviewMode(); + status_t startRecordingMode(); + + // internal function used by sendCommand to enable/disable shutter sound. + status_t enableShutterSound(bool enable); + + // these are static callback functions + static void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user); + static void dataCallback(int32_t msgType, const sp& dataPtr, + camera_frame_metadata_t *metadata, void* user); + static void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp& dataPtr, void* user); + // handlers for messages + void handleShutter(void); + void handlePreviewData(int32_t msgType, const sp& mem, + camera_frame_metadata_t *metadata); + void handlePostview(const sp& mem); + void handleRawPicture(const sp& mem); + void handleCompressedPicture(const sp& mem); + void handleGenericNotify(int32_t msgType, int32_t ext1, int32_t ext2); + void handleGenericData(int32_t msgType, const sp& dataPtr, + camera_frame_metadata_t *metadata); + void handleGenericDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp& dataPtr); + + void copyFrameAndPostCopiedFrame( + int32_t msgType, + const sp& client, + const sp& heap, + size_t offset, size_t size, + camera_frame_metadata_t *metadata); + + int getOrientation(int orientation, bool mirror); + + status_t setPreviewWindow( + const sp& binder, + const sp& window); + + + // these are initialized in the constructor. 
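+    // mHardware is the only one that is replaced later: it is created in
+    // initialize() and cleared again in disconnect(); the remaining fields
+    // are updated by the corresponding ICamera calls.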
+ sp mHardware; // cleared after disconnect() + int mPreviewCallbackFlag; + int mOrientation; // Current display orientation + bool mPlayShutterSound; + + // Ensures atomicity among the public methods + mutable Mutex mLock; + // This is a binder of Surface or Surface. + sp mSurface; + sp mPreviewWindow; + + // If the user want us to return a copy of the preview frame (instead + // of the original one), we allocate mPreviewBuffer and reuse it if possible. + sp mPreviewBuffer; + + // We need to avoid the deadlock when the incoming command thread and + // the CameraHardwareInterface callback thread both want to grab mLock. + // An extra flag is used to tell the callback thread that it should stop + // trying to deliver the callback messages if the client is not + // interested in it anymore. For example, if the client is calling + // stopPreview(), the preview frame messages do not need to be delivered + // anymore. + + // This function takes the same parameter as the enableMsgType() and + // disableMsgType() functions in CameraHardwareInterface. + void enableMsgType(int32_t msgType); + void disableMsgType(int32_t msgType); + volatile int32_t mMsgEnabled; + + // This function keeps trying to grab mLock, or give up if the message + // is found to be disabled. It returns true if mLock is grabbed. + bool lockIfMessageWanted(int32_t msgType); +}; + +} + +#endif diff --git a/services/camera/libcameraservice/api1/client2/BurstCapture.cpp b/services/camera/libcameraservice/api1/client2/BurstCapture.cpp new file mode 100644 index 0000000..0bfdfd4 --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/BurstCapture.cpp @@ -0,0 +1,113 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "Camera2-BurstCapture" + +#include +#include + +#include "BurstCapture.h" + +#include "api1/Camera2Client.h" +#include "api1/client2/JpegCompressor.h" + +namespace android { +namespace camera2 { + +BurstCapture::BurstCapture(wp client, wp sequencer): + mCaptureStreamId(NO_STREAM), + mClient(client), + mSequencer(sequencer) +{ +} + +BurstCapture::~BurstCapture() { +} + +status_t BurstCapture::start(Vector &/*metadatas*/, + int32_t /*firstCaptureId*/) { + ALOGE("Not completely implemented"); + return INVALID_OPERATION; +} + +void BurstCapture::onFrameAvailable() { + ALOGV("%s", __FUNCTION__); + Mutex::Autolock l(mInputMutex); + if(!mInputChanged) { + mInputChanged = true; + mInputSignal.signal(); + } +} + +bool BurstCapture::threadLoop() { + status_t res; + { + Mutex::Autolock l(mInputMutex); + while(!mInputChanged) { + res = mInputSignal.waitRelative(mInputMutex, kWaitDuration); + if(res == TIMED_OUT) return true; + } + mInputChanged = false; + } + + do { + sp client = mClient.promote(); + if(client == 0) return false; + ALOGV("%s: Calling processFrameAvailable()", __FUNCTION__); + res = processFrameAvailable(client); + } while(res == OK); + + return true; +} + +CpuConsumer::LockedBuffer* BurstCapture::jpegEncode( + CpuConsumer::LockedBuffer *imgBuffer, + int /*quality*/) +{ + ALOGV("%s", __FUNCTION__); + + CpuConsumer::LockedBuffer *imgEncoded = new CpuConsumer::LockedBuffer; + uint8_t *data = new uint8_t[ANDROID_JPEG_MAX_SIZE]; + imgEncoded->data = data; + imgEncoded->width = imgBuffer->width; + imgEncoded->height = imgBuffer->height; + imgEncoded->stride = imgBuffer->stride; + + Vector buffers; + buffers.push_back(imgBuffer); + buffers.push_back(imgEncoded); + + sp jpeg = new JpegCompressor(); + jpeg->start(buffers, 1); + + bool success = jpeg->waitForDone(10 * 1e9); + if(success) { + return buffers[1]; + } + else { + ALOGE("%s: JPEG encode timed out", __FUNCTION__); + return NULL; // TODO: maybe change function return value to status_t + } +} + +status_t BurstCapture::processFrameAvailable(sp &/*client*/) { + ALOGE("Not implemented"); + return INVALID_OPERATION; +} + +} // namespace camera2 +} // namespace android diff --git a/services/camera/libcameraservice/api1/client2/BurstCapture.h b/services/camera/libcameraservice/api1/client2/BurstCapture.h new file mode 100644 index 0000000..ea321fd --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/BurstCapture.h @@ -0,0 +1,72 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_SERVERS_CAMERA_BURST_CAPTURE_H +#define ANDROID_SERVERS_CAMERA_BURST_CAPTURE_H + +#include +#include +#include +#include + +#include "device2/Camera2Device.h" + +namespace android { + +class Camera2Client; + +namespace camera2 { + +class CaptureSequencer; + +class BurstCapture : public virtual Thread, + public virtual CpuConsumer::FrameAvailableListener +{ +public: + BurstCapture(wp client, wp sequencer); + virtual ~BurstCapture(); + + virtual void onFrameAvailable(); + virtual status_t start(Vector &metadatas, int32_t firstCaptureId); + +protected: + Mutex mInputMutex; + bool mInputChanged; + Condition mInputSignal; + int mCaptureStreamId; + wp mClient; + wp mSequencer; + + // Should only be accessed by processing thread + enum { + NO_STREAM = -1 + }; + + CpuConsumer::LockedBuffer* jpegEncode( + CpuConsumer::LockedBuffer *imgBuffer, + int quality); + + virtual status_t processFrameAvailable(sp &client); + +private: + virtual bool threadLoop(); + static const nsecs_t kWaitDuration = 10000000; // 10 ms +}; + +} // namespace camera2 +} // namespace android + +#endif diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp new file mode 100644 index 0000000..12d0859 --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp @@ -0,0 +1,539 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera2-CallbackProcessor" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include +#include + +#include "common/CameraDeviceBase.h" +#include "api1/Camera2Client.h" +#include "api1/client2/CallbackProcessor.h" + +#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) ) + +namespace android { +namespace camera2 { + +CallbackProcessor::CallbackProcessor(sp client): + Thread(false), + mClient(client), + mDevice(client->getCameraDevice()), + mId(client->getCameraId()), + mCallbackAvailable(false), + mCallbackToApp(false), + mCallbackStreamId(NO_STREAM) { +} + +CallbackProcessor::~CallbackProcessor() { + ALOGV("%s: Exit", __FUNCTION__); + deleteStream(); +} + +void CallbackProcessor::onFrameAvailable() { + Mutex::Autolock l(mInputMutex); + if (!mCallbackAvailable) { + mCallbackAvailable = true; + mCallbackAvailableSignal.signal(); + } +} + +status_t CallbackProcessor::setCallbackWindow( + sp callbackWindow) { + ATRACE_CALL(); + status_t res; + + Mutex::Autolock l(mInputMutex); + + sp client = mClient.promote(); + if (client == 0) return OK; + sp device = client->getCameraDevice(); + + // If the window is changing, clear out stream if it already exists + if (mCallbackWindow != callbackWindow && mCallbackStreamId != NO_STREAM) { + res = device->deleteStream(mCallbackStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete old stream " + "for callbacks: %s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + return res; + } + mCallbackStreamId = NO_STREAM; + mCallbackConsumer.clear(); + } + mCallbackWindow = callbackWindow; + mCallbackToApp = (mCallbackWindow != NULL); + + return OK; +} + +status_t CallbackProcessor::updateStream(const Parameters ¶ms) { + ATRACE_CALL(); + status_t res; + + Mutex::Autolock l(mInputMutex); + + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + // If possible, use the flexible YUV format + int32_t callbackFormat = params.previewFormat; + if (mCallbackToApp) { + // TODO: etalvala: This should use the flexible YUV format as well, but + // need to reconcile HAL2/HAL3 requirements. 
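+        // For an app-supplied callback surface, fall back to YV12; the
+        // flexible YCbCr_420_888 path below is only used for the internal
+        // CpuConsumer-backed stream.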
+ callbackFormat = HAL_PIXEL_FORMAT_YV12; + } else if(params.fastInfo.useFlexibleYuv && + (params.previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP || + params.previewFormat == HAL_PIXEL_FORMAT_YV12) ) { + callbackFormat = HAL_PIXEL_FORMAT_YCbCr_420_888; + } + + if (!mCallbackToApp && mCallbackConsumer == 0) { + // Create CPU buffer queue endpoint, since app hasn't given us one + // Make it async to avoid disconnect deadlocks + sp bq = new BufferQueue(); + mCallbackConsumer = new CpuConsumer(bq, kCallbackHeapCount); + mCallbackConsumer->setFrameAvailableListener(this); + mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer")); + mCallbackWindow = new Surface( + mCallbackConsumer->getProducerInterface()); + } + + if (mCallbackStreamId != NO_STREAM) { + // Check if stream parameters have to change + uint32_t currentWidth, currentHeight, currentFormat; + res = device->getStreamInfo(mCallbackStreamId, + ¤tWidth, ¤tHeight, ¤tFormat); + if (res != OK) { + ALOGE("%s: Camera %d: Error querying callback output stream info: " + "%s (%d)", __FUNCTION__, mId, + strerror(-res), res); + return res; + } + if (currentWidth != (uint32_t)params.previewWidth || + currentHeight != (uint32_t)params.previewHeight || + currentFormat != (uint32_t)callbackFormat) { + // Since size should only change while preview is not running, + // assuming that all existing use of old callback stream is + // completed. + ALOGV("%s: Camera %d: Deleting stream %d since the buffer " + "parameters changed", __FUNCTION__, mId, mCallbackStreamId); + res = device->deleteStream(mCallbackStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete old output stream " + "for callbacks: %s (%d)", __FUNCTION__, + mId, strerror(-res), res); + return res; + } + mCallbackStreamId = NO_STREAM; + } + } + + if (mCallbackStreamId == NO_STREAM) { + ALOGV("Creating callback stream: %d x %d, format 0x%x, API format 0x%x", + params.previewWidth, params.previewHeight, + callbackFormat, params.previewFormat); + res = device->createStream(mCallbackWindow, + params.previewWidth, params.previewHeight, + callbackFormat, 0, &mCallbackStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Can't create output stream for callbacks: " + "%s (%d)", __FUNCTION__, mId, + strerror(-res), res); + return res; + } + } + + return OK; +} + +status_t CallbackProcessor::deleteStream() { + ATRACE_CALL(); + sp device; + status_t res; + { + Mutex::Autolock l(mInputMutex); + + if (mCallbackStreamId == NO_STREAM) { + return OK; + } + device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + } + res = device->waitUntilDrained(); + if (res != OK) { + ALOGE("%s: Error waiting for HAL to drain: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + res = device->deleteStream(mCallbackStreamId); + if (res != OK) { + ALOGE("%s: Unable to delete callback stream: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + { + Mutex::Autolock l(mInputMutex); + + mCallbackHeap.clear(); + mCallbackWindow.clear(); + mCallbackConsumer.clear(); + + mCallbackStreamId = NO_STREAM; + } + return OK; +} + +int CallbackProcessor::getStreamId() const { + Mutex::Autolock l(mInputMutex); + return mCallbackStreamId; +} + +void CallbackProcessor::dump(int /*fd*/, const Vector& /*args*/) const { +} + +bool CallbackProcessor::threadLoop() { + status_t res; + + { + Mutex::Autolock l(mInputMutex); + while (!mCallbackAvailable) { + res = 
mCallbackAvailableSignal.waitRelative(mInputMutex, + kWaitDuration); + if (res == TIMED_OUT) return true; + } + mCallbackAvailable = false; + } + + do { + sp client = mClient.promote(); + if (client == 0) { + res = discardNewCallback(); + } else { + res = processNewCallback(client); + } + } while (res == OK); + + return true; +} + +status_t CallbackProcessor::discardNewCallback() { + ATRACE_CALL(); + status_t res; + CpuConsumer::LockedBuffer imgBuffer; + res = mCallbackConsumer->lockNextBuffer(&imgBuffer); + if (res != OK) { + if (res != BAD_VALUE) { + ALOGE("%s: Camera %d: Error receiving next callback buffer: " + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); + } + return res; + } + mCallbackConsumer->unlockBuffer(imgBuffer); + return OK; +} + +status_t CallbackProcessor::processNewCallback(sp &client) { + ATRACE_CALL(); + status_t res; + + sp callbackHeap; + bool useFlexibleYuv = false; + int32_t previewFormat = 0; + size_t heapIdx; + + { + /* acquire SharedParameters before mMutex so we don't dead lock + with Camera2Client code calling into StreamingProcessor */ + SharedParameters::Lock l(client->getParameters()); + Mutex::Autolock m(mInputMutex); + CpuConsumer::LockedBuffer imgBuffer; + if (mCallbackStreamId == NO_STREAM) { + ALOGV("%s: Camera %d:No stream is available" + , __FUNCTION__, mId); + return INVALID_OPERATION; + } + + ALOGV("%s: Getting buffer", __FUNCTION__); + res = mCallbackConsumer->lockNextBuffer(&imgBuffer); + if (res != OK) { + if (res != BAD_VALUE) { + ALOGE("%s: Camera %d: Error receiving next callback buffer: " + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); + } + return res; + } + ALOGV("%s: Camera %d: Preview callback available", __FUNCTION__, + mId); + + if ( l.mParameters.state != Parameters::PREVIEW + && l.mParameters.state != Parameters::RECORD + && l.mParameters.state != Parameters::VIDEO_SNAPSHOT) { + ALOGV("%s: Camera %d: No longer streaming", + __FUNCTION__, mId); + mCallbackConsumer->unlockBuffer(imgBuffer); + return OK; + } + + if (! (l.mParameters.previewCallbackFlags & + CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ) { + ALOGV("%s: No longer enabled, dropping", __FUNCTION__); + mCallbackConsumer->unlockBuffer(imgBuffer); + return OK; + } + if ((l.mParameters.previewCallbackFlags & + CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) && + !l.mParameters.previewCallbackOneShot) { + ALOGV("%s: One shot mode, already sent, dropping", __FUNCTION__); + mCallbackConsumer->unlockBuffer(imgBuffer); + return OK; + } + + previewFormat = l.mParameters.previewFormat; + useFlexibleYuv = l.mParameters.fastInfo.useFlexibleYuv && + (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP || + previewFormat == HAL_PIXEL_FORMAT_YV12); + + int32_t expectedFormat = useFlexibleYuv ? 
+ HAL_PIXEL_FORMAT_YCbCr_420_888 : previewFormat; + + if (imgBuffer.format != expectedFormat) { + ALOGE("%s: Camera %d: Unexpected format for callback: " + "0x%x, expected 0x%x", __FUNCTION__, mId, + imgBuffer.format, expectedFormat); + mCallbackConsumer->unlockBuffer(imgBuffer); + return INVALID_OPERATION; + } + + // In one-shot mode, stop sending callbacks after the first one + if (l.mParameters.previewCallbackFlags & + CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) { + ALOGV("%s: clearing oneshot", __FUNCTION__); + l.mParameters.previewCallbackOneShot = false; + } + + uint32_t destYStride = 0; + uint32_t destCStride = 0; + if (useFlexibleYuv) { + if (previewFormat == HAL_PIXEL_FORMAT_YV12) { + // Strides must align to 16 for YV12 + destYStride = ALIGN(imgBuffer.width, 16); + destCStride = ALIGN(destYStride / 2, 16); + } else { + // No padding for NV21 + ALOG_ASSERT(previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP, + "Unexpected preview format 0x%x", previewFormat); + destYStride = imgBuffer.width; + destCStride = destYStride / 2; + } + } else { + destYStride = imgBuffer.stride; + // don't care about cStride + } + + size_t bufferSize = Camera2Client::calculateBufferSize( + imgBuffer.width, imgBuffer.height, + previewFormat, destYStride); + size_t currentBufferSize = (mCallbackHeap == 0) ? + 0 : (mCallbackHeap->mHeap->getSize() / kCallbackHeapCount); + if (bufferSize != currentBufferSize) { + mCallbackHeap.clear(); + mCallbackHeap = new Camera2Heap(bufferSize, kCallbackHeapCount, + "Camera2Client::CallbackHeap"); + if (mCallbackHeap->mHeap->getSize() == 0) { + ALOGE("%s: Camera %d: Unable to allocate memory for callbacks", + __FUNCTION__, mId); + mCallbackConsumer->unlockBuffer(imgBuffer); + return INVALID_OPERATION; + } + + mCallbackHeapHead = 0; + mCallbackHeapFree = kCallbackHeapCount; + } + + if (mCallbackHeapFree == 0) { + ALOGE("%s: Camera %d: No free callback buffers, dropping frame", + __FUNCTION__, mId); + mCallbackConsumer->unlockBuffer(imgBuffer); + return OK; + } + + heapIdx = mCallbackHeapHead; + + mCallbackHeapHead = (mCallbackHeapHead + 1) & kCallbackHeapCount; + mCallbackHeapFree--; + + // TODO: Get rid of this copy by passing the gralloc queue all the way + // to app + + ssize_t offset; + size_t size; + sp heap = + mCallbackHeap->mBuffers[heapIdx]->getMemory(&offset, + &size); + uint8_t *data = (uint8_t*)heap->getBase() + offset; + + if (!useFlexibleYuv) { + // Can just memcpy when HAL format matches API format + memcpy(data, imgBuffer.data, bufferSize); + } else { + res = convertFromFlexibleYuv(previewFormat, data, imgBuffer, + destYStride, destCStride); + if (res != OK) { + ALOGE("%s: Camera %d: Can't convert between 0x%x and 0x%x formats!", + __FUNCTION__, mId, imgBuffer.format, previewFormat); + mCallbackConsumer->unlockBuffer(imgBuffer); + return BAD_VALUE; + } + } + + ALOGV("%s: Freeing buffer", __FUNCTION__); + mCallbackConsumer->unlockBuffer(imgBuffer); + + // mCallbackHeap may get freed up once input mutex is released + callbackHeap = mCallbackHeap; + } + + // Call outside parameter lock to allow re-entrancy from notification + { + Camera2Client::SharedCameraCallbacks::Lock + l(client->mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { + ALOGV("%s: Camera %d: Invoking client data callback", + __FUNCTION__, mId); + l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_FRAME, + callbackHeap->mBuffers[heapIdx], NULL); + } + } + + // Only increment free if we're still using the same heap + mCallbackHeapFree++; + + ALOGV("%s: exit", __FUNCTION__); + + return OK; +} + 
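// --- Illustrative sketch, not part of the patch above ---
// processNewCallback() sizes the callback heap from the destination strides:
// for YV12 the Y stride is the image width aligned to 16, and the chroma
// stride is half the Y stride, again aligned to 16, matching the ALIGN()
// usage above. The helper below shows that arithmetic in isolation; the
// function name yv12CallbackBufferSize is hypothetical (the patch itself
// delegates to Camera2Client::calculateBufferSize()).
#include <cstddef>

static inline size_t alignTo16(size_t x) {
    return (x + 15) & ~static_cast<size_t>(15);    // same rounding as ALIGN(x, 16)
}

// Android YV12 layout assumed here: Y plane, then Cr plane, then Cb plane,
// chroma planes subsampled 2x2, all strides 16-byte aligned.
static size_t yv12CallbackBufferSize(size_t width, size_t height) {
    const size_t yStride = alignTo16(width);
    const size_t cStride = alignTo16(yStride / 2);
    const size_t ySize   = yStride * height;
    const size_t cSize   = cStride * (height / 2);
    return ySize + 2 * cSize;                      // Y + Cr + Cb
}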
+status_t CallbackProcessor::convertFromFlexibleYuv(int32_t previewFormat, + uint8_t *dst, + const CpuConsumer::LockedBuffer &src, + uint32_t dstYStride, + uint32_t dstCStride) const { + + if (previewFormat != HAL_PIXEL_FORMAT_YCrCb_420_SP && + previewFormat != HAL_PIXEL_FORMAT_YV12) { + ALOGE("%s: Camera %d: Unexpected preview format when using " + "flexible YUV: 0x%x", __FUNCTION__, mId, previewFormat); + return INVALID_OPERATION; + } + + // Copy Y plane, adjusting for stride + const uint8_t *ySrc = src.data; + uint8_t *yDst = dst; + for (size_t row = 0; row < src.height; row++) { + memcpy(yDst, ySrc, src.width); + ySrc += src.stride; + yDst += dstYStride; + } + + // Copy/swizzle chroma planes, 4:2:0 subsampling + const uint8_t *cbSrc = src.dataCb; + const uint8_t *crSrc = src.dataCr; + size_t chromaHeight = src.height / 2; + size_t chromaWidth = src.width / 2; + ssize_t chromaGap = src.chromaStride - + (chromaWidth * src.chromaStep); + size_t dstChromaGap = dstCStride - chromaWidth; + + if (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP) { + // Flexible YUV chroma to NV21 chroma + uint8_t *crcbDst = yDst; + // Check for shortcuts + if (cbSrc == crSrc + 1 && src.chromaStep == 2) { + ALOGV("%s: Fast NV21->NV21", __FUNCTION__); + // Source has semiplanar CrCb chroma layout, can copy by rows + for (size_t row = 0; row < chromaHeight; row++) { + memcpy(crcbDst, crSrc, src.width); + crcbDst += src.width; + crSrc += src.chromaStride; + } + } else { + ALOGV("%s: Generic->NV21", __FUNCTION__); + // Generic copy, always works but not very efficient + for (size_t row = 0; row < chromaHeight; row++) { + for (size_t col = 0; col < chromaWidth; col++) { + *(crcbDst++) = *crSrc; + *(crcbDst++) = *cbSrc; + crSrc += src.chromaStep; + cbSrc += src.chromaStep; + } + crSrc += chromaGap; + cbSrc += chromaGap; + } + } + } else { + // flexible YUV chroma to YV12 chroma + ALOG_ASSERT(previewFormat == HAL_PIXEL_FORMAT_YV12, + "Unexpected preview format 0x%x", previewFormat); + uint8_t *crDst = yDst; + uint8_t *cbDst = yDst + chromaHeight * dstCStride; + if (src.chromaStep == 1) { + ALOGV("%s: Fast YV12->YV12", __FUNCTION__); + // Source has planar chroma layout, can copy by row + for (size_t row = 0; row < chromaHeight; row++) { + memcpy(crDst, crSrc, chromaWidth); + crDst += dstCStride; + crSrc += src.chromaStride; + } + for (size_t row = 0; row < chromaHeight; row++) { + memcpy(cbDst, cbSrc, chromaWidth); + cbDst += dstCStride; + cbSrc += src.chromaStride; + } + } else { + ALOGV("%s: Generic->YV12", __FUNCTION__); + // Generic copy, always works but not very efficient + for (size_t row = 0; row < chromaHeight; row++) { + for (size_t col = 0; col < chromaWidth; col++) { + *(crDst++) = *crSrc; + *(cbDst++) = *cbSrc; + crSrc += src.chromaStep; + cbSrc += src.chromaStep; + } + crSrc += chromaGap; + cbSrc += chromaGap; + crDst += dstChromaGap; + cbDst += dstChromaGap; + } + } + } + + return OK; +} + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.h b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h new file mode 100644 index 0000000..613f5be --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CALLBACKPROCESSOR_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_CALLBACKPROCESSOR_H + +#include +#include +#include +#include +#include +#include + +#include "api1/client2/Camera2Heap.h" + +namespace android { + +class Camera2Client; +class CameraDeviceBase; + +namespace camera2 { + +class Parameters; + +/*** + * Still image capture output image processing + */ +class CallbackProcessor: + public Thread, public CpuConsumer::FrameAvailableListener { + public: + CallbackProcessor(sp client); + ~CallbackProcessor(); + + void onFrameAvailable(); + + // Set to NULL to disable the direct-to-app callback window + status_t setCallbackWindow(sp callbackWindow); + status_t updateStream(const Parameters ¶ms); + status_t deleteStream(); + int getStreamId() const; + + void dump(int fd, const Vector& args) const; + private: + static const nsecs_t kWaitDuration = 10000000; // 10 ms + wp mClient; + wp mDevice; + int mId; + + mutable Mutex mInputMutex; + bool mCallbackAvailable; + Condition mCallbackAvailableSignal; + + enum { + NO_STREAM = -1 + }; + + // True if mCallbackWindow is a remote consumer, false if just the local + // mCallbackConsumer + bool mCallbackToApp; + int mCallbackStreamId; + static const size_t kCallbackHeapCount = 6; + sp mCallbackConsumer; + sp mCallbackWindow; + sp mCallbackHeap; + int mCallbackHeapId; + size_t mCallbackHeapHead, mCallbackHeapFree; + + virtual bool threadLoop(); + + status_t processNewCallback(sp &client); + // Used when shutting down + status_t discardNewCallback(); + + // Convert from flexible YUV to NV21 or YV12 + status_t convertFromFlexibleYuv(int32_t previewFormat, + uint8_t *dst, + const CpuConsumer::LockedBuffer &src, + uint32_t dstYStride, + uint32_t dstCStride) const; +}; + + +}; //namespace camera2 +}; //namespace android + +#endif diff --git a/services/camera/libcameraservice/api1/client2/Camera2Heap.h b/services/camera/libcameraservice/api1/client2/Camera2Heap.h new file mode 100644 index 0000000..9c72d76 --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/Camera2Heap.h @@ -0,0 +1,55 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROiD_SERVERS_CAMERA_CAMERA2HEAP_H +#define ANDROiD_SERVERS_CAMERA_CAMERA2HEAP_H + +#include +#include + +namespace android { +namespace camera2 { + +// Utility class for managing a set of IMemory blocks +class Camera2Heap : public RefBase { + public: + Camera2Heap(size_t buf_size, uint_t num_buffers = 1, + const char *name = NULL) : + mBufSize(buf_size), + mNumBufs(num_buffers) { + mHeap = new MemoryHeapBase(buf_size * num_buffers, 0, name); + mBuffers = new sp[mNumBufs]; + for (uint_t i = 0; i < mNumBufs; i++) + mBuffers[i] = new MemoryBase(mHeap, + i * mBufSize, + mBufSize); + } + + virtual ~Camera2Heap() + { + delete [] mBuffers; + } + + size_t mBufSize; + uint_t mNumBufs; + sp mHeap; + sp *mBuffers; +}; + +}; // namespace camera2 +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp new file mode 100644 index 0000000..ad1590a --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp @@ -0,0 +1,710 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera2-CaptureSequencer" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include +#include + +#include "api1/Camera2Client.h" +#include "api1/client2/CaptureSequencer.h" +#include "api1/client2/BurstCapture.h" +#include "api1/client2/Parameters.h" +#include "api1/client2/ZslProcessorInterface.h" + +namespace android { +namespace camera2 { + +/** Public members */ + +CaptureSequencer::CaptureSequencer(wp client): + Thread(false), + mStartCapture(false), + mBusy(false), + mNewAEState(false), + mNewFrameReceived(false), + mNewCaptureReceived(false), + mShutterNotified(false), + mClient(client), + mCaptureState(IDLE), + mTriggerId(0), + mTimeoutCount(0), + mCaptureId(Camera2Client::kCaptureRequestIdStart), + mMsgType(0) { + ALOGV("%s", __FUNCTION__); +} + +CaptureSequencer::~CaptureSequencer() { + ALOGV("%s: Exit", __FUNCTION__); +} + +void CaptureSequencer::setZslProcessor(wp processor) { + Mutex::Autolock l(mInputMutex); + mZslProcessor = processor; +} + +status_t CaptureSequencer::startCapture(int msgType) { + ALOGV("%s", __FUNCTION__); + ATRACE_CALL(); + Mutex::Autolock l(mInputMutex); + if (mBusy) { + ALOGE("%s: Already busy capturing!", __FUNCTION__); + return INVALID_OPERATION; + } + if (!mStartCapture) { + mMsgType = msgType; + mStartCapture = true; + mStartCaptureSignal.signal(); + } + return OK; +} + +status_t CaptureSequencer::waitUntilIdle(nsecs_t timeout) { + ATRACE_CALL(); + ALOGV("%s: Waiting for idle", __FUNCTION__); + Mutex::Autolock l(mStateMutex); + status_t res = -1; + while (mCaptureState != IDLE) { + nsecs_t startTime = systemTime(); + + res = mStateChanged.waitRelative(mStateMutex, timeout); + if (res != OK) return res; + + timeout -= (systemTime() - startTime); + } + ALOGV("%s: Now idle", __FUNCTION__); + return OK; +} + +void 
CaptureSequencer::notifyAutoExposure(uint8_t newState, int triggerId) { + ATRACE_CALL(); + Mutex::Autolock l(mInputMutex); + mAEState = newState; + mAETriggerId = triggerId; + if (!mNewAEState) { + mNewAEState = true; + mNewNotifySignal.signal(); + } +} + +void CaptureSequencer::onFrameAvailable(int32_t frameId, + const CameraMetadata &frame) { + ALOGV("%s: Listener found new frame", __FUNCTION__); + ATRACE_CALL(); + Mutex::Autolock l(mInputMutex); + mNewFrameId = frameId; + mNewFrame = frame; + if (!mNewFrameReceived) { + mNewFrameReceived = true; + mNewFrameSignal.signal(); + } +} + +void CaptureSequencer::onCaptureAvailable(nsecs_t timestamp, + sp captureBuffer) { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + Mutex::Autolock l(mInputMutex); + mCaptureTimestamp = timestamp; + mCaptureBuffer = captureBuffer; + if (!mNewCaptureReceived) { + mNewCaptureReceived = true; + mNewCaptureSignal.signal(); + } +} + + +void CaptureSequencer::dump(int fd, const Vector& /*args*/) { + String8 result; + if (mCaptureRequest.entryCount() != 0) { + result = " Capture request:\n"; + write(fd, result.string(), result.size()); + mCaptureRequest.dump(fd, 2, 6); + } else { + result = " Capture request: undefined\n"; + write(fd, result.string(), result.size()); + } + result = String8::format(" Current capture state: %s\n", + kStateNames[mCaptureState]); + result.append(" Latest captured frame:\n"); + write(fd, result.string(), result.size()); + mNewFrame.dump(fd, 2, 6); +} + +/** Private members */ + +const char* CaptureSequencer::kStateNames[CaptureSequencer::NUM_CAPTURE_STATES+1] = +{ + "IDLE", + "START", + "ZSL_START", + "ZSL_WAITING", + "ZSL_REPROCESSING", + "STANDARD_START", + "STANDARD_PRECAPTURE_WAIT", + "STANDARD_CAPTURE", + "STANDARD_CAPTURE_WAIT", + "BURST_CAPTURE_START", + "BURST_CAPTURE_WAIT", + "DONE", + "ERROR", + "UNKNOWN" +}; + +const CaptureSequencer::StateManager + CaptureSequencer::kStateManagers[CaptureSequencer::NUM_CAPTURE_STATES-1] = { + &CaptureSequencer::manageIdle, + &CaptureSequencer::manageStart, + &CaptureSequencer::manageZslStart, + &CaptureSequencer::manageZslWaiting, + &CaptureSequencer::manageZslReprocessing, + &CaptureSequencer::manageStandardStart, + &CaptureSequencer::manageStandardPrecaptureWait, + &CaptureSequencer::manageStandardCapture, + &CaptureSequencer::manageStandardCaptureWait, + &CaptureSequencer::manageBurstCaptureStart, + &CaptureSequencer::manageBurstCaptureWait, + &CaptureSequencer::manageDone, +}; + +bool CaptureSequencer::threadLoop() { + + sp client = mClient.promote(); + if (client == 0) return false; + + CaptureState currentState; + { + Mutex::Autolock l(mStateMutex); + currentState = mCaptureState; + } + + currentState = (this->*kStateManagers[currentState])(client); + + Mutex::Autolock l(mStateMutex); + if (currentState != mCaptureState) { + mCaptureState = currentState; + ATRACE_INT("cam2_capt_state", mCaptureState); + ALOGV("Camera %d: New capture state %s", + client->getCameraId(), kStateNames[mCaptureState]); + mStateChanged.signal(); + } + + if (mCaptureState == ERROR) { + ALOGE("Camera %d: Stopping capture sequencer due to error", + client->getCameraId()); + return false; + } + + return true; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageIdle( + sp &/*client*/) { + status_t res; + Mutex::Autolock l(mInputMutex); + while (!mStartCapture) { + res = mStartCaptureSignal.waitRelative(mInputMutex, + kWaitDuration); + if (res == TIMED_OUT) break; + } + if (mStartCapture) { + mStartCapture = false; + mBusy = true; + return START; + } + 
return IDLE; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageDone(sp &client) { + status_t res = OK; + ATRACE_CALL(); + mCaptureId++; + if (mCaptureId >= Camera2Client::kCaptureRequestIdEnd) { + mCaptureId = Camera2Client::kCaptureRequestIdStart; + } + { + Mutex::Autolock l(mInputMutex); + mBusy = false; + } + + { + SharedParameters::Lock l(client->getParameters()); + switch (l.mParameters.state) { + case Parameters::DISCONNECTED: + ALOGW("%s: Camera %d: Discarding image data during shutdown ", + __FUNCTION__, client->getCameraId()); + res = INVALID_OPERATION; + break; + case Parameters::STILL_CAPTURE: + res = client->getCameraDevice()->waitUntilDrained(); + if (res != OK) { + ALOGE("%s: Camera %d: Can't idle after still capture: " + "%s (%d)", __FUNCTION__, client->getCameraId(), + strerror(-res), res); + } + l.mParameters.state = Parameters::STOPPED; + break; + case Parameters::VIDEO_SNAPSHOT: + l.mParameters.state = Parameters::RECORD; + break; + default: + ALOGE("%s: Camera %d: Still image produced unexpectedly " + "in state %s!", + __FUNCTION__, client->getCameraId(), + Parameters::getStateName(l.mParameters.state)); + res = INVALID_OPERATION; + } + } + sp processor = mZslProcessor.promote(); + if (processor != 0) { + ALOGV("%s: Memory optimization, clearing ZSL queue", + __FUNCTION__); + processor->clearZslQueue(); + } + + /** + * Fire the jpegCallback in Camera#takePicture(..., jpegCallback) + */ + if (mCaptureBuffer != 0 && res == OK) { + Camera2Client::SharedCameraCallbacks::Lock + l(client->mSharedCameraCallbacks); + ALOGV("%s: Sending still image to client", __FUNCTION__); + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, + mCaptureBuffer, NULL); + } else { + ALOGV("%s: No client!", __FUNCTION__); + } + } + mCaptureBuffer.clear(); + + return IDLE; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageStart( + sp &client) { + ALOGV("%s", __FUNCTION__); + status_t res; + ATRACE_CALL(); + SharedParameters::Lock l(client->getParameters()); + CaptureState nextState = DONE; + + res = updateCaptureRequest(l.mParameters, client); + if (res != OK ) { + ALOGE("%s: Camera %d: Can't update still image capture request: %s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return DONE; + } + + if(l.mParameters.lightFx != Parameters::LIGHTFX_NONE && + l.mParameters.state == Parameters::STILL_CAPTURE) { + nextState = BURST_CAPTURE_START; + } + else if (l.mParameters.zslMode && + l.mParameters.state == Parameters::STILL_CAPTURE && + l.mParameters.flashMode != Parameters::FLASH_MODE_ON) { + nextState = ZSL_START; + } else { + nextState = STANDARD_START; + } + mShutterNotified = false; + + return nextState; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageZslStart( + sp &client) { + ALOGV("%s", __FUNCTION__); + status_t res; + sp processor = mZslProcessor.promote(); + if (processor == 0) { + ALOGE("%s: No ZSL queue to use!", __FUNCTION__); + return DONE; + } + + client->registerFrameListener(mCaptureId, mCaptureId + 1, + this); + + // TODO: Actually select the right thing here. 
+ res = processor->pushToReprocess(mCaptureId); + if (res != OK) { + if (res == NOT_ENOUGH_DATA) { + ALOGV("%s: Camera %d: ZSL queue doesn't have good frame, " + "falling back to normal capture", __FUNCTION__, + client->getCameraId()); + } else { + ALOGE("%s: Camera %d: Error in ZSL queue: %s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + } + return STANDARD_START; + } + + SharedParameters::Lock l(client->getParameters()); + /* warning: this also locks a SharedCameraCallbacks */ + shutterNotifyLocked(l.mParameters, client, mMsgType); + mShutterNotified = true; + mTimeoutCount = kMaxTimeoutsForCaptureEnd; + return STANDARD_CAPTURE_WAIT; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageZslWaiting( + sp &/*client*/) { + ALOGV("%s", __FUNCTION__); + return DONE; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageZslReprocessing( + sp &/*client*/) { + ALOGV("%s", __FUNCTION__); + return START; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageStandardStart( + sp &client) { + ATRACE_CALL(); + + // Get the onFrameAvailable callback when the requestID == mCaptureId + client->registerFrameListener(mCaptureId, mCaptureId + 1, + this); + { + SharedParameters::Lock l(client->getParameters()); + mTriggerId = l.mParameters.precaptureTriggerCounter++; + } + client->getCameraDevice()->triggerPrecaptureMetering(mTriggerId); + + mAeInPrecapture = false; + mTimeoutCount = kMaxTimeoutsForPrecaptureStart; + return STANDARD_PRECAPTURE_WAIT; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageStandardPrecaptureWait( + sp &/*client*/) { + status_t res; + ATRACE_CALL(); + Mutex::Autolock l(mInputMutex); + while (!mNewAEState) { + res = mNewNotifySignal.waitRelative(mInputMutex, kWaitDuration); + if (res == TIMED_OUT) { + mTimeoutCount--; + break; + } + } + if (mTimeoutCount <= 0) { + ALOGW("Timed out waiting for precapture %s", + mAeInPrecapture ? 
"end" : "start"); + return STANDARD_CAPTURE; + } + if (mNewAEState) { + if (!mAeInPrecapture) { + // Waiting to see PRECAPTURE state + if (mAETriggerId == mTriggerId && + mAEState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) { + ALOGV("%s: Got precapture start", __FUNCTION__); + mAeInPrecapture = true; + mTimeoutCount = kMaxTimeoutsForPrecaptureEnd; + } + } else { + // Waiting to see PRECAPTURE state end + if (mAETriggerId == mTriggerId && + mAEState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) { + ALOGV("%s: Got precapture end", __FUNCTION__); + return STANDARD_CAPTURE; + } + } + mNewAEState = false; + } + return STANDARD_PRECAPTURE_WAIT; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture( + sp &client) { + status_t res; + ATRACE_CALL(); + SharedParameters::Lock l(client->getParameters()); + Vector outputStreams; + + /** + * Set up output streams in the request + * - preview + * - capture/jpeg + * - callback (if preview callbacks enabled) + * - recording (if recording enabled) + */ + outputStreams.push(client->getPreviewStreamId()); + outputStreams.push(client->getCaptureStreamId()); + + if (l.mParameters.previewCallbackFlags & + CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) { + outputStreams.push(client->getCallbackStreamId()); + } + + if (l.mParameters.state == Parameters::VIDEO_SNAPSHOT) { + outputStreams.push(client->getRecordingStreamId()); + } + + res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS, + outputStreams); + if (res == OK) { + res = mCaptureRequest.update(ANDROID_REQUEST_ID, + &mCaptureId, 1); + } + if (res == OK) { + res = mCaptureRequest.sort(); + } + + if (res != OK) { + ALOGE("%s: Camera %d: Unable to set up still capture request: %s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return DONE; + } + + // Create a capture copy since CameraDeviceBase#capture takes ownership + CameraMetadata captureCopy = mCaptureRequest; + if (captureCopy.entryCount() == 0) { + ALOGE("%s: Camera %d: Unable to copy capture request for HAL device", + __FUNCTION__, client->getCameraId()); + return DONE; + } + + /** + * Clear the streaming request for still-capture pictures + * (as opposed to i.e. 
video snapshots) + */ + if (l.mParameters.state == Parameters::STILL_CAPTURE) { + // API definition of takePicture() - stop preview before taking pic + res = client->stopStream(); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to stop preview for still capture: " + "%s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return DONE; + } + } + // TODO: Capture should be atomic with setStreamingRequest here + res = client->getCameraDevice()->capture(captureCopy); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to submit still image capture request: " + "%s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return DONE; + } + + mTimeoutCount = kMaxTimeoutsForCaptureEnd; + return STANDARD_CAPTURE_WAIT; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageStandardCaptureWait( + sp &client) { + status_t res; + ATRACE_CALL(); + Mutex::Autolock l(mInputMutex); + + // Wait for new metadata result (mNewFrame) + while (!mNewFrameReceived) { + res = mNewFrameSignal.waitRelative(mInputMutex, kWaitDuration); + if (res == TIMED_OUT) { + mTimeoutCount--; + break; + } + } + + // Approximation of the shutter being closed + // - TODO: use the hal3 exposure callback in Camera3Device instead + if (mNewFrameReceived && !mShutterNotified) { + SharedParameters::Lock l(client->getParameters()); + /* warning: this also locks a SharedCameraCallbacks */ + shutterNotifyLocked(l.mParameters, client, mMsgType); + mShutterNotified = true; + } + + // Wait until jpeg was captured by JpegProcessor + while (mNewFrameReceived && !mNewCaptureReceived) { + res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration); + if (res == TIMED_OUT) { + mTimeoutCount--; + break; + } + } + if (mTimeoutCount <= 0) { + ALOGW("Timed out waiting for capture to complete"); + return DONE; + } + if (mNewFrameReceived && mNewCaptureReceived) { + if (mNewFrameId != mCaptureId) { + ALOGW("Mismatched capture frame IDs: Expected %d, got %d", + mCaptureId, mNewFrameId); + } + camera_metadata_entry_t entry; + entry = mNewFrame.find(ANDROID_SENSOR_TIMESTAMP); + if (entry.count == 0) { + ALOGE("No timestamp field in capture frame!"); + } + if (entry.data.i64[0] != mCaptureTimestamp) { + ALOGW("Mismatched capture timestamps: Metadata frame %lld," + " captured buffer %lld", + entry.data.i64[0], + mCaptureTimestamp); + } + client->removeFrameListener(mCaptureId, mCaptureId + 1, this); + + mNewFrameReceived = false; + mNewCaptureReceived = false; + return DONE; + } + return STANDARD_CAPTURE_WAIT; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageBurstCaptureStart( + sp &client) { + ALOGV("%s", __FUNCTION__); + status_t res; + ATRACE_CALL(); + + // check which burst mode is set, create respective burst object + { + SharedParameters::Lock l(client->getParameters()); + + res = updateCaptureRequest(l.mParameters, client); + if(res != OK) { + return DONE; + } + + // + // check for burst mode type in mParameters here + // + mBurstCapture = new BurstCapture(client, this); + } + + res = mCaptureRequest.update(ANDROID_REQUEST_ID, &mCaptureId, 1); + if (res == OK) { + res = mCaptureRequest.sort(); + } + if (res != OK) { + ALOGE("%s: Camera %d: Unable to set up still capture request: %s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return DONE; + } + + CameraMetadata captureCopy = mCaptureRequest; + if (captureCopy.entryCount() == 0) { + ALOGE("%s: Camera %d: Unable to copy capture request for HAL device", + __FUNCTION__, client->getCameraId()); + return DONE; + } + + Vector 
requests; + requests.push(mCaptureRequest); + res = mBurstCapture->start(requests, mCaptureId); + mTimeoutCount = kMaxTimeoutsForCaptureEnd * 10; + return BURST_CAPTURE_WAIT; +} + +CaptureSequencer::CaptureState CaptureSequencer::manageBurstCaptureWait( + sp &/*client*/) { + status_t res; + ATRACE_CALL(); + + while (!mNewCaptureReceived) { + res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration); + if (res == TIMED_OUT) { + mTimeoutCount--; + break; + } + } + + if (mTimeoutCount <= 0) { + ALOGW("Timed out waiting for burst capture to complete"); + return DONE; + } + if (mNewCaptureReceived) { + mNewCaptureReceived = false; + // TODO: update mCaptureId to last burst's capture ID + 1? + return DONE; + } + + return BURST_CAPTURE_WAIT; +} + +status_t CaptureSequencer::updateCaptureRequest(const Parameters ¶ms, + sp &client) { + ATRACE_CALL(); + status_t res; + if (mCaptureRequest.entryCount() == 0) { + res = client->getCameraDevice()->createDefaultRequest( + CAMERA2_TEMPLATE_STILL_CAPTURE, + &mCaptureRequest); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to create default still image request:" + " %s (%d)", __FUNCTION__, client->getCameraId(), + strerror(-res), res); + return res; + } + } + + res = params.updateRequest(&mCaptureRequest); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update common entries of capture " + "request: %s (%d)", __FUNCTION__, client->getCameraId(), + strerror(-res), res); + return res; + } + + res = params.updateRequestJpeg(&mCaptureRequest); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update JPEG entries of capture " + "request: %s (%d)", __FUNCTION__, client->getCameraId(), + strerror(-res), res); + return res; + } + + return OK; +} + +/*static*/ void CaptureSequencer::shutterNotifyLocked(const Parameters ¶ms, + sp client, int msgType) { + ATRACE_CALL(); + + if (params.state == Parameters::STILL_CAPTURE + && params.playShutterSound + && (msgType & CAMERA_MSG_SHUTTER)) { + client->getCameraService()->playSound(CameraService::SOUND_SHUTTER); + } + + { + Camera2Client::SharedCameraCallbacks::Lock + l(client->mSharedCameraCallbacks); + + ALOGV("%s: Notifying of shutter close to client", __FUNCTION__); + if (l.mRemoteCallback != 0) { + // ShutterCallback + l.mRemoteCallback->notifyCallback(CAMERA_MSG_SHUTTER, + /*ext1*/0, /*ext2*/0); + + // RawCallback with null buffer + l.mRemoteCallback->notifyCallback(CAMERA_MSG_RAW_IMAGE_NOTIFY, + /*ext1*/0, /*ext2*/0); + } else { + ALOGV("%s: No client!", __FUNCTION__); + } + } +} + + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h new file mode 100644 index 0000000..76750aa --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h @@ -0,0 +1,177 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CAPTURESEQUENCER_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_CAPTURESEQUENCER_H + +#include +#include +#include +#include +#include +#include +#include "camera/CameraMetadata.h" +#include "Parameters.h" +#include "FrameProcessor.h" + +namespace android { + +class Camera2Client; + +namespace camera2 { + +class ZslProcessorInterface; +class BurstCapture; + +/** + * Manages the still image capture process for + * zero-shutter-lag, regular, and video snapshots. + */ +class CaptureSequencer: + virtual public Thread, + virtual public FrameProcessor::FilteredListener { + public: + CaptureSequencer(wp client); + ~CaptureSequencer(); + + // Get reference to the ZslProcessor, which holds the ZSL buffers and frames + void setZslProcessor(wp processor); + + // Begin still image capture + status_t startCapture(int msgType); + + // Wait until current image capture completes; returns immediately if no + // capture is active. Returns TIMED_OUT if capture does not complete during + // the specified duration. + status_t waitUntilIdle(nsecs_t timeout); + + // Notifications about AE state changes + void notifyAutoExposure(uint8_t newState, int triggerId); + + // Notifications from the frame processor + virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame); + + // Notifications from the JPEG processor + void onCaptureAvailable(nsecs_t timestamp, sp captureBuffer); + + void dump(int fd, const Vector& args); + + private: + /** + * Accessed by other threads + */ + Mutex mInputMutex; + + bool mStartCapture; + bool mBusy; + Condition mStartCaptureSignal; + + bool mNewAEState; + uint8_t mAEState; + int mAETriggerId; + Condition mNewNotifySignal; + + bool mNewFrameReceived; + int32_t mNewFrameId; + CameraMetadata mNewFrame; + Condition mNewFrameSignal; + + bool mNewCaptureReceived; + nsecs_t mCaptureTimestamp; + sp mCaptureBuffer; + Condition mNewCaptureSignal; + + bool mShutterNotified; + + /** + * Internal to CaptureSequencer + */ + static const nsecs_t kWaitDuration = 100000000; // 100 ms + static const int kMaxTimeoutsForPrecaptureStart = 2; // 200 ms + static const int kMaxTimeoutsForPrecaptureEnd = 20; // 2 sec + static const int kMaxTimeoutsForCaptureEnd = 40; // 4 sec + + wp mClient; + wp mZslProcessor; + sp mBurstCapture; + + enum CaptureState { + IDLE, + START, + ZSL_START, + ZSL_WAITING, + ZSL_REPROCESSING, + STANDARD_START, + STANDARD_PRECAPTURE_WAIT, + STANDARD_CAPTURE, + STANDARD_CAPTURE_WAIT, + BURST_CAPTURE_START, + BURST_CAPTURE_WAIT, + DONE, + ERROR, + NUM_CAPTURE_STATES + } mCaptureState; + static const char* kStateNames[]; + Mutex mStateMutex; // Guards mCaptureState + Condition mStateChanged; + + typedef CaptureState (CaptureSequencer::*StateManager)(sp &client); + static const StateManager kStateManagers[]; + + CameraMetadata mCaptureRequest; + + int mTriggerId; + int mTimeoutCount; + bool mAeInPrecapture; + + int32_t mCaptureId; + int mMsgType; + + // Main internal methods + + virtual bool threadLoop(); + + CaptureState manageIdle(sp &client); + CaptureState manageStart(sp &client); + + CaptureState manageZslStart(sp &client); + CaptureState manageZslWaiting(sp &client); + CaptureState manageZslReprocessing(sp &client); + + CaptureState manageStandardStart(sp &client); + CaptureState manageStandardPrecaptureWait(sp &client); + CaptureState manageStandardCapture(sp &client); + CaptureState manageStandardCaptureWait(sp &client); + + CaptureState manageBurstCaptureStart(sp &client); + CaptureState manageBurstCaptureWait(sp 
&client); + + CaptureState manageDone(sp &client); + + // Utility methods + + status_t updateCaptureRequest(const Parameters ¶ms, + sp &client); + + // Emit Shutter/Raw callback to java, and maybe play a shutter sound + static void shutterNotifyLocked(const Parameters ¶ms, + sp client, int msgType); +}; + +}; // namespace camera2 +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp new file mode 100644 index 0000000..c34cb12 --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp @@ -0,0 +1,315 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera2-FrameProcessor" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include + +#include "common/CameraDeviceBase.h" +#include "api1/Camera2Client.h" +#include "api1/client2/FrameProcessor.h" + +namespace android { +namespace camera2 { + +FrameProcessor::FrameProcessor(wp device, + wp client) : + FrameProcessorBase(device), + mClient(client), + mLastFrameNumberOfFaces(0) { + + sp d = device.promote(); + mSynthesize3ANotify = !(d->willNotify3A()); +} + +FrameProcessor::~FrameProcessor() { +} + +bool FrameProcessor::processSingleFrame(CameraMetadata &frame, + const sp &device) { + + sp client = mClient.promote(); + if (!client.get()) { + return false; + } + + if (processFaceDetect(frame, client) != OK) { + return false; + } + + if (mSynthesize3ANotify) { + // Ignoring missing fields for now + process3aState(frame, client); + } + + if (!FrameProcessorBase::processSingleFrame(frame, device)) { + return false; + } + + return true; +} + +status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, + const sp &client) { + status_t res = BAD_VALUE; + ATRACE_CALL(); + camera_metadata_ro_entry_t entry; + bool enableFaceDetect; + + { + SharedParameters::Lock l(client->getParameters()); + enableFaceDetect = l.mParameters.enableFaceDetect; + } + entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE); + + // TODO: This should be an error once implementations are compliant + if (entry.count == 0) { + return OK; + } + + uint8_t faceDetectMode = entry.data.u8[0]; + + camera_frame_metadata metadata; + Vector faces; + metadata.number_of_faces = 0; + + if (enableFaceDetect && + faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) { + + SharedParameters::Lock l(client->getParameters()); + entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES); + if (entry.count == 0) { + // No faces this frame + /* warning: locks SharedCameraCallbacks */ + callbackFaceDetection(client, metadata); + return OK; + } + metadata.number_of_faces = entry.count / 4; + if (metadata.number_of_faces > + l.mParameters.fastInfo.maxFaces) { + ALOGE("%s: Camera %d: More faces than expected! 
(Got %d, max %d)", + __FUNCTION__, client->getCameraId(), + metadata.number_of_faces, l.mParameters.fastInfo.maxFaces); + return res; + } + const int32_t *faceRects = entry.data.i32; + + entry = frame.find(ANDROID_STATISTICS_FACE_SCORES); + if (entry.count == 0) { + ALOGE("%s: Camera %d: Unable to read face scores", + __FUNCTION__, client->getCameraId()); + return res; + } + const uint8_t *faceScores = entry.data.u8; + + const int32_t *faceLandmarks = NULL; + const int32_t *faceIds = NULL; + + if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) { + entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS); + if (entry.count == 0) { + ALOGE("%s: Camera %d: Unable to read face landmarks", + __FUNCTION__, client->getCameraId()); + return res; + } + faceLandmarks = entry.data.i32; + + entry = frame.find(ANDROID_STATISTICS_FACE_IDS); + + if (entry.count == 0) { + ALOGE("%s: Camera %d: Unable to read face IDs", + __FUNCTION__, client->getCameraId()); + return res; + } + faceIds = entry.data.i32; + } + + faces.setCapacity(metadata.number_of_faces); + + size_t maxFaces = metadata.number_of_faces; + for (size_t i = 0; i < maxFaces; i++) { + if (faceScores[i] == 0) { + metadata.number_of_faces--; + continue; + } + if (faceScores[i] > 100) { + ALOGW("%s: Face index %d with out of range score %d", + __FUNCTION__, i, faceScores[i]); + } + + camera_face_t face; + + face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]); + face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]); + face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]); + face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]); + + face.score = faceScores[i]; + if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) { + face.id = faceIds[i]; + face.left_eye[0] = + l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]); + face.left_eye[1] = + l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]); + face.right_eye[0] = + l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]); + face.right_eye[1] = + l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]); + face.mouth[0] = + l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]); + face.mouth[1] = + l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]); + } else { + face.id = 0; + face.left_eye[0] = face.left_eye[1] = -2000; + face.right_eye[0] = face.right_eye[1] = -2000; + face.mouth[0] = face.mouth[1] = -2000; + } + faces.push_back(face); + } + + metadata.faces = faces.editArray(); + } + + /* warning: locks SharedCameraCallbacks */ + callbackFaceDetection(client, metadata); + + return OK; +} + +status_t FrameProcessor::process3aState(const CameraMetadata &frame, + const sp &client) { + + ATRACE_CALL(); + camera_metadata_ro_entry_t entry; + int mId = client->getCameraId(); + + entry = frame.find(ANDROID_REQUEST_FRAME_COUNT); + int32_t frameNumber = entry.data.i32[0]; + + // Get 3A states from result metadata + bool gotAllStates = true; + + AlgState new3aState; + + entry = frame.find(ANDROID_CONTROL_AE_STATE); + if (entry.count == 0) { + ALOGE("%s: Camera %d: No AE state provided by HAL for frame %d!", + __FUNCTION__, mId, frameNumber); + gotAllStates = false; + } else { + new3aState.aeState = + static_cast( + entry.data.u8[0]); + } + + entry = frame.find(ANDROID_CONTROL_AF_STATE); + if (entry.count == 0) { + ALOGE("%s: Camera %d: No AF state provided by HAL for frame %d!", + __FUNCTION__, mId, frameNumber); + gotAllStates = false; + } else { + new3aState.afState = + static_cast( + entry.data.u8[0]); + } + + 
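// --- Illustrative sketch, not part of the patch: the tag-lookup pattern that
// process3aState() repeats for each 3A field -- find() the entry, treat a
// missing tag as "state not provided" (the caller logs it and clears
// gotAllStates), otherwise cast the first u8 element to the enum type.
// The template helper readU8State and its default-value parameter are
// hypothetical; the include path matches the one CaptureSequencer.h uses
// elsewhere in this patch.
#include <camera/CameraMetadata.h>

template <typename EnumT>
static bool readU8State(const android::CameraMetadata &frame, uint32_t tag,
                        EnumT *state, EnumT defaultState) {
    camera_metadata_ro_entry_t entry = frame.find(tag);
    if (entry.count == 0) {
        *state = defaultState;      // tag missing from this frame's result
        return false;
    }
    *state = static_cast<EnumT>(entry.data.u8[0]);
    return true;
}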
entry = frame.find(ANDROID_CONTROL_AWB_STATE); + if (entry.count == 0) { + ALOGE("%s: Camera %d: No AWB state provided by HAL for frame %d!", + __FUNCTION__, mId, frameNumber); + gotAllStates = false; + } else { + new3aState.awbState = + static_cast( + entry.data.u8[0]); + } + + int32_t afTriggerId = 0; + entry = frame.find(ANDROID_CONTROL_AF_TRIGGER_ID); + if (entry.count == 0) { + ALOGE("%s: Camera %d: No AF trigger ID provided by HAL for frame %d!", + __FUNCTION__, mId, frameNumber); + gotAllStates = false; + } else { + afTriggerId = entry.data.i32[0]; + } + + int32_t aeTriggerId = 0; + entry = frame.find(ANDROID_CONTROL_AE_PRECAPTURE_ID); + if (entry.count == 0) { + ALOGE("%s: Camera %d: No AE precapture trigger ID provided by HAL" + " for frame %d!", + __FUNCTION__, mId, frameNumber); + gotAllStates = false; + } else { + aeTriggerId = entry.data.i32[0]; + } + + if (!gotAllStates) return BAD_VALUE; + + if (new3aState.aeState != m3aState.aeState) { + ALOGV("%s: AE state changed from 0x%x to 0x%x", + __FUNCTION__, m3aState.aeState, new3aState.aeState); + client->notifyAutoExposure(new3aState.aeState, aeTriggerId); + } + if (new3aState.afState != m3aState.afState) { + ALOGV("%s: AF state changed from 0x%x to 0x%x", + __FUNCTION__, m3aState.afState, new3aState.afState); + client->notifyAutoFocus(new3aState.afState, afTriggerId); + } + if (new3aState.awbState != m3aState.awbState) { + ALOGV("%s: AWB state changed from 0x%x to 0x%x", + __FUNCTION__, m3aState.awbState, new3aState.awbState); + client->notifyAutoWhitebalance(new3aState.awbState, aeTriggerId); + } + + m3aState = new3aState; + + return OK; +} + + +void FrameProcessor::callbackFaceDetection(sp client, + const camera_frame_metadata &metadata) { + + camera_frame_metadata *metadata_ptr = + const_cast(&metadata); + + /** + * Filter out repeated 0-face callbacks, + * but not when the last frame was >0 + */ + if (metadata.number_of_faces != 0 || + mLastFrameNumberOfFaces != metadata.number_of_faces) { + + Camera2Client::SharedCameraCallbacks::Lock + l(client->mSharedCameraCallbacks); + if (l.mRemoteCallback != NULL) { + l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA, + NULL, + metadata_ptr); + } + } + + mLastFrameNumberOfFaces = metadata.number_of_faces; +} + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h new file mode 100644 index 0000000..2a17d45 --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_FRAMEPROCESSOR_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_FRAMEPROCESSOR_H + +#include +#include +#include +#include +#include +#include + +#include "common/FrameProcessorBase.h" + +struct camera_frame_metadata; + +namespace android { + +class Camera2Client; + +namespace camera2 { + +/* Output frame metadata processing thread. This thread waits for new + * frames from the device, and analyzes them as necessary. + */ +class FrameProcessor : public FrameProcessorBase { + public: + FrameProcessor(wp device, wp client); + ~FrameProcessor(); + + private: + wp mClient; + + bool mSynthesize3ANotify; + + int mLastFrameNumberOfFaces; + + void processNewFrames(const sp &client); + + virtual bool processSingleFrame(CameraMetadata &frame, + const sp &device); + + status_t processFaceDetect(const CameraMetadata &frame, + const sp &client); + + // Send 3A state change notifications to client based on frame metadata + status_t process3aState(const CameraMetadata &frame, + const sp &client); + + struct AlgState { + camera_metadata_enum_android_control_ae_state aeState; + camera_metadata_enum_android_control_af_state afState; + camera_metadata_enum_android_control_awb_state awbState; + + AlgState() : + aeState(ANDROID_CONTROL_AE_STATE_INACTIVE), + afState(ANDROID_CONTROL_AF_STATE_INACTIVE), + awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE) { + } + } m3aState; + + // Emit FaceDetection event to java if faces changed + void callbackFaceDetection(sp client, + const camera_frame_metadata &metadata); +}; + + +}; //namespace camera2 +}; //namespace android + +#endif diff --git a/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp b/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp new file mode 100644 index 0000000..2f0c67d --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp @@ -0,0 +1,221 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "Camera2-JpegCompressor" + +#include +#include + +#include "JpegCompressor.h" + +namespace android { +namespace camera2 { + +JpegCompressor::JpegCompressor(): + Thread(false), + mIsBusy(false), + mCaptureTime(0) { +} + +JpegCompressor::~JpegCompressor() { + ALOGV("%s", __FUNCTION__); + Mutex::Autolock lock(mMutex); +} + +status_t JpegCompressor::start(Vector buffers, + nsecs_t captureTime) { + ALOGV("%s", __FUNCTION__); + Mutex::Autolock busyLock(mBusyMutex); + + if (mIsBusy) { + ALOGE("%s: Already processing a buffer!", __FUNCTION__); + return INVALID_OPERATION; + } + + mIsBusy = true; + + mBuffers = buffers; + mCaptureTime = captureTime; + + status_t res; + res = run("JpegCompressor"); + if (res != OK) { + ALOGE("%s: Unable to start up compression thread: %s (%d)", + __FUNCTION__, strerror(-res), res); + //delete mBuffers; // necessary? 
+ } + return res; +} + +status_t JpegCompressor::cancel() { + ALOGV("%s", __FUNCTION__); + requestExitAndWait(); + return OK; +} + +status_t JpegCompressor::readyToRun() { + ALOGV("%s", __FUNCTION__); + return OK; +} + +bool JpegCompressor::threadLoop() { + ALOGV("%s", __FUNCTION__); + + mAuxBuffer = mBuffers[0]; // input + mJpegBuffer = mBuffers[1]; // output + + // Set up error management + mJpegErrorInfo = NULL; + JpegError error; + error.parent = this; + + mCInfo.err = jpeg_std_error(&error); + mCInfo.err->error_exit = jpegErrorHandler; + + jpeg_create_compress(&mCInfo); + if (checkError("Error initializing compression")) return false; + + // Route compressed data straight to output stream buffer + JpegDestination jpegDestMgr; + jpegDestMgr.parent = this; + jpegDestMgr.init_destination = jpegInitDestination; + jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer; + jpegDestMgr.term_destination = jpegTermDestination; + + mCInfo.dest = &jpegDestMgr; + + // Set up compression parameters + mCInfo.image_width = mAuxBuffer->width; + mCInfo.image_height = mAuxBuffer->height; + mCInfo.input_components = 1; // 3; + mCInfo.in_color_space = JCS_GRAYSCALE; // JCS_RGB + + ALOGV("%s: image_width = %d, image_height = %d", __FUNCTION__, mCInfo.image_width, mCInfo.image_height); + + jpeg_set_defaults(&mCInfo); + if (checkError("Error configuring defaults")) return false; + + // Do compression + jpeg_start_compress(&mCInfo, TRUE); + if (checkError("Error starting compression")) return false; + + size_t rowStride = mAuxBuffer->stride;// * 3; + const size_t kChunkSize = 32; + while (mCInfo.next_scanline < mCInfo.image_height) { + JSAMPROW chunk[kChunkSize]; + for (size_t i = 0 ; i < kChunkSize; i++) { + chunk[i] = (JSAMPROW) + (mAuxBuffer->data + (i + mCInfo.next_scanline) * rowStride); + } + jpeg_write_scanlines(&mCInfo, chunk, kChunkSize); + if (checkError("Error while compressing")) return false; + if (exitPending()) { + ALOGV("%s: Cancel called, exiting early", __FUNCTION__); + cleanUp(); + return false; + } + } + + jpeg_finish_compress(&mCInfo); + if (checkError("Error while finishing compression")) return false; + + cleanUp(); + return false; +} + +bool JpegCompressor::isBusy() { + ALOGV("%s", __FUNCTION__); + Mutex::Autolock busyLock(mBusyMutex); + return mIsBusy; +} + +// old function -- TODO: update for new buffer type +bool JpegCompressor::isStreamInUse(uint32_t /*id*/) { + ALOGV("%s", __FUNCTION__); + Mutex::Autolock lock(mBusyMutex); + + if (mBuffers.size() && mIsBusy) { + for (size_t i = 0; i < mBuffers.size(); i++) { +// if ( mBuffers[i].streamId == (int)id ) return true; + } + } + return false; +} + +bool JpegCompressor::waitForDone(nsecs_t timeout) { + ALOGV("%s", __FUNCTION__); + Mutex::Autolock lock(mBusyMutex); + status_t res = OK; + if (mIsBusy) { + res = mDone.waitRelative(mBusyMutex, timeout); + } + return (res == OK); +} + +bool JpegCompressor::checkError(const char *msg) { + ALOGV("%s", __FUNCTION__); + if (mJpegErrorInfo) { + char errBuffer[JMSG_LENGTH_MAX]; + mJpegErrorInfo->err->format_message(mJpegErrorInfo, errBuffer); + ALOGE("%s: %s: %s", + __FUNCTION__, msg, errBuffer); + cleanUp(); + mJpegErrorInfo = NULL; + return true; + } + return false; +} + +void JpegCompressor::cleanUp() { + ALOGV("%s", __FUNCTION__); + jpeg_destroy_compress(&mCInfo); + Mutex::Autolock lock(mBusyMutex); + mIsBusy = false; + mDone.signal(); +} + +void JpegCompressor::jpegErrorHandler(j_common_ptr cinfo) { + ALOGV("%s", __FUNCTION__); + JpegError *error = static_cast(cinfo->err); + 
error->parent->mJpegErrorInfo = cinfo; +} + +void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) { + ALOGV("%s", __FUNCTION__); + JpegDestination *dest= static_cast(cinfo->dest); + ALOGV("%s: Setting destination to %p, size %d", + __FUNCTION__, dest->parent->mJpegBuffer->data, kMaxJpegSize); + dest->next_output_byte = (JOCTET*)(dest->parent->mJpegBuffer->data); + dest->free_in_buffer = kMaxJpegSize; +} + +boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr /*cinfo*/) { + ALOGV("%s", __FUNCTION__); + ALOGE("%s: JPEG destination buffer overflow!", + __FUNCTION__); + return true; +} + +void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) { + (void) cinfo; // TODO: clean up + ALOGV("%s", __FUNCTION__); + ALOGV("%s: Done writing JPEG data. %d bytes left in buffer", + __FUNCTION__, cinfo->dest->free_in_buffer); +} + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/api1/client2/JpegCompressor.h b/services/camera/libcameraservice/api1/client2/JpegCompressor.h new file mode 100644 index 0000000..945b1de --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/JpegCompressor.h @@ -0,0 +1,107 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +/** + * This class simulates a hardware JPEG compressor. It receives image buffers + * in RGBA_8888 format, processes them in a worker thread, and then pushes them + * out to their destination stream. + */ + +#ifndef ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H +#define ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H + +#include "utils/Thread.h" +#include "utils/Mutex.h" +#include "utils/Timers.h" +#include "utils/Vector.h" +//#include "Base.h" +#include +#include + +extern "C" { +#include +} + + +namespace android { +namespace camera2 { + +class JpegCompressor: private Thread, public virtual RefBase { + public: + + JpegCompressor(); + ~JpegCompressor(); + + // Start compressing COMPRESSED format buffers; JpegCompressor takes + // ownership of the Buffers vector. 
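+ // The worker thread (threadLoop()) assumes buffers[0] is the input image
+ // and buffers[1] is the destination for the compressed JPEG data.
+ //
+ // Illustrative call sequence (a sketch only; the buffer variables below are
+ // placeholders supplied by the caller):
+ //
+ //     Vector<CpuConsumer::LockedBuffer*> buffers;
+ //     buffers.push(inputBuffer);   // [0]: source image
+ //     buffers.push(outputBuffer);  // [1]: receives the JPEG bytes
+ //     sp<JpegCompressor> compressor = new JpegCompressor();
+ //     if (compressor->start(buffers, captureTimeNs) == OK &&
+ //             !compressor->waitForDone(100 * 1000000LL /* 100 ms */)) {
+ //         compressor->cancel();
+ //     }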
+ status_t start(Vector buffers, + nsecs_t captureTime); + + status_t cancel(); + + bool isBusy(); + bool isStreamInUse(uint32_t id); + + bool waitForDone(nsecs_t timeout); + + // TODO: Measure this + static const size_t kMaxJpegSize = 300000; + + private: + Mutex mBusyMutex; + Mutex mMutex; + bool mIsBusy; + Condition mDone; + nsecs_t mCaptureTime; + + Vector mBuffers; + CpuConsumer::LockedBuffer *mJpegBuffer; + CpuConsumer::LockedBuffer *mAuxBuffer; + bool mFoundJpeg, mFoundAux; + + jpeg_compress_struct mCInfo; + + struct JpegError : public jpeg_error_mgr { + JpegCompressor *parent; + }; + j_common_ptr mJpegErrorInfo; + + struct JpegDestination : public jpeg_destination_mgr { + JpegCompressor *parent; + }; + + static void jpegErrorHandler(j_common_ptr cinfo); + + static void jpegInitDestination(j_compress_ptr cinfo); + static boolean jpegEmptyOutputBuffer(j_compress_ptr cinfo); + static void jpegTermDestination(j_compress_ptr cinfo); + + bool checkError(const char *msg); + void cleanUp(); + + /** + * Inherited Thread virtual overrides + */ + private: + virtual status_t readyToRun(); + virtual bool threadLoop(); +}; + +}; // namespace camera2 +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp new file mode 100644 index 0000000..b920edf --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp @@ -0,0 +1,388 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera2-JpegProcessor" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include + +#include +#include +#include +#include +#include + +#include "common/CameraDeviceBase.h" +#include "api1/Camera2Client.h" +#include "api1/client2/Camera2Heap.h" +#include "api1/client2/CaptureSequencer.h" +#include "api1/client2/JpegProcessor.h" + +namespace android { +namespace camera2 { + +JpegProcessor::JpegProcessor( + sp client, + wp sequencer): + Thread(false), + mDevice(client->getCameraDevice()), + mSequencer(sequencer), + mId(client->getCameraId()), + mCaptureAvailable(false), + mCaptureStreamId(NO_STREAM) { +} + +JpegProcessor::~JpegProcessor() { + ALOGV("%s: Exit", __FUNCTION__); + deleteStream(); +} + +void JpegProcessor::onFrameAvailable() { + Mutex::Autolock l(mInputMutex); + if (!mCaptureAvailable) { + mCaptureAvailable = true; + mCaptureAvailableSignal.signal(); + } +} + +status_t JpegProcessor::updateStream(const Parameters ¶ms) { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + status_t res; + + Mutex::Autolock l(mInputMutex); + + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + // Find out buffer size for JPEG + camera_metadata_ro_entry_t maxJpegSize = + params.staticInfo(ANDROID_JPEG_MAX_SIZE); + if (maxJpegSize.count == 0) { + ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!", + __FUNCTION__, mId); + return INVALID_OPERATION; + } + + if (mCaptureConsumer == 0) { + // Create CPU buffer queue endpoint + sp bq = new BufferQueue(); + mCaptureConsumer = new CpuConsumer(bq, 1); + mCaptureConsumer->setFrameAvailableListener(this); + mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer")); + mCaptureWindow = new Surface( + mCaptureConsumer->getProducerInterface()); + // Create memory for API consumption + mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0, + "Camera2Client::CaptureHeap"); + if (mCaptureHeap->getSize() == 0) { + ALOGE("%s: Camera %d: Unable to allocate memory for capture", + __FUNCTION__, mId); + return NO_MEMORY; + } + } + + if (mCaptureStreamId != NO_STREAM) { + // Check if stream parameters have to change + uint32_t currentWidth, currentHeight; + res = device->getStreamInfo(mCaptureStreamId, + ¤tWidth, ¤tHeight, 0); + if (res != OK) { + ALOGE("%s: Camera %d: Error querying capture output stream info: " + "%s (%d)", __FUNCTION__, + mId, strerror(-res), res); + return res; + } + if (currentWidth != (uint32_t)params.pictureWidth || + currentHeight != (uint32_t)params.pictureHeight) { + ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed", + __FUNCTION__, mId, mCaptureStreamId); + res = device->deleteStream(mCaptureStreamId); + if (res == -EBUSY) { + ALOGV("%s: Camera %d: Device is busy, call updateStream again " + " after it becomes idle", __FUNCTION__, mId); + return res; + } else if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete old output stream " + "for capture: %s (%d)", __FUNCTION__, + mId, strerror(-res), res); + return res; + } + mCaptureStreamId = NO_STREAM; + } + } + + if (mCaptureStreamId == NO_STREAM) { + // Create stream for HAL production + res = device->createStream(mCaptureWindow, + params.pictureWidth, params.pictureHeight, + HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0], + &mCaptureStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Can't create output stream for capture: " + "%s (%d)", __FUNCTION__, mId, + strerror(-res), res); + return res; + } + + } 
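+ // At this point either the existing BLOB stream was kept (picture dimensions
+ // unchanged) or a new one sized to ANDROID_JPEG_MAX_SIZE was created above.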
+ return OK; +} + +status_t JpegProcessor::deleteStream() { + ATRACE_CALL(); + + Mutex::Autolock l(mInputMutex); + + if (mCaptureStreamId != NO_STREAM) { + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + device->deleteStream(mCaptureStreamId); + + mCaptureHeap.clear(); + mCaptureWindow.clear(); + mCaptureConsumer.clear(); + + mCaptureStreamId = NO_STREAM; + } + return OK; +} + +int JpegProcessor::getStreamId() const { + Mutex::Autolock l(mInputMutex); + return mCaptureStreamId; +} + +void JpegProcessor::dump(int /*fd*/, const Vector& /*args*/) const { +} + +bool JpegProcessor::threadLoop() { + status_t res; + + { + Mutex::Autolock l(mInputMutex); + while (!mCaptureAvailable) { + res = mCaptureAvailableSignal.waitRelative(mInputMutex, + kWaitDuration); + if (res == TIMED_OUT) return true; + } + mCaptureAvailable = false; + } + + do { + res = processNewCapture(); + } while (res == OK); + + return true; +} + +status_t JpegProcessor::processNewCapture() { + ATRACE_CALL(); + status_t res; + sp captureHeap; + + CpuConsumer::LockedBuffer imgBuffer; + + res = mCaptureConsumer->lockNextBuffer(&imgBuffer); + if (res != OK) { + if (res != BAD_VALUE) { + ALOGE("%s: Camera %d: Error receiving still image buffer: " + "%s (%d)", __FUNCTION__, + mId, strerror(-res), res); + } + return res; + } + + ALOGV("%s: Camera %d: Still capture available", __FUNCTION__, + mId); + + if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) { + ALOGE("%s: Camera %d: Unexpected format for still image: " + "%x, expected %x", __FUNCTION__, mId, + imgBuffer.format, + HAL_PIXEL_FORMAT_BLOB); + mCaptureConsumer->unlockBuffer(imgBuffer); + return OK; + } + + // Find size of JPEG image + size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width); + if (jpegSize == 0) { // failed to find size, default to whole buffer + jpegSize = imgBuffer.width; + } + size_t heapSize = mCaptureHeap->getSize(); + if (jpegSize > heapSize) { + ALOGW("%s: JPEG image is larger than expected, truncating " + "(got %d, expected at most %d bytes)", + __FUNCTION__, jpegSize, heapSize); + jpegSize = heapSize; + } + + // TODO: Optimize this to avoid memcopy + sp captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize); + void* captureMemory = mCaptureHeap->getBase(); + memcpy(captureMemory, imgBuffer.data, jpegSize); + + mCaptureConsumer->unlockBuffer(imgBuffer); + + sp sequencer = mSequencer.promote(); + if (sequencer != 0) { + sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer); + } + + return OK; +} + +/* + * JPEG FILE FORMAT OVERVIEW. + * http://www.jpeg.org/public/jfif.pdf + * (JPEG is the image compression algorithm, actual file format is called JFIF) + * + * "Markers" are 2-byte patterns used to distinguish parts of JFIF files. The + * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE + * (inclusive). Because every marker begins with the same byte, they are + * referred to by the second byte's value. + * + * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8. + * Following it, "segment" sections begin with other markers, followed by a + * 2-byte length (in network byte order), then the segment data. + * + * For our purposes we will ignore the data, and just use the length to skip to + * the next segment. This is necessary because the data inside segments are + * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from + * naievely scanning until the end. 
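+ *
+ * Worked example: a stream beginning FF D8 FF E0 00 10 <14 bytes of APP0
+ * data> FF DB ... starts with SOI (FF D8), then an APP0 segment whose 2-byte
+ * length 0x0010 covers the length field itself plus 14 payload bytes; the
+ * scanner below skips it by advancing length (16) + MARKER_LENGTH (2) bytes
+ * past the FF E0 marker, landing on the next marker (FF DB).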
+ * + * After all the segments are processed, the jpeg compressed image stream begins. + * This can be considered an opaque format with one requirement: all 0xFF bytes + * in this stream must be followed with a 0x00 byte. This prevents any of the + * image data to be interpreted as a segment. The only exception to this is at + * the end of the image stream there is an End of Image (EOI) marker, which is + * 0xFF followed by a non-zero (0xD9) byte. + */ + +const uint8_t MARK = 0xFF; // First byte of marker +const uint8_t SOI = 0xD8; // Start of Image +const uint8_t EOI = 0xD9; // End of Image +const size_t MARKER_LENGTH = 2; // length of a marker + +#pragma pack(push) +#pragma pack(1) +typedef struct segment { + uint8_t marker[MARKER_LENGTH]; + uint16_t length; +} segment_t; +#pragma pack(pop) + +/* HELPER FUNCTIONS */ + +// check for Start of Image marker +bool checkJpegStart(uint8_t* buf) { + return buf[0] == MARK && buf[1] == SOI; +} +// check for End of Image marker +bool checkJpegEnd(uint8_t *buf) { + return buf[0] == MARK && buf[1] == EOI; +} +// check for arbitrary marker, returns marker type (second byte) +// returns 0 if no marker found. Note: 0x00 is not a valid marker type +uint8_t checkJpegMarker(uint8_t *buf) { + if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) { + return buf[1]; + } + return 0; +} + +// Return the size of the JPEG, 0 indicates failure +size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) { + size_t size; + + // First check for JPEG transport header at the end of the buffer + uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob)); + struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header); + if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) { + size = blob->jpeg_size; + if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) { + // Verify SOI and EOI markers + size_t offset = size - MARKER_LENGTH; + uint8_t *end = jpegBuffer + offset; + if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) { + ALOGV("Found JPEG transport header, img size %d", size); + return size; + } else { + ALOGW("Found JPEG transport header with bad Image Start/End"); + } + } else { + ALOGW("Found JPEG transport header with bad size %d", size); + } + } + + // Check Start of Image + if ( !checkJpegStart(jpegBuffer) ) { + ALOGE("Could not find start of JPEG marker"); + return 0; + } + + // Read JFIF segment markers, skip over segment data + size = 0; + while (size <= maxSize - MARKER_LENGTH) { + segment_t *segment = (segment_t*)(jpegBuffer + size); + uint8_t type = checkJpegMarker(segment->marker); + if (type == 0) { // invalid marker, no more segments, begin JPEG data + ALOGV("JPEG stream found beginning at offset %d", size); + break; + } + if (type == EOI || size > maxSize - sizeof(segment_t)) { + ALOGE("Got premature End before JPEG data, offset %d", size); + return 0; + } + size_t length = ntohs(segment->length); + ALOGV("JFIF Segment, type %x length %x", type, length); + size += length + MARKER_LENGTH; + } + + // Find End of Image + // Scan JPEG buffer until End of Image (EOI) + bool foundEnd = false; + for ( ; size <= maxSize - MARKER_LENGTH; size++) { + if ( checkJpegEnd(jpegBuffer + size) ) { + foundEnd = true; + size += MARKER_LENGTH; + break; + } + } + if (!foundEnd) { + ALOGE("Could not find end of JPEG marker"); + return 0; + } + + if (size > maxSize) { + ALOGW("JPEG size %d too large, reducing to maxSize %d", size, maxSize); + size = maxSize; + } + ALOGV("Final JPEG size %d", size); + return size; +} + +}; // 
namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.h b/services/camera/libcameraservice/api1/client2/JpegProcessor.h new file mode 100644 index 0000000..b2c05df --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.h @@ -0,0 +1,87 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_JPEGPROCESSOR_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_JPEGPROCESSOR_H + +#include +#include +#include +#include +#include +#include + +#include "camera/CameraMetadata.h" + +namespace android { + +class Camera2Client; +class CameraDeviceBase; +class MemoryHeapBase; + +namespace camera2 { + +class CaptureSequencer; +class Parameters; + +/*** + * Still image capture output image processing + */ +class JpegProcessor: + public Thread, public CpuConsumer::FrameAvailableListener { + public: + JpegProcessor(sp client, wp sequencer); + ~JpegProcessor(); + + // CpuConsumer listener implementation + void onFrameAvailable(); + + status_t updateStream(const Parameters ¶ms); + status_t deleteStream(); + int getStreamId() const; + + void dump(int fd, const Vector& args) const; + private: + static const nsecs_t kWaitDuration = 10000000; // 10 ms + wp mDevice; + wp mSequencer; + int mId; + + mutable Mutex mInputMutex; + bool mCaptureAvailable; + Condition mCaptureAvailableSignal; + + enum { + NO_STREAM = -1 + }; + + int mCaptureStreamId; + sp mCaptureConsumer; + sp mCaptureWindow; + sp mCaptureHeap; + + virtual bool threadLoop(); + + status_t processNewCapture(); + size_t findJpegSize(uint8_t* jpegBuffer, size_t maxSize); + +}; + + +}; //namespace camera2 +}; //namespace android + +#endif diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp new file mode 100644 index 0000000..0459866 --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp @@ -0,0 +1,2645 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera2-Parameters" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include + +#include +#include +#include + +#include "Parameters.h" +#include "system/camera.h" + +namespace android { +namespace camera2 { + +Parameters::Parameters(int cameraId, + int cameraFacing) : + cameraId(cameraId), + cameraFacing(cameraFacing), + info(NULL) { +} + +Parameters::~Parameters() { +} + +status_t Parameters::initialize(const CameraMetadata *info) { + status_t res; + + if (info->entryCount() == 0) { + ALOGE("%s: No static information provided!", __FUNCTION__); + return BAD_VALUE; + } + Parameters::info = info; + + res = buildFastInfo(); + if (res != OK) return res; + + res = buildQuirks(); + if (res != OK) return res; + + camera_metadata_ro_entry_t availableProcessedSizes = + staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 2); + if (!availableProcessedSizes.count) return NO_INIT; + + // TODO: Pick more intelligently + previewWidth = availableProcessedSizes.data.i32[0]; + previewHeight = availableProcessedSizes.data.i32[1]; + videoWidth = previewWidth; + videoHeight = previewHeight; + + params.setPreviewSize(previewWidth, previewHeight); + params.setVideoSize(videoWidth, videoHeight); + params.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, + String8::format("%dx%d", + previewWidth, previewHeight)); + { + String8 supportedPreviewSizes; + for (size_t i=0; i < availableProcessedSizes.count; i += 2) { + if (i != 0) supportedPreviewSizes += ","; + supportedPreviewSizes += String8::format("%dx%d", + availableProcessedSizes.data.i32[i], + availableProcessedSizes.data.i32[i+1]); + } + params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, + supportedPreviewSizes); + params.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, + supportedPreviewSizes); + } + + camera_metadata_ro_entry_t availableFpsRanges = + staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2); + if (!availableFpsRanges.count) return NO_INIT; + + previewFpsRange[0] = availableFpsRanges.data.i32[0]; + previewFpsRange[1] = availableFpsRanges.data.i32[1]; + + params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, + String8::format("%d,%d", + previewFpsRange[0] * kFpsToApiScale, + previewFpsRange[1] * kFpsToApiScale)); + + { + String8 supportedPreviewFpsRange; + for (size_t i=0; i < availableFpsRanges.count; i += 2) { + if (i != 0) supportedPreviewFpsRange += ","; + supportedPreviewFpsRange += String8::format("(%d,%d)", + availableFpsRanges.data.i32[i] * kFpsToApiScale, + availableFpsRanges.data.i32[i+1] * kFpsToApiScale); + } + params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, + supportedPreviewFpsRange); + } + + previewFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; + params.set(CameraParameters::KEY_PREVIEW_FORMAT, + formatEnumToString(previewFormat)); // NV21 + + previewTransform = degToTransform(0, + cameraFacing == CAMERA_FACING_FRONT); + + camera_metadata_ro_entry_t availableFormats = + staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS); + + { + String8 supportedPreviewFormats; + bool addComma = false; + for (size_t i=0; i < availableFormats.count; i++) { + if (addComma) supportedPreviewFormats += ","; + addComma = true; + switch (availableFormats.data.i32[i]) { + case HAL_PIXEL_FORMAT_YCbCr_422_SP: + supportedPreviewFormats += + CameraParameters::PIXEL_FORMAT_YUV422SP; + break; + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + supportedPreviewFormats += + CameraParameters::PIXEL_FORMAT_YUV420SP; + break; + case HAL_PIXEL_FORMAT_YCbCr_422_I: + 
supportedPreviewFormats += + CameraParameters::PIXEL_FORMAT_YUV422I; + break; + case HAL_PIXEL_FORMAT_YV12: + supportedPreviewFormats += + CameraParameters::PIXEL_FORMAT_YUV420P; + break; + case HAL_PIXEL_FORMAT_RGB_565: + supportedPreviewFormats += + CameraParameters::PIXEL_FORMAT_RGB565; + break; + case HAL_PIXEL_FORMAT_RGBA_8888: + supportedPreviewFormats += + CameraParameters::PIXEL_FORMAT_RGBA8888; + break; + case HAL_PIXEL_FORMAT_YCbCr_420_888: + // Flexible YUV allows both YV12 and NV21 + supportedPreviewFormats += + CameraParameters::PIXEL_FORMAT_YUV420P; + supportedPreviewFormats += ","; + supportedPreviewFormats += + CameraParameters::PIXEL_FORMAT_YUV420SP; + break; + // Not advertizing JPEG, RAW_SENSOR, etc, for preview formats + case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: + case HAL_PIXEL_FORMAT_RAW_SENSOR: + case HAL_PIXEL_FORMAT_BLOB: + addComma = false; + break; + + default: + ALOGW("%s: Camera %d: Unknown preview format: %x", + __FUNCTION__, cameraId, availableFormats.data.i32[i]); + addComma = false; + break; + } + } + params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, + supportedPreviewFormats); + } + + // PREVIEW_FRAME_RATE / SUPPORTED_PREVIEW_FRAME_RATES are deprecated, but + // still have to do something sane for them + + // NOTE: Not scaled like FPS range values are. + previewFps = fpsFromRange(previewFpsRange[0], previewFpsRange[1]); + params.set(CameraParameters::KEY_PREVIEW_FRAME_RATE, + previewFps); + + { + SortedVector sortedPreviewFrameRates; + + String8 supportedPreviewFrameRates; + for (size_t i=0; i < availableFpsRanges.count; i += 2) { + // from the [min, max] fps range use the max value + int fps = fpsFromRange(availableFpsRanges.data.i32[i], + availableFpsRanges.data.i32[i+1]); + + // de-dupe frame rates + if (sortedPreviewFrameRates.indexOf(fps) == NAME_NOT_FOUND) { + sortedPreviewFrameRates.add(fps); + } + else { + continue; + } + + if (sortedPreviewFrameRates.size() > 1) { + supportedPreviewFrameRates += ","; + } + + supportedPreviewFrameRates += String8::format("%d", + fps); + + ALOGV("%s: Supported preview frame rates: %s", + __FUNCTION__, supportedPreviewFrameRates.string()); + } + params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, + supportedPreviewFrameRates); + } + + camera_metadata_ro_entry_t availableJpegSizes = + staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 2); + if (!availableJpegSizes.count) return NO_INIT; + + // TODO: Pick maximum + pictureWidth = availableJpegSizes.data.i32[0]; + pictureHeight = availableJpegSizes.data.i32[1]; + + params.setPictureSize(pictureWidth, + pictureHeight); + + { + String8 supportedPictureSizes; + for (size_t i=0; i < availableJpegSizes.count; i += 2) { + if (i != 0) supportedPictureSizes += ","; + supportedPictureSizes += String8::format("%dx%d", + availableJpegSizes.data.i32[i], + availableJpegSizes.data.i32[i+1]); + } + params.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, + supportedPictureSizes); + } + + params.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG); + params.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, + CameraParameters::PIXEL_FORMAT_JPEG); + + camera_metadata_ro_entry_t availableJpegThumbnailSizes = + staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 4); + if (!availableJpegThumbnailSizes.count) return NO_INIT; + + // TODO: Pick default thumbnail size sensibly + jpegThumbSize[0] = availableJpegThumbnailSizes.data.i32[0]; + jpegThumbSize[1] = availableJpegThumbnailSizes.data.i32[1]; + + params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, + 
jpegThumbSize[0]); + params.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, + jpegThumbSize[1]); + + { + String8 supportedJpegThumbSizes; + for (size_t i=0; i < availableJpegThumbnailSizes.count; i += 2) { + if (i != 0) supportedJpegThumbSizes += ","; + supportedJpegThumbSizes += String8::format("%dx%d", + availableJpegThumbnailSizes.data.i32[i], + availableJpegThumbnailSizes.data.i32[i+1]); + } + params.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, + supportedJpegThumbSizes); + } + + jpegThumbQuality = 90; + params.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, + jpegThumbQuality); + jpegQuality = 90; + params.set(CameraParameters::KEY_JPEG_QUALITY, + jpegQuality); + jpegRotation = 0; + params.set(CameraParameters::KEY_ROTATION, + jpegRotation); + + gpsEnabled = false; + gpsCoordinates[0] = 0.0; + gpsCoordinates[1] = 0.0; + gpsCoordinates[2] = 0.0; + gpsTimestamp = 0; + gpsProcessingMethod = "unknown"; + // GPS fields in CameraParameters are not set by implementation + + wbMode = ANDROID_CONTROL_AWB_MODE_AUTO; + params.set(CameraParameters::KEY_WHITE_BALANCE, + CameraParameters::WHITE_BALANCE_AUTO); + + camera_metadata_ro_entry_t availableWhiteBalanceModes = + staticInfo(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 0, 0, false); + if (!availableWhiteBalanceModes.count) { + params.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, + CameraParameters::WHITE_BALANCE_AUTO); + } else { + String8 supportedWhiteBalance; + bool addComma = false; + for (size_t i=0; i < availableWhiteBalanceModes.count; i++) { + if (addComma) supportedWhiteBalance += ","; + addComma = true; + switch (availableWhiteBalanceModes.data.u8[i]) { + case ANDROID_CONTROL_AWB_MODE_AUTO: + supportedWhiteBalance += + CameraParameters::WHITE_BALANCE_AUTO; + break; + case ANDROID_CONTROL_AWB_MODE_INCANDESCENT: + supportedWhiteBalance += + CameraParameters::WHITE_BALANCE_INCANDESCENT; + break; + case ANDROID_CONTROL_AWB_MODE_FLUORESCENT: + supportedWhiteBalance += + CameraParameters::WHITE_BALANCE_FLUORESCENT; + break; + case ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT: + supportedWhiteBalance += + CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT; + break; + case ANDROID_CONTROL_AWB_MODE_DAYLIGHT: + supportedWhiteBalance += + CameraParameters::WHITE_BALANCE_DAYLIGHT; + break; + case ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT: + supportedWhiteBalance += + CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT; + break; + case ANDROID_CONTROL_AWB_MODE_TWILIGHT: + supportedWhiteBalance += + CameraParameters::WHITE_BALANCE_TWILIGHT; + break; + case ANDROID_CONTROL_AWB_MODE_SHADE: + supportedWhiteBalance += + CameraParameters::WHITE_BALANCE_SHADE; + break; + // Skipping values not mappable to v1 API + case ANDROID_CONTROL_AWB_MODE_OFF: + addComma = false; + break; + default: + ALOGW("%s: Camera %d: Unknown white balance value: %d", + __FUNCTION__, cameraId, + availableWhiteBalanceModes.data.u8[i]); + addComma = false; + break; + } + } + params.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, + supportedWhiteBalance); + } + + effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; + params.set(CameraParameters::KEY_EFFECT, + CameraParameters::EFFECT_NONE); + + camera_metadata_ro_entry_t availableEffects = + staticInfo(ANDROID_CONTROL_AVAILABLE_EFFECTS, 0, 0, false); + if (!availableEffects.count) { + params.set(CameraParameters::KEY_SUPPORTED_EFFECTS, + CameraParameters::EFFECT_NONE); + } else { + String8 supportedEffects; + bool addComma = false; + for (size_t i=0; i < availableEffects.count; i++) { + if (addComma) supportedEffects += ","; + 
addComma = true; + switch (availableEffects.data.u8[i]) { + case ANDROID_CONTROL_EFFECT_MODE_OFF: + supportedEffects += + CameraParameters::EFFECT_NONE; + break; + case ANDROID_CONTROL_EFFECT_MODE_MONO: + supportedEffects += + CameraParameters::EFFECT_MONO; + break; + case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE: + supportedEffects += + CameraParameters::EFFECT_NEGATIVE; + break; + case ANDROID_CONTROL_EFFECT_MODE_SOLARIZE: + supportedEffects += + CameraParameters::EFFECT_SOLARIZE; + break; + case ANDROID_CONTROL_EFFECT_MODE_SEPIA: + supportedEffects += + CameraParameters::EFFECT_SEPIA; + break; + case ANDROID_CONTROL_EFFECT_MODE_POSTERIZE: + supportedEffects += + CameraParameters::EFFECT_POSTERIZE; + break; + case ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD: + supportedEffects += + CameraParameters::EFFECT_WHITEBOARD; + break; + case ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD: + supportedEffects += + CameraParameters::EFFECT_BLACKBOARD; + break; + case ANDROID_CONTROL_EFFECT_MODE_AQUA: + supportedEffects += + CameraParameters::EFFECT_AQUA; + break; + default: + ALOGW("%s: Camera %d: Unknown effect value: %d", + __FUNCTION__, cameraId, availableEffects.data.u8[i]); + addComma = false; + break; + } + } + params.set(CameraParameters::KEY_SUPPORTED_EFFECTS, supportedEffects); + } + + antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; + params.set(CameraParameters::KEY_ANTIBANDING, + CameraParameters::ANTIBANDING_AUTO); + + camera_metadata_ro_entry_t availableAntibandingModes = + staticInfo(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 0, 0, false); + if (!availableAntibandingModes.count) { + params.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, + CameraParameters::ANTIBANDING_OFF); + } else { + String8 supportedAntibanding; + bool addComma = false; + for (size_t i=0; i < availableAntibandingModes.count; i++) { + if (addComma) supportedAntibanding += ","; + addComma = true; + switch (availableAntibandingModes.data.u8[i]) { + case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF: + supportedAntibanding += + CameraParameters::ANTIBANDING_OFF; + break; + case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ: + supportedAntibanding += + CameraParameters::ANTIBANDING_50HZ; + break; + case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ: + supportedAntibanding += + CameraParameters::ANTIBANDING_60HZ; + break; + case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO: + supportedAntibanding += + CameraParameters::ANTIBANDING_AUTO; + break; + default: + ALOGW("%s: Camera %d: Unknown antibanding value: %d", + __FUNCTION__, cameraId, + availableAntibandingModes.data.u8[i]); + addComma = false; + break; + } + } + params.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, + supportedAntibanding); + } + + sceneMode = ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED; + params.set(CameraParameters::KEY_SCENE_MODE, + CameraParameters::SCENE_MODE_AUTO); + + camera_metadata_ro_entry_t availableSceneModes = + staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 0, 0, false); + if (!availableSceneModes.count) { + params.remove(CameraParameters::KEY_SCENE_MODE); + } else { + String8 supportedSceneModes(CameraParameters::SCENE_MODE_AUTO); + bool addComma = true; + bool noSceneModes = false; + for (size_t i=0; i < availableSceneModes.count; i++) { + if (addComma) supportedSceneModes += ","; + addComma = true; + switch (availableSceneModes.data.u8[i]) { + case ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED: + noSceneModes = true; + break; + case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY: + // Not in old API + addComma = false; + break; + case 
ANDROID_CONTROL_SCENE_MODE_ACTION: + supportedSceneModes += + CameraParameters::SCENE_MODE_ACTION; + break; + case ANDROID_CONTROL_SCENE_MODE_PORTRAIT: + supportedSceneModes += + CameraParameters::SCENE_MODE_PORTRAIT; + break; + case ANDROID_CONTROL_SCENE_MODE_LANDSCAPE: + supportedSceneModes += + CameraParameters::SCENE_MODE_LANDSCAPE; + break; + case ANDROID_CONTROL_SCENE_MODE_NIGHT: + supportedSceneModes += + CameraParameters::SCENE_MODE_NIGHT; + break; + case ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT: + supportedSceneModes += + CameraParameters::SCENE_MODE_NIGHT_PORTRAIT; + break; + case ANDROID_CONTROL_SCENE_MODE_THEATRE: + supportedSceneModes += + CameraParameters::SCENE_MODE_THEATRE; + break; + case ANDROID_CONTROL_SCENE_MODE_BEACH: + supportedSceneModes += + CameraParameters::SCENE_MODE_BEACH; + break; + case ANDROID_CONTROL_SCENE_MODE_SNOW: + supportedSceneModes += + CameraParameters::SCENE_MODE_SNOW; + break; + case ANDROID_CONTROL_SCENE_MODE_SUNSET: + supportedSceneModes += + CameraParameters::SCENE_MODE_SUNSET; + break; + case ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO: + supportedSceneModes += + CameraParameters::SCENE_MODE_STEADYPHOTO; + break; + case ANDROID_CONTROL_SCENE_MODE_FIREWORKS: + supportedSceneModes += + CameraParameters::SCENE_MODE_FIREWORKS; + break; + case ANDROID_CONTROL_SCENE_MODE_SPORTS: + supportedSceneModes += + CameraParameters::SCENE_MODE_SPORTS; + break; + case ANDROID_CONTROL_SCENE_MODE_PARTY: + supportedSceneModes += + CameraParameters::SCENE_MODE_PARTY; + break; + case ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT: + supportedSceneModes += + CameraParameters::SCENE_MODE_CANDLELIGHT; + break; + case ANDROID_CONTROL_SCENE_MODE_BARCODE: + supportedSceneModes += + CameraParameters::SCENE_MODE_BARCODE; + break; + default: + ALOGW("%s: Camera %d: Unknown scene mode value: %d", + __FUNCTION__, cameraId, + availableSceneModes.data.u8[i]); + addComma = false; + break; + } + } + if (!noSceneModes) { + params.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, + supportedSceneModes); + } else { + params.remove(CameraParameters::KEY_SCENE_MODE); + } + } + + bool isFlashAvailable = false; + camera_metadata_ro_entry_t flashAvailable = + staticInfo(ANDROID_FLASH_INFO_AVAILABLE, 0, 1, false); + if (flashAvailable.count) { + isFlashAvailable = flashAvailable.data.u8[0]; + } + + camera_metadata_ro_entry_t availableAeModes = + staticInfo(ANDROID_CONTROL_AE_AVAILABLE_MODES, 0, 0, false); + + if (isFlashAvailable) { + flashMode = Parameters::FLASH_MODE_OFF; + params.set(CameraParameters::KEY_FLASH_MODE, + CameraParameters::FLASH_MODE_OFF); + + String8 supportedFlashModes(CameraParameters::FLASH_MODE_OFF); + supportedFlashModes = supportedFlashModes + + "," + CameraParameters::FLASH_MODE_AUTO + + "," + CameraParameters::FLASH_MODE_ON + + "," + CameraParameters::FLASH_MODE_TORCH; + for (size_t i=0; i < availableAeModes.count; i++) { + if (availableAeModes.data.u8[i] == + ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) { + supportedFlashModes = supportedFlashModes + "," + + CameraParameters::FLASH_MODE_RED_EYE; + break; + } + } + params.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, + supportedFlashModes); + } else { + flashMode = Parameters::FLASH_MODE_OFF; + params.set(CameraParameters::KEY_FLASH_MODE, + CameraParameters::FLASH_MODE_OFF); + params.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, + CameraParameters::FLASH_MODE_OFF); + } + + camera_metadata_ro_entry_t minFocusDistance = + staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 0, 1, false); + + camera_metadata_ro_entry_t 
availableAfModes = + staticInfo(ANDROID_CONTROL_AF_AVAILABLE_MODES, 0, 0, false); + + if (!minFocusDistance.count || minFocusDistance.data.f[0] == 0) { + // Fixed-focus lens + focusMode = Parameters::FOCUS_MODE_FIXED; + params.set(CameraParameters::KEY_FOCUS_MODE, + CameraParameters::FOCUS_MODE_FIXED); + params.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, + CameraParameters::FOCUS_MODE_FIXED); + } else { + focusMode = Parameters::FOCUS_MODE_AUTO; + params.set(CameraParameters::KEY_FOCUS_MODE, + CameraParameters::FOCUS_MODE_AUTO); + String8 supportedFocusModes(CameraParameters::FOCUS_MODE_INFINITY); + bool addComma = true; + + for (size_t i=0; i < availableAfModes.count; i++) { + if (addComma) supportedFocusModes += ","; + addComma = true; + switch (availableAfModes.data.u8[i]) { + case ANDROID_CONTROL_AF_MODE_AUTO: + supportedFocusModes += + CameraParameters::FOCUS_MODE_AUTO; + break; + case ANDROID_CONTROL_AF_MODE_MACRO: + supportedFocusModes += + CameraParameters::FOCUS_MODE_MACRO; + break; + case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: + supportedFocusModes += + CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO; + break; + case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: + supportedFocusModes += + CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE; + break; + case ANDROID_CONTROL_AF_MODE_EDOF: + supportedFocusModes += + CameraParameters::FOCUS_MODE_EDOF; + break; + // Not supported in old API + case ANDROID_CONTROL_AF_MODE_OFF: + addComma = false; + break; + default: + ALOGW("%s: Camera %d: Unknown AF mode value: %d", + __FUNCTION__, cameraId, availableAfModes.data.u8[i]); + addComma = false; + break; + } + } + params.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, + supportedFocusModes); + } + focusState = ANDROID_CONTROL_AF_STATE_INACTIVE; + shadowFocusMode = FOCUS_MODE_INVALID; + + camera_metadata_ro_entry_t max3aRegions = + staticInfo(ANDROID_CONTROL_MAX_REGIONS, 1, 1); + if (!max3aRegions.count) return NO_INIT; + + int32_t maxNumFocusAreas = 0; + if (focusMode != Parameters::FOCUS_MODE_FIXED) { + maxNumFocusAreas = max3aRegions.data.i32[0]; + } + params.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, maxNumFocusAreas); + params.set(CameraParameters::KEY_FOCUS_AREAS, + "(0,0,0,0,0)"); + focusingAreas.clear(); + focusingAreas.add(Parameters::Area(0,0,0,0,0)); + + camera_metadata_ro_entry_t availableFocalLengths = + staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 0, 0, false); + if (!availableFocalLengths.count) return NO_INIT; + + float minFocalLength = availableFocalLengths.data.f[0]; + params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, minFocalLength); + + float horizFov, vertFov; + res = calculatePictureFovs(&horizFov, &vertFov); + if (res != OK) { + ALOGE("%s: Can't calculate field of views!", __FUNCTION__); + return res; + } + + params.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, horizFov); + params.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, vertFov); + + exposureCompensation = 0; + params.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, + exposureCompensation); + + camera_metadata_ro_entry_t exposureCompensationRange = + staticInfo(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 2, 2); + if (!exposureCompensationRange.count) return NO_INIT; + + params.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, + exposureCompensationRange.data.i32[1]); + params.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, + exposureCompensationRange.data.i32[0]); + + camera_metadata_ro_entry_t exposureCompensationStep = + staticInfo(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1, 1); + 
if (!exposureCompensationStep.count) return NO_INIT; + + params.setFloat(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, + (float)exposureCompensationStep.data.r[0].numerator / + exposureCompensationStep.data.r[0].denominator); + + autoExposureLock = false; + params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, + CameraParameters::FALSE); + params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, + CameraParameters::TRUE); + + autoWhiteBalanceLock = false; + params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, + CameraParameters::FALSE); + params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, + CameraParameters::TRUE); + + meteringAreas.add(Parameters::Area(0, 0, 0, 0, 0)); + params.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, + max3aRegions.data.i32[0]); + params.set(CameraParameters::KEY_METERING_AREAS, + "(0,0,0,0,0)"); + + zoom = 0; + params.set(CameraParameters::KEY_ZOOM, zoom); + params.set(CameraParameters::KEY_MAX_ZOOM, NUM_ZOOM_STEPS - 1); + + camera_metadata_ro_entry_t maxDigitalZoom = + staticInfo(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, /*minCount*/1, /*maxCount*/1); + if (!maxDigitalZoom.count) return NO_INIT; + + { + String8 zoomRatios; + float zoom = 1.f; + float zoomIncrement = (maxDigitalZoom.data.f[0] - zoom) / + (NUM_ZOOM_STEPS-1); + bool addComma = false; + for (size_t i=0; i < NUM_ZOOM_STEPS; i++) { + if (addComma) zoomRatios += ","; + addComma = true; + zoomRatios += String8::format("%d", static_cast(zoom * 100)); + zoom += zoomIncrement; + } + params.set(CameraParameters::KEY_ZOOM_RATIOS, zoomRatios); + } + + params.set(CameraParameters::KEY_ZOOM_SUPPORTED, + CameraParameters::TRUE); + params.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, + CameraParameters::FALSE); + + params.set(CameraParameters::KEY_FOCUS_DISTANCES, + "Infinity,Infinity,Infinity"); + + params.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, + fastInfo.maxFaces); + params.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, + 0); + + params.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, + CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE); + + recordingHint = false; + params.set(CameraParameters::KEY_RECORDING_HINT, + CameraParameters::FALSE); + + params.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, + CameraParameters::TRUE); + + videoStabilization = false; + params.set(CameraParameters::KEY_VIDEO_STABILIZATION, + CameraParameters::FALSE); + + camera_metadata_ro_entry_t availableVideoStabilizationModes = + staticInfo(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 0, 0, + false); + + if (availableVideoStabilizationModes.count > 1) { + params.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, + CameraParameters::TRUE); + } else { + params.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, + CameraParameters::FALSE); + } + + // Set up initial state for non-Camera.Parameters state variables + + storeMetadataInBuffers = true; + playShutterSound = true; + enableFaceDetect = false; + + enableFocusMoveMessages = false; + afTriggerCounter = 1; + currentAfTriggerId = -1; + afInMotion = false; + + precaptureTriggerCounter = 1; + + previewCallbackFlags = 0; + previewCallbackOneShot = false; + previewCallbackSurface = false; + + camera_metadata_ro_entry_t supportedHardwareLevel = + staticInfo(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, 0, 0, false); + if (!supportedHardwareLevel.count || (supportedHardwareLevel.data.u8[0] == + ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED)) { + ALOGI("Camera %d: ZSL mode disabled for limited mode HALs", cameraId); + zslMode = 
false; + } else { + char value[PROPERTY_VALUE_MAX]; + property_get("camera.disable_zsl_mode", value, "0"); + if (!strcmp(value,"1")) { + ALOGI("Camera %d: Disabling ZSL mode", cameraId); + zslMode = false; + } else { + zslMode = true; + } + } + + lightFx = LIGHTFX_NONE; + + state = STOPPED; + + paramsFlattened = params.flatten(); + + return OK; +} + +String8 Parameters::get() const { + return paramsFlattened; +} + +status_t Parameters::buildFastInfo() { + + camera_metadata_ro_entry_t activeArraySize = + staticInfo(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 2, 4); + if (!activeArraySize.count) return NO_INIT; + int32_t arrayWidth; + int32_t arrayHeight; + if (activeArraySize.count == 2) { + ALOGW("%s: Camera %d: activeArraySize is missing xmin/ymin!", + __FUNCTION__, cameraId); + arrayWidth = activeArraySize.data.i32[0]; + arrayHeight = activeArraySize.data.i32[1]; + } else if (activeArraySize.count == 4) { + arrayWidth = activeArraySize.data.i32[2]; + arrayHeight = activeArraySize.data.i32[3]; + } else return NO_INIT; + + camera_metadata_ro_entry_t availableFaceDetectModes = + staticInfo(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 0, 0, + false); + + uint8_t bestFaceDetectMode = + ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; + for (size_t i = 0 ; i < availableFaceDetectModes.count; i++) { + switch (availableFaceDetectModes.data.u8[i]) { + case ANDROID_STATISTICS_FACE_DETECT_MODE_OFF: + break; + case ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE: + if (bestFaceDetectMode != + ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) { + bestFaceDetectMode = + ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE; + } + break; + case ANDROID_STATISTICS_FACE_DETECT_MODE_FULL: + bestFaceDetectMode = + ANDROID_STATISTICS_FACE_DETECT_MODE_FULL; + break; + default: + ALOGE("%s: Camera %d: Unknown face detect mode %d:", + __FUNCTION__, cameraId, + availableFaceDetectModes.data.u8[i]); + return NO_INIT; + } + } + + int32_t maxFaces = 0; + camera_metadata_ro_entry_t maxFacesDetected = + staticInfo(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 0, 1, false); + if (maxFacesDetected.count) { + maxFaces = maxFacesDetected.data.i32[0]; + } + + camera_metadata_ro_entry_t availableSceneModes = + staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 0, 0, false); + camera_metadata_ro_entry_t sceneModeOverrides = + staticInfo(ANDROID_CONTROL_SCENE_MODE_OVERRIDES, 0, 0, false); + camera_metadata_ro_entry_t minFocusDistance = + staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 0, 0, false); + bool fixedLens = minFocusDistance.count == 0 || + minFocusDistance.data.f[0] == 0; + + camera_metadata_ro_entry_t availableFocalLengths = + staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS); + if (!availableFocalLengths.count) return NO_INIT; + + camera_metadata_ro_entry_t availableFormats = + staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS); + if (!availableFormats.count) return NO_INIT; + + + if (sceneModeOverrides.count > 0) { + // sceneModeOverrides is defined to have 3 entries for each scene mode, + // which are AE, AWB, and AF override modes the HAL wants for that scene + // mode. 
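+ // With N available scene modes the array is therefore laid out as
+ // [AE(0), AWB(0), AF(0), AE(1), AWB(1), AF(1), ..., AE(N-1), AWB(N-1), AF(N-1)],
+ // which is how the i * kModesPerSceneMode + {0,1,2} indexing below reads it.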
+ const size_t kModesPerSceneMode = 3; + if (sceneModeOverrides.count != + availableSceneModes.count * kModesPerSceneMode) { + ALOGE("%s: Camera %d: Scene mode override list is an " + "unexpected size: %d (expected %d)", __FUNCTION__, + cameraId, sceneModeOverrides.count, + availableSceneModes.count); + return NO_INIT; + } + for (size_t i = 0; i < availableSceneModes.count; i++) { + DeviceInfo::OverrideModes modes; + uint8_t aeMode = + sceneModeOverrides.data.u8[i * kModesPerSceneMode + 0]; + switch(aeMode) { + case ANDROID_CONTROL_AE_MODE_ON: + modes.flashMode = FLASH_MODE_OFF; + break; + case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH: + modes.flashMode = FLASH_MODE_AUTO; + break; + case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH: + modes.flashMode = FLASH_MODE_ON; + break; + case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: + modes.flashMode = FLASH_MODE_RED_EYE; + break; + default: + ALOGE("%s: Unknown override AE mode: %d", __FUNCTION__, + aeMode); + modes.flashMode = FLASH_MODE_INVALID; + break; + } + modes.wbMode = + sceneModeOverrides.data.u8[i * kModesPerSceneMode + 1]; + uint8_t afMode = + sceneModeOverrides.data.u8[i * kModesPerSceneMode + 2]; + switch(afMode) { + case ANDROID_CONTROL_AF_MODE_OFF: + modes.focusMode = fixedLens ? + FOCUS_MODE_FIXED : FOCUS_MODE_INFINITY; + break; + case ANDROID_CONTROL_AF_MODE_AUTO: + case ANDROID_CONTROL_AF_MODE_MACRO: + case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: + case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: + case ANDROID_CONTROL_AF_MODE_EDOF: + modes.focusMode = static_cast(afMode); + break; + default: + ALOGE("%s: Unknown override AF mode: %d", __FUNCTION__, + afMode); + modes.focusMode = FOCUS_MODE_INVALID; + break; + } + fastInfo.sceneModeOverrides.add(availableSceneModes.data.u8[i], + modes); + } + } + + fastInfo.arrayWidth = arrayWidth; + fastInfo.arrayHeight = arrayHeight; + fastInfo.bestFaceDetectMode = bestFaceDetectMode; + fastInfo.maxFaces = maxFaces; + + // Find smallest (widest-angle) focal length to use as basis of still + // picture FOV reporting. + fastInfo.minFocalLength = availableFocalLengths.data.f[0]; + for (size_t i = 1; i < availableFocalLengths.count; i++) { + if (fastInfo.minFocalLength > availableFocalLengths.data.f[i]) { + fastInfo.minFocalLength = availableFocalLengths.data.f[i]; + } + } + + // Check if the HAL supports HAL_PIXEL_FORMAT_YCbCr_420_888 + fastInfo.useFlexibleYuv = false; + for (size_t i = 0; i < availableFormats.count; i++) { + if (availableFormats.data.i32[i] == HAL_PIXEL_FORMAT_YCbCr_420_888) { + fastInfo.useFlexibleYuv = true; + break; + } + } + ALOGV("Camera %d: Flexible YUV %s supported", + cameraId, fastInfo.useFlexibleYuv ? 
"is" : "is not"); + + return OK; +} + +status_t Parameters::buildQuirks() { + camera_metadata_ro_entry_t entry; + entry = info->find(ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO); + quirks.triggerAfWithAuto = (entry.count != 0 && entry.data.u8[0] == 1); + ALOGV_IF(quirks.triggerAfWithAuto, "Camera %d: Quirk triggerAfWithAuto enabled", + cameraId); + + entry = info->find(ANDROID_QUIRKS_USE_ZSL_FORMAT); + quirks.useZslFormat = (entry.count != 0 && entry.data.u8[0] == 1); + ALOGV_IF(quirks.useZslFormat, "Camera %d: Quirk useZslFormat enabled", + cameraId); + + entry = info->find(ANDROID_QUIRKS_METERING_CROP_REGION); + quirks.meteringCropRegion = (entry.count != 0 && entry.data.u8[0] == 1); + ALOGV_IF(quirks.meteringCropRegion, "Camera %d: Quirk meteringCropRegion" + " enabled", cameraId); + + return OK; +} + +camera_metadata_ro_entry_t Parameters::staticInfo(uint32_t tag, + size_t minCount, size_t maxCount, bool required) const { + camera_metadata_ro_entry_t entry = info->find(tag); + + if (CC_UNLIKELY( entry.count == 0 ) && required) { + const char* tagSection = get_camera_metadata_section_name(tag); + if (tagSection == NULL) tagSection = ""; + const char* tagName = get_camera_metadata_tag_name(tag); + if (tagName == NULL) tagName = ""; + + ALOGE("Error finding static metadata entry '%s.%s' (%x)", + tagSection, tagName, tag); + } else if (CC_UNLIKELY( + (minCount != 0 && entry.count < minCount) || + (maxCount != 0 && entry.count > maxCount) ) ) { + const char* tagSection = get_camera_metadata_section_name(tag); + if (tagSection == NULL) tagSection = ""; + const char* tagName = get_camera_metadata_tag_name(tag); + if (tagName == NULL) tagName = ""; + ALOGE("Malformed static metadata entry '%s.%s' (%x):" + "Expected between %d and %d values, but got %d values", + tagSection, tagName, tag, minCount, maxCount, entry.count); + } + + return entry; +} + +status_t Parameters::set(const String8& paramString) { + status_t res; + + CameraParameters newParams(paramString); + + // TODO: Currently ignoring any changes to supposedly read-only parameters + // such as supported preview sizes, etc. Should probably produce an error if + // they're changed. + + /** Extract and verify new parameters */ + + size_t i; + + Parameters validatedParams(*this); + + // PREVIEW_SIZE + newParams.getPreviewSize(&validatedParams.previewWidth, + &validatedParams.previewHeight); + + if (validatedParams.previewWidth != previewWidth || + validatedParams.previewHeight != previewHeight) { + if (state >= PREVIEW) { + ALOGE("%s: Preview size cannot be updated when preview " + "is active! 
(Currently %d x %d, requested %d x %d", + __FUNCTION__, + previewWidth, previewHeight, + validatedParams.previewWidth, validatedParams.previewHeight); + return BAD_VALUE; + } + camera_metadata_ro_entry_t availablePreviewSizes = + staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES); + for (i = 0; i < availablePreviewSizes.count; i += 2 ) { + if ((availablePreviewSizes.data.i32[i] == + validatedParams.previewWidth) && + (availablePreviewSizes.data.i32[i+1] == + validatedParams.previewHeight)) break; + } + if (i == availablePreviewSizes.count) { + ALOGE("%s: Requested preview size %d x %d is not supported", + __FUNCTION__, validatedParams.previewWidth, + validatedParams.previewHeight); + return BAD_VALUE; + } + } + + // RECORDING_HINT (always supported) + validatedParams.recordingHint = boolFromString( + newParams.get(CameraParameters::KEY_RECORDING_HINT) ); + bool recordingHintChanged = validatedParams.recordingHint != recordingHint; + ALOGV_IF(recordingHintChanged, "%s: Recording hint changed to %d", + __FUNCTION__, recordingHintChanged); + + // PREVIEW_FPS_RANGE + bool fpsRangeChanged = false; + newParams.getPreviewFpsRange(&validatedParams.previewFpsRange[0], + &validatedParams.previewFpsRange[1]); + validatedParams.previewFpsRange[0] /= kFpsToApiScale; + validatedParams.previewFpsRange[1] /= kFpsToApiScale; + + if (validatedParams.previewFpsRange[0] != previewFpsRange[0] || + validatedParams.previewFpsRange[1] != previewFpsRange[1]) { + fpsRangeChanged = true; + camera_metadata_ro_entry_t availablePreviewFpsRanges = + staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2); + for (i = 0; i < availablePreviewFpsRanges.count; i += 2) { + if ((availablePreviewFpsRanges.data.i32[i] == + validatedParams.previewFpsRange[0]) && + (availablePreviewFpsRanges.data.i32[i+1] == + validatedParams.previewFpsRange[1]) ) { + break; + } + } + if (i == availablePreviewFpsRanges.count) { + ALOGE("%s: Requested preview FPS range %d - %d is not supported", + __FUNCTION__, validatedParams.previewFpsRange[0], + validatedParams.previewFpsRange[1]); + return BAD_VALUE; + } + validatedParams.previewFps = + fpsFromRange(validatedParams.previewFpsRange[0], + validatedParams.previewFpsRange[1]); + newParams.setPreviewFrameRate(validatedParams.previewFps); + } + + // PREVIEW_FORMAT + validatedParams.previewFormat = + formatStringToEnum(newParams.getPreviewFormat()); + if (validatedParams.previewFormat != previewFormat) { + if (state >= PREVIEW) { + ALOGE("%s: Preview format cannot be updated when preview " + "is active!", __FUNCTION__); + return BAD_VALUE; + } + camera_metadata_ro_entry_t availableFormats = + staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS); + // If using flexible YUV, always support NV21/YV12. Otherwise, check + // HAL's list. + if (! (fastInfo.useFlexibleYuv && + (validatedParams.previewFormat == + HAL_PIXEL_FORMAT_YCrCb_420_SP || + validatedParams.previewFormat == + HAL_PIXEL_FORMAT_YV12) ) ) { + // Not using flexible YUV format, so check explicitly + for (i = 0; i < availableFormats.count; i++) { + if (availableFormats.data.i32[i] == + validatedParams.previewFormat) break; + } + if (i == availableFormats.count) { + ALOGE("%s: Requested preview format %s (0x%x) is not supported", + __FUNCTION__, newParams.getPreviewFormat(), + validatedParams.previewFormat); + return BAD_VALUE; + } + } + } + + // PREVIEW_FRAME_RATE + // Deprecated, only use if the preview fps range is unchanged this time. + // The single-value FPS is the same as the minimum of the range. 
+ if (!fpsRangeChanged) { + validatedParams.previewFps = newParams.getPreviewFrameRate(); + if (validatedParams.previewFps != previewFps || recordingHintChanged) { + camera_metadata_ro_entry_t availableFrameRates = + staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES); + /** + * If recording hint is set, find the range that encompasses + * previewFps with the largest min index. + * + * If recording hint is not set, find the range with previewFps + * with the smallest min index. + * + * Either way, in case of multiple ranges, break the tie by + * selecting the smaller range. + */ + int targetFps = validatedParams.previewFps; + // all ranges which have targetFps + Vector candidateRanges; + for (i = 0; i < availableFrameRates.count; i+=2) { + Range r = { + availableFrameRates.data.i32[i], + availableFrameRates.data.i32[i+1] + }; + + if (r.min <= targetFps && targetFps <= r.max) { + candidateRanges.push(r); + } + } + if (candidateRanges.isEmpty()) { + ALOGE("%s: Requested preview frame rate %d is not supported", + __FUNCTION__, validatedParams.previewFps); + return BAD_VALUE; + } + // most applicable range with targetFps + Range bestRange = candidateRanges[0]; + for (i = 1; i < candidateRanges.size(); ++i) { + Range r = candidateRanges[i]; + + // Find by largest minIndex in recording mode + if (validatedParams.recordingHint) { + if (r.min > bestRange.min) { + bestRange = r; + } + else if (r.min == bestRange.min && r.max < bestRange.max) { + bestRange = r; + } + } + // Find by smallest minIndex in preview mode + else { + if (r.min < bestRange.min) { + bestRange = r; + } + else if (r.min == bestRange.min && r.max < bestRange.max) { + bestRange = r; + } + } + } + + validatedParams.previewFpsRange[0] = + bestRange.min; + validatedParams.previewFpsRange[1] = + bestRange.max; + + ALOGV("%s: New preview FPS range: %d, %d, recordingHint = %d", + __FUNCTION__, + validatedParams.previewFpsRange[0], + validatedParams.previewFpsRange[1], + validatedParams.recordingHint); + } + newParams.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, + String8::format("%d,%d", + validatedParams.previewFpsRange[0] * kFpsToApiScale, + validatedParams.previewFpsRange[1] * kFpsToApiScale)); + + } + + // PICTURE_SIZE + newParams.getPictureSize(&validatedParams.pictureWidth, + &validatedParams.pictureHeight); + if (validatedParams.pictureWidth == pictureWidth || + validatedParams.pictureHeight == pictureHeight) { + camera_metadata_ro_entry_t availablePictureSizes = + staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_SIZES); + for (i = 0; i < availablePictureSizes.count; i+=2) { + if ((availablePictureSizes.data.i32[i] == + validatedParams.pictureWidth) && + (availablePictureSizes.data.i32[i+1] == + validatedParams.pictureHeight)) break; + } + if (i == availablePictureSizes.count) { + ALOGE("%s: Requested picture size %d x %d is not supported", + __FUNCTION__, validatedParams.pictureWidth, + validatedParams.pictureHeight); + return BAD_VALUE; + } + } + + // JPEG_THUMBNAIL_WIDTH/HEIGHT + validatedParams.jpegThumbSize[0] = + newParams.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH); + validatedParams.jpegThumbSize[1] = + newParams.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT); + if (validatedParams.jpegThumbSize[0] != jpegThumbSize[0] || + validatedParams.jpegThumbSize[1] != jpegThumbSize[1]) { + camera_metadata_ro_entry_t availableJpegThumbSizes = + staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES); + for (i = 0; i < availableJpegThumbSizes.count; i+=2) { + if ((availableJpegThumbSizes.data.i32[i] == + 
validatedParams.jpegThumbSize[0]) && + (availableJpegThumbSizes.data.i32[i+1] == + validatedParams.jpegThumbSize[1])) break; + } + if (i == availableJpegThumbSizes.count) { + ALOGE("%s: Requested JPEG thumbnail size %d x %d is not supported", + __FUNCTION__, validatedParams.jpegThumbSize[0], + validatedParams.jpegThumbSize[1]); + return BAD_VALUE; + } + } + + // JPEG_THUMBNAIL_QUALITY + int quality = newParams.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY); + // also makes sure quality fits in uint8_t + if (quality < 0 || quality > 100) { + ALOGE("%s: Requested JPEG thumbnail quality %d is not supported", + __FUNCTION__, quality); + return BAD_VALUE; + } + validatedParams.jpegThumbQuality = quality; + + // JPEG_QUALITY + quality = newParams.getInt(CameraParameters::KEY_JPEG_QUALITY); + // also makes sure quality fits in uint8_t + if (quality < 0 || quality > 100) { + ALOGE("%s: Requested JPEG quality %d is not supported", + __FUNCTION__, quality); + return BAD_VALUE; + } + validatedParams.jpegQuality = quality; + + // ROTATION + validatedParams.jpegRotation = + newParams.getInt(CameraParameters::KEY_ROTATION); + if (validatedParams.jpegRotation != 0 && + validatedParams.jpegRotation != 90 && + validatedParams.jpegRotation != 180 && + validatedParams.jpegRotation != 270) { + ALOGE("%s: Requested picture rotation angle %d is not supported", + __FUNCTION__, validatedParams.jpegRotation); + return BAD_VALUE; + } + + // GPS + + const char *gpsLatStr = + newParams.get(CameraParameters::KEY_GPS_LATITUDE); + if (gpsLatStr != NULL) { + const char *gpsLongStr = + newParams.get(CameraParameters::KEY_GPS_LONGITUDE); + const char *gpsAltitudeStr = + newParams.get(CameraParameters::KEY_GPS_ALTITUDE); + const char *gpsTimeStr = + newParams.get(CameraParameters::KEY_GPS_TIMESTAMP); + const char *gpsProcMethodStr = + newParams.get(CameraParameters::KEY_GPS_PROCESSING_METHOD); + if (gpsLongStr == NULL || + gpsAltitudeStr == NULL || + gpsTimeStr == NULL || + gpsProcMethodStr == NULL) { + ALOGE("%s: Incomplete set of GPS parameters provided", + __FUNCTION__); + return BAD_VALUE; + } + char *endPtr; + errno = 0; + validatedParams.gpsCoordinates[0] = strtod(gpsLatStr, &endPtr); + if (errno || endPtr == gpsLatStr) { + ALOGE("%s: Malformed GPS latitude: %s", __FUNCTION__, gpsLatStr); + return BAD_VALUE; + } + errno = 0; + validatedParams.gpsCoordinates[1] = strtod(gpsLongStr, &endPtr); + if (errno || endPtr == gpsLongStr) { + ALOGE("%s: Malformed GPS longitude: %s", __FUNCTION__, gpsLongStr); + return BAD_VALUE; + } + errno = 0; + validatedParams.gpsCoordinates[2] = strtod(gpsAltitudeStr, &endPtr); + if (errno || endPtr == gpsAltitudeStr) { + ALOGE("%s: Malformed GPS altitude: %s", __FUNCTION__, + gpsAltitudeStr); + return BAD_VALUE; + } + errno = 0; + validatedParams.gpsTimestamp = strtoll(gpsTimeStr, &endPtr, 10); + if (errno || endPtr == gpsTimeStr) { + ALOGE("%s: Malformed GPS timestamp: %s", __FUNCTION__, gpsTimeStr); + return BAD_VALUE; + } + validatedParams.gpsProcessingMethod = gpsProcMethodStr; + + validatedParams.gpsEnabled = true; + } else { + validatedParams.gpsEnabled = false; + } + + // EFFECT + validatedParams.effectMode = effectModeStringToEnum( + newParams.get(CameraParameters::KEY_EFFECT) ); + if (validatedParams.effectMode != effectMode) { + camera_metadata_ro_entry_t availableEffectModes = + staticInfo(ANDROID_CONTROL_AVAILABLE_EFFECTS); + for (i = 0; i < availableEffectModes.count; i++) { + if (validatedParams.effectMode == availableEffectModes.data.u8[i]) break; + } + if (i == 
availableEffectModes.count) { + ALOGE("%s: Requested effect mode \"%s\" is not supported", + __FUNCTION__, + newParams.get(CameraParameters::KEY_EFFECT) ); + return BAD_VALUE; + } + } + + // ANTIBANDING + validatedParams.antibandingMode = abModeStringToEnum( + newParams.get(CameraParameters::KEY_ANTIBANDING) ); + if (validatedParams.antibandingMode != antibandingMode) { + camera_metadata_ro_entry_t availableAbModes = + staticInfo(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES); + for (i = 0; i < availableAbModes.count; i++) { + if (validatedParams.antibandingMode == availableAbModes.data.u8[i]) + break; + } + if (i == availableAbModes.count) { + ALOGE("%s: Requested antibanding mode \"%s\" is not supported", + __FUNCTION__, + newParams.get(CameraParameters::KEY_ANTIBANDING)); + return BAD_VALUE; + } + } + + // SCENE_MODE + validatedParams.sceneMode = sceneModeStringToEnum( + newParams.get(CameraParameters::KEY_SCENE_MODE) ); + if (validatedParams.sceneMode != sceneMode && + validatedParams.sceneMode != + ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) { + camera_metadata_ro_entry_t availableSceneModes = + staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES); + for (i = 0; i < availableSceneModes.count; i++) { + if (validatedParams.sceneMode == availableSceneModes.data.u8[i]) + break; + } + if (i == availableSceneModes.count) { + ALOGE("%s: Requested scene mode \"%s\" is not supported", + __FUNCTION__, + newParams.get(CameraParameters::KEY_SCENE_MODE)); + return BAD_VALUE; + } + } + bool sceneModeSet = + validatedParams.sceneMode != ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED; + + // FLASH_MODE + if (sceneModeSet) { + validatedParams.flashMode = + fastInfo.sceneModeOverrides. + valueFor(validatedParams.sceneMode).flashMode; + } else { + validatedParams.flashMode = FLASH_MODE_INVALID; + } + if (validatedParams.flashMode == FLASH_MODE_INVALID) { + validatedParams.flashMode = flashModeStringToEnum( + newParams.get(CameraParameters::KEY_FLASH_MODE) ); + } + + if (validatedParams.flashMode != flashMode) { + camera_metadata_ro_entry_t flashAvailable = + staticInfo(ANDROID_FLASH_INFO_AVAILABLE, 1, 1); + if (!flashAvailable.data.u8[0] && + validatedParams.flashMode != Parameters::FLASH_MODE_OFF) { + ALOGE("%s: Requested flash mode \"%s\" is not supported: " + "No flash on device", __FUNCTION__, + newParams.get(CameraParameters::KEY_FLASH_MODE)); + return BAD_VALUE; + } else if (validatedParams.flashMode == Parameters::FLASH_MODE_RED_EYE) { + camera_metadata_ro_entry_t availableAeModes = + staticInfo(ANDROID_CONTROL_AE_AVAILABLE_MODES); + for (i = 0; i < availableAeModes.count; i++) { + if (validatedParams.flashMode == availableAeModes.data.u8[i]) + break; + } + if (i == availableAeModes.count) { + ALOGE("%s: Requested flash mode \"%s\" is not supported", + __FUNCTION__, + newParams.get(CameraParameters::KEY_FLASH_MODE)); + return BAD_VALUE; + } + } else if (validatedParams.flashMode == -1) { + ALOGE("%s: Requested flash mode \"%s\" is unknown", + __FUNCTION__, + newParams.get(CameraParameters::KEY_FLASH_MODE)); + return BAD_VALUE; + } + // Update in case of override + newParams.set(CameraParameters::KEY_FLASH_MODE, + flashModeEnumToString(validatedParams.flashMode)); + } + + // WHITE_BALANCE + if (sceneModeSet) { + validatedParams.wbMode = + fastInfo.sceneModeOverrides. 
+ valueFor(validatedParams.sceneMode).wbMode; + } else { + validatedParams.wbMode = ANDROID_CONTROL_AWB_MODE_OFF; + } + if (validatedParams.wbMode == ANDROID_CONTROL_AWB_MODE_OFF) { + validatedParams.wbMode = wbModeStringToEnum( + newParams.get(CameraParameters::KEY_WHITE_BALANCE) ); + } + if (validatedParams.wbMode != wbMode) { + camera_metadata_ro_entry_t availableWbModes = + staticInfo(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 0, 0, false); + for (i = 0; i < availableWbModes.count; i++) { + if (validatedParams.wbMode == availableWbModes.data.u8[i]) break; + } + if (i == availableWbModes.count) { + ALOGE("%s: Requested white balance mode %s is not supported", + __FUNCTION__, + newParams.get(CameraParameters::KEY_WHITE_BALANCE)); + return BAD_VALUE; + } + // Update in case of override + newParams.set(CameraParameters::KEY_WHITE_BALANCE, + wbModeEnumToString(validatedParams.wbMode)); + } + + // FOCUS_MODE + if (sceneModeSet) { + validatedParams.focusMode = + fastInfo.sceneModeOverrides. + valueFor(validatedParams.sceneMode).focusMode; + } else { + validatedParams.focusMode = FOCUS_MODE_INVALID; + } + if (validatedParams.focusMode == FOCUS_MODE_INVALID) { + validatedParams.focusMode = focusModeStringToEnum( + newParams.get(CameraParameters::KEY_FOCUS_MODE) ); + } + if (validatedParams.focusMode != focusMode) { + validatedParams.currentAfTriggerId = -1; + if (validatedParams.focusMode != Parameters::FOCUS_MODE_FIXED) { + camera_metadata_ro_entry_t minFocusDistance = + staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 0, 0, + false); + if (minFocusDistance.count && minFocusDistance.data.f[0] == 0) { + ALOGE("%s: Requested focus mode \"%s\" is not available: " + "fixed focus lens", + __FUNCTION__, + newParams.get(CameraParameters::KEY_FOCUS_MODE)); + return BAD_VALUE; + } else if (validatedParams.focusMode != + Parameters::FOCUS_MODE_INFINITY) { + camera_metadata_ro_entry_t availableFocusModes = + staticInfo(ANDROID_CONTROL_AF_AVAILABLE_MODES); + for (i = 0; i < availableFocusModes.count; i++) { + if (validatedParams.focusMode == + availableFocusModes.data.u8[i]) break; + } + if (i == availableFocusModes.count) { + ALOGE("%s: Requested focus mode \"%s\" is not supported", + __FUNCTION__, + newParams.get(CameraParameters::KEY_FOCUS_MODE)); + return BAD_VALUE; + } + } + } + validatedParams.focusState = ANDROID_CONTROL_AF_STATE_INACTIVE; + // Always reset shadow focus mode to avoid reverting settings + validatedParams.shadowFocusMode = FOCUS_MODE_INVALID; + // Update in case of override + newParams.set(CameraParameters::KEY_FOCUS_MODE, + focusModeEnumToString(validatedParams.focusMode)); + } else { + validatedParams.currentAfTriggerId = currentAfTriggerId; + } + + // FOCUS_AREAS + res = parseAreas(newParams.get(CameraParameters::KEY_FOCUS_AREAS), + &validatedParams.focusingAreas); + size_t max3aRegions = + (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS, 1, 1).data.i32[0]; + if (res == OK) res = validateAreas(validatedParams.focusingAreas, + max3aRegions, AREA_KIND_FOCUS); + if (res != OK) { + ALOGE("%s: Requested focus areas are malformed: %s", + __FUNCTION__, newParams.get(CameraParameters::KEY_FOCUS_AREAS)); + return BAD_VALUE; + } + + // EXPOSURE_COMPENSATION + validatedParams.exposureCompensation = + newParams.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION); + camera_metadata_ro_entry_t exposureCompensationRange = + staticInfo(ANDROID_CONTROL_AE_COMPENSATION_RANGE); + if ((validatedParams.exposureCompensation < + exposureCompensationRange.data.i32[0]) || + 
(validatedParams.exposureCompensation > + exposureCompensationRange.data.i32[1])) { + ALOGE("%s: Requested exposure compensation index is out of bounds: %d", + __FUNCTION__, validatedParams.exposureCompensation); + return BAD_VALUE; + } + + // AUTO_EXPOSURE_LOCK (always supported) + validatedParams.autoExposureLock = boolFromString( + newParams.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK)); + + // AUTO_WHITEBALANCE_LOCK (always supported) + validatedParams.autoWhiteBalanceLock = boolFromString( + newParams.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)); + + // METERING_AREAS + res = parseAreas(newParams.get(CameraParameters::KEY_METERING_AREAS), + &validatedParams.meteringAreas); + if (res == OK) { + res = validateAreas(validatedParams.meteringAreas, max3aRegions, + AREA_KIND_METERING); + } + if (res != OK) { + ALOGE("%s: Requested metering areas are malformed: %s", + __FUNCTION__, + newParams.get(CameraParameters::KEY_METERING_AREAS)); + return BAD_VALUE; + } + + // ZOOM + validatedParams.zoom = newParams.getInt(CameraParameters::KEY_ZOOM); + if (validatedParams.zoom < 0 + || validatedParams.zoom >= (int)NUM_ZOOM_STEPS) { + ALOGE("%s: Requested zoom level %d is not supported", + __FUNCTION__, validatedParams.zoom); + return BAD_VALUE; + } + + // VIDEO_SIZE + newParams.getVideoSize(&validatedParams.videoWidth, + &validatedParams.videoHeight); + if (validatedParams.videoWidth != videoWidth || + validatedParams.videoHeight != videoHeight) { + if (state == RECORD) { + ALOGE("%s: Video size cannot be updated when recording is active!", + __FUNCTION__); + return BAD_VALUE; + } + camera_metadata_ro_entry_t availableVideoSizes = + staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES); + for (i = 0; i < availableVideoSizes.count; i += 2 ) { + if ((availableVideoSizes.data.i32[i] == + validatedParams.videoWidth) && + (availableVideoSizes.data.i32[i+1] == + validatedParams.videoHeight)) break; + } + if (i == availableVideoSizes.count) { + ALOGE("%s: Requested video size %d x %d is not supported", + __FUNCTION__, validatedParams.videoWidth, + validatedParams.videoHeight); + return BAD_VALUE; + } + } + + // VIDEO_STABILIZATION + validatedParams.videoStabilization = boolFromString( + newParams.get(CameraParameters::KEY_VIDEO_STABILIZATION) ); + camera_metadata_ro_entry_t availableVideoStabilizationModes = + staticInfo(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 0, 0, + false); + if (validatedParams.videoStabilization && + availableVideoStabilizationModes.count == 1) { + ALOGE("%s: Video stabilization not supported", __FUNCTION__); + } + + // LIGHTFX + validatedParams.lightFx = lightFxStringToEnum( + newParams.get(CameraParameters::KEY_LIGHTFX)); + + /** Update internal parameters */ + + *this = validatedParams; + + /** Update external parameters calculated from the internal ones */ + + // HORIZONTAL/VERTICAL FIELD OF VIEW + float horizFov, vertFov; + res = calculatePictureFovs(&horizFov, &vertFov); + if (res != OK) { + ALOGE("%s: Can't calculate FOVs", __FUNCTION__); + // continue so parameters are at least consistent + } + newParams.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, + horizFov); + newParams.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, + vertFov); + ALOGV("Current still picture FOV: %f x %f deg", horizFov, vertFov); + + // Need to flatten again in case of overrides + paramsFlattened = newParams.flatten(); + params = newParams; + + return OK; +} + +status_t Parameters::updateRequest(CameraMetadata *request) const { + ATRACE_CALL(); + status_t res; + + /** + * 
Mixin default important security values + * - android.led.transmit = defaulted ON + */ + camera_metadata_ro_entry_t entry = staticInfo(ANDROID_LED_AVAILABLE_LEDS, + /*minimumCount*/0, + /*maximumCount*/0, + /*required*/false); + for(size_t i = 0; i < entry.count; ++i) { + uint8_t led = entry.data.u8[i]; + + switch(led) { + // Transmit LED is unconditionally on when using + // the android.hardware.Camera API + case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: { + uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON; + res = request->update(ANDROID_LED_TRANSMIT, + &transmitDefault, 1); + if (res != OK) return res; + break; + } + } + } + + /** + * Construct metadata from parameters + */ + + uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL; + res = request->update(ANDROID_REQUEST_METADATA_MODE, + &metadataMode, 1); + if (res != OK) return res; + + res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + previewFpsRange, 2); + if (res != OK) return res; + + uint8_t reqWbLock = autoWhiteBalanceLock ? + ANDROID_CONTROL_AWB_LOCK_ON : ANDROID_CONTROL_AWB_LOCK_OFF; + res = request->update(ANDROID_CONTROL_AWB_LOCK, + &reqWbLock, 1); + + res = request->update(ANDROID_CONTROL_EFFECT_MODE, + &effectMode, 1); + if (res != OK) return res; + res = request->update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, + &antibandingMode, 1); + if (res != OK) return res; + + // android.hardware.Camera requires that when face detect is enabled, the + // camera is in a face-priority mode. HAL2 splits this into separate parts + // (face detection statistics and face priority scene mode). Map from other + // to the other. + bool sceneModeActive = + sceneMode != (uint8_t)ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED; + uint8_t reqControlMode = ANDROID_CONTROL_MODE_AUTO; + if (enableFaceDetect || sceneModeActive) { + reqControlMode = ANDROID_CONTROL_MODE_USE_SCENE_MODE; + } + res = request->update(ANDROID_CONTROL_MODE, + &reqControlMode, 1); + if (res != OK) return res; + + uint8_t reqSceneMode = + sceneModeActive ? sceneMode : + enableFaceDetect ? (uint8_t)ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY : + (uint8_t)ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED; + res = request->update(ANDROID_CONTROL_SCENE_MODE, + &reqSceneMode, 1); + if (res != OK) return res; + + uint8_t reqFlashMode = ANDROID_FLASH_MODE_OFF; + uint8_t reqAeMode = ANDROID_CONTROL_AE_MODE_OFF; + switch (flashMode) { + case Parameters::FLASH_MODE_OFF: + reqAeMode = ANDROID_CONTROL_AE_MODE_ON; break; + case Parameters::FLASH_MODE_AUTO: + reqAeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH; break; + case Parameters::FLASH_MODE_ON: + reqAeMode = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH; break; + case Parameters::FLASH_MODE_TORCH: + reqAeMode = ANDROID_CONTROL_AE_MODE_ON; + reqFlashMode = ANDROID_FLASH_MODE_TORCH; + break; + case Parameters::FLASH_MODE_RED_EYE: + reqAeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE; break; + default: + ALOGE("%s: Camera %d: Unknown flash mode %d", __FUNCTION__, + cameraId, flashMode); + return BAD_VALUE; + } + res = request->update(ANDROID_FLASH_MODE, + &reqFlashMode, 1); + if (res != OK) return res; + res = request->update(ANDROID_CONTROL_AE_MODE, + &reqAeMode, 1); + if (res != OK) return res; + + uint8_t reqAeLock = autoExposureLock ? 
+ ANDROID_CONTROL_AE_LOCK_ON : ANDROID_CONTROL_AE_LOCK_OFF; + res = request->update(ANDROID_CONTROL_AE_LOCK, + &reqAeLock, 1); + if (res != OK) return res; + + res = request->update(ANDROID_CONTROL_AWB_MODE, + &wbMode, 1); + if (res != OK) return res; + + float reqFocusDistance = 0; // infinity focus in diopters + uint8_t reqFocusMode = ANDROID_CONTROL_AF_MODE_OFF; + switch (focusMode) { + case Parameters::FOCUS_MODE_AUTO: + case Parameters::FOCUS_MODE_MACRO: + case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO: + case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE: + case Parameters::FOCUS_MODE_EDOF: + reqFocusMode = focusMode; + break; + case Parameters::FOCUS_MODE_INFINITY: + case Parameters::FOCUS_MODE_FIXED: + reqFocusMode = ANDROID_CONTROL_AF_MODE_OFF; + break; + default: + ALOGE("%s: Camera %d: Unknown focus mode %d", __FUNCTION__, + cameraId, focusMode); + return BAD_VALUE; + } + res = request->update(ANDROID_LENS_FOCUS_DISTANCE, + &reqFocusDistance, 1); + if (res != OK) return res; + res = request->update(ANDROID_CONTROL_AF_MODE, + &reqFocusMode, 1); + if (res != OK) return res; + + size_t reqFocusingAreasSize = focusingAreas.size() * 5; + int32_t *reqFocusingAreas = new int32_t[reqFocusingAreasSize]; + for (size_t i = 0; i < reqFocusingAreasSize; i += 5) { + if (focusingAreas[i].weight != 0) { + reqFocusingAreas[i + 0] = + normalizedXToArray(focusingAreas[i].left); + reqFocusingAreas[i + 1] = + normalizedYToArray(focusingAreas[i].top); + reqFocusingAreas[i + 2] = + normalizedXToArray(focusingAreas[i].right); + reqFocusingAreas[i + 3] = + normalizedYToArray(focusingAreas[i].bottom); + } else { + reqFocusingAreas[i + 0] = 0; + reqFocusingAreas[i + 1] = 0; + reqFocusingAreas[i + 2] = 0; + reqFocusingAreas[i + 3] = 0; + } + reqFocusingAreas[i + 4] = focusingAreas[i].weight; + } + res = request->update(ANDROID_CONTROL_AF_REGIONS, + reqFocusingAreas, reqFocusingAreasSize); + if (res != OK) return res; + delete[] reqFocusingAreas; + + res = request->update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, + &exposureCompensation, 1); + if (res != OK) return res; + + size_t reqMeteringAreasSize = meteringAreas.size() * 5; + int32_t *reqMeteringAreas = new int32_t[reqMeteringAreasSize]; + for (size_t i = 0; i < reqMeteringAreasSize; i += 5) { + if (meteringAreas[i].weight != 0) { + reqMeteringAreas[i + 0] = + normalizedXToArray(meteringAreas[i].left); + reqMeteringAreas[i + 1] = + normalizedYToArray(meteringAreas[i].top); + reqMeteringAreas[i + 2] = + normalizedXToArray(meteringAreas[i].right); + reqMeteringAreas[i + 3] = + normalizedYToArray(meteringAreas[i].bottom); + } else { + reqMeteringAreas[i + 0] = 0; + reqMeteringAreas[i + 1] = 0; + reqMeteringAreas[i + 2] = 0; + reqMeteringAreas[i + 3] = 0; + } + reqMeteringAreas[i + 4] = meteringAreas[i].weight; + } + res = request->update(ANDROID_CONTROL_AE_REGIONS, + reqMeteringAreas, reqMeteringAreasSize); + if (res != OK) return res; + + delete[] reqMeteringAreas; + + /* don't include jpeg thumbnail size - it's valid for + it to be set to (0,0), meaning 'no thumbnail' */ + CropRegion crop = calculateCropRegion( (CropRegion::Outputs)( + CropRegion::OUTPUT_PREVIEW | + CropRegion::OUTPUT_VIDEO | + CropRegion::OUTPUT_PICTURE )); + int32_t reqCropRegion[4] = { + static_cast(crop.left), + static_cast(crop.top), + static_cast(crop.width), + static_cast(crop.height) + }; + res = request->update(ANDROID_SCALER_CROP_REGION, + reqCropRegion, 4); + if (res != OK) return res; + + uint8_t reqVstabMode = videoStabilization ? 
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON : + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; + res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, + &reqVstabMode, 1); + if (res != OK) return res; + + uint8_t reqFaceDetectMode = enableFaceDetect ? + fastInfo.bestFaceDetectMode : + (uint8_t)ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; + res = request->update(ANDROID_STATISTICS_FACE_DETECT_MODE, + &reqFaceDetectMode, 1); + if (res != OK) return res; + + return OK; +} + +status_t Parameters::updateRequestJpeg(CameraMetadata *request) const { + status_t res; + + res = request->update(ANDROID_JPEG_THUMBNAIL_SIZE, + jpegThumbSize, 2); + if (res != OK) return res; + res = request->update(ANDROID_JPEG_THUMBNAIL_QUALITY, + &jpegThumbQuality, 1); + if (res != OK) return res; + res = request->update(ANDROID_JPEG_QUALITY, + &jpegQuality, 1); + if (res != OK) return res; + res = request->update( + ANDROID_JPEG_ORIENTATION, + &jpegRotation, 1); + if (res != OK) return res; + + if (gpsEnabled) { + res = request->update( + ANDROID_JPEG_GPS_COORDINATES, + gpsCoordinates, 3); + if (res != OK) return res; + res = request->update( + ANDROID_JPEG_GPS_TIMESTAMP, + &gpsTimestamp, 1); + if (res != OK) return res; + res = request->update( + ANDROID_JPEG_GPS_PROCESSING_METHOD, + gpsProcessingMethod); + if (res != OK) return res; + } else { + res = request->erase(ANDROID_JPEG_GPS_COORDINATES); + if (res != OK) return res; + res = request->erase(ANDROID_JPEG_GPS_TIMESTAMP); + if (res != OK) return res; + res = request->erase(ANDROID_JPEG_GPS_PROCESSING_METHOD); + if (res != OK) return res; + } + return OK; +} + + +const char* Parameters::getStateName(State state) { +#define CASE_ENUM_TO_CHAR(x) case x: return(#x); break; + switch(state) { + CASE_ENUM_TO_CHAR(DISCONNECTED) + CASE_ENUM_TO_CHAR(STOPPED) + CASE_ENUM_TO_CHAR(WAITING_FOR_PREVIEW_WINDOW) + CASE_ENUM_TO_CHAR(PREVIEW) + CASE_ENUM_TO_CHAR(RECORD) + CASE_ENUM_TO_CHAR(STILL_CAPTURE) + CASE_ENUM_TO_CHAR(VIDEO_SNAPSHOT) + default: + return "Unknown state!"; + break; + } +#undef CASE_ENUM_TO_CHAR +} + +int Parameters::formatStringToEnum(const char *format) { + return + !format ? + HAL_PIXEL_FORMAT_YCrCb_420_SP : + !strcmp(format, CameraParameters::PIXEL_FORMAT_YUV422SP) ? + HAL_PIXEL_FORMAT_YCbCr_422_SP : // NV16 + !strcmp(format, CameraParameters::PIXEL_FORMAT_YUV420SP) ? + HAL_PIXEL_FORMAT_YCrCb_420_SP : // NV21 + !strcmp(format, CameraParameters::PIXEL_FORMAT_YUV422I) ? + HAL_PIXEL_FORMAT_YCbCr_422_I : // YUY2 + !strcmp(format, CameraParameters::PIXEL_FORMAT_YUV420P) ? + HAL_PIXEL_FORMAT_YV12 : // YV12 + !strcmp(format, CameraParameters::PIXEL_FORMAT_RGB565) ? + HAL_PIXEL_FORMAT_RGB_565 : // RGB565 + !strcmp(format, CameraParameters::PIXEL_FORMAT_RGBA8888) ? + HAL_PIXEL_FORMAT_RGBA_8888 : // RGB8888 + !strcmp(format, CameraParameters::PIXEL_FORMAT_BAYER_RGGB) ? 
+ HAL_PIXEL_FORMAT_RAW_SENSOR : // Raw sensor data + -1; +} + +const char* Parameters::formatEnumToString(int format) { + const char *fmt; + switch(format) { + case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16 + fmt = CameraParameters::PIXEL_FORMAT_YUV422SP; + break; + case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21 + fmt = CameraParameters::PIXEL_FORMAT_YUV420SP; + break; + case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2 + fmt = CameraParameters::PIXEL_FORMAT_YUV422I; + break; + case HAL_PIXEL_FORMAT_YV12: // YV12 + fmt = CameraParameters::PIXEL_FORMAT_YUV420P; + break; + case HAL_PIXEL_FORMAT_RGB_565: // RGB565 + fmt = CameraParameters::PIXEL_FORMAT_RGB565; + break; + case HAL_PIXEL_FORMAT_RGBA_8888: // RGBA8888 + fmt = CameraParameters::PIXEL_FORMAT_RGBA8888; + break; + case HAL_PIXEL_FORMAT_RAW_SENSOR: + ALOGW("Raw sensor preview format requested."); + fmt = CameraParameters::PIXEL_FORMAT_BAYER_RGGB; + break; + default: + ALOGE("%s: Unknown preview format: %x", + __FUNCTION__, format); + fmt = NULL; + break; + } + return fmt; +} + +int Parameters::wbModeStringToEnum(const char *wbMode) { + return + !wbMode ? + ANDROID_CONTROL_AWB_MODE_AUTO : + !strcmp(wbMode, CameraParameters::WHITE_BALANCE_AUTO) ? + ANDROID_CONTROL_AWB_MODE_AUTO : + !strcmp(wbMode, CameraParameters::WHITE_BALANCE_INCANDESCENT) ? + ANDROID_CONTROL_AWB_MODE_INCANDESCENT : + !strcmp(wbMode, CameraParameters::WHITE_BALANCE_FLUORESCENT) ? + ANDROID_CONTROL_AWB_MODE_FLUORESCENT : + !strcmp(wbMode, CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT) ? + ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT : + !strcmp(wbMode, CameraParameters::WHITE_BALANCE_DAYLIGHT) ? + ANDROID_CONTROL_AWB_MODE_DAYLIGHT : + !strcmp(wbMode, CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT) ? + ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT : + !strcmp(wbMode, CameraParameters::WHITE_BALANCE_TWILIGHT) ? + ANDROID_CONTROL_AWB_MODE_TWILIGHT : + !strcmp(wbMode, CameraParameters::WHITE_BALANCE_SHADE) ? + ANDROID_CONTROL_AWB_MODE_SHADE : + -1; +} + +const char* Parameters::wbModeEnumToString(uint8_t wbMode) { + switch (wbMode) { + case ANDROID_CONTROL_AWB_MODE_AUTO: + return CameraParameters::WHITE_BALANCE_AUTO; + case ANDROID_CONTROL_AWB_MODE_INCANDESCENT: + return CameraParameters::WHITE_BALANCE_INCANDESCENT; + case ANDROID_CONTROL_AWB_MODE_FLUORESCENT: + return CameraParameters::WHITE_BALANCE_FLUORESCENT; + case ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT: + return CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT; + case ANDROID_CONTROL_AWB_MODE_DAYLIGHT: + return CameraParameters::WHITE_BALANCE_DAYLIGHT; + case ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT: + return CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT; + case ANDROID_CONTROL_AWB_MODE_TWILIGHT: + return CameraParameters::WHITE_BALANCE_TWILIGHT; + case ANDROID_CONTROL_AWB_MODE_SHADE: + return CameraParameters::WHITE_BALANCE_SHADE; + default: + ALOGE("%s: Unknown AWB mode enum: %d", + __FUNCTION__, wbMode); + return "unknown"; + } +} + +int Parameters::effectModeStringToEnum(const char *effectMode) { + return + !effectMode ? + ANDROID_CONTROL_EFFECT_MODE_OFF : + !strcmp(effectMode, CameraParameters::EFFECT_NONE) ? + ANDROID_CONTROL_EFFECT_MODE_OFF : + !strcmp(effectMode, CameraParameters::EFFECT_MONO) ? + ANDROID_CONTROL_EFFECT_MODE_MONO : + !strcmp(effectMode, CameraParameters::EFFECT_NEGATIVE) ? + ANDROID_CONTROL_EFFECT_MODE_NEGATIVE : + !strcmp(effectMode, CameraParameters::EFFECT_SOLARIZE) ? + ANDROID_CONTROL_EFFECT_MODE_SOLARIZE : + !strcmp(effectMode, CameraParameters::EFFECT_SEPIA) ? 
+ ANDROID_CONTROL_EFFECT_MODE_SEPIA : + !strcmp(effectMode, CameraParameters::EFFECT_POSTERIZE) ? + ANDROID_CONTROL_EFFECT_MODE_POSTERIZE : + !strcmp(effectMode, CameraParameters::EFFECT_WHITEBOARD) ? + ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD : + !strcmp(effectMode, CameraParameters::EFFECT_BLACKBOARD) ? + ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD : + !strcmp(effectMode, CameraParameters::EFFECT_AQUA) ? + ANDROID_CONTROL_EFFECT_MODE_AQUA : + -1; +} + +int Parameters::abModeStringToEnum(const char *abMode) { + return + !abMode ? + ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO : + !strcmp(abMode, CameraParameters::ANTIBANDING_AUTO) ? + ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO : + !strcmp(abMode, CameraParameters::ANTIBANDING_OFF) ? + ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF : + !strcmp(abMode, CameraParameters::ANTIBANDING_50HZ) ? + ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ : + !strcmp(abMode, CameraParameters::ANTIBANDING_60HZ) ? + ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ : + -1; +} + +int Parameters::sceneModeStringToEnum(const char *sceneMode) { + return + !sceneMode ? + ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_AUTO) ? + ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_ACTION) ? + ANDROID_CONTROL_SCENE_MODE_ACTION : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_PORTRAIT) ? + ANDROID_CONTROL_SCENE_MODE_PORTRAIT : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_LANDSCAPE) ? + ANDROID_CONTROL_SCENE_MODE_LANDSCAPE : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_NIGHT) ? + ANDROID_CONTROL_SCENE_MODE_NIGHT : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_NIGHT_PORTRAIT) ? + ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_THEATRE) ? + ANDROID_CONTROL_SCENE_MODE_THEATRE : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_BEACH) ? + ANDROID_CONTROL_SCENE_MODE_BEACH : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_SNOW) ? + ANDROID_CONTROL_SCENE_MODE_SNOW : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_SUNSET) ? + ANDROID_CONTROL_SCENE_MODE_SUNSET : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_STEADYPHOTO) ? + ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_FIREWORKS) ? + ANDROID_CONTROL_SCENE_MODE_FIREWORKS : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_SPORTS) ? + ANDROID_CONTROL_SCENE_MODE_SPORTS : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_PARTY) ? + ANDROID_CONTROL_SCENE_MODE_PARTY : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_CANDLELIGHT) ? + ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT : + !strcmp(sceneMode, CameraParameters::SCENE_MODE_BARCODE) ? + ANDROID_CONTROL_SCENE_MODE_BARCODE: + -1; +} + +Parameters::Parameters::flashMode_t Parameters::flashModeStringToEnum( + const char *flashMode) { + return + !flashMode ? + Parameters::FLASH_MODE_INVALID : + !strcmp(flashMode, CameraParameters::FLASH_MODE_OFF) ? + Parameters::FLASH_MODE_OFF : + !strcmp(flashMode, CameraParameters::FLASH_MODE_AUTO) ? + Parameters::FLASH_MODE_AUTO : + !strcmp(flashMode, CameraParameters::FLASH_MODE_ON) ? + Parameters::FLASH_MODE_ON : + !strcmp(flashMode, CameraParameters::FLASH_MODE_RED_EYE) ? + Parameters::FLASH_MODE_RED_EYE : + !strcmp(flashMode, CameraParameters::FLASH_MODE_TORCH) ? 
+ Parameters::FLASH_MODE_TORCH : + Parameters::FLASH_MODE_INVALID; +} + +const char *Parameters::flashModeEnumToString(flashMode_t flashMode) { + switch (flashMode) { + case FLASH_MODE_OFF: + return CameraParameters::FLASH_MODE_OFF; + case FLASH_MODE_AUTO: + return CameraParameters::FLASH_MODE_AUTO; + case FLASH_MODE_ON: + return CameraParameters::FLASH_MODE_ON; + case FLASH_MODE_RED_EYE: + return CameraParameters::FLASH_MODE_RED_EYE; + case FLASH_MODE_TORCH: + return CameraParameters::FLASH_MODE_TORCH; + default: + ALOGE("%s: Unknown flash mode enum %d", + __FUNCTION__, flashMode); + return "unknown"; + } +} + +Parameters::Parameters::focusMode_t Parameters::focusModeStringToEnum( + const char *focusMode) { + return + !focusMode ? + Parameters::FOCUS_MODE_INVALID : + !strcmp(focusMode, CameraParameters::FOCUS_MODE_AUTO) ? + Parameters::FOCUS_MODE_AUTO : + !strcmp(focusMode, CameraParameters::FOCUS_MODE_INFINITY) ? + Parameters::FOCUS_MODE_INFINITY : + !strcmp(focusMode, CameraParameters::FOCUS_MODE_MACRO) ? + Parameters::FOCUS_MODE_MACRO : + !strcmp(focusMode, CameraParameters::FOCUS_MODE_FIXED) ? + Parameters::FOCUS_MODE_FIXED : + !strcmp(focusMode, CameraParameters::FOCUS_MODE_EDOF) ? + Parameters::FOCUS_MODE_EDOF : + !strcmp(focusMode, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) ? + Parameters::FOCUS_MODE_CONTINUOUS_VIDEO : + !strcmp(focusMode, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) ? + Parameters::FOCUS_MODE_CONTINUOUS_PICTURE : + Parameters::FOCUS_MODE_INVALID; +} + +const char *Parameters::focusModeEnumToString(focusMode_t focusMode) { + switch (focusMode) { + case FOCUS_MODE_AUTO: + return CameraParameters::FOCUS_MODE_AUTO; + case FOCUS_MODE_MACRO: + return CameraParameters::FOCUS_MODE_MACRO; + case FOCUS_MODE_CONTINUOUS_VIDEO: + return CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO; + case FOCUS_MODE_CONTINUOUS_PICTURE: + return CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE; + case FOCUS_MODE_EDOF: + return CameraParameters::FOCUS_MODE_EDOF; + case FOCUS_MODE_INFINITY: + return CameraParameters::FOCUS_MODE_INFINITY; + case FOCUS_MODE_FIXED: + return CameraParameters::FOCUS_MODE_FIXED; + default: + ALOGE("%s: Unknown focus mode enum: %d", + __FUNCTION__, focusMode); + return "unknown"; + } +} + +Parameters::Parameters::lightFxMode_t Parameters::lightFxStringToEnum( + const char *lightFxMode) { + return + !lightFxMode ? + Parameters::LIGHTFX_NONE : + !strcmp(lightFxMode, CameraParameters::LIGHTFX_LOWLIGHT) ? + Parameters::LIGHTFX_LOWLIGHT : + !strcmp(lightFxMode, CameraParameters::LIGHTFX_HDR) ? 
+        Parameters::LIGHTFX_HDR :
+        Parameters::LIGHTFX_NONE;
+}
+
+status_t Parameters::parseAreas(const char *areasCStr,
+        Vector<Parameters::Area> *areas) {
+    static const size_t NUM_FIELDS = 5;
+    areas->clear();
+    if (areasCStr == NULL) {
+        // If no key exists, use default (0,0,0,0,0)
+        areas->push();
+        return OK;
+    }
+    String8 areasStr(areasCStr);
+    ssize_t areaStart = areasStr.find("(", 0) + 1;
+    while (areaStart != 0) {
+        const char* area = areasStr.string() + areaStart;
+        char *numEnd;
+        int vals[NUM_FIELDS];
+        for (size_t i = 0; i < NUM_FIELDS; i++) {
+            errno = 0;
+            vals[i] = strtol(area, &numEnd, 10);
+            if (errno || numEnd == area) return BAD_VALUE;
+            area = numEnd + 1;
+        }
+        areas->push(Parameters::Area(
+            vals[0], vals[1], vals[2], vals[3], vals[4]) );
+        areaStart = areasStr.find("(", areaStart) + 1;
+    }
+    return OK;
+}
+
+status_t Parameters::validateAreas(const Vector<Parameters::Area> &areas,
+                                      size_t maxRegions,
+                                      AreaKind areaKind) const {
+    // Definition of valid area can be found in
+    // include/camera/CameraParameters.h
+    if (areas.size() == 0) return BAD_VALUE;
+    if (areas.size() == 1) {
+        if (areas[0].left == 0 &&
+                areas[0].top == 0 &&
+                areas[0].right == 0 &&
+                areas[0].bottom == 0 &&
+                areas[0].weight == 0) {
+            // Single (0,0,0,0,0) entry is always valid (== driver decides)
+            return OK;
+        }
+    }
+
+    // fixed focus can only set (0,0,0,0,0) focus area
+    if (areaKind == AREA_KIND_FOCUS && focusMode == FOCUS_MODE_FIXED) {
+        return BAD_VALUE;
+    }
+
+    if (areas.size() > maxRegions) {
+        ALOGE("%s: Too many areas requested: %d",
+                __FUNCTION__, areas.size());
+        return BAD_VALUE;
+    }
+
+    for (Vector<Parameters::Area>::const_iterator a = areas.begin();
+         a != areas.end(); a++) {
+        if (a->weight < 1 || a->weight > 1000) return BAD_VALUE;
+        if (a->left < -1000 || a->left > 1000) return BAD_VALUE;
+        if (a->top < -1000 || a->top > 1000) return BAD_VALUE;
+        if (a->right < -1000 || a->right > 1000) return BAD_VALUE;
+        if (a->bottom < -1000 || a->bottom > 1000) return BAD_VALUE;
+        if (a->left >= a->right) return BAD_VALUE;
+        if (a->top >= a->bottom) return BAD_VALUE;
+    }
+    return OK;
+}
+
+bool Parameters::boolFromString(const char *boolStr) {
+    return !boolStr ? false :
+        !strcmp(boolStr, CameraParameters::TRUE) ?
true : + false; +} + +int Parameters::degToTransform(int degrees, bool mirror) { + if (!mirror) { + if (degrees == 0) return 0; + else if (degrees == 90) return HAL_TRANSFORM_ROT_90; + else if (degrees == 180) return HAL_TRANSFORM_ROT_180; + else if (degrees == 270) return HAL_TRANSFORM_ROT_270; + } else { // Do mirror (horizontal flip) + if (degrees == 0) { // FLIP_H and ROT_0 + return HAL_TRANSFORM_FLIP_H; + } else if (degrees == 90) { // FLIP_H and ROT_90 + return HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90; + } else if (degrees == 180) { // FLIP_H and ROT_180 + return HAL_TRANSFORM_FLIP_V; + } else if (degrees == 270) { // FLIP_H and ROT_270 + return HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90; + } + } + ALOGE("%s: Bad input: %d", __FUNCTION__, degrees); + return -1; +} + +int Parameters::cropXToArray(int x) const { + ALOG_ASSERT(x >= 0, "Crop-relative X coordinate = '%d' is out of bounds" + "(lower = 0)", x); + + CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); + ALOG_ASSERT(x < previewCrop.width, "Crop-relative X coordinate = '%d' " + "is out of bounds (upper = %f)", x, previewCrop.width); + + int ret = x + previewCrop.left; + + ALOG_ASSERT( (ret >= 0 && ret < fastInfo.arrayWidth), + "Calculated pixel array value X = '%d' is out of bounds (upper = %d)", + ret, fastInfo.arrayWidth); + return ret; +} + +int Parameters::cropYToArray(int y) const { + ALOG_ASSERT(y >= 0, "Crop-relative Y coordinate = '%d' is out of bounds " + "(lower = 0)", y); + + CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); + ALOG_ASSERT(y < previewCrop.height, "Crop-relative Y coordinate = '%d' is " + "out of bounds (upper = %f)", y, previewCrop.height); + + int ret = y + previewCrop.top; + + ALOG_ASSERT( (ret >= 0 && ret < fastInfo.arrayHeight), + "Calculated pixel array value Y = '%d' is out of bounds (upper = %d)", + ret, fastInfo.arrayHeight); + + return ret; + +} + +int Parameters::normalizedXToCrop(int x) const { + CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); + return (x + 1000) * (previewCrop.width - 1) / 2000; +} + +int Parameters::normalizedYToCrop(int y) const { + CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); + return (y + 1000) * (previewCrop.height - 1) / 2000; +} + +int Parameters::arrayXToCrop(int x) const { + CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); + return x - previewCrop.left; +} + +int Parameters::arrayYToCrop(int y) const { + CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); + return y - previewCrop.top; +} + +int Parameters::cropXToNormalized(int x) const { + CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); + return x * 2000 / (previewCrop.width - 1) - 1000; +} + +int Parameters::cropYToNormalized(int y) const { + CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); + return y * 2000 / (previewCrop.height - 1) - 1000; +} + +int Parameters::arrayXToNormalized(int width) const { + int ret = cropXToNormalized(arrayXToCrop(width)); + + ALOG_ASSERT(ret >= -1000, "Calculated normalized value out of " + "lower bounds %d", ret); + ALOG_ASSERT(ret <= 1000, "Calculated normalized value out of " + "upper bounds %d", ret); + + // Work-around for HAL pre-scaling the coordinates themselves + if (quirks.meteringCropRegion) { + return width * 2000 / (fastInfo.arrayWidth - 1) - 1000; + } + + return ret; +} + +int Parameters::arrayYToNormalized(int height) const { + int ret = 
cropYToNormalized(arrayYToCrop(height)); + + ALOG_ASSERT(ret >= -1000, "Calculated normalized value out of lower bounds" + " %d", ret); + ALOG_ASSERT(ret <= 1000, "Calculated normalized value out of upper bounds" + " %d", ret); + + // Work-around for HAL pre-scaling the coordinates themselves + if (quirks.meteringCropRegion) { + return height * 2000 / (fastInfo.arrayHeight - 1) - 1000; + } + + return ret; +} + +int Parameters::normalizedXToArray(int x) const { + + // Work-around for HAL pre-scaling the coordinates themselves + if (quirks.meteringCropRegion) { + return (x + 1000) * (fastInfo.arrayWidth - 1) / 2000; + } + + return cropXToArray(normalizedXToCrop(x)); +} + +int Parameters::normalizedYToArray(int y) const { + // Work-around for HAL pre-scaling the coordinates themselves + if (quirks.meteringCropRegion) { + return (y + 1000) * (fastInfo.arrayHeight - 1) / 2000; + } + + return cropYToArray(normalizedYToCrop(y)); +} + +Parameters::CropRegion Parameters::calculateCropRegion( + Parameters::CropRegion::Outputs outputs) const { + + float zoomLeft, zoomTop, zoomWidth, zoomHeight; + + // Need to convert zoom index into a crop rectangle. The rectangle is + // chosen to maximize its area on the sensor + + camera_metadata_ro_entry_t maxDigitalZoom = + staticInfo(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); + // For each zoom step by how many pixels more do we change the zoom + float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) / + (NUM_ZOOM_STEPS-1); + // The desired activeAreaWidth/cropAreaWidth ratio (or height if h>w) + // via interpolating zoom step into a zoom ratio + float zoomRatio = 1 + zoomIncrement * zoom; + ALOG_ASSERT( (zoomRatio >= 1.f && zoomRatio <= maxDigitalZoom.data.f[0]), + "Zoom ratio calculated out of bounds. Expected 1 - %f, actual: %f", + maxDigitalZoom.data.f[0], zoomRatio); + + ALOGV("Zoom maxDigital=%f, increment=%f, ratio=%f, previewWidth=%d, " + "previewHeight=%d, activeWidth=%d, activeHeight=%d", + maxDigitalZoom.data.f[0], zoomIncrement, zoomRatio, previewWidth, + previewHeight, fastInfo.arrayWidth, fastInfo.arrayHeight); + + /* + * Assumption: On the HAL side each stream buffer calculates its crop + * rectangle as follows: + * cropRect = (zoomLeft, zoomRight, + * zoomWidth, zoomHeight * zoomWidth / outputWidth); + * + * Note that if zoomWidth > bufferWidth, the new cropHeight > zoomHeight + * (we can then get into trouble if the cropHeight > arrayHeight). + * By selecting the zoomRatio based on the smallest outputRatio, we + * guarantee this will never happen. 
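+     *
+     * Illustrative example (hypothetical device values): with
+     * maxDigitalZoom = 4.0, NUM_ZOOM_STEPS = 100 and zoom step 33,
+     * zoomIncrement = 3/99 and zoomRatio is roughly 2.0; for a 4000x3000
+     * array and a 16:9 minimum output ratio, the code below yields a
+     * centered crop of about 2000x1125 at offset (1000, 937).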
+     */
+
+    // Enumerate all possible output sizes, select the one with the smallest
+    // aspect ratio
+    float minOutputWidth, minOutputHeight, minOutputRatio;
+    {
+        float outputSizes[][2] = {
+            { static_cast<float>(previewWidth),
+              static_cast<float>(previewHeight) },
+            { static_cast<float>(videoWidth),
+              static_cast<float>(videoHeight) },
+            { static_cast<float>(jpegThumbSize[0]),
+              static_cast<float>(jpegThumbSize[1]) },
+            { static_cast<float>(pictureWidth),
+              static_cast<float>(pictureHeight) },
+        };
+
+        minOutputWidth = outputSizes[0][0];
+        minOutputHeight = outputSizes[0][1];
+        minOutputRatio = minOutputWidth / minOutputHeight;
+        for (unsigned int i = 0;
+             i < sizeof(outputSizes) / sizeof(outputSizes[0]);
+             ++i) {
+
+            // skip over outputs we don't want to consider for the crop region
+            if ( !((1 << i) & outputs) ) {
+                continue;
+            }
+
+            float outputWidth = outputSizes[i][0];
+            float outputHeight = outputSizes[i][1];
+            float outputRatio = outputWidth / outputHeight;
+
+            if (minOutputRatio > outputRatio) {
+                minOutputRatio = outputRatio;
+                minOutputWidth = outputWidth;
+                minOutputHeight = outputHeight;
+            }
+
+            // and then use this output ratio instead of preview output ratio
+            ALOGV("Enumerating output ratio %f = %f / %f, min is %f",
+                  outputRatio, outputWidth, outputHeight, minOutputRatio);
+        }
+    }
+
+    /* Ensure that the width/height never go out of bounds
+     * by scaling across a different dimension if an out-of-bounds
+     * possibility exists.
+     *
+     * e.g. if the previewratio < arrayratio and e.g. zoomratio = 1.0, then by
+     * calculating the zoomWidth from zoomHeight we'll actually get a
+     * zoomheight > arrayheight
+     */
+    float arrayRatio = 1.f * fastInfo.arrayWidth / fastInfo.arrayHeight;
+    if (minOutputRatio >= arrayRatio) {
+        // Adjust the height based on the width
+        zoomWidth = fastInfo.arrayWidth / zoomRatio;
+        zoomHeight = zoomWidth *
+                minOutputHeight / minOutputWidth;
+
+    } else {
+        // Adjust the width based on the height
+        zoomHeight = fastInfo.arrayHeight / zoomRatio;
+        zoomWidth = zoomHeight *
+                minOutputWidth / minOutputHeight;
+    }
+    // centering the zoom area within the active area
+    zoomLeft = (fastInfo.arrayWidth - zoomWidth) / 2;
+    zoomTop = (fastInfo.arrayHeight - zoomHeight) / 2;
+
+    ALOGV("Crop region calculated (x=%d,y=%d,w=%f,h=%f) for zoom=%d",
+        (int32_t)zoomLeft, (int32_t)zoomTop, zoomWidth, zoomHeight, this->zoom);
+
+
+    CropRegion crop = { zoomLeft, zoomTop, zoomWidth, zoomHeight };
+    return crop;
+}
+
+status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov)
+        const {
+    camera_metadata_ro_entry_t sensorSize =
+        staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2);
+    if (!sensorSize.count) return NO_INIT;
+
+    float arrayAspect = static_cast<float>(fastInfo.arrayWidth) /
+            fastInfo.arrayHeight;
+    float stillAspect = static_cast<float>(pictureWidth) / pictureHeight;
+    ALOGV("Array aspect: %f, still aspect: %f", arrayAspect, stillAspect);
+
+    // The crop factors from the full sensor array to the still picture crop
+    // region
+    float horizCropFactor = 1.f;
+    float vertCropFactor = 1.f;
+
+    /**
+     * Need to calculate the still image field of view based on the total pixel
+     * array field of view, and the relative aspect ratios of the pixel array
+     * and output streams.
+     *
+     * Special treatment for quirky definition of crop region and relative
+     * stream cropping.
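+     *
+     * For instance (illustrative numbers only): a 4:3 pixel array paired
+     * with a 16:9 still output is cropped vertically, so the non-quirk path
+     * below gives vertCropFactor = (4/3)/(16/9) = 0.75 with horizCropFactor
+     * left at 1.0.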
+ */ + if (quirks.meteringCropRegion) { + // Use max of preview and video as first crop + float previewAspect = static_cast(previewWidth) / previewHeight; + float videoAspect = static_cast(videoWidth) / videoHeight; + if (videoAspect > previewAspect) { + previewAspect = videoAspect; + } + // First crop sensor to preview aspect ratio + if (arrayAspect < previewAspect) { + vertCropFactor = arrayAspect / previewAspect; + } else { + horizCropFactor = previewAspect / arrayAspect; + } + // Second crop to still aspect ratio + if (stillAspect < previewAspect) { + horizCropFactor *= stillAspect / previewAspect; + } else { + vertCropFactor *= previewAspect / stillAspect; + } + } else { + /** + * Crop are just a function of just the still/array relative aspect + * ratios. Since each stream will maximize its area within the crop + * region, and for FOV we assume a full-sensor crop region, we only ever + * crop the FOV either vertically or horizontally, never both. + */ + horizCropFactor = (arrayAspect > stillAspect) ? + (stillAspect / arrayAspect) : 1.f; + vertCropFactor = (arrayAspect < stillAspect) ? + (arrayAspect / stillAspect) : 1.f; + } + ALOGV("Horiz crop factor: %f, vert crop fact: %f", + horizCropFactor, vertCropFactor); + /** + * Basic field of view formula is: + * angle of view = 2 * arctangent ( d / 2f ) + * where d is the physical sensor dimension of interest, and f is + * the focal length. This only applies to rectilinear sensors, for focusing + * at distances >> f, etc. + */ + if (horizFov != NULL) { + *horizFov = 180 / M_PI * 2 * + atanf(horizCropFactor * sensorSize.data.f[0] / + (2 * fastInfo.minFocalLength)); + } + if (vertFov != NULL) { + *vertFov = 180 / M_PI * 2 * + atanf(vertCropFactor * sensorSize.data.f[1] / + (2 * fastInfo.minFocalLength)); + } + return OK; +} + +int32_t Parameters::fpsFromRange(int32_t /*min*/, int32_t max) const { + return max; +} + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h new file mode 100644 index 0000000..464830c --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/Parameters.h @@ -0,0 +1,372 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2PARAMETERS_H +#define ANDROID_SERVERS_CAMERA_CAMERA2PARAMETERS_H + +#include + +#include +#include +#include +#include +#include +#include +#include + +namespace android { +namespace camera2 { + +/** + * Current camera state; this is the full state of the Camera under the old + * camera API (contents of the CameraParameters object in a more-efficient + * format, plus other state). The enum values are mostly based off the + * corresponding camera2 enums, not the camera1 strings. A few are defined here + * if they don't cleanly map to camera2 values. 
+ */ +struct Parameters { + /** + * Parameters and other state + */ + int cameraId; + int cameraFacing; + + int previewWidth, previewHeight; + int32_t previewFpsRange[2]; + int previewFps; // deprecated, here only for tracking changes + int previewFormat; + + int previewTransform; // set by CAMERA_CMD_SET_DISPLAY_ORIENTATION + + int pictureWidth, pictureHeight; + + int32_t jpegThumbSize[2]; + uint8_t jpegQuality, jpegThumbQuality; + int32_t jpegRotation; + + bool gpsEnabled; + double gpsCoordinates[3]; + int64_t gpsTimestamp; + String8 gpsProcessingMethod; + + uint8_t wbMode; + uint8_t effectMode; + uint8_t antibandingMode; + uint8_t sceneMode; + + enum flashMode_t { + FLASH_MODE_OFF = 0, + FLASH_MODE_AUTO, + FLASH_MODE_ON, + FLASH_MODE_TORCH, + FLASH_MODE_RED_EYE = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, + FLASH_MODE_INVALID = -1 + } flashMode; + + enum focusMode_t { + FOCUS_MODE_AUTO = ANDROID_CONTROL_AF_MODE_AUTO, + FOCUS_MODE_MACRO = ANDROID_CONTROL_AF_MODE_MACRO, + FOCUS_MODE_CONTINUOUS_VIDEO = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, + FOCUS_MODE_CONTINUOUS_PICTURE = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, + FOCUS_MODE_EDOF = ANDROID_CONTROL_AF_MODE_EDOF, + FOCUS_MODE_INFINITY, + FOCUS_MODE_FIXED, + FOCUS_MODE_INVALID = -1 + } focusMode; + + uint8_t focusState; // Latest focus state from HAL + + // For use with triggerAfWithAuto quirk + focusMode_t shadowFocusMode; + + struct Area { + int left, top, right, bottom; + int weight; + Area() {} + Area(int left, int top, int right, int bottom, int weight): + left(left), top(top), right(right), bottom(bottom), + weight(weight) {} + bool isEmpty() const { + return (left == 0) && (top == 0) && (right == 0) && (bottom == 0); + } + }; + Vector focusingAreas; + + int32_t exposureCompensation; + bool autoExposureLock; + bool autoWhiteBalanceLock; + + Vector meteringAreas; + + int zoom; + + int videoWidth, videoHeight; + + bool recordingHint; + bool videoStabilization; + + enum lightFxMode_t { + LIGHTFX_NONE = 0, + LIGHTFX_LOWLIGHT, + LIGHTFX_HDR + } lightFx; + + CameraParameters params; + String8 paramsFlattened; + + // These parameters are also part of the camera API-visible state, but not + // directly listed in Camera.Parameters + bool storeMetadataInBuffers; + bool playShutterSound; + bool enableFaceDetect; + + bool enableFocusMoveMessages; + int afTriggerCounter; + int currentAfTriggerId; + bool afInMotion; + + int precaptureTriggerCounter; + + uint32_t previewCallbackFlags; + bool previewCallbackOneShot; + bool previewCallbackSurface; + + bool zslMode; + + // Overall camera state + enum State { + DISCONNECTED, + STOPPED, + WAITING_FOR_PREVIEW_WINDOW, + PREVIEW, + RECORD, + STILL_CAPTURE, + VIDEO_SNAPSHOT + } state; + + // Number of zoom steps to simulate + static const unsigned int NUM_ZOOM_STEPS = 100; + + // Full static camera info, object owned by someone else, such as + // Camera2Device. + const CameraMetadata *info; + + // Fast-access static device information; this is a subset of the + // information available through the staticInfo() method, used for + // frequently-accessed values or values that have to be calculated from the + // static information. 
+ struct DeviceInfo { + int32_t arrayWidth; + int32_t arrayHeight; + uint8_t bestFaceDetectMode; + int32_t maxFaces; + struct OverrideModes { + flashMode_t flashMode; + uint8_t wbMode; + focusMode_t focusMode; + OverrideModes(): + flashMode(FLASH_MODE_INVALID), + wbMode(ANDROID_CONTROL_AWB_MODE_OFF), + focusMode(FOCUS_MODE_INVALID) { + } + }; + DefaultKeyedVector sceneModeOverrides; + float minFocalLength; + bool useFlexibleYuv; + } fastInfo; + + // Quirks information; these are short-lived flags to enable workarounds for + // incomplete HAL implementations + struct Quirks { + bool triggerAfWithAuto; + bool useZslFormat; + bool meteringCropRegion; + } quirks; + + /** + * Parameter manipulation and setup methods + */ + + Parameters(int cameraId, int cameraFacing); + ~Parameters(); + + // Sets up default parameters + status_t initialize(const CameraMetadata *info); + + // Build fast-access device static info from static info + status_t buildFastInfo(); + // Query for quirks from static info + status_t buildQuirks(); + + // Get entry from camera static characteristics information. min/maxCount + // are used for error checking the number of values in the entry. 0 for + // max/minCount means to do no bounds check in that direction. In case of + // error, the entry data pointer is null and the count is 0. + camera_metadata_ro_entry_t staticInfo(uint32_t tag, + size_t minCount=0, size_t maxCount=0, bool required=true) const; + + // Validate and update camera parameters based on new settings + status_t set(const String8 ¶mString); + + // Retrieve the current settings + String8 get() const; + + // Update passed-in request for common parameters + status_t updateRequest(CameraMetadata *request) const; + + // Add/update JPEG entries in metadata + status_t updateRequestJpeg(CameraMetadata *request) const; + + // Calculate the crop region rectangle based on current stream sizes + struct CropRegion { + float left; + float top; + float width; + float height; + + enum Outputs { + OUTPUT_PREVIEW = 0x01, + OUTPUT_VIDEO = 0x02, + OUTPUT_JPEG_THUMBNAIL = 0x04, + OUTPUT_PICTURE = 0x08, + }; + }; + CropRegion calculateCropRegion(CropRegion::Outputs outputs) const; + + // Calculate the field of view of the high-resolution JPEG capture + status_t calculatePictureFovs(float *horizFov, float *vertFov) const; + + // Static methods for debugging and converting between camera1 and camera2 + // parameters + + static const char *getStateName(State state); + + static int formatStringToEnum(const char *format); + static const char *formatEnumToString(int format); + + static int wbModeStringToEnum(const char *wbMode); + static const char* wbModeEnumToString(uint8_t wbMode); + static int effectModeStringToEnum(const char *effectMode); + static int abModeStringToEnum(const char *abMode); + static int sceneModeStringToEnum(const char *sceneMode); + static flashMode_t flashModeStringToEnum(const char *flashMode); + static const char* flashModeEnumToString(flashMode_t flashMode); + static focusMode_t focusModeStringToEnum(const char *focusMode); + static const char* focusModeEnumToString(focusMode_t focusMode); + static lightFxMode_t lightFxStringToEnum(const char *lightFxMode); + + static status_t parseAreas(const char *areasCStr, + Vector *areas); + + enum AreaKind + { + AREA_KIND_FOCUS, + AREA_KIND_METERING + }; + status_t validateAreas(const Vector &areas, + size_t maxRegions, + AreaKind areaKind) const; + static bool boolFromString(const char *boolStr); + + // Map from camera orientation + facing to gralloc transform enum + 
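+    // For example, degToTransform(90, /*mirror*/true) returns
+    // HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90 (see Parameters.cpp above).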
static int degToTransform(int degrees, bool mirror); + + // API specifies FPS ranges are done in fixed point integer, with LSB = 0.001. + // Note that this doesn't apply to the (deprecated) single FPS value. + static const int kFpsToApiScale = 1000; + + // Transform between (-1000,-1000)-(1000,1000) normalized coords from camera + // API and HAL2 (0,0)-(activePixelArray.width/height) coordinates + int arrayXToNormalized(int width) const; + int arrayYToNormalized(int height) const; + int normalizedXToArray(int x) const; + int normalizedYToArray(int y) const; + + struct Range { + int min; + int max; + }; + + int32_t fpsFromRange(int32_t min, int32_t max) const; + +private: + + // Convert between HAL2 sensor array coordinates and + // viewfinder crop-region relative array coordinates + int cropXToArray(int x) const; + int cropYToArray(int y) const; + int arrayXToCrop(int x) const; + int arrayYToCrop(int y) const; + + // Convert between viewfinder crop-region relative array coordinates + // and camera API (-1000,1000)-(1000,1000) normalized coords + int cropXToNormalized(int x) const; + int cropYToNormalized(int y) const; + int normalizedXToCrop(int x) const; + int normalizedYToCrop(int y) const; +}; + +// This class encapsulates the Parameters class so that it can only be accessed +// by constructing a Lock object, which locks the SharedParameter's mutex. +class SharedParameters { + public: + SharedParameters(int cameraId, int cameraFacing): + mParameters(cameraId, cameraFacing) { + } + + template + class BaseLock { + public: + BaseLock(S &p): + mParameters(p.mParameters), + mSharedParameters(p) { + mSharedParameters.mLock.lock(); + } + + ~BaseLock() { + mSharedParameters.mLock.unlock(); + } + P &mParameters; + private: + // Disallow copying, default construction + BaseLock(); + BaseLock(const BaseLock &); + BaseLock &operator=(const BaseLock &); + S &mSharedParameters; + }; + typedef BaseLock Lock; + typedef BaseLock ReadLock; + + // Access static info, read-only and immutable, so no lock needed + camera_metadata_ro_entry_t staticInfo(uint32_t tag, + size_t minCount=0, size_t maxCount=0) const { + return mParameters.staticInfo(tag, minCount, maxCount); + } + + // Only use for dumping or other debugging + const Parameters &unsafeAccess() { + return mParameters; + } + private: + Parameters mParameters; + mutable Mutex mLock; +}; + + +}; // namespace camera2 +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp new file mode 100644 index 0000000..7e98016 --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp @@ -0,0 +1,880 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera2-StreamingProcessor" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 +//#define LOG_NNDEBUG 0 // Per-frame verbose logging + +#ifdef LOG_NNDEBUG +#define ALOGVV(...) ALOGV(__VA_ARGS__) +#else +#define ALOGVV(...) ((void)0) +#endif + +#include +#include +#include +#include + +#include "common/CameraDeviceBase.h" +#include "api1/Camera2Client.h" +#include "api1/client2/StreamingProcessor.h" +#include "api1/client2/Camera2Heap.h" + +namespace android { +namespace camera2 { + +StreamingProcessor::StreamingProcessor(sp client): + mClient(client), + mDevice(client->getCameraDevice()), + mId(client->getCameraId()), + mActiveRequest(NONE), + mPaused(false), + mPreviewRequestId(Camera2Client::kPreviewRequestIdStart), + mPreviewStreamId(NO_STREAM), + mRecordingRequestId(Camera2Client::kRecordingRequestIdStart), + mRecordingStreamId(NO_STREAM), + mRecordingFrameAvailable(false), + mRecordingHeapCount(kDefaultRecordingHeapCount), + mRecordingHeapFree(kDefaultRecordingHeapCount) +{ +} + +StreamingProcessor::~StreamingProcessor() { + deletePreviewStream(); + deleteRecordingStream(); +} + +status_t StreamingProcessor::setPreviewWindow(sp window) { + ATRACE_CALL(); + status_t res; + + res = deletePreviewStream(); + if (res != OK) return res; + + Mutex::Autolock m(mMutex); + + mPreviewWindow = window; + + return OK; +} + +bool StreamingProcessor::haveValidPreviewWindow() const { + Mutex::Autolock m(mMutex); + return mPreviewWindow != 0; +} + +status_t StreamingProcessor::updatePreviewRequest(const Parameters ¶ms) { + ATRACE_CALL(); + status_t res; + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + Mutex::Autolock m(mMutex); + if (mPreviewRequest.entryCount() == 0) { + res = device->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, + &mPreviewRequest); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to create default preview request: " + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); + return res; + } + } + + res = params.updateRequest(&mPreviewRequest); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update common entries of preview " + "request: %s (%d)", __FUNCTION__, mId, + strerror(-res), res); + return res; + } + + res = mPreviewRequest.update(ANDROID_REQUEST_ID, + &mPreviewRequestId, 1); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update request id for preview: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + return OK; +} + +status_t StreamingProcessor::updatePreviewStream(const Parameters ¶ms) { + ATRACE_CALL(); + Mutex::Autolock m(mMutex); + + status_t res; + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + if (mPreviewStreamId != NO_STREAM) { + // Check if stream parameters have to change + uint32_t currentWidth, currentHeight; + res = device->getStreamInfo(mPreviewStreamId, + ¤tWidth, ¤tHeight, 0); + if (res != OK) { + ALOGE("%s: Camera %d: Error querying preview stream info: " + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); + return res; + } + if (currentWidth != (uint32_t)params.previewWidth || + currentHeight != (uint32_t)params.previewHeight) { + ALOGV("%s: Camera %d: Preview size switch: %d x %d -> %d x %d", + __FUNCTION__, mId, currentWidth, currentHeight, + params.previewWidth, params.previewHeight); + res = device->waitUntilDrained(); + if (res != OK) { + ALOGE("%s: Camera %d: Error 
waiting for preview to drain: " + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); + return res; + } + res = device->deleteStream(mPreviewStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete old output stream " + "for preview: %s (%d)", __FUNCTION__, mId, + strerror(-res), res); + return res; + } + mPreviewStreamId = NO_STREAM; + } + } + + if (mPreviewStreamId == NO_STREAM) { + res = device->createStream(mPreviewWindow, + params.previewWidth, params.previewHeight, + CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, 0, + &mPreviewStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to create preview stream: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + } + + res = device->setStreamTransform(mPreviewStreamId, + params.previewTransform); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to set preview stream transform: " + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); + return res; + } + + return OK; +} + +status_t StreamingProcessor::deletePreviewStream() { + ATRACE_CALL(); + status_t res; + + Mutex::Autolock m(mMutex); + + if (mPreviewStreamId != NO_STREAM) { + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + ALOGV("%s: for cameraId %d on streamId %d", + __FUNCTION__, mId, mPreviewStreamId); + + res = device->waitUntilDrained(); + if (res != OK) { + ALOGE("%s: Error waiting for preview to drain: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + res = device->deleteStream(mPreviewStreamId); + if (res != OK) { + ALOGE("%s: Unable to delete old preview stream: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + mPreviewStreamId = NO_STREAM; + } + return OK; +} + +int StreamingProcessor::getPreviewStreamId() const { + Mutex::Autolock m(mMutex); + return mPreviewStreamId; +} + +status_t StreamingProcessor::setRecordingBufferCount(size_t count) { + ATRACE_CALL(); + // Make sure we can support this many buffer slots + if (count > BufferQueue::NUM_BUFFER_SLOTS) { + ALOGE("%s: Camera %d: Too many recording buffers requested: %d, max %d", + __FUNCTION__, mId, count, BufferQueue::NUM_BUFFER_SLOTS); + return BAD_VALUE; + } + + Mutex::Autolock m(mMutex); + + ALOGV("%s: Camera %d: New recording buffer count from encoder: %d", + __FUNCTION__, mId, count); + + // Need to re-size consumer and heap + if (mRecordingHeapCount != count) { + ALOGV("%s: Camera %d: Resetting recording heap and consumer", + __FUNCTION__, mId); + + if (isStreamActive(mActiveStreamIds, mRecordingStreamId)) { + ALOGE("%s: Camera %d: Setting recording buffer count when " + "recording stream is already active!", __FUNCTION__, + mId); + return INVALID_OPERATION; + } + + releaseAllRecordingFramesLocked(); + + if (mRecordingHeap != 0) { + mRecordingHeap.clear(); + } + mRecordingHeapCount = count; + mRecordingHeapFree = count; + + mRecordingConsumer.clear(); + } + + return OK; +} + +status_t StreamingProcessor::updateRecordingRequest(const Parameters ¶ms) { + ATRACE_CALL(); + status_t res; + Mutex::Autolock m(mMutex); + + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + if (mRecordingRequest.entryCount() == 0) { + res = device->createDefaultRequest(CAMERA2_TEMPLATE_VIDEO_RECORD, + &mRecordingRequest); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to create default recording request:" + " %s (%d)", __FUNCTION__, mId, strerror(-res), res); + return 
res; + } + } + + res = params.updateRequest(&mRecordingRequest); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update common entries of recording " + "request: %s (%d)", __FUNCTION__, mId, + strerror(-res), res); + return res; + } + + res = mRecordingRequest.update(ANDROID_REQUEST_ID, + &mRecordingRequestId, 1); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update request id for request: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + return OK; +} + +status_t StreamingProcessor::updateRecordingStream(const Parameters ¶ms) { + ATRACE_CALL(); + status_t res; + Mutex::Autolock m(mMutex); + + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + bool newConsumer = false; + if (mRecordingConsumer == 0) { + ALOGV("%s: Camera %d: Creating recording consumer with %d + 1 " + "consumer-side buffers", __FUNCTION__, mId, mRecordingHeapCount); + // Create CPU buffer queue endpoint. We need one more buffer here so that we can + // always acquire and free a buffer when the heap is full; otherwise the consumer + // will have buffers in flight we'll never clear out. + sp bq = new BufferQueue(); + mRecordingConsumer = new BufferItemConsumer(bq, + GRALLOC_USAGE_HW_VIDEO_ENCODER, + mRecordingHeapCount + 1); + mRecordingConsumer->setFrameAvailableListener(this); + mRecordingConsumer->setName(String8("Camera2-RecordingConsumer")); + mRecordingWindow = new Surface( + mRecordingConsumer->getProducerInterface()); + newConsumer = true; + // Allocate memory later, since we don't know buffer size until receipt + } + + if (mRecordingStreamId != NO_STREAM) { + // Check if stream parameters have to change + uint32_t currentWidth, currentHeight; + res = device->getStreamInfo(mRecordingStreamId, + ¤tWidth, ¤tHeight, 0); + if (res != OK) { + ALOGE("%s: Camera %d: Error querying recording output stream info: " + "%s (%d)", __FUNCTION__, mId, + strerror(-res), res); + return res; + } + if (currentWidth != (uint32_t)params.videoWidth || + currentHeight != (uint32_t)params.videoHeight || newConsumer) { + // TODO: Should wait to be sure previous recording has finished + res = device->deleteStream(mRecordingStreamId); + + if (res == -EBUSY) { + ALOGV("%s: Camera %d: Device is busy, call " + "updateRecordingStream after it becomes idle", + __FUNCTION__, mId); + return res; + } else if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete old output stream " + "for recording: %s (%d)", __FUNCTION__, + mId, strerror(-res), res); + return res; + } + mRecordingStreamId = NO_STREAM; + } + } + + if (mRecordingStreamId == NO_STREAM) { + mRecordingFrameCount = 0; + res = device->createStream(mRecordingWindow, + params.videoWidth, params.videoHeight, + CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, 0, &mRecordingStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Can't create output stream for recording: " + "%s (%d)", __FUNCTION__, mId, + strerror(-res), res); + return res; + } + } + + return OK; +} + +status_t StreamingProcessor::deleteRecordingStream() { + ATRACE_CALL(); + status_t res; + + Mutex::Autolock m(mMutex); + + if (mRecordingStreamId != NO_STREAM) { + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + res = device->waitUntilDrained(); + if (res != OK) { + ALOGE("%s: Error waiting for HAL to drain: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + res = 
device->deleteStream(mRecordingStreamId); + if (res != OK) { + ALOGE("%s: Unable to delete recording stream: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + mRecordingStreamId = NO_STREAM; + } + return OK; +} + +int StreamingProcessor::getRecordingStreamId() const { + return mRecordingStreamId; +} + +status_t StreamingProcessor::startStream(StreamType type, + const Vector &outputStreams) { + ATRACE_CALL(); + status_t res; + + if (type == NONE) return INVALID_OPERATION; + + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + ALOGV("%s: Camera %d: type = %d", __FUNCTION__, mId, type); + + Mutex::Autolock m(mMutex); + + // If a recording stream is being started up, free up any + // outstanding buffers left from the previous recording session. + // There should never be any, so if there are, warn about it. + if (isStreamActive(outputStreams, mRecordingStreamId)) { + releaseAllRecordingFramesLocked(); + } + + ALOGV("%s: Camera %d: %s started, recording heap has %d free of %d", + __FUNCTION__, mId, (type == PREVIEW) ? "preview" : "recording", + mRecordingHeapFree, mRecordingHeapCount); + + CameraMetadata &request = (type == PREVIEW) ? + mPreviewRequest : mRecordingRequest; + + res = request.update( + ANDROID_REQUEST_OUTPUT_STREAMS, + outputStreams); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to set up preview request: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + res = request.sort(); + if (res != OK) { + ALOGE("%s: Camera %d: Error sorting preview request: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + res = device->setStreamingRequest(request); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to set preview request to start preview: " + "%s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + mActiveRequest = type; + mPaused = false; + mActiveStreamIds = outputStreams; + return OK; +} + +status_t StreamingProcessor::togglePauseStream(bool pause) { + ATRACE_CALL(); + status_t res; + + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + ALOGV("%s: Camera %d: toggling pause to %d", __FUNCTION__, mId, pause); + + Mutex::Autolock m(mMutex); + + if (mActiveRequest == NONE) { + ALOGE("%s: Camera %d: Can't toggle pause, streaming was not started", + __FUNCTION__, mId); + return INVALID_OPERATION; + } + + if (mPaused == pause) { + return OK; + } + + if (pause) { + res = device->clearStreamingRequest(); + if (res != OK) { + ALOGE("%s: Camera %d: Can't clear stream request: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + } else { + CameraMetadata &request = + (mActiveRequest == PREVIEW) ? 
mPreviewRequest + : mRecordingRequest; + res = device->setStreamingRequest(request); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to set preview request to resume: " + "%s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + } + + mPaused = pause; + return OK; +} + +status_t StreamingProcessor::stopStream() { + ATRACE_CALL(); + status_t res; + + Mutex::Autolock m(mMutex); + + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + res = device->clearStreamingRequest(); + if (res != OK) { + ALOGE("%s: Camera %d: Can't clear stream request: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + mActiveRequest = NONE; + mActiveStreamIds.clear(); + mPaused = false; + + return OK; +} + +int32_t StreamingProcessor::getActiveRequestId() const { + Mutex::Autolock m(mMutex); + switch (mActiveRequest) { + case NONE: + return 0; + case PREVIEW: + return mPreviewRequestId; + case RECORD: + return mRecordingRequestId; + default: + ALOGE("%s: Unexpected mode %d", __FUNCTION__, mActiveRequest); + return 0; + } +} + +status_t StreamingProcessor::incrementStreamingIds() { + ATRACE_CALL(); + Mutex::Autolock m(mMutex); + + mPreviewRequestId++; + if (mPreviewRequestId >= Camera2Client::kPreviewRequestIdEnd) { + mPreviewRequestId = Camera2Client::kPreviewRequestIdStart; + } + mRecordingRequestId++; + if (mRecordingRequestId >= Camera2Client::kRecordingRequestIdEnd) { + mRecordingRequestId = Camera2Client::kRecordingRequestIdStart; + } + return OK; +} + +void StreamingProcessor::onFrameAvailable() { + ATRACE_CALL(); + Mutex::Autolock l(mMutex); + if (!mRecordingFrameAvailable) { + mRecordingFrameAvailable = true; + mRecordingFrameAvailableSignal.signal(); + } + +} + +bool StreamingProcessor::threadLoop() { + status_t res; + + { + Mutex::Autolock l(mMutex); + while (!mRecordingFrameAvailable) { + res = mRecordingFrameAvailableSignal.waitRelative( + mMutex, kWaitDuration); + if (res == TIMED_OUT) return true; + } + mRecordingFrameAvailable = false; + } + + do { + res = processRecordingFrame(); + } while (res == OK); + + return true; +} + +status_t StreamingProcessor::processRecordingFrame() { + ATRACE_CALL(); + status_t res; + sp recordingHeap; + size_t heapIdx = 0; + nsecs_t timestamp; + + sp client = mClient.promote(); + if (client == 0) { + // Discard frames during shutdown + BufferItemConsumer::BufferItem imgBuffer; + res = mRecordingConsumer->acquireBuffer(&imgBuffer, 0); + if (res != OK) { + if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { + ALOGE("%s: Camera %d: Can't acquire recording buffer: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + } + return res; + } + mRecordingConsumer->releaseBuffer(imgBuffer); + return OK; + } + + { + /* acquire SharedParameters before mMutex so we don't dead lock + with Camera2Client code calling into StreamingProcessor */ + SharedParameters::Lock l(client->getParameters()); + Mutex::Autolock m(mMutex); + BufferItemConsumer::BufferItem imgBuffer; + res = mRecordingConsumer->acquireBuffer(&imgBuffer, 0); + if (res != OK) { + if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { + ALOGE("%s: Camera %d: Can't acquire recording buffer: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + } + return res; + } + timestamp = imgBuffer.mTimestamp; + + mRecordingFrameCount++; + ALOGVV("OnRecordingFrame: Frame %d", mRecordingFrameCount); + + if (l.mParameters.state != Parameters::RECORD && + l.mParameters.state != 
Parameters::VIDEO_SNAPSHOT) { + ALOGV("%s: Camera %d: Discarding recording image buffers " + "received after recording done", __FUNCTION__, + mId); + mRecordingConsumer->releaseBuffer(imgBuffer); + return INVALID_OPERATION; + } + + if (mRecordingHeap == 0) { + const size_t bufferSize = 4 + sizeof(buffer_handle_t); + ALOGV("%s: Camera %d: Creating recording heap with %d buffers of " + "size %d bytes", __FUNCTION__, mId, + mRecordingHeapCount, bufferSize); + + mRecordingHeap = new Camera2Heap(bufferSize, mRecordingHeapCount, + "Camera2Client::RecordingHeap"); + if (mRecordingHeap->mHeap->getSize() == 0) { + ALOGE("%s: Camera %d: Unable to allocate memory for recording", + __FUNCTION__, mId); + mRecordingConsumer->releaseBuffer(imgBuffer); + return NO_MEMORY; + } + for (size_t i = 0; i < mRecordingBuffers.size(); i++) { + if (mRecordingBuffers[i].mBuf != + BufferItemConsumer::INVALID_BUFFER_SLOT) { + ALOGE("%s: Camera %d: Non-empty recording buffers list!", + __FUNCTION__, mId); + } + } + mRecordingBuffers.clear(); + mRecordingBuffers.setCapacity(mRecordingHeapCount); + mRecordingBuffers.insertAt(0, mRecordingHeapCount); + + mRecordingHeapHead = 0; + mRecordingHeapFree = mRecordingHeapCount; + } + + if ( mRecordingHeapFree == 0) { + ALOGE("%s: Camera %d: No free recording buffers, dropping frame", + __FUNCTION__, mId); + mRecordingConsumer->releaseBuffer(imgBuffer); + return NO_MEMORY; + } + + heapIdx = mRecordingHeapHead; + mRecordingHeapHead = (mRecordingHeapHead + 1) % mRecordingHeapCount; + mRecordingHeapFree--; + + ALOGVV("%s: Camera %d: Timestamp %lld", + __FUNCTION__, mId, timestamp); + + ssize_t offset; + size_t size; + sp heap = + mRecordingHeap->mBuffers[heapIdx]->getMemory(&offset, + &size); + + uint8_t *data = (uint8_t*)heap->getBase() + offset; + uint32_t type = kMetadataBufferTypeGrallocSource; + *((uint32_t*)data) = type; + *((buffer_handle_t*)(data + 4)) = imgBuffer.mGraphicBuffer->handle; + ALOGVV("%s: Camera %d: Sending out buffer_handle_t %p", + __FUNCTION__, mId, + imgBuffer.mGraphicBuffer->handle); + mRecordingBuffers.replaceAt(imgBuffer, heapIdx); + recordingHeap = mRecordingHeap; + } + + // Call outside locked parameters to allow re-entrancy from notification + Camera2Client::SharedCameraCallbacks::Lock l(client->mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->dataCallbackTimestamp(timestamp, + CAMERA_MSG_VIDEO_FRAME, + recordingHeap->mBuffers[heapIdx]); + } else { + ALOGW("%s: Camera %d: Remote callback gone", __FUNCTION__, mId); + } + + return OK; +} + +void StreamingProcessor::releaseRecordingFrame(const sp& mem) { + ATRACE_CALL(); + status_t res; + + Mutex::Autolock m(mMutex); + // Make sure this is for the current heap + ssize_t offset; + size_t size; + sp heap = mem->getMemory(&offset, &size); + if (heap->getHeapID() != mRecordingHeap->mHeap->getHeapID()) { + ALOGW("%s: Camera %d: Mismatched heap ID, ignoring release " + "(got %x, expected %x)", __FUNCTION__, mId, + heap->getHeapID(), mRecordingHeap->mHeap->getHeapID()); + return; + } + uint8_t *data = (uint8_t*)heap->getBase() + offset; + uint32_t type = *(uint32_t*)data; + if (type != kMetadataBufferTypeGrallocSource) { + ALOGE("%s: Camera %d: Recording frame type invalid (got %x, expected %x)", + __FUNCTION__, mId, type, + kMetadataBufferTypeGrallocSource); + return; + } + + // Release the buffer back to the recording queue + + buffer_handle_t imgHandle = *(buffer_handle_t*)(data + 4); + + size_t itemIndex; + for (itemIndex = 0; itemIndex < mRecordingBuffers.size(); itemIndex++) { 
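+ // imgHandle was read back from the same layout written by
+ // processRecordingFrame() (a 4-byte type word followed by the
+ // buffer_handle_t), so match it against the gralloc handle of each
+ // outstanding buffer item.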
+ const BufferItemConsumer::BufferItem item = + mRecordingBuffers[itemIndex]; + if (item.mBuf != BufferItemConsumer::INVALID_BUFFER_SLOT && + item.mGraphicBuffer->handle == imgHandle) { + break; + } + } + if (itemIndex == mRecordingBuffers.size()) { + ALOGE("%s: Camera %d: Can't find buffer_handle_t %p in list of " + "outstanding buffers", __FUNCTION__, mId, + imgHandle); + return; + } + + ALOGVV("%s: Camera %d: Freeing buffer_handle_t %p", __FUNCTION__, + mId, imgHandle); + + res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to free recording frame " + "(buffer_handle_t: %p): %s (%d)", __FUNCTION__, + mId, imgHandle, strerror(-res), res); + return; + } + mRecordingBuffers.replaceAt(itemIndex); + + mRecordingHeapFree++; + ALOGV_IF(mRecordingHeapFree == mRecordingHeapCount, + "%s: Camera %d: All %d recording buffers returned", + __FUNCTION__, mId, mRecordingHeapCount); +} + +void StreamingProcessor::releaseAllRecordingFramesLocked() { + ATRACE_CALL(); + status_t res; + + if (mRecordingConsumer == 0) { + return; + } + + ALOGV("%s: Camera %d: Releasing all recording buffers", __FUNCTION__, + mId); + + size_t releasedCount = 0; + for (size_t itemIndex = 0; itemIndex < mRecordingBuffers.size(); itemIndex++) { + const BufferItemConsumer::BufferItem item = + mRecordingBuffers[itemIndex]; + if (item.mBuf != BufferItemConsumer::INVALID_BUFFER_SLOT) { + res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to free recording frame " + "(buffer_handle_t: %p): %s (%d)", __FUNCTION__, + mId, item.mGraphicBuffer->handle, strerror(-res), res); + } + mRecordingBuffers.replaceAt(itemIndex); + releasedCount++; + } + } + + if (releasedCount > 0) { + ALOGW("%s: Camera %d: Force-freed %d outstanding buffers " + "from previous recording session", __FUNCTION__, mId, releasedCount); + ALOGE_IF(releasedCount != mRecordingHeapCount - mRecordingHeapFree, + "%s: Camera %d: Force-freed %d buffers, but expected %d", + __FUNCTION__, mId, releasedCount, mRecordingHeapCount - mRecordingHeapFree); + } + + mRecordingHeapHead = 0; + mRecordingHeapFree = mRecordingHeapCount; +} + +bool StreamingProcessor::isStreamActive(const Vector &streams, + uint8_t recordingStreamId) { + for (size_t i = 0; i < streams.size(); i++) { + if (streams[i] == recordingStreamId) { + return true; + } + } + return false; +} + + +status_t StreamingProcessor::dump(int fd, const Vector& /*args*/) { + String8 result; + + result.append(" Current requests:\n"); + if (mPreviewRequest.entryCount() != 0) { + result.append(" Preview request:\n"); + write(fd, result.string(), result.size()); + mPreviewRequest.dump(fd, 2, 6); + result.clear(); + } else { + result.append(" Preview request: undefined\n"); + } + + if (mRecordingRequest.entryCount() != 0) { + result = " Recording request:\n"; + write(fd, result.string(), result.size()); + mRecordingRequest.dump(fd, 2, 6); + result.clear(); + } else { + result = " Recording request: undefined\n"; + } + + const char* streamTypeString[] = { + "none", "preview", "record" + }; + result.append(String8::format(" Active request: %s (paused: %s)\n", + streamTypeString[mActiveRequest], + mPaused ? 
"yes" : "no")); + + write(fd, result.string(), result.size()); + + return OK; +} + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h new file mode 100644 index 0000000..d879b83 --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h @@ -0,0 +1,143 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_STREAMINGPROCESSOR_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_STREAMINGPROCESSOR_H + +#include +#include +#include + +#include "camera/CameraMetadata.h" + +namespace android { + +class Camera2Client; +class CameraDeviceBase; +class IMemory; + +namespace camera2 { + +class Parameters; +class Camera2Heap; + +/** + * Management and processing for preview and recording streams + */ +class StreamingProcessor: + public Thread, public BufferItemConsumer::FrameAvailableListener { + public: + StreamingProcessor(sp client); + ~StreamingProcessor(); + + status_t setPreviewWindow(sp window); + + bool haveValidPreviewWindow() const; + + status_t updatePreviewRequest(const Parameters ¶ms); + status_t updatePreviewStream(const Parameters ¶ms); + status_t deletePreviewStream(); + int getPreviewStreamId() const; + + status_t setRecordingBufferCount(size_t count); + status_t updateRecordingRequest(const Parameters ¶ms); + status_t updateRecordingStream(const Parameters ¶ms); + status_t deleteRecordingStream(); + int getRecordingStreamId() const; + + enum StreamType { + NONE, + PREVIEW, + RECORD + }; + status_t startStream(StreamType type, + const Vector &outputStreams); + + // Toggle between paused and unpaused. Stream must be started first. + status_t togglePauseStream(bool pause); + + status_t stopStream(); + + // Returns the request ID for the currently streaming request + // Returns 0 if there is no active request. 
+ status_t getActiveRequestId() const; + status_t incrementStreamingIds(); + + // Callback for new recording frames from HAL + virtual void onFrameAvailable(); + // Callback from stagefright which returns used recording frames + void releaseRecordingFrame(const sp& mem); + + status_t dump(int fd, const Vector& args); + + private: + mutable Mutex mMutex; + + enum { + NO_STREAM = -1 + }; + + wp mClient; + wp mDevice; + int mId; + + StreamType mActiveRequest; + bool mPaused; + + Vector mActiveStreamIds; + + // Preview-related members + int32_t mPreviewRequestId; + int mPreviewStreamId; + CameraMetadata mPreviewRequest; + sp mPreviewWindow; + + // Recording-related members + static const nsecs_t kWaitDuration = 50000000; // 50 ms + + int32_t mRecordingRequestId; + int mRecordingStreamId; + int mRecordingFrameCount; + sp mRecordingConsumer; + sp mRecordingWindow; + CameraMetadata mRecordingRequest; + sp mRecordingHeap; + + bool mRecordingFrameAvailable; + Condition mRecordingFrameAvailableSignal; + + static const size_t kDefaultRecordingHeapCount = 8; + size_t mRecordingHeapCount; + Vector mRecordingBuffers; + size_t mRecordingHeapHead, mRecordingHeapFree; + + virtual bool threadLoop(); + + status_t processRecordingFrame(); + + // Unilaterally free any buffers still outstanding to stagefright + void releaseAllRecordingFramesLocked(); + + // Determine if the specified stream is currently in use + static bool isStreamActive(const Vector &streams, + uint8_t recordingStreamId); +}; + + +}; // namespace camera2 +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp new file mode 100644 index 0000000..11a2cbb --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp @@ -0,0 +1,556 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera2-ZslProcessor" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 +//#define LOG_NNDEBUG 0 + +#ifdef LOG_NNDEBUG +#define ALOGVV(...) ALOGV(__VA_ARGS__) +#else +#define ALOGVV(...) 
((void)0) +#endif + +#include +#include +#include + +#include "common/CameraDeviceBase.h" +#include "api1/Camera2Client.h" +#include "api1/client2/CaptureSequencer.h" +#include "api1/client2/ZslProcessor.h" + +namespace android { +namespace camera2 { + +ZslProcessor::ZslProcessor( + sp client, + wp sequencer): + Thread(false), + mState(RUNNING), + mClient(client), + mDevice(client->getCameraDevice()), + mSequencer(sequencer), + mId(client->getCameraId()), + mZslBufferAvailable(false), + mZslStreamId(NO_STREAM), + mZslReprocessStreamId(NO_STREAM), + mFrameListHead(0), + mZslQueueHead(0), + mZslQueueTail(0) { + mZslQueue.insertAt(0, kZslBufferDepth); + mFrameList.insertAt(0, kFrameListDepth); + sp captureSequencer = mSequencer.promote(); + if (captureSequencer != 0) captureSequencer->setZslProcessor(this); +} + +ZslProcessor::~ZslProcessor() { + ALOGV("%s: Exit", __FUNCTION__); + deleteStream(); +} + +void ZslProcessor::onFrameAvailable() { + Mutex::Autolock l(mInputMutex); + if (!mZslBufferAvailable) { + mZslBufferAvailable = true; + mZslBufferAvailableSignal.signal(); + } +} + +void ZslProcessor::onFrameAvailable(int32_t /*frameId*/, + const CameraMetadata &frame) { + Mutex::Autolock l(mInputMutex); + camera_metadata_ro_entry_t entry; + entry = frame.find(ANDROID_SENSOR_TIMESTAMP); + nsecs_t timestamp = entry.data.i64[0]; + (void)timestamp; + ALOGVV("Got preview frame for timestamp %lld", timestamp); + + if (mState != RUNNING) return; + + mFrameList.editItemAt(mFrameListHead) = frame; + mFrameListHead = (mFrameListHead + 1) % kFrameListDepth; + + findMatchesLocked(); +} + +void ZslProcessor::onBufferReleased(buffer_handle_t *handle) { + Mutex::Autolock l(mInputMutex); + + // Verify that the buffer is in our queue + size_t i = 0; + for (; i < mZslQueue.size(); i++) { + if (&(mZslQueue[i].buffer.mGraphicBuffer->handle) == handle) break; + } + if (i == mZslQueue.size()) { + ALOGW("%s: Released buffer %p not found in queue", + __FUNCTION__, handle); + } + + // Erase entire ZSL queue since we've now completed the capture and preview + // is stopped. 
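+ // Dropping the whole queue here, rather than just the released entry,
+ // keeps a back-to-back capture from reusing a buffer that is older than
+ // the one just reprocessed; the queue refills once preview runs again.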
+ clearZslQueueLocked(); + + mState = RUNNING; +} + +status_t ZslProcessor::updateStream(const Parameters ¶ms) { + ATRACE_CALL(); + ALOGV("%s: Configuring ZSL streams", __FUNCTION__); + status_t res; + + Mutex::Autolock l(mInputMutex); + + sp client = mClient.promote(); + if (client == 0) { + ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + if (mZslConsumer == 0) { + // Create CPU buffer queue endpoint + sp bq = new BufferQueue(); + mZslConsumer = new BufferItemConsumer(bq, + GRALLOC_USAGE_HW_CAMERA_ZSL, + kZslBufferDepth); + mZslConsumer->setFrameAvailableListener(this); + mZslConsumer->setName(String8("Camera2Client::ZslConsumer")); + mZslWindow = new Surface( + mZslConsumer->getProducerInterface()); + } + + if (mZslStreamId != NO_STREAM) { + // Check if stream parameters have to change + uint32_t currentWidth, currentHeight; + res = device->getStreamInfo(mZslStreamId, + ¤tWidth, ¤tHeight, 0); + if (res != OK) { + ALOGE("%s: Camera %d: Error querying capture output stream info: " + "%s (%d)", __FUNCTION__, + mId, strerror(-res), res); + return res; + } + if (currentWidth != (uint32_t)params.fastInfo.arrayWidth || + currentHeight != (uint32_t)params.fastInfo.arrayHeight) { + res = device->deleteReprocessStream(mZslReprocessStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete old reprocess stream " + "for ZSL: %s (%d)", __FUNCTION__, + mId, strerror(-res), res); + return res; + } + ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed", + __FUNCTION__, mId, mZslStreamId); + res = device->deleteStream(mZslStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to delete old output stream " + "for ZSL: %s (%d)", __FUNCTION__, + mId, strerror(-res), res); + return res; + } + mZslStreamId = NO_STREAM; + } + } + + if (mZslStreamId == NO_STREAM) { + // Create stream for HAL production + // TODO: Sort out better way to select resolution for ZSL + int streamType = params.quirks.useZslFormat ? 
+ (int)CAMERA2_HAL_PIXEL_FORMAT_ZSL : + (int)HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; + res = device->createStream(mZslWindow, + params.fastInfo.arrayWidth, params.fastInfo.arrayHeight, + streamType, 0, + &mZslStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Can't create output stream for ZSL: " + "%s (%d)", __FUNCTION__, mId, + strerror(-res), res); + return res; + } + res = device->createReprocessStreamFromStream(mZslStreamId, + &mZslReprocessStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Can't create reprocess stream for ZSL: " + "%s (%d)", __FUNCTION__, mId, + strerror(-res), res); + return res; + } + } + client->registerFrameListener(Camera2Client::kPreviewRequestIdStart, + Camera2Client::kPreviewRequestIdEnd, + this); + + return OK; +} + +status_t ZslProcessor::deleteStream() { + ATRACE_CALL(); + status_t res; + + Mutex::Autolock l(mInputMutex); + + if (mZslStreamId != NO_STREAM) { + sp device = mDevice.promote(); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + clearZslQueueLocked(); + + res = device->deleteReprocessStream(mZslReprocessStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Cannot delete ZSL reprocessing stream %d: " + "%s (%d)", __FUNCTION__, mId, + mZslReprocessStreamId, strerror(-res), res); + return res; + } + + mZslReprocessStreamId = NO_STREAM; + res = device->deleteStream(mZslStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: " + "%s (%d)", __FUNCTION__, mId, + mZslStreamId, strerror(-res), res); + return res; + } + + mZslWindow.clear(); + mZslConsumer.clear(); + + mZslStreamId = NO_STREAM; + } + return OK; +} + +int ZslProcessor::getStreamId() const { + Mutex::Autolock l(mInputMutex); + return mZslStreamId; +} + +status_t ZslProcessor::pushToReprocess(int32_t requestId) { + ALOGV("%s: Send in reprocess request with id %d", + __FUNCTION__, requestId); + Mutex::Autolock l(mInputMutex); + status_t res; + sp client = mClient.promote(); + + if (client == 0) { + ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + IF_ALOGV() { + dumpZslQueue(-1); + } + + if (mZslQueueTail != mZslQueueHead) { + CameraMetadata request; + size_t index = mZslQueueTail; + while (index != mZslQueueHead) { + if (!mZslQueue[index].frame.isEmpty()) { + request = mZslQueue[index].frame; + break; + } + index = (index + 1) % kZslBufferDepth; + } + if (index == mZslQueueHead) { + ALOGV("%s: ZSL queue has no valid frames to send yet.", + __FUNCTION__); + return NOT_ENOUGH_DATA; + } + // Verify that the frame is reasonable for reprocessing + + camera_metadata_entry_t entry; + entry = request.find(ANDROID_CONTROL_AE_STATE); + if (entry.count == 0) { + ALOGE("%s: ZSL queue frame has no AE state field!", + __FUNCTION__); + return BAD_VALUE; + } + if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED && + entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) { + ALOGV("%s: ZSL queue frame AE state is %d, need full capture", + __FUNCTION__, entry.data.u8[0]); + return NOT_ENOUGH_DATA; + } + + buffer_handle_t *handle = + &(mZslQueue[index].buffer.mGraphicBuffer->handle); + + uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS; + res = request.update(ANDROID_REQUEST_TYPE, + &requestType, 1); + uint8_t inputStreams[1] = + { static_cast(mZslReprocessStreamId) }; + if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS, + inputStreams, 1); + uint8_t outputStreams[1] = + { static_cast(client->getCaptureStreamId()) }; + if 
(res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS, + outputStreams, 1); + res = request.update(ANDROID_REQUEST_ID, + &requestId, 1); + + if (res != OK ) { + ALOGE("%s: Unable to update frame to a reprocess request", __FUNCTION__); + return INVALID_OPERATION; + } + + res = client->stopStream(); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: " + "%s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return INVALID_OPERATION; + } + // TODO: have push-and-clear be atomic + res = client->getCameraDevice()->pushReprocessBuffer(mZslReprocessStreamId, + handle, this); + if (res != OK) { + ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + // Update JPEG settings + { + SharedParameters::Lock l(client->getParameters()); + res = l.mParameters.updateRequestJpeg(&request); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL " + "capture request: %s (%d)", __FUNCTION__, + mId, + strerror(-res), res); + return res; + } + } + + mLatestCapturedRequest = request; + res = client->getCameraDevice()->capture(request); + if (res != OK ) { + ALOGE("%s: Unable to send ZSL reprocess request to capture: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + mState = LOCKED; + } else { + ALOGV("%s: No ZSL buffers yet", __FUNCTION__); + return NOT_ENOUGH_DATA; + } + return OK; +} + +status_t ZslProcessor::clearZslQueue() { + Mutex::Autolock l(mInputMutex); + // If in middle of capture, can't clear out queue + if (mState == LOCKED) return OK; + + return clearZslQueueLocked(); +} + +status_t ZslProcessor::clearZslQueueLocked() { + for (size_t i = 0; i < mZslQueue.size(); i++) { + if (mZslQueue[i].buffer.mTimestamp != 0) { + mZslConsumer->releaseBuffer(mZslQueue[i].buffer); + } + mZslQueue.replaceAt(i); + } + mZslQueueHead = 0; + mZslQueueTail = 0; + return OK; +} + +void ZslProcessor::dump(int fd, const Vector& /*args*/) const { + Mutex::Autolock l(mInputMutex); + if (!mLatestCapturedRequest.isEmpty()) { + String8 result(" Latest ZSL capture request:\n"); + write(fd, result.string(), result.size()); + mLatestCapturedRequest.dump(fd, 2, 6); + } else { + String8 result(" Latest ZSL capture request: none yet\n"); + write(fd, result.string(), result.size()); + } + dumpZslQueue(fd); +} + +bool ZslProcessor::threadLoop() { + status_t res; + + { + Mutex::Autolock l(mInputMutex); + while (!mZslBufferAvailable) { + res = mZslBufferAvailableSignal.waitRelative(mInputMutex, + kWaitDuration); + if (res == TIMED_OUT) return true; + } + mZslBufferAvailable = false; + } + + do { + res = processNewZslBuffer(); + } while (res == OK); + + return true; +} + +status_t ZslProcessor::processNewZslBuffer() { + ATRACE_CALL(); + status_t res; + sp zslConsumer; + { + Mutex::Autolock l(mInputMutex); + if (mZslConsumer == 0) return OK; + zslConsumer = mZslConsumer; + } + ALOGVV("Trying to get next buffer"); + BufferItemConsumer::BufferItem item; + res = zslConsumer->acquireBuffer(&item, 0); + if (res != OK) { + if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { + ALOGE("%s: Camera %d: Error receiving ZSL image buffer: " + "%s (%d)", __FUNCTION__, + mId, strerror(-res), res); + } else { + ALOGVV(" No buffer"); + } + return res; + } + + Mutex::Autolock l(mInputMutex); + + if (mState == LOCKED) { + ALOGVV("In capture, discarding new ZSL buffers"); + zslConsumer->releaseBuffer(item); + return OK; + } + + ALOGVV("Got ZSL buffer: head: %d, tail: %d", mZslQueueHead, mZslQueueTail); + + if ( 
(mZslQueueHead + 1) % kZslBufferDepth == mZslQueueTail) { + ALOGVV("Releasing oldest buffer"); + zslConsumer->releaseBuffer(mZslQueue[mZslQueueTail].buffer); + mZslQueue.replaceAt(mZslQueueTail); + mZslQueueTail = (mZslQueueTail + 1) % kZslBufferDepth; + } + + ZslPair &queueHead = mZslQueue.editItemAt(mZslQueueHead); + + queueHead.buffer = item; + queueHead.frame.release(); + + mZslQueueHead = (mZslQueueHead + 1) % kZslBufferDepth; + + ALOGVV(" Acquired buffer, timestamp %lld", queueHead.buffer.mTimestamp); + + findMatchesLocked(); + + return OK; +} + +void ZslProcessor::findMatchesLocked() { + ALOGVV("Scanning"); + for (size_t i = 0; i < mZslQueue.size(); i++) { + ZslPair &queueEntry = mZslQueue.editItemAt(i); + nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp; + IF_ALOGV() { + camera_metadata_entry_t entry; + nsecs_t frameTimestamp = 0; + if (!queueEntry.frame.isEmpty()) { + entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP); + frameTimestamp = entry.data.i64[0]; + } + ALOGVV(" %d: b: %lld\tf: %lld", i, + bufferTimestamp, frameTimestamp ); + } + if (queueEntry.frame.isEmpty() && bufferTimestamp != 0) { + // Have buffer, no matching frame. Look for one + for (size_t j = 0; j < mFrameList.size(); j++) { + bool match = false; + CameraMetadata &frame = mFrameList.editItemAt(j); + if (!frame.isEmpty()) { + camera_metadata_entry_t entry; + entry = frame.find(ANDROID_SENSOR_TIMESTAMP); + if (entry.count == 0) { + ALOGE("%s: Can't find timestamp in frame!", + __FUNCTION__); + continue; + } + nsecs_t frameTimestamp = entry.data.i64[0]; + if (bufferTimestamp == frameTimestamp) { + ALOGVV("%s: Found match %lld", __FUNCTION__, + frameTimestamp); + match = true; + } else { + int64_t delta = abs(bufferTimestamp - frameTimestamp); + if ( delta < 1000000) { + ALOGVV("%s: Found close match %lld (delta %lld)", + __FUNCTION__, bufferTimestamp, delta); + match = true; + } + } + } + if (match) { + queueEntry.frame.acquire(frame); + break; + } + } + } + } +} + +void ZslProcessor::dumpZslQueue(int fd) const { + String8 header("ZSL queue contents:"); + String8 indent(" "); + ALOGV("%s", header.string()); + if (fd != -1) { + header = indent + header + "\n"; + write(fd, header.string(), header.size()); + } + for (size_t i = 0; i < mZslQueue.size(); i++) { + const ZslPair &queueEntry = mZslQueue[i]; + nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp; + camera_metadata_ro_entry_t entry; + nsecs_t frameTimestamp = 0; + int frameAeState = -1; + if (!queueEntry.frame.isEmpty()) { + entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP); + if (entry.count > 0) frameTimestamp = entry.data.i64[0]; + entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE); + if (entry.count > 0) frameAeState = entry.data.u8[0]; + } + String8 result = + String8::format(" %d: b: %lld\tf: %lld, AE state: %d", i, + bufferTimestamp, frameTimestamp, frameAeState); + ALOGV("%s", result.string()); + if (fd != -1) { + result = indent + result + "\n"; + write(fd, result.string(), result.size()); + } + + } +} + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h new file mode 100644 index 0000000..5fb178f --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h @@ -0,0 +1,135 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR_H + +#include +#include +#include +#include +#include +#include +#include + +#include "common/CameraDeviceBase.h" +#include "api1/client2/ZslProcessorInterface.h" +#include "api1/client2/FrameProcessor.h" + +namespace android { + +class Camera2Client; + +namespace camera2 { + +class CaptureSequencer; +class Parameters; + +/*** + * ZSL queue processing + */ +class ZslProcessor: + virtual public Thread, + virtual public BufferItemConsumer::FrameAvailableListener, + virtual public FrameProcessor::FilteredListener, + virtual public CameraDeviceBase::BufferReleasedListener, + public ZslProcessorInterface { + public: + ZslProcessor(sp client, wp sequencer); + ~ZslProcessor(); + + // From mZslConsumer + virtual void onFrameAvailable(); + // From FrameProcessor + virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame); + + virtual void onBufferReleased(buffer_handle_t *handle); + + /** + **************************************** + * ZslProcessorInterface implementation * + **************************************** + */ + + status_t updateStream(const Parameters ¶ms); + status_t deleteStream(); + int getStreamId() const; + + status_t pushToReprocess(int32_t requestId); + status_t clearZslQueue(); + + void dump(int fd, const Vector& args) const; + private: + static const nsecs_t kWaitDuration = 10000000; // 10 ms + + enum { + RUNNING, + LOCKED + } mState; + + wp mClient; + wp mDevice; + wp mSequencer; + int mId; + + mutable Mutex mInputMutex; + bool mZslBufferAvailable; + Condition mZslBufferAvailableSignal; + + enum { + NO_STREAM = -1 + }; + + int mZslStreamId; + int mZslReprocessStreamId; + sp mZslConsumer; + sp mZslWindow; + + struct ZslPair { + BufferItemConsumer::BufferItem buffer; + CameraMetadata frame; + }; + + static const size_t kZslBufferDepth = 4; + static const size_t kFrameListDepth = kZslBufferDepth * 2; + Vector mFrameList; + size_t mFrameListHead; + + ZslPair mNextPair; + + Vector mZslQueue; + size_t mZslQueueHead; + size_t mZslQueueTail; + + CameraMetadata mLatestCapturedRequest; + + virtual bool threadLoop(); + + status_t processNewZslBuffer(); + + // Match up entries from frame list to buffers in ZSL queue + void findMatchesLocked(); + + status_t clearZslQueueLocked(); + + void dumpZslQueue(int id) const; +}; + + +}; //namespace camera2 +}; //namespace android + +#endif diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp new file mode 100644 index 0000000..7c4da50 --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp @@ -0,0 +1,482 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera2-ZslProcessor3" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 +//#define LOG_NNDEBUG 0 + +#ifdef LOG_NNDEBUG +#define ALOGVV(...) ALOGV(__VA_ARGS__) +#else +#define ALOGVV(...) ((void)0) +#endif + +#include +#include +#include + +#include "common/CameraDeviceBase.h" +#include "api1/Camera2Client.h" +#include "api1/client2/CaptureSequencer.h" +#include "api1/client2/ZslProcessor3.h" +#include "device3/Camera3Device.h" + +namespace android { +namespace camera2 { + +ZslProcessor3::ZslProcessor3( + sp client, + wp sequencer): + Thread(false), + mState(RUNNING), + mClient(client), + mSequencer(sequencer), + mId(client->getCameraId()), + mZslStreamId(NO_STREAM), + mFrameListHead(0), + mZslQueueHead(0), + mZslQueueTail(0) { + mZslQueue.insertAt(0, kZslBufferDepth); + mFrameList.insertAt(0, kFrameListDepth); + sp captureSequencer = mSequencer.promote(); + if (captureSequencer != 0) captureSequencer->setZslProcessor(this); +} + +ZslProcessor3::~ZslProcessor3() { + ALOGV("%s: Exit", __FUNCTION__); + deleteStream(); +} + +void ZslProcessor3::onFrameAvailable(int32_t /*frameId*/, + const CameraMetadata &frame) { + Mutex::Autolock l(mInputMutex); + camera_metadata_ro_entry_t entry; + entry = frame.find(ANDROID_SENSOR_TIMESTAMP); + nsecs_t timestamp = entry.data.i64[0]; + (void)timestamp; + ALOGVV("Got preview metadata for timestamp %lld", timestamp); + + if (mState != RUNNING) return; + + mFrameList.editItemAt(mFrameListHead) = frame; + mFrameListHead = (mFrameListHead + 1) % kFrameListDepth; +} + +status_t ZslProcessor3::updateStream(const Parameters ¶ms) { + ATRACE_CALL(); + ALOGV("%s: Configuring ZSL streams", __FUNCTION__); + status_t res; + + Mutex::Autolock l(mInputMutex); + + sp client = mClient.promote(); + if (client == 0) { + ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + sp device = + static_cast(client->getCameraDevice().get()); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + if (mZslStreamId != NO_STREAM) { + // Check if stream parameters have to change + uint32_t currentWidth, currentHeight; + res = device->getStreamInfo(mZslStreamId, + ¤tWidth, ¤tHeight, 0); + if (res != OK) { + ALOGE("%s: Camera %d: Error querying capture output stream info: " + "%s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + return res; + } + if (currentWidth != (uint32_t)params.fastInfo.arrayWidth || + currentHeight != (uint32_t)params.fastInfo.arrayHeight) { + ALOGV("%s: Camera %d: Deleting stream %d since the buffer " + "dimensions changed", + __FUNCTION__, client->getCameraId(), mZslStreamId); + res = device->deleteStream(mZslStreamId); + if (res == -EBUSY) { + ALOGV("%s: Camera %d: Device is busy, call updateStream again " + " after it becomes idle", __FUNCTION__, mId); + return res; + } else if(res != OK) { + ALOGE("%s: Camera %d: Unable to delete old output stream " + "for ZSL: %s (%d)", __FUNCTION__, + client->getCameraId(), strerror(-res), res); + return res; + } + mZslStreamId = NO_STREAM; + 
} + } + + if (mZslStreamId == NO_STREAM) { + // Create stream for HAL production + // TODO: Sort out better way to select resolution for ZSL + + // Note that format specified internally in Camera3ZslStream + res = device->createZslStream( + params.fastInfo.arrayWidth, params.fastInfo.arrayHeight, + kZslBufferDepth, + &mZslStreamId, + &mZslStream); + if (res != OK) { + ALOGE("%s: Camera %d: Can't create ZSL stream: " + "%s (%d)", __FUNCTION__, client->getCameraId(), + strerror(-res), res); + return res; + } + } + client->registerFrameListener(Camera2Client::kPreviewRequestIdStart, + Camera2Client::kPreviewRequestIdEnd, + this); + + return OK; +} + +status_t ZslProcessor3::deleteStream() { + ATRACE_CALL(); + status_t res; + + Mutex::Autolock l(mInputMutex); + + if (mZslStreamId != NO_STREAM) { + sp client = mClient.promote(); + if (client == 0) { + ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + sp device = + reinterpret_cast(client->getCameraDevice().get()); + if (device == 0) { + ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + res = device->deleteStream(mZslStreamId); + if (res != OK) { + ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: " + "%s (%d)", __FUNCTION__, client->getCameraId(), + mZslStreamId, strerror(-res), res); + return res; + } + + mZslStreamId = NO_STREAM; + } + return OK; +} + +int ZslProcessor3::getStreamId() const { + Mutex::Autolock l(mInputMutex); + return mZslStreamId; +} + +status_t ZslProcessor3::pushToReprocess(int32_t requestId) { + ALOGV("%s: Send in reprocess request with id %d", + __FUNCTION__, requestId); + Mutex::Autolock l(mInputMutex); + status_t res; + sp client = mClient.promote(); + + if (client == 0) { + ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); + return INVALID_OPERATION; + } + + IF_ALOGV() { + dumpZslQueue(-1); + } + + size_t metadataIdx; + nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx); + + if (candidateTimestamp == -1) { + ALOGE("%s: Could not find good candidate for ZSL reprocessing", + __FUNCTION__); + return NOT_ENOUGH_DATA; + } + + res = mZslStream->enqueueInputBufferByTimestamp(candidateTimestamp, + /*actualTimestamp*/NULL); + + if (res == mZslStream->NO_BUFFER_AVAILABLE) { + ALOGV("%s: No ZSL buffers yet", __FUNCTION__); + return NOT_ENOUGH_DATA; + } else if (res != OK) { + ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + { + CameraMetadata request = mFrameList[metadataIdx]; + + // Verify that the frame is reasonable for reprocessing + + camera_metadata_entry_t entry; + entry = request.find(ANDROID_CONTROL_AE_STATE); + if (entry.count == 0) { + ALOGE("%s: ZSL queue frame has no AE state field!", + __FUNCTION__); + return BAD_VALUE; + } + if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED && + entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) { + ALOGV("%s: ZSL queue frame AE state is %d, need full capture", + __FUNCTION__, entry.data.u8[0]); + return NOT_ENOUGH_DATA; + } + + uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS; + res = request.update(ANDROID_REQUEST_TYPE, + &requestType, 1); + uint8_t inputStreams[1] = + { static_cast(mZslStreamId) }; + if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS, + inputStreams, 1); + // TODO: Shouldn't we also update the latest preview frame? 
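+ // Taken together, the request sent back to the device effectively carries:
+ //   ANDROID_REQUEST_TYPE           = ANDROID_REQUEST_TYPE_REPROCESS
+ //   ANDROID_REQUEST_INPUT_STREAMS  = { mZslStreamId }
+ //   ANDROID_REQUEST_OUTPUT_STREAMS = { client->getCaptureStreamId() }
+ //   ANDROID_REQUEST_ID             = requestId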
+ uint8_t outputStreams[1] = + { static_cast(client->getCaptureStreamId()) }; + if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS, + outputStreams, 1); + res = request.update(ANDROID_REQUEST_ID, + &requestId, 1); + + if (res != OK ) { + ALOGE("%s: Unable to update frame to a reprocess request", + __FUNCTION__); + return INVALID_OPERATION; + } + + res = client->stopStream(); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: " + "%s (%d)", + __FUNCTION__, client->getCameraId(), strerror(-res), res); + return INVALID_OPERATION; + } + + // Update JPEG settings + { + SharedParameters::Lock l(client->getParameters()); + res = l.mParameters.updateRequestJpeg(&request); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL " + "capture request: %s (%d)", __FUNCTION__, + client->getCameraId(), + strerror(-res), res); + return res; + } + } + + mLatestCapturedRequest = request; + res = client->getCameraDevice()->capture(request); + if (res != OK ) { + ALOGE("%s: Unable to send ZSL reprocess request to capture: %s" + " (%d)", __FUNCTION__, strerror(-res), res); + return res; + } + + mState = LOCKED; + } + + return OK; +} + +status_t ZslProcessor3::clearZslQueue() { + Mutex::Autolock l(mInputMutex); + // If in middle of capture, can't clear out queue + if (mState == LOCKED) return OK; + + return clearZslQueueLocked(); +} + +status_t ZslProcessor3::clearZslQueueLocked() { + if (mZslStream != 0) { + return mZslStream->clearInputRingBuffer(); + } + return OK; +} + +void ZslProcessor3::dump(int fd, const Vector& /*args*/) const { + Mutex::Autolock l(mInputMutex); + if (!mLatestCapturedRequest.isEmpty()) { + String8 result(" Latest ZSL capture request:\n"); + write(fd, result.string(), result.size()); + mLatestCapturedRequest.dump(fd, 2, 6); + } else { + String8 result(" Latest ZSL capture request: none yet\n"); + write(fd, result.string(), result.size()); + } + dumpZslQueue(fd); +} + +bool ZslProcessor3::threadLoop() { + // TODO: remove dependency on thread. For now, shut thread down right + // away. 
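+ // Returning false makes this the only iteration of Thread::threadLoop();
+ // ZslProcessor3 is driven entirely by the onFrameAvailable() and
+ // onBufferReleased() callbacks instead.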
+ return false; +} + +void ZslProcessor3::dumpZslQueue(int fd) const { + String8 header("ZSL queue contents:"); + String8 indent(" "); + ALOGV("%s", header.string()); + if (fd != -1) { + header = indent + header + "\n"; + write(fd, header.string(), header.size()); + } + for (size_t i = 0; i < mZslQueue.size(); i++) { + const ZslPair &queueEntry = mZslQueue[i]; + nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp; + camera_metadata_ro_entry_t entry; + nsecs_t frameTimestamp = 0; + int frameAeState = -1; + if (!queueEntry.frame.isEmpty()) { + entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP); + if (entry.count > 0) frameTimestamp = entry.data.i64[0]; + entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE); + if (entry.count > 0) frameAeState = entry.data.u8[0]; + } + String8 result = + String8::format(" %d: b: %lld\tf: %lld, AE state: %d", i, + bufferTimestamp, frameTimestamp, frameAeState); + ALOGV("%s", result.string()); + if (fd != -1) { + result = indent + result + "\n"; + write(fd, result.string(), result.size()); + } + + } +} + +nsecs_t ZslProcessor3::getCandidateTimestampLocked(size_t* metadataIdx) const { + /** + * Find the smallest timestamp we know about so far + * - ensure that aeState is either converged or locked + */ + + size_t idx = 0; + nsecs_t minTimestamp = -1; + + size_t emptyCount = mFrameList.size(); + + for (size_t j = 0; j < mFrameList.size(); j++) { + const CameraMetadata &frame = mFrameList[j]; + if (!frame.isEmpty()) { + + emptyCount--; + + camera_metadata_ro_entry_t entry; + entry = frame.find(ANDROID_SENSOR_TIMESTAMP); + if (entry.count == 0) { + ALOGE("%s: Can't find timestamp in frame!", + __FUNCTION__); + continue; + } + nsecs_t frameTimestamp = entry.data.i64[0]; + if (minTimestamp > frameTimestamp || minTimestamp == -1) { + + entry = frame.find(ANDROID_CONTROL_AE_STATE); + + if (entry.count == 0) { + /** + * This is most likely a HAL bug. The aeState field is + * mandatory, so it should always be in a metadata packet. + */ + ALOGW("%s: ZSL queue frame has no AE state field!", + __FUNCTION__); + continue; + } + if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED && + entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) { + ALOGVV("%s: ZSL queue frame AE state is %d, need " + "full capture", __FUNCTION__, entry.data.u8[0]); + continue; + } + + minTimestamp = frameTimestamp; + idx = j; + } + + ALOGVV("%s: Saw timestamp %lld", __FUNCTION__, frameTimestamp); + } + } + + if (emptyCount == mFrameList.size()) { + /** + * This could be mildly bad and means our ZSL was triggered before + * there were any frames yet received by the camera framework. + * + * This is a fairly corner case which can happen under: + * + a user presses the shutter button real fast when the camera starts + * (startPreview followed immediately by takePicture). + * + burst capture case (hitting shutter button as fast possible) + * + * If this happens in steady case (preview running for a while, call + * a single takePicture) then this might be a fwk bug. 
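+ *
+ * In any of these cases the function returns -1, and the caller
+ * (pushToReprocess) backs off with NOT_ENOUGH_DATA.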
+ */ + ALOGW("%s: ZSL queue has no metadata frames", __FUNCTION__); + } + + ALOGV("%s: Candidate timestamp %lld (idx %d), empty frames: %d", + __FUNCTION__, minTimestamp, idx, emptyCount); + + if (metadataIdx) { + *metadataIdx = idx; + } + + return minTimestamp; +} + +void ZslProcessor3::onBufferAcquired(const BufferInfo& /*bufferInfo*/) { + // Intentionally left empty + // Although theoretically we could use this to get better dump info +} + +void ZslProcessor3::onBufferReleased(const BufferInfo& bufferInfo) { + Mutex::Autolock l(mInputMutex); + + // ignore output buffers + if (bufferInfo.mOutput) { + return; + } + + // TODO: Verify that the buffer is in our queue by looking at timestamp + // theoretically unnecessary unless we change the following assumptions: + // -- only 1 buffer reprocessed at a time (which is the case now) + + // Erase entire ZSL queue since we've now completed the capture and preview + // is stopped. + // + // We need to guarantee that if we do two back-to-back captures, + // the second won't use a buffer that's older/the same as the first, which + // is theoretically possible if we don't clear out the queue and the + // selection criteria is something like 'newest'. Clearing out the queue + // on a completed capture ensures we'll only use new data. + ALOGV("%s: Memory optimization, clearing ZSL queue", + __FUNCTION__); + clearZslQueueLocked(); + + // Required so we accept more ZSL requests + mState = RUNNING; +} + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h new file mode 100644 index 0000000..35b85f5 --- /dev/null +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h @@ -0,0 +1,136 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR3_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR3_H
+
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+
+#include "api1/client2/FrameProcessor.h"
+#include "api1/client2/ZslProcessorInterface.h"
+#include "device3/Camera3ZslStream.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+class CaptureSequencer;
+class Parameters;
+
+/***
+ * ZSL queue processing
+ */
+class ZslProcessor3 :
+        public ZslProcessorInterface,
+        public camera3::Camera3StreamBufferListener,
+        virtual public Thread,
+        virtual public FrameProcessor::FilteredListener {
+  public:
+    ZslProcessor3(sp<Camera2Client> client, wp<CaptureSequencer> sequencer);
+    ~ZslProcessor3();
+
+    // From FrameProcessor
+    virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame);
+
+    /**
+     ****************************************
+     * ZslProcessorInterface implementation *
+     ****************************************
+     */
+
+    virtual status_t updateStream(const Parameters &params);
+    virtual status_t deleteStream();
+    virtual int getStreamId() const;
+
+    virtual status_t pushToReprocess(int32_t requestId);
+    virtual status_t clearZslQueue();
+
+    void dump(int fd, const Vector<String16>& args) const;
+
+  protected:
+    /**
+     **********************************************
+     * Camera3StreamBufferListener implementation *
+     **********************************************
+     */
+    typedef camera3::Camera3StreamBufferListener::BufferInfo BufferInfo;
+    // Buffer was acquired by the HAL
+    virtual void onBufferAcquired(const BufferInfo& bufferInfo);
+    // Buffer was released by the HAL
+    virtual void onBufferReleased(const BufferInfo& bufferInfo);
+
+  private:
+    static const nsecs_t kWaitDuration = 10000000; // 10 ms
+
+    enum {
+        RUNNING,
+        LOCKED
+    } mState;
+
+    wp<Camera2Client> mClient;
+    wp<CaptureSequencer> mSequencer;
+
+    const int mId;
+
+    mutable Mutex mInputMutex;
+
+    enum {
+        NO_STREAM = -1
+    };
+
+    int mZslStreamId;
+    sp<camera3::Camera3ZslStream> mZslStream;
+
+    struct ZslPair {
+        BufferItemConsumer::BufferItem buffer;
+        CameraMetadata frame;
+    };
+
+    static const size_t kZslBufferDepth = 4;
+    static const size_t kFrameListDepth = kZslBufferDepth * 2;
+    Vector<CameraMetadata> mFrameList;
+    size_t mFrameListHead;
+
+    ZslPair mNextPair;
+
+    Vector<ZslPair> mZslQueue;
+    size_t mZslQueueHead;
+    size_t mZslQueueTail;
+
+    CameraMetadata mLatestCapturedRequest;
+
+    virtual bool threadLoop();
+
+    status_t clearZslQueueLocked();
+
+    void dumpZslQueue(int id) const;
+
+    nsecs_t getCandidateTimestampLocked(size_t* metadataIdx) const;
+};
+
+
+}; //namespace camera2
+}; //namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessorInterface.h b/services/camera/libcameraservice/api1/client2/ZslProcessorInterface.h
new file mode 100644
index 0000000..183c0c2
--- /dev/null
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessorInterface.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSORINTERFACE_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSORINTERFACE_H + +#include +#include + +namespace android { +namespace camera2 { + +class Parameters; + +class ZslProcessorInterface : virtual public RefBase { +public: + + // Get ID for use with android.request.outputStreams / inputStreams + virtual int getStreamId() const = 0; + + // Update the streams by recreating them if the size/format has changed + virtual status_t updateStream(const Parameters& params) = 0; + + // Delete the underlying CameraDevice streams + virtual status_t deleteStream() = 0; + + /** + * Submits a ZSL capture request (id = requestId) + * + * An appropriate ZSL buffer is selected by the closest timestamp, + * then we push that buffer to be reprocessed by the HAL. + * A capture request is created and submitted on behalf of the client. + */ + virtual status_t pushToReprocess(int32_t requestId) = 0; + + // Flush the ZSL buffer queue, freeing up all the buffers + virtual status_t clearZslQueue() = 0; + + // (Debugging only) Dump the current state to the specified file descriptor + virtual void dump(int fd, const Vector& args) const = 0; +}; + +}; //namespace camera2 +}; //namespace android + +#endif diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp new file mode 100644 index 0000000..414316d --- /dev/null +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp @@ -0,0 +1,551 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CameraDeviceClient" +#define ATRACE_TAG ATRACE_TAG_CAMERA +// #define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include + +#include "common/CameraDeviceBase.h" +#include "api2/CameraDeviceClient.h" + + + +namespace android { +using namespace camera2; + +CameraDeviceClientBase::CameraDeviceClientBase( + const sp& cameraService, + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid) : + BasicClient(cameraService, remoteCallback->asBinder(), clientPackageName, + cameraId, cameraFacing, clientPid, clientUid, servicePid), + mRemoteCallback(remoteCallback) { +} +void CameraDeviceClientBase::notifyError() { + // Thread safe. Don't bother locking. 
+ sp remoteCb = mRemoteCallback; + + if (remoteCb != 0) { + remoteCb->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0); + } +} + +// Interface used by CameraService + +CameraDeviceClient::CameraDeviceClient(const sp& cameraService, + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid) : + Camera2ClientBase(cameraService, remoteCallback, clientPackageName, + cameraId, cameraFacing, clientPid, clientUid, servicePid), + mRequestIdCounter(0) { + + ATRACE_CALL(); + ALOGI("CameraDeviceClient %d: Opened", cameraId); +} + +status_t CameraDeviceClient::initialize(camera_module_t *module) +{ + ATRACE_CALL(); + status_t res; + + res = Camera2ClientBase::initialize(module); + if (res != OK) { + return res; + } + + String8 threadName; + mFrameProcessor = new FrameProcessorBase(mDevice); + threadName = String8::format("CDU-%d-FrameProc", mCameraId); + mFrameProcessor->run(threadName.string()); + + mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID, + FRAME_PROCESSOR_LISTENER_MAX_ID, + /*listener*/this); + + return OK; +} + +CameraDeviceClient::~CameraDeviceClient() { +} + +status_t CameraDeviceClient::submitRequest(sp request, + bool streaming) { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + status_t res; + + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + if (request == 0) { + ALOGE("%s: Camera %d: Sent null request. Rejecting request.", + __FUNCTION__, mCameraId); + return BAD_VALUE; + } + + CameraMetadata metadata(request->mMetadata); + + if (metadata.isEmpty()) { + ALOGE("%s: Camera %d: Sent empty metadata packet. Rejecting request.", + __FUNCTION__, mCameraId); + return BAD_VALUE; + } else if (request->mSurfaceList.size() == 0) { + ALOGE("%s: Camera %d: Requests must have at least one surface target. 
" + "Rejecting request.", __FUNCTION__, mCameraId); + return BAD_VALUE; + } + + if (!enforceRequestPermissions(metadata)) { + // Callee logs + return PERMISSION_DENIED; + } + + /** + * Write in the output stream IDs which we calculate from + * the capture request's list of surface targets + */ + Vector outputStreamIds; + outputStreamIds.setCapacity(request->mSurfaceList.size()); + for (size_t i = 0; i < request->mSurfaceList.size(); ++i) { + sp surface = request->mSurfaceList[i]; + + if (surface == 0) continue; + + sp gbp = surface->getIGraphicBufferProducer(); + int idx = mStreamMap.indexOfKey(gbp->asBinder()); + + // Trying to submit request with surface that wasn't created + if (idx == NAME_NOT_FOUND) { + ALOGE("%s: Camera %d: Tried to submit a request with a surface that" + " we have not called createStream on", + __FUNCTION__, mCameraId); + return BAD_VALUE; + } + + int streamId = mStreamMap.valueAt(idx); + outputStreamIds.push_back(streamId); + ALOGV("%s: Camera %d: Appending output stream %d to request", + __FUNCTION__, mCameraId, streamId); + } + + metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0], + outputStreamIds.size()); + + // TODO: @hide ANDROID_REQUEST_ID, or use another request token + int32_t requestId = mRequestIdCounter++; + metadata.update(ANDROID_REQUEST_ID, &requestId, /*size*/1); + ALOGV("%s: Camera %d: Submitting request with ID %d", + __FUNCTION__, mCameraId, requestId); + + if (streaming) { + res = mDevice->setStreamingRequest(metadata); + if (res != OK) { + ALOGE("%s: Camera %d: Got error %d after trying to set streaming " + "request", __FUNCTION__, mCameraId, res); + } else { + mStreamingRequestList.push_back(requestId); + } + } else { + res = mDevice->capture(metadata); + if (res != OK) { + ALOGE("%s: Camera %d: Got error %d after trying to set capture", + __FUNCTION__, mCameraId, res); + } + } + + ALOGV("%s: Camera %d: End of function", __FUNCTION__, mCameraId); + if (res == OK) { + return requestId; + } + + return res; +} + +status_t CameraDeviceClient::cancelRequest(int requestId) { + ATRACE_CALL(); + ALOGV("%s, requestId = %d", __FUNCTION__, requestId); + + status_t res; + + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + Vector::iterator it, end; + for (it = mStreamingRequestList.begin(), end = mStreamingRequestList.end(); + it != end; ++it) { + if (*it == requestId) { + break; + } + } + + if (it == end) { + ALOGE("%s: Camera%d: Did not find request id %d in list of streaming " + "requests", __FUNCTION__, mCameraId, requestId); + return BAD_VALUE; + } + + res = mDevice->clearStreamingRequest(); + + if (res == OK) { + ALOGV("%s: Camera %d: Successfully cleared streaming request", + __FUNCTION__, mCameraId); + mStreamingRequestList.erase(it); + } + + return res; +} + +status_t CameraDeviceClient::deleteStream(int streamId) { + ATRACE_CALL(); + ALOGV("%s (streamId = 0x%x)", __FUNCTION__, streamId); + + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + // Guard against trying to delete non-created streams + ssize_t index = NAME_NOT_FOUND; + for (size_t i = 0; i < mStreamMap.size(); ++i) { + if (streamId == mStreamMap.valueAt(i)) { + index = i; + break; + } + } + + if (index == NAME_NOT_FOUND) { + ALOGW("%s: Camera %d: Invalid stream ID (%d) specified, no stream " + "created yet", __FUNCTION__, mCameraId, streamId); + 
return BAD_VALUE; + } + + // Also returns BAD_VALUE if stream ID was not valid + res = mDevice->deleteStream(streamId); + + if (res == BAD_VALUE) { + ALOGE("%s: Camera %d: Unexpected BAD_VALUE when deleting stream, but we" + " already checked and the stream ID (%d) should be valid.", + __FUNCTION__, mCameraId, streamId); + } else if (res == OK) { + mStreamMap.removeItemsAt(index); + + ALOGV("%s: Camera %d: Successfully deleted stream ID (%d)", + __FUNCTION__, mCameraId, streamId); + } + + return res; +} + +status_t CameraDeviceClient::createStream(int width, int height, int format, + const sp& bufferProducer) +{ + ATRACE_CALL(); + ALOGV("%s (w = %d, h = %d, f = 0x%x)", __FUNCTION__, width, height, format); + + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + // Don't create multiple streams for the same target surface + { + ssize_t index = mStreamMap.indexOfKey(bufferProducer->asBinder()); + if (index != NAME_NOT_FOUND) { + ALOGW("%s: Camera %d: Buffer producer already has a stream for it " + "(ID %d)", + __FUNCTION__, mCameraId, index); + return ALREADY_EXISTS; + } + } + + sp binder; + sp anw; + if (bufferProducer != 0) { + binder = bufferProducer->asBinder(); + anw = new Surface(bufferProducer); + } + + // TODO: remove w,h,f since we are ignoring them + + if ((res = anw->query(anw.get(), NATIVE_WINDOW_WIDTH, &width)) != OK) { + ALOGE("%s: Camera %d: Failed to query Surface width", __FUNCTION__, + mCameraId); + return res; + } + if ((res = anw->query(anw.get(), NATIVE_WINDOW_HEIGHT, &height)) != OK) { + ALOGE("%s: Camera %d: Failed to query Surface height", __FUNCTION__, + mCameraId); + return res; + } + if ((res = anw->query(anw.get(), NATIVE_WINDOW_FORMAT, &format)) != OK) { + ALOGE("%s: Camera %d: Failed to query Surface format", __FUNCTION__, + mCameraId); + return res; + } + + // FIXME: remove this override since the default format should be + // IMPLEMENTATION_DEFINED. b/9487482 + if (format >= HAL_PIXEL_FORMAT_RGBA_8888 && + format <= HAL_PIXEL_FORMAT_BGRA_8888) { + ALOGW("%s: Camera %d: Overriding format 0x%x to IMPLEMENTATION_DEFINED", + __FUNCTION__, mCameraId, format); + format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; + } + + // TODO: add startConfigure/stopConfigure call to CameraDeviceBase + // this will make it so Camera3Device doesn't call configure_streams + // after each call, but only once we are done with all. + + int streamId = -1; + if (format == HAL_PIXEL_FORMAT_BLOB) { + // JPEG buffers need to be sized for maximum possible compressed size + CameraMetadata staticInfo = mDevice->info(); + camera_metadata_entry_t entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE); + if (entry.count == 0) { + ALOGE("%s: Camera %d: Can't find maximum JPEG size in " + "static metadata!", __FUNCTION__, mCameraId); + return INVALID_OPERATION; + } + int32_t maxJpegSize = entry.data.i32[0]; + res = mDevice->createStream(anw, width, height, format, maxJpegSize, + &streamId); + } else { + // All other streams are a known size + res = mDevice->createStream(anw, width, height, format, /*size*/0, + &streamId); + } + + if (res == OK) { + mStreamMap.add(bufferProducer->asBinder(), streamId); + + ALOGV("%s: Camera %d: Successfully created a new stream ID %d", + __FUNCTION__, mCameraId, streamId); + return streamId; + } + + return res; +} + +// Create a request object from a template. 
+status_t CameraDeviceClient::createDefaultRequest(int templateId, + /*out*/ + CameraMetadata* request) +{ + ATRACE_CALL(); + ALOGV("%s (templateId = 0x%x)", __FUNCTION__, templateId); + + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + CameraMetadata metadata; + if ( (res = mDevice->createDefaultRequest(templateId, &metadata) ) == OK && + request != NULL) { + + request->swap(metadata); + } + + return res; +} + +status_t CameraDeviceClient::getCameraInfo(/*out*/CameraMetadata* info) +{ + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + status_t res = OK; + + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + if (info != NULL) { + *info = mDevice->info(); // static camera metadata + // TODO: merge with device-specific camera metadata + } + + return res; +} + +status_t CameraDeviceClient::waitUntilIdle() +{ + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + status_t res = OK; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + // FIXME: Also need check repeating burst. + if (!mStreamingRequestList.isEmpty()) { + ALOGE("%s: Camera %d: Try to waitUntilIdle when there are active streaming requests", + __FUNCTION__, mCameraId); + return INVALID_OPERATION; + } + res = mDevice->waitUntilDrained(); + ALOGV("%s Done", __FUNCTION__); + + return res; +} + +status_t CameraDeviceClient::dump(int fd, const Vector& args) { + String8 result; + result.appendFormat("CameraDeviceClient[%d] (%p) PID: %d, dump:\n", + mCameraId, + getRemoteCallback()->asBinder().get(), + mClientPid); + result.append(" State: "); + + // TODO: print dynamic/request section from most recent requests + mFrameProcessor->dump(fd, args); + + return dumpDevice(fd, args); +} + +// TODO: refactor the code below this with IProCameraUser. +// it's 100% copy-pasted, so lets not change it right now to make it easier. 
+ +void CameraDeviceClient::detachDevice() { + if (mDevice == 0) return; + + ALOGV("Camera %d: Stopping processors", mCameraId); + + mFrameProcessor->removeListener(FRAME_PROCESSOR_LISTENER_MIN_ID, + FRAME_PROCESSOR_LISTENER_MAX_ID, + /*listener*/this); + mFrameProcessor->requestExit(); + ALOGV("Camera %d: Waiting for threads", mCameraId); + mFrameProcessor->join(); + ALOGV("Camera %d: Disconnecting device", mCameraId); + + // WORKAROUND: HAL refuses to disconnect while there's streams in flight + { + mDevice->clearStreamingRequest(); + + status_t code; + if ((code = mDevice->waitUntilDrained()) != OK) { + ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__, + code); + } + } + + Camera2ClientBase::detachDevice(); +} + +/** Device-related methods */ +void CameraDeviceClient::onFrameAvailable(int32_t frameId, + const CameraMetadata& frame) { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mBinderSerializationLock); + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + + if (mRemoteCallback != NULL) { + ALOGV("%s: frame = %p ", __FUNCTION__, &frame); + mRemoteCallback->onResultReceived(frameId, frame); + } + +} + +// TODO: move to Camera2ClientBase +bool CameraDeviceClient::enforceRequestPermissions(CameraMetadata& metadata) { + + const int pid = IPCThreadState::self()->getCallingPid(); + const int selfPid = getpid(); + camera_metadata_entry_t entry; + + /** + * Mixin default important security values + * - android.led.transmit = defaulted ON + */ + CameraMetadata staticInfo = mDevice->info(); + entry = staticInfo.find(ANDROID_LED_AVAILABLE_LEDS); + for(size_t i = 0; i < entry.count; ++i) { + uint8_t led = entry.data.u8[i]; + + switch(led) { + case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: { + uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON; + if (!metadata.exists(ANDROID_LED_TRANSMIT)) { + metadata.update(ANDROID_LED_TRANSMIT, + &transmitDefault, 1); + } + break; + } + } + } + + // We can do anything! + if (pid == selfPid) { + return true; + } + + /** + * Permission check special fields in the request + * - android.led.transmit = android.permission.CAMERA_DISABLE_TRANSMIT + */ + entry = metadata.find(ANDROID_LED_TRANSMIT); + if (entry.count > 0 && entry.data.u8[0] != ANDROID_LED_TRANSMIT_ON) { + String16 permissionString = + String16("android.permission.CAMERA_DISABLE_TRANSMIT_LED"); + if (!checkCallingPermission(permissionString)) { + const int uid = IPCThreadState::self()->getCallingUid(); + ALOGE("Permission Denial: " + "can't disable transmit LED pid=%d, uid=%d", pid, uid); + return false; + } + } + + return true; +} + +} // namespace android diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h new file mode 100644 index 0000000..21d633c --- /dev/null +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h @@ -0,0 +1,141 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERADEVICECLIENT_H
+#define ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERADEVICECLIENT_H
+
+#include 
+#include 
+
+#include "CameraService.h"
+#include "common/FrameProcessorBase.h"
+#include "common/Camera2ClientBase.h"
+
+namespace android {
+
+struct CameraDeviceClientBase :
+        public CameraService::BasicClient, public BnCameraDeviceUser
+{
+    typedef ICameraDeviceCallbacks TCamCallbacks;
+
+    const sp<ICameraDeviceCallbacks>& getRemoteCallback() {
+        return mRemoteCallback;
+    }
+
+protected:
+    CameraDeviceClientBase(const sp<CameraService>& cameraService,
+            const sp<ICameraDeviceCallbacks>& remoteCallback,
+            const String16& clientPackageName,
+            int cameraId,
+            int cameraFacing,
+            int clientPid,
+            uid_t clientUid,
+            int servicePid);
+
+    virtual void notifyError();
+
+    sp<ICameraDeviceCallbacks> mRemoteCallback;
+};
+
+/**
+ * Implements the binder ICameraDeviceUser API,
+ * meant for HAL3-public implementation of
+ * android.hardware.photography.CameraDevice
+ */
+class CameraDeviceClient :
+        public Camera2ClientBase<CameraDeviceClientBase>,
+        public camera2::FrameProcessorBase::FilteredListener
+{
+public:
+    /**
+     * ICameraDeviceUser interface (see ICameraDeviceUser for details)
+     */
+
+    // Note that the callee gets a copy of the metadata.
+    virtual int submitRequest(sp<CaptureRequest> request,
+                              bool streaming = false);
+    virtual status_t cancelRequest(int requestId);
+
+    // Returns -EBUSY if device is not idle
+    virtual status_t deleteStream(int streamId);
+
+    virtual status_t createStream(
+            int width,
+            int height,
+            int format,
+            const sp<IGraphicBufferProducer>& bufferProducer);
+
+    // Create a request object from a template.
+    virtual status_t createDefaultRequest(int templateId,
+                                          /*out*/
+                                          CameraMetadata* request);
+
+    // Get the static metadata for the camera
+    // -- Caller owns the newly allocated metadata
+    virtual status_t getCameraInfo(/*out*/CameraMetadata* info);
+
+    // Wait until all the submitted requests have finished processing
+    virtual status_t waitUntilIdle();
+    /**
+     * Interface used by CameraService
+     */
+
+    CameraDeviceClient(const sp<CameraService>& cameraService,
+            const sp<ICameraDeviceCallbacks>& remoteCallback,
+            const String16& clientPackageName,
+            int cameraId,
+            int cameraFacing,
+            int clientPid,
+            uid_t clientUid,
+            int servicePid);
+    virtual ~CameraDeviceClient();
+
+    virtual status_t initialize(camera_module_t *module);
+
+    virtual status_t dump(int fd, const Vector<String16>& args);
+
+    /**
+     * Interface used by independent components of CameraDeviceClient.
+ */
+protected:
+    /** FilteredListener implementation **/
+    virtual void onFrameAvailable(int32_t frameId,
+                                  const CameraMetadata& frame);
+    virtual void detachDevice();
+
+private:
+    /** ICameraDeviceUser interface-related private members */
+
+    /** Preview callback related members */
+    sp<camera2::FrameProcessorBase> mFrameProcessor;
+    static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0;
+    static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL;
+
+    /** Utility members */
+    bool enforceRequestPermissions(CameraMetadata& metadata);
+
+    // IGraphicBufferProducer binder -> Stream ID
+    KeyedVector<sp<IBinder>, int> mStreamMap;
+
+    // Stream ID
+    Vector<int> mStreamingRequestList;
+
+    int32_t mRequestIdCounter;
+};
+
+}; // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
new file mode 100644
index 0000000..2b583e5
--- /dev/null
+++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
@@ -0,0 +1,446 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "ProCamera2Client"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include 
+#include 
+
+#include 
+#include 
+#include 
+
+#include "api_pro/ProCamera2Client.h"
+#include "common/CameraDeviceBase.h"
+
+namespace android {
+using namespace camera2;
+
+// Interface used by CameraService
+
+ProCamera2Client::ProCamera2Client(const sp<CameraService>& cameraService,
+        const sp<IProCameraCallbacks>& remoteCallback,
+        const String16& clientPackageName,
+        int cameraId,
+        int cameraFacing,
+        int clientPid,
+        uid_t clientUid,
+        int servicePid) :
+    Camera2ClientBase(cameraService, remoteCallback, clientPackageName,
+            cameraId, cameraFacing, clientPid, clientUid, servicePid)
+{
+    ATRACE_CALL();
+    ALOGI("ProCamera %d: Opened", cameraId);
+
+    mExclusiveLock = false;
+}
+
+status_t ProCamera2Client::initialize(camera_module_t *module)
+{
+    ATRACE_CALL();
+    status_t res;
+
+    res = Camera2ClientBase::initialize(module);
+    if (res != OK) {
+        return res;
+    }
+
+    String8 threadName;
+    mFrameProcessor = new FrameProcessorBase(mDevice);
+    threadName = String8::format("PC2-%d-FrameProc", mCameraId);
+    mFrameProcessor->run(threadName.string());
+
+    mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID,
+                                      FRAME_PROCESSOR_LISTENER_MAX_ID,
+                                      /*listener*/this);
+
+    return OK;
+}
+
+ProCamera2Client::~ProCamera2Client() {
+}
+
+status_t ProCamera2Client::exclusiveTryLock() {
+    ATRACE_CALL();
+    ALOGV("%s", __FUNCTION__);
+
+    Mutex::Autolock icl(mBinderSerializationLock);
+    SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
+
+    if (!mDevice.get()) return PERMISSION_DENIED;
+
+    if (!mExclusiveLock) {
+        mExclusiveLock = true;
+
+        if (mRemoteCallback != NULL) {
+            mRemoteCallback->onLockStatusChanged(
+                IProCameraCallbacks::LOCK_ACQUIRED);
+        }
+
+        ALOGV("%s: exclusive lock acquired", __FUNCTION__);
+
+        return OK;
+    }
+
+    // TODO: have a PERMISSION_DENIED case for when someone else owns the lock
+
+
// don't allow recursive locking + ALOGW("%s: exclusive lock already exists - recursive locking is not" + "allowed", __FUNCTION__); + + return ALREADY_EXISTS; +} + +status_t ProCamera2Client::exclusiveLock() { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mBinderSerializationLock); + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + + if (!mDevice.get()) return PERMISSION_DENIED; + + /** + * TODO: this should asynchronously 'wait' until the lock becomes available + * if another client already has an exclusive lock. + * + * once we have proper sharing support this will need to do + * more than just return immediately + */ + if (!mExclusiveLock) { + mExclusiveLock = true; + + if (mRemoteCallback != NULL) { + mRemoteCallback->onLockStatusChanged(IProCameraCallbacks::LOCK_ACQUIRED); + } + + ALOGV("%s: exclusive lock acquired", __FUNCTION__); + + return OK; + } + + // don't allow recursive locking + ALOGW("%s: exclusive lock already exists - recursive locking is not allowed" + , __FUNCTION__); + return ALREADY_EXISTS; +} + +status_t ProCamera2Client::exclusiveUnlock() { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mBinderSerializationLock); + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + + // don't allow unlocking if we have no lock + if (!mExclusiveLock) { + ALOGW("%s: cannot unlock, no lock was held in the first place", + __FUNCTION__); + return BAD_VALUE; + } + + mExclusiveLock = false; + if (mRemoteCallback != NULL ) { + mRemoteCallback->onLockStatusChanged( + IProCameraCallbacks::LOCK_RELEASED); + } + ALOGV("%s: exclusive lock released", __FUNCTION__); + + return OK; +} + +bool ProCamera2Client::hasExclusiveLock() { + Mutex::Autolock icl(mBinderSerializationLock); + return mExclusiveLock; +} + +void ProCamera2Client::onExclusiveLockStolen() { + ALOGV("%s: ProClient lost exclusivity (id %d)", + __FUNCTION__, mCameraId); + + Mutex::Autolock icl(mBinderSerializationLock); + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + + if (mExclusiveLock && mRemoteCallback.get() != NULL) { + mRemoteCallback->onLockStatusChanged( + IProCameraCallbacks::LOCK_STOLEN); + } + + mExclusiveLock = false; + + //TODO: we should not need to detach the device, merely reset it. + detachDevice(); +} + +status_t ProCamera2Client::submitRequest(camera_metadata_t* request, + bool streaming) { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + if (!mExclusiveLock) { + return PERMISSION_DENIED; + } + + CameraMetadata metadata(request); + + if (!enforceRequestPermissions(metadata)) { + return PERMISSION_DENIED; + } + + if (streaming) { + return mDevice->setStreamingRequest(metadata); + } else { + return mDevice->capture(metadata); + } + + // unreachable. 
thx gcc for a useless warning + return OK; +} + +status_t ProCamera2Client::cancelRequest(int requestId) { + (void)requestId; + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + if (!mExclusiveLock) { + return PERMISSION_DENIED; + } + + // TODO: implement + ALOGE("%s: not fully implemented yet", __FUNCTION__); + return INVALID_OPERATION; +} + +status_t ProCamera2Client::deleteStream(int streamId) { + ATRACE_CALL(); + ALOGV("%s (streamId = 0x%x)", __FUNCTION__, streamId); + + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + mDevice->clearStreamingRequest(); + + status_t code; + if ((code = mDevice->waitUntilDrained()) != OK) { + ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__, code); + } + + return mDevice->deleteStream(streamId); +} + +status_t ProCamera2Client::createStream(int width, int height, int format, + const sp& bufferProducer, + /*out*/ + int* streamId) +{ + if (streamId) { + *streamId = -1; + } + + ATRACE_CALL(); + ALOGV("%s (w = %d, h = %d, f = 0x%x)", __FUNCTION__, width, height, format); + + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + sp binder; + sp window; + if (bufferProducer != 0) { + binder = bufferProducer->asBinder(); + window = new Surface(bufferProducer); + } + + return mDevice->createStream(window, width, height, format, /*size*/1, + streamId); +} + +// Create a request object from a template. +// -- Caller owns the newly allocated metadata +status_t ProCamera2Client::createDefaultRequest(int templateId, + /*out*/ + camera_metadata** request) +{ + ATRACE_CALL(); + ALOGV("%s (templateId = 0x%x)", __FUNCTION__, templateId); + + if (request) { + *request = NULL; + } + + status_t res; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + CameraMetadata metadata; + if ( (res = mDevice->createDefaultRequest(templateId, &metadata) ) == OK) { + *request = metadata.release(); + } + + return res; +} + +status_t ProCamera2Client::getCameraInfo(int cameraId, + /*out*/ + camera_metadata** info) +{ + if (cameraId != mCameraId) { + return INVALID_OPERATION; + } + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + CameraMetadata deviceInfo = mDevice->info(); + *info = deviceInfo.release(); + + return OK; +} + +status_t ProCamera2Client::dump(int fd, const Vector& args) { + String8 result; + result.appendFormat("ProCamera2Client[%d] (%p) PID: %d, dump:\n", + mCameraId, + getRemoteCallback()->asBinder().get(), + mClientPid); + result.append(" State: "); + + // TODO: print dynamic/request section from most recent requests + mFrameProcessor->dump(fd, args); + + return dumpDevice(fd, args); +} + +// IProCameraUser interface + +void ProCamera2Client::detachDevice() { + if (mDevice == 0) return; + + ALOGV("Camera %d: Stopping processors", mCameraId); + + mFrameProcessor->removeListener(FRAME_PROCESSOR_LISTENER_MIN_ID, + FRAME_PROCESSOR_LISTENER_MAX_ID, + /*listener*/this); + mFrameProcessor->requestExit(); + ALOGV("Camera %d: Waiting for threads", mCameraId); + mFrameProcessor->join(); + ALOGV("Camera %d: Disconnecting device", mCameraId); + + // WORKAROUND: HAL refuses to disconnect while 
there's streams in flight + { + mDevice->clearStreamingRequest(); + + status_t code; + if ((code = mDevice->waitUntilDrained()) != OK) { + ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__, + code); + } + } + + Camera2ClientBase::detachDevice(); +} + +/** Device-related methods */ +void ProCamera2Client::onFrameAvailable(int32_t frameId, + const CameraMetadata& frame) { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + Mutex::Autolock icl(mBinderSerializationLock); + SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + + if (mRemoteCallback != NULL) { + CameraMetadata tmp(frame); + camera_metadata_t* meta = tmp.release(); + ALOGV("%s: meta = %p ", __FUNCTION__, meta); + mRemoteCallback->onResultReceived(frameId, meta); + tmp.acquire(meta); + } + +} + +bool ProCamera2Client::enforceRequestPermissions(CameraMetadata& metadata) { + + const int pid = IPCThreadState::self()->getCallingPid(); + const int selfPid = getpid(); + camera_metadata_entry_t entry; + + /** + * Mixin default important security values + * - android.led.transmit = defaulted ON + */ + CameraMetadata staticInfo = mDevice->info(); + entry = staticInfo.find(ANDROID_LED_AVAILABLE_LEDS); + for(size_t i = 0; i < entry.count; ++i) { + uint8_t led = entry.data.u8[i]; + + switch(led) { + case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: { + uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON; + if (!metadata.exists(ANDROID_LED_TRANSMIT)) { + metadata.update(ANDROID_LED_TRANSMIT, + &transmitDefault, 1); + } + break; + } + } + } + + // We can do anything! + if (pid == selfPid) { + return true; + } + + /** + * Permission check special fields in the request + * - android.led.transmit = android.permission.CAMERA_DISABLE_TRANSMIT + */ + entry = metadata.find(ANDROID_LED_TRANSMIT); + if (entry.count > 0 && entry.data.u8[0] != ANDROID_LED_TRANSMIT_ON) { + String16 permissionString = + String16("android.permission.CAMERA_DISABLE_TRANSMIT_LED"); + if (!checkCallingPermission(permissionString)) { + const int uid = IPCThreadState::self()->getCallingUid(); + ALOGE("Permission Denial: " + "can't disable transmit LED pid=%d, uid=%d", pid, uid); + return false; + } + } + + return true; +} + +} // namespace android diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.h b/services/camera/libcameraservice/api_pro/ProCamera2Client.h new file mode 100644 index 0000000..0bf6784 --- /dev/null +++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.h @@ -0,0 +1,123 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_PROCAMERA2CLIENT_H +#define ANDROID_SERVERS_CAMERA_PROCAMERA2CLIENT_H + +#include "CameraService.h" +#include "common/FrameProcessorBase.h" +#include "common/Camera2ClientBase.h" +#include "device2/Camera2Device.h" + +namespace android { + +class IMemory; +/** + * Implements the binder IProCameraUser API, + * meant for HAL2-level private API access. 
+ */ +class ProCamera2Client : + public Camera2ClientBase, + public camera2::FrameProcessorBase::FilteredListener +{ +public: + /** + * IProCameraUser interface (see IProCameraUser for details) + */ + virtual status_t exclusiveTryLock(); + virtual status_t exclusiveLock(); + virtual status_t exclusiveUnlock(); + + virtual bool hasExclusiveLock(); + + // Note that the callee gets a copy of the metadata. + virtual int submitRequest(camera_metadata_t* metadata, + bool streaming = false); + virtual status_t cancelRequest(int requestId); + + virtual status_t deleteStream(int streamId); + + virtual status_t createStream( + int width, + int height, + int format, + const sp& bufferProducer, + /*out*/ + int* streamId); + + // Create a request object from a template. + // -- Caller owns the newly allocated metadata + virtual status_t createDefaultRequest(int templateId, + /*out*/ + camera_metadata** request); + + // Get the static metadata for the camera + // -- Caller owns the newly allocated metadata + virtual status_t getCameraInfo(int cameraId, + /*out*/ + camera_metadata** info); + + /** + * Interface used by CameraService + */ + + ProCamera2Client(const sp& cameraService, + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid); + virtual ~ProCamera2Client(); + + virtual status_t initialize(camera_module_t *module); + + virtual status_t dump(int fd, const Vector& args); + + // Callbacks from camera service + virtual void onExclusiveLockStolen(); + + /** + * Interface used by independent components of ProCamera2Client. + */ + +protected: + /** FilteredListener implementation **/ + virtual void onFrameAvailable(int32_t frameId, + const CameraMetadata& frame); + virtual void detachDevice(); + +private: + /** IProCameraUser interface-related private members */ + + /** Preview callback related members */ + sp mFrameProcessor; + static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0; + static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL; + + /** Utility members */ + bool enforceRequestPermissions(CameraMetadata& metadata); + + // Whether or not we have an exclusive lock on the device + // - if no we can't modify the request queue. + // note that creating/deleting streams we own is still OK + bool mExclusiveLock; +}; + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/camera2/BurstCapture.cpp b/services/camera/libcameraservice/camera2/BurstCapture.cpp deleted file mode 100644 index 192d419..0000000 --- a/services/camera/libcameraservice/camera2/BurstCapture.cpp +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "Camera2-BurstCapture" - -#include -#include - -#include "BurstCapture.h" - -#include "../Camera2Client.h" -#include "JpegCompressor.h" - -namespace android { -namespace camera2 { - -BurstCapture::BurstCapture(wp client, wp sequencer): - mCaptureStreamId(NO_STREAM), - mClient(client), - mSequencer(sequencer) -{ -} - -BurstCapture::~BurstCapture() { -} - -status_t BurstCapture::start(Vector &/*metadatas*/, - int32_t /*firstCaptureId*/) { - ALOGE("Not completely implemented"); - return INVALID_OPERATION; -} - -void BurstCapture::onFrameAvailable() { - ALOGV("%s", __FUNCTION__); - Mutex::Autolock l(mInputMutex); - if(!mInputChanged) { - mInputChanged = true; - mInputSignal.signal(); - } -} - -bool BurstCapture::threadLoop() { - status_t res; - { - Mutex::Autolock l(mInputMutex); - while(!mInputChanged) { - res = mInputSignal.waitRelative(mInputMutex, kWaitDuration); - if(res == TIMED_OUT) return true; - } - mInputChanged = false; - } - - do { - sp client = mClient.promote(); - if(client == 0) return false; - ALOGV("%s: Calling processFrameAvailable()", __FUNCTION__); - res = processFrameAvailable(client); - } while(res == OK); - - return true; -} - -CpuConsumer::LockedBuffer* BurstCapture::jpegEncode( - CpuConsumer::LockedBuffer *imgBuffer, - int /*quality*/) -{ - ALOGV("%s", __FUNCTION__); - - CpuConsumer::LockedBuffer *imgEncoded = new CpuConsumer::LockedBuffer; - uint8_t *data = new uint8_t[ANDROID_JPEG_MAX_SIZE]; - imgEncoded->data = data; - imgEncoded->width = imgBuffer->width; - imgEncoded->height = imgBuffer->height; - imgEncoded->stride = imgBuffer->stride; - - Vector buffers; - buffers.push_back(imgBuffer); - buffers.push_back(imgEncoded); - - sp jpeg = new JpegCompressor(); - jpeg->start(buffers, 1); - - bool success = jpeg->waitForDone(10 * 1e9); - if(success) { - return buffers[1]; - } - else { - ALOGE("%s: JPEG encode timed out", __FUNCTION__); - return NULL; // TODO: maybe change function return value to status_t - } -} - -status_t BurstCapture::processFrameAvailable(sp &/*client*/) { - ALOGE("Not implemented"); - return INVALID_OPERATION; -} - -} // namespace camera2 -} // namespace android diff --git a/services/camera/libcameraservice/camera2/BurstCapture.h b/services/camera/libcameraservice/camera2/BurstCapture.h deleted file mode 100644 index a2cc893..0000000 --- a/services/camera/libcameraservice/camera2/BurstCapture.h +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_SERVERS_CAMERA_BURST_CAPTURE_H -#define ANDROID_SERVERS_CAMERA_BURST_CAPTURE_H - -#include "camera/CameraMetadata.h" -#include -#include -#include -#include "Camera2Device.h" - -namespace android { - -class Camera2Client; - -namespace camera2 { - -class CaptureSequencer; - -class BurstCapture : public virtual Thread, - public virtual CpuConsumer::FrameAvailableListener -{ -public: - BurstCapture(wp client, wp sequencer); - virtual ~BurstCapture(); - - virtual void onFrameAvailable(); - virtual status_t start(Vector &metadatas, int32_t firstCaptureId); - -protected: - Mutex mInputMutex; - bool mInputChanged; - Condition mInputSignal; - int mCaptureStreamId; - wp mClient; - wp mSequencer; - - // Should only be accessed by processing thread - enum { - NO_STREAM = -1 - }; - - CpuConsumer::LockedBuffer* jpegEncode( - CpuConsumer::LockedBuffer *imgBuffer, - int quality); - - virtual status_t processFrameAvailable(sp &client); - -private: - virtual bool threadLoop(); - static const nsecs_t kWaitDuration = 10000000; // 10 ms -}; - -} // namespace camera2 -} // namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp deleted file mode 100644 index d7bafda..0000000 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp +++ /dev/null @@ -1,539 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#define LOG_TAG "Camera2-CallbackProcessor" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include - -#include "CallbackProcessor.h" -#include -#include "../CameraDeviceBase.h" -#include "../Camera2Client.h" - -#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) ) - -namespace android { -namespace camera2 { - -CallbackProcessor::CallbackProcessor(sp client): - Thread(false), - mClient(client), - mDevice(client->getCameraDevice()), - mId(client->getCameraId()), - mCallbackAvailable(false), - mCallbackToApp(false), - mCallbackStreamId(NO_STREAM) { -} - -CallbackProcessor::~CallbackProcessor() { - ALOGV("%s: Exit", __FUNCTION__); - deleteStream(); -} - -void CallbackProcessor::onFrameAvailable() { - Mutex::Autolock l(mInputMutex); - if (!mCallbackAvailable) { - mCallbackAvailable = true; - mCallbackAvailableSignal.signal(); - } -} - -status_t CallbackProcessor::setCallbackWindow( - sp callbackWindow) { - ATRACE_CALL(); - status_t res; - - Mutex::Autolock l(mInputMutex); - - sp client = mClient.promote(); - if (client == 0) return OK; - sp device = client->getCameraDevice(); - - // If the window is changing, clear out stream if it already exists - if (mCallbackWindow != callbackWindow && mCallbackStreamId != NO_STREAM) { - res = device->deleteStream(mCallbackStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to delete old stream " - "for callbacks: %s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); - return res; - } - mCallbackStreamId = NO_STREAM; - mCallbackConsumer.clear(); - } - mCallbackWindow = callbackWindow; - mCallbackToApp = (mCallbackWindow != NULL); - - return OK; -} - -status_t CallbackProcessor::updateStream(const Parameters ¶ms) { - ATRACE_CALL(); - status_t res; - - Mutex::Autolock l(mInputMutex); - - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - // If possible, use the flexible YUV format - int32_t callbackFormat = params.previewFormat; - if (mCallbackToApp) { - // TODO: etalvala: This should use the flexible YUV format as well, but - // need to reconcile HAL2/HAL3 requirements. 
- callbackFormat = HAL_PIXEL_FORMAT_YV12; - } else if(params.fastInfo.useFlexibleYuv && - (params.previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP || - params.previewFormat == HAL_PIXEL_FORMAT_YV12) ) { - callbackFormat = HAL_PIXEL_FORMAT_YCbCr_420_888; - } - - if (!mCallbackToApp && mCallbackConsumer == 0) { - // Create CPU buffer queue endpoint, since app hasn't given us one - // Make it async to avoid disconnect deadlocks - sp bq = new BufferQueue(); - mCallbackConsumer = new CpuConsumer(bq, kCallbackHeapCount); - mCallbackConsumer->setFrameAvailableListener(this); - mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer")); - mCallbackWindow = new Surface( - mCallbackConsumer->getProducerInterface()); - } - - if (mCallbackStreamId != NO_STREAM) { - // Check if stream parameters have to change - uint32_t currentWidth, currentHeight, currentFormat; - res = device->getStreamInfo(mCallbackStreamId, - ¤tWidth, ¤tHeight, ¤tFormat); - if (res != OK) { - ALOGE("%s: Camera %d: Error querying callback output stream info: " - "%s (%d)", __FUNCTION__, mId, - strerror(-res), res); - return res; - } - if (currentWidth != (uint32_t)params.previewWidth || - currentHeight != (uint32_t)params.previewHeight || - currentFormat != (uint32_t)callbackFormat) { - // Since size should only change while preview is not running, - // assuming that all existing use of old callback stream is - // completed. - ALOGV("%s: Camera %d: Deleting stream %d since the buffer " - "parameters changed", __FUNCTION__, mId, mCallbackStreamId); - res = device->deleteStream(mCallbackStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to delete old output stream " - "for callbacks: %s (%d)", __FUNCTION__, - mId, strerror(-res), res); - return res; - } - mCallbackStreamId = NO_STREAM; - } - } - - if (mCallbackStreamId == NO_STREAM) { - ALOGV("Creating callback stream: %d x %d, format 0x%x, API format 0x%x", - params.previewWidth, params.previewHeight, - callbackFormat, params.previewFormat); - res = device->createStream(mCallbackWindow, - params.previewWidth, params.previewHeight, - callbackFormat, 0, &mCallbackStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Can't create output stream for callbacks: " - "%s (%d)", __FUNCTION__, mId, - strerror(-res), res); - return res; - } - } - - return OK; -} - -status_t CallbackProcessor::deleteStream() { - ATRACE_CALL(); - sp device; - status_t res; - { - Mutex::Autolock l(mInputMutex); - - if (mCallbackStreamId == NO_STREAM) { - return OK; - } - device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - } - res = device->waitUntilDrained(); - if (res != OK) { - ALOGE("%s: Error waiting for HAL to drain: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - - res = device->deleteStream(mCallbackStreamId); - if (res != OK) { - ALOGE("%s: Unable to delete callback stream: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - - { - Mutex::Autolock l(mInputMutex); - - mCallbackHeap.clear(); - mCallbackWindow.clear(); - mCallbackConsumer.clear(); - - mCallbackStreamId = NO_STREAM; - } - return OK; -} - -int CallbackProcessor::getStreamId() const { - Mutex::Autolock l(mInputMutex); - return mCallbackStreamId; -} - -void CallbackProcessor::dump(int /*fd*/, const Vector& /*args*/) const { -} - -bool CallbackProcessor::threadLoop() { - status_t res; - - { - Mutex::Autolock l(mInputMutex); - while (!mCallbackAvailable) { - res = 
mCallbackAvailableSignal.waitRelative(mInputMutex, - kWaitDuration); - if (res == TIMED_OUT) return true; - } - mCallbackAvailable = false; - } - - do { - sp client = mClient.promote(); - if (client == 0) { - res = discardNewCallback(); - } else { - res = processNewCallback(client); - } - } while (res == OK); - - return true; -} - -status_t CallbackProcessor::discardNewCallback() { - ATRACE_CALL(); - status_t res; - CpuConsumer::LockedBuffer imgBuffer; - res = mCallbackConsumer->lockNextBuffer(&imgBuffer); - if (res != OK) { - if (res != BAD_VALUE) { - ALOGE("%s: Camera %d: Error receiving next callback buffer: " - "%s (%d)", __FUNCTION__, mId, strerror(-res), res); - } - return res; - } - mCallbackConsumer->unlockBuffer(imgBuffer); - return OK; -} - -status_t CallbackProcessor::processNewCallback(sp &client) { - ATRACE_CALL(); - status_t res; - - sp callbackHeap; - bool useFlexibleYuv = false; - int32_t previewFormat = 0; - size_t heapIdx; - - { - /* acquire SharedParameters before mMutex so we don't dead lock - with Camera2Client code calling into StreamingProcessor */ - SharedParameters::Lock l(client->getParameters()); - Mutex::Autolock m(mInputMutex); - CpuConsumer::LockedBuffer imgBuffer; - if (mCallbackStreamId == NO_STREAM) { - ALOGV("%s: Camera %d:No stream is available" - , __FUNCTION__, mId); - return INVALID_OPERATION; - } - - ALOGV("%s: Getting buffer", __FUNCTION__); - res = mCallbackConsumer->lockNextBuffer(&imgBuffer); - if (res != OK) { - if (res != BAD_VALUE) { - ALOGE("%s: Camera %d: Error receiving next callback buffer: " - "%s (%d)", __FUNCTION__, mId, strerror(-res), res); - } - return res; - } - ALOGV("%s: Camera %d: Preview callback available", __FUNCTION__, - mId); - - if ( l.mParameters.state != Parameters::PREVIEW - && l.mParameters.state != Parameters::RECORD - && l.mParameters.state != Parameters::VIDEO_SNAPSHOT) { - ALOGV("%s: Camera %d: No longer streaming", - __FUNCTION__, mId); - mCallbackConsumer->unlockBuffer(imgBuffer); - return OK; - } - - if (! (l.mParameters.previewCallbackFlags & - CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ) { - ALOGV("%s: No longer enabled, dropping", __FUNCTION__); - mCallbackConsumer->unlockBuffer(imgBuffer); - return OK; - } - if ((l.mParameters.previewCallbackFlags & - CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) && - !l.mParameters.previewCallbackOneShot) { - ALOGV("%s: One shot mode, already sent, dropping", __FUNCTION__); - mCallbackConsumer->unlockBuffer(imgBuffer); - return OK; - } - - previewFormat = l.mParameters.previewFormat; - useFlexibleYuv = l.mParameters.fastInfo.useFlexibleYuv && - (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP || - previewFormat == HAL_PIXEL_FORMAT_YV12); - - int32_t expectedFormat = useFlexibleYuv ? 
- HAL_PIXEL_FORMAT_YCbCr_420_888 : previewFormat; - - if (imgBuffer.format != expectedFormat) { - ALOGE("%s: Camera %d: Unexpected format for callback: " - "0x%x, expected 0x%x", __FUNCTION__, mId, - imgBuffer.format, expectedFormat); - mCallbackConsumer->unlockBuffer(imgBuffer); - return INVALID_OPERATION; - } - - // In one-shot mode, stop sending callbacks after the first one - if (l.mParameters.previewCallbackFlags & - CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) { - ALOGV("%s: clearing oneshot", __FUNCTION__); - l.mParameters.previewCallbackOneShot = false; - } - - uint32_t destYStride = 0; - uint32_t destCStride = 0; - if (useFlexibleYuv) { - if (previewFormat == HAL_PIXEL_FORMAT_YV12) { - // Strides must align to 16 for YV12 - destYStride = ALIGN(imgBuffer.width, 16); - destCStride = ALIGN(destYStride / 2, 16); - } else { - // No padding for NV21 - ALOG_ASSERT(previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP, - "Unexpected preview format 0x%x", previewFormat); - destYStride = imgBuffer.width; - destCStride = destYStride / 2; - } - } else { - destYStride = imgBuffer.stride; - // don't care about cStride - } - - size_t bufferSize = Camera2Client::calculateBufferSize( - imgBuffer.width, imgBuffer.height, - previewFormat, destYStride); - size_t currentBufferSize = (mCallbackHeap == 0) ? - 0 : (mCallbackHeap->mHeap->getSize() / kCallbackHeapCount); - if (bufferSize != currentBufferSize) { - mCallbackHeap.clear(); - mCallbackHeap = new Camera2Heap(bufferSize, kCallbackHeapCount, - "Camera2Client::CallbackHeap"); - if (mCallbackHeap->mHeap->getSize() == 0) { - ALOGE("%s: Camera %d: Unable to allocate memory for callbacks", - __FUNCTION__, mId); - mCallbackConsumer->unlockBuffer(imgBuffer); - return INVALID_OPERATION; - } - - mCallbackHeapHead = 0; - mCallbackHeapFree = kCallbackHeapCount; - } - - if (mCallbackHeapFree == 0) { - ALOGE("%s: Camera %d: No free callback buffers, dropping frame", - __FUNCTION__, mId); - mCallbackConsumer->unlockBuffer(imgBuffer); - return OK; - } - - heapIdx = mCallbackHeapHead; - - mCallbackHeapHead = (mCallbackHeapHead + 1) & kCallbackHeapCount; - mCallbackHeapFree--; - - // TODO: Get rid of this copy by passing the gralloc queue all the way - // to app - - ssize_t offset; - size_t size; - sp heap = - mCallbackHeap->mBuffers[heapIdx]->getMemory(&offset, - &size); - uint8_t *data = (uint8_t*)heap->getBase() + offset; - - if (!useFlexibleYuv) { - // Can just memcpy when HAL format matches API format - memcpy(data, imgBuffer.data, bufferSize); - } else { - res = convertFromFlexibleYuv(previewFormat, data, imgBuffer, - destYStride, destCStride); - if (res != OK) { - ALOGE("%s: Camera %d: Can't convert between 0x%x and 0x%x formats!", - __FUNCTION__, mId, imgBuffer.format, previewFormat); - mCallbackConsumer->unlockBuffer(imgBuffer); - return BAD_VALUE; - } - } - - ALOGV("%s: Freeing buffer", __FUNCTION__); - mCallbackConsumer->unlockBuffer(imgBuffer); - - // mCallbackHeap may get freed up once input mutex is released - callbackHeap = mCallbackHeap; - } - - // Call outside parameter lock to allow re-entrancy from notification - { - Camera2Client::SharedCameraCallbacks::Lock - l(client->mSharedCameraCallbacks); - if (l.mRemoteCallback != 0) { - ALOGV("%s: Camera %d: Invoking client data callback", - __FUNCTION__, mId); - l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_FRAME, - callbackHeap->mBuffers[heapIdx], NULL); - } - } - - // Only increment free if we're still using the same heap - mCallbackHeapFree++; - - ALOGV("%s: exit", __FUNCTION__); - - return OK; -} - 
-status_t CallbackProcessor::convertFromFlexibleYuv(int32_t previewFormat, - uint8_t *dst, - const CpuConsumer::LockedBuffer &src, - uint32_t dstYStride, - uint32_t dstCStride) const { - - if (previewFormat != HAL_PIXEL_FORMAT_YCrCb_420_SP && - previewFormat != HAL_PIXEL_FORMAT_YV12) { - ALOGE("%s: Camera %d: Unexpected preview format when using " - "flexible YUV: 0x%x", __FUNCTION__, mId, previewFormat); - return INVALID_OPERATION; - } - - // Copy Y plane, adjusting for stride - const uint8_t *ySrc = src.data; - uint8_t *yDst = dst; - for (size_t row = 0; row < src.height; row++) { - memcpy(yDst, ySrc, src.width); - ySrc += src.stride; - yDst += dstYStride; - } - - // Copy/swizzle chroma planes, 4:2:0 subsampling - const uint8_t *cbSrc = src.dataCb; - const uint8_t *crSrc = src.dataCr; - size_t chromaHeight = src.height / 2; - size_t chromaWidth = src.width / 2; - ssize_t chromaGap = src.chromaStride - - (chromaWidth * src.chromaStep); - size_t dstChromaGap = dstCStride - chromaWidth; - - if (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP) { - // Flexible YUV chroma to NV21 chroma - uint8_t *crcbDst = yDst; - // Check for shortcuts - if (cbSrc == crSrc + 1 && src.chromaStep == 2) { - ALOGV("%s: Fast NV21->NV21", __FUNCTION__); - // Source has semiplanar CrCb chroma layout, can copy by rows - for (size_t row = 0; row < chromaHeight; row++) { - memcpy(crcbDst, crSrc, src.width); - crcbDst += src.width; - crSrc += src.chromaStride; - } - } else { - ALOGV("%s: Generic->NV21", __FUNCTION__); - // Generic copy, always works but not very efficient - for (size_t row = 0; row < chromaHeight; row++) { - for (size_t col = 0; col < chromaWidth; col++) { - *(crcbDst++) = *crSrc; - *(crcbDst++) = *cbSrc; - crSrc += src.chromaStep; - cbSrc += src.chromaStep; - } - crSrc += chromaGap; - cbSrc += chromaGap; - } - } - } else { - // flexible YUV chroma to YV12 chroma - ALOG_ASSERT(previewFormat == HAL_PIXEL_FORMAT_YV12, - "Unexpected preview format 0x%x", previewFormat); - uint8_t *crDst = yDst; - uint8_t *cbDst = yDst + chromaHeight * dstCStride; - if (src.chromaStep == 1) { - ALOGV("%s: Fast YV12->YV12", __FUNCTION__); - // Source has planar chroma layout, can copy by row - for (size_t row = 0; row < chromaHeight; row++) { - memcpy(crDst, crSrc, chromaWidth); - crDst += dstCStride; - crSrc += src.chromaStride; - } - for (size_t row = 0; row < chromaHeight; row++) { - memcpy(cbDst, cbSrc, chromaWidth); - cbDst += dstCStride; - cbSrc += src.chromaStride; - } - } else { - ALOGV("%s: Generic->YV12", __FUNCTION__); - // Generic copy, always works but not very efficient - for (size_t row = 0; row < chromaHeight; row++) { - for (size_t col = 0; col < chromaWidth; col++) { - *(crDst++) = *crSrc; - *(cbDst++) = *cbSrc; - crSrc += src.chromaStep; - cbSrc += src.chromaStep; - } - crSrc += chromaGap; - cbSrc += chromaGap; - crDst += dstChromaGap; - cbDst += dstChromaGap; - } - } - } - - return OK; -} - -}; // namespace camera2 -}; // namespace android diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.h b/services/camera/libcameraservice/camera2/CallbackProcessor.h deleted file mode 100644 index 17dcfb1..0000000 --- a/services/camera/libcameraservice/camera2/CallbackProcessor.h +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CALLBACKPROCESSOR_H -#define ANDROID_SERVERS_CAMERA_CAMERA2_CALLBACKPROCESSOR_H - -#include -#include -#include -#include -#include -#include -#include "Parameters.h" -#include "camera/CameraMetadata.h" -#include "Camera2Heap.h" - -namespace android { - -class Camera2Client; -class CameraDeviceBase; - -namespace camera2 { - -/*** - * Still image capture output image processing - */ -class CallbackProcessor: - public Thread, public CpuConsumer::FrameAvailableListener { - public: - CallbackProcessor(sp client); - ~CallbackProcessor(); - - void onFrameAvailable(); - - // Set to NULL to disable the direct-to-app callback window - status_t setCallbackWindow(sp callbackWindow); - status_t updateStream(const Parameters ¶ms); - status_t deleteStream(); - int getStreamId() const; - - void dump(int fd, const Vector& args) const; - private: - static const nsecs_t kWaitDuration = 10000000; // 10 ms - wp mClient; - wp mDevice; - int mId; - - mutable Mutex mInputMutex; - bool mCallbackAvailable; - Condition mCallbackAvailableSignal; - - enum { - NO_STREAM = -1 - }; - - // True if mCallbackWindow is a remote consumer, false if just the local - // mCallbackConsumer - bool mCallbackToApp; - int mCallbackStreamId; - static const size_t kCallbackHeapCount = 6; - sp mCallbackConsumer; - sp mCallbackWindow; - sp mCallbackHeap; - int mCallbackHeapId; - size_t mCallbackHeapHead, mCallbackHeapFree; - - virtual bool threadLoop(); - - status_t processNewCallback(sp &client); - // Used when shutting down - status_t discardNewCallback(); - - // Convert from flexible YUV to NV21 or YV12 - status_t convertFromFlexibleYuv(int32_t previewFormat, - uint8_t *dst, - const CpuConsumer::LockedBuffer &src, - uint32_t dstYStride, - uint32_t dstCStride) const; -}; - - -}; //namespace camera2 -}; //namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/Camera2Heap.h b/services/camera/libcameraservice/camera2/Camera2Heap.h deleted file mode 100644 index 9c72d76..0000000 --- a/services/camera/libcameraservice/camera2/Camera2Heap.h +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROiD_SERVERS_CAMERA_CAMERA2HEAP_H -#define ANDROiD_SERVERS_CAMERA_CAMERA2HEAP_H - -#include -#include - -namespace android { -namespace camera2 { - -// Utility class for managing a set of IMemory blocks -class Camera2Heap : public RefBase { - public: - Camera2Heap(size_t buf_size, uint_t num_buffers = 1, - const char *name = NULL) : - mBufSize(buf_size), - mNumBufs(num_buffers) { - mHeap = new MemoryHeapBase(buf_size * num_buffers, 0, name); - mBuffers = new sp[mNumBufs]; - for (uint_t i = 0; i < mNumBufs; i++) - mBuffers[i] = new MemoryBase(mHeap, - i * mBufSize, - mBufSize); - } - - virtual ~Camera2Heap() - { - delete [] mBuffers; - } - - size_t mBufSize; - uint_t mNumBufs; - sp mHeap; - sp *mBuffers; -}; - -}; // namespace camera2 -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp deleted file mode 100644 index e5a011c..0000000 --- a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp +++ /dev/null @@ -1,711 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "Camera2-CaptureSequencer" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include -#include - -#include "CaptureSequencer.h" -#include "BurstCapture.h" -#include "../Camera2Device.h" -#include "../Camera2Client.h" -#include "Parameters.h" -#include "ZslProcessorInterface.h" - -namespace android { -namespace camera2 { - -/** Public members */ - -CaptureSequencer::CaptureSequencer(wp client): - Thread(false), - mStartCapture(false), - mBusy(false), - mNewAEState(false), - mNewFrameReceived(false), - mNewCaptureReceived(false), - mShutterNotified(false), - mClient(client), - mCaptureState(IDLE), - mTriggerId(0), - mTimeoutCount(0), - mCaptureId(Camera2Client::kCaptureRequestIdStart), - mMsgType(0) { - ALOGV("%s", __FUNCTION__); -} - -CaptureSequencer::~CaptureSequencer() { - ALOGV("%s: Exit", __FUNCTION__); -} - -void CaptureSequencer::setZslProcessor(wp processor) { - Mutex::Autolock l(mInputMutex); - mZslProcessor = processor; -} - -status_t CaptureSequencer::startCapture(int msgType) { - ALOGV("%s", __FUNCTION__); - ATRACE_CALL(); - Mutex::Autolock l(mInputMutex); - if (mBusy) { - ALOGE("%s: Already busy capturing!", __FUNCTION__); - return INVALID_OPERATION; - } - if (!mStartCapture) { - mMsgType = msgType; - mStartCapture = true; - mStartCaptureSignal.signal(); - } - return OK; -} - -status_t CaptureSequencer::waitUntilIdle(nsecs_t timeout) { - ATRACE_CALL(); - ALOGV("%s: Waiting for idle", __FUNCTION__); - Mutex::Autolock l(mStateMutex); - status_t res = -1; - while (mCaptureState != IDLE) { - nsecs_t startTime = systemTime(); - - res = mStateChanged.waitRelative(mStateMutex, timeout); - if (res != OK) return res; - - timeout -= (systemTime() - startTime); - } - ALOGV("%s: Now idle", __FUNCTION__); - return OK; -} - -void 
CaptureSequencer::notifyAutoExposure(uint8_t newState, int triggerId) { - ATRACE_CALL(); - Mutex::Autolock l(mInputMutex); - mAEState = newState; - mAETriggerId = triggerId; - if (!mNewAEState) { - mNewAEState = true; - mNewNotifySignal.signal(); - } -} - -void CaptureSequencer::onFrameAvailable(int32_t frameId, - const CameraMetadata &frame) { - ALOGV("%s: Listener found new frame", __FUNCTION__); - ATRACE_CALL(); - Mutex::Autolock l(mInputMutex); - mNewFrameId = frameId; - mNewFrame = frame; - if (!mNewFrameReceived) { - mNewFrameReceived = true; - mNewFrameSignal.signal(); - } -} - -void CaptureSequencer::onCaptureAvailable(nsecs_t timestamp, - sp captureBuffer) { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - Mutex::Autolock l(mInputMutex); - mCaptureTimestamp = timestamp; - mCaptureBuffer = captureBuffer; - if (!mNewCaptureReceived) { - mNewCaptureReceived = true; - mNewCaptureSignal.signal(); - } -} - - -void CaptureSequencer::dump(int fd, const Vector& /*args*/) { - String8 result; - if (mCaptureRequest.entryCount() != 0) { - result = " Capture request:\n"; - write(fd, result.string(), result.size()); - mCaptureRequest.dump(fd, 2, 6); - } else { - result = " Capture request: undefined\n"; - write(fd, result.string(), result.size()); - } - result = String8::format(" Current capture state: %s\n", - kStateNames[mCaptureState]); - result.append(" Latest captured frame:\n"); - write(fd, result.string(), result.size()); - mNewFrame.dump(fd, 2, 6); -} - -/** Private members */ - -const char* CaptureSequencer::kStateNames[CaptureSequencer::NUM_CAPTURE_STATES+1] = -{ - "IDLE", - "START", - "ZSL_START", - "ZSL_WAITING", - "ZSL_REPROCESSING", - "STANDARD_START", - "STANDARD_PRECAPTURE_WAIT", - "STANDARD_CAPTURE", - "STANDARD_CAPTURE_WAIT", - "BURST_CAPTURE_START", - "BURST_CAPTURE_WAIT", - "DONE", - "ERROR", - "UNKNOWN" -}; - -const CaptureSequencer::StateManager - CaptureSequencer::kStateManagers[CaptureSequencer::NUM_CAPTURE_STATES-1] = { - &CaptureSequencer::manageIdle, - &CaptureSequencer::manageStart, - &CaptureSequencer::manageZslStart, - &CaptureSequencer::manageZslWaiting, - &CaptureSequencer::manageZslReprocessing, - &CaptureSequencer::manageStandardStart, - &CaptureSequencer::manageStandardPrecaptureWait, - &CaptureSequencer::manageStandardCapture, - &CaptureSequencer::manageStandardCaptureWait, - &CaptureSequencer::manageBurstCaptureStart, - &CaptureSequencer::manageBurstCaptureWait, - &CaptureSequencer::manageDone, -}; - -bool CaptureSequencer::threadLoop() { - - sp client = mClient.promote(); - if (client == 0) return false; - - CaptureState currentState; - { - Mutex::Autolock l(mStateMutex); - currentState = mCaptureState; - } - - currentState = (this->*kStateManagers[currentState])(client); - - Mutex::Autolock l(mStateMutex); - if (currentState != mCaptureState) { - mCaptureState = currentState; - ATRACE_INT("cam2_capt_state", mCaptureState); - ALOGV("Camera %d: New capture state %s", - client->getCameraId(), kStateNames[mCaptureState]); - mStateChanged.signal(); - } - - if (mCaptureState == ERROR) { - ALOGE("Camera %d: Stopping capture sequencer due to error", - client->getCameraId()); - return false; - } - - return true; -} - -CaptureSequencer::CaptureState CaptureSequencer::manageIdle( - sp &/*client*/) { - status_t res; - Mutex::Autolock l(mInputMutex); - while (!mStartCapture) { - res = mStartCaptureSignal.waitRelative(mInputMutex, - kWaitDuration); - if (res == TIMED_OUT) break; - } - if (mStartCapture) { - mStartCapture = false; - mBusy = true; - return START; - } - 
return IDLE; -} - -CaptureSequencer::CaptureState CaptureSequencer::manageDone(sp &client) { - status_t res = OK; - ATRACE_CALL(); - mCaptureId++; - if (mCaptureId >= Camera2Client::kCaptureRequestIdEnd) { - mCaptureId = Camera2Client::kCaptureRequestIdStart; - } - { - Mutex::Autolock l(mInputMutex); - mBusy = false; - } - - { - SharedParameters::Lock l(client->getParameters()); - switch (l.mParameters.state) { - case Parameters::DISCONNECTED: - ALOGW("%s: Camera %d: Discarding image data during shutdown ", - __FUNCTION__, client->getCameraId()); - res = INVALID_OPERATION; - break; - case Parameters::STILL_CAPTURE: - res = client->getCameraDevice()->waitUntilDrained(); - if (res != OK) { - ALOGE("%s: Camera %d: Can't idle after still capture: " - "%s (%d)", __FUNCTION__, client->getCameraId(), - strerror(-res), res); - } - l.mParameters.state = Parameters::STOPPED; - break; - case Parameters::VIDEO_SNAPSHOT: - l.mParameters.state = Parameters::RECORD; - break; - default: - ALOGE("%s: Camera %d: Still image produced unexpectedly " - "in state %s!", - __FUNCTION__, client->getCameraId(), - Parameters::getStateName(l.mParameters.state)); - res = INVALID_OPERATION; - } - } - sp processor = mZslProcessor.promote(); - if (processor != 0) { - ALOGV("%s: Memory optimization, clearing ZSL queue", - __FUNCTION__); - processor->clearZslQueue(); - } - - /** - * Fire the jpegCallback in Camera#takePicture(..., jpegCallback) - */ - if (mCaptureBuffer != 0 && res == OK) { - Camera2Client::SharedCameraCallbacks::Lock - l(client->mSharedCameraCallbacks); - ALOGV("%s: Sending still image to client", __FUNCTION__); - if (l.mRemoteCallback != 0) { - l.mRemoteCallback->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE, - mCaptureBuffer, NULL); - } else { - ALOGV("%s: No client!", __FUNCTION__); - } - } - mCaptureBuffer.clear(); - - return IDLE; -} - -CaptureSequencer::CaptureState CaptureSequencer::manageStart( - sp &client) { - ALOGV("%s", __FUNCTION__); - status_t res; - ATRACE_CALL(); - SharedParameters::Lock l(client->getParameters()); - CaptureState nextState = DONE; - - res = updateCaptureRequest(l.mParameters, client); - if (res != OK ) { - ALOGE("%s: Camera %d: Can't update still image capture request: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); - return DONE; - } - - if(l.mParameters.lightFx != Parameters::LIGHTFX_NONE && - l.mParameters.state == Parameters::STILL_CAPTURE) { - nextState = BURST_CAPTURE_START; - } - else if (l.mParameters.zslMode && - l.mParameters.state == Parameters::STILL_CAPTURE && - l.mParameters.flashMode != Parameters::FLASH_MODE_ON) { - nextState = ZSL_START; - } else { - nextState = STANDARD_START; - } - mShutterNotified = false; - - return nextState; -} - -CaptureSequencer::CaptureState CaptureSequencer::manageZslStart( - sp &client) { - ALOGV("%s", __FUNCTION__); - status_t res; - sp processor = mZslProcessor.promote(); - if (processor == 0) { - ALOGE("%s: No ZSL queue to use!", __FUNCTION__); - return DONE; - } - - client->registerFrameListener(mCaptureId, mCaptureId + 1, - this); - - // TODO: Actually select the right thing here. 
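threadLoop() above drives the capture sequence by indexing kStateManagers, a table of pointer-to-member functions, with the current CaptureState, so each state handler returns the next state. A minimal standalone sketch of that dispatch idiom, assuming hypothetical Sequencer/State/handler names rather than the originals:

    // Sketch only -- a reduced pointer-to-member state table.
    #include <cstdio>

    class Sequencer {
      public:
        enum State { IDLE, START, DONE, NUM_STATES };

        // Run handlers until DONE, each handler returning the next state
        // (the real threadLoop() is instead re-invoked by its Thread base).
        void run() {
            while (mState != DONE) {
                mState = (this->*kHandlers[mState])();
            }
        }

      private:
        typedef State (Sequencer::*Handler)();

        State manageIdle()  { std::puts("idle");  return START; }
        State manageStart() { std::puts("start"); return DONE;  }
        State manageDone()  { std::puts("done");  return DONE;  }

        static const Handler kHandlers[NUM_STATES];
        State mState = IDLE;
    };

    const Sequencer::Handler Sequencer::kHandlers[Sequencer::NUM_STATES] = {
        &Sequencer::manageIdle,
        &Sequencer::manageStart,
        &Sequencer::manageDone,   // kept so the table stays aligned with the enum
    };

    int main() {
        Sequencer s;
        s.run();    // prints "idle" then "start"
        return 0;
    }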
- res = processor->pushToReprocess(mCaptureId); - if (res != OK) { - if (res == NOT_ENOUGH_DATA) { - ALOGV("%s: Camera %d: ZSL queue doesn't have good frame, " - "falling back to normal capture", __FUNCTION__, - client->getCameraId()); - } else { - ALOGE("%s: Camera %d: Error in ZSL queue: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); - } - return STANDARD_START; - } - - SharedParameters::Lock l(client->getParameters()); - /* warning: this also locks a SharedCameraCallbacks */ - shutterNotifyLocked(l.mParameters, client, mMsgType); - mShutterNotified = true; - mTimeoutCount = kMaxTimeoutsForCaptureEnd; - return STANDARD_CAPTURE_WAIT; -} - -CaptureSequencer::CaptureState CaptureSequencer::manageZslWaiting( - sp &/*client*/) { - ALOGV("%s", __FUNCTION__); - return DONE; -} - -CaptureSequencer::CaptureState CaptureSequencer::manageZslReprocessing( - sp &/*client*/) { - ALOGV("%s", __FUNCTION__); - return START; -} - -CaptureSequencer::CaptureState CaptureSequencer::manageStandardStart( - sp &client) { - ATRACE_CALL(); - - // Get the onFrameAvailable callback when the requestID == mCaptureId - client->registerFrameListener(mCaptureId, mCaptureId + 1, - this); - { - SharedParameters::Lock l(client->getParameters()); - mTriggerId = l.mParameters.precaptureTriggerCounter++; - } - client->getCameraDevice()->triggerPrecaptureMetering(mTriggerId); - - mAeInPrecapture = false; - mTimeoutCount = kMaxTimeoutsForPrecaptureStart; - return STANDARD_PRECAPTURE_WAIT; -} - -CaptureSequencer::CaptureState CaptureSequencer::manageStandardPrecaptureWait( - sp &/*client*/) { - status_t res; - ATRACE_CALL(); - Mutex::Autolock l(mInputMutex); - while (!mNewAEState) { - res = mNewNotifySignal.waitRelative(mInputMutex, kWaitDuration); - if (res == TIMED_OUT) { - mTimeoutCount--; - break; - } - } - if (mTimeoutCount <= 0) { - ALOGW("Timed out waiting for precapture %s", - mAeInPrecapture ? 
"end" : "start"); - return STANDARD_CAPTURE; - } - if (mNewAEState) { - if (!mAeInPrecapture) { - // Waiting to see PRECAPTURE state - if (mAETriggerId == mTriggerId && - mAEState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) { - ALOGV("%s: Got precapture start", __FUNCTION__); - mAeInPrecapture = true; - mTimeoutCount = kMaxTimeoutsForPrecaptureEnd; - } - } else { - // Waiting to see PRECAPTURE state end - if (mAETriggerId == mTriggerId && - mAEState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) { - ALOGV("%s: Got precapture end", __FUNCTION__); - return STANDARD_CAPTURE; - } - } - mNewAEState = false; - } - return STANDARD_PRECAPTURE_WAIT; -} - -CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture( - sp &client) { - status_t res; - ATRACE_CALL(); - SharedParameters::Lock l(client->getParameters()); - Vector outputStreams; - - /** - * Set up output streams in the request - * - preview - * - capture/jpeg - * - callback (if preview callbacks enabled) - * - recording (if recording enabled) - */ - outputStreams.push(client->getPreviewStreamId()); - outputStreams.push(client->getCaptureStreamId()); - - if (l.mParameters.previewCallbackFlags & - CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) { - outputStreams.push(client->getCallbackStreamId()); - } - - if (l.mParameters.state == Parameters::VIDEO_SNAPSHOT) { - outputStreams.push(client->getRecordingStreamId()); - } - - res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS, - outputStreams); - if (res == OK) { - res = mCaptureRequest.update(ANDROID_REQUEST_ID, - &mCaptureId, 1); - } - if (res == OK) { - res = mCaptureRequest.sort(); - } - - if (res != OK) { - ALOGE("%s: Camera %d: Unable to set up still capture request: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); - return DONE; - } - - // Create a capture copy since CameraDeviceBase#capture takes ownership - CameraMetadata captureCopy = mCaptureRequest; - if (captureCopy.entryCount() == 0) { - ALOGE("%s: Camera %d: Unable to copy capture request for HAL device", - __FUNCTION__, client->getCameraId()); - return DONE; - } - - /** - * Clear the streaming request for still-capture pictures - * (as opposed to i.e. 
video snapshots) - */ - if (l.mParameters.state == Parameters::STILL_CAPTURE) { - // API definition of takePicture() - stop preview before taking pic - res = client->stopStream(); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to stop preview for still capture: " - "%s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); - return DONE; - } - } - // TODO: Capture should be atomic with setStreamingRequest here - res = client->getCameraDevice()->capture(captureCopy); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to submit still image capture request: " - "%s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); - return DONE; - } - - mTimeoutCount = kMaxTimeoutsForCaptureEnd; - return STANDARD_CAPTURE_WAIT; -} - -CaptureSequencer::CaptureState CaptureSequencer::manageStandardCaptureWait( - sp &client) { - status_t res; - ATRACE_CALL(); - Mutex::Autolock l(mInputMutex); - - // Wait for new metadata result (mNewFrame) - while (!mNewFrameReceived) { - res = mNewFrameSignal.waitRelative(mInputMutex, kWaitDuration); - if (res == TIMED_OUT) { - mTimeoutCount--; - break; - } - } - - // Approximation of the shutter being closed - // - TODO: use the hal3 exposure callback in Camera3Device instead - if (mNewFrameReceived && !mShutterNotified) { - SharedParameters::Lock l(client->getParameters()); - /* warning: this also locks a SharedCameraCallbacks */ - shutterNotifyLocked(l.mParameters, client, mMsgType); - mShutterNotified = true; - } - - // Wait until jpeg was captured by JpegProcessor - while (mNewFrameReceived && !mNewCaptureReceived) { - res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration); - if (res == TIMED_OUT) { - mTimeoutCount--; - break; - } - } - if (mTimeoutCount <= 0) { - ALOGW("Timed out waiting for capture to complete"); - return DONE; - } - if (mNewFrameReceived && mNewCaptureReceived) { - if (mNewFrameId != mCaptureId) { - ALOGW("Mismatched capture frame IDs: Expected %d, got %d", - mCaptureId, mNewFrameId); - } - camera_metadata_entry_t entry; - entry = mNewFrame.find(ANDROID_SENSOR_TIMESTAMP); - if (entry.count == 0) { - ALOGE("No timestamp field in capture frame!"); - } - if (entry.data.i64[0] != mCaptureTimestamp) { - ALOGW("Mismatched capture timestamps: Metadata frame %lld," - " captured buffer %lld", - entry.data.i64[0], - mCaptureTimestamp); - } - client->removeFrameListener(mCaptureId, mCaptureId + 1, this); - - mNewFrameReceived = false; - mNewCaptureReceived = false; - return DONE; - } - return STANDARD_CAPTURE_WAIT; -} - -CaptureSequencer::CaptureState CaptureSequencer::manageBurstCaptureStart( - sp &client) { - ALOGV("%s", __FUNCTION__); - status_t res; - ATRACE_CALL(); - - // check which burst mode is set, create respective burst object - { - SharedParameters::Lock l(client->getParameters()); - - res = updateCaptureRequest(l.mParameters, client); - if(res != OK) { - return DONE; - } - - // - // check for burst mode type in mParameters here - // - mBurstCapture = new BurstCapture(client, this); - } - - res = mCaptureRequest.update(ANDROID_REQUEST_ID, &mCaptureId, 1); - if (res == OK) { - res = mCaptureRequest.sort(); - } - if (res != OK) { - ALOGE("%s: Camera %d: Unable to set up still capture request: %s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); - return DONE; - } - - CameraMetadata captureCopy = mCaptureRequest; - if (captureCopy.entryCount() == 0) { - ALOGE("%s: Camera %d: Unable to copy capture request for HAL device", - __FUNCTION__, client->getCameraId()); - return DONE; - } - - Vector 
requests; - requests.push(mCaptureRequest); - res = mBurstCapture->start(requests, mCaptureId); - mTimeoutCount = kMaxTimeoutsForCaptureEnd * 10; - return BURST_CAPTURE_WAIT; -} - -CaptureSequencer::CaptureState CaptureSequencer::manageBurstCaptureWait( - sp &/*client*/) { - status_t res; - ATRACE_CALL(); - - while (!mNewCaptureReceived) { - res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration); - if (res == TIMED_OUT) { - mTimeoutCount--; - break; - } - } - - if (mTimeoutCount <= 0) { - ALOGW("Timed out waiting for burst capture to complete"); - return DONE; - } - if (mNewCaptureReceived) { - mNewCaptureReceived = false; - // TODO: update mCaptureId to last burst's capture ID + 1? - return DONE; - } - - return BURST_CAPTURE_WAIT; -} - -status_t CaptureSequencer::updateCaptureRequest(const Parameters ¶ms, - sp &client) { - ATRACE_CALL(); - status_t res; - if (mCaptureRequest.entryCount() == 0) { - res = client->getCameraDevice()->createDefaultRequest( - CAMERA2_TEMPLATE_STILL_CAPTURE, - &mCaptureRequest); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to create default still image request:" - " %s (%d)", __FUNCTION__, client->getCameraId(), - strerror(-res), res); - return res; - } - } - - res = params.updateRequest(&mCaptureRequest); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update common entries of capture " - "request: %s (%d)", __FUNCTION__, client->getCameraId(), - strerror(-res), res); - return res; - } - - res = params.updateRequestJpeg(&mCaptureRequest); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update JPEG entries of capture " - "request: %s (%d)", __FUNCTION__, client->getCameraId(), - strerror(-res), res); - return res; - } - - return OK; -} - -/*static*/ void CaptureSequencer::shutterNotifyLocked(const Parameters ¶ms, - sp client, int msgType) { - ATRACE_CALL(); - - if (params.state == Parameters::STILL_CAPTURE - && params.playShutterSound - && (msgType & CAMERA_MSG_SHUTTER)) { - client->getCameraService()->playSound(CameraService::SOUND_SHUTTER); - } - - { - Camera2Client::SharedCameraCallbacks::Lock - l(client->mSharedCameraCallbacks); - - ALOGV("%s: Notifying of shutter close to client", __FUNCTION__); - if (l.mRemoteCallback != 0) { - // ShutterCallback - l.mRemoteCallback->notifyCallback(CAMERA_MSG_SHUTTER, - /*ext1*/0, /*ext2*/0); - - // RawCallback with null buffer - l.mRemoteCallback->notifyCallback(CAMERA_MSG_RAW_IMAGE_NOTIFY, - /*ext1*/0, /*ext2*/0); - } else { - ALOGV("%s: No client!", __FUNCTION__); - } - } -} - - -}; // namespace camera2 -}; // namespace android diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.h b/services/camera/libcameraservice/camera2/CaptureSequencer.h deleted file mode 100644 index 76750aa..0000000 --- a/services/camera/libcameraservice/camera2/CaptureSequencer.h +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CAPTURESEQUENCER_H -#define ANDROID_SERVERS_CAMERA_CAMERA2_CAPTURESEQUENCER_H - -#include -#include -#include -#include -#include -#include -#include "camera/CameraMetadata.h" -#include "Parameters.h" -#include "FrameProcessor.h" - -namespace android { - -class Camera2Client; - -namespace camera2 { - -class ZslProcessorInterface; -class BurstCapture; - -/** - * Manages the still image capture process for - * zero-shutter-lag, regular, and video snapshots. - */ -class CaptureSequencer: - virtual public Thread, - virtual public FrameProcessor::FilteredListener { - public: - CaptureSequencer(wp client); - ~CaptureSequencer(); - - // Get reference to the ZslProcessor, which holds the ZSL buffers and frames - void setZslProcessor(wp processor); - - // Begin still image capture - status_t startCapture(int msgType); - - // Wait until current image capture completes; returns immediately if no - // capture is active. Returns TIMED_OUT if capture does not complete during - // the specified duration. - status_t waitUntilIdle(nsecs_t timeout); - - // Notifications about AE state changes - void notifyAutoExposure(uint8_t newState, int triggerId); - - // Notifications from the frame processor - virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame); - - // Notifications from the JPEG processor - void onCaptureAvailable(nsecs_t timestamp, sp captureBuffer); - - void dump(int fd, const Vector& args); - - private: - /** - * Accessed by other threads - */ - Mutex mInputMutex; - - bool mStartCapture; - bool mBusy; - Condition mStartCaptureSignal; - - bool mNewAEState; - uint8_t mAEState; - int mAETriggerId; - Condition mNewNotifySignal; - - bool mNewFrameReceived; - int32_t mNewFrameId; - CameraMetadata mNewFrame; - Condition mNewFrameSignal; - - bool mNewCaptureReceived; - nsecs_t mCaptureTimestamp; - sp mCaptureBuffer; - Condition mNewCaptureSignal; - - bool mShutterNotified; - - /** - * Internal to CaptureSequencer - */ - static const nsecs_t kWaitDuration = 100000000; // 100 ms - static const int kMaxTimeoutsForPrecaptureStart = 2; // 200 ms - static const int kMaxTimeoutsForPrecaptureEnd = 20; // 2 sec - static const int kMaxTimeoutsForCaptureEnd = 40; // 4 sec - - wp mClient; - wp mZslProcessor; - sp mBurstCapture; - - enum CaptureState { - IDLE, - START, - ZSL_START, - ZSL_WAITING, - ZSL_REPROCESSING, - STANDARD_START, - STANDARD_PRECAPTURE_WAIT, - STANDARD_CAPTURE, - STANDARD_CAPTURE_WAIT, - BURST_CAPTURE_START, - BURST_CAPTURE_WAIT, - DONE, - ERROR, - NUM_CAPTURE_STATES - } mCaptureState; - static const char* kStateNames[]; - Mutex mStateMutex; // Guards mCaptureState - Condition mStateChanged; - - typedef CaptureState (CaptureSequencer::*StateManager)(sp &client); - static const StateManager kStateManagers[]; - - CameraMetadata mCaptureRequest; - - int mTriggerId; - int mTimeoutCount; - bool mAeInPrecapture; - - int32_t mCaptureId; - int mMsgType; - - // Main internal methods - - virtual bool threadLoop(); - - CaptureState manageIdle(sp &client); - CaptureState manageStart(sp &client); - - CaptureState manageZslStart(sp &client); - CaptureState manageZslWaiting(sp &client); - CaptureState manageZslReprocessing(sp &client); - - CaptureState manageStandardStart(sp &client); - CaptureState manageStandardPrecaptureWait(sp &client); - CaptureState manageStandardCapture(sp &client); - CaptureState manageStandardCaptureWait(sp &client); - - CaptureState manageBurstCaptureStart(sp &client); - CaptureState manageBurstCaptureWait(sp 
&client); - - CaptureState manageDone(sp &client); - - // Utility methods - - status_t updateCaptureRequest(const Parameters ¶ms, - sp &client); - - // Emit Shutter/Raw callback to java, and maybe play a shutter sound - static void shutterNotifyLocked(const Parameters ¶ms, - sp client, int msgType); -}; - -}; // namespace camera2 -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp deleted file mode 100644 index 114a7a8..0000000 --- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp +++ /dev/null @@ -1,315 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "Camera2-FrameProcessor" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include - -#include "FrameProcessor.h" -#include "../CameraDeviceBase.h" -#include "../Camera2Client.h" - -namespace android { -namespace camera2 { - -FrameProcessor::FrameProcessor(wp device, - wp client) : - ProFrameProcessor(device), - mClient(client), - mLastFrameNumberOfFaces(0) { - - sp d = device.promote(); - mSynthesize3ANotify = !(d->willNotify3A()); -} - -FrameProcessor::~FrameProcessor() { -} - -bool FrameProcessor::processSingleFrame(CameraMetadata &frame, - const sp &device) { - - sp client = mClient.promote(); - if (!client.get()) { - return false; - } - - if (processFaceDetect(frame, client) != OK) { - return false; - } - - if (mSynthesize3ANotify) { - // Ignoring missing fields for now - process3aState(frame, client); - } - - if (!ProFrameProcessor::processSingleFrame(frame, device)) { - return false; - } - - return true; -} - -status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame, - const sp &client) { - status_t res = BAD_VALUE; - ATRACE_CALL(); - camera_metadata_ro_entry_t entry; - bool enableFaceDetect; - - { - SharedParameters::Lock l(client->getParameters()); - enableFaceDetect = l.mParameters.enableFaceDetect; - } - entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE); - - // TODO: This should be an error once implementations are compliant - if (entry.count == 0) { - return OK; - } - - uint8_t faceDetectMode = entry.data.u8[0]; - - camera_frame_metadata metadata; - Vector faces; - metadata.number_of_faces = 0; - - if (enableFaceDetect && - faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) { - - SharedParameters::Lock l(client->getParameters()); - entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES); - if (entry.count == 0) { - // No faces this frame - /* warning: locks SharedCameraCallbacks */ - callbackFaceDetection(client, metadata); - return OK; - } - metadata.number_of_faces = entry.count / 4; - if (metadata.number_of_faces > - l.mParameters.fastInfo.maxFaces) { - ALOGE("%s: Camera %d: More faces than expected! 
(Got %d, max %d)", - __FUNCTION__, client->getCameraId(), - metadata.number_of_faces, l.mParameters.fastInfo.maxFaces); - return res; - } - const int32_t *faceRects = entry.data.i32; - - entry = frame.find(ANDROID_STATISTICS_FACE_SCORES); - if (entry.count == 0) { - ALOGE("%s: Camera %d: Unable to read face scores", - __FUNCTION__, client->getCameraId()); - return res; - } - const uint8_t *faceScores = entry.data.u8; - - const int32_t *faceLandmarks = NULL; - const int32_t *faceIds = NULL; - - if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) { - entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS); - if (entry.count == 0) { - ALOGE("%s: Camera %d: Unable to read face landmarks", - __FUNCTION__, client->getCameraId()); - return res; - } - faceLandmarks = entry.data.i32; - - entry = frame.find(ANDROID_STATISTICS_FACE_IDS); - - if (entry.count == 0) { - ALOGE("%s: Camera %d: Unable to read face IDs", - __FUNCTION__, client->getCameraId()); - return res; - } - faceIds = entry.data.i32; - } - - faces.setCapacity(metadata.number_of_faces); - - size_t maxFaces = metadata.number_of_faces; - for (size_t i = 0; i < maxFaces; i++) { - if (faceScores[i] == 0) { - metadata.number_of_faces--; - continue; - } - if (faceScores[i] > 100) { - ALOGW("%s: Face index %d with out of range score %d", - __FUNCTION__, i, faceScores[i]); - } - - camera_face_t face; - - face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]); - face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]); - face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]); - face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]); - - face.score = faceScores[i]; - if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) { - face.id = faceIds[i]; - face.left_eye[0] = - l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]); - face.left_eye[1] = - l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]); - face.right_eye[0] = - l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]); - face.right_eye[1] = - l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]); - face.mouth[0] = - l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]); - face.mouth[1] = - l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]); - } else { - face.id = 0; - face.left_eye[0] = face.left_eye[1] = -2000; - face.right_eye[0] = face.right_eye[1] = -2000; - face.mouth[0] = face.mouth[1] = -2000; - } - faces.push_back(face); - } - - metadata.faces = faces.editArray(); - } - - /* warning: locks SharedCameraCallbacks */ - callbackFaceDetection(client, metadata); - - return OK; -} - -status_t FrameProcessor::process3aState(const CameraMetadata &frame, - const sp &client) { - - ATRACE_CALL(); - camera_metadata_ro_entry_t entry; - int mId = client->getCameraId(); - - entry = frame.find(ANDROID_REQUEST_FRAME_COUNT); - int32_t frameNumber = entry.data.i32[0]; - - // Get 3A states from result metadata - bool gotAllStates = true; - - AlgState new3aState; - - entry = frame.find(ANDROID_CONTROL_AE_STATE); - if (entry.count == 0) { - ALOGE("%s: Camera %d: No AE state provided by HAL for frame %d!", - __FUNCTION__, mId, frameNumber); - gotAllStates = false; - } else { - new3aState.aeState = - static_cast( - entry.data.u8[0]); - } - - entry = frame.find(ANDROID_CONTROL_AF_STATE); - if (entry.count == 0) { - ALOGE("%s: Camera %d: No AF state provided by HAL for frame %d!", - __FUNCTION__, mId, frameNumber); - gotAllStates = false; - } else { - new3aState.afState = - static_cast( - entry.data.u8[0]); - } - - 
entry = frame.find(ANDROID_CONTROL_AWB_STATE); - if (entry.count == 0) { - ALOGE("%s: Camera %d: No AWB state provided by HAL for frame %d!", - __FUNCTION__, mId, frameNumber); - gotAllStates = false; - } else { - new3aState.awbState = - static_cast( - entry.data.u8[0]); - } - - int32_t afTriggerId = 0; - entry = frame.find(ANDROID_CONTROL_AF_TRIGGER_ID); - if (entry.count == 0) { - ALOGE("%s: Camera %d: No AF trigger ID provided by HAL for frame %d!", - __FUNCTION__, mId, frameNumber); - gotAllStates = false; - } else { - afTriggerId = entry.data.i32[0]; - } - - int32_t aeTriggerId = 0; - entry = frame.find(ANDROID_CONTROL_AE_PRECAPTURE_ID); - if (entry.count == 0) { - ALOGE("%s: Camera %d: No AE precapture trigger ID provided by HAL" - " for frame %d!", - __FUNCTION__, mId, frameNumber); - gotAllStates = false; - } else { - aeTriggerId = entry.data.i32[0]; - } - - if (!gotAllStates) return BAD_VALUE; - - if (new3aState.aeState != m3aState.aeState) { - ALOGV("%s: AE state changed from 0x%x to 0x%x", - __FUNCTION__, m3aState.aeState, new3aState.aeState); - client->notifyAutoExposure(new3aState.aeState, aeTriggerId); - } - if (new3aState.afState != m3aState.afState) { - ALOGV("%s: AF state changed from 0x%x to 0x%x", - __FUNCTION__, m3aState.afState, new3aState.afState); - client->notifyAutoFocus(new3aState.afState, afTriggerId); - } - if (new3aState.awbState != m3aState.awbState) { - ALOGV("%s: AWB state changed from 0x%x to 0x%x", - __FUNCTION__, m3aState.awbState, new3aState.awbState); - client->notifyAutoWhitebalance(new3aState.awbState, aeTriggerId); - } - - m3aState = new3aState; - - return OK; -} - - -void FrameProcessor::callbackFaceDetection(sp client, - const camera_frame_metadata &metadata) { - - camera_frame_metadata *metadata_ptr = - const_cast(&metadata); - - /** - * Filter out repeated 0-face callbacks, - * but not when the last frame was >0 - */ - if (metadata.number_of_faces != 0 || - mLastFrameNumberOfFaces != metadata.number_of_faces) { - - Camera2Client::SharedCameraCallbacks::Lock - l(client->mSharedCameraCallbacks); - if (l.mRemoteCallback != NULL) { - l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA, - NULL, - metadata_ptr); - } - } - - mLastFrameNumberOfFaces = metadata.number_of_faces; -} - -}; // namespace camera2 -}; // namespace android diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.h b/services/camera/libcameraservice/camera2/FrameProcessor.h deleted file mode 100644 index f480c55..0000000 --- a/services/camera/libcameraservice/camera2/FrameProcessor.h +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_FRAMEPROCESSOR_H -#define ANDROID_SERVERS_CAMERA_CAMERA2_FRAMEPROCESSOR_H - -#include -#include -#include -#include -#include -#include - -#include "ProFrameProcessor.h" - -struct camera_frame_metadata; - -namespace android { - -class Camera2Client; - -namespace camera2 { - -/* Output frame metadata processing thread. This thread waits for new - * frames from the device, and analyzes them as necessary. - */ -class FrameProcessor : public ProFrameProcessor { - public: - FrameProcessor(wp device, wp client); - ~FrameProcessor(); - - private: - wp mClient; - - bool mSynthesize3ANotify; - - int mLastFrameNumberOfFaces; - - void processNewFrames(const sp &client); - - virtual bool processSingleFrame(CameraMetadata &frame, - const sp &device); - - status_t processFaceDetect(const CameraMetadata &frame, - const sp &client); - - // Send 3A state change notifications to client based on frame metadata - status_t process3aState(const CameraMetadata &frame, - const sp &client); - - struct AlgState { - camera_metadata_enum_android_control_ae_state aeState; - camera_metadata_enum_android_control_af_state afState; - camera_metadata_enum_android_control_awb_state awbState; - - AlgState() : - aeState(ANDROID_CONTROL_AE_STATE_INACTIVE), - afState(ANDROID_CONTROL_AF_STATE_INACTIVE), - awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE) { - } - } m3aState; - - // Emit FaceDetection event to java if faces changed - void callbackFaceDetection(sp client, - const camera_frame_metadata &metadata); -}; - - -}; //namespace camera2 -}; //namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/JpegCompressor.cpp b/services/camera/libcameraservice/camera2/JpegCompressor.cpp deleted file mode 100644 index 2f0c67d..0000000 --- a/services/camera/libcameraservice/camera2/JpegCompressor.cpp +++ /dev/null @@ -1,221 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "Camera2-JpegCompressor" - -#include -#include - -#include "JpegCompressor.h" - -namespace android { -namespace camera2 { - -JpegCompressor::JpegCompressor(): - Thread(false), - mIsBusy(false), - mCaptureTime(0) { -} - -JpegCompressor::~JpegCompressor() { - ALOGV("%s", __FUNCTION__); - Mutex::Autolock lock(mMutex); -} - -status_t JpegCompressor::start(Vector buffers, - nsecs_t captureTime) { - ALOGV("%s", __FUNCTION__); - Mutex::Autolock busyLock(mBusyMutex); - - if (mIsBusy) { - ALOGE("%s: Already processing a buffer!", __FUNCTION__); - return INVALID_OPERATION; - } - - mIsBusy = true; - - mBuffers = buffers; - mCaptureTime = captureTime; - - status_t res; - res = run("JpegCompressor"); - if (res != OK) { - ALOGE("%s: Unable to start up compression thread: %s (%d)", - __FUNCTION__, strerror(-res), res); - //delete mBuffers; // necessary? 
- } - return res; -} - -status_t JpegCompressor::cancel() { - ALOGV("%s", __FUNCTION__); - requestExitAndWait(); - return OK; -} - -status_t JpegCompressor::readyToRun() { - ALOGV("%s", __FUNCTION__); - return OK; -} - -bool JpegCompressor::threadLoop() { - ALOGV("%s", __FUNCTION__); - - mAuxBuffer = mBuffers[0]; // input - mJpegBuffer = mBuffers[1]; // output - - // Set up error management - mJpegErrorInfo = NULL; - JpegError error; - error.parent = this; - - mCInfo.err = jpeg_std_error(&error); - mCInfo.err->error_exit = jpegErrorHandler; - - jpeg_create_compress(&mCInfo); - if (checkError("Error initializing compression")) return false; - - // Route compressed data straight to output stream buffer - JpegDestination jpegDestMgr; - jpegDestMgr.parent = this; - jpegDestMgr.init_destination = jpegInitDestination; - jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer; - jpegDestMgr.term_destination = jpegTermDestination; - - mCInfo.dest = &jpegDestMgr; - - // Set up compression parameters - mCInfo.image_width = mAuxBuffer->width; - mCInfo.image_height = mAuxBuffer->height; - mCInfo.input_components = 1; // 3; - mCInfo.in_color_space = JCS_GRAYSCALE; // JCS_RGB - - ALOGV("%s: image_width = %d, image_height = %d", __FUNCTION__, mCInfo.image_width, mCInfo.image_height); - - jpeg_set_defaults(&mCInfo); - if (checkError("Error configuring defaults")) return false; - - // Do compression - jpeg_start_compress(&mCInfo, TRUE); - if (checkError("Error starting compression")) return false; - - size_t rowStride = mAuxBuffer->stride;// * 3; - const size_t kChunkSize = 32; - while (mCInfo.next_scanline < mCInfo.image_height) { - JSAMPROW chunk[kChunkSize]; - for (size_t i = 0 ; i < kChunkSize; i++) { - chunk[i] = (JSAMPROW) - (mAuxBuffer->data + (i + mCInfo.next_scanline) * rowStride); - } - jpeg_write_scanlines(&mCInfo, chunk, kChunkSize); - if (checkError("Error while compressing")) return false; - if (exitPending()) { - ALOGV("%s: Cancel called, exiting early", __FUNCTION__); - cleanUp(); - return false; - } - } - - jpeg_finish_compress(&mCInfo); - if (checkError("Error while finishing compression")) return false; - - cleanUp(); - return false; -} - -bool JpegCompressor::isBusy() { - ALOGV("%s", __FUNCTION__); - Mutex::Autolock busyLock(mBusyMutex); - return mIsBusy; -} - -// old function -- TODO: update for new buffer type -bool JpegCompressor::isStreamInUse(uint32_t /*id*/) { - ALOGV("%s", __FUNCTION__); - Mutex::Autolock lock(mBusyMutex); - - if (mBuffers.size() && mIsBusy) { - for (size_t i = 0; i < mBuffers.size(); i++) { -// if ( mBuffers[i].streamId == (int)id ) return true; - } - } - return false; -} - -bool JpegCompressor::waitForDone(nsecs_t timeout) { - ALOGV("%s", __FUNCTION__); - Mutex::Autolock lock(mBusyMutex); - status_t res = OK; - if (mIsBusy) { - res = mDone.waitRelative(mBusyMutex, timeout); - } - return (res == OK); -} - -bool JpegCompressor::checkError(const char *msg) { - ALOGV("%s", __FUNCTION__); - if (mJpegErrorInfo) { - char errBuffer[JMSG_LENGTH_MAX]; - mJpegErrorInfo->err->format_message(mJpegErrorInfo, errBuffer); - ALOGE("%s: %s: %s", - __FUNCTION__, msg, errBuffer); - cleanUp(); - mJpegErrorInfo = NULL; - return true; - } - return false; -} - -void JpegCompressor::cleanUp() { - ALOGV("%s", __FUNCTION__); - jpeg_destroy_compress(&mCInfo); - Mutex::Autolock lock(mBusyMutex); - mIsBusy = false; - mDone.signal(); -} - -void JpegCompressor::jpegErrorHandler(j_common_ptr cinfo) { - ALOGV("%s", __FUNCTION__); - JpegError *error = static_cast(cinfo->err); - 
error->parent->mJpegErrorInfo = cinfo; -} - -void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) { - ALOGV("%s", __FUNCTION__); - JpegDestination *dest= static_cast(cinfo->dest); - ALOGV("%s: Setting destination to %p, size %d", - __FUNCTION__, dest->parent->mJpegBuffer->data, kMaxJpegSize); - dest->next_output_byte = (JOCTET*)(dest->parent->mJpegBuffer->data); - dest->free_in_buffer = kMaxJpegSize; -} - -boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr /*cinfo*/) { - ALOGV("%s", __FUNCTION__); - ALOGE("%s: JPEG destination buffer overflow!", - __FUNCTION__); - return true; -} - -void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) { - (void) cinfo; // TODO: clean up - ALOGV("%s", __FUNCTION__); - ALOGV("%s: Done writing JPEG data. %d bytes left in buffer", - __FUNCTION__, cinfo->dest->free_in_buffer); -} - -}; // namespace camera2 -}; // namespace android diff --git a/services/camera/libcameraservice/camera2/JpegCompressor.h b/services/camera/libcameraservice/camera2/JpegCompressor.h deleted file mode 100644 index 945b1de..0000000 --- a/services/camera/libcameraservice/camera2/JpegCompressor.h +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - -/** - * This class simulates a hardware JPEG compressor. It receives image buffers - * in RGBA_8888 format, processes them in a worker thread, and then pushes them - * out to their destination stream. - */ - -#ifndef ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H -#define ANDROID_SERVERS_CAMERA_JPEGCOMPRESSOR_H - -#include "utils/Thread.h" -#include "utils/Mutex.h" -#include "utils/Timers.h" -#include "utils/Vector.h" -//#include "Base.h" -#include -#include - -extern "C" { -#include -} - - -namespace android { -namespace camera2 { - -class JpegCompressor: private Thread, public virtual RefBase { - public: - - JpegCompressor(); - ~JpegCompressor(); - - // Start compressing COMPRESSED format buffers; JpegCompressor takes - // ownership of the Buffers vector. 
- status_t start(Vector buffers, - nsecs_t captureTime); - - status_t cancel(); - - bool isBusy(); - bool isStreamInUse(uint32_t id); - - bool waitForDone(nsecs_t timeout); - - // TODO: Measure this - static const size_t kMaxJpegSize = 300000; - - private: - Mutex mBusyMutex; - Mutex mMutex; - bool mIsBusy; - Condition mDone; - nsecs_t mCaptureTime; - - Vector mBuffers; - CpuConsumer::LockedBuffer *mJpegBuffer; - CpuConsumer::LockedBuffer *mAuxBuffer; - bool mFoundJpeg, mFoundAux; - - jpeg_compress_struct mCInfo; - - struct JpegError : public jpeg_error_mgr { - JpegCompressor *parent; - }; - j_common_ptr mJpegErrorInfo; - - struct JpegDestination : public jpeg_destination_mgr { - JpegCompressor *parent; - }; - - static void jpegErrorHandler(j_common_ptr cinfo); - - static void jpegInitDestination(j_compress_ptr cinfo); - static boolean jpegEmptyOutputBuffer(j_compress_ptr cinfo); - static void jpegTermDestination(j_compress_ptr cinfo); - - bool checkError(const char *msg); - void cleanUp(); - - /** - * Inherited Thread virtual overrides - */ - private: - virtual status_t readyToRun(); - virtual bool threadLoop(); -}; - -}; // namespace camera2 -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.cpp b/services/camera/libcameraservice/camera2/JpegProcessor.cpp deleted file mode 100644 index 1d739cd..0000000 --- a/services/camera/libcameraservice/camera2/JpegProcessor.cpp +++ /dev/null @@ -1,387 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#define LOG_TAG "Camera2-JpegProcessor" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include - -#include -#include -#include -#include - -#include "JpegProcessor.h" -#include -#include "../CameraDeviceBase.h" -#include "../Camera2Client.h" - - -namespace android { -namespace camera2 { - -JpegProcessor::JpegProcessor( - sp client, - wp sequencer): - Thread(false), - mDevice(client->getCameraDevice()), - mSequencer(sequencer), - mId(client->getCameraId()), - mCaptureAvailable(false), - mCaptureStreamId(NO_STREAM) { -} - -JpegProcessor::~JpegProcessor() { - ALOGV("%s: Exit", __FUNCTION__); - deleteStream(); -} - -void JpegProcessor::onFrameAvailable() { - Mutex::Autolock l(mInputMutex); - if (!mCaptureAvailable) { - mCaptureAvailable = true; - mCaptureAvailableSignal.signal(); - } -} - -status_t JpegProcessor::updateStream(const Parameters ¶ms) { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - status_t res; - - Mutex::Autolock l(mInputMutex); - - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - // Find out buffer size for JPEG - camera_metadata_ro_entry_t maxJpegSize = - params.staticInfo(ANDROID_JPEG_MAX_SIZE); - if (maxJpegSize.count == 0) { - ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!", - __FUNCTION__, mId); - return INVALID_OPERATION; - } - - if (mCaptureConsumer == 0) { - // Create CPU buffer queue endpoint - sp bq = new BufferQueue(); - mCaptureConsumer = new CpuConsumer(bq, 1); - mCaptureConsumer->setFrameAvailableListener(this); - mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer")); - mCaptureWindow = new Surface( - mCaptureConsumer->getProducerInterface()); - // Create memory for API consumption - mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0, - "Camera2Client::CaptureHeap"); - if (mCaptureHeap->getSize() == 0) { - ALOGE("%s: Camera %d: Unable to allocate memory for capture", - __FUNCTION__, mId); - return NO_MEMORY; - } - } - - if (mCaptureStreamId != NO_STREAM) { - // Check if stream parameters have to change - uint32_t currentWidth, currentHeight; - res = device->getStreamInfo(mCaptureStreamId, - ¤tWidth, ¤tHeight, 0); - if (res != OK) { - ALOGE("%s: Camera %d: Error querying capture output stream info: " - "%s (%d)", __FUNCTION__, - mId, strerror(-res), res); - return res; - } - if (currentWidth != (uint32_t)params.pictureWidth || - currentHeight != (uint32_t)params.pictureHeight) { - ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed", - __FUNCTION__, mId, mCaptureStreamId); - res = device->deleteStream(mCaptureStreamId); - if (res == -EBUSY) { - ALOGV("%s: Camera %d: Device is busy, call updateStream again " - " after it becomes idle", __FUNCTION__, mId); - return res; - } else if (res != OK) { - ALOGE("%s: Camera %d: Unable to delete old output stream " - "for capture: %s (%d)", __FUNCTION__, - mId, strerror(-res), res); - return res; - } - mCaptureStreamId = NO_STREAM; - } - } - - if (mCaptureStreamId == NO_STREAM) { - // Create stream for HAL production - res = device->createStream(mCaptureWindow, - params.pictureWidth, params.pictureHeight, - HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0], - &mCaptureStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Can't create output stream for capture: " - "%s (%d)", __FUNCTION__, mId, - strerror(-res), res); - return res; - } - - } - return OK; -} - -status_t JpegProcessor::deleteStream() { - ATRACE_CALL(); - - Mutex::Autolock 
l(mInputMutex); - - if (mCaptureStreamId != NO_STREAM) { - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - device->deleteStream(mCaptureStreamId); - - mCaptureHeap.clear(); - mCaptureWindow.clear(); - mCaptureConsumer.clear(); - - mCaptureStreamId = NO_STREAM; - } - return OK; -} - -int JpegProcessor::getStreamId() const { - Mutex::Autolock l(mInputMutex); - return mCaptureStreamId; -} - -void JpegProcessor::dump(int /*fd*/, const Vector& /*args*/) const { -} - -bool JpegProcessor::threadLoop() { - status_t res; - - { - Mutex::Autolock l(mInputMutex); - while (!mCaptureAvailable) { - res = mCaptureAvailableSignal.waitRelative(mInputMutex, - kWaitDuration); - if (res == TIMED_OUT) return true; - } - mCaptureAvailable = false; - } - - do { - res = processNewCapture(); - } while (res == OK); - - return true; -} - -status_t JpegProcessor::processNewCapture() { - ATRACE_CALL(); - status_t res; - sp captureHeap; - - CpuConsumer::LockedBuffer imgBuffer; - - res = mCaptureConsumer->lockNextBuffer(&imgBuffer); - if (res != OK) { - if (res != BAD_VALUE) { - ALOGE("%s: Camera %d: Error receiving still image buffer: " - "%s (%d)", __FUNCTION__, - mId, strerror(-res), res); - } - return res; - } - - ALOGV("%s: Camera %d: Still capture available", __FUNCTION__, - mId); - - if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) { - ALOGE("%s: Camera %d: Unexpected format for still image: " - "%x, expected %x", __FUNCTION__, mId, - imgBuffer.format, - HAL_PIXEL_FORMAT_BLOB); - mCaptureConsumer->unlockBuffer(imgBuffer); - return OK; - } - - // Find size of JPEG image - size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width); - if (jpegSize == 0) { // failed to find size, default to whole buffer - jpegSize = imgBuffer.width; - } - size_t heapSize = mCaptureHeap->getSize(); - if (jpegSize > heapSize) { - ALOGW("%s: JPEG image is larger than expected, truncating " - "(got %d, expected at most %d bytes)", - __FUNCTION__, jpegSize, heapSize); - jpegSize = heapSize; - } - - // TODO: Optimize this to avoid memcopy - sp captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize); - void* captureMemory = mCaptureHeap->getBase(); - memcpy(captureMemory, imgBuffer.data, jpegSize); - - mCaptureConsumer->unlockBuffer(imgBuffer); - - sp sequencer = mSequencer.promote(); - if (sequencer != 0) { - sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer); - } - - return OK; -} - -/* - * JPEG FILE FORMAT OVERVIEW. - * http://www.jpeg.org/public/jfif.pdf - * (JPEG is the image compression algorithm, actual file format is called JFIF) - * - * "Markers" are 2-byte patterns used to distinguish parts of JFIF files. The - * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE - * (inclusive). Because every marker begins with the same byte, they are - * referred to by the second byte's value. - * - * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8. - * Following it, "segment" sections begin with other markers, followed by a - * 2-byte length (in network byte order), then the segment data. - * - * For our purposes we will ignore the data, and just use the length to skip to - * the next segment. This is necessary because the data inside segments are - * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from - * naievely scanning until the end. - * - * After all the segments are processed, the jpeg compressed image stream begins. 
- * This can be considered an opaque format with one requirement: all 0xFF bytes - * in this stream must be followed with a 0x00 byte. This prevents any of the - * image data to be interpreted as a segment. The only exception to this is at - * the end of the image stream there is an End of Image (EOI) marker, which is - * 0xFF followed by a non-zero (0xD9) byte. - */ - -const uint8_t MARK = 0xFF; // First byte of marker -const uint8_t SOI = 0xD8; // Start of Image -const uint8_t EOI = 0xD9; // End of Image -const size_t MARKER_LENGTH = 2; // length of a marker - -#pragma pack(push) -#pragma pack(1) -typedef struct segment { - uint8_t marker[MARKER_LENGTH]; - uint16_t length; -} segment_t; -#pragma pack(pop) - -/* HELPER FUNCTIONS */ - -// check for Start of Image marker -bool checkJpegStart(uint8_t* buf) { - return buf[0] == MARK && buf[1] == SOI; -} -// check for End of Image marker -bool checkJpegEnd(uint8_t *buf) { - return buf[0] == MARK && buf[1] == EOI; -} -// check for arbitrary marker, returns marker type (second byte) -// returns 0 if no marker found. Note: 0x00 is not a valid marker type -uint8_t checkJpegMarker(uint8_t *buf) { - if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) { - return buf[1]; - } - return 0; -} - -// Return the size of the JPEG, 0 indicates failure -size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) { - size_t size; - - // First check for JPEG transport header at the end of the buffer - uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob)); - struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header); - if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) { - size = blob->jpeg_size; - if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) { - // Verify SOI and EOI markers - size_t offset = size - MARKER_LENGTH; - uint8_t *end = jpegBuffer + offset; - if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) { - ALOGV("Found JPEG transport header, img size %d", size); - return size; - } else { - ALOGW("Found JPEG transport header with bad Image Start/End"); - } - } else { - ALOGW("Found JPEG transport header with bad size %d", size); - } - } - - // Check Start of Image - if ( !checkJpegStart(jpegBuffer) ) { - ALOGE("Could not find start of JPEG marker"); - return 0; - } - - // Read JFIF segment markers, skip over segment data - size = 0; - while (size <= maxSize - MARKER_LENGTH) { - segment_t *segment = (segment_t*)(jpegBuffer + size); - uint8_t type = checkJpegMarker(segment->marker); - if (type == 0) { // invalid marker, no more segments, begin JPEG data - ALOGV("JPEG stream found beginning at offset %d", size); - break; - } - if (type == EOI || size > maxSize - sizeof(segment_t)) { - ALOGE("Got premature End before JPEG data, offset %d", size); - return 0; - } - size_t length = ntohs(segment->length); - ALOGV("JFIF Segment, type %x length %x", type, length); - size += length + MARKER_LENGTH; - } - - // Find End of Image - // Scan JPEG buffer until End of Image (EOI) - bool foundEnd = false; - for ( ; size <= maxSize - MARKER_LENGTH; size++) { - if ( checkJpegEnd(jpegBuffer + size) ) { - foundEnd = true; - size += MARKER_LENGTH; - break; - } - } - if (!foundEnd) { - ALOGE("Could not find end of JPEG marker"); - return 0; - } - - if (size > maxSize) { - ALOGW("JPEG size %d too large, reducing to maxSize %d", size, maxSize); - size = maxSize; - } - ALOGV("Final JPEG size %d", size); - return size; -} - -}; // namespace camera2 -}; // namespace android diff --git 
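Before falling back to the marker scan above, findJpegSize first trusts a small transport trailer that the HAL appends at the very end of every BLOB-format buffer. A hedged sketch of that fast path follows; the trailer layout is a stand-in mirroring struct camera2_jpeg_blob from the camera2 HAL headers, and the names and memcpy-based read are illustrative, not part of the original sources:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    // Stand-in for the trailer written at the tail of the BLOB buffer.
    #pragma pack(push, 1)
    struct JpegBlobTrailer {
        uint16_t blobId;    // expected to match the well-known JPEG blob ID
        uint32_t jpegSize;  // actual compressed size inside the buffer
    };
    #pragma pack(pop)

    // Returns the size recorded in the trailer, or 0 when the trailer is
    // missing or inconsistent and the caller must scan markers instead.
    static size_t jpegSizeFromTrailer(const uint8_t* buf, size_t maxSize,
                                      uint16_t expectedBlobId) {
        if (maxSize < sizeof(JpegBlobTrailer)) return 0;
        JpegBlobTrailer trailer;
        // memcpy avoids an unaligned read of the packed trailer
        memcpy(&trailer, buf + maxSize - sizeof(trailer), sizeof(trailer));
        if (trailer.blobId != expectedBlobId) return 0;
        if (trailer.jpegSize == 0 ||
                trailer.jpegSize > maxSize - sizeof(trailer)) return 0;
        return trailer.jpegSize;
    }

The deleted implementation additionally re-validates the SOI and EOI markers before trusting the recorded size, a cheap guard against a HAL that writes a stale trailer.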
a/services/camera/libcameraservice/camera2/JpegProcessor.h b/services/camera/libcameraservice/camera2/JpegProcessor.h deleted file mode 100644 index a38611c..0000000 --- a/services/camera/libcameraservice/camera2/JpegProcessor.h +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_JPEGPROCESSOR_H -#define ANDROID_SERVERS_CAMERA_CAMERA2_JPEGPROCESSOR_H - -#include -#include -#include -#include -#include -#include -#include "Parameters.h" -#include "camera/CameraMetadata.h" - -namespace android { - -class Camera2Client; -class CameraDeviceBase; -class MemoryHeapBase; - -namespace camera2 { - -class CaptureSequencer; - -/*** - * Still image capture output image processing - */ -class JpegProcessor: - public Thread, public CpuConsumer::FrameAvailableListener { - public: - JpegProcessor(sp client, wp sequencer); - ~JpegProcessor(); - - // CpuConsumer listener implementation - void onFrameAvailable(); - - status_t updateStream(const Parameters ¶ms); - status_t deleteStream(); - int getStreamId() const; - - void dump(int fd, const Vector& args) const; - private: - static const nsecs_t kWaitDuration = 10000000; // 10 ms - wp mDevice; - wp mSequencer; - int mId; - - mutable Mutex mInputMutex; - bool mCaptureAvailable; - Condition mCaptureAvailableSignal; - - enum { - NO_STREAM = -1 - }; - - int mCaptureStreamId; - sp mCaptureConsumer; - sp mCaptureWindow; - sp mCaptureHeap; - - virtual bool threadLoop(); - - status_t processNewCapture(); - size_t findJpegSize(uint8_t* jpegBuffer, size_t maxSize); - -}; - - -}; //namespace camera2 -}; //namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp deleted file mode 100644 index 0459866..0000000 --- a/services/camera/libcameraservice/camera2/Parameters.cpp +++ /dev/null @@ -1,2645 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#define LOG_TAG "Camera2-Parameters" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include -#include -#include - -#include -#include -#include - -#include "Parameters.h" -#include "system/camera.h" - -namespace android { -namespace camera2 { - -Parameters::Parameters(int cameraId, - int cameraFacing) : - cameraId(cameraId), - cameraFacing(cameraFacing), - info(NULL) { -} - -Parameters::~Parameters() { -} - -status_t Parameters::initialize(const CameraMetadata *info) { - status_t res; - - if (info->entryCount() == 0) { - ALOGE("%s: No static information provided!", __FUNCTION__); - return BAD_VALUE; - } - Parameters::info = info; - - res = buildFastInfo(); - if (res != OK) return res; - - res = buildQuirks(); - if (res != OK) return res; - - camera_metadata_ro_entry_t availableProcessedSizes = - staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 2); - if (!availableProcessedSizes.count) return NO_INIT; - - // TODO: Pick more intelligently - previewWidth = availableProcessedSizes.data.i32[0]; - previewHeight = availableProcessedSizes.data.i32[1]; - videoWidth = previewWidth; - videoHeight = previewHeight; - - params.setPreviewSize(previewWidth, previewHeight); - params.setVideoSize(videoWidth, videoHeight); - params.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, - String8::format("%dx%d", - previewWidth, previewHeight)); - { - String8 supportedPreviewSizes; - for (size_t i=0; i < availableProcessedSizes.count; i += 2) { - if (i != 0) supportedPreviewSizes += ","; - supportedPreviewSizes += String8::format("%dx%d", - availableProcessedSizes.data.i32[i], - availableProcessedSizes.data.i32[i+1]); - } - params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, - supportedPreviewSizes); - params.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, - supportedPreviewSizes); - } - - camera_metadata_ro_entry_t availableFpsRanges = - staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2); - if (!availableFpsRanges.count) return NO_INIT; - - previewFpsRange[0] = availableFpsRanges.data.i32[0]; - previewFpsRange[1] = availableFpsRanges.data.i32[1]; - - params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, - String8::format("%d,%d", - previewFpsRange[0] * kFpsToApiScale, - previewFpsRange[1] * kFpsToApiScale)); - - { - String8 supportedPreviewFpsRange; - for (size_t i=0; i < availableFpsRanges.count; i += 2) { - if (i != 0) supportedPreviewFpsRange += ","; - supportedPreviewFpsRange += String8::format("(%d,%d)", - availableFpsRanges.data.i32[i] * kFpsToApiScale, - availableFpsRanges.data.i32[i+1] * kFpsToApiScale); - } - params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, - supportedPreviewFpsRange); - } - - previewFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; - params.set(CameraParameters::KEY_PREVIEW_FORMAT, - formatEnumToString(previewFormat)); // NV21 - - previewTransform = degToTransform(0, - cameraFacing == CAMERA_FACING_FRONT); - - camera_metadata_ro_entry_t availableFormats = - staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS); - - { - String8 supportedPreviewFormats; - bool addComma = false; - for (size_t i=0; i < availableFormats.count; i++) { - if (addComma) supportedPreviewFormats += ","; - addComma = true; - switch (availableFormats.data.i32[i]) { - case HAL_PIXEL_FORMAT_YCbCr_422_SP: - supportedPreviewFormats += - CameraParameters::PIXEL_FORMAT_YUV422SP; - break; - case HAL_PIXEL_FORMAT_YCrCb_420_SP: - supportedPreviewFormats += - CameraParameters::PIXEL_FORMAT_YUV420SP; - break; - case HAL_PIXEL_FORMAT_YCbCr_422_I: - 
supportedPreviewFormats += - CameraParameters::PIXEL_FORMAT_YUV422I; - break; - case HAL_PIXEL_FORMAT_YV12: - supportedPreviewFormats += - CameraParameters::PIXEL_FORMAT_YUV420P; - break; - case HAL_PIXEL_FORMAT_RGB_565: - supportedPreviewFormats += - CameraParameters::PIXEL_FORMAT_RGB565; - break; - case HAL_PIXEL_FORMAT_RGBA_8888: - supportedPreviewFormats += - CameraParameters::PIXEL_FORMAT_RGBA8888; - break; - case HAL_PIXEL_FORMAT_YCbCr_420_888: - // Flexible YUV allows both YV12 and NV21 - supportedPreviewFormats += - CameraParameters::PIXEL_FORMAT_YUV420P; - supportedPreviewFormats += ","; - supportedPreviewFormats += - CameraParameters::PIXEL_FORMAT_YUV420SP; - break; - // Not advertizing JPEG, RAW_SENSOR, etc, for preview formats - case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: - case HAL_PIXEL_FORMAT_RAW_SENSOR: - case HAL_PIXEL_FORMAT_BLOB: - addComma = false; - break; - - default: - ALOGW("%s: Camera %d: Unknown preview format: %x", - __FUNCTION__, cameraId, availableFormats.data.i32[i]); - addComma = false; - break; - } - } - params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, - supportedPreviewFormats); - } - - // PREVIEW_FRAME_RATE / SUPPORTED_PREVIEW_FRAME_RATES are deprecated, but - // still have to do something sane for them - - // NOTE: Not scaled like FPS range values are. - previewFps = fpsFromRange(previewFpsRange[0], previewFpsRange[1]); - params.set(CameraParameters::KEY_PREVIEW_FRAME_RATE, - previewFps); - - { - SortedVector sortedPreviewFrameRates; - - String8 supportedPreviewFrameRates; - for (size_t i=0; i < availableFpsRanges.count; i += 2) { - // from the [min, max] fps range use the max value - int fps = fpsFromRange(availableFpsRanges.data.i32[i], - availableFpsRanges.data.i32[i+1]); - - // de-dupe frame rates - if (sortedPreviewFrameRates.indexOf(fps) == NAME_NOT_FOUND) { - sortedPreviewFrameRates.add(fps); - } - else { - continue; - } - - if (sortedPreviewFrameRates.size() > 1) { - supportedPreviewFrameRates += ","; - } - - supportedPreviewFrameRates += String8::format("%d", - fps); - - ALOGV("%s: Supported preview frame rates: %s", - __FUNCTION__, supportedPreviewFrameRates.string()); - } - params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, - supportedPreviewFrameRates); - } - - camera_metadata_ro_entry_t availableJpegSizes = - staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 2); - if (!availableJpegSizes.count) return NO_INIT; - - // TODO: Pick maximum - pictureWidth = availableJpegSizes.data.i32[0]; - pictureHeight = availableJpegSizes.data.i32[1]; - - params.setPictureSize(pictureWidth, - pictureHeight); - - { - String8 supportedPictureSizes; - for (size_t i=0; i < availableJpegSizes.count; i += 2) { - if (i != 0) supportedPictureSizes += ","; - supportedPictureSizes += String8::format("%dx%d", - availableJpegSizes.data.i32[i], - availableJpegSizes.data.i32[i+1]); - } - params.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, - supportedPictureSizes); - } - - params.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG); - params.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, - CameraParameters::PIXEL_FORMAT_JPEG); - - camera_metadata_ro_entry_t availableJpegThumbnailSizes = - staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 4); - if (!availableJpegThumbnailSizes.count) return NO_INIT; - - // TODO: Pick default thumbnail size sensibly - jpegThumbSize[0] = availableJpegThumbnailSizes.data.i32[0]; - jpegThumbSize[1] = availableJpegThumbnailSizes.data.i32[1]; - - params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, - 
jpegThumbSize[0]); - params.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, - jpegThumbSize[1]); - - { - String8 supportedJpegThumbSizes; - for (size_t i=0; i < availableJpegThumbnailSizes.count; i += 2) { - if (i != 0) supportedJpegThumbSizes += ","; - supportedJpegThumbSizes += String8::format("%dx%d", - availableJpegThumbnailSizes.data.i32[i], - availableJpegThumbnailSizes.data.i32[i+1]); - } - params.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, - supportedJpegThumbSizes); - } - - jpegThumbQuality = 90; - params.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, - jpegThumbQuality); - jpegQuality = 90; - params.set(CameraParameters::KEY_JPEG_QUALITY, - jpegQuality); - jpegRotation = 0; - params.set(CameraParameters::KEY_ROTATION, - jpegRotation); - - gpsEnabled = false; - gpsCoordinates[0] = 0.0; - gpsCoordinates[1] = 0.0; - gpsCoordinates[2] = 0.0; - gpsTimestamp = 0; - gpsProcessingMethod = "unknown"; - // GPS fields in CameraParameters are not set by implementation - - wbMode = ANDROID_CONTROL_AWB_MODE_AUTO; - params.set(CameraParameters::KEY_WHITE_BALANCE, - CameraParameters::WHITE_BALANCE_AUTO); - - camera_metadata_ro_entry_t availableWhiteBalanceModes = - staticInfo(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 0, 0, false); - if (!availableWhiteBalanceModes.count) { - params.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, - CameraParameters::WHITE_BALANCE_AUTO); - } else { - String8 supportedWhiteBalance; - bool addComma = false; - for (size_t i=0; i < availableWhiteBalanceModes.count; i++) { - if (addComma) supportedWhiteBalance += ","; - addComma = true; - switch (availableWhiteBalanceModes.data.u8[i]) { - case ANDROID_CONTROL_AWB_MODE_AUTO: - supportedWhiteBalance += - CameraParameters::WHITE_BALANCE_AUTO; - break; - case ANDROID_CONTROL_AWB_MODE_INCANDESCENT: - supportedWhiteBalance += - CameraParameters::WHITE_BALANCE_INCANDESCENT; - break; - case ANDROID_CONTROL_AWB_MODE_FLUORESCENT: - supportedWhiteBalance += - CameraParameters::WHITE_BALANCE_FLUORESCENT; - break; - case ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT: - supportedWhiteBalance += - CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT; - break; - case ANDROID_CONTROL_AWB_MODE_DAYLIGHT: - supportedWhiteBalance += - CameraParameters::WHITE_BALANCE_DAYLIGHT; - break; - case ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT: - supportedWhiteBalance += - CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT; - break; - case ANDROID_CONTROL_AWB_MODE_TWILIGHT: - supportedWhiteBalance += - CameraParameters::WHITE_BALANCE_TWILIGHT; - break; - case ANDROID_CONTROL_AWB_MODE_SHADE: - supportedWhiteBalance += - CameraParameters::WHITE_BALANCE_SHADE; - break; - // Skipping values not mappable to v1 API - case ANDROID_CONTROL_AWB_MODE_OFF: - addComma = false; - break; - default: - ALOGW("%s: Camera %d: Unknown white balance value: %d", - __FUNCTION__, cameraId, - availableWhiteBalanceModes.data.u8[i]); - addComma = false; - break; - } - } - params.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, - supportedWhiteBalance); - } - - effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; - params.set(CameraParameters::KEY_EFFECT, - CameraParameters::EFFECT_NONE); - - camera_metadata_ro_entry_t availableEffects = - staticInfo(ANDROID_CONTROL_AVAILABLE_EFFECTS, 0, 0, false); - if (!availableEffects.count) { - params.set(CameraParameters::KEY_SUPPORTED_EFFECTS, - CameraParameters::EFFECT_NONE); - } else { - String8 supportedEffects; - bool addComma = false; - for (size_t i=0; i < availableEffects.count; i++) { - if (addComma) supportedEffects += ","; - 
addComma = true; - switch (availableEffects.data.u8[i]) { - case ANDROID_CONTROL_EFFECT_MODE_OFF: - supportedEffects += - CameraParameters::EFFECT_NONE; - break; - case ANDROID_CONTROL_EFFECT_MODE_MONO: - supportedEffects += - CameraParameters::EFFECT_MONO; - break; - case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE: - supportedEffects += - CameraParameters::EFFECT_NEGATIVE; - break; - case ANDROID_CONTROL_EFFECT_MODE_SOLARIZE: - supportedEffects += - CameraParameters::EFFECT_SOLARIZE; - break; - case ANDROID_CONTROL_EFFECT_MODE_SEPIA: - supportedEffects += - CameraParameters::EFFECT_SEPIA; - break; - case ANDROID_CONTROL_EFFECT_MODE_POSTERIZE: - supportedEffects += - CameraParameters::EFFECT_POSTERIZE; - break; - case ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD: - supportedEffects += - CameraParameters::EFFECT_WHITEBOARD; - break; - case ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD: - supportedEffects += - CameraParameters::EFFECT_BLACKBOARD; - break; - case ANDROID_CONTROL_EFFECT_MODE_AQUA: - supportedEffects += - CameraParameters::EFFECT_AQUA; - break; - default: - ALOGW("%s: Camera %d: Unknown effect value: %d", - __FUNCTION__, cameraId, availableEffects.data.u8[i]); - addComma = false; - break; - } - } - params.set(CameraParameters::KEY_SUPPORTED_EFFECTS, supportedEffects); - } - - antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; - params.set(CameraParameters::KEY_ANTIBANDING, - CameraParameters::ANTIBANDING_AUTO); - - camera_metadata_ro_entry_t availableAntibandingModes = - staticInfo(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 0, 0, false); - if (!availableAntibandingModes.count) { - params.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, - CameraParameters::ANTIBANDING_OFF); - } else { - String8 supportedAntibanding; - bool addComma = false; - for (size_t i=0; i < availableAntibandingModes.count; i++) { - if (addComma) supportedAntibanding += ","; - addComma = true; - switch (availableAntibandingModes.data.u8[i]) { - case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF: - supportedAntibanding += - CameraParameters::ANTIBANDING_OFF; - break; - case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ: - supportedAntibanding += - CameraParameters::ANTIBANDING_50HZ; - break; - case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ: - supportedAntibanding += - CameraParameters::ANTIBANDING_60HZ; - break; - case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO: - supportedAntibanding += - CameraParameters::ANTIBANDING_AUTO; - break; - default: - ALOGW("%s: Camera %d: Unknown antibanding value: %d", - __FUNCTION__, cameraId, - availableAntibandingModes.data.u8[i]); - addComma = false; - break; - } - } - params.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, - supportedAntibanding); - } - - sceneMode = ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED; - params.set(CameraParameters::KEY_SCENE_MODE, - CameraParameters::SCENE_MODE_AUTO); - - camera_metadata_ro_entry_t availableSceneModes = - staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 0, 0, false); - if (!availableSceneModes.count) { - params.remove(CameraParameters::KEY_SCENE_MODE); - } else { - String8 supportedSceneModes(CameraParameters::SCENE_MODE_AUTO); - bool addComma = true; - bool noSceneModes = false; - for (size_t i=0; i < availableSceneModes.count; i++) { - if (addComma) supportedSceneModes += ","; - addComma = true; - switch (availableSceneModes.data.u8[i]) { - case ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED: - noSceneModes = true; - break; - case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY: - // Not in old API - addComma = false; - break; - case 
ANDROID_CONTROL_SCENE_MODE_ACTION: - supportedSceneModes += - CameraParameters::SCENE_MODE_ACTION; - break; - case ANDROID_CONTROL_SCENE_MODE_PORTRAIT: - supportedSceneModes += - CameraParameters::SCENE_MODE_PORTRAIT; - break; - case ANDROID_CONTROL_SCENE_MODE_LANDSCAPE: - supportedSceneModes += - CameraParameters::SCENE_MODE_LANDSCAPE; - break; - case ANDROID_CONTROL_SCENE_MODE_NIGHT: - supportedSceneModes += - CameraParameters::SCENE_MODE_NIGHT; - break; - case ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT: - supportedSceneModes += - CameraParameters::SCENE_MODE_NIGHT_PORTRAIT; - break; - case ANDROID_CONTROL_SCENE_MODE_THEATRE: - supportedSceneModes += - CameraParameters::SCENE_MODE_THEATRE; - break; - case ANDROID_CONTROL_SCENE_MODE_BEACH: - supportedSceneModes += - CameraParameters::SCENE_MODE_BEACH; - break; - case ANDROID_CONTROL_SCENE_MODE_SNOW: - supportedSceneModes += - CameraParameters::SCENE_MODE_SNOW; - break; - case ANDROID_CONTROL_SCENE_MODE_SUNSET: - supportedSceneModes += - CameraParameters::SCENE_MODE_SUNSET; - break; - case ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO: - supportedSceneModes += - CameraParameters::SCENE_MODE_STEADYPHOTO; - break; - case ANDROID_CONTROL_SCENE_MODE_FIREWORKS: - supportedSceneModes += - CameraParameters::SCENE_MODE_FIREWORKS; - break; - case ANDROID_CONTROL_SCENE_MODE_SPORTS: - supportedSceneModes += - CameraParameters::SCENE_MODE_SPORTS; - break; - case ANDROID_CONTROL_SCENE_MODE_PARTY: - supportedSceneModes += - CameraParameters::SCENE_MODE_PARTY; - break; - case ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT: - supportedSceneModes += - CameraParameters::SCENE_MODE_CANDLELIGHT; - break; - case ANDROID_CONTROL_SCENE_MODE_BARCODE: - supportedSceneModes += - CameraParameters::SCENE_MODE_BARCODE; - break; - default: - ALOGW("%s: Camera %d: Unknown scene mode value: %d", - __FUNCTION__, cameraId, - availableSceneModes.data.u8[i]); - addComma = false; - break; - } - } - if (!noSceneModes) { - params.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, - supportedSceneModes); - } else { - params.remove(CameraParameters::KEY_SCENE_MODE); - } - } - - bool isFlashAvailable = false; - camera_metadata_ro_entry_t flashAvailable = - staticInfo(ANDROID_FLASH_INFO_AVAILABLE, 0, 1, false); - if (flashAvailable.count) { - isFlashAvailable = flashAvailable.data.u8[0]; - } - - camera_metadata_ro_entry_t availableAeModes = - staticInfo(ANDROID_CONTROL_AE_AVAILABLE_MODES, 0, 0, false); - - if (isFlashAvailable) { - flashMode = Parameters::FLASH_MODE_OFF; - params.set(CameraParameters::KEY_FLASH_MODE, - CameraParameters::FLASH_MODE_OFF); - - String8 supportedFlashModes(CameraParameters::FLASH_MODE_OFF); - supportedFlashModes = supportedFlashModes + - "," + CameraParameters::FLASH_MODE_AUTO + - "," + CameraParameters::FLASH_MODE_ON + - "," + CameraParameters::FLASH_MODE_TORCH; - for (size_t i=0; i < availableAeModes.count; i++) { - if (availableAeModes.data.u8[i] == - ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) { - supportedFlashModes = supportedFlashModes + "," + - CameraParameters::FLASH_MODE_RED_EYE; - break; - } - } - params.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, - supportedFlashModes); - } else { - flashMode = Parameters::FLASH_MODE_OFF; - params.set(CameraParameters::KEY_FLASH_MODE, - CameraParameters::FLASH_MODE_OFF); - params.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, - CameraParameters::FLASH_MODE_OFF); - } - - camera_metadata_ro_entry_t minFocusDistance = - staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 0, 1, false); - - camera_metadata_ro_entry_t 
availableAfModes = - staticInfo(ANDROID_CONTROL_AF_AVAILABLE_MODES, 0, 0, false); - - if (!minFocusDistance.count || minFocusDistance.data.f[0] == 0) { - // Fixed-focus lens - focusMode = Parameters::FOCUS_MODE_FIXED; - params.set(CameraParameters::KEY_FOCUS_MODE, - CameraParameters::FOCUS_MODE_FIXED); - params.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, - CameraParameters::FOCUS_MODE_FIXED); - } else { - focusMode = Parameters::FOCUS_MODE_AUTO; - params.set(CameraParameters::KEY_FOCUS_MODE, - CameraParameters::FOCUS_MODE_AUTO); - String8 supportedFocusModes(CameraParameters::FOCUS_MODE_INFINITY); - bool addComma = true; - - for (size_t i=0; i < availableAfModes.count; i++) { - if (addComma) supportedFocusModes += ","; - addComma = true; - switch (availableAfModes.data.u8[i]) { - case ANDROID_CONTROL_AF_MODE_AUTO: - supportedFocusModes += - CameraParameters::FOCUS_MODE_AUTO; - break; - case ANDROID_CONTROL_AF_MODE_MACRO: - supportedFocusModes += - CameraParameters::FOCUS_MODE_MACRO; - break; - case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: - supportedFocusModes += - CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO; - break; - case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: - supportedFocusModes += - CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE; - break; - case ANDROID_CONTROL_AF_MODE_EDOF: - supportedFocusModes += - CameraParameters::FOCUS_MODE_EDOF; - break; - // Not supported in old API - case ANDROID_CONTROL_AF_MODE_OFF: - addComma = false; - break; - default: - ALOGW("%s: Camera %d: Unknown AF mode value: %d", - __FUNCTION__, cameraId, availableAfModes.data.u8[i]); - addComma = false; - break; - } - } - params.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, - supportedFocusModes); - } - focusState = ANDROID_CONTROL_AF_STATE_INACTIVE; - shadowFocusMode = FOCUS_MODE_INVALID; - - camera_metadata_ro_entry_t max3aRegions = - staticInfo(ANDROID_CONTROL_MAX_REGIONS, 1, 1); - if (!max3aRegions.count) return NO_INIT; - - int32_t maxNumFocusAreas = 0; - if (focusMode != Parameters::FOCUS_MODE_FIXED) { - maxNumFocusAreas = max3aRegions.data.i32[0]; - } - params.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, maxNumFocusAreas); - params.set(CameraParameters::KEY_FOCUS_AREAS, - "(0,0,0,0,0)"); - focusingAreas.clear(); - focusingAreas.add(Parameters::Area(0,0,0,0,0)); - - camera_metadata_ro_entry_t availableFocalLengths = - staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 0, 0, false); - if (!availableFocalLengths.count) return NO_INIT; - - float minFocalLength = availableFocalLengths.data.f[0]; - params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, minFocalLength); - - float horizFov, vertFov; - res = calculatePictureFovs(&horizFov, &vertFov); - if (res != OK) { - ALOGE("%s: Can't calculate field of views!", __FUNCTION__); - return res; - } - - params.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, horizFov); - params.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, vertFov); - - exposureCompensation = 0; - params.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, - exposureCompensation); - - camera_metadata_ro_entry_t exposureCompensationRange = - staticInfo(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 2, 2); - if (!exposureCompensationRange.count) return NO_INIT; - - params.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, - exposureCompensationRange.data.i32[1]); - params.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, - exposureCompensationRange.data.i32[0]); - - camera_metadata_ro_entry_t exposureCompensationStep = - staticInfo(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1, 1); - 
if (!exposureCompensationStep.count) return NO_INIT; - - params.setFloat(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, - (float)exposureCompensationStep.data.r[0].numerator / - exposureCompensationStep.data.r[0].denominator); - - autoExposureLock = false; - params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, - CameraParameters::FALSE); - params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, - CameraParameters::TRUE); - - autoWhiteBalanceLock = false; - params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, - CameraParameters::FALSE); - params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, - CameraParameters::TRUE); - - meteringAreas.add(Parameters::Area(0, 0, 0, 0, 0)); - params.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, - max3aRegions.data.i32[0]); - params.set(CameraParameters::KEY_METERING_AREAS, - "(0,0,0,0,0)"); - - zoom = 0; - params.set(CameraParameters::KEY_ZOOM, zoom); - params.set(CameraParameters::KEY_MAX_ZOOM, NUM_ZOOM_STEPS - 1); - - camera_metadata_ro_entry_t maxDigitalZoom = - staticInfo(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, /*minCount*/1, /*maxCount*/1); - if (!maxDigitalZoom.count) return NO_INIT; - - { - String8 zoomRatios; - float zoom = 1.f; - float zoomIncrement = (maxDigitalZoom.data.f[0] - zoom) / - (NUM_ZOOM_STEPS-1); - bool addComma = false; - for (size_t i=0; i < NUM_ZOOM_STEPS; i++) { - if (addComma) zoomRatios += ","; - addComma = true; - zoomRatios += String8::format("%d", static_cast(zoom * 100)); - zoom += zoomIncrement; - } - params.set(CameraParameters::KEY_ZOOM_RATIOS, zoomRatios); - } - - params.set(CameraParameters::KEY_ZOOM_SUPPORTED, - CameraParameters::TRUE); - params.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, - CameraParameters::FALSE); - - params.set(CameraParameters::KEY_FOCUS_DISTANCES, - "Infinity,Infinity,Infinity"); - - params.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, - fastInfo.maxFaces); - params.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, - 0); - - params.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, - CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE); - - recordingHint = false; - params.set(CameraParameters::KEY_RECORDING_HINT, - CameraParameters::FALSE); - - params.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, - CameraParameters::TRUE); - - videoStabilization = false; - params.set(CameraParameters::KEY_VIDEO_STABILIZATION, - CameraParameters::FALSE); - - camera_metadata_ro_entry_t availableVideoStabilizationModes = - staticInfo(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 0, 0, - false); - - if (availableVideoStabilizationModes.count > 1) { - params.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, - CameraParameters::TRUE); - } else { - params.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, - CameraParameters::FALSE); - } - - // Set up initial state for non-Camera.Parameters state variables - - storeMetadataInBuffers = true; - playShutterSound = true; - enableFaceDetect = false; - - enableFocusMoveMessages = false; - afTriggerCounter = 1; - currentAfTriggerId = -1; - afInMotion = false; - - precaptureTriggerCounter = 1; - - previewCallbackFlags = 0; - previewCallbackOneShot = false; - previewCallbackSurface = false; - - camera_metadata_ro_entry_t supportedHardwareLevel = - staticInfo(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, 0, 0, false); - if (!supportedHardwareLevel.count || (supportedHardwareLevel.data.u8[0] == - ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED)) { - ALOGI("Camera %d: ZSL mode disabled for limited mode HALs", cameraId); - zslMode = 
false; - } else { - char value[PROPERTY_VALUE_MAX]; - property_get("camera.disable_zsl_mode", value, "0"); - if (!strcmp(value,"1")) { - ALOGI("Camera %d: Disabling ZSL mode", cameraId); - zslMode = false; - } else { - zslMode = true; - } - } - - lightFx = LIGHTFX_NONE; - - state = STOPPED; - - paramsFlattened = params.flatten(); - - return OK; -} - -String8 Parameters::get() const { - return paramsFlattened; -} - -status_t Parameters::buildFastInfo() { - - camera_metadata_ro_entry_t activeArraySize = - staticInfo(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 2, 4); - if (!activeArraySize.count) return NO_INIT; - int32_t arrayWidth; - int32_t arrayHeight; - if (activeArraySize.count == 2) { - ALOGW("%s: Camera %d: activeArraySize is missing xmin/ymin!", - __FUNCTION__, cameraId); - arrayWidth = activeArraySize.data.i32[0]; - arrayHeight = activeArraySize.data.i32[1]; - } else if (activeArraySize.count == 4) { - arrayWidth = activeArraySize.data.i32[2]; - arrayHeight = activeArraySize.data.i32[3]; - } else return NO_INIT; - - camera_metadata_ro_entry_t availableFaceDetectModes = - staticInfo(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 0, 0, - false); - - uint8_t bestFaceDetectMode = - ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; - for (size_t i = 0 ; i < availableFaceDetectModes.count; i++) { - switch (availableFaceDetectModes.data.u8[i]) { - case ANDROID_STATISTICS_FACE_DETECT_MODE_OFF: - break; - case ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE: - if (bestFaceDetectMode != - ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) { - bestFaceDetectMode = - ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE; - } - break; - case ANDROID_STATISTICS_FACE_DETECT_MODE_FULL: - bestFaceDetectMode = - ANDROID_STATISTICS_FACE_DETECT_MODE_FULL; - break; - default: - ALOGE("%s: Camera %d: Unknown face detect mode %d:", - __FUNCTION__, cameraId, - availableFaceDetectModes.data.u8[i]); - return NO_INIT; - } - } - - int32_t maxFaces = 0; - camera_metadata_ro_entry_t maxFacesDetected = - staticInfo(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 0, 1, false); - if (maxFacesDetected.count) { - maxFaces = maxFacesDetected.data.i32[0]; - } - - camera_metadata_ro_entry_t availableSceneModes = - staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 0, 0, false); - camera_metadata_ro_entry_t sceneModeOverrides = - staticInfo(ANDROID_CONTROL_SCENE_MODE_OVERRIDES, 0, 0, false); - camera_metadata_ro_entry_t minFocusDistance = - staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 0, 0, false); - bool fixedLens = minFocusDistance.count == 0 || - minFocusDistance.data.f[0] == 0; - - camera_metadata_ro_entry_t availableFocalLengths = - staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS); - if (!availableFocalLengths.count) return NO_INIT; - - camera_metadata_ro_entry_t availableFormats = - staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS); - if (!availableFormats.count) return NO_INIT; - - - if (sceneModeOverrides.count > 0) { - // sceneModeOverrides is defined to have 3 entries for each scene mode, - // which are AE, AWB, and AF override modes the HAL wants for that scene - // mode. 
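The loop that follows consumes that flat array three entries at a time. As a minimal illustration of the indexing (SceneOverride and overrideForSceneMode are invented names for this sketch, not part of the original sources), the i-th available scene mode owns entries 3*i through 3*i+2:

    #include <stddef.h>
    #include <stdint.h>

    static const size_t kModesPerSceneMode = 3;  // AE, AWB, AF per scene mode

    struct SceneOverride {
        uint8_t aeMode;   // ANDROID_CONTROL_AE_MODE_* value
        uint8_t awbMode;  // ANDROID_CONTROL_AWB_MODE_* value
        uint8_t afMode;   // ANDROID_CONTROL_AF_MODE_* value
    };

    // Picks out the override triple advertised for one available scene mode,
    // assuming overrides holds 3 * sceneModeCount entries as described above.
    static SceneOverride overrideForSceneMode(const uint8_t* overrides,
                                              size_t sceneModeIndex) {
        SceneOverride o;
        o.aeMode  = overrides[sceneModeIndex * kModesPerSceneMode + 0];
        o.awbMode = overrides[sceneModeIndex * kModesPerSceneMode + 1];
        o.afMode  = overrides[sceneModeIndex * kModesPerSceneMode + 2];
        return o;
    }

The deleted code then translates the AE entry into an API-1 flash mode and the AF entry into an API-1 focus mode, falling back to FLASH_MODE_INVALID or FOCUS_MODE_INVALID for values it does not recognize.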
- const size_t kModesPerSceneMode = 3; - if (sceneModeOverrides.count != - availableSceneModes.count * kModesPerSceneMode) { - ALOGE("%s: Camera %d: Scene mode override list is an " - "unexpected size: %d (expected %d)", __FUNCTION__, - cameraId, sceneModeOverrides.count, - availableSceneModes.count); - return NO_INIT; - } - for (size_t i = 0; i < availableSceneModes.count; i++) { - DeviceInfo::OverrideModes modes; - uint8_t aeMode = - sceneModeOverrides.data.u8[i * kModesPerSceneMode + 0]; - switch(aeMode) { - case ANDROID_CONTROL_AE_MODE_ON: - modes.flashMode = FLASH_MODE_OFF; - break; - case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH: - modes.flashMode = FLASH_MODE_AUTO; - break; - case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH: - modes.flashMode = FLASH_MODE_ON; - break; - case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: - modes.flashMode = FLASH_MODE_RED_EYE; - break; - default: - ALOGE("%s: Unknown override AE mode: %d", __FUNCTION__, - aeMode); - modes.flashMode = FLASH_MODE_INVALID; - break; - } - modes.wbMode = - sceneModeOverrides.data.u8[i * kModesPerSceneMode + 1]; - uint8_t afMode = - sceneModeOverrides.data.u8[i * kModesPerSceneMode + 2]; - switch(afMode) { - case ANDROID_CONTROL_AF_MODE_OFF: - modes.focusMode = fixedLens ? - FOCUS_MODE_FIXED : FOCUS_MODE_INFINITY; - break; - case ANDROID_CONTROL_AF_MODE_AUTO: - case ANDROID_CONTROL_AF_MODE_MACRO: - case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: - case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: - case ANDROID_CONTROL_AF_MODE_EDOF: - modes.focusMode = static_cast(afMode); - break; - default: - ALOGE("%s: Unknown override AF mode: %d", __FUNCTION__, - afMode); - modes.focusMode = FOCUS_MODE_INVALID; - break; - } - fastInfo.sceneModeOverrides.add(availableSceneModes.data.u8[i], - modes); - } - } - - fastInfo.arrayWidth = arrayWidth; - fastInfo.arrayHeight = arrayHeight; - fastInfo.bestFaceDetectMode = bestFaceDetectMode; - fastInfo.maxFaces = maxFaces; - - // Find smallest (widest-angle) focal length to use as basis of still - // picture FOV reporting. - fastInfo.minFocalLength = availableFocalLengths.data.f[0]; - for (size_t i = 1; i < availableFocalLengths.count; i++) { - if (fastInfo.minFocalLength > availableFocalLengths.data.f[i]) { - fastInfo.minFocalLength = availableFocalLengths.data.f[i]; - } - } - - // Check if the HAL supports HAL_PIXEL_FORMAT_YCbCr_420_888 - fastInfo.useFlexibleYuv = false; - for (size_t i = 0; i < availableFormats.count; i++) { - if (availableFormats.data.i32[i] == HAL_PIXEL_FORMAT_YCbCr_420_888) { - fastInfo.useFlexibleYuv = true; - break; - } - } - ALOGV("Camera %d: Flexible YUV %s supported", - cameraId, fastInfo.useFlexibleYuv ? 
"is" : "is not"); - - return OK; -} - -status_t Parameters::buildQuirks() { - camera_metadata_ro_entry_t entry; - entry = info->find(ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO); - quirks.triggerAfWithAuto = (entry.count != 0 && entry.data.u8[0] == 1); - ALOGV_IF(quirks.triggerAfWithAuto, "Camera %d: Quirk triggerAfWithAuto enabled", - cameraId); - - entry = info->find(ANDROID_QUIRKS_USE_ZSL_FORMAT); - quirks.useZslFormat = (entry.count != 0 && entry.data.u8[0] == 1); - ALOGV_IF(quirks.useZslFormat, "Camera %d: Quirk useZslFormat enabled", - cameraId); - - entry = info->find(ANDROID_QUIRKS_METERING_CROP_REGION); - quirks.meteringCropRegion = (entry.count != 0 && entry.data.u8[0] == 1); - ALOGV_IF(quirks.meteringCropRegion, "Camera %d: Quirk meteringCropRegion" - " enabled", cameraId); - - return OK; -} - -camera_metadata_ro_entry_t Parameters::staticInfo(uint32_t tag, - size_t minCount, size_t maxCount, bool required) const { - camera_metadata_ro_entry_t entry = info->find(tag); - - if (CC_UNLIKELY( entry.count == 0 ) && required) { - const char* tagSection = get_camera_metadata_section_name(tag); - if (tagSection == NULL) tagSection = ""; - const char* tagName = get_camera_metadata_tag_name(tag); - if (tagName == NULL) tagName = ""; - - ALOGE("Error finding static metadata entry '%s.%s' (%x)", - tagSection, tagName, tag); - } else if (CC_UNLIKELY( - (minCount != 0 && entry.count < minCount) || - (maxCount != 0 && entry.count > maxCount) ) ) { - const char* tagSection = get_camera_metadata_section_name(tag); - if (tagSection == NULL) tagSection = ""; - const char* tagName = get_camera_metadata_tag_name(tag); - if (tagName == NULL) tagName = ""; - ALOGE("Malformed static metadata entry '%s.%s' (%x):" - "Expected between %d and %d values, but got %d values", - tagSection, tagName, tag, minCount, maxCount, entry.count); - } - - return entry; -} - -status_t Parameters::set(const String8& paramString) { - status_t res; - - CameraParameters newParams(paramString); - - // TODO: Currently ignoring any changes to supposedly read-only parameters - // such as supported preview sizes, etc. Should probably produce an error if - // they're changed. - - /** Extract and verify new parameters */ - - size_t i; - - Parameters validatedParams(*this); - - // PREVIEW_SIZE - newParams.getPreviewSize(&validatedParams.previewWidth, - &validatedParams.previewHeight); - - if (validatedParams.previewWidth != previewWidth || - validatedParams.previewHeight != previewHeight) { - if (state >= PREVIEW) { - ALOGE("%s: Preview size cannot be updated when preview " - "is active! 
(Currently %d x %d, requested %d x %d", - __FUNCTION__, - previewWidth, previewHeight, - validatedParams.previewWidth, validatedParams.previewHeight); - return BAD_VALUE; - } - camera_metadata_ro_entry_t availablePreviewSizes = - staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES); - for (i = 0; i < availablePreviewSizes.count; i += 2 ) { - if ((availablePreviewSizes.data.i32[i] == - validatedParams.previewWidth) && - (availablePreviewSizes.data.i32[i+1] == - validatedParams.previewHeight)) break; - } - if (i == availablePreviewSizes.count) { - ALOGE("%s: Requested preview size %d x %d is not supported", - __FUNCTION__, validatedParams.previewWidth, - validatedParams.previewHeight); - return BAD_VALUE; - } - } - - // RECORDING_HINT (always supported) - validatedParams.recordingHint = boolFromString( - newParams.get(CameraParameters::KEY_RECORDING_HINT) ); - bool recordingHintChanged = validatedParams.recordingHint != recordingHint; - ALOGV_IF(recordingHintChanged, "%s: Recording hint changed to %d", - __FUNCTION__, recordingHintChanged); - - // PREVIEW_FPS_RANGE - bool fpsRangeChanged = false; - newParams.getPreviewFpsRange(&validatedParams.previewFpsRange[0], - &validatedParams.previewFpsRange[1]); - validatedParams.previewFpsRange[0] /= kFpsToApiScale; - validatedParams.previewFpsRange[1] /= kFpsToApiScale; - - if (validatedParams.previewFpsRange[0] != previewFpsRange[0] || - validatedParams.previewFpsRange[1] != previewFpsRange[1]) { - fpsRangeChanged = true; - camera_metadata_ro_entry_t availablePreviewFpsRanges = - staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2); - for (i = 0; i < availablePreviewFpsRanges.count; i += 2) { - if ((availablePreviewFpsRanges.data.i32[i] == - validatedParams.previewFpsRange[0]) && - (availablePreviewFpsRanges.data.i32[i+1] == - validatedParams.previewFpsRange[1]) ) { - break; - } - } - if (i == availablePreviewFpsRanges.count) { - ALOGE("%s: Requested preview FPS range %d - %d is not supported", - __FUNCTION__, validatedParams.previewFpsRange[0], - validatedParams.previewFpsRange[1]); - return BAD_VALUE; - } - validatedParams.previewFps = - fpsFromRange(validatedParams.previewFpsRange[0], - validatedParams.previewFpsRange[1]); - newParams.setPreviewFrameRate(validatedParams.previewFps); - } - - // PREVIEW_FORMAT - validatedParams.previewFormat = - formatStringToEnum(newParams.getPreviewFormat()); - if (validatedParams.previewFormat != previewFormat) { - if (state >= PREVIEW) { - ALOGE("%s: Preview format cannot be updated when preview " - "is active!", __FUNCTION__); - return BAD_VALUE; - } - camera_metadata_ro_entry_t availableFormats = - staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS); - // If using flexible YUV, always support NV21/YV12. Otherwise, check - // HAL's list. - if (! (fastInfo.useFlexibleYuv && - (validatedParams.previewFormat == - HAL_PIXEL_FORMAT_YCrCb_420_SP || - validatedParams.previewFormat == - HAL_PIXEL_FORMAT_YV12) ) ) { - // Not using flexible YUV format, so check explicitly - for (i = 0; i < availableFormats.count; i++) { - if (availableFormats.data.i32[i] == - validatedParams.previewFormat) break; - } - if (i == availableFormats.count) { - ALOGE("%s: Requested preview format %s (0x%x) is not supported", - __FUNCTION__, newParams.getPreviewFormat(), - validatedParams.previewFormat); - return BAD_VALUE; - } - } - } - - // PREVIEW_FRAME_RATE - // Deprecated, only use if the preview fps range is unchanged this time. - // The single-value FPS is the same as the minimum of the range. 
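The FPS range travels through the android.hardware.Camera API scaled by kFpsToApiScale (the API convention is fps times 1000, so "15000,30000" means 15 to 30 fps), while this deprecated single-value key stays in plain frames per second. A tiny worked example of that conversion, with invented helper names and the scale assumed to match Parameters::kFpsToApiScale:

    #include <cstdio>

    // Assumed to equal Parameters::kFpsToApiScale; the API-1 convention is
    // that KEY_PREVIEW_FPS_RANGE carries fps multiplied by 1000.
    static const int kApiScale = 1000;

    static int fpsToApi(int fps)   { return fps * kApiScale; }  // 30 -> 30000
    static int fpsFromApi(int api) { return api / kApiScale; }  // 15000 -> 15

    int main() {
        // The range is scaled when written into the parameter string ...
        printf("%d,%d\n", fpsToApi(15), fpsToApi(30));                // 15000,30000
        // ... and divided back down when read out of it.
        printf("%d-%d fps\n", fpsFromApi(15000), fpsFromApi(30000));  // 15-30 fps
        return 0;
    }

The code that follows only falls back to the single-value key when the scaled range was left untouched by the caller, matching the comment above.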
- if (!fpsRangeChanged) { - validatedParams.previewFps = newParams.getPreviewFrameRate(); - if (validatedParams.previewFps != previewFps || recordingHintChanged) { - camera_metadata_ro_entry_t availableFrameRates = - staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES); - /** - * If recording hint is set, find the range that encompasses - * previewFps with the largest min index. - * - * If recording hint is not set, find the range with previewFps - * with the smallest min index. - * - * Either way, in case of multiple ranges, break the tie by - * selecting the smaller range. - */ - int targetFps = validatedParams.previewFps; - // all ranges which have targetFps - Vector candidateRanges; - for (i = 0; i < availableFrameRates.count; i+=2) { - Range r = { - availableFrameRates.data.i32[i], - availableFrameRates.data.i32[i+1] - }; - - if (r.min <= targetFps && targetFps <= r.max) { - candidateRanges.push(r); - } - } - if (candidateRanges.isEmpty()) { - ALOGE("%s: Requested preview frame rate %d is not supported", - __FUNCTION__, validatedParams.previewFps); - return BAD_VALUE; - } - // most applicable range with targetFps - Range bestRange = candidateRanges[0]; - for (i = 1; i < candidateRanges.size(); ++i) { - Range r = candidateRanges[i]; - - // Find by largest minIndex in recording mode - if (validatedParams.recordingHint) { - if (r.min > bestRange.min) { - bestRange = r; - } - else if (r.min == bestRange.min && r.max < bestRange.max) { - bestRange = r; - } - } - // Find by smallest minIndex in preview mode - else { - if (r.min < bestRange.min) { - bestRange = r; - } - else if (r.min == bestRange.min && r.max < bestRange.max) { - bestRange = r; - } - } - } - - validatedParams.previewFpsRange[0] = - bestRange.min; - validatedParams.previewFpsRange[1] = - bestRange.max; - - ALOGV("%s: New preview FPS range: %d, %d, recordingHint = %d", - __FUNCTION__, - validatedParams.previewFpsRange[0], - validatedParams.previewFpsRange[1], - validatedParams.recordingHint); - } - newParams.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, - String8::format("%d,%d", - validatedParams.previewFpsRange[0] * kFpsToApiScale, - validatedParams.previewFpsRange[1] * kFpsToApiScale)); - - } - - // PICTURE_SIZE - newParams.getPictureSize(&validatedParams.pictureWidth, - &validatedParams.pictureHeight); - if (validatedParams.pictureWidth == pictureWidth || - validatedParams.pictureHeight == pictureHeight) { - camera_metadata_ro_entry_t availablePictureSizes = - staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_SIZES); - for (i = 0; i < availablePictureSizes.count; i+=2) { - if ((availablePictureSizes.data.i32[i] == - validatedParams.pictureWidth) && - (availablePictureSizes.data.i32[i+1] == - validatedParams.pictureHeight)) break; - } - if (i == availablePictureSizes.count) { - ALOGE("%s: Requested picture size %d x %d is not supported", - __FUNCTION__, validatedParams.pictureWidth, - validatedParams.pictureHeight); - return BAD_VALUE; - } - } - - // JPEG_THUMBNAIL_WIDTH/HEIGHT - validatedParams.jpegThumbSize[0] = - newParams.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH); - validatedParams.jpegThumbSize[1] = - newParams.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT); - if (validatedParams.jpegThumbSize[0] != jpegThumbSize[0] || - validatedParams.jpegThumbSize[1] != jpegThumbSize[1]) { - camera_metadata_ro_entry_t availableJpegThumbSizes = - staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES); - for (i = 0; i < availableJpegThumbSizes.count; i+=2) { - if ((availableJpegThumbSizes.data.i32[i] == - 
validatedParams.jpegThumbSize[0]) && - (availableJpegThumbSizes.data.i32[i+1] == - validatedParams.jpegThumbSize[1])) break; - } - if (i == availableJpegThumbSizes.count) { - ALOGE("%s: Requested JPEG thumbnail size %d x %d is not supported", - __FUNCTION__, validatedParams.jpegThumbSize[0], - validatedParams.jpegThumbSize[1]); - return BAD_VALUE; - } - } - - // JPEG_THUMBNAIL_QUALITY - int quality = newParams.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY); - // also makes sure quality fits in uint8_t - if (quality < 0 || quality > 100) { - ALOGE("%s: Requested JPEG thumbnail quality %d is not supported", - __FUNCTION__, quality); - return BAD_VALUE; - } - validatedParams.jpegThumbQuality = quality; - - // JPEG_QUALITY - quality = newParams.getInt(CameraParameters::KEY_JPEG_QUALITY); - // also makes sure quality fits in uint8_t - if (quality < 0 || quality > 100) { - ALOGE("%s: Requested JPEG quality %d is not supported", - __FUNCTION__, quality); - return BAD_VALUE; - } - validatedParams.jpegQuality = quality; - - // ROTATION - validatedParams.jpegRotation = - newParams.getInt(CameraParameters::KEY_ROTATION); - if (validatedParams.jpegRotation != 0 && - validatedParams.jpegRotation != 90 && - validatedParams.jpegRotation != 180 && - validatedParams.jpegRotation != 270) { - ALOGE("%s: Requested picture rotation angle %d is not supported", - __FUNCTION__, validatedParams.jpegRotation); - return BAD_VALUE; - } - - // GPS - - const char *gpsLatStr = - newParams.get(CameraParameters::KEY_GPS_LATITUDE); - if (gpsLatStr != NULL) { - const char *gpsLongStr = - newParams.get(CameraParameters::KEY_GPS_LONGITUDE); - const char *gpsAltitudeStr = - newParams.get(CameraParameters::KEY_GPS_ALTITUDE); - const char *gpsTimeStr = - newParams.get(CameraParameters::KEY_GPS_TIMESTAMP); - const char *gpsProcMethodStr = - newParams.get(CameraParameters::KEY_GPS_PROCESSING_METHOD); - if (gpsLongStr == NULL || - gpsAltitudeStr == NULL || - gpsTimeStr == NULL || - gpsProcMethodStr == NULL) { - ALOGE("%s: Incomplete set of GPS parameters provided", - __FUNCTION__); - return BAD_VALUE; - } - char *endPtr; - errno = 0; - validatedParams.gpsCoordinates[0] = strtod(gpsLatStr, &endPtr); - if (errno || endPtr == gpsLatStr) { - ALOGE("%s: Malformed GPS latitude: %s", __FUNCTION__, gpsLatStr); - return BAD_VALUE; - } - errno = 0; - validatedParams.gpsCoordinates[1] = strtod(gpsLongStr, &endPtr); - if (errno || endPtr == gpsLongStr) { - ALOGE("%s: Malformed GPS longitude: %s", __FUNCTION__, gpsLongStr); - return BAD_VALUE; - } - errno = 0; - validatedParams.gpsCoordinates[2] = strtod(gpsAltitudeStr, &endPtr); - if (errno || endPtr == gpsAltitudeStr) { - ALOGE("%s: Malformed GPS altitude: %s", __FUNCTION__, - gpsAltitudeStr); - return BAD_VALUE; - } - errno = 0; - validatedParams.gpsTimestamp = strtoll(gpsTimeStr, &endPtr, 10); - if (errno || endPtr == gpsTimeStr) { - ALOGE("%s: Malformed GPS timestamp: %s", __FUNCTION__, gpsTimeStr); - return BAD_VALUE; - } - validatedParams.gpsProcessingMethod = gpsProcMethodStr; - - validatedParams.gpsEnabled = true; - } else { - validatedParams.gpsEnabled = false; - } - - // EFFECT - validatedParams.effectMode = effectModeStringToEnum( - newParams.get(CameraParameters::KEY_EFFECT) ); - if (validatedParams.effectMode != effectMode) { - camera_metadata_ro_entry_t availableEffectModes = - staticInfo(ANDROID_CONTROL_AVAILABLE_EFFECTS); - for (i = 0; i < availableEffectModes.count; i++) { - if (validatedParams.effectMode == availableEffectModes.data.u8[i]) break; - } - if (i == 
availableEffectModes.count) { - ALOGE("%s: Requested effect mode \"%s\" is not supported", - __FUNCTION__, - newParams.get(CameraParameters::KEY_EFFECT) ); - return BAD_VALUE; - } - } - - // ANTIBANDING - validatedParams.antibandingMode = abModeStringToEnum( - newParams.get(CameraParameters::KEY_ANTIBANDING) ); - if (validatedParams.antibandingMode != antibandingMode) { - camera_metadata_ro_entry_t availableAbModes = - staticInfo(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES); - for (i = 0; i < availableAbModes.count; i++) { - if (validatedParams.antibandingMode == availableAbModes.data.u8[i]) - break; - } - if (i == availableAbModes.count) { - ALOGE("%s: Requested antibanding mode \"%s\" is not supported", - __FUNCTION__, - newParams.get(CameraParameters::KEY_ANTIBANDING)); - return BAD_VALUE; - } - } - - // SCENE_MODE - validatedParams.sceneMode = sceneModeStringToEnum( - newParams.get(CameraParameters::KEY_SCENE_MODE) ); - if (validatedParams.sceneMode != sceneMode && - validatedParams.sceneMode != - ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) { - camera_metadata_ro_entry_t availableSceneModes = - staticInfo(ANDROID_CONTROL_AVAILABLE_SCENE_MODES); - for (i = 0; i < availableSceneModes.count; i++) { - if (validatedParams.sceneMode == availableSceneModes.data.u8[i]) - break; - } - if (i == availableSceneModes.count) { - ALOGE("%s: Requested scene mode \"%s\" is not supported", - __FUNCTION__, - newParams.get(CameraParameters::KEY_SCENE_MODE)); - return BAD_VALUE; - } - } - bool sceneModeSet = - validatedParams.sceneMode != ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED; - - // FLASH_MODE - if (sceneModeSet) { - validatedParams.flashMode = - fastInfo.sceneModeOverrides. - valueFor(validatedParams.sceneMode).flashMode; - } else { - validatedParams.flashMode = FLASH_MODE_INVALID; - } - if (validatedParams.flashMode == FLASH_MODE_INVALID) { - validatedParams.flashMode = flashModeStringToEnum( - newParams.get(CameraParameters::KEY_FLASH_MODE) ); - } - - if (validatedParams.flashMode != flashMode) { - camera_metadata_ro_entry_t flashAvailable = - staticInfo(ANDROID_FLASH_INFO_AVAILABLE, 1, 1); - if (!flashAvailable.data.u8[0] && - validatedParams.flashMode != Parameters::FLASH_MODE_OFF) { - ALOGE("%s: Requested flash mode \"%s\" is not supported: " - "No flash on device", __FUNCTION__, - newParams.get(CameraParameters::KEY_FLASH_MODE)); - return BAD_VALUE; - } else if (validatedParams.flashMode == Parameters::FLASH_MODE_RED_EYE) { - camera_metadata_ro_entry_t availableAeModes = - staticInfo(ANDROID_CONTROL_AE_AVAILABLE_MODES); - for (i = 0; i < availableAeModes.count; i++) { - if (validatedParams.flashMode == availableAeModes.data.u8[i]) - break; - } - if (i == availableAeModes.count) { - ALOGE("%s: Requested flash mode \"%s\" is not supported", - __FUNCTION__, - newParams.get(CameraParameters::KEY_FLASH_MODE)); - return BAD_VALUE; - } - } else if (validatedParams.flashMode == -1) { - ALOGE("%s: Requested flash mode \"%s\" is unknown", - __FUNCTION__, - newParams.get(CameraParameters::KEY_FLASH_MODE)); - return BAD_VALUE; - } - // Update in case of override - newParams.set(CameraParameters::KEY_FLASH_MODE, - flashModeEnumToString(validatedParams.flashMode)); - } - - // WHITE_BALANCE - if (sceneModeSet) { - validatedParams.wbMode = - fastInfo.sceneModeOverrides. 
- valueFor(validatedParams.sceneMode).wbMode; - } else { - validatedParams.wbMode = ANDROID_CONTROL_AWB_MODE_OFF; - } - if (validatedParams.wbMode == ANDROID_CONTROL_AWB_MODE_OFF) { - validatedParams.wbMode = wbModeStringToEnum( - newParams.get(CameraParameters::KEY_WHITE_BALANCE) ); - } - if (validatedParams.wbMode != wbMode) { - camera_metadata_ro_entry_t availableWbModes = - staticInfo(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 0, 0, false); - for (i = 0; i < availableWbModes.count; i++) { - if (validatedParams.wbMode == availableWbModes.data.u8[i]) break; - } - if (i == availableWbModes.count) { - ALOGE("%s: Requested white balance mode %s is not supported", - __FUNCTION__, - newParams.get(CameraParameters::KEY_WHITE_BALANCE)); - return BAD_VALUE; - } - // Update in case of override - newParams.set(CameraParameters::KEY_WHITE_BALANCE, - wbModeEnumToString(validatedParams.wbMode)); - } - - // FOCUS_MODE - if (sceneModeSet) { - validatedParams.focusMode = - fastInfo.sceneModeOverrides. - valueFor(validatedParams.sceneMode).focusMode; - } else { - validatedParams.focusMode = FOCUS_MODE_INVALID; - } - if (validatedParams.focusMode == FOCUS_MODE_INVALID) { - validatedParams.focusMode = focusModeStringToEnum( - newParams.get(CameraParameters::KEY_FOCUS_MODE) ); - } - if (validatedParams.focusMode != focusMode) { - validatedParams.currentAfTriggerId = -1; - if (validatedParams.focusMode != Parameters::FOCUS_MODE_FIXED) { - camera_metadata_ro_entry_t minFocusDistance = - staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 0, 0, - false); - if (minFocusDistance.count && minFocusDistance.data.f[0] == 0) { - ALOGE("%s: Requested focus mode \"%s\" is not available: " - "fixed focus lens", - __FUNCTION__, - newParams.get(CameraParameters::KEY_FOCUS_MODE)); - return BAD_VALUE; - } else if (validatedParams.focusMode != - Parameters::FOCUS_MODE_INFINITY) { - camera_metadata_ro_entry_t availableFocusModes = - staticInfo(ANDROID_CONTROL_AF_AVAILABLE_MODES); - for (i = 0; i < availableFocusModes.count; i++) { - if (validatedParams.focusMode == - availableFocusModes.data.u8[i]) break; - } - if (i == availableFocusModes.count) { - ALOGE("%s: Requested focus mode \"%s\" is not supported", - __FUNCTION__, - newParams.get(CameraParameters::KEY_FOCUS_MODE)); - return BAD_VALUE; - } - } - } - validatedParams.focusState = ANDROID_CONTROL_AF_STATE_INACTIVE; - // Always reset shadow focus mode to avoid reverting settings - validatedParams.shadowFocusMode = FOCUS_MODE_INVALID; - // Update in case of override - newParams.set(CameraParameters::KEY_FOCUS_MODE, - focusModeEnumToString(validatedParams.focusMode)); - } else { - validatedParams.currentAfTriggerId = currentAfTriggerId; - } - - // FOCUS_AREAS - res = parseAreas(newParams.get(CameraParameters::KEY_FOCUS_AREAS), - &validatedParams.focusingAreas); - size_t max3aRegions = - (size_t)staticInfo(ANDROID_CONTROL_MAX_REGIONS, 1, 1).data.i32[0]; - if (res == OK) res = validateAreas(validatedParams.focusingAreas, - max3aRegions, AREA_KIND_FOCUS); - if (res != OK) { - ALOGE("%s: Requested focus areas are malformed: %s", - __FUNCTION__, newParams.get(CameraParameters::KEY_FOCUS_AREAS)); - return BAD_VALUE; - } - - // EXPOSURE_COMPENSATION - validatedParams.exposureCompensation = - newParams.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION); - camera_metadata_ro_entry_t exposureCompensationRange = - staticInfo(ANDROID_CONTROL_AE_COMPENSATION_RANGE); - if ((validatedParams.exposureCompensation < - exposureCompensationRange.data.i32[0]) || - 
(validatedParams.exposureCompensation > - exposureCompensationRange.data.i32[1])) { - ALOGE("%s: Requested exposure compensation index is out of bounds: %d", - __FUNCTION__, validatedParams.exposureCompensation); - return BAD_VALUE; - } - - // AUTO_EXPOSURE_LOCK (always supported) - validatedParams.autoExposureLock = boolFromString( - newParams.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK)); - - // AUTO_WHITEBALANCE_LOCK (always supported) - validatedParams.autoWhiteBalanceLock = boolFromString( - newParams.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)); - - // METERING_AREAS - res = parseAreas(newParams.get(CameraParameters::KEY_METERING_AREAS), - &validatedParams.meteringAreas); - if (res == OK) { - res = validateAreas(validatedParams.meteringAreas, max3aRegions, - AREA_KIND_METERING); - } - if (res != OK) { - ALOGE("%s: Requested metering areas are malformed: %s", - __FUNCTION__, - newParams.get(CameraParameters::KEY_METERING_AREAS)); - return BAD_VALUE; - } - - // ZOOM - validatedParams.zoom = newParams.getInt(CameraParameters::KEY_ZOOM); - if (validatedParams.zoom < 0 - || validatedParams.zoom >= (int)NUM_ZOOM_STEPS) { - ALOGE("%s: Requested zoom level %d is not supported", - __FUNCTION__, validatedParams.zoom); - return BAD_VALUE; - } - - // VIDEO_SIZE - newParams.getVideoSize(&validatedParams.videoWidth, - &validatedParams.videoHeight); - if (validatedParams.videoWidth != videoWidth || - validatedParams.videoHeight != videoHeight) { - if (state == RECORD) { - ALOGE("%s: Video size cannot be updated when recording is active!", - __FUNCTION__); - return BAD_VALUE; - } - camera_metadata_ro_entry_t availableVideoSizes = - staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES); - for (i = 0; i < availableVideoSizes.count; i += 2 ) { - if ((availableVideoSizes.data.i32[i] == - validatedParams.videoWidth) && - (availableVideoSizes.data.i32[i+1] == - validatedParams.videoHeight)) break; - } - if (i == availableVideoSizes.count) { - ALOGE("%s: Requested video size %d x %d is not supported", - __FUNCTION__, validatedParams.videoWidth, - validatedParams.videoHeight); - return BAD_VALUE; - } - } - - // VIDEO_STABILIZATION - validatedParams.videoStabilization = boolFromString( - newParams.get(CameraParameters::KEY_VIDEO_STABILIZATION) ); - camera_metadata_ro_entry_t availableVideoStabilizationModes = - staticInfo(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 0, 0, - false); - if (validatedParams.videoStabilization && - availableVideoStabilizationModes.count == 1) { - ALOGE("%s: Video stabilization not supported", __FUNCTION__); - } - - // LIGHTFX - validatedParams.lightFx = lightFxStringToEnum( - newParams.get(CameraParameters::KEY_LIGHTFX)); - - /** Update internal parameters */ - - *this = validatedParams; - - /** Update external parameters calculated from the internal ones */ - - // HORIZONTAL/VERTICAL FIELD OF VIEW - float horizFov, vertFov; - res = calculatePictureFovs(&horizFov, &vertFov); - if (res != OK) { - ALOGE("%s: Can't calculate FOVs", __FUNCTION__); - // continue so parameters are at least consistent - } - newParams.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, - horizFov); - newParams.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, - vertFov); - ALOGV("Current still picture FOV: %f x %f deg", horizFov, vertFov); - - // Need to flatten again in case of overrides - paramsFlattened = newParams.flatten(); - params = newParams; - - return OK; -} - -status_t Parameters::updateRequest(CameraMetadata *request) const { - ATRACE_CALL(); - status_t res; - - /** - * 
Mixin default important security values - * - android.led.transmit = defaulted ON - */ - camera_metadata_ro_entry_t entry = staticInfo(ANDROID_LED_AVAILABLE_LEDS, - /*minimumCount*/0, - /*maximumCount*/0, - /*required*/false); - for(size_t i = 0; i < entry.count; ++i) { - uint8_t led = entry.data.u8[i]; - - switch(led) { - // Transmit LED is unconditionally on when using - // the android.hardware.Camera API - case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: { - uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON; - res = request->update(ANDROID_LED_TRANSMIT, - &transmitDefault, 1); - if (res != OK) return res; - break; - } - } - } - - /** - * Construct metadata from parameters - */ - - uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL; - res = request->update(ANDROID_REQUEST_METADATA_MODE, - &metadataMode, 1); - if (res != OK) return res; - - res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, - previewFpsRange, 2); - if (res != OK) return res; - - uint8_t reqWbLock = autoWhiteBalanceLock ? - ANDROID_CONTROL_AWB_LOCK_ON : ANDROID_CONTROL_AWB_LOCK_OFF; - res = request->update(ANDROID_CONTROL_AWB_LOCK, - &reqWbLock, 1); - - res = request->update(ANDROID_CONTROL_EFFECT_MODE, - &effectMode, 1); - if (res != OK) return res; - res = request->update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, - &antibandingMode, 1); - if (res != OK) return res; - - // android.hardware.Camera requires that when face detect is enabled, the - // camera is in a face-priority mode. HAL2 splits this into separate parts - // (face detection statistics and face priority scene mode). Map from other - // to the other. - bool sceneModeActive = - sceneMode != (uint8_t)ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED; - uint8_t reqControlMode = ANDROID_CONTROL_MODE_AUTO; - if (enableFaceDetect || sceneModeActive) { - reqControlMode = ANDROID_CONTROL_MODE_USE_SCENE_MODE; - } - res = request->update(ANDROID_CONTROL_MODE, - &reqControlMode, 1); - if (res != OK) return res; - - uint8_t reqSceneMode = - sceneModeActive ? sceneMode : - enableFaceDetect ? (uint8_t)ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY : - (uint8_t)ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED; - res = request->update(ANDROID_CONTROL_SCENE_MODE, - &reqSceneMode, 1); - if (res != OK) return res; - - uint8_t reqFlashMode = ANDROID_FLASH_MODE_OFF; - uint8_t reqAeMode = ANDROID_CONTROL_AE_MODE_OFF; - switch (flashMode) { - case Parameters::FLASH_MODE_OFF: - reqAeMode = ANDROID_CONTROL_AE_MODE_ON; break; - case Parameters::FLASH_MODE_AUTO: - reqAeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH; break; - case Parameters::FLASH_MODE_ON: - reqAeMode = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH; break; - case Parameters::FLASH_MODE_TORCH: - reqAeMode = ANDROID_CONTROL_AE_MODE_ON; - reqFlashMode = ANDROID_FLASH_MODE_TORCH; - break; - case Parameters::FLASH_MODE_RED_EYE: - reqAeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE; break; - default: - ALOGE("%s: Camera %d: Unknown flash mode %d", __FUNCTION__, - cameraId, flashMode); - return BAD_VALUE; - } - res = request->update(ANDROID_FLASH_MODE, - &reqFlashMode, 1); - if (res != OK) return res; - res = request->update(ANDROID_CONTROL_AE_MODE, - &reqAeMode, 1); - if (res != OK) return res; - - uint8_t reqAeLock = autoExposureLock ? 
- ANDROID_CONTROL_AE_LOCK_ON : ANDROID_CONTROL_AE_LOCK_OFF; - res = request->update(ANDROID_CONTROL_AE_LOCK, - &reqAeLock, 1); - if (res != OK) return res; - - res = request->update(ANDROID_CONTROL_AWB_MODE, - &wbMode, 1); - if (res != OK) return res; - - float reqFocusDistance = 0; // infinity focus in diopters - uint8_t reqFocusMode = ANDROID_CONTROL_AF_MODE_OFF; - switch (focusMode) { - case Parameters::FOCUS_MODE_AUTO: - case Parameters::FOCUS_MODE_MACRO: - case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO: - case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE: - case Parameters::FOCUS_MODE_EDOF: - reqFocusMode = focusMode; - break; - case Parameters::FOCUS_MODE_INFINITY: - case Parameters::FOCUS_MODE_FIXED: - reqFocusMode = ANDROID_CONTROL_AF_MODE_OFF; - break; - default: - ALOGE("%s: Camera %d: Unknown focus mode %d", __FUNCTION__, - cameraId, focusMode); - return BAD_VALUE; - } - res = request->update(ANDROID_LENS_FOCUS_DISTANCE, - &reqFocusDistance, 1); - if (res != OK) return res; - res = request->update(ANDROID_CONTROL_AF_MODE, - &reqFocusMode, 1); - if (res != OK) return res; - - size_t reqFocusingAreasSize = focusingAreas.size() * 5; - int32_t *reqFocusingAreas = new int32_t[reqFocusingAreasSize]; - for (size_t i = 0; i < reqFocusingAreasSize; i += 5) { - if (focusingAreas[i].weight != 0) { - reqFocusingAreas[i + 0] = - normalizedXToArray(focusingAreas[i].left); - reqFocusingAreas[i + 1] = - normalizedYToArray(focusingAreas[i].top); - reqFocusingAreas[i + 2] = - normalizedXToArray(focusingAreas[i].right); - reqFocusingAreas[i + 3] = - normalizedYToArray(focusingAreas[i].bottom); - } else { - reqFocusingAreas[i + 0] = 0; - reqFocusingAreas[i + 1] = 0; - reqFocusingAreas[i + 2] = 0; - reqFocusingAreas[i + 3] = 0; - } - reqFocusingAreas[i + 4] = focusingAreas[i].weight; - } - res = request->update(ANDROID_CONTROL_AF_REGIONS, - reqFocusingAreas, reqFocusingAreasSize); - if (res != OK) return res; - delete[] reqFocusingAreas; - - res = request->update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, - &exposureCompensation, 1); - if (res != OK) return res; - - size_t reqMeteringAreasSize = meteringAreas.size() * 5; - int32_t *reqMeteringAreas = new int32_t[reqMeteringAreasSize]; - for (size_t i = 0; i < reqMeteringAreasSize; i += 5) { - if (meteringAreas[i].weight != 0) { - reqMeteringAreas[i + 0] = - normalizedXToArray(meteringAreas[i].left); - reqMeteringAreas[i + 1] = - normalizedYToArray(meteringAreas[i].top); - reqMeteringAreas[i + 2] = - normalizedXToArray(meteringAreas[i].right); - reqMeteringAreas[i + 3] = - normalizedYToArray(meteringAreas[i].bottom); - } else { - reqMeteringAreas[i + 0] = 0; - reqMeteringAreas[i + 1] = 0; - reqMeteringAreas[i + 2] = 0; - reqMeteringAreas[i + 3] = 0; - } - reqMeteringAreas[i + 4] = meteringAreas[i].weight; - } - res = request->update(ANDROID_CONTROL_AE_REGIONS, - reqMeteringAreas, reqMeteringAreasSize); - if (res != OK) return res; - - delete[] reqMeteringAreas; - - /* don't include jpeg thumbnail size - it's valid for - it to be set to (0,0), meaning 'no thumbnail' */ - CropRegion crop = calculateCropRegion( (CropRegion::Outputs)( - CropRegion::OUTPUT_PREVIEW | - CropRegion::OUTPUT_VIDEO | - CropRegion::OUTPUT_PICTURE )); - int32_t reqCropRegion[4] = { - static_cast(crop.left), - static_cast(crop.top), - static_cast(crop.width), - static_cast(crop.height) - }; - res = request->update(ANDROID_SCALER_CROP_REGION, - reqCropRegion, 4); - if (res != OK) return res; - - uint8_t reqVstabMode = videoStabilization ? 
- ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON : - ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; - res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, - &reqVstabMode, 1); - if (res != OK) return res; - - uint8_t reqFaceDetectMode = enableFaceDetect ? - fastInfo.bestFaceDetectMode : - (uint8_t)ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; - res = request->update(ANDROID_STATISTICS_FACE_DETECT_MODE, - &reqFaceDetectMode, 1); - if (res != OK) return res; - - return OK; -} - -status_t Parameters::updateRequestJpeg(CameraMetadata *request) const { - status_t res; - - res = request->update(ANDROID_JPEG_THUMBNAIL_SIZE, - jpegThumbSize, 2); - if (res != OK) return res; - res = request->update(ANDROID_JPEG_THUMBNAIL_QUALITY, - &jpegThumbQuality, 1); - if (res != OK) return res; - res = request->update(ANDROID_JPEG_QUALITY, - &jpegQuality, 1); - if (res != OK) return res; - res = request->update( - ANDROID_JPEG_ORIENTATION, - &jpegRotation, 1); - if (res != OK) return res; - - if (gpsEnabled) { - res = request->update( - ANDROID_JPEG_GPS_COORDINATES, - gpsCoordinates, 3); - if (res != OK) return res; - res = request->update( - ANDROID_JPEG_GPS_TIMESTAMP, - &gpsTimestamp, 1); - if (res != OK) return res; - res = request->update( - ANDROID_JPEG_GPS_PROCESSING_METHOD, - gpsProcessingMethod); - if (res != OK) return res; - } else { - res = request->erase(ANDROID_JPEG_GPS_COORDINATES); - if (res != OK) return res; - res = request->erase(ANDROID_JPEG_GPS_TIMESTAMP); - if (res != OK) return res; - res = request->erase(ANDROID_JPEG_GPS_PROCESSING_METHOD); - if (res != OK) return res; - } - return OK; -} - - -const char* Parameters::getStateName(State state) { -#define CASE_ENUM_TO_CHAR(x) case x: return(#x); break; - switch(state) { - CASE_ENUM_TO_CHAR(DISCONNECTED) - CASE_ENUM_TO_CHAR(STOPPED) - CASE_ENUM_TO_CHAR(WAITING_FOR_PREVIEW_WINDOW) - CASE_ENUM_TO_CHAR(PREVIEW) - CASE_ENUM_TO_CHAR(RECORD) - CASE_ENUM_TO_CHAR(STILL_CAPTURE) - CASE_ENUM_TO_CHAR(VIDEO_SNAPSHOT) - default: - return "Unknown state!"; - break; - } -#undef CASE_ENUM_TO_CHAR -} - -int Parameters::formatStringToEnum(const char *format) { - return - !format ? - HAL_PIXEL_FORMAT_YCrCb_420_SP : - !strcmp(format, CameraParameters::PIXEL_FORMAT_YUV422SP) ? - HAL_PIXEL_FORMAT_YCbCr_422_SP : // NV16 - !strcmp(format, CameraParameters::PIXEL_FORMAT_YUV420SP) ? - HAL_PIXEL_FORMAT_YCrCb_420_SP : // NV21 - !strcmp(format, CameraParameters::PIXEL_FORMAT_YUV422I) ? - HAL_PIXEL_FORMAT_YCbCr_422_I : // YUY2 - !strcmp(format, CameraParameters::PIXEL_FORMAT_YUV420P) ? - HAL_PIXEL_FORMAT_YV12 : // YV12 - !strcmp(format, CameraParameters::PIXEL_FORMAT_RGB565) ? - HAL_PIXEL_FORMAT_RGB_565 : // RGB565 - !strcmp(format, CameraParameters::PIXEL_FORMAT_RGBA8888) ? - HAL_PIXEL_FORMAT_RGBA_8888 : // RGB8888 - !strcmp(format, CameraParameters::PIXEL_FORMAT_BAYER_RGGB) ? 
- HAL_PIXEL_FORMAT_RAW_SENSOR : // Raw sensor data - -1; -} - -const char* Parameters::formatEnumToString(int format) { - const char *fmt; - switch(format) { - case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16 - fmt = CameraParameters::PIXEL_FORMAT_YUV422SP; - break; - case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21 - fmt = CameraParameters::PIXEL_FORMAT_YUV420SP; - break; - case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2 - fmt = CameraParameters::PIXEL_FORMAT_YUV422I; - break; - case HAL_PIXEL_FORMAT_YV12: // YV12 - fmt = CameraParameters::PIXEL_FORMAT_YUV420P; - break; - case HAL_PIXEL_FORMAT_RGB_565: // RGB565 - fmt = CameraParameters::PIXEL_FORMAT_RGB565; - break; - case HAL_PIXEL_FORMAT_RGBA_8888: // RGBA8888 - fmt = CameraParameters::PIXEL_FORMAT_RGBA8888; - break; - case HAL_PIXEL_FORMAT_RAW_SENSOR: - ALOGW("Raw sensor preview format requested."); - fmt = CameraParameters::PIXEL_FORMAT_BAYER_RGGB; - break; - default: - ALOGE("%s: Unknown preview format: %x", - __FUNCTION__, format); - fmt = NULL; - break; - } - return fmt; -} - -int Parameters::wbModeStringToEnum(const char *wbMode) { - return - !wbMode ? - ANDROID_CONTROL_AWB_MODE_AUTO : - !strcmp(wbMode, CameraParameters::WHITE_BALANCE_AUTO) ? - ANDROID_CONTROL_AWB_MODE_AUTO : - !strcmp(wbMode, CameraParameters::WHITE_BALANCE_INCANDESCENT) ? - ANDROID_CONTROL_AWB_MODE_INCANDESCENT : - !strcmp(wbMode, CameraParameters::WHITE_BALANCE_FLUORESCENT) ? - ANDROID_CONTROL_AWB_MODE_FLUORESCENT : - !strcmp(wbMode, CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT) ? - ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT : - !strcmp(wbMode, CameraParameters::WHITE_BALANCE_DAYLIGHT) ? - ANDROID_CONTROL_AWB_MODE_DAYLIGHT : - !strcmp(wbMode, CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT) ? - ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT : - !strcmp(wbMode, CameraParameters::WHITE_BALANCE_TWILIGHT) ? - ANDROID_CONTROL_AWB_MODE_TWILIGHT : - !strcmp(wbMode, CameraParameters::WHITE_BALANCE_SHADE) ? - ANDROID_CONTROL_AWB_MODE_SHADE : - -1; -} - -const char* Parameters::wbModeEnumToString(uint8_t wbMode) { - switch (wbMode) { - case ANDROID_CONTROL_AWB_MODE_AUTO: - return CameraParameters::WHITE_BALANCE_AUTO; - case ANDROID_CONTROL_AWB_MODE_INCANDESCENT: - return CameraParameters::WHITE_BALANCE_INCANDESCENT; - case ANDROID_CONTROL_AWB_MODE_FLUORESCENT: - return CameraParameters::WHITE_BALANCE_FLUORESCENT; - case ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT: - return CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT; - case ANDROID_CONTROL_AWB_MODE_DAYLIGHT: - return CameraParameters::WHITE_BALANCE_DAYLIGHT; - case ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT: - return CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT; - case ANDROID_CONTROL_AWB_MODE_TWILIGHT: - return CameraParameters::WHITE_BALANCE_TWILIGHT; - case ANDROID_CONTROL_AWB_MODE_SHADE: - return CameraParameters::WHITE_BALANCE_SHADE; - default: - ALOGE("%s: Unknown AWB mode enum: %d", - __FUNCTION__, wbMode); - return "unknown"; - } -} - -int Parameters::effectModeStringToEnum(const char *effectMode) { - return - !effectMode ? - ANDROID_CONTROL_EFFECT_MODE_OFF : - !strcmp(effectMode, CameraParameters::EFFECT_NONE) ? - ANDROID_CONTROL_EFFECT_MODE_OFF : - !strcmp(effectMode, CameraParameters::EFFECT_MONO) ? - ANDROID_CONTROL_EFFECT_MODE_MONO : - !strcmp(effectMode, CameraParameters::EFFECT_NEGATIVE) ? - ANDROID_CONTROL_EFFECT_MODE_NEGATIVE : - !strcmp(effectMode, CameraParameters::EFFECT_SOLARIZE) ? - ANDROID_CONTROL_EFFECT_MODE_SOLARIZE : - !strcmp(effectMode, CameraParameters::EFFECT_SEPIA) ? 
- ANDROID_CONTROL_EFFECT_MODE_SEPIA : - !strcmp(effectMode, CameraParameters::EFFECT_POSTERIZE) ? - ANDROID_CONTROL_EFFECT_MODE_POSTERIZE : - !strcmp(effectMode, CameraParameters::EFFECT_WHITEBOARD) ? - ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD : - !strcmp(effectMode, CameraParameters::EFFECT_BLACKBOARD) ? - ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD : - !strcmp(effectMode, CameraParameters::EFFECT_AQUA) ? - ANDROID_CONTROL_EFFECT_MODE_AQUA : - -1; -} - -int Parameters::abModeStringToEnum(const char *abMode) { - return - !abMode ? - ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO : - !strcmp(abMode, CameraParameters::ANTIBANDING_AUTO) ? - ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO : - !strcmp(abMode, CameraParameters::ANTIBANDING_OFF) ? - ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF : - !strcmp(abMode, CameraParameters::ANTIBANDING_50HZ) ? - ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ : - !strcmp(abMode, CameraParameters::ANTIBANDING_60HZ) ? - ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ : - -1; -} - -int Parameters::sceneModeStringToEnum(const char *sceneMode) { - return - !sceneMode ? - ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_AUTO) ? - ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_ACTION) ? - ANDROID_CONTROL_SCENE_MODE_ACTION : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_PORTRAIT) ? - ANDROID_CONTROL_SCENE_MODE_PORTRAIT : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_LANDSCAPE) ? - ANDROID_CONTROL_SCENE_MODE_LANDSCAPE : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_NIGHT) ? - ANDROID_CONTROL_SCENE_MODE_NIGHT : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_NIGHT_PORTRAIT) ? - ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_THEATRE) ? - ANDROID_CONTROL_SCENE_MODE_THEATRE : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_BEACH) ? - ANDROID_CONTROL_SCENE_MODE_BEACH : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_SNOW) ? - ANDROID_CONTROL_SCENE_MODE_SNOW : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_SUNSET) ? - ANDROID_CONTROL_SCENE_MODE_SUNSET : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_STEADYPHOTO) ? - ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_FIREWORKS) ? - ANDROID_CONTROL_SCENE_MODE_FIREWORKS : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_SPORTS) ? - ANDROID_CONTROL_SCENE_MODE_SPORTS : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_PARTY) ? - ANDROID_CONTROL_SCENE_MODE_PARTY : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_CANDLELIGHT) ? - ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT : - !strcmp(sceneMode, CameraParameters::SCENE_MODE_BARCODE) ? - ANDROID_CONTROL_SCENE_MODE_BARCODE: - -1; -} - -Parameters::Parameters::flashMode_t Parameters::flashModeStringToEnum( - const char *flashMode) { - return - !flashMode ? - Parameters::FLASH_MODE_INVALID : - !strcmp(flashMode, CameraParameters::FLASH_MODE_OFF) ? - Parameters::FLASH_MODE_OFF : - !strcmp(flashMode, CameraParameters::FLASH_MODE_AUTO) ? - Parameters::FLASH_MODE_AUTO : - !strcmp(flashMode, CameraParameters::FLASH_MODE_ON) ? - Parameters::FLASH_MODE_ON : - !strcmp(flashMode, CameraParameters::FLASH_MODE_RED_EYE) ? - Parameters::FLASH_MODE_RED_EYE : - !strcmp(flashMode, CameraParameters::FLASH_MODE_TORCH) ? 
- Parameters::FLASH_MODE_TORCH : - Parameters::FLASH_MODE_INVALID; -} - -const char *Parameters::flashModeEnumToString(flashMode_t flashMode) { - switch (flashMode) { - case FLASH_MODE_OFF: - return CameraParameters::FLASH_MODE_OFF; - case FLASH_MODE_AUTO: - return CameraParameters::FLASH_MODE_AUTO; - case FLASH_MODE_ON: - return CameraParameters::FLASH_MODE_ON; - case FLASH_MODE_RED_EYE: - return CameraParameters::FLASH_MODE_RED_EYE; - case FLASH_MODE_TORCH: - return CameraParameters::FLASH_MODE_TORCH; - default: - ALOGE("%s: Unknown flash mode enum %d", - __FUNCTION__, flashMode); - return "unknown"; - } -} - -Parameters::Parameters::focusMode_t Parameters::focusModeStringToEnum( - const char *focusMode) { - return - !focusMode ? - Parameters::FOCUS_MODE_INVALID : - !strcmp(focusMode, CameraParameters::FOCUS_MODE_AUTO) ? - Parameters::FOCUS_MODE_AUTO : - !strcmp(focusMode, CameraParameters::FOCUS_MODE_INFINITY) ? - Parameters::FOCUS_MODE_INFINITY : - !strcmp(focusMode, CameraParameters::FOCUS_MODE_MACRO) ? - Parameters::FOCUS_MODE_MACRO : - !strcmp(focusMode, CameraParameters::FOCUS_MODE_FIXED) ? - Parameters::FOCUS_MODE_FIXED : - !strcmp(focusMode, CameraParameters::FOCUS_MODE_EDOF) ? - Parameters::FOCUS_MODE_EDOF : - !strcmp(focusMode, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) ? - Parameters::FOCUS_MODE_CONTINUOUS_VIDEO : - !strcmp(focusMode, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) ? - Parameters::FOCUS_MODE_CONTINUOUS_PICTURE : - Parameters::FOCUS_MODE_INVALID; -} - -const char *Parameters::focusModeEnumToString(focusMode_t focusMode) { - switch (focusMode) { - case FOCUS_MODE_AUTO: - return CameraParameters::FOCUS_MODE_AUTO; - case FOCUS_MODE_MACRO: - return CameraParameters::FOCUS_MODE_MACRO; - case FOCUS_MODE_CONTINUOUS_VIDEO: - return CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO; - case FOCUS_MODE_CONTINUOUS_PICTURE: - return CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE; - case FOCUS_MODE_EDOF: - return CameraParameters::FOCUS_MODE_EDOF; - case FOCUS_MODE_INFINITY: - return CameraParameters::FOCUS_MODE_INFINITY; - case FOCUS_MODE_FIXED: - return CameraParameters::FOCUS_MODE_FIXED; - default: - ALOGE("%s: Unknown focus mode enum: %d", - __FUNCTION__, focusMode); - return "unknown"; - } -} - -Parameters::Parameters::lightFxMode_t Parameters::lightFxStringToEnum( - const char *lightFxMode) { - return - !lightFxMode ? - Parameters::LIGHTFX_NONE : - !strcmp(lightFxMode, CameraParameters::LIGHTFX_LOWLIGHT) ? - Parameters::LIGHTFX_LOWLIGHT : - !strcmp(lightFxMode, CameraParameters::LIGHTFX_HDR) ? 
- Parameters::LIGHTFX_HDR : - Parameters::LIGHTFX_NONE; -} - -status_t Parameters::parseAreas(const char *areasCStr, - Vector *areas) { - static const size_t NUM_FIELDS = 5; - areas->clear(); - if (areasCStr == NULL) { - // If no key exists, use default (0,0,0,0,0) - areas->push(); - return OK; - } - String8 areasStr(areasCStr); - ssize_t areaStart = areasStr.find("(", 0) + 1; - while (areaStart != 0) { - const char* area = areasStr.string() + areaStart; - char *numEnd; - int vals[NUM_FIELDS]; - for (size_t i = 0; i < NUM_FIELDS; i++) { - errno = 0; - vals[i] = strtol(area, &numEnd, 10); - if (errno || numEnd == area) return BAD_VALUE; - area = numEnd + 1; - } - areas->push(Parameters::Area( - vals[0], vals[1], vals[2], vals[3], vals[4]) ); - areaStart = areasStr.find("(", areaStart) + 1; - } - return OK; -} - -status_t Parameters::validateAreas(const Vector &areas, - size_t maxRegions, - AreaKind areaKind) const { - // Definition of valid area can be found in - // include/camera/CameraParameters.h - if (areas.size() == 0) return BAD_VALUE; - if (areas.size() == 1) { - if (areas[0].left == 0 && - areas[0].top == 0 && - areas[0].right == 0 && - areas[0].bottom == 0 && - areas[0].weight == 0) { - // Single (0,0,0,0,0) entry is always valid (== driver decides) - return OK; - } - } - - // fixed focus can only set (0,0,0,0,0) focus area - if (areaKind == AREA_KIND_FOCUS && focusMode == FOCUS_MODE_FIXED) { - return BAD_VALUE; - } - - if (areas.size() > maxRegions) { - ALOGE("%s: Too many areas requested: %d", - __FUNCTION__, areas.size()); - return BAD_VALUE; - } - - for (Vector::const_iterator a = areas.begin(); - a != areas.end(); a++) { - if (a->weight < 1 || a->weight > 1000) return BAD_VALUE; - if (a->left < -1000 || a->left > 1000) return BAD_VALUE; - if (a->top < -1000 || a->top > 1000) return BAD_VALUE; - if (a->right < -1000 || a->right > 1000) return BAD_VALUE; - if (a->bottom < -1000 || a->bottom > 1000) return BAD_VALUE; - if (a->left >= a->right) return BAD_VALUE; - if (a->top >= a->bottom) return BAD_VALUE; - } - return OK; -} - -bool Parameters::boolFromString(const char *boolStr) { - return !boolStr ? false : - !strcmp(boolStr, CameraParameters::TRUE) ? 
true : - false; -} - -int Parameters::degToTransform(int degrees, bool mirror) { - if (!mirror) { - if (degrees == 0) return 0; - else if (degrees == 90) return HAL_TRANSFORM_ROT_90; - else if (degrees == 180) return HAL_TRANSFORM_ROT_180; - else if (degrees == 270) return HAL_TRANSFORM_ROT_270; - } else { // Do mirror (horizontal flip) - if (degrees == 0) { // FLIP_H and ROT_0 - return HAL_TRANSFORM_FLIP_H; - } else if (degrees == 90) { // FLIP_H and ROT_90 - return HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90; - } else if (degrees == 180) { // FLIP_H and ROT_180 - return HAL_TRANSFORM_FLIP_V; - } else if (degrees == 270) { // FLIP_H and ROT_270 - return HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90; - } - } - ALOGE("%s: Bad input: %d", __FUNCTION__, degrees); - return -1; -} - -int Parameters::cropXToArray(int x) const { - ALOG_ASSERT(x >= 0, "Crop-relative X coordinate = '%d' is out of bounds" - "(lower = 0)", x); - - CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); - ALOG_ASSERT(x < previewCrop.width, "Crop-relative X coordinate = '%d' " - "is out of bounds (upper = %f)", x, previewCrop.width); - - int ret = x + previewCrop.left; - - ALOG_ASSERT( (ret >= 0 && ret < fastInfo.arrayWidth), - "Calculated pixel array value X = '%d' is out of bounds (upper = %d)", - ret, fastInfo.arrayWidth); - return ret; -} - -int Parameters::cropYToArray(int y) const { - ALOG_ASSERT(y >= 0, "Crop-relative Y coordinate = '%d' is out of bounds " - "(lower = 0)", y); - - CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); - ALOG_ASSERT(y < previewCrop.height, "Crop-relative Y coordinate = '%d' is " - "out of bounds (upper = %f)", y, previewCrop.height); - - int ret = y + previewCrop.top; - - ALOG_ASSERT( (ret >= 0 && ret < fastInfo.arrayHeight), - "Calculated pixel array value Y = '%d' is out of bounds (upper = %d)", - ret, fastInfo.arrayHeight); - - return ret; - -} - -int Parameters::normalizedXToCrop(int x) const { - CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); - return (x + 1000) * (previewCrop.width - 1) / 2000; -} - -int Parameters::normalizedYToCrop(int y) const { - CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); - return (y + 1000) * (previewCrop.height - 1) / 2000; -} - -int Parameters::arrayXToCrop(int x) const { - CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); - return x - previewCrop.left; -} - -int Parameters::arrayYToCrop(int y) const { - CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); - return y - previewCrop.top; -} - -int Parameters::cropXToNormalized(int x) const { - CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); - return x * 2000 / (previewCrop.width - 1) - 1000; -} - -int Parameters::cropYToNormalized(int y) const { - CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW); - return y * 2000 / (previewCrop.height - 1) - 1000; -} - -int Parameters::arrayXToNormalized(int width) const { - int ret = cropXToNormalized(arrayXToCrop(width)); - - ALOG_ASSERT(ret >= -1000, "Calculated normalized value out of " - "lower bounds %d", ret); - ALOG_ASSERT(ret <= 1000, "Calculated normalized value out of " - "upper bounds %d", ret); - - // Work-around for HAL pre-scaling the coordinates themselves - if (quirks.meteringCropRegion) { - return width * 2000 / (fastInfo.arrayWidth - 1) - 1000; - } - - return ret; -} - -int Parameters::arrayYToNormalized(int height) const { - int ret = 
cropYToNormalized(arrayYToCrop(height)); - - ALOG_ASSERT(ret >= -1000, "Calculated normalized value out of lower bounds" - " %d", ret); - ALOG_ASSERT(ret <= 1000, "Calculated normalized value out of upper bounds" - " %d", ret); - - // Work-around for HAL pre-scaling the coordinates themselves - if (quirks.meteringCropRegion) { - return height * 2000 / (fastInfo.arrayHeight - 1) - 1000; - } - - return ret; -} - -int Parameters::normalizedXToArray(int x) const { - - // Work-around for HAL pre-scaling the coordinates themselves - if (quirks.meteringCropRegion) { - return (x + 1000) * (fastInfo.arrayWidth - 1) / 2000; - } - - return cropXToArray(normalizedXToCrop(x)); -} - -int Parameters::normalizedYToArray(int y) const { - // Work-around for HAL pre-scaling the coordinates themselves - if (quirks.meteringCropRegion) { - return (y + 1000) * (fastInfo.arrayHeight - 1) / 2000; - } - - return cropYToArray(normalizedYToCrop(y)); -} - -Parameters::CropRegion Parameters::calculateCropRegion( - Parameters::CropRegion::Outputs outputs) const { - - float zoomLeft, zoomTop, zoomWidth, zoomHeight; - - // Need to convert zoom index into a crop rectangle. The rectangle is - // chosen to maximize its area on the sensor - - camera_metadata_ro_entry_t maxDigitalZoom = - staticInfo(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); - // For each zoom step by how many pixels more do we change the zoom - float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) / - (NUM_ZOOM_STEPS-1); - // The desired activeAreaWidth/cropAreaWidth ratio (or height if h>w) - // via interpolating zoom step into a zoom ratio - float zoomRatio = 1 + zoomIncrement * zoom; - ALOG_ASSERT( (zoomRatio >= 1.f && zoomRatio <= maxDigitalZoom.data.f[0]), - "Zoom ratio calculated out of bounds. Expected 1 - %f, actual: %f", - maxDigitalZoom.data.f[0], zoomRatio); - - ALOGV("Zoom maxDigital=%f, increment=%f, ratio=%f, previewWidth=%d, " - "previewHeight=%d, activeWidth=%d, activeHeight=%d", - maxDigitalZoom.data.f[0], zoomIncrement, zoomRatio, previewWidth, - previewHeight, fastInfo.arrayWidth, fastInfo.arrayHeight); - - /* - * Assumption: On the HAL side each stream buffer calculates its crop - * rectangle as follows: - * cropRect = (zoomLeft, zoomRight, - * zoomWidth, zoomHeight * zoomWidth / outputWidth); - * - * Note that if zoomWidth > bufferWidth, the new cropHeight > zoomHeight - * (we can then get into trouble if the cropHeight > arrayHeight). - * By selecting the zoomRatio based on the smallest outputRatio, we - * guarantee this will never happen. 
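
// Illustrative sketch (not part of the original sources or this patch): how
// calculateCropRegion() above turns the API zoom index into a zoom ratio.
// NUM_ZOOM_STEPS is 100 as in Parameters.h; the 4.0x maximum digital zoom is
// an assumed value standing in for ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM.
#include <cstdio>

int main() {
    const unsigned int NUM_ZOOM_STEPS = 100;  // simulated zoom steps (Parameters.h)
    const float maxDigitalZoom = 4.0f;        // assumed static-info value
    // For each zoom step, by how much the ratio grows
    const float zoomIncrement = (maxDigitalZoom - 1) / (NUM_ZOOM_STEPS - 1);
    const int zoomIndices[] = {0, 25, 99};    // valid indices are 0..NUM_ZOOM_STEPS-1
    for (int zoom : zoomIndices) {
        float zoomRatio = 1 + zoomIncrement * zoom;
        // zoom 0 -> 1.00x, zoom 25 -> ~1.76x, zoom 99 -> 4.00x
        printf("zoom index %2d -> ratio %.2fx\n", zoom, zoomRatio);
    }
    return 0;
}
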
- */ - - // Enumerate all possible output sizes, select the one with the smallest - // aspect ratio - float minOutputWidth, minOutputHeight, minOutputRatio; - { - float outputSizes[][2] = { - { static_cast(previewWidth), - static_cast(previewHeight) }, - { static_cast(videoWidth), - static_cast(videoHeight) }, - { static_cast(jpegThumbSize[0]), - static_cast(jpegThumbSize[1]) }, - { static_cast(pictureWidth), - static_cast(pictureHeight) }, - }; - - minOutputWidth = outputSizes[0][0]; - minOutputHeight = outputSizes[0][1]; - minOutputRatio = minOutputWidth / minOutputHeight; - for (unsigned int i = 0; - i < sizeof(outputSizes) / sizeof(outputSizes[0]); - ++i) { - - // skip over outputs we don't want to consider for the crop region - if ( !((1 << i) & outputs) ) { - continue; - } - - float outputWidth = outputSizes[i][0]; - float outputHeight = outputSizes[i][1]; - float outputRatio = outputWidth / outputHeight; - - if (minOutputRatio > outputRatio) { - minOutputRatio = outputRatio; - minOutputWidth = outputWidth; - minOutputHeight = outputHeight; - } - - // and then use this output ratio instead of preview output ratio - ALOGV("Enumerating output ratio %f = %f / %f, min is %f", - outputRatio, outputWidth, outputHeight, minOutputRatio); - } - } - - /* Ensure that the width/height never go out of bounds - * by scaling across a diffent dimension if an out-of-bounds - * possibility exists. - * - * e.g. if the previewratio < arrayratio and e.g. zoomratio = 1.0, then by - * calculating the zoomWidth from zoomHeight we'll actually get a - * zoomheight > arrayheight - */ - float arrayRatio = 1.f * fastInfo.arrayWidth / fastInfo.arrayHeight; - if (minOutputRatio >= arrayRatio) { - // Adjust the height based on the width - zoomWidth = fastInfo.arrayWidth / zoomRatio; - zoomHeight = zoomWidth * - minOutputHeight / minOutputWidth; - - } else { - // Adjust the width based on the height - zoomHeight = fastInfo.arrayHeight / zoomRatio; - zoomWidth = zoomHeight * - minOutputWidth / minOutputHeight; - } - // centering the zoom area within the active area - zoomLeft = (fastInfo.arrayWidth - zoomWidth) / 2; - zoomTop = (fastInfo.arrayHeight - zoomHeight) / 2; - - ALOGV("Crop region calculated (x=%d,y=%d,w=%f,h=%f) for zoom=%d", - (int32_t)zoomLeft, (int32_t)zoomTop, zoomWidth, zoomHeight, this->zoom); - - - CropRegion crop = { zoomLeft, zoomTop, zoomWidth, zoomHeight }; - return crop; -} - -status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov) - const { - camera_metadata_ro_entry_t sensorSize = - staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2); - if (!sensorSize.count) return NO_INIT; - - float arrayAspect = static_cast(fastInfo.arrayWidth) / - fastInfo.arrayHeight; - float stillAspect = static_cast(pictureWidth) / pictureHeight; - ALOGV("Array aspect: %f, still aspect: %f", arrayAspect, stillAspect); - - // The crop factors from the full sensor array to the still picture crop - // region - float horizCropFactor = 1.f; - float vertCropFactor = 1.f; - - /** - * Need to calculate the still image field of view based on the total pixel - * array field of view, and the relative aspect ratios of the pixel array - * and output streams. - * - * Special treatment for quirky definition of crop region and relative - * stream cropping. 
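
// Illustrative sketch (not part of the original sources or this patch): the
// centered crop-rectangle math in calculateCropRegion() above, run with
// hypothetical numbers -- a 3264x2448 (4:3) active array, a single 1920x1080
// (16:9) output, and a 2.0x zoom ratio.
#include <cstdio>

int main() {
    const float arrayWidth = 3264, arrayHeight = 2448;   // assumed active array
    const float outWidth = 1920, outHeight = 1080;       // assumed smallest-ratio output
    const float zoomRatio = 2.0f;

    const float outRatio = outWidth / outHeight;         // ~1.78
    const float arrayRatio = arrayWidth / arrayHeight;   // ~1.33
    float zoomWidth, zoomHeight;
    if (outRatio >= arrayRatio) {
        // Output is wider than the array: derive the height from the width
        zoomWidth  = arrayWidth / zoomRatio;
        zoomHeight = zoomWidth * outHeight / outWidth;
    } else {
        // Output is taller than the array: derive the width from the height
        zoomHeight = arrayHeight / zoomRatio;
        zoomWidth  = zoomHeight * outWidth / outHeight;
    }
    const float zoomLeft = (arrayWidth - zoomWidth) / 2;  // center within the array
    const float zoomTop  = (arrayHeight - zoomHeight) / 2;
    // -> crop region (816, 765), 1632 x 918 for these numbers
    printf("crop: (%.0f, %.0f) %.0f x %.0f\n", zoomLeft, zoomTop, zoomWidth, zoomHeight);
    return 0;
}
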
- */ - if (quirks.meteringCropRegion) { - // Use max of preview and video as first crop - float previewAspect = static_cast(previewWidth) / previewHeight; - float videoAspect = static_cast(videoWidth) / videoHeight; - if (videoAspect > previewAspect) { - previewAspect = videoAspect; - } - // First crop sensor to preview aspect ratio - if (arrayAspect < previewAspect) { - vertCropFactor = arrayAspect / previewAspect; - } else { - horizCropFactor = previewAspect / arrayAspect; - } - // Second crop to still aspect ratio - if (stillAspect < previewAspect) { - horizCropFactor *= stillAspect / previewAspect; - } else { - vertCropFactor *= previewAspect / stillAspect; - } - } else { - /** - * Crop are just a function of just the still/array relative aspect - * ratios. Since each stream will maximize its area within the crop - * region, and for FOV we assume a full-sensor crop region, we only ever - * crop the FOV either vertically or horizontally, never both. - */ - horizCropFactor = (arrayAspect > stillAspect) ? - (stillAspect / arrayAspect) : 1.f; - vertCropFactor = (arrayAspect < stillAspect) ? - (arrayAspect / stillAspect) : 1.f; - } - ALOGV("Horiz crop factor: %f, vert crop fact: %f", - horizCropFactor, vertCropFactor); - /** - * Basic field of view formula is: - * angle of view = 2 * arctangent ( d / 2f ) - * where d is the physical sensor dimension of interest, and f is - * the focal length. This only applies to rectilinear sensors, for focusing - * at distances >> f, etc. - */ - if (horizFov != NULL) { - *horizFov = 180 / M_PI * 2 * - atanf(horizCropFactor * sensorSize.data.f[0] / - (2 * fastInfo.minFocalLength)); - } - if (vertFov != NULL) { - *vertFov = 180 / M_PI * 2 * - atanf(vertCropFactor * sensorSize.data.f[1] / - (2 * fastInfo.minFocalLength)); - } - return OK; -} - -int32_t Parameters::fpsFromRange(int32_t /*min*/, int32_t max) const { - return max; -} - -}; // namespace camera2 -}; // namespace android diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h deleted file mode 100644 index 464830c..0000000 --- a/services/camera/libcameraservice/camera2/Parameters.h +++ /dev/null @@ -1,372 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2PARAMETERS_H -#define ANDROID_SERVERS_CAMERA_CAMERA2PARAMETERS_H - -#include - -#include -#include -#include -#include -#include -#include -#include - -namespace android { -namespace camera2 { - -/** - * Current camera state; this is the full state of the Camera under the old - * camera API (contents of the CameraParameters object in a more-efficient - * format, plus other state). The enum values are mostly based off the - * corresponding camera2 enums, not the camera1 strings. A few are defined here - * if they don't cleanly map to camera2 values. 
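
// Illustrative sketch (not part of the original sources or this patch): the
// field-of-view formula used by calculatePictureFovs() above,
// angle = 2 * atan(d / 2f), evaluated with hypothetical numbers -- a
// 4.60 x 3.45 mm sensor, a 3.50 mm focal length, and crop factors of 1.0
// (still aspect equal to the array aspect).
#include <cmath>
#include <cstdio>

int main() {
    const float sensorWidthMm  = 4.60f;   // assumed ANDROID_SENSOR_INFO_PHYSICAL_SIZE[0]
    const float sensorHeightMm = 3.45f;   // assumed ANDROID_SENSOR_INFO_PHYSICAL_SIZE[1]
    const float focalLengthMm  = 3.50f;   // assumed minimum focal length
    const float horizCropFactor = 1.0f, vertCropFactor = 1.0f;

    const float horizFov = 180 / M_PI * 2 *
            atanf(horizCropFactor * sensorWidthMm / (2 * focalLengthMm));
    const float vertFov = 180 / M_PI * 2 *
            atanf(vertCropFactor * sensorHeightMm / (2 * focalLengthMm));
    // -> roughly 66.6 x 52.5 degrees for these numbers
    printf("FOV: %.1f x %.1f deg\n", horizFov, vertFov);
    return 0;
}
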
- */ -struct Parameters { - /** - * Parameters and other state - */ - int cameraId; - int cameraFacing; - - int previewWidth, previewHeight; - int32_t previewFpsRange[2]; - int previewFps; // deprecated, here only for tracking changes - int previewFormat; - - int previewTransform; // set by CAMERA_CMD_SET_DISPLAY_ORIENTATION - - int pictureWidth, pictureHeight; - - int32_t jpegThumbSize[2]; - uint8_t jpegQuality, jpegThumbQuality; - int32_t jpegRotation; - - bool gpsEnabled; - double gpsCoordinates[3]; - int64_t gpsTimestamp; - String8 gpsProcessingMethod; - - uint8_t wbMode; - uint8_t effectMode; - uint8_t antibandingMode; - uint8_t sceneMode; - - enum flashMode_t { - FLASH_MODE_OFF = 0, - FLASH_MODE_AUTO, - FLASH_MODE_ON, - FLASH_MODE_TORCH, - FLASH_MODE_RED_EYE = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, - FLASH_MODE_INVALID = -1 - } flashMode; - - enum focusMode_t { - FOCUS_MODE_AUTO = ANDROID_CONTROL_AF_MODE_AUTO, - FOCUS_MODE_MACRO = ANDROID_CONTROL_AF_MODE_MACRO, - FOCUS_MODE_CONTINUOUS_VIDEO = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, - FOCUS_MODE_CONTINUOUS_PICTURE = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, - FOCUS_MODE_EDOF = ANDROID_CONTROL_AF_MODE_EDOF, - FOCUS_MODE_INFINITY, - FOCUS_MODE_FIXED, - FOCUS_MODE_INVALID = -1 - } focusMode; - - uint8_t focusState; // Latest focus state from HAL - - // For use with triggerAfWithAuto quirk - focusMode_t shadowFocusMode; - - struct Area { - int left, top, right, bottom; - int weight; - Area() {} - Area(int left, int top, int right, int bottom, int weight): - left(left), top(top), right(right), bottom(bottom), - weight(weight) {} - bool isEmpty() const { - return (left == 0) && (top == 0) && (right == 0) && (bottom == 0); - } - }; - Vector focusingAreas; - - int32_t exposureCompensation; - bool autoExposureLock; - bool autoWhiteBalanceLock; - - Vector meteringAreas; - - int zoom; - - int videoWidth, videoHeight; - - bool recordingHint; - bool videoStabilization; - - enum lightFxMode_t { - LIGHTFX_NONE = 0, - LIGHTFX_LOWLIGHT, - LIGHTFX_HDR - } lightFx; - - CameraParameters params; - String8 paramsFlattened; - - // These parameters are also part of the camera API-visible state, but not - // directly listed in Camera.Parameters - bool storeMetadataInBuffers; - bool playShutterSound; - bool enableFaceDetect; - - bool enableFocusMoveMessages; - int afTriggerCounter; - int currentAfTriggerId; - bool afInMotion; - - int precaptureTriggerCounter; - - uint32_t previewCallbackFlags; - bool previewCallbackOneShot; - bool previewCallbackSurface; - - bool zslMode; - - // Overall camera state - enum State { - DISCONNECTED, - STOPPED, - WAITING_FOR_PREVIEW_WINDOW, - PREVIEW, - RECORD, - STILL_CAPTURE, - VIDEO_SNAPSHOT - } state; - - // Number of zoom steps to simulate - static const unsigned int NUM_ZOOM_STEPS = 100; - - // Full static camera info, object owned by someone else, such as - // Camera2Device. - const CameraMetadata *info; - - // Fast-access static device information; this is a subset of the - // information available through the staticInfo() method, used for - // frequently-accessed values or values that have to be calculated from the - // static information. 
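
// Illustrative sketch (not part of the original sources or this patch): how
// the per-scene-mode overrides kept in DeviceInfo::sceneModeOverrides
// (declared just below) are consumed, mirroring the flash-mode handling in
// Parameters::set() in Parameters.cpp above. The simplified enum, the
// std::map, and pickFlashMode() are hypothetical stand-ins, not the real types.
#include <map>

enum SketchFlashMode { SKETCH_FLASH_INVALID = -1, SKETCH_FLASH_OFF = 0,
                       SKETCH_FLASH_AUTO, SKETCH_FLASH_ON };

struct SketchOverrides {
    SketchFlashMode flashMode;
    SketchOverrides() : flashMode(SKETCH_FLASH_INVALID) {}  // INVALID == "no override"
};

// If a scene mode is active and has an override, the override wins; otherwise
// the app-supplied KEY_FLASH_MODE value is used.
SketchFlashMode pickFlashMode(const std::map<int, SketchOverrides> &overrides,
                              int sceneMode, bool sceneModeSet,
                              SketchFlashMode requestedByApp) {
    SketchFlashMode mode = SKETCH_FLASH_INVALID;
    if (sceneModeSet) {
        std::map<int, SketchOverrides>::const_iterator it = overrides.find(sceneMode);
        if (it != overrides.end()) mode = it->second.flashMode;
    }
    if (mode == SKETCH_FLASH_INVALID) mode = requestedByApp;
    return mode;
}
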
- struct DeviceInfo { - int32_t arrayWidth; - int32_t arrayHeight; - uint8_t bestFaceDetectMode; - int32_t maxFaces; - struct OverrideModes { - flashMode_t flashMode; - uint8_t wbMode; - focusMode_t focusMode; - OverrideModes(): - flashMode(FLASH_MODE_INVALID), - wbMode(ANDROID_CONTROL_AWB_MODE_OFF), - focusMode(FOCUS_MODE_INVALID) { - } - }; - DefaultKeyedVector sceneModeOverrides; - float minFocalLength; - bool useFlexibleYuv; - } fastInfo; - - // Quirks information; these are short-lived flags to enable workarounds for - // incomplete HAL implementations - struct Quirks { - bool triggerAfWithAuto; - bool useZslFormat; - bool meteringCropRegion; - } quirks; - - /** - * Parameter manipulation and setup methods - */ - - Parameters(int cameraId, int cameraFacing); - ~Parameters(); - - // Sets up default parameters - status_t initialize(const CameraMetadata *info); - - // Build fast-access device static info from static info - status_t buildFastInfo(); - // Query for quirks from static info - status_t buildQuirks(); - - // Get entry from camera static characteristics information. min/maxCount - // are used for error checking the number of values in the entry. 0 for - // max/minCount means to do no bounds check in that direction. In case of - // error, the entry data pointer is null and the count is 0. - camera_metadata_ro_entry_t staticInfo(uint32_t tag, - size_t minCount=0, size_t maxCount=0, bool required=true) const; - - // Validate and update camera parameters based on new settings - status_t set(const String8 ¶mString); - - // Retrieve the current settings - String8 get() const; - - // Update passed-in request for common parameters - status_t updateRequest(CameraMetadata *request) const; - - // Add/update JPEG entries in metadata - status_t updateRequestJpeg(CameraMetadata *request) const; - - // Calculate the crop region rectangle based on current stream sizes - struct CropRegion { - float left; - float top; - float width; - float height; - - enum Outputs { - OUTPUT_PREVIEW = 0x01, - OUTPUT_VIDEO = 0x02, - OUTPUT_JPEG_THUMBNAIL = 0x04, - OUTPUT_PICTURE = 0x08, - }; - }; - CropRegion calculateCropRegion(CropRegion::Outputs outputs) const; - - // Calculate the field of view of the high-resolution JPEG capture - status_t calculatePictureFovs(float *horizFov, float *vertFov) const; - - // Static methods for debugging and converting between camera1 and camera2 - // parameters - - static const char *getStateName(State state); - - static int formatStringToEnum(const char *format); - static const char *formatEnumToString(int format); - - static int wbModeStringToEnum(const char *wbMode); - static const char* wbModeEnumToString(uint8_t wbMode); - static int effectModeStringToEnum(const char *effectMode); - static int abModeStringToEnum(const char *abMode); - static int sceneModeStringToEnum(const char *sceneMode); - static flashMode_t flashModeStringToEnum(const char *flashMode); - static const char* flashModeEnumToString(flashMode_t flashMode); - static focusMode_t focusModeStringToEnum(const char *focusMode); - static const char* focusModeEnumToString(focusMode_t focusMode); - static lightFxMode_t lightFxStringToEnum(const char *lightFxMode); - - static status_t parseAreas(const char *areasCStr, - Vector *areas); - - enum AreaKind - { - AREA_KIND_FOCUS, - AREA_KIND_METERING - }; - status_t validateAreas(const Vector &areas, - size_t maxRegions, - AreaKind areaKind) const; - static bool boolFromString(const char *boolStr); - - // Map from camera orientation + facing to gralloc transform enum - 
static int degToTransform(int degrees, bool mirror); - - // API specifies FPS ranges are done in fixed point integer, with LSB = 0.001. - // Note that this doesn't apply to the (deprecated) single FPS value. - static const int kFpsToApiScale = 1000; - - // Transform between (-1000,-1000)-(1000,1000) normalized coords from camera - // API and HAL2 (0,0)-(activePixelArray.width/height) coordinates - int arrayXToNormalized(int width) const; - int arrayYToNormalized(int height) const; - int normalizedXToArray(int x) const; - int normalizedYToArray(int y) const; - - struct Range { - int min; - int max; - }; - - int32_t fpsFromRange(int32_t min, int32_t max) const; - -private: - - // Convert between HAL2 sensor array coordinates and - // viewfinder crop-region relative array coordinates - int cropXToArray(int x) const; - int cropYToArray(int y) const; - int arrayXToCrop(int x) const; - int arrayYToCrop(int y) const; - - // Convert between viewfinder crop-region relative array coordinates - // and camera API (-1000,1000)-(1000,1000) normalized coords - int cropXToNormalized(int x) const; - int cropYToNormalized(int y) const; - int normalizedXToCrop(int x) const; - int normalizedYToCrop(int y) const; -}; - -// This class encapsulates the Parameters class so that it can only be accessed -// by constructing a Lock object, which locks the SharedParameter's mutex. -class SharedParameters { - public: - SharedParameters(int cameraId, int cameraFacing): - mParameters(cameraId, cameraFacing) { - } - - template - class BaseLock { - public: - BaseLock(S &p): - mParameters(p.mParameters), - mSharedParameters(p) { - mSharedParameters.mLock.lock(); - } - - ~BaseLock() { - mSharedParameters.mLock.unlock(); - } - P &mParameters; - private: - // Disallow copying, default construction - BaseLock(); - BaseLock(const BaseLock &); - BaseLock &operator=(const BaseLock &); - S &mSharedParameters; - }; - typedef BaseLock Lock; - typedef BaseLock ReadLock; - - // Access static info, read-only and immutable, so no lock needed - camera_metadata_ro_entry_t staticInfo(uint32_t tag, - size_t minCount=0, size_t maxCount=0) const { - return mParameters.staticInfo(tag, minCount, maxCount); - } - - // Only use for dumping or other debugging - const Parameters &unsafeAccess() { - return mParameters; - } - private: - Parameters mParameters; - mutable Mutex mLock; -}; - - -}; // namespace camera2 -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp b/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp deleted file mode 100644 index 4012fc5..0000000 --- a/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
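
// Illustrative sketch (not part of the original sources or this patch): the
// intended usage of the SharedParameters/BaseLock wrapper declared in
// Parameters.h above -- fields are only reachable through a scoped Lock, so
// the mutex is always held while they are read or written. The function name
// and the particular fields touched here are hypothetical call-site code.
void exampleCallSite(SharedParameters &shared) {
    SharedParameters::Lock l(shared);          // constructor takes the mutex (RAII)
    l.mParameters.previewWidth = 1280;
    l.mParameters.previewHeight = 720;
    l.mParameters.previewFpsRange[0] = 15000;  // 15 fps; API fixed point, LSB = 0.001
    l.mParameters.previewFpsRange[1] = 30000;  // 30 fps
}                                              // destructor releases the mutex
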
- */ - -#define LOG_TAG "Camera2-ProFrameProcessor" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include - -#include "ProFrameProcessor.h" -#include "../CameraDeviceBase.h" - -namespace android { -namespace camera2 { - -ProFrameProcessor::ProFrameProcessor(wp device) : - Thread(/*canCallJava*/false), - mDevice(device) { -} - -ProFrameProcessor::~ProFrameProcessor() { - ALOGV("%s: Exit", __FUNCTION__); -} - -status_t ProFrameProcessor::registerListener(int32_t minId, - int32_t maxId, wp listener) { - Mutex::Autolock l(mInputMutex); - ALOGV("%s: Registering listener for frame id range %d - %d", - __FUNCTION__, minId, maxId); - RangeListener rListener = { minId, maxId, listener }; - mRangeListeners.push_back(rListener); - return OK; -} - -status_t ProFrameProcessor::removeListener(int32_t minId, - int32_t maxId, - wp listener) { - Mutex::Autolock l(mInputMutex); - List::iterator item = mRangeListeners.begin(); - while (item != mRangeListeners.end()) { - if (item->minId == minId && - item->maxId == maxId && - item->listener == listener) { - item = mRangeListeners.erase(item); - } else { - item++; - } - } - return OK; -} - -void ProFrameProcessor::dump(int fd, const Vector& /*args*/) { - String8 result(" Latest received frame:\n"); - write(fd, result.string(), result.size()); - mLastFrame.dump(fd, 2, 6); -} - -bool ProFrameProcessor::threadLoop() { - status_t res; - - sp device; - { - device = mDevice.promote(); - if (device == 0) return false; - } - - res = device->waitForNextFrame(kWaitDuration); - if (res == OK) { - processNewFrames(device); - } else if (res != TIMED_OUT) { - ALOGE("ProFrameProcessor: Error waiting for new " - "frames: %s (%d)", strerror(-res), res); - } - - return true; -} - -void ProFrameProcessor::processNewFrames(const sp &device) { - status_t res; - ATRACE_CALL(); - CameraMetadata frame; - - ALOGV("%s: Camera %d: Process new frames", __FUNCTION__, device->getId()); - - while ( (res = device->getNextFrame(&frame)) == OK) { - - camera_metadata_entry_t entry; - - entry = frame.find(ANDROID_REQUEST_FRAME_COUNT); - if (entry.count == 0) { - ALOGE("%s: Camera %d: Error reading frame number", - __FUNCTION__, device->getId()); - break; - } - ATRACE_INT("cam2_frame", entry.data.i32[0]); - - if (!processSingleFrame(frame, device)) { - break; - } - - if (!frame.isEmpty()) { - mLastFrame.acquire(frame); - } - } - if (res != NOT_ENOUGH_DATA) { - ALOGE("%s: Camera %d: Error getting next frame: %s (%d)", - __FUNCTION__, device->getId(), strerror(-res), res); - return; - } - - return; -} - -bool ProFrameProcessor::processSingleFrame(CameraMetadata &frame, - const sp &device) { - ALOGV("%s: Camera %d: Process single frame (is empty? 
%d)", - __FUNCTION__, device->getId(), frame.isEmpty()); - return processListeners(frame, device) == OK; -} - -status_t ProFrameProcessor::processListeners(const CameraMetadata &frame, - const sp &device) { - ATRACE_CALL(); - camera_metadata_ro_entry_t entry; - - entry = frame.find(ANDROID_REQUEST_ID); - if (entry.count == 0) { - ALOGE("%s: Camera %d: Error reading frame id", - __FUNCTION__, device->getId()); - return BAD_VALUE; - } - int32_t frameId = entry.data.i32[0]; - - List > listeners; - { - Mutex::Autolock l(mInputMutex); - - List::iterator item = mRangeListeners.begin(); - while (item != mRangeListeners.end()) { - if (frameId >= item->minId && - frameId < item->maxId) { - sp listener = item->listener.promote(); - if (listener == 0) { - item = mRangeListeners.erase(item); - continue; - } else { - listeners.push_back(listener); - } - } - item++; - } - } - ALOGV("Got %d range listeners out of %d", listeners.size(), mRangeListeners.size()); - List >::iterator item = listeners.begin(); - for (; item != listeners.end(); item++) { - (*item)->onFrameAvailable(frameId, frame); - } - return OK; -} - -}; // namespace camera2 -}; // namespace android diff --git a/services/camera/libcameraservice/camera2/ProFrameProcessor.h b/services/camera/libcameraservice/camera2/ProFrameProcessor.h deleted file mode 100644 index b82942c..0000000 --- a/services/camera/libcameraservice/camera2/ProFrameProcessor.h +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_PROFRAMEPROCESSOR_H -#define ANDROID_SERVERS_CAMERA_CAMERA2_PROFRAMEPROCESSOR_H - -#include -#include -#include -#include -#include -#include - -namespace android { - -class CameraDeviceBase; - -namespace camera2 { - -/* Output frame metadata processing thread. This thread waits for new - * frames from the device, and analyzes them as necessary. - */ -class ProFrameProcessor: public Thread { - public: - ProFrameProcessor(wp device); - virtual ~ProFrameProcessor(); - - struct FilteredListener: virtual public RefBase { - virtual void onFrameAvailable(int32_t frameId, - const CameraMetadata &frame) = 0; - }; - - // Register a listener for a range of IDs [minId, maxId). 
Multiple listeners - // can be listening to the same range - status_t registerListener(int32_t minId, int32_t maxId, - wp listener); - status_t removeListener(int32_t minId, int32_t maxId, - wp listener); - - void dump(int fd, const Vector& args); - protected: - static const nsecs_t kWaitDuration = 10000000; // 10 ms - wp mDevice; - - virtual bool threadLoop(); - - Mutex mInputMutex; - - struct RangeListener { - int32_t minId; - int32_t maxId; - wp listener; - }; - List mRangeListeners; - - void processNewFrames(const sp &device); - - virtual bool processSingleFrame(CameraMetadata &frame, - const sp &device); - - status_t processListeners(const CameraMetadata &frame, - const sp &device); - - CameraMetadata mLastFrame; -}; - - -}; //namespace camera2 -}; //namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp deleted file mode 100644 index 5981be7..0000000 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp +++ /dev/null @@ -1,880 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "Camera2-StreamingProcessor" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 -//#define LOG_NNDEBUG 0 // Per-frame verbose logging - -#ifdef LOG_NNDEBUG -#define ALOGVV(...) ALOGV(__VA_ARGS__) -#else -#define ALOGVV(...) 
((void)0) -#endif - -#include -#include -#include -#include - -#include "StreamingProcessor.h" -#include "Camera2Heap.h" -#include "../Camera2Client.h" -#include "../CameraDeviceBase.h" - -namespace android { -namespace camera2 { - -StreamingProcessor::StreamingProcessor(sp client): - mClient(client), - mDevice(client->getCameraDevice()), - mId(client->getCameraId()), - mActiveRequest(NONE), - mPaused(false), - mPreviewRequestId(Camera2Client::kPreviewRequestIdStart), - mPreviewStreamId(NO_STREAM), - mRecordingRequestId(Camera2Client::kRecordingRequestIdStart), - mRecordingStreamId(NO_STREAM), - mRecordingFrameAvailable(false), - mRecordingHeapCount(kDefaultRecordingHeapCount), - mRecordingHeapFree(kDefaultRecordingHeapCount) -{ -} - -StreamingProcessor::~StreamingProcessor() { - deletePreviewStream(); - deleteRecordingStream(); -} - -status_t StreamingProcessor::setPreviewWindow(sp window) { - ATRACE_CALL(); - status_t res; - - res = deletePreviewStream(); - if (res != OK) return res; - - Mutex::Autolock m(mMutex); - - mPreviewWindow = window; - - return OK; -} - -bool StreamingProcessor::haveValidPreviewWindow() const { - Mutex::Autolock m(mMutex); - return mPreviewWindow != 0; -} - -status_t StreamingProcessor::updatePreviewRequest(const Parameters ¶ms) { - ATRACE_CALL(); - status_t res; - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - Mutex::Autolock m(mMutex); - if (mPreviewRequest.entryCount() == 0) { - res = device->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, - &mPreviewRequest); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to create default preview request: " - "%s (%d)", __FUNCTION__, mId, strerror(-res), res); - return res; - } - } - - res = params.updateRequest(&mPreviewRequest); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update common entries of preview " - "request: %s (%d)", __FUNCTION__, mId, - strerror(-res), res); - return res; - } - - res = mPreviewRequest.update(ANDROID_REQUEST_ID, - &mPreviewRequestId, 1); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update request id for preview: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - - return OK; -} - -status_t StreamingProcessor::updatePreviewStream(const Parameters ¶ms) { - ATRACE_CALL(); - Mutex::Autolock m(mMutex); - - status_t res; - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - if (mPreviewStreamId != NO_STREAM) { - // Check if stream parameters have to change - uint32_t currentWidth, currentHeight; - res = device->getStreamInfo(mPreviewStreamId, - ¤tWidth, ¤tHeight, 0); - if (res != OK) { - ALOGE("%s: Camera %d: Error querying preview stream info: " - "%s (%d)", __FUNCTION__, mId, strerror(-res), res); - return res; - } - if (currentWidth != (uint32_t)params.previewWidth || - currentHeight != (uint32_t)params.previewHeight) { - ALOGV("%s: Camera %d: Preview size switch: %d x %d -> %d x %d", - __FUNCTION__, mId, currentWidth, currentHeight, - params.previewWidth, params.previewHeight); - res = device->waitUntilDrained(); - if (res != OK) { - ALOGE("%s: Camera %d: Error waiting for preview to drain: " - "%s (%d)", __FUNCTION__, mId, strerror(-res), res); - return res; - } - res = device->deleteStream(mPreviewStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to delete old output stream " - "for preview: %s (%d)", __FUNCTION__, mId, - 
strerror(-res), res); - return res; - } - mPreviewStreamId = NO_STREAM; - } - } - - if (mPreviewStreamId == NO_STREAM) { - res = device->createStream(mPreviewWindow, - params.previewWidth, params.previewHeight, - CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, 0, - &mPreviewStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to create preview stream: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - } - - res = device->setStreamTransform(mPreviewStreamId, - params.previewTransform); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to set preview stream transform: " - "%s (%d)", __FUNCTION__, mId, strerror(-res), res); - return res; - } - - return OK; -} - -status_t StreamingProcessor::deletePreviewStream() { - ATRACE_CALL(); - status_t res; - - Mutex::Autolock m(mMutex); - - if (mPreviewStreamId != NO_STREAM) { - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - ALOGV("%s: for cameraId %d on streamId %d", - __FUNCTION__, mId, mPreviewStreamId); - - res = device->waitUntilDrained(); - if (res != OK) { - ALOGE("%s: Error waiting for preview to drain: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - res = device->deleteStream(mPreviewStreamId); - if (res != OK) { - ALOGE("%s: Unable to delete old preview stream: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - mPreviewStreamId = NO_STREAM; - } - return OK; -} - -int StreamingProcessor::getPreviewStreamId() const { - Mutex::Autolock m(mMutex); - return mPreviewStreamId; -} - -status_t StreamingProcessor::setRecordingBufferCount(size_t count) { - ATRACE_CALL(); - // Make sure we can support this many buffer slots - if (count > BufferQueue::NUM_BUFFER_SLOTS) { - ALOGE("%s: Camera %d: Too many recording buffers requested: %d, max %d", - __FUNCTION__, mId, count, BufferQueue::NUM_BUFFER_SLOTS); - return BAD_VALUE; - } - - Mutex::Autolock m(mMutex); - - ALOGV("%s: Camera %d: New recording buffer count from encoder: %d", - __FUNCTION__, mId, count); - - // Need to re-size consumer and heap - if (mRecordingHeapCount != count) { - ALOGV("%s: Camera %d: Resetting recording heap and consumer", - __FUNCTION__, mId); - - if (isStreamActive(mActiveStreamIds, mRecordingStreamId)) { - ALOGE("%s: Camera %d: Setting recording buffer count when " - "recording stream is already active!", __FUNCTION__, - mId); - return INVALID_OPERATION; - } - - releaseAllRecordingFramesLocked(); - - if (mRecordingHeap != 0) { - mRecordingHeap.clear(); - } - mRecordingHeapCount = count; - mRecordingHeapFree = count; - - mRecordingConsumer.clear(); - } - - return OK; -} - -status_t StreamingProcessor::updateRecordingRequest(const Parameters ¶ms) { - ATRACE_CALL(); - status_t res; - Mutex::Autolock m(mMutex); - - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - if (mRecordingRequest.entryCount() == 0) { - res = device->createDefaultRequest(CAMERA2_TEMPLATE_VIDEO_RECORD, - &mRecordingRequest); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to create default recording request:" - " %s (%d)", __FUNCTION__, mId, strerror(-res), res); - return res; - } - } - - res = params.updateRequest(&mRecordingRequest); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update common entries of recording " - "request: %s (%d)", __FUNCTION__, mId, - strerror(-res), res); - return res; - } - - res = 
mRecordingRequest.update(ANDROID_REQUEST_ID, - &mRecordingRequestId, 1); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update request id for request: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - - return OK; -} - -status_t StreamingProcessor::updateRecordingStream(const Parameters ¶ms) { - ATRACE_CALL(); - status_t res; - Mutex::Autolock m(mMutex); - - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - bool newConsumer = false; - if (mRecordingConsumer == 0) { - ALOGV("%s: Camera %d: Creating recording consumer with %d + 1 " - "consumer-side buffers", __FUNCTION__, mId, mRecordingHeapCount); - // Create CPU buffer queue endpoint. We need one more buffer here so that we can - // always acquire and free a buffer when the heap is full; otherwise the consumer - // will have buffers in flight we'll never clear out. - sp bq = new BufferQueue(); - mRecordingConsumer = new BufferItemConsumer(bq, - GRALLOC_USAGE_HW_VIDEO_ENCODER, - mRecordingHeapCount + 1); - mRecordingConsumer->setFrameAvailableListener(this); - mRecordingConsumer->setName(String8("Camera2-RecordingConsumer")); - mRecordingWindow = new Surface( - mRecordingConsumer->getProducerInterface()); - newConsumer = true; - // Allocate memory later, since we don't know buffer size until receipt - } - - if (mRecordingStreamId != NO_STREAM) { - // Check if stream parameters have to change - uint32_t currentWidth, currentHeight; - res = device->getStreamInfo(mRecordingStreamId, - ¤tWidth, ¤tHeight, 0); - if (res != OK) { - ALOGE("%s: Camera %d: Error querying recording output stream info: " - "%s (%d)", __FUNCTION__, mId, - strerror(-res), res); - return res; - } - if (currentWidth != (uint32_t)params.videoWidth || - currentHeight != (uint32_t)params.videoHeight || newConsumer) { - // TODO: Should wait to be sure previous recording has finished - res = device->deleteStream(mRecordingStreamId); - - if (res == -EBUSY) { - ALOGV("%s: Camera %d: Device is busy, call " - "updateRecordingStream after it becomes idle", - __FUNCTION__, mId); - return res; - } else if (res != OK) { - ALOGE("%s: Camera %d: Unable to delete old output stream " - "for recording: %s (%d)", __FUNCTION__, - mId, strerror(-res), res); - return res; - } - mRecordingStreamId = NO_STREAM; - } - } - - if (mRecordingStreamId == NO_STREAM) { - mRecordingFrameCount = 0; - res = device->createStream(mRecordingWindow, - params.videoWidth, params.videoHeight, - CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, 0, &mRecordingStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Can't create output stream for recording: " - "%s (%d)", __FUNCTION__, mId, - strerror(-res), res); - return res; - } - } - - return OK; -} - -status_t StreamingProcessor::deleteRecordingStream() { - ATRACE_CALL(); - status_t res; - - Mutex::Autolock m(mMutex); - - if (mRecordingStreamId != NO_STREAM) { - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - res = device->waitUntilDrained(); - if (res != OK) { - ALOGE("%s: Error waiting for HAL to drain: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - res = device->deleteStream(mRecordingStreamId); - if (res != OK) { - ALOGE("%s: Unable to delete recording stream: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - mRecordingStreamId = NO_STREAM; - } - return OK; -} - -int 
StreamingProcessor::getRecordingStreamId() const { - return mRecordingStreamId; -} - -status_t StreamingProcessor::startStream(StreamType type, - const Vector &outputStreams) { - ATRACE_CALL(); - status_t res; - - if (type == NONE) return INVALID_OPERATION; - - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - ALOGV("%s: Camera %d: type = %d", __FUNCTION__, mId, type); - - Mutex::Autolock m(mMutex); - - // If a recording stream is being started up, free up any - // outstanding buffers left from the previous recording session. - // There should never be any, so if there are, warn about it. - if (isStreamActive(outputStreams, mRecordingStreamId)) { - releaseAllRecordingFramesLocked(); - } - - ALOGV("%s: Camera %d: %s started, recording heap has %d free of %d", - __FUNCTION__, mId, (type == PREVIEW) ? "preview" : "recording", - mRecordingHeapFree, mRecordingHeapCount); - - CameraMetadata &request = (type == PREVIEW) ? - mPreviewRequest : mRecordingRequest; - - res = request.update( - ANDROID_REQUEST_OUTPUT_STREAMS, - outputStreams); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to set up preview request: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - - res = request.sort(); - if (res != OK) { - ALOGE("%s: Camera %d: Error sorting preview request: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - - res = device->setStreamingRequest(request); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to set preview request to start preview: " - "%s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - mActiveRequest = type; - mPaused = false; - mActiveStreamIds = outputStreams; - return OK; -} - -status_t StreamingProcessor::togglePauseStream(bool pause) { - ATRACE_CALL(); - status_t res; - - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - ALOGV("%s: Camera %d: toggling pause to %d", __FUNCTION__, mId, pause); - - Mutex::Autolock m(mMutex); - - if (mActiveRequest == NONE) { - ALOGE("%s: Camera %d: Can't toggle pause, streaming was not started", - __FUNCTION__, mId); - return INVALID_OPERATION; - } - - if (mPaused == pause) { - return OK; - } - - if (pause) { - res = device->clearStreamingRequest(); - if (res != OK) { - ALOGE("%s: Camera %d: Can't clear stream request: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - } else { - CameraMetadata &request = - (mActiveRequest == PREVIEW) ? 
mPreviewRequest - : mRecordingRequest; - res = device->setStreamingRequest(request); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to set preview request to resume: " - "%s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - } - - mPaused = pause; - return OK; -} - -status_t StreamingProcessor::stopStream() { - ATRACE_CALL(); - status_t res; - - Mutex::Autolock m(mMutex); - - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - res = device->clearStreamingRequest(); - if (res != OK) { - ALOGE("%s: Camera %d: Can't clear stream request: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - - mActiveRequest = NONE; - mActiveStreamIds.clear(); - mPaused = false; - - return OK; -} - -int32_t StreamingProcessor::getActiveRequestId() const { - Mutex::Autolock m(mMutex); - switch (mActiveRequest) { - case NONE: - return 0; - case PREVIEW: - return mPreviewRequestId; - case RECORD: - return mRecordingRequestId; - default: - ALOGE("%s: Unexpected mode %d", __FUNCTION__, mActiveRequest); - return 0; - } -} - -status_t StreamingProcessor::incrementStreamingIds() { - ATRACE_CALL(); - Mutex::Autolock m(mMutex); - - mPreviewRequestId++; - if (mPreviewRequestId >= Camera2Client::kPreviewRequestIdEnd) { - mPreviewRequestId = Camera2Client::kPreviewRequestIdStart; - } - mRecordingRequestId++; - if (mRecordingRequestId >= Camera2Client::kRecordingRequestIdEnd) { - mRecordingRequestId = Camera2Client::kRecordingRequestIdStart; - } - return OK; -} - -void StreamingProcessor::onFrameAvailable() { - ATRACE_CALL(); - Mutex::Autolock l(mMutex); - if (!mRecordingFrameAvailable) { - mRecordingFrameAvailable = true; - mRecordingFrameAvailableSignal.signal(); - } - -} - -bool StreamingProcessor::threadLoop() { - status_t res; - - { - Mutex::Autolock l(mMutex); - while (!mRecordingFrameAvailable) { - res = mRecordingFrameAvailableSignal.waitRelative( - mMutex, kWaitDuration); - if (res == TIMED_OUT) return true; - } - mRecordingFrameAvailable = false; - } - - do { - res = processRecordingFrame(); - } while (res == OK); - - return true; -} - -status_t StreamingProcessor::processRecordingFrame() { - ATRACE_CALL(); - status_t res; - sp recordingHeap; - size_t heapIdx = 0; - nsecs_t timestamp; - - sp client = mClient.promote(); - if (client == 0) { - // Discard frames during shutdown - BufferItemConsumer::BufferItem imgBuffer; - res = mRecordingConsumer->acquireBuffer(&imgBuffer, 0); - if (res != OK) { - if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { - ALOGE("%s: Camera %d: Can't acquire recording buffer: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - } - return res; - } - mRecordingConsumer->releaseBuffer(imgBuffer); - return OK; - } - - { - /* acquire SharedParameters before mMutex so we don't dead lock - with Camera2Client code calling into StreamingProcessor */ - SharedParameters::Lock l(client->getParameters()); - Mutex::Autolock m(mMutex); - BufferItemConsumer::BufferItem imgBuffer; - res = mRecordingConsumer->acquireBuffer(&imgBuffer, 0); - if (res != OK) { - if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { - ALOGE("%s: Camera %d: Can't acquire recording buffer: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - } - return res; - } - timestamp = imgBuffer.mTimestamp; - - mRecordingFrameCount++; - ALOGVV("OnRecordingFrame: Frame %d", mRecordingFrameCount); - - if (l.mParameters.state != Parameters::RECORD && - l.mParameters.state != 
Parameters::VIDEO_SNAPSHOT) { - ALOGV("%s: Camera %d: Discarding recording image buffers " - "received after recording done", __FUNCTION__, - mId); - mRecordingConsumer->releaseBuffer(imgBuffer); - return INVALID_OPERATION; - } - - if (mRecordingHeap == 0) { - const size_t bufferSize = 4 + sizeof(buffer_handle_t); - ALOGV("%s: Camera %d: Creating recording heap with %d buffers of " - "size %d bytes", __FUNCTION__, mId, - mRecordingHeapCount, bufferSize); - - mRecordingHeap = new Camera2Heap(bufferSize, mRecordingHeapCount, - "Camera2Client::RecordingHeap"); - if (mRecordingHeap->mHeap->getSize() == 0) { - ALOGE("%s: Camera %d: Unable to allocate memory for recording", - __FUNCTION__, mId); - mRecordingConsumer->releaseBuffer(imgBuffer); - return NO_MEMORY; - } - for (size_t i = 0; i < mRecordingBuffers.size(); i++) { - if (mRecordingBuffers[i].mBuf != - BufferItemConsumer::INVALID_BUFFER_SLOT) { - ALOGE("%s: Camera %d: Non-empty recording buffers list!", - __FUNCTION__, mId); - } - } - mRecordingBuffers.clear(); - mRecordingBuffers.setCapacity(mRecordingHeapCount); - mRecordingBuffers.insertAt(0, mRecordingHeapCount); - - mRecordingHeapHead = 0; - mRecordingHeapFree = mRecordingHeapCount; - } - - if ( mRecordingHeapFree == 0) { - ALOGE("%s: Camera %d: No free recording buffers, dropping frame", - __FUNCTION__, mId); - mRecordingConsumer->releaseBuffer(imgBuffer); - return NO_MEMORY; - } - - heapIdx = mRecordingHeapHead; - mRecordingHeapHead = (mRecordingHeapHead + 1) % mRecordingHeapCount; - mRecordingHeapFree--; - - ALOGVV("%s: Camera %d: Timestamp %lld", - __FUNCTION__, mId, timestamp); - - ssize_t offset; - size_t size; - sp heap = - mRecordingHeap->mBuffers[heapIdx]->getMemory(&offset, - &size); - - uint8_t *data = (uint8_t*)heap->getBase() + offset; - uint32_t type = kMetadataBufferTypeGrallocSource; - *((uint32_t*)data) = type; - *((buffer_handle_t*)(data + 4)) = imgBuffer.mGraphicBuffer->handle; - ALOGVV("%s: Camera %d: Sending out buffer_handle_t %p", - __FUNCTION__, mId, - imgBuffer.mGraphicBuffer->handle); - mRecordingBuffers.replaceAt(imgBuffer, heapIdx); - recordingHeap = mRecordingHeap; - } - - // Call outside locked parameters to allow re-entrancy from notification - Camera2Client::SharedCameraCallbacks::Lock l(client->mSharedCameraCallbacks); - if (l.mRemoteCallback != 0) { - l.mRemoteCallback->dataCallbackTimestamp(timestamp, - CAMERA_MSG_VIDEO_FRAME, - recordingHeap->mBuffers[heapIdx]); - } else { - ALOGW("%s: Camera %d: Remote callback gone", __FUNCTION__, mId); - } - - return OK; -} - -void StreamingProcessor::releaseRecordingFrame(const sp& mem) { - ATRACE_CALL(); - status_t res; - - Mutex::Autolock m(mMutex); - // Make sure this is for the current heap - ssize_t offset; - size_t size; - sp heap = mem->getMemory(&offset, &size); - if (heap->getHeapID() != mRecordingHeap->mHeap->getHeapID()) { - ALOGW("%s: Camera %d: Mismatched heap ID, ignoring release " - "(got %x, expected %x)", __FUNCTION__, mId, - heap->getHeapID(), mRecordingHeap->mHeap->getHeapID()); - return; - } - uint8_t *data = (uint8_t*)heap->getBase() + offset; - uint32_t type = *(uint32_t*)data; - if (type != kMetadataBufferTypeGrallocSource) { - ALOGE("%s: Camera %d: Recording frame type invalid (got %x, expected %x)", - __FUNCTION__, mId, type, - kMetadataBufferTypeGrallocSource); - return; - } - - // Release the buffer back to the recording queue - - buffer_handle_t imgHandle = *(buffer_handle_t*)(data + 4); - - size_t itemIndex; - for (itemIndex = 0; itemIndex < mRecordingBuffers.size(); itemIndex++) { 
- const BufferItemConsumer::BufferItem item = - mRecordingBuffers[itemIndex]; - if (item.mBuf != BufferItemConsumer::INVALID_BUFFER_SLOT && - item.mGraphicBuffer->handle == imgHandle) { - break; - } - } - if (itemIndex == mRecordingBuffers.size()) { - ALOGE("%s: Camera %d: Can't find buffer_handle_t %p in list of " - "outstanding buffers", __FUNCTION__, mId, - imgHandle); - return; - } - - ALOGVV("%s: Camera %d: Freeing buffer_handle_t %p", __FUNCTION__, - mId, imgHandle); - - res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to free recording frame " - "(buffer_handle_t: %p): %s (%d)", __FUNCTION__, - mId, imgHandle, strerror(-res), res); - return; - } - mRecordingBuffers.replaceAt(itemIndex); - - mRecordingHeapFree++; - ALOGV_IF(mRecordingHeapFree == mRecordingHeapCount, - "%s: Camera %d: All %d recording buffers returned", - __FUNCTION__, mId, mRecordingHeapCount); -} - -void StreamingProcessor::releaseAllRecordingFramesLocked() { - ATRACE_CALL(); - status_t res; - - if (mRecordingConsumer == 0) { - return; - } - - ALOGV("%s: Camera %d: Releasing all recording buffers", __FUNCTION__, - mId); - - size_t releasedCount = 0; - for (size_t itemIndex = 0; itemIndex < mRecordingBuffers.size(); itemIndex++) { - const BufferItemConsumer::BufferItem item = - mRecordingBuffers[itemIndex]; - if (item.mBuf != BufferItemConsumer::INVALID_BUFFER_SLOT) { - res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to free recording frame " - "(buffer_handle_t: %p): %s (%d)", __FUNCTION__, - mId, item.mGraphicBuffer->handle, strerror(-res), res); - } - mRecordingBuffers.replaceAt(itemIndex); - releasedCount++; - } - } - - if (releasedCount > 0) { - ALOGW("%s: Camera %d: Force-freed %d outstanding buffers " - "from previous recording session", __FUNCTION__, mId, releasedCount); - ALOGE_IF(releasedCount != mRecordingHeapCount - mRecordingHeapFree, - "%s: Camera %d: Force-freed %d buffers, but expected %d", - __FUNCTION__, mId, releasedCount, mRecordingHeapCount - mRecordingHeapFree); - } - - mRecordingHeapHead = 0; - mRecordingHeapFree = mRecordingHeapCount; -} - -bool StreamingProcessor::isStreamActive(const Vector &streams, - uint8_t recordingStreamId) { - for (size_t i = 0; i < streams.size(); i++) { - if (streams[i] == recordingStreamId) { - return true; - } - } - return false; -} - - -status_t StreamingProcessor::dump(int fd, const Vector& /*args*/) { - String8 result; - - result.append(" Current requests:\n"); - if (mPreviewRequest.entryCount() != 0) { - result.append(" Preview request:\n"); - write(fd, result.string(), result.size()); - mPreviewRequest.dump(fd, 2, 6); - result.clear(); - } else { - result.append(" Preview request: undefined\n"); - } - - if (mRecordingRequest.entryCount() != 0) { - result = " Recording request:\n"; - write(fd, result.string(), result.size()); - mRecordingRequest.dump(fd, 2, 6); - result.clear(); - } else { - result = " Recording request: undefined\n"; - } - - const char* streamTypeString[] = { - "none", "preview", "record" - }; - result.append(String8::format(" Active request: %s (paused: %s)\n", - streamTypeString[mActiveRequest], - mPaused ? 
"yes" : "no")); - - write(fd, result.string(), result.size()); - - return OK; -} - -}; // namespace camera2 -}; // namespace android diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.h b/services/camera/libcameraservice/camera2/StreamingProcessor.h deleted file mode 100644 index 3ec2df7..0000000 --- a/services/camera/libcameraservice/camera2/StreamingProcessor.h +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_STREAMINGPROCESSOR_H -#define ANDROID_SERVERS_CAMERA_CAMERA2_STREAMINGPROCESSOR_H - -#include -#include -#include - -#include "Parameters.h" -#include "camera/CameraMetadata.h" - -namespace android { - -class Camera2Client; -class CameraDeviceBase; -class IMemory; - -namespace camera2 { - -class Camera2Heap; - -/** - * Management and processing for preview and recording streams - */ -class StreamingProcessor: - public Thread, public BufferItemConsumer::FrameAvailableListener { - public: - StreamingProcessor(sp client); - ~StreamingProcessor(); - - status_t setPreviewWindow(sp window); - - bool haveValidPreviewWindow() const; - - status_t updatePreviewRequest(const Parameters ¶ms); - status_t updatePreviewStream(const Parameters ¶ms); - status_t deletePreviewStream(); - int getPreviewStreamId() const; - - status_t setRecordingBufferCount(size_t count); - status_t updateRecordingRequest(const Parameters ¶ms); - status_t updateRecordingStream(const Parameters ¶ms); - status_t deleteRecordingStream(); - int getRecordingStreamId() const; - - enum StreamType { - NONE, - PREVIEW, - RECORD - }; - status_t startStream(StreamType type, - const Vector &outputStreams); - - // Toggle between paused and unpaused. Stream must be started first. - status_t togglePauseStream(bool pause); - - status_t stopStream(); - - // Returns the request ID for the currently streaming request - // Returns 0 if there is no active request. 
- status_t getActiveRequestId() const; - status_t incrementStreamingIds(); - - // Callback for new recording frames from HAL - virtual void onFrameAvailable(); - // Callback from stagefright which returns used recording frames - void releaseRecordingFrame(const sp& mem); - - status_t dump(int fd, const Vector& args); - - private: - mutable Mutex mMutex; - - enum { - NO_STREAM = -1 - }; - - wp mClient; - wp mDevice; - int mId; - - StreamType mActiveRequest; - bool mPaused; - - Vector mActiveStreamIds; - - // Preview-related members - int32_t mPreviewRequestId; - int mPreviewStreamId; - CameraMetadata mPreviewRequest; - sp mPreviewWindow; - - // Recording-related members - static const nsecs_t kWaitDuration = 50000000; // 50 ms - - int32_t mRecordingRequestId; - int mRecordingStreamId; - int mRecordingFrameCount; - sp mRecordingConsumer; - sp mRecordingWindow; - CameraMetadata mRecordingRequest; - sp mRecordingHeap; - - bool mRecordingFrameAvailable; - Condition mRecordingFrameAvailableSignal; - - static const size_t kDefaultRecordingHeapCount = 8; - size_t mRecordingHeapCount; - Vector mRecordingBuffers; - size_t mRecordingHeapHead, mRecordingHeapFree; - - virtual bool threadLoop(); - - status_t processRecordingFrame(); - - // Unilaterally free any buffers still outstanding to stagefright - void releaseAllRecordingFramesLocked(); - - // Determine if the specified stream is currently in use - static bool isStreamActive(const Vector &streams, - uint8_t recordingStreamId); -}; - - -}; // namespace camera2 -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp deleted file mode 100644 index 0094992..0000000 --- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp +++ /dev/null @@ -1,556 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "Camera2-ZslProcessor" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 -//#define LOG_NNDEBUG 0 - -#ifdef LOG_NNDEBUG -#define ALOGVV(...) ALOGV(__VA_ARGS__) -#else -#define ALOGVV(...) 
((void)0) -#endif - -#include -#include - -#include "ZslProcessor.h" -#include -#include "../CameraDeviceBase.h" -#include "../Camera2Client.h" - - -namespace android { -namespace camera2 { - -ZslProcessor::ZslProcessor( - sp client, - wp sequencer): - Thread(false), - mState(RUNNING), - mClient(client), - mDevice(client->getCameraDevice()), - mSequencer(sequencer), - mId(client->getCameraId()), - mZslBufferAvailable(false), - mZslStreamId(NO_STREAM), - mZslReprocessStreamId(NO_STREAM), - mFrameListHead(0), - mZslQueueHead(0), - mZslQueueTail(0) { - mZslQueue.insertAt(0, kZslBufferDepth); - mFrameList.insertAt(0, kFrameListDepth); - sp captureSequencer = mSequencer.promote(); - if (captureSequencer != 0) captureSequencer->setZslProcessor(this); -} - -ZslProcessor::~ZslProcessor() { - ALOGV("%s: Exit", __FUNCTION__); - deleteStream(); -} - -void ZslProcessor::onFrameAvailable() { - Mutex::Autolock l(mInputMutex); - if (!mZslBufferAvailable) { - mZslBufferAvailable = true; - mZslBufferAvailableSignal.signal(); - } -} - -void ZslProcessor::onFrameAvailable(int32_t /*frameId*/, - const CameraMetadata &frame) { - Mutex::Autolock l(mInputMutex); - camera_metadata_ro_entry_t entry; - entry = frame.find(ANDROID_SENSOR_TIMESTAMP); - nsecs_t timestamp = entry.data.i64[0]; - (void)timestamp; - ALOGVV("Got preview frame for timestamp %lld", timestamp); - - if (mState != RUNNING) return; - - mFrameList.editItemAt(mFrameListHead) = frame; - mFrameListHead = (mFrameListHead + 1) % kFrameListDepth; - - findMatchesLocked(); -} - -void ZslProcessor::onBufferReleased(buffer_handle_t *handle) { - Mutex::Autolock l(mInputMutex); - - // Verify that the buffer is in our queue - size_t i = 0; - for (; i < mZslQueue.size(); i++) { - if (&(mZslQueue[i].buffer.mGraphicBuffer->handle) == handle) break; - } - if (i == mZslQueue.size()) { - ALOGW("%s: Released buffer %p not found in queue", - __FUNCTION__, handle); - } - - // Erase entire ZSL queue since we've now completed the capture and preview - // is stopped. 
- clearZslQueueLocked(); - - mState = RUNNING; -} - -status_t ZslProcessor::updateStream(const Parameters ¶ms) { - ATRACE_CALL(); - ALOGV("%s: Configuring ZSL streams", __FUNCTION__); - status_t res; - - Mutex::Autolock l(mInputMutex); - - sp client = mClient.promote(); - if (client == 0) { - ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - if (mZslConsumer == 0) { - // Create CPU buffer queue endpoint - sp bq = new BufferQueue(); - mZslConsumer = new BufferItemConsumer(bq, - GRALLOC_USAGE_HW_CAMERA_ZSL, - kZslBufferDepth); - mZslConsumer->setFrameAvailableListener(this); - mZslConsumer->setName(String8("Camera2Client::ZslConsumer")); - mZslWindow = new Surface( - mZslConsumer->getProducerInterface()); - } - - if (mZslStreamId != NO_STREAM) { - // Check if stream parameters have to change - uint32_t currentWidth, currentHeight; - res = device->getStreamInfo(mZslStreamId, - ¤tWidth, ¤tHeight, 0); - if (res != OK) { - ALOGE("%s: Camera %d: Error querying capture output stream info: " - "%s (%d)", __FUNCTION__, - mId, strerror(-res), res); - return res; - } - if (currentWidth != (uint32_t)params.fastInfo.arrayWidth || - currentHeight != (uint32_t)params.fastInfo.arrayHeight) { - res = device->deleteReprocessStream(mZslReprocessStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to delete old reprocess stream " - "for ZSL: %s (%d)", __FUNCTION__, - mId, strerror(-res), res); - return res; - } - ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed", - __FUNCTION__, mId, mZslStreamId); - res = device->deleteStream(mZslStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to delete old output stream " - "for ZSL: %s (%d)", __FUNCTION__, - mId, strerror(-res), res); - return res; - } - mZslStreamId = NO_STREAM; - } - } - - if (mZslStreamId == NO_STREAM) { - // Create stream for HAL production - // TODO: Sort out better way to select resolution for ZSL - int streamType = params.quirks.useZslFormat ? 
- (int)CAMERA2_HAL_PIXEL_FORMAT_ZSL : - (int)HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; - res = device->createStream(mZslWindow, - params.fastInfo.arrayWidth, params.fastInfo.arrayHeight, - streamType, 0, - &mZslStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Can't create output stream for ZSL: " - "%s (%d)", __FUNCTION__, mId, - strerror(-res), res); - return res; - } - res = device->createReprocessStreamFromStream(mZslStreamId, - &mZslReprocessStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Can't create reprocess stream for ZSL: " - "%s (%d)", __FUNCTION__, mId, - strerror(-res), res); - return res; - } - } - client->registerFrameListener(Camera2Client::kPreviewRequestIdStart, - Camera2Client::kPreviewRequestIdEnd, - this); - - return OK; -} - -status_t ZslProcessor::deleteStream() { - ATRACE_CALL(); - status_t res; - - Mutex::Autolock l(mInputMutex); - - if (mZslStreamId != NO_STREAM) { - sp device = mDevice.promote(); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - clearZslQueueLocked(); - - res = device->deleteReprocessStream(mZslReprocessStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Cannot delete ZSL reprocessing stream %d: " - "%s (%d)", __FUNCTION__, mId, - mZslReprocessStreamId, strerror(-res), res); - return res; - } - - mZslReprocessStreamId = NO_STREAM; - res = device->deleteStream(mZslStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: " - "%s (%d)", __FUNCTION__, mId, - mZslStreamId, strerror(-res), res); - return res; - } - - mZslWindow.clear(); - mZslConsumer.clear(); - - mZslStreamId = NO_STREAM; - } - return OK; -} - -int ZslProcessor::getStreamId() const { - Mutex::Autolock l(mInputMutex); - return mZslStreamId; -} - -status_t ZslProcessor::pushToReprocess(int32_t requestId) { - ALOGV("%s: Send in reprocess request with id %d", - __FUNCTION__, requestId); - Mutex::Autolock l(mInputMutex); - status_t res; - sp client = mClient.promote(); - - if (client == 0) { - ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - IF_ALOGV() { - dumpZslQueue(-1); - } - - if (mZslQueueTail != mZslQueueHead) { - CameraMetadata request; - size_t index = mZslQueueTail; - while (index != mZslQueueHead) { - if (!mZslQueue[index].frame.isEmpty()) { - request = mZslQueue[index].frame; - break; - } - index = (index + 1) % kZslBufferDepth; - } - if (index == mZslQueueHead) { - ALOGV("%s: ZSL queue has no valid frames to send yet.", - __FUNCTION__); - return NOT_ENOUGH_DATA; - } - // Verify that the frame is reasonable for reprocessing - - camera_metadata_entry_t entry; - entry = request.find(ANDROID_CONTROL_AE_STATE); - if (entry.count == 0) { - ALOGE("%s: ZSL queue frame has no AE state field!", - __FUNCTION__); - return BAD_VALUE; - } - if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED && - entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) { - ALOGV("%s: ZSL queue frame AE state is %d, need full capture", - __FUNCTION__, entry.data.u8[0]); - return NOT_ENOUGH_DATA; - } - - buffer_handle_t *handle = - &(mZslQueue[index].buffer.mGraphicBuffer->handle); - - uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS; - res = request.update(ANDROID_REQUEST_TYPE, - &requestType, 1); - uint8_t inputStreams[1] = - { static_cast(mZslReprocessStreamId) }; - if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS, - inputStreams, 1); - uint8_t outputStreams[1] = - { static_cast(client->getCaptureStreamId()) }; - if 
(res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS, - outputStreams, 1); - res = request.update(ANDROID_REQUEST_ID, - &requestId, 1); - - if (res != OK ) { - ALOGE("%s: Unable to update frame to a reprocess request", __FUNCTION__); - return INVALID_OPERATION; - } - - res = client->stopStream(); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: " - "%s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return INVALID_OPERATION; - } - // TODO: have push-and-clear be atomic - res = client->getCameraDevice()->pushReprocessBuffer(mZslReprocessStreamId, - handle, this); - if (res != OK) { - ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - - // Update JPEG settings - { - SharedParameters::Lock l(client->getParameters()); - res = l.mParameters.updateRequestJpeg(&request); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL " - "capture request: %s (%d)", __FUNCTION__, - mId, - strerror(-res), res); - return res; - } - } - - mLatestCapturedRequest = request; - res = client->getCameraDevice()->capture(request); - if (res != OK ) { - ALOGE("%s: Unable to send ZSL reprocess request to capture: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - - mState = LOCKED; - } else { - ALOGV("%s: No ZSL buffers yet", __FUNCTION__); - return NOT_ENOUGH_DATA; - } - return OK; -} - -status_t ZslProcessor::clearZslQueue() { - Mutex::Autolock l(mInputMutex); - // If in middle of capture, can't clear out queue - if (mState == LOCKED) return OK; - - return clearZslQueueLocked(); -} - -status_t ZslProcessor::clearZslQueueLocked() { - for (size_t i = 0; i < mZslQueue.size(); i++) { - if (mZslQueue[i].buffer.mTimestamp != 0) { - mZslConsumer->releaseBuffer(mZslQueue[i].buffer); - } - mZslQueue.replaceAt(i); - } - mZslQueueHead = 0; - mZslQueueTail = 0; - return OK; -} - -void ZslProcessor::dump(int fd, const Vector& /*args*/) const { - Mutex::Autolock l(mInputMutex); - if (!mLatestCapturedRequest.isEmpty()) { - String8 result(" Latest ZSL capture request:\n"); - write(fd, result.string(), result.size()); - mLatestCapturedRequest.dump(fd, 2, 6); - } else { - String8 result(" Latest ZSL capture request: none yet\n"); - write(fd, result.string(), result.size()); - } - dumpZslQueue(fd); -} - -bool ZslProcessor::threadLoop() { - status_t res; - - { - Mutex::Autolock l(mInputMutex); - while (!mZslBufferAvailable) { - res = mZslBufferAvailableSignal.waitRelative(mInputMutex, - kWaitDuration); - if (res == TIMED_OUT) return true; - } - mZslBufferAvailable = false; - } - - do { - res = processNewZslBuffer(); - } while (res == OK); - - return true; -} - -status_t ZslProcessor::processNewZslBuffer() { - ATRACE_CALL(); - status_t res; - sp zslConsumer; - { - Mutex::Autolock l(mInputMutex); - if (mZslConsumer == 0) return OK; - zslConsumer = mZslConsumer; - } - ALOGVV("Trying to get next buffer"); - BufferItemConsumer::BufferItem item; - res = zslConsumer->acquireBuffer(&item, 0); - if (res != OK) { - if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { - ALOGE("%s: Camera %d: Error receiving ZSL image buffer: " - "%s (%d)", __FUNCTION__, - mId, strerror(-res), res); - } else { - ALOGVV(" No buffer"); - } - return res; - } - - Mutex::Autolock l(mInputMutex); - - if (mState == LOCKED) { - ALOGVV("In capture, discarding new ZSL buffers"); - zslConsumer->releaseBuffer(item); - return OK; - } - - ALOGVV("Got ZSL buffer: head: %d, tail: %d", mZslQueueHead, mZslQueueTail); - - if ( 
(mZslQueueHead + 1) % kZslBufferDepth == mZslQueueTail) { - ALOGVV("Releasing oldest buffer"); - zslConsumer->releaseBuffer(mZslQueue[mZslQueueTail].buffer); - mZslQueue.replaceAt(mZslQueueTail); - mZslQueueTail = (mZslQueueTail + 1) % kZslBufferDepth; - } - - ZslPair &queueHead = mZslQueue.editItemAt(mZslQueueHead); - - queueHead.buffer = item; - queueHead.frame.release(); - - mZslQueueHead = (mZslQueueHead + 1) % kZslBufferDepth; - - ALOGVV(" Acquired buffer, timestamp %lld", queueHead.buffer.mTimestamp); - - findMatchesLocked(); - - return OK; -} - -void ZslProcessor::findMatchesLocked() { - ALOGVV("Scanning"); - for (size_t i = 0; i < mZslQueue.size(); i++) { - ZslPair &queueEntry = mZslQueue.editItemAt(i); - nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp; - IF_ALOGV() { - camera_metadata_entry_t entry; - nsecs_t frameTimestamp = 0; - if (!queueEntry.frame.isEmpty()) { - entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP); - frameTimestamp = entry.data.i64[0]; - } - ALOGVV(" %d: b: %lld\tf: %lld", i, - bufferTimestamp, frameTimestamp ); - } - if (queueEntry.frame.isEmpty() && bufferTimestamp != 0) { - // Have buffer, no matching frame. Look for one - for (size_t j = 0; j < mFrameList.size(); j++) { - bool match = false; - CameraMetadata &frame = mFrameList.editItemAt(j); - if (!frame.isEmpty()) { - camera_metadata_entry_t entry; - entry = frame.find(ANDROID_SENSOR_TIMESTAMP); - if (entry.count == 0) { - ALOGE("%s: Can't find timestamp in frame!", - __FUNCTION__); - continue; - } - nsecs_t frameTimestamp = entry.data.i64[0]; - if (bufferTimestamp == frameTimestamp) { - ALOGVV("%s: Found match %lld", __FUNCTION__, - frameTimestamp); - match = true; - } else { - int64_t delta = abs(bufferTimestamp - frameTimestamp); - if ( delta < 1000000) { - ALOGVV("%s: Found close match %lld (delta %lld)", - __FUNCTION__, bufferTimestamp, delta); - match = true; - } - } - } - if (match) { - queueEntry.frame.acquire(frame); - break; - } - } - } - } -} - -void ZslProcessor::dumpZslQueue(int fd) const { - String8 header("ZSL queue contents:"); - String8 indent(" "); - ALOGV("%s", header.string()); - if (fd != -1) { - header = indent + header + "\n"; - write(fd, header.string(), header.size()); - } - for (size_t i = 0; i < mZslQueue.size(); i++) { - const ZslPair &queueEntry = mZslQueue[i]; - nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp; - camera_metadata_ro_entry_t entry; - nsecs_t frameTimestamp = 0; - int frameAeState = -1; - if (!queueEntry.frame.isEmpty()) { - entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP); - if (entry.count > 0) frameTimestamp = entry.data.i64[0]; - entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE); - if (entry.count > 0) frameAeState = entry.data.u8[0]; - } - String8 result = - String8::format(" %d: b: %lld\tf: %lld, AE state: %d", i, - bufferTimestamp, frameTimestamp, frameAeState); - ALOGV("%s", result.string()); - if (fd != -1) { - result = indent + result + "\n"; - write(fd, result.string(), result.size()); - } - - } -} - -}; // namespace camera2 -}; // namespace android diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/camera2/ZslProcessor.h deleted file mode 100644 index 27b597e..0000000 --- a/services/camera/libcameraservice/camera2/ZslProcessor.h +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR_H -#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR_H - -#include -#include -#include -#include -#include -#include -#include "Parameters.h" -#include "FrameProcessor.h" -#include "camera/CameraMetadata.h" -#include "Camera2Heap.h" -#include "../CameraDeviceBase.h" -#include "ZslProcessorInterface.h" - -namespace android { - -class Camera2Client; - -namespace camera2 { - -class CaptureSequencer; - -/*** - * ZSL queue processing - */ -class ZslProcessor: - virtual public Thread, - virtual public BufferItemConsumer::FrameAvailableListener, - virtual public FrameProcessor::FilteredListener, - virtual public CameraDeviceBase::BufferReleasedListener, - public ZslProcessorInterface { - public: - ZslProcessor(sp client, wp sequencer); - ~ZslProcessor(); - - // From mZslConsumer - virtual void onFrameAvailable(); - // From FrameProcessor - virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame); - - virtual void onBufferReleased(buffer_handle_t *handle); - - /** - **************************************** - * ZslProcessorInterface implementation * - **************************************** - */ - - status_t updateStream(const Parameters ¶ms); - status_t deleteStream(); - int getStreamId() const; - - status_t pushToReprocess(int32_t requestId); - status_t clearZslQueue(); - - void dump(int fd, const Vector& args) const; - private: - static const nsecs_t kWaitDuration = 10000000; // 10 ms - - enum { - RUNNING, - LOCKED - } mState; - - wp mClient; - wp mDevice; - wp mSequencer; - int mId; - - mutable Mutex mInputMutex; - bool mZslBufferAvailable; - Condition mZslBufferAvailableSignal; - - enum { - NO_STREAM = -1 - }; - - int mZslStreamId; - int mZslReprocessStreamId; - sp mZslConsumer; - sp mZslWindow; - - struct ZslPair { - BufferItemConsumer::BufferItem buffer; - CameraMetadata frame; - }; - - static const size_t kZslBufferDepth = 4; - static const size_t kFrameListDepth = kZslBufferDepth * 2; - Vector mFrameList; - size_t mFrameListHead; - - ZslPair mNextPair; - - Vector mZslQueue; - size_t mZslQueueHead; - size_t mZslQueueTail; - - CameraMetadata mLatestCapturedRequest; - - virtual bool threadLoop(); - - status_t processNewZslBuffer(); - - // Match up entries from frame list to buffers in ZSL queue - void findMatchesLocked(); - - status_t clearZslQueueLocked(); - - void dumpZslQueue(int id) const; -}; - - -}; //namespace camera2 -}; //namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/ZslProcessor3.cpp b/services/camera/libcameraservice/camera2/ZslProcessor3.cpp deleted file mode 100644 index 40c77df..0000000 --- a/services/camera/libcameraservice/camera2/ZslProcessor3.cpp +++ /dev/null @@ -1,482 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "Camera2-ZslProcessor3" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 -//#define LOG_NNDEBUG 0 - -#ifdef LOG_NNDEBUG -#define ALOGVV(...) ALOGV(__VA_ARGS__) -#else -#define ALOGVV(...) ((void)0) -#endif - -#include -#include - -#include "ZslProcessor3.h" -#include -#include "../CameraDeviceBase.h" -#include "../Camera3Device.h" -#include "../Camera2Client.h" - - -namespace android { -namespace camera2 { - -ZslProcessor3::ZslProcessor3( - sp client, - wp sequencer): - Thread(false), - mState(RUNNING), - mClient(client), - mSequencer(sequencer), - mId(client->getCameraId()), - mZslStreamId(NO_STREAM), - mFrameListHead(0), - mZslQueueHead(0), - mZslQueueTail(0) { - mZslQueue.insertAt(0, kZslBufferDepth); - mFrameList.insertAt(0, kFrameListDepth); - sp captureSequencer = mSequencer.promote(); - if (captureSequencer != 0) captureSequencer->setZslProcessor(this); -} - -ZslProcessor3::~ZslProcessor3() { - ALOGV("%s: Exit", __FUNCTION__); - deleteStream(); -} - -void ZslProcessor3::onFrameAvailable(int32_t /*frameId*/, - const CameraMetadata &frame) { - Mutex::Autolock l(mInputMutex); - camera_metadata_ro_entry_t entry; - entry = frame.find(ANDROID_SENSOR_TIMESTAMP); - nsecs_t timestamp = entry.data.i64[0]; - (void)timestamp; - ALOGVV("Got preview metadata for timestamp %lld", timestamp); - - if (mState != RUNNING) return; - - mFrameList.editItemAt(mFrameListHead) = frame; - mFrameListHead = (mFrameListHead + 1) % kFrameListDepth; -} - -status_t ZslProcessor3::updateStream(const Parameters ¶ms) { - ATRACE_CALL(); - ALOGV("%s: Configuring ZSL streams", __FUNCTION__); - status_t res; - - Mutex::Autolock l(mInputMutex); - - sp client = mClient.promote(); - if (client == 0) { - ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - sp device = - static_cast(client->getCameraDevice().get()); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - if (mZslStreamId != NO_STREAM) { - // Check if stream parameters have to change - uint32_t currentWidth, currentHeight; - res = device->getStreamInfo(mZslStreamId, - ¤tWidth, ¤tHeight, 0); - if (res != OK) { - ALOGE("%s: Camera %d: Error querying capture output stream info: " - "%s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); - return res; - } - if (currentWidth != (uint32_t)params.fastInfo.arrayWidth || - currentHeight != (uint32_t)params.fastInfo.arrayHeight) { - ALOGV("%s: Camera %d: Deleting stream %d since the buffer " - "dimensions changed", - __FUNCTION__, client->getCameraId(), mZslStreamId); - res = device->deleteStream(mZslStreamId); - if (res == -EBUSY) { - ALOGV("%s: Camera %d: Device is busy, call updateStream again " - " after it becomes idle", __FUNCTION__, mId); - return res; - } else if(res != OK) { - ALOGE("%s: Camera %d: Unable to delete old output stream " - "for ZSL: %s (%d)", __FUNCTION__, - client->getCameraId(), strerror(-res), res); - return res; - } - mZslStreamId = NO_STREAM; - } - } - - if (mZslStreamId == NO_STREAM) { - // Create stream for 
HAL production - // TODO: Sort out better way to select resolution for ZSL - - // Note that format specified internally in Camera3ZslStream - res = device->createZslStream( - params.fastInfo.arrayWidth, params.fastInfo.arrayHeight, - kZslBufferDepth, - &mZslStreamId, - &mZslStream); - if (res != OK) { - ALOGE("%s: Camera %d: Can't create ZSL stream: " - "%s (%d)", __FUNCTION__, client->getCameraId(), - strerror(-res), res); - return res; - } - } - client->registerFrameListener(Camera2Client::kPreviewRequestIdStart, - Camera2Client::kPreviewRequestIdEnd, - this); - - return OK; -} - -status_t ZslProcessor3::deleteStream() { - ATRACE_CALL(); - status_t res; - - Mutex::Autolock l(mInputMutex); - - if (mZslStreamId != NO_STREAM) { - sp client = mClient.promote(); - if (client == 0) { - ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - sp device = - reinterpret_cast(client->getCameraDevice().get()); - if (device == 0) { - ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - res = device->deleteStream(mZslStreamId); - if (res != OK) { - ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: " - "%s (%d)", __FUNCTION__, client->getCameraId(), - mZslStreamId, strerror(-res), res); - return res; - } - - mZslStreamId = NO_STREAM; - } - return OK; -} - -int ZslProcessor3::getStreamId() const { - Mutex::Autolock l(mInputMutex); - return mZslStreamId; -} - -status_t ZslProcessor3::pushToReprocess(int32_t requestId) { - ALOGV("%s: Send in reprocess request with id %d", - __FUNCTION__, requestId); - Mutex::Autolock l(mInputMutex); - status_t res; - sp client = mClient.promote(); - - if (client == 0) { - ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId); - return INVALID_OPERATION; - } - - IF_ALOGV() { - dumpZslQueue(-1); - } - - size_t metadataIdx; - nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx); - - if (candidateTimestamp == -1) { - ALOGE("%s: Could not find good candidate for ZSL reprocessing", - __FUNCTION__); - return NOT_ENOUGH_DATA; - } - - res = mZslStream->enqueueInputBufferByTimestamp(candidateTimestamp, - /*actualTimestamp*/NULL); - - if (res == mZslStream->NO_BUFFER_AVAILABLE) { - ALOGV("%s: No ZSL buffers yet", __FUNCTION__); - return NOT_ENOUGH_DATA; - } else if (res != OK) { - ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - - { - CameraMetadata request = mFrameList[metadataIdx]; - - // Verify that the frame is reasonable for reprocessing - - camera_metadata_entry_t entry; - entry = request.find(ANDROID_CONTROL_AE_STATE); - if (entry.count == 0) { - ALOGE("%s: ZSL queue frame has no AE state field!", - __FUNCTION__); - return BAD_VALUE; - } - if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED && - entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) { - ALOGV("%s: ZSL queue frame AE state is %d, need full capture", - __FUNCTION__, entry.data.u8[0]); - return NOT_ENOUGH_DATA; - } - - uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS; - res = request.update(ANDROID_REQUEST_TYPE, - &requestType, 1); - uint8_t inputStreams[1] = - { static_cast(mZslStreamId) }; - if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS, - inputStreams, 1); - // TODO: Shouldn't we also update the latest preview frame? 
- uint8_t outputStreams[1] = - { static_cast(client->getCaptureStreamId()) }; - if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS, - outputStreams, 1); - res = request.update(ANDROID_REQUEST_ID, - &requestId, 1); - - if (res != OK ) { - ALOGE("%s: Unable to update frame to a reprocess request", - __FUNCTION__); - return INVALID_OPERATION; - } - - res = client->stopStream(); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: " - "%s (%d)", - __FUNCTION__, client->getCameraId(), strerror(-res), res); - return INVALID_OPERATION; - } - - // Update JPEG settings - { - SharedParameters::Lock l(client->getParameters()); - res = l.mParameters.updateRequestJpeg(&request); - if (res != OK) { - ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL " - "capture request: %s (%d)", __FUNCTION__, - client->getCameraId(), - strerror(-res), res); - return res; - } - } - - mLatestCapturedRequest = request; - res = client->getCameraDevice()->capture(request); - if (res != OK ) { - ALOGE("%s: Unable to send ZSL reprocess request to capture: %s" - " (%d)", __FUNCTION__, strerror(-res), res); - return res; - } - - mState = LOCKED; - } - - return OK; -} - -status_t ZslProcessor3::clearZslQueue() { - Mutex::Autolock l(mInputMutex); - // If in middle of capture, can't clear out queue - if (mState == LOCKED) return OK; - - return clearZslQueueLocked(); -} - -status_t ZslProcessor3::clearZslQueueLocked() { - if (mZslStream != 0) { - return mZslStream->clearInputRingBuffer(); - } - return OK; -} - -void ZslProcessor3::dump(int fd, const Vector& /*args*/) const { - Mutex::Autolock l(mInputMutex); - if (!mLatestCapturedRequest.isEmpty()) { - String8 result(" Latest ZSL capture request:\n"); - write(fd, result.string(), result.size()); - mLatestCapturedRequest.dump(fd, 2, 6); - } else { - String8 result(" Latest ZSL capture request: none yet\n"); - write(fd, result.string(), result.size()); - } - dumpZslQueue(fd); -} - -bool ZslProcessor3::threadLoop() { - // TODO: remove dependency on thread. For now, shut thread down right - // away. 
- return false; -} - -void ZslProcessor3::dumpZslQueue(int fd) const { - String8 header("ZSL queue contents:"); - String8 indent(" "); - ALOGV("%s", header.string()); - if (fd != -1) { - header = indent + header + "\n"; - write(fd, header.string(), header.size()); - } - for (size_t i = 0; i < mZslQueue.size(); i++) { - const ZslPair &queueEntry = mZslQueue[i]; - nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp; - camera_metadata_ro_entry_t entry; - nsecs_t frameTimestamp = 0; - int frameAeState = -1; - if (!queueEntry.frame.isEmpty()) { - entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP); - if (entry.count > 0) frameTimestamp = entry.data.i64[0]; - entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE); - if (entry.count > 0) frameAeState = entry.data.u8[0]; - } - String8 result = - String8::format(" %d: b: %lld\tf: %lld, AE state: %d", i, - bufferTimestamp, frameTimestamp, frameAeState); - ALOGV("%s", result.string()); - if (fd != -1) { - result = indent + result + "\n"; - write(fd, result.string(), result.size()); - } - - } -} - -nsecs_t ZslProcessor3::getCandidateTimestampLocked(size_t* metadataIdx) const { - /** - * Find the smallest timestamp we know about so far - * - ensure that aeState is either converged or locked - */ - - size_t idx = 0; - nsecs_t minTimestamp = -1; - - size_t emptyCount = mFrameList.size(); - - for (size_t j = 0; j < mFrameList.size(); j++) { - const CameraMetadata &frame = mFrameList[j]; - if (!frame.isEmpty()) { - - emptyCount--; - - camera_metadata_ro_entry_t entry; - entry = frame.find(ANDROID_SENSOR_TIMESTAMP); - if (entry.count == 0) { - ALOGE("%s: Can't find timestamp in frame!", - __FUNCTION__); - continue; - } - nsecs_t frameTimestamp = entry.data.i64[0]; - if (minTimestamp > frameTimestamp || minTimestamp == -1) { - - entry = frame.find(ANDROID_CONTROL_AE_STATE); - - if (entry.count == 0) { - /** - * This is most likely a HAL bug. The aeState field is - * mandatory, so it should always be in a metadata packet. - */ - ALOGW("%s: ZSL queue frame has no AE state field!", - __FUNCTION__); - continue; - } - if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED && - entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) { - ALOGVV("%s: ZSL queue frame AE state is %d, need " - "full capture", __FUNCTION__, entry.data.u8[0]); - continue; - } - - minTimestamp = frameTimestamp; - idx = j; - } - - ALOGVV("%s: Saw timestamp %lld", __FUNCTION__, frameTimestamp); - } - } - - if (emptyCount == mFrameList.size()) { - /** - * This could be mildly bad and means our ZSL was triggered before - * there were any frames yet received by the camera framework. - * - * This is a fairly corner case which can happen under: - * + a user presses the shutter button real fast when the camera starts - * (startPreview followed immediately by takePicture). - * + burst capture case (hitting shutter button as fast possible) - * - * If this happens in steady case (preview running for a while, call - * a single takePicture) then this might be a fwk bug. 
- */ - ALOGW("%s: ZSL queue has no metadata frames", __FUNCTION__); - } - - ALOGV("%s: Candidate timestamp %lld (idx %d), empty frames: %d", - __FUNCTION__, minTimestamp, idx, emptyCount); - - if (metadataIdx) { - *metadataIdx = idx; - } - - return minTimestamp; -} - -void ZslProcessor3::onBufferAcquired(const BufferInfo& /*bufferInfo*/) { - // Intentionally left empty - // Although theoretically we could use this to get better dump info -} - -void ZslProcessor3::onBufferReleased(const BufferInfo& bufferInfo) { - Mutex::Autolock l(mInputMutex); - - // ignore output buffers - if (bufferInfo.mOutput) { - return; - } - - // TODO: Verify that the buffer is in our queue by looking at timestamp - // theoretically unnecessary unless we change the following assumptions: - // -- only 1 buffer reprocessed at a time (which is the case now) - - // Erase entire ZSL queue since we've now completed the capture and preview - // is stopped. - // - // We need to guarantee that if we do two back-to-back captures, - // the second won't use a buffer that's older/the same as the first, which - // is theoretically possible if we don't clear out the queue and the - // selection criteria is something like 'newest'. Clearing out the queue - // on a completed capture ensures we'll only use new data. - ALOGV("%s: Memory optimization, clearing ZSL queue", - __FUNCTION__); - clearZslQueueLocked(); - - // Required so we accept more ZSL requests - mState = RUNNING; -} - -}; // namespace camera2 -}; // namespace android diff --git a/services/camera/libcameraservice/camera2/ZslProcessor3.h b/services/camera/libcameraservice/camera2/ZslProcessor3.h deleted file mode 100644 index cb98b99..0000000 --- a/services/camera/libcameraservice/camera2/ZslProcessor3.h +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR3_H -#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR3_H - -#include -#include -#include -#include -#include -#include -#include "Parameters.h" -#include "FrameProcessor.h" -#include "camera/CameraMetadata.h" -#include "Camera2Heap.h" -#include "../CameraDeviceBase.h" -#include "ZslProcessorInterface.h" -#include "../camera3/Camera3ZslStream.h" - -namespace android { - -class Camera2Client; - -namespace camera2 { - -class CaptureSequencer; - -/*** - * ZSL queue processing - */ -class ZslProcessor3 : - public ZslProcessorInterface, - public camera3::Camera3StreamBufferListener, - virtual public Thread, - virtual public FrameProcessor::FilteredListener { - public: - ZslProcessor3(sp client, wp sequencer); - ~ZslProcessor3(); - - // From FrameProcessor - virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame); - - /** - **************************************** - * ZslProcessorInterface implementation * - **************************************** - */ - - virtual status_t updateStream(const Parameters ¶ms); - virtual status_t deleteStream(); - virtual int getStreamId() const; - - virtual status_t pushToReprocess(int32_t requestId); - virtual status_t clearZslQueue(); - - void dump(int fd, const Vector& args) const; - - protected: - /** - ********************************************** - * Camera3StreamBufferListener implementation * - ********************************************** - */ - typedef camera3::Camera3StreamBufferListener::BufferInfo BufferInfo; - // Buffer was acquired by the HAL - virtual void onBufferAcquired(const BufferInfo& bufferInfo); - // Buffer was released by the HAL - virtual void onBufferReleased(const BufferInfo& bufferInfo); - - private: - static const nsecs_t kWaitDuration = 10000000; // 10 ms - - enum { - RUNNING, - LOCKED - } mState; - - wp mClient; - wp mSequencer; - - const int mId; - - mutable Mutex mInputMutex; - - enum { - NO_STREAM = -1 - }; - - int mZslStreamId; - sp mZslStream; - - struct ZslPair { - BufferItemConsumer::BufferItem buffer; - CameraMetadata frame; - }; - - static const size_t kZslBufferDepth = 4; - static const size_t kFrameListDepth = kZslBufferDepth * 2; - Vector mFrameList; - size_t mFrameListHead; - - ZslPair mNextPair; - - Vector mZslQueue; - size_t mZslQueueHead; - size_t mZslQueueTail; - - CameraMetadata mLatestCapturedRequest; - - virtual bool threadLoop(); - - status_t clearZslQueueLocked(); - - void dumpZslQueue(int id) const; - - nsecs_t getCandidateTimestampLocked(size_t* metadataIdx) const; -}; - - -}; //namespace camera2 -}; //namespace android - -#endif diff --git a/services/camera/libcameraservice/camera2/ZslProcessorInterface.h b/services/camera/libcameraservice/camera2/ZslProcessorInterface.h deleted file mode 100644 index 183c0c2..0000000 --- a/services/camera/libcameraservice/camera2/ZslProcessorInterface.h +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSORINTERFACE_H
-#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSORINTERFACE_H
-
-#include
-#include
-
-namespace android {
-namespace camera2 {
-
-class Parameters;
-
-class ZslProcessorInterface : virtual public RefBase {
-public:
-
-    // Get ID for use with android.request.outputStreams / inputStreams
-    virtual int getStreamId() const = 0;
-
-    // Update the streams by recreating them if the size/format has changed
-    virtual status_t updateStream(const Parameters& params) = 0;
-
-    // Delete the underlying CameraDevice streams
-    virtual status_t deleteStream() = 0;
-
-    /**
-     * Submits a ZSL capture request (id = requestId)
-     *
-     * An appropriate ZSL buffer is selected by the closest timestamp,
-     * then we push that buffer to be reprocessed by the HAL.
-     * A capture request is created and submitted on behalf of the client.
-     */
-    virtual status_t pushToReprocess(int32_t requestId) = 0;
-
-    // Flush the ZSL buffer queue, freeing up all the buffers
-    virtual status_t clearZslQueue() = 0;
-
-    // (Debugging only) Dump the current state to the specified file descriptor
-    virtual void dump(int fd, const Vector& args) const = 0;
-};
-
-}; //namespace camera2
-}; //namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/camera3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/camera3/Camera3IOStreamBase.cpp
deleted file mode 100644
index 0850566..0000000
--- a/services/camera/libcameraservice/camera3/Camera3IOStreamBase.cpp
+++ /dev/null
@@ -1,275 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ - -#define LOG_TAG "Camera3-IOStreamBase" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -// This is needed for stdint.h to define INT64_MAX in C++ -#define __STDC_LIMIT_MACROS - -#include -#include -#include "Camera3IOStreamBase.h" - -namespace android { - -namespace camera3 { - -Camera3IOStreamBase::Camera3IOStreamBase(int id, camera3_stream_type_t type, - uint32_t width, uint32_t height, size_t maxSize, int format) : - Camera3Stream(id, type, - width, height, maxSize, format), - mTotalBufferCount(0), - mDequeuedBufferCount(0), - mFrameCount(0), - mLastTimestamp(0) { - - mCombinedFence = new Fence(); - - if (maxSize > 0 && format != HAL_PIXEL_FORMAT_BLOB) { - ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__, - format); - mState = STATE_ERROR; - } -} - -Camera3IOStreamBase::~Camera3IOStreamBase() { - disconnectLocked(); -} - -bool Camera3IOStreamBase::hasOutstandingBuffersLocked() const { - nsecs_t signalTime = mCombinedFence->getSignalTime(); - ALOGV("%s: Stream %d: Has %d outstanding buffers," - " buffer signal time is %lld", - __FUNCTION__, mId, mDequeuedBufferCount, signalTime); - if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) { - return true; - } - return false; -} - -status_t Camera3IOStreamBase::waitUntilIdle(nsecs_t timeout) { - status_t res; - { - Mutex::Autolock l(mLock); - while (mDequeuedBufferCount > 0) { - if (timeout != TIMEOUT_NEVER) { - nsecs_t startTime = systemTime(); - res = mBufferReturnedSignal.waitRelative(mLock, timeout); - if (res == TIMED_OUT) { - return res; - } else if (res != OK) { - ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - nsecs_t deltaTime = systemTime() - startTime; - if (timeout <= deltaTime) { - timeout = 0; - } else { - timeout -= deltaTime; - } - } else { - res = mBufferReturnedSignal.wait(mLock); - if (res != OK) { - ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - } - } - } - - // No lock - - unsigned int timeoutMs; - if (timeout == TIMEOUT_NEVER) { - timeoutMs = Fence::TIMEOUT_NEVER; - } else if (timeout == 0) { - timeoutMs = 0; - } else { - // Round up to wait at least 1 ms - timeoutMs = (timeout + 999999) / 1000000; - } - - return mCombinedFence->wait(timeoutMs); -} - -void Camera3IOStreamBase::dump(int fd, const Vector &args) const { - (void) args; - String8 lines; - lines.appendFormat(" State: %d\n", mState); - lines.appendFormat(" Dims: %d x %d, format 0x%x\n", - camera3_stream::width, camera3_stream::height, - camera3_stream::format); - lines.appendFormat(" Max size: %d\n", mMaxSize); - lines.appendFormat(" Usage: %d, max HAL buffers: %d\n", - camera3_stream::usage, camera3_stream::max_buffers); - lines.appendFormat(" Frames produced: %d, last timestamp: %lld ns\n", - mFrameCount, mLastTimestamp); - lines.appendFormat(" Total buffers: %d, currently dequeued: %d\n", - mTotalBufferCount, mDequeuedBufferCount); - write(fd, lines.string(), lines.size()); -} - -status_t Camera3IOStreamBase::configureQueueLocked() { - status_t res; - - switch (mState) { - case STATE_IN_RECONFIG: - res = disconnectLocked(); - if (res != OK) { - return res; - } - break; - case STATE_IN_CONFIG: - // OK - break; - default: - ALOGE("%s: Bad state: %d", __FUNCTION__, mState); - return INVALID_OPERATION; - } - - return OK; -} - -size_t Camera3IOStreamBase::getBufferCountLocked() { - return mTotalBufferCount; -} - -status_t Camera3IOStreamBase::disconnectLocked() { - switch (mState) { - 
case STATE_IN_RECONFIG: - case STATE_CONFIGURED: - // OK - break; - default: - // No connection, nothing to do - ALOGV("%s: Stream %d: Already disconnected", - __FUNCTION__, mId); - return -ENOTCONN; - } - - if (mDequeuedBufferCount > 0) { - ALOGE("%s: Can't disconnect with %d buffers still dequeued!", - __FUNCTION__, mDequeuedBufferCount); - return INVALID_OPERATION; - } - - return OK; -} - -void Camera3IOStreamBase::handoutBufferLocked(camera3_stream_buffer &buffer, - buffer_handle_t *handle, - int acquireFence, - int releaseFence, - camera3_buffer_status_t status) { - /** - * Note that all fences are now owned by HAL. - */ - - // Handing out a raw pointer to this object. Increment internal refcount. - incStrong(this); - buffer.stream = this; - buffer.buffer = handle; - buffer.acquire_fence = acquireFence; - buffer.release_fence = releaseFence; - buffer.status = status; - - mDequeuedBufferCount++; -} - -status_t Camera3IOStreamBase::getBufferPreconditionCheckLocked() const { - // Allow dequeue during IN_[RE]CONFIG for registration - if (mState != STATE_CONFIGURED && - mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) { - ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d", - __FUNCTION__, mId, mState); - return INVALID_OPERATION; - } - - // Only limit dequeue amount when fully configured - if (mState == STATE_CONFIGURED && - mDequeuedBufferCount == camera3_stream::max_buffers) { - ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous" - " buffers (%d)", __FUNCTION__, mId, - camera3_stream::max_buffers); - return INVALID_OPERATION; - } - - return OK; -} - -status_t Camera3IOStreamBase::returnBufferPreconditionCheckLocked() const { - // Allow buffers to be returned in the error state, to allow for disconnect - // and in the in-config states for registration - if (mState == STATE_CONSTRUCTED) { - ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d", - __FUNCTION__, mId, mState); - return INVALID_OPERATION; - } - if (mDequeuedBufferCount == 0) { - ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__, - mId); - return INVALID_OPERATION; - } - - return OK; -} - -status_t Camera3IOStreamBase::returnAnyBufferLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp, - bool output) { - status_t res; - - // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be - // decrementing the internal refcount next. In case this is the last ref, we - // might get destructed on the decStrong(), so keep an sp around until the - // end of the call - otherwise have to sprinkle the decStrong on all exit - // points. 
- sp keepAlive(this); - decStrong(this); - - if ((res = returnBufferPreconditionCheckLocked()) != OK) { - return res; - } - - sp releaseFence; - res = returnBufferCheckedLocked(buffer, timestamp, output, - &releaseFence); - if (res != OK) { - return res; - } - - mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); - - mDequeuedBufferCount--; - mBufferReturnedSignal.signal(); - - if (output) { - mLastTimestamp = timestamp; - } - - return OK; -} - - - -}; // namespace camera3 - -}; // namespace android diff --git a/services/camera/libcameraservice/camera3/Camera3IOStreamBase.h b/services/camera/libcameraservice/camera3/Camera3IOStreamBase.h deleted file mode 100644 index 74c4484..0000000 --- a/services/camera/libcameraservice/camera3/Camera3IOStreamBase.h +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA3_IO_STREAM_BASE_H -#define ANDROID_SERVERS_CAMERA3_IO_STREAM_BASE_H - -#include -#include - -#include "Camera3Stream.h" - -namespace android { - -namespace camera3 { - -/** - * A base class for managing a single stream of I/O data from the camera device. - */ -class Camera3IOStreamBase : - public Camera3Stream { - protected: - Camera3IOStreamBase(int id, camera3_stream_type_t type, - uint32_t width, uint32_t height, size_t maxSize, int format); - - public: - - virtual ~Camera3IOStreamBase(); - - /** - * Camera3Stream interface - */ - - virtual status_t waitUntilIdle(nsecs_t timeout); - virtual void dump(int fd, const Vector &args) const; - - protected: - size_t mTotalBufferCount; - // sum of input and output buffers that are currently acquired by HAL - size_t mDequeuedBufferCount; - Condition mBufferReturnedSignal; - uint32_t mFrameCount; - // Last received output buffer's timestamp - nsecs_t mLastTimestamp; - - // The merged release fence for all returned buffers - sp mCombinedFence; - - status_t returnAnyBufferLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp, - bool output); - - virtual status_t returnBufferCheckedLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp, - bool output, - /*out*/ - sp *releaseFenceOut) = 0; - - /** - * Internal Camera3Stream interface - */ - virtual bool hasOutstandingBuffersLocked() const; - - virtual size_t getBufferCountLocked(); - - status_t getBufferPreconditionCheckLocked() const; - status_t returnBufferPreconditionCheckLocked() const; - - // State check only - virtual status_t configureQueueLocked(); - // State checks only - virtual status_t disconnectLocked(); - - // Hand out the buffer to a native location, - // incrementing the internal refcount and dequeued buffer count - void handoutBufferLocked(camera3_stream_buffer &buffer, - buffer_handle_t *handle, - int acquire_fence, - int release_fence, - camera3_buffer_status_t status); - -}; // class Camera3IOStreamBase - -} // namespace camera3 - -} // namespace android - -#endif diff --git 
a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp deleted file mode 100644 index e9a9c2b..0000000 --- a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp +++ /dev/null @@ -1,239 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "Camera3-InputStream" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include -#include "Camera3InputStream.h" - -namespace android { - -namespace camera3 { - -Camera3InputStream::Camera3InputStream(int id, - uint32_t width, uint32_t height, int format) : - Camera3IOStreamBase(id, CAMERA3_STREAM_INPUT, width, height, - /*maxSize*/0, format) { - - if (format == HAL_PIXEL_FORMAT_BLOB) { - ALOGE("%s: Bad format, BLOB not supported", __FUNCTION__); - mState = STATE_ERROR; - } -} - -Camera3InputStream::~Camera3InputStream() { - disconnectLocked(); -} - -status_t Camera3InputStream::getInputBufferLocked( - camera3_stream_buffer *buffer) { - ATRACE_CALL(); - status_t res; - - // FIXME: will not work in (re-)registration - if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) { - ALOGE("%s: Stream %d: Buffer registration for input streams" - " not implemented (state %d)", - __FUNCTION__, mId, mState); - return INVALID_OPERATION; - } - - if ((res = getBufferPreconditionCheckLocked()) != OK) { - return res; - } - - ANativeWindowBuffer* anb; - int fenceFd; - - assert(mConsumer != 0); - - BufferItem bufferItem; - res = mConsumer->acquireBuffer(&bufferItem, /*waitForFence*/false); - - if (res != OK) { - ALOGE("%s: Stream %d: Can't acquire next output buffer: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - - anb = bufferItem.mGraphicBuffer->getNativeBuffer(); - assert(anb != NULL); - fenceFd = bufferItem.mFence->dup(); - - /** - * FenceFD now owned by HAL except in case of error, - * in which case we reassign it to acquire_fence - */ - handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd, - /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK); - mBuffersInFlight.push_back(bufferItem); - - return OK; -} - -status_t Camera3InputStream::returnBufferCheckedLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp, - bool output, - /*out*/ - sp *releaseFenceOut) { - - (void)timestamp; - (void)output; - ALOG_ASSERT(!output, "Expected output to be false"); - - status_t res; - - bool bufferFound = false; - BufferItem bufferItem; - { - // Find the buffer we are returning - Vector::iterator it, end; - for (it = mBuffersInFlight.begin(), end = mBuffersInFlight.end(); - it != end; - ++it) { - - const BufferItem& tmp = *it; - ANativeWindowBuffer *anb = tmp.mGraphicBuffer->getNativeBuffer(); - if (anb != NULL && &(anb->handle) == buffer.buffer) { - bufferFound = true; - bufferItem = tmp; - mBuffersInFlight.erase(it); - mDequeuedBufferCount--; - } - } - } - if (!bufferFound) { - ALOGE("%s: Stream %d: Can't return buffer that 
wasn't sent to HAL", - __FUNCTION__, mId); - return INVALID_OPERATION; - } - - if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { - if (buffer.release_fence != -1) { - ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when " - "there is an error", __FUNCTION__, mId, buffer.release_fence); - close(buffer.release_fence); - } - - /** - * Reassign release fence as the acquire fence incase of error - */ - const_cast(&buffer)->release_fence = - buffer.acquire_fence; - } - - /** - * Unconditionally return buffer to the buffer queue. - * - Fwk takes over the release_fence ownership - */ - sp releaseFence = new Fence(buffer.release_fence); - res = mConsumer->releaseBuffer(bufferItem, releaseFence); - if (res != OK) { - ALOGE("%s: Stream %d: Error releasing buffer back to buffer queue:" - " %s (%d)", __FUNCTION__, mId, strerror(-res), res); - return res; - } - - *releaseFenceOut = releaseFence; - - return OK; -} - -status_t Camera3InputStream::returnInputBufferLocked( - const camera3_stream_buffer &buffer) { - ATRACE_CALL(); - - return returnAnyBufferLocked(buffer, /*timestamp*/0, /*output*/false); -} - -status_t Camera3InputStream::disconnectLocked() { - - status_t res; - - if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) { - return res; - } - - assert(mBuffersInFlight.size() == 0); - - /** - * no-op since we can't disconnect the producer from the consumer-side - */ - - mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG - : STATE_CONSTRUCTED; - return OK; -} - -sp Camera3InputStream::getProducerInterface() const { - return mConsumer->getProducerInterface(); -} - -void Camera3InputStream::dump(int fd, const Vector &args) const { - (void) args; - String8 lines; - lines.appendFormat(" Stream[%d]: Input\n", mId); - write(fd, lines.string(), lines.size()); - - Camera3IOStreamBase::dump(fd, args); -} - -status_t Camera3InputStream::configureQueueLocked() { - status_t res; - - if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) { - return res; - } - - assert(mMaxSize == 0); - assert(camera3_stream::format != HAL_PIXEL_FORMAT_BLOB); - - mTotalBufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS + - camera3_stream::max_buffers; - mDequeuedBufferCount = 0; - mFrameCount = 0; - - if (mConsumer.get() == 0) { - sp bq = new BufferQueue(); - mConsumer = new BufferItemConsumer(bq, camera3_stream::usage, - mTotalBufferCount); - mConsumer->setName(String8::format("Camera3-InputStream-%d", mId)); - } - - res = mConsumer->setDefaultBufferSize(camera3_stream::width, - camera3_stream::height); - if (res != OK) { - ALOGE("%s: Stream %d: Could not set buffer dimensions %dx%d", - __FUNCTION__, mId, camera3_stream::width, camera3_stream::height); - return res; - } - res = mConsumer->setDefaultBufferFormat(camera3_stream::format); - if (res != OK) { - ALOGE("%s: Stream %d: Could not set buffer format %d", - __FUNCTION__, mId, camera3_stream::format); - return res; - } - - return OK; -} - -}; // namespace camera3 - -}; // namespace android diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.h b/services/camera/libcameraservice/camera3/Camera3InputStream.h deleted file mode 100644 index 8adda88..0000000 --- a/services/camera/libcameraservice/camera3/Camera3InputStream.h +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA3_INPUT_STREAM_H -#define ANDROID_SERVERS_CAMERA3_INPUT_STREAM_H - -#include -#include -#include - -#include "Camera3IOStreamBase.h" - -namespace android { - -namespace camera3 { - -/** - * A class for managing a single stream of input data to the camera device. - * - * This class serves as a consumer adapter for the HAL, and will consume the - * buffers by feeding them into the HAL, as well as releasing the buffers back - * the buffers once the HAL is done with them. - */ -class Camera3InputStream : public Camera3IOStreamBase { - public: - /** - * Set up a stream for formats that have fixed size, such as RAW and YUV. - */ - Camera3InputStream(int id, uint32_t width, uint32_t height, int format); - ~Camera3InputStream(); - - virtual void dump(int fd, const Vector &args) const; - - /** - * Get the producer interface for this stream, to hand off to a producer. - * The producer must be connected to the provided interface before - * finishConfigure is called on this stream. - */ - sp getProducerInterface() const; - - private: - - typedef BufferItemConsumer::BufferItem BufferItem; - - sp mConsumer; - Vector mBuffersInFlight; - - /** - * Camera3IOStreamBase - */ - virtual status_t returnBufferCheckedLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp, - bool output, - /*out*/ - sp *releaseFenceOut); - - /** - * Camera3Stream interface - */ - - virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer); - virtual status_t returnInputBufferLocked( - const camera3_stream_buffer &buffer); - virtual status_t disconnectLocked(); - - virtual status_t configureQueueLocked(); - -}; // class Camera3InputStream - -}; // namespace camera3 - -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp deleted file mode 100644 index 0ec2b05..0000000 --- a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp +++ /dev/null @@ -1,369 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#define LOG_TAG "Camera3-OutputStream" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include -#include "Camera3OutputStream.h" - -#ifndef container_of -#define container_of(ptr, type, member) \ - (type *)((char*)(ptr) - offsetof(type, member)) -#endif - -namespace android { - -namespace camera3 { - -Camera3OutputStream::Camera3OutputStream(int id, - sp consumer, - uint32_t width, uint32_t height, int format) : - Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, - /*maxSize*/0, format), - mConsumer(consumer), - mTransform(0) { - - if (mConsumer == NULL) { - ALOGE("%s: Consumer is NULL!", __FUNCTION__); - mState = STATE_ERROR; - } -} - -Camera3OutputStream::Camera3OutputStream(int id, - sp consumer, - uint32_t width, uint32_t height, size_t maxSize, int format) : - Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, maxSize, - format), - mConsumer(consumer), - mTransform(0) { - - if (format != HAL_PIXEL_FORMAT_BLOB) { - ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__, - format); - mState = STATE_ERROR; - } - - if (mConsumer == NULL) { - ALOGE("%s: Consumer is NULL!", __FUNCTION__); - mState = STATE_ERROR; - } -} - -Camera3OutputStream::Camera3OutputStream(int id, camera3_stream_type_t type, - uint32_t width, uint32_t height, - int format) : - Camera3IOStreamBase(id, type, width, height, - /*maxSize*/0, - format), - mTransform(0) { - - // Subclasses expected to initialize mConsumer themselves -} - - -Camera3OutputStream::~Camera3OutputStream() { - disconnectLocked(); -} - -status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer) { - ATRACE_CALL(); - status_t res; - - if ((res = getBufferPreconditionCheckLocked()) != OK) { - return res; - } - - ANativeWindowBuffer* anb; - int fenceFd; - - res = mConsumer->dequeueBuffer(mConsumer.get(), &anb, &fenceFd); - if (res != OK) { - ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - - /** - * FenceFD now owned by HAL except in case of error, - * in which case we reassign it to acquire_fence - */ - handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd, - /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK); - - return OK; -} - -status_t Camera3OutputStream::returnBufferLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp) { - ATRACE_CALL(); - - status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true); - - if (res != OK) { - return res; - } - - mLastTimestamp = timestamp; - - return OK; -} - -status_t Camera3OutputStream::returnBufferCheckedLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp, - bool output, - /*out*/ - sp *releaseFenceOut) { - - (void)output; - ALOG_ASSERT(output, "Expected output to be true"); - - status_t res; - sp releaseFence; - - /** - * Fence management - calculate Release Fence - */ - if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { - if (buffer.release_fence != -1) { - ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when " - "there is an error", __FUNCTION__, mId, buffer.release_fence); - close(buffer.release_fence); - } - - /** - * Reassign release fence as the acquire fence in case of error - */ - releaseFence = new Fence(buffer.acquire_fence); - } else { - res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp); - if (res != OK) { - ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - - releaseFence = new 
Fence(buffer.release_fence); - } - - int anwReleaseFence = releaseFence->dup(); - - /** - * Release the lock briefly to avoid deadlock with - * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this - * thread will go into StreamingProcessor::onFrameAvailable) during - * queueBuffer - */ - sp currentConsumer = mConsumer; - mLock.unlock(); - - /** - * Return buffer back to ANativeWindow - */ - if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { - // Cancel buffer - res = currentConsumer->cancelBuffer(currentConsumer.get(), - container_of(buffer.buffer, ANativeWindowBuffer, handle), - anwReleaseFence); - if (res != OK) { - ALOGE("%s: Stream %d: Error cancelling buffer to native window:" - " %s (%d)", __FUNCTION__, mId, strerror(-res), res); - } - } else { - res = currentConsumer->queueBuffer(currentConsumer.get(), - container_of(buffer.buffer, ANativeWindowBuffer, handle), - anwReleaseFence); - if (res != OK) { - ALOGE("%s: Stream %d: Error queueing buffer to native window: " - "%s (%d)", __FUNCTION__, mId, strerror(-res), res); - } - } - mLock.lock(); - if (res != OK) { - close(anwReleaseFence); - return res; - } - - *releaseFenceOut = releaseFence; - - return OK; -} - -void Camera3OutputStream::dump(int fd, const Vector &args) const { - (void) args; - String8 lines; - lines.appendFormat(" Stream[%d]: Output\n", mId); - write(fd, lines.string(), lines.size()); - - Camera3IOStreamBase::dump(fd, args); -} - -status_t Camera3OutputStream::setTransform(int transform) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - return setTransformLocked(transform); -} - -status_t Camera3OutputStream::setTransformLocked(int transform) { - status_t res = OK; - if (mState == STATE_ERROR) { - ALOGE("%s: Stream in error state", __FUNCTION__); - return INVALID_OPERATION; - } - - mTransform = transform; - if (mState == STATE_CONFIGURED) { - res = native_window_set_buffers_transform(mConsumer.get(), - transform); - if (res != OK) { - ALOGE("%s: Unable to configure stream transform to %x: %s (%d)", - __FUNCTION__, transform, strerror(-res), res); - } - } - return res; -} - -status_t Camera3OutputStream::configureQueueLocked() { - status_t res; - - if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) { - return res; - } - - ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL"); - - // Configure consumer-side ANativeWindow interface - res = native_window_api_connect(mConsumer.get(), - NATIVE_WINDOW_API_CAMERA); - if (res != OK) { - ALOGE("%s: Unable to connect to native window for stream %d", - __FUNCTION__, mId); - return res; - } - - res = native_window_set_usage(mConsumer.get(), camera3_stream::usage); - if (res != OK) { - ALOGE("%s: Unable to configure usage %08x for stream %d", - __FUNCTION__, camera3_stream::usage, mId); - return res; - } - - res = native_window_set_scaling_mode(mConsumer.get(), - NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); - if (res != OK) { - ALOGE("%s: Unable to configure stream scaling: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - - if (mMaxSize == 0) { - // For buffers of known size - res = native_window_set_buffers_geometry(mConsumer.get(), - camera3_stream::width, camera3_stream::height, - camera3_stream::format); - } else { - // For buffers with bounded size - res = native_window_set_buffers_geometry(mConsumer.get(), - mMaxSize, 1, - camera3_stream::format); - } - if (res != OK) { - ALOGE("%s: Unable to configure stream buffer geometry" - " %d x %d, format %x for stream %d", - __FUNCTION__, camera3_stream::width, 
camera3_stream::height, - camera3_stream::format, mId); - return res; - } - - int maxConsumerBuffers; - res = mConsumer->query(mConsumer.get(), - NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers); - if (res != OK) { - ALOGE("%s: Unable to query consumer undequeued" - " buffer count for stream %d", __FUNCTION__, mId); - return res; - } - - ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__, - maxConsumerBuffers, camera3_stream::max_buffers); - if (camera3_stream::max_buffers == 0) { - ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1", - __FUNCTION__, camera3_stream::max_buffers); - return INVALID_OPERATION; - } - - mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers; - mDequeuedBufferCount = 0; - mFrameCount = 0; - mLastTimestamp = 0; - - res = native_window_set_buffer_count(mConsumer.get(), - mTotalBufferCount); - if (res != OK) { - ALOGE("%s: Unable to set buffer count for stream %d", - __FUNCTION__, mId); - return res; - } - - res = native_window_set_buffers_transform(mConsumer.get(), - mTransform); - if (res != OK) { - ALOGE("%s: Unable to configure stream transform to %x: %s (%d)", - __FUNCTION__, mTransform, strerror(-res), res); - } - - return OK; -} - -status_t Camera3OutputStream::disconnectLocked() { - status_t res; - - if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) { - return res; - } - - res = native_window_api_disconnect(mConsumer.get(), - NATIVE_WINDOW_API_CAMERA); - - /** - * This is not an error. if client calling process dies, the window will - * also die and all calls to it will return DEAD_OBJECT, thus it's already - * "disconnected" - */ - if (res == DEAD_OBJECT) { - ALOGW("%s: While disconnecting stream %d from native window, the" - " native window died from under us", __FUNCTION__, mId); - } - else if (res != OK) { - ALOGE("%s: Unable to disconnect stream %d from native window " - "(error %d %s)", - __FUNCTION__, mId, res, strerror(-res)); - mState = STATE_ERROR; - return res; - } - - mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG - : STATE_CONSTRUCTED; - return OK; -} - -}; // namespace camera3 - -}; // namespace android diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.h b/services/camera/libcameraservice/camera3/Camera3OutputStream.h deleted file mode 100644 index 774fbdd..0000000 --- a/services/camera/libcameraservice/camera3/Camera3OutputStream.h +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H -#define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H - -#include -#include - -#include "Camera3Stream.h" -#include "Camera3IOStreamBase.h" -#include "Camera3OutputStreamInterface.h" - -namespace android { - -namespace camera3 { - -/** - * A class for managing a single stream of output data from the camera device. 
- */ -class Camera3OutputStream : - public Camera3IOStreamBase, - public Camera3OutputStreamInterface { - public: - /** - * Set up a stream for formats that have 2 dimensions, such as RAW and YUV. - */ - Camera3OutputStream(int id, sp consumer, - uint32_t width, uint32_t height, int format); - - /** - * Set up a stream for formats that have a variable buffer size for the same - * dimensions, such as compressed JPEG. - */ - Camera3OutputStream(int id, sp consumer, - uint32_t width, uint32_t height, size_t maxSize, int format); - - virtual ~Camera3OutputStream(); - - /** - * Camera3Stream interface - */ - - virtual void dump(int fd, const Vector &args) const; - - /** - * Set the transform on the output stream; one of the - * HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants. - */ - status_t setTransform(int transform); - - protected: - Camera3OutputStream(int id, camera3_stream_type_t type, - uint32_t width, uint32_t height, int format); - - /** - * Note that we release the lock briefly in this function - */ - virtual status_t returnBufferCheckedLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp, - bool output, - /*out*/ - sp *releaseFenceOut); - - sp mConsumer; - private: - int mTransform; - - virtual status_t setTransformLocked(int transform); - - /** - * Internal Camera3Stream interface - */ - virtual status_t getBufferLocked(camera3_stream_buffer *buffer); - virtual status_t returnBufferLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp); - - virtual status_t configureQueueLocked(); - virtual status_t disconnectLocked(); -}; // class Camera3OutputStream - -} // namespace camera3 - -} // namespace android - -#endif diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h deleted file mode 100644 index aae72cf..0000000 --- a/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_INTERFACE_H -#define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_INTERFACE_H - -#include "Camera3StreamInterface.h" - -namespace android { - -namespace camera3 { - -/** - * An interface for managing a single stream of output data from the camera - * device. - */ -class Camera3OutputStreamInterface : public virtual Camera3StreamInterface { - public: - /** - * Set the transform on the output stream; one of the - * HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants. 
- */ - virtual status_t setTransform(int transform) = 0; -}; - -} // namespace camera3 - -} // namespace android - -#endif diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.cpp b/services/camera/libcameraservice/camera3/Camera3Stream.cpp deleted file mode 100644 index ab563df..0000000 --- a/services/camera/libcameraservice/camera3/Camera3Stream.cpp +++ /dev/null @@ -1,383 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "Camera3-Stream" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include -#include "Camera3Stream.h" - -namespace android { - -namespace camera3 { - -Camera3Stream::~Camera3Stream() { -} - -Camera3Stream* Camera3Stream::cast(camera3_stream *stream) { - return static_cast(stream); -} - -const Camera3Stream* Camera3Stream::cast(const camera3_stream *stream) { - return static_cast(stream); -} - -Camera3Stream::Camera3Stream(int id, - camera3_stream_type type, - uint32_t width, uint32_t height, size_t maxSize, int format) : - camera3_stream(), - mId(id), - mName(String8::format("Camera3Stream[%d]", id)), - mMaxSize(maxSize), - mState(STATE_CONSTRUCTED) { - - camera3_stream::stream_type = type; - camera3_stream::width = width; - camera3_stream::height = height; - camera3_stream::format = format; - camera3_stream::usage = 0; - camera3_stream::max_buffers = 0; - camera3_stream::priv = NULL; - - if (format == HAL_PIXEL_FORMAT_BLOB && maxSize == 0) { - ALOGE("%s: BLOB format with size == 0", __FUNCTION__); - mState = STATE_ERROR; - } -} - -int Camera3Stream::getId() const { - return mId; -} - -uint32_t Camera3Stream::getWidth() const { - return camera3_stream::width; -} - -uint32_t Camera3Stream::getHeight() const { - return camera3_stream::height; -} - -int Camera3Stream::getFormat() const { - return camera3_stream::format; -} - -camera3_stream* Camera3Stream::startConfiguration() { - Mutex::Autolock l(mLock); - - switch (mState) { - case STATE_ERROR: - ALOGE("%s: In error state", __FUNCTION__); - return NULL; - case STATE_CONSTRUCTED: - // OK - break; - case STATE_IN_CONFIG: - case STATE_IN_RECONFIG: - // Can start config again with no trouble; but don't redo - // oldUsage/oldMaxBuffers - return this; - case STATE_CONFIGURED: - if (stream_type == CAMERA3_STREAM_INPUT) { - ALOGE("%s: Cannot configure an input stream twice", - __FUNCTION__); - return NULL; - } else if (hasOutstandingBuffersLocked()) { - ALOGE("%s: Cannot configure stream; has outstanding buffers", - __FUNCTION__); - return NULL; - } - break; - default: - ALOGE("%s: Unknown state %d", __FUNCTION__, mState); - return NULL; - } - - oldUsage = usage; - oldMaxBuffers = max_buffers; - - if (mState == STATE_CONSTRUCTED) { - mState = STATE_IN_CONFIG; - } else { // mState == STATE_CONFIGURED - mState = STATE_IN_RECONFIG; - } - - return this; -} - -bool Camera3Stream::isConfiguring() const { - Mutex::Autolock l(mLock); - return (mState == STATE_IN_CONFIG) || (mState == 
STATE_IN_RECONFIG); -} - -status_t Camera3Stream::finishConfiguration(camera3_device *hal3Device) { - Mutex::Autolock l(mLock); - switch (mState) { - case STATE_ERROR: - ALOGE("%s: In error state", __FUNCTION__); - return INVALID_OPERATION; - case STATE_IN_CONFIG: - case STATE_IN_RECONFIG: - // OK - break; - case STATE_CONSTRUCTED: - case STATE_CONFIGURED: - ALOGE("%s: Cannot finish configuration that hasn't been started", - __FUNCTION__); - return INVALID_OPERATION; - default: - ALOGE("%s: Unknown state", __FUNCTION__); - return INVALID_OPERATION; - } - - // Check if the stream configuration is unchanged, and skip reallocation if - // so. As documented in hardware/camera3.h:configure_streams(). - if (mState == STATE_IN_RECONFIG && - oldUsage == usage && - oldMaxBuffers == max_buffers) { - mState = STATE_CONFIGURED; - return OK; - } - - status_t res; - res = configureQueueLocked(); - if (res != OK) { - ALOGE("%s: Unable to configure stream %d queue: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - mState = STATE_ERROR; - return res; - } - - res = registerBuffersLocked(hal3Device); - if (res != OK) { - ALOGE("%s: Unable to register stream buffers with HAL: %s (%d)", - __FUNCTION__, strerror(-res), res); - mState = STATE_ERROR; - return res; - } - - mState = STATE_CONFIGURED; - - return res; -} - -status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - status_t res = getBufferLocked(buffer); - if (res == OK) { - fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true); - } - - return res; -} - -status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer, - nsecs_t timestamp) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - status_t res = returnBufferLocked(buffer, timestamp); - if (res == OK) { - fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/true); - } - - return res; -} - -status_t Camera3Stream::getInputBuffer(camera3_stream_buffer *buffer) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - status_t res = getInputBufferLocked(buffer); - if (res == OK) { - fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/false); - } - - return res; -} - -status_t Camera3Stream::returnInputBuffer(const camera3_stream_buffer &buffer) { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - - status_t res = returnInputBufferLocked(buffer); - if (res == OK) { - fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/false); - } - return res; -} - -void Camera3Stream::fireBufferListenersLocked( - const camera3_stream_buffer& /*buffer*/, bool acquired, bool output) { - List >::iterator it, end; - - // TODO: finish implementing - - Camera3StreamBufferListener::BufferInfo info = - Camera3StreamBufferListener::BufferInfo(); - info.mOutput = output; - // TODO: rest of fields - - for (it = mBufferListenerList.begin(), end = mBufferListenerList.end(); - it != end; - ++it) { - - sp listener = it->promote(); - if (listener != 0) { - if (acquired) { - listener->onBufferAcquired(info); - } else { - listener->onBufferReleased(info); - } - } - } -} - -bool Camera3Stream::hasOutstandingBuffers() const { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - return hasOutstandingBuffersLocked(); -} - -status_t Camera3Stream::disconnect() { - ATRACE_CALL(); - Mutex::Autolock l(mLock); - ALOGV("%s: Stream %d: Disconnecting...", __FUNCTION__, mId); - status_t res = disconnectLocked(); - - if (res == -ENOTCONN) { - // "Already disconnected" -- not an error - return OK; - } else { - return res; - } -} - -status_t 
Camera3Stream::registerBuffersLocked(camera3_device *hal3Device) { - ATRACE_CALL(); - status_t res; - - size_t bufferCount = getBufferCountLocked(); - - Vector buffers; - buffers.insertAt(NULL, 0, bufferCount); - - camera3_stream_buffer_set bufferSet = camera3_stream_buffer_set(); - bufferSet.stream = this; - bufferSet.num_buffers = bufferCount; - bufferSet.buffers = buffers.editArray(); - - Vector streamBuffers; - streamBuffers.insertAt(camera3_stream_buffer_t(), 0, bufferCount); - - // Register all buffers with the HAL. This means getting all the buffers - // from the stream, providing them to the HAL with the - // register_stream_buffers() method, and then returning them back to the - // stream in the error state, since they won't have valid data. - // - // Only registered buffers can be sent to the HAL. - - uint32_t bufferIdx = 0; - for (; bufferIdx < bufferCount; bufferIdx++) { - res = getBufferLocked( &streamBuffers.editItemAt(bufferIdx) ); - if (res != OK) { - ALOGE("%s: Unable to get buffer %d for registration with HAL", - __FUNCTION__, bufferIdx); - // Skip registering, go straight to cleanup - break; - } - - sp fence = new Fence(streamBuffers[bufferIdx].acquire_fence); - fence->waitForever("Camera3Stream::registerBuffers"); - - buffers.editItemAt(bufferIdx) = streamBuffers[bufferIdx].buffer; - } - if (bufferIdx == bufferCount) { - // Got all buffers, register with HAL - ALOGV("%s: Registering %d buffers with camera HAL", - __FUNCTION__, bufferCount); - ATRACE_BEGIN("camera3->register_stream_buffers"); - res = hal3Device->ops->register_stream_buffers(hal3Device, - &bufferSet); - ATRACE_END(); - } - - // Return all valid buffers to stream, in ERROR state to indicate - // they weren't filled. - for (size_t i = 0; i < bufferIdx; i++) { - streamBuffers.editItemAt(i).release_fence = -1; - streamBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR; - returnBufferLocked(streamBuffers[i], 0); - } - - return res; -} - -status_t Camera3Stream::getBufferLocked(camera3_stream_buffer *) { - ALOGE("%s: This type of stream does not support output", __FUNCTION__); - return INVALID_OPERATION; -} -status_t Camera3Stream::returnBufferLocked(const camera3_stream_buffer &, - nsecs_t) { - ALOGE("%s: This type of stream does not support output", __FUNCTION__); - return INVALID_OPERATION; -} -status_t Camera3Stream::getInputBufferLocked(camera3_stream_buffer *) { - ALOGE("%s: This type of stream does not support input", __FUNCTION__); - return INVALID_OPERATION; -} -status_t Camera3Stream::returnInputBufferLocked( - const camera3_stream_buffer &) { - ALOGE("%s: This type of stream does not support input", __FUNCTION__); - return INVALID_OPERATION; -} - -void Camera3Stream::addBufferListener( - wp listener) { - Mutex::Autolock l(mLock); - mBufferListenerList.push_back(listener); -} - -void Camera3Stream::removeBufferListener( - const sp& listener) { - Mutex::Autolock l(mLock); - - bool erased = true; - List >::iterator it, end; - for (it = mBufferListenerList.begin(), end = mBufferListenerList.end(); - it != end; - ) { - - if (*it == listener) { - it = mBufferListenerList.erase(it); - erased = true; - } else { - ++it; - } - } - - if (!erased) { - ALOGW("%s: Could not find listener to remove, already removed", - __FUNCTION__); - } -} - -}; // namespace camera3 - -}; // namespace android diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.h b/services/camera/libcameraservice/camera3/Camera3Stream.h deleted file mode 100644 index 69d81e4..0000000 --- 
a/services/camera/libcameraservice/camera3/Camera3Stream.h +++ /dev/null @@ -1,283 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA3_STREAM_H -#define ANDROID_SERVERS_CAMERA3_STREAM_H - -#include -#include -#include -#include -#include - -#include "hardware/camera3.h" - -#include "Camera3StreamBufferListener.h" -#include "Camera3StreamInterface.h" - -namespace android { - -namespace camera3 { - -/** - * A class for managing a single stream of input or output data from the camera - * device. - * - * The stream has an internal state machine to track whether it's - * connected/configured/etc. - * - * States: - * - * STATE_ERROR: A serious error has occurred, stream is unusable. Outstanding - * buffers may still be returned. - * - * STATE_CONSTRUCTED: The stream is ready for configuration, but buffers cannot - * be gotten yet. Not connected to any endpoint, no buffers are registered - * with the HAL. - * - * STATE_IN_CONFIG: Configuration has started, but not yet concluded. During this - * time, the usage, max_buffers, and priv fields of camera3_stream returned by - * startConfiguration() may be modified. - * - * STATE_IN_RE_CONFIG: Configuration has started, and the stream has been - * configured before. Need to track separately from IN_CONFIG to avoid - * re-registering buffers with HAL. - * - * STATE_CONFIGURED: Stream is configured, and has registered buffers with the - * HAL. The stream's getBuffer/returnBuffer work. The priv pointer may still be - * modified. - * - * Transition table: - * - * => STATE_CONSTRUCTED: - * When constructed with valid arguments - * => STATE_ERROR: - * When constructed with invalid arguments - * STATE_CONSTRUCTED => STATE_IN_CONFIG: - * When startConfiguration() is called - * STATE_IN_CONFIG => STATE_CONFIGURED: - * When finishConfiguration() is called - * STATE_IN_CONFIG => STATE_ERROR: - * When finishConfiguration() fails to allocate or register buffers. - * STATE_CONFIGURED => STATE_IN_RE_CONFIG: * - * When startConfiguration() is called again, after making sure stream is - * idle with waitUntilIdle(). - * STATE_IN_RE_CONFIG => STATE_CONFIGURED: - * When finishConfiguration() is called. - * STATE_IN_RE_CONFIG => STATE_ERROR: - * When finishConfiguration() fails to allocate or register buffers. - * STATE_CONFIGURED => STATE_CONSTRUCTED: - * When disconnect() is called after making sure stream is idle with - * waitUntilIdle(). 
- */ -class Camera3Stream : - protected camera3_stream, - public virtual Camera3StreamInterface, - public virtual RefBase { - public: - - virtual ~Camera3Stream(); - - static Camera3Stream* cast(camera3_stream *stream); - static const Camera3Stream* cast(const camera3_stream *stream); - - /** - * Get the stream's ID - */ - int getId() const; - - /** - * Get the stream's dimensions and format - */ - uint32_t getWidth() const; - uint32_t getHeight() const; - int getFormat() const; - - /** - * Start the stream configuration process. Returns a handle to the stream's - * information to be passed into the HAL device's configure_streams call. - * - * Until finishConfiguration() is called, no other methods on the stream may be - * called. The usage and max_buffers fields of camera3_stream may be modified - * between start/finishConfiguration, but may not be changed after that. - * The priv field of camera3_stream may be modified at any time after - * startConfiguration. - * - * Returns NULL in case of error starting configuration. - */ - camera3_stream* startConfiguration(); - - /** - * Check if the stream is mid-configuration (start has been called, but not - * finish). Used for lazy completion of configuration. - */ - bool isConfiguring() const; - - /** - * Completes the stream configuration process. During this call, the stream - * may call the device's register_stream_buffers() method. The stream - * information structure returned by startConfiguration() may no longer be - * modified after this call, but can still be read until the destruction of - * the stream. - * - * Returns: - * OK on a successful configuration - * NO_INIT in case of a serious error from the HAL device - * NO_MEMORY in case of an error registering buffers - * INVALID_OPERATION in case connecting to the consumer failed - */ - status_t finishConfiguration(camera3_device *hal3Device); - - /** - * Fill in the camera3_stream_buffer with the next valid buffer for this - * stream, to hand over to the HAL. - * - * This method may only be called once finishConfiguration has been called. - * For bidirectional streams, this method applies to the output-side - * buffers. - * - */ - status_t getBuffer(camera3_stream_buffer *buffer); - - /** - * Return a buffer to the stream after use by the HAL. - * - * This method may only be called for buffers provided by getBuffer(). - * For bidirectional streams, this method applies to the output-side buffers - */ - status_t returnBuffer(const camera3_stream_buffer &buffer, - nsecs_t timestamp); - - /** - * Fill in the camera3_stream_buffer with the next valid buffer for this - * stream, to hand over to the HAL. - * - * This method may only be called once finishConfiguration has been called. - * For bidirectional streams, this method applies to the input-side - * buffers. - * - */ - status_t getInputBuffer(camera3_stream_buffer *buffer); - - /** - * Return a buffer to the stream after use by the HAL. - * - * This method may only be called for buffers provided by getBuffer(). - * For bidirectional streams, this method applies to the input-side buffers - */ - status_t returnInputBuffer(const camera3_stream_buffer &buffer); - - /** - * Whether any of the stream's buffers are currently in use by the HAL, - * including buffers that have been returned but not yet had their - * release fence signaled. - */ - bool hasOutstandingBuffers() const; - - enum { - TIMEOUT_NEVER = -1 - }; - /** - * Wait until the HAL is done with all of this stream's buffers, including - * signalling all release fences. 
Returns TIMED_OUT if the timeout is exceeded, - * OK on success. Pass in TIMEOUT_NEVER for timeout to indicate an indefinite wait. - */ - virtual status_t waitUntilIdle(nsecs_t timeout) = 0; - - /** - * Disconnect stream from its non-HAL endpoint. After this, - * start/finishConfiguration must be called before the stream can be used - * again. This cannot be called if the stream has outstanding dequeued - * buffers. - */ - status_t disconnect(); - - /** - * Debug dump of the stream's state. - */ - virtual void dump(int fd, const Vector &args) const = 0; - - void addBufferListener( - wp listener); - void removeBufferListener( - const sp& listener); - - protected: - const int mId; - const String8 mName; - // Zero for formats with fixed buffer size for given dimensions. - const size_t mMaxSize; - - enum { - STATE_ERROR, - STATE_CONSTRUCTED, - STATE_IN_CONFIG, - STATE_IN_RECONFIG, - STATE_CONFIGURED - } mState; - - mutable Mutex mLock; - - Camera3Stream(int id, camera3_stream_type type, - uint32_t width, uint32_t height, size_t maxSize, int format); - - /** - * Interface to be implemented by derived classes - */ - - // getBuffer / returnBuffer implementations - - // Since camera3_stream_buffer includes a raw pointer to the stream, - // cast to camera3_stream*, implementations must increment the - // refcount of the stream manually in getBufferLocked, and decrement it in - // returnBufferLocked. - virtual status_t getBufferLocked(camera3_stream_buffer *buffer); - virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer, - nsecs_t timestamp); - virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer); - virtual status_t returnInputBufferLocked( - const camera3_stream_buffer &buffer); - virtual bool hasOutstandingBuffersLocked() const = 0; - // Can return -ENOTCONN when we are already disconnected (not an error) - virtual status_t disconnectLocked() = 0; - - // Configure the buffer queue interface to the other end of the stream, - // after the HAL has provided usage and max_buffers values. After this call, - // the stream must be ready to produce all buffers for registration with - // HAL. - virtual status_t configureQueueLocked() = 0; - - // Get the total number of buffers in the queue - virtual size_t getBufferCountLocked() = 0; - - private: - uint32_t oldUsage; - uint32_t oldMaxBuffers; - - // Gets all buffers from endpoint and registers them with the HAL. - status_t registerBuffersLocked(camera3_device *hal3Device); - - void fireBufferListenersLocked(const camera3_stream_buffer& buffer, - bool acquired, bool output); - List > mBufferListenerList; - -}; // class Camera3Stream - -}; // namespace camera3 - -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h b/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h deleted file mode 100644 index 62ea6c0..0000000 --- a/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H -#define ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H - -#include -#include - -namespace android { - -namespace camera3 { - -class Camera3StreamBufferListener : public virtual RefBase { -public: - - struct BufferInfo { - bool mOutput; // if false then input buffer - Rect mCrop; - uint32_t mTransform; - uint32_t mScalingMode; - int64_t mTimestamp; - uint64_t mFrameNumber; - }; - - // Buffer was acquired by the HAL - virtual void onBufferAcquired(const BufferInfo& bufferInfo) = 0; - // Buffer was released by the HAL - virtual void onBufferReleased(const BufferInfo& bufferInfo) = 0; -}; - -}; //namespace camera3 -}; //namespace android - -#endif diff --git a/services/camera/libcameraservice/camera3/Camera3StreamInterface.h b/services/camera/libcameraservice/camera3/Camera3StreamInterface.h deleted file mode 100644 index 4768536..0000000 --- a/services/camera/libcameraservice/camera3/Camera3StreamInterface.h +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H -#define ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H - -#include -#include "Camera3StreamBufferListener.h" - -struct camera3_stream_buffer; - -namespace android { - -namespace camera3 { - -/** - * An interface for managing a single stream of input and/or output data from - * the camera device. - */ -class Camera3StreamInterface : public virtual RefBase { - public: - /** - * Get the stream's ID - */ - virtual int getId() const = 0; - - /** - * Get the stream's dimensions and format - */ - virtual uint32_t getWidth() const = 0; - virtual uint32_t getHeight() const = 0; - virtual int getFormat() const = 0; - - /** - * Start the stream configuration process. Returns a handle to the stream's - * information to be passed into the HAL device's configure_streams call. - * - * Until finishConfiguration() is called, no other methods on the stream may - * be called. The usage and max_buffers fields of camera3_stream may be - * modified between start/finishConfiguration, but may not be changed after - * that. The priv field of camera3_stream may be modified at any time after - * startConfiguration. - * - * Returns NULL in case of error starting configuration. - */ - virtual camera3_stream* startConfiguration() = 0; - - /** - * Check if the stream is mid-configuration (start has been called, but not - * finish). Used for lazy completion of configuration. - */ - virtual bool isConfiguring() const = 0; - - /** - * Completes the stream configuration process. During this call, the stream - * may call the device's register_stream_buffers() method. The stream - * information structure returned by startConfiguration() may no longer be - * modified after this call, but can still be read until the destruction of - * the stream. 
- * - * Returns: - * OK on a successful configuration - * NO_INIT in case of a serious error from the HAL device - * NO_MEMORY in case of an error registering buffers - * INVALID_OPERATION in case connecting to the consumer failed - */ - virtual status_t finishConfiguration(camera3_device *hal3Device) = 0; - - /** - * Fill in the camera3_stream_buffer with the next valid buffer for this - * stream, to hand over to the HAL. - * - * This method may only be called once finishConfiguration has been called. - * For bidirectional streams, this method applies to the output-side - * buffers. - * - */ - virtual status_t getBuffer(camera3_stream_buffer *buffer) = 0; - - /** - * Return a buffer to the stream after use by the HAL. - * - * This method may only be called for buffers provided by getBuffer(). - * For bidirectional streams, this method applies to the output-side buffers - */ - virtual status_t returnBuffer(const camera3_stream_buffer &buffer, - nsecs_t timestamp) = 0; - - /** - * Fill in the camera3_stream_buffer with the next valid buffer for this - * stream, to hand over to the HAL. - * - * This method may only be called once finishConfiguration has been called. - * For bidirectional streams, this method applies to the input-side - * buffers. - * - */ - virtual status_t getInputBuffer(camera3_stream_buffer *buffer) = 0; - - /** - * Return a buffer to the stream after use by the HAL. - * - * This method may only be called for buffers provided by getBuffer(). - * For bidirectional streams, this method applies to the input-side buffers - */ - virtual status_t returnInputBuffer(const camera3_stream_buffer &buffer) = 0; - - /** - * Whether any of the stream's buffers are currently in use by the HAL, - * including buffers that have been returned but not yet had their - * release fence signaled. - */ - virtual bool hasOutstandingBuffers() const = 0; - - enum { - TIMEOUT_NEVER = -1 - }; - /** - * Wait until the HAL is done with all of this stream's buffers, including - * signalling all release fences. Returns TIMED_OUT if the timeout is - * exceeded, OK on success. Pass in TIMEOUT_NEVER for timeout to indicate - * an indefinite wait. - */ - virtual status_t waitUntilIdle(nsecs_t timeout) = 0; - - /** - * Disconnect stream from its non-HAL endpoint. After this, - * start/finishConfiguration must be called before the stream can be used - * again. This cannot be called if the stream has outstanding dequeued - * buffers. - */ - virtual status_t disconnect() = 0; - - /** - * Debug dump of the stream's state. - */ - virtual void dump(int fd, const Vector &args) const = 0; - - virtual void addBufferListener( - wp listener) = 0; - virtual void removeBufferListener( - const sp& listener) = 0; -}; - -} // namespace camera3 - -} // namespace android - -#endif diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp deleted file mode 100644 index 8790c8c..0000000 --- a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp +++ /dev/null @@ -1,328 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "Camera3-ZslStream" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include -#include -#include "Camera3ZslStream.h" - -typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem; - -namespace android { - -namespace camera3 { - -namespace { -struct TimestampFinder : public RingBufferConsumer::RingBufferComparator { - typedef RingBufferConsumer::BufferInfo BufferInfo; - - enum { - SELECT_I1 = -1, - SELECT_I2 = 1, - SELECT_NEITHER = 0, - }; - - TimestampFinder(nsecs_t timestamp) : mTimestamp(timestamp) {} - ~TimestampFinder() {} - - template - static void swap(T& a, T& b) { - T tmp = a; - a = b; - b = tmp; - } - - /** - * Try to find the best candidate for a ZSL buffer. - * Match priority from best to worst: - * 1) Timestamps match. - * 2) Timestamp is closest to the needle (and lower). - * 3) Timestamp is closest to the needle (and higher). - * - */ - virtual int compare(const BufferInfo *i1, - const BufferInfo *i2) const { - // Try to select non-null object first. - if (i1 == NULL) { - return SELECT_I2; - } else if (i2 == NULL) { - return SELECT_I1; - } - - // Best result: timestamp is identical - if (i1->mTimestamp == mTimestamp) { - return SELECT_I1; - } else if (i2->mTimestamp == mTimestamp) { - return SELECT_I2; - } - - const BufferInfo* infoPtrs[2] = { - i1, - i2 - }; - int infoSelectors[2] = { - SELECT_I1, - SELECT_I2 - }; - - // Order i1,i2 so that always i1.timestamp < i2.timestamp - if (i1->mTimestamp > i2->mTimestamp) { - swap(infoPtrs[0], infoPtrs[1]); - swap(infoSelectors[0], infoSelectors[1]); - } - - // Second best: closest (lower) timestamp - if (infoPtrs[1]->mTimestamp < mTimestamp) { - return infoSelectors[1]; - } else if (infoPtrs[0]->mTimestamp < mTimestamp) { - return infoSelectors[0]; - } - - // Worst: closest (higher) timestamp - return infoSelectors[0]; - - /** - * The above cases should cover all the possibilities, - * and we get an 'empty' result only if the ring buffer - * was empty itself - */ - } - - const nsecs_t mTimestamp; -}; // struct TimestampFinder -} // namespace anonymous - -Camera3ZslStream::Camera3ZslStream(int id, uint32_t width, uint32_t height, - int depth) : - Camera3OutputStream(id, CAMERA3_STREAM_BIDIRECTIONAL, - width, height, - HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), - mDepth(depth), - mProducer(new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL, - depth)) { - - mConsumer = new Surface(mProducer->getProducerInterface()); -} - -Camera3ZslStream::~Camera3ZslStream() { -} - -status_t Camera3ZslStream::getInputBufferLocked(camera3_stream_buffer *buffer) { - ATRACE_CALL(); - - status_t res; - - // TODO: potentially register from inputBufferLocked - // this should be ok, registerBuffersLocked only calls getBuffer for now - // register in output mode instead of input mode for ZSL streams. 
- if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) { - ALOGE("%s: Stream %d: Buffer registration for input streams" - " not implemented (state %d)", - __FUNCTION__, mId, mState); - return INVALID_OPERATION; - } - - if ((res = getBufferPreconditionCheckLocked()) != OK) { - return res; - } - - ANativeWindowBuffer* anb; - int fenceFd; - - assert(mProducer != 0); - - sp bufferItem; - { - List >::iterator it, end; - it = mInputBufferQueue.begin(); - end = mInputBufferQueue.end(); - - // Need to call enqueueInputBufferByTimestamp as a prerequisite - if (it == end) { - ALOGE("%s: Stream %d: No input buffer was queued", - __FUNCTION__, mId); - return INVALID_OPERATION; - } - bufferItem = *it; - mInputBufferQueue.erase(it); - } - - anb = bufferItem->getBufferItem().mGraphicBuffer->getNativeBuffer(); - assert(anb != NULL); - fenceFd = bufferItem->getBufferItem().mFence->dup(); - - /** - * FenceFD now owned by HAL except in case of error, - * in which case we reassign it to acquire_fence - */ - handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd, - /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK); - - mBuffersInFlight.push_back(bufferItem); - - return OK; -} - -status_t Camera3ZslStream::returnBufferCheckedLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp, - bool output, - /*out*/ - sp *releaseFenceOut) { - - if (output) { - // Output stream path - return Camera3OutputStream::returnBufferCheckedLocked(buffer, - timestamp, - output, - releaseFenceOut); - } - - /** - * Input stream path - */ - bool bufferFound = false; - sp bufferItem; - { - // Find the buffer we are returning - Vector >::iterator it, end; - for (it = mBuffersInFlight.begin(), end = mBuffersInFlight.end(); - it != end; - ++it) { - - const sp& tmp = *it; - ANativeWindowBuffer *anb = - tmp->getBufferItem().mGraphicBuffer->getNativeBuffer(); - if (anb != NULL && &(anb->handle) == buffer.buffer) { - bufferFound = true; - bufferItem = tmp; - mBuffersInFlight.erase(it); - break; - } - } - } - if (!bufferFound) { - ALOGE("%s: Stream %d: Can't return buffer that wasn't sent to HAL", - __FUNCTION__, mId); - return INVALID_OPERATION; - } - - int releaseFenceFd = buffer.release_fence; - - if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { - if (buffer.release_fence != -1) { - ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when " - "there is an error", __FUNCTION__, mId, buffer.release_fence); - close(buffer.release_fence); - } - - /** - * Reassign release fence as the acquire fence incase of error - */ - releaseFenceFd = buffer.acquire_fence; - } - - /** - * Unconditionally return buffer to the buffer queue. 
- * - Fwk takes over the release_fence ownership - */ - sp releaseFence = new Fence(releaseFenceFd); - bufferItem->getBufferItem().mFence = releaseFence; - bufferItem.clear(); // dropping last reference unpins buffer - - *releaseFenceOut = releaseFence; - - return OK; -} - -status_t Camera3ZslStream::returnInputBufferLocked( - const camera3_stream_buffer &buffer) { - ATRACE_CALL(); - - status_t res = returnAnyBufferLocked(buffer, /*timestamp*/0, - /*output*/false); - - return res; -} - -void Camera3ZslStream::dump(int fd, const Vector &args) const { - (void) args; - - String8 lines; - lines.appendFormat(" Stream[%d]: ZSL\n", mId); - write(fd, lines.string(), lines.size()); - - Camera3IOStreamBase::dump(fd, args); - - lines = String8(); - lines.appendFormat(" Input buffers pending: %d, in flight %d\n", - mInputBufferQueue.size(), mBuffersInFlight.size()); - write(fd, lines.string(), lines.size()); -} - -status_t Camera3ZslStream::enqueueInputBufferByTimestamp( - nsecs_t timestamp, - nsecs_t* actualTimestamp) { - - Mutex::Autolock l(mLock); - - TimestampFinder timestampFinder = TimestampFinder(timestamp); - - sp pinnedBuffer = - mProducer->pinSelectedBuffer(timestampFinder, - /*waitForFence*/false); - - if (pinnedBuffer == 0) { - ALOGE("%s: No ZSL buffers were available yet", __FUNCTION__); - return NO_BUFFER_AVAILABLE; - } - - nsecs_t actual = pinnedBuffer->getBufferItem().mTimestamp; - - if (actual != timestamp) { - ALOGW("%s: ZSL buffer candidate search didn't find an exact match --" - " requested timestamp = %lld, actual timestamp = %lld", - __FUNCTION__, timestamp, actual); - } - - mInputBufferQueue.push_back(pinnedBuffer); - - if (actualTimestamp != NULL) { - *actualTimestamp = actual; - } - - return OK; -} - -status_t Camera3ZslStream::clearInputRingBuffer() { - Mutex::Autolock l(mLock); - - mInputBufferQueue.clear(); - - return mProducer->clear(); -} - -status_t Camera3ZslStream::setTransform(int /*transform*/) { - ALOGV("%s: Not implemented", __FUNCTION__); - return INVALID_OPERATION; -} - -}; // namespace camera3 - -}; // namespace android diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.h b/services/camera/libcameraservice/camera3/Camera3ZslStream.h deleted file mode 100644 index c7f4490..0000000 --- a/services/camera/libcameraservice/camera3/Camera3ZslStream.h +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_SERVERS_CAMERA3_ZSL_STREAM_H -#define ANDROID_SERVERS_CAMERA3_ZSL_STREAM_H - -#include -#include -#include - -#include "Camera3OutputStream.h" - -namespace android { - -namespace camera3 { - -/** - * A class for managing a single opaque ZSL stream to/from the camera device. - * This acts as a bidirectional stream at the HAL layer, caching and discarding - * most output buffers, and when directed, pushes a buffer back to the HAL for - * processing. 
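As a usage sketch (not part of the patch): the matching rules implemented by TimestampFinder above reach clients through enqueueInputBufferByTimestamp(), which must be called before the input side of the stream can hand a buffer to the HAL. The helper name and its caller below are hypothetical.

// Pin the ZSL candidate closest to a shutter timestamp, then let the normal
// getInputBuffer() path deliver it to the HAL as the reprocess input.
status_t pushZslCandidate(const sp<Camera3ZslStream>& zslStream,
                          nsecs_t shutterTimestamp) {
    nsecs_t actual = 0;
    // Selection priority (see TimestampFinder::compare above): exact match,
    // then the closest lower timestamp, then the closest higher one.
    status_t res = zslStream->enqueueInputBufferByTimestamp(shutterTimestamp,
                                                            &actual);
    if (res == Camera3ZslStream::NO_BUFFER_AVAILABLE) {
        return res;   // ring buffer empty; nothing to reprocess yet
    }
    if (res == OK && actual != shutterTimestamp) {
        ALOGW("Reprocessing a near match: wanted %lld, got %lld",
              shutterTimestamp, actual);
    }
    return res;
}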
- */ -class Camera3ZslStream : - public Camera3OutputStream { - public: - /** - * Set up a ZSL stream of a given resolution. Depth is the number of buffers - * cached within the stream that can be retrieved for input. - */ - Camera3ZslStream(int id, uint32_t width, uint32_t height, int depth); - ~Camera3ZslStream(); - - virtual void dump(int fd, const Vector &args) const; - - enum { NO_BUFFER_AVAILABLE = BufferQueue::NO_BUFFER_AVAILABLE }; - - /** - * Locate a buffer matching this timestamp in the RingBufferConsumer, - * and mark it to be queued at the next getInputBufferLocked invocation. - * - * Errors: Returns NO_BUFFER_AVAILABLE if we could not find a match. - * - */ - status_t enqueueInputBufferByTimestamp(nsecs_t timestamp, - nsecs_t* actualTimestamp); - - /** - * Clears the buffers that can be used by enqueueInputBufferByTimestamp - */ - status_t clearInputRingBuffer(); - - protected: - - /** - * Camera3OutputStreamInterface implementation - */ - status_t setTransform(int transform); - - private: - - int mDepth; - // Input buffers pending to be queued into HAL - List > mInputBufferQueue; - sp mProducer; - - // Input buffers in flight to HAL - Vector > mBuffersInFlight; - - /** - * Camera3Stream interface - */ - - // getInputBuffer/returnInputBuffer operate the input stream side of the - // ZslStream. - virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer); - virtual status_t returnInputBufferLocked( - const camera3_stream_buffer &buffer); - - // Actual body to return either input or output buffers - virtual status_t returnBufferCheckedLocked( - const camera3_stream_buffer &buffer, - nsecs_t timestamp, - bool output, - /*out*/ - sp *releaseFenceOut); -}; // class Camera3ZslStream - -}; // namespace camera3 - -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp new file mode 100644 index 0000000..060e2a2 --- /dev/null +++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp @@ -0,0 +1,333 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera2ClientBase" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include + +#include +#include +#include + +#include "common/Camera2ClientBase.h" + +#include "api2/CameraDeviceClient.h" + +#include "CameraDeviceFactory.h" + +namespace android { +using namespace camera2; + +static int getCallingPid() { + return IPCThreadState::self()->getCallingPid(); +} + +// Interface used by CameraService + +template +Camera2ClientBase::Camera2ClientBase( + const sp& cameraService, + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid): + TClientBase(cameraService, remoteCallback, clientPackageName, + cameraId, cameraFacing, clientPid, clientUid, servicePid), + mSharedCameraCallbacks(remoteCallback) +{ + ALOGI("Camera %d: Opened", cameraId); + + mDevice = CameraDeviceFactory::createDevice(cameraId); + LOG_ALWAYS_FATAL_IF(mDevice == 0, "Device should never be NULL here."); +} + +template +status_t Camera2ClientBase::checkPid(const char* checkLocation) + const { + + int callingPid = getCallingPid(); + if (callingPid == TClientBase::mClientPid) return NO_ERROR; + + ALOGE("%s: attempt to use a locked camera from a different process" + " (old pid %d, new pid %d)", checkLocation, TClientBase::mClientPid, callingPid); + return PERMISSION_DENIED; +} + +template +status_t Camera2ClientBase::initialize(camera_module_t *module) { + ATRACE_CALL(); + ALOGV("%s: Initializing client for camera %d", __FUNCTION__, + TClientBase::mCameraId); + status_t res; + + // Verify ops permissions + res = TClientBase::startCameraOps(); + if (res != OK) { + return res; + } + + if (mDevice == NULL) { + ALOGE("%s: Camera %d: No device connected", + __FUNCTION__, TClientBase::mCameraId); + return NO_INIT; + } + + res = mDevice->initialize(module); + if (res != OK) { + ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", + __FUNCTION__, TClientBase::mCameraId, strerror(-res), res); + return NO_INIT; + } + + res = mDevice->setNotifyCallback(this); + + return OK; +} + +template +Camera2ClientBase::~Camera2ClientBase() { + ATRACE_CALL(); + + TClientBase::mDestructionStarted = true; + + TClientBase::finishCameraOps(); + + disconnect(); + + ALOGI("Closed Camera %d", TClientBase::mCameraId); +} + +template +status_t Camera2ClientBase::dump(int fd, + const Vector& args) { + String8 result; + result.appendFormat("Camera2ClientBase[%d] (%p) PID: %d, dump:\n", + TClientBase::mCameraId, + TClientBase::getRemoteCallback()->asBinder().get(), + TClientBase::mClientPid); + result.append(" State: "); + + write(fd, result.string(), result.size()); + // TODO: print dynamic/request section from most recent requests + + return dumpDevice(fd, args); +} + +template +status_t Camera2ClientBase::dumpDevice( + int fd, + const Vector& args) { + String8 result; + + result = " Device dump:\n"; + write(fd, result.string(), result.size()); + + if (!mDevice.get()) { + result = " *** Device is detached\n"; + write(fd, result.string(), result.size()); + return NO_ERROR; + } + + status_t res = mDevice->dump(fd, args); + if (res != OK) { + result = String8::format(" Error dumping device: %s (%d)", + strerror(-res), res); + write(fd, result.string(), result.size()); + } + + return NO_ERROR; +} + +// ICameraClient2BaseUser interface + + +template +void Camera2ClientBase::disconnect() { + ATRACE_CALL(); + Mutex::Autolock icl(mBinderSerializationLock); + + // Allow both client and the media server to disconnect at 
all times + int callingPid = getCallingPid(); + if (callingPid != TClientBase::mClientPid && + callingPid != TClientBase::mServicePid) return; + + ALOGV("Camera %d: Shutting down", TClientBase::mCameraId); + + detachDevice(); + + CameraService::BasicClient::disconnect(); + + ALOGV("Camera %d: Shut down complete complete", TClientBase::mCameraId); +} + +template +void Camera2ClientBase::detachDevice() { + if (mDevice == 0) return; + mDevice->disconnect(); + + mDevice.clear(); + + ALOGV("Camera %d: Detach complete", TClientBase::mCameraId); +} + +template +status_t Camera2ClientBase::connect( + const sp& client) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock icl(mBinderSerializationLock); + + if (TClientBase::mClientPid != 0 && + getCallingPid() != TClientBase::mClientPid) { + + ALOGE("%s: Camera %d: Connection attempt from pid %d; " + "current locked to pid %d", + __FUNCTION__, + TClientBase::mCameraId, + getCallingPid(), + TClientBase::mClientPid); + return BAD_VALUE; + } + + TClientBase::mClientPid = getCallingPid(); + + TClientBase::mRemoteCallback = client; + mSharedCameraCallbacks = client; + + return OK; +} + +/** Device-related methods */ + +template +void Camera2ClientBase::notifyError(int errorCode, int arg1, + int arg2) { + ALOGE("Error condition %d reported by HAL, arguments %d, %d", errorCode, + arg1, arg2); +} + +template +void Camera2ClientBase::notifyShutter(int frameNumber, + nsecs_t timestamp) { + (void)frameNumber; + (void)timestamp; + + ALOGV("%s: Shutter notification for frame %d at time %lld", __FUNCTION__, + frameNumber, timestamp); +} + +template +void Camera2ClientBase::notifyAutoFocus(uint8_t newState, + int triggerId) { + (void)newState; + (void)triggerId; + + ALOGV("%s: Autofocus state now %d, last trigger %d", + __FUNCTION__, newState, triggerId); + + typename SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE, 1, 0); + } + if (l.mRemoteCallback != 0) { + l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, 1, 0); + } +} + +template +void Camera2ClientBase::notifyAutoExposure(uint8_t newState, + int triggerId) { + (void)newState; + (void)triggerId; + + ALOGV("%s: Autoexposure state now %d, last trigger %d", + __FUNCTION__, newState, triggerId); +} + +template +void Camera2ClientBase::notifyAutoWhitebalance(uint8_t newState, + int triggerId) { + (void)newState; + (void)triggerId; + + ALOGV("%s: Auto-whitebalance state now %d, last trigger %d", + __FUNCTION__, newState, triggerId); +} + +template +int Camera2ClientBase::getCameraId() const { + return TClientBase::mCameraId; +} + +template +const sp& Camera2ClientBase::getCameraDevice() { + return mDevice; +} + +template +const sp& Camera2ClientBase::getCameraService() { + return TClientBase::mCameraService; +} + +template +Camera2ClientBase::SharedCameraCallbacks::Lock::Lock( + SharedCameraCallbacks &client) : + + mRemoteCallback(client.mRemoteCallback), + mSharedClient(client) { + + mSharedClient.mRemoteCallbackLock.lock(); +} + +template +Camera2ClientBase::SharedCameraCallbacks::Lock::~Lock() { + mSharedClient.mRemoteCallbackLock.unlock(); +} + +template +Camera2ClientBase::SharedCameraCallbacks::SharedCameraCallbacks( + const sp&client) : + + mRemoteCallback(client) { +} + +template +typename Camera2ClientBase::SharedCameraCallbacks& +Camera2ClientBase::SharedCameraCallbacks::operator=( + const sp&client) { + + Mutex::Autolock l(mRemoteCallbackLock); + mRemoteCallback = client; + return *this; +} + 
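The SharedCameraCallbacks helper defined above exists so that notifications coming from the device never race with connect()/disconnect() swapping out the remote callback. A minimal usage sketch follows, mirroring notifyAutoFocus() above; notifySomethingInteresting is a hypothetical subclass method and would need a matching declaration in the class.

// Hypothetical notifier showing the Lock pattern; not part of this patch.
template <typename TClientBase>
void Camera2ClientBase<TClientBase>::notifySomethingInteresting() {
    // The Lock constructor takes mRemoteCallbackLock, so the callback pointer
    // cannot be replaced or cleared while 'l' is alive.
    typename SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
    if (l.mRemoteCallback != 0) {
        l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, /*ext1*/ 1, /*ext2*/ 0);
    }
}   // ~Lock releases mRemoteCallbackLock here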
+template +void Camera2ClientBase::SharedCameraCallbacks::clear() { + Mutex::Autolock l(mRemoteCallbackLock); + mRemoteCallback.clear(); +} + +template class Camera2ClientBase; +template class Camera2ClientBase; +template class Camera2ClientBase; + +} // namespace android diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h new file mode 100644 index 0000000..d23197c --- /dev/null +++ b/services/camera/libcameraservice/common/Camera2ClientBase.h @@ -0,0 +1,133 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_BASE_H +#define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_BASE_H + +#include "common/CameraDeviceBase.h" + +namespace android { + +class IMemory; + +class CameraService; + +template +class Camera2ClientBase : + public TClientBase, + public CameraDeviceBase::NotificationListener +{ +public: + typedef typename TClientBase::TCamCallbacks TCamCallbacks; + + /** + * Base binder interface (see ICamera/IProCameraUser for details) + */ + virtual status_t connect(const sp& callbacks); + virtual void disconnect(); + + /** + * Interface used by CameraService + */ + + // TODO: too many params, move into a ClientArgs + Camera2ClientBase(const sp& cameraService, + const sp& remoteCallback, + const String16& clientPackageName, + int cameraId, + int cameraFacing, + int clientPid, + uid_t clientUid, + int servicePid); + virtual ~Camera2ClientBase(); + + virtual status_t initialize(camera_module_t *module); + virtual status_t dump(int fd, const Vector& args); + + /** + * CameraDeviceBase::NotificationListener implementation + */ + + virtual void notifyError(int errorCode, int arg1, int arg2); + virtual void notifyShutter(int frameNumber, nsecs_t timestamp); + virtual void notifyAutoFocus(uint8_t newState, int triggerId); + virtual void notifyAutoExposure(uint8_t newState, int triggerId); + virtual void notifyAutoWhitebalance(uint8_t newState, + int triggerId); + + + int getCameraId() const; + const sp& + getCameraDevice(); + const sp& + getCameraService(); + + /** + * Interface used by independent components of CameraClient2Base. + */ + + // Simple class to ensure that access to TCamCallbacks is serialized + // by requiring mRemoteCallbackLock to be locked before access to + // mRemoteCallback is possible. 
+ class SharedCameraCallbacks { + public: + class Lock { + public: + Lock(SharedCameraCallbacks &client); + ~Lock(); + sp &mRemoteCallback; + private: + SharedCameraCallbacks &mSharedClient; + }; + SharedCameraCallbacks(const sp& client); + SharedCameraCallbacks& operator=(const sp& client); + void clear(); + private: + sp mRemoteCallback; + mutable Mutex mRemoteCallbackLock; + } mSharedCameraCallbacks; + +protected: + + virtual sp asBinderWrapper() { + return IInterface::asBinder(); + } + + virtual status_t dumpDevice(int fd, const Vector& args); + + /** Binder client interface-related private members */ + + // Mutex that must be locked by methods implementing the binder client + // interface. Ensures serialization between incoming client calls. + // All methods in this class hierarchy that append 'L' to the name assume + // that mBinderSerializationLock is locked when they're called + mutable Mutex mBinderSerializationLock; + + /** CameraDeviceBase instance wrapping HAL2+ entry */ + + sp mDevice; + + /** Utility members */ + + // Verify that caller is the owner of the camera + status_t checkPid(const char *checkLocation) const; + + virtual void detachDevice(); +}; + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.cpp b/services/camera/libcameraservice/common/CameraDeviceBase.cpp new file mode 100644 index 0000000..6c4e87f --- /dev/null +++ b/services/camera/libcameraservice/common/CameraDeviceBase.cpp @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "CameraDeviceBase.h" + +namespace android { + +/** + * Base class destructors + */ +CameraDeviceBase::~CameraDeviceBase() { +} + +CameraDeviceBase::NotificationListener::~NotificationListener() { +} + +} // namespace android diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h new file mode 100644 index 0000000..aa92bec --- /dev/null +++ b/services/camera/libcameraservice/common/CameraDeviceBase.h @@ -0,0 +1,216 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERADEVICEBASE_H +#define ANDROID_SERVERS_CAMERA_CAMERADEVICEBASE_H + +#include +#include +#include +#include +#include + +#include "hardware/camera2.h" +#include "camera/CameraMetadata.h" + +namespace android { + +/** + * Base interface for version >= 2 camera device classes, which interface to + * camera HAL device versions >= 2. + */ +class CameraDeviceBase : public virtual RefBase { + public: + virtual ~CameraDeviceBase(); + + /** + * The device's camera ID + */ + virtual int getId() const = 0; + + virtual status_t initialize(camera_module_t *module) = 0; + virtual status_t disconnect() = 0; + + virtual status_t dump(int fd, const Vector& args) = 0; + + /** + * The device's static characteristics metadata buffer + */ + virtual const CameraMetadata& info() const = 0; + + /** + * Submit request for capture. The CameraDevice takes ownership of the + * passed-in buffer. + */ + virtual status_t capture(CameraMetadata &request) = 0; + + /** + * Submit request for streaming. The CameraDevice makes a copy of the + * passed-in buffer and the caller retains ownership. + */ + virtual status_t setStreamingRequest(const CameraMetadata &request) = 0; + + /** + * Clear the streaming request slot. + */ + virtual status_t clearStreamingRequest() = 0; + + /** + * Wait until a request with the given ID has been dequeued by the + * HAL. Returns TIMED_OUT if the timeout duration is reached. Returns + * immediately if the latest request received by the HAL has this id. + */ + virtual status_t waitUntilRequestReceived(int32_t requestId, + nsecs_t timeout) = 0; + + /** + * Create an output stream of the requested size and format. + * + * If format is CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, then the HAL device selects + * an appropriate format; it can be queried with getStreamInfo. + * + * If format is HAL_PIXEL_FORMAT_COMPRESSED, the size parameter must be + * equal to the size in bytes of the buffers to allocate for the stream. For + * other formats, the size parameter is ignored. + */ + virtual status_t createStream(sp consumer, + uint32_t width, uint32_t height, int format, size_t size, + int *id) = 0; + + /** + * Create an input reprocess stream that uses buffers from an existing + * output stream. + */ + virtual status_t createReprocessStreamFromStream(int outputId, int *id) = 0; + + /** + * Get information about a given stream. + */ + virtual status_t getStreamInfo(int id, + uint32_t *width, uint32_t *height, uint32_t *format) = 0; + + /** + * Set stream gralloc buffer transform + */ + virtual status_t setStreamTransform(int id, int transform) = 0; + + /** + * Delete stream. Must not be called if there are requests in flight which + * reference that stream. + */ + virtual status_t deleteStream(int id) = 0; + + /** + * Delete reprocess stream. Must not be called if there are requests in + * flight which reference that stream. + */ + virtual status_t deleteReprocessStream(int id) = 0; + + /** + * Create a metadata buffer with fields that the HAL device believes are + * best for the given use case + */ + virtual status_t createDefaultRequest(int templateId, + CameraMetadata *request) = 0; + + /** + * Wait until all requests have been processed. Returns INVALID_OPERATION if + * the streaming slot is not empty, or TIMED_OUT if the requests haven't + * finished processing in 10 seconds. 
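The interface above is easiest to read as the order in which a client drives it. The sketch below is not part of the patch: startRepeatingPreview is a hypothetical helper, the consumer is assumed to be an ANativeWindow (for example a Surface), and CAMERA2_TEMPLATE_PREVIEW is assumed to come from the camera2 HAL headers alongside CAMERA2_HAL_PIXEL_FORMAT_OPAQUE.

// Illustrative request flow against CameraDeviceBase (hypothetical caller).
status_t startRepeatingPreview(const sp<CameraDeviceBase>& device,
                               const sp<ANativeWindow>& consumer,
                               uint32_t width, uint32_t height) {
    int streamId = -1;
    status_t res = device->createStream(consumer, width, height,
            CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, /*size*/ 0, &streamId);
    if (res != OK) return res;

    CameraMetadata request;
    res = device->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, &request);
    if (res != OK) return res;
    // Real clients also set android.request.outputStreams on 'request' so that
    // it targets streamId; omitted here for brevity.

    res = device->setStreamingRequest(request);   // device copies the metadata
    if (res != OK) return res;

    // Frame metadata can then be drained with:
    //   device->waitForNextFrame(timeout);  device->getNextFrame(&frame);
    return OK;
}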
+ */ + virtual status_t waitUntilDrained() = 0; + + /** + * Abstract class for HAL notification listeners + */ + class NotificationListener { + public: + // Refer to the Camera2 HAL definition for notification definitions + virtual void notifyError(int errorCode, int arg1, int arg2) = 0; + virtual void notifyShutter(int frameNumber, nsecs_t timestamp) = 0; + virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0; + virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0; + virtual void notifyAutoWhitebalance(uint8_t newState, + int triggerId) = 0; + protected: + virtual ~NotificationListener(); + }; + + /** + * Connect HAL notifications to a listener. Overwrites previous + * listener. Set to NULL to stop receiving notifications. + */ + virtual status_t setNotifyCallback(NotificationListener *listener) = 0; + + /** + * Whether the device supports calling notifyAutofocus, notifyAutoExposure, + * and notifyAutoWhitebalance; if this returns false, the client must + * synthesize these notifications from received frame metadata. + */ + virtual bool willNotify3A() = 0; + + /** + * Wait for a new frame to be produced, with timeout in nanoseconds. + * Returns TIMED_OUT when no frame produced within the specified duration + */ + virtual status_t waitForNextFrame(nsecs_t timeout) = 0; + + /** + * Get next metadata frame from the frame queue. Returns NULL if the queue + * is empty; caller takes ownership of the metadata buffer. + */ + virtual status_t getNextFrame(CameraMetadata *frame) = 0; + + /** + * Trigger auto-focus. The latest ID used in a trigger autofocus or cancel + * autofocus call will be returned by the HAL in all subsequent AF + * notifications. + */ + virtual status_t triggerAutofocus(uint32_t id) = 0; + + /** + * Cancel auto-focus. The latest ID used in a trigger autofocus/cancel + * autofocus call will be returned by the HAL in all subsequent AF + * notifications. + */ + virtual status_t triggerCancelAutofocus(uint32_t id) = 0; + + /** + * Trigger pre-capture metering. The latest ID used in a trigger pre-capture + * call will be returned by the HAL in all subsequent AE and AWB + * notifications. + */ + virtual status_t triggerPrecaptureMetering(uint32_t id) = 0; + + /** + * Abstract interface for clients that want to listen to reprocess buffer + * release events + */ + struct BufferReleasedListener : public virtual RefBase { + virtual void onBufferReleased(buffer_handle_t *handle) = 0; + }; + + /** + * Push a buffer to be reprocessed into a reprocessing stream, and + * provide a listener to call once the buffer is returned by the HAL + */ + virtual status_t pushReprocessBuffer(int reprocessStreamId, + buffer_handle_t *buffer, wp listener) = 0; +}; + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp new file mode 100644 index 0000000..10bc6ea --- /dev/null +++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp @@ -0,0 +1,176 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera2-FrameProcessorBase" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include + +#include "common/FrameProcessorBase.h" +#include "common/CameraDeviceBase.h" + +namespace android { +namespace camera2 { + +FrameProcessorBase::FrameProcessorBase(wp device) : + Thread(/*canCallJava*/false), + mDevice(device) { +} + +FrameProcessorBase::~FrameProcessorBase() { + ALOGV("%s: Exit", __FUNCTION__); +} + +status_t FrameProcessorBase::registerListener(int32_t minId, + int32_t maxId, wp listener) { + Mutex::Autolock l(mInputMutex); + ALOGV("%s: Registering listener for frame id range %d - %d", + __FUNCTION__, minId, maxId); + RangeListener rListener = { minId, maxId, listener }; + mRangeListeners.push_back(rListener); + return OK; +} + +status_t FrameProcessorBase::removeListener(int32_t minId, + int32_t maxId, + wp listener) { + Mutex::Autolock l(mInputMutex); + List::iterator item = mRangeListeners.begin(); + while (item != mRangeListeners.end()) { + if (item->minId == minId && + item->maxId == maxId && + item->listener == listener) { + item = mRangeListeners.erase(item); + } else { + item++; + } + } + return OK; +} + +void FrameProcessorBase::dump(int fd, const Vector& /*args*/) { + String8 result(" Latest received frame:\n"); + write(fd, result.string(), result.size()); + mLastFrame.dump(fd, 2, 6); +} + +bool FrameProcessorBase::threadLoop() { + status_t res; + + sp device; + { + device = mDevice.promote(); + if (device == 0) return false; + } + + res = device->waitForNextFrame(kWaitDuration); + if (res == OK) { + processNewFrames(device); + } else if (res != TIMED_OUT) { + ALOGE("FrameProcessorBase: Error waiting for new " + "frames: %s (%d)", strerror(-res), res); + } + + return true; +} + +void FrameProcessorBase::processNewFrames(const sp &device) { + status_t res; + ATRACE_CALL(); + CameraMetadata frame; + + ALOGV("%s: Camera %d: Process new frames", __FUNCTION__, device->getId()); + + while ( (res = device->getNextFrame(&frame)) == OK) { + + camera_metadata_entry_t entry; + + entry = frame.find(ANDROID_REQUEST_FRAME_COUNT); + if (entry.count == 0) { + ALOGE("%s: Camera %d: Error reading frame number", + __FUNCTION__, device->getId()); + break; + } + ATRACE_INT("cam2_frame", entry.data.i32[0]); + + if (!processSingleFrame(frame, device)) { + break; + } + + if (!frame.isEmpty()) { + mLastFrame.acquire(frame); + } + } + if (res != NOT_ENOUGH_DATA) { + ALOGE("%s: Camera %d: Error getting next frame: %s (%d)", + __FUNCTION__, device->getId(), strerror(-res), res); + return; + } + + return; +} + +bool FrameProcessorBase::processSingleFrame(CameraMetadata &frame, + const sp &device) { + ALOGV("%s: Camera %d: Process single frame (is empty? 
%d)", + __FUNCTION__, device->getId(), frame.isEmpty()); + return processListeners(frame, device) == OK; +} + +status_t FrameProcessorBase::processListeners(const CameraMetadata &frame, + const sp &device) { + ATRACE_CALL(); + camera_metadata_ro_entry_t entry; + + entry = frame.find(ANDROID_REQUEST_ID); + if (entry.count == 0) { + ALOGE("%s: Camera %d: Error reading frame id", + __FUNCTION__, device->getId()); + return BAD_VALUE; + } + int32_t frameId = entry.data.i32[0]; + + List > listeners; + { + Mutex::Autolock l(mInputMutex); + + List::iterator item = mRangeListeners.begin(); + while (item != mRangeListeners.end()) { + if (frameId >= item->minId && + frameId < item->maxId) { + sp listener = item->listener.promote(); + if (listener == 0) { + item = mRangeListeners.erase(item); + continue; + } else { + listeners.push_back(listener); + } + } + item++; + } + } + ALOGV("Got %d range listeners out of %d", listeners.size(), mRangeListeners.size()); + List >::iterator item = listeners.begin(); + for (; item != listeners.end(); item++) { + (*item)->onFrameAvailable(frameId, frame); + } + return OK; +} + +}; // namespace camera2 +}; // namespace android diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.h b/services/camera/libcameraservice/common/FrameProcessorBase.h new file mode 100644 index 0000000..1e46beb --- /dev/null +++ b/services/camera/libcameraservice/common/FrameProcessorBase.h @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_PROFRAMEPROCESSOR_H +#define ANDROID_SERVERS_CAMERA_CAMERA2_PROFRAMEPROCESSOR_H + +#include +#include +#include +#include +#include +#include + +namespace android { + +class CameraDeviceBase; + +namespace camera2 { + +/* Output frame metadata processing thread. This thread waits for new + * frames from the device, and analyzes them as necessary. + */ +class FrameProcessorBase: public Thread { + public: + FrameProcessorBase(wp device); + virtual ~FrameProcessorBase(); + + struct FilteredListener: virtual public RefBase { + virtual void onFrameAvailable(int32_t frameId, + const CameraMetadata &frame) = 0; + }; + + // Register a listener for a range of IDs [minId, maxId). 
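As an illustration of the listener mechanism implemented above (not part of the patch): a client derives from FilteredListener, registers it for a half-open request-ID range, and processListeners() dispatches each frame whose ANDROID_REQUEST_ID falls inside that range. The class name and the ID constants below are hypothetical.

// Hypothetical listener that just logs metadata for requests in [kMinId, kMaxId).
class RequestIdLogger : public FrameProcessorBase::FilteredListener {
  public:
    virtual void onFrameAvailable(int32_t frameId, const CameraMetadata& frame) {
        ALOGV("Frame for request id %d has %zu metadata entries",
              frameId, frame.entryCount());
    }
};

// Registration from the owning client (frameProcessor is the running thread):
//   sp<RequestIdLogger> logger = new RequestIdLogger();
//   frameProcessor->registerListener(kMinId, kMaxId, logger);
//   ...
//   frameProcessor->removeListener(kMinId, kMaxId, logger);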
Multiple listeners + // can be listening to the same range + status_t registerListener(int32_t minId, int32_t maxId, + wp listener); + status_t removeListener(int32_t minId, int32_t maxId, + wp listener); + + void dump(int fd, const Vector& args); + protected: + static const nsecs_t kWaitDuration = 10000000; // 10 ms + wp mDevice; + + virtual bool threadLoop(); + + Mutex mInputMutex; + + struct RangeListener { + int32_t minId; + int32_t maxId; + wp listener; + }; + List mRangeListeners; + + void processNewFrames(const sp &device); + + virtual bool processSingleFrame(CameraMetadata &frame, + const sp &device); + + status_t processListeners(const CameraMetadata &frame, + const sp &device); + + CameraMetadata mLastFrame; +}; + + +}; //namespace camera2 +}; //namespace android + +#endif diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h new file mode 100644 index 0000000..87b2807 --- /dev/null +++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.h @@ -0,0 +1,691 @@ +/* + * Copyright (C) 2008 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H +#define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { + +typedef void (*notify_callback)(int32_t msgType, + int32_t ext1, + int32_t ext2, + void* user); + +typedef void (*data_callback)(int32_t msgType, + const sp &dataPtr, + camera_frame_metadata_t *metadata, + void* user); + +typedef void (*data_callback_timestamp)(nsecs_t timestamp, + int32_t msgType, + const sp &dataPtr, + void *user); + +/** + * CameraHardwareInterface.h defines the interface to the + * camera hardware abstraction layer, used for setting and getting + * parameters, live previewing, and taking pictures. It is used for + * HAL devices with version CAMERA_DEVICE_API_VERSION_1_0 only. + * + * It is a referenced counted interface with RefBase as its base class. + * CameraService calls openCameraHardware() to retrieve a strong pointer to the + * instance of this interface and may be called multiple times. The + * following steps describe a typical sequence: + * + * -# After CameraService calls openCameraHardware(), getParameters() and + * setParameters() are used to initialize the camera instance. + * -# startPreview() is called. + * + * Prior to taking a picture, CameraService often calls autofocus(). When auto + * focusing has completed, the camera instance sends a CAMERA_MSG_FOCUS notification, + * which informs the application whether focusing was successful. The camera instance + * only sends this message once and it is up to the application to call autoFocus() + * again if refocusing is desired. + * + * CameraService calls takePicture() to request the camera instance take a + * picture. 
At this point, if a shutter, postview, raw, and/or compressed + * callback is desired, the corresponding message must be enabled. Any memory + * provided in a data callback must be copied if it's needed after returning. + */ + +class CameraHardwareInterface : public virtual RefBase { +public: + CameraHardwareInterface(const char *name) + { + mDevice = 0; + mName = name; + } + + ~CameraHardwareInterface() + { + ALOGI("Destroying camera %s", mName.string()); + if(mDevice) { + int rc = mDevice->common.close(&mDevice->common); + if (rc != OK) + ALOGE("Could not close camera %s: %d", mName.string(), rc); + } + } + + status_t initialize(hw_module_t *module) + { + ALOGI("Opening camera %s", mName.string()); + int rc = module->methods->open(module, mName.string(), + (hw_device_t **)&mDevice); + if (rc != OK) { + ALOGE("Could not open camera %s: %d", mName.string(), rc); + return rc; + } + initHalPreviewWindow(); + return rc; + } + + /** Set the ANativeWindow to which preview frames are sent */ + status_t setPreviewWindow(const sp& buf) + { + ALOGV("%s(%s) buf %p", __FUNCTION__, mName.string(), buf.get()); + + if (mDevice->ops->set_preview_window) { + mPreviewWindow = buf; + mHalPreviewWindow.user = this; + ALOGV("%s &mHalPreviewWindow %p mHalPreviewWindow.user %p", __FUNCTION__, + &mHalPreviewWindow, mHalPreviewWindow.user); + return mDevice->ops->set_preview_window(mDevice, + buf.get() ? &mHalPreviewWindow.nw : 0); + } + return INVALID_OPERATION; + } + + /** Set the notification and data callbacks */ + void setCallbacks(notify_callback notify_cb, + data_callback data_cb, + data_callback_timestamp data_cb_timestamp, + void* user) + { + mNotifyCb = notify_cb; + mDataCb = data_cb; + mDataCbTimestamp = data_cb_timestamp; + mCbUser = user; + + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + + if (mDevice->ops->set_callbacks) { + mDevice->ops->set_callbacks(mDevice, + __notify_cb, + __data_cb, + __data_cb_timestamp, + __get_memory, + this); + } + } + + /** + * The following three functions all take a msgtype, + * which is a bitmask of the messages defined in + * include/ui/Camera.h + */ + + /** + * Enable a message, or set of messages. + */ + void enableMsgType(int32_t msgType) + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->enable_msg_type) + mDevice->ops->enable_msg_type(mDevice, msgType); + } + + /** + * Disable a message, or a set of messages. + * + * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera hal + * should not rely on its client to call releaseRecordingFrame() to release + * video recording frames sent out by the cameral hal before and after the + * disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera hal clients must not + * modify/access any video recording frame after calling + * disableMsgType(CAMERA_MSG_VIDEO_FRAME). + */ + void disableMsgType(int32_t msgType) + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->disable_msg_type) + mDevice->ops->disable_msg_type(mDevice, msgType); + } + + /** + * Query whether a message, or a set of messages, is enabled. + * Note that this is operates as an AND, if any of the messages + * queried are off, this will return false. + */ + int msgTypeEnabled(int32_t msgType) + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->msg_type_enabled) + return mDevice->ops->msg_type_enabled(mDevice, msgType); + return false; + } + + /** + * Start preview mode. 
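The typical bring-up order implied by the class comment and the methods above, as a sketch rather than patch code: bringUpPreview and the three callback stubs are hypothetical, the callback signatures follow the typedefs at the top of this header with sp<IMemory> assumed as the data payload type, and 'module'/'window' are assumed to be valid.

static void onNotify(int32_t msgType, int32_t ext1, int32_t ext2, void* user) { /* ... */ }
static void onData(int32_t msgType, const sp<IMemory>& dataPtr,
                   camera_frame_metadata_t* metadata, void* user) { /* ... */ }
static void onDataTimestamp(nsecs_t timestamp, int32_t msgType,
                            const sp<IMemory>& dataPtr, void* user) { /* ... */ }

status_t bringUpPreview(const sp<CameraHardwareInterface>& hw,
                        hw_module_t* module, const sp<ANativeWindow>& window) {
    status_t res = hw->initialize(module);   // opens the CAMERA_DEVICE_API_VERSION_1_0 device
    if (res != OK) return res;

    hw->setCallbacks(onNotify, onData, onDataTimestamp, /*user*/ NULL);
    hw->enableMsgType(CAMERA_MSG_FOCUS);     // plus whatever else the client needs

    res = hw->setPreviewWindow(window);      // preview frames are queued here
    if (res != OK) return res;
    return hw->startPreview();
}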
+ */ + status_t startPreview() + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->start_preview) + return mDevice->ops->start_preview(mDevice); + return INVALID_OPERATION; + } + + /** + * Stop a previously started preview. + */ + void stopPreview() + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->stop_preview) + mDevice->ops->stop_preview(mDevice); + } + + /** + * Returns true if preview is enabled. + */ + int previewEnabled() + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->preview_enabled) + return mDevice->ops->preview_enabled(mDevice); + return false; + } + + /** + * Request the camera hal to store meta data or real YUV data in + * the video buffers send out via CAMERA_MSG_VIDEO_FRRAME for a + * recording session. If it is not called, the default camera + * hal behavior is to store real YUV data in the video buffers. + * + * This method should be called before startRecording() in order + * to be effective. + * + * If meta data is stored in the video buffers, it is up to the + * receiver of the video buffers to interpret the contents and + * to find the actual frame data with the help of the meta data + * in the buffer. How this is done is outside of the scope of + * this method. + * + * Some camera hal may not support storing meta data in the video + * buffers, but all camera hal should support storing real YUV data + * in the video buffers. If the camera hal does not support storing + * the meta data in the video buffers when it is requested to do + * do, INVALID_OPERATION must be returned. It is very useful for + * the camera hal to pass meta data rather than the actual frame + * data directly to the video encoder, since the amount of the + * uncompressed frame data can be very large if video size is large. + * + * @param enable if true to instruct the camera hal to store + * meta data in the video buffers; false to instruct + * the camera hal to store real YUV data in the video + * buffers. + * + * @return OK on success. + */ + + status_t storeMetaDataInBuffers(int enable) + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->store_meta_data_in_buffers) + return mDevice->ops->store_meta_data_in_buffers(mDevice, enable); + return enable ? INVALID_OPERATION: OK; + } + + /** + * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME + * message is sent with the corresponding frame. Every record frame must be released + * by a cameral hal client via releaseRecordingFrame() before the client calls + * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls + * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's responsibility + * to manage the life-cycle of the video recording frames, and the client must + * not modify/access any video recording frames. + */ + status_t startRecording() + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->start_recording) + return mDevice->ops->start_recording(mDevice); + return INVALID_OPERATION; + } + + /** + * Stop a previously started recording. + */ + void stopRecording() + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->stop_recording) + mDevice->ops->stop_recording(mDevice); + } + + /** + * Returns true if recording is enabled. 
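The recording-frame contract described above, condensed into a sketch (not patch code); startMetadataRecording is hypothetical and assumes preview is already running.

status_t startMetadataRecording(const sp<CameraHardwareInterface>& hw) {
    // Ask the HAL to put metadata rather than real YUV data into video buffers;
    // INVALID_OPERATION means the HAL only supports real YUV frames.
    status_t res = hw->storeMetaDataInBuffers(true);
    if (res != OK && res != INVALID_OPERATION) return res;

    hw->enableMsgType(CAMERA_MSG_VIDEO_FRAME);
    return hw->startRecording();
}

// Every frame delivered through the data_callback_timestamp with
// CAMERA_MSG_VIDEO_FRAME must later be handed back with
// hw->releaseRecordingFrame(dataPtr), and never touched again after
// disableMsgType(CAMERA_MSG_VIDEO_FRAME).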
+ */ + int recordingEnabled() + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->recording_enabled) + return mDevice->ops->recording_enabled(mDevice); + return false; + } + + /** + * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME. + * + * It is camera hal client's responsibility to release video recording + * frames sent out by the camera hal before the camera hal receives + * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives + * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's + * responsibility of managing the life-cycle of the video recording + * frames. + */ + void releaseRecordingFrame(const sp& mem) + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->release_recording_frame) { + ssize_t offset; + size_t size; + sp heap = mem->getMemory(&offset, &size); + void *data = ((uint8_t *)heap->base()) + offset; + return mDevice->ops->release_recording_frame(mDevice, data); + } + } + + /** + * Start auto focus, the notification callback routine is called + * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() + * will be called again if another auto focus is needed. + */ + status_t autoFocus() + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->auto_focus) + return mDevice->ops->auto_focus(mDevice); + return INVALID_OPERATION; + } + + /** + * Cancels auto-focus function. If the auto-focus is still in progress, + * this function will cancel it. Whether the auto-focus is in progress + * or not, this function will return the focus position to the default. + * If the camera does not support auto-focus, this is a no-op. + */ + status_t cancelAutoFocus() + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->cancel_auto_focus) + return mDevice->ops->cancel_auto_focus(mDevice); + return INVALID_OPERATION; + } + + /** + * Take a picture. + */ + status_t takePicture() + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->take_picture) + return mDevice->ops->take_picture(mDevice); + return INVALID_OPERATION; + } + + /** + * Cancel a picture that was started with takePicture. Calling this + * method when no picture is being taken is a no-op. + */ + status_t cancelPicture() + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->cancel_picture) + return mDevice->ops->cancel_picture(mDevice); + return INVALID_OPERATION; + } + + /** + * Set the camera parameters. This returns BAD_VALUE if any parameter is + * invalid or not supported. */ + status_t setParameters(const CameraParameters ¶ms) + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->set_parameters) + return mDevice->ops->set_parameters(mDevice, + params.flatten().string()); + return INVALID_OPERATION; + } + + /** Return the camera parameters. */ + CameraParameters getParameters() const + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + CameraParameters parms; + if (mDevice->ops->get_parameters) { + char *temp = mDevice->ops->get_parameters(mDevice); + String8 str_parms(temp); + if (mDevice->ops->put_parameters) + mDevice->ops->put_parameters(mDevice, temp); + else + free(temp); + parms.unflatten(str_parms); + } + return parms; + } + + /** + * Send command to camera driver. 
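A parameter round trip plus capture kick-off, again as a hypothetical sketch: captureStill and the chosen picture size are illustrative, and CameraParameters::setPictureSize() is assumed from the existing camera1 API.

status_t captureStill(const sp<CameraHardwareInterface>& hw) {
    CameraParameters params = hw->getParameters();   // flatten/unflatten handled inside
    params.setPictureSize(3264, 2448);
    status_t res = hw->setParameters(params);        // BAD_VALUE on unsupported values
    if (res != OK) return res;

    res = hw->autoFocus();        // completion arrives as a CAMERA_MSG_FOCUS notification
    if (res != OK) return res;
    // A real client waits for that notification before continuing.
    return hw->takePicture();     // shutter/raw/jpeg callbacks follow if enabled
}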
+ */ + status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->send_command) + return mDevice->ops->send_command(mDevice, cmd, arg1, arg2); + return INVALID_OPERATION; + } + + /** + * Release the hardware resources owned by this object. Note that this is + * *not* done in the destructor. + */ + void release() { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->release) + mDevice->ops->release(mDevice); + } + + /** + * Dump state of the camera hardware + */ + status_t dump(int fd, const Vector& /*args*/) const + { + ALOGV("%s(%s)", __FUNCTION__, mName.string()); + if (mDevice->ops->dump) + return mDevice->ops->dump(mDevice, fd); + return OK; // It's fine if the HAL doesn't implement dump() + } + +private: + camera_device_t *mDevice; + String8 mName; + + static void __notify_cb(int32_t msg_type, int32_t ext1, + int32_t ext2, void *user) + { + ALOGV("%s", __FUNCTION__); + CameraHardwareInterface *__this = + static_cast(user); + __this->mNotifyCb(msg_type, ext1, ext2, __this->mCbUser); + } + + static void __data_cb(int32_t msg_type, + const camera_memory_t *data, unsigned int index, + camera_frame_metadata_t *metadata, + void *user) + { + ALOGV("%s", __FUNCTION__); + CameraHardwareInterface *__this = + static_cast(user); + sp mem(static_cast(data->handle)); + if (index >= mem->mNumBufs) { + ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__, + index, mem->mNumBufs); + return; + } + __this->mDataCb(msg_type, mem->mBuffers[index], metadata, __this->mCbUser); + } + + static void __data_cb_timestamp(nsecs_t timestamp, int32_t msg_type, + const camera_memory_t *data, unsigned index, + void *user) + { + ALOGV("%s", __FUNCTION__); + CameraHardwareInterface *__this = + static_cast(user); + // Start refcounting the heap object from here on. When the clients + // drop all references, it will be destroyed (as well as the enclosed + // MemoryHeapBase. + sp mem(static_cast(data->handle)); + if (index >= mem->mNumBufs) { + ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__, + index, mem->mNumBufs); + return; + } + __this->mDataCbTimestamp(timestamp, msg_type, mem->mBuffers[index], __this->mCbUser); + } + + // This is a utility class that combines a MemoryHeapBase and a MemoryBase + // in one. Since we tend to use them in a one-to-one relationship, this is + // handy. 
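The static __notify_cb/__data_cb functions above follow a common trampoline pattern: the HAL only understands C function pointers plus a void* cookie, so the cookie carries the CameraHardwareInterface instance and the static function casts it back before dispatching to the registered C++ callback. A minimal, self-contained sketch of that pattern, using hypothetical names in place of the real HAL types, might look like this:

    // Illustrative sketch only; CallbackOwner and notify_cb_t are stand-ins,
    // not the actual camera HAL callback types.
    #include <cstdio>

    typedef void (*notify_cb_t)(int msg, void *user);   // C-style callback as a HAL might declare it

    class CallbackOwner {
    public:
        // Static trampoline matching the C signature; 'user' is the object itself.
        static void trampoline(int msg, void *user) {
            static_cast<CallbackOwner *>(user)->onNotify(msg);
        }
        void onNotify(int msg) { printf("notify: %d\n", msg); }
    };

    int main() {
        CallbackOwner owner;
        notify_cb_t cb = &CallbackOwner::trampoline;
        cb(42, &owner);   // the HAL would invoke the registered callback like this
        return 0;
    }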
+ + class CameraHeapMemory : public RefBase { + public: + CameraHeapMemory(int fd, size_t buf_size, uint_t num_buffers = 1) : + mBufSize(buf_size), + mNumBufs(num_buffers) + { + mHeap = new MemoryHeapBase(fd, buf_size * num_buffers); + commonInitialization(); + } + + CameraHeapMemory(size_t buf_size, uint_t num_buffers = 1) : + mBufSize(buf_size), + mNumBufs(num_buffers) + { + mHeap = new MemoryHeapBase(buf_size * num_buffers); + commonInitialization(); + } + + void commonInitialization() + { + handle.data = mHeap->base(); + handle.size = mBufSize * mNumBufs; + handle.handle = this; + + mBuffers = new sp[mNumBufs]; + for (uint_t i = 0; i < mNumBufs; i++) + mBuffers[i] = new MemoryBase(mHeap, + i * mBufSize, + mBufSize); + + handle.release = __put_memory; + } + + virtual ~CameraHeapMemory() + { + delete [] mBuffers; + } + + size_t mBufSize; + uint_t mNumBufs; + sp mHeap; + sp *mBuffers; + + camera_memory_t handle; + }; + + static camera_memory_t* __get_memory(int fd, size_t buf_size, uint_t num_bufs, + void *user __attribute__((unused))) + { + CameraHeapMemory *mem; + if (fd < 0) + mem = new CameraHeapMemory(buf_size, num_bufs); + else + mem = new CameraHeapMemory(fd, buf_size, num_bufs); + mem->incStrong(mem); + return &mem->handle; + } + + static void __put_memory(camera_memory_t *data) + { + if (!data) + return; + + CameraHeapMemory *mem = static_cast(data->handle); + mem->decStrong(mem); + } + + static ANativeWindow *__to_anw(void *user) + { + CameraHardwareInterface *__this = + reinterpret_cast(user); + return __this->mPreviewWindow.get(); + } +#define anw(n) __to_anw(((struct camera_preview_window *)n)->user) + + static int __dequeue_buffer(struct preview_stream_ops* w, + buffer_handle_t** buffer, int *stride) + { + int rc; + ANativeWindow *a = anw(w); + ANativeWindowBuffer* anb; + rc = native_window_dequeue_buffer_and_wait(a, &anb); + if (!rc) { + *buffer = &anb->handle; + *stride = anb->stride; + } + return rc; + } + +#ifndef container_of +#define container_of(ptr, type, member) ({ \ + const typeof(((type *) 0)->member) *__mptr = (ptr); \ + (type *) ((char *) __mptr - (char *)(&((type *)0)->member)); }) +#endif + + static int __lock_buffer(struct preview_stream_ops* w, + buffer_handle_t* /*buffer*/) + { + ANativeWindow *a = anw(w); + (void)a; + return 0; + } + + static int __enqueue_buffer(struct preview_stream_ops* w, + buffer_handle_t* buffer) + { + ANativeWindow *a = anw(w); + return a->queueBuffer(a, + container_of(buffer, ANativeWindowBuffer, handle), -1); + } + + static int __cancel_buffer(struct preview_stream_ops* w, + buffer_handle_t* buffer) + { + ANativeWindow *a = anw(w); + return a->cancelBuffer(a, + container_of(buffer, ANativeWindowBuffer, handle), -1); + } + + static int __set_buffer_count(struct preview_stream_ops* w, int count) + { + ANativeWindow *a = anw(w); + return native_window_set_buffer_count(a, count); + } + + static int __set_buffers_geometry(struct preview_stream_ops* w, + int width, int height, int format) + { + ANativeWindow *a = anw(w); + return native_window_set_buffers_geometry(a, + width, height, format); + } + + static int __set_crop(struct preview_stream_ops *w, + int left, int top, int right, int bottom) + { + ANativeWindow *a = anw(w); + android_native_rect_t crop; + crop.left = left; + crop.top = top; + crop.right = right; + crop.bottom = bottom; + return native_window_set_crop(a, &crop); + } + + static int __set_timestamp(struct preview_stream_ops *w, + int64_t timestamp) { + ANativeWindow *a = anw(w); + return 
native_window_set_buffers_timestamp(a, timestamp); + } + + static int __set_usage(struct preview_stream_ops* w, int usage) + { + ANativeWindow *a = anw(w); + return native_window_set_usage(a, usage); + } + + static int __set_swap_interval(struct preview_stream_ops *w, int interval) + { + ANativeWindow *a = anw(w); + return a->setSwapInterval(a, interval); + } + + static int __get_min_undequeued_buffer_count( + const struct preview_stream_ops *w, + int *count) + { + ANativeWindow *a = anw(w); + return a->query(a, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, count); + } + + void initHalPreviewWindow() + { + mHalPreviewWindow.nw.cancel_buffer = __cancel_buffer; + mHalPreviewWindow.nw.lock_buffer = __lock_buffer; + mHalPreviewWindow.nw.dequeue_buffer = __dequeue_buffer; + mHalPreviewWindow.nw.enqueue_buffer = __enqueue_buffer; + mHalPreviewWindow.nw.set_buffer_count = __set_buffer_count; + mHalPreviewWindow.nw.set_buffers_geometry = __set_buffers_geometry; + mHalPreviewWindow.nw.set_crop = __set_crop; + mHalPreviewWindow.nw.set_timestamp = __set_timestamp; + mHalPreviewWindow.nw.set_usage = __set_usage; + mHalPreviewWindow.nw.set_swap_interval = __set_swap_interval; + + mHalPreviewWindow.nw.get_min_undequeued_buffer_count = + __get_min_undequeued_buffer_count; + } + + sp mPreviewWindow; + + struct camera_preview_window { + struct preview_stream_ops nw; + void *user; + }; + + struct camera_preview_window mHalPreviewWindow; + + notify_callback mNotifyCb; + data_callback mDataCb; + data_callback_timestamp mDataCbTimestamp; + void *mCbUser; +}; + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp new file mode 100644 index 0000000..710d0e9 --- /dev/null +++ b/services/camera/libcameraservice/device2/Camera2Device.cpp @@ -0,0 +1,1515 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera2-Device" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 +//#define LOG_NNDEBUG 0 // Per-frame verbose logging + +#ifdef LOG_NNDEBUG +#define ALOGVV(...) ALOGV(__VA_ARGS__) +#else +#define ALOGVV(...) 
((void)0) +#endif + +#include +#include +#include +#include "Camera2Device.h" + +namespace android { + +Camera2Device::Camera2Device(int id): + mId(id), + mHal2Device(NULL) +{ + ATRACE_CALL(); + ALOGV("%s: Created device for camera %d", __FUNCTION__, id); +} + +Camera2Device::~Camera2Device() +{ + ATRACE_CALL(); + ALOGV("%s: Tearing down for camera id %d", __FUNCTION__, mId); + disconnect(); +} + +int Camera2Device::getId() const { + return mId; +} + +status_t Camera2Device::initialize(camera_module_t *module) +{ + ATRACE_CALL(); + ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId); + if (mHal2Device != NULL) { + ALOGE("%s: Already initialized!", __FUNCTION__); + return INVALID_OPERATION; + } + + status_t res; + char name[10]; + snprintf(name, sizeof(name), "%d", mId); + + camera2_device_t *device; + + res = module->common.methods->open(&module->common, name, + reinterpret_cast(&device)); + + if (res != OK) { + ALOGE("%s: Could not open camera %d: %s (%d)", __FUNCTION__, + mId, strerror(-res), res); + return res; + } + + if (device->common.version != CAMERA_DEVICE_API_VERSION_2_0) { + ALOGE("%s: Could not open camera %d: " + "Camera device is not version %x, reports %x instead", + __FUNCTION__, mId, CAMERA_DEVICE_API_VERSION_2_0, + device->common.version); + device->common.close(&device->common); + return BAD_VALUE; + } + + camera_info info; + res = module->get_camera_info(mId, &info); + if (res != OK ) return res; + + if (info.device_version != device->common.version) { + ALOGE("%s: HAL reporting mismatched camera_info version (%x)" + " and device version (%x).", __FUNCTION__, + device->common.version, info.device_version); + device->common.close(&device->common); + return BAD_VALUE; + } + + res = mRequestQueue.setConsumerDevice(device); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to connect request queue to device: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + device->common.close(&device->common); + return res; + } + res = mFrameQueue.setProducerDevice(device); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to connect frame queue to device: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + device->common.close(&device->common); + return res; + } + + res = device->ops->get_metadata_vendor_tag_ops(device, &mVendorTagOps); + if (res != OK ) { + ALOGE("%s: Camera %d: Unable to retrieve tag ops from device: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + device->common.close(&device->common); + return res; + } + res = set_camera_metadata_vendor_tag_ops(mVendorTagOps); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to set tag ops: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + device->common.close(&device->common); + return res; + } + res = device->ops->set_notify_callback(device, notificationCallback, + NULL); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to initialize notification callback!", + __FUNCTION__, mId); + device->common.close(&device->common); + return res; + } + + mDeviceInfo = info.static_camera_characteristics; + mHal2Device = device; + + return OK; +} + +status_t Camera2Device::disconnect() { + ATRACE_CALL(); + status_t res = OK; + if (mHal2Device) { + ALOGV("%s: Closing device for camera %d", __FUNCTION__, mId); + + int inProgressCount = mHal2Device->ops->get_in_progress_count(mHal2Device); + if (inProgressCount > 0) { + ALOGW("%s: Closing camera device %d with %d requests in flight!", + __FUNCTION__, mId, inProgressCount); + } + mReprocessStreams.clear(); + mStreams.clear(); + res = 
mHal2Device->common.close(&mHal2Device->common); + if (res != OK) { + ALOGE("%s: Could not close camera %d: %s (%d)", + __FUNCTION__, + mId, strerror(-res), res); + } + mHal2Device = NULL; + ALOGV("%s: Shutdown complete", __FUNCTION__); + } + return res; +} + +status_t Camera2Device::dump(int fd, const Vector& args) { + ATRACE_CALL(); + String8 result; + int detailLevel = 0; + int n = args.size(); + String16 detailOption("-d"); + for (int i = 0; i + 1 < n; i++) { + if (args[i] == detailOption) { + String8 levelStr(args[i+1]); + detailLevel = atoi(levelStr.string()); + } + } + + result.appendFormat(" Camera2Device[%d] dump (detail level %d):\n", + mId, detailLevel); + + if (detailLevel > 0) { + result = " Request queue contents:\n"; + write(fd, result.string(), result.size()); + mRequestQueue.dump(fd, args); + + result = " Frame queue contents:\n"; + write(fd, result.string(), result.size()); + mFrameQueue.dump(fd, args); + } + + result = " Active streams:\n"; + write(fd, result.string(), result.size()); + for (StreamList::iterator s = mStreams.begin(); s != mStreams.end(); s++) { + (*s)->dump(fd, args); + } + + result = " HAL device dump:\n"; + write(fd, result.string(), result.size()); + + status_t res; + res = mHal2Device->ops->dump(mHal2Device, fd); + + return res; +} + +const CameraMetadata& Camera2Device::info() const { + ALOGVV("%s: E", __FUNCTION__); + + return mDeviceInfo; +} + +status_t Camera2Device::capture(CameraMetadata &request) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + + mRequestQueue.enqueue(request.release()); + return OK; +} + + +status_t Camera2Device::setStreamingRequest(const CameraMetadata &request) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + CameraMetadata streamRequest(request); + return mRequestQueue.setStreamSlot(streamRequest.release()); +} + +status_t Camera2Device::clearStreamingRequest() { + ATRACE_CALL(); + return mRequestQueue.setStreamSlot(NULL); +} + +status_t Camera2Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) { + ATRACE_CALL(); + return mRequestQueue.waitForDequeue(requestId, timeout); +} + +status_t Camera2Device::createStream(sp consumer, + uint32_t width, uint32_t height, int format, size_t size, int *id) { + ATRACE_CALL(); + status_t res; + ALOGV("%s: E", __FUNCTION__); + + sp stream = new StreamAdapter(mHal2Device); + + res = stream->connectToDevice(consumer, width, height, format, size); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to create stream (%d x %d, format %x):" + "%s (%d)", + __FUNCTION__, mId, width, height, format, strerror(-res), res); + return res; + } + + *id = stream->getId(); + + mStreams.push_back(stream); + return OK; +} + +status_t Camera2Device::createReprocessStreamFromStream(int outputId, int *id) { + ATRACE_CALL(); + status_t res; + ALOGV("%s: E", __FUNCTION__); + + bool found = false; + StreamList::iterator streamI; + for (streamI = mStreams.begin(); + streamI != mStreams.end(); streamI++) { + if ((*streamI)->getId() == outputId) { + found = true; + break; + } + } + if (!found) { + ALOGE("%s: Camera %d: Output stream %d doesn't exist; can't create " + "reprocess stream from it!", __FUNCTION__, mId, outputId); + return BAD_VALUE; + } + + sp stream = new ReprocessStreamAdapter(mHal2Device); + + res = stream->connectToDevice((*streamI)); + if (res != OK) { + ALOGE("%s: Camera %d: Unable to create reprocessing stream from "\ + "stream %d: %s (%d)", __FUNCTION__, mId, outputId, + strerror(-res), res); + return res; + } + + *id = stream->getId(); + + 
mReprocessStreams.push_back(stream); + return OK; +} + + +status_t Camera2Device::getStreamInfo(int id, + uint32_t *width, uint32_t *height, uint32_t *format) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + bool found = false; + StreamList::iterator streamI; + for (streamI = mStreams.begin(); + streamI != mStreams.end(); streamI++) { + if ((*streamI)->getId() == id) { + found = true; + break; + } + } + if (!found) { + ALOGE("%s: Camera %d: Stream %d does not exist", + __FUNCTION__, mId, id); + return BAD_VALUE; + } + + if (width) *width = (*streamI)->getWidth(); + if (height) *height = (*streamI)->getHeight(); + if (format) *format = (*streamI)->getFormat(); + + return OK; +} + +status_t Camera2Device::setStreamTransform(int id, + int transform) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + bool found = false; + StreamList::iterator streamI; + for (streamI = mStreams.begin(); + streamI != mStreams.end(); streamI++) { + if ((*streamI)->getId() == id) { + found = true; + break; + } + } + if (!found) { + ALOGE("%s: Camera %d: Stream %d does not exist", + __FUNCTION__, mId, id); + return BAD_VALUE; + } + + return (*streamI)->setTransform(transform); +} + +status_t Camera2Device::deleteStream(int id) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + bool found = false; + for (StreamList::iterator streamI = mStreams.begin(); + streamI != mStreams.end(); streamI++) { + if ((*streamI)->getId() == id) { + status_t res = (*streamI)->release(); + if (res != OK) { + ALOGE("%s: Unable to release stream %d from HAL device: " + "%s (%d)", __FUNCTION__, id, strerror(-res), res); + return res; + } + mStreams.erase(streamI); + found = true; + break; + } + } + if (!found) { + ALOGE("%s: Camera %d: Unable to find stream %d to delete", + __FUNCTION__, mId, id); + return BAD_VALUE; + } + return OK; +} + +status_t Camera2Device::deleteReprocessStream(int id) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + bool found = false; + for (ReprocessStreamList::iterator streamI = mReprocessStreams.begin(); + streamI != mReprocessStreams.end(); streamI++) { + if ((*streamI)->getId() == id) { + status_t res = (*streamI)->release(); + if (res != OK) { + ALOGE("%s: Unable to release reprocess stream %d from " + "HAL device: %s (%d)", __FUNCTION__, id, + strerror(-res), res); + return res; + } + mReprocessStreams.erase(streamI); + found = true; + break; + } + } + if (!found) { + ALOGE("%s: Camera %d: Unable to find stream %d to delete", + __FUNCTION__, mId, id); + return BAD_VALUE; + } + return OK; +} + + +status_t Camera2Device::createDefaultRequest(int templateId, + CameraMetadata *request) { + ATRACE_CALL(); + status_t err; + ALOGV("%s: E", __FUNCTION__); + camera_metadata_t *rawRequest; + err = mHal2Device->ops->construct_default_request( + mHal2Device, templateId, &rawRequest); + request->acquire(rawRequest); + return err; +} + +status_t Camera2Device::waitUntilDrained() { + ATRACE_CALL(); + static const uint32_t kSleepTime = 50000; // 50 ms + static const uint32_t kMaxSleepTime = 10000000; // 10 s + ALOGV("%s: Camera %d: Starting wait", __FUNCTION__, mId); + if (mRequestQueue.getBufferCount() == + CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS) return INVALID_OPERATION; + + // TODO: Set up notifications from HAL, instead of sleeping here + uint32_t totalTime = 0; + while (mHal2Device->ops->get_in_progress_count(mHal2Device) > 0) { + usleep(kSleepTime); + totalTime += kSleepTime; + if (totalTime > kMaxSleepTime) { + ALOGE("%s: Waited %d us, %d requests still in flight", __FUNCTION__, + totalTime, 
mHal2Device->ops->get_in_progress_count(mHal2Device)); + return TIMED_OUT; + } + } + ALOGV("%s: Camera %d: HAL is idle", __FUNCTION__, mId); + return OK; +} + +status_t Camera2Device::setNotifyCallback(NotificationListener *listener) { + ATRACE_CALL(); + status_t res; + res = mHal2Device->ops->set_notify_callback(mHal2Device, notificationCallback, + reinterpret_cast(listener) ); + if (res != OK) { + ALOGE("%s: Unable to set notification callback!", __FUNCTION__); + } + return res; +} + +bool Camera2Device::willNotify3A() { + return true; +} + +void Camera2Device::notificationCallback(int32_t msg_type, + int32_t ext1, + int32_t ext2, + int32_t ext3, + void *user) { + ATRACE_CALL(); + NotificationListener *listener = reinterpret_cast(user); + ALOGV("%s: Notification %d, arguments %d, %d, %d", __FUNCTION__, msg_type, + ext1, ext2, ext3); + if (listener != NULL) { + switch (msg_type) { + case CAMERA2_MSG_ERROR: + listener->notifyError(ext1, ext2, ext3); + break; + case CAMERA2_MSG_SHUTTER: { + nsecs_t timestamp = (nsecs_t)ext2 | ((nsecs_t)(ext3) << 32 ); + listener->notifyShutter(ext1, timestamp); + break; + } + case CAMERA2_MSG_AUTOFOCUS: + listener->notifyAutoFocus(ext1, ext2); + break; + case CAMERA2_MSG_AUTOEXPOSURE: + listener->notifyAutoExposure(ext1, ext2); + break; + case CAMERA2_MSG_AUTOWB: + listener->notifyAutoWhitebalance(ext1, ext2); + break; + default: + ALOGE("%s: Unknown notification %d (arguments %d, %d, %d)!", + __FUNCTION__, msg_type, ext1, ext2, ext3); + } + } +} + +status_t Camera2Device::waitForNextFrame(nsecs_t timeout) { + return mFrameQueue.waitForBuffer(timeout); +} + +status_t Camera2Device::getNextFrame(CameraMetadata *frame) { + ATRACE_CALL(); + status_t res; + camera_metadata_t *rawFrame; + res = mFrameQueue.dequeue(&rawFrame); + if (rawFrame == NULL) { + return NOT_ENOUGH_DATA; + } else if (res == OK) { + frame->acquire(rawFrame); + } + return res; +} + +status_t Camera2Device::triggerAutofocus(uint32_t id) { + ATRACE_CALL(); + status_t res; + ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id); + res = mHal2Device->ops->trigger_action(mHal2Device, + CAMERA2_TRIGGER_AUTOFOCUS, id, 0); + if (res != OK) { + ALOGE("%s: Error triggering autofocus (id %d)", + __FUNCTION__, id); + } + return res; +} + +status_t Camera2Device::triggerCancelAutofocus(uint32_t id) { + ATRACE_CALL(); + status_t res; + ALOGV("%s: Canceling autofocus, id %d", __FUNCTION__, id); + res = mHal2Device->ops->trigger_action(mHal2Device, + CAMERA2_TRIGGER_CANCEL_AUTOFOCUS, id, 0); + if (res != OK) { + ALOGE("%s: Error canceling autofocus (id %d)", + __FUNCTION__, id); + } + return res; +} + +status_t Camera2Device::triggerPrecaptureMetering(uint32_t id) { + ATRACE_CALL(); + status_t res; + ALOGV("%s: Triggering precapture metering, id %d", __FUNCTION__, id); + res = mHal2Device->ops->trigger_action(mHal2Device, + CAMERA2_TRIGGER_PRECAPTURE_METERING, id, 0); + if (res != OK) { + ALOGE("%s: Error triggering precapture metering (id %d)", + __FUNCTION__, id); + } + return res; +} + +status_t Camera2Device::pushReprocessBuffer(int reprocessStreamId, + buffer_handle_t *buffer, wp listener) { + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + bool found = false; + status_t res = OK; + for (ReprocessStreamList::iterator streamI = mReprocessStreams.begin(); + streamI != mReprocessStreams.end(); streamI++) { + if ((*streamI)->getId() == reprocessStreamId) { + res = (*streamI)->pushIntoStream(buffer, listener); + if (res != OK) { + ALOGE("%s: Unable to push buffer to reprocess stream %d: %s (%d)", + 
__FUNCTION__, reprocessStreamId, strerror(-res), res); + return res; + } + found = true; + break; + } + } + if (!found) { + ALOGE("%s: Camera %d: Unable to find reprocess stream %d", + __FUNCTION__, mId, reprocessStreamId); + res = BAD_VALUE; + } + return res; +} + +/** + * Camera2Device::MetadataQueue + */ + +Camera2Device::MetadataQueue::MetadataQueue(): + mHal2Device(NULL), + mFrameCount(0), + mLatestRequestId(0), + mCount(0), + mStreamSlotCount(0), + mSignalConsumer(true) +{ + ATRACE_CALL(); + camera2_request_queue_src_ops::dequeue_request = consumer_dequeue; + camera2_request_queue_src_ops::request_count = consumer_buffer_count; + camera2_request_queue_src_ops::free_request = consumer_free; + + camera2_frame_queue_dst_ops::dequeue_frame = producer_dequeue; + camera2_frame_queue_dst_ops::cancel_frame = producer_cancel; + camera2_frame_queue_dst_ops::enqueue_frame = producer_enqueue; +} + +Camera2Device::MetadataQueue::~MetadataQueue() { + ATRACE_CALL(); + Mutex::Autolock l(mMutex); + freeBuffers(mEntries.begin(), mEntries.end()); + freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); +} + +// Connect to camera2 HAL as consumer (input requests/reprocessing) +status_t Camera2Device::MetadataQueue::setConsumerDevice(camera2_device_t *d) { + ATRACE_CALL(); + status_t res; + res = d->ops->set_request_queue_src_ops(d, + this); + if (res != OK) return res; + mHal2Device = d; + return OK; +} + +status_t Camera2Device::MetadataQueue::setProducerDevice(camera2_device_t *d) { + ATRACE_CALL(); + status_t res; + res = d->ops->set_frame_queue_dst_ops(d, + this); + return res; +} + +// Real interfaces +status_t Camera2Device::MetadataQueue::enqueue(camera_metadata_t *buf) { + ATRACE_CALL(); + ALOGVV("%s: E", __FUNCTION__); + Mutex::Autolock l(mMutex); + + mCount++; + mEntries.push_back(buf); + + return signalConsumerLocked(); +} + +int Camera2Device::MetadataQueue::getBufferCount() { + ATRACE_CALL(); + Mutex::Autolock l(mMutex); + if (mStreamSlotCount > 0) { + return CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS; + } + return mCount; +} + +status_t Camera2Device::MetadataQueue::dequeue(camera_metadata_t **buf, + bool incrementCount) +{ + ATRACE_CALL(); + ALOGVV("%s: E", __FUNCTION__); + status_t res; + Mutex::Autolock l(mMutex); + + if (mCount == 0) { + if (mStreamSlotCount == 0) { + ALOGVV("%s: Empty", __FUNCTION__); + *buf = NULL; + mSignalConsumer = true; + return OK; + } + ALOGVV("%s: Streaming %d frames to queue", __FUNCTION__, + mStreamSlotCount); + + for (List::iterator slotEntry = mStreamSlot.begin(); + slotEntry != mStreamSlot.end(); + slotEntry++ ) { + size_t entries = get_camera_metadata_entry_count(*slotEntry); + size_t dataBytes = get_camera_metadata_data_count(*slotEntry); + + camera_metadata_t *copy = + allocate_camera_metadata(entries, dataBytes); + append_camera_metadata(copy, *slotEntry); + mEntries.push_back(copy); + } + mCount = mStreamSlotCount; + } + ALOGVV("MetadataQueue: deque (%d buffers)", mCount); + camera_metadata_t *b = *(mEntries.begin()); + mEntries.erase(mEntries.begin()); + + if (incrementCount) { + ATRACE_INT("cam2_request", mFrameCount); + camera_metadata_entry_t frameCount; + res = find_camera_metadata_entry(b, + ANDROID_REQUEST_FRAME_COUNT, + &frameCount); + if (res != OK) { + ALOGE("%s: Unable to add frame count: %s (%d)", + __FUNCTION__, strerror(-res), res); + } else { + *frameCount.data.i32 = mFrameCount; + } + mFrameCount++; + } + + // Check for request ID, and if present, signal waiters. 
+ camera_metadata_entry_t requestId; + res = find_camera_metadata_entry(b, + ANDROID_REQUEST_ID, + &requestId); + if (res == OK) { + mLatestRequestId = requestId.data.i32[0]; + mNewRequestId.signal(); + } + + *buf = b; + mCount--; + + return OK; +} + +status_t Camera2Device::MetadataQueue::waitForBuffer(nsecs_t timeout) +{ + Mutex::Autolock l(mMutex); + status_t res; + while (mCount == 0) { + res = notEmpty.waitRelative(mMutex,timeout); + if (res != OK) return res; + } + return OK; +} + +status_t Camera2Device::MetadataQueue::waitForDequeue(int32_t id, + nsecs_t timeout) { + Mutex::Autolock l(mMutex); + status_t res; + while (mLatestRequestId != id) { + nsecs_t startTime = systemTime(); + + res = mNewRequestId.waitRelative(mMutex, timeout); + if (res != OK) return res; + + timeout -= (systemTime() - startTime); + } + + return OK; +} + +status_t Camera2Device::MetadataQueue::setStreamSlot(camera_metadata_t *buf) +{ + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock l(mMutex); + if (buf == NULL) { + freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); + mStreamSlotCount = 0; + return OK; + } + camera_metadata_t *buf2 = clone_camera_metadata(buf); + if (!buf2) { + ALOGE("%s: Unable to clone metadata buffer!", __FUNCTION__); + return NO_MEMORY; + } + + if (mStreamSlotCount > 1) { + List::iterator deleter = ++mStreamSlot.begin(); + freeBuffers(++mStreamSlot.begin(), mStreamSlot.end()); + mStreamSlotCount = 1; + } + if (mStreamSlotCount == 1) { + free_camera_metadata( *(mStreamSlot.begin()) ); + *(mStreamSlot.begin()) = buf2; + } else { + mStreamSlot.push_front(buf2); + mStreamSlotCount = 1; + } + return signalConsumerLocked(); +} + +status_t Camera2Device::MetadataQueue::setStreamSlot( + const List &bufs) +{ + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + Mutex::Autolock l(mMutex); + + if (mStreamSlotCount > 0) { + freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); + } + mStreamSlotCount = 0; + for (List::const_iterator r = bufs.begin(); + r != bufs.end(); r++) { + camera_metadata_t *r2 = clone_camera_metadata(*r); + if (!r2) { + ALOGE("%s: Unable to clone metadata buffer!", __FUNCTION__); + return NO_MEMORY; + } + mStreamSlot.push_back(r2); + mStreamSlotCount++; + } + return signalConsumerLocked(); +} + +status_t Camera2Device::MetadataQueue::dump(int fd, + const Vector& /*args*/) { + ATRACE_CALL(); + String8 result; + status_t notLocked; + notLocked = mMutex.tryLock(); + if (notLocked) { + result.append(" (Unable to lock queue mutex)\n"); + } + result.appendFormat(" Current frame number: %d\n", mFrameCount); + if (mStreamSlotCount == 0) { + result.append(" Stream slot: Empty\n"); + write(fd, result.string(), result.size()); + } else { + result.appendFormat(" Stream slot: %d entries\n", + mStreamSlot.size()); + int i = 0; + for (List::iterator r = mStreamSlot.begin(); + r != mStreamSlot.end(); r++) { + result = String8::format(" Stream slot buffer %d:\n", i); + write(fd, result.string(), result.size()); + dump_indented_camera_metadata(*r, fd, 2, 10); + i++; + } + } + if (mEntries.size() == 0) { + result = " Main queue is empty\n"; + write(fd, result.string(), result.size()); + } else { + result = String8::format(" Main queue has %d entries:\n", + mEntries.size()); + int i = 0; + for (List::iterator r = mEntries.begin(); + r != mEntries.end(); r++) { + result = String8::format(" Queue entry %d:\n", i); + write(fd, result.string(), result.size()); + dump_indented_camera_metadata(*r, fd, 2, 10); + i++; + } + } + + if (notLocked == 0) { + mMutex.unlock(); + } + + return OK; +} 
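The stream-slot handling in MetadataQueue::dequeue() above is what makes repeating requests work: when the main queue runs empty, each slot entry is copied back into the queue before the next buffer is handed to the HAL, so a streaming request keeps being served without the client re-enqueuing it. A deliberately simplified, self-contained model of that behaviour (plain ints instead of camera_metadata_t buffers, and none of the locking or consumer signaling) could be sketched as:

    // Simplified model only; not the real MetadataQueue.
    #include <list>
    #include <cstdio>

    struct SlotQueue {
        std::list<int> entries;     // stands in for queued metadata buffers
        std::list<int> streamSlot;  // stands in for the repeating-request slot

        bool dequeue(int *out) {
            if (entries.empty()) {
                if (streamSlot.empty()) return false;          // nothing queued, nothing repeating
                for (int s : streamSlot) entries.push_back(s); // copy slot contents into the queue
            }
            *out = entries.front();
            entries.pop_front();
            return true;
        }
    };

    int main() {
        SlotQueue q;
        q.streamSlot.push_back(7);   // set a repeating "request"
        int v;
        for (int i = 0; i < 3; i++) {
            if (q.dequeue(&v)) printf("dequeued %d\n", v);  // prints 7 three times
        }
        return 0;
    }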
+ +status_t Camera2Device::MetadataQueue::signalConsumerLocked() { + ATRACE_CALL(); + status_t res = OK; + notEmpty.signal(); + if (mSignalConsumer && mHal2Device != NULL) { + mSignalConsumer = false; + + mMutex.unlock(); + ALOGV("%s: Signaling consumer", __FUNCTION__); + res = mHal2Device->ops->notify_request_queue_not_empty(mHal2Device); + mMutex.lock(); + } + return res; +} + +status_t Camera2Device::MetadataQueue::freeBuffers( + List::iterator start, + List::iterator end) +{ + ATRACE_CALL(); + while (start != end) { + free_camera_metadata(*start); + start = mStreamSlot.erase(start); + } + return OK; +} + +Camera2Device::MetadataQueue* Camera2Device::MetadataQueue::getInstance( + const camera2_request_queue_src_ops_t *q) +{ + const MetadataQueue* cmq = static_cast(q); + return const_cast(cmq); +} + +Camera2Device::MetadataQueue* Camera2Device::MetadataQueue::getInstance( + const camera2_frame_queue_dst_ops_t *q) +{ + const MetadataQueue* cmq = static_cast(q); + return const_cast(cmq); +} + +int Camera2Device::MetadataQueue::consumer_buffer_count( + const camera2_request_queue_src_ops_t *q) +{ + MetadataQueue *queue = getInstance(q); + return queue->getBufferCount(); +} + +int Camera2Device::MetadataQueue::consumer_dequeue( + const camera2_request_queue_src_ops_t *q, + camera_metadata_t **buffer) +{ + MetadataQueue *queue = getInstance(q); + return queue->dequeue(buffer, true); +} + +int Camera2Device::MetadataQueue::consumer_free( + const camera2_request_queue_src_ops_t *q, + camera_metadata_t *old_buffer) +{ + ATRACE_CALL(); + MetadataQueue *queue = getInstance(q); + (void)queue; + free_camera_metadata(old_buffer); + return OK; +} + +int Camera2Device::MetadataQueue::producer_dequeue( + const camera2_frame_queue_dst_ops_t * /*q*/, + size_t entries, size_t bytes, + camera_metadata_t **buffer) +{ + ATRACE_CALL(); + camera_metadata_t *new_buffer = + allocate_camera_metadata(entries, bytes); + if (new_buffer == NULL) return NO_MEMORY; + *buffer = new_buffer; + return OK; +} + +int Camera2Device::MetadataQueue::producer_cancel( + const camera2_frame_queue_dst_ops_t * /*q*/, + camera_metadata_t *old_buffer) +{ + ATRACE_CALL(); + free_camera_metadata(old_buffer); + return OK; +} + +int Camera2Device::MetadataQueue::producer_enqueue( + const camera2_frame_queue_dst_ops_t *q, + camera_metadata_t *filled_buffer) +{ + MetadataQueue *queue = getInstance(q); + return queue->enqueue(filled_buffer); +} + +/** + * Camera2Device::StreamAdapter + */ + +#ifndef container_of +#define container_of(ptr, type, member) \ + (type *)((char*)(ptr) - offsetof(type, member)) +#endif + +Camera2Device::StreamAdapter::StreamAdapter(camera2_device_t *d): + mState(RELEASED), + mHal2Device(d), + mId(-1), + mWidth(0), mHeight(0), mFormat(0), mSize(0), mUsage(0), + mMaxProducerBuffers(0), mMaxConsumerBuffers(0), + mTotalBuffers(0), + mFormatRequested(0), + mActiveBuffers(0), + mFrameCount(0), + mLastTimestamp(0) +{ + camera2_stream_ops::dequeue_buffer = dequeue_buffer; + camera2_stream_ops::enqueue_buffer = enqueue_buffer; + camera2_stream_ops::cancel_buffer = cancel_buffer; + camera2_stream_ops::set_crop = set_crop; +} + +Camera2Device::StreamAdapter::~StreamAdapter() { + ATRACE_CALL(); + if (mState != RELEASED) { + release(); + } +} + +status_t Camera2Device::StreamAdapter::connectToDevice( + sp consumer, + uint32_t width, uint32_t height, int format, size_t size) { + ATRACE_CALL(); + status_t res; + ALOGV("%s: E", __FUNCTION__); + + if (mState != RELEASED) return INVALID_OPERATION; + if (consumer == NULL) { + 
ALOGE("%s: Null consumer passed to stream adapter", __FUNCTION__); + return BAD_VALUE; + } + + ALOGV("%s: New stream parameters %d x %d, format 0x%x, size %d", + __FUNCTION__, width, height, format, size); + + mConsumerInterface = consumer; + mWidth = width; + mHeight = height; + mSize = (format == HAL_PIXEL_FORMAT_BLOB) ? size : 0; + mFormatRequested = format; + + // Allocate device-side stream interface + + uint32_t id; + uint32_t formatActual; + uint32_t usage; + uint32_t maxBuffers = 2; + res = mHal2Device->ops->allocate_stream(mHal2Device, + mWidth, mHeight, mFormatRequested, getStreamOps(), + &id, &formatActual, &usage, &maxBuffers); + if (res != OK) { + ALOGE("%s: Device stream allocation failed: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + ALOGV("%s: Allocated stream id %d, actual format 0x%x, " + "usage 0x%x, producer wants %d buffers", __FUNCTION__, + id, formatActual, usage, maxBuffers); + + mId = id; + mFormat = formatActual; + mUsage = usage; + mMaxProducerBuffers = maxBuffers; + + mState = ALLOCATED; + + // Configure consumer-side ANativeWindow interface + res = native_window_api_connect(mConsumerInterface.get(), + NATIVE_WINDOW_API_CAMERA); + if (res != OK) { + ALOGE("%s: Unable to connect to native window for stream %d", + __FUNCTION__, mId); + + return res; + } + + mState = CONNECTED; + + res = native_window_set_usage(mConsumerInterface.get(), mUsage); + if (res != OK) { + ALOGE("%s: Unable to configure usage %08x for stream %d", + __FUNCTION__, mUsage, mId); + return res; + } + + res = native_window_set_scaling_mode(mConsumerInterface.get(), + NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); + if (res != OK) { + ALOGE("%s: Unable to configure stream scaling: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + res = setTransform(0); + if (res != OK) { + return res; + } + + if (mFormat == HAL_PIXEL_FORMAT_BLOB) { + res = native_window_set_buffers_geometry(mConsumerInterface.get(), + mSize, 1, mFormat); + if (res != OK) { + ALOGE("%s: Unable to configure compressed stream buffer geometry" + " %d x %d, size %d for stream %d", + __FUNCTION__, mWidth, mHeight, mSize, mId); + return res; + } + } else { + res = native_window_set_buffers_geometry(mConsumerInterface.get(), + mWidth, mHeight, mFormat); + if (res != OK) { + ALOGE("%s: Unable to configure stream buffer geometry" + " %d x %d, format 0x%x for stream %d", + __FUNCTION__, mWidth, mHeight, mFormat, mId); + return res; + } + } + + int maxConsumerBuffers; + res = mConsumerInterface->query(mConsumerInterface.get(), + NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers); + if (res != OK) { + ALOGE("%s: Unable to query consumer undequeued" + " buffer count for stream %d", __FUNCTION__, mId); + return res; + } + mMaxConsumerBuffers = maxConsumerBuffers; + + ALOGV("%s: Consumer wants %d buffers", __FUNCTION__, + mMaxConsumerBuffers); + + mTotalBuffers = mMaxConsumerBuffers + mMaxProducerBuffers; + mActiveBuffers = 0; + mFrameCount = 0; + mLastTimestamp = 0; + + res = native_window_set_buffer_count(mConsumerInterface.get(), + mTotalBuffers); + if (res != OK) { + ALOGE("%s: Unable to set buffer count for stream %d", + __FUNCTION__, mId); + return res; + } + + // Register allocated buffers with HAL device + buffer_handle_t *buffers = new buffer_handle_t[mTotalBuffers]; + ANativeWindowBuffer **anwBuffers = new ANativeWindowBuffer*[mTotalBuffers]; + uint32_t bufferIdx = 0; + for (; bufferIdx < mTotalBuffers; bufferIdx++) { + res = 
native_window_dequeue_buffer_and_wait(mConsumerInterface.get(), + &anwBuffers[bufferIdx]); + if (res != OK) { + ALOGE("%s: Unable to dequeue buffer %d for initial registration for " + "stream %d", __FUNCTION__, bufferIdx, mId); + goto cleanUpBuffers; + } + + buffers[bufferIdx] = anwBuffers[bufferIdx]->handle; + ALOGV("%s: Buffer %p allocated", __FUNCTION__, (void*)buffers[bufferIdx]); + } + + ALOGV("%s: Registering %d buffers with camera HAL", __FUNCTION__, mTotalBuffers); + res = mHal2Device->ops->register_stream_buffers(mHal2Device, + mId, + mTotalBuffers, + buffers); + if (res != OK) { + ALOGE("%s: Unable to register buffers with HAL device for stream %d", + __FUNCTION__, mId); + } else { + mState = ACTIVE; + } + +cleanUpBuffers: + ALOGV("%s: Cleaning up %d buffers", __FUNCTION__, bufferIdx); + for (uint32_t i = 0; i < bufferIdx; i++) { + res = mConsumerInterface->cancelBuffer(mConsumerInterface.get(), + anwBuffers[i], -1); + if (res != OK) { + ALOGE("%s: Unable to cancel buffer %d after registration", + __FUNCTION__, i); + } + } + delete[] anwBuffers; + delete[] buffers; + + return res; +} + +status_t Camera2Device::StreamAdapter::release() { + ATRACE_CALL(); + status_t res; + ALOGV("%s: Releasing stream %d (%d x %d, format %d)", __FUNCTION__, mId, + mWidth, mHeight, mFormat); + if (mState >= ALLOCATED) { + res = mHal2Device->ops->release_stream(mHal2Device, mId); + if (res != OK) { + ALOGE("%s: Unable to release stream %d", + __FUNCTION__, mId); + return res; + } + } + if (mState >= CONNECTED) { + res = native_window_api_disconnect(mConsumerInterface.get(), + NATIVE_WINDOW_API_CAMERA); + + /* this is not an error. if client calling process dies, + the window will also die and all calls to it will return + DEAD_OBJECT, thus it's already "disconnected" */ + if (res == DEAD_OBJECT) { + ALOGW("%s: While disconnecting stream %d from native window, the" + " native window died from under us", __FUNCTION__, mId); + } + else if (res != OK) { + ALOGE("%s: Unable to disconnect stream %d from native window (error %d %s)", + __FUNCTION__, mId, res, strerror(-res)); + return res; + } + } + mId = -1; + mState = RELEASED; + return OK; +} + +status_t Camera2Device::StreamAdapter::setTransform(int transform) { + ATRACE_CALL(); + status_t res; + if (mState < CONNECTED) { + ALOGE("%s: Cannot set transform on unconnected stream", __FUNCTION__); + return INVALID_OPERATION; + } + res = native_window_set_buffers_transform(mConsumerInterface.get(), + transform); + if (res != OK) { + ALOGE("%s: Unable to configure stream transform to %x: %s (%d)", + __FUNCTION__, transform, strerror(-res), res); + } + return res; +} + +status_t Camera2Device::StreamAdapter::dump(int fd, + const Vector& /*args*/) { + ATRACE_CALL(); + String8 result = String8::format(" Stream %d: %d x %d, format 0x%x\n", + mId, mWidth, mHeight, mFormat); + result.appendFormat(" size %d, usage 0x%x, requested format 0x%x\n", + mSize, mUsage, mFormatRequested); + result.appendFormat(" total buffers: %d, dequeued buffers: %d\n", + mTotalBuffers, mActiveBuffers); + result.appendFormat(" frame count: %d, last timestamp %lld\n", + mFrameCount, mLastTimestamp); + write(fd, result.string(), result.size()); + return OK; +} + +const camera2_stream_ops *Camera2Device::StreamAdapter::getStreamOps() { + return static_cast(this); +} + +ANativeWindow* Camera2Device::StreamAdapter::toANW( + const camera2_stream_ops_t *w) { + return static_cast(w)->mConsumerInterface.get(); +} + +int Camera2Device::StreamAdapter::dequeue_buffer(const camera2_stream_ops_t *w, + 
buffer_handle_t** buffer) { + ATRACE_CALL(); + int res; + StreamAdapter* stream = + const_cast(static_cast(w)); + if (stream->mState != ACTIVE) { + ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState); + return INVALID_OPERATION; + } + + ANativeWindow *a = toANW(w); + ANativeWindowBuffer* anb; + res = native_window_dequeue_buffer_and_wait(a, &anb); + if (res != OK) { + ALOGE("Stream %d dequeue: Error from native_window: %s (%d)", stream->mId, + strerror(-res), res); + return res; + } + + *buffer = &(anb->handle); + stream->mActiveBuffers++; + + ALOGVV("Stream %d dequeue: Buffer %p dequeued", stream->mId, (void*)(**buffer)); + return res; +} + +int Camera2Device::StreamAdapter::enqueue_buffer(const camera2_stream_ops_t* w, + int64_t timestamp, + buffer_handle_t* buffer) { + ATRACE_CALL(); + StreamAdapter *stream = + const_cast(static_cast(w)); + stream->mFrameCount++; + ALOGVV("Stream %d enqueue: Frame %d (%p) captured at %lld ns", + stream->mId, stream->mFrameCount, (void*)(*buffer), timestamp); + int state = stream->mState; + if (state != ACTIVE) { + ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state); + return INVALID_OPERATION; + } + ANativeWindow *a = toANW(w); + status_t err; + + err = native_window_set_buffers_timestamp(a, timestamp); + if (err != OK) { + ALOGE("%s: Error setting timestamp on native window: %s (%d)", + __FUNCTION__, strerror(-err), err); + return err; + } + err = a->queueBuffer(a, + container_of(buffer, ANativeWindowBuffer, handle), -1); + if (err != OK) { + ALOGE("%s: Error queueing buffer to native window: %s (%d)", + __FUNCTION__, strerror(-err), err); + return err; + } + + stream->mActiveBuffers--; + stream->mLastTimestamp = timestamp; + return OK; +} + +int Camera2Device::StreamAdapter::cancel_buffer(const camera2_stream_ops_t* w, + buffer_handle_t* buffer) { + ATRACE_CALL(); + StreamAdapter *stream = + const_cast(static_cast(w)); + ALOGVV("Stream %d cancel: Buffer %p", + stream->mId, (void*)(*buffer)); + if (stream->mState != ACTIVE) { + ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState); + return INVALID_OPERATION; + } + + ANativeWindow *a = toANW(w); + int err = a->cancelBuffer(a, + container_of(buffer, ANativeWindowBuffer, handle), -1); + if (err != OK) { + ALOGE("%s: Error canceling buffer to native window: %s (%d)", + __FUNCTION__, strerror(-err), err); + return err; + } + + stream->mActiveBuffers--; + return OK; +} + +int Camera2Device::StreamAdapter::set_crop(const camera2_stream_ops_t* w, + int left, int top, int right, int bottom) { + ATRACE_CALL(); + int state = static_cast(w)->mState; + if (state != ACTIVE) { + ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state); + return INVALID_OPERATION; + } + ANativeWindow *a = toANW(w); + android_native_rect_t crop = { left, top, right, bottom }; + return native_window_set_crop(a, &crop); +} + +/** + * Camera2Device::ReprocessStreamAdapter + */ + +#ifndef container_of +#define container_of(ptr, type, member) \ + (type *)((char*)(ptr) - offsetof(type, member)) +#endif + +Camera2Device::ReprocessStreamAdapter::ReprocessStreamAdapter(camera2_device_t *d): + mState(RELEASED), + mHal2Device(d), + mId(-1), + mWidth(0), mHeight(0), mFormat(0), + mActiveBuffers(0), + mFrameCount(0) +{ + ATRACE_CALL(); + camera2_stream_in_ops::acquire_buffer = acquire_buffer; + camera2_stream_in_ops::release_buffer = release_buffer; +} + +Camera2Device::ReprocessStreamAdapter::~ReprocessStreamAdapter() { + ATRACE_CALL(); + if (mState != RELEASED) { + release(); + } +} + 
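Both stream adapters rely on the container_of macro defined above to get from the buffer_handle_t* that the HAL hands back to the enclosing ANativeWindowBuffer, since the window API needs the full buffer object while the HAL only tracks the embedded handle. A self-contained illustration of the idiom, using hypothetical stand-in types rather than the real gralloc structures, might be:

    // Illustrative sketch only; Handle and Buffer are stand-ins for the real
    // buffer_handle_t and ANativeWindowBuffer types.
    #include <cstddef>
    #include <cstdio>

    #ifndef container_of
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))
    #endif

    struct Handle { int id; };

    struct Buffer {        // stands in for ANativeWindowBuffer
        int width;
        int height;
        Handle handle;     // stands in for the embedded buffer handle
    };

    int main() {
        Buffer b = { 640, 480, { 3 } };
        Handle *h = &b.handle;                          // the HAL only sees this member pointer
        Buffer *recovered = container_of(h, Buffer, handle);
        printf("%dx%d, id %d\n", recovered->width, recovered->height, recovered->handle.id);
        return 0;
    }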
+status_t Camera2Device::ReprocessStreamAdapter::connectToDevice( + const sp &outputStream) { + ATRACE_CALL(); + status_t res; + ALOGV("%s: E", __FUNCTION__); + + if (mState != RELEASED) return INVALID_OPERATION; + if (outputStream == NULL) { + ALOGE("%s: Null base stream passed to reprocess stream adapter", + __FUNCTION__); + return BAD_VALUE; + } + + mBaseStream = outputStream; + mWidth = outputStream->getWidth(); + mHeight = outputStream->getHeight(); + mFormat = outputStream->getFormat(); + + ALOGV("%s: New reprocess stream parameters %d x %d, format 0x%x", + __FUNCTION__, mWidth, mHeight, mFormat); + + // Allocate device-side stream interface + + uint32_t id; + res = mHal2Device->ops->allocate_reprocess_stream_from_stream(mHal2Device, + outputStream->getId(), getStreamOps(), + &id); + if (res != OK) { + ALOGE("%s: Device reprocess stream allocation failed: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + ALOGV("%s: Allocated reprocess stream id %d based on stream %d", + __FUNCTION__, id, outputStream->getId()); + + mId = id; + + mState = ACTIVE; + + return OK; +} + +status_t Camera2Device::ReprocessStreamAdapter::release() { + ATRACE_CALL(); + status_t res; + ALOGV("%s: Releasing stream %d", __FUNCTION__, mId); + if (mState >= ACTIVE) { + res = mHal2Device->ops->release_reprocess_stream(mHal2Device, mId); + if (res != OK) { + ALOGE("%s: Unable to release stream %d", + __FUNCTION__, mId); + return res; + } + } + + List::iterator s; + for (s = mQueue.begin(); s != mQueue.end(); s++) { + sp listener = s->releaseListener.promote(); + if (listener != 0) listener->onBufferReleased(s->handle); + } + for (s = mInFlightQueue.begin(); s != mInFlightQueue.end(); s++) { + sp listener = s->releaseListener.promote(); + if (listener != 0) listener->onBufferReleased(s->handle); + } + mQueue.clear(); + mInFlightQueue.clear(); + + mState = RELEASED; + return OK; +} + +status_t Camera2Device::ReprocessStreamAdapter::pushIntoStream( + buffer_handle_t *handle, const wp &releaseListener) { + ATRACE_CALL(); + // TODO: Some error checking here would be nice + ALOGV("%s: Pushing buffer %p to stream", __FUNCTION__, (void*)(*handle)); + + QueueEntry entry; + entry.handle = handle; + entry.releaseListener = releaseListener; + mQueue.push_back(entry); + return OK; +} + +status_t Camera2Device::ReprocessStreamAdapter::dump(int fd, + const Vector& /*args*/) { + ATRACE_CALL(); + String8 result = + String8::format(" Reprocess stream %d: %d x %d, fmt 0x%x\n", + mId, mWidth, mHeight, mFormat); + result.appendFormat(" acquired buffers: %d\n", + mActiveBuffers); + result.appendFormat(" frame count: %d\n", + mFrameCount); + write(fd, result.string(), result.size()); + return OK; +} + +const camera2_stream_in_ops *Camera2Device::ReprocessStreamAdapter::getStreamOps() { + return static_cast(this); +} + +int Camera2Device::ReprocessStreamAdapter::acquire_buffer( + const camera2_stream_in_ops_t *w, + buffer_handle_t** buffer) { + ATRACE_CALL(); + + ReprocessStreamAdapter* stream = + const_cast( + static_cast(w)); + if (stream->mState != ACTIVE) { + ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState); + return INVALID_OPERATION; + } + + if (stream->mQueue.empty()) { + *buffer = NULL; + return OK; + } + + QueueEntry &entry = *(stream->mQueue.begin()); + + *buffer = entry.handle; + + stream->mInFlightQueue.push_back(entry); + stream->mQueue.erase(stream->mQueue.begin()); + + stream->mActiveBuffers++; + + ALOGV("Stream %d acquire: Buffer %p acquired", stream->mId, + (void*)(**buffer)); + 
return OK; +} + +int Camera2Device::ReprocessStreamAdapter::release_buffer( + const camera2_stream_in_ops_t* w, + buffer_handle_t* buffer) { + ATRACE_CALL(); + ReprocessStreamAdapter *stream = + const_cast( + static_cast(w) ); + stream->mFrameCount++; + ALOGV("Reprocess stream %d release: Frame %d (%p)", + stream->mId, stream->mFrameCount, (void*)*buffer); + int state = stream->mState; + if (state != ACTIVE) { + ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state); + return INVALID_OPERATION; + } + stream->mActiveBuffers--; + + List::iterator s; + for (s = stream->mInFlightQueue.begin(); s != stream->mInFlightQueue.end(); s++) { + if ( s->handle == buffer ) break; + } + if (s == stream->mInFlightQueue.end()) { + ALOGE("%s: Can't find buffer %p in in-flight list!", __FUNCTION__, + buffer); + return INVALID_OPERATION; + } + + sp listener = s->releaseListener.promote(); + if (listener != 0) { + listener->onBufferReleased(s->handle); + } else { + ALOGE("%s: Can't free buffer - missing listener", __FUNCTION__); + } + stream->mInFlightQueue.erase(s); + + return OK; +} + +}; // namespace android diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h new file mode 100644 index 0000000..8945ec2 --- /dev/null +++ b/services/camera/libcameraservice/device2/Camera2Device.h @@ -0,0 +1,345 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA2DEVICE_H +#define ANDROID_SERVERS_CAMERA_CAMERA2DEVICE_H + +#include +#include +#include +#include + +#include "common/CameraDeviceBase.h" + +namespace android { + +/** + * CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_2_0 + */ +class Camera2Device: public CameraDeviceBase { + public: + Camera2Device(int id); + + virtual ~Camera2Device(); + + /** + * CameraDevice interface + */ + virtual int getId() const; + virtual status_t initialize(camera_module_t *module); + virtual status_t disconnect(); + virtual status_t dump(int fd, const Vector& args); + virtual const CameraMetadata& info() const; + virtual status_t capture(CameraMetadata &request); + virtual status_t setStreamingRequest(const CameraMetadata &request); + virtual status_t clearStreamingRequest(); + virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout); + virtual status_t createStream(sp consumer, + uint32_t width, uint32_t height, int format, size_t size, + int *id); + virtual status_t createReprocessStreamFromStream(int outputId, int *id); + virtual status_t getStreamInfo(int id, + uint32_t *width, uint32_t *height, uint32_t *format); + virtual status_t setStreamTransform(int id, int transform); + virtual status_t deleteStream(int id); + virtual status_t deleteReprocessStream(int id); + virtual status_t createDefaultRequest(int templateId, CameraMetadata *request); + virtual status_t waitUntilDrained(); + virtual status_t setNotifyCallback(NotificationListener *listener); + virtual bool willNotify3A(); + virtual status_t waitForNextFrame(nsecs_t timeout); + virtual status_t getNextFrame(CameraMetadata *frame); + virtual status_t triggerAutofocus(uint32_t id); + virtual status_t triggerCancelAutofocus(uint32_t id); + virtual status_t triggerPrecaptureMetering(uint32_t id); + virtual status_t pushReprocessBuffer(int reprocessStreamId, + buffer_handle_t *buffer, wp listener); + private: + const int mId; + camera2_device_t *mHal2Device; + + CameraMetadata mDeviceInfo; + vendor_tag_query_ops_t *mVendorTagOps; + + /** + * Queue class for both sending requests to a camera2 device, and for + * receiving frames from a camera2 device. + */ + class MetadataQueue: public camera2_request_queue_src_ops_t, + public camera2_frame_queue_dst_ops_t { + public: + MetadataQueue(); + ~MetadataQueue(); + + // Interface to camera2 HAL device, either for requests (device is + // consumer) or for frames (device is producer) + const camera2_request_queue_src_ops_t* getToConsumerInterface(); + void setFromConsumerInterface(camera2_device_t *d); + + // Connect queue consumer endpoint to a camera2 device + status_t setConsumerDevice(camera2_device_t *d); + // Connect queue producer endpoint to a camera2 device + status_t setProducerDevice(camera2_device_t *d); + + const camera2_frame_queue_dst_ops_t* getToProducerInterface(); + + // Real interfaces. On enqueue, queue takes ownership of buffer pointer + // On dequeue, user takes ownership of buffer pointer. + status_t enqueue(camera_metadata_t *buf); + status_t dequeue(camera_metadata_t **buf, bool incrementCount = false); + int getBufferCount(); + status_t waitForBuffer(nsecs_t timeout); + // Wait until a buffer with the given ID is dequeued. Will return + // immediately if the latest buffer dequeued has that ID. 
+ status_t waitForDequeue(int32_t id, nsecs_t timeout); + + // Set repeating buffer(s); if the queue is empty on a dequeue call, the + // queue copies the contents of the stream slot into the queue, and then + // dequeues the first new entry. The metadata buffers passed in are + // copied. + status_t setStreamSlot(camera_metadata_t *buf); + status_t setStreamSlot(const List &bufs); + + status_t dump(int fd, const Vector& args); + + private: + status_t signalConsumerLocked(); + status_t freeBuffers(List::iterator start, + List::iterator end); + + camera2_device_t *mHal2Device; + + Mutex mMutex; + Condition notEmpty; + + int mFrameCount; + int32_t mLatestRequestId; + Condition mNewRequestId; + + int mCount; + List mEntries; + int mStreamSlotCount; + List mStreamSlot; + + bool mSignalConsumer; + + static MetadataQueue* getInstance( + const camera2_frame_queue_dst_ops_t *q); + static MetadataQueue* getInstance( + const camera2_request_queue_src_ops_t *q); + + static int consumer_buffer_count( + const camera2_request_queue_src_ops_t *q); + + static int consumer_dequeue(const camera2_request_queue_src_ops_t *q, + camera_metadata_t **buffer); + + static int consumer_free(const camera2_request_queue_src_ops_t *q, + camera_metadata_t *old_buffer); + + static int producer_dequeue(const camera2_frame_queue_dst_ops_t *q, + size_t entries, size_t bytes, + camera_metadata_t **buffer); + + static int producer_cancel(const camera2_frame_queue_dst_ops_t *q, + camera_metadata_t *old_buffer); + + static int producer_enqueue(const camera2_frame_queue_dst_ops_t *q, + camera_metadata_t *filled_buffer); + + }; // class MetadataQueue + + MetadataQueue mRequestQueue; + MetadataQueue mFrameQueue; + + /** + * Adapter from an ANativeWindow interface to camera2 device stream ops. + * Also takes care of allocating/deallocating stream in device interface + */ + class StreamAdapter: public camera2_stream_ops, public virtual RefBase { + public: + StreamAdapter(camera2_device_t *d); + + ~StreamAdapter(); + + /** + * Create a HAL device stream of the requested size and format. + * + * If format is CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, then the HAL device + * selects an appropriate format; it can be queried with getFormat. + * + * If format is HAL_PIXEL_FORMAT_COMPRESSED, the size parameter must + * be equal to the size in bytes of the buffers to allocate for the + * stream. For other formats, the size parameter is ignored. + */ + status_t connectToDevice(sp consumer, + uint32_t width, uint32_t height, int format, size_t size); + + status_t release(); + + status_t setTransform(int transform); + + // Get stream parameters. + // Only valid after a successful connectToDevice call. 
+ int getId() const { return mId; } + uint32_t getWidth() const { return mWidth; } + uint32_t getHeight() const { return mHeight; } + uint32_t getFormat() const { return mFormat; } + + // Dump stream information + status_t dump(int fd, const Vector& args); + + private: + enum { + ERROR = -1, + RELEASED = 0, + ALLOCATED, + CONNECTED, + ACTIVE + } mState; + + sp mConsumerInterface; + camera2_device_t *mHal2Device; + + uint32_t mId; + uint32_t mWidth; + uint32_t mHeight; + uint32_t mFormat; + size_t mSize; + uint32_t mUsage; + uint32_t mMaxProducerBuffers; + uint32_t mMaxConsumerBuffers; + uint32_t mTotalBuffers; + int mFormatRequested; + + /** Debugging information */ + uint32_t mActiveBuffers; + uint32_t mFrameCount; + int64_t mLastTimestamp; + + const camera2_stream_ops *getStreamOps(); + + static ANativeWindow* toANW(const camera2_stream_ops_t *w); + + static int dequeue_buffer(const camera2_stream_ops_t *w, + buffer_handle_t** buffer); + + static int enqueue_buffer(const camera2_stream_ops_t* w, + int64_t timestamp, + buffer_handle_t* buffer); + + static int cancel_buffer(const camera2_stream_ops_t* w, + buffer_handle_t* buffer); + + static int set_crop(const camera2_stream_ops_t* w, + int left, int top, int right, int bottom); + }; // class StreamAdapter + + typedef List > StreamList; + StreamList mStreams; + + /** + * Adapter from an ANativeWindow interface to camera2 device stream ops. + * Also takes care of allocating/deallocating stream in device interface + */ + class ReprocessStreamAdapter: public camera2_stream_in_ops, public virtual RefBase { + public: + ReprocessStreamAdapter(camera2_device_t *d); + + ~ReprocessStreamAdapter(); + + /** + * Create a HAL device reprocess stream based on an existing output stream. + */ + status_t connectToDevice(const sp &outputStream); + + status_t release(); + + /** + * Push buffer into stream for reprocessing. Takes ownership until it notifies + * that the buffer has been released + */ + status_t pushIntoStream(buffer_handle_t *handle, + const wp &releaseListener); + + /** + * Get stream parameters. + * Only valid after a successful connectToDevice call. 
+ */ + int getId() const { return mId; } + uint32_t getWidth() const { return mWidth; } + uint32_t getHeight() const { return mHeight; } + uint32_t getFormat() const { return mFormat; } + + // Dump stream information + status_t dump(int fd, const Vector& args); + + private: + enum { + ERROR = -1, + RELEASED = 0, + ACTIVE + } mState; + + sp mConsumerInterface; + wp mBaseStream; + + struct QueueEntry { + buffer_handle_t *handle; + wp releaseListener; + }; + + List mQueue; + + List mInFlightQueue; + + camera2_device_t *mHal2Device; + + uint32_t mId; + uint32_t mWidth; + uint32_t mHeight; + uint32_t mFormat; + + /** Debugging information */ + uint32_t mActiveBuffers; + uint32_t mFrameCount; + int64_t mLastTimestamp; + + const camera2_stream_in_ops *getStreamOps(); + + static int acquire_buffer(const camera2_stream_in_ops_t *w, + buffer_handle_t** buffer); + + static int release_buffer(const camera2_stream_in_ops_t* w, + buffer_handle_t* buffer); + + }; // class ReprocessStreamAdapter + + typedef List > ReprocessStreamList; + ReprocessStreamList mReprocessStreams; + + // Receives HAL notifications and routes them to the NotificationListener + static void notificationCallback(int32_t msg_type, + int32_t ext1, + int32_t ext2, + int32_t ext3, + void *user); + +}; // class Camera2Device + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp new file mode 100644 index 0000000..0a4a24c --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -0,0 +1,1974 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera3-Device" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 +//#define LOG_NNDEBUG 0 // Per-frame verbose logging + +#ifdef LOG_NNDEBUG +#define ALOGVV(...) ALOGV(__VA_ARGS__) +#else +#define ALOGVV(...) ((void)0) +#endif + +// Convenience macro for transient errors +#define CLOGE(fmt, ...) ALOGE("Camera %d: %s: " fmt, mId, __FUNCTION__, \ + ##__VA_ARGS__) + +// Convenience macros for transitioning to the error state +#define SET_ERR(fmt, ...) setErrorState( \ + "%s: " fmt, __FUNCTION__, \ + ##__VA_ARGS__) +#define SET_ERR_L(fmt, ...) 
setErrorStateLocked( \ + "%s: " fmt, __FUNCTION__, \ + ##__VA_ARGS__) + +#include +#include +#include + +#include "device3/Camera3Device.h" +#include "device3/Camera3OutputStream.h" +#include "device3/Camera3InputStream.h" +#include "device3/Camera3ZslStream.h" + +using namespace android::camera3; + +namespace android { + +Camera3Device::Camera3Device(int id): + mId(id), + mHal3Device(NULL), + mStatus(STATUS_UNINITIALIZED), + mNextResultFrameNumber(0), + mNextShutterFrameNumber(0), + mListener(NULL) +{ + ATRACE_CALL(); + camera3_callback_ops::notify = &sNotify; + camera3_callback_ops::process_capture_result = &sProcessCaptureResult; + ALOGV("%s: Created device for camera %d", __FUNCTION__, id); +} + +Camera3Device::~Camera3Device() +{ + ATRACE_CALL(); + ALOGV("%s: Tearing down for camera id %d", __FUNCTION__, mId); + disconnect(); +} + +int Camera3Device::getId() const { + return mId; +} + +/** + * CameraDeviceBase interface + */ + +status_t Camera3Device::initialize(camera_module_t *module) +{ + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId); + if (mStatus != STATUS_UNINITIALIZED) { + CLOGE("Already initialized!"); + return INVALID_OPERATION; + } + + /** Open HAL device */ + + status_t res; + String8 deviceName = String8::format("%d", mId); + + camera3_device_t *device; + + res = module->common.methods->open(&module->common, deviceName.string(), + reinterpret_cast(&device)); + + if (res != OK) { + SET_ERR_L("Could not open camera: %s (%d)", strerror(-res), res); + return res; + } + + /** Cross-check device version */ + + if (device->common.version != CAMERA_DEVICE_API_VERSION_3_0) { + SET_ERR_L("Could not open camera: " + "Camera device is not version %x, reports %x instead", + CAMERA_DEVICE_API_VERSION_3_0, + device->common.version); + device->common.close(&device->common); + return BAD_VALUE; + } + + camera_info info; + res = module->get_camera_info(mId, &info); + if (res != OK) return res; + + if (info.device_version != device->common.version) { + SET_ERR_L("HAL reporting mismatched camera_info version (%x)" + " and device version (%x).", + device->common.version, info.device_version); + device->common.close(&device->common); + return BAD_VALUE; + } + + /** Initialize device with callback functions */ + + ATRACE_BEGIN("camera3->initialize"); + res = device->ops->initialize(device, this); + ATRACE_END(); + + if (res != OK) { + SET_ERR_L("Unable to initialize HAL device: %s (%d)", + strerror(-res), res); + device->common.close(&device->common); + return BAD_VALUE; + } + + /** Get vendor metadata tags */ + + mVendorTagOps.get_camera_vendor_section_name = NULL; + + ATRACE_BEGIN("camera3->get_metadata_vendor_tag_ops"); + device->ops->get_metadata_vendor_tag_ops(device, &mVendorTagOps); + ATRACE_END(); + + if (mVendorTagOps.get_camera_vendor_section_name != NULL) { + res = set_camera_metadata_vendor_tag_ops(&mVendorTagOps); + if (res != OK) { + SET_ERR_L("Unable to set tag ops: %s (%d)", + strerror(-res), res); + device->common.close(&device->common); + return res; + } + } + + /** Start up request queue thread */ + + mRequestThread = new RequestThread(this, device); + res = mRequestThread->run(String8::format("C3Dev-%d-ReqQueue", mId).string()); + if (res != OK) { + SET_ERR_L("Unable to start request queue thread: %s (%d)", + strerror(-res), res); + device->common.close(&device->common); + mRequestThread.clear(); + return res; + } + + /** Everything is good to go */ + + mDeviceInfo = info.static_camera_characteristics; + 
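As a caller-side illustration of this initialization path, a minimal sketch of bringing up a Camera3Device from the camera HAL module; cameraId and the error handling are placeholders, and hw_get_module() is assumed to be the usual libhardware entry point:

    // Hedged sketch: open the camera HAL module and initialize the device.
    camera_module_t *module;
    if (hw_get_module(CAMERA_HARDWARE_MODULE_ID,
            (const hw_module_t **)&module) != 0) {
        // no camera HAL available
    }
    sp<Camera3Device> device = new Camera3Device(cameraId);
    status_t res = device->initialize(module);   // opens HAL, starts RequestThread
    if (res != OK) {
        ALOGE("Camera %d: initialize failed: %s (%d)",
                cameraId, strerror(-res), res);
    }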
mHal3Device = device; + mStatus = STATUS_IDLE; + mNextStreamId = 0; + mNeedConfig = true; + + return OK; +} + +status_t Camera3Device::disconnect() { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + ALOGV("%s: E", __FUNCTION__); + + status_t res = OK; + if (mStatus == STATUS_UNINITIALIZED) return res; + + if (mStatus == STATUS_ACTIVE || + (mStatus == STATUS_ERROR && mRequestThread != NULL)) { + res = mRequestThread->clearRepeatingRequests(); + if (res != OK) { + SET_ERR_L("Can't stop streaming"); + // Continue to close device even in case of error + } else { + res = waitUntilDrainedLocked(); + if (res != OK) { + SET_ERR_L("Timeout waiting for HAL to drain"); + // Continue to close device even in case of error + } + } + } + assert(mStatus == STATUS_IDLE || mStatus == STATUS_ERROR); + + if (mStatus == STATUS_ERROR) { + CLOGE("Shutting down in an error state"); + } + + if (mRequestThread != NULL) { + mRequestThread->requestExit(); + } + + mOutputStreams.clear(); + mInputStream.clear(); + + if (mRequestThread != NULL) { + if (mStatus != STATUS_ERROR) { + // HAL may be in a bad state, so waiting for request thread + // (which may be stuck in the HAL processCaptureRequest call) + // could be dangerous. + mRequestThread->join(); + } + mRequestThread.clear(); + } + + if (mHal3Device != NULL) { + mHal3Device->common.close(&mHal3Device->common); + mHal3Device = NULL; + } + + mStatus = STATUS_UNINITIALIZED; + + ALOGV("%s: X", __FUNCTION__); + return res; +} + +status_t Camera3Device::dump(int fd, const Vector &args) { + ATRACE_CALL(); + (void)args; + String8 lines; + + const char *status = + mStatus == STATUS_ERROR ? "ERROR" : + mStatus == STATUS_UNINITIALIZED ? "UNINITIALIZED" : + mStatus == STATUS_IDLE ? "IDLE" : + mStatus == STATUS_ACTIVE ? "ACTIVE" : + "Unknown"; + lines.appendFormat(" Device status: %s\n", status); + if (mStatus == STATUS_ERROR) { + lines.appendFormat(" Error cause: %s\n", mErrorCause.string()); + } + lines.appendFormat(" Stream configuration:\n"); + + if (mInputStream != NULL) { + write(fd, lines.string(), lines.size()); + mInputStream->dump(fd, args); + } else { + lines.appendFormat(" No input stream.\n"); + write(fd, lines.string(), lines.size()); + } + for (size_t i = 0; i < mOutputStreams.size(); i++) { + mOutputStreams[i]->dump(fd,args); + } + + lines = String8(" In-flight requests:\n"); + if (mInFlightMap.size() == 0) { + lines.append(" None\n"); + } else { + for (size_t i = 0; i < mInFlightMap.size(); i++) { + InFlightRequest r = mInFlightMap.valueAt(i); + lines.appendFormat(" Frame %d | Timestamp: %lld, metadata" + " arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i), + r.captureTimestamp, r.haveResultMetadata ? "true" : "false", + r.numBuffersLeft); + } + } + write(fd, lines.string(), lines.size()); + + if (mHal3Device != NULL) { + lines = String8(" HAL device dump:\n"); + write(fd, lines.string(), lines.size()); + mHal3Device->ops->dump(mHal3Device, fd); + } + + return OK; +} + +const CameraMetadata& Camera3Device::info() const { + ALOGVV("%s: E", __FUNCTION__); + if (CC_UNLIKELY(mStatus == STATUS_UNINITIALIZED || + mStatus == STATUS_ERROR)) { + ALOGW("%s: Access to static info %s!", __FUNCTION__, + mStatus == STATUS_ERROR ? 
+ "when in error state" : "before init"); + } + return mDeviceInfo; +} + +status_t Camera3Device::capture(CameraMetadata &request) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + // TODO: take ownership of the request + + switch (mStatus) { + case STATUS_ERROR: + CLOGE("Device has encountered a serious error"); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + CLOGE("Device not initialized"); + return INVALID_OPERATION; + case STATUS_IDLE: + case STATUS_ACTIVE: + // OK + break; + default: + SET_ERR_L("Unexpected status: %d", mStatus); + return INVALID_OPERATION; + } + + sp newRequest = setUpRequestLocked(request); + if (newRequest == NULL) { + CLOGE("Can't create capture request"); + return BAD_VALUE; + } + + return mRequestThread->queueRequest(newRequest); +} + + +status_t Camera3Device::setStreamingRequest(const CameraMetadata &request) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + switch (mStatus) { + case STATUS_ERROR: + CLOGE("Device has encountered a serious error"); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + CLOGE("Device not initialized"); + return INVALID_OPERATION; + case STATUS_IDLE: + case STATUS_ACTIVE: + // OK + break; + default: + SET_ERR_L("Unexpected status: %d", mStatus); + return INVALID_OPERATION; + } + + sp newRepeatingRequest = setUpRequestLocked(request); + if (newRepeatingRequest == NULL) { + CLOGE("Can't create repeating request"); + return BAD_VALUE; + } + + RequestList newRepeatingRequests; + newRepeatingRequests.push_back(newRepeatingRequest); + + return mRequestThread->setRepeatingRequests(newRepeatingRequests); +} + + +sp Camera3Device::setUpRequestLocked( + const CameraMetadata &request) { + status_t res; + + if (mStatus == STATUS_IDLE) { + res = configureStreamsLocked(); + if (res != OK) { + SET_ERR_L("Can't set up streams: %s (%d)", strerror(-res), res); + return NULL; + } + } + + sp newRequest = createCaptureRequest(request); + return newRequest; +} + +status_t Camera3Device::clearStreamingRequest() { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + switch (mStatus) { + case STATUS_ERROR: + CLOGE("Device has encountered a serious error"); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + CLOGE("Device not initialized"); + return INVALID_OPERATION; + case STATUS_IDLE: + case STATUS_ACTIVE: + // OK + break; + default: + SET_ERR_L("Unexpected status: %d", mStatus); + return INVALID_OPERATION; + } + + return mRequestThread->clearRepeatingRequests(); +} + +status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) { + ATRACE_CALL(); + + return mRequestThread->waitUntilRequestProcessed(requestId, timeout); +} + +status_t Camera3Device::createInputStream( + uint32_t width, uint32_t height, int format, int *id) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + status_t res; + bool wasActive = false; + + switch (mStatus) { + case STATUS_ERROR: + ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + ALOGE("%s: Device not initialized", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_IDLE: + // OK + break; + case STATUS_ACTIVE: + ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); + mRequestThread->setPaused(true); + res = waitUntilDrainedLocked(); + if (res != OK) { + ALOGE("%s: Can't pause captures to reconfigure streams!", + __FUNCTION__); + mStatus = STATUS_ERROR; + return res; + } + wasActive = true; + break; + default: + ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + return 
INVALID_OPERATION; + } + assert(mStatus == STATUS_IDLE); + + if (mInputStream != 0) { + ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__); + return INVALID_OPERATION; + } + + sp newStream = new Camera3InputStream(mNextStreamId, + width, height, format); + + mInputStream = newStream; + + *id = mNextStreamId++; + + // Continue captures if active at start + if (wasActive) { + ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__); + res = configureStreamsLocked(); + if (res != OK) { + ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)", + __FUNCTION__, mNextStreamId, strerror(-res), res); + return res; + } + mRequestThread->setPaused(false); + } + + return OK; +} + + +status_t Camera3Device::createZslStream( + uint32_t width, uint32_t height, + int depth, + /*out*/ + int *id, + sp* zslStream) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + status_t res; + bool wasActive = false; + + switch (mStatus) { + case STATUS_ERROR: + ALOGE("%s: Device has encountered a serious error", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + ALOGE("%s: Device not initialized", __FUNCTION__); + return INVALID_OPERATION; + case STATUS_IDLE: + // OK + break; + case STATUS_ACTIVE: + ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); + mRequestThread->setPaused(true); + res = waitUntilDrainedLocked(); + if (res != OK) { + ALOGE("%s: Can't pause captures to reconfigure streams!", + __FUNCTION__); + mStatus = STATUS_ERROR; + return res; + } + wasActive = true; + break; + default: + ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + return INVALID_OPERATION; + } + assert(mStatus == STATUS_IDLE); + + if (mInputStream != 0) { + ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__); + return INVALID_OPERATION; + } + + sp newStream = new Camera3ZslStream(mNextStreamId, + width, height, depth); + + res = mOutputStreams.add(mNextStreamId, newStream); + if (res < 0) { + ALOGE("%s: Can't add new stream to set: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + mInputStream = newStream; + + *id = mNextStreamId++; + *zslStream = newStream; + + // Continue captures if active at start + if (wasActive) { + ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__); + res = configureStreamsLocked(); + if (res != OK) { + ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)", + __FUNCTION__, mNextStreamId, strerror(-res), res); + return res; + } + mRequestThread->setPaused(false); + } + + return OK; +} + +status_t Camera3Device::createStream(sp consumer, + uint32_t width, uint32_t height, int format, size_t size, int *id) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + status_t res; + bool wasActive = false; + + switch (mStatus) { + case STATUS_ERROR: + CLOGE("Device has encountered a serious error"); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + CLOGE("Device not initialized"); + return INVALID_OPERATION; + case STATUS_IDLE: + // OK + break; + case STATUS_ACTIVE: + ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); + mRequestThread->setPaused(true); + res = waitUntilDrainedLocked(); + if (res != OK) { + ALOGE("%s: Can't pause captures to reconfigure streams!", + __FUNCTION__); + return res; + } + wasActive = true; + break; + default: + SET_ERR_L("Unexpected status: %d", mStatus); + return INVALID_OPERATION; + } + assert(mStatus == STATUS_IDLE); + + sp newStream; + if (format == HAL_PIXEL_FORMAT_BLOB) { + newStream = new Camera3OutputStream(mNextStreamId, 
consumer, + width, height, size, format); + } else { + newStream = new Camera3OutputStream(mNextStreamId, consumer, + width, height, format); + } + + res = mOutputStreams.add(mNextStreamId, newStream); + if (res < 0) { + SET_ERR_L("Can't add new stream to set: %s (%d)", strerror(-res), res); + return res; + } + + *id = mNextStreamId++; + mNeedConfig = true; + + // Continue captures if active at start + if (wasActive) { + ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__); + res = configureStreamsLocked(); + if (res != OK) { + CLOGE("Can't reconfigure device for new stream %d: %s (%d)", + mNextStreamId, strerror(-res), res); + return res; + } + mRequestThread->setPaused(false); + } + + return OK; +} + +status_t Camera3Device::createReprocessStreamFromStream(int outputId, int *id) { + ATRACE_CALL(); + (void)outputId; (void)id; + + CLOGE("Unimplemented"); + return INVALID_OPERATION; +} + + +status_t Camera3Device::getStreamInfo(int id, + uint32_t *width, uint32_t *height, uint32_t *format) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + switch (mStatus) { + case STATUS_ERROR: + CLOGE("Device has encountered a serious error"); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + CLOGE("Device not initialized!"); + return INVALID_OPERATION; + case STATUS_IDLE: + case STATUS_ACTIVE: + // OK + break; + default: + SET_ERR_L("Unexpected status: %d", mStatus); + return INVALID_OPERATION; + } + + ssize_t idx = mOutputStreams.indexOfKey(id); + if (idx == NAME_NOT_FOUND) { + CLOGE("Stream %d is unknown", id); + return idx; + } + + if (width) *width = mOutputStreams[idx]->getWidth(); + if (height) *height = mOutputStreams[idx]->getHeight(); + if (format) *format = mOutputStreams[idx]->getFormat(); + + return OK; +} + +status_t Camera3Device::setStreamTransform(int id, + int transform) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + switch (mStatus) { + case STATUS_ERROR: + CLOGE("Device has encountered a serious error"); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + CLOGE("Device not initialized"); + return INVALID_OPERATION; + case STATUS_IDLE: + case STATUS_ACTIVE: + // OK + break; + default: + SET_ERR_L("Unexpected status: %d", mStatus); + return INVALID_OPERATION; + } + + ssize_t idx = mOutputStreams.indexOfKey(id); + if (idx == NAME_NOT_FOUND) { + CLOGE("Stream %d does not exist", + id); + return BAD_VALUE; + } + + return mOutputStreams.editValueAt(idx)->setTransform(transform); +} + +status_t Camera3Device::deleteStream(int id) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + status_t res; + + ALOGV("%s: Camera %d: Deleting stream %d", __FUNCTION__, mId, id); + + // CameraDevice semantics require device to already be idle before + // deleteStream is called, unlike for createStream. + if (mStatus != STATUS_IDLE) { + ALOGV("%s: Camera %d: Device not idle", __FUNCTION__, mId); + return -EBUSY; + } + + sp deletedStream; + if (mInputStream != NULL && id == mInputStream->getId()) { + deletedStream = mInputStream; + mInputStream.clear(); + } else { + ssize_t idx = mOutputStreams.indexOfKey(id); + if (idx == NAME_NOT_FOUND) { + CLOGE("Stream %d does not exist", id); + return BAD_VALUE; + } + deletedStream = mOutputStreams.editValueAt(idx); + mOutputStreams.removeItem(id); + } + + // Free up the stream endpoint so that it can be used by some other stream + res = deletedStream->disconnect(); + if (res != OK) { + SET_ERR_L("Can't disconnect deleted stream %d", id); + // fall through since we want to still list the stream as deleted. 
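A minimal caller-side sketch of the idle-before-delete contract enforced here; device and streamId are placeholders:

    // Hedged sketch: drain the device before deleting a stream.
    status_t res = device->waitUntilDrained();
    if (res == OK) {
        res = device->deleteStream(streamId);
        if (res == -EBUSY) {
            // still capturing: clear repeating requests, drain, and retry
        }
    }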
+ } + mDeletedStreams.add(deletedStream); + mNeedConfig = true; + + return res; +} + +status_t Camera3Device::deleteReprocessStream(int id) { + ATRACE_CALL(); + (void)id; + + CLOGE("Unimplemented"); + return INVALID_OPERATION; +} + + +status_t Camera3Device::createDefaultRequest(int templateId, + CameraMetadata *request) { + ATRACE_CALL(); + ALOGV("%s: for template %d", __FUNCTION__, templateId); + Mutex::Autolock l(mLock); + + switch (mStatus) { + case STATUS_ERROR: + CLOGE("Device has encountered a serious error"); + return INVALID_OPERATION; + case STATUS_UNINITIALIZED: + CLOGE("Device is not initialized!"); + return INVALID_OPERATION; + case STATUS_IDLE: + case STATUS_ACTIVE: + // OK + break; + default: + SET_ERR_L("Unexpected status: %d", mStatus); + return INVALID_OPERATION; + } + + const camera_metadata_t *rawRequest; + ATRACE_BEGIN("camera3->construct_default_request_settings"); + rawRequest = mHal3Device->ops->construct_default_request_settings( + mHal3Device, templateId); + ATRACE_END(); + if (rawRequest == NULL) { + SET_ERR_L("HAL is unable to construct default settings for template %d", + templateId); + return DEAD_OBJECT; + } + *request = rawRequest; + + return OK; +} + +status_t Camera3Device::waitUntilDrained() { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + return waitUntilDrainedLocked(); +} + +status_t Camera3Device::waitUntilDrainedLocked() { + ATRACE_CALL(); + status_t res; + + switch (mStatus) { + case STATUS_UNINITIALIZED: + case STATUS_IDLE: + ALOGV("%s: Already idle", __FUNCTION__); + return OK; + case STATUS_ERROR: + case STATUS_ACTIVE: + // Need to shut down + break; + default: + SET_ERR_L("Unexpected status: %d",mStatus); + return INVALID_OPERATION; + } + + if (mRequestThread != NULL) { + res = mRequestThread->waitUntilPaused(kShutdownTimeout); + if (res != OK) { + SET_ERR_L("Can't stop request thread in %f seconds!", + kShutdownTimeout/1e9); + return res; + } + } + if (mInputStream != NULL) { + res = mInputStream->waitUntilIdle(kShutdownTimeout); + if (res != OK) { + SET_ERR_L("Can't idle input stream %d in %f seconds!", + mInputStream->getId(), kShutdownTimeout/1e9); + return res; + } + } + for (size_t i = 0; i < mOutputStreams.size(); i++) { + res = mOutputStreams.editValueAt(i)->waitUntilIdle(kShutdownTimeout); + if (res != OK) { + SET_ERR_L("Can't idle output stream %d in %f seconds!", + mOutputStreams.keyAt(i), kShutdownTimeout/1e9); + return res; + } + } + + if (mStatus != STATUS_ERROR) { + mStatus = STATUS_IDLE; + } + + return OK; +} + +status_t Camera3Device::setNotifyCallback(NotificationListener *listener) { + ATRACE_CALL(); + Mutex::Autolock l(mOutputLock); + + if (listener != NULL && mListener != NULL) { + ALOGW("%s: Replacing old callback listener", __FUNCTION__); + } + mListener = listener; + + return OK; +} + +bool Camera3Device::willNotify3A() { + return false; +} + +status_t Camera3Device::waitForNextFrame(nsecs_t timeout) { + ATRACE_CALL(); + status_t res; + Mutex::Autolock l(mOutputLock); + + while (mResultQueue.empty()) { + res = mResultSignal.waitRelative(mOutputLock, timeout); + if (res == TIMED_OUT) { + return res; + } else if (res != OK) { + ALOGW("%s: Camera %d: No frame in %lld ns: %s (%d)", + __FUNCTION__, mId, timeout, strerror(-res), res); + return res; + } + } + return OK; +} + +status_t Camera3Device::getNextFrame(CameraMetadata *frame) { + ATRACE_CALL(); + Mutex::Autolock l(mOutputLock); + + if (mResultQueue.empty()) { + return NOT_ENOUGH_DATA; + } + + CameraMetadata &result = *(mResultQueue.begin()); + frame->acquire(result); 
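A minimal sketch of the consumer loop implied by waitForNextFrame() and getNextFrame(); device and kWaitTimeout are placeholders:

    // Hedged sketch: block for a new result, then drain the result queue.
    CameraMetadata frame;
    status_t res = device->waitForNextFrame(kWaitTimeout);
    if (res == OK) {
        while (device->getNextFrame(&frame) == OK) {
            // process capture result metadata here
        }
    }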
+ mResultQueue.erase(mResultQueue.begin()); + + return OK; +} + +status_t Camera3Device::triggerAutofocus(uint32_t id) { + ATRACE_CALL(); + + ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id); + // Mix-in this trigger into the next request and only the next request. + RequestTrigger trigger[] = { + { + ANDROID_CONTROL_AF_TRIGGER, + ANDROID_CONTROL_AF_TRIGGER_START + }, + { + ANDROID_CONTROL_AF_TRIGGER_ID, + static_cast(id) + }, + }; + + return mRequestThread->queueTrigger(trigger, + sizeof(trigger)/sizeof(trigger[0])); +} + +status_t Camera3Device::triggerCancelAutofocus(uint32_t id) { + ATRACE_CALL(); + + ALOGV("%s: Triggering cancel autofocus, id %d", __FUNCTION__, id); + // Mix-in this trigger into the next request and only the next request. + RequestTrigger trigger[] = { + { + ANDROID_CONTROL_AF_TRIGGER, + ANDROID_CONTROL_AF_TRIGGER_CANCEL + }, + { + ANDROID_CONTROL_AF_TRIGGER_ID, + static_cast(id) + }, + }; + + return mRequestThread->queueTrigger(trigger, + sizeof(trigger)/sizeof(trigger[0])); +} + +status_t Camera3Device::triggerPrecaptureMetering(uint32_t id) { + ATRACE_CALL(); + + ALOGV("%s: Triggering precapture metering, id %d", __FUNCTION__, id); + // Mix-in this trigger into the next request and only the next request. + RequestTrigger trigger[] = { + { + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START + }, + { + ANDROID_CONTROL_AE_PRECAPTURE_ID, + static_cast(id) + }, + }; + + return mRequestThread->queueTrigger(trigger, + sizeof(trigger)/sizeof(trigger[0])); +} + +status_t Camera3Device::pushReprocessBuffer(int reprocessStreamId, + buffer_handle_t *buffer, wp listener) { + ATRACE_CALL(); + (void)reprocessStreamId; (void)buffer; (void)listener; + + CLOGE("Unimplemented"); + return INVALID_OPERATION; +} + +/** + * Camera3Device private methods + */ + +sp Camera3Device::createCaptureRequest( + const CameraMetadata &request) { + ATRACE_CALL(); + status_t res; + + sp newRequest = new CaptureRequest; + newRequest->mSettings = request; + + camera_metadata_entry_t inputStreams = + newRequest->mSettings.find(ANDROID_REQUEST_INPUT_STREAMS); + if (inputStreams.count > 0) { + if (mInputStream == NULL || + mInputStream->getId() != inputStreams.data.u8[0]) { + CLOGE("Request references unknown input stream %d", + inputStreams.data.u8[0]); + return NULL; + } + // Lazy completion of stream configuration (allocation/registration) + // on first use + if (mInputStream->isConfiguring()) { + res = mInputStream->finishConfiguration(mHal3Device); + if (res != OK) { + SET_ERR_L("Unable to finish configuring input stream %d:" + " %s (%d)", + mInputStream->getId(), strerror(-res), res); + return NULL; + } + } + + newRequest->mInputStream = mInputStream; + newRequest->mSettings.erase(ANDROID_REQUEST_INPUT_STREAMS); + } + + camera_metadata_entry_t streams = + newRequest->mSettings.find(ANDROID_REQUEST_OUTPUT_STREAMS); + if (streams.count == 0) { + CLOGE("Zero output streams specified!"); + return NULL; + } + + for (size_t i = 0; i < streams.count; i++) { + int idx = mOutputStreams.indexOfKey(streams.data.u8[i]); + if (idx == NAME_NOT_FOUND) { + CLOGE("Request references unknown stream %d", + streams.data.u8[i]); + return NULL; + } + sp stream = + mOutputStreams.editValueAt(idx); + + // Lazy completion of stream configuration (allocation/registration) + // on first use + if (stream->isConfiguring()) { + res = stream->finishConfiguration(mHal3Device); + if (res != OK) { + SET_ERR_L("Unable to finish configuring stream %d: %s (%d)", + stream->getId(), 
strerror(-res), res); + return NULL; + } + } + + newRequest->mOutputStreams.push(stream); + } + newRequest->mSettings.erase(ANDROID_REQUEST_OUTPUT_STREAMS); + + return newRequest; +} + +status_t Camera3Device::configureStreamsLocked() { + ATRACE_CALL(); + status_t res; + + if (mStatus != STATUS_IDLE) { + CLOGE("Not idle"); + return INVALID_OPERATION; + } + + if (!mNeedConfig) { + ALOGV("%s: Skipping config, no stream changes", __FUNCTION__); + mStatus = STATUS_ACTIVE; + return OK; + } + + // Start configuring the streams + + camera3_stream_configuration config; + + config.num_streams = (mInputStream != NULL) + mOutputStreams.size(); + + Vector streams; + streams.setCapacity(config.num_streams); + + if (mInputStream != NULL) { + camera3_stream_t *inputStream; + inputStream = mInputStream->startConfiguration(); + if (inputStream == NULL) { + SET_ERR_L("Can't start input stream configuration"); + return INVALID_OPERATION; + } + streams.add(inputStream); + } + + for (size_t i = 0; i < mOutputStreams.size(); i++) { + + // Don't configure bidi streams twice, nor add them twice to the list + if (mOutputStreams[i].get() == + static_cast(mInputStream.get())) { + + config.num_streams--; + continue; + } + + camera3_stream_t *outputStream; + outputStream = mOutputStreams.editValueAt(i)->startConfiguration(); + if (outputStream == NULL) { + SET_ERR_L("Can't start output stream configuration"); + return INVALID_OPERATION; + } + streams.add(outputStream); + } + + config.streams = streams.editArray(); + + // Do the HAL configuration; will potentially touch stream + // max_buffers, usage, priv fields. + ATRACE_BEGIN("camera3->configure_streams"); + res = mHal3Device->ops->configure_streams(mHal3Device, &config); + ATRACE_END(); + + if (res != OK) { + SET_ERR_L("Unable to configure streams with HAL: %s (%d)", + strerror(-res), res); + return res; + } + + // Finish all stream configuration immediately. + // TODO: Try to relax this later back to lazy completion, which should be + // faster + + if (mInputStream != NULL && mInputStream->isConfiguring()) { + res = mInputStream->finishConfiguration(mHal3Device); + if (res != OK) { + SET_ERR_L("Can't finish configuring input stream %d: %s (%d)", + mInputStream->getId(), strerror(-res), res); + return res; + } + } + + for (size_t i = 0; i < mOutputStreams.size(); i++) { + sp outputStream = + mOutputStreams.editValueAt(i); + if (outputStream->isConfiguring()) { + res = outputStream->finishConfiguration(mHal3Device); + if (res != OK) { + SET_ERR_L("Can't finish configuring output stream %d: %s (%d)", + outputStream->getId(), strerror(-res), res); + return res; + } + } + } + + // Request thread needs to know to avoid using repeat-last-settings protocol + // across configure_streams() calls + mRequestThread->configurationComplete(); + + // Finish configuring the streams lazily on first reference + + mStatus = STATUS_ACTIVE; + mNeedConfig = false; + + return OK; +} + +void Camera3Device::setErrorState(const char *fmt, ...) { + Mutex::Autolock l(mLock); + va_list args; + va_start(args, fmt); + + setErrorStateLockedV(fmt, args); + + va_end(args); +} + +void Camera3Device::setErrorStateV(const char *fmt, va_list args) { + Mutex::Autolock l(mLock); + setErrorStateLockedV(fmt, args); +} + +void Camera3Device::setErrorStateLocked(const char *fmt, ...) 
{ + va_list args; + va_start(args, fmt); + + setErrorStateLockedV(fmt, args); + + va_end(args); +} + +void Camera3Device::setErrorStateLockedV(const char *fmt, va_list args) { + // Print out all error messages to log + String8 errorCause = String8::formatV(fmt, args); + ALOGE("Camera %d: %s", mId, errorCause.string()); + + // But only do error state transition steps for the first error + if (mStatus == STATUS_ERROR || mStatus == STATUS_UNINITIALIZED) return; + + mErrorCause = errorCause; + + mRequestThread->setPaused(true); + mStatus = STATUS_ERROR; +} + +/** + * In-flight request management + */ + +status_t Camera3Device::registerInFlight(int32_t frameNumber, + int32_t numBuffers) { + ATRACE_CALL(); + Mutex::Autolock l(mInFlightLock); + + ssize_t res; + res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers)); + if (res < 0) return res; + + return OK; +} + +/** + * Camera HAL device callback methods + */ + +void Camera3Device::processCaptureResult(const camera3_capture_result *result) { + ATRACE_CALL(); + + status_t res; + + uint32_t frameNumber = result->frame_number; + if (result->result == NULL && result->num_output_buffers == 0) { + SET_ERR("No result data provided by HAL for frame %d", + frameNumber); + return; + } + + // Get capture timestamp from list of in-flight requests, where it was added + // by the shutter notification for this frame. Then update the in-flight + // status and remove the in-flight entry if all result data has been + // received. + nsecs_t timestamp = 0; + { + Mutex::Autolock l(mInFlightLock); + ssize_t idx = mInFlightMap.indexOfKey(frameNumber); + if (idx == NAME_NOT_FOUND) { + SET_ERR("Unknown frame number for capture result: %d", + frameNumber); + return; + } + InFlightRequest &request = mInFlightMap.editValueAt(idx); + timestamp = request.captureTimestamp; + if (timestamp == 0) { + SET_ERR("Called before shutter notify for frame %d", + frameNumber); + return; + } + + if (result->result != NULL) { + if (request.haveResultMetadata) { + SET_ERR("Called multiple times with metadata for frame %d", + frameNumber); + return; + } + request.haveResultMetadata = true; + } + + request.numBuffersLeft -= result->num_output_buffers; + + if (request.numBuffersLeft < 0) { + SET_ERR("Too many buffers returned for frame %d", + frameNumber); + return; + } + + if (request.haveResultMetadata && request.numBuffersLeft == 0) { + ATRACE_ASYNC_END("frame capture", frameNumber); + mInFlightMap.removeItemsAt(idx, 1); + } + + // Sanity check - if we have too many in-flight frames, something has + // likely gone wrong + if (mInFlightMap.size() > kInFlightWarnLimit) { + CLOGE("In-flight list too large: %d", mInFlightMap.size()); + } + + } + + // Process the result metadata, if provided + if (result->result != NULL) { + Mutex::Autolock l(mOutputLock); + + if (frameNumber != mNextResultFrameNumber) { + SET_ERR("Out-of-order capture result metadata submitted! 
" + "(got frame number %d, expecting %d)", + frameNumber, mNextResultFrameNumber); + return; + } + mNextResultFrameNumber++; + + CameraMetadata &captureResult = + *mResultQueue.insert(mResultQueue.end(), CameraMetadata()); + + captureResult = result->result; + if (captureResult.update(ANDROID_REQUEST_FRAME_COUNT, + (int32_t*)&frameNumber, 1) != OK) { + SET_ERR("Failed to set frame# in metadata (%d)", + frameNumber); + } else { + ALOGVV("%s: Camera %d: Set frame# in metadata (%d)", + __FUNCTION__, mId, frameNumber); + } + + // Check that there's a timestamp in the result metadata + + camera_metadata_entry entry = + captureResult.find(ANDROID_SENSOR_TIMESTAMP); + if (entry.count == 0) { + SET_ERR("No timestamp provided by HAL for frame %d!", + frameNumber); + } else if (timestamp != entry.data.i64[0]) { + SET_ERR("Timestamp mismatch between shutter notify and result" + " metadata for frame %d (%lld vs %lld respectively)", + frameNumber, timestamp, entry.data.i64[0]); + } + } // scope for mOutputLock + + // Return completed buffers to their streams with the timestamp + + for (size_t i = 0; i < result->num_output_buffers; i++) { + Camera3Stream *stream = + Camera3Stream::cast(result->output_buffers[i].stream); + res = stream->returnBuffer(result->output_buffers[i], timestamp); + // Note: stream may be deallocated at this point, if this buffer was the + // last reference to it. + if (res != OK) { + SET_ERR("Can't return buffer %d for frame %d to its stream: " + " %s (%d)", i, frameNumber, strerror(-res), res); + } + } + + // Finally, signal any waiters for new frames + + if (result->result != NULL) { + mResultSignal.signal(); + } + +} + + + +void Camera3Device::notify(const camera3_notify_msg *msg) { + ATRACE_CALL(); + NotificationListener *listener; + { + Mutex::Autolock l(mOutputLock); + listener = mListener; + } + + if (msg == NULL) { + SET_ERR("HAL sent NULL notify message!"); + return; + } + + switch (msg->type) { + case CAMERA3_MSG_ERROR: { + int streamId = 0; + if (msg->message.error.error_stream != NULL) { + Camera3Stream *stream = + Camera3Stream::cast( + msg->message.error.error_stream); + streamId = stream->getId(); + } + ALOGV("Camera %d: %s: HAL error, frame %d, stream %d: %d", + mId, __FUNCTION__, msg->message.error.frame_number, + streamId, msg->message.error.error_code); + if (listener != NULL) { + listener->notifyError(msg->message.error.error_code, + msg->message.error.frame_number, streamId); + } + break; + } + case CAMERA3_MSG_SHUTTER: { + ssize_t idx; + uint32_t frameNumber = msg->message.shutter.frame_number; + nsecs_t timestamp = msg->message.shutter.timestamp; + // Verify ordering of shutter notifications + { + Mutex::Autolock l(mOutputLock); + if (frameNumber != mNextShutterFrameNumber) { + SET_ERR("Shutter notification out-of-order. 
Expected " + "notification for frame %d, got frame %d", + mNextShutterFrameNumber, frameNumber); + break; + } + mNextShutterFrameNumber++; + } + + // Set timestamp for the request in the in-flight tracking + { + Mutex::Autolock l(mInFlightLock); + idx = mInFlightMap.indexOfKey(frameNumber); + if (idx >= 0) { + mInFlightMap.editValueAt(idx).captureTimestamp = timestamp; + } + } + if (idx < 0) { + SET_ERR("Shutter notification for non-existent frame number %d", + frameNumber); + break; + } + ALOGVV("Camera %d: %s: Shutter fired for frame %d at %lld", + mId, __FUNCTION__, frameNumber, timestamp); + // Call listener, if any + if (listener != NULL) { + listener->notifyShutter(frameNumber, timestamp); + } + break; + } + default: + SET_ERR("Unknown notify message from HAL: %d", + msg->type); + } +} + +/** + * RequestThread inner class methods + */ + +Camera3Device::RequestThread::RequestThread(wp parent, + camera3_device_t *hal3Device) : + Thread(false), + mParent(parent), + mHal3Device(hal3Device), + mId(getId(parent)), + mReconfigured(false), + mDoPause(false), + mPaused(true), + mFrameNumber(0), + mLatestRequestId(NAME_NOT_FOUND) { +} + +void Camera3Device::RequestThread::configurationComplete() { + Mutex::Autolock l(mRequestLock); + mReconfigured = true; +} + +status_t Camera3Device::RequestThread::queueRequest( + sp request) { + Mutex::Autolock l(mRequestLock); + mRequestQueue.push_back(request); + + return OK; +} + + +status_t Camera3Device::RequestThread::queueTrigger( + RequestTrigger trigger[], + size_t count) { + + Mutex::Autolock l(mTriggerMutex); + status_t ret; + + for (size_t i = 0; i < count; ++i) { + ret = queueTriggerLocked(trigger[i]); + + if (ret != OK) { + return ret; + } + } + + return OK; +} + +int Camera3Device::RequestThread::getId(const wp &device) { + sp d = device.promote(); + if (d != NULL) return d->mId; + return 0; +} + +status_t Camera3Device::RequestThread::queueTriggerLocked( + RequestTrigger trigger) { + + uint32_t tag = trigger.metadataTag; + ssize_t index = mTriggerMap.indexOfKey(tag); + + switch (trigger.getTagType()) { + case TYPE_BYTE: + // fall-through + case TYPE_INT32: + break; + default: + ALOGE("%s: Type not supported: 0x%x", __FUNCTION__, + trigger.getTagType()); + return INVALID_OPERATION; + } + + /** + * Collect only the latest trigger, since we only have 1 field + * in the request settings per trigger tag, and can't send more than 1 + * trigger per request. 
+ */ + if (index != NAME_NOT_FOUND) { + mTriggerMap.editValueAt(index) = trigger; + } else { + mTriggerMap.add(tag, trigger); + } + + return OK; +} + +status_t Camera3Device::RequestThread::setRepeatingRequests( + const RequestList &requests) { + Mutex::Autolock l(mRequestLock); + mRepeatingRequests.clear(); + mRepeatingRequests.insert(mRepeatingRequests.begin(), + requests.begin(), requests.end()); + return OK; +} + +status_t Camera3Device::RequestThread::clearRepeatingRequests() { + Mutex::Autolock l(mRequestLock); + mRepeatingRequests.clear(); + return OK; +} + +void Camera3Device::RequestThread::setPaused(bool paused) { + Mutex::Autolock l(mPauseLock); + mDoPause = paused; + mDoPauseSignal.signal(); +} + +status_t Camera3Device::RequestThread::waitUntilPaused(nsecs_t timeout) { + ATRACE_CALL(); + status_t res; + Mutex::Autolock l(mPauseLock); + while (!mPaused) { + res = mPausedSignal.waitRelative(mPauseLock, timeout); + if (res == TIMED_OUT) { + return res; + } + } + return OK; +} + +status_t Camera3Device::RequestThread::waitUntilRequestProcessed( + int32_t requestId, nsecs_t timeout) { + Mutex::Autolock l(mLatestRequestMutex); + status_t res; + while (mLatestRequestId != requestId) { + nsecs_t startTime = systemTime(); + + res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout); + if (res != OK) return res; + + timeout -= (systemTime() - startTime); + } + + return OK; +} + + + +bool Camera3Device::RequestThread::threadLoop() { + + status_t res; + + // Handle paused state. + if (waitIfPaused()) { + return true; + } + + // Get work to do + + sp nextRequest = waitForNextRequest(); + if (nextRequest == NULL) { + return true; + } + + // Create request to HAL + camera3_capture_request_t request = camera3_capture_request_t(); + Vector outputBuffers; + + // Insert any queued triggers (before metadata is locked) + int32_t triggerCount; + res = insertTriggers(nextRequest); + if (res < 0) { + SET_ERR("RequestThread: Unable to insert triggers " + "(capture request %d, HAL device: %s (%d)", + (mFrameNumber+1), strerror(-res), res); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return false; + } + triggerCount = res; + + bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0); + + // If the request is the same as last, or we had triggers last time + if (mPrevRequest != nextRequest || triggersMixedIn) { + /** + * The request should be presorted so accesses in HAL + * are O(logn). Sidenote, sorting a sorted metadata is nop. 
+ */ + nextRequest->mSettings.sort(); + request.settings = nextRequest->mSettings.getAndLock(); + mPrevRequest = nextRequest; + ALOGVV("%s: Request settings are NEW", __FUNCTION__); + + IF_ALOGV() { + camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t(); + find_camera_metadata_ro_entry( + request.settings, + ANDROID_CONTROL_AF_TRIGGER, + &e + ); + if (e.count > 0) { + ALOGV("%s: Request (frame num %d) had AF trigger 0x%x", + __FUNCTION__, + mFrameNumber+1, + e.data.u8[0]); + } + } + } else { + // leave request.settings NULL to indicate 'reuse latest given' + ALOGVV("%s: Request settings are REUSED", + __FUNCTION__); + } + + camera3_stream_buffer_t inputBuffer; + + // Fill in buffers + + if (nextRequest->mInputStream != NULL) { + request.input_buffer = &inputBuffer; + res = nextRequest->mInputStream->getInputBuffer(&inputBuffer); + if (res != OK) { + SET_ERR("RequestThread: Can't get input buffer, skipping request:" + " %s (%d)", strerror(-res), res); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return true; + } + } else { + request.input_buffer = NULL; + } + + outputBuffers.insertAt(camera3_stream_buffer_t(), 0, + nextRequest->mOutputStreams.size()); + request.output_buffers = outputBuffers.array(); + for (size_t i = 0; i < nextRequest->mOutputStreams.size(); i++) { + res = nextRequest->mOutputStreams.editItemAt(i)-> + getBuffer(&outputBuffers.editItemAt(i)); + if (res != OK) { + SET_ERR("RequestThread: Can't get output buffer, skipping request:" + "%s (%d)", strerror(-res), res); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return true; + } + request.num_output_buffers++; + } + + request.frame_number = mFrameNumber++; + + // Log request in the in-flight queue + sp parent = mParent.promote(); + if (parent == NULL) { + CLOGE("RequestThread: Parent is gone"); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return false; + } + + res = parent->registerInFlight(request.frame_number, + request.num_output_buffers); + if (res != OK) { + SET_ERR("RequestThread: Unable to register new in-flight request:" + " %s (%d)", strerror(-res), res); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return false; + } + + // Submit request and block until ready for next one + ATRACE_ASYNC_BEGIN("frame capture", request.frame_number); + ATRACE_BEGIN("camera3->process_capture_request"); + res = mHal3Device->ops->process_capture_request(mHal3Device, &request); + ATRACE_END(); + + if (res != OK) { + SET_ERR("RequestThread: Unable to submit capture request %d to HAL" + " device: %s (%d)", request.frame_number, strerror(-res), res); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return false; + } + + if (request.settings != NULL) { + nextRequest->mSettings.unlock(request.settings); + } + + // Remove any previously queued triggers (after unlock) + res = removeTriggers(mPrevRequest); + if (res != OK) { + SET_ERR("RequestThread: Unable to remove triggers " + "(capture request %d, HAL device: %s (%d)", + request.frame_number, strerror(-res), res); + return false; + } + mPrevTriggers = triggerCount; + + // Read android.request.id from the request settings metadata + // - inform waitUntilRequestProcessed thread of a new request ID + { + Mutex::Autolock al(mLatestRequestMutex); + + camera_metadata_entry_t requestIdEntry = + nextRequest->mSettings.find(ANDROID_REQUEST_ID); + if (requestIdEntry.count > 0) { + mLatestRequestId = requestIdEntry.data.i32[0]; + } else { + ALOGW("%s: Did not have android.request.id set in the request", + 
__FUNCTION__); + mLatestRequestId = NAME_NOT_FOUND; + } + + mLatestRequestSignal.signal(); + } + + // Return input buffer back to framework + if (request.input_buffer != NULL) { + Camera3Stream *stream = + Camera3Stream::cast(request.input_buffer->stream); + res = stream->returnInputBuffer(*(request.input_buffer)); + // Note: stream may be deallocated at this point, if this buffer was the + // last reference to it. + if (res != OK) { + ALOGE("%s: RequestThread: Can't return input buffer for frame %d to" + " its stream:%s (%d)", __FUNCTION__, + request.frame_number, strerror(-res), res); + // TODO: Report error upstream + } + } + + + + return true; +} + +void Camera3Device::RequestThread::cleanUpFailedRequest( + camera3_capture_request_t &request, + sp &nextRequest, + Vector &outputBuffers) { + + if (request.settings != NULL) { + nextRequest->mSettings.unlock(request.settings); + } + if (request.input_buffer != NULL) { + request.input_buffer->status = CAMERA3_BUFFER_STATUS_ERROR; + nextRequest->mInputStream->returnInputBuffer(*(request.input_buffer)); + } + for (size_t i = 0; i < request.num_output_buffers; i++) { + outputBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR; + nextRequest->mOutputStreams.editItemAt(i)->returnBuffer( + outputBuffers[i], 0); + } +} + +sp + Camera3Device::RequestThread::waitForNextRequest() { + status_t res; + sp nextRequest; + + // Optimized a bit for the simple steady-state case (single repeating + // request), to avoid putting that request in the queue temporarily. + Mutex::Autolock l(mRequestLock); + + while (mRequestQueue.empty()) { + if (!mRepeatingRequests.empty()) { + // Always atomically enqueue all requests in a repeating request + // list. Guarantees a complete in-sequence set of captures to + // application. + const RequestList &requests = mRepeatingRequests; + RequestList::const_iterator firstRequest = + requests.begin(); + nextRequest = *firstRequest; + mRequestQueue.insert(mRequestQueue.end(), + ++firstRequest, + requests.end()); + // No need to wait any longer + break; + } + + res = mRequestSignal.waitRelative(mRequestLock, kRequestTimeout); + + if (res == TIMED_OUT) { + // Signal that we're paused by starvation + Mutex::Autolock pl(mPauseLock); + if (mPaused == false) { + mPaused = true; + mPausedSignal.signal(); + } + // Stop waiting for now and let thread management happen + return NULL; + } + } + + if (nextRequest == NULL) { + // Don't have a repeating request already in hand, so queue + // must have an entry now. + RequestList::iterator firstRequest = + mRequestQueue.begin(); + nextRequest = *firstRequest; + mRequestQueue.erase(firstRequest); + } + + // Not paused + Mutex::Autolock pl(mPauseLock); + mPaused = false; + + // Check if we've reconfigured since last time, and reset the preview + // request if so. Can't use 'NULL request == repeat' across configure calls. + if (mReconfigured) { + mPrevRequest.clear(); + mReconfigured = false; + } + + return nextRequest; +} + +bool Camera3Device::RequestThread::waitIfPaused() { + status_t res; + Mutex::Autolock l(mPauseLock); + while (mDoPause) { + // Signal that we're paused by request + if (mPaused == false) { + mPaused = true; + mPausedSignal.signal(); + } + res = mDoPauseSignal.waitRelative(mPauseLock, kRequestTimeout); + if (res == TIMED_OUT) { + return true; + } + } + // We don't set mPaused to false here, because waitForNextRequest needs + // to further manage the paused state in case of starvation. 
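A minimal sketch of the request-id handshake that waitUntilRequestProcessed() backs, assuming the client tags its settings with android.request.id before submission; requestId and the timeout value are placeholders:

    // Hedged sketch: tag a repeating request and wait for it to reach the HAL.
    int32_t requestId = 1;                       // placeholder id
    CameraMetadata settings;
    device->createDefaultRequest(CAMERA3_TEMPLATE_PREVIEW, &settings);
    settings.update(ANDROID_REQUEST_ID, &requestId, /*count*/ 1);
    device->setStreamingRequest(settings);
    device->waitUntilRequestReceived(requestId, /*timeout*/ 100000000);  // 100 ms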
+ return false; +} + +void Camera3Device::RequestThread::setErrorState(const char *fmt, ...) { + sp parent = mParent.promote(); + if (parent != NULL) { + va_list args; + va_start(args, fmt); + + parent->setErrorStateV(fmt, args); + + va_end(args); + } +} + +status_t Camera3Device::RequestThread::insertTriggers( + const sp &request) { + + Mutex::Autolock al(mTriggerMutex); + + CameraMetadata &metadata = request->mSettings; + size_t count = mTriggerMap.size(); + + for (size_t i = 0; i < count; ++i) { + RequestTrigger trigger = mTriggerMap.valueAt(i); + + uint32_t tag = trigger.metadataTag; + camera_metadata_entry entry = metadata.find(tag); + + if (entry.count > 0) { + /** + * Already has an entry for this trigger in the request. + * Rewrite it with our requested trigger value. + */ + RequestTrigger oldTrigger = trigger; + + oldTrigger.entryValue = entry.data.u8[0]; + + mTriggerReplacedMap.add(tag, oldTrigger); + } else { + /** + * More typical, no trigger entry, so we just add it + */ + mTriggerRemovedMap.add(tag, trigger); + } + + status_t res; + + switch (trigger.getTagType()) { + case TYPE_BYTE: { + uint8_t entryValue = static_cast(trigger.entryValue); + res = metadata.update(tag, + &entryValue, + /*count*/1); + break; + } + case TYPE_INT32: + res = metadata.update(tag, + &trigger.entryValue, + /*count*/1); + break; + default: + ALOGE("%s: Type not supported: 0x%x", + __FUNCTION__, + trigger.getTagType()); + return INVALID_OPERATION; + } + + if (res != OK) { + ALOGE("%s: Failed to update request metadata with trigger tag %s" + ", value %d", __FUNCTION__, trigger.getTagName(), + trigger.entryValue); + return res; + } + + ALOGV("%s: Mixed in trigger %s, value %d", __FUNCTION__, + trigger.getTagName(), + trigger.entryValue); + } + + mTriggerMap.clear(); + + return count; +} + +status_t Camera3Device::RequestThread::removeTriggers( + const sp &request) { + Mutex::Autolock al(mTriggerMutex); + + CameraMetadata &metadata = request->mSettings; + + /** + * Replace all old entries with their old values. + */ + for (size_t i = 0; i < mTriggerReplacedMap.size(); ++i) { + RequestTrigger trigger = mTriggerReplacedMap.valueAt(i); + + status_t res; + + uint32_t tag = trigger.metadataTag; + switch (trigger.getTagType()) { + case TYPE_BYTE: { + uint8_t entryValue = static_cast(trigger.entryValue); + res = metadata.update(tag, + &entryValue, + /*count*/1); + break; + } + case TYPE_INT32: + res = metadata.update(tag, + &trigger.entryValue, + /*count*/1); + break; + default: + ALOGE("%s: Type not supported: 0x%x", + __FUNCTION__, + trigger.getTagType()); + return INVALID_OPERATION; + } + + if (res != OK) { + ALOGE("%s: Failed to restore request metadata with trigger tag %s" + ", trigger value %d", __FUNCTION__, + trigger.getTagName(), trigger.entryValue); + return res; + } + } + mTriggerReplacedMap.clear(); + + /** + * Remove all new entries. 
+ */ + for (size_t i = 0; i < mTriggerRemovedMap.size(); ++i) { + RequestTrigger trigger = mTriggerRemovedMap.valueAt(i); + status_t res = metadata.erase(trigger.metadataTag); + + if (res != OK) { + ALOGE("%s: Failed to erase metadata with trigger tag %s" + ", trigger value %d", __FUNCTION__, + trigger.getTagName(), trigger.entryValue); + return res; + } + } + mTriggerRemovedMap.clear(); + + return OK; +} + + + +/** + * Static callback forwarding methods from HAL to instance + */ + +void Camera3Device::sProcessCaptureResult(const camera3_callback_ops *cb, + const camera3_capture_result *result) { + Camera3Device *d = + const_cast(static_cast(cb)); + d->processCaptureResult(result); +} + +void Camera3Device::sNotify(const camera3_callback_ops *cb, + const camera3_notify_msg *msg) { + Camera3Device *d = + const_cast(static_cast(cb)); + d->notify(msg); +} + +}; // namespace android diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h new file mode 100644 index 0000000..76c08ae --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3Device.h @@ -0,0 +1,419 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3DEVICE_H +#define ANDROID_SERVERS_CAMERA3DEVICE_H + +#include +#include +#include +#include +#include +#include +#include + +#include "common/CameraDeviceBase.h" + +/** + * Function pointer types with C calling convention to + * use for HAL callback functions. + */ +extern "C" { + typedef void (callbacks_process_capture_result_t)( + const struct camera3_callback_ops *, + const camera3_capture_result_t *); + + typedef void (callbacks_notify_t)( + const struct camera3_callback_ops *, + const camera3_notify_msg_t *); +} + +namespace android { + +namespace camera3 { + +class Camera3Stream; +class Camera3ZslStream; +class Camera3OutputStreamInterface; +class Camera3StreamInterface; + +} + +/** + * CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_3_0 + */ +class Camera3Device : + public CameraDeviceBase, + private camera3_callback_ops { + public: + Camera3Device(int id); + + virtual ~Camera3Device(); + + /** + * CameraDeviceBase interface + */ + + virtual int getId() const; + + // Transitions to idle state on success. 
+ virtual status_t initialize(camera_module_t *module); + virtual status_t disconnect(); + virtual status_t dump(int fd, const Vector &args); + virtual const CameraMetadata& info() const; + + // Capture and setStreamingRequest will configure streams if currently in + // idle state + virtual status_t capture(CameraMetadata &request); + virtual status_t setStreamingRequest(const CameraMetadata &request); + virtual status_t clearStreamingRequest(); + + virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout); + + // Actual stream creation/deletion is delayed until first request is submitted + // If adding streams while actively capturing, will pause device before adding + // stream, reconfiguring device, and unpausing. + virtual status_t createStream(sp consumer, + uint32_t width, uint32_t height, int format, size_t size, + int *id); + virtual status_t createInputStream( + uint32_t width, uint32_t height, int format, + int *id); + virtual status_t createZslStream( + uint32_t width, uint32_t height, + int depth, + /*out*/ + int *id, + sp* zslStream); + virtual status_t createReprocessStreamFromStream(int outputId, int *id); + + virtual status_t getStreamInfo(int id, + uint32_t *width, uint32_t *height, uint32_t *format); + virtual status_t setStreamTransform(int id, int transform); + + virtual status_t deleteStream(int id); + virtual status_t deleteReprocessStream(int id); + + virtual status_t createDefaultRequest(int templateId, CameraMetadata *request); + + // Transitions to the idle state on success + virtual status_t waitUntilDrained(); + + virtual status_t setNotifyCallback(NotificationListener *listener); + virtual bool willNotify3A(); + virtual status_t waitForNextFrame(nsecs_t timeout); + virtual status_t getNextFrame(CameraMetadata *frame); + + virtual status_t triggerAutofocus(uint32_t id); + virtual status_t triggerCancelAutofocus(uint32_t id); + virtual status_t triggerPrecaptureMetering(uint32_t id); + + virtual status_t pushReprocessBuffer(int reprocessStreamId, + buffer_handle_t *buffer, wp listener); + + private: + static const size_t kInFlightWarnLimit = 20; + static const nsecs_t kShutdownTimeout = 5000000000; // 5 sec + struct RequestTrigger; + + Mutex mLock; + + /**** Scope for mLock ****/ + + const int mId; + camera3_device_t *mHal3Device; + + CameraMetadata mDeviceInfo; + vendor_tag_query_ops_t mVendorTagOps; + + enum { + STATUS_ERROR, + STATUS_UNINITIALIZED, + STATUS_IDLE, + STATUS_ACTIVE + } mStatus; + + // Tracking cause of fatal errors when in STATUS_ERROR + String8 mErrorCause; + + // Mapping of stream IDs to stream instances + typedef KeyedVector > + StreamSet; + + StreamSet mOutputStreams; + sp mInputStream; + int mNextStreamId; + bool mNeedConfig; + + // Need to hold on to stream references until configure completes. + Vector > mDeletedStreams; + + /**** End scope for mLock ****/ + + class CaptureRequest : public LightRefBase { + public: + CameraMetadata mSettings; + sp mInputStream; + Vector > + mOutputStreams; + }; + typedef List > RequestList; + + /** + * Lock-held version of waitUntilDrained. Will transition to IDLE on + * success. + */ + status_t waitUntilDrainedLocked(); + + /** + * Do common work for setting up a streaming or single capture request. + * On success, will transition to ACTIVE if in IDLE. + */ + sp setUpRequestLocked(const CameraMetadata &request); + + /** + * Build a CaptureRequest request from the CameraDeviceBase request + * settings. 
+  private:
+    static const size_t kInFlightWarnLimit = 20;
+    static const nsecs_t kShutdownTimeout = 5000000000; // 5 sec
+    struct RequestTrigger;
+
+    Mutex mLock;
+
+    /**** Scope for mLock ****/
+
+    const int mId;
+    camera3_device_t *mHal3Device;
+
+    CameraMetadata mDeviceInfo;
+    vendor_tag_query_ops_t mVendorTagOps;
+
+    enum {
+        STATUS_ERROR,
+        STATUS_UNINITIALIZED,
+        STATUS_IDLE,
+        STATUS_ACTIVE
+    } mStatus;
+
+    // Tracking cause of fatal errors when in STATUS_ERROR
+    String8 mErrorCause;
+
+    // Mapping of stream IDs to stream instances
+    typedef KeyedVector<int, sp<camera3::Camera3OutputStreamInterface> >
+            StreamSet;
+
+    StreamSet mOutputStreams;
+    sp<camera3::Camera3Stream> mInputStream;
+    int mNextStreamId;
+    bool mNeedConfig;
+
+    // Need to hold on to stream references until configure completes.
+    Vector<sp<camera3::Camera3StreamInterface> > mDeletedStreams;
+
+    /**** End scope for mLock ****/
+
+    class CaptureRequest : public LightRefBase<CaptureRequest> {
+      public:
+        CameraMetadata mSettings;
+        sp<camera3::Camera3Stream> mInputStream;
+        Vector<sp<camera3::Camera3OutputStreamInterface> >
+                mOutputStreams;
+    };
+    typedef List<sp<CaptureRequest> > RequestList;
+
+    /**
+     * Lock-held version of waitUntilDrained. Will transition to IDLE on
+     * success.
+     */
+    status_t waitUntilDrainedLocked();
+
+    /**
+     * Do common work for setting up a streaming or single capture request.
+     * On success, will transition to ACTIVE if in IDLE.
+     */
+    sp<CaptureRequest> setUpRequestLocked(const CameraMetadata &request);
+
+    /**
+     * Build a CaptureRequest request from the CameraDeviceBase request
+     * settings.
+     */
+    sp<CaptureRequest> createCaptureRequest(const CameraMetadata &request);
+
+    /**
+     * Take the currently-defined set of streams and configure the HAL to use
+     * them. This is a long-running operation (may be several hundred ms).
+     */
+    status_t configureStreamsLocked();
+
+    /**
+     * Set device into an error state due to some fatal failure, and set an
+     * error message to indicate why. Only the first call's message will be
+     * used. The message is also sent to the log.
+     */
+    void setErrorState(const char *fmt, ...);
+    void setErrorStateV(const char *fmt, va_list args);
+    void setErrorStateLocked(const char *fmt, ...);
+    void setErrorStateLockedV(const char *fmt, va_list args);
+
+    struct RequestTrigger {
+        // Metadata tag number, e.g. android.control.aePrecaptureTrigger
+        uint32_t metadataTag;
+        // Metadata value, e.g. 'START' or the trigger ID
+        int32_t entryValue;
+
+        // The last part of the fully qualified path, e.g. afTrigger
+        const char *getTagName() const {
+            return get_camera_metadata_tag_name(metadataTag) ?: "NULL";
+        }
+
+        // e.g. TYPE_BYTE, TYPE_INT32, etc.
+        int getTagType() const {
+            return get_camera_metadata_tag_type(metadataTag);
+        }
+    };
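    // Rough sketch of how a caller might build and queue a trigger.
    // Illustrative only: the tag/value constants come from the camera
    // metadata headers, and mRequestThread is the thread declared below.
    //
    //   RequestTrigger trigger = {
    //       ANDROID_CONTROL_AF_TRIGGER,        // metadataTag
    //       ANDROID_CONTROL_AF_TRIGGER_START   // entryValue
    //   };
    //   mRequestThread->queueTrigger(&trigger, /*count*/1);
    //
    // The next outgoing capture request's settings are then temporarily
    // rewritten to carry this tag/value pair (see RequestThread::queueTrigger).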
+
+    /**
+     * Thread for managing capture request submission to HAL device.
+     */
+    class RequestThread : public Thread {
+
+      public:
+
+        RequestThread(wp<Camera3Device> parent,
+                camera3_device_t *hal3Device);
+
+        /**
+         * Call after stream (re)-configuration is completed.
+         */
+        void configurationComplete();
+
+        /**
+         * Set or clear the list of repeating requests. Does not block
+         * on either. Use waitUntilPaused to wait until request queue
+         * has emptied out.
+         */
+        status_t setRepeatingRequests(const RequestList& requests);
+        status_t clearRepeatingRequests();
+
+        status_t queueRequest(sp<CaptureRequest> request);
+
+        /**
+         * Queue a trigger to be dispatched with the next outgoing
+         * process_capture_request. The settings for that request only
+         * will be temporarily rewritten to add the trigger tag/value.
+         * Subsequent requests will not be rewritten (for this tag).
+         */
+        status_t queueTrigger(RequestTrigger trigger[], size_t count);
+
+        /**
+         * Pause/unpause the capture thread. Doesn't block, so use
+         * waitUntilPaused to wait until the thread is paused.
+         */
+        void setPaused(bool paused);
+
+        /**
+         * Wait until thread is paused, either due to setPaused(true)
+         * or due to lack of input requests. Returns TIMED_OUT in case
+         * the thread does not pause within the timeout.
+         */
+        status_t waitUntilPaused(nsecs_t timeout);
+
+        /**
+         * Wait until thread processes the capture request with settings'
+         * android.request.id == requestId.
+         *
+         * Returns TIMED_OUT in case the thread does not process the request
+         * within the timeout.
+         */
+        status_t waitUntilRequestProcessed(int32_t requestId, nsecs_t timeout);
+
+      protected:
+
+        virtual bool threadLoop();
+
+      private:
+        static int getId(const wp<Camera3Device> &device);
+
+        status_t queueTriggerLocked(RequestTrigger trigger);
+        // Mix-in queued triggers into this request
+        int32_t insertTriggers(const sp<CaptureRequest> &request);
+        // Purge the queued triggers from this request,
+        //  restoring the old field values for those tags.
+        status_t removeTriggers(const sp<CaptureRequest> &request);
+
+        static const nsecs_t kRequestTimeout = 50e6; // 50 ms
+
+        // Waits for a request, or returns NULL if times out.
+        sp<CaptureRequest> waitForNextRequest();
+
+        // Return buffers, etc, for a request that couldn't be fully
+        //  constructed. The buffers will be returned in the ERROR state
+        //  to mark them as not having valid data.
+        //  All arguments will be modified.
+        void cleanUpFailedRequest(camera3_capture_request_t &request,
+                sp<CaptureRequest> &nextRequest,
+                Vector<camera3_stream_buffer_t> &outputBuffers);
+
+        // Pause handling
+        bool waitIfPaused();
+
+        // Relay error to parent device object setErrorState
+        void setErrorState(const char *fmt, ...);
+
+        wp<Camera3Device> mParent;
+        camera3_device_t *mHal3Device;
+
+        const int mId;
+
+        Mutex mRequestLock;
+        Condition mRequestSignal;
+        RequestList mRequestQueue;
+        RequestList mRepeatingRequests;
+
+        bool mReconfigured;
+
+        // Used by waitIfPaused, waitForNextRequest, and waitUntilPaused
+        Mutex mPauseLock;
+        bool mDoPause;
+        Condition mDoPauseSignal;
+        bool mPaused;
+        Condition mPausedSignal;
+
+        sp<CaptureRequest> mPrevRequest;
+        int32_t mPrevTriggers;
+
+        uint32_t mFrameNumber;
+
+        Mutex mLatestRequestMutex;
+        Condition mLatestRequestSignal;
+        // android.request.id for latest process_capture_request
+        int32_t mLatestRequestId;
+
+        typedef KeyedVector<uint32_t, RequestTrigger> TriggerMap;
+        Mutex mTriggerMutex;
+        TriggerMap mTriggerMap;
+        TriggerMap mTriggerRemovedMap;
+        TriggerMap mTriggerReplacedMap;
+    };
+    sp<RequestThread> mRequestThread;
+
+    /**
+     * In-flight queue for tracking completion of capture requests.
+     */
+
+    struct InFlightRequest {
+        // Set by notify() SHUTTER call.
+        nsecs_t captureTimestamp;
+        // Set by process_capture_result call with valid metadata
+        bool haveResultMetadata;
+        // Decremented by calls to process_capture_result with valid output
+        // buffers
+        int numBuffersLeft;
+
+        InFlightRequest() :
+                captureTimestamp(0),
+                haveResultMetadata(false),
+                numBuffersLeft(0) {
+        }
+
+        explicit InFlightRequest(int numBuffers) :
+                captureTimestamp(0),
+                haveResultMetadata(false),
+                numBuffersLeft(numBuffers) {
+        }
+    };
+    // Map from frame number to the in-flight request state
+    typedef KeyedVector<int32_t, InFlightRequest> InFlightMap;
+
+    Mutex mInFlightLock; // Protects mInFlightMap
+    InFlightMap mInFlightMap;
+
+    status_t registerInFlight(int32_t frameNumber, int32_t numBuffers);
+
+    /**
+     * Output result queue and current HAL device 3A state
+     */
+
+    // Lock for output side of device
+    Mutex mOutputLock;
+
+    /**** Scope for mOutputLock ****/
+
+    uint32_t mNextResultFrameNumber;
+    uint32_t mNextShutterFrameNumber;
+    List<CameraMetadata> mResultQueue;
+    Condition mResultSignal;
+    NotificationListener *mListener;
+
+    /**** End scope for mOutputLock ****/
+
+    /**
+     * Callback functions from HAL device
+     */
+    void processCaptureResult(const camera3_capture_result *result);
+
+    void notify(const camera3_notify_msg *msg);
+
+    /**
+     * Static callback forwarding methods from HAL to instance
+     */
+    static callbacks_process_capture_result_t sProcessCaptureResult;
+
+    static callbacks_notify_t sNotify;
+
+}; // class Camera3Device
+
+}; // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
new file mode 100644
index 0000000..0850566
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -0,0 +1,275 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera3-IOStreamBase" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +// This is needed for stdint.h to define INT64_MAX in C++ +#define __STDC_LIMIT_MACROS + +#include +#include +#include "Camera3IOStreamBase.h" + +namespace android { + +namespace camera3 { + +Camera3IOStreamBase::Camera3IOStreamBase(int id, camera3_stream_type_t type, + uint32_t width, uint32_t height, size_t maxSize, int format) : + Camera3Stream(id, type, + width, height, maxSize, format), + mTotalBufferCount(0), + mDequeuedBufferCount(0), + mFrameCount(0), + mLastTimestamp(0) { + + mCombinedFence = new Fence(); + + if (maxSize > 0 && format != HAL_PIXEL_FORMAT_BLOB) { + ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__, + format); + mState = STATE_ERROR; + } +} + +Camera3IOStreamBase::~Camera3IOStreamBase() { + disconnectLocked(); +} + +bool Camera3IOStreamBase::hasOutstandingBuffersLocked() const { + nsecs_t signalTime = mCombinedFence->getSignalTime(); + ALOGV("%s: Stream %d: Has %d outstanding buffers," + " buffer signal time is %lld", + __FUNCTION__, mId, mDequeuedBufferCount, signalTime); + if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) { + return true; + } + return false; +} + +status_t Camera3IOStreamBase::waitUntilIdle(nsecs_t timeout) { + status_t res; + { + Mutex::Autolock l(mLock); + while (mDequeuedBufferCount > 0) { + if (timeout != TIMEOUT_NEVER) { + nsecs_t startTime = systemTime(); + res = mBufferReturnedSignal.waitRelative(mLock, timeout); + if (res == TIMED_OUT) { + return res; + } else if (res != OK) { + ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + nsecs_t deltaTime = systemTime() - startTime; + if (timeout <= deltaTime) { + timeout = 0; + } else { + timeout -= deltaTime; + } + } else { + res = mBufferReturnedSignal.wait(mLock); + if (res != OK) { + ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + } + } + } + + // No lock + + unsigned int timeoutMs; + if (timeout == TIMEOUT_NEVER) { + timeoutMs = Fence::TIMEOUT_NEVER; + } else if (timeout == 0) { + timeoutMs = 0; + } else { + // Round up to wait at least 1 ms + timeoutMs = (timeout + 999999) / 1000000; + } + + return mCombinedFence->wait(timeoutMs); +} + +void Camera3IOStreamBase::dump(int fd, const Vector &args) const { + (void) args; + String8 lines; + lines.appendFormat(" State: %d\n", mState); + lines.appendFormat(" Dims: %d x %d, format 0x%x\n", + camera3_stream::width, camera3_stream::height, + camera3_stream::format); + lines.appendFormat(" Max size: %d\n", mMaxSize); + lines.appendFormat(" Usage: %d, max HAL buffers: %d\n", + camera3_stream::usage, camera3_stream::max_buffers); + lines.appendFormat(" Frames produced: %d, last timestamp: %lld ns\n", + mFrameCount, mLastTimestamp); + lines.appendFormat(" Total buffers: %d, currently dequeued: %d\n", + mTotalBufferCount, mDequeuedBufferCount); + write(fd, lines.string(), lines.size()); +} + +status_t Camera3IOStreamBase::configureQueueLocked() { + status_t res; + + switch (mState) { + case STATE_IN_RECONFIG: + res = disconnectLocked(); + if (res != OK) { + return res; + } + break; + case STATE_IN_CONFIG: + // OK + break; + default: + ALOGE("%s: Bad state: %d", __FUNCTION__, mState); + return INVALID_OPERATION; + } + + return OK; +} + +size_t Camera3IOStreamBase::getBufferCountLocked() { 
+ return mTotalBufferCount; +} + +status_t Camera3IOStreamBase::disconnectLocked() { + switch (mState) { + case STATE_IN_RECONFIG: + case STATE_CONFIGURED: + // OK + break; + default: + // No connection, nothing to do + ALOGV("%s: Stream %d: Already disconnected", + __FUNCTION__, mId); + return -ENOTCONN; + } + + if (mDequeuedBufferCount > 0) { + ALOGE("%s: Can't disconnect with %d buffers still dequeued!", + __FUNCTION__, mDequeuedBufferCount); + return INVALID_OPERATION; + } + + return OK; +} + +void Camera3IOStreamBase::handoutBufferLocked(camera3_stream_buffer &buffer, + buffer_handle_t *handle, + int acquireFence, + int releaseFence, + camera3_buffer_status_t status) { + /** + * Note that all fences are now owned by HAL. + */ + + // Handing out a raw pointer to this object. Increment internal refcount. + incStrong(this); + buffer.stream = this; + buffer.buffer = handle; + buffer.acquire_fence = acquireFence; + buffer.release_fence = releaseFence; + buffer.status = status; + + mDequeuedBufferCount++; +} + +status_t Camera3IOStreamBase::getBufferPreconditionCheckLocked() const { + // Allow dequeue during IN_[RE]CONFIG for registration + if (mState != STATE_CONFIGURED && + mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) { + ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + + // Only limit dequeue amount when fully configured + if (mState == STATE_CONFIGURED && + mDequeuedBufferCount == camera3_stream::max_buffers) { + ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous" + " buffers (%d)", __FUNCTION__, mId, + camera3_stream::max_buffers); + return INVALID_OPERATION; + } + + return OK; +} + +status_t Camera3IOStreamBase::returnBufferPreconditionCheckLocked() const { + // Allow buffers to be returned in the error state, to allow for disconnect + // and in the in-config states for registration + if (mState == STATE_CONSTRUCTED) { + ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + if (mDequeuedBufferCount == 0) { + ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__, + mId); + return INVALID_OPERATION; + } + + return OK; +} + +status_t Camera3IOStreamBase::returnAnyBufferLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output) { + status_t res; + + // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be + // decrementing the internal refcount next. In case this is the last ref, we + // might get destructed on the decStrong(), so keep an sp around until the + // end of the call - otherwise have to sprinkle the decStrong on all exit + // points. 
+ sp keepAlive(this); + decStrong(this); + + if ((res = returnBufferPreconditionCheckLocked()) != OK) { + return res; + } + + sp releaseFence; + res = returnBufferCheckedLocked(buffer, timestamp, output, + &releaseFence); + if (res != OK) { + return res; + } + + mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); + + mDequeuedBufferCount--; + mBufferReturnedSignal.signal(); + + if (output) { + mLastTimestamp = timestamp; + } + + return OK; +} + + + +}; // namespace camera3 + +}; // namespace android diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h new file mode 100644 index 0000000..74c4484 --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h @@ -0,0 +1,102 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_IO_STREAM_BASE_H +#define ANDROID_SERVERS_CAMERA3_IO_STREAM_BASE_H + +#include +#include + +#include "Camera3Stream.h" + +namespace android { + +namespace camera3 { + +/** + * A base class for managing a single stream of I/O data from the camera device. + */ +class Camera3IOStreamBase : + public Camera3Stream { + protected: + Camera3IOStreamBase(int id, camera3_stream_type_t type, + uint32_t width, uint32_t height, size_t maxSize, int format); + + public: + + virtual ~Camera3IOStreamBase(); + + /** + * Camera3Stream interface + */ + + virtual status_t waitUntilIdle(nsecs_t timeout); + virtual void dump(int fd, const Vector &args) const; + + protected: + size_t mTotalBufferCount; + // sum of input and output buffers that are currently acquired by HAL + size_t mDequeuedBufferCount; + Condition mBufferReturnedSignal; + uint32_t mFrameCount; + // Last received output buffer's timestamp + nsecs_t mLastTimestamp; + + // The merged release fence for all returned buffers + sp mCombinedFence; + + status_t returnAnyBufferLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output); + + virtual status_t returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut) = 0; + + /** + * Internal Camera3Stream interface + */ + virtual bool hasOutstandingBuffersLocked() const; + + virtual size_t getBufferCountLocked(); + + status_t getBufferPreconditionCheckLocked() const; + status_t returnBufferPreconditionCheckLocked() const; + + // State check only + virtual status_t configureQueueLocked(); + // State checks only + virtual status_t disconnectLocked(); + + // Hand out the buffer to a native location, + // incrementing the internal refcount and dequeued buffer count + void handoutBufferLocked(camera3_stream_buffer &buffer, + buffer_handle_t *handle, + int acquire_fence, + int release_fence, + camera3_buffer_status_t status); + +}; // class Camera3IOStreamBase + +} // namespace camera3 + +} // namespace android + +#endif diff --git 
a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp new file mode 100644 index 0000000..e9a9c2b --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp @@ -0,0 +1,239 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera3-InputStream" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include +#include "Camera3InputStream.h" + +namespace android { + +namespace camera3 { + +Camera3InputStream::Camera3InputStream(int id, + uint32_t width, uint32_t height, int format) : + Camera3IOStreamBase(id, CAMERA3_STREAM_INPUT, width, height, + /*maxSize*/0, format) { + + if (format == HAL_PIXEL_FORMAT_BLOB) { + ALOGE("%s: Bad format, BLOB not supported", __FUNCTION__); + mState = STATE_ERROR; + } +} + +Camera3InputStream::~Camera3InputStream() { + disconnectLocked(); +} + +status_t Camera3InputStream::getInputBufferLocked( + camera3_stream_buffer *buffer) { + ATRACE_CALL(); + status_t res; + + // FIXME: will not work in (re-)registration + if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) { + ALOGE("%s: Stream %d: Buffer registration for input streams" + " not implemented (state %d)", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + + if ((res = getBufferPreconditionCheckLocked()) != OK) { + return res; + } + + ANativeWindowBuffer* anb; + int fenceFd; + + assert(mConsumer != 0); + + BufferItem bufferItem; + res = mConsumer->acquireBuffer(&bufferItem, /*waitForFence*/false); + + if (res != OK) { + ALOGE("%s: Stream %d: Can't acquire next output buffer: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + anb = bufferItem.mGraphicBuffer->getNativeBuffer(); + assert(anb != NULL); + fenceFd = bufferItem.mFence->dup(); + + /** + * FenceFD now owned by HAL except in case of error, + * in which case we reassign it to acquire_fence + */ + handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd, + /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK); + mBuffersInFlight.push_back(bufferItem); + + return OK; +} + +status_t Camera3InputStream::returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut) { + + (void)timestamp; + (void)output; + ALOG_ASSERT(!output, "Expected output to be false"); + + status_t res; + + bool bufferFound = false; + BufferItem bufferItem; + { + // Find the buffer we are returning + Vector::iterator it, end; + for (it = mBuffersInFlight.begin(), end = mBuffersInFlight.end(); + it != end; + ++it) { + + const BufferItem& tmp = *it; + ANativeWindowBuffer *anb = tmp.mGraphicBuffer->getNativeBuffer(); + if (anb != NULL && &(anb->handle) == buffer.buffer) { + bufferFound = true; + bufferItem = tmp; + mBuffersInFlight.erase(it); + mDequeuedBufferCount--; + } + } + } + if (!bufferFound) { + ALOGE("%s: Stream %d: Can't return buffer that wasn't 
sent to HAL", + __FUNCTION__, mId); + return INVALID_OPERATION; + } + + if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { + if (buffer.release_fence != -1) { + ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when " + "there is an error", __FUNCTION__, mId, buffer.release_fence); + close(buffer.release_fence); + } + + /** + * Reassign release fence as the acquire fence incase of error + */ + const_cast(&buffer)->release_fence = + buffer.acquire_fence; + } + + /** + * Unconditionally return buffer to the buffer queue. + * - Fwk takes over the release_fence ownership + */ + sp releaseFence = new Fence(buffer.release_fence); + res = mConsumer->releaseBuffer(bufferItem, releaseFence); + if (res != OK) { + ALOGE("%s: Stream %d: Error releasing buffer back to buffer queue:" + " %s (%d)", __FUNCTION__, mId, strerror(-res), res); + return res; + } + + *releaseFenceOut = releaseFence; + + return OK; +} + +status_t Camera3InputStream::returnInputBufferLocked( + const camera3_stream_buffer &buffer) { + ATRACE_CALL(); + + return returnAnyBufferLocked(buffer, /*timestamp*/0, /*output*/false); +} + +status_t Camera3InputStream::disconnectLocked() { + + status_t res; + + if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) { + return res; + } + + assert(mBuffersInFlight.size() == 0); + + /** + * no-op since we can't disconnect the producer from the consumer-side + */ + + mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG + : STATE_CONSTRUCTED; + return OK; +} + +sp Camera3InputStream::getProducerInterface() const { + return mConsumer->getProducerInterface(); +} + +void Camera3InputStream::dump(int fd, const Vector &args) const { + (void) args; + String8 lines; + lines.appendFormat(" Stream[%d]: Input\n", mId); + write(fd, lines.string(), lines.size()); + + Camera3IOStreamBase::dump(fd, args); +} + +status_t Camera3InputStream::configureQueueLocked() { + status_t res; + + if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) { + return res; + } + + assert(mMaxSize == 0); + assert(camera3_stream::format != HAL_PIXEL_FORMAT_BLOB); + + mTotalBufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS + + camera3_stream::max_buffers; + mDequeuedBufferCount = 0; + mFrameCount = 0; + + if (mConsumer.get() == 0) { + sp bq = new BufferQueue(); + mConsumer = new BufferItemConsumer(bq, camera3_stream::usage, + mTotalBufferCount); + mConsumer->setName(String8::format("Camera3-InputStream-%d", mId)); + } + + res = mConsumer->setDefaultBufferSize(camera3_stream::width, + camera3_stream::height); + if (res != OK) { + ALOGE("%s: Stream %d: Could not set buffer dimensions %dx%d", + __FUNCTION__, mId, camera3_stream::width, camera3_stream::height); + return res; + } + res = mConsumer->setDefaultBufferFormat(camera3_stream::format); + if (res != OK) { + ALOGE("%s: Stream %d: Could not set buffer format %d", + __FUNCTION__, mId, camera3_stream::format); + return res; + } + + return OK; +} + +}; // namespace camera3 + +}; // namespace android diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h new file mode 100644 index 0000000..8adda88 --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3InputStream.h @@ -0,0 +1,88 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_INPUT_STREAM_H +#define ANDROID_SERVERS_CAMERA3_INPUT_STREAM_H + +#include +#include +#include + +#include "Camera3IOStreamBase.h" + +namespace android { + +namespace camera3 { + +/** + * A class for managing a single stream of input data to the camera device. + * + * This class serves as a consumer adapter for the HAL, and will consume the + * buffers by feeding them into the HAL, as well as releasing the buffers back + * the buffers once the HAL is done with them. + */ +class Camera3InputStream : public Camera3IOStreamBase { + public: + /** + * Set up a stream for formats that have fixed size, such as RAW and YUV. + */ + Camera3InputStream(int id, uint32_t width, uint32_t height, int format); + ~Camera3InputStream(); + + virtual void dump(int fd, const Vector &args) const; + + /** + * Get the producer interface for this stream, to hand off to a producer. + * The producer must be connected to the provided interface before + * finishConfigure is called on this stream. + */ + sp getProducerInterface() const; + + private: + + typedef BufferItemConsumer::BufferItem BufferItem; + + sp mConsumer; + Vector mBuffersInFlight; + + /** + * Camera3IOStreamBase + */ + virtual status_t returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut); + + /** + * Camera3Stream interface + */ + + virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer); + virtual status_t returnInputBufferLocked( + const camera3_stream_buffer &buffer); + virtual status_t disconnectLocked(); + + virtual status_t configureQueueLocked(); + +}; // class Camera3InputStream + +}; // namespace camera3 + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp new file mode 100644 index 0000000..0ec2b05 --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp @@ -0,0 +1,369 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera3-OutputStream" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include +#include "Camera3OutputStream.h" + +#ifndef container_of +#define container_of(ptr, type, member) \ + (type *)((char*)(ptr) - offsetof(type, member)) +#endif + +namespace android { + +namespace camera3 { + +Camera3OutputStream::Camera3OutputStream(int id, + sp consumer, + uint32_t width, uint32_t height, int format) : + Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, + /*maxSize*/0, format), + mConsumer(consumer), + mTransform(0) { + + if (mConsumer == NULL) { + ALOGE("%s: Consumer is NULL!", __FUNCTION__); + mState = STATE_ERROR; + } +} + +Camera3OutputStream::Camera3OutputStream(int id, + sp consumer, + uint32_t width, uint32_t height, size_t maxSize, int format) : + Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, maxSize, + format), + mConsumer(consumer), + mTransform(0) { + + if (format != HAL_PIXEL_FORMAT_BLOB) { + ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__, + format); + mState = STATE_ERROR; + } + + if (mConsumer == NULL) { + ALOGE("%s: Consumer is NULL!", __FUNCTION__); + mState = STATE_ERROR; + } +} + +Camera3OutputStream::Camera3OutputStream(int id, camera3_stream_type_t type, + uint32_t width, uint32_t height, + int format) : + Camera3IOStreamBase(id, type, width, height, + /*maxSize*/0, + format), + mTransform(0) { + + // Subclasses expected to initialize mConsumer themselves +} + + +Camera3OutputStream::~Camera3OutputStream() { + disconnectLocked(); +} + +status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer) { + ATRACE_CALL(); + status_t res; + + if ((res = getBufferPreconditionCheckLocked()) != OK) { + return res; + } + + ANativeWindowBuffer* anb; + int fenceFd; + + res = mConsumer->dequeueBuffer(mConsumer.get(), &anb, &fenceFd); + if (res != OK) { + ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + /** + * FenceFD now owned by HAL except in case of error, + * in which case we reassign it to acquire_fence + */ + handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd, + /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK); + + return OK; +} + +status_t Camera3OutputStream::returnBufferLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp) { + ATRACE_CALL(); + + status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true); + + if (res != OK) { + return res; + } + + mLastTimestamp = timestamp; + + return OK; +} + +status_t Camera3OutputStream::returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut) { + + (void)output; + ALOG_ASSERT(output, "Expected output to be true"); + + status_t res; + sp releaseFence; + + /** + * Fence management - calculate Release Fence + */ + if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { + if (buffer.release_fence != -1) { + ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when " + "there is an error", __FUNCTION__, mId, buffer.release_fence); + close(buffer.release_fence); + } + + /** + * Reassign release fence as the acquire fence in case of error + */ + releaseFence = new Fence(buffer.acquire_fence); + } else { + res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp); + if (res != OK) { + ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + + releaseFence = new 
Fence(buffer.release_fence); + } + + int anwReleaseFence = releaseFence->dup(); + + /** + * Release the lock briefly to avoid deadlock with + * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this + * thread will go into StreamingProcessor::onFrameAvailable) during + * queueBuffer + */ + sp currentConsumer = mConsumer; + mLock.unlock(); + + /** + * Return buffer back to ANativeWindow + */ + if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { + // Cancel buffer + res = currentConsumer->cancelBuffer(currentConsumer.get(), + container_of(buffer.buffer, ANativeWindowBuffer, handle), + anwReleaseFence); + if (res != OK) { + ALOGE("%s: Stream %d: Error cancelling buffer to native window:" + " %s (%d)", __FUNCTION__, mId, strerror(-res), res); + } + } else { + res = currentConsumer->queueBuffer(currentConsumer.get(), + container_of(buffer.buffer, ANativeWindowBuffer, handle), + anwReleaseFence); + if (res != OK) { + ALOGE("%s: Stream %d: Error queueing buffer to native window: " + "%s (%d)", __FUNCTION__, mId, strerror(-res), res); + } + } + mLock.lock(); + if (res != OK) { + close(anwReleaseFence); + return res; + } + + *releaseFenceOut = releaseFence; + + return OK; +} + +void Camera3OutputStream::dump(int fd, const Vector &args) const { + (void) args; + String8 lines; + lines.appendFormat(" Stream[%d]: Output\n", mId); + write(fd, lines.string(), lines.size()); + + Camera3IOStreamBase::dump(fd, args); +} + +status_t Camera3OutputStream::setTransform(int transform) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + return setTransformLocked(transform); +} + +status_t Camera3OutputStream::setTransformLocked(int transform) { + status_t res = OK; + if (mState == STATE_ERROR) { + ALOGE("%s: Stream in error state", __FUNCTION__); + return INVALID_OPERATION; + } + + mTransform = transform; + if (mState == STATE_CONFIGURED) { + res = native_window_set_buffers_transform(mConsumer.get(), + transform); + if (res != OK) { + ALOGE("%s: Unable to configure stream transform to %x: %s (%d)", + __FUNCTION__, transform, strerror(-res), res); + } + } + return res; +} + +status_t Camera3OutputStream::configureQueueLocked() { + status_t res; + + if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) { + return res; + } + + ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL"); + + // Configure consumer-side ANativeWindow interface + res = native_window_api_connect(mConsumer.get(), + NATIVE_WINDOW_API_CAMERA); + if (res != OK) { + ALOGE("%s: Unable to connect to native window for stream %d", + __FUNCTION__, mId); + return res; + } + + res = native_window_set_usage(mConsumer.get(), camera3_stream::usage); + if (res != OK) { + ALOGE("%s: Unable to configure usage %08x for stream %d", + __FUNCTION__, camera3_stream::usage, mId); + return res; + } + + res = native_window_set_scaling_mode(mConsumer.get(), + NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); + if (res != OK) { + ALOGE("%s: Unable to configure stream scaling: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + if (mMaxSize == 0) { + // For buffers of known size + res = native_window_set_buffers_geometry(mConsumer.get(), + camera3_stream::width, camera3_stream::height, + camera3_stream::format); + } else { + // For buffers with bounded size + res = native_window_set_buffers_geometry(mConsumer.get(), + mMaxSize, 1, + camera3_stream::format); + } + if (res != OK) { + ALOGE("%s: Unable to configure stream buffer geometry" + " %d x %d, format %x for stream %d", + __FUNCTION__, camera3_stream::width, 
camera3_stream::height, + camera3_stream::format, mId); + return res; + } + + int maxConsumerBuffers; + res = mConsumer->query(mConsumer.get(), + NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers); + if (res != OK) { + ALOGE("%s: Unable to query consumer undequeued" + " buffer count for stream %d", __FUNCTION__, mId); + return res; + } + + ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__, + maxConsumerBuffers, camera3_stream::max_buffers); + if (camera3_stream::max_buffers == 0) { + ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1", + __FUNCTION__, camera3_stream::max_buffers); + return INVALID_OPERATION; + } + + mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers; + mDequeuedBufferCount = 0; + mFrameCount = 0; + mLastTimestamp = 0; + + res = native_window_set_buffer_count(mConsumer.get(), + mTotalBufferCount); + if (res != OK) { + ALOGE("%s: Unable to set buffer count for stream %d", + __FUNCTION__, mId); + return res; + } + + res = native_window_set_buffers_transform(mConsumer.get(), + mTransform); + if (res != OK) { + ALOGE("%s: Unable to configure stream transform to %x: %s (%d)", + __FUNCTION__, mTransform, strerror(-res), res); + } + + return OK; +} + +status_t Camera3OutputStream::disconnectLocked() { + status_t res; + + if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) { + return res; + } + + res = native_window_api_disconnect(mConsumer.get(), + NATIVE_WINDOW_API_CAMERA); + + /** + * This is not an error. if client calling process dies, the window will + * also die and all calls to it will return DEAD_OBJECT, thus it's already + * "disconnected" + */ + if (res == DEAD_OBJECT) { + ALOGW("%s: While disconnecting stream %d from native window, the" + " native window died from under us", __FUNCTION__, mId); + } + else if (res != OK) { + ALOGE("%s: Unable to disconnect stream %d from native window " + "(error %d %s)", + __FUNCTION__, mId, res, strerror(-res)); + mState = STATE_ERROR; + return res; + } + + mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG + : STATE_CONSTRUCTED; + return OK; +} + +}; // namespace camera3 + +}; // namespace android diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h new file mode 100644 index 0000000..774fbdd --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h @@ -0,0 +1,101 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H +#define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H + +#include +#include + +#include "Camera3Stream.h" +#include "Camera3IOStreamBase.h" +#include "Camera3OutputStreamInterface.h" + +namespace android { + +namespace camera3 { + +/** + * A class for managing a single stream of output data from the camera device. 
+ */ +class Camera3OutputStream : + public Camera3IOStreamBase, + public Camera3OutputStreamInterface { + public: + /** + * Set up a stream for formats that have 2 dimensions, such as RAW and YUV. + */ + Camera3OutputStream(int id, sp consumer, + uint32_t width, uint32_t height, int format); + + /** + * Set up a stream for formats that have a variable buffer size for the same + * dimensions, such as compressed JPEG. + */ + Camera3OutputStream(int id, sp consumer, + uint32_t width, uint32_t height, size_t maxSize, int format); + + virtual ~Camera3OutputStream(); + + /** + * Camera3Stream interface + */ + + virtual void dump(int fd, const Vector &args) const; + + /** + * Set the transform on the output stream; one of the + * HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants. + */ + status_t setTransform(int transform); + + protected: + Camera3OutputStream(int id, camera3_stream_type_t type, + uint32_t width, uint32_t height, int format); + + /** + * Note that we release the lock briefly in this function + */ + virtual status_t returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut); + + sp mConsumer; + private: + int mTransform; + + virtual status_t setTransformLocked(int transform); + + /** + * Internal Camera3Stream interface + */ + virtual status_t getBufferLocked(camera3_stream_buffer *buffer); + virtual status_t returnBufferLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp); + + virtual status_t configureQueueLocked(); + virtual status_t disconnectLocked(); +}; // class Camera3OutputStream + +} // namespace camera3 + +} // namespace android + +#endif diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h new file mode 100644 index 0000000..aae72cf --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_INTERFACE_H +#define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_INTERFACE_H + +#include "Camera3StreamInterface.h" + +namespace android { + +namespace camera3 { + +/** + * An interface for managing a single stream of output data from the camera + * device. + */ +class Camera3OutputStreamInterface : public virtual Camera3StreamInterface { + public: + /** + * Set the transform on the output stream; one of the + * HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants. 
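     * For example, a client compensating for display orientation might call
     * (illustrative usage only; the receiver name is assumed):
     *
     *   outputStream->setTransform(NATIVE_WINDOW_TRANSFORM_ROT_90);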
+ */ + virtual status_t setTransform(int transform) = 0; +}; + +} // namespace camera3 + +} // namespace android + +#endif diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp new file mode 100644 index 0000000..ab563df --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp @@ -0,0 +1,383 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera3-Stream" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include +#include "Camera3Stream.h" + +namespace android { + +namespace camera3 { + +Camera3Stream::~Camera3Stream() { +} + +Camera3Stream* Camera3Stream::cast(camera3_stream *stream) { + return static_cast(stream); +} + +const Camera3Stream* Camera3Stream::cast(const camera3_stream *stream) { + return static_cast(stream); +} + +Camera3Stream::Camera3Stream(int id, + camera3_stream_type type, + uint32_t width, uint32_t height, size_t maxSize, int format) : + camera3_stream(), + mId(id), + mName(String8::format("Camera3Stream[%d]", id)), + mMaxSize(maxSize), + mState(STATE_CONSTRUCTED) { + + camera3_stream::stream_type = type; + camera3_stream::width = width; + camera3_stream::height = height; + camera3_stream::format = format; + camera3_stream::usage = 0; + camera3_stream::max_buffers = 0; + camera3_stream::priv = NULL; + + if (format == HAL_PIXEL_FORMAT_BLOB && maxSize == 0) { + ALOGE("%s: BLOB format with size == 0", __FUNCTION__); + mState = STATE_ERROR; + } +} + +int Camera3Stream::getId() const { + return mId; +} + +uint32_t Camera3Stream::getWidth() const { + return camera3_stream::width; +} + +uint32_t Camera3Stream::getHeight() const { + return camera3_stream::height; +} + +int Camera3Stream::getFormat() const { + return camera3_stream::format; +} + +camera3_stream* Camera3Stream::startConfiguration() { + Mutex::Autolock l(mLock); + + switch (mState) { + case STATE_ERROR: + ALOGE("%s: In error state", __FUNCTION__); + return NULL; + case STATE_CONSTRUCTED: + // OK + break; + case STATE_IN_CONFIG: + case STATE_IN_RECONFIG: + // Can start config again with no trouble; but don't redo + // oldUsage/oldMaxBuffers + return this; + case STATE_CONFIGURED: + if (stream_type == CAMERA3_STREAM_INPUT) { + ALOGE("%s: Cannot configure an input stream twice", + __FUNCTION__); + return NULL; + } else if (hasOutstandingBuffersLocked()) { + ALOGE("%s: Cannot configure stream; has outstanding buffers", + __FUNCTION__); + return NULL; + } + break; + default: + ALOGE("%s: Unknown state %d", __FUNCTION__, mState); + return NULL; + } + + oldUsage = usage; + oldMaxBuffers = max_buffers; + + if (mState == STATE_CONSTRUCTED) { + mState = STATE_IN_CONFIG; + } else { // mState == STATE_CONFIGURED + mState = STATE_IN_RECONFIG; + } + + return this; +} + +bool Camera3Stream::isConfiguring() const { + Mutex::Autolock l(mLock); + return (mState == STATE_IN_CONFIG) || (mState == STATE_IN_RECONFIG); 
+} + +status_t Camera3Stream::finishConfiguration(camera3_device *hal3Device) { + Mutex::Autolock l(mLock); + switch (mState) { + case STATE_ERROR: + ALOGE("%s: In error state", __FUNCTION__); + return INVALID_OPERATION; + case STATE_IN_CONFIG: + case STATE_IN_RECONFIG: + // OK + break; + case STATE_CONSTRUCTED: + case STATE_CONFIGURED: + ALOGE("%s: Cannot finish configuration that hasn't been started", + __FUNCTION__); + return INVALID_OPERATION; + default: + ALOGE("%s: Unknown state", __FUNCTION__); + return INVALID_OPERATION; + } + + // Check if the stream configuration is unchanged, and skip reallocation if + // so. As documented in hardware/camera3.h:configure_streams(). + if (mState == STATE_IN_RECONFIG && + oldUsage == usage && + oldMaxBuffers == max_buffers) { + mState = STATE_CONFIGURED; + return OK; + } + + status_t res; + res = configureQueueLocked(); + if (res != OK) { + ALOGE("%s: Unable to configure stream %d queue: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + mState = STATE_ERROR; + return res; + } + + res = registerBuffersLocked(hal3Device); + if (res != OK) { + ALOGE("%s: Unable to register stream buffers with HAL: %s (%d)", + __FUNCTION__, strerror(-res), res); + mState = STATE_ERROR; + return res; + } + + mState = STATE_CONFIGURED; + + return res; +} + +status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + status_t res = getBufferLocked(buffer); + if (res == OK) { + fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true); + } + + return res; +} + +status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer, + nsecs_t timestamp) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + status_t res = returnBufferLocked(buffer, timestamp); + if (res == OK) { + fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/true); + } + + return res; +} + +status_t Camera3Stream::getInputBuffer(camera3_stream_buffer *buffer) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + status_t res = getInputBufferLocked(buffer); + if (res == OK) { + fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/false); + } + + return res; +} + +status_t Camera3Stream::returnInputBuffer(const camera3_stream_buffer &buffer) { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + + status_t res = returnInputBufferLocked(buffer); + if (res == OK) { + fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/false); + } + return res; +} + +void Camera3Stream::fireBufferListenersLocked( + const camera3_stream_buffer& /*buffer*/, bool acquired, bool output) { + List >::iterator it, end; + + // TODO: finish implementing + + Camera3StreamBufferListener::BufferInfo info = + Camera3StreamBufferListener::BufferInfo(); + info.mOutput = output; + // TODO: rest of fields + + for (it = mBufferListenerList.begin(), end = mBufferListenerList.end(); + it != end; + ++it) { + + sp listener = it->promote(); + if (listener != 0) { + if (acquired) { + listener->onBufferAcquired(info); + } else { + listener->onBufferReleased(info); + } + } + } +} + +bool Camera3Stream::hasOutstandingBuffers() const { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + return hasOutstandingBuffersLocked(); +} + +status_t Camera3Stream::disconnect() { + ATRACE_CALL(); + Mutex::Autolock l(mLock); + ALOGV("%s: Stream %d: Disconnecting...", __FUNCTION__, mId); + status_t res = disconnectLocked(); + + if (res == -ENOTCONN) { + // "Already disconnected" -- not an error + return OK; + } else { + return res; + } +} + +status_t 
Camera3Stream::registerBuffersLocked(camera3_device *hal3Device) { + ATRACE_CALL(); + status_t res; + + size_t bufferCount = getBufferCountLocked(); + + Vector buffers; + buffers.insertAt(NULL, 0, bufferCount); + + camera3_stream_buffer_set bufferSet = camera3_stream_buffer_set(); + bufferSet.stream = this; + bufferSet.num_buffers = bufferCount; + bufferSet.buffers = buffers.editArray(); + + Vector streamBuffers; + streamBuffers.insertAt(camera3_stream_buffer_t(), 0, bufferCount); + + // Register all buffers with the HAL. This means getting all the buffers + // from the stream, providing them to the HAL with the + // register_stream_buffers() method, and then returning them back to the + // stream in the error state, since they won't have valid data. + // + // Only registered buffers can be sent to the HAL. + + uint32_t bufferIdx = 0; + for (; bufferIdx < bufferCount; bufferIdx++) { + res = getBufferLocked( &streamBuffers.editItemAt(bufferIdx) ); + if (res != OK) { + ALOGE("%s: Unable to get buffer %d for registration with HAL", + __FUNCTION__, bufferIdx); + // Skip registering, go straight to cleanup + break; + } + + sp fence = new Fence(streamBuffers[bufferIdx].acquire_fence); + fence->waitForever("Camera3Stream::registerBuffers"); + + buffers.editItemAt(bufferIdx) = streamBuffers[bufferIdx].buffer; + } + if (bufferIdx == bufferCount) { + // Got all buffers, register with HAL + ALOGV("%s: Registering %d buffers with camera HAL", + __FUNCTION__, bufferCount); + ATRACE_BEGIN("camera3->register_stream_buffers"); + res = hal3Device->ops->register_stream_buffers(hal3Device, + &bufferSet); + ATRACE_END(); + } + + // Return all valid buffers to stream, in ERROR state to indicate + // they weren't filled. + for (size_t i = 0; i < bufferIdx; i++) { + streamBuffers.editItemAt(i).release_fence = -1; + streamBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR; + returnBufferLocked(streamBuffers[i], 0); + } + + return res; +} + +status_t Camera3Stream::getBufferLocked(camera3_stream_buffer *) { + ALOGE("%s: This type of stream does not support output", __FUNCTION__); + return INVALID_OPERATION; +} +status_t Camera3Stream::returnBufferLocked(const camera3_stream_buffer &, + nsecs_t) { + ALOGE("%s: This type of stream does not support output", __FUNCTION__); + return INVALID_OPERATION; +} +status_t Camera3Stream::getInputBufferLocked(camera3_stream_buffer *) { + ALOGE("%s: This type of stream does not support input", __FUNCTION__); + return INVALID_OPERATION; +} +status_t Camera3Stream::returnInputBufferLocked( + const camera3_stream_buffer &) { + ALOGE("%s: This type of stream does not support input", __FUNCTION__); + return INVALID_OPERATION; +} + +void Camera3Stream::addBufferListener( + wp listener) { + Mutex::Autolock l(mLock); + mBufferListenerList.push_back(listener); +} + +void Camera3Stream::removeBufferListener( + const sp& listener) { + Mutex::Autolock l(mLock); + + bool erased = true; + List >::iterator it, end; + for (it = mBufferListenerList.begin(), end = mBufferListenerList.end(); + it != end; + ) { + + if (*it == listener) { + it = mBufferListenerList.erase(it); + erased = true; + } else { + ++it; + } + } + + if (!erased) { + ALOGW("%s: Could not find listener to remove, already removed", + __FUNCTION__); + } +} + +}; // namespace camera3 + +}; // namespace android diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h new file mode 100644 index 0000000..69d81e4 --- /dev/null +++ 
b/services/camera/libcameraservice/device3/Camera3Stream.h @@ -0,0 +1,283 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_STREAM_H +#define ANDROID_SERVERS_CAMERA3_STREAM_H + +#include +#include +#include +#include +#include + +#include "hardware/camera3.h" + +#include "Camera3StreamBufferListener.h" +#include "Camera3StreamInterface.h" + +namespace android { + +namespace camera3 { + +/** + * A class for managing a single stream of input or output data from the camera + * device. + * + * The stream has an internal state machine to track whether it's + * connected/configured/etc. + * + * States: + * + * STATE_ERROR: A serious error has occurred, stream is unusable. Outstanding + * buffers may still be returned. + * + * STATE_CONSTRUCTED: The stream is ready for configuration, but buffers cannot + * be gotten yet. Not connected to any endpoint, no buffers are registered + * with the HAL. + * + * STATE_IN_CONFIG: Configuration has started, but not yet concluded. During this + * time, the usage, max_buffers, and priv fields of camera3_stream returned by + * startConfiguration() may be modified. + * + * STATE_IN_RE_CONFIG: Configuration has started, and the stream has been + * configured before. Need to track separately from IN_CONFIG to avoid + * re-registering buffers with HAL. + * + * STATE_CONFIGURED: Stream is configured, and has registered buffers with the + * HAL. The stream's getBuffer/returnBuffer work. The priv pointer may still be + * modified. + * + * Transition table: + * + * => STATE_CONSTRUCTED: + * When constructed with valid arguments + * => STATE_ERROR: + * When constructed with invalid arguments + * STATE_CONSTRUCTED => STATE_IN_CONFIG: + * When startConfiguration() is called + * STATE_IN_CONFIG => STATE_CONFIGURED: + * When finishConfiguration() is called + * STATE_IN_CONFIG => STATE_ERROR: + * When finishConfiguration() fails to allocate or register buffers. + * STATE_CONFIGURED => STATE_IN_RE_CONFIG: * + * When startConfiguration() is called again, after making sure stream is + * idle with waitUntilIdle(). + * STATE_IN_RE_CONFIG => STATE_CONFIGURED: + * When finishConfiguration() is called. + * STATE_IN_RE_CONFIG => STATE_ERROR: + * When finishConfiguration() fails to allocate or register buffers. + * STATE_CONFIGURED => STATE_CONSTRUCTED: + * When disconnect() is called after making sure stream is idle with + * waitUntilIdle(). + */ +class Camera3Stream : + protected camera3_stream, + public virtual Camera3StreamInterface, + public virtual RefBase { + public: + + virtual ~Camera3Stream(); + + static Camera3Stream* cast(camera3_stream *stream); + static const Camera3Stream* cast(const camera3_stream *stream); + + /** + * Get the stream's ID + */ + int getId() const; + + /** + * Get the stream's dimensions and format + */ + uint32_t getWidth() const; + uint32_t getHeight() const; + int getFormat() const; + + /** + * Start the stream configuration process. 
Returns a handle to the stream's + * information to be passed into the HAL device's configure_streams call. + * + * Until finishConfiguration() is called, no other methods on the stream may be + * called. The usage and max_buffers fields of camera3_stream may be modified + * between start/finishConfiguration, but may not be changed after that. + * The priv field of camera3_stream may be modified at any time after + * startConfiguration. + * + * Returns NULL in case of error starting configuration. + */ + camera3_stream* startConfiguration(); + + /** + * Check if the stream is mid-configuration (start has been called, but not + * finish). Used for lazy completion of configuration. + */ + bool isConfiguring() const; + + /** + * Completes the stream configuration process. During this call, the stream + * may call the device's register_stream_buffers() method. The stream + * information structure returned by startConfiguration() may no longer be + * modified after this call, but can still be read until the destruction of + * the stream. + * + * Returns: + * OK on a successful configuration + * NO_INIT in case of a serious error from the HAL device + * NO_MEMORY in case of an error registering buffers + * INVALID_OPERATION in case connecting to the consumer failed + */ + status_t finishConfiguration(camera3_device *hal3Device); + + /** + * Fill in the camera3_stream_buffer with the next valid buffer for this + * stream, to hand over to the HAL. + * + * This method may only be called once finishConfiguration has been called. + * For bidirectional streams, this method applies to the output-side + * buffers. + * + */ + status_t getBuffer(camera3_stream_buffer *buffer); + + /** + * Return a buffer to the stream after use by the HAL. + * + * This method may only be called for buffers provided by getBuffer(). + * For bidirectional streams, this method applies to the output-side buffers + */ + status_t returnBuffer(const camera3_stream_buffer &buffer, + nsecs_t timestamp); + + /** + * Fill in the camera3_stream_buffer with the next valid buffer for this + * stream, to hand over to the HAL. + * + * This method may only be called once finishConfiguration has been called. + * For bidirectional streams, this method applies to the input-side + * buffers. + * + */ + status_t getInputBuffer(camera3_stream_buffer *buffer); + + /** + * Return a buffer to the stream after use by the HAL. + * + * This method may only be called for buffers provided by getBuffer(). + * For bidirectional streams, this method applies to the input-side buffers + */ + status_t returnInputBuffer(const camera3_stream_buffer &buffer); + + /** + * Whether any of the stream's buffers are currently in use by the HAL, + * including buffers that have been returned but not yet had their + * release fence signaled. + */ + bool hasOutstandingBuffers() const; + + enum { + TIMEOUT_NEVER = -1 + }; + /** + * Wait until the HAL is done with all of this stream's buffers, including + * signalling all release fences. Returns TIMED_OUT if the timeout is exceeded, + * OK on success. Pass in TIMEOUT_NEVER for timeout to indicate an indefinite wait. + */ + virtual status_t waitUntilIdle(nsecs_t timeout) = 0; + + /** + * Disconnect stream from its non-HAL endpoint. After this, + * start/finishConfiguration must be called before the stream can be used + * again. This cannot be called if the stream has outstanding dequeued + * buffers. + */ + status_t disconnect(); + + /** + * Debug dump of the stream's state. 
+ */ + virtual void dump(int fd, const Vector &args) const = 0; + + void addBufferListener( + wp listener); + void removeBufferListener( + const sp& listener); + + protected: + const int mId; + const String8 mName; + // Zero for formats with fixed buffer size for given dimensions. + const size_t mMaxSize; + + enum { + STATE_ERROR, + STATE_CONSTRUCTED, + STATE_IN_CONFIG, + STATE_IN_RECONFIG, + STATE_CONFIGURED + } mState; + + mutable Mutex mLock; + + Camera3Stream(int id, camera3_stream_type type, + uint32_t width, uint32_t height, size_t maxSize, int format); + + /** + * Interface to be implemented by derived classes + */ + + // getBuffer / returnBuffer implementations + + // Since camera3_stream_buffer includes a raw pointer to the stream, + // cast to camera3_stream*, implementations must increment the + // refcount of the stream manually in getBufferLocked, and decrement it in + // returnBufferLocked. + virtual status_t getBufferLocked(camera3_stream_buffer *buffer); + virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer, + nsecs_t timestamp); + virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer); + virtual status_t returnInputBufferLocked( + const camera3_stream_buffer &buffer); + virtual bool hasOutstandingBuffersLocked() const = 0; + // Can return -ENOTCONN when we are already disconnected (not an error) + virtual status_t disconnectLocked() = 0; + + // Configure the buffer queue interface to the other end of the stream, + // after the HAL has provided usage and max_buffers values. After this call, + // the stream must be ready to produce all buffers for registration with + // HAL. + virtual status_t configureQueueLocked() = 0; + + // Get the total number of buffers in the queue + virtual size_t getBufferCountLocked() = 0; + + private: + uint32_t oldUsage; + uint32_t oldMaxBuffers; + + // Gets all buffers from endpoint and registers them with the HAL. + status_t registerBuffersLocked(camera3_device *hal3Device); + + void fireBufferListenersLocked(const camera3_stream_buffer& buffer, + bool acquired, bool output); + List > mBufferListenerList; + +}; // class Camera3Stream + +}; // namespace camera3 + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h b/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h new file mode 100644 index 0000000..62ea6c0 --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H +#define ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H + +#include +#include + +namespace android { + +namespace camera3 { + +class Camera3StreamBufferListener : public virtual RefBase { +public: + + struct BufferInfo { + bool mOutput; // if false then input buffer + Rect mCrop; + uint32_t mTransform; + uint32_t mScalingMode; + int64_t mTimestamp; + uint64_t mFrameNumber; + }; + + // Buffer was acquired by the HAL + virtual void onBufferAcquired(const BufferInfo& bufferInfo) = 0; + // Buffer was released by the HAL + virtual void onBufferReleased(const BufferInfo& bufferInfo) = 0; +}; + +}; //namespace camera3 +}; //namespace android + +#endif diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h new file mode 100644 index 0000000..4768536 --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h @@ -0,0 +1,162 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H +#define ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H + +#include +#include "Camera3StreamBufferListener.h" + +struct camera3_stream_buffer; + +namespace android { + +namespace camera3 { + +/** + * An interface for managing a single stream of input and/or output data from + * the camera device. + */ +class Camera3StreamInterface : public virtual RefBase { + public: + /** + * Get the stream's ID + */ + virtual int getId() const = 0; + + /** + * Get the stream's dimensions and format + */ + virtual uint32_t getWidth() const = 0; + virtual uint32_t getHeight() const = 0; + virtual int getFormat() const = 0; + + /** + * Start the stream configuration process. Returns a handle to the stream's + * information to be passed into the HAL device's configure_streams call. + * + * Until finishConfiguration() is called, no other methods on the stream may + * be called. The usage and max_buffers fields of camera3_stream may be + * modified between start/finishConfiguration, but may not be changed after + * that. The priv field of camera3_stream may be modified at any time after + * startConfiguration. + * + * Returns NULL in case of error starting configuration. + */ + virtual camera3_stream* startConfiguration() = 0; + + /** + * Check if the stream is mid-configuration (start has been called, but not + * finish). Used for lazy completion of configuration. + */ + virtual bool isConfiguring() const = 0; + + /** + * Completes the stream configuration process. During this call, the stream + * may call the device's register_stream_buffers() method. The stream + * information structure returned by startConfiguration() may no longer be + * modified after this call, but can still be read until the destruction of + * the stream. 
+ * + * Returns: + * OK on a successful configuration + * NO_INIT in case of a serious error from the HAL device + * NO_MEMORY in case of an error registering buffers + * INVALID_OPERATION in case connecting to the consumer failed + */ + virtual status_t finishConfiguration(camera3_device *hal3Device) = 0; + + /** + * Fill in the camera3_stream_buffer with the next valid buffer for this + * stream, to hand over to the HAL. + * + * This method may only be called once finishConfiguration has been called. + * For bidirectional streams, this method applies to the output-side + * buffers. + * + */ + virtual status_t getBuffer(camera3_stream_buffer *buffer) = 0; + + /** + * Return a buffer to the stream after use by the HAL. + * + * This method may only be called for buffers provided by getBuffer(). + * For bidirectional streams, this method applies to the output-side buffers + */ + virtual status_t returnBuffer(const camera3_stream_buffer &buffer, + nsecs_t timestamp) = 0; + + /** + * Fill in the camera3_stream_buffer with the next valid buffer for this + * stream, to hand over to the HAL. + * + * This method may only be called once finishConfiguration has been called. + * For bidirectional streams, this method applies to the input-side + * buffers. + * + */ + virtual status_t getInputBuffer(camera3_stream_buffer *buffer) = 0; + + /** + * Return a buffer to the stream after use by the HAL. + * + * This method may only be called for buffers provided by getBuffer(). + * For bidirectional streams, this method applies to the input-side buffers + */ + virtual status_t returnInputBuffer(const camera3_stream_buffer &buffer) = 0; + + /** + * Whether any of the stream's buffers are currently in use by the HAL, + * including buffers that have been returned but not yet had their + * release fence signaled. + */ + virtual bool hasOutstandingBuffers() const = 0; + + enum { + TIMEOUT_NEVER = -1 + }; + /** + * Wait until the HAL is done with all of this stream's buffers, including + * signalling all release fences. Returns TIMED_OUT if the timeout is + * exceeded, OK on success. Pass in TIMEOUT_NEVER for timeout to indicate + * an indefinite wait. + */ + virtual status_t waitUntilIdle(nsecs_t timeout) = 0; + + /** + * Disconnect stream from its non-HAL endpoint. After this, + * start/finishConfiguration must be called before the stream can be used + * again. This cannot be called if the stream has outstanding dequeued + * buffers. + */ + virtual status_t disconnect() = 0; + + /** + * Debug dump of the stream's state. + */ + virtual void dump(int fd, const Vector &args) const = 0; + + virtual void addBufferListener( + wp listener) = 0; + virtual void removeBufferListener( + const sp& listener) = 0; +}; + +} // namespace camera3 + +} // namespace android + +#endif diff --git a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp new file mode 100644 index 0000000..8790c8c --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp @@ -0,0 +1,328 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera3-ZslStream" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include +#include "Camera3ZslStream.h" + +typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem; + +namespace android { + +namespace camera3 { + +namespace { +struct TimestampFinder : public RingBufferConsumer::RingBufferComparator { + typedef RingBufferConsumer::BufferInfo BufferInfo; + + enum { + SELECT_I1 = -1, + SELECT_I2 = 1, + SELECT_NEITHER = 0, + }; + + TimestampFinder(nsecs_t timestamp) : mTimestamp(timestamp) {} + ~TimestampFinder() {} + + template + static void swap(T& a, T& b) { + T tmp = a; + a = b; + b = tmp; + } + + /** + * Try to find the best candidate for a ZSL buffer. + * Match priority from best to worst: + * 1) Timestamps match. + * 2) Timestamp is closest to the needle (and lower). + * 3) Timestamp is closest to the needle (and higher). + * + */ + virtual int compare(const BufferInfo *i1, + const BufferInfo *i2) const { + // Try to select non-null object first. + if (i1 == NULL) { + return SELECT_I2; + } else if (i2 == NULL) { + return SELECT_I1; + } + + // Best result: timestamp is identical + if (i1->mTimestamp == mTimestamp) { + return SELECT_I1; + } else if (i2->mTimestamp == mTimestamp) { + return SELECT_I2; + } + + const BufferInfo* infoPtrs[2] = { + i1, + i2 + }; + int infoSelectors[2] = { + SELECT_I1, + SELECT_I2 + }; + + // Order i1,i2 so that always i1.timestamp < i2.timestamp + if (i1->mTimestamp > i2->mTimestamp) { + swap(infoPtrs[0], infoPtrs[1]); + swap(infoSelectors[0], infoSelectors[1]); + } + + // Second best: closest (lower) timestamp + if (infoPtrs[1]->mTimestamp < mTimestamp) { + return infoSelectors[1]; + } else if (infoPtrs[0]->mTimestamp < mTimestamp) { + return infoSelectors[0]; + } + + // Worst: closest (higher) timestamp + return infoSelectors[0]; + + /** + * The above cases should cover all the possibilities, + * and we get an 'empty' result only if the ring buffer + * was empty itself + */ + } + + const nsecs_t mTimestamp; +}; // struct TimestampFinder +} // namespace anonymous + +Camera3ZslStream::Camera3ZslStream(int id, uint32_t width, uint32_t height, + int depth) : + Camera3OutputStream(id, CAMERA3_STREAM_BIDIRECTIONAL, + width, height, + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), + mDepth(depth), + mProducer(new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL, + depth)) { + + mConsumer = new Surface(mProducer->getProducerInterface()); +} + +Camera3ZslStream::~Camera3ZslStream() { +} + +status_t Camera3ZslStream::getInputBufferLocked(camera3_stream_buffer *buffer) { + ATRACE_CALL(); + + status_t res; + + // TODO: potentially register from inputBufferLocked + // this should be ok, registerBuffersLocked only calls getBuffer for now + // register in output mode instead of input mode for ZSL streams. 
+ if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) { + ALOGE("%s: Stream %d: Buffer registration for input streams" + " not implemented (state %d)", + __FUNCTION__, mId, mState); + return INVALID_OPERATION; + } + + if ((res = getBufferPreconditionCheckLocked()) != OK) { + return res; + } + + ANativeWindowBuffer* anb; + int fenceFd; + + assert(mProducer != 0); + + sp bufferItem; + { + List >::iterator it, end; + it = mInputBufferQueue.begin(); + end = mInputBufferQueue.end(); + + // Need to call enqueueInputBufferByTimestamp as a prerequisite + if (it == end) { + ALOGE("%s: Stream %d: No input buffer was queued", + __FUNCTION__, mId); + return INVALID_OPERATION; + } + bufferItem = *it; + mInputBufferQueue.erase(it); + } + + anb = bufferItem->getBufferItem().mGraphicBuffer->getNativeBuffer(); + assert(anb != NULL); + fenceFd = bufferItem->getBufferItem().mFence->dup(); + + /** + * FenceFD now owned by HAL except in case of error, + * in which case we reassign it to acquire_fence + */ + handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd, + /*releaseFence*/-1, CAMERA3_BUFFER_STATUS_OK); + + mBuffersInFlight.push_back(bufferItem); + + return OK; +} + +status_t Camera3ZslStream::returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut) { + + if (output) { + // Output stream path + return Camera3OutputStream::returnBufferCheckedLocked(buffer, + timestamp, + output, + releaseFenceOut); + } + + /** + * Input stream path + */ + bool bufferFound = false; + sp bufferItem; + { + // Find the buffer we are returning + Vector >::iterator it, end; + for (it = mBuffersInFlight.begin(), end = mBuffersInFlight.end(); + it != end; + ++it) { + + const sp& tmp = *it; + ANativeWindowBuffer *anb = + tmp->getBufferItem().mGraphicBuffer->getNativeBuffer(); + if (anb != NULL && &(anb->handle) == buffer.buffer) { + bufferFound = true; + bufferItem = tmp; + mBuffersInFlight.erase(it); + break; + } + } + } + if (!bufferFound) { + ALOGE("%s: Stream %d: Can't return buffer that wasn't sent to HAL", + __FUNCTION__, mId); + return INVALID_OPERATION; + } + + int releaseFenceFd = buffer.release_fence; + + if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { + if (buffer.release_fence != -1) { + ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when " + "there is an error", __FUNCTION__, mId, buffer.release_fence); + close(buffer.release_fence); + } + + /** + * Reassign release fence as the acquire fence incase of error + */ + releaseFenceFd = buffer.acquire_fence; + } + + /** + * Unconditionally return buffer to the buffer queue. 
+ * - Fwk takes over the release_fence ownership + */ + sp releaseFence = new Fence(releaseFenceFd); + bufferItem->getBufferItem().mFence = releaseFence; + bufferItem.clear(); // dropping last reference unpins buffer + + *releaseFenceOut = releaseFence; + + return OK; +} + +status_t Camera3ZslStream::returnInputBufferLocked( + const camera3_stream_buffer &buffer) { + ATRACE_CALL(); + + status_t res = returnAnyBufferLocked(buffer, /*timestamp*/0, + /*output*/false); + + return res; +} + +void Camera3ZslStream::dump(int fd, const Vector &args) const { + (void) args; + + String8 lines; + lines.appendFormat(" Stream[%d]: ZSL\n", mId); + write(fd, lines.string(), lines.size()); + + Camera3IOStreamBase::dump(fd, args); + + lines = String8(); + lines.appendFormat(" Input buffers pending: %d, in flight %d\n", + mInputBufferQueue.size(), mBuffersInFlight.size()); + write(fd, lines.string(), lines.size()); +} + +status_t Camera3ZslStream::enqueueInputBufferByTimestamp( + nsecs_t timestamp, + nsecs_t* actualTimestamp) { + + Mutex::Autolock l(mLock); + + TimestampFinder timestampFinder = TimestampFinder(timestamp); + + sp pinnedBuffer = + mProducer->pinSelectedBuffer(timestampFinder, + /*waitForFence*/false); + + if (pinnedBuffer == 0) { + ALOGE("%s: No ZSL buffers were available yet", __FUNCTION__); + return NO_BUFFER_AVAILABLE; + } + + nsecs_t actual = pinnedBuffer->getBufferItem().mTimestamp; + + if (actual != timestamp) { + ALOGW("%s: ZSL buffer candidate search didn't find an exact match --" + " requested timestamp = %lld, actual timestamp = %lld", + __FUNCTION__, timestamp, actual); + } + + mInputBufferQueue.push_back(pinnedBuffer); + + if (actualTimestamp != NULL) { + *actualTimestamp = actual; + } + + return OK; +} + +status_t Camera3ZslStream::clearInputRingBuffer() { + Mutex::Autolock l(mLock); + + mInputBufferQueue.clear(); + + return mProducer->clear(); +} + +status_t Camera3ZslStream::setTransform(int /*transform*/) { + ALOGV("%s: Not implemented", __FUNCTION__); + return INVALID_OPERATION; +} + +}; // namespace camera3 + +}; // namespace android diff --git a/services/camera/libcameraservice/device3/Camera3ZslStream.h b/services/camera/libcameraservice/device3/Camera3ZslStream.h new file mode 100644 index 0000000..c7f4490 --- /dev/null +++ b/services/camera/libcameraservice/device3/Camera3ZslStream.h @@ -0,0 +1,105 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_ZSL_STREAM_H +#define ANDROID_SERVERS_CAMERA3_ZSL_STREAM_H + +#include +#include +#include + +#include "Camera3OutputStream.h" + +namespace android { + +namespace camera3 { + +/** + * A class for managing a single opaque ZSL stream to/from the camera device. + * This acts as a bidirectional stream at the HAL layer, caching and discarding + * most output buffers, and when directed, pushes a buffer back to the HAL for + * processing. 
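+ *
+ * Rough usage sketch (illustrative only; the Camera3Device code that drives
+ * this stream, and the names zslStream/shutterTimestamp, are assumed here
+ * rather than taken from this patch):
+ *
+ *   nsecs_t actual;
+ *   zslStream->enqueueInputBufferByTimestamp(shutterTimestamp, &actual);
+ *   // the device then hands the pinned buffer to the HAL via getInputBuffer()
+ *   // and takes it back with returnInputBuffer() once reprocessing completes
+ *   zslStream->clearInputRingBuffer();   // drop the remaining ZSL candidates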
+ */ +class Camera3ZslStream : + public Camera3OutputStream { + public: + /** + * Set up a ZSL stream of a given resolution. Depth is the number of buffers + * cached within the stream that can be retrieved for input. + */ + Camera3ZslStream(int id, uint32_t width, uint32_t height, int depth); + ~Camera3ZslStream(); + + virtual void dump(int fd, const Vector &args) const; + + enum { NO_BUFFER_AVAILABLE = BufferQueue::NO_BUFFER_AVAILABLE }; + + /** + * Locate a buffer matching this timestamp in the RingBufferConsumer, + * and mark it to be queued at the next getInputBufferLocked invocation. + * + * Errors: Returns NO_BUFFER_AVAILABLE if we could not find a match. + * + */ + status_t enqueueInputBufferByTimestamp(nsecs_t timestamp, + nsecs_t* actualTimestamp); + + /** + * Clears the buffers that can be used by enqueueInputBufferByTimestamp + */ + status_t clearInputRingBuffer(); + + protected: + + /** + * Camera3OutputStreamInterface implementation + */ + status_t setTransform(int transform); + + private: + + int mDepth; + // Input buffers pending to be queued into HAL + List > mInputBufferQueue; + sp mProducer; + + // Input buffers in flight to HAL + Vector > mBuffersInFlight; + + /** + * Camera3Stream interface + */ + + // getInputBuffer/returnInputBuffer operate the input stream side of the + // ZslStream. + virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer); + virtual status_t returnInputBufferLocked( + const camera3_stream_buffer &buffer); + + // Actual body to return either input or output buffers + virtual status_t returnBufferCheckedLocked( + const camera3_stream_buffer &buffer, + nsecs_t timestamp, + bool output, + /*out*/ + sp *releaseFenceOut); +}; // class Camera3ZslStream + +}; // namespace camera3 + +}; // namespace android + +#endif diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp b/services/camera/libcameraservice/photography/CameraDeviceClient.cpp deleted file mode 100644 index b7239e2..0000000 --- a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp +++ /dev/null @@ -1,551 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "CameraDeviceClient" -#define ATRACE_TAG ATRACE_TAG_CAMERA -// #define LOG_NDEBUG 0 - -#include -#include - -#include -#include -#include "camera2/Parameters.h" -#include "CameraDeviceClient.h" -#include "camera2/ProFrameProcessor.h" -#include "CameraDeviceBase.h" -#include - -namespace android { -using namespace camera2; - -CameraDeviceClientBase::CameraDeviceClientBase( - const sp& cameraService, - const sp& remoteCallback, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid) : - BasicClient(cameraService, remoteCallback->asBinder(), clientPackageName, - cameraId, cameraFacing, clientPid, clientUid, servicePid), - mRemoteCallback(remoteCallback) { -} -void CameraDeviceClientBase::notifyError() { - // Thread safe. 
Don't bother locking. - sp remoteCb = mRemoteCallback; - - if (remoteCb != 0) { - remoteCb->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0); - } -} - -// Interface used by CameraService - -CameraDeviceClient::CameraDeviceClient(const sp& cameraService, - const sp& remoteCallback, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid) : - Camera2ClientBase(cameraService, remoteCallback, clientPackageName, - cameraId, cameraFacing, clientPid, clientUid, servicePid), - mRequestIdCounter(0) { - - ATRACE_CALL(); - ALOGI("CameraDeviceClient %d: Opened", cameraId); -} - -status_t CameraDeviceClient::initialize(camera_module_t *module) -{ - ATRACE_CALL(); - status_t res; - - res = Camera2ClientBase::initialize(module); - if (res != OK) { - return res; - } - - String8 threadName; - mFrameProcessor = new ProFrameProcessor(mDevice); - threadName = String8::format("CDU-%d-FrameProc", mCameraId); - mFrameProcessor->run(threadName.string()); - - mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID, - FRAME_PROCESSOR_LISTENER_MAX_ID, - /*listener*/this); - - return OK; -} - -CameraDeviceClient::~CameraDeviceClient() { -} - -status_t CameraDeviceClient::submitRequest(sp request, - bool streaming) { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - status_t res; - - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - if (request == 0) { - ALOGE("%s: Camera %d: Sent null request. Rejecting request.", - __FUNCTION__, mCameraId); - return BAD_VALUE; - } - - CameraMetadata metadata(request->mMetadata); - - if (metadata.isEmpty()) { - ALOGE("%s: Camera %d: Sent empty metadata packet. Rejecting request.", - __FUNCTION__, mCameraId); - return BAD_VALUE; - } else if (request->mSurfaceList.size() == 0) { - ALOGE("%s: Camera %d: Requests must have at least one surface target. 
" - "Rejecting request.", __FUNCTION__, mCameraId); - return BAD_VALUE; - } - - if (!enforceRequestPermissions(metadata)) { - // Callee logs - return PERMISSION_DENIED; - } - - /** - * Write in the output stream IDs which we calculate from - * the capture request's list of surface targets - */ - Vector outputStreamIds; - outputStreamIds.setCapacity(request->mSurfaceList.size()); - for (size_t i = 0; i < request->mSurfaceList.size(); ++i) { - sp surface = request->mSurfaceList[i]; - - if (surface == 0) continue; - - sp gbp = surface->getIGraphicBufferProducer(); - int idx = mStreamMap.indexOfKey(gbp->asBinder()); - - // Trying to submit request with surface that wasn't created - if (idx == NAME_NOT_FOUND) { - ALOGE("%s: Camera %d: Tried to submit a request with a surface that" - " we have not called createStream on", - __FUNCTION__, mCameraId); - return BAD_VALUE; - } - - int streamId = mStreamMap.valueAt(idx); - outputStreamIds.push_back(streamId); - ALOGV("%s: Camera %d: Appending output stream %d to request", - __FUNCTION__, mCameraId, streamId); - } - - metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0], - outputStreamIds.size()); - - // TODO: @hide ANDROID_REQUEST_ID, or use another request token - int32_t requestId = mRequestIdCounter++; - metadata.update(ANDROID_REQUEST_ID, &requestId, /*size*/1); - ALOGV("%s: Camera %d: Submitting request with ID %d", - __FUNCTION__, mCameraId, requestId); - - if (streaming) { - res = mDevice->setStreamingRequest(metadata); - if (res != OK) { - ALOGE("%s: Camera %d: Got error %d after trying to set streaming " - "request", __FUNCTION__, mCameraId, res); - } else { - mStreamingRequestList.push_back(requestId); - } - } else { - res = mDevice->capture(metadata); - if (res != OK) { - ALOGE("%s: Camera %d: Got error %d after trying to set capture", - __FUNCTION__, mCameraId, res); - } - } - - ALOGV("%s: Camera %d: End of function", __FUNCTION__, mCameraId); - if (res == OK) { - return requestId; - } - - return res; -} - -status_t CameraDeviceClient::cancelRequest(int requestId) { - ATRACE_CALL(); - ALOGV("%s, requestId = %d", __FUNCTION__, requestId); - - status_t res; - - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - Vector::iterator it, end; - for (it = mStreamingRequestList.begin(), end = mStreamingRequestList.end(); - it != end; ++it) { - if (*it == requestId) { - break; - } - } - - if (it == end) { - ALOGE("%s: Camera%d: Did not find request id %d in list of streaming " - "requests", __FUNCTION__, mCameraId, requestId); - return BAD_VALUE; - } - - res = mDevice->clearStreamingRequest(); - - if (res == OK) { - ALOGV("%s: Camera %d: Successfully cleared streaming request", - __FUNCTION__, mCameraId); - mStreamingRequestList.erase(it); - } - - return res; -} - -status_t CameraDeviceClient::deleteStream(int streamId) { - ATRACE_CALL(); - ALOGV("%s (streamId = 0x%x)", __FUNCTION__, streamId); - - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - // Guard against trying to delete non-created streams - ssize_t index = NAME_NOT_FOUND; - for (size_t i = 0; i < mStreamMap.size(); ++i) { - if (streamId == mStreamMap.valueAt(i)) { - index = i; - break; - } - } - - if (index == NAME_NOT_FOUND) { - ALOGW("%s: Camera %d: Invalid stream ID (%d) specified, no stream " - "created yet", __FUNCTION__, mCameraId, streamId); - 
return BAD_VALUE; - } - - // Also returns BAD_VALUE if stream ID was not valid - res = mDevice->deleteStream(streamId); - - if (res == BAD_VALUE) { - ALOGE("%s: Camera %d: Unexpected BAD_VALUE when deleting stream, but we" - " already checked and the stream ID (%d) should be valid.", - __FUNCTION__, mCameraId, streamId); - } else if (res == OK) { - mStreamMap.removeItemsAt(index); - - ALOGV("%s: Camera %d: Successfully deleted stream ID (%d)", - __FUNCTION__, mCameraId, streamId); - } - - return res; -} - -status_t CameraDeviceClient::createStream(int width, int height, int format, - const sp& bufferProducer) -{ - ATRACE_CALL(); - ALOGV("%s (w = %d, h = %d, f = 0x%x)", __FUNCTION__, width, height, format); - - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - // Don't create multiple streams for the same target surface - { - ssize_t index = mStreamMap.indexOfKey(bufferProducer->asBinder()); - if (index != NAME_NOT_FOUND) { - ALOGW("%s: Camera %d: Buffer producer already has a stream for it " - "(ID %d)", - __FUNCTION__, mCameraId, index); - return ALREADY_EXISTS; - } - } - - sp binder; - sp anw; - if (bufferProducer != 0) { - binder = bufferProducer->asBinder(); - anw = new Surface(bufferProducer); - } - - // TODO: remove w,h,f since we are ignoring them - - if ((res = anw->query(anw.get(), NATIVE_WINDOW_WIDTH, &width)) != OK) { - ALOGE("%s: Camera %d: Failed to query Surface width", __FUNCTION__, - mCameraId); - return res; - } - if ((res = anw->query(anw.get(), NATIVE_WINDOW_HEIGHT, &height)) != OK) { - ALOGE("%s: Camera %d: Failed to query Surface height", __FUNCTION__, - mCameraId); - return res; - } - if ((res = anw->query(anw.get(), NATIVE_WINDOW_FORMAT, &format)) != OK) { - ALOGE("%s: Camera %d: Failed to query Surface format", __FUNCTION__, - mCameraId); - return res; - } - - // FIXME: remove this override since the default format should be - // IMPLEMENTATION_DEFINED. b/9487482 - if (format >= HAL_PIXEL_FORMAT_RGBA_8888 && - format <= HAL_PIXEL_FORMAT_BGRA_8888) { - ALOGW("%s: Camera %d: Overriding format 0x%x to IMPLEMENTATION_DEFINED", - __FUNCTION__, mCameraId, format); - format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; - } - - // TODO: add startConfigure/stopConfigure call to CameraDeviceBase - // this will make it so Camera3Device doesn't call configure_streams - // after each call, but only once we are done with all. - - int streamId = -1; - if (format == HAL_PIXEL_FORMAT_BLOB) { - // JPEG buffers need to be sized for maximum possible compressed size - CameraMetadata staticInfo = mDevice->info(); - camera_metadata_entry_t entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE); - if (entry.count == 0) { - ALOGE("%s: Camera %d: Can't find maximum JPEG size in " - "static metadata!", __FUNCTION__, mCameraId); - return INVALID_OPERATION; - } - int32_t maxJpegSize = entry.data.i32[0]; - res = mDevice->createStream(anw, width, height, format, maxJpegSize, - &streamId); - } else { - // All other streams are a known size - res = mDevice->createStream(anw, width, height, format, /*size*/0, - &streamId); - } - - if (res == OK) { - mStreamMap.add(bufferProducer->asBinder(), streamId); - - ALOGV("%s: Camera %d: Successfully created a new stream ID %d", - __FUNCTION__, mCameraId, streamId); - return streamId; - } - - return res; -} - -// Create a request object from a template. 
-status_t CameraDeviceClient::createDefaultRequest(int templateId, - /*out*/ - CameraMetadata* request) -{ - ATRACE_CALL(); - ALOGV("%s (templateId = 0x%x)", __FUNCTION__, templateId); - - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - CameraMetadata metadata; - if ( (res = mDevice->createDefaultRequest(templateId, &metadata) ) == OK && - request != NULL) { - - request->swap(metadata); - } - - return res; -} - -status_t CameraDeviceClient::getCameraInfo(/*out*/CameraMetadata* info) -{ - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - status_t res = OK; - - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - if (info != NULL) { - *info = mDevice->info(); // static camera metadata - // TODO: merge with device-specific camera metadata - } - - return res; -} - -status_t CameraDeviceClient::waitUntilIdle() -{ - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - status_t res = OK; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - // FIXME: Also need check repeating burst. - if (!mStreamingRequestList.isEmpty()) { - ALOGE("%s: Camera %d: Try to waitUntilIdle when there are active streaming requests", - __FUNCTION__, mCameraId); - return INVALID_OPERATION; - } - res = mDevice->waitUntilDrained(); - ALOGV("%s Done", __FUNCTION__); - - return res; -} - -status_t CameraDeviceClient::dump(int fd, const Vector& args) { - String8 result; - result.appendFormat("CameraDeviceClient[%d] (%p) PID: %d, dump:\n", - mCameraId, - getRemoteCallback()->asBinder().get(), - mClientPid); - result.append(" State: "); - - // TODO: print dynamic/request section from most recent requests - mFrameProcessor->dump(fd, args); - - return dumpDevice(fd, args); -} - -// TODO: refactor the code below this with IProCameraUser. -// it's 100% copy-pasted, so lets not change it right now to make it easier. 
- -void CameraDeviceClient::detachDevice() { - if (mDevice == 0) return; - - ALOGV("Camera %d: Stopping processors", mCameraId); - - mFrameProcessor->removeListener(FRAME_PROCESSOR_LISTENER_MIN_ID, - FRAME_PROCESSOR_LISTENER_MAX_ID, - /*listener*/this); - mFrameProcessor->requestExit(); - ALOGV("Camera %d: Waiting for threads", mCameraId); - mFrameProcessor->join(); - ALOGV("Camera %d: Disconnecting device", mCameraId); - - // WORKAROUND: HAL refuses to disconnect while there's streams in flight - { - mDevice->clearStreamingRequest(); - - status_t code; - if ((code = mDevice->waitUntilDrained()) != OK) { - ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__, - code); - } - } - - Camera2ClientBase::detachDevice(); -} - -/** Device-related methods */ -void CameraDeviceClient::onFrameAvailable(int32_t frameId, - const CameraMetadata& frame) { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - - if (mRemoteCallback != NULL) { - ALOGV("%s: frame = %p ", __FUNCTION__, &frame); - mRemoteCallback->onResultReceived(frameId, frame); - } - -} - -// TODO: move to Camera2ClientBase -bool CameraDeviceClient::enforceRequestPermissions(CameraMetadata& metadata) { - - const int pid = IPCThreadState::self()->getCallingPid(); - const int selfPid = getpid(); - camera_metadata_entry_t entry; - - /** - * Mixin default important security values - * - android.led.transmit = defaulted ON - */ - CameraMetadata staticInfo = mDevice->info(); - entry = staticInfo.find(ANDROID_LED_AVAILABLE_LEDS); - for(size_t i = 0; i < entry.count; ++i) { - uint8_t led = entry.data.u8[i]; - - switch(led) { - case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: { - uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON; - if (!metadata.exists(ANDROID_LED_TRANSMIT)) { - metadata.update(ANDROID_LED_TRANSMIT, - &transmitDefault, 1); - } - break; - } - } - } - - // We can do anything! - if (pid == selfPid) { - return true; - } - - /** - * Permission check special fields in the request - * - android.led.transmit = android.permission.CAMERA_DISABLE_TRANSMIT - */ - entry = metadata.find(ANDROID_LED_TRANSMIT); - if (entry.count > 0 && entry.data.u8[0] != ANDROID_LED_TRANSMIT_ON) { - String16 permissionString = - String16("android.permission.CAMERA_DISABLE_TRANSMIT_LED"); - if (!checkCallingPermission(permissionString)) { - const int uid = IPCThreadState::self()->getCallingUid(); - ALOGE("Permission Denial: " - "can't disable transmit LED pid=%d, uid=%d", pid, uid); - return false; - } - } - - return true; -} - -} // namespace android diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.h b/services/camera/libcameraservice/photography/CameraDeviceClient.h deleted file mode 100644 index bb2949c..0000000 --- a/services/camera/libcameraservice/photography/CameraDeviceClient.h +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERADEVICECLIENT_H -#define ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERADEVICECLIENT_H - -#include "CameraDeviceBase.h" -#include "CameraService.h" -#include "camera2/ProFrameProcessor.h" -#include "Camera2ClientBase.h" -#include -#include - -namespace android { - -struct CameraDeviceClientBase : - public CameraService::BasicClient, public BnCameraDeviceUser -{ - typedef ICameraDeviceCallbacks TCamCallbacks; - - const sp& getRemoteCallback() { - return mRemoteCallback; - } - -protected: - CameraDeviceClientBase(const sp& cameraService, - const sp& remoteCallback, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid); - - virtual void notifyError(); - - sp mRemoteCallback; -}; - -/** - * Implements the binder ICameraDeviceUser API, - * meant for HAL3-public implementation of - * android.hardware.photography.CameraDevice - */ -class CameraDeviceClient : - public Camera2ClientBase, - public camera2::ProFrameProcessor::FilteredListener -{ -public: - /** - * ICameraDeviceUser interface (see ICameraDeviceUser for details) - */ - - // Note that the callee gets a copy of the metadata. - virtual int submitRequest(sp request, - bool streaming = false); - virtual status_t cancelRequest(int requestId); - - // Returns -EBUSY if device is not idle - virtual status_t deleteStream(int streamId); - - virtual status_t createStream( - int width, - int height, - int format, - const sp& bufferProducer); - - // Create a request object from a template. - virtual status_t createDefaultRequest(int templateId, - /*out*/ - CameraMetadata* request); - - // Get the static metadata for the camera - // -- Caller owns the newly allocated metadata - virtual status_t getCameraInfo(/*out*/CameraMetadata* info); - - // Wait until all the submitted requests have finished processing - virtual status_t waitUntilIdle(); - /** - * Interface used by CameraService - */ - - CameraDeviceClient(const sp& cameraService, - const sp& remoteCallback, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid); - virtual ~CameraDeviceClient(); - - virtual status_t initialize(camera_module_t *module); - - virtual status_t dump(int fd, const Vector& args); - - /** - * Interface used by independent components of CameraDeviceClient. 
- */ -protected: - /** FilteredListener implementation **/ - virtual void onFrameAvailable(int32_t frameId, - const CameraMetadata& frame); - virtual void detachDevice(); - -private: - /** ICameraDeviceUser interface-related private members */ - - /** Preview callback related members */ - sp mFrameProcessor; - static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0; - static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL; - - /** Utility members */ - bool enforceRequestPermissions(CameraMetadata& metadata); - - // IGraphicsBufferProducer binder -> Stream ID - KeyedVector, int> mStreamMap; - - // Stream ID - Vector mStreamingRequestList; - - int32_t mRequestIdCounter; -}; - -}; // namespace android - -#endif -- cgit v1.1 From 4d74828fc0f6c13d49a36147a4250bcfa8ad0a47 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 19 Nov 2012 11:24:51 -0800 Subject: Remove default channel mask in AudioRecord constructor and set() Change-Id: I22ad4ba8777842bf6705e79c6ad796fdb9a4104c --- include/media/AudioRecord.h | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index 7aa3c24..c65ffe8 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -131,7 +131,7 @@ public: * sampleRate: Data sink sampling rate in Hz. * format: Audio format (e.g AUDIO_FORMAT_PCM_16_BIT for signed * 16 bits per sample). - * channelMask: Channel mask. + * channelMask: Channel mask, such that audio_is_input_channel(channelMask) is true. * frameCount: Minimum size of track PCM buffer in frames. This defines the * application's contribution to the * latency of the track. The actual size selected by the AudioRecord could @@ -148,9 +148,9 @@ public: */ AudioRecord(audio_source_t inputSource, - uint32_t sampleRate = 0, - audio_format_t format = AUDIO_FORMAT_DEFAULT, - audio_channel_mask_t channelMask = AUDIO_CHANNEL_IN_MONO, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, int frameCount = 0, callback_t cbf = NULL, void* user = NULL, @@ -178,10 +178,10 @@ public: * * threadCanCallJava: Whether callbacks are made from an attached thread and thus can call JNI. 
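 *
 * Illustrative call now that the defaults are removed (the source, rate and
 * format values below are assumed examples, not part of this change):
 *
 *   set(AUDIO_SOURCE_MIC, 44100, AUDIO_FORMAT_PCM_16_BIT,
 *       AUDIO_CHANNEL_IN_MONO);
 *
 * The channel mask must now be supplied explicitly and should satisfy
 * audio_is_input_channel(channelMask).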
*/ - status_t set(audio_source_t inputSource = AUDIO_SOURCE_DEFAULT, - uint32_t sampleRate = 0, - audio_format_t format = AUDIO_FORMAT_DEFAULT, - audio_channel_mask_t channelMask = AUDIO_CHANNEL_IN_MONO, + status_t set(audio_source_t inputSource, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, int frameCount = 0, callback_t cbf = NULL, void* user = NULL, -- cgit v1.1 From 34af02647b387a252fb02bab8e2cb9f7bd9c8abb Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 30 Jul 2013 11:52:39 -0700 Subject: Add resampler comments and fix a typo Change-Id: Ie071673875f663de4212eed4a4dff89d51a5a915 --- services/audioflinger/AudioFlinger.cpp | 2 +- services/audioflinger/AudioResampler.h | 8 ++++++++ services/audioflinger/Threads.cpp | 7 +++++-- services/audioflinger/Threads.h | 3 ++- 4 files changed, 16 insertions(+), 4 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 99e077c..711b62f 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1714,7 +1714,7 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module, AudioStreamIn *input = new AudioStreamIn(inHwDev, inStream); // Start record thread - // RecorThread require both input and output device indication to forward to audio + // RecordThread requires both input and output device indication to forward to audio // pre processing modules thread = new RecordThread(this, input, diff --git a/services/audioflinger/AudioResampler.h b/services/audioflinger/AudioResampler.h index 29dc5b6..33e64ce 100644 --- a/services/audioflinger/AudioResampler.h +++ b/services/audioflinger/AudioResampler.h @@ -56,6 +56,14 @@ public: // set the PTS of the next buffer output by the resampler virtual void setPTS(int64_t pts); + // Resample int16_t samples from provider and accumulate into 'out'. + // A mono provider delivers a sequence of samples. + // A stereo provider delivers a sequence of interleaved pairs of samples. + // Multi-channel providers are not supported. + // In either case, 'out' holds interleaved pairs of fixed-point signed Q19.12. + // That is, for a mono provider, there is an implicit up-channeling. + // Since this method accumulates, the caller is responsible for clearing 'out' initially. + // FIXME assumes provider is always successful; it should return the actual frame count. virtual void resample(int32_t* out, size_t outFrameCount, AudioBufferProvider* provider) = 0; diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index f27d908..a9014a2 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -3787,7 +3787,8 @@ bool AudioFlinger::RecordThread::threadLoop() } else { // resampling - memset(mRsmpOutBuffer, 0, framesOut * 2 * sizeof(int32_t)); + // resampler accumulates, but we only have one source track + memset(mRsmpOutBuffer, 0, framesOut * FCC_2 * sizeof(int32_t)); // alter output frame count as if we were expecting stereo samples if (mChannelCount == 1 && mReqChannelCount == 1) { framesOut >>= 1; @@ -3797,6 +3798,7 @@ bool AudioFlinger::RecordThread::threadLoop() // ditherAndClamp() works as long as all buffers returned by // mActiveTrack->getNextBuffer() are 32 bit aligned which should be always true. 
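                // (Illustrative aside, not part of the original change: Q19.12 here
                // denotes a 32-bit sample with 12 fractional bits, so a 16-bit PCM
                // sample s contributes s << 12 at unity gain (0x1000); ditherAndClamp()
                // then folds each accumulated stereo pair back down to interleaved
                // int16_t output.)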
if (mChannelCount == 2 && mReqChannelCount == 1) { + // temporarily type pun mRsmpOutBuffer from Q19.12 to int16_t ditherAndClamp(mRsmpOutBuffer, mRsmpOutBuffer, framesOut); // the resampler always outputs stereo samples: // do post stereo to mono conversion @@ -3805,6 +3807,7 @@ bool AudioFlinger::RecordThread::threadLoop() } else { ditherAndClamp((int32_t *)buffer.raw, mRsmpOutBuffer, framesOut); } + // now done with mRsmpOutBuffer } if (mFramestoDrop == 0) { @@ -4385,7 +4388,7 @@ void AudioFlinger::RecordThread::readInputParameters() mResampler = AudioResampler::create(16, channelCount, mReqSampleRate); mResampler->setSampleRate(mSampleRate); mResampler->setVolume(AudioMixer::UNITY_GAIN, AudioMixer::UNITY_GAIN); - mRsmpOutBuffer = new int32_t[mFrameCount * 2]; + mRsmpOutBuffer = new int32_t[mFrameCount * FCC_2]; // optmization: if mono to mono, alter input frame count as if we were inputing // stereo samples diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 365c790..09cbc5c 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -794,8 +794,9 @@ private: sp mActiveTrack; Condition mStartStopCond; AudioResampler *mResampler; + // interleaved stereo pairs of fixed-point signed Q19.12 int32_t *mRsmpOutBuffer; - int16_t *mRsmpInBuffer; + int16_t *mRsmpInBuffer; // [mFrameCount * mChannelCount] size_t mRsmpInIndex; size_t mInputBytes; const uint32_t mReqChannelCount; -- cgit v1.1 From 82aaf94a5b18939e4d790bbc752031f3070704a3 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 17 Jul 2013 16:05:07 -0700 Subject: Report underruns for fast tracks also This fixes a regression that was introduced earlier by commit 9f80dd223d83d9bb9077fb6baee056cee4eaf7e5 called "New control block for AudioTrack and AudioRecord". That commit broke underrun reporting for fast tracks. Also remove Track::mUnderrunCount, which counted the number of underrun events, and was only used by dumpsys media.audio_flinger. Now dumpsys media.audio_flinger reports the number of underrun frames, Isolated underrun-related control block accesses via the proxy, so that the server is not directly poking around in the control block. The new proxy APIs are AudioTrackServerProxy::getUnderrunFrames() and AudioTrackServerProxy::tallyUnderrunFrames(). getUnderrunFrames() returns a rolling counter for streaming tracks, or zero for static buffer tracks which never underrun, but do a kind of 'pause' at end of buffer. tallyUnderrunFrames() increments the counter by a specified number of frames. Change-Id: Ib31fd73eb17cbb23888ce3af8ff29f471f5bd5a2 --- include/private/media/AudioTrackShared.h | 9 +++++++++ media/libmedia/AudioTrackShared.cpp | 19 +++++++++++++++++++ services/audioflinger/PlaybackTracks.h | 1 - services/audioflinger/Threads.cpp | 13 +++++-------- services/audioflinger/Tracks.cpp | 10 +++------- 5 files changed, 36 insertions(+), 16 deletions(-) diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 6d778dd..1379379 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -415,6 +415,13 @@ public: virtual void framesReadyIsCalledByMultipleThreads() { } bool setStreamEndDone(); // and return previous value + + // Add to the tally of underrun frames, and inform client of underrun + virtual void tallyUnderrunFrames(uint32_t frameCount); + + // Return the total number of frames which AudioFlinger desired but were unavailable, + // and thus which resulted in an underrun. 
+ virtual uint32_t getUnderrunFrames() const { return mCblk->u.mStreaming.mUnderrunFrames; } }; class StaticAudioTrackServerProxy : public AudioTrackServerProxy { @@ -429,6 +436,8 @@ public: virtual void framesReadyIsCalledByMultipleThreads(); virtual status_t obtainBuffer(Buffer* buffer); virtual void releaseBuffer(Buffer* buffer); + virtual void tallyUnderrunFrames(uint32_t frameCount); + virtual uint32_t getUnderrunFrames() const { return 0; } private: ssize_t pollPosition(); // poll for state queue update, and return current position diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index 3b7616f..e7abb40 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -661,6 +661,14 @@ bool AudioTrackServerProxy::setStreamEndDone() { return old; } +void AudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount) +{ + mCblk->u.mStreaming.mUnderrunFrames += frameCount; + + // FIXME also wake futex so that underrun is noticed more quickly + (void) android_atomic_or(CBLK_UNDERRUN, &mCblk->mFlags); +} + // --------------------------------------------------------------------------- StaticAudioTrackServerProxy::StaticAudioTrackServerProxy(audio_track_cblk_t* cblk, void *buffers, @@ -817,6 +825,17 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer) buffer->mNonContig = 0; } +void StaticAudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount) +{ + // Unlike AudioTrackServerProxy::tallyUnderrunFrames() used for streaming tracks, + // we don't have a location to count underrun frames. The underrun frame counter + // only exists in AudioTrackSharedStreaming. Fortunately, underruns are not + // possible for static buffer tracks other than at end of buffer, so this is not a loss. 
+ + // FIXME also wake futex so that underrun is noticed more quickly + (void) android_atomic_or(CBLK_UNDERRUN, &mCblk->mFlags); +} + // --------------------------------------------------------------------------- } // namespace android diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index 628f5af..5600411c 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -140,7 +140,6 @@ private: // but the slot is only used if track is active FastTrackUnderruns mObservedUnderruns; // Most recently observed value of // mFastMixerDumpState.mTracks[mFastIndex].mUnderruns - uint32_t mUnderrunCount; // Counter of total number of underruns, never reset volatile float mCachedVolume; // combined master volume and stream type volume; // 'volatile' means accessed without lock or // barrier, but is read/written atomically diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index d9c312e..0c1cc35 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2739,8 +2739,10 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac track->mObservedUnderruns = underruns; // don't count underruns that occur while stopping or pausing // or stopped which can occur when flush() is called while active - if (!(track->isStopping() || track->isPausing() || track->isStopped())) { - track->mUnderrunCount += recentUnderruns; + if (!(track->isStopping() || track->isPausing() || track->isStopped()) && + recentUnderruns > 0) { + // FIXME fast mixer will pull & mix partial buffers, but we count as a full underrun + track->mAudioTrackServerProxy->tallyUnderrunFrames(recentUnderruns * mFrameCount); } // This is similar to the state machine for normal tracks, @@ -3056,12 +3058,8 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac mixerStatus = MIXER_TRACKS_READY; } } else { - // only implemented for normal tracks, not fast tracks if (framesReady < desiredFrames && !track->isStopped() && !track->isPaused()) { - // we missed desiredFrames whatever the actual number of frames missing was - cblk->u.mStreaming.mUnderrunFrames += desiredFrames; - // FIXME also wake futex so that underrun is noticed more quickly - (void) android_atomic_or(CBLK_UNDERRUN, &cblk->mFlags); + track->mAudioTrackServerProxy->tallyUnderrunFrames(desiredFrames); } // clear effect chain input buffer if an active track underruns to avoid sending // previous audio buffer again to effects @@ -3086,7 +3084,6 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac tracksToRemove->add(track); } } else { - track->mUnderrunCount++; // No buffers for this track. Give it a few chances to // fill a buffer, then remove it from active list. 
if (--(track->mRetryCount) <= 0) { diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 1f75468..e676365 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -316,7 +316,6 @@ AudioFlinger::PlaybackThread::Track::Track( mPresentationCompleteFrames(0), mFlags(flags), mFastIndex(-1), - mUnderrunCount(0), mCachedVolume(1.0), mIsInvalid(false), mAudioTrackServerProxy(NULL), @@ -389,7 +388,7 @@ void AudioFlinger::PlaybackThread::Track::destroy() /*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result) { result.append(" Name Client Type Fmt Chn mask Session fCount S F SRate " - "L dB R dB Server Main buf Aux Buf Flags Underruns\n"); + "L dB R dB Server Main buf Aux Buf Flags UndFrmCnt\n"); } void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) @@ -470,7 +469,7 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) (int)mMainBuffer, (int)mAuxBuffer, mCblk->mFlags, - mUnderrunCount, + mAudioTrackServerProxy->getUnderrunFrames(), nowInUnderrun); } @@ -489,10 +488,7 @@ status_t AudioFlinger::PlaybackThread::Track::getNextBuffer( buffer->frameCount = buf.mFrameCount; buffer->raw = buf.mRaw; if (buf.mFrameCount == 0) { - // only implemented so far for normal tracks, not fast tracks - mCblk->u.mStreaming.mUnderrunFrames += desiredFrames; - // FIXME also wake futex so that underrun is noticed more quickly - (void) android_atomic_or(CBLK_UNDERRUN, &mCblk->mFlags); + mAudioTrackServerProxy->tallyUnderrunFrames(desiredFrames); } return status; } -- cgit v1.1 From 11d0d44d583f679638cc927bfffe920e495e90cc Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Wed, 31 Jul 2013 09:50:58 -0700 Subject: Camera: fix the metadate parcel write issue Also rename the frameId to requestId to make it less confusing. 
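In sketch form, the new write/read pairing looks like this (the variable
names follow the diff below; the rest of the binder transaction is elided):

    // write side: flag that a metadata object follows
    data.writeInt32(requestId);
    data.writeInt32(1);                  // presence marker
    result.writeToParcel(&data);

    // read side: only unparcel metadata when the marker says it is present
    int32_t requestId = data.readInt32();
    CameraMetadata result;
    if (data.readInt32() != 0) {
        result.readFromParcel(const_cast<Parcel *>(&data));
    }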
Change-Id: I14a20b6fcabb86294afb31fc0c56397e185c1373 --- camera/camera2/ICameraDeviceCallbacks.cpp | 15 ++++++++++----- .../camera/libcameraservice/common/FrameProcessorBase.cpp | 8 ++++---- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/camera/camera2/ICameraDeviceCallbacks.cpp b/camera/camera2/ICameraDeviceCallbacks.cpp index 188bd8e..3cec1f4 100644 --- a/camera/camera2/ICameraDeviceCallbacks.cpp +++ b/camera/camera2/ICameraDeviceCallbacks.cpp @@ -57,11 +57,12 @@ public: data.writeNoException(); } - void onResultReceived(int32_t frameId, const CameraMetadata& result) { + void onResultReceived(int32_t requestId, const CameraMetadata& result) { ALOGV("onResultReceived"); Parcel data, reply; data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor()); - data.writeInt32(frameId); + data.writeInt32(requestId); + data.writeInt32(1); // to mark presence of metadata object result.writeToParcel(&data); remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY); data.writeNoException(); @@ -91,10 +92,14 @@ status_t BnCameraDeviceCallbacks::onTransact( case RESULT_RECEIVED: { ALOGV("RESULT_RECEIVED"); CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply); - int32_t frameId = data.readInt32(); + int32_t requestId = data.readInt32(); CameraMetadata result; - result.readFromParcel(const_cast(&data)); - onResultReceived(frameId, result); + if (data.readInt32() != 0) { + result.readFromParcel(const_cast(&data)); + } else { + ALOGW("No metadata object is present in result"); + } + onResultReceived(requestId, result); data.readExceptionCode(); return NO_ERROR; break; diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp index 10bc6ea..e7b440a 100644 --- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp +++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp @@ -143,7 +143,7 @@ status_t FrameProcessorBase::processListeners(const CameraMetadata &frame, __FUNCTION__, device->getId()); return BAD_VALUE; } - int32_t frameId = entry.data.i32[0]; + int32_t requestId = entry.data.i32[0]; List > listeners; { @@ -151,8 +151,8 @@ status_t FrameProcessorBase::processListeners(const CameraMetadata &frame, List::iterator item = mRangeListeners.begin(); while (item != mRangeListeners.end()) { - if (frameId >= item->minId && - frameId < item->maxId) { + if (requestId >= item->minId && + requestId < item->maxId) { sp listener = item->listener.promote(); if (listener == 0) { item = mRangeListeners.erase(item); @@ -167,7 +167,7 @@ status_t FrameProcessorBase::processListeners(const CameraMetadata &frame, ALOGV("Got %d range listeners out of %d", listeners.size(), mRangeListeners.size()); List >::iterator item = listeners.begin(); for (; item != listeners.end(); item++) { - (*item)->onFrameAvailable(frameId, frame); + (*item)->onFrameAvailable(requestId, frame); } return OK; } -- cgit v1.1 From 30873bfd08255e2c4e98ff5732ffff2838772617 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 31 Jul 2013 13:04:50 -0700 Subject: Unregister any handlers still registered on now "dead" ALoopers upon the death of an ALooper. 
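Concretely (see the ALooperRoster change below): the roster now walks its
handler table from the back and drops every entry whose weak reference to a
looper can no longer be promoted, i.e. whose looper has already been
destroyed. Iterating in reverse keeps the indices of the not-yet-visited
entries valid while items are removed.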
Change-Id: I64c0835b8db04486204f3d0fa7173ee53708a116 related-to-bug: 10106648 --- include/media/stagefright/foundation/ALooperRoster.h | 1 + media/libstagefright/foundation/ALooper.cpp | 4 ++++ media/libstagefright/foundation/ALooperRoster.cpp | 14 ++++++++++++++ 3 files changed, 19 insertions(+) diff --git a/include/media/stagefright/foundation/ALooperRoster.h b/include/media/stagefright/foundation/ALooperRoster.h index 2e5fd73..940fc55 100644 --- a/include/media/stagefright/foundation/ALooperRoster.h +++ b/include/media/stagefright/foundation/ALooperRoster.h @@ -30,6 +30,7 @@ struct ALooperRoster { const sp looper, const sp &handler); void unregisterHandler(ALooper::handler_id handlerID); + void unregisterStaleHandlers(); status_t postMessage(const sp &msg, int64_t delayUs = 0); void deliverMessage(const sp &msg); diff --git a/media/libstagefright/foundation/ALooper.cpp b/media/libstagefright/foundation/ALooper.cpp index 22777a2..ebf9d8d 100644 --- a/media/libstagefright/foundation/ALooper.cpp +++ b/media/libstagefright/foundation/ALooper.cpp @@ -72,6 +72,10 @@ ALooper::ALooper() ALooper::~ALooper() { stop(); + + // Since this looper is "dead" (or as good as dead by now), + // have ALooperRoster unregister any handlers still registered for it. + gLooperRoster.unregisterStaleHandlers(); } void ALooper::setName(const char *name) { diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp index ad10d2b..0c181ff 100644 --- a/media/libstagefright/foundation/ALooperRoster.cpp +++ b/media/libstagefright/foundation/ALooperRoster.cpp @@ -71,6 +71,20 @@ void ALooperRoster::unregisterHandler(ALooper::handler_id handlerID) { mHandlers.removeItemsAt(index); } +void ALooperRoster::unregisterStaleHandlers() { + Mutex::Autolock autoLock(mLock); + + for (size_t i = mHandlers.size(); i-- > 0;) { + const HandlerInfo &info = mHandlers.valueAt(i); + + sp looper = info.mLooper.promote(); + if (looper == NULL) { + ALOGV("Unregistering stale handler %d", mHandlers.keyAt(i)); + mHandlers.removeItemsAt(i); + } + } +} + status_t ALooperRoster::postMessage( const sp &msg, int64_t delayUs) { Mutex::Autolock autoLock(mLock); -- cgit v1.1 From 910813bd66eaf0f6a72769c9b3fa9830dd100a19 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Wed, 31 Jul 2013 20:36:27 -0700 Subject: fix fallout from binderizing BufferQueues consumer side Change-Id: I626bac6df4fc3d8478046193f06ecc7ea60dd3a8 --- media/libstagefright/SurfaceMediaSource.cpp | 6 ++---- media/libstagefright/omx/GraphicBufferSource.cpp | 7 ++----- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp index befd4cc..b082c3a 100644 --- a/media/libstagefright/SurfaceMediaSource.cpp +++ b/media/libstagefright/SurfaceMediaSource.cpp @@ -65,10 +65,8 @@ SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeig // reference once the ctor ends, as that would cause the refcount of 'this' // dropping to 0 at the end of the ctor. Since all we need is a wp<...> // that's what we create. 
- wp listener; - sp proxy; - listener = static_cast(this); - proxy = new BufferQueue::ProxyConsumerListener(listener); + wp listener = static_cast(this); + sp proxy = new BufferQueue::ProxyConsumerListener(listener); status_t err = mBufferQueue->consumerConnect(proxy, false); if (err != NO_ERROR) { diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp index d6fd95b..325ffcf 100644 --- a/media/libstagefright/omx/GraphicBufferSource.cpp +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -69,11 +69,8 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, // reference once the ctor ends, as that would cause the refcount of 'this' // dropping to 0 at the end of the ctor. Since all we need is a wp<...> // that's what we create. - wp listener; - listener = static_cast(this); - - sp proxy; - proxy = new BufferQueue::ProxyConsumerListener(listener); + wp listener = static_cast(this); + sp proxy = new BufferQueue::ProxyConsumerListener(listener); mInitCheck = mBufferQueue->consumerConnect(proxy, false); if (mInitCheck != NO_ERROR) { -- cgit v1.1 From f560001b5d60225dc7e101b2481477a3c39b66a4 Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Fri, 2 Aug 2013 13:58:54 -0700 Subject: Use screen dimensions as default video size Instead of 720p, use the screen dimensions as the default video size. For example, a phone with a 768x1280 display would be recorded as a 768x1280 video when in portrait, or a 1280x768 video when in landscape. If the device is not able to record at that resolution, we fall back to 720p. Also, fixed a problem introduced by a buffer timestamp change. Change-Id: I7230ddb323c669ab07327f26df8d103c192c21d3 --- cmds/screenrecord/screenrecord.cpp | 61 ++++++++++++++++++++++++++++++-------- 1 file changed, 49 insertions(+), 12 deletions(-) diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp index 3e79ee0..28fc00f 100644 --- a/cmds/screenrecord/screenrecord.cpp +++ b/cmds/screenrecord/screenrecord.cpp @@ -44,8 +44,9 @@ using namespace android; // Command-line parameters. static bool gVerbose = false; // chatty on stdout static bool gRotate = false; // rotate 90 degrees -static uint32_t gVideoWidth = 1280; // 720p -static uint32_t gVideoHeight = 720; +static bool gSizeSpecified = false; // was size explicitly requested? +static uint32_t gVideoWidth = 0; // default width+height +static uint32_t gVideoHeight = 0; static uint32_t gBitRate = 4000000; // 4Mbps // Set by signal handler to stop recording. @@ -107,6 +108,14 @@ static status_t configureSignals() } /* + * Returns "true" if the device is rotated 90 degrees. + */ +static bool isDeviceRotated(int orientation) { + return orientation != DISPLAY_ORIENTATION_0 && + orientation != DISPLAY_ORIENTATION_180; +} + +/* * Configures and starts the MediaCodec encoder. Obtains an input surface * from the codec. 
*/ @@ -114,6 +123,11 @@ static status_t prepareEncoder(float displayFps, sp* pCodec, sp* pBufferProducer) { status_t err; + if (gVerbose) { + printf("Configuring recorder for %dx%d video at %.2fMbps\n", + gVideoWidth, gVideoHeight, gBitRate / 1000000.0); + } + sp format = new AMessage; format->setInt32("width", gVideoWidth); format->setInt32("height", gVideoHeight); @@ -152,6 +166,7 @@ static status_t prepareEncoder(float displayFps, sp* pCodec, return err; } + ALOGV("Codec prepared"); *pCodec = codec; *pBufferProducer = bufferProducer; return 0; @@ -169,8 +184,7 @@ static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo, // Set the region of the layer stack we're interested in, which in our // case is "all of it". If the app is rotated (so that the width of the // app is based on the height of the display), reverse width/height. - bool deviceRotated = mainDpyInfo.orientation != DISPLAY_ORIENTATION_0 && - mainDpyInfo.orientation != DISPLAY_ORIENTATION_180; + bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation); uint32_t sourceWidth, sourceHeight; if (!deviceRotated) { sourceWidth = mainDpyInfo.w; @@ -295,6 +309,12 @@ static status_t runEncoder(const sp& encoder, bufIndex, size, ptsUsec); CHECK(trackIdx != -1); + // If the virtual display isn't providing us with timestamps, + // use the current time. + if (ptsUsec == 0) { + ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000; + } + // The MediaMuxer docs are unclear, but it appears that we // need to pass either the full set of BufferInfo flags, or // (flags & BUFFER_FLAG_SYNCFRAME). @@ -370,11 +390,6 @@ static status_t runEncoder(const sp& encoder, static status_t recordScreen(const char* fileName) { status_t err; - if (gVerbose) { - printf("Recording %dx%d video at %.2fMbps\n", - gVideoWidth, gVideoHeight, gBitRate / 1000000.0); - } - // Configure signal handler. err = configureSignals(); if (err != NO_ERROR) return err; @@ -399,11 +414,31 @@ static status_t recordScreen(const char* fileName) { mainDpyInfo.orientation); } + bool rotated = isDeviceRotated(mainDpyInfo.orientation); + if (gVideoWidth == 0) { + gVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w; + } + if (gVideoHeight == 0) { + gVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h; + } + // Configure and start the encoder. sp encoder; sp bufferProducer; err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer); - if (err != NO_ERROR) return err; + if (err != NO_ERROR && !gSizeSpecified) { + ALOGV("Retrying with 720p"); + if (gVideoWidth != 1280 && gVideoHeight != 720) { + fprintf(stderr, "WARNING: failed at %dx%d, retrying at 720p\n", + gVideoWidth, gVideoHeight); + gVideoWidth = 1280; + gVideoHeight = 720; + err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer); + } + } + if (err != NO_ERROR) { + return err; + } // Configure virtual display. sp dpy; @@ -478,6 +513,8 @@ static void usage() { fprintf(stderr, "Usage: screenrecord [options] \n" "\n" + "Records the device's display to a .mp4 file.\n" + "\n" "Options:\n" "--size WIDTHxHEIGHT\n" " Set the video size, e.g. \"1280x720\". For best results, use\n" @@ -485,8 +522,7 @@ static void usage() { "--bit-rate RATE\n" " Set the video bit rate, in megabits per second. Default 4Mbps.\n" "--rotate\n" - " Rotate the output 90 degrees. 
Useful for filling the frame\n" - " when in portrait mode.\n" + " Rotate the output 90 degrees.\n" "--verbose\n" " Display interesting information on stdout.\n" "--help\n" @@ -536,6 +572,7 @@ int main(int argc, char* const argv[]) { gVideoWidth, gVideoHeight); return 2; } + gSizeSpecified = true; break; case 'b': gBitRate = atoi(optarg); -- cgit v1.1 From 6ca126d5c83386941c5cb9600099e5510e4430d6 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 31 Jul 2013 12:25:00 -0700 Subject: Make AudioRecord and AudioTrack comments more similar Change-Id: I122a7cf7bfc162090cb27f37c325db7a23985bc7 --- include/media/AudioRecord.h | 11 +++++++++-- include/media/AudioTrack.h | 4 +++- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index c65ffe8..9f84ba0 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -62,6 +62,7 @@ public: size_t frameCount; // number of sample frames corresponding to size; // on input it is the number of frames available, // on output is the number of frames actually drained + // (currently ignored, but will make the primary field in future) size_t size; // input/output in bytes == frameCount * frameSize // FIXME this is redundant with respect to frameCount, @@ -363,7 +364,12 @@ public: * Input parameter 'size' is in byte units. * This is implemented on top of obtainBuffer/releaseBuffer. For best * performance use callbacks. Returns actual number of bytes read >= 0, - * or a negative status code. + * or one of the following negative status codes: + * INVALID_OPERATION AudioRecord is configured for streaming mode + * BAD_VALUE size is invalid + * WOULD_BLOCK when obtainBuffer() returns same, or + * AudioRecord was stopped during the read + * or any other error code returned by IAudioRecord::start() or restoreRecord_l(). */ ssize_t read(void* buffer, size_t size); @@ -437,7 +443,7 @@ private: // for client callback handler callback_t mCbf; // callback handler for events, or NULL - void* mUserData; // for client callback handler + void* mUserData; // for notification APIs uint32_t mNotificationFrames; // frames between each notification callback @@ -481,6 +487,7 @@ private: // multi-thread safe. // An exception is that a blocking ClientProxy::obtainBuffer() may be called without a lock, // provided that the caller also holds an extra reference to the proxy and shared memory to keep + // them around in case they are replaced during the obtainBuffer(). 
sp mProxy; bool mInOverrun; // whether recorder is currently in overrun state diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 523bd32..7687aae 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -116,6 +116,7 @@ public: * Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful operation * - NO_INIT: audio server or audio hardware not initialized + * - BAD_VALUE: unsupported configuration */ static status_t getMinFrameCount(size_t* frameCount, @@ -682,8 +683,9 @@ protected: STATE_STOPPING, } mState; + // for client callback handler callback_t mCbf; // callback handler for events, or NULL - void* mUserData; // for client callback handler + void* mUserData; // for notification APIs uint32_t mNotificationFramesReq; // requested number of frames between each -- cgit v1.1 From 743732236ab84e94168378cdb293964861b1ed00 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 2 Aug 2013 15:51:35 -0700 Subject: Remove default parameters for AudioTrack The defaults aren't needed; all users must know these values. Change-Id: I0c1c97cb80bfea8e69a7cfa53ec85a91a8d48bb5 --- include/media/AudioTrack.h | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 523bd32..aa2dd4e 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -170,9 +170,9 @@ public: */ AudioTrack( audio_stream_type_t streamType, - uint32_t sampleRate = 0, - audio_format_t format = AUDIO_FORMAT_DEFAULT, - audio_channel_mask_t channelMask = 0, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t, int frameCount = 0, audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, callback_t cbf = NULL, @@ -194,10 +194,10 @@ public: */ AudioTrack( audio_stream_type_t streamType, - uint32_t sampleRate = 0, - audio_format_t format = AUDIO_FORMAT_DEFAULT, - audio_channel_mask_t channelMask = 0, - const sp& sharedBuffer = 0, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, + const sp& sharedBuffer, audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, callback_t cbf = NULL, void* user = NULL, @@ -227,10 +227,10 @@ public: * * threadCanCallJava: Whether callbacks are made from an attached thread and thus can call JNI. */ - status_t set(audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT, - uint32_t sampleRate = 0, - audio_format_t format = AUDIO_FORMAT_DEFAULT, - audio_channel_mask_t channelMask = 0, + status_t set(audio_stream_type_t streamType, + uint32_t sampleRate, + audio_format_t format, + audio_channel_mask_t channelMask, int frameCount = 0, audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, callback_t cbf = NULL, -- cgit v1.1 From f0f33c4acd231fa95deb9eeef2c46b0129e64463 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 31 Jul 2013 12:24:36 -0700 Subject: AudioRecord has no default sample rate Change-Id: I72feefdd6f3a623fd3669b80d4b264518fdc0929 --- include/media/AudioRecord.h | 2 -- media/libmedia/AudioRecord.cpp | 3 ++- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index c65ffe8..aa56deb 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -35,8 +35,6 @@ class AudioRecord : public RefBase { public: - static const int DEFAULT_SAMPLE_RATE = 8000; - /* Events used by AudioRecord callback function (callback_t). * Keep in sync with frameworks/base/media/java/android/media/AudioRecord.java NATIVE_EVENT_*. 
*/ diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 0e7e17f..b499cbb 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -176,7 +176,8 @@ status_t AudioRecord::set( } if (sampleRate == 0) { - sampleRate = DEFAULT_SAMPLE_RATE; + ALOGE("Invalid sample rate %u", sampleRate); + return BAD_VALUE; } mSampleRate = sampleRate; -- cgit v1.1 From 02de89293b74ab1e9a77ce2367c5c499ab038968 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 31 Jul 2013 12:30:12 -0700 Subject: Cleanup comments for the new control block implementation There was some obsolete and incomplete text left over after the new control block code was submitted. This cleans up all those comments to be accurate again. Change-Id: Ic52f5869cb723cde25d709514d6deea6aa6f20aa --- include/media/AudioRecord.h | 3 +-- include/media/AudioTrack.h | 18 +++--------------- 2 files changed, 4 insertions(+), 17 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index c65ffe8..0a3c0e5 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -344,8 +344,7 @@ public: __attribute__((__deprecated__)); private: - /* New internal API. - * If nonContig is non-NULL, it is an output parameter that will be set to the number of + /* If nonContig is non-NULL, it is an output parameter that will be set to the number of * additional non-contiguous frames that are available immediately. * FIXME We could pass an array of Buffers instead of only one Buffer to obtainBuffer(), * in case the requested amount of frames is in two or more non-contiguous regions. diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 523bd32..5ba8461 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -331,11 +331,6 @@ public: /* Enables looping and sets the start and end points of looping. * Only supported for static buffer mode. * - * FIXME The comments below are for the new planned interpretation which is not yet implemented. - * Currently the legacy behavior is still implemented, where loopStart and loopEnd - * are in wrapping (overflow) frame units like the return value of getPosition(). - * The plan is to fix all callers to use the new version at same time implementation changes. - * * Parameters: * * loopStart: loop start in frames relative to start of buffer. @@ -393,11 +388,6 @@ public: /* Sets playback head position. * Only supported for static buffer mode. * - * FIXME The comments below are for the new planned interpretation which is not yet implemented. - * Currently the legacy behavior is still implemented, where the new position - * is in wrapping (overflow) frame units like the return value of getPosition(). - * The plan is to fix all callers to use the new version at same time implementation changes. - * * Parameters: * * position: New playback head position in frames relative to start of buffer. @@ -427,7 +417,7 @@ public: status_t getPosition(uint32_t *position) const; /* For static buffer mode only, this returns the current playback position in frames - * relative to start of buffer. It is analogous to the new API for + * relative to start of buffer. It is analogous to the position units used by * setLoop() and setPosition(). After underrun, the position will be at end of buffer. 
*/ status_t getBufferPosition(uint32_t *position); @@ -517,8 +507,7 @@ public: __attribute__((__deprecated__)); private: - /* New internal API - * If nonContig is non-NULL, it is an output parameter that will be set to the number of + /* If nonContig is non-NULL, it is an output parameter that will be set to the number of * additional non-contiguous frames that are available immediately. * FIXME We could pass an array of Buffers instead of only one Buffer to obtainBuffer(), * in case the requested amount of frames is in two or more non-contiguous regions. @@ -546,12 +535,11 @@ public: * This is implemented on top of obtainBuffer/releaseBuffer. For best * performance use callbacks. Returns actual number of bytes written >= 0, * or one of the following negative status codes: - * INVALID_OPERATION AudioTrack is configured for shared buffer mode + * INVALID_OPERATION AudioTrack is configured for static buffer or streaming mode * BAD_VALUE size is invalid * WOULD_BLOCK when obtainBuffer() returns same, or * AudioTrack was stopped during the write * or any other error code returned by IAudioTrack::start() or restoreTrack_l(). - * Not supported for static buffer mode. */ ssize_t write(const void* buffer, size_t size); -- cgit v1.1 From 954315a10089fa3684ac94db5be77c6655c08fc0 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 2 Aug 2013 09:02:07 -0700 Subject: Recording of non-linear formats is not supported Such formats are already rejected in AudioRecord::set() Change-Id: I5ba1fd9e4cd659e5226c75aa4f63e52f655e0521 --- media/libmedia/AudioRecord.cpp | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 0e7e17f..103a5f1 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -60,10 +60,9 @@ status_t AudioRecord::getMinFrameCount( // We double the size of input buffer for ping pong use of record buffer. 
size <<= 1; - if (audio_is_linear_pcm(format)) { - uint32_t channelCount = popcount(channelMask); - size /= channelCount * audio_bytes_per_sample(format); - } + // Assumes audio_is_linear_pcm(format) + uint32_t channelCount = popcount(channelMask); + size /= channelCount * audio_bytes_per_sample(format); *frameCount = size; return NO_ERROR; @@ -205,11 +204,8 @@ status_t AudioRecord::set( uint32_t channelCount = popcount(channelMask); mChannelCount = channelCount; - if (audio_is_linear_pcm(format)) { - mFrameSize = channelCount * audio_bytes_per_sample(format); - } else { - mFrameSize = sizeof(uint8_t); - } + // Assumes audio_is_linear_pcm(format), else sizeof(uint8_t) + mFrameSize = channelCount * audio_bytes_per_sample(format); if (sessionId == 0 ) { mSessionId = AudioSystem::newAudioSessionId(); -- cgit v1.1 From 28f1351369682801e1bb40a835bdae3c97b73c1c Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 31 Jul 2013 12:27:26 -0700 Subject: AudioRecord callback thread waits for priority boost Change-Id: Iae38fa4ac20a45751566169213a08a15deb0a2f6 --- include/media/AudioRecord.h | 1 + media/libmedia/AudioRecord.cpp | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index c65ffe8..596cf93 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -476,6 +476,7 @@ private: int mPreviousPriority; // before start() SchedPolicy mPreviousSchedulingGroup; + bool mAwaitBoost; // thread should wait for priority boost before running // The proxy should only be referenced while a lock is held because the proxy isn't // multi-thread safe. diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 0e7e17f..7be7529 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -665,6 +665,26 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize) nsecs_t AudioRecord::processAudioBuffer(const sp& thread) { mLock.lock(); + if (mAwaitBoost) { + mAwaitBoost = false; + mLock.unlock(); + static const int32_t kMaxTries = 5; + int32_t tryCounter = kMaxTries; + uint32_t pollUs = 10000; + do { + int policy = sched_getscheduler(0); + if (policy == SCHED_FIFO || policy == SCHED_RR) { + break; + } + usleep(pollUs); + pollUs <<= 1; + } while (tryCounter-- > 0); + if (tryCounter < 0) { + ALOGE("did not receive expected priority boost on time"); + } + // Run again immediately + return 0; + } // Can only reference mCblk while locked int32_t flags = android_atomic_and(~CBLK_OVERRUN, &mCblk->mFlags); -- cgit v1.1 From 27f7b2a8fe899565487d8a326676a5f7d0a05a37 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 31 Jul 2013 16:10:22 -0700 Subject: AudioRecord::set and constructor now take flags The new optional parameter 'flags' of type audio_input_flags_t will be used for requesting a fast track. Change-Id: Ia7e070cb57c833e608352da354fb30dc26df6918 --- include/media/AudioRecord.h | 8 ++++++-- media/libmedia/AudioRecord.cpp | 8 ++++++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index 596cf93..bcf2039 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -144,6 +144,7 @@ public: * frames are ready in record track output buffer. * sessionId: Not yet supported. * transferType: How data is transferred from AudioRecord. + * flags: See comments on audio_input_flags_t in * threadCanCallJava: Not present in parameter list, and so is fixed at false. 
*/ @@ -156,7 +157,8 @@ public: void* user = NULL, int notificationFrames = 0, int sessionId = 0, - transfer_type transferType = TRANSFER_DEFAULT); + transfer_type transferType = TRANSFER_DEFAULT, + audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE); /* Terminates the AudioRecord and unregisters it from AudioFlinger. * Also destroys all resources associated with the AudioRecord. @@ -188,7 +190,8 @@ public: int notificationFrames = 0, bool threadCanCallJava = false, int sessionId = 0, - transfer_type transferType = TRANSFER_DEFAULT); + transfer_type transferType = TRANSFER_DEFAULT, + audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE); /* Result of constructing the AudioRecord. This must be checked * before using any AudioRecord API (except for set()), because using @@ -464,6 +467,7 @@ private: audio_source_t mInputSource; uint32_t mLatency; // in ms audio_channel_mask_t mChannelMask; + audio_input_flags_t mFlags; int mSessionId; transfer_type mTransfer; diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 7be7529..d4495dc 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -87,7 +87,8 @@ AudioRecord::AudioRecord( void* user, int notificationFrames, int sessionId, - transfer_type transferType) + transfer_type transferType, + audio_input_flags_t flags) : mStatus(NO_INIT), mSessionId(0), mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT), @@ -129,7 +130,8 @@ status_t AudioRecord::set( int notificationFrames, bool threadCanCallJava, int sessionId, - transfer_type transferType) + transfer_type transferType, + audio_input_flags_t flags) { switch (transferType) { case TRANSFER_DEFAULT: @@ -218,6 +220,8 @@ status_t AudioRecord::set( } ALOGV("set(): mSessionId %d", mSessionId); + mFlags = flags; + audio_io_handle_t input = AudioSystem::getInput(inputSource, sampleRate, format, -- cgit v1.1 From eeca32671896739e84050da5992d5f151a1629de Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 31 Jul 2013 16:12:48 -0700 Subject: IAudioFlinger::openRecord track_flags_t flags is in/out This will allow AudioFlinger to tell client it is denying a request. 
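The change below turns the flags argument into an in/out parameter: the client writes the flags it would like, the server clears any bit it refuses, and the reply carries the surviving bits back so the client can see what it was actually granted. A small sketch of that hand-shake; openRecord here is a plain function standing in for the Binder call, and the flag names mirror the patch but the code is only a model:

#include <cstdint>
#include <iostream>

enum TrackFlags : uint32_t {
    TRACK_DEFAULT = 0,
    TRACK_FAST    = 1u << 0,
};

// "Server" side: receives the requested flags, strips anything it cannot
// grant, and the stripped value is what travels back to the caller.
void openRecord(uint32_t* flags, bool fastPathAvailable) {
    if ((*flags & TRACK_FAST) && !fastPathAvailable) {
        std::cout << "server: denying TRACK_FAST request\n";
        *flags &= ~TRACK_FAST;            // deny by clearing the bit
    }
}

int main() {
    uint32_t requested = TRACK_FAST;      // client expresses a preference
    openRecord(&requested, /*fastPathAvailable=*/false);

    // Client checks which of its requested flags actually survived.
    if (requested & TRACK_FAST) {
        std::cout << "client: got a fast track\n";
    } else {
        std::cout << "client: falling back to a normal track\n";
    }
}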
Change-Id: Iff2be3ad6636371bbda9c9899a283c94620c1f06 --- include/media/IAudioFlinger.h | 2 +- media/libmedia/AudioRecord.cpp | 3 ++- media/libmedia/IAudioFlinger.cpp | 12 +++++++++--- services/audioflinger/AudioFlinger.cpp | 4 ++-- services/audioflinger/AudioFlinger.h | 2 +- 5 files changed, 15 insertions(+), 8 deletions(-) diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index 82aae62..49f921b 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -79,7 +79,7 @@ public: audio_format_t format, audio_channel_mask_t channelMask, size_t frameCount, - track_flags_t flags, + track_flags_t *flags, pid_t tid, // -1 means unused, otherwise must be valid non-0 int *sessionId, status_t *status) = 0; diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index d4495dc..b5060b1 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -451,12 +451,13 @@ status_t AudioRecord::openRecord_l( pid_t tid = -1; // FIXME see similar logic at AudioTrack for tid + IAudioFlinger::track_flags_t trackFlags = IAudioFlinger::TRACK_DEFAULT; int originalSessionId = mSessionId; sp record = audioFlinger->openRecord(input, sampleRate, format, mChannelMask, frameCount, - IAudioFlinger::TRACK_DEFAULT, + &trackFlags, tid, &mSessionId, &status); diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index c6e43e7..be818c6 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -144,7 +144,7 @@ public: audio_format_t format, audio_channel_mask_t channelMask, size_t frameCount, - track_flags_t flags, + track_flags_t *flags, pid_t tid, int *sessionId, status_t *status) @@ -157,7 +157,8 @@ public: data.writeInt32(format); data.writeInt32(channelMask); data.writeInt32(frameCount); - data.writeInt32(flags); + track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT; + data.writeInt32(lFlags); data.writeInt32((int32_t) tid); int lSessionId = 0; if (sessionId != NULL) { @@ -168,6 +169,10 @@ public: if (lStatus != NO_ERROR) { ALOGE("openRecord error: %s", strerror(-lStatus)); } else { + lFlags = reply.readInt32(); + if (flags != NULL) { + *flags = lFlags; + } lSessionId = reply.readInt32(); if (sessionId != NULL) { *sessionId = lSessionId; @@ -761,7 +766,8 @@ status_t BnAudioFlinger::onTransact( int sessionId = data.readInt32(); status_t status; sp record = openRecord(input, - sampleRate, format, channelMask, frameCount, flags, tid, &sessionId, &status); + sampleRate, format, channelMask, frameCount, &flags, tid, &sessionId, &status); + reply->writeInt32(flags); reply->writeInt32(sessionId); reply->writeInt32(status); reply->writeStrongBinder(record->asBinder()); diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 00e8a57..f789eca 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1212,7 +1212,7 @@ sp AudioFlinger::openRecord( audio_format_t format, audio_channel_mask_t channelMask, size_t frameCount, - IAudioFlinger::track_flags_t flags, + IAudioFlinger::track_flags_t *flags, pid_t tid, int *sessionId, status_t *status) @@ -1261,7 +1261,7 @@ sp AudioFlinger::openRecord( // create new record track. // The record track uses one track in mHardwareMixerThread by convention. 
recordTrack = thread->createRecordTrack_l(client, sampleRate, format, channelMask, - frameCount, lSessionId, flags, tid, &lStatus); + frameCount, lSessionId, *flags, tid, &lStatus); } if (lStatus != NO_ERROR) { // remove local strong reference to Client before deleting the RecordTrack so that the diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index d99b779..e5e4113 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -117,7 +117,7 @@ public: audio_format_t format, audio_channel_mask_t channelMask, size_t frameCount, - IAudioFlinger::track_flags_t flags, + IAudioFlinger::track_flags_t *flags, pid_t tid, int *sessionId, status_t *status); -- cgit v1.1 From ddb0ccf3fb6fe8da8c71a6deb30561b821f3c0a2 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 31 Jul 2013 16:14:50 -0700 Subject: RecordTrack::createRecordTrack_l flags are in/out The flags are currently unused, but will be used for requesting a fast track. Making flags in/out will allow reporting back up to client that the request is denied. Change-Id: Ifbee57da3632ce130551065a426577fb97b1a68d --- services/audioflinger/AudioFlinger.cpp | 2 +- services/audioflinger/Threads.cpp | 2 +- services/audioflinger/Threads.h | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index f789eca..3d65c44 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1261,7 +1261,7 @@ sp AudioFlinger::openRecord( // create new record track. // The record track uses one track in mHardwareMixerThread by convention. recordTrack = thread->createRecordTrack_l(client, sampleRate, format, channelMask, - frameCount, lSessionId, *flags, tid, &lStatus); + frameCount, lSessionId, flags, tid, &lStatus); } if (lStatus != NO_ERROR) { // remove local strong reference to Client before deleting the RecordTrack so that the diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index f0c27c3..305270a 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -4433,7 +4433,7 @@ sp AudioFlinger::RecordThread::createR audio_channel_mask_t channelMask, size_t frameCount, int sessionId, - IAudioFlinger::track_flags_t flags, + IAudioFlinger::track_flags_t *flags, pid_t tid, status_t *status) { diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index aa04fd4..1453698 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -837,7 +837,7 @@ public: audio_channel_mask_t channelMask, size_t frameCount, int sessionId, - IAudioFlinger::track_flags_t flags, + IAudioFlinger::track_flags_t *flags, pid_t tid, status_t *status); -- cgit v1.1 From 90e58b1fefc8caf70b34301a92bc86179580b6fc Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 31 Jul 2013 16:16:02 -0700 Subject: RecordThread::createRecordTrack_l use flags for fast tracks Look at client's request for a fast track in the flags parameter, and check whether the request can be satisfied. 
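The eligibility test added below grants TRACK_FAST only when a callback tid is supplied, the frame count is default or large enough, the channel mask is mono or stereo, the sample rate matches the HAL, and a fast recorder exists; on denial it clears the flag and falls back to a conservative double-buffered size. A stand-alone sketch of that decision; kFastTrackMultiplier = 1, the 2048-frame count, and the 50 ms latency are assumed values for the sketch, not the real constants:

#include <cstddef>
#include <cstdint>
#include <cstdio>

constexpr uint32_t kFastTrackMultiplier = 1;   // assumed value for this sketch

struct Request {
    int         tid;          // callback thread id, -1 if none
    std::size_t frameCount;   // 0 means "use the default"
    uint32_t    channels;     // 1 = mono, 2 = stereo
    uint32_t    sampleRate;
};

// Returns true if the fast path is granted; on denial, frameCount is bumped
// to a conservative size covering at least two thread buffers and the latency.
bool decideFastTrack(Request& r, std::size_t halFrameCount, uint32_t halSampleRate,
                     bool hasFastCapture, uint32_t latencyMs) {
    bool eligible =
        (r.tid != -1) &&
        (r.frameCount == 0 || r.frameCount >= halFrameCount * kFastTrackMultiplier) &&
        (r.channels == 1 || r.channels == 2) &&
        (r.sampleRate == halSampleRate) &&
        hasFastCapture;

    if (eligible) {
        if (r.frameCount == 0) {
            r.frameCount = halFrameCount * kFastTrackMultiplier;
        }
        return true;
    }

    // Denied: enough buffers to cover the reported latency, but never fewer
    // than two, then scale the requested frame count up to that minimum.
    uint32_t minBufCount = latencyMs / ((1000 * halFrameCount) / halSampleRate);
    if (minBufCount < 2) {
        minBufCount = 2;
    }
    std::size_t minFrameCount = halFrameCount * minBufCount;
    if (r.frameCount < minFrameCount) {
        r.frameCount = minFrameCount;
    }
    return false;
}

int main() {
    Request r{/*tid=*/-1, /*frameCount=*/0, /*channels=*/2, /*sampleRate=*/48000};
    bool fast = decideFastTrack(r, /*halFrameCount=*/2048, /*halSampleRate=*/48000,
                                /*hasFastCapture=*/false, /*latencyMs=*/50);
    std::printf("fast=%d frameCount=%zu\n", fast ? 1 : 0, r.frameCount);
}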
Change-Id: I65ddaeb1b85670fe9066076f638bfed0cda01c0d --- services/audioflinger/Threads.cpp | 58 +++++++++++++++++++++++++++++++++++++++ services/audioflinger/Threads.h | 1 + 2 files changed, 59 insertions(+) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 305270a..2c2931f 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -4446,6 +4446,57 @@ sp AudioFlinger::RecordThread::createR goto Exit; } + // client expresses a preference for FAST, but we get the final say + if (*flags & IAudioFlinger::TRACK_FAST) { + if ( + // use case: callback handler and frame count is default or at least as large as HAL + ( + (tid != -1) && + ((frameCount == 0) || + (frameCount >= (mFrameCount * kFastTrackMultiplier))) + ) && + // FIXME when record supports non-PCM data, also check for audio_is_linear_pcm(format) + // mono or stereo + ( (channelMask == AUDIO_CHANNEL_OUT_MONO) || + (channelMask == AUDIO_CHANNEL_OUT_STEREO) ) && + // hardware sample rate + (sampleRate == mSampleRate) && + // record thread has an associated fast recorder + hasFastRecorder() + // FIXME test that RecordThread for this fast track has a capable output HAL + // FIXME add a permission test also? + ) { + // if frameCount not specified, then it defaults to fast recorder (HAL) frame count + if (frameCount == 0) { + frameCount = mFrameCount * kFastTrackMultiplier; + } + ALOGV("AUDIO_INPUT_FLAG_FAST accepted: frameCount=%d mFrameCount=%d", + frameCount, mFrameCount); + } else { + ALOGV("AUDIO_INPUT_FLAG_FAST denied: frameCount=%d " + "mFrameCount=%d format=%d isLinear=%d channelMask=%#x sampleRate=%u mSampleRate=%u " + "hasFastRecorder=%d tid=%d", + frameCount, mFrameCount, format, + audio_is_linear_pcm(format), + channelMask, sampleRate, mSampleRate, hasFastRecorder(), tid); + *flags &= ~IAudioFlinger::TRACK_FAST; + // For compatibility with AudioRecord calculation, buffer depth is forced + // to be at least 2 x the record thread frame count and cover audio hardware latency. + // This is probably too conservative, but legacy application code may depend on it. + // If you change this calculation, also review the start threshold which is related. 
+ uint32_t latencyMs = 50; // FIXME mInput->stream->get_latency(mInput->stream); + size_t mNormalFrameCount = 2048; // FIXME + uint32_t minBufCount = latencyMs / ((1000 * mNormalFrameCount) / mSampleRate); + if (minBufCount < 2) { + minBufCount = 2; + } + size_t minFrameCount = mNormalFrameCount * minBufCount; + if (frameCount < minFrameCount) { + frameCount = minFrameCount; + } + } + } + // FIXME use flags and tid similar to createTrack_l() { // scope for mLock @@ -4465,6 +4516,13 @@ sp AudioFlinger::RecordThread::createR mAudioFlinger->btNrecIsOff(); setEffectSuspended_l(FX_IID_AEC, suspend, sessionId); setEffectSuspended_l(FX_IID_NS, suspend, sessionId); + + if ((*flags & IAudioFlinger::TRACK_FAST) && (tid != -1)) { + pid_t callingPid = IPCThreadState::self()->getCallingPid(); + // we don't have CAP_SYS_NICE, nor do we want to have it as it's too powerful, + // so ask activity manager to do this on our behalf + sendPrioConfigEvent_l(callingPid, tid, kPriorityAudioApp); + } } lStatus = NO_ERROR; diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 1453698..31d5323 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -879,6 +879,7 @@ public: void handleSyncStartEvent(const sp& event); virtual size_t frameCount() const { return mFrameCount; } + bool hasFastRecorder() const { return false; } private: void clearSyncStartEvent(); -- cgit v1.1 From 73493688f4190f790ee15d9ca54831cd64f4e195 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 31 Jul 2013 16:10:53 -0700 Subject: AudioRecord::openRecord_l now take flags The new parameter 'flags' of type audio_input_flags_t will be used for requesting a fast track, but is currently ignored. Change-Id: If68dfda8b2d4eaaca42927d721b4630c47f71f3b --- include/media/AudioRecord.h | 1 + media/libmedia/AudioRecord.cpp | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index bcf2039..7c240b4 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -423,6 +423,7 @@ private: status_t openRecord_l(uint32_t sampleRate, audio_format_t format, size_t frameCount, + audio_input_flags_t flags, audio_io_handle_t input, size_t epoch); diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index b5060b1..dfaac4c 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -253,7 +253,7 @@ status_t AudioRecord::set( } // create the IAudioRecord - status = openRecord_l(sampleRate, format, frameCount, input, 0 /*epoch*/); + status = openRecord_l(sampleRate, format, frameCount, mFlags, input, 0 /*epoch*/); if (status != NO_ERROR) { return status; } @@ -438,6 +438,7 @@ status_t AudioRecord::openRecord_l( uint32_t sampleRate, audio_format_t format, size_t frameCount, + audio_input_flags_t flags, audio_io_handle_t input, size_t epoch) { @@ -911,7 +912,7 @@ status_t AudioRecord::restoreRecord_l(const char *from) // It will also delete the strong references on previous IAudioRecord and IMemory size_t position = mProxy->getPosition(); mNewPosition = position + mUpdatePeriod; - result = openRecord_l(mSampleRate, mFormat, mFrameCount, getInput_l(), position); + result = openRecord_l(mSampleRate, mFormat, mFrameCount, mFlags, getInput_l(), position); if (result == NO_ERROR) { if (mActive) { // callback thread or sync event hasn't changed -- cgit v1.1 From 3151427b6b0adf99929433715bab6f1e505100c1 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 1 Aug 2013 07:24:34 
-0700 Subject: AudioRecord::openRecord_l use flags Use the flags to determine input parameters for IAudioFlinger::openRecord. Change-Id: I98d2726503af75c8830ce80ceaf3b94a755b342f --- media/libmedia/AudioRecord.cpp | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index dfaac4c..0c798ae 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -449,10 +449,22 @@ status_t AudioRecord::openRecord_l( return NO_INIT; } + IAudioFlinger::track_flags_t trackFlags = IAudioFlinger::TRACK_DEFAULT; pid_t tid = -1; - // FIXME see similar logic at AudioTrack for tid - IAudioFlinger::track_flags_t trackFlags = IAudioFlinger::TRACK_DEFAULT; + // Client can only express a preference for FAST. Server will perform additional tests. + // The only supported use case for FAST is callback transfer mode. + if (flags & AUDIO_INPUT_FLAG_FAST) { + if ((mTransfer != TRANSFER_CALLBACK) || (mAudioRecordThread == 0)) { + ALOGW("AUDIO_INPUT_FLAG_FAST denied by client"); + // once denied, do not request again if IAudioRecord is re-created + mFlags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_FAST); + } else { + trackFlags |= IAudioFlinger::TRACK_FAST; + tid = mAudioRecordThread->getTid(); + } + } + int originalSessionId = mSessionId; sp record = audioFlinger->openRecord(input, sampleRate, format, -- cgit v1.1 From 7cd9cf70e36ad4b8eb12e24f9adbbe6fd69edebd Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 1 Aug 2013 07:22:02 -0700 Subject: AudioRecord notification frames Change-Id: I76ec536d1504eb9a558178b62bf225aace4b40d1 --- include/media/AudioRecord.h | 5 ++++- media/libmedia/AudioRecord.cpp | 31 +++++++++++++++++++++++++++---- 2 files changed, 31 insertions(+), 5 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index 7c240b4..c1b6b03 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -444,7 +444,10 @@ private: void* mUserData; // for client callback handler // for notification APIs - uint32_t mNotificationFrames; // frames between each notification callback + uint32_t mNotificationFramesReq; // requested number of frames between each + // notification callback + uint32_t mNotificationFramesAct; // actual number of frames between each + // notification callback bool mRefreshRemaining; // processAudioBuffer() should refresh next 2 // These are private to processAudioBuffer(), and are not protected by a lock diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 0c798ae..6c04b43 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -270,7 +270,8 @@ status_t AudioRecord::set( mActive = false; mCbf = cbf; - mNotificationFrames = notificationFrames; + mNotificationFramesReq = notificationFrames; + mNotificationFramesAct = 0; mRefreshRemaining = true; mUserData = user; // TODO: add audio hardware input latency here @@ -458,7 +459,8 @@ status_t AudioRecord::openRecord_l( if ((mTransfer != TRANSFER_CALLBACK) || (mAudioRecordThread == 0)) { ALOGW("AUDIO_INPUT_FLAG_FAST denied by client"); // once denied, do not request again if IAudioRecord is re-created - mFlags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_FAST); + flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_FAST); + mFlags = flags; } else { trackFlags |= IAudioFlinger::TRACK_FAST; tid = mAudioRecordThread->getTid(); @@ -494,6 +496,27 @@ status_t AudioRecord::openRecord_l( mCblkMemory = iMem; 
audio_track_cblk_t* cblk = static_cast(iMem->pointer()); mCblk = cblk; + // FIXME missing fast track frameCount logic + mAwaitBoost = false; + mNotificationFramesAct = mNotificationFramesReq; + if (flags & AUDIO_INPUT_FLAG_FAST) { + if (trackFlags & IAudioFlinger::TRACK_FAST) { + ALOGV("AUDIO_INPUT_FLAG_FAST successful; frameCount %u", frameCount); + mAwaitBoost = true; + // double-buffering is not required for fast tracks, due to tighter scheduling + if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount) { + mNotificationFramesAct = frameCount; + } + } else { + ALOGV("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %u", frameCount); + // once denied, do not request again if IAudioRecord is re-created + flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_FAST); + mFlags = flags; + if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) { + mNotificationFramesAct = frameCount/2; + } + } + } // starting address of buffers in shared memory void *buffers = (char*)cblk + sizeof(audio_track_cblk_t); @@ -501,7 +524,7 @@ status_t AudioRecord::openRecord_l( // update proxy mProxy = new AudioRecordClientProxy(cblk, buffers, frameCount, mFrameSize); mProxy->setEpoch(epoch); - mProxy->setMinimum(mNotificationFrames); + mProxy->setMinimum(mNotificationFramesAct); mDeathNotifier = new DeathNotifier(this); mAudioRecord->asBinder()->linkToDeath(mDeathNotifier, this); @@ -748,7 +771,7 @@ nsecs_t AudioRecord::processAudioBuffer(const sp& thread) } // Cache other fields that will be needed soon - size_t notificationFrames = mNotificationFrames; + size_t notificationFrames = mNotificationFramesAct; if (mRefreshRemaining) { mRefreshRemaining = false; mRemainingFrames = notificationFrames; -- cgit v1.1 From 362552a53523c46679e8b3cbb83b39d7ae769ff7 Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Fri, 2 Aug 2013 23:16:18 -0700 Subject: Adds a drm resource busy error, removes some deprecated errors Change-Id: Id719fb6df9c9d955ac24803082d08f2a4a5bc766 related-to-bug: 9695816 --- include/media/stagefright/MediaErrors.h | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/include/media/stagefright/MediaErrors.h b/include/media/stagefright/MediaErrors.h index ee5e4e2..686f286 100644 --- a/include/media/stagefright/MediaErrors.h +++ b/include/media/stagefright/MediaErrors.h @@ -56,14 +56,11 @@ enum { ERROR_DRM_TAMPER_DETECTED = DRM_ERROR_BASE - 7, ERROR_DRM_NOT_PROVISIONED = DRM_ERROR_BASE - 8, ERROR_DRM_DEVICE_REVOKED = DRM_ERROR_BASE - 9, + ERROR_DRM_RESOURCE_BUSY = DRM_ERROR_BASE - 10, ERROR_DRM_VENDOR_MAX = DRM_ERROR_BASE - 500, ERROR_DRM_VENDOR_MIN = DRM_ERROR_BASE - 999, - // Deprecated - ERROR_DRM_WV_VENDOR_MAX = ERROR_DRM_VENDOR_MAX, - ERROR_DRM_WV_VENDOR_MIN = ERROR_DRM_VENDOR_MIN, - // Heartbeat Error Codes HEARTBEAT_ERROR_BASE = -3000, ERROR_HEARTBEAT_TERMINATE_REQUESTED = HEARTBEAT_ERROR_BASE, -- cgit v1.1 From b13820ffafcb6bcdd33b6272676535afb4dff479 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Mon, 5 Aug 2013 12:22:43 -0700 Subject: Reject unprepared MediaPlayers Don't allow a MediaPlayer that has been reset() or release()d to be used as the argument to setNextMediaPlayer. 
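The guard below rejects a next player unless its state mask includes prepared, paused, or playback-complete, which is exactly what a reset or released player lacks. A small sketch of that state-mask test with illustrative state values, not the real MediaPlayer constants:

#include <cstdint>
#include <iostream>
#include <memory>

enum State : uint32_t {
    STATE_IDLE              = 1u << 0,
    STATE_PREPARED          = 1u << 1,
    STATE_PAUSED            = 1u << 2,
    STATE_PLAYBACK_COMPLETE = 1u << 3,
};

struct Player {
    uint32_t currentState = STATE_IDLE;
};

// Only accept a "next" player that has already been prepared (or is paused /
// finished); a reset or released player fails the mask test.
int setNextPlayer(const std::shared_ptr<Player>& next) {
    if (next && !(next->currentState &
                  (STATE_PREPARED | STATE_PAUSED | STATE_PLAYBACK_COMPLETE))) {
        std::cerr << "next player is not prepared\n";
        return -1;   // stands in for INVALID_OPERATION
    }
    std::cout << "next player accepted\n";
    return 0;
}

int main() {
    auto unprepared = std::make_shared<Player>();            // still idle
    auto prepared   = std::make_shared<Player>();
    prepared->currentState = STATE_PREPARED;

    setNextPlayer(unprepared);   // rejected
    setNextPlayer(prepared);     // accepted
}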
Change-Id: I47da1460ec3742f5b2bd7b79e7998b290032d5a1 --- media/libmedia/mediaplayer.cpp | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index 963b04f..056cc0a 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -811,6 +811,13 @@ status_t MediaPlayer::setNextMediaPlayer(const sp& next) { if (mPlayer == NULL) { return NO_INIT; } + + if (next != NULL && !(next->mCurrentState & + (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_PLAYBACK_COMPLETE))) { + ALOGE("next player is not prepared"); + return INVALID_OPERATION; + } + return mPlayer->setNextPlayer(next == NULL ? NULL : next->mPlayer); } -- cgit v1.1 From b2f5b19e5b6e1408a259add23dba91037756a943 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Tue, 30 Jul 2013 14:36:03 -0700 Subject: Camera3: Provide consumer usage flags to HAL for each stream At stream configure time, pass on the stream's consumer usage flags to the HAL, to speed up final hardware configuration. Bug: 9592202 Change-Id: Ie467be053be36a09e482f5f05cad65df42d66476 --- .../libcameraservice/device3/Camera3IOStreamBase.h | 2 ++ .../libcameraservice/device3/Camera3InputStream.cpp | 6 ++++++ .../libcameraservice/device3/Camera3InputStream.h | 2 ++ .../libcameraservice/device3/Camera3OutputStream.cpp | 11 +++++++++++ .../libcameraservice/device3/Camera3OutputStream.h | 3 +++ .../camera/libcameraservice/device3/Camera3Stream.cpp | 18 ++++++++++++++---- .../camera/libcameraservice/device3/Camera3Stream.h | 4 ++++ 7 files changed, 42 insertions(+), 4 deletions(-) diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h index 74c4484..9432a59 100644 --- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h +++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h @@ -77,6 +77,8 @@ class Camera3IOStreamBase : virtual size_t getBufferCountLocked(); + virtual status_t getEndpointUsage(uint32_t *usage) = 0; + status_t getBufferPreconditionCheckLocked() const; status_t returnBufferPreconditionCheckLocked() const; diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp index e9a9c2b..1889a11 100644 --- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp +++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp @@ -234,6 +234,12 @@ status_t Camera3InputStream::configureQueueLocked() { return OK; } +status_t Camera3InputStream::getEndpointUsage(uint32_t *usage) { + // Per HAL3 spec, input streams have 0 for their initial usage field. 
+ *usage = 0; + return OK; +} + }; // namespace camera3 }; // namespace android diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h index 8adda88..91d6f16 100644 --- a/services/camera/libcameraservice/device3/Camera3InputStream.h +++ b/services/camera/libcameraservice/device3/Camera3InputStream.h @@ -79,6 +79,8 @@ class Camera3InputStream : public Camera3IOStreamBase { virtual status_t configureQueueLocked(); + virtual status_t getEndpointUsage(uint32_t *usage); + }; // class Camera3InputStream }; // namespace camera3 diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp index 0ec2b05..35cb5ba 100644 --- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp +++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp @@ -364,6 +364,17 @@ status_t Camera3OutputStream::disconnectLocked() { return OK; } +status_t Camera3OutputStream::getEndpointUsage(uint32_t *usage) { + + status_t res; + int32_t u = 0; + res = mConsumer->query(mConsumer.get(), + NATIVE_WINDOW_CONSUMER_USAGE_BITS, &u); + *usage = u; + + return res; +} + }; // namespace camera3 }; // namespace android diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h index 774fbdd..6cbb9f4 100644 --- a/services/camera/libcameraservice/device3/Camera3OutputStream.h +++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h @@ -92,6 +92,9 @@ class Camera3OutputStream : virtual status_t configureQueueLocked(); virtual status_t disconnectLocked(); + + virtual status_t getEndpointUsage(uint32_t *usage); + }; // class Camera3OutputStream } // namespace camera3 diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp index ab563df..a6872aa 100644 --- a/services/camera/libcameraservice/device3/Camera3Stream.cpp +++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp @@ -77,7 +77,9 @@ int Camera3Stream::getFormat() const { } camera3_stream* Camera3Stream::startConfiguration() { + ATRACE_CALL(); Mutex::Autolock l(mLock); + status_t res; switch (mState) { case STATE_ERROR: @@ -107,8 +109,15 @@ camera3_stream* Camera3Stream::startConfiguration() { return NULL; } - oldUsage = usage; - oldMaxBuffers = max_buffers; + oldUsage = camera3_stream::usage; + oldMaxBuffers = camera3_stream::max_buffers; + + res = getEndpointUsage(&(camera3_stream::usage)); + if (res != OK) { + ALOGE("%s: Cannot query consumer endpoint usage!", + __FUNCTION__); + return NULL; + } if (mState == STATE_CONSTRUCTED) { mState = STATE_IN_CONFIG; @@ -125,6 +134,7 @@ bool Camera3Stream::isConfiguring() const { } status_t Camera3Stream::finishConfiguration(camera3_device *hal3Device) { + ATRACE_CALL(); Mutex::Autolock l(mLock); switch (mState) { case STATE_ERROR: @@ -147,8 +157,8 @@ status_t Camera3Stream::finishConfiguration(camera3_device *hal3Device) { // Check if the stream configuration is unchanged, and skip reallocation if // so. As documented in hardware/camera3.h:configure_streams(). 
if (mState == STATE_IN_RECONFIG && - oldUsage == usage && - oldMaxBuffers == max_buffers) { + oldUsage == camera3_stream::usage && + oldMaxBuffers == camera3_stream::max_buffers) { mState = STATE_CONFIGURED; return OK; } diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h index 69d81e4..b64fd86 100644 --- a/services/camera/libcameraservice/device3/Camera3Stream.h +++ b/services/camera/libcameraservice/device3/Camera3Stream.h @@ -263,6 +263,10 @@ class Camera3Stream : // Get the total number of buffers in the queue virtual size_t getBufferCountLocked() = 0; + // Get the usage flags for the other endpoint, or return + // INVALID_OPERATION if they cannot be obtained. + virtual status_t getEndpointUsage(uint32_t *usage) = 0; + private: uint32_t oldUsage; uint32_t oldMaxBuffers; -- cgit v1.1 From deeef54487a34034dc0cfaab20b20d557224c07c Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Fri, 2 Aug 2013 01:50:59 -0700 Subject: separte producer and consumer interfaces Bug: 9265647 Change-Id: Iefabc11e4bd2e2e8ffd31160476c450affe6629c --- camera/ProCamera.cpp | 3 +-- include/media/stagefright/SurfaceMediaSource.h | 2 +- libvideoeditor/lvpp/NativeWindowRenderer.cpp | 2 +- media/libstagefright/SurfaceMediaSource.cpp | 4 ++-- .../libcameraservice/api1/client2/CallbackProcessor.cpp | 3 +-- .../libcameraservice/api1/client2/JpegProcessor.cpp | 3 +-- .../api1/client2/StreamingProcessor.cpp | 3 +-- .../libcameraservice/api1/client2/ZslProcessor.cpp | 3 +-- .../libcameraservice/device3/Camera3InputStream.cpp | 4 ---- .../libcameraservice/device3/Camera3InputStream.h | 7 ------- .../libcameraservice/device3/Camera3ZslStream.cpp | 8 ++++---- .../camera/libcameraservice/gui/RingBufferConsumer.cpp | 17 +++++++++-------- .../camera/libcameraservice/gui/RingBufferConsumer.h | 4 +--- 13 files changed, 23 insertions(+), 40 deletions(-) diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 1040415..f6c9ca1 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -251,8 +251,7 @@ status_t ProCamera::createStreamCpu(int width, int height, int format, sp cc = new CpuConsumer(bq, heapCount/*, synchronousMode*/); cc->setName(String8("ProCamera::mCpuConsumer")); - sp stc = new Surface( - cc->getProducerInterface()); + sp stc = new Surface(bq); status_t s = createStream(width, height, format, stc->getIGraphicBufferProducer(), diff --git a/include/media/stagefright/SurfaceMediaSource.h b/include/media/stagefright/SurfaceMediaSource.h index 7d40379..db5f947 100644 --- a/include/media/stagefright/SurfaceMediaSource.h +++ b/include/media/stagefright/SurfaceMediaSource.h @@ -56,7 +56,7 @@ class GraphicBuffer; class SurfaceMediaSource : public MediaSource, public MediaBufferObserver, - protected BufferQueue::ConsumerListener { + protected ConsumerListener { public: enum { MIN_UNDEQUEUED_BUFFERS = 4}; diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.cpp b/libvideoeditor/lvpp/NativeWindowRenderer.cpp index 84a8e15..8b362ef 100755 --- a/libvideoeditor/lvpp/NativeWindowRenderer.cpp +++ b/libvideoeditor/lvpp/NativeWindowRenderer.cpp @@ -570,7 +570,7 @@ RenderInput::RenderInput(NativeWindowRenderer* renderer, GLuint textureId) , mTextureId(textureId) { sp bq = new BufferQueue(); mST = new GLConsumer(bq, mTextureId); - mSTC = new Surface(mST->getBufferQueue()); + mSTC = new Surface(bq); native_window_connect(mSTC.get(), NATIVE_WINDOW_API_MEDIA); } diff --git a/media/libstagefright/SurfaceMediaSource.cpp 
b/media/libstagefright/SurfaceMediaSource.cpp index b082c3a..6b934d4 100644 --- a/media/libstagefright/SurfaceMediaSource.cpp +++ b/media/libstagefright/SurfaceMediaSource.cpp @@ -65,7 +65,7 @@ SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeig // reference once the ctor ends, as that would cause the refcount of 'this' // dropping to 0 at the end of the ctor. Since all we need is a wp<...> // that's what we create. - wp listener = static_cast(this); + wp listener = static_cast(this); sp proxy = new BufferQueue::ProxyConsumerListener(listener); status_t err = mBufferQueue->consumerConnect(proxy, false); @@ -105,7 +105,7 @@ void SurfaceMediaSource::dump(String8& result, const char* prefix, Mutex::Autolock lock(mMutex); result.append(buffer); - mBufferQueue->dump(result); + mBufferQueue->dump(result, ""); } status_t SurfaceMediaSource::setFrameRate(int32_t fps) diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp index 12d0859..9d8c4a1 100644 --- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp @@ -114,8 +114,7 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { mCallbackConsumer = new CpuConsumer(bq, kCallbackHeapCount); mCallbackConsumer->setFrameAvailableListener(this); mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer")); - mCallbackWindow = new Surface( - mCallbackConsumer->getProducerInterface()); + mCallbackWindow = new Surface(bq); } if (mCallbackStreamId != NO_STREAM) { diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp index b920edf..77d5c8a 100644 --- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp @@ -87,8 +87,7 @@ status_t JpegProcessor::updateStream(const Parameters ¶ms) { mCaptureConsumer = new CpuConsumer(bq, 1); mCaptureConsumer->setFrameAvailableListener(this); mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer")); - mCaptureWindow = new Surface( - mCaptureConsumer->getProducerInterface()); + mCaptureWindow = new Surface(bq); // Create memory for API consumption mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0, "Camera2Client::CaptureHeap"); diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp index 7e98016..dfe8580 100644 --- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp @@ -325,8 +325,7 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters ¶ms) { mRecordingHeapCount + 1); mRecordingConsumer->setFrameAvailableListener(this); mRecordingConsumer->setName(String8("Camera2-RecordingConsumer")); - mRecordingWindow = new Surface( - mRecordingConsumer->getProducerInterface()); + mRecordingWindow = new Surface(bq); newConsumer = true; // Allocate memory later, since we don't know buffer size until receipt } diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp index 11a2cbb..3b118f4 100644 --- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp @@ 
-134,8 +134,7 @@ status_t ZslProcessor::updateStream(const Parameters ¶ms) { kZslBufferDepth); mZslConsumer->setFrameAvailableListener(this); mZslConsumer->setName(String8("Camera2Client::ZslConsumer")); - mZslWindow = new Surface( - mZslConsumer->getProducerInterface()); + mZslWindow = new Surface(bq); } if (mZslStreamId != NO_STREAM) { diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp index 1889a11..c80f512 100644 --- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp +++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp @@ -182,10 +182,6 @@ status_t Camera3InputStream::disconnectLocked() { return OK; } -sp Camera3InputStream::getProducerInterface() const { - return mConsumer->getProducerInterface(); -} - void Camera3InputStream::dump(int fd, const Vector &args) const { (void) args; String8 lines; diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h index 91d6f16..681d684 100644 --- a/services/camera/libcameraservice/device3/Camera3InputStream.h +++ b/services/camera/libcameraservice/device3/Camera3InputStream.h @@ -44,13 +44,6 @@ class Camera3InputStream : public Camera3IOStreamBase { virtual void dump(int fd, const Vector &args) const; - /** - * Get the producer interface for this stream, to hand off to a producer. - * The producer must be connected to the provided interface before - * finishConfigure is called on this stream. - */ - sp getProducerInterface() const; - private: typedef BufferItemConsumer::BufferItem BufferItem; diff --git a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp index 8790c8c..04f5dc5 100644 --- a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp +++ b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp @@ -113,11 +113,11 @@ Camera3ZslStream::Camera3ZslStream(int id, uint32_t width, uint32_t height, Camera3OutputStream(id, CAMERA3_STREAM_BIDIRECTIONAL, width, height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), - mDepth(depth), - mProducer(new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL, - depth)) { + mDepth(depth) { - mConsumer = new Surface(mProducer->getProducerInterface()); + sp bq = new BufferQueue(); + mProducer = new RingBufferConsumer(bq, GRALLOC_USAGE_HW_CAMERA_ZSL, depth); + mConsumer = new Surface(bq); } Camera3ZslStream::~Camera3ZslStream() { diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp index 8141f4e..ebc7ea7 100644 --- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp +++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp @@ -34,13 +34,14 @@ typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem; namespace android { -RingBufferConsumer::RingBufferConsumer(uint32_t consumerUsage, +RingBufferConsumer::RingBufferConsumer(const sp& consumer, + uint32_t consumerUsage, int bufferCount) : - ConsumerBase(new BufferQueue()), + ConsumerBase(consumer), mBufferCount(bufferCount) { - mBufferQueue->setConsumerUsageBits(consumerUsage); - mBufferQueue->setMaxAcquiredBufferCount(bufferCount); + mConsumer->setConsumerUsageBits(consumerUsage); + mConsumer->setMaxAcquiredBufferCount(bufferCount); assert(bufferCount > 0); } @@ -51,7 +52,7 @@ RingBufferConsumer::~RingBufferConsumer() { void RingBufferConsumer::setName(const String8& name) 
{ Mutex::Autolock _l(mMutex); mName = name; - mBufferQueue->setConsumerName(name); + mConsumer->setConsumerName(name); } sp RingBufferConsumer::pinSelectedBuffer( @@ -342,17 +343,17 @@ void RingBufferConsumer::unpinBuffer(const BufferItem& item) { status_t RingBufferConsumer::setDefaultBufferSize(uint32_t w, uint32_t h) { Mutex::Autolock _l(mMutex); - return mBufferQueue->setDefaultBufferSize(w, h); + return mConsumer->setDefaultBufferSize(w, h); } status_t RingBufferConsumer::setDefaultBufferFormat(uint32_t defaultFormat) { Mutex::Autolock _l(mMutex); - return mBufferQueue->setDefaultBufferFormat(defaultFormat); + return mConsumer->setDefaultBufferFormat(defaultFormat); } status_t RingBufferConsumer::setConsumerUsage(uint32_t usage) { Mutex::Autolock _l(mMutex); - return mBufferQueue->setConsumerUsageBits(usage); + return mConsumer->setConsumerUsageBits(usage); } } // namespace android diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h index 454fbae..b4ad824 100644 --- a/services/camera/libcameraservice/gui/RingBufferConsumer.h +++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h @@ -63,7 +63,7 @@ class RingBufferConsumer : public ConsumerBase, // the consumer usage flags passed to the graphics allocator. The // bufferCount parameter specifies how many buffers can be pinned for user // access at the same time. - RingBufferConsumer(uint32_t consumerUsage, + RingBufferConsumer(const sp& consumer, uint32_t consumerUsage, int bufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS); virtual ~RingBufferConsumer(); @@ -72,8 +72,6 @@ class RingBufferConsumer : public ConsumerBase, // log messages. void setName(const String8& name); - sp getProducerInterface() const { return getBufferQueue(); } - // setDefaultBufferSize is used to set the size of buffers returned by // requestBuffers when a with and height of zero is requested. status_t setDefaultBufferSize(uint32_t w, uint32_t h); -- cgit v1.1 From d0715867861c216e88a4a7523b6da8a3cb128724 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Mon, 22 Jul 2013 12:57:43 -0700 Subject: IOMX: add updateGraphicBufferInMeta method for metadata mode This is used to set the handle in the metadata buffer that is valid in the mediaserver process, as well as to keep a reference for the graphic buffers in BufferMeta. 
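For illustration only (not part of the patch), a caller that owns an OMX node and recycles
native-window buffers in metadata mode would be expected to use the new call roughly as
below; "omx", "node", "buf", "bufferId" and the port index are placeholders:

    // Sketch of a caller: after dequeueing a buffer from the native window,
    // refresh the handle stored in the metadata buffer so it is valid in the
    // mediaserver process, and let BufferMeta keep a reference to the buffer.
    sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(buf, false);
    status_t err = omx->updateGraphicBufferInMeta(
            node, kPortIndexOutput, graphicBuffer, bufferId);
    if (err != OK) {
        ALOGE("updateGraphicBufferInMeta failed (err %d)", err);
    }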
Change-Id: I46bb68d8bed894f357eeeb25368360d11b276634 Signed-off-by: Lajos Molnar --- include/media/IOMX.h | 4 ++++ media/libmedia/IOMX.cpp | 33 ++++++++++++++++++++++++++ media/libstagefright/ACodec.cpp | 17 ++++++++----- media/libstagefright/OMXClient.cpp | 11 +++++++++ media/libstagefright/include/OMX.h | 4 ++++ media/libstagefright/include/OMXNodeInstance.h | 4 ++++ media/libstagefright/omx/OMX.cpp | 7 ++++++ media/libstagefright/omx/OMXNodeInstance.cpp | 20 ++++++++++++++++ 8 files changed, 94 insertions(+), 6 deletions(-) diff --git a/include/media/IOMX.h b/include/media/IOMX.h index 38f9d11..6d116f0 100644 --- a/include/media/IOMX.h +++ b/include/media/IOMX.h @@ -97,6 +97,10 @@ public: node_id node, OMX_U32 port_index, const sp &graphicBuffer, buffer_id *buffer) = 0; + virtual status_t updateGraphicBufferInMeta( + node_id node, OMX_U32 port_index, + const sp &graphicBuffer, buffer_id buffer) = 0; + virtual status_t createInputSurface( node_id node, OMX_U32 port_index, sp *bufferProducer) = 0; diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp index 5bbb2f0..ef99f4f 100644 --- a/media/libmedia/IOMX.cpp +++ b/media/libmedia/IOMX.cpp @@ -52,6 +52,7 @@ enum { OBSERVER_ON_MSG, GET_GRAPHIC_BUFFER_USAGE, SET_INTERNAL_OPTION, + UPDATE_GRAPHIC_BUFFER_IN_META, }; class BpOMX : public BpInterface { @@ -283,6 +284,21 @@ public: return err; } + virtual status_t updateGraphicBufferInMeta( + node_id node, OMX_U32 port_index, + const sp &graphicBuffer, buffer_id buffer) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(port_index); + data.write(*graphicBuffer); + data.writeIntPtr((intptr_t)buffer); + remote()->transact(UPDATE_GRAPHIC_BUFFER_IN_META, data, &reply); + + status_t err = reply.readInt32(); + return err; + } + virtual status_t createInputSurface( node_id node, OMX_U32 port_index, sp *bufferProducer) { @@ -691,6 +707,23 @@ status_t BnOMX::onTransact( return NO_ERROR; } + case UPDATE_GRAPHIC_BUFFER_IN_META: + { + CHECK_OMX_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + OMX_U32 port_index = data.readInt32(); + sp graphicBuffer = new GraphicBuffer(); + data.read(*graphicBuffer); + buffer_id buffer = (void*)data.readIntPtr(); + + status_t err = updateGraphicBufferInMeta( + node, port_index, graphicBuffer, buffer); + reply->writeInt32(err); + + return NO_ERROR; + } + case CREATE_INPUT_SURFACE: { CHECK_OMX_INTERFACE(IOMX, data, reply); diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 00804c5..5aefa58 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -833,15 +833,20 @@ ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { oldest->mGraphicBuffer = new GraphicBuffer(buf, false); oldest->mStatus = BufferInfo::OWNED_BY_US; - struct VideoDecoderOutputMetaData metaData; - metaData.eType = kMetadataBufferTypeGrallocSource; - metaData.pHandle = oldest->mGraphicBuffer->handle; - memcpy(oldest->mData->base(), &metaData, sizeof(metaData)); + mOMX->updateGraphicBufferInMeta( + mNode, kPortIndexOutput, oldest->mGraphicBuffer, + oldest->mBufferID); - ALOGV("replaced oldest buffer #%u with age %u (%p stored in %p)", + VideoDecoderOutputMetaData *metaData = + reinterpret_cast( + oldest->mData->base()); + CHECK_EQ(metaData->eType, kMetadataBufferTypeGrallocSource); + + ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", oldest - &mBuffers[kPortIndexOutput][0], mDequeueCounter - 
oldest->mDequeuedAt, - metaData.pHandle, oldest->mData->base()); + metaData->pHandle, + oldest->mGraphicBuffer->handle, oldest->mData->base()); return oldest; } diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp index 810d88f..9820ef5 100644 --- a/media/libstagefright/OMXClient.cpp +++ b/media/libstagefright/OMXClient.cpp @@ -83,6 +83,10 @@ struct MuxOMX : public IOMX { node_id node, OMX_U32 port_index, const sp &graphicBuffer, buffer_id *buffer); + virtual status_t updateGraphicBufferInMeta( + node_id node, OMX_U32 port_index, + const sp &graphicBuffer, buffer_id buffer); + virtual status_t createInputSurface( node_id node, OMX_U32 port_index, sp *bufferProducer); @@ -287,6 +291,13 @@ status_t MuxOMX::useGraphicBuffer( node, port_index, graphicBuffer, buffer); } +status_t MuxOMX::updateGraphicBufferInMeta( + node_id node, OMX_U32 port_index, + const sp &graphicBuffer, buffer_id buffer) { + return getOMX(node)->updateGraphicBufferInMeta( + node, port_index, graphicBuffer, buffer); +} + status_t MuxOMX::createInputSurface( node_id node, OMX_U32 port_index, sp *bufferProducer) { diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h index 7fed7d4..7e53af3 100644 --- a/media/libstagefright/include/OMX.h +++ b/media/libstagefright/include/OMX.h @@ -79,6 +79,10 @@ public: node_id node, OMX_U32 port_index, const sp &graphicBuffer, buffer_id *buffer); + virtual status_t updateGraphicBufferInMeta( + node_id node, OMX_U32 port_index, + const sp &graphicBuffer, buffer_id buffer); + virtual status_t createInputSurface( node_id node, OMX_U32 port_index, sp *bufferProducer); diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h index f6ae376..ae498b4 100644 --- a/media/libstagefright/include/OMXNodeInstance.h +++ b/media/libstagefright/include/OMXNodeInstance.h @@ -66,6 +66,10 @@ struct OMXNodeInstance { OMX_U32 portIndex, const sp &graphicBuffer, OMX::buffer_id *buffer); + status_t updateGraphicBufferInMeta( + OMX_U32 portIndex, const sp &graphicBuffer, + OMX::buffer_id buffer); + status_t createInputSurface( OMX_U32 portIndex, sp *bufferProducer); diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp index 4b1dbe6..aaa9f89 100644 --- a/media/libstagefright/omx/OMX.cpp +++ b/media/libstagefright/omx/OMX.cpp @@ -345,6 +345,13 @@ status_t OMX::useGraphicBuffer( port_index, graphicBuffer, buffer); } +status_t OMX::updateGraphicBufferInMeta( + node_id node, OMX_U32 port_index, + const sp &graphicBuffer, buffer_id buffer) { + return findInstance(node)->updateGraphicBufferInMeta( + port_index, graphicBuffer, buffer); +} + status_t OMX::createInputSurface( node_id node, OMX_U32 port_index, sp *bufferProducer) { diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index 525e18d..8d100f1 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -70,6 +70,10 @@ struct BufferMeta { header->nFilledLen); } + void setGraphicBuffer(const sp &graphicBuffer) { + mGraphicBuffer = graphicBuffer; + } + private: sp mGraphicBuffer; sp mMem; @@ -566,6 +570,22 @@ status_t OMXNodeInstance::useGraphicBuffer( return OK; } +status_t OMXNodeInstance::updateGraphicBufferInMeta( + OMX_U32 portIndex, const sp& graphicBuffer, + OMX::buffer_id buffer) { + Mutex::Autolock autoLock(mLock); + + OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)(buffer); + VideoDecoderOutputMetaData 
*metadata = + (VideoDecoderOutputMetaData *)(header->pBuffer); + BufferMeta *bufferMeta = (BufferMeta *)(header->pAppPrivate); + bufferMeta->setGraphicBuffer(graphicBuffer); + metadata->eType = kMetadataBufferTypeGrallocSource; + metadata->pHandle = graphicBuffer->handle; + + return OK; +} + status_t OMXNodeInstance::createInputSurface( OMX_U32 portIndex, sp *bufferProducer) { Mutex::Autolock autolock(mLock); -- cgit v1.1 From 9e65879ebc633334bb061e8164c46602ad983c50 Mon Sep 17 00:00:00 2001 From: Ying Wang Date: Tue, 6 Aug 2013 15:54:17 -0700 Subject: Fix build. last attempt. Change-Id: Ib955134f90b78cfc0d31272021ad5907a41df708 --- cmds/stagefright/stagefright.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 529b96c..797e0b6 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -939,7 +939,7 @@ int main(int argc, char **argv) { sp bq = new BufferQueue(); sp texture = new GLConsumer(bq, 0 /* tex */); - gSurface = new Surface(texture->getBufferQueue()); + gSurface = new Surface(bq); } CHECK_EQ((status_t)OK, -- cgit v1.1 From 93c721273208bea4a5d705dce215437a7633fb5a Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Tue, 6 Aug 2013 16:05:27 -0700 Subject: fix build Change-Id: I859bd75eb796b0cb8da30ad947776d3e38f3873f --- cmds/stagefright/stagefright.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 529b96c..797e0b6 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -939,7 +939,7 @@ int main(int argc, char **argv) { sp bq = new BufferQueue(); sp texture = new GLConsumer(bq, 0 /* tex */); - gSurface = new Surface(texture->getBufferQueue()); + gSurface = new Surface(bq); } CHECK_EQ((status_t)OK, -- cgit v1.1 From 4a0efb77198c69df711ab369ac482a42dbdfab07 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 8 Aug 2013 15:20:53 -0700 Subject: Fix regression for AudioRecord streaming callback mode The notification period (frequency of client wakeups) was broken in streaming callback mode. Fast tracks were OK, but non-fast tracks with normal latency weren't getting woken up at all. 
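For a concrete illustration of the restored behaviour (numbers are hypothetical): with a
4096-frame non-fast track and no requested notification period, the actual period is
clamped to frameCount/2, so the client is woken every 2048 frames, well before overrun:

    // Illustrative only; mirrors the clamp added in openRecord_l() below.
    size_t frameCount = 4096;        // hypothetical track frame count
    size_t requested  = 0;           // app did not ask for a period
    size_t act = requested;
    if (act == 0 || act > frameCount / 2) {
        act = frameCount / 2;        // -> callback every 2048 frames
    }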
Bug: 10222943 Change-Id: Ife9a2f57fa73c6eb921f1c5ba62de0bfcc20557b --- media/libmedia/AudioRecord.cpp | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 2718420..616c3d6 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -245,9 +245,8 @@ status_t AudioRecord::set( return BAD_VALUE; } - if (notificationFrames == 0) { - notificationFrames = frameCount/2; - } + mNotificationFramesReq = notificationFrames; + mNotificationFramesAct = 0; // create the IAudioRecord status = openRecord_l(sampleRate, format, frameCount, mFlags, input, 0 /*epoch*/); @@ -267,8 +266,6 @@ status_t AudioRecord::set( mActive = false; mCbf = cbf; - mNotificationFramesReq = notificationFrames; - mNotificationFramesAct = 0; mRefreshRemaining = true; mUserData = user; // TODO: add audio hardware input latency here @@ -464,6 +461,15 @@ status_t AudioRecord::openRecord_l( } } + mNotificationFramesAct = mNotificationFramesReq; + + if (!(flags & AUDIO_INPUT_FLAG_FAST)) { + // Make sure that application is notified with sufficient margin before overrun + if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) { + mNotificationFramesAct = frameCount/2; + } + } + int originalSessionId = mSessionId; sp record = audioFlinger->openRecord(input, sampleRate, format, @@ -495,7 +501,6 @@ status_t AudioRecord::openRecord_l( mCblk = cblk; // FIXME missing fast track frameCount logic mAwaitBoost = false; - mNotificationFramesAct = mNotificationFramesReq; if (flags & AUDIO_INPUT_FLAG_FAST) { if (trackFlags & IAudioFlinger::TRACK_FAST) { ALOGV("AUDIO_INPUT_FLAG_FAST successful; frameCount %u", frameCount); -- cgit v1.1 From c3e54939240e94b454cad18366b9eab52b9f0789 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 8 Aug 2013 15:20:53 -0700 Subject: Fix regression for AudioRecord streaming callback mode The notification period (frequency of client wakeups) was broken in streaming callback mode. Fast tracks were OK, but non-fast tracks with normal latency weren't getting woken up at all. 
Bug: 10222943 Change-Id: Ife9a2f57fa73c6eb921f1c5ba62de0bfcc20557b --- media/libmedia/AudioRecord.cpp | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 2718420..616c3d6 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -245,9 +245,8 @@ status_t AudioRecord::set( return BAD_VALUE; } - if (notificationFrames == 0) { - notificationFrames = frameCount/2; - } + mNotificationFramesReq = notificationFrames; + mNotificationFramesAct = 0; // create the IAudioRecord status = openRecord_l(sampleRate, format, frameCount, mFlags, input, 0 /*epoch*/); @@ -267,8 +266,6 @@ status_t AudioRecord::set( mActive = false; mCbf = cbf; - mNotificationFramesReq = notificationFrames; - mNotificationFramesAct = 0; mRefreshRemaining = true; mUserData = user; // TODO: add audio hardware input latency here @@ -464,6 +461,15 @@ status_t AudioRecord::openRecord_l( } } + mNotificationFramesAct = mNotificationFramesReq; + + if (!(flags & AUDIO_INPUT_FLAG_FAST)) { + // Make sure that application is notified with sufficient margin before overrun + if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) { + mNotificationFramesAct = frameCount/2; + } + } + int originalSessionId = mSessionId; sp record = audioFlinger->openRecord(input, sampleRate, format, @@ -495,7 +501,6 @@ status_t AudioRecord::openRecord_l( mCblk = cblk; // FIXME missing fast track frameCount logic mAwaitBoost = false; - mNotificationFramesAct = mNotificationFramesReq; if (flags & AUDIO_INPUT_FLAG_FAST) { if (trackFlags & IAudioFlinger::TRACK_FAST) { ALOGV("AUDIO_INPUT_FLAG_FAST successful; frameCount %u", frameCount); -- cgit v1.1 From ac0bbe16f3eba46b3d8057b66c2aff9101fc6f7d Mon Sep 17 00:00:00 2001 From: Jeff Brown Date: Fri, 9 Aug 2013 18:32:30 -0700 Subject: Fix potential leak of audio input handle. The audio input handle is ultimately owned by the audio recorder object but it could be dropped on the floor if an error occurred before that object was fully initialized. Rearranged some of the argument validation and merged getInput_l with openRecord_l to simplify the code and prevent such a leak from occurring. 
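A minimal sketch of the acquire/release pairing this change establishes (simplified, with
most arguments and error handling trimmed; names follow the diff below):

    // The input handle is now obtained inside openRecord_l() and released on
    // the error path, so it can no longer be dropped on the floor if track
    // creation fails before the recorder is fully initialized.
    status_t status;
    audio_io_handle_t input = AudioSystem::getInput(mInputSource, mSampleRate,
            mFormat, mChannelMask, mSessionId);
    if (input == 0) {
        return BAD_VALUE;                    // nothing acquired yet
    }
    sp<IAudioRecord> record = audioFlinger->openRecord(input, /* ... */, &status);
    if (record == 0) {
        AudioSystem::releaseInput(input);    // avoid leaking the handle
        return status;
    }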
Bug: 10265163 Change-Id: I124dce344b1d11c2dd66ca5e2c9aec0c52c230e2 --- include/media/AudioRecord.h | 9 +--- media/libmedia/AudioRecord.cpp | 103 +++++++++++++++++------------------------ 2 files changed, 43 insertions(+), 69 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index f457261..62f0c64 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -423,14 +423,7 @@ private: nsecs_t processAudioBuffer(const sp& thread); // caller must hold lock on mLock for all _l methods - status_t openRecord_l(uint32_t sampleRate, - audio_format_t format, - size_t frameCount, - audio_input_flags_t flags, - audio_io_handle_t input, - size_t epoch); - - audio_io_handle_t getInput_l(); + status_t openRecord_l(size_t epoch); // FIXME enum is faster than strcmp() for parameter 'from' status_t restoreRecord_l(const char *from); diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 616c3d6..e934a3e 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -175,6 +175,7 @@ status_t AudioRecord::set( if (inputSource == AUDIO_SOURCE_DEFAULT) { inputSource = AUDIO_SOURCE_MIC; } + mInputSource = inputSource; if (sampleRate == 0) { ALOGE("Invalid sample rate %u", sampleRate); @@ -210,28 +211,10 @@ status_t AudioRecord::set( // Assumes audio_is_linear_pcm(format), else sizeof(uint8_t) mFrameSize = channelCount * audio_bytes_per_sample(format); - if (sessionId == 0 ) { - mSessionId = AudioSystem::newAudioSessionId(); - } else { - mSessionId = sessionId; - } - ALOGV("set(): mSessionId %d", mSessionId); - - mFlags = flags; - - audio_io_handle_t input = AudioSystem::getInput(inputSource, - sampleRate, - format, - channelMask, - mSessionId); - if (input == 0) { - ALOGE("Could not get audio input for record source %d", inputSource); - return BAD_VALUE; - } - // validate framecount size_t minFrameCount = 0; - status_t status = getMinFrameCount(&minFrameCount, sampleRate, format, channelMask); + status_t status = AudioRecord::getMinFrameCount(&minFrameCount, + sampleRate, format, channelMask); if (status != NO_ERROR) { ALOGE("getMinFrameCount() failed; status %d", status); return status; @@ -244,13 +227,23 @@ status_t AudioRecord::set( ALOGE("frameCount %u < minFrameCount %u", frameCount, minFrameCount); return BAD_VALUE; } + mFrameCount = frameCount; mNotificationFramesReq = notificationFrames; mNotificationFramesAct = 0; + if (sessionId == 0 ) { + mSessionId = AudioSystem::newAudioSessionId(); + } else { + mSessionId = sessionId; + } + ALOGV("set(): mSessionId %d", mSessionId); + + mFlags = flags; + // create the IAudioRecord - status = openRecord_l(sampleRate, format, frameCount, mFlags, input, 0 /*epoch*/); - if (status != NO_ERROR) { + status = openRecord_l(0 /*epoch*/); + if (status) { return status; } @@ -274,8 +267,6 @@ status_t AudioRecord::set( mMarkerReached = false; mNewPosition = 0; mUpdatePeriod = 0; - mInputSource = inputSource; - mInput = input; AudioSystem::acquireAudioSessionId(mSessionId); mSequence = 1; mObservedSequence = mSequence; @@ -429,13 +420,7 @@ unsigned int AudioRecord::getInputFramesLost() const // ------------------------------------------------------------------------- // must be called with mLock held -status_t AudioRecord::openRecord_l( - uint32_t sampleRate, - audio_format_t format, - size_t frameCount, - audio_input_flags_t flags, - audio_io_handle_t input, - size_t epoch) +status_t AudioRecord::openRecord_l(size_t epoch) { status_t status; const sp& audioFlinger = 
AudioSystem::get_audio_flinger(); @@ -449,12 +434,11 @@ status_t AudioRecord::openRecord_l( // Client can only express a preference for FAST. Server will perform additional tests. // The only supported use case for FAST is callback transfer mode. - if (flags & AUDIO_INPUT_FLAG_FAST) { + if (mFlags & AUDIO_INPUT_FLAG_FAST) { if ((mTransfer != TRANSFER_CALLBACK) || (mAudioRecordThread == 0)) { ALOGW("AUDIO_INPUT_FLAG_FAST denied by client"); // once denied, do not request again if IAudioRecord is re-created - flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_FAST); - mFlags = flags; + mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST); } else { trackFlags |= IAudioFlinger::TRACK_FAST; tid = mAudioRecordThread->getTid(); @@ -463,18 +447,25 @@ status_t AudioRecord::openRecord_l( mNotificationFramesAct = mNotificationFramesReq; - if (!(flags & AUDIO_INPUT_FLAG_FAST)) { + if (!(mFlags & AUDIO_INPUT_FLAG_FAST)) { // Make sure that application is notified with sufficient margin before overrun - if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) { - mNotificationFramesAct = frameCount/2; + if (mNotificationFramesAct == 0 || mNotificationFramesAct > mFrameCount/2) { + mNotificationFramesAct = mFrameCount/2; } } + audio_io_handle_t input = AudioSystem::getInput(mInputSource, mSampleRate, mFormat, + mChannelMask, mSessionId); + if (input == 0) { + ALOGE("Could not get audio input for record source %d", mInputSource); + return BAD_VALUE; + } + int originalSessionId = mSessionId; sp record = audioFlinger->openRecord(input, - sampleRate, format, + mSampleRate, mFormat, mChannelMask, - frameCount, + mFrameCount, &trackFlags, tid, &mSessionId, @@ -484,6 +475,7 @@ status_t AudioRecord::openRecord_l( if (record == 0) { ALOGE("AudioFlinger could not create record track, status: %d", status); + AudioSystem::releaseInput(input); return status; } sp iMem = record->getCblk(); @@ -495,27 +487,27 @@ status_t AudioRecord::openRecord_l( mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this); mDeathNotifier.clear(); } + mInput = input; mAudioRecord = record; mCblkMemory = iMem; audio_track_cblk_t* cblk = static_cast(iMem->pointer()); mCblk = cblk; // FIXME missing fast track frameCount logic mAwaitBoost = false; - if (flags & AUDIO_INPUT_FLAG_FAST) { + if (mFlags & AUDIO_INPUT_FLAG_FAST) { if (trackFlags & IAudioFlinger::TRACK_FAST) { - ALOGV("AUDIO_INPUT_FLAG_FAST successful; frameCount %u", frameCount); + ALOGV("AUDIO_INPUT_FLAG_FAST successful; frameCount %u", mFrameCount); mAwaitBoost = true; // double-buffering is not required for fast tracks, due to tighter scheduling - if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount) { - mNotificationFramesAct = frameCount; + if (mNotificationFramesAct == 0 || mNotificationFramesAct > mFrameCount) { + mNotificationFramesAct = mFrameCount; } } else { - ALOGV("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %u", frameCount); + ALOGV("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %u", mFrameCount); // once denied, do not request again if IAudioRecord is re-created - flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_FAST); - mFlags = flags; - if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) { - mNotificationFramesAct = frameCount/2; + mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST); + if (mNotificationFramesAct == 0 || mNotificationFramesAct > mFrameCount/2) { + mNotificationFramesAct = mFrameCount/2; } } } @@ -524,7 +516,7 @@ status_t 
AudioRecord::openRecord_l( void *buffers = (char*)cblk + sizeof(audio_track_cblk_t); // update proxy - mProxy = new AudioRecordClientProxy(cblk, buffers, frameCount, mFrameSize); + mProxy = new AudioRecordClientProxy(cblk, buffers, mFrameCount, mFrameSize); mProxy->setEpoch(epoch); mProxy->setMinimum(mNotificationFramesAct); @@ -651,17 +643,6 @@ audio_io_handle_t AudioRecord::getInput() const return mInput; } -// must be called with mLock held -audio_io_handle_t AudioRecord::getInput_l() -{ - mInput = AudioSystem::getInput(mInputSource, - mSampleRate, - mFormat, - mChannelMask, - mSessionId); - return mInput; -} - // ------------------------------------------------------------------------- ssize_t AudioRecord::read(void* buffer, size_t userSize) @@ -949,7 +930,7 @@ status_t AudioRecord::restoreRecord_l(const char *from) // It will also delete the strong references on previous IAudioRecord and IMemory size_t position = mProxy->getPosition(); mNewPosition = position + mUpdatePeriod; - result = openRecord_l(mSampleRate, mFormat, mFrameCount, mFlags, getInput_l(), position); + result = openRecord_l(position); if (result == NO_ERROR) { if (mActive) { // callback thread or sync event hasn't changed -- cgit v1.1 From b8689b31813f55cbc1bf8e9ca0d46a9ee113e38a Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Mon, 12 Aug 2013 10:08:23 -0700 Subject: Support for "request-sync" parameter in ACodec Also returns errors to caller instead of asserting... Change-Id: Id3018655a2b3da4289167fba16af907350a511ae --- media/libstagefright/ACodec.cpp | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 5aefa58..36549d1 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -4123,13 +4123,28 @@ status_t ACodec::setParameters(const sp ¶ms) { if (params->findInt32("drop-input-frames", &dropInputFrames)) { bool suspend = dropInputFrames != 0; - CHECK_EQ((status_t)OK, - mOMX->setInternalOption( + status_t err = + mOMX->setInternalOption( mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_SUSPEND, &suspend, - sizeof(suspend))); + sizeof(suspend)); + + if (err != OK) { + ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err); + return err; + } + } + + int32_t dummy; + if (params->findInt32("request-sync", &dummy)) { + status_t err = requestIDRFrame(); + + if (err != OK) { + ALOGE("Requesting a sync frame failed w/ err %d", err); + return err; + } } return OK; -- cgit v1.1 From 3cb3f576475c04058c265bb2dfa366cc98b2434c Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Tue, 13 Aug 2013 08:55:47 -0700 Subject: Restore stagefright command line tool. Change-Id: Ia17f48522f601d6a057ef3e1e8cd01f80a5c9951 --- cmds/stagefright/Android.mk | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk index 1060131..561ce02 100644 --- a/cmds/stagefright/Android.mk +++ b/cmds/stagefright/Android.mk @@ -19,6 +19,8 @@ LOCAL_C_INCLUDES:= \ LOCAL_CFLAGS += -Wno-multichar +LOCAL_MODULE_TAGS := optional + LOCAL_MODULE:= stagefright include $(BUILD_EXECUTABLE) -- cgit v1.1 From 46052913f307b1561f1661bb776fa29c0775758c Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Tue, 13 Aug 2013 09:56:19 -0700 Subject: Better error message Attempt to create the output file before handing it to MediaMuxer, which doesn't report file-open failures in a useful way. 
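The design choice in the hunk below is to fail early in the caller, while errno is still
meaningful, rather than wait for the generic error from muxer.start(). In isolation the
pre-flight check looks like this (fileName is the output path argument):

    // Illustrative: create the output file up front to surface a useful errno.
    int fd = open(fileName, O_CREAT | O_RDWR, 0644);
    if (fd < 0) {
        fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
        return 1;
    }
    close(fd);    // the muxer opens the file again later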
Change-Id: Ie24ff577dd50e185b4eb72575684d23a46f38d3d --- cmds/screenrecord/screenrecord.cpp | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp index 28fc00f..923f781 100644 --- a/cmds/screenrecord/screenrecord.cpp +++ b/cmds/screenrecord/screenrecord.cpp @@ -36,6 +36,7 @@ #include #include +#include #include #include @@ -599,7 +600,19 @@ int main(int argc, char* const argv[]) { return 2; } - status_t err = recordScreen(argv[optind]); + // MediaMuxer tries to create the file in the constructor, but we don't + // learn about the failure until muxer.start(), which returns a generic + // error code without logging anything. We attempt to create the file + // now for better diagnostics. + const char* fileName = argv[optind]; + int fd = open(fileName, O_CREAT | O_RDWR, 0644); + if (fd < 0) { + fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno)); + return 1; + } + close(fd); + + status_t err = recordScreen(fileName); ALOGD(err == NO_ERROR ? "success" : "failed"); return (int) err; } -- cgit v1.1 From a61285dcf1da8a2cf40c499ee3a7b9fc4d74ac58 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 31 Jul 2013 13:50:42 -0700 Subject: Optionally repeat the previously submitted frame to the encoder if no new frame has been delivered by surface flinger within the timeout interval. Change-Id: I282f1b726dfe5646b178d7858d6f5d4f5a264fde --- include/media/IOMX.h | 1 + include/media/stagefright/ACodec.h | 2 + media/libstagefright/ACodec.cpp | 27 +++- media/libstagefright/omx/GraphicBufferSource.cpp | 167 ++++++++++++++++++++++- media/libstagefright/omx/GraphicBufferSource.h | 38 ++++++ media/libstagefright/omx/OMXNodeInstance.cpp | 21 ++- 6 files changed, 247 insertions(+), 9 deletions(-) diff --git a/include/media/IOMX.h b/include/media/IOMX.h index 6d116f0..db9093a 100644 --- a/include/media/IOMX.h +++ b/include/media/IOMX.h @@ -137,6 +137,7 @@ public: enum InternalOptionType { INTERNAL_OPTION_SUSPEND, // data is a bool + INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, // data is an int64_t }; virtual status_t setInternalOption( node_id node, diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h index 6bf83dd..41542ec 100644 --- a/include/media/stagefright/ACodec.h +++ b/include/media/stagefright/ACodec.h @@ -199,6 +199,8 @@ private: bool mStoreMetaDataInOutputBuffers; int32_t mMetaDataBuffersToSubmit; + int64_t mRepeatFrameDelayUs; + status_t setCyclicIntraMacroblockRefresh(const sp &msg, int32_t mode); status_t allocateBuffersOnPort(OMX_U32 portIndex); status_t freeBuffersOnPort(OMX_U32 portIndex); diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 5aefa58..cd67359 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -369,7 +369,8 @@ ACodec::ACodec() mChannelMask(0), mDequeueCounter(0), mStoreMetaDataInOutputBuffers(false), - mMetaDataBuffersToSubmit(0) { + mMetaDataBuffersToSubmit(0), + mRepeatFrameDelayUs(-1ll) { mUninitializedState = new UninitializedState(this); mLoadedState = new LoadedState(this); mLoadedToIdleState = new LoadedToIdleState(this); @@ -1089,6 +1090,12 @@ status_t ACodec::configureCodec( } else { mUseMetadataOnEncoderOutput = enable; } + + if (!msg->findInt64( + "repeat-previous-frame-after", + &mRepeatFrameDelayUs)) { + mRepeatFrameDelayUs = -1ll; + } } // Always try to enable dynamic output buffers on native surface @@ -3611,6 +3618,7 @@ void 
ACodec::LoadedState::stateEntered() { mCodec->mDequeueCounter = 0; mCodec->mMetaDataBuffersToSubmit = 0; + mCodec->mRepeatFrameDelayUs = -1ll; if (mCodec->mShutdownInProgress) { bool keepComponentAllocated = mCodec->mKeepComponentAllocated; @@ -3742,6 +3750,23 @@ void ACodec::LoadedState::onCreateInputSurface( err = mCodec->mOMX->createInputSurface(mCodec->mNode, kPortIndexInput, &bufferProducer); + + if (err == OK && mCodec->mRepeatFrameDelayUs > 0ll) { + err = mCodec->mOMX->setInternalOption( + mCodec->mNode, + kPortIndexInput, + IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, + &mCodec->mRepeatFrameDelayUs, + sizeof(mCodec->mRepeatFrameDelayUs)); + + if (err != OK) { + ALOGE("[%s] Unable to configure option to repeat previous " + "frames (err %d)", + mCodec->mComponentName.c_str(), + err); + } + } + if (err == OK) { notify->setObject("input-surface", new BufferProducerWrapper(bufferProducer)); diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp index 325ffcf..cf43e94 100644 --- a/media/libstagefright/omx/GraphicBufferSource.cpp +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -22,6 +22,7 @@ #include #include +#include #include #include @@ -39,7 +40,13 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, mSuspended(false), mNumFramesAvailable(0), mEndOfStream(false), - mEndOfStreamSent(false) { + mEndOfStreamSent(false), + mRepeatAfterUs(-1ll), + mRepeatLastFrameGeneration(0), + mLatestSubmittedBufferId(-1), + mLatestSubmittedBufferFrameNum(0), + mLatestSubmittedBufferUseCount(0), + mRepeatBufferDeferred(false) { ALOGV("GraphicBufferSource w=%u h=%u c=%u", bufferWidth, bufferHeight, bufferCount); @@ -123,6 +130,22 @@ void GraphicBufferSource::omxExecuting() { if (mEndOfStream && mNumFramesAvailable == 0) { submitEndOfInputStream_l(); } + + if (mRepeatAfterUs > 0ll && mLooper == NULL) { + mReflector = new AHandlerReflector(this); + + mLooper = new ALooper; + mLooper->registerHandler(mReflector); + mLooper->start(); + + if (mLatestSubmittedBufferId >= 0) { + sp msg = + new AMessage(kWhatRepeatLastFrame, mReflector->id()); + + msg->setInt32("generation", ++mRepeatLastFrameGeneration); + msg->post(mRepeatAfterUs); + } + } } void GraphicBufferSource::omxLoaded(){ @@ -132,6 +155,14 @@ void GraphicBufferSource::omxLoaded(){ ALOGW("Dropped back down to Loaded without Executing"); } + if (mLooper != NULL) { + mLooper->unregisterHandler(mReflector->id()); + mReflector.clear(); + + mLooper->stop(); + mLooper.clear(); + } + ALOGV("--> loaded; avail=%d eos=%d eosSent=%d", mNumFramesAvailable, mEndOfStream, mEndOfStreamSent); @@ -211,8 +242,12 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { ALOGV("cbi %d matches bq slot %d, handle=%p", cbi, id, mBufferSlot[id]->handle); - mBufferQueue->releaseBuffer(id, codecBuffer.mFrameNumber, - EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); + if (id == mLatestSubmittedBufferId) { + CHECK_GT(mLatestSubmittedBufferUseCount--, 0); + } else { + mBufferQueue->releaseBuffer(id, codecBuffer.mFrameNumber, + EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); + } } else { ALOGV("codecBufferEmptied: no match for emptied buffer in cbi %d", cbi); @@ -232,7 +267,16 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { // send that. 
ALOGV("buffer freed, EOS pending"); submitEndOfInputStream_l(); + } else if (mRepeatBufferDeferred) { + bool success = repeatLatestSubmittedBuffer_l(); + if (success) { + ALOGV("deferred repeatLatestSubmittedBuffer_l SUCCESS"); + } else { + ALOGV("deferred repeatLatestSubmittedBuffer_l FAILURE"); + } + mRepeatBufferDeferred = false; } + return; } @@ -264,6 +308,16 @@ void GraphicBufferSource::suspend(bool suspend) { } mSuspended = false; + + if (mExecuting && mNumFramesAvailable == 0 && mRepeatBufferDeferred) { + if (repeatLatestSubmittedBuffer_l()) { + ALOGV("suspend/deferred repeatLatestSubmittedBuffer_l SUCCESS"); + + mRepeatBufferDeferred = false; + } else { + ALOGV("suspend/deferred repeatLatestSubmittedBuffer_l FAILURE"); + } + } } bool GraphicBufferSource::fillCodecBuffer_l() { @@ -318,11 +372,68 @@ bool GraphicBufferSource::fillCodecBuffer_l() { EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); } else { ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi); + setLatestSubmittedBuffer_l(item); + } + + return true; +} + +bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() { + CHECK(mExecuting && mNumFramesAvailable == 0); + + if (mLatestSubmittedBufferId < 0 || mSuspended) { + return false; + } + + int cbi = findAvailableCodecBuffer_l(); + if (cbi < 0) { + // No buffers available, bail. + ALOGV("repeatLatestSubmittedBuffer_l: no codec buffers."); + return false; + } + + BufferQueue::BufferItem item; + item.mBuf = mLatestSubmittedBufferId; + item.mFrameNumber = mLatestSubmittedBufferFrameNum; + + status_t err = submitBuffer_l(item, cbi); + + if (err != OK) { + return false; } + ++mLatestSubmittedBufferUseCount; + return true; } +void GraphicBufferSource::setLatestSubmittedBuffer_l( + const BufferQueue::BufferItem &item) { + ALOGV("setLatestSubmittedBuffer_l"); + + if (mLatestSubmittedBufferId >= 0) { + if (mLatestSubmittedBufferUseCount == 0) { + mBufferQueue->releaseBuffer( + mLatestSubmittedBufferId, + mLatestSubmittedBufferFrameNum, + EGL_NO_DISPLAY, + EGL_NO_SYNC_KHR, + Fence::NO_FENCE); + } + } + + mLatestSubmittedBufferId = item.mBuf; + mLatestSubmittedBufferFrameNum = item.mFrameNumber; + mLatestSubmittedBufferUseCount = 1; + mRepeatBufferDeferred = false; + + if (mReflector != NULL) { + sp msg = new AMessage(kWhatRepeatLastFrame, mReflector->id()); + msg->setInt32("generation", ++mRepeatLastFrameGeneration); + msg->post(mRepeatAfterUs); + } +} + status_t GraphicBufferSource::signalEndOfInputStream() { Mutex::Autolock autoLock(mMutex); ALOGV("signalEndOfInputStream: exec=%d avail=%d eos=%d", @@ -470,6 +581,9 @@ void GraphicBufferSource::onFrameAvailable() { mNumFramesAvailable++; + mRepeatBufferDeferred = false; + ++mRepeatLastFrameGeneration; + if (mExecuting) { fillCodecBuffer_l(); } @@ -495,4 +609,51 @@ void GraphicBufferSource::onBuffersReleased() { } } +status_t GraphicBufferSource::setRepeatPreviousFrameDelayUs( + int64_t repeatAfterUs) { + Mutex::Autolock autoLock(mMutex); + + if (mExecuting || repeatAfterUs <= 0ll) { + return INVALID_OPERATION; + } + + mRepeatAfterUs = repeatAfterUs; + + return OK; +} + +void GraphicBufferSource::onMessageReceived(const sp &msg) { + switch (msg->what()) { + case kWhatRepeatLastFrame: + { + Mutex::Autolock autoLock(mMutex); + + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mRepeatLastFrameGeneration) { + // stale + break; + } + + if (!mExecuting || mNumFramesAvailable > 0) { + break; + } + + bool success = repeatLatestSubmittedBuffer_l(); + + if (success) { + 
ALOGV("repeatLatestSubmittedBuffer_l SUCCESS"); + } else { + ALOGV("repeatLatestSubmittedBuffer_l FAILURE"); + mRepeatBufferDeferred = true; + } + break; + } + + default: + TRESPASS(); + } +} + } // namespace android diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h index ac73770..244a843 100644 --- a/media/libstagefright/omx/GraphicBufferSource.h +++ b/media/libstagefright/omx/GraphicBufferSource.h @@ -25,6 +25,8 @@ #include #include "../include/OMXNodeInstance.h" #include +#include +#include namespace android { @@ -89,6 +91,15 @@ public: // in the BufferQueue) will be discarded until the suspension is lifted. void suspend(bool suspend); + // Specifies the interval after which we requeue the buffer previously + // queued to the encoder. This is useful in the case of surface flinger + // providing the input surface if the resulting encoded stream is to + // be displayed "live". If we were not to push through the extra frame + // the decoder on the remote end would be unable to decode the latest frame. + // This API must be called before transitioning the encoder to "executing" + // state and once this behaviour is specified it cannot be reset. + status_t setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs); + protected: // BufferQueue::ConsumerListener interface, called when a new frame of // data is available. If we're executing and a codec buffer is @@ -147,6 +158,9 @@ private: // doing anything if we don't have a codec buffer available. void submitEndOfInputStream_l(); + void setLatestSubmittedBuffer_l(const BufferQueue::BufferItem &item); + bool repeatLatestSubmittedBuffer_l(); + // Lock, covers all member variables. mutable Mutex mMutex; @@ -181,6 +195,30 @@ private: // Tracks codec buffers. Vector mCodecBuffers; + //// + friend class AHandlerReflector; + + enum { + kWhatRepeatLastFrame, + }; + + int64_t mRepeatAfterUs; + + sp mLooper; + sp > mReflector; + + int32_t mRepeatLastFrameGeneration; + + int mLatestSubmittedBufferId; + uint64_t mLatestSubmittedBufferFrameNum; + int32_t mLatestSubmittedBufferUseCount; + + // The previously submitted buffer should've been repeated but + // no codec buffer was available at the time. 
+ bool mRepeatBufferDeferred; + + void onMessageReceived(const sp &msg); + DISALLOW_EVIL_CONSTRUCTORS(GraphicBufferSource); }; diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index 8d100f1..ef683a0 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -809,6 +809,7 @@ status_t OMXNodeInstance::setInternalOption( size_t size) { switch (type) { case IOMX::INTERNAL_OPTION_SUSPEND: + case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY: { const sp &bufferSource = getGraphicBufferSource(); @@ -817,12 +818,22 @@ status_t OMXNodeInstance::setInternalOption( return ERROR_UNSUPPORTED; } - if (size != sizeof(bool)) { - return INVALID_OPERATION; - } + if (type == IOMX::INTERNAL_OPTION_SUSPEND) { + if (size != sizeof(bool)) { + return INVALID_OPERATION; + } + + bool suspend = *(bool *)data; + bufferSource->suspend(suspend); + } else { + if (size != sizeof(int64_t)) { + return INVALID_OPERATION; + } + + int64_t delayUs = *(int64_t *)data; - bool suspend = *(bool *)data; - bufferSource->suspend(suspend); + return bufferSource->setRepeatPreviousFrameDelayUs(delayUs); + } return OK; } -- cgit v1.1 From 2ee14000a38683220fb250d9e7300e1d71ccdaa0 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Tue, 6 Aug 2013 15:02:22 -0700 Subject: Check adaptive playback support via CodecCapabilities Added flags field to CodecCapabilities, so that applications can query whether codecs support various features. For now added one video-decoder feature: kFlagSupportsAdaptivePlayback Media playback applications can query it to see if a codec supports seamless resolution changes, such as by supporting dynamic output buffers. Signed-off-by: Lajos Molnar Change-Id: I09da46e8ab6b8645fa2749e33128e49eda2e865b Related-to-bug: 7093648 --- include/media/stagefright/MediaCodecList.h | 3 ++- include/media/stagefright/OMXCodec.h | 5 +++++ media/libstagefright/MediaCodecList.cpp | 5 ++++- media/libstagefright/OMXCodec.cpp | 9 ++++++++- 4 files changed, 19 insertions(+), 3 deletions(-) diff --git a/include/media/stagefright/MediaCodecList.h b/include/media/stagefright/MediaCodecList.h index dfb845b..590623b 100644 --- a/include/media/stagefright/MediaCodecList.h +++ b/include/media/stagefright/MediaCodecList.h @@ -50,7 +50,8 @@ struct MediaCodecList { status_t getCodecCapabilities( size_t index, const char *type, Vector *profileLevels, - Vector *colorFormats) const; + Vector *colorFormats, + uint32_t *flags) const; private: enum Section { diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h index 583c3b3..daaf20f 100644 --- a/include/media/stagefright/OMXCodec.h +++ b/include/media/stagefright/OMXCodec.h @@ -361,9 +361,14 @@ private: }; struct CodecCapabilities { + enum { + kFlagSupportsAdaptivePlayback = 1 << 0, + }; + String8 mComponentName; Vector mProfileLevels; Vector mColorFormats; + uint32_t mFlags; }; // Return a vector of componentNames with supported profile/level pairs diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp index d24337f..6248e90 100644 --- a/media/libstagefright/MediaCodecList.cpp +++ b/media/libstagefright/MediaCodecList.cpp @@ -509,7 +509,8 @@ status_t MediaCodecList::getSupportedTypes( status_t MediaCodecList::getCodecCapabilities( size_t index, const char *type, Vector *profileLevels, - Vector *colorFormats) const { + Vector *colorFormats, + uint32_t *flags) const { profileLevels->clear(); colorFormats->clear(); @@ 
-547,6 +548,8 @@ status_t MediaCodecList::getCodecCapabilities( colorFormats->push(caps.mColorFormats.itemAt(i)); } + *flags = caps.mFlags; + return OK; } diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 3de3c28..7b37365 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -4567,7 +4567,7 @@ status_t QueryCodec( CodecCapabilities *caps) { if (strncmp(componentName, "OMX.", 4)) { // Not an OpenMax component but a software codec. - + caps->mFlags = 0; caps->mComponentName = componentName; return OK; } @@ -4582,8 +4582,15 @@ status_t QueryCodec( OMXCodec::setComponentRole(omx, node, isEncoder, mime); + caps->mFlags = 0; caps->mComponentName = componentName; + if (!isEncoder && !strncmp(mime, "video/", 6) && + omx->storeMetaDataInBuffers( + node, 1 /* port index */, OMX_TRUE) == OK) { + caps->mFlags |= CodecCapabilities::kFlagSupportsAdaptivePlayback; + } + OMX_VIDEO_PARAM_PROFILELEVELTYPE param; InitOMXParams(¶m); -- cgit v1.1 From 0167414e261f88a96b5e4bf6cb592e6ca11e5a95 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 14 Aug 2013 10:28:14 -0700 Subject: You can now opt-in to having the video decoder push blank buffers to the surface on the transition from executing->idle by specifying the key "push-blank-buffers-on-shutdown" with a value of 1 in the call to configure. Change-Id: I1155fccf89f18e717728c985e590651597595515 --- include/media/stagefright/ACodec.h | 3 ++- media/libstagefright/ACodec.cpp | 10 +++++++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h index 41542ec..a8ffd4a 100644 --- a/include/media/stagefright/ACodec.h +++ b/include/media/stagefright/ACodec.h @@ -124,7 +124,8 @@ private: }; enum { - kFlagIsSecure = 1, + kFlagIsSecure = 1, + kFlagPushBlankBuffersToNativeWindowOnShutdown = 2, }; struct BufferInfo { diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 7b0bce0..2e55c4f 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -1114,6 +1114,12 @@ status_t ACodec::configureCodec( ALOGV("[%s] storeMetaDataInBuffers succeeded", mComponentName.c_str()); mStoreMetaDataInOutputBuffers = true; } + + int32_t push; + if (msg->findInt32("push-blank-buffers-on-shutdown", &push) + && push != 0) { + mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; + } } if (video) { @@ -3584,6 +3590,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp &msg) { if (componentName.endsWith(".secure")) { mCodec->mFlags |= kFlagIsSecure; + mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; } mCodec->mQuirks = quirks; @@ -4413,7 +4420,8 @@ void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexInput), (status_t)OK); CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexOutput), (status_t)OK); - if (mCodec->mFlags & kFlagIsSecure && mCodec->mNativeWindow != NULL) { + if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) + && mCodec->mNativeWindow != NULL) { // We push enough 1x1 blank buffers to ensure that one of // them has made it to the display. This allows the OMX // component teardown to zero out any protected buffers -- cgit v1.1 From 2533c83b4ed8e1ca5b259d59373f941c8f0e9635 Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Wed, 14 Aug 2013 11:23:06 -0700 Subject: Use destroyDisplay() Use the new SurfaceComposerClient::destroyDisplay call to shut the virtual display down. 
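The resulting teardown order is, in outline (a sketch of the code after this change, not a
verbatim copy):

    // Shut down the producer side first so SurfaceFlinger stops feeding us,
    // then remove the virtual display entirely instead of merely detaching
    // its surface, and only then stop the encoder and muxer.
    bufferProducer = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    encoder->stop();
    muxer->stop();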
Change-Id: I0225a44594abf427d386456622b23b01fdd1690f --- cmds/screenrecord/screenrecord.cpp | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp index 28fc00f..c904b03 100644 --- a/cmds/screenrecord/screenrecord.cpp +++ b/cmds/screenrecord/screenrecord.cpp @@ -460,17 +460,9 @@ static status_t recordScreen(const char* fileName) { printf("Stopping encoder and muxer\n"); } - // Shut everything down. - // - // The virtual display will continue to produce frames until "dpy" - // goes out of scope (and something causes the Binder traffic to transmit; - // can be forced with IPCThreadState::self()->flushCommands()). This - // could cause SurfaceFlinger to get stuck trying to feed us, so we want - // to set a NULL Surface to make the virtual display "dormant". + // Shut everything down, starting with the producer side. bufferProducer = NULL; - SurfaceComposerClient::openGlobalTransaction(); - SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer); - SurfaceComposerClient::closeGlobalTransaction(); + SurfaceComposerClient::destroyDisplay(dpy); encoder->stop(); muxer->stop(); -- cgit v1.1 From a2081368e2068a86f2db9b0dd562d9e18f69ea37 Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Wed, 14 Aug 2013 15:08:29 -0700 Subject: Another error check Change-Id: I01ed5c0c7dcb520732d2856859bced31df5fbbb5 --- cmds/screenrecord/screenrecord.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp index c904b03..a5fbe16 100644 --- a/cmds/screenrecord/screenrecord.cpp +++ b/cmds/screenrecord/screenrecord.cpp @@ -143,6 +143,10 @@ static status_t prepareEncoder(float displayFps, sp* pCodec, looper->start(); ALOGV("Creating codec"); sp codec = MediaCodec::CreateByType(looper, "video/avc", true); + if (codec == NULL) { + fprintf(stderr, "ERROR: unable to create video/avc codec instance\n"); + return UNKNOWN_ERROR; + } err = codec->configure(format, NULL, NULL, MediaCodec::CONFIGURE_FLAG_ENCODE); if (err != NO_ERROR) { -- cgit v1.1 From 88c3c4acb13a93209b3572fecd585099defec184 Mon Sep 17 00:00:00 2001 From: Mathias Agopian Date: Wed, 14 Aug 2013 17:08:40 -0700 Subject: fix build. optional tests broke Change-Id: Ifb38fb2a7bd9c3d6305726f8e6d661be05cdcf7e --- media/libstagefright/tests/SurfaceMediaSource_test.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp index a5459fe..49ffcd6 100644 --- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp +++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp @@ -23,6 +23,8 @@ #include #include +#include + #include #include -- cgit v1.1 From c85a986af84962515c10499faab3dc8f9a0948d4 Mon Sep 17 00:00:00 2001 From: Ying Wang Date: Wed, 14 Aug 2013 20:39:49 -0700 Subject: Fix build. 
Change-Id: If87636a654c8cfb2449e246df213fb24b08b4ad3 --- cmds/stagefright/stagefright.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 797e0b6..030bf1b 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -51,6 +51,7 @@ #include +#include #include #include -- cgit v1.1 From cbaffcffee6418d678806e63097c19fe26d48fe0 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Wed, 14 Aug 2013 18:30:38 -0700 Subject: Add MEDIA_STARTED/PAUSED/STOPPED events to media players This is needed for the MediaTimeProvider java interface, so it does not continually poll for current media time. Note: NuPlayer and AwesomePlayer do not correctly handle stop (pause instead), so for those we will signal PAUSED. Signed-off-by: Lajos Molnar Change-Id: I3c61e1bda475f131323f475c18a42e3ec66c9ae1 Bug: 10326117 --- include/media/mediaplayer.h | 3 +++ media/libmediaplayerservice/MidiFile.cpp | 6 +++++ media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 3 +++ .../nuplayer/NuPlayerDriver.cpp | 5 ++++ .../nuplayer/NuPlayerRenderer.cpp | 30 ++++++++++++++++++++++ .../nuplayer/NuPlayerRenderer.h | 6 +++++ media/libstagefright/AwesomePlayer.cpp | 22 ++++++++++++++++ media/libstagefright/include/AwesomePlayer.h | 3 +++ 8 files changed, 78 insertions(+) diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h index 14381c7..1afd7f7 100644 --- a/include/media/mediaplayer.h +++ b/include/media/mediaplayer.h @@ -42,6 +42,9 @@ enum media_event_type { MEDIA_BUFFERING_UPDATE = 3, MEDIA_SEEK_COMPLETE = 4, MEDIA_SET_VIDEO_SIZE = 5, + MEDIA_STARTED = 6, + MEDIA_PAUSED = 7, + MEDIA_STOPPED = 8, MEDIA_TIMED_TEXT = 99, MEDIA_ERROR = 100, MEDIA_INFO = 200, diff --git a/media/libmediaplayerservice/MidiFile.cpp b/media/libmediaplayerservice/MidiFile.cpp index 270b872..0a6aa90 100644 --- a/media/libmediaplayerservice/MidiFile.cpp +++ b/media/libmediaplayerservice/MidiFile.cpp @@ -220,6 +220,9 @@ status_t MidiFile::start() } mRender = true; + if (mState == EAS_STATE_PLAY) { + sendEvent(MEDIA_STARTED); + } // wake up render thread ALOGV(" wakeup render thread"); @@ -242,6 +245,7 @@ status_t MidiFile::stop() } } mPaused = false; + sendEvent(MEDIA_STOPPED); return NO_ERROR; } @@ -279,6 +283,7 @@ status_t MidiFile::pause() return ERROR_EAS_FAILURE; } mPaused = true; + sendEvent(MEDIA_PAUSED); return NO_ERROR; } @@ -382,6 +387,7 @@ status_t MidiFile::reset() status_t MidiFile::reset_nosync() { ALOGV("MidiFile::reset_nosync"); + sendEvent(MEDIA_STOPPED); // close file if (mEasHandle) { EAS_CloseFile(mEasData, mEasHandle); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index 7e81035..b411f34 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -731,6 +731,9 @@ void NuPlayer::onMessageReceived(const sp &msg) { ALOGV("renderer %s flush completed.", audio ? 
"audio" : "video"); } else if (what == Renderer::kWhatVideoRenderingStart) { notifyListener(MEDIA_INFO, MEDIA_INFO_RENDERING_START, 0); + } else if (what == Renderer::kWhatMediaRenderingStart) { + ALOGV("media rendering started"); + notifyListener(MEDIA_STARTED, 0, 0); } break; } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index 68b9623..cf0373c 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -255,6 +255,7 @@ status_t NuPlayerDriver::pause() { return OK; case STATE_RUNNING: + notifyListener(MEDIA_PAUSED); mPlayer->pause(); break; @@ -287,6 +288,8 @@ status_t NuPlayerDriver::seekTo(int msec) { case STATE_PAUSED: { mAtEOS = false; + // seeks can take a while, so we essentially paused + notifyListener(MEDIA_PAUSED); mPlayer->seekToAsync(seekTimeUs); break; } @@ -345,6 +348,8 @@ status_t NuPlayerDriver::reset() { break; } + notifyListener(MEDIA_STOPPED); + mState = STATE_RESET_IN_PROGRESS; mPlayer->resetAsync(); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp index b543d9d..3b2784b 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp @@ -50,6 +50,8 @@ NuPlayer::Renderer::Renderer( mSyncQueues(false), mPaused(false), mVideoRenderingStarted(false), + mVideoRenderingStartGeneration(0), + mAudioRenderingStartGeneration(0), mLastPositionUpdateUs(-1ll), mVideoLateByUs(0ll) { } @@ -220,6 +222,23 @@ void NuPlayer::Renderer::signalAudioSinkChanged() { (new AMessage(kWhatAudioSinkChanged, id()))->post(); } +void NuPlayer::Renderer::prepareForMediaRenderingStart() { + mAudioRenderingStartGeneration = mAudioQueueGeneration; + mVideoRenderingStartGeneration = mVideoQueueGeneration; +} + +void NuPlayer::Renderer::notifyIfMediaRenderingStarted() { + if (mVideoRenderingStartGeneration == mVideoQueueGeneration && + mAudioRenderingStartGeneration == mAudioQueueGeneration) { + mVideoRenderingStartGeneration = -1; + mAudioRenderingStartGeneration = -1; + + sp notify = mNotify->dup(); + notify->setInt32("what", kWhatMediaRenderingStart); + notify->post(); + } +} + bool NuPlayer::Renderer::onDrainAudioQueue() { uint32_t numFramesPlayed; if (mAudioSink->getPosition(&numFramesPlayed) != OK) { @@ -299,6 +318,8 @@ bool NuPlayer::Renderer::onDrainAudioQueue() { numBytesAvailableToWrite -= copy; size_t copiedFrames = copy / mAudioSink->frameSize(); mNumFramesWritten += copiedFrames; + + notifyIfMediaRenderingStarted(); } notifyPosition(); @@ -405,6 +426,8 @@ void NuPlayer::Renderer::onDrainVideoQueue() { notifyVideoRenderingStart(); } + notifyIfMediaRenderingStarted(); + notifyPosition(); } @@ -552,6 +575,7 @@ void NuPlayer::Renderer::onFlush(const sp &msg) { // is flushed. syncQueuesDone(); + ALOGV("flushing %s", audio ? 
"audio" : "video"); if (audio) { flushQueue(&mAudioQueue); @@ -560,6 +584,8 @@ void NuPlayer::Renderer::onFlush(const sp &msg) { mDrainAudioQueuePending = false; ++mAudioQueueGeneration; + + prepareForMediaRenderingStart(); } else { flushQueue(&mVideoQueue); @@ -568,6 +594,8 @@ void NuPlayer::Renderer::onFlush(const sp &msg) { mDrainVideoQueuePending = false; ++mVideoQueueGeneration; + + prepareForMediaRenderingStart(); } notifyFlushComplete(audio); @@ -658,6 +686,8 @@ void NuPlayer::Renderer::onPause() { mDrainVideoQueuePending = false; ++mVideoQueueGeneration; + prepareForMediaRenderingStart(); + if (mHasAudio) { mAudioSink->pause(); } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h index c9796e2..94a05ea 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h @@ -53,6 +53,7 @@ struct NuPlayer::Renderer : public AHandler { kWhatFlushComplete = 'fluC', kWhatPosition = 'posi', kWhatVideoRenderingStart = 'vdrd', + kWhatMediaRenderingStart = 'mdrd', }; protected: @@ -106,6 +107,8 @@ private: bool mPaused; bool mVideoRenderingStarted; + int32_t mVideoRenderingStartGeneration; + int32_t mAudioRenderingStartGeneration; int64_t mLastPositionUpdateUs; int64_t mVideoLateByUs; @@ -116,6 +119,9 @@ private: void onDrainVideoQueue(); void postDrainVideoQueue(); + void prepareForMediaRenderingStart(); + void notifyIfMediaRenderingStarted(); + void onQueueBuffer(const sp &msg); void onQueueEOS(const sp &msg); void onFlush(const sp &msg); diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 79f2c91..52e178e 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -191,6 +191,8 @@ AwesomePlayer::AwesomePlayer() mTimeSource(NULL), mVideoRenderingStarted(false), mVideoRendererIsPreview(false), + mMediaRenderingStartGeneration(0), + mStartGeneration(0), mAudioPlayer(NULL), mDisplayWidth(0), mDisplayHeight(0), @@ -491,6 +493,8 @@ void AwesomePlayer::reset_l() { mDisplayWidth = 0; mDisplayHeight = 0; + notifyListener_l(MEDIA_STOPPED); + if (mDecryptHandle != NULL) { mDrmManagerClient->setPlaybackStatus(mDecryptHandle, Playback::STOP, 0); @@ -1025,6 +1029,13 @@ void AwesomePlayer::createAudioPlayer_l() seekAudioIfNecessary_l(); } +void AwesomePlayer::notifyIfMediaStarted_l() { + if (mMediaRenderingStartGeneration == mStartGeneration) { + mMediaRenderingStartGeneration = -1; + notifyListener_l(MEDIA_STARTED); + } +} + status_t AwesomePlayer::startAudioPlayer_l(bool sendErrorNotification) { CHECK(!(mFlags & AUDIO_RUNNING)); status_t err = OK; @@ -1061,6 +1072,8 @@ status_t AwesomePlayer::startAudioPlayer_l(bool sendErrorNotification) { // We will have finished the seek while starting the audio player. 
postAudioSeekComplete(); + } else { + notifyIfMediaStarted_l(); } } else { err = mAudioPlayer->resume(); @@ -1201,6 +1214,9 @@ status_t AwesomePlayer::pause_l(bool at_eos) { return OK; } + notifyListener_l(MEDIA_PAUSED); + mMediaRenderingStartGeneration = ++mStartGeneration; + cancelPlayerEvents(true /* keepNotifications */); if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) { @@ -1389,6 +1405,9 @@ status_t AwesomePlayer::seekTo_l(int64_t timeUs) { mSeekTimeUs = timeUs; modifyFlags((AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS), CLEAR); + notifyListener_l(MEDIA_PAUSED); + mMediaRenderingStartGeneration = ++mStartGeneration; + seekAudioIfNecessary_l(); if (mFlags & TEXTPLAYER_INITIALIZED) { @@ -1903,6 +1922,7 @@ void AwesomePlayer::onVideoEvent() { notifyListener_l(MEDIA_INFO, MEDIA_INFO_RENDERING_START); } + notifyIfMediaStarted_l(); } mVideoBuffer->release(); @@ -1998,6 +2018,8 @@ void AwesomePlayer::onCheckAudioStatus() { } mSeeking = NO_SEEK; + + notifyIfMediaStarted_l(); } status_t finalStatus; diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h index d3c74e2..b001cf4 100644 --- a/media/libstagefright/include/AwesomePlayer.h +++ b/media/libstagefright/include/AwesomePlayer.h @@ -169,6 +169,8 @@ private: sp mVideoRenderer; bool mVideoRenderingStarted; bool mVideoRendererIsPreview; + int32_t mMediaRenderingStartGeneration; + int32_t mStartGeneration; ssize_t mActiveAudioTrackIndex; sp mAudioTrack; @@ -294,6 +296,7 @@ private: void finishSeekIfNecessary(int64_t videoTimeUs); void ensureCacheIsFetching_l(); + void notifyIfMediaStarted_l(); void createAudioPlayer_l(); status_t startAudioPlayer_l(bool sendErrorNotification = true); -- cgit v1.1 From 6047633622328711cc6b23667c8d6381bae179bf Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Mon, 22 Jul 2013 15:54:42 -0700 Subject: Remove obsolete drm manager code bug: 9545965 Change-Id: Ia6e09efa826b1349d027045e782980daeb7d7596 (cherry picked from commit f355f18aa2cc1706761e373fe19298a9ccc9c75a) --- drm/common/IDrmManagerService.cpp | 25 ---------------------- drm/drmserver/DrmManager.cpp | 15 ------------- drm/drmserver/DrmManagerService.cpp | 5 ----- drm/libdrmframework/DrmManagerClientImpl.cpp | 9 -------- drm/libdrmframework/include/DrmManager.h | 2 -- drm/libdrmframework/include/DrmManagerClientImpl.h | 11 ---------- drm/libdrmframework/include/DrmManagerService.h | 2 -- drm/libdrmframework/include/IDrmManagerService.h | 4 ---- 8 files changed, 73 deletions(-) diff --git a/drm/common/IDrmManagerService.cpp b/drm/common/IDrmManagerService.cpp index 43f64f2..9ac7118 100644 --- a/drm/common/IDrmManagerService.cpp +++ b/drm/common/IDrmManagerService.cpp @@ -153,18 +153,6 @@ status_t BpDrmManagerService::setDrmServiceListener( return reply.readInt32(); } -status_t BpDrmManagerService::installDrmEngine(int uniqueId, const String8& drmEngineFile) { - ALOGV("Install DRM Engine"); - Parcel data, reply; - - data.writeInterfaceToken(IDrmManagerService::getInterfaceDescriptor()); - data.writeInt32(uniqueId); - data.writeString8(drmEngineFile); - - remote()->transact(INSTALL_DRM_ENGINE, data, &reply); - return reply.readInt32(); -} - DrmConstraints* BpDrmManagerService::getConstraints( int uniqueId, const String8* path, const int action) { ALOGV("Get Constraints"); @@ -815,19 +803,6 @@ status_t BnDrmManagerService::onTransact( return DRM_NO_ERROR; } - case INSTALL_DRM_ENGINE: - { - ALOGV("BnDrmManagerService::onTransact :INSTALL_DRM_ENGINE"); - CHECK_INTERFACE(IDrmManagerService, data, reply); - - const 
int uniqueId = data.readInt32(); - const String8 engineFile = data.readString8(); - status_t status = installDrmEngine(uniqueId, engineFile); - - reply->writeInt32(status); - return DRM_NO_ERROR; - } - case GET_CONSTRAINTS_FROM_CONTENT: { ALOGV("BnDrmManagerService::onTransact :GET_CONSTRAINTS_FROM_CONTENT"); diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp index 999295a..70a06b4 100644 --- a/drm/drmserver/DrmManager.cpp +++ b/drm/drmserver/DrmManager.cpp @@ -185,21 +185,6 @@ DrmMetadata* DrmManager::getMetadata(int uniqueId, const String8* path) { return NULL; } -status_t DrmManager::installDrmEngine(int uniqueId, const String8& absolutePath) { - Mutex::Autolock _l(mLock); - mPlugInManager.loadPlugIn(absolutePath); - - IDrmEngine& rDrmEngine = mPlugInManager.getPlugIn(absolutePath); - rDrmEngine.initialize(uniqueId); - rDrmEngine.setOnInfoListener(uniqueId, this); - - DrmSupportInfo* info = rDrmEngine.getSupportInfo(0); - mSupportInfoToPlugInIdMap.add(*info, absolutePath); - delete info; - - return DRM_NO_ERROR; -} - bool DrmManager::canHandle(int uniqueId, const String8& path, const String8& mimeType) { Mutex::Autolock _l(mLock); const String8 plugInId = getSupportedPlugInId(mimeType); diff --git a/drm/drmserver/DrmManagerService.cpp b/drm/drmserver/DrmManagerService.cpp index 746f506..d9f8b5c 100644 --- a/drm/drmserver/DrmManagerService.cpp +++ b/drm/drmserver/DrmManagerService.cpp @@ -87,11 +87,6 @@ status_t DrmManagerService::setDrmServiceListener( return DRM_NO_ERROR; } -status_t DrmManagerService::installDrmEngine(int uniqueId, const String8& drmEngineFile) { - ALOGV("Entering installDrmEngine"); - return mDrmManager->installDrmEngine(uniqueId, drmEngineFile); -} - DrmConstraints* DrmManagerService::getConstraints( int uniqueId, const String8* path, const int action) { ALOGV("Entering getConstraints from content"); diff --git a/drm/libdrmframework/DrmManagerClientImpl.cpp b/drm/libdrmframework/DrmManagerClientImpl.cpp index fb0439e..2185422 100644 --- a/drm/libdrmframework/DrmManagerClientImpl.cpp +++ b/drm/libdrmframework/DrmManagerClientImpl.cpp @@ -86,15 +86,6 @@ status_t DrmManagerClientImpl::setOnInfoListener( (NULL != infoListener.get()) ? 
this : NULL); } -status_t DrmManagerClientImpl::installDrmEngine( - int uniqueId, const String8& drmEngineFile) { - status_t status = DRM_ERROR_UNKNOWN; - if (EMPTY_STRING != drmEngineFile) { - status = getDrmManagerService()->installDrmEngine(uniqueId, drmEngineFile); - } - return status; -} - DrmConstraints* DrmManagerClientImpl::getConstraints( int uniqueId, const String8* path, const int action) { DrmConstraints *drmConstraints = NULL; diff --git a/drm/libdrmframework/include/DrmManager.h b/drm/libdrmframework/include/DrmManager.h index c9167d4..073ea2f 100644 --- a/drm/libdrmframework/include/DrmManager.h +++ b/drm/libdrmframework/include/DrmManager.h @@ -70,8 +70,6 @@ public: status_t setDrmServiceListener( int uniqueId, const sp& drmServiceListener); - status_t installDrmEngine(int uniqueId, const String8& drmEngineFile); - DrmConstraints* getConstraints(int uniqueId, const String8* path, const int action); DrmMetadata* getMetadata(int uniqueId, const String8* path); diff --git a/drm/libdrmframework/include/DrmManagerClientImpl.h b/drm/libdrmframework/include/DrmManagerClientImpl.h index 2aa493f..88912a5 100644 --- a/drm/libdrmframework/include/DrmManagerClientImpl.h +++ b/drm/libdrmframework/include/DrmManagerClientImpl.h @@ -397,17 +397,6 @@ public: status_t notify(const DrmInfoEvent& event); private: - /** - * Install new DRM Engine Plug-in at the runtime - * - * @param[in] uniqueId Unique identifier for a session - * @param[in] drmEngine Shared Object(so) File in which DRM Engine defined - * @return status_t - * Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure - */ - status_t installDrmEngine(int uniqueId, const String8& drmEngineFile); - -private: Mutex mLock; sp mOnInfoListener; diff --git a/drm/libdrmframework/include/DrmManagerService.h b/drm/libdrmframework/include/DrmManagerService.h index 1a8c2ae..304ac22 100644 --- a/drm/libdrmframework/include/DrmManagerService.h +++ b/drm/libdrmframework/include/DrmManagerService.h @@ -57,8 +57,6 @@ public: status_t setDrmServiceListener( int uniqueId, const sp& drmServiceListener); - status_t installDrmEngine(int uniqueId, const String8& drmEngineFile); - DrmConstraints* getConstraints(int uniqueId, const String8* path, const int action); DrmMetadata* getMetadata(int uniqueId, const String8* path); diff --git a/drm/libdrmframework/include/IDrmManagerService.h b/drm/libdrmframework/include/IDrmManagerService.h index a7d21c5..5dab338 100644 --- a/drm/libdrmframework/include/IDrmManagerService.h +++ b/drm/libdrmframework/include/IDrmManagerService.h @@ -92,8 +92,6 @@ public: virtual status_t setDrmServiceListener( int uniqueId, const sp& infoListener) = 0; - virtual status_t installDrmEngine(int uniqueId, const String8& drmEngineFile) = 0; - virtual DrmConstraints* getConstraints( int uniqueId, const String8* path, const int action) = 0; @@ -181,8 +179,6 @@ public: virtual status_t setDrmServiceListener( int uniqueId, const sp& infoListener); - virtual status_t installDrmEngine(int uniqueId, const String8& drmEngineFile); - virtual DrmConstraints* getConstraints(int uniqueId, const String8* path, const int action); virtual DrmMetadata* getMetadata(int uniqueId, const String8* path); -- cgit v1.1 From 0f61d8f14aa368c9cd7076528e8096e10ed100a0 Mon Sep 17 00:00:00 2001 From: Ruben Brunk Date: Thu, 8 Aug 2013 13:07:18 -0700 Subject: Refactor CameraService to handle errors properly. Bug: 10361136 -Connect calls now return status_t error flags. 
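The before/after calling convention is easiest to see in isolation. The following minimal sketch uses plain standard C++ with made-up names (FakeCamera, connectOld, connectNew) rather than the real ICameraService API; it only illustrates why returning a status_t together with an out-parameter is preferable to returning a possibly-NULL object when the caller needs to distinguish failure causes.

    #include <cstdio>
    #include <memory>

    typedef int status_t;                      // stand-in for Android's status_t
    enum { OK = 0, ENODEV_ERR = -19 };         // simplified error codes for the sketch

    struct FakeCamera { int id; };

    // Old shape: every failure collapses into a null pointer.
    std::shared_ptr<FakeCamera> connectOld(int cameraId) {
        if (cameraId < 0) return nullptr;      // caller cannot tell *why* it failed
        return std::make_shared<FakeCamera>(FakeCamera{cameraId});
    }

    // New shape: the return value carries the error, the device comes back through
    // an out-parameter, mirroring the /*out*/ device reference added to connect() below.
    status_t connectNew(int cameraId, std::shared_ptr<FakeCamera>& device) {
        if (cameraId < 0) return ENODEV_ERR;   // distinct, actionable error code
        device = std::make_shared<FakeCamera>(FakeCamera{cameraId});
        return OK;
    }

    int main() {
        std::shared_ptr<FakeCamera> cam;
        status_t err = connectNew(-1, cam);
        if (err != OK) {
            std::printf("connect failed with error %d\n", err);
        }
        return 0;
    }

With the old shape, permission denials, missing cameras, and unsupported HAL versions all look the same to the caller (NULL); with the new shape the Binder reply can carry the specific error code, which is what the hunks below do for connect(), connectPro(), and connectDevice().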
Change-Id: Idca453b111e5df31327f6c99ebe853bb2e332b95 --- camera/Camera.cpp | 3 + camera/CameraBase.cpp | 11 +- camera/ICameraService.cpp | 81 ++++++++++---- camera/ProCamera.cpp | 4 +- include/camera/Camera.h | 6 + include/camera/CameraBase.h | 7 +- include/camera/ICameraService.h | 18 ++- include/camera/ProCamera.h | 7 ++ services/camera/libcameraservice/CameraService.cpp | 121 +++++++++++---------- services/camera/libcameraservice/CameraService.h | 24 ++-- 10 files changed, 184 insertions(+), 98 deletions(-) diff --git a/camera/Camera.cpp b/camera/Camera.cpp index fd78572..22016a9 100644 --- a/camera/Camera.cpp +++ b/camera/Camera.cpp @@ -39,6 +39,9 @@ Camera::Camera(int cameraId) { } +CameraTraits::TCamConnectService CameraTraits::fnConnectService = + &ICameraService::connect; + // construct a camera client from an existing camera remote sp Camera::create(const sp& camera) { diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp index c25c5fd..55376b0 100644 --- a/camera/CameraBase.cpp +++ b/camera/CameraBase.cpp @@ -92,20 +92,25 @@ const sp& CameraBase::getCameraService() template sp CameraBase::connect(int cameraId, - const String16& clientPackageName, + const String16& clientPackageName, int clientUid) { ALOGV("%s: connect", __FUNCTION__); sp c = new TCam(cameraId); sp cl = c; + status_t status = NO_ERROR; const sp& cs = getCameraService(); + if (cs != 0) { - c->mCamera = cs->connect(cl, cameraId, clientPackageName, clientUid); + TCamConnectService fnConnectService = TCamTraits::fnConnectService; + status = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid, + /*out*/ c->mCamera); } - if (c->mCamera != 0) { + if (status == OK && c->mCamera != 0) { c->mCamera->asBinder()->linkToDeath(c); c->mStatus = NO_ERROR; } else { + ALOGW("An error occurred while connecting to camera: %d", cameraId); c.clear(); } return c; diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp index 876a2df..3debe22 100644 --- a/camera/ICameraService.cpp +++ b/camera/ICameraService.cpp @@ -120,8 +120,10 @@ public: } // connect to camera service (android.hardware.Camera) - virtual sp connect(const sp& cameraClient, int cameraId, - const String16 &clientPackageName, int clientUid) + virtual status_t connect(const sp& cameraClient, int cameraId, + const String16 &clientPackageName, int clientUid, + /*out*/ + sp& device) { Parcel data, reply; data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); @@ -131,13 +133,19 @@ public: data.writeInt32(clientUid); remote()->transact(BnCameraService::CONNECT, data, &reply); - if (readExceptionCode(reply)) return NULL; - return interface_cast(reply.readStrongBinder()); + if (readExceptionCode(reply)) return -EPROTO; + status_t status = reply.readInt32(); + if (reply.readInt32() != 0) { + device = interface_cast(reply.readStrongBinder()); + } + return status; } // connect to camera service (pro client) - virtual sp connect(const sp& cameraCb, int cameraId, - const String16 &clientPackageName, int clientUid) + virtual status_t connectPro(const sp& cameraCb, int cameraId, + const String16 &clientPackageName, int clientUid, + /*out*/ + sp& device) { Parcel data, reply; data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); @@ -147,16 +155,22 @@ public: data.writeInt32(clientUid); remote()->transact(BnCameraService::CONNECT_PRO, data, &reply); - if (readExceptionCode(reply)) return NULL; - return interface_cast(reply.readStrongBinder()); + if (readExceptionCode(reply)) return -EPROTO; + status_t status = 
reply.readInt32(); + if (reply.readInt32() != 0) { + device = interface_cast(reply.readStrongBinder()); + } + return status; } // connect to camera service (android.hardware.camera2.CameraDevice) - virtual sp connect( + virtual status_t connectDevice( const sp& cameraCb, int cameraId, const String16& clientPackageName, - int clientUid) + int clientUid, + /*out*/ + sp& device) { Parcel data, reply; data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); @@ -166,8 +180,12 @@ public: data.writeInt32(clientUid); remote()->transact(BnCameraService::CONNECT_DEVICE, data, &reply); - if (readExceptionCode(reply)) return NULL; - return interface_cast(reply.readStrongBinder()); + if (readExceptionCode(reply)) return -EPROTO; + status_t status = reply.readInt32(); + if (reply.readInt32() != 0) { + device = interface_cast(reply.readStrongBinder()); + } + return status; } virtual status_t addListener(const sp& listener) @@ -228,10 +246,17 @@ status_t BnCameraService::onTransact( int32_t cameraId = data.readInt32(); const String16 clientName = data.readString16(); int32_t clientUid = data.readInt32(); - sp camera = connect(cameraClient, cameraId, - clientName, clientUid); + sp camera; + status_t status = connect(cameraClient, cameraId, + clientName, clientUid, /*out*/ camera); reply->writeNoException(); - reply->writeStrongBinder(camera->asBinder()); + reply->writeInt32(status); + if (camera != NULL) { + reply->writeInt32(1); + reply->writeStrongBinder(camera->asBinder()); + } else { + reply->writeInt32(0); + } return NO_ERROR; } break; case CONNECT_PRO: { @@ -241,10 +266,17 @@ status_t BnCameraService::onTransact( int32_t cameraId = data.readInt32(); const String16 clientName = data.readString16(); int32_t clientUid = data.readInt32(); - sp camera = connect(cameraClient, cameraId, - clientName, clientUid); + sp camera; + status_t status = connectPro(cameraClient, cameraId, + clientName, clientUid, /*out*/ camera); reply->writeNoException(); - reply->writeStrongBinder(camera->asBinder()); + reply->writeInt32(status); + if (camera != NULL) { + reply->writeInt32(1); + reply->writeStrongBinder(camera->asBinder()); + } else { + reply->writeInt32(0); + } return NO_ERROR; } break; case CONNECT_DEVICE: { @@ -254,10 +286,17 @@ status_t BnCameraService::onTransact( int32_t cameraId = data.readInt32(); const String16 clientName = data.readString16(); int32_t clientUid = data.readInt32(); - sp camera = connect(cameraClient, cameraId, - clientName, clientUid); + sp camera; + status_t status = connectDevice(cameraClient, cameraId, + clientName, clientUid, /*out*/ camera); reply->writeNoException(); - reply->writeStrongBinder(camera->asBinder()); + reply->writeInt32(status); + if (camera != NULL) { + reply->writeInt32(1); + reply->writeStrongBinder(camera->asBinder()); + } else { + reply->writeInt32(0); + } return NO_ERROR; } break; case ADD_LISTENER: { diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index f6c9ca1..577c760 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -26,7 +26,6 @@ #include #include -#include #include #include @@ -47,6 +46,9 @@ ProCamera::ProCamera(int cameraId) { } +CameraTraits::TCamConnectService CameraTraits::fnConnectService = + &ICameraService::connectPro; + ProCamera::~ProCamera() { diff --git a/include/camera/Camera.h b/include/camera/Camera.h index c34b3ea..81848b3 100644 --- a/include/camera/Camera.h +++ b/include/camera/Camera.h @@ -51,8 +51,14 @@ struct CameraTraits typedef CameraListener TCamListener; typedef ICamera TCamUser; typedef ICameraClient 
TCamCallbacks; + typedef status_t (ICameraService::*TCamConnectService)(const sp&, + int, const String16&, int, + /*out*/ + sp&); + static TCamConnectService fnConnectService; }; + class Camera : public CameraBase, public BnCameraClient diff --git a/include/camera/CameraBase.h b/include/camera/CameraBase.h index 9b08c0f..1b93157 100644 --- a/include/camera/CameraBase.h +++ b/include/camera/CameraBase.h @@ -54,9 +54,10 @@ template > class CameraBase : public IBinder::DeathRecipient { public: - typedef typename TCamTraits::TCamListener TCamListener; - typedef typename TCamTraits::TCamUser TCamUser; - typedef typename TCamTraits::TCamCallbacks TCamCallbacks; + typedef typename TCamTraits::TCamListener TCamListener; + typedef typename TCamTraits::TCamUser TCamUser; + typedef typename TCamTraits::TCamCallbacks TCamCallbacks; + typedef typename TCamTraits::TCamConnectService TCamConnectService; static sp connect(int cameraId, const String16& clientPackageName, diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h index fa715b7..0e10699 100644 --- a/include/camera/ICameraService.h +++ b/include/camera/ICameraService.h @@ -71,21 +71,27 @@ public: * clientUid == USE_CALLING_UID, then the calling UID is used instead. Only * trusted callers can set a clientUid other than USE_CALLING_UID. */ - virtual sp connect(const sp& cameraClient, + virtual status_t connect(const sp& cameraClient, int cameraId, const String16& clientPackageName, - int clientUid) = 0; + int clientUid, + /*out*/ + sp& device) = 0; - virtual sp connect(const sp& cameraCb, + virtual status_t connectPro(const sp& cameraCb, int cameraId, const String16& clientPackageName, - int clientUid) = 0; + int clientUid, + /*out*/ + sp& device) = 0; - virtual sp connect( + virtual status_t connectDevice( const sp& cameraCb, int cameraId, const String16& clientPackageName, - int clientUid) = 0; + int clientUid, + /*out*/ + sp& device) = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index 3d1652f..d9ee662 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -25,6 +25,7 @@ #include #include #include +#include #include #include @@ -87,8 +88,14 @@ struct CameraTraits typedef ProCameraListener TCamListener; typedef IProCameraUser TCamUser; typedef IProCameraCallbacks TCamCallbacks; + typedef status_t (ICameraService::*TCamConnectService)(const sp&, + int, const String16&, int, + /*out*/ + sp&); + static TCamConnectService fnConnectService; }; + class ProCamera : public CameraBase, public BnProCameraCallbacks diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 359b3ca..bf9bc71 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -211,7 +211,7 @@ int32_t CameraService::getNumberOfCameras() { status_t CameraService::getCameraInfo(int cameraId, struct CameraInfo* cameraInfo) { if (!mModule) { - return NO_INIT; + return -ENODEV; } if (cameraId < 0 || cameraId >= mNumberOfCameras) { @@ -262,7 +262,7 @@ bool CameraService::isValidCameraId(int cameraId) { return false; } -bool CameraService::validateConnect(int cameraId, +status_t CameraService::validateConnect(int cameraId, /*inout*/ int& clientUid) const { @@ -275,19 +275,19 @@ bool CameraService::validateConnect(int cameraId, if (callingPid != getpid()) { ALOGE("CameraService::connect X (pid %d) rejected (don't 
trust clientUid)", callingPid); - return false; + return PERMISSION_DENIED; } } if (!mModule) { ALOGE("Camera HAL module not loaded"); - return false; + return -ENODEV; } if (cameraId < 0 || cameraId >= mNumberOfCameras) { ALOGE("CameraService::connect X (pid %d) rejected (invalid cameraId %d).", callingPid, cameraId); - return false; + return -ENODEV; } char value[PROPERTY_VALUE_MAX]; @@ -295,23 +295,23 @@ bool CameraService::validateConnect(int cameraId, if (strcmp(value, "1") == 0) { // Camera is disabled by DevicePolicyManager. ALOGI("Camera is disabled. connect X (pid %d) rejected", callingPid); - return false; + return -EACCES; } ICameraServiceListener::Status currentStatus = getStatus(cameraId); if (currentStatus == ICameraServiceListener::STATUS_NOT_PRESENT) { ALOGI("Camera is not plugged in," " connect X (pid %d) rejected", callingPid); - return false; + return -ENODEV; } else if (currentStatus == ICameraServiceListener::STATUS_ENUMERATING) { ALOGI("Camera is enumerating," " connect X (pid %d) rejected", callingPid); - return false; + return -EBUSY; } // Else don't check for STATUS_NOT_AVAILABLE. // -- It's done implicitly in canConnectUnsafe /w the mBusy array - return true; + return OK; } bool CameraService::canConnectUnsafe(int cameraId, @@ -358,11 +358,13 @@ bool CameraService::canConnectUnsafe(int cameraId, return true; } -sp CameraService::connect( +status_t CameraService::connect( const sp& cameraClient, int cameraId, const String16& clientPackageName, - int clientUid) { + int clientUid, + /*out*/ + sp& device) { String8 clientName8(clientPackageName); int callingPid = getCallingPid(); @@ -370,8 +372,9 @@ sp CameraService::connect( LOG1("CameraService::connect E (pid %d \"%s\", id %d)", callingPid, clientName8.string(), cameraId); - if (!validateConnect(cameraId, /*inout*/clientUid)) { - return NULL; + status_t status = validateConnect(cameraId, /*inout*/clientUid); + if (status != OK) { + return status; } @@ -382,9 +385,10 @@ sp CameraService::connect( if (!canConnectUnsafe(cameraId, clientPackageName, cameraClient->asBinder(), /*out*/clientTmp)) { - return NULL; + return -EBUSY; } else if (client.get() != NULL) { - return static_cast(clientTmp.get()); + device = static_cast(clientTmp.get()); + return OK; } int facing = -1; @@ -414,19 +418,18 @@ sp CameraService::connect( break; case -1: ALOGE("Invalid camera id %d", cameraId); - return NULL; + return BAD_VALUE; default: ALOGE("Unknown camera device HAL version: %d", deviceVersion); - return NULL; + return INVALID_OPERATION; } - if (!connectFinishUnsafe(client, - client->getRemote())) { + status_t status = connectFinishUnsafe(client, client->getRemote()); + if (status != OK) { // this is probably not recoverable.. 
maybe the client can try again // OK: we can only get here if we were originally in PRESENT state updateStatus(ICameraServiceListener::STATUS_PRESENT, cameraId); - - return NULL; + return status; } mClient[cameraId] = client; @@ -436,34 +439,38 @@ sp CameraService::connect( // important: release the mutex here so the client can call back // into the service from its destructor (can be at the end of the call) - return client; + device = client; + return OK; } -bool CameraService::connectFinishUnsafe(const sp& client, - const sp& remoteCallback) { - if (client->initialize(mModule) != OK) { - return false; +status_t CameraService::connectFinishUnsafe(const sp& client, + const sp& remoteCallback) { + status_t status = client->initialize(mModule); + if (status != OK) { + return status; } remoteCallback->linkToDeath(this); - return true; + return OK; } -sp CameraService::connect( +status_t CameraService::connectPro( const sp& cameraCb, int cameraId, const String16& clientPackageName, - int clientUid) + int clientUid, + /*out*/ + sp& device) { String8 clientName8(clientPackageName); int callingPid = getCallingPid(); LOG1("CameraService::connectPro E (pid %d \"%s\", id %d)", callingPid, clientName8.string(), cameraId); - - if (!validateConnect(cameraId, /*inout*/clientUid)) { - return NULL; + status_t status = validateConnect(cameraId, /*inout*/clientUid); + if (status != OK) { + return status; } sp client; @@ -474,7 +481,7 @@ sp CameraService::connect( if (!canConnectUnsafe(cameraId, clientPackageName, cameraCb->asBinder(), /*out*/client)) { - return NULL; + return -EBUSY; } } @@ -485,7 +492,7 @@ sp CameraService::connect( case CAMERA_DEVICE_API_VERSION_1_0: ALOGE("Camera id %d uses HALv1, doesn't support ProCamera", cameraId); - return NULL; + return -ENOTSUP; break; case CAMERA_DEVICE_API_VERSION_2_0: case CAMERA_DEVICE_API_VERSION_2_1: @@ -495,14 +502,15 @@ sp CameraService::connect( break; case -1: ALOGE("Invalid camera id %d", cameraId); - return NULL; + return BAD_VALUE; default: ALOGE("Unknown camera device HAL version: %d", deviceVersion); - return NULL; + return INVALID_OPERATION; } - if (!connectFinishUnsafe(client, client->getRemote())) { - return NULL; + status_t status = connectFinishUnsafe(client, client->getRemote()); + if (status != OK) { + return status; } mProClientList[cameraId].push(client); @@ -512,18 +520,18 @@ sp CameraService::connect( } // important: release the mutex here so the client can call back // into the service from its destructor (can be at the end of the call) - - return client; + device = client; + return OK; } -sp CameraService::connect( +status_t CameraService::connectDevice( const sp& cameraCb, int cameraId, const String16& clientPackageName, - int clientUid) + int clientUid, + /*out*/ + sp& device) { - // TODO: this function needs to return status_t - // so that we have an error code when things go wrong and the client is NULL String8 clientName8(clientPackageName); int callingPid = getCallingPid(); @@ -531,8 +539,9 @@ sp CameraService::connect( LOG1("CameraService::connectDevice E (pid %d \"%s\", id %d)", callingPid, clientName8.string(), cameraId); - if (!validateConnect(cameraId, /*inout*/clientUid)) { - return NULL; + status_t status = validateConnect(cameraId, /*inout*/clientUid); + if (status != OK) { + return status; } sp client; @@ -543,7 +552,7 @@ sp CameraService::connect( if (!canConnectUnsafe(cameraId, clientPackageName, cameraCb->asBinder(), /*out*/client)) { - return NULL; + return -EBUSY; } } @@ -560,10 +569,8 @@ sp CameraService::connect( 
switch(deviceVersion) { case CAMERA_DEVICE_API_VERSION_1_0: - ALOGE("Camera id %d uses old HAL, doesn't support CameraDevice", - cameraId); - return NULL; - break; + ALOGW("Camera using old HAL version: %d", deviceVersion); + return -ENOTSUP; // TODO: don't allow 2.0 Only allow 2.1 and higher case CAMERA_DEVICE_API_VERSION_2_0: case CAMERA_DEVICE_API_VERSION_2_1: @@ -573,17 +580,18 @@ sp CameraService::connect( break; case -1: ALOGE("Invalid camera id %d", cameraId); - return NULL; + return BAD_VALUE; default: ALOGE("Unknown camera device HAL version: %d", deviceVersion); - return NULL; + return INVALID_OPERATION; } - if (!connectFinishUnsafe(client, client->getRemote())) { + status_t status = connectFinishUnsafe(client, client->getRemote()); + if (status != OK) { // this is probably not recoverable.. maybe the client can try again // OK: we can only get here if we were originally in PRESENT state updateStatus(ICameraServiceListener::STATUS_PRESENT, cameraId); - return NULL; + return status; } LOG1("CameraService::connectDevice X (id %d, this pid is %d)", cameraId, @@ -594,7 +602,8 @@ sp CameraService::connect( // important: release the mutex here so the client can call back // into the service from its destructor (can be at the end of the call) - return client; + device = client; + return OK; } diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index 980eb97..3921cbd 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -72,15 +72,23 @@ public: virtual status_t getCameraInfo(int cameraId, struct CameraInfo* cameraInfo); - virtual sp connect(const sp& cameraClient, int cameraId, - const String16& clientPackageName, int clientUid); - virtual sp connect(const sp& cameraCb, - int cameraId, const String16& clientPackageName, int clientUid); - virtual sp connect( + virtual status_t connect(const sp& cameraClient, int cameraId, + const String16& clientPackageName, int clientUid, + /*out*/ + sp& device); + + virtual status_t connectPro(const sp& cameraCb, + int cameraId, const String16& clientPackageName, int clientUid, + /*out*/ + sp& device); + + virtual status_t connectDevice( const sp& cameraCb, int cameraId, const String16& clientPackageName, - int clientUid); + int clientUid, + /*out*/ + sp& device); virtual status_t addListener(const sp& listener); virtual status_t removeListener( @@ -308,7 +316,7 @@ private: virtual void onFirstRef(); // Step 1. Check if we can connect, before we acquire the service lock. - bool validateConnect(int cameraId, + status_t validateConnect(int cameraId, /*inout*/ int& clientUid) const; @@ -320,7 +328,7 @@ private: sp &client); // When connection is successful, initialize client and track its death - bool connectFinishUnsafe(const sp& client, + status_t connectFinishUnsafe(const sp& client, const sp& remoteCallback); virtual sp getClientByRemote(const wp& cameraClient); -- cgit v1.1 From 7fb865653293e665f48b31e791ca124e98c7d257 Mon Sep 17 00:00:00 2001 From: Jeff Brown Date: Thu, 15 Aug 2013 18:06:37 -0700 Subject: Remove call to enable/disable remote submix from test program. The submix will be controlled automatically from now on based on whether there is an active audio recorder. 
Bug: 10265163 Change-Id: Iea8164182daa037066f60974b54597d20db4903b --- media/libstagefright/wifi-display/wfd.cpp | 26 -------------------------- 1 file changed, 26 deletions(-) diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index 4607606..04cb319 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -138,28 +138,6 @@ void RemoteDisplayClient::waitUntilDone() { } } -static status_t enableAudioSubmix(bool enable) { - status_t err = AudioSystem::setDeviceConnectionState( - AUDIO_DEVICE_IN_REMOTE_SUBMIX, - enable - ? AUDIO_POLICY_DEVICE_STATE_AVAILABLE - : AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, - NULL /* device_address */); - - if (err != OK) { - return err; - } - - err = AudioSystem::setDeviceConnectionState( - AUDIO_DEVICE_OUT_REMOTE_SUBMIX, - enable - ? AUDIO_POLICY_DEVICE_STATE_AVAILABLE - : AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, - NULL /* device_address */); - - return err; -} - static void createSource(const AString &addr, int32_t port) { sp sm = defaultServiceManager(); sp binder = sm->getService(String16("media.player")); @@ -168,8 +146,6 @@ static void createSource(const AString &addr, int32_t port) { CHECK(service.get() != NULL); - enableAudioSubmix(true /* enable */); - String8 iface; iface.append(addr.c_str()); iface.append(StringPrintf(":%d", port).c_str()); @@ -182,8 +158,6 @@ static void createSource(const AString &addr, int32_t port) { display->dispose(); display.clear(); - - enableAudioSubmix(false /* enable */); } static void createFileSource( -- cgit v1.1 From 893a5642871114fca3b2a00c6ff8e5699ce3e3ed Mon Sep 17 00:00:00 2001 From: Jeff Brown Date: Fri, 16 Aug 2013 20:19:26 -0700 Subject: Enforce permission for recording from remote submix. 
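The gate itself is small: a helper answers whether the caller holds the capture permission, and the record path refuses remote-submix inputs when it does not. The sketch below is a simplified standalone illustration in plain C++, not the AudioFlinger code; callerHoldsPermission() and the kRemoteSubmixDevice constant are stand-ins for the real permission check and device classification.

    #include <cstdio>
    #include <string>

    typedef int status_t;
    enum { NO_ERROR = 0, PERMISSION_DENIED = -1 };

    // Stand-in for the real permission query against the calling PID/UID.
    static bool callerHoldsPermission(const std::string& perm) {
        (void)perm;
        return false;                          // pretend the caller has no special grants
    }

    // Stand-in for the real device classification; kRemoteSubmixDevice is made up.
    static bool deviceRequiresCaptureAudioOutputPermission(int inDevice) {
        const int kRemoteSubmixDevice = 0x100;
        return inDevice == kRemoteSubmixDevice;
    }

    static bool captureAudioOutputAllowed() {
        bool ok = callerHoldsPermission("android.permission.CAPTURE_AUDIO_OUTPUT");
        if (!ok) std::fprintf(stderr, "Request requires CAPTURE_AUDIO_OUTPUT\n");
        return ok;
    }

    // The record path rejects the request before any resources are allocated.
    status_t openRecord(int inDevice) {
        if (deviceRequiresCaptureAudioOutputPermission(inDevice)
                && !captureAudioOutputAllowed()) {
            return PERMISSION_DENIED;
        }
        return NO_ERROR;
    }

    int main() {
        std::printf("openRecord(remote submix input) -> %d\n", openRecord(0x100));
        return 0;
    }

The actual change, in the hunks that follow, performs the same early rejection inside AudioFlinger::openRecord() using checkCallingPermission() for android.permission.CAPTURE_AUDIO_OUTPUT and audio_is_remote_submix_device() on the thread's input device.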
Bug: 10265163 Change-Id: I2395036bac6e10baeaf6ea4bc3093bd9dd98742d --- services/audioflinger/AudioFlinger.cpp | 10 ++++++++++ services/audioflinger/ServiceUtilities.cpp | 9 +++++++++ services/audioflinger/ServiceUtilities.h | 1 + 3 files changed, 20 insertions(+) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 3d65c44..b8a6b37 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1206,6 +1206,10 @@ void AudioFlinger::NotificationClient::binderDied(const wp& who) // ---------------------------------------------------------------------------- +static bool deviceRequiresCaptureAudioOutputPermission(audio_devices_t inDevice) { + return audio_is_remote_submix_device(inDevice); +} + sp AudioFlinger::openRecord( audio_io_handle_t input, uint32_t sampleRate, @@ -1246,6 +1250,12 @@ sp AudioFlinger::openRecord( goto Exit; } + if (deviceRequiresCaptureAudioOutputPermission(thread->inDevice()) + && !captureAudioOutputAllowed()) { + lStatus = PERMISSION_DENIED; + goto Exit; + } + pid_t pid = IPCThreadState::self()->getCallingPid(); client = registerPid_l(pid); diff --git a/services/audioflinger/ServiceUtilities.cpp b/services/audioflinger/ServiceUtilities.cpp index d15bd04..9ee513b 100644 --- a/services/audioflinger/ServiceUtilities.cpp +++ b/services/audioflinger/ServiceUtilities.cpp @@ -34,6 +34,15 @@ bool recordingAllowed() { return ok; } +bool captureAudioOutputAllowed() { + if (getpid_cached == IPCThreadState::self()->getCallingPid()) return true; + static const String16 sCaptureAudioOutput("android.permission.CAPTURE_AUDIO_OUTPUT"); + // don't use PermissionCache; this is not a system permission + bool ok = checkCallingPermission(sCaptureAudioOutput); + if (!ok) ALOGE("Request requires android.permission.CAPTURE_AUDIO_OUTPUT"); + return ok; +} + bool settingsAllowed() { if (getpid_cached == IPCThreadState::self()->getCallingPid()) return true; static const String16 sAudioSettings("android.permission.MODIFY_AUDIO_SETTINGS"); diff --git a/services/audioflinger/ServiceUtilities.h b/services/audioflinger/ServiceUtilities.h index 80cecba..175cd28 100644 --- a/services/audioflinger/ServiceUtilities.h +++ b/services/audioflinger/ServiceUtilities.h @@ -21,6 +21,7 @@ namespace android { extern pid_t getpid_cached; bool recordingAllowed(); +bool captureAudioOutputAllowed(); bool settingsAllowed(); bool dumpAllowed(); -- cgit v1.1 From dcb89b3b505522efde173c105a851c412f947178 Mon Sep 17 00:00:00 2001 From: Chong Zhang Date: Tue, 6 Aug 2013 09:44:47 -0700 Subject: MediaPlayer: add listener for raw track data Bug: 10326117 Change-Id: I2c0bdf8adc67b11f8dc633423bee66897548f181 --- include/media/mediaplayer.h | 2 + media/libmedia/mediaplayer.cpp | 3 + .../nuplayer/HTTPLiveSource.cpp | 58 ++++++++++++- .../nuplayer/HTTPLiveSource.h | 4 + media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 87 +++++++++++++++++++- media/libmediaplayerservice/nuplayer/NuPlayer.h | 6 +- .../nuplayer/NuPlayerDriver.cpp | 22 ++++- .../nuplayer/NuPlayerDriver.h | 2 +- .../nuplayer/NuPlayerSource.h | 9 ++ media/libstagefright/httplive/Android.mk | 1 + media/libstagefright/httplive/LiveSession.cpp | 59 +++++++++++--- media/libstagefright/httplive/LiveSession.h | 9 +- media/libstagefright/httplive/M3UParser.cpp | 95 +++++++++++++++++++++- media/libstagefright/httplive/M3UParser.h | 4 + media/libstagefright/httplive/PlaylistFetcher.cpp | 15 +++- media/libstagefright/httplive/PlaylistFetcher.h | 3 +- 16 files changed, 354 insertions(+), 25 
deletions(-) diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h index 1afd7f7..923c8b2 100644 --- a/include/media/mediaplayer.h +++ b/include/media/mediaplayer.h @@ -48,6 +48,7 @@ enum media_event_type { MEDIA_TIMED_TEXT = 99, MEDIA_ERROR = 100, MEDIA_INFO = 200, + MEDIA_SUBTITLE_DATA = 201, }; // Generic error codes for the media player framework. Errors are fatal, the @@ -176,6 +177,7 @@ enum media_track_type { MEDIA_TRACK_TYPE_VIDEO = 1, MEDIA_TRACK_TYPE_AUDIO = 2, MEDIA_TRACK_TYPE_TIMEDTEXT = 3, + MEDIA_TRACK_TYPE_SUBTITLE = 4, }; // ---------------------------------------------------------------------------- diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index 056cc0a..4323d0c 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -756,6 +756,9 @@ void MediaPlayer::notify(int msg, int ext1, int ext2, const Parcel *obj) case MEDIA_TIMED_TEXT: ALOGV("Received timed text message"); break; + case MEDIA_SUBTITLE_DATA: + ALOGV("Received subtitle data message"); + break; default: ALOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2); break; diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp index c8901ce..d8b35d7 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp @@ -43,7 +43,8 @@ NuPlayer::HTTPLiveSource::HTTPLiveSource( mUID(uid), mFlags(0), mFinalResult(OK), - mOffset(0) { + mOffset(0), + mFetchSubtitleDataGeneration(0) { if (headers) { mExtraHeaders = *headers; @@ -120,6 +121,28 @@ status_t NuPlayer::HTTPLiveSource::getDuration(int64_t *durationUs) { return mLiveSession->getDuration(durationUs); } +status_t NuPlayer::HTTPLiveSource::getTrackInfo(Parcel *reply) const { + return mLiveSession->getTrackInfo(reply); +} + +status_t NuPlayer::HTTPLiveSource::selectTrack(size_t trackIndex, bool select) { + status_t err = mLiveSession->selectTrack(trackIndex, select); + + if (err == OK) { + mFetchSubtitleDataGeneration++; + if (select) { + sp msg = new AMessage(kWhatFetchSubtitleData, id()); + msg->setInt32("generation", mFetchSubtitleDataGeneration); + msg->post(); + } + } + + // LiveSession::selectTrack returns BAD_VALUE when selecting the currently + // selected track, or unselecting a non-selected track. In this case it's an + // no-op so we return OK. + return (err == OK || err == BAD_VALUE) ? OK : err; +} + status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) { return mLiveSession->seekTo(seekTimeUs); } @@ -132,6 +155,39 @@ void NuPlayer::HTTPLiveSource::onMessageReceived(const sp &msg) { break; } + case kWhatFetchSubtitleData: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mFetchSubtitleDataGeneration) { + // stale + break; + } + + sp buffer; + if (mLiveSession->dequeueAccessUnit( + LiveSession::STREAMTYPE_SUBTITLES, &buffer) == OK) { + sp notify = dupNotify(); + notify->setInt32("what", kWhatSubtitleData); + notify->setBuffer("buffer", buffer); + notify->post(); + + int64_t timeUs, baseUs, durationUs, delayUs; + CHECK(buffer->meta()->findInt64("baseUs", &baseUs)); + CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); + CHECK(buffer->meta()->findInt64("durationUs", &durationUs)); + delayUs = baseUs + timeUs - ALooper::GetNowUs(); + + msg->post(delayUs > 0ll ? 
delayUs : 0ll); + } else { + // try again in 1 second + msg->post(1000000ll); + } + + break; + } + default: Source::onMessageReceived(msg); break; diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h index aa9434b..bcc3f8b 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h @@ -41,6 +41,8 @@ struct NuPlayer::HTTPLiveSource : public NuPlayer::Source { virtual status_t feedMoreTSData(); virtual status_t getDuration(int64_t *durationUs); + virtual status_t getTrackInfo(Parcel *reply) const; + virtual status_t selectTrack(size_t trackIndex, bool select); virtual status_t seekTo(int64_t seekTimeUs); protected: @@ -56,6 +58,7 @@ private: enum { kWhatSessionNotify, + kWhatFetchSubtitleData, }; AString mURL; @@ -67,6 +70,7 @@ private: off64_t mOffset; sp mLiveLooper; sp mLiveSession; + int32_t mFetchSubtitleDataGeneration; void onSessionNotify(const sp &msg); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index b411f34..e1735fa 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -340,6 +340,46 @@ void NuPlayer::onMessageReceived(const sp &msg) { break; } + case kWhatGetTrackInfo: + { + uint32_t replyID; + CHECK(msg->senderAwaitsResponse(&replyID)); + + status_t err = INVALID_OPERATION; + if (mSource != NULL) { + Parcel* reply; + CHECK(msg->findPointer("reply", (void**)&reply)); + err = mSource->getTrackInfo(reply); + } + + sp response = new AMessage; + response->setInt32("err", err); + + response->postReply(replyID); + break; + } + + case kWhatSelectTrack: + { + uint32_t replyID; + CHECK(msg->senderAwaitsResponse(&replyID)); + + status_t err = INVALID_OPERATION; + if (mSource != NULL) { + size_t trackIndex; + int32_t select; + CHECK(msg->findSize("trackIndex", &trackIndex)); + CHECK(msg->findInt32("select", &select)); + err = mSource->selectTrack(trackIndex, select); + } + + sp response = new AMessage; + response->setInt32("err", err); + + response->postReply(replyID); + break; + } + case kWhatPollDuration: { int32_t generation; @@ -1045,7 +1085,7 @@ void NuPlayer::renderBuffer(bool audio, const sp &msg) { mRenderer->queueBuffer(audio, buffer, reply); } -void NuPlayer::notifyListener(int msg, int ext1, int ext2) { +void NuPlayer::notifyListener(int msg, int ext1, int ext2, const Parcel *in) { if (mDriver == NULL) { return; } @@ -1056,7 +1096,7 @@ void NuPlayer::notifyListener(int msg, int ext1, int ext2) { return; } - driver->notifyListener(msg, ext1, ext2); + driver->notifyListener(msg, ext1, ext2, in); } void NuPlayer::flushDecoder(bool audio, bool needShutdown) { @@ -1132,6 +1172,26 @@ status_t NuPlayer::setVideoScalingMode(int32_t mode) { return OK; } +status_t NuPlayer::getTrackInfo(Parcel* reply) const { + sp msg = new AMessage(kWhatGetTrackInfo, id()); + msg->setPointer("reply", reply); + + sp response; + status_t err = msg->postAndAwaitResponse(&response); + return err; +} + +status_t NuPlayer::selectTrack(size_t trackIndex, bool select) { + sp msg = new AMessage(kWhatSelectTrack, id()); + msg->setSize("trackIndex", trackIndex); + msg->setInt32("select", select); + + sp response; + status_t err = msg->postAndAwaitResponse(&response); + + return err; +} + void NuPlayer::schedulePollDuration() { sp msg = new AMessage(kWhatPollDuration, id()); msg->setInt32("generation", mPollDurationGeneration); @@ -1371,6 
+1431,29 @@ void NuPlayer::onSourceNotify(const sp &msg) { break; } + case Source::kWhatSubtitleData: + { + sp buffer; + CHECK(msg->findBuffer("buffer", &buffer)); + + int32_t trackIndex; + int64_t timeUs, durationUs; + CHECK(buffer->meta()->findInt32("trackIndex", &trackIndex)); + CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); + CHECK(buffer->meta()->findInt64("durationUs", &durationUs)); + + Parcel in; + in.writeInt32(trackIndex); + in.writeInt64(timeUs); + in.writeInt64(durationUs); + in.writeInt32(buffer->size()); + in.writeInt32(buffer->size()); + in.write(buffer->data(), buffer->size()); + + notifyListener(MEDIA_SUBTITLE_DATA, 0, 0, &in); + break; + } + case Source::kWhatQueueDecoderShutdown: { int32_t audio, video; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h index 8b6c8c1..13350f3 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h @@ -60,6 +60,8 @@ struct NuPlayer : public AHandler { void seekToAsync(int64_t seekTimeUs); status_t setVideoScalingMode(int32_t mode); + status_t getTrackInfo(Parcel* reply) const; + status_t selectTrack(size_t trackIndex, bool select); protected: virtual ~NuPlayer(); @@ -101,6 +103,8 @@ private: kWhatResume = 'rsme', kWhatPollDuration = 'polD', kWhatSourceNotify = 'srcN', + kWhatGetTrackInfo = 'gTrI', + kWhatSelectTrack = 'selT', }; wp mDriver; @@ -157,7 +161,7 @@ private: status_t feedDecoderInputData(bool audio, const sp &msg); void renderBuffer(bool audio, const sp &msg); - void notifyListener(int msg, int ext1, int ext2); + void notifyListener(int msg, int ext1, int ext2, const Parcel *in = NULL); void finishFlushIfPossible(); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index cf0373c..47834fd 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -392,6 +392,23 @@ status_t NuPlayerDriver::invoke(const Parcel &request, Parcel *reply) { return mPlayer->setVideoScalingMode(mode); } + case INVOKE_ID_GET_TRACK_INFO: + { + return mPlayer->getTrackInfo(reply); + } + + case INVOKE_ID_SELECT_TRACK: + { + int trackIndex = request.readInt32(); + return mPlayer->selectTrack(trackIndex, true /* select */); + } + + case INVOKE_ID_UNSELECT_TRACK: + { + int trackIndex = request.readInt32(); + return mPlayer->selectTrack(trackIndex, false /* select */); + } + default: { return INVALID_OPERATION; @@ -495,12 +512,13 @@ status_t NuPlayerDriver::dump(int fd, const Vector &args) const { return OK; } -void NuPlayerDriver::notifyListener(int msg, int ext1, int ext2) { +void NuPlayerDriver::notifyListener( + int msg, int ext1, int ext2, const Parcel *in) { if (msg == MEDIA_PLAYBACK_COMPLETE || msg == MEDIA_ERROR) { mAtEOS = true; } - sendEvent(msg, ext1, ext2); + sendEvent(msg, ext1, ext2, in); } void NuPlayerDriver::notifySetDataSourceCompleted(status_t err) { diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h index 5df0cfb..99f72a6 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h @@ -69,7 +69,7 @@ struct NuPlayerDriver : public MediaPlayerInterface { void notifyPosition(int64_t positionUs); void notifySeekComplete(); void notifyFrameStats(int64_t numFramesTotal, int64_t numFramesDropped); - void notifyListener(int msg, int ext1 = 0, 
int ext2 = 0); + void notifyListener(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL); void notifyFlagsChanged(uint32_t flags); protected: diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h index 81ffd21..e50533a 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h @@ -42,6 +42,7 @@ struct NuPlayer::Source : public AHandler { kWhatVideoSizeChanged, kWhatBufferingStart, kWhatBufferingEnd, + kWhatSubtitleData, kWhatQueueDecoderShutdown, }; @@ -71,6 +72,14 @@ struct NuPlayer::Source : public AHandler { return INVALID_OPERATION; } + virtual status_t getTrackInfo(Parcel* reply) const { + return INVALID_OPERATION; + } + + virtual status_t selectTrack(size_t trackIndex, bool select) { + return INVALID_OPERATION; + } + virtual status_t seekTo(int64_t seekTimeUs) { return INVALID_OPERATION; } diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk index 85bd492..f3529f9 100644 --- a/media/libstagefright/httplive/Android.mk +++ b/media/libstagefright/httplive/Android.mk @@ -14,6 +14,7 @@ LOCAL_C_INCLUDES:= \ $(TOP)/external/openssl/include LOCAL_SHARED_LIBRARIES := \ + libbinder \ libcrypto \ libcutils \ libmedia \ diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp index e91c60b..bd12ddc 100644 --- a/media/libstagefright/httplive/LiveSession.cpp +++ b/media/libstagefright/httplive/LiveSession.cpp @@ -59,6 +59,7 @@ LiveSession::LiveSession( mStreamMask(0), mCheckBandwidthGeneration(0), mLastDequeuedTimeUs(0ll), + mRealTimeBaseUs(0ll), mReconfigurationInProgress(false), mDisconnectReplyID(0) { if (mUIDValid) { @@ -122,11 +123,18 @@ status_t LiveSession::dequeueAccessUnit( type, extra == NULL ? 
"NULL" : extra->debugString().c_str()); } else if (err == OK) { - int64_t timeUs; - CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs)); - ALOGV("[%s] read buffer at time %lld us", streamStr, timeUs); - - mLastDequeuedTimeUs = timeUs; + if (stream == STREAMTYPE_AUDIO || stream == STREAMTYPE_VIDEO) { + int64_t timeUs; + CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs)); + ALOGV("[%s] read buffer at time %lld us", streamStr, timeUs); + + mLastDequeuedTimeUs = timeUs; + mRealTimeBaseUs = ALooper::GetNowUs() - timeUs; + } else if (stream == STREAMTYPE_SUBTITLES) { + (*accessUnit)->meta()->setInt32( + "trackIndex", mPlaylist->getSelectedIndex()); + (*accessUnit)->meta()->setInt64("baseUs", mRealTimeBaseUs); + } } else { ALOGI("[%s] encountered error %d", streamStr, err); } @@ -325,6 +333,12 @@ void LiveSession::onMessageReceived(const sp &msg) { break; } + case kWhatChangeConfiguration: + { + onChangeConfiguration(msg); + break; + } + case kWhatChangeConfiguration2: { onChangeConfiguration2(msg); @@ -438,7 +452,8 @@ void LiveSession::onConnect(const sp &msg) { mBandwidthItems.push(item); } - changeConfiguration(0ll /* timeUs */, initialBandwidthIndex); + changeConfiguration( + 0ll /* timeUs */, initialBandwidthIndex, true /* pickTrack */); } void LiveSession::finishDisconnect() { @@ -783,16 +798,31 @@ bool LiveSession::hasDynamicDuration() const { return false; } -void LiveSession::changeConfiguration(int64_t timeUs, size_t bandwidthIndex) { +status_t LiveSession::getTrackInfo(Parcel *reply) const { + return mPlaylist->getTrackInfo(reply); +} + +status_t LiveSession::selectTrack(size_t index, bool select) { + status_t err = mPlaylist->selectTrack(index, select); + if (err == OK) { + (new AMessage(kWhatChangeConfiguration, id()))->post(); + } + return err; +} + +void LiveSession::changeConfiguration( + int64_t timeUs, size_t bandwidthIndex, bool pickTrack) { CHECK(!mReconfigurationInProgress); mReconfigurationInProgress = true; mPrevBandwidthIndex = bandwidthIndex; - ALOGV("changeConfiguration => timeUs:%lld us, bwIndex:%d", - timeUs, bandwidthIndex); + ALOGV("changeConfiguration => timeUs:%lld us, bwIndex:%d, pickTrack:%d", + timeUs, bandwidthIndex, pickTrack); - mPlaylist->pickRandomMediaItems(); + if (pickTrack) { + mPlaylist->pickRandomMediaItems(); + } CHECK_LT(bandwidthIndex, mBandwidthItems.size()); const BandwidthItem &item = mBandwidthItems.itemAt(bandwidthIndex); @@ -862,6 +892,14 @@ void LiveSession::changeConfiguration(int64_t timeUs, size_t bandwidthIndex) { } } +void LiveSession::onChangeConfiguration(const sp &msg) { + if (!mReconfigurationInProgress) { + changeConfiguration(-1ll /* timeUs */, getBandwidthIndex()); + } else { + msg->post(1000000ll); // retry in 1 sec + } +} + void LiveSession::onChangeConfiguration2(const sp &msg) { mContinuation.clear(); @@ -948,6 +986,7 @@ void LiveSession::onChangeConfiguration3(const sp &msg) { if (timeUs < 0ll) { timeUs = mLastDequeuedTimeUs; } + mRealTimeBaseUs = ALooper::GetNowUs() - timeUs; mStreamMask = streamMask; mAudioURI = audioURI; diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h index b134725..99b480a8 100644 --- a/media/libstagefright/httplive/LiveSession.h +++ b/media/libstagefright/httplive/LiveSession.h @@ -31,6 +31,7 @@ struct HTTPBase; struct LiveDataSource; struct M3UParser; struct PlaylistFetcher; +struct Parcel; struct LiveSession : public AHandler { enum Flags { @@ -60,6 +61,8 @@ struct LiveSession : public AHandler { status_t seekTo(int64_t timeUs); 
status_t getDuration(int64_t *durationUs) const; + status_t getTrackInfo(Parcel *reply) const; + status_t selectTrack(size_t index, bool select); bool isSeekable() const; bool hasDynamicDuration() const; @@ -85,6 +88,7 @@ private: kWhatSeek = 'seek', kWhatFetcherNotify = 'notf', kWhatCheckBandwidth = 'bndw', + kWhatChangeConfiguration = 'chC0', kWhatChangeConfiguration2 = 'chC2', kWhatChangeConfiguration3 = 'chC3', kWhatFinishDisconnect2 = 'fin2', @@ -130,6 +134,7 @@ private: sp mContinuation; int64_t mLastDequeuedTimeUs; + int64_t mRealTimeBaseUs; bool mReconfigurationInProgress; uint32_t mDisconnectReplyID; @@ -151,7 +156,9 @@ private: static int SortByBandwidth(const BandwidthItem *, const BandwidthItem *); - void changeConfiguration(int64_t timeUs, size_t bandwidthIndex); + void changeConfiguration( + int64_t timeUs, size_t bandwidthIndex, bool pickTrack = false); + void onChangeConfiguration(const sp &msg); void onChangeConfiguration2(const sp &msg); void onChangeConfiguration3(const sp &msg); diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp index be66252..bc6d629 100644 --- a/media/libstagefright/httplive/M3UParser.cpp +++ b/media/libstagefright/httplive/M3UParser.cpp @@ -19,11 +19,12 @@ #include #include "M3UParser.h" - +#include #include #include #include #include +#include namespace android { @@ -55,6 +56,9 @@ struct M3UParser::MediaGroup : public RefBase { bool getActiveURI(AString *uri) const; void pickRandomMediaItems(); + status_t selectTrack(size_t index, bool select); + void getTrackInfo(Parcel* reply) const; + size_t countTracks() const; protected: virtual ~MediaGroup(); @@ -150,6 +154,59 @@ void M3UParser::MediaGroup::pickRandomMediaItems() { #endif } +status_t M3UParser::MediaGroup::selectTrack(size_t index, bool select) { + if (mType != TYPE_SUBS) { + ALOGE("only select subtitile tracks for now!"); + return INVALID_OPERATION; + } + + if (select) { + if (index >= mMediaItems.size()) { + ALOGE("track %d does not exist", index); + return INVALID_OPERATION; + } + if (mSelectedIndex == index) { + ALOGE("track %d already selected", index); + return BAD_VALUE; + } + ALOGV("selected track %d", index); + mSelectedIndex = index; + } else { + if (mSelectedIndex != index) { + ALOGE("track %d is not selected", index); + return BAD_VALUE; + } + ALOGV("unselected track %d", index); + mSelectedIndex = -1; + } + + return OK; +} + +void M3UParser::MediaGroup::getTrackInfo(Parcel* reply) const { + for (size_t i = 0; i < mMediaItems.size(); ++i) { + reply->writeInt32(2); // 2 fields + + if (mType == TYPE_AUDIO) { + reply->writeInt32(MEDIA_TRACK_TYPE_AUDIO); + } else if (mType == TYPE_VIDEO) { + reply->writeInt32(MEDIA_TRACK_TYPE_VIDEO); + } else if (mType == TYPE_SUBS) { + reply->writeInt32(MEDIA_TRACK_TYPE_SUBTITLE); + } else { + reply->writeInt32(MEDIA_TRACK_TYPE_UNKNOWN); + } + + const Media &item = mMediaItems.itemAt(i); + const char *lang = item.mLanguage.empty() ? 
"und" : item.mLanguage.c_str(); + reply->writeString16(String16(lang)); + } +} + +size_t M3UParser::MediaGroup::countTracks() const { + return mMediaItems.size(); +} + bool M3UParser::MediaGroup::getActiveURI(AString *uri) const { for (size_t i = 0; i < mMediaItems.size(); ++i) { if (mSelectedIndex >= 0 && i == (size_t)mSelectedIndex) { @@ -172,7 +229,8 @@ M3UParser::M3UParser( mIsExtM3U(false), mIsVariantPlaylist(false), mIsComplete(false), - mIsEvent(false) { + mIsEvent(false), + mSelectedIndex(-1) { mInitCheck = parse(data, size); } @@ -237,6 +295,39 @@ void M3UParser::pickRandomMediaItems() { } } +status_t M3UParser::selectTrack(size_t index, bool select) { + for (size_t i = 0, ii = index; i < mMediaGroups.size(); ++i) { + sp group = mMediaGroups.valueAt(i); + size_t tracks = group->countTracks(); + if (ii < tracks) { + status_t err = group->selectTrack(ii, select); + if (err == OK) { + mSelectedIndex = select ? index : -1; + } + return err; + } + ii -= tracks; + } + return INVALID_OPERATION; +} + +status_t M3UParser::getTrackInfo(Parcel* reply) const { + size_t trackCount = 0; + for (size_t i = 0; i < mMediaGroups.size(); ++i) { + trackCount += mMediaGroups.valueAt(i)->countTracks(); + } + reply->writeInt32(trackCount); + + for (size_t i = 0; i < mMediaGroups.size(); ++i) { + mMediaGroups.valueAt(i)->getTrackInfo(reply); + } + return OK; +} + +ssize_t M3UParser::getSelectedIndex() const { + return mSelectedIndex; +} + bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const { if (!mIsVariantPlaylist) { *uri = mBaseURI; diff --git a/media/libstagefright/httplive/M3UParser.h b/media/libstagefright/httplive/M3UParser.h index abea286..5248004 100644 --- a/media/libstagefright/httplive/M3UParser.h +++ b/media/libstagefright/httplive/M3UParser.h @@ -41,6 +41,9 @@ struct M3UParser : public RefBase { bool itemAt(size_t index, AString *uri, sp *meta = NULL); void pickRandomMediaItems(); + status_t selectTrack(size_t index, bool select); + status_t getTrackInfo(Parcel* reply) const; + ssize_t getSelectedIndex() const; bool getAudioURI(size_t index, AString *uri) const; bool getVideoURI(size_t index, AString *uri) const; @@ -67,6 +70,7 @@ private: sp mMeta; Vector mItems; + ssize_t mSelectedIndex; // Media groups keyed by group ID. KeyedVector > mMediaGroups; diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp index 8ae70b7..973b779 100644 --- a/media/libstagefright/httplive/PlaylistFetcher.cpp +++ b/media/libstagefright/httplive/PlaylistFetcher.cpp @@ -462,7 +462,11 @@ void PlaylistFetcher::onMonitorQueue() { sp packetSource = mPacketSources.valueFor(LiveSession::STREAMTYPE_SUBTITLES); - downloadMore = packetSource->hasBufferAvailable(&finalResult); + int64_t bufferedDurationUs = + packetSource->getBufferedDurationUs(&finalResult); + + downloadMore = (bufferedDurationUs < kMinBufferedDurationUs); + finalResult = OK; } else { bool first = true; int64_t minBufferedDurationUs = 0ll; @@ -659,7 +663,7 @@ void PlaylistFetcher::onDownloadNext() { } } - err = extractAndQueueAccessUnits(buffer); + err = extractAndQueueAccessUnits(buffer, itemMeta); if (err != OK) { notifyError(err); @@ -706,7 +710,7 @@ int32_t PlaylistFetcher::getSeqNumberForTime(int64_t timeUs) const { } status_t PlaylistFetcher::extractAndQueueAccessUnits( - const sp &buffer) { + const sp &buffer, const sp &itemMeta) { if (buffer->size() > 0 && buffer->data()[0] == 0x47) { // Let's assume this is an MPEG2 transport stream. 
@@ -802,7 +806,10 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits( const sp packetSource = mPacketSources.valueFor(LiveSession::STREAMTYPE_SUBTITLES); - buffer->meta()->setInt64("timeUs", 0ll); + int64_t durationUs; + CHECK(itemMeta->findInt64("durationUs", &durationUs)); + buffer->meta()->setInt64("timeUs", getSegmentStartTimeUs(mSeqNumber)); + buffer->meta()->setInt64("durationUs", durationUs); packetSource->queueAccessUnit(buffer); return OK; diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h index 5a2b901..1648e02 100644 --- a/media/libstagefright/httplive/PlaylistFetcher.h +++ b/media/libstagefright/httplive/PlaylistFetcher.h @@ -135,7 +135,8 @@ private: void onMonitorQueue(); void onDownloadNext(); - status_t extractAndQueueAccessUnits(const sp &buffer); + status_t extractAndQueueAccessUnits( + const sp &buffer, const sp &itemMeta); void notifyError(status_t err); -- cgit v1.1 From 46291616486979986cba3ab83e894728ef53063f Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Thu, 18 Jul 2013 14:38:44 -0700 Subject: AudioSystem: new audioflinger restart detection Add a specific method to AudioSystem for AudioService to poll for AudioFlinger service restart instead of relying on current callback mechanism which is flaky. Bug: 9693068. Change-Id: Ie88bc9d25033503bc5cd2fa9d8c754d0f8045b8f --- include/media/AudioSystem.h | 3 +++ media/libmedia/AudioSystem.cpp | 8 ++++++++ 2 files changed, 11 insertions(+) diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index 006af08..225ef76 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -252,6 +252,9 @@ public: // bit rate, duration, video and streaming or offload property is enabled static bool isOffloadSupported(const audio_offload_info_t& info); + // check presence of audio flinger service. 
+ // returns NO_ERROR if binding to service succeeds, DEAD_OBJECT otherwise + static status_t checkAudioFlinger(); // ---------------------------------------------------------------------------- private: diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index a571fe4..8033c2c 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -76,6 +76,14 @@ const sp& AudioSystem::get_audio_flinger() return gAudioFlinger; } +/* static */ status_t AudioSystem::checkAudioFlinger() +{ + if (defaultServiceManager()->checkService(String16("media.audio_flinger")) != 0) { + return NO_ERROR; + } + return DEAD_OBJECT; +} + status_t AudioSystem::muteMicrophone(bool state) { const sp& af = AudioSystem::get_audio_flinger(); if (af == 0) return PERMISSION_DENIED; -- cgit v1.1 From 87ecf19404586672008e98babc225e094292ceb5 Mon Sep 17 00:00:00 2001 From: Chong Zhang Date: Thu, 6 Jun 2013 12:42:59 -0700 Subject: wifi-display: pass session info to wifi display settings Bug: 9371882 Change-Id: I9e4b8c2154b03ce8ff3e14c465a5224bb6e8db9a --- include/media/IRemoteDisplayClient.h | 2 +- media/libmedia/IRemoteDisplayClient.cpp | 6 ++++-- media/libstagefright/wifi-display/source/WifiDisplaySource.cpp | 6 ++++-- media/libstagefright/wifi-display/wfd.cpp | 10 ++++++---- 4 files changed, 15 insertions(+), 9 deletions(-) diff --git a/include/media/IRemoteDisplayClient.h b/include/media/IRemoteDisplayClient.h index 7b0fa9e..0e6d55d 100644 --- a/include/media/IRemoteDisplayClient.h +++ b/include/media/IRemoteDisplayClient.h @@ -49,7 +49,7 @@ public: // Provides a surface texture that the client should use to stream buffers to // the remote display. virtual void onDisplayConnected(const sp& bufferProducer, - uint32_t width, uint32_t height, uint32_t flags) = 0; // one-way + uint32_t width, uint32_t height, uint32_t flags, uint32_t session) = 0; // one-way // Indicates that the remote display has been disconnected normally. 
// This method should only be called once the client has called 'dispose()' diff --git a/media/libmedia/IRemoteDisplayClient.cpp b/media/libmedia/IRemoteDisplayClient.cpp index 5c494b3..7190879 100644 --- a/media/libmedia/IRemoteDisplayClient.cpp +++ b/media/libmedia/IRemoteDisplayClient.cpp @@ -38,7 +38,7 @@ public: } void onDisplayConnected(const sp& bufferProducer, - uint32_t width, uint32_t height, uint32_t flags) + uint32_t width, uint32_t height, uint32_t flags, uint32_t session) { Parcel data, reply; data.writeInterfaceToken(IRemoteDisplayClient::getInterfaceDescriptor()); @@ -46,6 +46,7 @@ public: data.writeInt32(width); data.writeInt32(height); data.writeInt32(flags); + data.writeInt32(session); remote()->transact(ON_DISPLAY_CONNECTED, data, &reply, IBinder::FLAG_ONEWAY); } @@ -80,7 +81,8 @@ status_t BnRemoteDisplayClient::onTransact( uint32_t width = data.readInt32(); uint32_t height = data.readInt32(); uint32_t flags = data.readInt32(); - onDisplayConnected(surfaceTexture, width, height, flags); + uint32_t session = data.readInt32(); + onDisplayConnected(surfaceTexture, width, height, flags, session); return NO_ERROR; } case ON_DISPLAY_DISCONNECTED: { diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 4b59e62..d72349d 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -416,7 +416,8 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { 0, // height, mUsingHDCP ? IRemoteDisplayClient::kDisplayFlagSecure - : 0); + : 0, + 0); } else { size_t width, height; @@ -435,7 +436,8 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { height, mUsingHDCP ? IRemoteDisplayClient::kDisplayFlagSecure - : 0); + : 0, + playbackSessionID); } } diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index 04cb319..52e4e26 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -55,7 +55,8 @@ struct RemoteDisplayClient : public BnRemoteDisplayClient { const sp &bufferProducer, uint32_t width, uint32_t height, - uint32_t flags); + uint32_t flags, + uint32_t session); virtual void onDisplayDisconnected(); virtual void onDisplayError(int32_t error); @@ -91,9 +92,10 @@ void RemoteDisplayClient::onDisplayConnected( const sp &bufferProducer, uint32_t width, uint32_t height, - uint32_t flags) { - ALOGI("onDisplayConnected width=%u, height=%u, flags = 0x%08x", - width, height, flags); + uint32_t flags, + uint32_t session) { + ALOGI("onDisplayConnected width=%u, height=%u, flags = 0x%08x, session = %d", + width, height, flags, session); if (bufferProducer != NULL) { mSurfaceTexture = bufferProducer; -- cgit v1.1 From 1ce7c34e67c2cf58dd88c31f36f4bd62e375f7f0 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Wed, 21 Aug 2013 13:57:21 -0700 Subject: Camera1: Set preview to be asynchronous, and remove dead code - Use the controlledByApp flag to make sure application-bound preview buffer queue is asynchronous as before - Remove setPreviewDisplay in service, since it is no longer in the binder interface - Rename setPreviewTexture to setPreviewTarget, to make it clear it's the only game in town now. Rename only on the binder level and service for now. 
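As a rough sketch only (not part of this change; the helper name is made up), the pattern the service-side client implementations below adopt when wrapping the app-supplied producer looks like this:

    #include <gui/IGraphicBufferProducer.h>
    #include <gui/Surface.h>
    #include <utils/StrongPointer.h>

    // Wrap the binder-provided producer in a Surface. Passing controlledByApp == true
    // keeps the app-facing BufferQueue in asynchronous mode, which applications using
    // the old camera API depend on.
    static android::sp<android::Surface> makePreviewWindow(
            const android::sp<android::IGraphicBufferProducer>& bufferProducer) {
        if (bufferProducer == NULL) {
            return NULL;
        }
        return new android::Surface(bufferProducer, /*controlledByApp*/ true);
    }

Both Camera2Client::setPreviewTarget() and CameraClient::setPreviewTarget() in the hunks below follow this shape.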
Bug: 10312644 Change-Id: Icd33a462022f9729a63dc65c69b755cb7969857e --- camera/Camera.cpp | 4 ++-- camera/ICamera.cpp | 18 ++++++++-------- include/camera/Camera.h | 4 ++-- include/camera/ICamera.h | 4 ++-- services/camera/libcameraservice/CameraService.h | 3 +-- .../camera/libcameraservice/api1/Camera2Client.cpp | 25 +++++----------------- .../camera/libcameraservice/api1/Camera2Client.h | 3 +-- .../camera/libcameraservice/api1/CameraClient.cpp | 20 ++++++----------- .../camera/libcameraservice/api1/CameraClient.h | 3 +-- 9 files changed, 30 insertions(+), 54 deletions(-) diff --git a/camera/Camera.cpp b/camera/Camera.cpp index 22016a9..bbc4aa4 100644 --- a/camera/Camera.cpp +++ b/camera/Camera.cpp @@ -106,7 +106,7 @@ status_t Camera::setPreviewTexture(const sp& bufferProdu sp c = mCamera; if (c == 0) return NO_INIT; ALOGD_IF(bufferProducer == 0, "app passed NULL surface"); - return c->setPreviewTexture(bufferProducer); + return c->setPreviewTarget(bufferProducer); } // start preview mode @@ -127,7 +127,7 @@ status_t Camera::storeMetaDataInBuffers(bool enabled) return c->storeMetaDataInBuffers(enabled); } -// start recording mode, must call setPreviewDisplay first +// start recording mode, must call setPreviewTexture first status_t Camera::startRecording() { ALOGV("startRecording"); diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp index 12356f0..8c6e1f7 100644 --- a/camera/ICamera.cpp +++ b/camera/ICamera.cpp @@ -29,7 +29,7 @@ namespace android { enum { DISCONNECT = IBinder::FIRST_CALL_TRANSACTION, - SET_PREVIEW_TEXTURE, + SET_PREVIEW_TARGET, SET_PREVIEW_CALLBACK_FLAG, SET_PREVIEW_CALLBACK_TARGET, START_PREVIEW, @@ -70,14 +70,14 @@ public: } // pass the buffered IGraphicBufferProducer to the camera service - status_t setPreviewTexture(const sp& bufferProducer) + status_t setPreviewTarget(const sp& bufferProducer) { - ALOGV("setPreviewTexture"); + ALOGV("setPreviewTarget"); Parcel data, reply; data.writeInterfaceToken(ICamera::getInterfaceDescriptor()); sp b(bufferProducer->asBinder()); data.writeStrongBinder(b); - remote()->transact(SET_PREVIEW_TEXTURE, data, &reply); + remote()->transact(SET_PREVIEW_TARGET, data, &reply); return reply.readInt32(); } @@ -104,7 +104,7 @@ public: return reply.readInt32(); } - // start preview mode, must call setPreviewDisplay first + // start preview mode, must call setPreviewTarget first status_t startPreview() { ALOGV("startPreview"); @@ -114,7 +114,7 @@ public: return reply.readInt32(); } - // start recording mode, must call setPreviewDisplay first + // start recording mode, must call setPreviewTarget first status_t startRecording() { ALOGV("startRecording"); @@ -285,12 +285,12 @@ status_t BnCamera::onTransact( reply->writeNoException(); return NO_ERROR; } break; - case SET_PREVIEW_TEXTURE: { - ALOGV("SET_PREVIEW_TEXTURE"); + case SET_PREVIEW_TARGET: { + ALOGV("SET_PREVIEW_TARGET"); CHECK_INTERFACE(ICamera, data, reply); sp st = interface_cast(data.readStrongBinder()); - reply->writeInt32(setPreviewTexture(st)); + reply->writeInt32(setPreviewTarget(st)); return NO_ERROR; } break; case SET_PREVIEW_CALLBACK_FLAG: { diff --git a/include/camera/Camera.h b/include/camera/Camera.h index 81848b3..b1b2e95 100644 --- a/include/camera/Camera.h +++ b/include/camera/Camera.h @@ -83,7 +83,7 @@ public: // pass the buffered IGraphicBufferProducer to the camera service status_t setPreviewTexture(const sp& bufferProducer); - // start preview mode, must call setPreviewDisplay first + // start preview mode, must call setPreviewTexture first status_t startPreview(); // 
stop preview mode @@ -92,7 +92,7 @@ public: // get preview state bool previewEnabled(); - // start recording mode, must call setPreviewDisplay first + // start recording mode, must call setPreviewTexture first status_t startRecording(); // stop recording mode diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h index f3a186e..b025735 100644 --- a/include/camera/ICamera.h +++ b/include/camera/ICamera.h @@ -50,7 +50,7 @@ public: virtual status_t unlock() = 0; // pass the buffered IGraphicBufferProducer to the camera service - virtual status_t setPreviewTexture( + virtual status_t setPreviewTarget( const sp& bufferProducer) = 0; // set the preview callback flag to affect how the received frames from @@ -64,7 +64,7 @@ public: virtual status_t setPreviewCallbackTarget( const sp& callbackProducer) = 0; - // start preview mode, must call setPreviewDisplay first + // start preview mode, must call setPreviewTarget first virtual status_t startPreview() = 0; // stop preview mode diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index 3921cbd..b34a0f6 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -212,8 +212,7 @@ public: virtual status_t connect(const sp& client) = 0; virtual status_t lock() = 0; virtual status_t unlock() = 0; - virtual status_t setPreviewDisplay(const sp& surface) = 0; - virtual status_t setPreviewTexture(const sp& bufferProducer)=0; + virtual status_t setPreviewTarget(const sp& bufferProducer)=0; virtual void setPreviewCallbackFlag(int flag) = 0; virtual status_t setPreviewCallbackTarget( const sp& callbackProducer) = 0; diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp index 46aa60c..3d9fe01 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.cpp +++ b/services/camera/libcameraservice/api1/Camera2Client.cpp @@ -491,25 +491,7 @@ status_t Camera2Client::unlock() { return EBUSY; } -status_t Camera2Client::setPreviewDisplay( - const sp& surface) { - ATRACE_CALL(); - ALOGV("%s: E", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - sp binder; - sp window; - if (surface != 0) { - binder = surface->getIGraphicBufferProducer()->asBinder(); - window = surface; - } - - return setPreviewWindowL(binder,window); -} - -status_t Camera2Client::setPreviewTexture( +status_t Camera2Client::setPreviewTarget( const sp& bufferProducer) { ATRACE_CALL(); ALOGV("%s: E", __FUNCTION__); @@ -521,7 +503,10 @@ status_t Camera2Client::setPreviewTexture( sp window; if (bufferProducer != 0) { binder = bufferProducer->asBinder(); - window = new Surface(bufferProducer); + // Using controlledByApp flag to ensure that the buffer queue remains in + // async mode for the old camera API, where many applications depend + // on that behavior. 
+ window = new Surface(bufferProducer, /*controlledByApp*/ true); } return setPreviewWindowL(binder, window); } diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h index ed448f3..53629a1 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.h +++ b/services/camera/libcameraservice/api1/Camera2Client.h @@ -57,8 +57,7 @@ public: virtual status_t connect(const sp& client); virtual status_t lock(); virtual status_t unlock(); - virtual status_t setPreviewDisplay(const sp& surface); - virtual status_t setPreviewTexture( + virtual status_t setPreviewTarget( const sp& bufferProducer); virtual void setPreviewCallbackFlag(int flag); virtual status_t setPreviewCallbackTarget( diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp index ad8856b..bd6805d 100644 --- a/services/camera/libcameraservice/api1/CameraClient.cpp +++ b/services/camera/libcameraservice/api1/CameraClient.cpp @@ -308,26 +308,20 @@ status_t CameraClient::setPreviewWindow(const sp& binder, return result; } -// set the Surface that the preview will use -status_t CameraClient::setPreviewDisplay(const sp& surface) { - LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid()); - - sp binder(surface != 0 ? surface->getIGraphicBufferProducer()->asBinder() : 0); - sp window(surface); - return setPreviewWindow(binder, window); -} - -// set the SurfaceTextureClient that the preview will use -status_t CameraClient::setPreviewTexture( +// set the buffer consumer that the preview will use +status_t CameraClient::setPreviewTarget( const sp& bufferProducer) { - LOG1("setPreviewTexture(%p) (pid %d)", bufferProducer.get(), + LOG1("setPreviewTarget(%p) (pid %d)", bufferProducer.get(), getCallingPid()); sp binder; sp window; if (bufferProducer != 0) { binder = bufferProducer->asBinder(); - window = new Surface(bufferProducer); + // Using controlledByApp flag to ensure that the buffer queue remains in + // async mode for the old camera API, where many applications depend + // on that behavior. 
+ window = new Surface(bufferProducer, /*controlledByApp*/ true); } return setPreviewWindow(binder, window); } diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h index abde75a..4b89564 100644 --- a/services/camera/libcameraservice/api1/CameraClient.h +++ b/services/camera/libcameraservice/api1/CameraClient.h @@ -37,8 +37,7 @@ public: virtual status_t connect(const sp& client); virtual status_t lock(); virtual status_t unlock(); - virtual status_t setPreviewDisplay(const sp& surface); - virtual status_t setPreviewTexture(const sp& bufferProducer); + virtual status_t setPreviewTarget(const sp& bufferProducer); virtual void setPreviewCallbackFlag(int flag); virtual status_t setPreviewCallbackTarget( const sp& callbackProducer); -- cgit v1.1 From 4b820b0e1fa069714b123fc35784541d0f94d267 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Wed, 21 Aug 2013 14:39:05 -0700 Subject: Camera1: Rename setPreviewTexture to ...Target for clarity Bug: 10312644 Change-Id: I19976188f0359bfd177209fb40145defdae9c740 --- camera/Camera.cpp | 6 +++--- include/camera/Camera.h | 6 +++--- media/libstagefright/CameraSource.cpp | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/camera/Camera.cpp b/camera/Camera.cpp index bbc4aa4..22199fa 100644 --- a/camera/Camera.cpp +++ b/camera/Camera.cpp @@ -100,9 +100,9 @@ status_t Camera::unlock() } // pass the buffered IGraphicBufferProducer to the camera service -status_t Camera::setPreviewTexture(const sp& bufferProducer) +status_t Camera::setPreviewTarget(const sp& bufferProducer) { - ALOGV("setPreviewTexture(%p)", bufferProducer.get()); + ALOGV("setPreviewTarget(%p)", bufferProducer.get()); sp c = mCamera; if (c == 0) return NO_INIT; ALOGD_IF(bufferProducer == 0, "app passed NULL surface"); @@ -127,7 +127,7 @@ status_t Camera::storeMetaDataInBuffers(bool enabled) return c->storeMetaDataInBuffers(enabled); } -// start recording mode, must call setPreviewTexture first +// start recording mode, must call setPreviewTarget first status_t Camera::startRecording() { ALOGV("startRecording"); diff --git a/include/camera/Camera.h b/include/camera/Camera.h index b1b2e95..79682b8 100644 --- a/include/camera/Camera.h +++ b/include/camera/Camera.h @@ -81,9 +81,9 @@ public: status_t unlock(); // pass the buffered IGraphicBufferProducer to the camera service - status_t setPreviewTexture(const sp& bufferProducer); + status_t setPreviewTarget(const sp& bufferProducer); - // start preview mode, must call setPreviewTexture first + // start preview mode, must call setPreviewTarget first status_t startPreview(); // stop preview mode @@ -92,7 +92,7 @@ public: // get preview state bool previewEnabled(); - // start recording mode, must call setPreviewTexture first + // start recording mode, must call setPreviewTarget first status_t startRecording(); // stop recording mode diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp index 5a26b06..3017fe7 100644 --- a/media/libstagefright/CameraSource.cpp +++ b/media/libstagefright/CameraSource.cpp @@ -536,7 +536,7 @@ status_t CameraSource::initWithCameraAccess( if (mSurface != NULL) { // This CHECK is good, since we just passed the lock/unlock // check earlier by calling mCamera->setParameters(). 
- CHECK_EQ((status_t)OK, mCamera->setPreviewTexture(mSurface)); + CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface)); } // By default, do not store metadata in video buffers -- cgit v1.1 From 9cf69e0fc110f17c28e988ed0f9bf91abfaf710d Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Wed, 21 Aug 2013 11:59:23 -0700 Subject: Add ability to test supported content types to MediaDrm bug: 10244066 Change-Id: I317f05b146db962c271893f6208890a5a6c396f1 --- drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp | 8 ++++++++ drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h | 1 + include/media/IDrm.h | 2 +- media/libmedia/IDrm.cpp | 7 +++++-- media/libmediaplayerservice/Drm.cpp | 13 ++++++++----- media/libmediaplayerservice/Drm.h | 2 +- 6 files changed, 24 insertions(+), 9 deletions(-) diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp index 06fc29d..4770db0 100644 --- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp @@ -48,6 +48,14 @@ namespace android { return (!memcmp(uuid, mock_uuid, sizeof(uuid))); } + bool MockDrmFactory::isContentTypeSupported(const String8 &mimeType) + { + if (mimeType != "" && mimeType != "video/mp4") { + return false; + } + return true; + } + status_t MockDrmFactory::createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin) { *plugin = new MockDrmPlugin(); diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h index ca9eac7..2297f9b 100644 --- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h @@ -32,6 +32,7 @@ namespace android { virtual ~MockDrmFactory() {} bool isCryptoSchemeSupported(const uint8_t uuid[16]); + bool isContentTypeSupported(const String8 &mimeType); status_t createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin); }; diff --git a/include/media/IDrm.h b/include/media/IDrm.h index d630c40..5ef26af 100644 --- a/include/media/IDrm.h +++ b/include/media/IDrm.h @@ -32,7 +32,7 @@ struct IDrm : public IInterface { virtual status_t initCheck() const = 0; - virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) = 0; + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16], const String8 &mimeType) = 0; virtual status_t createPlugin(const uint8_t uuid[16]) = 0; diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp index 902aeb2..f7a9a75 100644 --- a/media/libmedia/IDrm.cpp +++ b/media/libmedia/IDrm.cpp @@ -68,10 +68,11 @@ struct BpDrm : public BpInterface { return reply.readInt32(); } - virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) { + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16], const String8 &mimeType) { Parcel data, reply; data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); data.write(uuid, 16); + data.writeString8(mimeType); remote()->transact(IS_CRYPTO_SUPPORTED, data, &reply); return reply.readInt32() != 0; @@ -438,7 +439,9 @@ status_t BnDrm::onTransact( CHECK_INTERFACE(IDrm, data, reply); uint8_t uuid[16]; data.read(uuid, sizeof(uuid)); - reply->writeInt32(isCryptoSchemeSupported(uuid)); + String8 mimeType = data.readString8(); + reply->writeInt32(isCryptoSchemeSupported(uuid, mimeType)); + return OK; } diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp index f00f488..4b527d0 100644 --- a/media/libmediaplayerservice/Drm.cpp +++ b/media/libmediaplayerservice/Drm.cpp @@ -211,15 +211,18 @@ bool Drm::loadLibraryForScheme(const String8 
&path, const uint8_t uuid[16]) { return true; } -bool Drm::isCryptoSchemeSupported(const uint8_t uuid[16]) { +bool Drm::isCryptoSchemeSupported(const uint8_t uuid[16], const String8 &mimeType) { + Mutex::Autolock autoLock(mLock); - if (mFactory && mFactory->isCryptoSchemeSupported(uuid)) { - return true; + if (!mFactory || !mFactory->isCryptoSchemeSupported(uuid)) { + findFactoryForScheme(uuid); + if (mInitCheck != OK) { + return false; + } } - findFactoryForScheme(uuid); - return (mInitCheck == OK); + return mFactory->isContentTypeSupported(mimeType); } status_t Drm::createPlugin(const uint8_t uuid[16]) { diff --git a/media/libmediaplayerservice/Drm.h b/media/libmediaplayerservice/Drm.h index 3f460f1..119fd50 100644 --- a/media/libmediaplayerservice/Drm.h +++ b/media/libmediaplayerservice/Drm.h @@ -37,7 +37,7 @@ struct Drm : public BnDrm, virtual status_t initCheck() const; - virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]); + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16], const String8 &mimeType); virtual status_t createPlugin(const uint8_t uuid[16]); -- cgit v1.1 From abaa51d3ca31f0eda99e1d271e6dc64c877dbf58 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Wed, 14 Aug 2013 11:37:00 -0700 Subject: Camera2: Add flush support - On HAL2 devices, fall back to wait until idle - On HAL3 devices, call HAL flush method Bug: 9758581 Change-Id: Ie1c570a15f6590a1ee6c271e3b989c48079b468a --- camera/camera2/ICameraDeviceUser.cpp | 17 +++++++++++++ include/camera/camera2/ICameraDeviceUser.h | 3 +++ .../libcameraservice/api2/CameraDeviceClient.cpp | 14 +++++++++++ .../libcameraservice/api2/CameraDeviceClient.h | 4 ++++ .../libcameraservice/common/CameraDeviceBase.h | 7 ++++++ .../libcameraservice/device2/Camera2Device.cpp | 28 +++++++++++++++++++--- .../libcameraservice/device2/Camera2Device.h | 5 ++++ .../libcameraservice/device3/Camera3Device.cpp | 18 ++++++++++++++ .../libcameraservice/device3/Camera3Device.h | 7 ++++++ 9 files changed, 100 insertions(+), 3 deletions(-) diff --git a/camera/camera2/ICameraDeviceUser.cpp b/camera/camera2/ICameraDeviceUser.cpp index 923f487..ae4cf69 100644 --- a/camera/camera2/ICameraDeviceUser.cpp +++ b/camera/camera2/ICameraDeviceUser.cpp @@ -41,6 +41,7 @@ enum { CREATE_DEFAULT_REQUEST, GET_CAMERA_INFO, WAIT_UNTIL_IDLE, + FLUSH }; class BpCameraDeviceUser : public BpInterface @@ -183,6 +184,16 @@ public: return reply.readInt32(); } + virtual status_t flush() + { + ALOGV("flush"); + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); + remote()->transact(FLUSH, data, &reply); + reply.readExceptionCode(); + return reply.readInt32(); + } + private: @@ -312,6 +323,12 @@ status_t BnCameraDeviceUser::onTransact( reply->writeInt32(waitUntilIdle()); return NO_ERROR; } break; + case FLUSH: { + CHECK_INTERFACE(ICameraDeviceUser, data, reply); + reply->writeNoException(); + reply->writeInt32(flush()); + return NO_ERROR; + } default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/include/camera/camera2/ICameraDeviceUser.h b/include/camera/camera2/ICameraDeviceUser.h index 45988d0..f71f302 100644 --- a/include/camera/camera2/ICameraDeviceUser.h +++ b/include/camera/camera2/ICameraDeviceUser.h @@ -63,6 +63,9 @@ public: // Wait until all the submitted requests have finished processing virtual status_t waitUntilIdle() = 0; + + // Flush all pending and in-progress work as quickly as possible. 
+ virtual status_t flush() = 0; }; // ---------------------------------------------------------------------------- diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp index 414316d..289ba06 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp @@ -436,6 +436,20 @@ status_t CameraDeviceClient::waitUntilIdle() return res; } +status_t CameraDeviceClient::flush() { + ATRACE_CALL(); + ALOGV("%s", __FUNCTION__); + + status_t res = OK; + if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + + Mutex::Autolock icl(mBinderSerializationLock); + + if (!mDevice.get()) return DEAD_OBJECT; + + return mDevice->flush(); +} + status_t CameraDeviceClient::dump(int fd, const Vector& args) { String8 result; result.appendFormat("CameraDeviceClient[%d] (%p) PID: %d, dump:\n", diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h index 21d633c..c6b6336 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.h +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h @@ -89,6 +89,10 @@ public: // Wait until all the submitted requests have finished processing virtual status_t waitUntilIdle(); + + // Flush all active and pending requests as fast as possible + virtual status_t flush(); + /** * Interface used by CameraService */ diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h index aa92bec..ebbd4ea 100644 --- a/services/camera/libcameraservice/common/CameraDeviceBase.h +++ b/services/camera/libcameraservice/common/CameraDeviceBase.h @@ -209,6 +209,13 @@ class CameraDeviceBase : public virtual RefBase { */ virtual status_t pushReprocessBuffer(int reprocessStreamId, buffer_handle_t *buffer, wp listener) = 0; + + /** + * Flush all pending and in-flight requests. Blocks until flush is + * complete. 
+ */ + virtual status_t flush() = 0; + }; }; // namespace android diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp index 710d0e9..fe2cd77 100644 --- a/services/camera/libcameraservice/device2/Camera2Device.cpp +++ b/services/camera/libcameraservice/device2/Camera2Device.cpp @@ -567,6 +567,13 @@ status_t Camera2Device::pushReprocessBuffer(int reprocessStreamId, return res; } +status_t Camera2Device::flush() { + ATRACE_CALL(); + + mRequestQueue.clear(); + return waitUntilDrained(); +} + /** * Camera2Device::MetadataQueue */ @@ -591,9 +598,7 @@ Camera2Device::MetadataQueue::MetadataQueue(): Camera2Device::MetadataQueue::~MetadataQueue() { ATRACE_CALL(); - Mutex::Autolock l(mMutex); - freeBuffers(mEntries.begin(), mEntries.end()); - freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); + clear(); } // Connect to camera2 HAL as consumer (input requests/reprocessing) @@ -784,6 +789,23 @@ status_t Camera2Device::MetadataQueue::setStreamSlot( return signalConsumerLocked(); } +status_t Camera2Device::MetadataQueue::clear() +{ + ATRACE_CALL(); + ALOGV("%s: E", __FUNCTION__); + + Mutex::Autolock l(mMutex); + + // Clear streaming slot + freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); + mStreamSlotCount = 0; + + // Clear request queue + freeBuffers(mEntries.begin(), mEntries.end()); + mCount = 0; + return OK; +} + status_t Camera2Device::MetadataQueue::dump(int fd, const Vector& /*args*/) { ATRACE_CALL(); diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h index 8945ec2..2aa22a2 100644 --- a/services/camera/libcameraservice/device2/Camera2Device.h +++ b/services/camera/libcameraservice/device2/Camera2Device.h @@ -67,6 +67,8 @@ class Camera2Device: public CameraDeviceBase { virtual status_t triggerPrecaptureMetering(uint32_t id); virtual status_t pushReprocessBuffer(int reprocessStreamId, buffer_handle_t *buffer, wp listener); + // Flush implemented as just a wait + virtual status_t flush(); private: const int mId; camera2_device_t *mHal2Device; @@ -113,6 +115,9 @@ class Camera2Device: public CameraDeviceBase { status_t setStreamSlot(camera_metadata_t *buf); status_t setStreamSlot(const List &bufs); + // Clear the request queue and the streaming slot + status_t clear(); + status_t dump(int fd, const Vector& args); private: diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index 0a4a24c..7f2ec7a 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -952,6 +952,16 @@ status_t Camera3Device::pushReprocessBuffer(int reprocessStreamId, return INVALID_OPERATION; } +status_t Camera3Device::flush() { + ATRACE_CALL(); + ALOGV("%s: Camera %d: Flushing all requests", __FUNCTION__, mId); + + Mutex::Autolock l(mLock); + + mRequestThread->clear(); + return mHal3Device->ops->flush(mHal3Device); +} + /** * Camera3Device private methods */ @@ -1488,6 +1498,14 @@ status_t Camera3Device::RequestThread::clearRepeatingRequests() { return OK; } +status_t Camera3Device::RequestThread::clear() { + Mutex::Autolock l(mRequestLock); + mRepeatingRequests.clear(); + mRequestQueue.clear(); + mTriggerMap.clear(); + return OK; +} + void Camera3Device::RequestThread::setPaused(bool paused) { Mutex::Autolock l(mPauseLock); mDoPause = paused; diff --git 
a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h index 76c08ae..99e1cc8 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.h +++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -124,6 +124,8 @@ class Camera3Device : virtual status_t pushReprocessBuffer(int reprocessStreamId, buffer_handle_t *buffer, wp listener); + virtual status_t flush(); + private: static const size_t kInFlightWarnLimit = 20; static const nsecs_t kShutdownTimeout = 5000000000; // 5 sec
@@ -249,6 +251,11 @@ class Camera3Device : status_t queueRequest(sp request); /** + * Remove all queued and repeating requests, and pending triggers + */ + status_t clear(); + + /** * Queue a trigger to be dispatched with the next outgoing * process_capture_request. The settings for that request only * will be temporarily rewritten to add the trigger tag/value.
-- cgit v1.1
From 88aef23d00a509693a955b6a207c0fb023fbc92d Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Fri, 23 Aug 2013 17:47:06 -0700 Subject: camera2: Get rid of annoying "Reading a NULL string...." error message
Bug: 10461464 Change-Id: Ie99f025db0e0e12c703f2d391e218b49330ee4dd --- camera/camera2/ICameraDeviceUser.cpp | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-)
diff --git a/camera/camera2/ICameraDeviceUser.cpp b/camera/camera2/ICameraDeviceUser.cpp index 923f487..73fec72 100644 --- a/camera/camera2/ICameraDeviceUser.cpp +++ b/camera/camera2/ICameraDeviceUser.cpp
@@ -43,6 +43,19 @@ enum { WAIT_UNTIL_IDLE, }; +namespace { + // Read empty strings without printing a false error message. + String16 readMaybeEmptyString16(const Parcel& parcel) { + size_t len; + const char16_t* str = parcel.readString16Inplace(&len); + if (str != NULL) { + return String16(str, len); + } else { + return String16(); + } + } +}; + class BpCameraDeviceUser : public BpInterface { public:
@@ -250,7 +263,7 @@ status_t BnCameraDeviceUser::onTransact( sp bp; if (data.readInt32() != 0) { - String16 name = data.readString16(); + String16 name = readMaybeEmptyString16(data); bp = interface_cast( data.readStrongBinder());
-- cgit v1.1
From 5ff7836da0220b3097f36c8a5e82111816ebca62 Mon Sep 17 00:00:00 2001 From: Takeshi Aimi Date: Wed, 11 Jul 2012 17:09:21 +0900 Subject: Enhancement for OnInfo callback on DRM Framework
In the DRM framework, plugins can transmit a DrmInfoEvent to the Java layer. Although DrmInfoEvent has several entries, the current implementation can only convey an integer and a String. This change enables plugins to propagate a hashmap to the Java layer. The hashmap can hold one or more Strings and one byte array as values. Changes are made by Sony Corporation.
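As an illustrative sketch only (not part of this change; the info type, attribute key, and payload are placeholders), a plugin-side caller could populate the extended event like this:

    #define LOG_TAG "DrmInfoEventExample"
    #include <utils/Log.h>
    #include <utils/String8.h>
    #include <drm/DrmInfoEvent.h>

    using namespace android;

    // Build a DrmInfoEvent that carries string attributes plus a binary payload.
    // The event copies the DrmBuffer contents into its own allocation, so a
    // stack buffer is safe to pass here.
    static void buildExampleEvent(int uniqueId, int infoType) {
        char payload[] = { 0x01, 0x02, 0x03 };
        DrmBuffer binary(payload, sizeof(payload));
        DrmInfoEvent event(uniqueId, infoType, String8("rights updated"), binary);

        String8 account("example-account");       // put() takes a non-const String8&
        event.put(String8("account_id"), account);

        // Walk the attributes the same way the binder proxy serializes them.
        DrmInfoEvent::KeyIterator keyIt = event.keyIterator();
        while (keyIt.hasNext()) {
            const String8 key = keyIt.next();
            ALOGV("attribute %s=%s", key.string(), event.get(key).string());
        }
    }

The same attribute map plus DrmBuffer is what BpDrmServiceListener::notify() writes to the Parcel and BnDrmServiceListener::onTransact() reconstructs in the hunks below.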
bug: 10459159 Change-Id: Ic19265d4ad3db4eda66a3c27e1e08873a8f2a4d7 (cherry picked from commit 4f782bf0cb69929ebf03de239e2c9bf8e82adf5e) --- drm/common/DrmInfoEvent.cpp | 92 ++++++++++++++++++++++++++++- drm/common/IDrmServiceListener.cpp | 32 +++++++++- include/drm/DrmInfoEvent.h | 117 ++++++++++++++++++++++++++++++++++++- 3 files changed, 237 insertions(+), 4 deletions(-) diff --git a/drm/common/DrmInfoEvent.cpp b/drm/common/DrmInfoEvent.cpp index 27a5a2d..2315aa9 100644 --- a/drm/common/DrmInfoEvent.cpp +++ b/drm/common/DrmInfoEvent.cpp @@ -16,16 +16,29 @@ #include #include +#include using namespace android; DrmInfoEvent::DrmInfoEvent(int uniqueId, int infoType, const String8 message) : mUniqueId(uniqueId), mInfoType(infoType), - mMessage(message) { + mMessage(message), + mDrmBuffer() { } +DrmInfoEvent::DrmInfoEvent(int uniqueId, int infoType, const String8 message, + const DrmBuffer& drmBuffer) + : mUniqueId(uniqueId), mInfoType(infoType), mMessage(message), mDrmBuffer() { + setData(drmBuffer); +} + +DrmInfoEvent::~DrmInfoEvent() { + delete [] mDrmBuffer.data; +} + + int DrmInfoEvent::getUniqueId() const { return mUniqueId; } @@ -38,3 +51,80 @@ const String8 DrmInfoEvent::getMessage() const { return mMessage; } +int DrmInfoEvent::getCount() const { + return mAttributes.size(); +} + +status_t DrmInfoEvent::put(const String8& key, String8& value) { + mAttributes.add(key, value); + return DRM_NO_ERROR; +} + +const String8 DrmInfoEvent::get(const String8& key) const { + if (mAttributes.indexOfKey(key) != NAME_NOT_FOUND) { + return mAttributes.valueFor(key); + } + return String8(""); +} + +const DrmBuffer& DrmInfoEvent::getData() const { + return mDrmBuffer; +} + +void DrmInfoEvent::setData(const DrmBuffer& drmBuffer) { + delete [] mDrmBuffer.data; + mDrmBuffer.data = new char[drmBuffer.length];; + mDrmBuffer.length = drmBuffer.length; + memcpy(mDrmBuffer.data, drmBuffer.data, drmBuffer.length); +} + +DrmInfoEvent::KeyIterator DrmInfoEvent::keyIterator() const { + return KeyIterator(this); +} + +DrmInfoEvent::Iterator DrmInfoEvent::iterator() const { + return Iterator(this); +} + +// KeyIterator implementation +DrmInfoEvent::KeyIterator::KeyIterator(const DrmInfoEvent::KeyIterator& keyIterator) + : mDrmInfoEvent(keyIterator.mDrmInfoEvent), mIndex(keyIterator.mIndex) { +} + +bool DrmInfoEvent::KeyIterator::hasNext() { + return (mIndex < mDrmInfoEvent->mAttributes.size()); +} + +const String8& DrmInfoEvent::KeyIterator::next() { + const String8& key = mDrmInfoEvent->mAttributes.keyAt(mIndex); + mIndex++; + return key; +} + +DrmInfoEvent::KeyIterator& DrmInfoEvent::KeyIterator::operator=( + const DrmInfoEvent::KeyIterator& keyIterator) { + mDrmInfoEvent = keyIterator.mDrmInfoEvent; + mIndex = keyIterator.mIndex; + return *this; +} + +// Iterator implementation +DrmInfoEvent::Iterator::Iterator(const DrmInfoEvent::Iterator& iterator) + : mDrmInfoEvent(iterator.mDrmInfoEvent), mIndex(iterator.mIndex) { +} + +DrmInfoEvent::Iterator& DrmInfoEvent::Iterator::operator=(const DrmInfoEvent::Iterator& iterator) { + mDrmInfoEvent = iterator.mDrmInfoEvent; + mIndex = iterator.mIndex; + return *this; +} + +bool DrmInfoEvent::Iterator::hasNext() { + return mIndex < mDrmInfoEvent->mAttributes.size(); +} + +const String8& DrmInfoEvent::Iterator::next() { + const String8& value = mDrmInfoEvent->mAttributes.editValueAt(mIndex); + mIndex++; + return value; +} diff --git a/drm/common/IDrmServiceListener.cpp b/drm/common/IDrmServiceListener.cpp index 6eeea40..d825afb 100644 --- a/drm/common/IDrmServiceListener.cpp 
+++ b/drm/common/IDrmServiceListener.cpp @@ -32,6 +32,19 @@ status_t BpDrmServiceListener::notify(const DrmInfoEvent& event) { data.writeInt32(event.getType()); data.writeString8(event.getMessage()); + data.writeInt32(event.getCount()); + DrmInfoEvent::KeyIterator keyIt = event.keyIterator(); + while (keyIt.hasNext()) { + String8 key = keyIt.next(); + data.writeString8(key); + data.writeString8(event.get(key)); + } + const DrmBuffer& value = event.getData(); + data.writeInt32(value.length); + if (value.length > 0) { + data.write(value.data, value.length); + } + remote()->transact(NOTIFY, data, &reply); return reply.readInt32(); } @@ -49,7 +62,24 @@ status_t BnDrmServiceListener::onTransact( int type = data.readInt32(); const String8& message = data.readString8(); - status_t status = notify(DrmInfoEvent(uniqueId, type, message)); + DrmInfoEvent event(uniqueId, type, message); + int size = data.readInt32(); + for (int index = 0; index < size; index++) { + String8 key(data.readString8()); + String8 value(data.readString8()); + event.put(key, value); + } + int valueSize = data.readInt32(); + if (valueSize > 0) { + char* valueData = new char[valueSize]; + data.read(valueData, valueSize); + DrmBuffer drmBuffer(valueData, valueSize); + event.setData(drmBuffer); + delete[] valueData; + } + + status_t status = notify(event); + reply->writeInt32(status); return DRM_NO_ERROR; diff --git a/include/drm/DrmInfoEvent.h b/include/drm/DrmInfoEvent.h index dfca228..23b2950 100644 --- a/include/drm/DrmInfoEvent.h +++ b/include/drm/DrmInfoEvent.h @@ -17,6 +17,8 @@ #ifndef __DRM_INFO_EVENT_H__ #define __DRM_INFO_EVENT_H__ +#include "drm_framework_common.h" + namespace android { class String8; @@ -71,18 +73,70 @@ public: public: /** - * Constructor for DrmInfoEvent + * Constructor for DrmInfoEvent. + * Data in drmBuffer are copied to newly allocated buffer. 
* * @param[in] uniqueId Unique session identifier * @param[in] infoType Type of information * @param[in] message Message description + * @param[in] drmBuffer Binary information */ DrmInfoEvent(int uniqueId, int infoType, const String8 message); + DrmInfoEvent(int uniqueId, int infoType, const String8 message, const DrmBuffer& drmBuffer); /** * Destructor for DrmInfoEvent */ - virtual ~DrmInfoEvent() {} + ~DrmInfoEvent(); + +public: + /** + * Iterator for key + */ + class KeyIterator { + friend class DrmInfoEvent; + + private: + KeyIterator(const DrmInfoEvent* drmInfoEvent) + : mDrmInfoEvent(const_cast (drmInfoEvent)), mIndex(0) {} + + public: + KeyIterator(const KeyIterator& keyIterator); + KeyIterator& operator=(const KeyIterator& keyIterator); + virtual ~KeyIterator() {} + + public: + bool hasNext(); + const String8& next(); + + private: + DrmInfoEvent* mDrmInfoEvent; + unsigned int mIndex; + }; + + /** + * Iterator + */ + class Iterator { + friend class DrmInfoEvent; + + private: + Iterator(const DrmInfoEvent* drmInfoEvent) + : mDrmInfoEvent(const_cast (drmInfoEvent)), mIndex(0) {} + + public: + Iterator(const Iterator& iterator); + Iterator& operator=(const Iterator& iterator); + virtual ~Iterator() {} + + public: + bool hasNext(); + const String8& next(); + + private: + DrmInfoEvent* mDrmInfoEvent; + unsigned int mIndex; + }; public: /** @@ -106,10 +160,69 @@ public: */ const String8 getMessage() const; + /** + * Returns the number of attributes contained in this instance + * + * @return Number of attributes + */ + int getCount() const; + + /** + * Adds optional information as pair to this instance + * + * @param[in] key Key to add + * @param[in] value Value to add + * @return Returns the error code + */ + status_t put(const String8& key, String8& value); + + /** + * Retrieves the value of given key + * + * @param key Key whose value to be retrieved + * @return The value + */ + const String8 get(const String8& key) const; + + /** + * Returns KeyIterator object to walk through the keys associated with this instance + * + * @return KeyIterator object + */ + KeyIterator keyIterator() const; + + /** + * Returns Iterator object to walk through the values associated with this instance + * + * @return Iterator object + */ + Iterator iterator() const; + + /** + * Returns the Binary information associated with this instance + * + * @return Binary information + */ + const DrmBuffer& getData() const; + + /** + * Sets the Binary information associated with this instance. + * Data in drmBuffer are copied to newly allocated buffer. + * + * @param[in] drmBuffer Binary information associated with this instance + */ + void setData(const DrmBuffer& drmBuffer); + +private: + DrmInfoEvent(const DrmInfoEvent& ref); + const DrmInfoEvent& operator=(const DrmInfoEvent& ref); + private: int mUniqueId; int mInfoType; const String8 mMessage; + KeyedVector mAttributes; + DrmBuffer mDrmBuffer; }; }; -- cgit v1.1 From 48326940f48390e79476e5ce7c2a18b8201cdafc Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Fri, 23 Aug 2013 13:49:38 -0700 Subject: Notify the media scanner Use an "am broadcast" command to notify the media scanner that a new video file is available. 
Bug 10096103 Change-Id: I8261d81d96832969ebb9031a9766c1b1f2a569ed --- cmds/screenrecord/screenrecord.cpp | 38 ++++++++++++++++++++++++++++++++++++-- 1 file changed, 36 insertions(+), 2 deletions(-)
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp index 3f8567c..ceda908 100644 --- a/cmds/screenrecord/screenrecord.cpp +++ b/cmds/screenrecord/screenrecord.cpp
@@ -35,6 +35,8 @@ #include #include +#include +#include #include #include #include
@@ -138,7 +140,7 @@ static status_t prepareEncoder(float displayFps, sp* pCodec, format->setFloat("frame-rate", displayFps); format->setInt32("i-frame-interval", 10); - /// MediaCodec + // MediaCodec sp looper = new ALooper; looper->setName("screenrecord_looper"); looper->start();
@@ -370,10 +372,15 @@ static status_t runEncoder(const sp& encoder, if (err != NO_ERROR) { fprintf(stderr, "Unable to get new output buffers (err=%d)\n", err); + return err; } break; + case INVALID_OPERATION: + fprintf(stderr, "Request for encoder buffer failed\n"); + return err; default: - ALOGW("Got weird result %d from dequeueOutputBuffer", err); + fprintf(stderr, + "Got weird result %d from dequeueOutputBuffer\n", err); return err; } }
@@ -477,6 +484,29 @@ static status_t recordScreen(const char* fileName) { } /* + * Sends a broadcast to the media scanner to tell it about the new video. + */ +static status_t notifyMediaScanner(const char* fileName) { + String8 command("am broadcast -a android.intent.action.MEDIA_SCANNER_SCAN_FILE -d file://"); + command.append(fileName); + if (gVerbose) { + printf("Shell: %s\n", command.string()); + } + + // TODO: for non-verbose mode we should suppress stdout + int status = system(command.string()); + if (status < 0) { + fprintf(stderr, "Unable to fork shell for media scanner broadcast\n"); + return UNKNOWN_ERROR; + } else if (status != 0) { + fprintf(stderr, "am command failed (status=%d): '%s'\n", + status, command.string()); + return UNKNOWN_ERROR; + } + return NO_ERROR; +} + +/* * Parses a string of the form "1280x720". * * Returns true on success.
@@ -609,6 +639,10 @@ int main(int argc, char* const argv[]) { close(fd); status_t err = recordScreen(fileName); + if (err == NO_ERROR) { + // Try to notify the media scanner. Not fatal if this fails. + notifyMediaScanner(fileName); + } ALOGD(err == NO_ERROR ? "success" : "failed"); return (int) err; }
-- cgit v1.1
From ec9a032c2e104ab1e3b5bf73e69dab1408ced0ad Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Wed, 28 Aug 2013 10:23:01 -0700 Subject: AudioTrack: fix unwanted underrun when restarting
When restarting an AudioTrack from stopped state, it is necessary to force a refresh of mRemainingFrames by processAudioBuffer(), as the last write before stop() could be partial. Not doing so will lead to unnecessary sleep before filling the non-contiguous part of the buffer returned by obtainBuffer() when processAudioBuffer() is executed for the first time after start().
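A minimal client-side sequence that exercises this path (illustrative only: the constructor arguments, buffer, and sizes are placeholders, and error checking is omitted):

    #include <media/AudioTrack.h>

    using namespace android;

    static void restartAfterPartialWrite(const void* pcm, size_t chunkBytes, size_t shortBytes) {
        sp<AudioTrack> track = new AudioTrack(
                AUDIO_STREAM_MUSIC, 44100, AUDIO_FORMAT_PCM_16_BIT,
                AUDIO_CHANNEL_OUT_STEREO);

        track->start();
        track->write(pcm, shortBytes);   // partial final write before stopping
        track->stop();

        track->start();                  // sets mRefreshRemaining, so processAudioBuffer()
                                         // recomputes mRemainingFrames on its next run
        track->write(pcm, chunkBytes);   // fills promptly instead of sleeping first
    }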
Change-Id: Id703f8dc092a6f07c905eee194054b4a978f979d --- media/libmedia/AudioTrack.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index dd0ec73..214e789 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -387,6 +387,9 @@ status_t AudioTrack::start() if (previousState == STATE_STOPPED || previousState == STATE_FLUSHED) { // reset current position as seen by client to 0 mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition()); + // force refresh of remaining frames by processAudioBuffer() as last + // write before stop could be partial. + mRefreshRemaining = true; } mNewPosition = mProxy->getPosition() + mUpdatePeriod; int32_t flags = android_atomic_and(~CBLK_DISABLED, &mCblk->mFlags); -- cgit v1.1 From 2c041c1c927fdbf60f53f1a1960e5155a79007b7 Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Wed, 28 Aug 2013 15:22:11 -0700 Subject: Add time limit to screenrecord Recordings are now limited to 3 minutes. The time can be set lower with the --time-limit option. Bug 10529645 Change-Id: I6ac19b9da8d616d38262414c2e821d56efcec55d --- cmds/screenrecord/screenrecord.cpp | 63 +++++++++++++++++++++++++++----------- 1 file changed, 45 insertions(+), 18 deletions(-) diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp index ceda908..d027ba9 100644 --- a/cmds/screenrecord/screenrecord.cpp +++ b/cmds/screenrecord/screenrecord.cpp @@ -21,6 +21,7 @@ #include #include #include +#include #include #include @@ -44,6 +45,12 @@ using namespace android; +static const uint32_t kMinBitRate = 100000; // 0.1Mbps +static const uint32_t kMaxBitRate = 100 * 1000000; // 100Mbps +static const uint32_t kMaxTimeLimitSec = 180; // 3 minutes +static const uint32_t kFallbackWidth = 1280; // 720p +static const uint32_t kFallbackHeight = 720; + // Command-line parameters. static bool gVerbose = false; // chatty on stdout static bool gRotate = false; // rotate 90 degrees @@ -51,6 +58,7 @@ static bool gSizeSpecified = false; // was size explicitly requested? static uint32_t gVideoWidth = 0; // default width+height static uint32_t gVideoHeight = 0; static uint32_t gBitRate = 4000000; // 4Mbps +static uint32_t gTimeLimitSec = kMaxTimeLimitSec; // Set by signal handler to stop recording. static bool gStopRequested; @@ -59,8 +67,6 @@ static bool gStopRequested; static struct sigaction gOrigSigactionINT; static struct sigaction gOrigSigactionHUP; -static const uint32_t kMinBitRate = 100000; // 0.1Mbps -static const uint32_t kMaxBitRate = 100 * 1000000; // 100Mbps /* * Catch keyboard interrupt signals. 
On receipt, the "stop requested" @@ -72,9 +78,8 @@ static void signalCatcher(int signum) gStopRequested = true; switch (signum) { case SIGINT: - sigaction(SIGINT, &gOrigSigactionINT, NULL); - break; case SIGHUP: + sigaction(SIGINT, &gOrigSigactionINT, NULL); sigaction(SIGHUP, &gOrigSigactionHUP, NULL); break; default: @@ -140,7 +145,6 @@ static status_t prepareEncoder(float displayFps, sp* pCodec, format->setFloat("frame-rate", displayFps); format->setInt32("i-frame-interval", 10); - // MediaCodec sp looper = new ALooper; looper->setName("screenrecord_looper"); looper->start(); @@ -281,7 +285,8 @@ static status_t runEncoder(const sp& encoder, status_t err; ssize_t trackIdx = -1; uint32_t debugNumFrames = 0; - time_t debugStartWhen = time(NULL); + int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC); + int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec); Vector > buffers; err = encoder->getOutputBuffers(&buffers); @@ -298,6 +303,14 @@ static status_t runEncoder(const sp& encoder, size_t bufIndex, offset, size; int64_t ptsUsec; uint32_t flags; + + if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) { + if (gVerbose) { + printf("Time limit reached\n"); + } + break; + } + ALOGV("Calling dequeueOutputBuffer"); err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec, &flags, kTimeout); @@ -347,7 +360,6 @@ static status_t runEncoder(const sp& encoder, } break; case -EAGAIN: // INFO_TRY_AGAIN_LATER - // not expected with infinite timeout ALOGV("Got -EAGAIN, looping"); break; case INFO_FORMAT_CHANGED: // INFO_OUTPUT_FORMAT_CHANGED @@ -387,8 +399,9 @@ static status_t runEncoder(const sp& encoder, ALOGV("Encoder stopping (req=%d)", gStopRequested); if (gVerbose) { - printf("Encoder stopping; recorded %u frames in %ld seconds\n", - debugNumFrames, time(NULL) - debugStartWhen); + printf("Encoder stopping; recorded %u frames in %lld seconds\n", + debugNumFrames, + nanoseconds_to_seconds(systemTime(CLOCK_MONOTONIC) - startWhenNsec)); } return NO_ERROR; } @@ -439,12 +452,12 @@ static status_t recordScreen(const char* fileName) { sp bufferProducer; err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer); if (err != NO_ERROR && !gSizeSpecified) { - ALOGV("Retrying with 720p"); - if (gVideoWidth != 1280 && gVideoHeight != 720) { + if (gVideoWidth != kFallbackWidth && gVideoHeight != kFallbackHeight) { + ALOGV("Retrying with 720p"); fprintf(stderr, "WARNING: failed at %dx%d, retrying at 720p\n", gVideoWidth, gVideoHeight); - gVideoWidth = 1280; - gVideoHeight = 720; + gVideoWidth = kFallbackWidth; + gVideoHeight = kFallbackHeight; err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer); } } @@ -544,10 +557,13 @@ static void usage() { "\n" "Options:\n" "--size WIDTHxHEIGHT\n" - " Set the video size, e.g. \"1280x720\". For best results, use\n" - " a size supported by the AVC encoder.\n" + " Set the video size, e.g. \"1280x720\". Default is the device's main\n" + " display resolution (if supported), 1280x720 if not. For best results,\n" + " use a size supported by the AVC encoder.\n" "--bit-rate RATE\n" - " Set the video bit rate, in megabits per second. Default 4Mbps.\n" + " Set the video bit rate, in megabits per second. Default %dMbps.\n" + "--time-limit TIME\n" + " Set the maximum recording time, in seconds. 
Default / maximum is %d.\n" "--rotate\n" " Rotate the output 90 degrees.\n" "--verbose\n" @@ -555,8 +571,9 @@ static void usage() { "--help\n" " Show this message.\n" "\n" - "Recording continues until Ctrl-C is hit.\n" - "\n" + "Recording continues until Ctrl-C is hit or the time limit is reached.\n" + "\n", + gBitRate / 1000000, gTimeLimitSec ); } @@ -569,6 +586,7 @@ int main(int argc, char* const argv[]) { { "verbose", no_argument, NULL, 'v' }, { "size", required_argument, NULL, 's' }, { "bit-rate", required_argument, NULL, 'b' }, + { "time-limit", required_argument, NULL, 't' }, { "rotate", no_argument, NULL, 'r' }, { NULL, 0, NULL, 0 } }; @@ -610,6 +628,15 @@ int main(int argc, char* const argv[]) { return 2; } break; + case 't': + gTimeLimitSec = atoi(optarg); + if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) { + fprintf(stderr, + "Time limit %ds outside acceptable range [1,%d]\n", + gTimeLimitSec, kMaxTimeLimitSec); + return 2; + } + break; case 'r': gRotate = true; break; -- cgit v1.1 From ee7e77d55d510725a314d8ed36dc730c21af6173 Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Wed, 28 Aug 2013 16:40:41 -0700 Subject: fix MediaDrm.isCryptoSchemeSupported(uuid) 1. Don't expect plugins to support an empty mimeType in isContentTypeSupported 2. Move the cts test mock drm plugin to the cts tree so it is always used b/10528466 Change-Id: I6023f6165b1e9d294986f7e5cd0896e056e376f1 --- drm/mediadrm/plugins/mock/Android.mk | 38 -- drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp | 705 ---------------------- drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h | 156 ----- media/libmediaplayerservice/Drm.cpp | 6 +- 4 files changed, 5 insertions(+), 900 deletions(-) delete mode 100644 drm/mediadrm/plugins/mock/Android.mk delete mode 100644 drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp delete mode 100644 drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h diff --git a/drm/mediadrm/plugins/mock/Android.mk b/drm/mediadrm/plugins/mock/Android.mk deleted file mode 100644 index ada23a2..0000000 --- a/drm/mediadrm/plugins/mock/Android.mk +++ /dev/null @@ -1,38 +0,0 @@ -# -# Copyright (C) 2013 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -LOCAL_PATH:= $(call my-dir) -include $(CLEAR_VARS) - -LOCAL_SRC_FILES:= \ - MockDrmCryptoPlugin.cpp - -LOCAL_MODULE := libmockdrmcryptoplugin - -LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_SHARED_LIBRARIES)/mediadrm - -LOCAL_SHARED_LIBRARIES := \ - libutils liblog - -LOCAL_C_INCLUDES += \ - $(TOP)/frameworks/av/include \ - $(TOP)/frameworks/native/include/media - -# Set the following flag to enable the decryption passthru flow -#LOCAL_CFLAGS += -DENABLE_PASSTHRU_DECRYPTION - -LOCAL_MODULE_TAGS := optional - -include $(BUILD_SHARED_LIBRARY) diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp deleted file mode 100644 index 4770db0..0000000 --- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp +++ /dev/null @@ -1,705 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "MockDrmCryptoPlugin" -#include - - -#include "drm/DrmAPI.h" -#include "MockDrmCryptoPlugin.h" -#include "media/stagefright/MediaErrors.h" - -using namespace android; - -// Shared library entry point -DrmFactory *createDrmFactory() -{ - return new MockDrmFactory(); -} - -// Shared library entry point -CryptoFactory *createCryptoFactory() -{ - return new MockCryptoFactory(); -} - -const uint8_t mock_uuid[16] = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, - 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10}; - -namespace android { - - // MockDrmFactory - bool MockDrmFactory::isCryptoSchemeSupported(const uint8_t uuid[16]) - { - return (!memcmp(uuid, mock_uuid, sizeof(uuid))); - } - - bool MockDrmFactory::isContentTypeSupported(const String8 &mimeType) - { - if (mimeType != "" && mimeType != "video/mp4") { - return false; - } - return true; - } - - status_t MockDrmFactory::createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin) - { - *plugin = new MockDrmPlugin(); - return OK; - } - - // MockCryptoFactory - bool MockCryptoFactory::isCryptoSchemeSupported(const uint8_t uuid[16]) const - { - return (!memcmp(uuid, mock_uuid, sizeof(uuid))); - } - - status_t MockCryptoFactory::createPlugin(const uint8_t uuid[16], const void *data, - size_t size, CryptoPlugin **plugin) - { - *plugin = new MockCryptoPlugin(); - return OK; - } - - - // MockDrmPlugin methods - - status_t MockDrmPlugin::openSession(Vector &sessionId) - { - const size_t kSessionIdSize = 8; - - Mutex::Autolock lock(mLock); - for (size_t i = 0; i < kSessionIdSize / sizeof(long); i++) { - long r = random(); - sessionId.appendArray((uint8_t *)&r, sizeof(long)); - } - mSessions.add(sessionId); - - ALOGD("MockDrmPlugin::openSession() -> %s", vectorToString(sessionId).string()); - return OK; - } - - status_t MockDrmPlugin::closeSession(Vector const &sessionId) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::closeSession(%s)", vectorToString(sessionId).string()); - ssize_t index = findSession(sessionId); - if (index == kNotFound) { - ALOGD("Invalid sessionId"); - return BAD_VALUE; - } 
- mSessions.removeAt(index); - return OK; - } - - - status_t MockDrmPlugin::getKeyRequest(Vector const &sessionId, - Vector const &initData, - String8 const &mimeType, KeyType keyType, - KeyedVector const &optionalParameters, - Vector &request, String8 &defaultUrl) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::getKeyRequest(sessionId=%s, initData=%s, mimeType=%s" - ", keyType=%d, optionalParameters=%s))", - vectorToString(sessionId).string(), vectorToString(initData).string(), mimeType.string(), - keyType, stringMapToString(optionalParameters).string()); - - ssize_t index = findSession(sessionId); - if (index == kNotFound) { - ALOGD("Invalid sessionId"); - return BAD_VALUE; - } - - // Properties used in mock test, set by mock plugin and verifed cts test app - // byte[] initData -> mock-initdata - // string mimeType -> mock-mimetype - // string keyType -> mock-keytype - // string optionalParameters -> mock-optparams formatted as {key1,value1},{key2,value2} - - mByteArrayProperties.add(String8("mock-initdata"), initData); - mStringProperties.add(String8("mock-mimetype"), mimeType); - - String8 keyTypeStr; - keyTypeStr.appendFormat("%d", (int)keyType); - mStringProperties.add(String8("mock-keytype"), keyTypeStr); - - String8 params; - for (size_t i = 0; i < optionalParameters.size(); i++) { - params.appendFormat("%s{%s,%s}", i ? "," : "", - optionalParameters.keyAt(i).string(), - optionalParameters.valueAt(i).string()); - } - mStringProperties.add(String8("mock-optparams"), params); - - // Properties used in mock test, set by cts test app returned from mock plugin - // byte[] mock-request -> request - // string mock-default-url -> defaultUrl - - index = mByteArrayProperties.indexOfKey(String8("mock-request")); - if (index < 0) { - ALOGD("Missing 'mock-request' parameter for mock"); - return BAD_VALUE; - } else { - request = mByteArrayProperties.valueAt(index); - } - - index = mStringProperties.indexOfKey(String8("mock-defaultUrl")); - if (index < 0) { - ALOGD("Missing 'mock-defaultUrl' parameter for mock"); - return BAD_VALUE; - } else { - defaultUrl = mStringProperties.valueAt(index); - } - return OK; - } - - status_t MockDrmPlugin::provideKeyResponse(Vector const &sessionId, - Vector const &response, - Vector &keySetId) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::provideKeyResponse(sessionId=%s, response=%s)", - vectorToString(sessionId).string(), vectorToString(response).string()); - ssize_t index = findSession(sessionId); - if (index == kNotFound) { - ALOGD("Invalid sessionId"); - return BAD_VALUE; - } - if (response.size() == 0) { - return BAD_VALUE; - } - - // Properties used in mock test, set by mock plugin and verifed cts test app - // byte[] response -> mock-response - mByteArrayProperties.add(String8("mock-response"), response); - - const size_t kKeySetIdSize = 8; - - for (size_t i = 0; i < kKeySetIdSize / sizeof(long); i++) { - long r = random(); - keySetId.appendArray((uint8_t *)&r, sizeof(long)); - } - mKeySets.add(keySetId); - - return OK; - } - - status_t MockDrmPlugin::removeKeys(Vector const &keySetId) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::removeKeys(keySetId=%s)", - vectorToString(keySetId).string()); - - ssize_t index = findKeySet(keySetId); - if (index == kNotFound) { - ALOGD("Invalid keySetId"); - return BAD_VALUE; - } - mKeySets.removeAt(index); - - return OK; - } - - status_t MockDrmPlugin::restoreKeys(Vector const &sessionId, - Vector const &keySetId) - { - Mutex::Autolock lock(mLock); - 
ALOGD("MockDrmPlugin::restoreKeys(sessionId=%s, keySetId=%s)", - vectorToString(sessionId).string(), - vectorToString(keySetId).string()); - ssize_t index = findSession(sessionId); - if (index == kNotFound) { - ALOGD("Invalid sessionId"); - return BAD_VALUE; - } - - index = findKeySet(keySetId); - if (index == kNotFound) { - ALOGD("Invalid keySetId"); - return BAD_VALUE; - } - - return OK; - } - - status_t MockDrmPlugin::queryKeyStatus(Vector const &sessionId, - KeyedVector &infoMap) const - { - ALOGD("MockDrmPlugin::queryKeyStatus(sessionId=%s)", - vectorToString(sessionId).string()); - - ssize_t index = findSession(sessionId); - if (index == kNotFound) { - ALOGD("Invalid sessionId"); - return BAD_VALUE; - } - - infoMap.add(String8("purchaseDuration"), String8("1000")); - infoMap.add(String8("licenseDuration"), String8("100")); - return OK; - } - - status_t MockDrmPlugin::getProvisionRequest(Vector &request, - String8 &defaultUrl) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::getProvisionRequest()"); - - // Properties used in mock test, set by cts test app returned from mock plugin - // byte[] mock-request -> request - // string mock-default-url -> defaultUrl - - ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-request")); - if (index < 0) { - ALOGD("Missing 'mock-request' parameter for mock"); - return BAD_VALUE; - } else { - request = mByteArrayProperties.valueAt(index); - } - - index = mStringProperties.indexOfKey(String8("mock-defaultUrl")); - if (index < 0) { - ALOGD("Missing 'mock-defaultUrl' parameter for mock"); - return BAD_VALUE; - } else { - defaultUrl = mStringProperties.valueAt(index); - } - return OK; - } - - status_t MockDrmPlugin::provideProvisionResponse(Vector const &response) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::provideProvisionResponse(%s)", - vectorToString(response).string()); - - // Properties used in mock test, set by mock plugin and verifed cts test app - // byte[] response -> mock-response - - mByteArrayProperties.add(String8("mock-response"), response); - return OK; - } - - status_t MockDrmPlugin::getSecureStops(List > &secureStops) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::getSecureStops()"); - - // Properties used in mock test, set by cts test app returned from mock plugin - // byte[] mock-secure-stop1 -> first secure stop in list - // byte[] mock-secure-stop2 -> second secure stop in list - - Vector ss1, ss2; - ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-secure-stop1")); - if (index < 0) { - ALOGD("Missing 'mock-secure-stop1' parameter for mock"); - return BAD_VALUE; - } else { - ss1 = mByteArrayProperties.valueAt(index); - } - - index = mByteArrayProperties.indexOfKey(String8("mock-secure-stop2")); - if (index < 0) { - ALOGD("Missing 'mock-secure-stop2' parameter for mock"); - return BAD_VALUE; - } else { - ss2 = mByteArrayProperties.valueAt(index); - } - - secureStops.push_back(ss1); - secureStops.push_back(ss2); - return OK; - } - - status_t MockDrmPlugin::releaseSecureStops(Vector const &ssRelease) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::releaseSecureStops(%s)", - vectorToString(ssRelease).string()); - - // Properties used in mock test, set by mock plugin and verifed cts test app - // byte[] secure-stop-release -> mock-ssrelease - mByteArrayProperties.add(String8("mock-ssrelease"), ssRelease); - - return OK; - } - - status_t MockDrmPlugin::getPropertyString(String8 const &name, String8 &value) const - { - ALOGD("MockDrmPlugin::getPropertyString(name=%s)", 
name.string()); - ssize_t index = mStringProperties.indexOfKey(name); - if (index < 0) { - ALOGD("no property for '%s'", name.string()); - return BAD_VALUE; - } - value = mStringProperties.valueAt(index); - return OK; - } - - status_t MockDrmPlugin::getPropertyByteArray(String8 const &name, - Vector &value) const - { - ALOGD("MockDrmPlugin::getPropertyByteArray(name=%s)", name.string()); - ssize_t index = mByteArrayProperties.indexOfKey(name); - if (index < 0) { - ALOGD("no property for '%s'", name.string()); - return BAD_VALUE; - } - value = mByteArrayProperties.valueAt(index); - return OK; - } - - status_t MockDrmPlugin::setPropertyString(String8 const &name, - String8 const &value) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::setPropertyString(name=%s, value=%s)", - name.string(), value.string()); - - if (name == "mock-send-event") { - unsigned code, extra; - sscanf(value.string(), "%d %d", &code, &extra); - DrmPlugin::EventType eventType = (DrmPlugin::EventType)code; - - Vector const *pSessionId = NULL; - ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-event-session-id")); - if (index >= 0) { - pSessionId = &mByteArrayProperties[index]; - } - - Vector const *pData = NULL; - index = mByteArrayProperties.indexOfKey(String8("mock-event-data")); - if (index >= 0) { - pData = &mByteArrayProperties[index]; - } - ALOGD("sending event from mock drm plugin: %d %d %s %s", - (int)code, extra, pSessionId ? vectorToString(*pSessionId) : "{}", - pData ? vectorToString(*pData) : "{}"); - - sendEvent(eventType, extra, pSessionId, pData); - } else { - mStringProperties.add(name, value); - } - return OK; - } - - status_t MockDrmPlugin::setPropertyByteArray(String8 const &name, - Vector const &value) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::setPropertyByteArray(name=%s, value=%s)", - name.string(), vectorToString(value).string()); - mByteArrayProperties.add(name, value); - return OK; - } - - status_t MockDrmPlugin::setCipherAlgorithm(Vector const &sessionId, - String8 const &algorithm) - { - Mutex::Autolock lock(mLock); - - ALOGD("MockDrmPlugin::setCipherAlgorithm(sessionId=%s, algorithm=%s)", - vectorToString(sessionId).string(), algorithm.string()); - - ssize_t index = findSession(sessionId); - if (index == kNotFound) { - ALOGD("Invalid sessionId"); - return BAD_VALUE; - } - - if (algorithm == "AES/CBC/NoPadding") { - return OK; - } - return BAD_VALUE; - } - - status_t MockDrmPlugin::setMacAlgorithm(Vector const &sessionId, - String8 const &algorithm) - { - Mutex::Autolock lock(mLock); - - ALOGD("MockDrmPlugin::setMacAlgorithm(sessionId=%s, algorithm=%s)", - vectorToString(sessionId).string(), algorithm.string()); - - ssize_t index = findSession(sessionId); - if (index == kNotFound) { - ALOGD("Invalid sessionId"); - return BAD_VALUE; - } - - if (algorithm == "HmacSHA256") { - return OK; - } - return BAD_VALUE; - } - - status_t MockDrmPlugin::encrypt(Vector const &sessionId, - Vector const &keyId, - Vector const &input, - Vector const &iv, - Vector &output) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::encrypt(sessionId=%s, keyId=%s, input=%s, iv=%s)", - vectorToString(sessionId).string(), - vectorToString(keyId).string(), - vectorToString(input).string(), - vectorToString(iv).string()); - - ssize_t index = findSession(sessionId); - if (index == kNotFound) { - ALOGD("Invalid sessionId"); - return BAD_VALUE; - } - - // Properties used in mock test, set by mock plugin and verifed cts test app - // byte[] keyId -> mock-keyid - // byte[] input -> 
mock-input - // byte[] iv -> mock-iv - mByteArrayProperties.add(String8("mock-keyid"), keyId); - mByteArrayProperties.add(String8("mock-input"), input); - mByteArrayProperties.add(String8("mock-iv"), iv); - - // Properties used in mock test, set by cts test app returned from mock plugin - // byte[] mock-output -> output - index = mByteArrayProperties.indexOfKey(String8("mock-output")); - if (index < 0) { - ALOGD("Missing 'mock-request' parameter for mock"); - return BAD_VALUE; - } else { - output = mByteArrayProperties.valueAt(index); - } - return OK; - } - - status_t MockDrmPlugin::decrypt(Vector const &sessionId, - Vector const &keyId, - Vector const &input, - Vector const &iv, - Vector &output) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::decrypt(sessionId=%s, keyId=%s, input=%s, iv=%s)", - vectorToString(sessionId).string(), - vectorToString(keyId).string(), - vectorToString(input).string(), - vectorToString(iv).string()); - - ssize_t index = findSession(sessionId); - if (index == kNotFound) { - ALOGD("Invalid sessionId"); - return BAD_VALUE; - } - - // Properties used in mock test, set by mock plugin and verifed cts test app - // byte[] keyId -> mock-keyid - // byte[] input -> mock-input - // byte[] iv -> mock-iv - mByteArrayProperties.add(String8("mock-keyid"), keyId); - mByteArrayProperties.add(String8("mock-input"), input); - mByteArrayProperties.add(String8("mock-iv"), iv); - - // Properties used in mock test, set by cts test app returned from mock plugin - // byte[] mock-output -> output - index = mByteArrayProperties.indexOfKey(String8("mock-output")); - if (index < 0) { - ALOGD("Missing 'mock-request' parameter for mock"); - return BAD_VALUE; - } else { - output = mByteArrayProperties.valueAt(index); - } - return OK; - } - - status_t MockDrmPlugin::sign(Vector const &sessionId, - Vector const &keyId, - Vector const &message, - Vector &signature) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::sign(sessionId=%s, keyId=%s, message=%s)", - vectorToString(sessionId).string(), - vectorToString(keyId).string(), - vectorToString(message).string()); - - ssize_t index = findSession(sessionId); - if (index == kNotFound) { - ALOGD("Invalid sessionId"); - return BAD_VALUE; - } - - // Properties used in mock test, set by mock plugin and verifed cts test app - // byte[] keyId -> mock-keyid - // byte[] message -> mock-message - mByteArrayProperties.add(String8("mock-keyid"), keyId); - mByteArrayProperties.add(String8("mock-message"), message); - - // Properties used in mock test, set by cts test app returned from mock plugin - // byte[] mock-signature -> signature - index = mByteArrayProperties.indexOfKey(String8("mock-signature")); - if (index < 0) { - ALOGD("Missing 'mock-request' parameter for mock"); - return BAD_VALUE; - } else { - signature = mByteArrayProperties.valueAt(index); - } - return OK; - } - - status_t MockDrmPlugin::verify(Vector const &sessionId, - Vector const &keyId, - Vector const &message, - Vector const &signature, - bool &match) - { - Mutex::Autolock lock(mLock); - ALOGD("MockDrmPlugin::verify(sessionId=%s, keyId=%s, message=%s, signature=%s)", - vectorToString(sessionId).string(), - vectorToString(keyId).string(), - vectorToString(message).string(), - vectorToString(signature).string()); - - ssize_t index = findSession(sessionId); - if (index == kNotFound) { - ALOGD("Invalid sessionId"); - return BAD_VALUE; - } - - // Properties used in mock test, set by mock plugin and verifed cts test app - // byte[] keyId -> mock-keyid - // byte[] message 
-> mock-message - // byte[] signature -> mock-signature - mByteArrayProperties.add(String8("mock-keyid"), keyId); - mByteArrayProperties.add(String8("mock-message"), message); - mByteArrayProperties.add(String8("mock-signature"), signature); - - // Properties used in mock test, set by cts test app returned from mock plugin - // String mock-match "1" or "0" -> match - index = mStringProperties.indexOfKey(String8("mock-match")); - if (index < 0) { - ALOGD("Missing 'mock-request' parameter for mock"); - return BAD_VALUE; - } else { - match = atol(mStringProperties.valueAt(index).string()); - } - return OK; - } - - ssize_t MockDrmPlugin::findSession(Vector const &sessionId) const - { - ALOGD("findSession: nsessions=%d, size=%d", mSessions.size(), sessionId.size()); - for (size_t i = 0; i < mSessions.size(); ++i) { - if (memcmp(mSessions[i].array(), sessionId.array(), sessionId.size()) == 0) { - return i; - } - } - return kNotFound; - } - - ssize_t MockDrmPlugin::findKeySet(Vector const &keySetId) const - { - ALOGD("findKeySet: nkeySets=%d, size=%d", mKeySets.size(), keySetId.size()); - for (size_t i = 0; i < mKeySets.size(); ++i) { - if (memcmp(mKeySets[i].array(), keySetId.array(), keySetId.size()) == 0) { - return i; - } - } - return kNotFound; - } - - - // Conversion utilities - String8 MockDrmPlugin::vectorToString(Vector const &vector) const - { - return arrayToString(vector.array(), vector.size()); - } - - String8 MockDrmPlugin::arrayToString(uint8_t const *array, size_t len) const - { - String8 result("{ "); - for (size_t i = 0; i < len; i++) { - result.appendFormat("0x%02x ", array[i]); - } - result += "}"; - return result; - } - - String8 MockDrmPlugin::stringMapToString(KeyedVector map) const - { - String8 result("{ "); - for (size_t i = 0; i < map.size(); i++) { - result.appendFormat("%s{name=%s, value=%s}", i > 0 ? 
", " : "", - map.keyAt(i).string(), map.valueAt(i).string()); - } - return result + " }"; - } - - bool operator<(Vector const &lhs, Vector const &rhs) { - return lhs.size() < rhs.size() || (memcmp(lhs.array(), rhs.array(), lhs.size()) < 0); - } - - // - // Crypto Plugin - // - - bool MockCryptoPlugin::requiresSecureDecoderComponent(const char *mime) const - { - ALOGD("MockCryptoPlugin::requiresSecureDecoderComponent(mime=%s)", mime); - return false; - } - - ssize_t - MockCryptoPlugin::decrypt(bool secure, const uint8_t key[16], const uint8_t iv[16], - Mode mode, const void *srcPtr, const SubSample *subSamples, - size_t numSubSamples, void *dstPtr, AString *errorDetailMsg) - { - ALOGD("MockCryptoPlugin::decrypt(secure=%d, key=%s, iv=%s, mode=%d, src=%p, " - "subSamples=%s, dst=%p)", - (int)secure, - arrayToString(key, sizeof(key)).string(), - arrayToString(iv, sizeof(iv)).string(), - (int)mode, srcPtr, - subSamplesToString(subSamples, numSubSamples).string(), - dstPtr); - return OK; - } - - // Conversion utilities - String8 MockCryptoPlugin::arrayToString(uint8_t const *array, size_t len) const - { - String8 result("{ "); - for (size_t i = 0; i < len; i++) { - result.appendFormat("0x%02x ", array[i]); - } - result += "}"; - return result; - } - - String8 MockCryptoPlugin::subSamplesToString(SubSample const *subSamples, - size_t numSubSamples) const - { - String8 result; - for (size_t i = 0; i < numSubSamples; i++) { - result.appendFormat("[%d] {clear:%d, encrypted:%d} ", i, - subSamples[i].mNumBytesOfClearData, - subSamples[i].mNumBytesOfEncryptedData); - } - return result; - } - -}; diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h deleted file mode 100644 index 2297f9b..0000000 --- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#include - -#include "drm/DrmAPI.h" -#include "hardware/CryptoAPI.h" - -extern "C" { - android::DrmFactory *createDrmFactory(); - android::CryptoFactory *createCryptoFactory(); -} - -namespace android { - - class MockDrmFactory : public DrmFactory { - public: - MockDrmFactory() {} - virtual ~MockDrmFactory() {} - - bool isCryptoSchemeSupported(const uint8_t uuid[16]); - bool isContentTypeSupported(const String8 &mimeType); - status_t createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin); - }; - - class MockCryptoFactory : public CryptoFactory { - public: - MockCryptoFactory() {} - virtual ~MockCryptoFactory() {} - - bool isCryptoSchemeSupported(const uint8_t uuid[16]) const; - status_t createPlugin( - const uint8_t uuid[16], const void *data, size_t size, - CryptoPlugin **plugin); - }; - - - - class MockDrmPlugin : public DrmPlugin { - public: - MockDrmPlugin() {} - virtual ~MockDrmPlugin() {} - - // from DrmPlugin - status_t openSession(Vector &sessionId); - status_t closeSession(Vector const &sessionId); - - status_t getKeyRequest(Vector const &sessionId, - Vector const &initData, - String8 const &mimeType, KeyType keyType, - KeyedVector const &optionalParameters, - Vector &request, String8 &defaultUrl); - - status_t provideKeyResponse(Vector const &sessionId, - Vector const &response, - Vector &keySetId); - - status_t removeKeys(Vector const &keySetId); - - status_t restoreKeys(Vector const &sessionId, - Vector const &keySetId); - - status_t queryKeyStatus(Vector const &sessionId, - KeyedVector &infoMap) const; - - status_t getProvisionRequest(Vector &request, - String8 &defaultUrl); - - status_t provideProvisionResponse(Vector const &response); - - status_t getSecureStops(List > &secureStops); - status_t releaseSecureStops(Vector const &ssRelease); - - status_t getPropertyString(String8 const &name, String8 &value ) const; - status_t getPropertyByteArray(String8 const &name, - Vector &value ) const; - - status_t setPropertyString(String8 const &name, - String8 const &value ); - status_t setPropertyByteArray(String8 const &name, - Vector const &value ); - - status_t setCipherAlgorithm(Vector const &sessionId, - String8 const &algorithm); - - status_t setMacAlgorithm(Vector const &sessionId, - String8 const &algorithm); - - status_t encrypt(Vector const &sessionId, - Vector const &keyId, - Vector const &input, - Vector const &iv, - Vector &output); - - status_t decrypt(Vector const &sessionId, - Vector const &keyId, - Vector const &input, - Vector const &iv, - Vector &output); - - status_t sign(Vector const &sessionId, - Vector const &keyId, - Vector const &message, - Vector &signature); - - status_t verify(Vector const &sessionId, - Vector const &keyId, - Vector const &message, - Vector const &signature, - bool &match); - - private: - String8 vectorToString(Vector const &vector) const; - String8 arrayToString(uint8_t const *array, size_t len) const; - String8 stringMapToString(KeyedVector map) const; - - SortedVector > mSessions; - SortedVector > mKeySets; - - static const ssize_t kNotFound = -1; - ssize_t findSession(Vector const &sessionId) const; - ssize_t findKeySet(Vector const &keySetId) const; - - Mutex mLock; - KeyedVector mStringProperties; - KeyedVector > mByteArrayProperties; - }; - - - class MockCryptoPlugin : public CryptoPlugin { - - bool requiresSecureDecoderComponent(const char *mime) const; - - ssize_t decrypt(bool secure, - const uint8_t key[16], const uint8_t iv[16], - Mode mode, const void *srcPtr, - const SubSample *subSamples, size_t numSubSamples, - 
void *dstPtr, AString *errorDetailMsg); - private: - String8 subSamplesToString(CryptoPlugin::SubSample const *subSamples, size_t numSubSamples) const; - String8 arrayToString(uint8_t const *array, size_t len) const; - }; -}; diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp index 4b527d0..eebcb79 100644 --- a/media/libmediaplayerservice/Drm.cpp +++ b/media/libmediaplayerservice/Drm.cpp @@ -222,7 +222,11 @@ bool Drm::isCryptoSchemeSupported(const uint8_t uuid[16], const String8 &mimeTyp } } - return mFactory->isContentTypeSupported(mimeType); + if (mimeType != "") { + return mFactory->isContentTypeSupported(mimeType); + } + + return true; } status_t Drm::createPlugin(const uint8_t uuid[16]) { -- cgit v1.1 From 22f03209ceed3bcdf8c6558fcf02dc7699dde259 Mon Sep 17 00:00:00 2001 From: Chong Zhang Date: Wed, 28 Aug 2013 16:48:35 -0700 Subject: Set PREPARING flag after setDataSource_l() Otherwise the reset_l() inside setDataSource_l() will get stuck waiting for the PREPARING to be cleared. Bug: 10426788 Change-Id: I7ccdf7abcea71cf150544c7cd3f4781e3a946b97 --- media/libstagefright/AwesomePlayer.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 52e178e..5fbee7e 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -2805,7 +2805,6 @@ void AwesomePlayer::onAudioTearDownEvent() { // Reset and recreate reset_l(); - mFlags |= PREPARING; status_t err; @@ -2816,6 +2815,7 @@ void AwesomePlayer::onAudioTearDownEvent() { err = setDataSource_l(uri, &uriHeaders); } + mFlags |= PREPARING; if ( err != OK ) { // This will force beingPrepareAsync_l() to notify // a MEDIA_ERROR to the client and abort the prepare -- cgit v1.1 From ce70374bf105f8a6160bf5dd70affea012b2a464 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 19 Jul 2013 16:33:58 -0700 Subject: New AudioTrack C++ API for audio timestamps This new API is intended to replace latency(), especially for A/V sync. The application will receive periodic timestamp notifications. The period is unspecified, but will likely be more frequent after a pause or stop, set position, underrun, display on/off change, route change, or when audio framework notices drift. It will be up to the higher level application (e.g. Stagefright) to reconstruct a clock that updates more frequently. The current latency() method doesn't indicate when latency changes due to screen on/off state, route changes, etc. Includes squahsed change-Id: I2082f8752040be0c234b1a6f1be2e269abf2ce7c Dummy implementation of AudioTrack:getTimestamp() Rename AudioTrack::Timestamp to AudioTimestamp. Renaming and pulling up to a higher level allows more modules to use it. Change-Id: Ibf7f6a207c3f8d8697f25ede2cd5200697fadb86 (cherry picked from commit dd69eb893867634fd169c03204a6ad7c74b351e7) --- include/media/AudioTimestamp.h | 33 +++++++++++++++++++++++++++++++++ include/media/AudioTrack.h | 16 ++++++++++++++++ media/libmedia/AudioTrack.cpp | 5 +++++ 3 files changed, 54 insertions(+) create mode 100644 include/media/AudioTimestamp.h diff --git a/include/media/AudioTimestamp.h b/include/media/AudioTimestamp.h new file mode 100644 index 0000000..c29c7e5 --- /dev/null +++ b/include/media/AudioTimestamp.h @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_AUDIO_TIMESTAMP_H +#define ANDROID_AUDIO_TIMESTAMP_H + +#include + +class AudioTimestamp { +public: + AudioTimestamp() : mPosition(0) { + mTime.tv_sec = 0; + mTime.tv_nsec = 0; + } + // FIXME change type to match android.media.AudioTrack + uint32_t mPosition; // a frame position in AudioTrack::getPosition() units + struct timespec mTime; // corresponding CLOCK_MONOTONIC when frame is expected to present +}; + +#endif // ANDROID_AUDIO_TIMESTAMP_H diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index ae92cdd..ebb7ef3 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -19,6 +19,7 @@ #include #include +#include #include #include @@ -62,6 +63,9 @@ public: // voluntary invalidation by mediaserver, or mediaserver crash. EVENT_STREAM_END = 7, // Sent after all the buffers queued in AF and HW are played // back (after stop is called) + EVENT_NEW_TIMESTAMP = 8, // Delivered periodically and when there's a significant change + // in the mapping from frame position to presentation time. + // See AudioTimestamp for the information included with event. }; /* Client should declare Buffer on the stack and pass address to obtainBuffer() @@ -107,6 +111,8 @@ public: * - EVENT_NEW_POS: pointer to const uint32_t containing the new position in frames. * - EVENT_BUFFER_END: unused. * - EVENT_NEW_IAUDIOTRACK: unused. + * - EVENT_STREAM_END: unused. + * - EVENT_NEW_TIMESTAMP: pointer to const AudioTimestamp. */ typedef void (*callback_t)(int event, void* user, void *info); @@ -564,6 +570,16 @@ public: /* Get parameters */ String8 getParameters(const String8& keys); + /* Poll for a timestamp on demand. + * Use if EVENT_NEW_TIMESTAMP is not delivered often enough for your needs, + * or if you need to get the most recent timestamp outside of the event callback handler. + * Caution: calling this method too often may be inefficient; + * if you need a high resolution mapping between frame position and presentation time, + * consider implementing that at application level, based on the low resolution timestamps. + * Returns NO_ERROR if timestamp is valid. + */ + status_t getTimestamp(AudioTimestamp& timestamp); + protected: /* copying audio tracks is not allowed */ AudioTrack(const AudioTrack& other); diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index dd0ec73..d90e733 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1712,6 +1712,11 @@ status_t AudioTrack::setParameters(const String8& keyValuePairs) } } +status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp) +{ + return INVALID_OPERATION; +} + String8 AudioTrack::getParameters(const String8& keys) { if (mOutput) { -- cgit v1.1 From 53cec22821072719ee02c856e9ac2dda2496c570 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 29 Aug 2013 09:01:02 -0700 Subject: Add IAudioTrack::getTimestamp() with dummy implementation in AudioFlinger::TrackHandle, and implement AudioTrack::getTimestamp() using IAudioTrack. Also document invariant that mAudioTrack and control block are always non-0 after successful initialization. 
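As an illustration only (not part of this change), a client that wants to use the new API for A/V sync might poll it like this, assuming "track" is a successfully initialized sp<AudioTrack>:

    AudioTimestamp ts;
    if (track->getTimestamp(ts) == NO_ERROR) {
        // ts.mPosition is a frame position in AudioTrack::getPosition() units;
        // ts.mTime is the CLOCK_MONOTONIC time at which that frame is expected to present.
        int64_t presentAtNs = (int64_t)ts.mTime.tv_sec * 1000000000LL + ts.mTime.tv_nsec;
        // Pair presentAtNs with ts.mPosition to build a frame-position -> wall-clock mapping.
    }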
Change-Id: I9861d1454cff7decf795d5d5898ac7999a9f3b7e --- include/media/AudioTrack.h | 5 +++-- include/media/IAudioTrack.h | 4 ++++ media/libmedia/AudioTrack.cpp | 17 +++++++---------- media/libmedia/IAudioTrack.cpp | 30 +++++++++++++++++++++++++++++- services/audioflinger/AudioFlinger.h | 1 + services/audioflinger/Tracks.cpp | 5 +++++ 6 files changed, 49 insertions(+), 13 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index ebb7ef3..453c106 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -227,6 +227,7 @@ public: * - INVALID_OPERATION: AudioTrack is already initialized * - BAD_VALUE: invalid parameter (channelMask, format, sampleRate...) * - NO_INIT: audio server or audio hardware not initialized + * If status is not equal to NO_ERROR, don't call any other APIs on this AudioTrack. * If sharedBuffer is non-0, the frameCount parameter is ignored and * replaced by the shared buffer's total allocated size in frame units. * @@ -249,7 +250,7 @@ public: transfer_type transferType = TRANSFER_DEFAULT, const audio_offload_info_t *offloadInfo = NULL); - /* Result of constructing the AudioTrack. This must be checked + /* Result of constructing the AudioTrack. This must be checked for successful initialization * before using any AudioTrack API (except for set()), because using * an uninitialized AudioTrack produces undefined results. * See set() method above for possible return codes. @@ -646,7 +647,7 @@ protected: bool isOffloaded() const { return (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0; } - // may be changed if IAudioTrack is re-created + // Next 3 fields may be changed if IAudioTrack is re-created, but always != 0 sp mAudioTrack; sp mCblkMemory; audio_track_cblk_t* mCblk; // re-load after mLock.unlock() diff --git a/include/media/IAudioTrack.h b/include/media/IAudioTrack.h index 1014403..afac4ae 100644 --- a/include/media/IAudioTrack.h +++ b/include/media/IAudioTrack.h @@ -26,6 +26,7 @@ #include #include #include +#include namespace android { @@ -86,6 +87,9 @@ public: /* Send parameters to the audio hardware */ virtual status_t setParameters(const String8& keyValuePairs) = 0; + + /* Return NO_ERROR if timestamp is valid */ + virtual status_t getTimestamp(AudioTimestamp& timestamp) = 0; }; // ---------------------------------------------------------------------------- diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index d90e733..78ae37e 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -148,10 +148,8 @@ AudioTrack::~AudioTrack() mAudioTrackThread->requestExitAndWait(); mAudioTrackThread.clear(); } - if (mAudioTrack != 0) { - mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this); - mAudioTrack.clear(); - } + mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this); + mAudioTrack.clear(); IPCThreadState::self()->flushCommands(); AudioSystem::releaseAudioSessionId(mSessionId); } @@ -222,6 +220,7 @@ status_t AudioTrack::set( AutoMutex lock(mLock); + // invariant that mAudioTrack != 0 is true only after set() returns successfully if (mAudioTrack != 0) { ALOGE("Track already in use"); return INVALID_OPERATION; @@ -965,6 +964,7 @@ status_t AudioTrack::createTrack_l( ALOGE("Could not get control block"); return NO_INIT; } + // invariant that mAudioTrack != 0 is true only after set() returns successfully if (mAudioTrack != 0) { mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this); mDeathNotifier.clear(); @@ -1705,16 +1705,13 @@ status_t 
AudioTrack::restoreTrack_l(const char *from) status_t AudioTrack::setParameters(const String8& keyValuePairs) { AutoMutex lock(mLock); - if (mAudioTrack != 0) { - return mAudioTrack->setParameters(keyValuePairs); - } else { - return NO_INIT; - } + return mAudioTrack->setParameters(keyValuePairs); } status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp) { - return INVALID_OPERATION; + AutoMutex lock(mLock); + return mAudioTrack->getTimestamp(timestamp); } String8 AudioTrack::getParameters(const String8& keys) diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp index a2b49a3..f0d75ba 100644 --- a/media/libmedia/IAudioTrack.cpp +++ b/media/libmedia/IAudioTrack.cpp @@ -39,7 +39,8 @@ enum { ALLOCATE_TIMED_BUFFER, QUEUE_TIMED_BUFFER, SET_MEDIA_TIME_TRANSFORM, - SET_PARAMETERS + SET_PARAMETERS, + GET_TIMESTAMP, }; class BpAudioTrack : public BpInterface @@ -166,6 +167,21 @@ public: } return status; } + + virtual status_t getTimestamp(AudioTimestamp& timestamp) { + Parcel data, reply; + data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor()); + status_t status = remote()->transact(GET_TIMESTAMP, data, &reply); + if (status == NO_ERROR) { + status = reply.readInt32(); + if (status == NO_ERROR) { + timestamp.mPosition = reply.readInt32(); + timestamp.mTime.tv_sec = reply.readInt32(); + timestamp.mTime.tv_nsec = reply.readInt32(); + } + } + return status; + } }; IMPLEMENT_META_INTERFACE(AudioTrack, "android.media.IAudioTrack"); @@ -241,6 +257,18 @@ status_t BnAudioTrack::onTransact( reply->writeInt32(setParameters(keyValuePairs)); return NO_ERROR; } break; + case GET_TIMESTAMP: { + CHECK_INTERFACE(IAudioTrack, data, reply); + AudioTimestamp timestamp; + status_t status = getTimestamp(timestamp); + reply->writeInt32(status); + if (status == NO_ERROR) { + reply->writeInt32(timestamp.mPosition); + reply->writeInt32(timestamp.mTime.tv_sec); + reply->writeInt32(timestamp.mTime.tv_nsec); + } + return NO_ERROR; + } break; default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index e5e4113..5df04f4 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -411,6 +411,7 @@ private: virtual status_t setMediaTimeTransform(const LinearTransform& xform, int target); virtual status_t setParameters(const String8& keyValuePairs); + virtual status_t getTimestamp(AudioTimestamp& timestamp); virtual status_t onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags); diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index e676365..d1ab3c8 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -283,6 +283,11 @@ status_t AudioFlinger::TrackHandle::setParameters(const String8& keyValuePairs) return mTrack->setParameters(keyValuePairs); } +status_t AudioFlinger::TrackHandle::getTimestamp(AudioTimestamp& timestamp) +{ + return INVALID_OPERATION; +} + status_t AudioFlinger::TrackHandle::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { -- cgit v1.1 From 573d80a8f463f648a515fc0975bf83951b272993 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 26 Aug 2013 09:36:23 -0700 Subject: Add AudioFlinger::PlaybackThread::Track::getTimestamp() with a dummy implementation initially, and use it in AudioFlinger::TrackHandle::getTimestamp() Change-Id: I2da88fc52a135a7f0d9fd9538986e919dc8ccd3b --- services/audioflinger/PlaybackTracks.h | 1 + 
services/audioflinger/Tracks.cpp | 13 ++++++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index 5600411c..d34833f 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -59,6 +59,7 @@ public: void setMainBuffer(int16_t *buffer) { mMainBuffer = buffer; } int16_t *mainBuffer() const { return mMainBuffer; } int auxEffectId() const { return mAuxEffectId; } + virtual status_t getTimestamp(AudioTimestamp& timestamp); // implement FastMixerState::VolumeProvider interface virtual uint32_t getVolumeLR(); diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index d1ab3c8..687217b 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -285,7 +285,7 @@ status_t AudioFlinger::TrackHandle::setParameters(const String8& keyValuePairs) status_t AudioFlinger::TrackHandle::getTimestamp(AudioTimestamp& timestamp) { - return INVALID_OPERATION; + return mTrack->getTimestamp(timestamp); } status_t AudioFlinger::TrackHandle::onTransact( @@ -716,6 +716,17 @@ status_t AudioFlinger::PlaybackThread::Track::setParameters(const String8& keyVa } } +status_t AudioFlinger::PlaybackThread::Track::getTimestamp(AudioTimestamp& timestamp) +{ + sp thread = mThread.promote(); + if (thread == 0) { + return false; + } + Mutex::Autolock _l(thread->mLock); + PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); + return INVALID_OPERATION; +} + status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId) { status_t status = DEAD_OBJECT; -- cgit v1.1 From bd096fd9d8e5fc0e62f98807f4818a06f70d0812 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 23 Aug 2013 13:53:56 -0700 Subject: Implement Track::getTimestamp() using a new timestamp latch in PlaybackThread, and AudioTrackServerProxy::framesReleased() which returns mServer. Change-Id: I1ebfba968c773faaab95648c272fd3ebd74718d6 --- include/private/media/AudioTrackShared.h | 3 +++ services/audioflinger/Threads.cpp | 18 +++++++++++++++++- services/audioflinger/Threads.h | 11 +++++++++++ services/audioflinger/Tracks.cpp | 14 +++++++++++++- 4 files changed, 44 insertions(+), 2 deletions(-) diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 1379379..ad7409d 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -422,6 +422,9 @@ public: // Return the total number of frames which AudioFlinger desired but were unavailable, // and thus which resulted in an underrun. 
virtual uint32_t getUnderrunFrames() const { return mCblk->u.mStreaming.mUnderrunFrames; } + + // Return the total number of frames that AudioFlinger has obtained and released + virtual size_t framesReleased() const { return mCblk->mServer; } }; class StaticAudioTrackServerProxy : public AudioTrackServerProxy { diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 2c2931f..bc01ede 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -943,7 +943,9 @@ AudioFlinger::PlaybackThread::PlaybackThread(const sp& audioFlinge mDraining(false), mScreenState(AudioFlinger::mScreenState), // index 0 is reserved for normal mixer's submix - mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1) + mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1), + // mLatchD, mLatchQ, + mLatchDValid(false), mLatchQValid(false) { snprintf(mName, kNameLength, "AudioOut_%X", id); mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mName); @@ -1818,6 +1820,14 @@ ssize_t AudioFlinger::PlaybackThread::threadLoop_write() } else { bytesWritten = framesWritten; } + status_t status = INVALID_OPERATION; // mLatchD.mTimestamp is invalid + if (status == NO_ERROR) { + size_t totalFramesWritten = mNormalSink->framesWritten(); + if (totalFramesWritten >= mLatchD.mTimestamp.mPosition) { + mLatchD.mUnpresentedFrames = totalFramesWritten - mLatchD.mTimestamp.mPosition; + mLatchDValid = true; + } + } // otherwise use the HAL / AudioStreamOut directly } else { // Direct output and offload threads @@ -2096,6 +2106,12 @@ bool AudioFlinger::PlaybackThread::threadLoop() logString = NULL; } + if (mLatchDValid) { + mLatchQ = mLatchD; + mLatchDValid = false; + mLatchQValid = true; + } + if (checkForNewParameters_l()) { cacheParameters_l(); } diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 31d5323..1333de2 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -606,6 +606,17 @@ protected: // accessed by both binder threads and within threadLoop(), lock on mutex needed unsigned mFastTrackAvailMask; // bit i set if fast track [i] is available virtual void flushOutput_l(); + +private: + // timestamp latch: + // D input is written by threadLoop_write while mutex is unlocked, and read while locked + // Q output is written while locked, and read while locked + struct { + AudioTimestamp mTimestamp; + uint32_t mUnpresentedFrames; + } mLatchD, mLatchQ; + bool mLatchDValid; // true means mLatchD is valid, and clock it into latch at next opportunity + bool mLatchQValid; // true means mLatchQ is valid }; class MixerThread : public PlaybackThread { diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 687217b..9622709 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -724,7 +724,19 @@ status_t AudioFlinger::PlaybackThread::Track::getTimestamp(AudioTimestamp& times } Mutex::Autolock _l(thread->mLock); PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); - return INVALID_OPERATION; + if (!playbackThread->mLatchQValid) { + return INVALID_OPERATION; + } + uint32_t unpresentedFrames = + ((int64_t) playbackThread->mLatchQ.mUnpresentedFrames * mSampleRate) / + playbackThread->mSampleRate; + uint32_t framesWritten = mAudioTrackServerProxy->framesReleased(); + if (framesWritten < unpresentedFrames) { + return INVALID_OPERATION; + } + timestamp.mPosition = framesWritten - unpresentedFrames; + timestamp.mTime = 
playbackThread->mLatchQ.mTimestamp.mTime; + return NO_ERROR; } status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId) -- cgit v1.1 From 26fe6c7c56477ef227205c68f17df07ca3501d65 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Thu, 29 Aug 2013 12:46:18 -0700 Subject: Camera3: Be busy as soon as work arrives. RequestThread's internal busy flag (mPaused) was not being immediately set when new work was submitted to it. This allowed for a race condition where a capture() followed by an immediate waitUntilDrained() would immediately return from the wait. Set the mPaused flag to false immediately in capture() and setStreamingRequest() to avoid this, instead of waiting until the end of the next RequestThread iteration. Bug: 10531739 Change-Id: I54a79fe5361d527ec717f41ad805e9b319a48cd8 --- .../camera/libcameraservice/device3/Camera3Device.cpp | 19 ++++++++++++++++++- .../camera/libcameraservice/device3/Camera3Device.h | 1 + 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index 7f2ec7a..47321e0 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -1424,6 +1424,8 @@ status_t Camera3Device::RequestThread::queueRequest( Mutex::Autolock l(mRequestLock); mRequestQueue.push_back(request); + unpauseForNewRequests(); + return OK; } @@ -1489,6 +1491,9 @@ status_t Camera3Device::RequestThread::setRepeatingRequests( mRepeatingRequests.clear(); mRepeatingRequests.insert(mRepeatingRequests.begin(), requests.begin(), requests.end()); + + unpauseForNewRequests(); + return OK; } @@ -1791,7 +1796,9 @@ sp mRequestQueue.erase(firstRequest); } - // Not paused + // In case we've been unpaused by setPaused clearing mDoPause, need to + // update internal pause state (capture/setRepeatingRequest unpause + // directly). Mutex::Autolock pl(mPauseLock); mPaused = false; @@ -1824,6 +1831,16 @@ bool Camera3Device::RequestThread::waitIfPaused() { return false; } +void Camera3Device::RequestThread::unpauseForNewRequests() { + // With work to do, mark thread as unpaused. + // If paused by request (setPaused), don't resume, to avoid + // extra signaling/waiting overhead to waitUntilPaused + Mutex::Autolock p(mPauseLock); + if (!mDoPause) { + mPaused = false; + } +} + void Camera3Device::RequestThread::setErrorState(const char *fmt, ...) { sp parent = mParent.promote(); if (parent != NULL) { diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h index 99e1cc8..6565048 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.h +++ b/services/camera/libcameraservice/device3/Camera3Device.h @@ -314,6 +314,7 @@ class Camera3Device : // Pause handling bool waitIfPaused(); + void unpauseForNewRequests(); // Relay error to parent device object setErrorState void setErrorState(const char *fmt, ...); -- cgit v1.1 From 50d9a8f7de5f79fa8a36489a53846d6653997e38 Mon Sep 17 00:00:00 2001 From: Haynes Mathew George Date: Mon, 5 Aug 2013 11:00:37 -0700 Subject: AudioPlayer: timestamp fixes for compress offload Use realtime queried from AudioTrack as the only time for compress offload playback. 
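A rough sketch of the idea, not code from this patch (the sink accessor and member names are assumed): for offloaded playback the only clock consulted is the position reported through the AudioTrack/AudioSink, converted to microseconds.

    uint32_t playedFrames = 0;
    if (mAudioSink->getPosition(&playedFrames) == OK) {
        // Offload case: this value is used directly as both the real time and the media time.
        int64_t playedUs = (int64_t)playedFrames * 1000000LL / mSampleRate;
    }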
Change-Id: I314447637715c4864690c94173260165369146cb --- media/libstagefright/AudioPlayer.cpp | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp index 2418aab..e38e261 100644 --- a/media/libstagefright/AudioPlayer.cpp +++ b/media/libstagefright/AudioPlayer.cpp @@ -680,6 +680,14 @@ size_t AudioPlayer::fillBuffer(void *data, size_t size) { int64_t AudioPlayer::getRealTimeUs() { Mutex::Autolock autoLock(mLock); + if (useOffload()) { + if (mSeeking) { + return mSeekTimeUs; + } + mPositionTimeRealUs = getOutputPlayPositionUs_l(); + return mPositionTimeRealUs; + } + return getRealTimeUsLocked(); } @@ -741,11 +749,6 @@ int64_t AudioPlayer::getMediaTimeUs() { return 0; } - if (useOffload()) { - mPositionTimeRealUs = getOutputPlayPositionUs_l(); - return mPositionTimeRealUs; - } - int64_t realTimeOffset = getRealTimeUsLocked() - mPositionTimeRealUs; if (realTimeOffset < 0) { realTimeOffset = 0; @@ -758,8 +761,14 @@ bool AudioPlayer::getMediaTimeMapping( int64_t *realtime_us, int64_t *mediatime_us) { Mutex::Autolock autoLock(mLock); - *realtime_us = mPositionTimeRealUs; - *mediatime_us = mPositionTimeMediaUs; + if (useOffload()) { + mPositionTimeRealUs = getOutputPlayPositionUs_l(); + *realtime_us = mPositionTimeRealUs; + *mediatime_us = mPositionTimeRealUs; + } else { + *realtime_us = mPositionTimeRealUs; + *mediatime_us = mPositionTimeMediaUs; + } return mPositionTimeRealUs != -1 && mPositionTimeMediaUs != -1; } -- cgit v1.1 From 42a6f422c09ca6a960673e0e805ddf71a9b51bef Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Thu, 29 Aug 2013 14:35:05 -0700 Subject: AudioTrack: fix write retries for compressed audio When the amount of frames that can be written to the buffer is less than requested, AudioTrack::processAudioBuffer() estimates the time needed to free the missing amount of frames and asks the callback thread to sleep. This behavior is not possible for compressed audio and should not be enabled for offloaded tracks. Change-Id: I5b657283cfba06254c9ac0ea9b447467cce7eb61 --- media/libmedia/AudioTrack.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 214e789..7aa9e35 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1550,7 +1550,7 @@ nsecs_t AudioTrack::processAudioBuffer(const sp& thread) return NS_NEVER; } - if (mRetryOnPartialBuffer) { + if (mRetryOnPartialBuffer && !isOffloaded()) { mRetryOnPartialBuffer = false; if (avail < mRemainingFrames) { int64_t myns = ((mRemainingFrames - avail) * 1100000000LL) / sampleRate; -- cgit v1.1 From 1b02586f0f41d82c80619cfc0d16cd3feb5eaec7 Mon Sep 17 00:00:00 2001 From: Jean-Michel Trivi Date: Thu, 29 Aug 2013 15:51:31 -0700 Subject: AAC encoder: handle missing object types The audio object type for parametric stereo and lowdelay was not passed to the FDK AAC encoder. 
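A hypothetical usage sketch (port index and parameter setup are assumed, not taken from this patch): a client selects HE-AAC v2 through the standard OMX AAC profile parameter, which the change below now forwards to the FDK encoder as AOT_PS.

    OMX_AUDIO_PARAM_AACPROFILETYPE def;
    memset(&def, 0, sizeof(def));
    def.nSize = sizeof(def);
    def.nPortIndex = 1;                          // assumed output port index
    def.eAACProfile = OMX_AUDIO_AACObjectHE_PS;  // mapped to AOT_PS by getAOTFromProfile()
    // def would then be applied with OMX_SetParameter(..., OMX_IndexParamAudioAac, &def).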
Bug 9428126 Change-Id: Ic32822afff8b1da6a2d80c1b65d514f24059fb29 --- media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp index 5749733..ff2b503 100644 --- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp +++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp @@ -292,6 +292,10 @@ static AUDIO_OBJECT_TYPE getAOTFromProfile(OMX_U32 profile) { return AOT_AAC_LC; } else if (profile == OMX_AUDIO_AACObjectHE) { return AOT_SBR; + } else if (profile == OMX_AUDIO_AACObjectHE_PS) { + return AOT_PS; + } else if (profile == OMX_AUDIO_AACObjectLD) { + return AOT_ER_AAC_LD; } else if (profile == OMX_AUDIO_AACObjectELD) { return AOT_ER_AAC_ELD; } else { -- cgit v1.1 From 4fb55c15da1a563ab925914a0f493a3dc80495a3 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Thu, 29 Aug 2013 17:43:01 -0700 Subject: camera: Fix deadlock when sending callbacks during disconnect Bug: 10552448 Change-Id: Ie93ba744ae6705aa657f980ae8e08ec624feeb39 --- services/camera/libcameraservice/api2/CameraDeviceClient.cpp | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp index 289ba06..f147c06 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp @@ -500,14 +500,12 @@ void CameraDeviceClient::onFrameAvailable(int32_t frameId, ATRACE_CALL(); ALOGV("%s", __FUNCTION__); - Mutex::Autolock icl(mBinderSerializationLock); - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - - if (mRemoteCallback != NULL) { + // Thread-safe. No lock necessary. + sp remoteCb = mRemoteCallback; + if (remoteCb != NULL) { ALOGV("%s: frame = %p ", __FUNCTION__, &frame); - mRemoteCallback->onResultReceived(frameId, frame); + remoteCb->onResultReceived(frameId, frame); } - } // TODO: move to Camera2ClientBase -- cgit v1.1 From 9da36a6c8df70a5c8179ac78fab33cfbb5078cb2 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Thu, 29 Aug 2013 09:31:26 -0700 Subject: Fix decoder EOS handling Conceptually it should be the same whether EOS is signalled on the last buffer holding data, or an empty buffer that follows. Make it so that this actually behaves the same for mp3, AAC and Vorbis. 
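The shared pattern the diffs below introduce, shown here as a condensed, simplified sketch (member names follow the patch; the decode/drain step is elided and the consumption test is a placeholder):

    while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) {
        if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
            mSawInputEos = true;    // EOS may arrive on a data-carrying or an empty buffer
        }
        // ... decode available input; once it is exhausted, flush the codec's delayed output ...
        if (mSawInputEos && inputFullyConsumed) {    // inputFullyConsumed: placeholder condition
            outHeader->nFlags = OMX_BUFFERFLAG_EOS;  // signal EOS downstream exactly once
            mSignalledOutputEos = true;
        }
    }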
b/8747869 Change-Id: Idece8ef45689a3ffaf70fb45d19862d7b93b2f92 --- media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 217 ++++++++++----------- media/libstagefright/codecs/aacdec/SoftAAC2.h | 2 + media/libstagefright/codecs/mp3dec/SoftMP3.cpp | 114 ++++++----- media/libstagefright/codecs/mp3dec/SoftMP3.h | 2 + .../codecs/vorbis/dec/SoftVorbis.cpp | 73 +++---- .../libstagefright/codecs/vorbis/dec/SoftVorbis.h | 2 + 6 files changed, 208 insertions(+), 202 deletions(-) diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp index 1b20cbb..c9b5d26 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp @@ -58,6 +58,8 @@ SoftAAC2::SoftAAC2( mIsADTS(false), mInputBufferCount(0), mSignalledError(false), + mSawInputEos(false), + mSignalledOutputEos(false), mAnchorTimeUs(0), mNumSamplesOutput(0), mOutputPortSettingsChange(NONE) { @@ -350,115 +352,83 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { return; } - while (!inQueue.empty() && !outQueue.empty()) { - BufferInfo *inInfo = *inQueue.begin(); - OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; + while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) { + BufferInfo *inInfo = NULL; + OMX_BUFFERHEADERTYPE *inHeader = NULL; + if (!inQueue.empty()) { + inInfo = *inQueue.begin(); + inHeader = inInfo->mHeader; + } BufferInfo *outInfo = *outQueue.begin(); OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; + outHeader->nFlags = 0; - if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { - inQueue.erase(inQueue.begin()); - inInfo->mOwnedByUs = false; - notifyEmptyBufferDone(inHeader); - - if (mDecoderHasData) { - // flush out the decoder's delayed data by calling DecodeFrame - // one more time, with the AACDEC_FLUSH flag set - INT_PCM *outBuffer = - reinterpret_cast( - outHeader->pBuffer + outHeader->nOffset); - - AAC_DECODER_ERROR decoderErr = - aacDecoder_DecodeFrame(mAACDecoder, - outBuffer, - outHeader->nAllocLen, - AACDEC_FLUSH); - mDecoderHasData = false; - - if (decoderErr != AAC_DEC_OK) { - mSignalledError = true; - - notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, - NULL); - - return; - } - - outHeader->nFilledLen = - mStreamInfo->frameSize - * sizeof(int16_t) - * mStreamInfo->numChannels; - } else { - // we never submitted any data to the decoder, so there's nothing to flush out - outHeader->nFilledLen = 0; + if (inHeader) { + if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + mSawInputEos = true; } - outHeader->nFlags = OMX_BUFFERFLAG_EOS; - - outQueue.erase(outQueue.begin()); - outInfo->mOwnedByUs = false; - notifyFillBufferDone(outHeader); - return; - } - - if (inHeader->nOffset == 0) { - mAnchorTimeUs = inHeader->nTimeStamp; - mNumSamplesOutput = 0; - } + if (inHeader->nOffset == 0 && inHeader->nFilledLen) { + mAnchorTimeUs = inHeader->nTimeStamp; + mNumSamplesOutput = 0; + } - size_t adtsHeaderSize = 0; - if (mIsADTS) { - // skip 30 bits, aac_frame_length follows. - // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll????? + if (mIsADTS) { + size_t adtsHeaderSize = 0; + // skip 30 bits, aac_frame_length follows. + // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll????? - const uint8_t *adtsHeader = inHeader->pBuffer + inHeader->nOffset; + const uint8_t *adtsHeader = inHeader->pBuffer + inHeader->nOffset; - bool signalError = false; - if (inHeader->nFilledLen < 7) { - ALOGE("Audio data too short to contain even the ADTS header. 
" - "Got %ld bytes.", inHeader->nFilledLen); - hexdump(adtsHeader, inHeader->nFilledLen); - signalError = true; - } else { - bool protectionAbsent = (adtsHeader[1] & 1); - - unsigned aac_frame_length = - ((adtsHeader[3] & 3) << 11) - | (adtsHeader[4] << 3) - | (adtsHeader[5] >> 5); - - if (inHeader->nFilledLen < aac_frame_length) { - ALOGE("Not enough audio data for the complete frame. " - "Got %ld bytes, frame size according to the ADTS " - "header is %u bytes.", - inHeader->nFilledLen, aac_frame_length); + bool signalError = false; + if (inHeader->nFilledLen < 7) { + ALOGE("Audio data too short to contain even the ADTS header. " + "Got %ld bytes.", inHeader->nFilledLen); hexdump(adtsHeader, inHeader->nFilledLen); signalError = true; } else { - adtsHeaderSize = (protectionAbsent ? 7 : 9); - - inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize; - inBufferLength[0] = aac_frame_length - adtsHeaderSize; - - inHeader->nOffset += adtsHeaderSize; - inHeader->nFilledLen -= adtsHeaderSize; + bool protectionAbsent = (adtsHeader[1] & 1); + + unsigned aac_frame_length = + ((adtsHeader[3] & 3) << 11) + | (adtsHeader[4] << 3) + | (adtsHeader[5] >> 5); + + if (inHeader->nFilledLen < aac_frame_length) { + ALOGE("Not enough audio data for the complete frame. " + "Got %ld bytes, frame size according to the ADTS " + "header is %u bytes.", + inHeader->nFilledLen, aac_frame_length); + hexdump(adtsHeader, inHeader->nFilledLen); + signalError = true; + } else { + adtsHeaderSize = (protectionAbsent ? 7 : 9); + + inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize; + inBufferLength[0] = aac_frame_length - adtsHeaderSize; + + inHeader->nOffset += adtsHeaderSize; + inHeader->nFilledLen -= adtsHeaderSize; + } } - } - if (signalError) { - mSignalledError = true; + if (signalError) { + mSignalledError = true; - notify(OMX_EventError, - OMX_ErrorStreamCorrupt, - ERROR_MALFORMED, - NULL); + notify(OMX_EventError, + OMX_ErrorStreamCorrupt, + ERROR_MALFORMED, + NULL); - return; + return; + } + } else { + inBuffer[0] = inHeader->pBuffer + inHeader->nOffset; + inBufferLength[0] = inHeader->nFilledLen; } } else { - inBuffer[0] = inHeader->pBuffer + inHeader->nOffset; - inBufferLength[0] = inHeader->nFilledLen; + inBufferLength[0] = 0; } // Fill and decode @@ -471,50 +441,66 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { int prevNumChannels = mStreamInfo->numChannels; AAC_DECODER_ERROR decoderErr = AAC_DEC_NOT_ENOUGH_BITS; - while (bytesValid[0] > 0 && decoderErr == AAC_DEC_NOT_ENOUGH_BITS) { + while ((bytesValid[0] > 0 || mSawInputEos) && decoderErr == AAC_DEC_NOT_ENOUGH_BITS) { + mDecoderHasData |= (bytesValid[0] > 0); aacDecoder_Fill(mAACDecoder, inBuffer, inBufferLength, bytesValid); - mDecoderHasData = true; decoderErr = aacDecoder_DecodeFrame(mAACDecoder, outBuffer, outHeader->nAllocLen, 0 /* flags */); - if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) { - ALOGW("Not enough bits, bytesValid %d", bytesValid[0]); + if (mSawInputEos && bytesValid[0] <= 0) { + if (mDecoderHasData) { + // flush out the decoder's delayed data by calling DecodeFrame + // one more time, with the AACDEC_FLUSH flag set + decoderErr = aacDecoder_DecodeFrame(mAACDecoder, + outBuffer, + outHeader->nAllocLen, + AACDEC_FLUSH); + mDecoderHasData = false; + } + outHeader->nFlags = OMX_BUFFERFLAG_EOS; + mSignalledOutputEos = true; + break; + } else { + ALOGW("Not enough bits, bytesValid %d", bytesValid[0]); + } } } size_t numOutBytes = mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels; - if (decoderErr == AAC_DEC_OK) { - UINT 
inBufferUsedLength = inBufferLength[0] - bytesValid[0]; - inHeader->nFilledLen -= inBufferUsedLength; - inHeader->nOffset += inBufferUsedLength; - } else { - ALOGW("AAC decoder returned error %d, substituting silence", - decoderErr); + if (inHeader) { + if (decoderErr == AAC_DEC_OK) { + UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0]; + inHeader->nFilledLen -= inBufferUsedLength; + inHeader->nOffset += inBufferUsedLength; + } else { + ALOGW("AAC decoder returned error %d, substituting silence", + decoderErr); - memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes); + memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes); - // Discard input buffer. - inHeader->nFilledLen = 0; + // Discard input buffer. + inHeader->nFilledLen = 0; - aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); + aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); - // fall through - } + // fall through + } - if (inHeader->nFilledLen == 0) { - inInfo->mOwnedByUs = false; - inQueue.erase(inQueue.begin()); - inInfo = NULL; - notifyEmptyBufferDone(inHeader); - inHeader = NULL; + if (inHeader->nFilledLen == 0) { + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + } } /* @@ -555,7 +541,6 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { // we've previously decoded valid data, in the latter case // (decode failed) we'll output a silent frame. outHeader->nFilledLen = numOutBytes; - outHeader->nFlags = 0; outHeader->nTimeStamp = mAnchorTimeUs @@ -606,6 +591,8 @@ void SoftAAC2::onReset() { mStreamInfo->sampleRate = 0; mSignalledError = false; + mSawInputEos = false; + mSignalledOutputEos = false; mOutputPortSettingsChange = NONE; } diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h index 2d960ab..a7ea1e2 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.h +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h @@ -55,6 +55,8 @@ private: bool mDecoderHasData; size_t mInputBufferCount; bool mSignalledError; + bool mSawInputEos; + bool mSignalledOutputEos; int64_t mAnchorTimeUs; int64_t mNumSamplesOutput; diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp index 7c382fb..877e3cb 100644 --- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp +++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp @@ -49,6 +49,8 @@ SoftMP3::SoftMP3( mNumChannels(2), mSamplingRate(44100), mSignalledError(false), + mSawInputEos(false), + mSignalledOutputEos(false), mOutputPortSettingsChange(NONE) { initPorts(); initDecoder(); @@ -194,48 +196,36 @@ void SoftMP3::onQueueFilled(OMX_U32 portIndex) { List &inQueue = getPortQueue(0); List &outQueue = getPortQueue(1); - while (!inQueue.empty() && !outQueue.empty()) { - BufferInfo *inInfo = *inQueue.begin(); - OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; + while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) { + BufferInfo *inInfo = NULL; + OMX_BUFFERHEADERTYPE *inHeader = NULL; + if (!inQueue.empty()) { + inInfo = *inQueue.begin(); + inHeader = inInfo->mHeader; + } BufferInfo *outInfo = *outQueue.begin(); OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; + outHeader->nFlags = 0; - if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { - inQueue.erase(inQueue.begin()); - inInfo->mOwnedByUs = false; - notifyEmptyBufferDone(inHeader); - - if (!mIsFirst) { - // pad the end of the stream with 529 samples, since that many samples - // 
were trimmed off the beginning when decoding started - outHeader->nFilledLen = - kPVMP3DecoderDelay * mNumChannels * sizeof(int16_t); + if (inHeader) { + if (inHeader->nOffset == 0 && inHeader->nFilledLen) { + mAnchorTimeUs = inHeader->nTimeStamp; + mNumFramesOutput = 0; + } - memset(outHeader->pBuffer, 0, outHeader->nFilledLen); - } else { - // Since we never discarded frames from the start, we won't have - // to add any padding at the end either. - outHeader->nFilledLen = 0; + if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + mSawInputEos = true; } - outHeader->nFlags = OMX_BUFFERFLAG_EOS; + mConfig->pInputBuffer = + inHeader->pBuffer + inHeader->nOffset; - outQueue.erase(outQueue.begin()); - outInfo->mOwnedByUs = false; - notifyFillBufferDone(outHeader); - return; - } - - if (inHeader->nOffset == 0) { - mAnchorTimeUs = inHeader->nTimeStamp; - mNumFramesOutput = 0; + mConfig->inputBufferCurrentLength = inHeader->nFilledLen; + } else { + mConfig->pInputBuffer = NULL; + mConfig->inputBufferCurrentLength = 0; } - - mConfig->pInputBuffer = - inHeader->pBuffer + inHeader->nOffset; - - mConfig->inputBufferCurrentLength = inHeader->nFilledLen; mConfig->inputBufferMaxLength = 0; mConfig->inputBufferUsedLength = 0; @@ -262,13 +252,28 @@ void SoftMP3::onQueueFilled(OMX_U32 portIndex) { mConfig->outputFrameSize = kOutputBufferSize / sizeof(int16_t); } - // This is recoverable, just ignore the current frame and - // play silence instead. - memset(outHeader->pBuffer, - 0, - mConfig->outputFrameSize * sizeof(int16_t)); - - mConfig->inputBufferUsedLength = inHeader->nFilledLen; + if (decoderErr == NO_ENOUGH_MAIN_DATA_ERROR && mSawInputEos) { + if (!mIsFirst) { + // pad the end of the stream with 529 samples, since that many samples + // were trimmed off the beginning when decoding started + outHeader->nOffset = 0; + outHeader->nFilledLen = kPVMP3DecoderDelay * mNumChannels * sizeof(int16_t); + + memset(outHeader->pBuffer, 0, outHeader->nFilledLen); + } + outHeader->nFlags = OMX_BUFFERFLAG_EOS; + mSignalledOutputEos = true; + } else { + // This is recoverable, just ignore the current frame and + // play silence instead. 
+ memset(outHeader->pBuffer, + 0, + mConfig->outputFrameSize * sizeof(int16_t)); + + if (inHeader) { + mConfig->inputBufferUsedLength = inHeader->nFilledLen; + } + } } else if (mConfig->samplingRate != mSamplingRate || mConfig->num_channels != mNumChannels) { mSamplingRate = mConfig->samplingRate; @@ -289,7 +294,7 @@ void SoftMP3::onQueueFilled(OMX_U32 portIndex) { outHeader->nFilledLen = mConfig->outputFrameSize * sizeof(int16_t) - outHeader->nOffset; - } else { + } else if (!mSignalledOutputEos) { outHeader->nOffset = 0; outHeader->nFilledLen = mConfig->outputFrameSize * sizeof(int16_t); } @@ -298,23 +303,24 @@ void SoftMP3::onQueueFilled(OMX_U32 portIndex) { mAnchorTimeUs + (mNumFramesOutput * 1000000ll) / mConfig->samplingRate; - outHeader->nFlags = 0; - - CHECK_GE(inHeader->nFilledLen, mConfig->inputBufferUsedLength); + if (inHeader) { + CHECK_GE(inHeader->nFilledLen, mConfig->inputBufferUsedLength); - inHeader->nOffset += mConfig->inputBufferUsedLength; - inHeader->nFilledLen -= mConfig->inputBufferUsedLength; + inHeader->nOffset += mConfig->inputBufferUsedLength; + inHeader->nFilledLen -= mConfig->inputBufferUsedLength; - mNumFramesOutput += mConfig->outputFrameSize / mNumChannels; - if (inHeader->nFilledLen == 0) { - inInfo->mOwnedByUs = false; - inQueue.erase(inQueue.begin()); - inInfo = NULL; - notifyEmptyBufferDone(inHeader); - inHeader = NULL; + if (inHeader->nFilledLen == 0) { + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + } } + mNumFramesOutput += mConfig->outputFrameSize / mNumChannels; + outInfo->mOwnedByUs = false; outQueue.erase(outQueue.begin()); outInfo = NULL; @@ -362,6 +368,8 @@ void SoftMP3::onReset() { pvmp3_InitDecoder(mConfig, mDecoderBuf); mIsFirst = true; mSignalledError = false; + mSawInputEos = false; + mSignalledOutputEos = false; mOutputPortSettingsChange = NONE; } diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.h b/media/libstagefright/codecs/mp3dec/SoftMP3.h index 4af91ea..f9e7b53 100644 --- a/media/libstagefright/codecs/mp3dec/SoftMP3.h +++ b/media/libstagefright/codecs/mp3dec/SoftMP3.h @@ -61,6 +61,8 @@ private: bool mIsFirst; bool mSignalledError; + bool mSawInputEos; + bool mSignalledOutputEos; enum { NONE, diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp index 51bb958..a377b23 100644 --- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp +++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp @@ -54,6 +54,8 @@ SoftVorbis::SoftVorbis( mAnchorTimeUs(0), mNumFramesOutput(0), mNumFramesLeftOnPage(-1), + mSawInputEos(false), + mSignalledOutputEos(false), mOutputPortSettingsChange(NONE) { initPorts(); CHECK_EQ(initDecoder(), (status_t)OK); @@ -290,48 +292,47 @@ void SoftVorbis::onQueueFilled(OMX_U32 portIndex) { return; } - while (!inQueue.empty() && !outQueue.empty()) { - BufferInfo *inInfo = *inQueue.begin(); - OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; + while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) { + BufferInfo *inInfo = NULL; + OMX_BUFFERHEADERTYPE *inHeader = NULL; + if (!inQueue.empty()) { + inInfo = *inQueue.begin(); + inHeader = inInfo->mHeader; + } BufferInfo *outInfo = *outQueue.begin(); OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; - if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { - inQueue.erase(inQueue.begin()); - inInfo->mOwnedByUs = false; - notifyEmptyBufferDone(inHeader); + int32_t numPageSamples = 0; - 
outHeader->nFilledLen = 0; - outHeader->nFlags = OMX_BUFFERFLAG_EOS; + if (inHeader) { + if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + mSawInputEos = true; + } - outQueue.erase(outQueue.begin()); - outInfo->mOwnedByUs = false; - notifyFillBufferDone(outHeader); - return; - } + if (inHeader->nFilledLen || !mSawInputEos) { + CHECK_GE(inHeader->nFilledLen, sizeof(numPageSamples)); + memcpy(&numPageSamples, + inHeader->pBuffer + + inHeader->nOffset + inHeader->nFilledLen - 4, + sizeof(numPageSamples)); - int32_t numPageSamples; - CHECK_GE(inHeader->nFilledLen, sizeof(numPageSamples)); - memcpy(&numPageSamples, - inHeader->pBuffer - + inHeader->nOffset + inHeader->nFilledLen - 4, - sizeof(numPageSamples)); + if (inHeader->nOffset == 0) { + mAnchorTimeUs = inHeader->nTimeStamp; + mNumFramesOutput = 0; + } - if (numPageSamples >= 0) { - mNumFramesLeftOnPage = numPageSamples; + inHeader->nFilledLen -= sizeof(numPageSamples);; + } } - if (inHeader->nOffset == 0) { - mAnchorTimeUs = inHeader->nTimeStamp; - mNumFramesOutput = 0; + if (numPageSamples >= 0) { + mNumFramesLeftOnPage = numPageSamples; } - inHeader->nFilledLen -= sizeof(numPageSamples);; - ogg_buffer buf; - buf.data = inHeader->pBuffer + inHeader->nOffset; - buf.size = inHeader->nFilledLen; + buf.data = inHeader ? inHeader->pBuffer + inHeader->nOffset : NULL; + buf.size = inHeader ? inHeader->nFilledLen : 0; buf.refcount = 1; buf.ptr.owner = NULL; @@ -384,11 +385,13 @@ void SoftVorbis::onQueueFilled(OMX_U32 portIndex) { mNumFramesOutput += numFrames; - inInfo->mOwnedByUs = false; - inQueue.erase(inQueue.begin()); - inInfo = NULL; - notifyEmptyBufferDone(inHeader); - inHeader = NULL; + if (inHeader) { + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + } outInfo->mOwnedByUs = false; outQueue.erase(outQueue.begin()); @@ -425,6 +428,8 @@ void SoftVorbis::onReset() { mVi = NULL; } + mSawInputEos = false; + mSignalledOutputEos = false; mOutputPortSettingsChange = NONE; } diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h index cb628a0..1d00816 100644 --- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h +++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h @@ -59,6 +59,8 @@ private: int64_t mAnchorTimeUs; int64_t mNumFramesOutput; int32_t mNumFramesLeftOnPage; + bool mSawInputEos; + bool mSignalledOutputEos; enum { NONE, -- cgit v1.1 From 6bf9ae20b3bd2dbb8f2e89ee167a6785222301cf Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 30 Aug 2013 15:12:37 -0700 Subject: audioflinger: offloaded tracks flush/pause order Make sure that a flush received after a pause is forwarded to the HAL after the pause. 
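As a minimal, self-contained sketch of the ordering this change enforces: the mFlushPending flag and flushHw_l() name mirror the diff below, while the thread and track types here are simplified stand-ins rather than the real AudioFlinger classes.

    #include <vector>

    struct TrackSketch { bool pauseRequested = false; };

    class OffloadThreadSketch {
    public:
        // Per-track state (including a pause request) is pushed to the HAL
        // first; only afterwards is a pending flush applied, so the HAL
        // always observes the pause before the flush that followed it.
        void prepareTracks(std::vector<TrackSketch>& active) {
            for (TrackSketch& t : active) {
                if (t.pauseRequested) {
                    pauseHw();
                    t.pauseRequested = false;
                }
            }
            if (mFlushPending) {
                flushHw_l();
                mFlushPending = false;
            }
        }
        void requestFlush() { mFlushPending = true; }  // e.g. flush() arriving after pause()
    private:
        void pauseHw()   { /* forward pause to the offload output */ }
        void flushHw_l() { /* forward flush to the offload output */ }
        bool mFlushPending = false;
    };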
Change-Id: Ib3221f70c59f8b3dbbac20b23104f6b779b75be0 --- services/audioflinger/Threads.cpp | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index bc01ede..3921b68 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -3810,10 +3810,6 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr size_t count = mActiveTracks.size(); mixer_state mixerStatus = MIXER_IDLE; - if (mFlushPending) { - flushHw_l(); - mFlushPending = false; - } // find out which tracks need to be processed for (size_t i = 0; i < count; i++) { sp t = mActiveTracks[i].promote(); @@ -3938,6 +3934,12 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr // compute volume for this track processVolume_l(track, last); } + + if (mFlushPending) { + flushHw_l(); + mFlushPending = false; + } + // remove all the tracks that need to be... removeTracks_l(*tracksToRemove); -- cgit v1.1 From fa51e09b30e884fed20b141783a7447599a6563e Mon Sep 17 00:00:00 2001 From: Rachad Date: Fri, 30 Aug 2013 15:52:00 -0700 Subject: Fixed timestamp handling in ESQueue Access Unit parser. This fixes bug b/10294801 Change-Id: Ie96d36e2ff6fdee0c949a85da3602ab04b34bf6e --- media/libstagefright/mpeg2ts/ESQueue.cpp | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp index 9f3b19c..8f9c9c8 100644 --- a/media/libstagefright/mpeg2ts/ESQueue.cpp +++ b/media/libstagefright/mpeg2ts/ESQueue.cpp @@ -504,15 +504,11 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) { if (first) { timeUs = info->mTimestampUs; + first = false; } if (info->mLength > size) { info->mLength -= size; - - if (first) { - info->mTimestampUs = -1; - } - size = 0; } else { size -= info->mLength; @@ -521,7 +517,6 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) { info = NULL; } - first = false; } if (timeUs == 0ll) { -- cgit v1.1 From b6209a3d4c29bbb88de5a77546f4d545883cc484 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 30 Aug 2013 19:22:29 -0700 Subject: Revert "Fix decoder EOS handling" This reverts commit 9da36a6c8df70a5c8179ac78fab33cfbb5078cb2. Bug: 10571297. 
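For context, the draining pattern introduced by the change being reverted (visible again in the decoder diffs earlier in this series) reduces to the loop condition sketched below; the two flag names mirror those diffs, and the queue types are simplified stand-ins.

    #include <deque>

    struct EosLoopSketch {
        bool mSawInputEos = false;        // input buffer carrying EOS was seen
        bool mSignalledOutputEos = false; // EOS flag already emitted downstream

        // Keep decoding while input is queued, or while input EOS was seen but
        // the matching output EOS has not yet been sent, so any data delayed
        // inside the decoder can still be drained into output buffers.
        bool shouldContinue(const std::deque<int>& inQueue,
                            const std::deque<int>& outQueue) const {
            return (!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos))
                    && !outQueue.empty();
        }
    };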
Change-Id: I76f20fe34872ea54fce626077462fb86c8c3f02e --- media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 217 +++++++++++---------- media/libstagefright/codecs/aacdec/SoftAAC2.h | 2 - media/libstagefright/codecs/mp3dec/SoftMP3.cpp | 114 +++++------ media/libstagefright/codecs/mp3dec/SoftMP3.h | 2 - .../codecs/vorbis/dec/SoftVorbis.cpp | 73 ++++--- .../libstagefright/codecs/vorbis/dec/SoftVorbis.h | 2 - 6 files changed, 202 insertions(+), 208 deletions(-) diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp index c9b5d26..1b20cbb 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp @@ -58,8 +58,6 @@ SoftAAC2::SoftAAC2( mIsADTS(false), mInputBufferCount(0), mSignalledError(false), - mSawInputEos(false), - mSignalledOutputEos(false), mAnchorTimeUs(0), mNumSamplesOutput(0), mOutputPortSettingsChange(NONE) { @@ -352,83 +350,115 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { return; } - while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) { - BufferInfo *inInfo = NULL; - OMX_BUFFERHEADERTYPE *inHeader = NULL; - if (!inQueue.empty()) { - inInfo = *inQueue.begin(); - inHeader = inInfo->mHeader; - } + while (!inQueue.empty() && !outQueue.empty()) { + BufferInfo *inInfo = *inQueue.begin(); + OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; BufferInfo *outInfo = *outQueue.begin(); OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; - outHeader->nFlags = 0; - if (inHeader) { - if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { - mSawInputEos = true; - } + if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + inQueue.erase(inQueue.begin()); + inInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inHeader); + + if (mDecoderHasData) { + // flush out the decoder's delayed data by calling DecodeFrame + // one more time, with the AACDEC_FLUSH flag set + INT_PCM *outBuffer = + reinterpret_cast( + outHeader->pBuffer + outHeader->nOffset); + + AAC_DECODER_ERROR decoderErr = + aacDecoder_DecodeFrame(mAACDecoder, + outBuffer, + outHeader->nAllocLen, + AACDEC_FLUSH); + mDecoderHasData = false; + + if (decoderErr != AAC_DEC_OK) { + mSignalledError = true; + + notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, + NULL); - if (inHeader->nOffset == 0 && inHeader->nFilledLen) { - mAnchorTimeUs = inHeader->nTimeStamp; - mNumSamplesOutput = 0; + return; + } + + outHeader->nFilledLen = + mStreamInfo->frameSize + * sizeof(int16_t) + * mStreamInfo->numChannels; + } else { + // we never submitted any data to the decoder, so there's nothing to flush out + outHeader->nFilledLen = 0; } - if (mIsADTS) { - size_t adtsHeaderSize = 0; - // skip 30 bits, aac_frame_length follows. - // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll????? + outHeader->nFlags = OMX_BUFFERFLAG_EOS; - const uint8_t *adtsHeader = inHeader->pBuffer + inHeader->nOffset; + outQueue.erase(outQueue.begin()); + outInfo->mOwnedByUs = false; + notifyFillBufferDone(outHeader); + return; + } - bool signalError = false; - if (inHeader->nFilledLen < 7) { - ALOGE("Audio data too short to contain even the ADTS header. " - "Got %ld bytes.", inHeader->nFilledLen); + if (inHeader->nOffset == 0) { + mAnchorTimeUs = inHeader->nTimeStamp; + mNumSamplesOutput = 0; + } + + size_t adtsHeaderSize = 0; + if (mIsADTS) { + // skip 30 bits, aac_frame_length follows. + // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll????? 
+ + const uint8_t *adtsHeader = inHeader->pBuffer + inHeader->nOffset; + + bool signalError = false; + if (inHeader->nFilledLen < 7) { + ALOGE("Audio data too short to contain even the ADTS header. " + "Got %ld bytes.", inHeader->nFilledLen); + hexdump(adtsHeader, inHeader->nFilledLen); + signalError = true; + } else { + bool protectionAbsent = (adtsHeader[1] & 1); + + unsigned aac_frame_length = + ((adtsHeader[3] & 3) << 11) + | (adtsHeader[4] << 3) + | (adtsHeader[5] >> 5); + + if (inHeader->nFilledLen < aac_frame_length) { + ALOGE("Not enough audio data for the complete frame. " + "Got %ld bytes, frame size according to the ADTS " + "header is %u bytes.", + inHeader->nFilledLen, aac_frame_length); hexdump(adtsHeader, inHeader->nFilledLen); signalError = true; } else { - bool protectionAbsent = (adtsHeader[1] & 1); - - unsigned aac_frame_length = - ((adtsHeader[3] & 3) << 11) - | (adtsHeader[4] << 3) - | (adtsHeader[5] >> 5); - - if (inHeader->nFilledLen < aac_frame_length) { - ALOGE("Not enough audio data for the complete frame. " - "Got %ld bytes, frame size according to the ADTS " - "header is %u bytes.", - inHeader->nFilledLen, aac_frame_length); - hexdump(adtsHeader, inHeader->nFilledLen); - signalError = true; - } else { - adtsHeaderSize = (protectionAbsent ? 7 : 9); - - inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize; - inBufferLength[0] = aac_frame_length - adtsHeaderSize; - - inHeader->nOffset += adtsHeaderSize; - inHeader->nFilledLen -= adtsHeaderSize; - } + adtsHeaderSize = (protectionAbsent ? 7 : 9); + + inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize; + inBufferLength[0] = aac_frame_length - adtsHeaderSize; + + inHeader->nOffset += adtsHeaderSize; + inHeader->nFilledLen -= adtsHeaderSize; } + } - if (signalError) { - mSignalledError = true; + if (signalError) { + mSignalledError = true; - notify(OMX_EventError, - OMX_ErrorStreamCorrupt, - ERROR_MALFORMED, - NULL); + notify(OMX_EventError, + OMX_ErrorStreamCorrupt, + ERROR_MALFORMED, + NULL); - return; - } - } else { - inBuffer[0] = inHeader->pBuffer + inHeader->nOffset; - inBufferLength[0] = inHeader->nFilledLen; + return; } } else { - inBufferLength[0] = 0; + inBuffer[0] = inHeader->pBuffer + inHeader->nOffset; + inBufferLength[0] = inHeader->nFilledLen; } // Fill and decode @@ -441,66 +471,50 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { int prevNumChannels = mStreamInfo->numChannels; AAC_DECODER_ERROR decoderErr = AAC_DEC_NOT_ENOUGH_BITS; - while ((bytesValid[0] > 0 || mSawInputEos) && decoderErr == AAC_DEC_NOT_ENOUGH_BITS) { - mDecoderHasData |= (bytesValid[0] > 0); + while (bytesValid[0] > 0 && decoderErr == AAC_DEC_NOT_ENOUGH_BITS) { aacDecoder_Fill(mAACDecoder, inBuffer, inBufferLength, bytesValid); + mDecoderHasData = true; decoderErr = aacDecoder_DecodeFrame(mAACDecoder, outBuffer, outHeader->nAllocLen, 0 /* flags */); + if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) { - if (mSawInputEos && bytesValid[0] <= 0) { - if (mDecoderHasData) { - // flush out the decoder's delayed data by calling DecodeFrame - // one more time, with the AACDEC_FLUSH flag set - decoderErr = aacDecoder_DecodeFrame(mAACDecoder, - outBuffer, - outHeader->nAllocLen, - AACDEC_FLUSH); - mDecoderHasData = false; - } - outHeader->nFlags = OMX_BUFFERFLAG_EOS; - mSignalledOutputEos = true; - break; - } else { - ALOGW("Not enough bits, bytesValid %d", bytesValid[0]); - } + ALOGW("Not enough bits, bytesValid %d", bytesValid[0]); } } size_t numOutBytes = mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels; - if (inHeader) { - if 
(decoderErr == AAC_DEC_OK) { - UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0]; - inHeader->nFilledLen -= inBufferUsedLength; - inHeader->nOffset += inBufferUsedLength; - } else { - ALOGW("AAC decoder returned error %d, substituting silence", - decoderErr); + if (decoderErr == AAC_DEC_OK) { + UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0]; + inHeader->nFilledLen -= inBufferUsedLength; + inHeader->nOffset += inBufferUsedLength; + } else { + ALOGW("AAC decoder returned error %d, substituting silence", + decoderErr); - memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes); + memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes); - // Discard input buffer. - inHeader->nFilledLen = 0; + // Discard input buffer. + inHeader->nFilledLen = 0; - aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); + aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); - // fall through - } + // fall through + } - if (inHeader->nFilledLen == 0) { - inInfo->mOwnedByUs = false; - inQueue.erase(inQueue.begin()); - inInfo = NULL; - notifyEmptyBufferDone(inHeader); - inHeader = NULL; - } + if (inHeader->nFilledLen == 0) { + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; } /* @@ -541,6 +555,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { // we've previously decoded valid data, in the latter case // (decode failed) we'll output a silent frame. outHeader->nFilledLen = numOutBytes; + outHeader->nFlags = 0; outHeader->nTimeStamp = mAnchorTimeUs @@ -591,8 +606,6 @@ void SoftAAC2::onReset() { mStreamInfo->sampleRate = 0; mSignalledError = false; - mSawInputEos = false; - mSignalledOutputEos = false; mOutputPortSettingsChange = NONE; } diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h index a7ea1e2..2d960ab 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.h +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h @@ -55,8 +55,6 @@ private: bool mDecoderHasData; size_t mInputBufferCount; bool mSignalledError; - bool mSawInputEos; - bool mSignalledOutputEos; int64_t mAnchorTimeUs; int64_t mNumSamplesOutput; diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp index 877e3cb..7c382fb 100644 --- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp +++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp @@ -49,8 +49,6 @@ SoftMP3::SoftMP3( mNumChannels(2), mSamplingRate(44100), mSignalledError(false), - mSawInputEos(false), - mSignalledOutputEos(false), mOutputPortSettingsChange(NONE) { initPorts(); initDecoder(); @@ -196,36 +194,48 @@ void SoftMP3::onQueueFilled(OMX_U32 portIndex) { List &inQueue = getPortQueue(0); List &outQueue = getPortQueue(1); - while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) { - BufferInfo *inInfo = NULL; - OMX_BUFFERHEADERTYPE *inHeader = NULL; - if (!inQueue.empty()) { - inInfo = *inQueue.begin(); - inHeader = inInfo->mHeader; - } + while (!inQueue.empty() && !outQueue.empty()) { + BufferInfo *inInfo = *inQueue.begin(); + OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; BufferInfo *outInfo = *outQueue.begin(); OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; - outHeader->nFlags = 0; - if (inHeader) { - if (inHeader->nOffset == 0 && inHeader->nFilledLen) { - mAnchorTimeUs = inHeader->nTimeStamp; - mNumFramesOutput = 0; - } + if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + inQueue.erase(inQueue.begin()); + 
inInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inHeader); - if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { - mSawInputEos = true; + if (!mIsFirst) { + // pad the end of the stream with 529 samples, since that many samples + // were trimmed off the beginning when decoding started + outHeader->nFilledLen = + kPVMP3DecoderDelay * mNumChannels * sizeof(int16_t); + + memset(outHeader->pBuffer, 0, outHeader->nFilledLen); + } else { + // Since we never discarded frames from the start, we won't have + // to add any padding at the end either. + outHeader->nFilledLen = 0; } - mConfig->pInputBuffer = - inHeader->pBuffer + inHeader->nOffset; + outHeader->nFlags = OMX_BUFFERFLAG_EOS; - mConfig->inputBufferCurrentLength = inHeader->nFilledLen; - } else { - mConfig->pInputBuffer = NULL; - mConfig->inputBufferCurrentLength = 0; + outQueue.erase(outQueue.begin()); + outInfo->mOwnedByUs = false; + notifyFillBufferDone(outHeader); + return; + } + + if (inHeader->nOffset == 0) { + mAnchorTimeUs = inHeader->nTimeStamp; + mNumFramesOutput = 0; } + + mConfig->pInputBuffer = + inHeader->pBuffer + inHeader->nOffset; + + mConfig->inputBufferCurrentLength = inHeader->nFilledLen; mConfig->inputBufferMaxLength = 0; mConfig->inputBufferUsedLength = 0; @@ -252,28 +262,13 @@ void SoftMP3::onQueueFilled(OMX_U32 portIndex) { mConfig->outputFrameSize = kOutputBufferSize / sizeof(int16_t); } - if (decoderErr == NO_ENOUGH_MAIN_DATA_ERROR && mSawInputEos) { - if (!mIsFirst) { - // pad the end of the stream with 529 samples, since that many samples - // were trimmed off the beginning when decoding started - outHeader->nOffset = 0; - outHeader->nFilledLen = kPVMP3DecoderDelay * mNumChannels * sizeof(int16_t); - - memset(outHeader->pBuffer, 0, outHeader->nFilledLen); - } - outHeader->nFlags = OMX_BUFFERFLAG_EOS; - mSignalledOutputEos = true; - } else { - // This is recoverable, just ignore the current frame and - // play silence instead. - memset(outHeader->pBuffer, - 0, - mConfig->outputFrameSize * sizeof(int16_t)); - - if (inHeader) { - mConfig->inputBufferUsedLength = inHeader->nFilledLen; - } - } + // This is recoverable, just ignore the current frame and + // play silence instead. 
+ memset(outHeader->pBuffer, + 0, + mConfig->outputFrameSize * sizeof(int16_t)); + + mConfig->inputBufferUsedLength = inHeader->nFilledLen; } else if (mConfig->samplingRate != mSamplingRate || mConfig->num_channels != mNumChannels) { mSamplingRate = mConfig->samplingRate; @@ -294,7 +289,7 @@ void SoftMP3::onQueueFilled(OMX_U32 portIndex) { outHeader->nFilledLen = mConfig->outputFrameSize * sizeof(int16_t) - outHeader->nOffset; - } else if (!mSignalledOutputEos) { + } else { outHeader->nOffset = 0; outHeader->nFilledLen = mConfig->outputFrameSize * sizeof(int16_t); } @@ -303,24 +298,23 @@ void SoftMP3::onQueueFilled(OMX_U32 portIndex) { mAnchorTimeUs + (mNumFramesOutput * 1000000ll) / mConfig->samplingRate; - if (inHeader) { - CHECK_GE(inHeader->nFilledLen, mConfig->inputBufferUsedLength); - - inHeader->nOffset += mConfig->inputBufferUsedLength; - inHeader->nFilledLen -= mConfig->inputBufferUsedLength; + outHeader->nFlags = 0; + CHECK_GE(inHeader->nFilledLen, mConfig->inputBufferUsedLength); - if (inHeader->nFilledLen == 0) { - inInfo->mOwnedByUs = false; - inQueue.erase(inQueue.begin()); - inInfo = NULL; - notifyEmptyBufferDone(inHeader); - inHeader = NULL; - } - } + inHeader->nOffset += mConfig->inputBufferUsedLength; + inHeader->nFilledLen -= mConfig->inputBufferUsedLength; mNumFramesOutput += mConfig->outputFrameSize / mNumChannels; + if (inHeader->nFilledLen == 0) { + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + } + outInfo->mOwnedByUs = false; outQueue.erase(outQueue.begin()); outInfo = NULL; @@ -368,8 +362,6 @@ void SoftMP3::onReset() { pvmp3_InitDecoder(mConfig, mDecoderBuf); mIsFirst = true; mSignalledError = false; - mSawInputEos = false; - mSignalledOutputEos = false; mOutputPortSettingsChange = NONE; } diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.h b/media/libstagefright/codecs/mp3dec/SoftMP3.h index f9e7b53..4af91ea 100644 --- a/media/libstagefright/codecs/mp3dec/SoftMP3.h +++ b/media/libstagefright/codecs/mp3dec/SoftMP3.h @@ -61,8 +61,6 @@ private: bool mIsFirst; bool mSignalledError; - bool mSawInputEos; - bool mSignalledOutputEos; enum { NONE, diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp index a377b23..51bb958 100644 --- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp +++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp @@ -54,8 +54,6 @@ SoftVorbis::SoftVorbis( mAnchorTimeUs(0), mNumFramesOutput(0), mNumFramesLeftOnPage(-1), - mSawInputEos(false), - mSignalledOutputEos(false), mOutputPortSettingsChange(NONE) { initPorts(); CHECK_EQ(initDecoder(), (status_t)OK); @@ -292,47 +290,48 @@ void SoftVorbis::onQueueFilled(OMX_U32 portIndex) { return; } - while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) { - BufferInfo *inInfo = NULL; - OMX_BUFFERHEADERTYPE *inHeader = NULL; - if (!inQueue.empty()) { - inInfo = *inQueue.begin(); - inHeader = inInfo->mHeader; - } + while (!inQueue.empty() && !outQueue.empty()) { + BufferInfo *inInfo = *inQueue.begin(); + OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; BufferInfo *outInfo = *outQueue.begin(); OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; - int32_t numPageSamples = 0; - - if (inHeader) { - if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { - mSawInputEos = true; - } - - if (inHeader->nFilledLen || !mSawInputEos) { - CHECK_GE(inHeader->nFilledLen, sizeof(numPageSamples)); - memcpy(&numPageSamples, - 
inHeader->pBuffer - + inHeader->nOffset + inHeader->nFilledLen - 4, - sizeof(numPageSamples)); + if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + inQueue.erase(inQueue.begin()); + inInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inHeader); - if (inHeader->nOffset == 0) { - mAnchorTimeUs = inHeader->nTimeStamp; - mNumFramesOutput = 0; - } + outHeader->nFilledLen = 0; + outHeader->nFlags = OMX_BUFFERFLAG_EOS; - inHeader->nFilledLen -= sizeof(numPageSamples);; - } + outQueue.erase(outQueue.begin()); + outInfo->mOwnedByUs = false; + notifyFillBufferDone(outHeader); + return; } + int32_t numPageSamples; + CHECK_GE(inHeader->nFilledLen, sizeof(numPageSamples)); + memcpy(&numPageSamples, + inHeader->pBuffer + + inHeader->nOffset + inHeader->nFilledLen - 4, + sizeof(numPageSamples)); + if (numPageSamples >= 0) { mNumFramesLeftOnPage = numPageSamples; } + if (inHeader->nOffset == 0) { + mAnchorTimeUs = inHeader->nTimeStamp; + mNumFramesOutput = 0; + } + + inHeader->nFilledLen -= sizeof(numPageSamples);; + ogg_buffer buf; - buf.data = inHeader ? inHeader->pBuffer + inHeader->nOffset : NULL; - buf.size = inHeader ? inHeader->nFilledLen : 0; + buf.data = inHeader->pBuffer + inHeader->nOffset; + buf.size = inHeader->nFilledLen; buf.refcount = 1; buf.ptr.owner = NULL; @@ -385,13 +384,11 @@ void SoftVorbis::onQueueFilled(OMX_U32 portIndex) { mNumFramesOutput += numFrames; - if (inHeader) { - inInfo->mOwnedByUs = false; - inQueue.erase(inQueue.begin()); - inInfo = NULL; - notifyEmptyBufferDone(inHeader); - inHeader = NULL; - } + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; outInfo->mOwnedByUs = false; outQueue.erase(outQueue.begin()); @@ -428,8 +425,6 @@ void SoftVorbis::onReset() { mVi = NULL; } - mSawInputEos = false; - mSignalledOutputEos = false; mOutputPortSettingsChange = NONE; } diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h index 1d00816..cb628a0 100644 --- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h +++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h @@ -59,8 +59,6 @@ private: int64_t mAnchorTimeUs; int64_t mNumFramesOutput; int32_t mNumFramesLeftOnPage; - bool mSawInputEos; - bool mSignalledOutputEos; enum { NONE, -- cgit v1.1 From 4585e8f0ff47580fdb81ed8652bde32db1770050 Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Fri, 30 Aug 2013 23:05:00 +0000 Subject: Revert "Enhancement for OnInfo callback on DRM Framework" This reverts commit 5ff7836da0220b3097f36c8a5e82111816ebca62. 
Fixes Fatal signal 6 (SIGABRT)" when playing or downloading a video bug: 10542864 Change-Id: I7f81a41c3acdbeb571a514b3f65e7ba37d403cca --- drm/common/DrmInfoEvent.cpp | 92 +---------------------------- drm/common/IDrmServiceListener.cpp | 32 +--------- include/drm/DrmInfoEvent.h | 117 +------------------------------------ 3 files changed, 4 insertions(+), 237 deletions(-) diff --git a/drm/common/DrmInfoEvent.cpp b/drm/common/DrmInfoEvent.cpp index 2315aa9..27a5a2d 100644 --- a/drm/common/DrmInfoEvent.cpp +++ b/drm/common/DrmInfoEvent.cpp @@ -16,29 +16,16 @@ #include #include -#include using namespace android; DrmInfoEvent::DrmInfoEvent(int uniqueId, int infoType, const String8 message) : mUniqueId(uniqueId), mInfoType(infoType), - mMessage(message), - mDrmBuffer() { + mMessage(message) { } -DrmInfoEvent::DrmInfoEvent(int uniqueId, int infoType, const String8 message, - const DrmBuffer& drmBuffer) - : mUniqueId(uniqueId), mInfoType(infoType), mMessage(message), mDrmBuffer() { - setData(drmBuffer); -} - -DrmInfoEvent::~DrmInfoEvent() { - delete [] mDrmBuffer.data; -} - - int DrmInfoEvent::getUniqueId() const { return mUniqueId; } @@ -51,80 +38,3 @@ const String8 DrmInfoEvent::getMessage() const { return mMessage; } -int DrmInfoEvent::getCount() const { - return mAttributes.size(); -} - -status_t DrmInfoEvent::put(const String8& key, String8& value) { - mAttributes.add(key, value); - return DRM_NO_ERROR; -} - -const String8 DrmInfoEvent::get(const String8& key) const { - if (mAttributes.indexOfKey(key) != NAME_NOT_FOUND) { - return mAttributes.valueFor(key); - } - return String8(""); -} - -const DrmBuffer& DrmInfoEvent::getData() const { - return mDrmBuffer; -} - -void DrmInfoEvent::setData(const DrmBuffer& drmBuffer) { - delete [] mDrmBuffer.data; - mDrmBuffer.data = new char[drmBuffer.length];; - mDrmBuffer.length = drmBuffer.length; - memcpy(mDrmBuffer.data, drmBuffer.data, drmBuffer.length); -} - -DrmInfoEvent::KeyIterator DrmInfoEvent::keyIterator() const { - return KeyIterator(this); -} - -DrmInfoEvent::Iterator DrmInfoEvent::iterator() const { - return Iterator(this); -} - -// KeyIterator implementation -DrmInfoEvent::KeyIterator::KeyIterator(const DrmInfoEvent::KeyIterator& keyIterator) - : mDrmInfoEvent(keyIterator.mDrmInfoEvent), mIndex(keyIterator.mIndex) { -} - -bool DrmInfoEvent::KeyIterator::hasNext() { - return (mIndex < mDrmInfoEvent->mAttributes.size()); -} - -const String8& DrmInfoEvent::KeyIterator::next() { - const String8& key = mDrmInfoEvent->mAttributes.keyAt(mIndex); - mIndex++; - return key; -} - -DrmInfoEvent::KeyIterator& DrmInfoEvent::KeyIterator::operator=( - const DrmInfoEvent::KeyIterator& keyIterator) { - mDrmInfoEvent = keyIterator.mDrmInfoEvent; - mIndex = keyIterator.mIndex; - return *this; -} - -// Iterator implementation -DrmInfoEvent::Iterator::Iterator(const DrmInfoEvent::Iterator& iterator) - : mDrmInfoEvent(iterator.mDrmInfoEvent), mIndex(iterator.mIndex) { -} - -DrmInfoEvent::Iterator& DrmInfoEvent::Iterator::operator=(const DrmInfoEvent::Iterator& iterator) { - mDrmInfoEvent = iterator.mDrmInfoEvent; - mIndex = iterator.mIndex; - return *this; -} - -bool DrmInfoEvent::Iterator::hasNext() { - return mIndex < mDrmInfoEvent->mAttributes.size(); -} - -const String8& DrmInfoEvent::Iterator::next() { - const String8& value = mDrmInfoEvent->mAttributes.editValueAt(mIndex); - mIndex++; - return value; -} diff --git a/drm/common/IDrmServiceListener.cpp b/drm/common/IDrmServiceListener.cpp index d825afb..6eeea40 100644 --- a/drm/common/IDrmServiceListener.cpp 
+++ b/drm/common/IDrmServiceListener.cpp @@ -32,19 +32,6 @@ status_t BpDrmServiceListener::notify(const DrmInfoEvent& event) { data.writeInt32(event.getType()); data.writeString8(event.getMessage()); - data.writeInt32(event.getCount()); - DrmInfoEvent::KeyIterator keyIt = event.keyIterator(); - while (keyIt.hasNext()) { - String8 key = keyIt.next(); - data.writeString8(key); - data.writeString8(event.get(key)); - } - const DrmBuffer& value = event.getData(); - data.writeInt32(value.length); - if (value.length > 0) { - data.write(value.data, value.length); - } - remote()->transact(NOTIFY, data, &reply); return reply.readInt32(); } @@ -62,24 +49,7 @@ status_t BnDrmServiceListener::onTransact( int type = data.readInt32(); const String8& message = data.readString8(); - DrmInfoEvent event(uniqueId, type, message); - int size = data.readInt32(); - for (int index = 0; index < size; index++) { - String8 key(data.readString8()); - String8 value(data.readString8()); - event.put(key, value); - } - int valueSize = data.readInt32(); - if (valueSize > 0) { - char* valueData = new char[valueSize]; - data.read(valueData, valueSize); - DrmBuffer drmBuffer(valueData, valueSize); - event.setData(drmBuffer); - delete[] valueData; - } - - status_t status = notify(event); - + status_t status = notify(DrmInfoEvent(uniqueId, type, message)); reply->writeInt32(status); return DRM_NO_ERROR; diff --git a/include/drm/DrmInfoEvent.h b/include/drm/DrmInfoEvent.h index 23b2950..dfca228 100644 --- a/include/drm/DrmInfoEvent.h +++ b/include/drm/DrmInfoEvent.h @@ -17,8 +17,6 @@ #ifndef __DRM_INFO_EVENT_H__ #define __DRM_INFO_EVENT_H__ -#include "drm_framework_common.h" - namespace android { class String8; @@ -73,70 +71,18 @@ public: public: /** - * Constructor for DrmInfoEvent. - * Data in drmBuffer are copied to newly allocated buffer. 
+ * Constructor for DrmInfoEvent * * @param[in] uniqueId Unique session identifier * @param[in] infoType Type of information * @param[in] message Message description - * @param[in] drmBuffer Binary information */ DrmInfoEvent(int uniqueId, int infoType, const String8 message); - DrmInfoEvent(int uniqueId, int infoType, const String8 message, const DrmBuffer& drmBuffer); /** * Destructor for DrmInfoEvent */ - ~DrmInfoEvent(); - -public: - /** - * Iterator for key - */ - class KeyIterator { - friend class DrmInfoEvent; - - private: - KeyIterator(const DrmInfoEvent* drmInfoEvent) - : mDrmInfoEvent(const_cast (drmInfoEvent)), mIndex(0) {} - - public: - KeyIterator(const KeyIterator& keyIterator); - KeyIterator& operator=(const KeyIterator& keyIterator); - virtual ~KeyIterator() {} - - public: - bool hasNext(); - const String8& next(); - - private: - DrmInfoEvent* mDrmInfoEvent; - unsigned int mIndex; - }; - - /** - * Iterator - */ - class Iterator { - friend class DrmInfoEvent; - - private: - Iterator(const DrmInfoEvent* drmInfoEvent) - : mDrmInfoEvent(const_cast (drmInfoEvent)), mIndex(0) {} - - public: - Iterator(const Iterator& iterator); - Iterator& operator=(const Iterator& iterator); - virtual ~Iterator() {} - - public: - bool hasNext(); - const String8& next(); - - private: - DrmInfoEvent* mDrmInfoEvent; - unsigned int mIndex; - }; + virtual ~DrmInfoEvent() {} public: /** @@ -160,69 +106,10 @@ public: */ const String8 getMessage() const; - /** - * Returns the number of attributes contained in this instance - * - * @return Number of attributes - */ - int getCount() const; - - /** - * Adds optional information as pair to this instance - * - * @param[in] key Key to add - * @param[in] value Value to add - * @return Returns the error code - */ - status_t put(const String8& key, String8& value); - - /** - * Retrieves the value of given key - * - * @param key Key whose value to be retrieved - * @return The value - */ - const String8 get(const String8& key) const; - - /** - * Returns KeyIterator object to walk through the keys associated with this instance - * - * @return KeyIterator object - */ - KeyIterator keyIterator() const; - - /** - * Returns Iterator object to walk through the values associated with this instance - * - * @return Iterator object - */ - Iterator iterator() const; - - /** - * Returns the Binary information associated with this instance - * - * @return Binary information - */ - const DrmBuffer& getData() const; - - /** - * Sets the Binary information associated with this instance. - * Data in drmBuffer are copied to newly allocated buffer. - * - * @param[in] drmBuffer Binary information associated with this instance - */ - void setData(const DrmBuffer& drmBuffer); - -private: - DrmInfoEvent(const DrmInfoEvent& ref); - const DrmInfoEvent& operator=(const DrmInfoEvent& ref); - private: int mUniqueId; int mInfoType; const String8 mMessage; - KeyedVector mAttributes; - DrmBuffer mDrmBuffer; }; }; -- cgit v1.1 From 767094dd98b01baf21de2ad09c27b3c98776cf73 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 23 Aug 2013 13:51:43 -0700 Subject: Add NBAIO_Sink::getTimestamp() with a real implementation in AudioStreamOutSink for dummy implementation initially in MonoPipe. Use in AudioFlinger::PlaybackThread::threadLoop_write() to keep the input to the timestamp latch up-to-date. 
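A standalone sketch of how the new timestamp is meant to be consumed, as in the threadLoop_write() hunk below: the presented-frame count is paired with the frames already written to estimate what is still queued downstream. The struct here is a stand-in for the real AudioTimestamp, which lives in the Android tree.

    #include <cstdint>
    #include <ctime>

    // Stand-in for android::AudioTimestamp: frames presented to an external
    // observer, plus the CLOCK_MONOTONIC time at which that count was taken.
    struct TimestampSketch {
        uint32_t mPosition;
        timespec mTime;
    };

    // framesWritten: total frames handed to the sink so far.
    // Returns how many of those frames are still in flight past the sink.
    uint32_t framesInFlight(uint32_t framesWritten, const TimestampSketch& ts) {
        return framesWritten >= ts.mPosition ? framesWritten - ts.mPosition : 0;
    }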
Change-Id: I10ef277991b63bb43d55d6f3df75116ef32246cd --- include/media/nbaio/AudioStreamOutSink.h | 2 ++ include/media/nbaio/MonoPipe.h | 3 +++ include/media/nbaio/NBAIO.h | 6 ++++++ media/libnbaio/AudioStreamOutSink.cpp | 12 ++++++++++++ media/libnbaio/MonoPipe.cpp | 5 +++++ services/audioflinger/Threads.cpp | 4 +++- 6 files changed, 31 insertions(+), 1 deletion(-) diff --git a/include/media/nbaio/AudioStreamOutSink.h b/include/media/nbaio/AudioStreamOutSink.h index 5976b18..7948d40 100644 --- a/include/media/nbaio/AudioStreamOutSink.h +++ b/include/media/nbaio/AudioStreamOutSink.h @@ -52,6 +52,8 @@ public: // implementation of GNWT (if any) virtual status_t getNextWriteTimestamp(int64_t *timestamp); + virtual status_t getTimestamp(AudioTimestamp& timestamp); + // NBAIO_Sink end #if 0 // until necessary diff --git a/include/media/nbaio/MonoPipe.h b/include/media/nbaio/MonoPipe.h index 5fcfe9e..ffdcc21 100644 --- a/include/media/nbaio/MonoPipe.h +++ b/include/media/nbaio/MonoPipe.h @@ -88,6 +88,9 @@ public: // Return true if the write side of a pipe is currently shutdown. bool isShutdown(); + // Return NO_ERROR if there is a timestamp available + status_t getTimestamp(AudioTimestamp& timestamp); + private: // A pair of methods and a helper variable which allows the reader and the // writer to update and observe the values of mFront and mNextRdPTS in an diff --git a/include/media/nbaio/NBAIO.h b/include/media/nbaio/NBAIO.h index f5d6eb5..cc5b3e6 100644 --- a/include/media/nbaio/NBAIO.h +++ b/include/media/nbaio/NBAIO.h @@ -28,6 +28,7 @@ #include #include #include +#include namespace android { @@ -213,6 +214,11 @@ public: // Something unexpected happened internally. Check the logs and start debugging. virtual status_t getNextWriteTimestamp(int64_t *ts) { return INVALID_OPERATION; } + // Returns NO_ERROR if a timestamp is available. The timestamp includes the total number + // of frames presented to an external observer, together with the value of CLOCK_MONOTONIC + // as of this presentation count. 
+ virtual status_t getTimestamp(AudioTimestamp& timestamp) { return INVALID_OPERATION; } + protected: NBAIO_Sink(NBAIO_Format format = Format_Invalid) : NBAIO_Port(format), mFramesWritten(0) { } virtual ~NBAIO_Sink() { } diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp index 6f525e5..b2de8a2 100644 --- a/media/libnbaio/AudioStreamOutSink.cpp +++ b/media/libnbaio/AudioStreamOutSink.cpp @@ -79,4 +79,16 @@ status_t AudioStreamOutSink::getNextWriteTimestamp(int64_t *timestamp) { return mStream->get_next_write_timestamp(mStream, timestamp); } +status_t AudioStreamOutSink::getTimestamp(AudioTimestamp& timestamp) +{ + // FIXME position64 won't be needed after AudioTimestamp.mPosition is changed to uint64_t + uint64_t position64; + int ok = mStream->get_presentation_position(mStream, &position64, ×tamp.mTime); + if (ok != 0) { + return INVALID_OPERATION; + } + timestamp.mPosition = position64; + return OK; +} + } // namespace android diff --git a/media/libnbaio/MonoPipe.cpp b/media/libnbaio/MonoPipe.cpp index e8d3d9b..a74b49e 100644 --- a/media/libnbaio/MonoPipe.cpp +++ b/media/libnbaio/MonoPipe.cpp @@ -310,4 +310,9 @@ bool MonoPipe::isShutdown() return mIsShutdown; } +status_t MonoPipe::getTimestamp(AudioTimestamp& timestamp) +{ + return INVALID_OPERATION; +} + } // namespace android diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 3921b68..fda4211 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1820,7 +1820,7 @@ ssize_t AudioFlinger::PlaybackThread::threadLoop_write() } else { bytesWritten = framesWritten; } - status_t status = INVALID_OPERATION; // mLatchD.mTimestamp is invalid + status_t status = mNormalSink->getTimestamp(mLatchD.mTimestamp); if (status == NO_ERROR) { size_t totalFramesWritten = mNormalSink->framesWritten(); if (totalFramesWritten >= mLatchD.mTimestamp.mPosition) { @@ -1837,6 +1837,8 @@ ssize_t AudioFlinger::PlaybackThread::threadLoop_write() ALOG_ASSERT(mCallbackThread != 0); mCallbackThread->setWriteBlocked(true); } + // FIXME We should have an implementation of timestamps for direct output threads. + // They are used e.g for multichannel PCM playback over HDMI. bytesWritten = mOutput->stream->write(mOutput->stream, mMixBuffer + offset, mBytesRemaining); if (mUseAsyncWrite && -- cgit v1.1 From 0a4df03542e7d7aab716a60e206dceaf5da178f3 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Tue, 3 Sep 2013 13:29:28 -0700 Subject: Camera2/3: Increase precapture start timeout Currently, we only wait 200 ms for the HAL to switch to the precapture state. This is insufficient at low preview frame rates (dark conditions, ~10 fps), where a 3-request-deep HAL pipeline will take 300 ms to produce a precapture trigger output. In those cases, we would prematurely skip the precapture sequence, causing bad quality pictures. Since this is a fallback timeout to prevent deadlock in case of a bad HAL or framework implementation, increase the timeout to 1 second. 
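Put numerically, using the 100 ms kWaitDuration visible in the diff below: at ~10 fps each preview frame takes about 100 ms, so a 3-request-deep pipeline needs roughly 3 × 100 ms = 300 ms before the trigger output can appear, which already exceeds the old limit of 2 × 100 ms = 200 ms; the new limit of 10 × 100 ms = 1 s leaves ample margin.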
Has no impact on correctly-working implementations Bug: 10430191 Change-Id: I08e99f8e7f3beb8f15dcee32632cdebfe3dca8b3 --- services/camera/libcameraservice/api1/client2/CaptureSequencer.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h index 76750aa..7ad461a 100644 --- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h +++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h @@ -100,7 +100,7 @@ class CaptureSequencer: * Internal to CaptureSequencer */ static const nsecs_t kWaitDuration = 100000000; // 100 ms - static const int kMaxTimeoutsForPrecaptureStart = 2; // 200 ms + static const int kMaxTimeoutsForPrecaptureStart = 10; // 1 sec static const int kMaxTimeoutsForPrecaptureEnd = 20; // 2 sec static const int kMaxTimeoutsForCaptureEnd = 40; // 4 sec -- cgit v1.1 From a07a1c2c91dc7ee6ded319262499f20cd01edcf7 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 23 Aug 2013 10:54:35 -0700 Subject: Instantiate SingleStateQueue as typedef AudioTimestampSingleStateQueue and use it in MonoPipe. Change-Id: Idaebb362bd9d0a81a7ed83792ab9369dc37c0e74 --- include/media/nbaio/MonoPipe.h | 7 +++++++ media/libmedia/Android.mk | 1 + media/libmedia/SingleStateQueueInstantiations.cpp | 2 ++ media/libnbaio/Android.mk | 5 ++++- media/libnbaio/MonoPipe.cpp | 5 ++++- 5 files changed, 18 insertions(+), 2 deletions(-) diff --git a/include/media/nbaio/MonoPipe.h b/include/media/nbaio/MonoPipe.h index ffdcc21..d3802fe 100644 --- a/include/media/nbaio/MonoPipe.h +++ b/include/media/nbaio/MonoPipe.h @@ -20,9 +20,12 @@ #include #include #include "NBAIO.h" +#include namespace android { +typedef SingleStateQueue AudioTimestampSingleStateQueue; + // MonoPipe is similar to Pipe except: // - supports only a single reader, called MonoPipeReader // - write() cannot overrun; instead it will return a short actual count if insufficient space @@ -130,6 +133,10 @@ private: LinearTransform mSamplesToLocalTime; bool mIsShutdown; // whether shutdown(true) was called, no barriers are needed + + AudioTimestampSingleStateQueue::Shared mTimestampShared; + AudioTimestampSingleStateQueue::Mutator mTimestampMutator; + AudioTimestampSingleStateQueue::Observer mTimestampObserver; }; } // namespace android diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index 96755bb..56e7787 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -62,6 +62,7 @@ LOCAL_SRC_FILES += ../libnbaio/roundup.c LOCAL_CFLAGS += -DANDROID_SMP=$(if $(findstring true,$(TARGET_CPU_SMP)),1,0) LOCAL_SRC_FILES += SingleStateQueue.cpp LOCAL_CFLAGS += -DSINGLE_STATE_QUEUE_INSTANTIATIONS='"SingleStateQueueInstantiations.cpp"' +# Consider a separate a library for SingleStateQueueInstantiations. 
LOCAL_SHARED_LIBRARIES := \ libui liblog libcutils libutils libbinder libsonivox libicuuc libexpat \ diff --git a/media/libmedia/SingleStateQueueInstantiations.cpp b/media/libmedia/SingleStateQueueInstantiations.cpp index 2afebe9..0265c8c 100644 --- a/media/libmedia/SingleStateQueueInstantiations.cpp +++ b/media/libmedia/SingleStateQueueInstantiations.cpp @@ -16,11 +16,13 @@ #include #include +#include // FIXME hack for gcc namespace android { template class SingleStateQueue; // typedef StaticAudioTrackSingleStateQueue +template class SingleStateQueue; // typedef AudioTimestampSingleStateQueue } diff --git a/media/libnbaio/Android.mk b/media/libnbaio/Android.mk index 5d00d15..69c75b8 100644 --- a/media/libnbaio/Android.mk +++ b/media/libnbaio/Android.mk @@ -31,6 +31,9 @@ LOCAL_SHARED_LIBRARIES := \ libcommon_time_client \ libcutils \ libutils \ - liblog + liblog \ + libmedia +# This dependency on libmedia is for SingleStateQueueInstantiations. +# Consider a separate a library for SingleStateQueueInstantiations. include $(BUILD_SHARED_LIBRARY) diff --git a/media/libnbaio/MonoPipe.cpp b/media/libnbaio/MonoPipe.cpp index a74b49e..b55be83 100644 --- a/media/libnbaio/MonoPipe.cpp +++ b/media/libnbaio/MonoPipe.cpp @@ -42,7 +42,10 @@ MonoPipe::MonoPipe(size_t reqFrames, NBAIO_Format format, bool writeCanBlock) : // mWriteTs mSetpoint((reqFrames * 11) / 16), mWriteCanBlock(writeCanBlock), - mIsShutdown(false) + mIsShutdown(false), + // mTimestampShared + mTimestampMutator(&mTimestampShared), + mTimestampObserver(&mTimestampShared) { CCHelper tmpHelper; status_t res; -- cgit v1.1 From 894d6be4f9b4721c77a01919ecf03b27cec90cc9 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 26 Aug 2013 10:29:28 -0700 Subject: Add NBAIO_Source::onTimestamp() with dummy default implementation, and implement in MonoPipeReader. onTimestamp is meant to be called by the corresponding sink when it has a new timestamp available. Change-Id: I8a90d24d1061e4a592ce5bd8ee1c9fce6bdd8a84 --- include/media/nbaio/MonoPipeReader.h | 2 ++ include/media/nbaio/NBAIO.h | 4 ++++ media/libnbaio/MonoPipeReader.cpp | 5 +++++ 3 files changed, 11 insertions(+) diff --git a/include/media/nbaio/MonoPipeReader.h b/include/media/nbaio/MonoPipeReader.h index 0e1c992..78fe867 100644 --- a/include/media/nbaio/MonoPipeReader.h +++ b/include/media/nbaio/MonoPipeReader.h @@ -49,6 +49,8 @@ public: virtual ssize_t read(void *buffer, size_t count, int64_t readPTS); + virtual void onTimestamp(const AudioTimestamp& timestamp); + // NBAIO_Source end #if 0 // until necessary diff --git a/include/media/nbaio/NBAIO.h b/include/media/nbaio/NBAIO.h index cc5b3e6..1da0c73 100644 --- a/include/media/nbaio/NBAIO.h +++ b/include/media/nbaio/NBAIO.h @@ -306,6 +306,10 @@ public: virtual ssize_t readVia(readVia_t via, size_t total, void *user, int64_t readPTS, size_t block = 0); + // Invoked asynchronously by corresponding sink when a new timestamp is available. + // Default implementation ignores the timestamp. 
+ virtual void onTimestamp(const AudioTimestamp& timestamp) { } + protected: NBAIO_Source(NBAIO_Format format = Format_Invalid) : NBAIO_Port(format), mFramesRead(0) { } virtual ~NBAIO_Source() { } diff --git a/media/libnbaio/MonoPipeReader.cpp b/media/libnbaio/MonoPipeReader.cpp index 394f6ac..851341a 100644 --- a/media/libnbaio/MonoPipeReader.cpp +++ b/media/libnbaio/MonoPipeReader.cpp @@ -86,4 +86,9 @@ ssize_t MonoPipeReader::read(void *buffer, size_t count, int64_t readPTS) return red; } +void MonoPipeReader::onTimestamp(const AudioTimestamp& timestamp) +{ + mPipe->mTimestampMutator.push(timestamp); +} + } // namespace android -- cgit v1.1 From 6466c9e6e6278c740aed77f695f679be9f5db478 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 23 Aug 2013 10:54:07 -0700 Subject: Add ExtendedAudioBufferProvider::framesReleased and onTimestamp and implement them in SourceAudioBufferProvider using the associated NBAIO_Source, and in Track using the associated AudioTrackServerProxy. Change-Id: I60dc4adba63fc1dc452ff16caf347e4a7c8242c2 --- include/media/ExtendedAudioBufferProvider.h | 8 ++++++++ include/media/nbaio/SourceAudioBufferProvider.h | 3 +++ media/libnbaio/SourceAudioBufferProvider.cpp | 13 ++++++++++++- services/audioflinger/PlaybackTracks.h | 2 ++ services/audioflinger/Tracks.cpp | 9 +++++++++ 5 files changed, 34 insertions(+), 1 deletion(-) diff --git a/include/media/ExtendedAudioBufferProvider.h b/include/media/ExtendedAudioBufferProvider.h index 00c4444..2539ed3 100644 --- a/include/media/ExtendedAudioBufferProvider.h +++ b/include/media/ExtendedAudioBufferProvider.h @@ -18,12 +18,20 @@ #define ANDROID_EXTENDED_AUDIO_BUFFER_PROVIDER_H #include +#include namespace android { class ExtendedAudioBufferProvider : public AudioBufferProvider { public: virtual size_t framesReady() const = 0; // see description at AudioFlinger.h + + // Return the total number of frames that have been obtained and released + virtual size_t framesReleased() const { return 0; } + + // Invoked by buffer consumer when a new timestamp is available. + // Default implementation ignores the timestamp. 
+ virtual void onTimestamp(const AudioTimestamp& timestamp) { } }; } // namespace android diff --git a/include/media/nbaio/SourceAudioBufferProvider.h b/include/media/nbaio/SourceAudioBufferProvider.h index c08331b..cdfb6fe 100644 --- a/include/media/nbaio/SourceAudioBufferProvider.h +++ b/include/media/nbaio/SourceAudioBufferProvider.h @@ -36,6 +36,8 @@ public: // ExtendedAudioBufferProvider interface virtual size_t framesReady() const; + virtual size_t framesReleased() const; + virtual void onTimestamp(const AudioTimestamp& timestamp); private: const sp mSource; // the wrapped source @@ -45,6 +47,7 @@ private: size_t mOffset; // frame offset within mAllocated of valid data size_t mRemaining; // frame count within mAllocated of valid data size_t mGetCount; // buffer.frameCount of the most recent getNextBuffer + uint32_t mFramesReleased; // counter of the total number of frames released }; } // namespace android diff --git a/media/libnbaio/SourceAudioBufferProvider.cpp b/media/libnbaio/SourceAudioBufferProvider.cpp index d11a86c..062fa0f 100644 --- a/media/libnbaio/SourceAudioBufferProvider.cpp +++ b/media/libnbaio/SourceAudioBufferProvider.cpp @@ -25,7 +25,7 @@ namespace android { SourceAudioBufferProvider::SourceAudioBufferProvider(const sp& source) : mSource(source), // mFrameBitShiftFormat below - mAllocated(NULL), mSize(0), mOffset(0), mRemaining(0), mGetCount(0) + mAllocated(NULL), mSize(0), mOffset(0), mRemaining(0), mGetCount(0), mFramesReleased(0) { ALOG_ASSERT(source != 0); @@ -90,6 +90,7 @@ void SourceAudioBufferProvider::releaseBuffer(Buffer *buffer) (mOffset + mRemaining <= mSize)); mOffset += buffer->frameCount; mRemaining -= buffer->frameCount; + mFramesReleased += buffer->frameCount; buffer->raw = NULL; buffer->frameCount = 0; mGetCount = 0; @@ -101,4 +102,14 @@ size_t SourceAudioBufferProvider::framesReady() const return avail < 0 ? 0 : (size_t) avail; } +size_t SourceAudioBufferProvider::framesReleased() const +{ + return mFramesReleased; +} + +void SourceAudioBufferProvider::onTimestamp(const AudioTimestamp& timestamp) +{ + mSource->onTimestamp(timestamp); +} + } // namespace android diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index d34833f..0308b99 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -81,7 +81,9 @@ protected: int64_t pts = kInvalidPTS); // releaseBuffer() not overridden + // ExtendedAudioBufferProvider interface virtual size_t framesReady() const; + virtual size_t framesReleased() const; bool isPausing() const { return mState == PAUSING; } bool isPaused() const { return mState == PAUSED; } diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 9622709..db67be6 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -498,6 +498,10 @@ status_t AudioFlinger::PlaybackThread::Track::getNextBuffer( return status; } +// releaseBuffer() is not overridden + +// ExtendedAudioBufferProvider interface + // Note that framesReady() takes a mutex on the control block using tryLock(). 
// This could result in priority inversion if framesReady() is called by the normal mixer, // as the normal mixer thread runs at lower @@ -510,6 +514,11 @@ size_t AudioFlinger::PlaybackThread::Track::framesReady() const { return mAudioTrackServerProxy->framesReady(); } +size_t AudioFlinger::PlaybackThread::Track::framesReleased() const +{ + return mAudioTrackServerProxy->framesReleased(); +} + // Don't call for fast tracks; the framesReady() could result in priority inversion bool AudioFlinger::PlaybackThread::Track::isReady() const { if (mFillingUpStatus != FS_FILLING || isStopped() || isPausing()) { -- cgit v1.1 From 732845c7e8d294bb1aaa4cd9687da62b51f1f6e6 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 23 Aug 2013 09:26:31 -0700 Subject: FastMixer computes presentation timestamps for fast tracks and forwards them to each track's ExtendedAudioBufferProvider::onTimestamp(). Change-Id: I06fb9586bb7e20d5c0289abdc8cac6cd3fd2f6e8 --- services/audioflinger/FastMixer.cpp | 47 ++++++++++++++++++++++++++++++++++++- 1 file changed, 46 insertions(+), 1 deletion(-) diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index ad9f4f2..f27ea17 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -96,6 +96,12 @@ bool FastMixer::threadLoop() uint32_t warmupCycles = 0; // counter of number of loop cycles required to warmup NBAIO_Sink* teeSink = NULL; // if non-NULL, then duplicate write() to this non-blocking sink NBLog::Writer dummyLogWriter, *logWriter = &dummyLogWriter; + uint32_t totalNativeFramesWritten = 0; // copied to dumpState->mFramesWritten + + // next 2 fields are valid only when timestampStatus == NO_ERROR + AudioTimestamp timestamp; + uint32_t nativeFramesWrittenButNotPresented = 0; // the = 0 is to silence the compiler + status_t timestampStatus = INVALID_OPERATION; for (;;) { @@ -192,6 +198,7 @@ bool FastMixer::threadLoop() full = false; #endif oldTsValid = !clock_gettime(CLOCK_MONOTONIC, &oldTs); + timestampStatus = INVALID_OPERATION; } else { sleepNs = FAST_HOT_IDLE_NS; } @@ -382,6 +389,31 @@ bool FastMixer::threadLoop() i = __builtin_ctz(currentTrackMask); currentTrackMask &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; + + // Refresh the per-track timestamp + if (timestampStatus == NO_ERROR) { + uint32_t trackFramesWrittenButNotPresented; + uint32_t trackSampleRate = fastTrack->mSampleRate; + // There is currently no sample rate conversion for fast tracks currently + if (trackSampleRate != 0 && trackSampleRate != sampleRate) { + trackFramesWrittenButNotPresented = + ((int64_t) nativeFramesWrittenButNotPresented * trackSampleRate) / + sampleRate; + } else { + trackFramesWrittenButNotPresented = nativeFramesWrittenButNotPresented; + } + uint32_t trackFramesWritten = fastTrack->mBufferProvider->framesReleased(); + // Can't provide an AudioTimestamp before first frame presented, + // or during the brief 32-bit wraparound window + if (trackFramesWritten >= trackFramesWrittenButNotPresented) { + AudioTimestamp perTrackTimestamp; + perTrackTimestamp.mPosition = + trackFramesWritten - trackFramesWrittenButNotPresented; + perTrackTimestamp.mTime = timestamp.mTime; + fastTrack->mBufferProvider->onTimestamp(perTrackTimestamp); + } + } + int name = fastTrackNames[i]; ALOG_ASSERT(name >= 0); if (fastTrack->mVolumeProvider != NULL) { @@ -455,7 +487,8 @@ bool FastMixer::threadLoop() dumpState->mWriteSequence++; if (framesWritten >= 0) { ALOG_ASSERT((size_t) framesWritten <= frameCount); - 
dumpState->mFramesWritten += framesWritten; + totalNativeFramesWritten += framesWritten; + dumpState->mFramesWritten = totalNativeFramesWritten; //if ((size_t) framesWritten == frameCount) { // didFullWrite = true; //} @@ -464,6 +497,18 @@ } attemptedWrite = true; // FIXME count # of writes blocked excessively, CPU usage, etc. for dump + + timestampStatus = outputSink->getTimestamp(timestamp); + if (timestampStatus == NO_ERROR) { + uint32_t totalNativeFramesPresented = timestamp.mPosition; + if (totalNativeFramesPresented <= totalNativeFramesWritten) { + nativeFramesWrittenButNotPresented = + totalNativeFramesWritten - totalNativeFramesPresented; + } else { + // HAL reported that more frames were presented than were written + timestampStatus = INVALID_OPERATION; + } + } } // To be exactly periodic, compute the next sleep time based on current time. -- cgit v1.1 From 4d0815d694e5a2edb3ce48427de50f55d0f84c0b Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Thu, 29 Aug 2013 14:40:55 -0700 Subject: Implement MonoPipe::getTimestamp using SingleStateQueue observer Change-Id: I7b1928b087f1e676c7b291df6cefa7707301662c --- media/libnbaio/MonoPipe.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/media/libnbaio/MonoPipe.cpp b/media/libnbaio/MonoPipe.cpp index b55be83..de0ad28 100644 --- a/media/libnbaio/MonoPipe.cpp +++ b/media/libnbaio/MonoPipe.cpp @@ -315,6 +315,9 @@ bool MonoPipe::isShutdown() status_t MonoPipe::getTimestamp(AudioTimestamp& timestamp) { + if (mTimestampObserver.poll(timestamp)) { + return OK; + } return INVALID_OPERATION; } -- cgit v1.1 From fe346c707f59d763ded93bc3d27b51f0c0408258 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 30 Aug 2013 13:28:22 -0700 Subject: Fix miscellaneous AudioTrack::getTimestamp() bugs Check that get_presentation_position is non-NULL before calling. AudioTrack::getTimestamp not implemented for fast tracks. Fix typo in Track::getTimestamp(). Fix bugs in AudioTrack::getTimestamp after stop: - getTimestamp while stopped is not allowed. - stop, start, getTimestamp now returns the correct value.
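For reference, a minimal client-side sketch of the behaviour after this fix (illustrative only, not part of the patch; the helper name is invented, the header paths are assumptions, and the track is assumed to be a normal, non-fast AudioTrack):

    #include <media/AudioTrack.h>        // android::AudioTrack
    #include <media/AudioTimestamp.h>    // android::AudioTimestamp
    #include <utils/Errors.h>            // status_t, NO_ERROR
    #include <utils/StrongPointer.h>     // sp<>

    // Returns true and fills 'ts' only when a presentation timestamp is available.
    static bool queryTimestamp(const android::sp<android::AudioTrack>& track,
                               android::AudioTimestamp& ts) {
        android::status_t status = track->getTimestamp(ts);
        // INVALID_OPERATION is expected for fast tracks and for tracks that are
        // neither active nor paused (for example after stop()).
        return status == android::NO_ERROR;
    }

On success, ts.mPosition is the presented frame count on the client's timeline (the proxy epoch has been added back) and ts.mTime is the CLOCK_MONOTONIC time of that position.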
Change-Id: Ie8d9dc1f28d8927634e04175a68b147ffc2ea8eb --- media/libmedia/AudioTrack.cpp | 13 ++++++++++++- media/libnbaio/AudioStreamOutSink.cpp | 3 +++ services/audioflinger/Tracks.cpp | 6 +++++- 3 files changed, 20 insertions(+), 2 deletions(-) diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 176197c..744faee 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1714,7 +1714,18 @@ status_t AudioTrack::setParameters(const String8& keyValuePairs) status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp) { AutoMutex lock(mLock); - return mAudioTrack->getTimestamp(timestamp); + // FIXME not implemented for fast tracks; should use proxy and SSQ + if (mFlags & AUDIO_OUTPUT_FLAG_FAST) { + return INVALID_OPERATION; + } + if (mState != STATE_ACTIVE && mState != STATE_PAUSED) { + return INVALID_OPERATION; + } + status_t status = mAudioTrack->getTimestamp(timestamp); + if (status == NO_ERROR) { + timestamp.mPosition += mProxy->getEpoch(); + } + return status; } String8 AudioTrack::getParameters(const String8& keys) diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp index b2de8a2..e4341d7 100644 --- a/media/libnbaio/AudioStreamOutSink.cpp +++ b/media/libnbaio/AudioStreamOutSink.cpp @@ -81,6 +81,9 @@ status_t AudioStreamOutSink::getNextWriteTimestamp(int64_t *timestamp) { status_t AudioStreamOutSink::getTimestamp(AudioTimestamp& timestamp) { + if (mStream->get_presentation_position == NULL) { + return INVALID_OPERATION; + } // FIXME position64 won't be needed after AudioTimestamp.mPosition is changed to uint64_t uint64_t position64; int ok = mStream->get_presentation_position(mStream, &position64, ×tamp.mTime); diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index db67be6..2042050 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -727,9 +727,13 @@ status_t AudioFlinger::PlaybackThread::Track::setParameters(const String8& keyVa status_t AudioFlinger::PlaybackThread::Track::getTimestamp(AudioTimestamp& timestamp) { + // Client should implement this using SSQ; the unpresented frame count in latch is irrelevant + if (isFastTrack()) { + return INVALID_OPERATION; + } sp thread = mThread.promote(); if (thread == 0) { - return false; + return INVALID_OPERATION; } Mutex::Autolock _l(thread->mLock); PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); -- cgit v1.1 From 491211b87dd38357d37ece687cf8795bff8996a5 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Tue, 3 Sep 2013 15:29:33 -0700 Subject: Work around decoder slowness b/10528409 Change-Id: Ifcaf0488d63e87676b1e9382437943138deb76a6 --- media/libstagefright/MediaCodec.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index f412dc8..66a0b4e 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -106,6 +106,8 @@ status_t MediaCodec::init(const char *name, bool nameIsType, bool encoder) { needDedicatedLooper = true; } else if (!nameIsType && !strncmp(name, "OMX.TI.DUCATI1.VIDEO.", 21)) { needDedicatedLooper = true; + } else if (!nameIsType && !strncmp(name, "OMX.qcom.video.decoder.avc.secure", 33)) { + needDedicatedLooper = true; } if (needDedicatedLooper) { -- cgit v1.1 From ec3acca4a75fc4adc076b56751124f507b419622 Mon Sep 17 00:00:00 2001 From: Chong Zhang Date: Tue, 3 Sep 2013 14:35:37 -0700 Subject: wifi-display: do not use HDCP's encryptNative method 
if its unsupported Bug: 10609422 Change-Id: I005f1d04a4191b1503b5f3e895a98b8d6560c402 --- include/media/IHDCP.h | 11 +++++++++++ media/libmedia/IHDCP.cpp | 16 ++++++++++++++++ media/libmediaplayerservice/HDCP.cpp | 14 ++++++++++++++ media/libmediaplayerservice/HDCP.h | 1 + .../wifi-display/source/PlaybackSession.cpp | 3 ++- 5 files changed, 44 insertions(+), 1 deletion(-) diff --git a/include/media/IHDCP.h b/include/media/IHDCP.h index 54fefa3..352561e 100644 --- a/include/media/IHDCP.h +++ b/include/media/IHDCP.h @@ -46,6 +46,17 @@ struct IHDCP : public IInterface { // Request to shutdown the active HDCP session. virtual status_t shutdownAsync() = 0; + // Returns the capability bitmask of this HDCP session. + // Possible return values (please refer to HDCAPAPI.h): + // HDCP_CAPS_ENCRYPT: mandatory, meaning the HDCP module can encrypt + // from an input byte-array buffer to an output byte-array buffer + // HDCP_CAPS_ENCRYPT_NATIVE: the HDCP module supports encryption from + // a native buffer to an output byte-array buffer. The format of the + // input native buffer is specific to vendor's encoder implementation. + // It is the same format as that used by the encoder when + // "storeMetaDataInBuffers" extension is enabled on its output port. + virtual uint32_t getCaps() = 0; + // ENCRYPTION only: // Encrypt data according to the HDCP spec. "size" bytes of data are // available at "inData" (virtual address), "size" may not be a multiple diff --git a/media/libmedia/IHDCP.cpp b/media/libmedia/IHDCP.cpp index a46ff91..1cf987a 100644 --- a/media/libmedia/IHDCP.cpp +++ b/media/libmedia/IHDCP.cpp @@ -30,6 +30,7 @@ enum { HDCP_SET_OBSERVER, HDCP_INIT_ASYNC, HDCP_SHUTDOWN_ASYNC, + HDCP_GET_CAPS, HDCP_ENCRYPT, HDCP_ENCRYPT_NATIVE, HDCP_DECRYPT, @@ -85,6 +86,13 @@ struct BpHDCP : public BpInterface { return reply.readInt32(); } + virtual uint32_t getCaps() { + Parcel data, reply; + data.writeInterfaceToken(IHDCP::getInterfaceDescriptor()); + remote()->transact(HDCP_GET_CAPS, data, &reply); + return reply.readInt32(); + } + virtual status_t encrypt( const void *inData, size_t size, uint32_t streamCTR, uint64_t *outInputCTR, void *outData) { @@ -222,6 +230,14 @@ status_t BnHDCP::onTransact( return OK; } + case HDCP_GET_CAPS: + { + CHECK_INTERFACE(IHDCP, data, reply); + + reply->writeInt32(getCaps()); + return OK; + } + case HDCP_ENCRYPT: { size_t size = data.readInt32(); diff --git a/media/libmediaplayerservice/HDCP.cpp b/media/libmediaplayerservice/HDCP.cpp index 8a3188c..c2ac1a3 100644 --- a/media/libmediaplayerservice/HDCP.cpp +++ b/media/libmediaplayerservice/HDCP.cpp @@ -100,6 +100,20 @@ status_t HDCP::shutdownAsync() { return mHDCPModule->shutdownAsync(); } +uint32_t HDCP::getCaps() { + Mutex::Autolock autoLock(mLock); + + if (mHDCPModule == NULL) { + return NO_INIT; + } + + // TO-DO: + // Only support HDCP_CAPS_ENCRYPT (byte-array to byte-array) for now. + // use mHDCPModule->getCaps() when the HDCP libraries get updated. 
+ //return mHDCPModule->getCaps(); + return HDCPModule::HDCP_CAPS_ENCRYPT; +} + status_t HDCP::encrypt( const void *inData, size_t size, uint32_t streamCTR, uint64_t *outInputCTR, void *outData) { diff --git a/media/libmediaplayerservice/HDCP.h b/media/libmediaplayerservice/HDCP.h index c60c2e0..26ddc86 100644 --- a/media/libmediaplayerservice/HDCP.h +++ b/media/libmediaplayerservice/HDCP.h @@ -30,6 +30,7 @@ struct HDCP : public BnHDCP { virtual status_t setObserver(const sp &observer); virtual status_t initAsync(const char *host, unsigned port); virtual status_t shutdownAsync(); + virtual uint32_t getCaps(); virtual status_t encrypt( const void *inData, size_t size, uint32_t streamCTR, diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index 0aa4ee5..286ea13 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -939,7 +939,8 @@ status_t WifiDisplaySource::PlaybackSession::addSource( if (isVideo) { format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC); format->setInt32("store-metadata-in-buffers", true); - format->setInt32("store-metadata-in-buffers-output", (mHDCP != NULL)); + format->setInt32("store-metadata-in-buffers-output", (mHDCP != NULL) + && (mHDCP->getCaps() & HDCPModule::HDCP_CAPS_ENCRYPT_NATIVE)); format->setInt32( "color-format", OMX_COLOR_FormatAndroidOpaque); format->setInt32("profile-idc", profileIdc); -- cgit v1.1 From 17963d16d66526288ed66c91a6744daf3acfd731 Mon Sep 17 00:00:00 2001 From: Ruben Brunk Date: Mon, 19 Aug 2013 15:21:19 -0700 Subject: Fix CameraService error flag when running HAL1 with API2. Bug: 10620944 Change-Id: I44c0d09a3d313ae8f724fbe61fa5f36cf5b38b6d --- services/camera/libcameraservice/CameraService.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index bf9bc71..fe16314 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -492,7 +492,7 @@ status_t CameraService::connectPro( case CAMERA_DEVICE_API_VERSION_1_0: ALOGE("Camera id %d uses HALv1, doesn't support ProCamera", cameraId); - return -ENOTSUP; + return -EOPNOTSUPP; break; case CAMERA_DEVICE_API_VERSION_2_0: case CAMERA_DEVICE_API_VERSION_2_1: @@ -570,7 +570,7 @@ status_t CameraService::connectDevice( switch(deviceVersion) { case CAMERA_DEVICE_API_VERSION_1_0: ALOGW("Camera using old HAL version: %d", deviceVersion); - return -ENOTSUP; + return -EOPNOTSUPP; // TODO: don't allow 2.0 Only allow 2.1 and higher case CAMERA_DEVICE_API_VERSION_2_0: case CAMERA_DEVICE_API_VERSION_2_1: -- cgit v1.1 From 6f9439efd2a6004b588605f6a9d4af20c98e8e80 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Wed, 4 Sep 2013 15:00:07 -0700 Subject: Better workaround for slow decoders. This is more in the spirit of the original code. Now it checks whether a codec instantiated by name is a video codec, and enables the extra looper if so. 
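The two instantiation paths this refers to look roughly as follows (an illustrative fragment, not part of the change; the looper setup and component name are placeholders):

    #include <media/stagefright/MediaCodec.h>
    #include <media/stagefright/foundation/ALooper.h>

    using namespace android;

    static void createDecoders(const sp<ALooper>& looper) {
        // nameIsType == true: the "video/" prefix alone selects the dedicated looper.
        sp<MediaCodec> byType = MediaCodec::CreateByType(looper, "video/avc", false /* encoder */);

        // nameIsType == false: the component name is resolved through MediaCodecList, and
        // the dedicated looper is used if any of its supported types starts with "video/".
        sp<MediaCodec> byName = MediaCodec::CreateByComponentName(looper, "OMX.google.h264.decoder");
    }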
b/10528409 Change-Id: Ia253c04c1283d4ecf66f213ef4bf523279ad7cca --- media/libstagefright/MediaCodec.cpp | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index 66a0b4e..e0686be 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -31,6 +31,7 @@ #include #include #include +#include #include #include #include @@ -104,10 +105,24 @@ status_t MediaCodec::init(const char *name, bool nameIsType, bool encoder) { bool needDedicatedLooper = false; if (nameIsType && !strncasecmp(name, "video/", 6)) { needDedicatedLooper = true; - } else if (!nameIsType && !strncmp(name, "OMX.TI.DUCATI1.VIDEO.", 21)) { - needDedicatedLooper = true; - } else if (!nameIsType && !strncmp(name, "OMX.qcom.video.decoder.avc.secure", 33)) { - needDedicatedLooper = true; + } else { + AString tmp = name; + if (tmp.endsWith(".secure")) { + tmp.erase(tmp.size() - 7, 7); + } + const MediaCodecList *mcl = MediaCodecList::getInstance(); + ssize_t codecIdx = mcl->findCodecByName(tmp.c_str()); + if (codecIdx >= 0) { + Vector types; + if (mcl->getSupportedTypes(codecIdx, &types) == OK) { + for (int i = 0; i < types.size(); i++) { + if (types[i].startsWith("video/")) { + needDedicatedLooper = true; + break; + } + } + } + } } if (needDedicatedLooper) { -- cgit v1.1 From f74ccdb61627421bccfdbc479ffdc06ced717db4 Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Wed, 4 Sep 2013 15:54:07 -0700 Subject: Start 'am' with exec() We were using system() to run 'am broadcast'. Switch to fork()+exec(). Bug 10606037 Change-Id: Ifae1a2b4f1914f015d09f2cac8cd0b1896d87a84 --- cmds/screenrecord/screenrecord.cpp | 70 ++++++++++++++++++++++++++++++-------- 1 file changed, 55 insertions(+), 15 deletions(-) diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp index d027ba9..94c626a 100644 --- a/cmds/screenrecord/screenrecord.cpp +++ b/cmds/screenrecord/screenrecord.cpp @@ -37,11 +37,13 @@ #include #include +#include #include #include #include #include #include +#include using namespace android; @@ -498,23 +500,61 @@ static status_t recordScreen(const char* fileName) { /* * Sends a broadcast to the media scanner to tell it about the new video. + * + * This is optional, but nice to have. 
*/ static status_t notifyMediaScanner(const char* fileName) { - String8 command("am broadcast -a android.intent.action.MEDIA_SCANNER_SCAN_FILE -d file://"); - command.append(fileName); - if (gVerbose) { - printf("Shell: %s\n", command.string()); - } - - // TODO: for non-verbose mode we should suppress stdout - int status = system(command.string()); - if (status < 0) { - fprintf(stderr, "Unable to fork shell for media scanner broadcast\n"); - return UNKNOWN_ERROR; - } else if (status != 0) { - fprintf(stderr, "am command failed (status=%d): '%s'\n", - status, command.string()); - return UNKNOWN_ERROR; + pid_t pid = fork(); + if (pid < 0) { + int err = errno; + ALOGW("fork() failed: %s", strerror(err)); + return -err; + } else if (pid > 0) { + // parent; wait for the child, mostly to make the verbose-mode output + // look right, but also to check for and log failures + int status; + pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0)); + if (actualPid != pid) { + ALOGW("waitpid() returned %d (errno=%d)", actualPid, errno); + } else if (status != 0) { + ALOGW("'am broadcast' exited with status=%d", status); + } else { + ALOGV("'am broadcast' exited successfully"); + } + } else { + const char* kCommand = "/system/bin/am"; + + // child; we're single-threaded, so okay to alloc + String8 fileUrl("file://"); + fileUrl.append(fileName); + const char* const argv[] = { + kCommand, + "broadcast", + "-a", + "android.intent.action.MEDIA_SCANNER_SCAN_FILE", + "-d", + fileUrl.string(), + NULL + }; + if (gVerbose) { + printf("Executing:"); + for (int i = 0; argv[i] != NULL; i++) { + printf(" %s", argv[i]); + } + putchar('\n'); + } else { + // non-verbose, suppress 'am' output + ALOGV("closing stdout/stderr in child"); + int fd = open("/dev/null", O_WRONLY); + if (fd >= 0) { + dup2(fd, STDOUT_FILENO); + dup2(fd, STDERR_FILENO); + close(fd); + } + } + execv(kCommand, const_cast(argv)); + ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno)); + exit(1); } return NO_ERROR; } -- cgit v1.1 From a27c4aa222c8dd1712617dad954c5f3aa68d5427 Mon Sep 17 00:00:00 2001 From: Ruben Brunk Date: Wed, 4 Sep 2013 18:18:26 -0700 Subject: camera: Fix race condition between setParameters() and stopPreview(). Bug: 10414772 Change-Id: I928cb3935afb101da8a7931f88c27dccdb03b4a9 --- services/camera/libcameraservice/api1/Camera2Client.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp index 3d9fe01..0a18501 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.cpp +++ b/services/camera/libcameraservice/api1/Camera2Client.cpp @@ -854,6 +854,7 @@ void Camera2Client::stopPreviewL() { // no break case Parameters::RECORD: case Parameters::PREVIEW: + syncWithDevice(); res = stopStream(); if (res != OK) { ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)", -- cgit v1.1 From 284c17e73bbff51cb5b1adcee98386d47733757a Mon Sep 17 00:00:00 2001 From: jpadmana Date: Tue, 4 Jun 2013 16:08:29 +0530 Subject: Effects Factory changes for effects offload audio_effects.conf - commented changes to illustrate the addition of Proxy and sub effects to the conf file Added an effectFactoryApi - EffectGetSubEffects for querying the sub effect descriptors from the factory. 
This api is used by the Proxy to get the sub effects Added functions and data structures in factory code for loading the sub effects gSubEffectList - has the Proxies and their corresponding sub effects - addSubEffect() - reads a sub effect node and adds to the gSubEffectList - findSubEffect() - searches through the gSubEffectList to find a SubEffect Bug: 8174034. Change-Id: I25b0c62b2ad523a52337128b51469e628209ea3e Signed-off-by: jpadmana --- include/media/EffectsFactoryApi.h | 24 ++++ media/libeffects/data/audio_effects.conf | 39 ++++++ media/libeffects/factory/EffectsFactory.c | 218 +++++++++++++++++++++++++++++- media/libeffects/factory/EffectsFactory.h | 19 +++ 4 files changed, 298 insertions(+), 2 deletions(-) diff --git a/include/media/EffectsFactoryApi.h b/include/media/EffectsFactoryApi.h index b1ed7b0..b1143b9 100644 --- a/include/media/EffectsFactoryApi.h +++ b/include/media/EffectsFactoryApi.h @@ -171,6 +171,30 @@ int EffectGetDescriptor(const effect_uuid_t *pEffectUuid, effect_descriptor_t *p //////////////////////////////////////////////////////////////////////////////// int EffectIsNullUuid(const effect_uuid_t *pEffectUuid); +//////////////////////////////////////////////////////////////////////////////// +// +// Function: EffectGetSubEffects +// +// Description: Returns the descriptors of the sub effects of the effect +// whose uuid is pointed to by first argument. +// +// Input: +// pEffectUuid: pointer to the effect uuid. +// size: size of the buffer pointed by pDescriptor. +// +// Input/Output: +// pDescriptor: address where to return the sub effect descriptors. +// +// Output: +// returned value: 0 successful operation. +// -ENODEV factory failed to initialize +// -EINVAL invalid pEffectUuid or pDescriptor +// -ENOENT no effect with this uuid found +// *pDescriptor: updated with the sub effect descriptors. +// +//////////////////////////////////////////////////////////////////////////////// +int EffectGetSubEffects(const effect_uuid_t *pEffectUuid, effect_descriptor_t *pDescriptors, size_t size); + #if __cplusplus } // extern "C" #endif diff --git a/media/libeffects/data/audio_effects.conf b/media/libeffects/data/audio_effects.conf index 93f27cb..aa48e4e 100644 --- a/media/libeffects/data/audio_effects.conf +++ b/media/libeffects/data/audio_effects.conf @@ -6,6 +6,23 @@ # } # } libraries { +# This is a proxy library that will be an abstraction for +# the HW and SW effects + + #proxy { + #path /system/lib/soundfx/libProxy.so + #} + +# This is the SW implementation library of the effect + #libSW { + #path /system/lib/soundfx/libswwrapper.so + #} + +# This is the HW implementation library for the effect + #libHW { + #path /system/lib/soundfx/libhwwrapper.so + #} + bundle { path /system/lib/soundfx/libbundlewrapper.so } @@ -43,6 +60,28 @@ libraries { # } effects { + +# additions for the proxy implementation +# Proxy implementation + #effectname { + #library proxy + #uuid xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + + # SW implemetation of the effect. Added as a node under the proxy to + # indicate this as a sub effect. + #libsw { + #library libSW + #uuid yyyyyyyy-yyyy-yyyy-yyyy-yyyyyyyyyyyy + #} End of SW effect + + # HW implementation of the effect. Added as a node under the proxy to + # indicate this as a sub effect. 
+ #libhw { + #library libHW + #uuid zzzzzzzz-zzzz-zzzz-zzzz-zzzzzzzzzzzz + #}End of HW effect + #} End of effect proxy + bassboost { library bundle uuid 8631f300-72e2-11df-b57e-0002a5d5c51b diff --git a/media/libeffects/factory/EffectsFactory.c b/media/libeffects/factory/EffectsFactory.c index f158929..f8d6041 100644 --- a/media/libeffects/factory/EffectsFactory.c +++ b/media/libeffects/factory/EffectsFactory.c @@ -28,6 +28,9 @@ static list_elem_t *gEffectList; // list of effect_entry_t: all currently created effects static list_elem_t *gLibraryList; // list of lib_entry_t: all currently loaded libraries +// list of effect_descriptor and list of sub effects : all currently loaded +// It does not contain effects without sub effects. +static list_sub_elem_t *gSubEffectList; static pthread_mutex_t gLibLock = PTHREAD_MUTEX_INITIALIZER; // controls access to gLibraryList static uint32_t gNumEffects; // total number number of effects static list_elem_t *gCurLib; // current library in enumeration process @@ -50,6 +53,8 @@ static int loadLibraries(cnode *root); static int loadLibrary(cnode *root, const char *name); static int loadEffects(cnode *root); static int loadEffect(cnode *node); +// To get and add the effect pointed by the passed node to the gSubEffectList +static int addSubEffect(cnode *root); static lib_entry_t *getLibrary(const char *path); static void resetEffectEnumeration(); static uint32_t updateNumEffects(); @@ -57,6 +62,10 @@ static int findEffect(const effect_uuid_t *type, const effect_uuid_t *uuid, lib_entry_t **lib, effect_descriptor_t **desc); +// To search a subeffect in the gSubEffectList +int findSubEffect(const effect_uuid_t *uuid, + lib_entry_t **lib, + effect_descriptor_t **desc); static void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len); static int stringToUuid(const char *str, effect_uuid_t *uuid); static int uuidToString(const effect_uuid_t *uuid, char *str, size_t maxLen); @@ -287,7 +296,12 @@ int EffectCreate(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, eff ret = findEffect(NULL, uuid, &l, &d); if (ret < 0){ - goto exit; + // Sub effects are not associated with the library->effects, + // so, findEffect will fail. Search for the effect in gSubEffectList. + ret = findSubEffect(uuid, &l, &d); + if (ret < 0 ) { + goto exit; + } } // create effect in library @@ -354,21 +368,27 @@ int EffectRelease(effect_handle_t handle) } if (e1 == NULL) { ret = -ENOENT; + pthread_mutex_unlock(&gLibLock); goto exit; } // release effect in library if (fx->lib == NULL) { ALOGW("EffectRelease() fx %p library already unloaded", handle); + pthread_mutex_unlock(&gLibLock); } else { pthread_mutex_lock(&fx->lib->lock); + // Releasing the gLibLock here as the list access is over as the + // effect is removed from the list. + // If the gLibLock is not released, we will have a deadlock situation + // since we call the sub effect release inside the EffectRelease of Proxy + pthread_mutex_unlock(&gLibLock); fx->lib->desc->release_effect(fx->subItfe); pthread_mutex_unlock(&fx->lib->lock); } free(fx); exit: - pthread_mutex_unlock(&gLibLock); return ret; } @@ -380,6 +400,49 @@ int EffectIsNullUuid(const effect_uuid_t *uuid) return 1; } +// Function to get the sub effect descriptors of the effect whose uuid +// is pointed by the first argument. 
It searches the gSubEffectList for the +// matching uuid and then copies the corresponding sub effect descriptors +// to the inout param +int EffectGetSubEffects(const effect_uuid_t *uuid, + effect_descriptor_t *pDescriptors, size_t size) +{ + ALOGV("EffectGetSubEffects() UUID: %08X-%04X-%04X-%04X-%02X%02X%02X%02X%02X" + "%02X\n",uuid->timeLow, uuid->timeMid, uuid->timeHiAndVersion, + uuid->clockSeq, uuid->node[0], uuid->node[1],uuid->node[2], + uuid->node[3],uuid->node[4],uuid->node[5]); + + // Check if the size of the desc buffer is large enough for 2 subeffects + if ((uuid == NULL) || (pDescriptors == NULL) || + (size < 2*sizeof(effect_descriptor_t))) { + ALOGW("NULL pointer or insufficient memory. Cannot query subeffects"); + return -EINVAL; + } + int ret = init(); + if (ret < 0) + return ret; + list_sub_elem_t *e = gSubEffectList; + sub_effect_entry_t *subeffect; + effect_descriptor_t *d; + int count = 0; + while (e != NULL) { + d = (effect_descriptor_t*)e->object; + if (memcmp(uuid, &d->uuid, sizeof(effect_uuid_t)) == 0) { + ALOGV("EffectGetSubEffects: effect found in the list"); + list_elem_t *subefx = e->sub_elem; + while (subefx != NULL) { + subeffect = (sub_effect_entry_t*)subefx->object; + d = (effect_descriptor_t*)(subeffect->object); + pDescriptors[count++] = *d; + subefx = subefx->next; + } + ALOGV("EffectGetSubEffects end - copied the sub effect descriptors"); + return count; + } + e = e->next; + } + return -ENOENT; +} ///////////////////////////////////////////////// // Local functions ///////////////////////////////////////////////// @@ -503,6 +566,65 @@ error: return -EINVAL; } +// This will find the library and UUID tags of the sub effect pointed by the +// node, gets the effect descriptor and lib_entry_t and adds the subeffect - +// sub_entry_t to the gSubEffectList +int addSubEffect(cnode *root) +{ + ALOGV("addSubEffect"); + cnode *node; + effect_uuid_t uuid; + effect_descriptor_t *d; + lib_entry_t *l; + list_elem_t *e; + node = config_find(root, LIBRARY_TAG); + if (node == NULL) { + return -EINVAL; + } + l = getLibrary(node->value); + if (l == NULL) { + ALOGW("addSubEffect() could not get library %s", node->value); + return -EINVAL; + } + node = config_find(root, UUID_TAG); + if (node == NULL) { + return -EINVAL; + } + if (stringToUuid(node->value, &uuid) != 0) { + ALOGW("addSubEffect() invalid uuid %s", node->value); + return -EINVAL; + } + d = malloc(sizeof(effect_descriptor_t)); + if (l->desc->get_descriptor(&uuid, d) != 0) { + char s[40]; + uuidToString(&uuid, s, 40); + ALOGW("Error querying effect %s on lib %s", s, l->name); + free(d); + return -EINVAL; + } +#if (LOG_NDEBUG==0) + char s[256]; + dumpEffectDescriptor(d, s, 256); + ALOGV("addSubEffect() read descriptor %p:%s",d, s); +#endif + if (EFFECT_API_VERSION_MAJOR(d->apiVersion) != + EFFECT_API_VERSION_MAJOR(EFFECT_CONTROL_API_VERSION)) { + ALOGW("Bad API version %08x on lib %s", d->apiVersion, l->name); + free(d); + return -EINVAL; + } + sub_effect_entry_t *sub_effect = malloc(sizeof(sub_effect_entry_t)); + sub_effect->object = d; + // lib_entry_t is stored since the sub effects are not linked to the library + sub_effect->lib = l; + e = malloc(sizeof(list_elem_t)); + e->object = sub_effect; + e->next = gSubEffectList->sub_elem; + gSubEffectList->sub_elem = e; + ALOGV("addSubEffect end"); + return 0; +} + int loadEffects(cnode *root) { cnode *node; @@ -571,9 +693,101 @@ int loadEffect(cnode *root) e->next = l->effects; l->effects = e; + // After the UUID node in the config_tree, if node->next is valid, + // 
that would be sub effect node. + // Find the sub effects and add them to the gSubEffectList + node = node->next; + int count = 2; + bool hwSubefx = false, swSubefx = false; + list_sub_elem_t *sube = NULL; + if (node != NULL) { + ALOGV("Adding the effect to gEffectSubList as there are sub effects"); + sube = malloc(sizeof(list_sub_elem_t)); + sube->object = d; + sube->sub_elem = NULL; + sube->next = gSubEffectList; + gSubEffectList = sube; + } + while (node != NULL && count) { + if (addSubEffect(node)) { + ALOGW("loadEffect() could not add subEffect %s", node->value); + // Change the gSubEffectList to point to older list; + gSubEffectList = sube->next; + free(sube->sub_elem);// Free an already added sub effect + sube->sub_elem = NULL; + free(sube); + return -ENOENT; + } + sub_effect_entry_t *subEntry = (sub_effect_entry_t*)gSubEffectList->sub_elem->object; + effect_descriptor_t *subEffectDesc = (effect_descriptor_t*)(subEntry->object); + // Since we return a dummy descriptor for the proxy during + // get_descriptor call,we replace it with the correspoding + // sw effect descriptor, but with Proxy UUID + // check for Sw desc + if (!((subEffectDesc->flags & EFFECT_FLAG_HW_ACC_MASK) == + EFFECT_FLAG_HW_ACC_TUNNEL)) { + swSubefx = true; + *d = *subEffectDesc; + d->uuid = uuid; + ALOGV("loadEffect() Changed the Proxy desc"); + } else + hwSubefx = true; + count--; + node = node->next; + } + // 1 HW and 1 SW sub effect found. Set the offload flag in the Proxy desc + if (hwSubefx && swSubefx) { + d->flags |= EFFECT_FLAG_OFFLOAD_SUPPORTED; + } return 0; } +// Searches the sub effect matching to the specified uuid +// in the gSubEffectList. It gets the lib_entry_t for +// the matched sub_effect . Used in EffectCreate of sub effects +int findSubEffect(const effect_uuid_t *uuid, + lib_entry_t **lib, + effect_descriptor_t **desc) +{ + list_sub_elem_t *e = gSubEffectList; + list_elem_t *subefx; + sub_effect_entry_t *effect; + lib_entry_t *l = NULL; + effect_descriptor_t *d = NULL; + int found = 0; + int ret = 0; + + if (uuid == NULL) + return -EINVAL; + + while (e != NULL && !found) { + subefx = (list_elem_t*)(e->sub_elem); + while (subefx != NULL) { + effect = (sub_effect_entry_t*)subefx->object; + l = (lib_entry_t *)effect->lib; + d = (effect_descriptor_t *)effect->object; + if (memcmp(&d->uuid, uuid, sizeof(effect_uuid_t)) == 0) { + ALOGV("uuid matched"); + found = 1; + break; + } + subefx = subefx->next; + } + e = e->next; + } + if (!found) { + ALOGV("findSubEffect() effect not found"); + ret = -ENOENT; + } else { + ALOGV("findSubEffect() found effect: %s in lib %s", d->name, l->name); + *lib = l; + if (desc != NULL) { + *desc = d; + } + } + return ret; +} + lib_entry_t *getLibrary(const char *name) { list_elem_t *e; diff --git a/media/libeffects/factory/EffectsFactory.h b/media/libeffects/factory/EffectsFactory.h index c1d4319..147ff18 100644 --- a/media/libeffects/factory/EffectsFactory.h +++ b/media/libeffects/factory/EffectsFactory.h @@ -32,6 +32,15 @@ typedef struct list_elem_s { struct list_elem_s *next; } list_elem_t; +// Structure used for storing effects with their sub effects. +// Used in creating gSubEffectList. 
Here, +// object holds the effect desc and the list sub_elem holds the sub effects +typedef struct list_sub_elem_s { + void *object; + list_elem_t *sub_elem; + struct list_sub_elem_s *next; +} list_sub_elem_t; + typedef struct lib_entry_s { audio_effect_library_t *desc; char *name; @@ -47,6 +56,16 @@ typedef struct effect_entry_s { lib_entry_t *lib; } effect_entry_t; +// Structure used to store the lib entry +// and the descriptor of the sub effects. +// The library entry is to be stored in case of +// sub effects as the sub effects are not linked +// to the library list - gLibraryList. +typedef struct sub_effect_entry_s { + lib_entry_t *lib; + void *object; +} sub_effect_entry_t; + #if __cplusplus } // extern "C" #endif -- cgit v1.1 From 60c60df7db278d2fa5c90b0fa14f99a61d50272b Mon Sep 17 00:00:00 2001 From: jpadmana Date: Tue, 4 Jun 2013 16:03:29 +0530 Subject: Effect Offload Proxy for effects offload Effect Proxy abstracts the sub effects to the upper layers. It has the following functionalities: - creation and release of sub effects - routing the effect commands and process to the appropriate sub effect Bug: 8174034. Change-Id: I22d8136636048e7fe8f8807cbc6e348ffa200a22 Signed-off-by: jpadmana --- media/libeffects/data/audio_effects.conf | 2 +- media/libeffects/proxy/Android.mk | 34 ++++ media/libeffects/proxy/EffectProxy.cpp | 298 +++++++++++++++++++++++++++++++ media/libeffects/proxy/EffectProxy.h | 75 ++++++++ 4 files changed, 408 insertions(+), 1 deletion(-) create mode 100644 media/libeffects/proxy/Android.mk create mode 100644 media/libeffects/proxy/EffectProxy.cpp create mode 100644 media/libeffects/proxy/EffectProxy.h diff --git a/media/libeffects/data/audio_effects.conf b/media/libeffects/data/audio_effects.conf index aa48e4e..69a3c53 100644 --- a/media/libeffects/data/audio_effects.conf +++ b/media/libeffects/data/audio_effects.conf @@ -10,7 +10,7 @@ libraries { # the HW and SW effects #proxy { - #path /system/lib/soundfx/libProxy.so + #path /system/lib/soundfx/libeffectproxy.so #} # This is the SW implementation library of the effect diff --git a/media/libeffects/proxy/Android.mk b/media/libeffects/proxy/Android.mk new file mode 100644 index 0000000..01b3be1 --- /dev/null +++ b/media/libeffects/proxy/Android.mk @@ -0,0 +1,34 @@ +# Copyright 2013 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) +LOCAL_MODULE:= libeffectproxy +LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx +LOCAL_MODULE_TAGS := optional + + +LOCAL_SRC_FILES := \ + EffectProxy.cpp + +LOCAL_CFLAGS+= -fvisibility=hidden + +LOCAL_SHARED_LIBRARIES := liblog libcutils libutils libdl libeffects + +LOCAL_C_INCLUDES := \ + system/media/audio_effects/include \ + bionic/libc/include + +include $(BUILD_SHARED_LIBRARY) + diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp new file mode 100644 index 0000000..77c6e89 --- /dev/null +++ b/media/libeffects/proxy/EffectProxy.cpp @@ -0,0 +1,298 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "EffectProxy" +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { +// This is a dummy proxy descriptor just to return to Factory during the initial +// GetDescriptor call. Later in the factory, it is replaced with the +// SW sub effect descriptor +const effect_descriptor_t gProxyDescriptor = { + EFFECT_UUID_INITIALIZER, // type + EFFECT_UUID_INITIALIZER, // uuid + EFFECT_CONTROL_API_VERSION, //version of effect control API + (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | + EFFECT_FLAG_VOLUME_CTRL), // effect capability flags + 0, // CPU load + 1, // Data memory + "Proxy", //effect name + "AOSP", //implementor name +}; + + +static const effect_descriptor_t *const gDescriptors[] = +{ + &gProxyDescriptor, +}; + +int EffectProxyCreate(const effect_uuid_t *uuid, + int32_t sessionId, + int32_t ioId, + effect_handle_t *pHandle) { + + effect_descriptor_t* desc; + EffectContext* pContext; + if (pHandle == NULL || uuid == NULL) { + ALOGE("EffectProxyCreate() called with NULL pointer"); + return -EINVAL; + } + ALOGV("EffectProxyCreate start.."); + pContext = new EffectContext; + pContext->sessionId = sessionId; + pContext->ioId = ioId; + pContext->uuid = *uuid; + pContext->common_itfe = &gEffectInterface; + // The sub effects will be created in effect_command when the first command + // for the effect is received + pContext->eHandle[SUB_FX_HOST] = pContext->eHandle[SUB_FX_OFFLOAD] = NULL; + + // Get the HW and SW sub effect descriptors from the effects factory + desc = new effect_descriptor_t[SUB_FX_COUNT]; + pContext->desc = new effect_descriptor_t[SUB_FX_COUNT]; + int retValue = EffectGetSubEffects(uuid, desc, + sizeof(effect_descriptor_t) * SUB_FX_COUNT); + // EffectGetSubEffects returns the number of sub-effects copied. + if (retValue != SUB_FX_COUNT) { + ALOGE("EffectCreate() could not get the sub effects"); + delete desc; + delete pContext->desc; + return -EINVAL; + } + // Check which is the HW descriptor and copy the descriptors + // to the Context desc array + // Also check if there is only one HW and one SW descriptor. + // HW descriptor alone has the HW_TUNNEL flag. 
+ if ((desc[0].flags & EFFECT_FLAG_HW_ACC_TUNNEL) && + !(desc[1].flags & EFFECT_FLAG_HW_ACC_TUNNEL)) { + pContext->desc[SUB_FX_OFFLOAD] = desc[0]; + pContext->desc[SUB_FX_HOST] = desc[1]; + } + else if ((desc[1].flags & EFFECT_FLAG_HW_ACC_TUNNEL) && + !(desc[0].flags & EFFECT_FLAG_HW_ACC_TUNNEL)) { + pContext->desc[SUB_FX_HOST] = desc[0]; + pContext->desc[SUB_FX_OFFLOAD] = desc[1]; + } + delete desc; +#if (LOG_NDEBUG == 0) + effect_uuid_t uuid_print = pContext->desc[SUB_FX_HOST].uuid; + ALOGV("EffectCreate() UUID of HOST: %08X-%04X-%04X-%04X-%02X%02X%02X%02X" + "%02X%02X\n",uuid_print.timeLow, uuid_print.timeMid, + uuid_print.timeHiAndVersion, uuid_print.clockSeq, uuid_print.node[0], + uuid_print.node[1], uuid_print.node[2], uuid_print.node[3], + uuid_print.node[4], uuid_print.node[5]); + ALOGV("EffectCreate() UUID of OFFLOAD: %08X-%04X-%04X-%04X-%02X%02X%02X%02X" + "%02X%02X\n", uuid_print.timeLow, uuid_print.timeMid, + uuid_print.timeHiAndVersion, uuid_print.clockSeq, uuid_print.node[0], + uuid_print.node[1], uuid_print.node[2], uuid_print.node[3], + uuid_print.node[4], uuid_print.node[5]); +#endif + *pHandle = (effect_handle_t)pContext; + ALOGV("EffectCreate end"); + return 0; +} //end EffectProxyCreate + +int EffectProxyRelease(effect_handle_t handle) { + EffectContext * pContext = (EffectContext *)handle; + if (pContext == NULL) { + ALOGV("ERROR : EffectRelease called with NULL pointer"); + return -EINVAL; + } + ALOGV("EffectRelease"); + delete pContext->desc; + if (pContext->eHandle[SUB_FX_HOST]) + EffectRelease(pContext->eHandle[SUB_FX_HOST]); + if (pContext->eHandle[SUB_FX_OFFLOAD]) + EffectRelease(pContext->eHandle[SUB_FX_OFFLOAD]); + delete pContext; + pContext = NULL; + return 0; +} /*end EffectProxyRelease */ + +int EffectProxyGetDescriptor(const effect_uuid_t *uuid, + effect_descriptor_t *pDescriptor) { + const effect_descriptor_t *desc = NULL; + + if (pDescriptor == NULL || uuid == NULL) { + ALOGV("EffectGetDescriptor() called with NULL pointer"); + return -EINVAL; + } + desc = &gProxyDescriptor; + *pDescriptor = *desc; + return 0; +} /* end EffectProxyGetDescriptor */ + +/* Effect Control Interface Implementation: Process */ +int Effect_process(effect_handle_t self, + audio_buffer_t *inBuffer, + audio_buffer_t *outBuffer) { + + EffectContext *pContext = (EffectContext *) self; + int ret = 0; + if (pContext != NULL) { + int index = pContext->index; + // if the index refers to HW , do not do anything. Just return. 
+ if (index == SUB_FX_HOST) { + ALOGV("Calling CoreProcess"); + ret = (*pContext->eHandle[index])->process(pContext->eHandle[index], + inBuffer, outBuffer); + } + } + return ret; +} /* end Effect_process */ + +/* Effect Control Interface Implementation: Command */ +int Effect_command(effect_handle_t self, + uint32_t cmdCode, + uint32_t cmdSize, + void *pCmdData, + uint32_t *replySize, + void *pReplyData) { + + EffectContext *pContext = (EffectContext *) self; + int status; + if (pContext == NULL) { + ALOGV("Effect_command() Proxy context is NULL"); + return -EINVAL; + } + if (pContext->eHandle[SUB_FX_HOST] == NULL) { + ALOGV("Effect_command() Calling HOST EffectCreate"); + status = EffectCreate(&pContext->desc[SUB_FX_HOST].uuid, + pContext->sessionId, pContext->ioId, + &(pContext->eHandle[SUB_FX_HOST])); + if (status != NO_ERROR || (pContext->eHandle[SUB_FX_HOST] == NULL)) { + ALOGV("Effect_command() Error creating SW sub effect"); + return status; + } + } + if (pContext->eHandle[SUB_FX_OFFLOAD] == NULL) { + ALOGV("Effect_command() Calling OFFLOAD EffectCreate"); + status = EffectCreate(&pContext->desc[SUB_FX_OFFLOAD].uuid, + pContext->sessionId, pContext->ioId, + &(pContext->eHandle[SUB_FX_OFFLOAD])); + if (status != NO_ERROR || (pContext->eHandle[SUB_FX_OFFLOAD] == NULL)) { + ALOGV("Effect_command() Error creating HW effect"); + // Do not return error here as SW effect is created + // Return error if the CMD_OFFLOAD sends the index as OFFLOAD + } + pContext->index = SUB_FX_HOST; + } + // EFFECT_CMD_OFFLOAD used to (1) send whether the thread is offload or not + // (2) Send the ioHandle of the effectThread when the effect + // is moved from one type of thread to another. + // pCmdData points to a memory holding effect_offload_param_t structure + if (cmdCode == EFFECT_CMD_OFFLOAD) { + ALOGV("Effect_command() cmdCode = EFFECT_CMD_OFFLOAD"); + if (cmdSize == 0 || pCmdData == NULL) { + ALOGV("effectsOffload: Effect_command: CMD_OFFLOAD has no data"); + *(int*)pReplyData = FAILED_TRANSACTION; + return FAILED_TRANSACTION; + } + effect_offload_param_t* offloadParam = (effect_offload_param_t*)pCmdData; + // Assign the effect context index based on isOffload field of the structure + pContext->index = offloadParam->isOffload ? SUB_FX_OFFLOAD : SUB_FX_HOST; + // if the index is HW and the HW effect is unavailable, return error + // and reset the index to SW + if (pContext->eHandle[pContext->index] == NULL) { + ALOGV("Effect_command()CMD_OFFLOAD sub effect unavailable"); + *(int*)pReplyData = FAILED_TRANSACTION; + return FAILED_TRANSACTION; + } + pContext->ioId = offloadParam->ioHandle; + ALOGV("Effect_command()CMD_OFFLOAD index:%d io %d", pContext->index, pContext->ioId); + // Update the DSP wrapper with the new ioHandle. + // Pass the OFFLOAD command to the wrapper. 
+ // The DSP wrapper needs to handle this CMD + if (pContext->eHandle[SUB_FX_OFFLOAD]) + status = (*pContext->eHandle[SUB_FX_OFFLOAD])->command( + pContext->eHandle[SUB_FX_OFFLOAD], cmdCode, cmdSize, + pCmdData, replySize, pReplyData); + return status; + } + + int index = pContext->index; + if (index != SUB_FX_HOST && index != SUB_FX_OFFLOAD) { + ALOGV("Effect_command: effect index is neither offload nor host"); + return -EINVAL; + } + ALOGV("Effect_command: pContext->eHandle[%d]: %p", + index, pContext->eHandle[index]); + if (pContext->eHandle[SUB_FX_HOST]) + (*pContext->eHandle[SUB_FX_HOST])->command( + pContext->eHandle[SUB_FX_HOST], cmdCode, cmdSize, + pCmdData, replySize, pReplyData); + if (pContext->eHandle[SUB_FX_OFFLOAD]) { + // In case of SET CMD, when the offload stream is unavailable, + // we will store the effect param values in the DSP effect wrapper. + // When the offload effects get enabled, we send these values to the + // DSP during Effect_config. + // So,we send the params to DSP wrapper also + (*pContext->eHandle[SUB_FX_OFFLOAD])->command( + pContext->eHandle[SUB_FX_OFFLOAD], cmdCode, cmdSize, + pCmdData, replySize, pReplyData); + } + return 0; +} /* end Effect_command */ + + +/* Effect Control Interface Implementation: get_descriptor */ +int Effect_getDescriptor(effect_handle_t self, + effect_descriptor_t *pDescriptor) { + + EffectContext * pContext = (EffectContext *) self; + const effect_descriptor_t *desc; + + ALOGV("Effect_getDescriptor"); + if (pContext == NULL || pDescriptor == NULL) { + ALOGV("Effect_getDescriptor() invalid param"); + return -EINVAL; + } + if (pContext->desc == NULL) { + ALOGV("Effect_getDescriptor() could not get descriptor"); + return -EINVAL; + } + desc = &pContext->desc[SUB_FX_HOST]; + *pDescriptor = *desc; + pDescriptor->uuid = pContext->uuid; // Replace the uuid with the Proxy UUID + // Also set/clear the EFFECT_FLAG_OFFLOAD_SUPPORTED flag based on the sub effects availability + if (pContext->eHandle[SUB_FX_OFFLOAD] != NULL) + pDescriptor->flags |= EFFECT_FLAG_OFFLOAD_SUPPORTED; + else + pDescriptor->flags &= ~EFFECT_FLAG_OFFLOAD_SUPPORTED; + return 0; +} /* end Effect_getDescriptor */ + +} // namespace android + +__attribute__ ((visibility ("default"))) +audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { + tag : AUDIO_EFFECT_LIBRARY_TAG, + version : EFFECT_LIBRARY_API_VERSION, + name : "Effect Proxy", + implementor : "AOSP", + create_effect : android::EffectProxyCreate, + release_effect : android::EffectProxyRelease, + get_descriptor : android::EffectProxyGetDescriptor, +}; diff --git a/media/libeffects/proxy/EffectProxy.h b/media/libeffects/proxy/EffectProxy.h new file mode 100644 index 0000000..8992f93 --- /dev/null +++ b/media/libeffects/proxy/EffectProxy.h @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +namespace android { +enum { + SUB_FX_HOST, // Index of HOST in the descriptor and handle arrays + // of the Proxy context + SUB_FX_OFFLOAD, // Index of OFFLOAD in the descriptor and handle arrays + // of the Proxy context + SUB_FX_COUNT // The number of sub effects for a Proxy(1 HW, 1 SW) +}; +#if __cplusplus +extern "C" { +#endif + +int EffectProxyCreate(const effect_uuid_t *uuid, + int32_t sessionId, + int32_t ioId, + effect_handle_t *pHandle); +int EffectProxyRelease(effect_handle_t handle); +int EffectProxyGetDescriptor(const effect_uuid_t *uuid, + effect_descriptor_t *pDescriptor); +/* Effect Control Interface Implementation: Process */ +int Effect_process(effect_handle_t self, + audio_buffer_t *inBuffer, + audio_buffer_t *outBuffer); + +/* Effect Control Interface Implementation: Command */ +int Effect_command(effect_handle_t self, + uint32_t cmdCode, + uint32_t cmdSize, + void *pCmdData, + uint32_t *replySize, + void *pReplyData); +int Effect_getDescriptor(effect_handle_t self, + effect_descriptor_t *pDescriptor); + +const struct effect_interface_s gEffectInterface = { + Effect_process, + Effect_command, + Effect_getDescriptor, + NULL, +}; + +struct EffectContext { + const struct effect_interface_s *common_itfe; // Holds the itfe of the Proxy + effect_descriptor_t* desc; // Points to the sub effect descriptors + effect_handle_t eHandle[SUB_FX_COUNT]; // The effect handles of the sub effects + int index; // The index that is currently active - HOST or OFFLOAD + int32_t sessionId; // The sessiond in which the effect is created. + // Stored in context to pass on to sub effect creation + int32_t ioId; // The ioId in which the effect is created. + // Stored in context to pass on to sub effect creation + effect_uuid_t uuid; // UUID of the Proxy +}; + +#if __cplusplus +} // extern "C" +#endif +} //namespace android -- cgit v1.1 From 3b4529e03c5fc7a44c22f9091ad15a269bfca3a8 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Thu, 5 Sep 2013 18:09:19 -0700 Subject: audioflinger: remove async write race conditions Remove possible race conditions between async callback thread and offload thread when clearing and setting the draining and write blocked flags. Bug: 8174034. 
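The boolean flags are replaced by sequence counters; the encoding they use (bits 31..1 carry the request sequence, bit 0 means a callback is expected) can be summarized with this self-contained sketch, which is an illustration of the scheme rather than AudioFlinger code:

    #include <assert.h>
    #include <stdint.h>

    int main() {
        uint32_t seq = 0;                  // plays the role of mWriteAckSequence / mDrainSequence

        // Requesting side: advance the sequence and mark a callback as pending.
        seq += 2;                          // bits 31..1: new sequence number
        seq |= 1;                          // bit 0: callback expected
        const uint32_t outstanding = seq;

        // Callback side: only an in-sequence acknowledge clears the pending bit;
        // stale (out-of-sequence) callbacks are ignored.
        if ((seq & 1) && outstanding == seq) {
            seq &= ~1;
        }
        assert(!(seq & 1));

        // flush()/standby(): discard any pending callback by advancing the
        // sequence and clearing bit 0 in one step.
        seq = (seq + 2) & ~1;
        return 0;
    }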
Change-Id: I7af10491f39dc0e7d7414862a9d8e763daa2e2b7 --- services/audioflinger/Threads.cpp | 118 +++++++++++++++++++++++--------------- services/audioflinger/Threads.h | 35 ++++++++--- 2 files changed, 100 insertions(+), 53 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index fda4211..e35f47e 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -939,8 +939,8 @@ AudioFlinger::PlaybackThread::PlaybackThread(const sp& audioFlinge mBytesRemaining(0), mCurrentWriteLength(0), mUseAsyncWrite(false), - mWriteBlocked(false), - mDraining(false), + mWriteAckSequence(0), + mDrainSequence(0), mScreenState(AudioFlinger::mScreenState), // index 0 is reserved for normal mixer's submix mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1), @@ -1491,29 +1491,31 @@ void AudioFlinger::PlaybackThread::audioConfigChanged_l(int event, int param) { void AudioFlinger::PlaybackThread::writeCallback() { ALOG_ASSERT(mCallbackThread != 0); - mCallbackThread->setWriteBlocked(false); + mCallbackThread->resetWriteBlocked(); } void AudioFlinger::PlaybackThread::drainCallback() { ALOG_ASSERT(mCallbackThread != 0); - mCallbackThread->setDraining(false); + mCallbackThread->resetDraining(); } -void AudioFlinger::PlaybackThread::setWriteBlocked(bool value) +void AudioFlinger::PlaybackThread::resetWriteBlocked(uint32_t sequence) { Mutex::Autolock _l(mLock); - mWriteBlocked = value; - if (!value) { + // reject out of sequence requests + if ((mWriteAckSequence & 1) && (sequence == mWriteAckSequence)) { + mWriteAckSequence &= ~1; mWaitWorkCV.signal(); } } -void AudioFlinger::PlaybackThread::setDraining(bool value) +void AudioFlinger::PlaybackThread::resetDraining(uint32_t sequence) { Mutex::Autolock _l(mLock); - mDraining = value; - if (!value) { + // reject out of sequence requests + if ((mDrainSequence & 1) && (sequence == mDrainSequence)) { + mDrainSequence &= ~1; mWaitWorkCV.signal(); } } @@ -1833,9 +1835,11 @@ ssize_t AudioFlinger::PlaybackThread::threadLoop_write() // Direct output and offload threads size_t offset = (mCurrentWriteLength - mBytesRemaining) / sizeof(int16_t); if (mUseAsyncWrite) { - mWriteBlocked = true; + ALOGW_IF(mWriteAckSequence & 1, "threadLoop_write(): out of sequence write request"); + mWriteAckSequence += 2; + mWriteAckSequence |= 1; ALOG_ASSERT(mCallbackThread != 0); - mCallbackThread->setWriteBlocked(true); + mCallbackThread->setWriteBlocked(mWriteAckSequence); } // FIXME We should have an implementation of timestamps for direct output threads. // They are used e.g for multichannel PCM playback over HDMI. @@ -1844,9 +1848,9 @@ ssize_t AudioFlinger::PlaybackThread::threadLoop_write() if (mUseAsyncWrite && ((bytesWritten < 0) || (bytesWritten == (ssize_t)mBytesRemaining))) { // do not wait for async callback in case of error of full write - mWriteBlocked = false; + mWriteAckSequence &= ~1; ALOG_ASSERT(mCallbackThread != 0); - mCallbackThread->setWriteBlocked(false); + mCallbackThread->setWriteBlocked(mWriteAckSequence); } } @@ -1861,9 +1865,10 @@ void AudioFlinger::PlaybackThread::threadLoop_drain() if (mOutput->stream->drain) { ALOGV("draining %s", (mMixerStatus == MIXER_DRAIN_TRACK) ? 
"early" : "full"); if (mUseAsyncWrite) { - mDraining = true; + ALOGW_IF(mDrainSequence & 1, "threadLoop_drain(): out of sequence drain request"); + mDrainSequence |= 1; ALOG_ASSERT(mCallbackThread != 0); - mCallbackThread->setDraining(true); + mCallbackThread->setDraining(mDrainSequence); } mOutput->stream->drain(mOutput->stream, (mMixerStatus == MIXER_DRAIN_TRACK) ? AUDIO_DRAIN_EARLY_NOTIFY @@ -2613,11 +2618,12 @@ void AudioFlinger::PlaybackThread::threadLoop_standby() ALOGV("Audio hardware entering standby, mixer %p, suspend count %d", this, mSuspended); mOutput->stream->common.standby(&mOutput->stream->common); if (mUseAsyncWrite != 0) { - mWriteBlocked = false; - mDraining = false; + // discard any pending drain or write ack by incrementing sequence + mWriteAckSequence = (mWriteAckSequence + 2) & ~1; + mDrainSequence = (mDrainSequence + 2) & ~1; ALOG_ASSERT(mCallbackThread != 0); - mCallbackThread->setWriteBlocked(false); - mCallbackThread->setDraining(false); + mCallbackThread->setWriteBlocked(mWriteAckSequence); + mCallbackThread->setDraining(mDrainSequence); } } @@ -3704,8 +3710,8 @@ AudioFlinger::AsyncCallbackThread::AsyncCallbackThread( const sp& offloadThread) : Thread(false /*canCallJava*/), mOffloadThread(offloadThread), - mWriteBlocked(false), - mDraining(false) + mWriteAckSequence(0), + mDrainSequence(0) { } @@ -3721,8 +3727,8 @@ void AudioFlinger::AsyncCallbackThread::onFirstRef() bool AudioFlinger::AsyncCallbackThread::threadLoop() { while (!exitPending()) { - bool writeBlocked; - bool draining; + uint32_t writeAckSequence; + uint32_t drainSequence; { Mutex::Autolock _l(mLock); @@ -3730,18 +3736,21 @@ bool AudioFlinger::AsyncCallbackThread::threadLoop() if (exitPending()) { break; } - writeBlocked = mWriteBlocked; - draining = mDraining; - ALOGV("AsyncCallbackThread mWriteBlocked %d mDraining %d", mWriteBlocked, mDraining); + ALOGV("AsyncCallbackThread mWriteAckSequence %d mDrainSequence %d", + mWriteAckSequence, mDrainSequence); + writeAckSequence = mWriteAckSequence; + mWriteAckSequence &= ~1; + drainSequence = mDrainSequence; + mDrainSequence &= ~1; } { sp offloadThread = mOffloadThread.promote(); if (offloadThread != 0) { - if (writeBlocked == false) { - offloadThread->setWriteBlocked(false); + if (writeAckSequence & 1) { + offloadThread->resetWriteBlocked(writeAckSequence >> 1); } - if (draining == false) { - offloadThread->setDraining(false); + if (drainSequence & 1) { + offloadThread->resetDraining(drainSequence >> 1); } } } @@ -3757,20 +3766,36 @@ void AudioFlinger::AsyncCallbackThread::exit() mWaitWorkCV.broadcast(); } -void AudioFlinger::AsyncCallbackThread::setWriteBlocked(bool value) +void AudioFlinger::AsyncCallbackThread::setWriteBlocked(uint32_t sequence) { Mutex::Autolock _l(mLock); - mWriteBlocked = value; - if (!value) { + // bit 0 is cleared + mWriteAckSequence = sequence << 1; +} + +void AudioFlinger::AsyncCallbackThread::resetWriteBlocked() +{ + Mutex::Autolock _l(mLock); + // ignore unexpected callbacks + if (mWriteAckSequence & 2) { + mWriteAckSequence |= 1; mWaitWorkCV.signal(); } } -void AudioFlinger::AsyncCallbackThread::setDraining(bool value) +void AudioFlinger::AsyncCallbackThread::setDraining(uint32_t sequence) +{ + Mutex::Autolock _l(mLock); + // bit 0 is cleared + mDrainSequence = sequence << 1; +} + +void AudioFlinger::AsyncCallbackThread::resetDraining() { Mutex::Autolock _l(mLock); - mDraining = value; - if (!value) { + // ignore unexpected callbacks + if (mDrainSequence & 2) { + mDrainSequence |= 1; mWaitWorkCV.signal(); } } @@ -3858,7 
+3883,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr } tracksToRemove->add(track); } else if (track->framesReady() && track->isReady() && - !track->isPaused() && !track->isTerminated()) { + !track->isPaused() && !track->isTerminated() && !track->isStopping_2()) { ALOGVV("OffloadThread: track %d s=%08x [OK]", track->name(), cblk->mServer); if (track->mFillingUpStatus == Track::FS_FILLED) { track->mFillingUpStatus = Track::FS_ACTIVE; @@ -3901,6 +3926,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr standbyTime = systemTime() + standbyDelay; if (last) { mixerStatus = MIXER_DRAIN_TRACK; + mDrainSequence += 2; if (mHwPaused) { // It is possible to move from PAUSED to STOPPING_1 without // a resume so we must ensure hardware is running @@ -3911,7 +3937,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr } } else if (track->isStopping_2()) { // Drain has completed, signal presentation complete - if (!mDraining || !last) { + if (!(mDrainSequence & 1) || !last) { track->mState = TrackBase::STOPPED; size_t audioHALFrames = (mOutput->stream->get_latency(mOutput->stream)*mSampleRate) / 1000; @@ -3956,8 +3982,9 @@ void AudioFlinger::OffloadThread::flushOutput_l() // must be called with thread mutex locked bool AudioFlinger::OffloadThread::waitingAsyncCallback_l() { - ALOGV("waitingAsyncCallback_l mWriteBlocked %d mDraining %d", mWriteBlocked, mDraining); - if (mUseAsyncWrite && (mWriteBlocked || mDraining)) { + ALOGVV("waitingAsyncCallback_l mWriteAckSequence %d mDrainSequence %d", + mWriteAckSequence, mDrainSequence); + if (mUseAsyncWrite && ((mWriteAckSequence & 1) || (mDrainSequence & 1))) { return true; } return false; @@ -3993,11 +4020,12 @@ void AudioFlinger::OffloadThread::flushHw_l() mPausedWriteLength = 0; mPausedBytesRemaining = 0; if (mUseAsyncWrite) { - mWriteBlocked = false; - mDraining = false; + // discard any pending drain or write ack by incrementing sequence + mWriteAckSequence = (mWriteAckSequence + 2) & ~1; + mDrainSequence = (mDrainSequence + 2) & ~1; ALOG_ASSERT(mCallbackThread != 0); - mCallbackThread->setWriteBlocked(false); - mCallbackThread->setDraining(false); + mCallbackThread->setWriteBlocked(mWriteAckSequence); + mCallbackThread->setDraining(mDrainSequence); } } diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 1333de2..3fe470c 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -377,9 +377,9 @@ protected: void removeTracks_l(const Vector< sp >& tracksToRemove); void writeCallback(); - void setWriteBlocked(bool value); + void resetWriteBlocked(uint32_t sequence); void drainCallback(); - void setDraining(bool value); + void resetDraining(uint32_t sequence); static int asyncCallback(stream_callback_event_t event, void *param, void *cookie); @@ -577,8 +577,19 @@ private: size_t mBytesRemaining; size_t mCurrentWriteLength; bool mUseAsyncWrite; - bool mWriteBlocked; - bool mDraining; + // mWriteAckSequence contains current write sequence on bits 31-1. The write sequence is + // incremented each time a write(), a flush() or a standby() occurs. + // Bit 0 is set when a write blocks and indicates a callback is expected. + // Bit 0 is reset by the async callback thread calling resetWriteBlocked(). Out of sequence + // callbacks are ignored. + uint32_t mWriteAckSequence; + // mDrainSequence contains current drain sequence on bits 31-1. 
The drain sequence is + // incremented each time a drain is requested or a flush() or standby() occurs. + // Bit 0 is set when the drain() command is called at the HAL and indicates a callback is + // expected. + // Bit 0 is reset by the async callback thread calling resetDraining(). Out of sequence + // callbacks are ignored. + uint32_t mDrainSequence; bool mSignalPending; sp mCallbackThread; @@ -755,13 +766,21 @@ public: virtual void onFirstRef(); void exit(); - void setWriteBlocked(bool value); - void setDraining(bool value); + void setWriteBlocked(uint32_t sequence); + void resetWriteBlocked(); + void setDraining(uint32_t sequence); + void resetDraining(); private: wp mOffloadThread; - bool mWriteBlocked; - bool mDraining; + // mWriteAckSequence corresponds to the last write sequence passed by the offload thread via + // setWriteBlocked(). The sequence is shifted one bit to the left and the lsb is used + // to indicate that the callback has been received via resetWriteBlocked() + uint32_t mWriteAckSequence; + // mDrainSequence corresponds to the last drain sequence passed by the offload thread via + // setDraining(). The sequence is shifted one bit to the left and the lsb is used + // to indicate that the callback has been received via resetDraining() + uint32_t mDrainSequence; Condition mWaitWorkCV; Mutex mLock; }; -- cgit v1.1 From e6d5794b2173ffe4e7509203a91778b19eafcebf Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Fri, 6 Sep 2013 12:03:26 -0700 Subject: Make sure the encoder instance is safely release()'d before its reference count drops to 0. Change-Id: I6601226e2754f17e964125cacf02dc02cfbf6231 related-to-bug: 10150263 --- cmds/screenrecord/screenrecord.cpp | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp index 94c626a..68289a5 100644 --- a/cmds/screenrecord/screenrecord.cpp +++ b/cmds/screenrecord/screenrecord.cpp @@ -159,6 +159,9 @@ static status_t prepareEncoder(float displayFps, sp* pCodec, err = codec->configure(format, NULL, NULL, MediaCodec::CONFIGURE_FLAG_ENCODE); if (err != NO_ERROR) { + codec->release(); + codec.clear(); + fprintf(stderr, "ERROR: unable to configure codec (err=%d)\n", err); return err; } @@ -167,6 +170,9 @@ static status_t prepareEncoder(float displayFps, sp* pCodec, sp bufferProducer; err = codec->createInputSurface(&bufferProducer); if (err != NO_ERROR) { + codec->release(); + codec.clear(); + fprintf(stderr, "ERROR: unable to create encoder input surface (err=%d)\n", err); return err; @@ -175,6 +181,9 @@ static status_t prepareEncoder(float displayFps, sp* pCodec, ALOGV("Starting codec"); err = codec->start(); if (err != NO_ERROR) { + codec->release(); + codec.clear(); + fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err); return err; } @@ -453,6 +462,7 @@ static status_t recordScreen(const char* fileName) { sp encoder; sp bufferProducer; err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer); + if (err != NO_ERROR && !gSizeSpecified) { if (gVideoWidth != kFallbackWidth && gVideoHeight != kFallbackHeight) { ALOGV("Retrying with 720p"); @@ -470,7 +480,12 @@ static status_t recordScreen(const char* fileName) { // Configure virtual display. sp dpy; err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy); - if (err != NO_ERROR) return err; + if (err != NO_ERROR) { + encoder->release(); + encoder.clear(); + + return err; + } // Configure, but do not start, muxer. 
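
A side note on the mWriteAckSequence/mDrainSequence scheme introduced by the Threads.cpp/Threads.h patch just above: the comments there describe an odd/even handshake in which bits 31-1 carry a sequence number, bit 0 marks that a callback is pending, and out-of-sequence callbacks are dropped. The standalone sketch below is not part of any patch; it restates that handshake using the C++ standard library, so the class name and the use of std::mutex are illustrative assumptions rather than AudioFlinger code.

```cpp
// Illustrative only: mirrors the odd/even sequence idea from the patch above,
// using std::mutex instead of android::Mutex/Condition.
#include <cstdint>
#include <cstdio>
#include <mutex>

class AckSequencer {
public:
    // Writer side, when an asynchronous write is issued: advance the sequence
    // (upper bits) and set bit 0 = "callback expected".
    uint32_t beginWrite() {
        std::lock_guard<std::mutex> lock(mLock);
        mSequence = (mSequence + 2) | 1;
        return mSequence;
    }
    // Flush or standby: discard any pending ack by advancing the sequence and
    // clearing bit 0, so a late callback becomes stale.
    void discardPending() {
        std::lock_guard<std::mutex> lock(mLock);
        mSequence = (mSequence + 2) & ~1u;
    }
    // Callback side, passing back the sequence it captured earlier. Only an
    // ack that matches the currently pending sequence clears bit 0.
    bool acknowledge(uint32_t sequence) {
        std::lock_guard<std::mutex> lock(mLock);
        if ((mSequence & 1) && sequence == mSequence) {
            mSequence &= ~1u;   // in sequence: would signal the waiting thread here
            return true;
        }
        return false;           // stale or unexpected callback: ignored
    }
    bool callbackPending() const {
        std::lock_guard<std::mutex> lock(mLock);
        return (mSequence & 1) != 0;
    }
private:
    mutable std::mutex mLock;
    uint32_t mSequence = 0;     // bits 31..1: sequence, bit 0: callback pending
};

int main() {
    AckSequencer seq;
    uint32_t s1 = seq.beginWrite();    // callback now pending
    seq.discardPending();              // e.g. a flush before the ack arrives
    std::printf("stale ack accepted: %d\n", seq.acknowledge(s1));   // 0
    uint32_t s2 = seq.beginWrite();
    std::printf("fresh ack accepted: %d\n", seq.acknowledge(s2));   // 1
    std::printf("still pending: %d\n", seq.callbackPending());      // 0
    return 0;
}
```
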
sp muxer = new MediaMuxer(fileName, @@ -481,7 +496,12 @@ static status_t recordScreen(const char* fileName) { // Main encoder loop. err = runEncoder(encoder, muxer); - if (err != NO_ERROR) return err; + if (err != NO_ERROR) { + encoder->release(); + encoder.clear(); + + return err; + } if (gVerbose) { printf("Stopping encoder and muxer\n"); -- cgit v1.1 From 84b7fb0c88ddd05ed7c148c82fe1691040a9404d Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Fri, 6 Sep 2013 11:46:01 -0700 Subject: Camera: Send VIDEO_SNAPSHOT intent for video snapshot case Bug: 9924539 Change-Id: Ief39eff79c31f2c2aa5df2fe1a0f04c4afc74690 --- services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp index ad1590a..e7f753e 100644 --- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp +++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp @@ -438,6 +438,7 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture( ATRACE_CALL(); SharedParameters::Lock l(client->getParameters()); Vector outputStreams; + uint8_t captureIntent = static_cast(ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE); /** * Set up output streams in the request @@ -456,6 +457,7 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture( if (l.mParameters.state == Parameters::VIDEO_SNAPSHOT) { outputStreams.push(client->getRecordingStreamId()); + captureIntent = static_cast(ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT); } res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS, @@ -465,6 +467,10 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture( &mCaptureId, 1); } if (res == OK) { + res = mCaptureRequest.update(ANDROID_CONTROL_CAPTURE_INTENT, + &captureIntent, 1); + } + if (res == OK) { res = mCaptureRequest.sort(); } -- cgit v1.1 From 1e479c0f4cb3e2174dde0b02e5656fb658f73495 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Fri, 6 Sep 2013 16:55:14 -0700 Subject: camera3: Dump latest request sent with dumpsys Bug: 10606535 Change-Id: I0dd5ca3e09f45701659ab9ebd3b51939a2f6e6ba --- .../libcameraservice/device3/Camera3Device.cpp | 61 ++++++++++++++++++++++ .../libcameraservice/device3/Camera3Device.h | 18 ++++++- 2 files changed, 78 insertions(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index 47321e0..2902340 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -281,6 +281,14 @@ status_t Camera3Device::dump(int fd, const Vector &args) { } write(fd, lines.string(), lines.size()); + { + lines = String8(" Last request sent:\n"); + write(fd, lines.string(), lines.size()); + + CameraMetadata lastRequest = getLatestRequest(); + lastRequest.dump(fd, /*verbosity*/2, /*indentation*/6); + } + if (mHal3Device != NULL) { lines = String8(" HAL device dump:\n"); write(fd, lines.string(), lines.size()); @@ -1397,6 +1405,43 @@ void Camera3Device::notify(const camera3_notify_msg *msg) { } } +CameraMetadata Camera3Device::getLatestRequest() { + ALOGV("%s", __FUNCTION__); + + bool locked = false; + + /** + * Why trylock instead of autolock? + * + * We want to be able to call this function from + * dumpsys, which often happens during deadlocks. 
+ */ + for (size_t i = 0; i < kDumpLockAttempts; ++i) { + if (mLock.tryLock() == NO_ERROR) { + locked = true; + break; + } else { + usleep(kDumpSleepDuration); + } + } + + if (!locked) { + ALOGW("%s: Possible deadlock detected", __FUNCTION__); + } + + CameraMetadata retVal; + + if (mRequestThread != NULL) { + retVal = mRequestThread->getLatestRequest(); + } + + if (locked) { + mLock.unlock(); + } + + return retVal; +} + /** * RequestThread inner class methods */ @@ -1677,6 +1722,14 @@ bool Camera3Device::RequestThread::threadLoop() { return false; } + // Update the latest request sent to HAL + if (request.settings != NULL) { // Don't update them if they were unchanged + Mutex::Autolock al(mLatestRequestMutex); + + camera_metadata_t* cloned = clone_camera_metadata(request.settings); + mLatestRequest.acquire(cloned); + } + if (request.settings != NULL) { nextRequest->mSettings.unlock(request.settings); } @@ -1729,6 +1782,14 @@ bool Camera3Device::RequestThread::threadLoop() { return true; } +CameraMetadata Camera3Device::RequestThread::getLatestRequest() const { + Mutex::Autolock al(mLatestRequestMutex); + + ALOGV("RequestThread::%s", __FUNCTION__); + + return mLatestRequest; +} + void Camera3Device::RequestThread::cleanUpFailedRequest( camera3_capture_request_t &request, sp &nextRequest, diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h index 6565048..0b3ad6e 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.h +++ b/services/camera/libcameraservice/device3/Camera3Device.h @@ -127,6 +127,8 @@ class Camera3Device : virtual status_t flush(); private: + static const size_t kDumpLockAttempts = 10; + static const size_t kDumpSleepDuration = 100000; // 0.10 sec static const size_t kInFlightWarnLimit = 20; static const nsecs_t kShutdownTimeout = 5000000000; // 5 sec struct RequestTrigger; @@ -175,6 +177,13 @@ class Camera3Device : typedef List > RequestList; /** + * Get the last request submitted to the hal by the request thread. + * + * Takes mLock. + */ + virtual CameraMetadata getLatestRequest(); + + /** * Lock-held version of waitUntilDrained. Will transition to IDLE on * success. */ @@ -285,6 +294,12 @@ class Camera3Device : */ status_t waitUntilRequestProcessed(int32_t requestId, nsecs_t timeout); + /** + * Get the latest request that was sent to the HAL + * with process_capture_request. 
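
The Camera3Device::getLatestRequest() hunk above uses a bounded try-lock loop so that dumpsys can still report the last request even when another thread is wedged while holding mLock. Below is a standalone sketch of that best-effort pattern using std::mutex::try_lock; the global state string and helper names are placeholders, and the attempt count and 100 ms sleep simply mirror kDumpLockAttempts/kDumpSleepDuration from the patch.

```cpp
// Standalone sketch of a "best effort" lock for dump paths: try the lock a
// bounded number of times, and still produce output (with a warning) if it
// can never be acquired.
#include <chrono>
#include <cstdio>
#include <mutex>
#include <string>
#include <thread>

std::mutex gStateLock;
std::string gLatestState = "request #42";   // stand-in for the latest request

std::string dumpLatestState() {
    const int kAttempts = 10;                             // mirrors kDumpLockAttempts
    const auto kSleep = std::chrono::milliseconds(100);   // mirrors kDumpSleepDuration

    bool locked = false;
    for (int i = 0; i < kAttempts; ++i) {
        if (gStateLock.try_lock()) {
            locked = true;
            break;
        }
        std::this_thread::sleep_for(kSleep);
    }
    if (!locked) {
        std::fprintf(stderr, "dump: possible deadlock, reporting without lock\n");
    }

    // Copy the state out even when unlocked; this is the deliberate best-effort
    // trade-off the patch makes for dump-time diagnostics.
    std::string snapshot = gLatestState;

    if (locked) {
        gStateLock.unlock();
    }
    return snapshot;
}

int main() {
    std::printf("%s\n", dumpLatestState().c_str());
    return 0;
}
```
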
+ */ + CameraMetadata getLatestRequest() const; + protected: virtual bool threadLoop(); @@ -343,10 +358,11 @@ class Camera3Device : uint32_t mFrameNumber; - Mutex mLatestRequestMutex; + mutable Mutex mLatestRequestMutex; Condition mLatestRequestSignal; // android.request.id for latest process_capture_request int32_t mLatestRequestId; + CameraMetadata mLatestRequest; typedef KeyedVector TriggerMap; Mutex mTriggerMutex; -- cgit v1.1 From d1d6467d3bcbc1305eeba0176a2edf04925c368e Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Fri, 6 Sep 2013 15:00:01 -0700 Subject: Camera: Correct stream id types Bug: 10604704 Change-Id: I223ac5d200eb6439c08955fb7ad586592099fbae --- camera/tests/ProCameraTests.cpp | 26 ++++++++++------------ .../camera/libcameraservice/api1/Camera2Client.cpp | 4 ++-- .../api1/client2/CaptureSequencer.cpp | 2 +- .../api1/client2/StreamingProcessor.cpp | 6 ++--- .../api1/client2/StreamingProcessor.h | 8 +++---- .../libcameraservice/api1/client2/ZslProcessor.cpp | 8 +++---- .../api1/client2/ZslProcessor3.cpp | 8 +++---- .../libcameraservice/api2/CameraDeviceClient.cpp | 2 +- .../libcameraservice/device3/Camera3Device.cpp | 4 ++-- 9 files changed, 33 insertions(+), 35 deletions(-) diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index f203949..e9aa99d 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -271,7 +271,6 @@ protected: CpuConsumer::LockedBuffer buf; status_t ret; - EXPECT_OK(ret); if (OK == (ret = consumer->lockNextBuffer(&buf))) { dout << "Frame received on streamId = " << streamId << @@ -482,7 +481,7 @@ protected: * Creating a streaming request for these output streams from a template, * and submit it */ - void createSubmitRequestForStreams(uint8_t* streamIds, size_t count, int requestCount=-1) { + void createSubmitRequestForStreams(int32_t* streamIds, size_t count, int requestCount=-1) { ASSERT_NE((void*)NULL, streamIds); ASSERT_LT(0u, count); @@ -629,7 +628,7 @@ TEST_F(ProCameraTest, DISABLED_StreamingImageSingle) { EXPECT_OK(mCamera->exclusiveTryLock()); - uint8_t streams[] = { depthStreamId }; + int32_t streams[] = { depthStreamId }; ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams( streams, /*count*/1)); @@ -706,7 +705,7 @@ TEST_F(ProCameraTest, DISABLED_StreamingImageDual) { // set the output streams to just this stream ID // wow what a verbose API. - uint8_t allStreams[] = { streamId, depthStreamId }; + int32_t allStreams[] = { streamId, depthStreamId }; // IMPORTANT. bad things will happen if its not a uint8. size_t streamCount = sizeof(allStreams) / sizeof(allStreams[0]); camera_metadata_entry_t entry; @@ -735,7 +734,7 @@ TEST_F(ProCameraTest, DISABLED_StreamingImageDual) { free_camera_metadata(request); - for (int i = 0; i < streamCount; ++i) { + for (size_t i = 0; i < streamCount; ++i) { EXPECT_OK(mCamera->deleteStream(allStreams[i])); } EXPECT_OK(mCamera->exclusiveUnlock()); @@ -777,7 +776,7 @@ TEST_F(ProCameraTest, CpuConsumerSingle) { // set the output streams to just this stream ID - uint8_t allStreams[] = { streamId }; + int32_t allStreams[] = { streamId }; camera_metadata_entry_t entry; uint32_t tag = static_cast(ANDROID_REQUEST_OUTPUT_STREAMS); int find = find_camera_metadata_entry(request, tag, &entry); @@ -848,7 +847,7 @@ TEST_F(ProCameraTest, CpuConsumerDual) { // set the output streams to just this stream ID // wow what a verbose API. 
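
The type corrections in this patch matter because ANDROID_REQUEST_OUTPUT_STREAMS entries are 32-bit values (the Camera3Device.cpp hunks further down switch from data.u8 to data.i32): keeping stream IDs in a uint8_t array silently truncates any ID above 255 and hands the metadata the wrong element width. A minimal standalone illustration of the truncation half of the problem follows; no camera metadata APIs are used and the ID value 300 is made up.

```cpp
// Demonstrates why stream IDs must be carried as int32_t: assigning a
// 32-bit ID into a uint8_t array keeps only the low 8 bits.
#include <cstdint>
#include <cstdio>

int main() {
    int32_t streamId = 300;   // hypothetical ID returned by the device

    uint8_t narrow[1] = { static_cast<uint8_t>(streamId) };   // the old element type
    int32_t wide[1]   = { streamId };                          // the patched element type

    std::printf("original id: %d\n", streamId);   // 300
    std::printf("as uint8_t : %d\n", narrow[0]);  // 44 (300 % 256)
    std::printf("as int32_t : %d\n", wide[0]);    // 300
    return 0;
}
```
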
- uint8_t allStreams[] = { streamId, depthStreamId }; + int32_t allStreams[] = { streamId, depthStreamId }; size_t streamCount = 2; camera_metadata_entry_t entry; uint32_t tag = static_cast(ANDROID_REQUEST_OUTPUT_STREAMS); @@ -923,7 +922,7 @@ TEST_F(ProCameraTest, ResultReceiver) { // set the output streams to just this stream ID - uint8_t allStreams[] = { streamId }; + int32_t allStreams[] = { streamId }; size_t streamCount = 1; camera_metadata_entry_t entry; uint32_t tag = static_cast(ANDROID_REQUEST_OUTPUT_STREAMS); @@ -974,7 +973,7 @@ TEST_F(ProCameraTest, DISABLED_WaitForResult) { EXPECT_OK(mCamera->exclusiveTryLock()); - uint8_t streams[] = { streamId }; + int32_t streams[] = { streamId }; ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1)); // Consume a couple of results @@ -1002,7 +1001,7 @@ TEST_F(ProCameraTest, WaitForSingleStreamBuffer) { EXPECT_OK(mCamera->exclusiveTryLock()); - uint8_t streams[] = { streamId }; + int32_t streams[] = { streamId }; ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1, /*requests*/TEST_CPU_FRAME_COUNT)); @@ -1049,7 +1048,7 @@ TEST_F(ProCameraTest, DISABLED_WaitForDualStreamBuffer) { EXPECT_OK(mCamera->exclusiveTryLock()); - uint8_t streams[] = { streamId, depthStreamId }; + int32_t streams[] = { streamId, depthStreamId }; ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/2, /*requests*/REQUEST_COUNT)); @@ -1128,7 +1127,7 @@ TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesSync) { EXPECT_OK(mCamera->exclusiveTryLock()); - uint8_t streams[] = { streamId }; + int32_t streams[] = { streamId }; ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1, /*requests*/NUM_REQUESTS)); @@ -1172,7 +1171,6 @@ TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesAsync) { } const int NUM_REQUESTS = 20 * TEST_CPU_FRAME_COUNT; - const int CONSECUTIVE_FAILS_ASSUME_TIME_OUT = 5; int streamId = -1; sp consumer; @@ -1183,7 +1181,7 @@ TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesAsync) { EXPECT_OK(mCamera->exclusiveTryLock()); - uint8_t streams[] = { streamId }; + int32_t streams[] = { streamId }; ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1, /*requests*/NUM_REQUESTS)); diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp index 0a18501..bda2887 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.cpp +++ b/services/camera/libcameraservice/api1/Camera2Client.cpp @@ -743,7 +743,7 @@ status_t Camera2Client::startPreviewL(Parameters ¶ms, bool restart) { return res; } - Vector outputStreams; + Vector outputStreams; bool callbacksEnabled = (params.previewCallbackFlags & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) || params.previewCallbackSurface; @@ -999,7 +999,7 @@ status_t Camera2Client::startRecordingL(Parameters ¶ms, bool restart) { return res; } - Vector outputStreams; + Vector outputStreams; outputStreams.push(getPreviewStreamId()); outputStreams.push(getRecordingStreamId()); diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp index e7f753e..ca3198f 100644 --- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp +++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp @@ -437,7 +437,7 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture( status_t res; ATRACE_CALL(); SharedParameters::Lock 
l(client->getParameters()); - Vector outputStreams; + Vector outputStreams; uint8_t captureIntent = static_cast(ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE); /** diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp index dfe8580..6076dae 100644 --- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp @@ -412,7 +412,7 @@ int StreamingProcessor::getRecordingStreamId() const { } status_t StreamingProcessor::startStream(StreamType type, - const Vector &outputStreams) { + const Vector &outputStreams) { ATRACE_CALL(); status_t res; @@ -830,8 +830,8 @@ void StreamingProcessor::releaseAllRecordingFramesLocked() { mRecordingHeapFree = mRecordingHeapCount; } -bool StreamingProcessor::isStreamActive(const Vector &streams, - uint8_t recordingStreamId) { +bool StreamingProcessor::isStreamActive(const Vector &streams, + int32_t recordingStreamId) { for (size_t i = 0; i < streams.size(); i++) { if (streams[i] == recordingStreamId) { return true; diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h index d879b83..833bb8f 100644 --- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h +++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h @@ -64,7 +64,7 @@ class StreamingProcessor: RECORD }; status_t startStream(StreamType type, - const Vector &outputStreams); + const Vector &outputStreams); // Toggle between paused and unpaused. Stream must be started first. status_t togglePauseStream(bool pause); @@ -97,7 +97,7 @@ class StreamingProcessor: StreamType mActiveRequest; bool mPaused; - Vector mActiveStreamIds; + Vector mActiveStreamIds; // Preview-related members int32_t mPreviewRequestId; @@ -132,8 +132,8 @@ class StreamingProcessor: void releaseAllRecordingFramesLocked(); // Determine if the specified stream is currently in use - static bool isStreamActive(const Vector &streams, - uint8_t recordingStreamId); + static bool isStreamActive(const Vector &streams, + int32_t recordingStreamId); }; diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp index 3b118f4..08ab357 100644 --- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp @@ -300,12 +300,12 @@ status_t ZslProcessor::pushToReprocess(int32_t requestId) { uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS; res = request.update(ANDROID_REQUEST_TYPE, &requestType, 1); - uint8_t inputStreams[1] = - { static_cast(mZslReprocessStreamId) }; + int32_t inputStreams[1] = + { mZslReprocessStreamId }; if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS, inputStreams, 1); - uint8_t outputStreams[1] = - { static_cast(client->getCaptureStreamId()) }; + int32_t outputStreams[1] = + { client->getCaptureStreamId() }; if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS, outputStreams, 1); res = request.update(ANDROID_REQUEST_ID, diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp index 7c4da50..3e05091 100644 --- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp @@ -247,13 +247,13 @@ status_t 
ZslProcessor3::pushToReprocess(int32_t requestId) { uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS; res = request.update(ANDROID_REQUEST_TYPE, &requestType, 1); - uint8_t inputStreams[1] = - { static_cast(mZslStreamId) }; + int32_t inputStreams[1] = + { mZslStreamId }; if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS, inputStreams, 1); // TODO: Shouldn't we also update the latest preview frame? - uint8_t outputStreams[1] = - { static_cast(client->getCaptureStreamId()) }; + int32_t outputStreams[1] = + { client->getCaptureStreamId() }; if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS, outputStreams, 1); res = request.update(ANDROID_REQUEST_ID, diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp index f147c06..055ea12 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp @@ -137,7 +137,7 @@ status_t CameraDeviceClient::submitRequest(sp request, * Write in the output stream IDs which we calculate from * the capture request's list of surface targets */ - Vector outputStreamIds; + Vector outputStreamIds; outputStreamIds.setCapacity(request->mSurfaceList.size()); for (size_t i = 0; i < request->mSurfaceList.size(); ++i) { sp surface = request->mSurfaceList[i]; diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index 47321e0..44ea739 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -978,7 +978,7 @@ sp Camera3Device::createCaptureRequest( newRequest->mSettings.find(ANDROID_REQUEST_INPUT_STREAMS); if (inputStreams.count > 0) { if (mInputStream == NULL || - mInputStream->getId() != inputStreams.data.u8[0]) { + mInputStream->getId() != inputStreams.data.i32[0]) { CLOGE("Request references unknown input stream %d", inputStreams.data.u8[0]); return NULL; @@ -1007,7 +1007,7 @@ sp Camera3Device::createCaptureRequest( } for (size_t i = 0; i < streams.count; i++) { - int idx = mOutputStreams.indexOfKey(streams.data.u8[i]); + int idx = mOutputStreams.indexOfKey(streams.data.i32[i]); if (idx == NAME_NOT_FOUND) { CLOGE("Request references unknown stream %d", streams.data.u8[i]); -- cgit v1.1 From 813e2a74853bde19e37d878c596a044b3f299efc Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Sat, 31 Aug 2013 12:59:48 -0700 Subject: audioflinger: no effects on offloaded tracks Invalidate offloaded tracks when an effect is enabled so that the track is recreated in PCM mode and the effect can be applied. This is temporary until effect offloading is implemented. Bug: 8174034. 
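
Before the diff below, a short illustrative sketch of the bookkeeping this change adds: AudioFlinger records when a global (output-mix) effect was last enabled and keeps treating global effects as enabled for a grace window, so an offloaded track that tries to start inside that window is invalidated and comes back as a normal PCM track. The class below and its use of std::chrono are assumptions made for the sketch; only the two-hour window mirrors kMinGlobalEffectEnabletimeNs from the patch.

```cpp
// Illustrative sketch of the "recently enabled global effect" window; time
// handling uses std::chrono instead of systemTime()/nsecs_t.
#include <chrono>
#include <cstdio>

using Clock = std::chrono::steady_clock;

class GlobalEffectTracker {
public:
    // Called when an output-mix effect is enabled; in the real code this also
    // invalidates tracks on every offloaded playback thread.
    void onGlobalEffectEnable() {
        mLastEnableTime = Clock::now();
        mEverEnabled = true;
    }

    // Offloaded tracks consult this before starting; while it returns true the
    // track is invalidated so the client falls back to a PCM path.
    bool isGlobalEffectConsideredEnabled() const {
        if (!mEverEnabled) {
            return false;
        }
        // Grace period mirroring kMinGlobalEffectEnabletimeNs (2 hours) so that
        // toggling a Visualizer on and off does not thrash offloaded tracks.
        return (Clock::now() - mLastEnableTime) < std::chrono::hours(2);
    }

private:
    Clock::time_point mLastEnableTime{};
    bool mEverEnabled = false;
};

int main() {
    GlobalEffectTracker tracker;
    std::printf("before enable: %d\n", tracker.isGlobalEffectConsideredEnabled());  // 0
    tracker.onGlobalEffectEnable();
    std::printf("after enable : %d\n", tracker.isGlobalEffectConsideredEnabled());  // 1
    return 0;
}
```
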
Change-Id: I77b8b54a10db6cb8334be76d863ea7e720eaad09 --- services/audioflinger/AudioFlinger.cpp | 40 +++++++++++++++++++++++++++++++++- services/audioflinger/AudioFlinger.h | 6 +++++ services/audioflinger/Effects.cpp | 34 +++++++++++++++++++---------- services/audioflinger/Effects.h | 4 ++++ services/audioflinger/Tracks.cpp | 12 +++++++++- 5 files changed, 83 insertions(+), 13 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index b8a6b37..626b5c2 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -98,6 +98,11 @@ size_t AudioFlinger::mTeeSinkOutputFrames = kTeeSinkOutputFramesDefault; size_t AudioFlinger::mTeeSinkTrackFrames = kTeeSinkTrackFramesDefault; #endif +//TODO: remove when effect offload is implemented +// In order to avoid invalidating offloaded tracks each time a Visualizer is turned on and off +// we define a minimum time during which a global effect is considered enabled. +static const nsecs_t kMinGlobalEffectEnabletimeNs = seconds(7200); + // ---------------------------------------------------------------------------- static int load_audio_interface(const char *if_name, audio_hw_device_t **dev) @@ -141,7 +146,8 @@ AudioFlinger::AudioFlinger() mMode(AUDIO_MODE_INVALID), mBtNrecIsOff(false), mIsLowRamDevice(true), - mIsDeviceTypeKnown(false) + mIsDeviceTypeKnown(false), + mGlobalEffectEnableTime(0) { getpid_cached = getpid(); char value[PROPERTY_VALUE_MAX]; @@ -2314,6 +2320,38 @@ status_t AudioFlinger::moveEffectChain_l(int sessionId, return NO_ERROR; } +bool AudioFlinger::isGlobalEffectEnabled_l() +{ + if (mGlobalEffectEnableTime != 0 && + ((systemTime() - mGlobalEffectEnableTime) < kMinGlobalEffectEnabletimeNs)) { + return true; + } + + for (size_t i = 0; i < mPlaybackThreads.size(); i++) { + sp ec = + mPlaybackThreads.valueAt(i)->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX); + if (ec != 0 && ec->isEnabled()) { + return true; + } + } + return false; +} + +void AudioFlinger::onGlobalEffectEnable() +{ + Mutex::Autolock _l(mLock); + + mGlobalEffectEnableTime = systemTime(); + + for (size_t i = 0; i < mPlaybackThreads.size(); i++) { + sp t = mPlaybackThreads.valueAt(i); + if (t->mType == ThreadBase::OFFLOAD) { + t->invalidateTracks(AUDIO_STREAM_MUSIC); + } + } + +} + struct Entry { #define MAX_NAME 32 // %Y%m%d%H%M%S_%d.wav char mName[MAX_NAME]; diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 5df04f4..0992308 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -466,6 +466,10 @@ private: void removeClient_l(pid_t pid); void removeNotificationClient(pid_t pid); + //TODO: remove when effect offload is implemented + bool isGlobalEffectEnabled_l(); + void onGlobalEffectEnable(); + class AudioHwDevice { public: enum Flags { @@ -641,6 +645,8 @@ public: private: bool mIsLowRamDevice; bool mIsDeviceTypeKnown; + //TODO: remove when effect offload is implemented + nsecs_t mGlobalEffectEnableTime; // when a global effect was last enabled }; #undef INCLUDING_FROM_AUDIOFLINGER_H diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp index d5a21a7..86671a9 100644 --- a/services/audioflinger/Effects.cpp +++ b/services/audioflinger/Effects.cpp @@ -593,17 +593,6 @@ status_t AudioFlinger::EffectModule::setEnabled_l(bool enabled) h->setEnabled(enabled); } } -//EL_FIXME not sure why this is needed? 
-// sp thread = mThread.promote(); -// if (thread == 0) { -// return NO_ERROR; -// } -// -// if ((thread->type() == ThreadBase::OFFLOAD) && (enabled)) { -// PlaybackThread *p = (PlaybackThread *)thread.get(); -// ALOGV("setEnabled: Offload, invalidate tracks"); -// p->invalidateTracks(AUDIO_STREAM_MUSIC); -// } } return NO_ERROR; } @@ -942,6 +931,17 @@ status_t AudioFlinger::EffectHandle::enable() thread->checkSuspendOnEffectEnabled(mEffect, false, mEffect->sessionId()); } mEnabled = false; + } else { + //TODO: remove when effect offload is implemented + if (thread != 0) { + if ((thread->type() == ThreadBase::OFFLOAD)) { + PlaybackThread *t = (PlaybackThread *)thread.get(); + t->invalidateTracks(AUDIO_STREAM_MUSIC); + } + if (mEffect->sessionId() == AUDIO_SESSION_OUTPUT_MIX) { + thread->mAudioFlinger->onGlobalEffectEnable(); + } + } } return status; } @@ -1728,4 +1728,16 @@ void AudioFlinger::EffectChain::checkSuspendOnEffectEnabled(const spisEnabled()) { + return true; + } + } + return false; +} + }; // namespace android diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h index 0b7fb83..bac50f2 100644 --- a/services/audioflinger/Effects.h +++ b/services/audioflinger/Effects.h @@ -303,6 +303,10 @@ public: void clearInputBuffer(); + // At least one effect in the chain is enabled + bool isEnabled(); + + void dump(int fd, const Vector& args); protected: diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 2042050..6002aa3 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -543,7 +543,17 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { - Mutex::Autolock _l(thread->mLock); + //TODO: remove when effect offload is implemented + if (isOffloaded()) { + Mutex::Autolock _laf(thread->mAudioFlinger->mLock); + Mutex::Autolock _lth(thread->mLock); + sp ec = thread->getEffectChain_l(mSessionId); + if (thread->mAudioFlinger->isGlobalEffectEnabled_l() || (ec != 0 && ec->isEnabled())) { + invalidate(); + return PERMISSION_DENIED; + } + } + Mutex::Autolock _lth(thread->mLock); track_state state = mState; // here the track could be either new, or restarted // in both cases "unstop" the track -- cgit v1.1 From 7f9551f75eedb3e4e1fe8feaaba48d8080635fc4 Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Mon, 9 Sep 2013 15:48:58 -0700 Subject: MediaMuxer: Hook up setLocation method This method is needed when mediamuxer is used for camera video recording. Bug: 10594784 Change-Id: I9bd006a07e5e2ac7019849e3f4f7cf7b8356d669 --- include/media/stagefright/MediaMuxer.h | 10 ++++++++++ media/libstagefright/MediaMuxer.cpp | 10 ++++++++++ 2 files changed, 20 insertions(+) diff --git a/include/media/stagefright/MediaMuxer.h b/include/media/stagefright/MediaMuxer.h index c1fdbad..ff6a66e 100644 --- a/include/media/stagefright/MediaMuxer.h +++ b/include/media/stagefright/MediaMuxer.h @@ -79,6 +79,16 @@ public: status_t setOrientationHint(int degrees); /** + * Set the location. + * @param latitude The latitude in degree x 1000. Its value must be in the range + * [-900000, 900000]. + * @param longitude The longitude in degree x 1000. Its value must be in the range + * [-1800000, 1800000]. + * @return OK if no error. + */ + status_t setLocation(int latitude, int longitude); + + /** * Stop muxing. * This method is a blocking call. 
Depending on how * much data is bufferred internally, the time needed for stopping diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp index 94ce5de..d87e910 100644 --- a/media/libstagefright/MediaMuxer.cpp +++ b/media/libstagefright/MediaMuxer.cpp @@ -103,6 +103,16 @@ status_t MediaMuxer::setOrientationHint(int degrees) { return OK; } +status_t MediaMuxer::setLocation(int latitude, int longitude) { + Mutex::Autolock autoLock(mMuxerLock); + if (mState != INITIALIZED) { + ALOGE("setLocation() must be called before start()."); + return INVALID_OPERATION; + } + ALOGV("Setting location: latitude = %d, longitude = %d", latitude, longitude); + return mWriter->setGeoData(latitude, longitude); +} + status_t MediaMuxer::start() { Mutex::Autolock autoLock(mMuxerLock); if (mState == INITIALIZED) { -- cgit v1.1 From 9a54bc27876acd5d8be5b1fc3dc46701fe76fbb3 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Mon, 9 Sep 2013 09:08:44 -0700 Subject: audioflinger: add HOTWORD audio source. HOTWORD is a special capture audio source that uses the same audio tuning as VOICE_RECOGNITION but allows an active capture to be preempted. Bug: 10640877. Change-Id: Iea09a11cbcdbacc8d434e5230e7559dcd1b44ac0 --- services/audioflinger/AudioPolicyService.cpp | 12 ++++++++++-- services/audioflinger/RecordTracks.h | 1 + services/audioflinger/ServiceUtilities.cpp | 7 +++++++ services/audioflinger/ServiceUtilities.h | 1 + services/audioflinger/Threads.cpp | 4 ++++ services/audioflinger/Tracks.cpp | 10 ++++++++++ 6 files changed, 33 insertions(+), 2 deletions(-) diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp index 900b411..4be292f 100644 --- a/services/audioflinger/AudioPolicyService.cpp +++ b/services/audioflinger/AudioPolicyService.cpp @@ -296,9 +296,14 @@ audio_io_handle_t AudioPolicyService::getInput(audio_source_t inputSource, return 0; } // already checked by client, but double-check in case the client wrapper is bypassed - if (uint32_t(inputSource) >= AUDIO_SOURCE_CNT) { + if (inputSource >= AUDIO_SOURCE_CNT && inputSource != AUDIO_SOURCE_HOTWORD) { return 0; } + + if ((inputSource == AUDIO_SOURCE_HOTWORD) && !captureHotwordAllowed()) { + return 0; + } + Mutex::Autolock _l(mLock); // the audio_in_acoustics_t parameter is ignored by get_input() audio_io_handle_t input = mpAudioPolicy->get_input(mpAudioPolicy, inputSource, samplingRate, @@ -308,7 +313,10 @@ audio_io_handle_t AudioPolicyService::getInput(audio_source_t inputSource, return input; } // create audio pre processors according to input source - ssize_t index = mInputSources.indexOfKey(inputSource); + audio_source_t aliasSource = (inputSource == AUDIO_SOURCE_HOTWORD) ? 
+ AUDIO_SOURCE_VOICE_RECOGNITION : inputSource; + + ssize_t index = mInputSources.indexOfKey(aliasSource); if (index < 0) { return input; } diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h index ffe3e9f..cd8f70c 100644 --- a/services/audioflinger/RecordTracks.h +++ b/services/audioflinger/RecordTracks.h @@ -36,6 +36,7 @@ public: void destroy(); + void invalidate(); // clear the buffer overflow flag void clearOverflow() { mOverflow = false; } // set the buffer overflow flag and return previous value diff --git a/services/audioflinger/ServiceUtilities.cpp b/services/audioflinger/ServiceUtilities.cpp index 9ee513b..152455d 100644 --- a/services/audioflinger/ServiceUtilities.cpp +++ b/services/audioflinger/ServiceUtilities.cpp @@ -43,6 +43,13 @@ bool captureAudioOutputAllowed() { return ok; } +bool captureHotwordAllowed() { + static const String16 sCaptureHotwordAllowed("android.permission.CAPTURE_AUDIO_HOTWORD"); + bool ok = checkCallingPermission(sCaptureHotwordAllowed); + if (!ok) ALOGE("android.permission.CAPTURE_AUDIO_HOTWORD"); + return ok; +} + bool settingsAllowed() { if (getpid_cached == IPCThreadState::self()->getCallingPid()) return true; static const String16 sAudioSettings("android.permission.MODIFY_AUDIO_SETTINGS"); diff --git a/services/audioflinger/ServiceUtilities.h b/services/audioflinger/ServiceUtilities.h index 175cd28..531bc56 100644 --- a/services/audioflinger/ServiceUtilities.h +++ b/services/audioflinger/ServiceUtilities.h @@ -22,6 +22,7 @@ extern pid_t getpid_cached; bool recordingAllowed(); bool captureAudioOutputAllowed(); +bool captureHotwordAllowed(); bool settingsAllowed(); bool dumpAllowed(); diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index e35f47e..1b5a9a9 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -4451,6 +4451,10 @@ bool AudioFlinger::RecordThread::threadLoop() { Mutex::Autolock _l(mLock); + for (size_t i = 0; i < mTracks.size(); i++) { + sp track = mTracks[i]; + track->invalidate(); + } mActiveTrack.clear(); mStartStopCond.broadcast(); } diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 2042050..9103932 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -1760,6 +1760,16 @@ void AudioFlinger::RecordThread::RecordTrack::destroy() } } +void AudioFlinger::RecordThread::RecordTrack::invalidate() +{ + // FIXME should use proxy, and needs work + audio_track_cblk_t* cblk = mCblk; + android_atomic_or(CBLK_INVALID, &cblk->mFlags); + android_atomic_release_store(0x40000000, &cblk->mFutex); + // client is not in server, so FUTEX_WAKE is needed instead of FUTEX_WAKE_PRIVATE + (void) __futex_syscall3(&cblk->mFutex, FUTEX_WAKE, INT_MAX); +} + /*static*/ void AudioFlinger::RecordThread::RecordTrack::appendDumpHeader(String8& result) { -- cgit v1.1 From 0c72b24f91c68442eb374bd1b338c394105b8262 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 11 Sep 2013 09:14:16 -0700 Subject: Fix AudioTrack shared memory leak Bug: 2801375 Change-Id: I50e15164fe310f69ea019dca5b49171a02bc6992 --- services/audioflinger/PlaybackTracks.h | 5 ++++- services/audioflinger/Tracks.cpp | 10 ++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index 0308b99..f7ad6b1 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -117,7 +117,10 @@ protected: enum 
{FS_INVALID, FS_FILLING, FS_FILLED, FS_ACTIVE}; mutable uint8_t mFillingUpStatus; int8_t mRetryCount; - const sp mSharedBuffer; + + // see comment at AudioFlinger::PlaybackThread::Track::~Track for why this can't be const + sp mSharedBuffer; + bool mResetDone; const audio_stream_type_t mStreamType; int mName; // track name on the normal mixer, diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 6002aa3..cd54950 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -364,6 +364,16 @@ AudioFlinger::PlaybackThread::Track::Track( AudioFlinger::PlaybackThread::Track::~Track() { ALOGV("PlaybackThread::Track destructor"); + + // The destructor would clear mSharedBuffer, + // but it will not push the decremented reference count, + // leaving the client's IMemory dangling indefinitely. + // This prevents that leak. + if (mSharedBuffer != 0) { + mSharedBuffer.clear(); + // flush the binder command buffer + IPCThreadState::self()->flushCommands(); + } } void AudioFlinger::PlaybackThread::Track::destroy() -- cgit v1.1 From 8973c0439984f85870dffa7a100580271933c964 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 11 Sep 2013 14:35:16 -0700 Subject: Fix SoundPool.play() looping This is done by configuring SoundPool for shared memory and fast track. Previously SoundPool used a streaming track, and looping in streaming mode relied on the ability to loop the most recently enqueued data. That 'feature' was lost in the new implementation of streaming, so we're now switching from streaming mode to shared memory mode. Shared memory mode had always been desired, but was blocked by bug 2801375 which is fixed now. Bug: 10171337 Change-Id: I2a938e3ffafa2a74d5210b4198b50db20ad5da0e --- media/libmedia/SoundPool.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index 7f10e05..0985164 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -18,7 +18,7 @@ #define LOG_TAG "SoundPool" #include -//#define USE_SHARED_MEM_BUFFER +#define USE_SHARED_MEM_BUFFER #include #include @@ -602,7 +602,7 @@ void SoundChannel::play(const sp& sample, int nextChannelID, float leftV // do not create a new audio track if current track is compatible with sample parameters #ifdef USE_SHARED_MEM_BUFFER newTrack = new AudioTrack(streamType, sampleRate, sample->format(), - channels, sample->getIMemory(), AUDIO_OUTPUT_FLAG_NONE, callback, userData); + channels, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData); #else newTrack = new AudioTrack(streamType, sampleRate, sample->format(), channels, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData, -- cgit v1.1 From a911f51c21430ac92f1d796b2338878fd98382e9 Mon Sep 17 00:00:00 2001 From: Chong Zhang Date: Fri, 6 Sep 2013 15:34:50 -0700 Subject: Pass subtitle track properties to getTrackInfo Bug: 10326117 Change-Id: I15fcc49ad02e26d7cc92e82ee670bafca62a09a7 --- media/libstagefright/httplive/M3UParser.cpp | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp index bc6d629..243888c 100644 --- a/media/libstagefright/httplive/M3UParser.cpp +++ b/media/libstagefright/httplive/M3UParser.cpp @@ -200,6 +200,13 @@ void M3UParser::MediaGroup::getTrackInfo(Parcel* reply) const { const Media &item = mMediaItems.itemAt(i); const char *lang = item.mLanguage.empty() ? 
"und" : item.mLanguage.c_str(); reply->writeString16(String16(lang)); + + if (mType == TYPE_SUBS) { + // TO-DO: pass in a MediaFormat instead + reply->writeInt32(!!(item.mFlags & MediaGroup::FLAG_AUTOSELECT)); + reply->writeInt32(!!(item.mFlags & MediaGroup::FLAG_DEFAULT)); + reply->writeInt32(!!(item.mFlags & MediaGroup::FLAG_FORCED)); + } } } -- cgit v1.1 From 4b7069dac546ad21cf62ca6132d50ea41857d08e Mon Sep 17 00:00:00 2001 From: Chong Zhang Date: Wed, 11 Sep 2013 12:52:43 -0700 Subject: Add FLAG_CAN_SEEK_BACKWARD and FLAG_CAN_SEEK_FORWARD see flags Also update seek flag in NuPlayerDriver, otherwise MediaPlayer will get wrong flags. Bug: 10676387 Change-Id: Ice30f27a9a04e37b4718d26228a407fea8d9e4fc --- media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 5 +++++ media/libmediaplayerservice/nuplayer/RTSPSource.cpp | 9 ++++----- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index e1735fa..750287f 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -1396,6 +1396,11 @@ void NuPlayer::onSourceNotify(const sp &msg) { uint32_t flags; CHECK(msg->findInt32("flags", (int32_t *)&flags)); + sp driver = mDriver.promote(); + if (driver != NULL) { + driver->notifyFlagsChanged(flags); + } + if ((mSourceFlags & Source::FLAG_DYNAMIC_DURATION) && (!(flags & Source::FLAG_DYNAMIC_DURATION))) { cancelPollDuration(); diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp index 3385a19..18cf6d1 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp @@ -358,11 +358,10 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp &msg) { uint32_t flags = 0; if (mHandler->isSeekable()) { - flags = FLAG_CAN_PAUSE | FLAG_CAN_SEEK; - - // Seeking 10secs forward or backward is a very expensive - // operation for rtsp, so let's not enable that. - // The user can always use the seek bar. + flags = FLAG_CAN_PAUSE + | FLAG_CAN_SEEK + | FLAG_CAN_SEEK_BACKWARD + | FLAG_CAN_SEEK_FORWARD; } notifyFlagsChanged(flags); -- cgit v1.1 From 34486800a022857b45c3882624449980c5cb6907 Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Wed, 11 Sep 2013 18:40:44 -0700 Subject: Fix MediaDrm cts tests Moving the build of the mock MediaDrm plugin back to frameworks/av since it can't be installed by cts. b/10668350 Change-Id: Id662bdb0775389b458f86a299392217f2d959422 --- drm/mediadrm/plugins/mock/Android.mk | 38 ++ drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp | 705 ++++++++++++++++++++++ drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h | 156 +++++ 3 files changed, 899 insertions(+) create mode 100644 drm/mediadrm/plugins/mock/Android.mk create mode 100644 drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp create mode 100644 drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h diff --git a/drm/mediadrm/plugins/mock/Android.mk b/drm/mediadrm/plugins/mock/Android.mk new file mode 100644 index 0000000..ada23a2 --- /dev/null +++ b/drm/mediadrm/plugins/mock/Android.mk @@ -0,0 +1,38 @@ +# +# Copyright (C) 2013 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + MockDrmCryptoPlugin.cpp + +LOCAL_MODULE := libmockdrmcryptoplugin + +LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_SHARED_LIBRARIES)/mediadrm + +LOCAL_SHARED_LIBRARIES := \ + libutils liblog + +LOCAL_C_INCLUDES += \ + $(TOP)/frameworks/av/include \ + $(TOP)/frameworks/native/include/media + +# Set the following flag to enable the decryption passthru flow +#LOCAL_CFLAGS += -DENABLE_PASSTHRU_DECRYPTION + +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp new file mode 100644 index 0000000..f2cadf7 --- /dev/null +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp @@ -0,0 +1,705 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
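
One detail worth noting in the factory code that follows: a parameter declared as const uint8_t uuid[16] decays to a pointer, so sizeof(uuid) inside isCryptoSchemeSupported evaluates to the size of a pointer rather than 16, and the memcmp only inspects the first 4 or 8 bytes of the UUID. The standalone sketch below demonstrates the effect and a comparison over the full 16 bytes; kUuid and the helper names are stand-ins, not code from the patch.

```cpp
// Standalone illustration: sizeof() on an array-typed parameter measures the
// pointer it decays to, so the comparison must use the array length explicitly.
// (Most compilers emit a -Wsizeof-array-argument warning for the first helper.)
#include <cstdint>
#include <cstdio>
#include <cstring>

static const uint8_t kUuid[16] = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
                                  0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10};

// Mirrors the plugin's signature: 'uuid' is really a const uint8_t*.
bool matchesPointerSized(const uint8_t uuid[16]) {
    return memcmp(uuid, kUuid, sizeof(uuid)) == 0;   // compares only 4 or 8 bytes
}

bool matchesAllBytes(const uint8_t uuid[16]) {
    return memcmp(uuid, kUuid, 16) == 0;             // compares the full UUID
}

int main() {
    uint8_t other[16];
    memcpy(other, kUuid, 16);
    other[12] ^= 0xff;   // differs only in a byte beyond sizeof(pointer)

    std::printf("pointer-sized compare says match: %d\n", matchesPointerSized(other)); // 1 (wrong)
    std::printf("full 16-byte compare says match : %d\n", matchesAllBytes(other));     // 0
    return 0;
}
```
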
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MockDrmCryptoPlugin" +#include + + +#include "drm/DrmAPI.h" +#include "MockDrmCryptoPlugin.h" +#include "media/stagefright/MediaErrors.h" + +using namespace android; + +// Shared library entry point +DrmFactory *createDrmFactory() +{ + return new MockDrmFactory(); +} + +// Shared library entry point +CryptoFactory *createCryptoFactory() +{ + return new MockCryptoFactory(); +} + +const uint8_t mock_uuid[16] = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10}; + +namespace android { + + // MockDrmFactory + bool MockDrmFactory::isCryptoSchemeSupported(const uint8_t uuid[16]) + { + return (!memcmp(uuid, mock_uuid, sizeof(uuid))); + } + + bool MockDrmFactory::isContentTypeSupported(const String8 &mimeType) + { + if (mimeType != "video/mp4") { + return false; + } + return true; + } + + status_t MockDrmFactory::createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin) + { + *plugin = new MockDrmPlugin(); + return OK; + } + + // MockCryptoFactory + bool MockCryptoFactory::isCryptoSchemeSupported(const uint8_t uuid[16]) const + { + return (!memcmp(uuid, mock_uuid, sizeof(uuid))); + } + + status_t MockCryptoFactory::createPlugin(const uint8_t uuid[16], const void *data, + size_t size, CryptoPlugin **plugin) + { + *plugin = new MockCryptoPlugin(); + return OK; + } + + + // MockDrmPlugin methods + + status_t MockDrmPlugin::openSession(Vector &sessionId) + { + const size_t kSessionIdSize = 8; + + Mutex::Autolock lock(mLock); + for (size_t i = 0; i < kSessionIdSize / sizeof(long); i++) { + long r = random(); + sessionId.appendArray((uint8_t *)&r, sizeof(long)); + } + mSessions.add(sessionId); + + ALOGD("MockDrmPlugin::openSession() -> %s", vectorToString(sessionId).string()); + return OK; + } + + status_t MockDrmPlugin::closeSession(Vector const &sessionId) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::closeSession(%s)", vectorToString(sessionId).string()); + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + mSessions.removeAt(index); + return OK; + } + + + status_t MockDrmPlugin::getKeyRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, KeyType keyType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::getKeyRequest(sessionId=%s, initData=%s, mimeType=%s" + ", keyType=%d, optionalParameters=%s))", + vectorToString(sessionId).string(), vectorToString(initData).string(), mimeType.string(), + keyType, stringMapToString(optionalParameters).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] initData -> mock-initdata + // string mimeType -> mock-mimetype + // string keyType -> mock-keytype + // string optionalParameters -> mock-optparams formatted as {key1,value1},{key2,value2} + + mByteArrayProperties.add(String8("mock-initdata"), initData); + mStringProperties.add(String8("mock-mimetype"), mimeType); + + String8 keyTypeStr; + keyTypeStr.appendFormat("%d", (int)keyType); + mStringProperties.add(String8("mock-keytype"), keyTypeStr); + + String8 params; + for (size_t i = 0; i < optionalParameters.size(); i++) { + params.appendFormat("%s{%s,%s}", i ? 
"," : "", + optionalParameters.keyAt(i).string(), + optionalParameters.valueAt(i).string()); + } + mStringProperties.add(String8("mock-optparams"), params); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-request -> request + // string mock-default-url -> defaultUrl + + index = mByteArrayProperties.indexOfKey(String8("mock-request")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + request = mByteArrayProperties.valueAt(index); + } + + index = mStringProperties.indexOfKey(String8("mock-defaultUrl")); + if (index < 0) { + ALOGD("Missing 'mock-defaultUrl' parameter for mock"); + return BAD_VALUE; + } else { + defaultUrl = mStringProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::provideKeyResponse(Vector const &sessionId, + Vector const &response, + Vector &keySetId) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::provideKeyResponse(sessionId=%s, response=%s)", + vectorToString(sessionId).string(), vectorToString(response).string()); + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + if (response.size() == 0) { + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] response -> mock-response + mByteArrayProperties.add(String8("mock-response"), response); + + const size_t kKeySetIdSize = 8; + + for (size_t i = 0; i < kKeySetIdSize / sizeof(long); i++) { + long r = random(); + keySetId.appendArray((uint8_t *)&r, sizeof(long)); + } + mKeySets.add(keySetId); + + return OK; + } + + status_t MockDrmPlugin::removeKeys(Vector const &keySetId) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::removeKeys(keySetId=%s)", + vectorToString(keySetId).string()); + + ssize_t index = findKeySet(keySetId); + if (index == kNotFound) { + ALOGD("Invalid keySetId"); + return BAD_VALUE; + } + mKeySets.removeAt(index); + + return OK; + } + + status_t MockDrmPlugin::restoreKeys(Vector const &sessionId, + Vector const &keySetId) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::restoreKeys(sessionId=%s, keySetId=%s)", + vectorToString(sessionId).string(), + vectorToString(keySetId).string()); + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + index = findKeySet(keySetId); + if (index == kNotFound) { + ALOGD("Invalid keySetId"); + return BAD_VALUE; + } + + return OK; + } + + status_t MockDrmPlugin::queryKeyStatus(Vector const &sessionId, + KeyedVector &infoMap) const + { + ALOGD("MockDrmPlugin::queryKeyStatus(sessionId=%s)", + vectorToString(sessionId).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + infoMap.add(String8("purchaseDuration"), String8("1000")); + infoMap.add(String8("licenseDuration"), String8("100")); + return OK; + } + + status_t MockDrmPlugin::getProvisionRequest(Vector &request, + String8 &defaultUrl) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::getProvisionRequest()"); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-request -> request + // string mock-default-url -> defaultUrl + + ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-request")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + request = 
mByteArrayProperties.valueAt(index); + } + + index = mStringProperties.indexOfKey(String8("mock-defaultUrl")); + if (index < 0) { + ALOGD("Missing 'mock-defaultUrl' parameter for mock"); + return BAD_VALUE; + } else { + defaultUrl = mStringProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::provideProvisionResponse(Vector const &response) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::provideProvisionResponse(%s)", + vectorToString(response).string()); + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] response -> mock-response + + mByteArrayProperties.add(String8("mock-response"), response); + return OK; + } + + status_t MockDrmPlugin::getSecureStops(List > &secureStops) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::getSecureStops()"); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-secure-stop1 -> first secure stop in list + // byte[] mock-secure-stop2 -> second secure stop in list + + Vector ss1, ss2; + ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-secure-stop1")); + if (index < 0) { + ALOGD("Missing 'mock-secure-stop1' parameter for mock"); + return BAD_VALUE; + } else { + ss1 = mByteArrayProperties.valueAt(index); + } + + index = mByteArrayProperties.indexOfKey(String8("mock-secure-stop2")); + if (index < 0) { + ALOGD("Missing 'mock-secure-stop2' parameter for mock"); + return BAD_VALUE; + } else { + ss2 = mByteArrayProperties.valueAt(index); + } + + secureStops.push_back(ss1); + secureStops.push_back(ss2); + return OK; + } + + status_t MockDrmPlugin::releaseSecureStops(Vector const &ssRelease) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::releaseSecureStops(%s)", + vectorToString(ssRelease).string()); + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] secure-stop-release -> mock-ssrelease + mByteArrayProperties.add(String8("mock-ssrelease"), ssRelease); + + return OK; + } + + status_t MockDrmPlugin::getPropertyString(String8 const &name, String8 &value) const + { + ALOGD("MockDrmPlugin::getPropertyString(name=%s)", name.string()); + ssize_t index = mStringProperties.indexOfKey(name); + if (index < 0) { + ALOGD("no property for '%s'", name.string()); + return BAD_VALUE; + } + value = mStringProperties.valueAt(index); + return OK; + } + + status_t MockDrmPlugin::getPropertyByteArray(String8 const &name, + Vector &value) const + { + ALOGD("MockDrmPlugin::getPropertyByteArray(name=%s)", name.string()); + ssize_t index = mByteArrayProperties.indexOfKey(name); + if (index < 0) { + ALOGD("no property for '%s'", name.string()); + return BAD_VALUE; + } + value = mByteArrayProperties.valueAt(index); + return OK; + } + + status_t MockDrmPlugin::setPropertyString(String8 const &name, + String8 const &value) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::setPropertyString(name=%s, value=%s)", + name.string(), value.string()); + + if (name == "mock-send-event") { + unsigned code, extra; + sscanf(value.string(), "%d %d", &code, &extra); + DrmPlugin::EventType eventType = (DrmPlugin::EventType)code; + + Vector const *pSessionId = NULL; + ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-event-session-id")); + if (index >= 0) { + pSessionId = &mByteArrayProperties[index]; + } + + Vector const *pData = NULL; + index = mByteArrayProperties.indexOfKey(String8("mock-event-data")); + if (index >= 0) { + pData = &mByteArrayProperties[index]; + } + ALOGD("sending event from 
mock drm plugin: %d %d %s %s", + (int)code, extra, pSessionId ? vectorToString(*pSessionId) : "{}", + pData ? vectorToString(*pData) : "{}"); + + sendEvent(eventType, extra, pSessionId, pData); + } else { + mStringProperties.add(name, value); + } + return OK; + } + + status_t MockDrmPlugin::setPropertyByteArray(String8 const &name, + Vector const &value) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::setPropertyByteArray(name=%s, value=%s)", + name.string(), vectorToString(value).string()); + mByteArrayProperties.add(name, value); + return OK; + } + + status_t MockDrmPlugin::setCipherAlgorithm(Vector const &sessionId, + String8 const &algorithm) + { + Mutex::Autolock lock(mLock); + + ALOGD("MockDrmPlugin::setCipherAlgorithm(sessionId=%s, algorithm=%s)", + vectorToString(sessionId).string(), algorithm.string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + if (algorithm == "AES/CBC/NoPadding") { + return OK; + } + return BAD_VALUE; + } + + status_t MockDrmPlugin::setMacAlgorithm(Vector const &sessionId, + String8 const &algorithm) + { + Mutex::Autolock lock(mLock); + + ALOGD("MockDrmPlugin::setMacAlgorithm(sessionId=%s, algorithm=%s)", + vectorToString(sessionId).string(), algorithm.string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + if (algorithm == "HmacSHA256") { + return OK; + } + return BAD_VALUE; + } + + status_t MockDrmPlugin::encrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::encrypt(sessionId=%s, keyId=%s, input=%s, iv=%s)", + vectorToString(sessionId).string(), + vectorToString(keyId).string(), + vectorToString(input).string(), + vectorToString(iv).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] keyId -> mock-keyid + // byte[] input -> mock-input + // byte[] iv -> mock-iv + mByteArrayProperties.add(String8("mock-keyid"), keyId); + mByteArrayProperties.add(String8("mock-input"), input); + mByteArrayProperties.add(String8("mock-iv"), iv); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-output -> output + index = mByteArrayProperties.indexOfKey(String8("mock-output")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + output = mByteArrayProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::decrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::decrypt(sessionId=%s, keyId=%s, input=%s, iv=%s)", + vectorToString(sessionId).string(), + vectorToString(keyId).string(), + vectorToString(input).string(), + vectorToString(iv).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] keyId -> mock-keyid + // byte[] input -> mock-input + // byte[] iv -> mock-iv + mByteArrayProperties.add(String8("mock-keyid"), keyId); + mByteArrayProperties.add(String8("mock-input"), input); + 
mByteArrayProperties.add(String8("mock-iv"), iv); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-output -> output + index = mByteArrayProperties.indexOfKey(String8("mock-output")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + output = mByteArrayProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::sign(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector &signature) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::sign(sessionId=%s, keyId=%s, message=%s)", + vectorToString(sessionId).string(), + vectorToString(keyId).string(), + vectorToString(message).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] keyId -> mock-keyid + // byte[] message -> mock-message + mByteArrayProperties.add(String8("mock-keyid"), keyId); + mByteArrayProperties.add(String8("mock-message"), message); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-signature -> signature + index = mByteArrayProperties.indexOfKey(String8("mock-signature")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + signature = mByteArrayProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::verify(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector const &signature, + bool &match) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::verify(sessionId=%s, keyId=%s, message=%s, signature=%s)", + vectorToString(sessionId).string(), + vectorToString(keyId).string(), + vectorToString(message).string(), + vectorToString(signature).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] keyId -> mock-keyid + // byte[] message -> mock-message + // byte[] signature -> mock-signature + mByteArrayProperties.add(String8("mock-keyid"), keyId); + mByteArrayProperties.add(String8("mock-message"), message); + mByteArrayProperties.add(String8("mock-signature"), signature); + + // Properties used in mock test, set by cts test app returned from mock plugin + // String mock-match "1" or "0" -> match + index = mStringProperties.indexOfKey(String8("mock-match")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + match = atol(mStringProperties.valueAt(index).string()); + } + return OK; + } + + ssize_t MockDrmPlugin::findSession(Vector const &sessionId) const + { + ALOGD("findSession: nsessions=%d, size=%d", mSessions.size(), sessionId.size()); + for (size_t i = 0; i < mSessions.size(); ++i) { + if (memcmp(mSessions[i].array(), sessionId.array(), sessionId.size()) == 0) { + return i; + } + } + return kNotFound; + } + + ssize_t MockDrmPlugin::findKeySet(Vector const &keySetId) const + { + ALOGD("findKeySet: nkeySets=%d, size=%d", mKeySets.size(), keySetId.size()); + for (size_t i = 0; i < mKeySets.size(); ++i) { + if (memcmp(mKeySets[i].array(), keySetId.array(), keySetId.size()) == 0) { + return i; + } + } + return kNotFound; + } + + + // Conversion utilities + String8 MockDrmPlugin::vectorToString(Vector const &vector) const + { + 
return arrayToString(vector.array(), vector.size()); + } + + String8 MockDrmPlugin::arrayToString(uint8_t const *array, size_t len) const + { + String8 result("{ "); + for (size_t i = 0; i < len; i++) { + result.appendFormat("0x%02x ", array[i]); + } + result += "}"; + return result; + } + + String8 MockDrmPlugin::stringMapToString(KeyedVector map) const + { + String8 result("{ "); + for (size_t i = 0; i < map.size(); i++) { + result.appendFormat("%s{name=%s, value=%s}", i > 0 ? ", " : "", + map.keyAt(i).string(), map.valueAt(i).string()); + } + return result + " }"; + } + + bool operator<(Vector const &lhs, Vector const &rhs) { + return lhs.size() < rhs.size() || (memcmp(lhs.array(), rhs.array(), lhs.size()) < 0); + } + + // + // Crypto Plugin + // + + bool MockCryptoPlugin::requiresSecureDecoderComponent(const char *mime) const + { + ALOGD("MockCryptoPlugin::requiresSecureDecoderComponent(mime=%s)", mime); + return false; + } + + ssize_t + MockCryptoPlugin::decrypt(bool secure, const uint8_t key[16], const uint8_t iv[16], + Mode mode, const void *srcPtr, const SubSample *subSamples, + size_t numSubSamples, void *dstPtr, AString *errorDetailMsg) + { + ALOGD("MockCryptoPlugin::decrypt(secure=%d, key=%s, iv=%s, mode=%d, src=%p, " + "subSamples=%s, dst=%p)", + (int)secure, + arrayToString(key, sizeof(key)).string(), + arrayToString(iv, sizeof(iv)).string(), + (int)mode, srcPtr, + subSamplesToString(subSamples, numSubSamples).string(), + dstPtr); + return OK; + } + + // Conversion utilities + String8 MockCryptoPlugin::arrayToString(uint8_t const *array, size_t len) const + { + String8 result("{ "); + for (size_t i = 0; i < len; i++) { + result.appendFormat("0x%02x ", array[i]); + } + result += "}"; + return result; + } + + String8 MockCryptoPlugin::subSamplesToString(SubSample const *subSamples, + size_t numSubSamples) const + { + String8 result; + for (size_t i = 0; i < numSubSamples; i++) { + result.appendFormat("[%d] {clear:%d, encrypted:%d} ", i, + subSamples[i].mNumBytesOfClearData, + subSamples[i].mNumBytesOfEncryptedData); + } + return result; + } + +}; diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h new file mode 100644 index 0000000..2297f9b --- /dev/null +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h @@ -0,0 +1,156 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "drm/DrmAPI.h" +#include "hardware/CryptoAPI.h" + +extern "C" { + android::DrmFactory *createDrmFactory(); + android::CryptoFactory *createCryptoFactory(); +} + +namespace android { + + class MockDrmFactory : public DrmFactory { + public: + MockDrmFactory() {} + virtual ~MockDrmFactory() {} + + bool isCryptoSchemeSupported(const uint8_t uuid[16]); + bool isContentTypeSupported(const String8 &mimeType); + status_t createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin); + }; + + class MockCryptoFactory : public CryptoFactory { + public: + MockCryptoFactory() {} + virtual ~MockCryptoFactory() {} + + bool isCryptoSchemeSupported(const uint8_t uuid[16]) const; + status_t createPlugin( + const uint8_t uuid[16], const void *data, size_t size, + CryptoPlugin **plugin); + }; + + + + class MockDrmPlugin : public DrmPlugin { + public: + MockDrmPlugin() {} + virtual ~MockDrmPlugin() {} + + // from DrmPlugin + status_t openSession(Vector &sessionId); + status_t closeSession(Vector const &sessionId); + + status_t getKeyRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, KeyType keyType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl); + + status_t provideKeyResponse(Vector const &sessionId, + Vector const &response, + Vector &keySetId); + + status_t removeKeys(Vector const &keySetId); + + status_t restoreKeys(Vector const &sessionId, + Vector const &keySetId); + + status_t queryKeyStatus(Vector const &sessionId, + KeyedVector &infoMap) const; + + status_t getProvisionRequest(Vector &request, + String8 &defaultUrl); + + status_t provideProvisionResponse(Vector const &response); + + status_t getSecureStops(List > &secureStops); + status_t releaseSecureStops(Vector const &ssRelease); + + status_t getPropertyString(String8 const &name, String8 &value ) const; + status_t getPropertyByteArray(String8 const &name, + Vector &value ) const; + + status_t setPropertyString(String8 const &name, + String8 const &value ); + status_t setPropertyByteArray(String8 const &name, + Vector const &value ); + + status_t setCipherAlgorithm(Vector const &sessionId, + String8 const &algorithm); + + status_t setMacAlgorithm(Vector const &sessionId, + String8 const &algorithm); + + status_t encrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output); + + status_t decrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output); + + status_t sign(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector &signature); + + status_t verify(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector const &signature, + bool &match); + + private: + String8 vectorToString(Vector const &vector) const; + String8 arrayToString(uint8_t const *array, size_t len) const; + String8 stringMapToString(KeyedVector map) const; + + SortedVector > mSessions; + SortedVector > mKeySets; + + static const ssize_t kNotFound = -1; + ssize_t findSession(Vector const &sessionId) const; + ssize_t findKeySet(Vector const &keySetId) const; + + Mutex mLock; + KeyedVector mStringProperties; + KeyedVector > mByteArrayProperties; + }; + + + class MockCryptoPlugin : public CryptoPlugin { + + bool requiresSecureDecoderComponent(const char *mime) const; + + ssize_t decrypt(bool secure, + const uint8_t key[16], const uint8_t iv[16], + Mode mode, const void *srcPtr, + const SubSample *subSamples, size_t numSubSamples, + 
void *dstPtr, AString *errorDetailMsg); + private: + String8 subSamplesToString(CryptoPlugin::SubSample const *subSamples, size_t numSubSamples) const; + String8 arrayToString(uint8_t const *array, size_t len) const; + }; +}; -- cgit v1.1 From c38fcfba95f711e5738e4c72bd5499317a2f30d9 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Thu, 12 Sep 2013 09:12:54 -0700 Subject: ACodec: fix flush/resume for decoder-output-meta-data mode When in decoder-output-meta-data mode, ACodec does not hold onto buffers, but they are either with the native window, or with the component/client. However, for flushing we did not release the discarded buffers back to native window (this makes sense because they will be resubmitted shortly.) This logic can be handled by the normal resubmission. Change-Id: Ic472b386422251515ef12f426e187f208f14decc Signed-off-by: Lajos Molnar Bug: 10621959 Bug: 10192533 --- media/libstagefright/ACodec.cpp | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 2e55c4f..5c3abd0 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -2366,6 +2366,10 @@ void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { while (countBuffersOwnedByNativeWindow() > (size_t)minUndequeuedBufs && dequeueBufferFromNativeWindow() != NULL) { + // these buffers will be submitted as regular buffers; account for this + if (mStoreMetaDataInOutputBuffers && mMetaDataBuffersToSubmit > 0) { + --mMetaDataBuffersToSubmit; + } } } @@ -4000,10 +4004,9 @@ void ACodec::ExecutingState::submitRegularOutputBuffers() { } void ACodec::ExecutingState::submitOutputBuffers() { + submitRegularOutputBuffers(); if (mCodec->mStoreMetaDataInOutputBuffers) { submitOutputMetaBuffers(); - } else { - submitRegularOutputBuffers(); } } -- cgit v1.1 From e360d27c6ddba92ca13a75fe673228195f2618c0 Mon Sep 17 00:00:00 2001 From: Jeff Tinker Date: Wed, 11 Sep 2013 18:40:44 -0700 Subject: Fix MediaDrm cts tests Moving the build of the mock MediaDrm plugin back to frameworks/av since it can't be installed by cts. b/10668350 Change-Id: Id662bdb0775389b458f86a299392217f2d959422 --- drm/mediadrm/plugins/mock/Android.mk | 38 ++ drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp | 705 ++++++++++++++++++++++ drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h | 156 +++++ 3 files changed, 899 insertions(+) create mode 100644 drm/mediadrm/plugins/mock/Android.mk create mode 100644 drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp create mode 100644 drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h diff --git a/drm/mediadrm/plugins/mock/Android.mk b/drm/mediadrm/plugins/mock/Android.mk new file mode 100644 index 0000000..ada23a2 --- /dev/null +++ b/drm/mediadrm/plugins/mock/Android.mk @@ -0,0 +1,38 @@ +# +# Copyright (C) 2013 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + MockDrmCryptoPlugin.cpp + +LOCAL_MODULE := libmockdrmcryptoplugin + +LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_SHARED_LIBRARIES)/mediadrm + +LOCAL_SHARED_LIBRARIES := \ + libutils liblog + +LOCAL_C_INCLUDES += \ + $(TOP)/frameworks/av/include \ + $(TOP)/frameworks/native/include/media + +# Set the following flag to enable the decryption passthru flow +#LOCAL_CFLAGS += -DENABLE_PASSTHRU_DECRYPTION + +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp new file mode 100644 index 0000000..f2cadf7 --- /dev/null +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp @@ -0,0 +1,705 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MockDrmCryptoPlugin" +#include + + +#include "drm/DrmAPI.h" +#include "MockDrmCryptoPlugin.h" +#include "media/stagefright/MediaErrors.h" + +using namespace android; + +// Shared library entry point +DrmFactory *createDrmFactory() +{ + return new MockDrmFactory(); +} + +// Shared library entry point +CryptoFactory *createCryptoFactory() +{ + return new MockCryptoFactory(); +} + +const uint8_t mock_uuid[16] = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10}; + +namespace android { + + // MockDrmFactory + bool MockDrmFactory::isCryptoSchemeSupported(const uint8_t uuid[16]) + { + return (!memcmp(uuid, mock_uuid, sizeof(uuid))); + } + + bool MockDrmFactory::isContentTypeSupported(const String8 &mimeType) + { + if (mimeType != "video/mp4") { + return false; + } + return true; + } + + status_t MockDrmFactory::createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin) + { + *plugin = new MockDrmPlugin(); + return OK; + } + + // MockCryptoFactory + bool MockCryptoFactory::isCryptoSchemeSupported(const uint8_t uuid[16]) const + { + return (!memcmp(uuid, mock_uuid, sizeof(uuid))); + } + + status_t MockCryptoFactory::createPlugin(const uint8_t uuid[16], const void *data, + size_t size, CryptoPlugin **plugin) + { + *plugin = new MockCryptoPlugin(); + return OK; + } + + + // MockDrmPlugin methods + + status_t MockDrmPlugin::openSession(Vector &sessionId) + { + const size_t kSessionIdSize = 8; + + Mutex::Autolock lock(mLock); + for (size_t i = 0; i < kSessionIdSize / sizeof(long); i++) { + long r = random(); + sessionId.appendArray((uint8_t *)&r, sizeof(long)); + } + mSessions.add(sessionId); + + ALOGD("MockDrmPlugin::openSession() -> %s", vectorToString(sessionId).string()); + return OK; + } + + status_t MockDrmPlugin::closeSession(Vector const &sessionId) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::closeSession(%s)", vectorToString(sessionId).string()); + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + 
mSessions.removeAt(index); + return OK; + } + + + status_t MockDrmPlugin::getKeyRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, KeyType keyType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::getKeyRequest(sessionId=%s, initData=%s, mimeType=%s" + ", keyType=%d, optionalParameters=%s))", + vectorToString(sessionId).string(), vectorToString(initData).string(), mimeType.string(), + keyType, stringMapToString(optionalParameters).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] initData -> mock-initdata + // string mimeType -> mock-mimetype + // string keyType -> mock-keytype + // string optionalParameters -> mock-optparams formatted as {key1,value1},{key2,value2} + + mByteArrayProperties.add(String8("mock-initdata"), initData); + mStringProperties.add(String8("mock-mimetype"), mimeType); + + String8 keyTypeStr; + keyTypeStr.appendFormat("%d", (int)keyType); + mStringProperties.add(String8("mock-keytype"), keyTypeStr); + + String8 params; + for (size_t i = 0; i < optionalParameters.size(); i++) { + params.appendFormat("%s{%s,%s}", i ? "," : "", + optionalParameters.keyAt(i).string(), + optionalParameters.valueAt(i).string()); + } + mStringProperties.add(String8("mock-optparams"), params); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-request -> request + // string mock-default-url -> defaultUrl + + index = mByteArrayProperties.indexOfKey(String8("mock-request")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + request = mByteArrayProperties.valueAt(index); + } + + index = mStringProperties.indexOfKey(String8("mock-defaultUrl")); + if (index < 0) { + ALOGD("Missing 'mock-defaultUrl' parameter for mock"); + return BAD_VALUE; + } else { + defaultUrl = mStringProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::provideKeyResponse(Vector const &sessionId, + Vector const &response, + Vector &keySetId) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::provideKeyResponse(sessionId=%s, response=%s)", + vectorToString(sessionId).string(), vectorToString(response).string()); + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + if (response.size() == 0) { + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] response -> mock-response + mByteArrayProperties.add(String8("mock-response"), response); + + const size_t kKeySetIdSize = 8; + + for (size_t i = 0; i < kKeySetIdSize / sizeof(long); i++) { + long r = random(); + keySetId.appendArray((uint8_t *)&r, sizeof(long)); + } + mKeySets.add(keySetId); + + return OK; + } + + status_t MockDrmPlugin::removeKeys(Vector const &keySetId) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::removeKeys(keySetId=%s)", + vectorToString(keySetId).string()); + + ssize_t index = findKeySet(keySetId); + if (index == kNotFound) { + ALOGD("Invalid keySetId"); + return BAD_VALUE; + } + mKeySets.removeAt(index); + + return OK; + } + + status_t MockDrmPlugin::restoreKeys(Vector const &sessionId, + Vector const &keySetId) + { + Mutex::Autolock lock(mLock); + 
ALOGD("MockDrmPlugin::restoreKeys(sessionId=%s, keySetId=%s)", + vectorToString(sessionId).string(), + vectorToString(keySetId).string()); + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + index = findKeySet(keySetId); + if (index == kNotFound) { + ALOGD("Invalid keySetId"); + return BAD_VALUE; + } + + return OK; + } + + status_t MockDrmPlugin::queryKeyStatus(Vector const &sessionId, + KeyedVector &infoMap) const + { + ALOGD("MockDrmPlugin::queryKeyStatus(sessionId=%s)", + vectorToString(sessionId).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + infoMap.add(String8("purchaseDuration"), String8("1000")); + infoMap.add(String8("licenseDuration"), String8("100")); + return OK; + } + + status_t MockDrmPlugin::getProvisionRequest(Vector &request, + String8 &defaultUrl) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::getProvisionRequest()"); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-request -> request + // string mock-default-url -> defaultUrl + + ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-request")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + request = mByteArrayProperties.valueAt(index); + } + + index = mStringProperties.indexOfKey(String8("mock-defaultUrl")); + if (index < 0) { + ALOGD("Missing 'mock-defaultUrl' parameter for mock"); + return BAD_VALUE; + } else { + defaultUrl = mStringProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::provideProvisionResponse(Vector const &response) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::provideProvisionResponse(%s)", + vectorToString(response).string()); + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] response -> mock-response + + mByteArrayProperties.add(String8("mock-response"), response); + return OK; + } + + status_t MockDrmPlugin::getSecureStops(List > &secureStops) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::getSecureStops()"); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-secure-stop1 -> first secure stop in list + // byte[] mock-secure-stop2 -> second secure stop in list + + Vector ss1, ss2; + ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-secure-stop1")); + if (index < 0) { + ALOGD("Missing 'mock-secure-stop1' parameter for mock"); + return BAD_VALUE; + } else { + ss1 = mByteArrayProperties.valueAt(index); + } + + index = mByteArrayProperties.indexOfKey(String8("mock-secure-stop2")); + if (index < 0) { + ALOGD("Missing 'mock-secure-stop2' parameter for mock"); + return BAD_VALUE; + } else { + ss2 = mByteArrayProperties.valueAt(index); + } + + secureStops.push_back(ss1); + secureStops.push_back(ss2); + return OK; + } + + status_t MockDrmPlugin::releaseSecureStops(Vector const &ssRelease) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::releaseSecureStops(%s)", + vectorToString(ssRelease).string()); + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] secure-stop-release -> mock-ssrelease + mByteArrayProperties.add(String8("mock-ssrelease"), ssRelease); + + return OK; + } + + status_t MockDrmPlugin::getPropertyString(String8 const &name, String8 &value) const + { + ALOGD("MockDrmPlugin::getPropertyString(name=%s)", 
name.string()); + ssize_t index = mStringProperties.indexOfKey(name); + if (index < 0) { + ALOGD("no property for '%s'", name.string()); + return BAD_VALUE; + } + value = mStringProperties.valueAt(index); + return OK; + } + + status_t MockDrmPlugin::getPropertyByteArray(String8 const &name, + Vector &value) const + { + ALOGD("MockDrmPlugin::getPropertyByteArray(name=%s)", name.string()); + ssize_t index = mByteArrayProperties.indexOfKey(name); + if (index < 0) { + ALOGD("no property for '%s'", name.string()); + return BAD_VALUE; + } + value = mByteArrayProperties.valueAt(index); + return OK; + } + + status_t MockDrmPlugin::setPropertyString(String8 const &name, + String8 const &value) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::setPropertyString(name=%s, value=%s)", + name.string(), value.string()); + + if (name == "mock-send-event") { + unsigned code, extra; + sscanf(value.string(), "%d %d", &code, &extra); + DrmPlugin::EventType eventType = (DrmPlugin::EventType)code; + + Vector const *pSessionId = NULL; + ssize_t index = mByteArrayProperties.indexOfKey(String8("mock-event-session-id")); + if (index >= 0) { + pSessionId = &mByteArrayProperties[index]; + } + + Vector const *pData = NULL; + index = mByteArrayProperties.indexOfKey(String8("mock-event-data")); + if (index >= 0) { + pData = &mByteArrayProperties[index]; + } + ALOGD("sending event from mock drm plugin: %d %d %s %s", + (int)code, extra, pSessionId ? vectorToString(*pSessionId) : "{}", + pData ? vectorToString(*pData) : "{}"); + + sendEvent(eventType, extra, pSessionId, pData); + } else { + mStringProperties.add(name, value); + } + return OK; + } + + status_t MockDrmPlugin::setPropertyByteArray(String8 const &name, + Vector const &value) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::setPropertyByteArray(name=%s, value=%s)", + name.string(), vectorToString(value).string()); + mByteArrayProperties.add(name, value); + return OK; + } + + status_t MockDrmPlugin::setCipherAlgorithm(Vector const &sessionId, + String8 const &algorithm) + { + Mutex::Autolock lock(mLock); + + ALOGD("MockDrmPlugin::setCipherAlgorithm(sessionId=%s, algorithm=%s)", + vectorToString(sessionId).string(), algorithm.string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + if (algorithm == "AES/CBC/NoPadding") { + return OK; + } + return BAD_VALUE; + } + + status_t MockDrmPlugin::setMacAlgorithm(Vector const &sessionId, + String8 const &algorithm) + { + Mutex::Autolock lock(mLock); + + ALOGD("MockDrmPlugin::setMacAlgorithm(sessionId=%s, algorithm=%s)", + vectorToString(sessionId).string(), algorithm.string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + if (algorithm == "HmacSHA256") { + return OK; + } + return BAD_VALUE; + } + + status_t MockDrmPlugin::encrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::encrypt(sessionId=%s, keyId=%s, input=%s, iv=%s)", + vectorToString(sessionId).string(), + vectorToString(keyId).string(), + vectorToString(input).string(), + vectorToString(iv).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] keyId -> mock-keyid + // byte[] input -> 
mock-input + // byte[] iv -> mock-iv + mByteArrayProperties.add(String8("mock-keyid"), keyId); + mByteArrayProperties.add(String8("mock-input"), input); + mByteArrayProperties.add(String8("mock-iv"), iv); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-output -> output + index = mByteArrayProperties.indexOfKey(String8("mock-output")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + output = mByteArrayProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::decrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::decrypt(sessionId=%s, keyId=%s, input=%s, iv=%s)", + vectorToString(sessionId).string(), + vectorToString(keyId).string(), + vectorToString(input).string(), + vectorToString(iv).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] keyId -> mock-keyid + // byte[] input -> mock-input + // byte[] iv -> mock-iv + mByteArrayProperties.add(String8("mock-keyid"), keyId); + mByteArrayProperties.add(String8("mock-input"), input); + mByteArrayProperties.add(String8("mock-iv"), iv); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-output -> output + index = mByteArrayProperties.indexOfKey(String8("mock-output")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + output = mByteArrayProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::sign(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector &signature) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::sign(sessionId=%s, keyId=%s, message=%s)", + vectorToString(sessionId).string(), + vectorToString(keyId).string(), + vectorToString(message).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] keyId -> mock-keyid + // byte[] message -> mock-message + mByteArrayProperties.add(String8("mock-keyid"), keyId); + mByteArrayProperties.add(String8("mock-message"), message); + + // Properties used in mock test, set by cts test app returned from mock plugin + // byte[] mock-signature -> signature + index = mByteArrayProperties.indexOfKey(String8("mock-signature")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + signature = mByteArrayProperties.valueAt(index); + } + return OK; + } + + status_t MockDrmPlugin::verify(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector const &signature, + bool &match) + { + Mutex::Autolock lock(mLock); + ALOGD("MockDrmPlugin::verify(sessionId=%s, keyId=%s, message=%s, signature=%s)", + vectorToString(sessionId).string(), + vectorToString(keyId).string(), + vectorToString(message).string(), + vectorToString(signature).string()); + + ssize_t index = findSession(sessionId); + if (index == kNotFound) { + ALOGD("Invalid sessionId"); + return BAD_VALUE; + } + + // Properties used in mock test, set by mock plugin and verifed cts test app + // byte[] keyId -> mock-keyid + // byte[] message 
-> mock-message + // byte[] signature -> mock-signature + mByteArrayProperties.add(String8("mock-keyid"), keyId); + mByteArrayProperties.add(String8("mock-message"), message); + mByteArrayProperties.add(String8("mock-signature"), signature); + + // Properties used in mock test, set by cts test app returned from mock plugin + // String mock-match "1" or "0" -> match + index = mStringProperties.indexOfKey(String8("mock-match")); + if (index < 0) { + ALOGD("Missing 'mock-request' parameter for mock"); + return BAD_VALUE; + } else { + match = atol(mStringProperties.valueAt(index).string()); + } + return OK; + } + + ssize_t MockDrmPlugin::findSession(Vector const &sessionId) const + { + ALOGD("findSession: nsessions=%d, size=%d", mSessions.size(), sessionId.size()); + for (size_t i = 0; i < mSessions.size(); ++i) { + if (memcmp(mSessions[i].array(), sessionId.array(), sessionId.size()) == 0) { + return i; + } + } + return kNotFound; + } + + ssize_t MockDrmPlugin::findKeySet(Vector const &keySetId) const + { + ALOGD("findKeySet: nkeySets=%d, size=%d", mKeySets.size(), keySetId.size()); + for (size_t i = 0; i < mKeySets.size(); ++i) { + if (memcmp(mKeySets[i].array(), keySetId.array(), keySetId.size()) == 0) { + return i; + } + } + return kNotFound; + } + + + // Conversion utilities + String8 MockDrmPlugin::vectorToString(Vector const &vector) const + { + return arrayToString(vector.array(), vector.size()); + } + + String8 MockDrmPlugin::arrayToString(uint8_t const *array, size_t len) const + { + String8 result("{ "); + for (size_t i = 0; i < len; i++) { + result.appendFormat("0x%02x ", array[i]); + } + result += "}"; + return result; + } + + String8 MockDrmPlugin::stringMapToString(KeyedVector map) const + { + String8 result("{ "); + for (size_t i = 0; i < map.size(); i++) { + result.appendFormat("%s{name=%s, value=%s}", i > 0 ? 
", " : "", + map.keyAt(i).string(), map.valueAt(i).string()); + } + return result + " }"; + } + + bool operator<(Vector const &lhs, Vector const &rhs) { + return lhs.size() < rhs.size() || (memcmp(lhs.array(), rhs.array(), lhs.size()) < 0); + } + + // + // Crypto Plugin + // + + bool MockCryptoPlugin::requiresSecureDecoderComponent(const char *mime) const + { + ALOGD("MockCryptoPlugin::requiresSecureDecoderComponent(mime=%s)", mime); + return false; + } + + ssize_t + MockCryptoPlugin::decrypt(bool secure, const uint8_t key[16], const uint8_t iv[16], + Mode mode, const void *srcPtr, const SubSample *subSamples, + size_t numSubSamples, void *dstPtr, AString *errorDetailMsg) + { + ALOGD("MockCryptoPlugin::decrypt(secure=%d, key=%s, iv=%s, mode=%d, src=%p, " + "subSamples=%s, dst=%p)", + (int)secure, + arrayToString(key, sizeof(key)).string(), + arrayToString(iv, sizeof(iv)).string(), + (int)mode, srcPtr, + subSamplesToString(subSamples, numSubSamples).string(), + dstPtr); + return OK; + } + + // Conversion utilities + String8 MockCryptoPlugin::arrayToString(uint8_t const *array, size_t len) const + { + String8 result("{ "); + for (size_t i = 0; i < len; i++) { + result.appendFormat("0x%02x ", array[i]); + } + result += "}"; + return result; + } + + String8 MockCryptoPlugin::subSamplesToString(SubSample const *subSamples, + size_t numSubSamples) const + { + String8 result; + for (size_t i = 0; i < numSubSamples; i++) { + result.appendFormat("[%d] {clear:%d, encrypted:%d} ", i, + subSamples[i].mNumBytesOfClearData, + subSamples[i].mNumBytesOfEncryptedData); + } + return result; + } + +}; diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h new file mode 100644 index 0000000..2297f9b --- /dev/null +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h @@ -0,0 +1,156 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "drm/DrmAPI.h" +#include "hardware/CryptoAPI.h" + +extern "C" { + android::DrmFactory *createDrmFactory(); + android::CryptoFactory *createCryptoFactory(); +} + +namespace android { + + class MockDrmFactory : public DrmFactory { + public: + MockDrmFactory() {} + virtual ~MockDrmFactory() {} + + bool isCryptoSchemeSupported(const uint8_t uuid[16]); + bool isContentTypeSupported(const String8 &mimeType); + status_t createDrmPlugin(const uint8_t uuid[16], DrmPlugin **plugin); + }; + + class MockCryptoFactory : public CryptoFactory { + public: + MockCryptoFactory() {} + virtual ~MockCryptoFactory() {} + + bool isCryptoSchemeSupported(const uint8_t uuid[16]) const; + status_t createPlugin( + const uint8_t uuid[16], const void *data, size_t size, + CryptoPlugin **plugin); + }; + + + + class MockDrmPlugin : public DrmPlugin { + public: + MockDrmPlugin() {} + virtual ~MockDrmPlugin() {} + + // from DrmPlugin + status_t openSession(Vector &sessionId); + status_t closeSession(Vector const &sessionId); + + status_t getKeyRequest(Vector const &sessionId, + Vector const &initData, + String8 const &mimeType, KeyType keyType, + KeyedVector const &optionalParameters, + Vector &request, String8 &defaultUrl); + + status_t provideKeyResponse(Vector const &sessionId, + Vector const &response, + Vector &keySetId); + + status_t removeKeys(Vector const &keySetId); + + status_t restoreKeys(Vector const &sessionId, + Vector const &keySetId); + + status_t queryKeyStatus(Vector const &sessionId, + KeyedVector &infoMap) const; + + status_t getProvisionRequest(Vector &request, + String8 &defaultUrl); + + status_t provideProvisionResponse(Vector const &response); + + status_t getSecureStops(List > &secureStops); + status_t releaseSecureStops(Vector const &ssRelease); + + status_t getPropertyString(String8 const &name, String8 &value ) const; + status_t getPropertyByteArray(String8 const &name, + Vector &value ) const; + + status_t setPropertyString(String8 const &name, + String8 const &value ); + status_t setPropertyByteArray(String8 const &name, + Vector const &value ); + + status_t setCipherAlgorithm(Vector const &sessionId, + String8 const &algorithm); + + status_t setMacAlgorithm(Vector const &sessionId, + String8 const &algorithm); + + status_t encrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output); + + status_t decrypt(Vector const &sessionId, + Vector const &keyId, + Vector const &input, + Vector const &iv, + Vector &output); + + status_t sign(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector &signature); + + status_t verify(Vector const &sessionId, + Vector const &keyId, + Vector const &message, + Vector const &signature, + bool &match); + + private: + String8 vectorToString(Vector const &vector) const; + String8 arrayToString(uint8_t const *array, size_t len) const; + String8 stringMapToString(KeyedVector map) const; + + SortedVector > mSessions; + SortedVector > mKeySets; + + static const ssize_t kNotFound = -1; + ssize_t findSession(Vector const &sessionId) const; + ssize_t findKeySet(Vector const &keySetId) const; + + Mutex mLock; + KeyedVector mStringProperties; + KeyedVector > mByteArrayProperties; + }; + + + class MockCryptoPlugin : public CryptoPlugin { + + bool requiresSecureDecoderComponent(const char *mime) const; + + ssize_t decrypt(bool secure, + const uint8_t key[16], const uint8_t iv[16], + Mode mode, const void *srcPtr, + const SubSample *subSamples, size_t numSubSamples, + 
void *dstPtr, AString *errorDetailMsg); + private: + String8 subSamplesToString(CryptoPlugin::SubSample const *subSamples, size_t numSubSamples) const; + String8 arrayToString(uint8_t const *array, size_t len) const; + }; +}; -- cgit v1.1 From 972a173d7d1de1a3b5a617aae3e2abb6e05ae02d Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Wed, 4 Sep 2013 09:42:59 -0700 Subject: audioflinger: longer offload thread standby delay - Increase offloaded output thread standby delay to 1 second to allow transition between tracks with going to stanby if reusing the same audio track (gapless) - Make sure pause/flush/resume sequence is sent to the HAL in the right order - Fix format display in track dump Bug: 8174034. Change-Id: I43ef6f8fdbf7427e4eff6cc2d0665d7d1463ea8a --- services/audioflinger/Threads.cpp | 29 ++++++++++++++++++++++------- services/audioflinger/Tracks.cpp | 4 ++-- 2 files changed, 24 insertions(+), 9 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 1b5a9a9..885f72e 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -109,6 +109,9 @@ static const uint32_t kMinNormalMixBufferSizeMs = 20; // maximum normal mix buffer size static const uint32_t kMaxNormalMixBufferSizeMs = 24; +// Offloaded output thread standby delay: allows track transition without going to standby +static const nsecs_t kOffloadStandbyDelayNs = seconds(1); + // Whether to use fast mixer static const enum { FastMixer_Never, // never initialize or use: for debugging only @@ -2137,13 +2140,11 @@ bool AudioFlinger::PlaybackThread::threadLoop() mWaitWorkCV.wait(mLock); ALOGV("async completion/wake"); acquireWakeLock_l(); + standbyTime = systemTime() + standbyDelay; + sleepTime = 0; if (exitPending()) { break; } - if (!mActiveTracks.size() && (systemTime() > standbyTime)) { - continue; - } - sleepTime = 0; } else if ((!mActiveTracks.size() && systemTime() > standbyTime) || isSuspended()) { // put audio hardware into standby after short delay @@ -3701,7 +3702,11 @@ void AudioFlinger::DirectOutputThread::cacheParameters_l() // use shorter standby delay as on normal output to release // hardware resources as soon as possible - standbyDelay = microseconds(activeSleepTime*2); + if (audio_is_linear_pcm(mFormat)) { + standbyDelay = microseconds(activeSleepTime*2); + } else { + standbyDelay = kOffloadStandbyDelayNs; + } } // ---------------------------------------------------------------------------- @@ -3837,6 +3842,9 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr size_t count = mActiveTracks.size(); mixer_state mixerStatus = MIXER_IDLE; + bool doHwPause = false; + bool doHwResume = false; + // find out which tracks need to be processed for (size_t i = 0; i < count; i++) { sp t = mActiveTracks[i].promote(); @@ -3868,7 +3876,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr track->setPaused(); if (last) { if (!mHwPaused) { - mOutput->stream->pause(mOutput->stream); + doHwPause = true; mHwPaused = true; } // If we were part way through writing the mixbuffer to @@ -3901,7 +3909,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr if (last) { if (mHwPaused) { - mOutput->stream->resume(mOutput->stream); + doHwResume = true; mHwPaused = false; // threadLoop_mix() will handle the case that we need to // resume an interrupted write @@ -3963,10 +3971,17 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr 
processVolume_l(track, last); } + // make sure the pause/flush/resume sequence is executed in the right order + if (doHwPause) { + mOutput->stream->pause(mOutput->stream); + } if (mFlushPending) { flushHw_l(); mFlushPending = false; } + if (doHwResume) { + mOutput->stream->resume(mOutput->stream); + } // remove all the tracks that need to be... removeTracks_l(*tracksToRemove); diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 3a5dc35..3b1874e 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -402,7 +402,7 @@ void AudioFlinger::PlaybackThread::Track::destroy() /*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result) { - result.append(" Name Client Type Fmt Chn mask Session fCount S F SRate " + result.append(" Name Client Type Fmt Chn mask Session fCount S F SRate " "L dB R dB Server Main buf Aux Buf Flags UndFrmCnt\n"); } @@ -467,7 +467,7 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size) nowInUnderrun = '?'; break; } - snprintf(&buffer[7], size-7, " %6u %4u %3u %08X %7u %6u %1c %1d %5u %5.2g %5.2g " + snprintf(&buffer[7], size-7, " %6u %4u %08X %08X %7u %6u %1c %1d %5u %5.2g %5.2g " "%08X %08X %08X 0x%03X %9u%c\n", (mClient == 0) ? getpid_cached : mClient->pid(), mStreamType, -- cgit v1.1 From 3ef464e095da6c5d1acb5723dbc4b9f7cfd4ecf8 Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Wed, 11 Sep 2013 22:58:17 -0700 Subject: Camera: Limit supported preview sizes The ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES can provide sizes as large as jpeg sizes, which could cause potential issues when trying to do full size still capture and full size preview. This is not supported by many devices due to hardware limitation. This change limits the preview (as well video) size to no more than 1080p. 
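A minimal standalone sketch of the size-filtering idea described above (hypothetical names, std::vector instead of the Android utils types used in the actual diff that follows):

#include <cstdint>
#include <vector>

struct Size { int32_t width; int32_t height; };

// The static metadata exposes available sizes as a flat array of
// (width, height) pairs; keep only the entries that fit within `limit`.
std::vector<Size> filterPreviewSizes(const std::vector<int32_t> &flatSizes,
                                     Size limit) {
    std::vector<Size> kept;
    for (size_t i = 0; i + 1 < flatSizes.size(); i += 2) {
        Size s = { flatSizes[i], flatSizes[i + 1] };
        if (s.width <= limit.width && s.height <= limit.height) {
            kept.push_back(s);
        }
    }
    return kept;
}

// With a 1920x1080 limit, a JPEG-class 4000x3000 entry is dropped while
// 1920x1080 and 1280x720 survive, which is the behaviour the commit describes.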
Bug: 10625115 Change-Id: I9467ab843553ec06e8249b4a17c0ecf4c6d6f04e --- .../libcameraservice/api1/client2/Parameters.cpp | 69 +++++++++++++++------- .../libcameraservice/api1/client2/Parameters.h | 12 ++++ 2 files changed, 61 insertions(+), 20 deletions(-) diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp index 0459866..ad55feb 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.cpp +++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp @@ -58,13 +58,13 @@ status_t Parameters::initialize(const CameraMetadata *info) { res = buildQuirks(); if (res != OK) return res; - camera_metadata_ro_entry_t availableProcessedSizes = - staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 2); - if (!availableProcessedSizes.count) return NO_INIT; + const Size MAX_PREVIEW_SIZE = { MAX_PREVIEW_WIDTH, MAX_PREVIEW_HEIGHT }; + res = getFilteredPreviewSizes(MAX_PREVIEW_SIZE, &availablePreviewSizes); + if (res != OK) return res; // TODO: Pick more intelligently - previewWidth = availableProcessedSizes.data.i32[0]; - previewHeight = availableProcessedSizes.data.i32[1]; + previewWidth = availablePreviewSizes[0].width; + previewHeight = availablePreviewSizes[0].height; videoWidth = previewWidth; videoHeight = previewHeight; @@ -75,12 +75,13 @@ status_t Parameters::initialize(const CameraMetadata *info) { previewWidth, previewHeight)); { String8 supportedPreviewSizes; - for (size_t i=0; i < availableProcessedSizes.count; i += 2) { + for (size_t i = 0; i < availablePreviewSizes.size(); i++) { if (i != 0) supportedPreviewSizes += ","; supportedPreviewSizes += String8::format("%dx%d", - availableProcessedSizes.data.i32[i], - availableProcessedSizes.data.i32[i+1]); + availablePreviewSizes[i].width, + availablePreviewSizes[i].height); } + ALOGV("Supported preview sizes are: %s", supportedPreviewSizes.string()); params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, supportedPreviewSizes); params.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, @@ -1072,15 +1073,13 @@ status_t Parameters::set(const String8& paramString) { validatedParams.previewWidth, validatedParams.previewHeight); return BAD_VALUE; } - camera_metadata_ro_entry_t availablePreviewSizes = - staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES); - for (i = 0; i < availablePreviewSizes.count; i += 2 ) { - if ((availablePreviewSizes.data.i32[i] == + for (i = 0; i < availablePreviewSizes.size(); i++) { + if ((availablePreviewSizes[i].width == validatedParams.previewWidth) && - (availablePreviewSizes.data.i32[i+1] == + (availablePreviewSizes[i].height == validatedParams.previewHeight)) break; } - if (i == availablePreviewSizes.count) { + if (i == availablePreviewSizes.size()) { ALOGE("%s: Requested preview size %d x %d is not supported", __FUNCTION__, validatedParams.previewWidth, validatedParams.previewHeight); @@ -1618,15 +1617,13 @@ status_t Parameters::set(const String8& paramString) { __FUNCTION__); return BAD_VALUE; } - camera_metadata_ro_entry_t availableVideoSizes = - staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES); - for (i = 0; i < availableVideoSizes.count; i += 2 ) { - if ((availableVideoSizes.data.i32[i] == + for (i = 0; i < availablePreviewSizes.size(); i++) { + if ((availablePreviewSizes[i].width == validatedParams.videoWidth) && - (availableVideoSizes.data.i32[i+1] == + (availablePreviewSizes[i].height == validatedParams.videoHeight)) break; } - if (i == availableVideoSizes.count) { + if (i == availablePreviewSizes.size()) { 
ALOGE("%s: Requested video size %d x %d is not supported", __FUNCTION__, validatedParams.videoWidth, validatedParams.videoHeight); @@ -2447,6 +2444,38 @@ int Parameters::normalizedYToArray(int y) const { return cropYToArray(normalizedYToCrop(y)); } +status_t Parameters::getFilteredPreviewSizes(Size limit, Vector *sizes) { + if (info == NULL) { + ALOGE("%s: Static metadata is not initialized", __FUNCTION__); + return NO_INIT; + } + if (sizes == NULL) { + ALOGE("%s: Input size is null", __FUNCTION__); + return BAD_VALUE; + } + + const size_t SIZE_COUNT = sizeof(Size) / sizeof(int); + camera_metadata_ro_entry_t availableProcessedSizes = + staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, SIZE_COUNT); + if (availableProcessedSizes.count < SIZE_COUNT) return BAD_VALUE; + + Size previewSize; + for (size_t i = 0; i < availableProcessedSizes.count; i += SIZE_COUNT) { + previewSize.width = availableProcessedSizes.data.i32[i]; + previewSize.height = availableProcessedSizes.data.i32[i+1]; + // Need skip the preview sizes that are too large. + if (previewSize.width <= limit.width && + previewSize.height <= limit.height) { + sizes->push(previewSize); + } + } + if (sizes->isEmpty()) { + ALOGE("generated preview size list is empty!!"); + return BAD_VALUE; + } + return OK; +} + Parameters::CropRegion Parameters::calculateCropRegion( Parameters::CropRegion::Outputs outputs) const { diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h index 464830c..a7111a3 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.h +++ b/services/camera/libcameraservice/api1/client2/Parameters.h @@ -105,6 +105,11 @@ struct Parameters { }; Vector focusingAreas; + struct Size { + int32_t width; + int32_t height; + }; + int32_t exposureCompensation; bool autoExposureLock; bool autoWhiteBalanceLock; @@ -159,6 +164,9 @@ struct Parameters { // Number of zoom steps to simulate static const unsigned int NUM_ZOOM_STEPS = 100; + // Max preview size allowed + static const unsigned int MAX_PREVIEW_WIDTH = 1920; + static const unsigned int MAX_PREVIEW_HEIGHT = 1080; // Full static camera info, object owned by someone else, such as // Camera2Device. @@ -317,6 +325,10 @@ private: int cropYToNormalized(int y) const; int normalizedXToCrop(int x) const; int normalizedYToCrop(int y) const; + + Vector availablePreviewSizes; + // Get size list (that are no larger than limit) from static metadata. 
+ status_t getFilteredPreviewSizes(Size limit, Vector *sizes); }; // This class encapsulates the Parameters class so that it can only be accessed -- cgit v1.1 From 1abbdb4429479975718421c4fef3f79ce7c820e3 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 13 Sep 2013 17:00:08 -0700 Subject: audioflinger: fix ro.audio.silent in offload mode Change-Id: I5a1a79000d53146689b0a198cc5419c36509703f --- services/audioflinger/Threads.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 1b5a9a9..3578b7d 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -3509,7 +3509,8 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep if (track->mFillingUpStatus == Track::FS_FILLED) { track->mFillingUpStatus = Track::FS_ACTIVE; - mLeftVolFloat = mRightVolFloat = 0; + // make sure processVolume_l() will apply new volume even if 0 + mLeftVolFloat = mRightVolFloat = -1.0; if (track->mState == TrackBase::RESUMING) { track->mState = TrackBase::ACTIVE; } @@ -3887,7 +3888,8 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr ALOGVV("OffloadThread: track %d s=%08x [OK]", track->name(), cblk->mServer); if (track->mFillingUpStatus == Track::FS_FILLED) { track->mFillingUpStatus = Track::FS_ACTIVE; - mLeftVolFloat = mRightVolFloat = 0; + // make sure processVolume_l() will apply new volume even if 0 + mLeftVolFloat = mRightVolFloat = -1.0; if (track->mState == TrackBase::RESUMING) { if (mPausedBytesRemaining) { // Need to continue write that was interrupted -- cgit v1.1 From 66281c3a5d7eea486ddc6ad30088ba92956fd4fd Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Fri, 13 Sep 2013 17:59:59 -0700 Subject: Camera: Don't overwrite the error status Bug: 10749544 Change-Id: I6096effde03e18ee1b8c63cf3b36dce0344054c5 --- services/camera/libcameraservice/common/Camera2ClientBase.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp index 060e2a2..e808bf3 100644 --- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp +++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp @@ -95,7 +95,7 @@ status_t Camera2ClientBase::initialize(camera_module_t *module) { if (res != OK) { ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", __FUNCTION__, TClientBase::mCameraId, strerror(-res), res); - return NO_INIT; + return res; } res = mDevice->setNotifyCallback(this); -- cgit v1.1 From 527748abf04c0060894fd7aace54959a2c343435 Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Sun, 15 Sep 2013 21:06:10 -0700 Subject: Camera: setCallBackWindow only if callback surface is used Calling setCallBackWindow without checking the previewCallbackSurface could make the stream to be deleted during callback stream operation, which is bad because the preview is still active and the delete fails. 
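A rough sketch of the guard this commit describes (simplified, hypothetical names; the real change is the one-line condition in the diff below): the callback window is only reconfigured when a callback surface actually exists, so the preview stream is never deleted while preview is still active.

struct Surface {};   // stand-in for the real callback surface type

class CallbackState {
public:
    // Reconfigure the callback path only when a callback surface exists;
    // otherwise leave the (possibly active) preview stream alone.
    void setCallbackFlag(bool enable) {
        if (enable == mEnabled) {
            return;                  // no state change, nothing to do
        }
        if (mCallbackSurface != nullptr && enable) {
            detachCallbackWindow();  // safe: a callback consumer exists
        }
        mEnabled = enable;
    }

private:
    void detachCallbackWindow() { /* analogous to setCallbackWindow(NULL) */ }
    Surface *mCallbackSurface = nullptr;
    bool mEnabled = false;
};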
Bug: 10730496 Change-Id: I8addac25e5aa7901bf200d730c749bee2c088090 --- services/camera/libcameraservice/api1/Camera2Client.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp index bda2887..e7f6c53 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.cpp +++ b/services/camera/libcameraservice/api1/Camera2Client.cpp @@ -604,7 +604,7 @@ void Camera2Client::setPreviewCallbackFlagL(Parameters ¶ms, int flag) { } if (params.previewCallbackFlags != (uint32_t)flag) { - if (flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) { + if (params.previewCallbackSurface && flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) { // Disable any existing preview callback window when enabling // preview callback flags res = mCallbackProcessor->setCallbackWindow(NULL); -- cgit v1.1 From 2f876f9ee63396e4e0117f85c5b3132cac7e2c9d Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Fri, 13 Sep 2013 11:39:24 -0700 Subject: Camera3: Always set a trigger ID for an active trigger This is to work around HAL implementations that expect to see an ID for every trigger. Use a dummy trigger ID of 1, since 0 has special meaning for older HALs. Bug: 10720617 Change-Id: I9cb1b8dbcb113cb9e737e5adb04032fd61a54c12 --- .../libcameraservice/device3/Camera3Device.cpp | 47 ++++++++++++++++++++++ .../libcameraservice/device3/Camera3Device.h | 4 ++ 2 files changed, 51 insertions(+) diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index b70a278..b468eb3 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -1630,6 +1630,19 @@ bool Camera3Device::RequestThread::threadLoop() { // If the request is the same as last, or we had triggers last time if (mPrevRequest != nextRequest || triggersMixedIn) { /** + * HAL workaround: + * Insert a dummy trigger ID if a trigger is set but no trigger ID is + */ + res = addDummyTriggerIds(nextRequest); + if (res != OK) { + SET_ERR("RequestThread: Unable to insert dummy trigger IDs " + "(capture request %d, HAL device: %s (%d)", + (mFrameNumber+1), strerror(-res), res); + cleanUpFailedRequest(request, nextRequest, outputBuffers); + return false; + } + + /** * The request should be presorted so accesses in HAL * are O(logn). Sidenote, sorting a sorted metadata is nop. 
*/ @@ -2047,6 +2060,40 @@ status_t Camera3Device::RequestThread::removeTriggers( return OK; } +status_t Camera3Device::RequestThread::addDummyTriggerIds( + const sp &request) { + // Trigger ID 0 has special meaning in the HAL2 spec, so avoid it here + static const int32_t dummyTriggerId = 1; + status_t res; + + CameraMetadata &metadata = request->mSettings; + + // If AF trigger is active, insert a dummy AF trigger ID if none already + // exists + camera_metadata_entry afTrigger = metadata.find(ANDROID_CONTROL_AF_TRIGGER); + camera_metadata_entry afId = metadata.find(ANDROID_CONTROL_AF_TRIGGER_ID); + if (afTrigger.count > 0 && + afTrigger.data.u8[0] != ANDROID_CONTROL_AF_TRIGGER_IDLE && + afId.count == 0) { + res = metadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &dummyTriggerId, 1); + if (res != OK) return res; + } + + // If AE precapture trigger is active, insert a dummy precapture trigger ID + // if none already exists + camera_metadata_entry pcTrigger = + metadata.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER); + camera_metadata_entry pcId = metadata.find(ANDROID_CONTROL_AE_PRECAPTURE_ID); + if (pcTrigger.count > 0 && + pcTrigger.data.u8[0] != ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE && + pcId.count == 0) { + res = metadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, + &dummyTriggerId, 1); + if (res != OK) return res; + } + + return OK; +} /** diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h index 0b3ad6e..61caf13 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.h +++ b/services/camera/libcameraservice/device3/Camera3Device.h @@ -314,6 +314,10 @@ class Camera3Device : // restoring the old field values for those tags. status_t removeTriggers(const sp &request); + // HAL workaround: Make sure a trigger ID always exists if + // a trigger does + status_t addDummyTriggerIds(const sp &request); + static const nsecs_t kRequestTimeout = 50e6; // 50 ms // Waits for a request, or returns NULL if times out. -- cgit v1.1 From ee08f7e36eeba80e005f9bdaebce635860a8f005 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Mon, 16 Sep 2013 13:30:01 -0700 Subject: Fix hang Specify that the surface is controlled by the app, to avoid a hang. b/10531761 Change-Id: Idccc2c73aa3d368d8e7fbdc071ce36e2382efea4 --- media/libmediaplayerservice/MediaPlayerService.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index 8833bd7..0dabd37 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -744,7 +744,7 @@ status_t MediaPlayerService::Client::setVideoSurfaceTexture( sp anw; if (bufferProducer != NULL) { - anw = new Surface(bufferProducer); + anw = new Surface(bufferProducer, true /* controlledByApp */); status_t err = native_window_api_connect(anw.get(), NATIVE_WINDOW_API_MEDIA); -- cgit v1.1 From ce8828a016b082f730152af2204b8ea3610dc1ec Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Mon, 16 Sep 2013 18:07:38 -0700 Subject: Fix underruns when fast track denied due to SRC OpenSL ES requests a fast track. If sample rate conversion is needed, the request is denied by server, and a larger client buffer is used to handle the higher latency of a normal track. However the client notification period was calculated based on buffer being divided into 2 sub-buffers. That resulted in the notification period being too long. 
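As a rough illustration of that arithmetic, the following standalone sketch shows how the notification period shrinks once the client buffer is treated as three sub-buffers when sample rate conversion is involved, as described next. All rates and frame counts below are made-up values and the variable names are local to the example; only the 2-versus-3 sub-buffer rule mirrors the actual change in AudioTrack::createTrack_l() further down.

// Standalone sketch of the notification-period arithmetic; numeric values are
// illustrative only.
#include <cstdio>
#include <cstddef>
#include <cstdint>

int main() {
    const uint32_t afSampleRate = 48000;  // assumed mixer (sink) rate
    const uint32_t sampleRate   = 44100;  // assumed client rate, so SRC is needed
    const size_t   frameCount   = 8192;   // assumed client buffer size in frames

    // Two sub-buffers suffice without sample rate conversion; with SRC the client
    // buffer is treated as three sub-buffers (pessimistic for some ratios).
    const uint32_t nBuffering = (sampleRate == afSampleRate) ? 2 : 3;

    // The notification period is at most 1/n of the buffer, so the app is woken
    // with enough margin to refill before the server drains what remains.
    const size_t oldNotification = frameCount / 2;          // previous rule
    const size_t newNotification = frameCount / nBuffering; // corrected rule

    printf("nBuffering=%u, old period=%zu frames, new period=%zu frames\n",
           nBuffering, oldNotification, newNotification);
    return 0;
}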
The server pulls chunks that are smaller than half the total buffer. So now the client uses 3 sub-buffers when there is SRC. Also removed the 'defer wake' optimization because it was incorrect. This optimization attempted to reduce the number of wakeups of client, when server releaseBuffer knows that another releaseBuffer will be following. But there is no way for the first releaseBuffer to predict how soon the second releaseBuffer will occur. In some cases it was a long time, and the client underran. So now the client is woken up immediately if the total number of available frames to client is >= the minimum number the client wants to see (the notification period). Also fix bug where minimum frame count was not being used in the calculation of notification period. Bug: 10342804 Change-Id: I3c246f4e7bc3684a344f2cf08268dc082e338e2a --- include/private/media/AudioTrackShared.h | 2 - media/libmedia/AudioTrack.cpp | 67 +++++++++++++++++--------------- media/libmedia/AudioTrackShared.cpp | 7 +--- 3 files changed, 37 insertions(+), 39 deletions(-) diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index ad7409d..fe258ad 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -383,8 +383,6 @@ public: protected: size_t mAvailToClient; // estimated frames available to client prior to releaseBuffer() int32_t mFlush; // our copy of cblk->u.mStreaming.mFlush, for streaming output only -private: - bool mDeferWake; // whether another releaseBuffer() is expected soon }; // Proxy used by AudioFlinger for servicing AudioTrack diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 744faee..15249a4 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -815,12 +815,29 @@ status_t AudioTrack::createTrack_l( return NO_INIT; } + // Not all of these values are needed under all conditions, but it is easier to get them all + uint32_t afLatency; - if ((status = AudioSystem::getLatency(output, streamType, &afLatency)) != NO_ERROR) { + status = AudioSystem::getLatency(output, streamType, &afLatency); + if (status != NO_ERROR) { ALOGE("getLatency(%d) failed status %d", output, status); return NO_INIT; } + size_t afFrameCount; + status = AudioSystem::getFrameCount(output, streamType, &afFrameCount); + if (status != NO_ERROR) { + ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType, status); + return NO_INIT; + } + + uint32_t afSampleRate; + status = AudioSystem::getSamplingRate(output, streamType, &afSampleRate); + if (status != NO_ERROR) { + ALOGE("getSamplingRate(output=%d, streamType=%d) status %d", output, streamType, status); + return NO_INIT; + } + // Client decides whether the track is TIMED (see below), but can only express a preference // for FAST. Server will perform additional tests. if ((flags & AUDIO_OUTPUT_FLAG_FAST) && !( @@ -836,6 +853,14 @@ status_t AudioTrack::createTrack_l( } ALOGV("createTrack_l() output %d afLatency %d", output, afLatency); + // The client's AudioTrack buffer is divided into n parts for purpose of wakeup by server, where + // n = 1 fast track; nBuffering is ignored + // n = 2 normal track, no sample rate conversion + // n = 3 normal track, with sample rate conversion + // (pessimistic; some non-1:1 conversion ratios don't actually need triple-buffering) + // n > 3 very high latency or very small notification interval; nBuffering is ignored + const uint32_t nBuffering = (sampleRate == afSampleRate) ? 
2 : 3; + mNotificationFramesAct = mNotificationFramesReq; if (!audio_is_linear_pcm(format)) { @@ -844,13 +869,6 @@ status_t AudioTrack::createTrack_l( // Same comment as below about ignoring frameCount parameter for set() frameCount = sharedBuffer->size(); } else if (frameCount == 0) { - size_t afFrameCount; - status = AudioSystem::getFrameCount(output, streamType, &afFrameCount); - if (status != NO_ERROR) { - ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType, - status); - return NO_INIT; - } frameCount = afFrameCount; } if (mNotificationFramesAct != frameCount) { @@ -880,26 +898,13 @@ status_t AudioTrack::createTrack_l( } else if (!(flags & AUDIO_OUTPUT_FLAG_FAST)) { // FIXME move these calculations and associated checks to server - uint32_t afSampleRate; - status = AudioSystem::getSamplingRate(output, streamType, &afSampleRate); - if (status != NO_ERROR) { - ALOGE("getSamplingRate(output=%d, streamType=%d) status %d", output, streamType, - status); - return NO_INIT; - } - size_t afFrameCount; - status = AudioSystem::getFrameCount(output, streamType, &afFrameCount); - if (status != NO_ERROR) { - ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType, status); - return NO_INIT; - } // Ensure that buffer depth covers at least audio hardware latency uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate); ALOGV("afFrameCount=%d, minBufCount=%d, afSampleRate=%u, afLatency=%d", afFrameCount, minBufCount, afSampleRate, afLatency); - if (minBufCount <= 2) { - minBufCount = sampleRate == afSampleRate ? 2 : 3; + if (minBufCount <= nBuffering) { + minBufCount = nBuffering; } size_t minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate; @@ -909,18 +914,16 @@ status_t AudioTrack::createTrack_l( if (frameCount == 0) { frameCount = minFrameCount; - } - // Make sure that application is notified with sufficient margin - // before underrun - if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) { - mNotificationFramesAct = frameCount/2; - } - if (frameCount < minFrameCount) { + } else if (frameCount < minFrameCount) { // not ALOGW because it happens all the time when playing key clicks over A2DP ALOGV("Minimum buffer size corrected from %d to %d", frameCount, minFrameCount); frameCount = minFrameCount; } + // Make sure that application is notified with sufficient margin before underrun + if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) { + mNotificationFramesAct = frameCount/nBuffering; + } } else { // For fast tracks, the frame count calculations and checks are done by server @@ -1001,8 +1004,8 @@ status_t AudioTrack::createTrack_l( flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST); mFlags = flags; if (sharedBuffer == 0) { - if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) { - mNotificationFramesAct = frameCount/2; + if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) { + mNotificationFramesAct = frameCount/nBuffering; } } } diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index e7abb40..4fd92b2 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -481,7 +481,7 @@ size_t StaticAudioTrackClientProxy::getBufferPosition() ServerProxy::ServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize, bool isOut, bool clientInServer) : Proxy(cblk, buffers, frameCount, frameSize, isOut, clientInServer), - 
mAvailToClient(0), mFlush(0), mDeferWake(false) + mAvailToClient(0), mFlush(0) { } @@ -559,9 +559,6 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer) &((char *) mBuffers)[(mIsOut ? front : rear) * mFrameSize] : NULL; buffer->mNonContig = availToServer - part1; mUnreleased = part1; - // optimization to avoid waking up the client too early - // FIXME need to test for recording - mDeferWake = part1 < ask && availToServer >= ask; return part1 > 0 ? NO_ERROR : WOULD_BLOCK; } no_init: @@ -607,7 +604,7 @@ void ServerProxy::releaseBuffer(Buffer* buffer) minimum = half; } // FIXME AudioRecord wakeup needs to be optimized; it currently wakes up client every time - if (!mIsOut || (!mDeferWake && mAvailToClient + stepCount >= minimum)) { + if (!mIsOut || (mAvailToClient + stepCount >= minimum)) { ALOGV("mAvailToClient=%u stepCount=%u minimum=%u", mAvailToClient, stepCount, minimum); int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex); if (!(old & CBLK_FUTEX_WAKE)) { -- cgit v1.1 From 6cc3a9948b51193dfdcb0c3527d7f3d1ca38aa3c Mon Sep 17 00:00:00 2001 From: Jean-Michel Trivi Date: Tue, 10 Sep 2013 09:15:18 -0700 Subject: LoudnessEnhancer audio effect implementation Implementation based on DRC effect, controlled by a target gain. The target gain is used to amplify the signal at the input of the DRC, and to compute the knee of the DRC. Bug 8413913 Change-Id: I386d64793a9fa3f7218e053d6f0a99f6836c02bd --- media/libeffects/data/audio_effects.conf | 7 + media/libeffects/loudness/Android.mk | 27 ++ .../libeffects/loudness/EffectLoudnessEnhancer.cpp | 474 +++++++++++++++++++++ media/libeffects/loudness/MODULE_LICENSE_APACHE2 | 0 media/libeffects/loudness/NOTICE | 190 +++++++++ .../libeffects/loudness/common/core/basic_types.h | 114 +++++ .../libeffects/loudness/common/core/byte_swapper.h | 151 +++++++ media/libeffects/loudness/common/core/math.h | 89 ++++ media/libeffects/loudness/common/core/os.h | 29 ++ media/libeffects/loudness/common/core/types.h | 31 ++ media/libeffects/loudness/dsp/core/basic-inl.h | 48 +++ media/libeffects/loudness/dsp/core/basic.h | 48 +++ .../dsp/core/dynamic_range_compression-inl.h | 45 ++ .../dsp/core/dynamic_range_compression.cpp | 106 +++++ .../loudness/dsp/core/dynamic_range_compression.h | 116 +++++ media/libeffects/loudness/dsp/core/interpolation.h | 24 ++ .../loudness/dsp/core/interpolator_base-inl.h | 180 ++++++++ .../loudness/dsp/core/interpolator_base.h | 112 +++++ .../loudness/dsp/core/interpolator_linear.h | 81 ++++ 19 files changed, 1872 insertions(+) create mode 100644 media/libeffects/loudness/Android.mk create mode 100644 media/libeffects/loudness/EffectLoudnessEnhancer.cpp create mode 100644 media/libeffects/loudness/MODULE_LICENSE_APACHE2 create mode 100644 media/libeffects/loudness/NOTICE create mode 100644 media/libeffects/loudness/common/core/basic_types.h create mode 100644 media/libeffects/loudness/common/core/byte_swapper.h create mode 100644 media/libeffects/loudness/common/core/math.h create mode 100644 media/libeffects/loudness/common/core/os.h create mode 100644 media/libeffects/loudness/common/core/types.h create mode 100644 media/libeffects/loudness/dsp/core/basic-inl.h create mode 100644 media/libeffects/loudness/dsp/core/basic.h create mode 100644 media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h create mode 100644 media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp create mode 100644 media/libeffects/loudness/dsp/core/dynamic_range_compression.h create mode 100644 
media/libeffects/loudness/dsp/core/interpolation.h create mode 100644 media/libeffects/loudness/dsp/core/interpolator_base-inl.h create mode 100644 media/libeffects/loudness/dsp/core/interpolator_base.h create mode 100644 media/libeffects/loudness/dsp/core/interpolator_linear.h diff --git a/media/libeffects/data/audio_effects.conf b/media/libeffects/data/audio_effects.conf index 69a3c53..c3c4b67 100644 --- a/media/libeffects/data/audio_effects.conf +++ b/media/libeffects/data/audio_effects.conf @@ -35,6 +35,9 @@ libraries { downmix { path /system/lib/soundfx/libdownmix.so } + loudness_enhancer { + path /system/lib/soundfx/libldnhncr.so + } } # Default pre-processing library. Add to audio_effect.conf "libraries" section if @@ -122,6 +125,10 @@ effects { library downmix uuid 93f04452-e4fe-41cc-91f9-e475b6d1d69f } + loudness_enhancer { + library loudness_enhancer + uuid fa415329-2034-4bea-b5dc-5b381c8d1e2c + } } # Default pre-processing effects. Add to audio_effect.conf "effects" section if diff --git a/media/libeffects/loudness/Android.mk b/media/libeffects/loudness/Android.mk new file mode 100644 index 0000000..dcb7b27 --- /dev/null +++ b/media/libeffects/loudness/Android.mk @@ -0,0 +1,27 @@ +LOCAL_PATH:= $(call my-dir) + +# LoudnessEnhancer library +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + EffectLoudnessEnhancer.cpp \ + dsp/core/dynamic_range_compression.cpp + +LOCAL_CFLAGS+= -O2 -fvisibility=hidden + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + liblog \ + libstlport + +LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx +LOCAL_MODULE:= libldnhncr + +LOCAL_C_INCLUDES := \ + $(call include-path-for, audio-effects) \ + bionic \ + bionic/libstdc++/include \ + external/stlport/stlport + + +include $(BUILD_SHARED_LIBRARY) diff --git a/media/libeffects/loudness/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp new file mode 100644 index 0000000..dfc25db --- /dev/null +++ b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp @@ -0,0 +1,474 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "EffectLE" +//#define LOG_NDEBUG 0 +#include +#include +#include +#include +#include +#include +#include +#include +#include "dsp/core/dynamic_range_compression.h" + +extern "C" { + +// effect_handle_t interface implementation for LE effect +extern const struct effect_interface_s gLEInterface; + +// AOSP Loudness Enhancer UUID: fa415329-2034-4bea-b5dc-5b381c8d1e2c +const effect_descriptor_t gLEDescriptor = { + {0xfe3199be, 0xaed0, 0x413f, 0x87bb, {0x11, 0x26, 0x0e, 0xb6, 0x3c, 0xf1}}, // type + {0xfa415329, 0x2034, 0x4bea, 0xb5dc, {0x5b, 0x38, 0x1c, 0x8d, 0x1e, 0x2c}}, // uuid + EFFECT_CONTROL_API_VERSION, + (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST), + 0, // TODO + 1, + "Loudness Enhancer", + "The Android Open Source Project", +}; + +enum le_state_e { + LOUDNESS_ENHANCER_STATE_UNINITIALIZED, + LOUDNESS_ENHANCER_STATE_INITIALIZED, + LOUDNESS_ENHANCER_STATE_ACTIVE, +}; + +struct LoudnessEnhancerContext { + const struct effect_interface_s *mItfe; + effect_config_t mConfig; + uint8_t mState; + int32_t mTargetGainmB;// target gain in mB + // in this implementation, there is no coupling between the compression on the left and right + // channels + le_fx::AdaptiveDynamicRangeCompression* mCompressorL; + le_fx::AdaptiveDynamicRangeCompression* mCompressorR; +}; + +// +//--- Local functions (not directly used by effect interface) +// + +void LE_reset(LoudnessEnhancerContext *pContext) +{ + ALOGV(" > LE_reset(%p)", pContext); + + if ((pContext->mCompressorL != NULL) && (pContext->mCompressorR != NULL)) { + float targetAmp = pow(10, pContext->mTargetGainmB/2000.0f); // mB to linear amplification + ALOGV("LE_reset(): Target gain=%dmB <=> factor=%.2fX", pContext->mTargetGainmB, targetAmp); + pContext->mCompressorL->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate); + pContext->mCompressorR->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate); + } else { + ALOGE("LE_reset(%p): null compressors, can't apply target gain", pContext); + } +} + +static inline int16_t clamp16(int32_t sample) +{ + if ((sample>>15) ^ (sample>>31)) + sample = 0x7FFF ^ (sample>>31); + return sample; +} + +//---------------------------------------------------------------------------- +// LE_setConfig() +//---------------------------------------------------------------------------- +// Purpose: Set input and output audio configuration. 
+// +// Inputs: +// pContext: effect engine context +// pConfig: pointer to effect_config_t structure holding input and output +// configuration parameters +// +// Outputs: +// +//---------------------------------------------------------------------------- + +int LE_setConfig(LoudnessEnhancerContext *pContext, effect_config_t *pConfig) +{ + ALOGV("LE_setConfig(%p)", pContext); + + if (pConfig->inputCfg.samplingRate != pConfig->outputCfg.samplingRate) return -EINVAL; + if (pConfig->inputCfg.channels != pConfig->outputCfg.channels) return -EINVAL; + if (pConfig->inputCfg.format != pConfig->outputCfg.format) return -EINVAL; + if (pConfig->inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO) return -EINVAL; + if (pConfig->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_WRITE && + pConfig->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_ACCUMULATE) return -EINVAL; + if (pConfig->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) return -EINVAL; + + pContext->mConfig = *pConfig; + + LE_reset(pContext); + + return 0; +} + + +//---------------------------------------------------------------------------- +// LE_getConfig() +//---------------------------------------------------------------------------- +// Purpose: Get input and output audio configuration. +// +// Inputs: +// pContext: effect engine context +// pConfig: pointer to effect_config_t structure holding input and output +// configuration parameters +// +// Outputs: +// +//---------------------------------------------------------------------------- + +void LE_getConfig(LoudnessEnhancerContext *pContext, effect_config_t *pConfig) +{ + *pConfig = pContext->mConfig; +} + + +//---------------------------------------------------------------------------- +// LE_init() +//---------------------------------------------------------------------------- +// Purpose: Initialize engine with default configuration. 
+// +// Inputs: +// pContext: effect engine context +// +// Outputs: +// +//---------------------------------------------------------------------------- + +int LE_init(LoudnessEnhancerContext *pContext) +{ + ALOGV("LE_init(%p)", pContext); + + pContext->mConfig.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ; + pContext->mConfig.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO; + pContext->mConfig.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT; + pContext->mConfig.inputCfg.samplingRate = 44100; + pContext->mConfig.inputCfg.bufferProvider.getBuffer = NULL; + pContext->mConfig.inputCfg.bufferProvider.releaseBuffer = NULL; + pContext->mConfig.inputCfg.bufferProvider.cookie = NULL; + pContext->mConfig.inputCfg.mask = EFFECT_CONFIG_ALL; + pContext->mConfig.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE; + pContext->mConfig.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO; + pContext->mConfig.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT; + pContext->mConfig.outputCfg.samplingRate = 44100; + pContext->mConfig.outputCfg.bufferProvider.getBuffer = NULL; + pContext->mConfig.outputCfg.bufferProvider.releaseBuffer = NULL; + pContext->mConfig.outputCfg.bufferProvider.cookie = NULL; + pContext->mConfig.outputCfg.mask = EFFECT_CONFIG_ALL; + + pContext->mTargetGainmB = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB; + float targetAmp = pow(10, pContext->mTargetGainmB/2000.0f); // mB to linear amplification + ALOGV("LE_init(): Target gain=%dmB <=> factor=%.2fX", pContext->mTargetGainmB, targetAmp); + + if (pContext->mCompressorL == NULL) { + pContext->mCompressorL = new le_fx::AdaptiveDynamicRangeCompression(); + pContext->mCompressorL->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate); + } + if (pContext->mCompressorR == NULL) { + pContext->mCompressorR = new le_fx::AdaptiveDynamicRangeCompression(); + pContext->mCompressorR->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate); + } + + LE_setConfig(pContext, &pContext->mConfig); + + return 0; +} + +// +//--- Effect Library Interface Implementation +// + +int LELib_Create(const effect_uuid_t *uuid, + int32_t sessionId, + int32_t ioId, + effect_handle_t *pHandle) { + ALOGV("LELib_Create()"); + int ret; + int i; + + if (pHandle == NULL || uuid == NULL) { + return -EINVAL; + } + + if (memcmp(uuid, &gLEDescriptor.uuid, sizeof(effect_uuid_t)) != 0) { + return -EINVAL; + } + + LoudnessEnhancerContext *pContext = new LoudnessEnhancerContext; + + pContext->mItfe = &gLEInterface; + pContext->mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED; + + pContext->mCompressorL = NULL; + pContext->mCompressorR = NULL; + ret = LE_init(pContext); + if (ret < 0) { + ALOGW("LELib_Create() init failed"); + delete pContext; + return ret; + } + + *pHandle = (effect_handle_t)pContext; + + pContext->mState = LOUDNESS_ENHANCER_STATE_INITIALIZED; + + ALOGV(" LELib_Create context is %p", pContext); + + return 0; + +} + +int LELib_Release(effect_handle_t handle) { + LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *)handle; + + ALOGV("LELib_Release %p", handle); + if (pContext == NULL) { + return -EINVAL; + } + pContext->mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED; + if (pContext->mCompressorL != NULL) { + delete pContext->mCompressorL; + pContext->mCompressorL = NULL; + } + if (pContext->mCompressorR != NULL) { + delete pContext->mCompressorR; + pContext->mCompressorR = NULL; + } + delete pContext; + + return 0; +} + +int LELib_GetDescriptor(const effect_uuid_t *uuid, + effect_descriptor_t *pDescriptor) { + + if (pDescriptor == NULL || uuid == NULL){ + 
ALOGV("LELib_GetDescriptor() called with NULL pointer"); + return -EINVAL; + } + + if (memcmp(uuid, &gLEDescriptor.uuid, sizeof(effect_uuid_t)) == 0) { + *pDescriptor = gLEDescriptor; + return 0; + } + + return -EINVAL; +} /* end LELib_GetDescriptor */ + +// +//--- Effect Control Interface Implementation +// +int LE_process( + effect_handle_t self, audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) +{ + LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *)self; + + if (pContext == NULL) { + return -EINVAL; + } + + if (inBuffer == NULL || inBuffer->raw == NULL || + outBuffer == NULL || outBuffer->raw == NULL || + inBuffer->frameCount != outBuffer->frameCount || + inBuffer->frameCount == 0) { + return -EINVAL; + } + + //ALOGV("LE about to process %d samples", inBuffer->frameCount); + uint16_t inIdx; + float inputAmp = pow(10, pContext->mTargetGainmB/2000.0f); + for (inIdx = 0 ; inIdx < inBuffer->frameCount ; inIdx++) { + inBuffer->s16[2*inIdx] = pContext->mCompressorL->Compress( + inputAmp * (float)inBuffer->s16[2*inIdx]); + inBuffer->s16[2*inIdx +1] = pContext->mCompressorR->Compress( + inputAmp * (float)inBuffer->s16[2*inIdx +1]); + } + + if (inBuffer->raw != outBuffer->raw) { + if (pContext->mConfig.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) { + for (size_t i = 0; i < outBuffer->frameCount*2; i++) { + outBuffer->s16[i] = clamp16(outBuffer->s16[i] + inBuffer->s16[i]); + } + } else { + memcpy(outBuffer->raw, inBuffer->raw, outBuffer->frameCount * 2 * sizeof(int16_t)); + } + } + if (pContext->mState != LOUDNESS_ENHANCER_STATE_ACTIVE) { + return -ENODATA; + } + return 0; +} + +int LE_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, + void *pCmdData, uint32_t *replySize, void *pReplyData) { + + LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *)self; + int retsize; + + if (pContext == NULL || pContext->mState == LOUDNESS_ENHANCER_STATE_UNINITIALIZED) { + return -EINVAL; + } + +// ALOGV("LE_command command %d cmdSize %d",cmdCode, cmdSize); + switch (cmdCode) { + case EFFECT_CMD_INIT: + if (pReplyData == NULL || *replySize != sizeof(int)) { + return -EINVAL; + } + *(int *) pReplyData = LE_init(pContext); + break; + case EFFECT_CMD_SET_CONFIG: + if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) + || pReplyData == NULL || *replySize != sizeof(int)) { + return -EINVAL; + } + *(int *) pReplyData = LE_setConfig(pContext, + (effect_config_t *) pCmdData); + break; + case EFFECT_CMD_GET_CONFIG: + if (pReplyData == NULL || + *replySize != sizeof(effect_config_t)) { + return -EINVAL; + } + LE_getConfig(pContext, (effect_config_t *)pReplyData); + break; + case EFFECT_CMD_RESET: + LE_reset(pContext); + break; + case EFFECT_CMD_ENABLE: + if (pReplyData == NULL || *replySize != sizeof(int)) { + return -EINVAL; + } + if (pContext->mState != LOUDNESS_ENHANCER_STATE_INITIALIZED) { + return -ENOSYS; + } + pContext->mState = LOUDNESS_ENHANCER_STATE_ACTIVE; + ALOGV("EFFECT_CMD_ENABLE() OK"); + *(int *)pReplyData = 0; + break; + case EFFECT_CMD_DISABLE: + if (pReplyData == NULL || *replySize != sizeof(int)) { + return -EINVAL; + } + if (pContext->mState != LOUDNESS_ENHANCER_STATE_ACTIVE) { + return -ENOSYS; + } + pContext->mState = LOUDNESS_ENHANCER_STATE_INITIALIZED; + ALOGV("EFFECT_CMD_DISABLE() OK"); + *(int *)pReplyData = 0; + break; + case EFFECT_CMD_GET_PARAM: { + if (pCmdData == NULL || + cmdSize != (int)(sizeof(effect_param_t) + sizeof(uint32_t)) || + pReplyData == NULL || + *replySize < (int)(sizeof(effect_param_t) + sizeof(uint32_t) + 
sizeof(uint32_t))) { + return -EINVAL; + } + memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + sizeof(uint32_t)); + effect_param_t *p = (effect_param_t *)pReplyData; + p->status = 0; + *replySize = sizeof(effect_param_t) + sizeof(uint32_t); + if (p->psize != sizeof(uint32_t)) { + p->status = -EINVAL; + break; + } + switch (*(uint32_t *)p->data) { + case LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB: + ALOGV("get target gain(mB) = %d", pContext->mTargetGainmB); + *((int32_t *)p->data + 1) = pContext->mTargetGainmB; + p->vsize = sizeof(int32_t); + *replySize += sizeof(int32_t); + break; + default: + p->status = -EINVAL; + } + } break; + case EFFECT_CMD_SET_PARAM: { + if (pCmdData == NULL || + cmdSize != (int)(sizeof(effect_param_t) + sizeof(uint32_t) + sizeof(uint32_t)) || + pReplyData == NULL || *replySize != sizeof(int32_t)) { + return -EINVAL; + } + *(int32_t *)pReplyData = 0; + effect_param_t *p = (effect_param_t *)pCmdData; + if (p->psize != sizeof(uint32_t) || p->vsize != sizeof(uint32_t)) { + *(int32_t *)pReplyData = -EINVAL; + break; + } + switch (*(uint32_t *)p->data) { + case LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB: + pContext->mTargetGainmB = *((int32_t *)p->data + 1); + ALOGV("set target gain(mB) = %d", pContext->mTargetGainmB); + LE_reset(pContext); // apply parameter update + break; + default: + *(int32_t *)pReplyData = -EINVAL; + } + } break; + case EFFECT_CMD_SET_DEVICE: + case EFFECT_CMD_SET_VOLUME: + case EFFECT_CMD_SET_AUDIO_MODE: + break; + + default: + ALOGW("LE_command invalid command %d",cmdCode); + return -EINVAL; + } + + return 0; +} + +/* Effect Control Interface Implementation: get_descriptor */ +int LE_getDescriptor(effect_handle_t self, + effect_descriptor_t *pDescriptor) +{ + LoudnessEnhancerContext * pContext = (LoudnessEnhancerContext *) self; + + if (pContext == NULL || pDescriptor == NULL) { + ALOGV("LE_getDescriptor() invalid param"); + return -EINVAL; + } + + *pDescriptor = gLEDescriptor; + + return 0; +} /* end LE_getDescriptor */ + +// effect_handle_t interface implementation for DRC effect +const struct effect_interface_s gLEInterface = { + LE_process, + LE_command, + LE_getDescriptor, + NULL, +}; + +// This is the only symbol that needs to be exported +__attribute__ ((visibility ("default"))) +audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { + tag : AUDIO_EFFECT_LIBRARY_TAG, + version : EFFECT_LIBRARY_API_VERSION, + name : "Loudness Enhancer Library", + implementor : "The Android Open Source Project", + create_effect : LELib_Create, + release_effect : LELib_Release, + get_descriptor : LELib_GetDescriptor, +}; + +}; // extern "C" + diff --git a/media/libeffects/loudness/MODULE_LICENSE_APACHE2 b/media/libeffects/loudness/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000..e69de29 diff --git a/media/libeffects/loudness/NOTICE b/media/libeffects/loudness/NOTICE new file mode 100644 index 0000000..ad6ed94 --- /dev/null +++ b/media/libeffects/loudness/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2013, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/libeffects/loudness/common/core/basic_types.h b/media/libeffects/loudness/common/core/basic_types.h new file mode 100644 index 0000000..593e914 --- /dev/null +++ b/media/libeffects/loudness/common/core/basic_types.h @@ -0,0 +1,114 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef LE_FX_ENGINE_COMMON_CORE_BASIC_TYPES_H_ +#define LE_FX_ENGINE_COMMON_CORE_BASIC_TYPES_H_ + +#include +#include +#include +using ::std::string; +using ::std::basic_string; +#include +using ::std::vector; + +#include "common/core/os.h" + +// ----------------------------------------------------------------------------- +// Definitions of common basic types: +// ----------------------------------------------------------------------------- + +#if !defined(G_COMPILE) && !defined(BASE_INTEGRAL_TYPES_H_) + +namespace le_fx { + +typedef signed char schar; +typedef signed char int8; +typedef short int16; +typedef int int32; +typedef long long int64; + +typedef unsigned char uint8; +typedef unsigned short uint16; +typedef unsigned int uint32; +typedef unsigned long long uint64; + +} // namespace le_fx + +#endif + +namespace le_fx { + +struct FloatArray { + int length; + float *data; + + FloatArray(void) { + data = NULL; + length = 0; + } +}; + +struct Int16Array { + int length; + int16 *data; + + Int16Array(void) { + data = NULL; + length = 0; + } +}; + +struct Int32Array { + int length; + int32 *data; + + Int32Array(void) { + data = NULL; + length = 0; + } +}; + +struct Int8Array { + int length; + uint8 *data; + + Int8Array(void) { + data = NULL; + length = 0; + } +}; + +// +// Simple wrapper for waveform data: +// +class WaveData : public vector { + public: + WaveData(); + ~WaveData(); + + void Set(int number_samples, int sampling_rate, int16 *data); + int sample_rate(void) const; + void set_sample_rate(int sample_rate); + bool Equals(const WaveData &wave_data, int threshold = 0) const; + + private: + int sample_rate_; +}; + +} // namespace le_fx + +#endif // LE_FX_ENGINE_COMMON_CORE_BASIC_TYPES_H_ diff --git a/media/libeffects/loudness/common/core/byte_swapper.h b/media/libeffects/loudness/common/core/byte_swapper.h new file mode 100644 index 0000000..8f0caf3 --- /dev/null +++ b/media/libeffects/loudness/common/core/byte_swapper.h @@ -0,0 +1,151 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef LE_FX_ENGINE_COMMON_CORE_BYTE_SWAPPER_H_ +#define LE_FX_ENGINE_COMMON_CORE_BYTE_SWAPPER_H_ + +#include +#include + +#include "common/core/basic_types.h" +#include "common/core/os.h" + +namespace le_fx { + +namespace arch { + +inline bool IsLittleEndian(void) { + int16 word = 1; + char *cp = reinterpret_cast(&word); + return cp[0] != 0; +} + +inline bool IsBigEndian(void) { + return !IsLittleEndian(); +} + +template +struct ByteSwapper { + static T Swap(const T &val) { + T new_val = val; + char *first = &new_val, *last = first + kValSize - 1, x; + for (; first < last; ++first, --last) { + x = *last; + *last = *first; + *first = x; + } + return new_val; + } +}; + +template +struct ByteSwapper { + static T Swap(const T &val) { + return val; + } +}; + +template +struct ByteSwapper { + static T Swap(const T &val) { + T new_val; + const char *o = (const char *)&val; + char *p = reinterpret_cast(&new_val); + p[0] = o[1]; + p[1] = o[0]; + return new_val; + } +}; + +template +struct ByteSwapper { + static T Swap(const T &val) { + T new_val; + const char *o = (const char *)&val; + char *p = reinterpret_cast(&new_val); + p[0] = o[3]; + p[1] = o[2]; + p[2] = o[1]; + p[3] = o[0]; + return new_val; + } +}; + +template +struct ByteSwapper { + static T Swap(const T &val) { + T new_val = val; + const char *o = (const char *)&val; + char *p = reinterpret_cast(&new_val); + p[0] = o[7]; + p[1] = o[6]; + p[2] = o[5]; + p[3] = o[4]; + p[4] = o[3]; + p[5] = o[2]; + p[6] = o[1]; + p[7] = o[0]; + return new_val; + } +}; + +template +T SwapBytes(const T &val, bool force_swap) { + if (force_swap) { +#if !defined(LE_FX__NEED_BYTESWAP) + return ByteSwapper::Swap(val); +#else + return val; +#endif // !LE_FX_NEED_BYTESWAP + } else { +#if !defined(LE_FX_NEED_BYTESWAP) + return val; +#else + return ByteSwapper::Swap(val); +#endif // !LE_FX_NEED_BYTESWAP + } +} + +template +const T *SwapBytes(const T *vals, unsigned int num_items, bool force_swap) { + if (force_swap) { +#if !defined(LE_FX_NEED_BYTESWAP) + T *writeable_vals = const_cast(vals); + for (unsigned int i = 0; i < num_items; i++) { + writeable_vals[i] = ByteSwapper::Swap(vals[i]); + } + return writeable_vals; +#else + return vals; +#endif // !LE_FX_NEED_BYTESWAP + } else { +#if !defined(LE_FX_NEED_BYTESWAP) + return vals; +#else + T *writeable_vals = const_cast(vals); + for (unsigned int i = 0; i < num_items; i++) { + writeable_vals[i] = ByteSwapper::Swap(vals[i]); + } + return writeable_vals; +#endif // !LE_FX_NEED_BYTESWAP + } +} + +} // namespace arch + +} // namespace le_fx + +#endif // LE_FX_ENGINE_COMMON_CORE_BYTE_SWAPPER_H_ diff --git a/media/libeffects/loudness/common/core/math.h b/media/libeffects/loudness/common/core/math.h new file mode 100644 index 0000000..3f302cc --- /dev/null +++ b/media/libeffects/loudness/common/core/math.h @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef LE_FX_ENGINE_COMMON_CORE_MATH_H_ +#define LE_FX_ENGINE_COMMON_CORE_MATH_H_ + +#include +#include +using ::std::min; +using ::std::max; +using ::std::fill; +using ::std::fill_n;using ::std::lower_bound; +#include +#include +//using ::std::fpclassify; + +#include "common/core/os.h" +#include "common/core/types.h" + +namespace le_fx { +namespace math { + +// A fast approximation to log2(.) +inline float fast_log2(float val) { + int* const exp_ptr = reinterpret_cast (&val); + int x = *exp_ptr; + const int log_2 = ((x >> 23) & 255) - 128; + x &= ~(255 << 23); + x += 127 << 23; + *exp_ptr = x; + val = ((-1.0f / 3) * val + 2) * val - 2.0f / 3; + return static_cast(val + log_2); +} + +// A fast approximation to log(.) +inline float fast_log(float val) { + return fast_log2(val) * + 0.693147180559945286226763982995180413126945495605468750f; +} + +// An approximation of the exp(.) function using a 5-th order Taylor expansion. +// It's pretty accurate between +-0.1 and accurate to 10e-3 between +-1 +template +inline T ExpApproximationViaTaylorExpansionOrder5(T x) { + const T x2 = x * x; + const T x3 = x2 * x; + const T x4 = x2 * x2; + const T x5 = x3 * x2; + return 1.0f + x + 0.5f * x2 + + 0.16666666666666665741480812812369549646973609924316406250f * x3 + + 0.0416666666666666643537020320309238741174340248107910156250f * x4 + + 0.008333333333333333217685101601546193705871701240539550781250f * x5; +} + +} // namespace math +} // namespace le_fx + +// Math functions missing in Android NDK: +#if defined(LE_FX_OS_ANDROID) + +namespace std { + +// +// Round to the nearest integer: We need this implementation +// since std::round is missing on android. +// +template +inline T round(const T &x) { + return static_cast(std::floor(static_cast(x) + 0.5)); +} + +} // namespace std + +#endif // LE_FX_OS_ANDROID + +#endif // LE_FX_ENGINE_COMMON_CORE_MATH_H_ diff --git a/media/libeffects/loudness/common/core/os.h b/media/libeffects/loudness/common/core/os.h new file mode 100644 index 0000000..4a8ce82 --- /dev/null +++ b/media/libeffects/loudness/common/core/os.h @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef LE_FX_ENGINE_COMMON_CORE_OS_H_ +#define LE_FX_ENGINE_COMMON_CORE_OS_H_ + +// ----------------------------------------------------------------------------- +// OS Identification: +// ----------------------------------------------------------------------------- + +#define LE_FX_OS_UNIX +#if defined(__ANDROID__) +# define LE_FX_OS_ANDROID +#endif // Android + +#endif // LE_FX_ENGINE_COMMON_CORE_OS_H_ diff --git a/media/libeffects/loudness/common/core/types.h b/media/libeffects/loudness/common/core/types.h new file mode 100644 index 0000000..d1b6c6a --- /dev/null +++ b/media/libeffects/loudness/common/core/types.h @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef LE_FX_ENGINE_COMMON_CORE_TYPES_H_ +#define LE_FX_ENGINE_COMMON_CORE_TYPES_H_ + +#include "common/core/os.h" + +#include "common/core/basic_types.h" + +#ifndef LE_FX_DISALLOW_COPY_AND_ASSIGN +#define LE_FX_DISALLOW_COPY_AND_ASSIGN(TypeName) \ + TypeName(const TypeName&); \ + void operator=(const TypeName&) +#endif // LE_FX_DISALLOW_COPY_AND_ASSIGN + + +#endif // LE_FX_ENGINE_COMMON_CORE_TYPES_H_ diff --git a/media/libeffects/loudness/dsp/core/basic-inl.h b/media/libeffects/loudness/dsp/core/basic-inl.h new file mode 100644 index 0000000..3f77147 --- /dev/null +++ b/media/libeffects/loudness/dsp/core/basic-inl.h @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef LE_FX_ENGINE_DSP_CORE_BASIC_INL_H_ +#define LE_FX_ENGINE_DSP_CORE_BASIC_INL_H_ + +#include + +namespace le_fx { + +namespace sigmod { + +template +int SearchIndex(const T x_data[], + T x, + int start_index, + int end_index) { + int start = start_index; + int end = end_index; + while (end > start + 1) { + int i = (end + start) / 2; + if (x_data[i] > x) { + end = i; + } else { + start = i; + } + } + return start; +} + +} // namespace sigmod + +} // namespace le_fx + +#endif // LE_FX_ENGINE_DSP_CORE_BASIC_INL_H_ diff --git a/media/libeffects/loudness/dsp/core/basic.h b/media/libeffects/loudness/dsp/core/basic.h new file mode 100644 index 0000000..27e0a8d --- /dev/null +++ b/media/libeffects/loudness/dsp/core/basic.h @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef LE_FX_ENGINE_DSP_CORE_BASIC_H_ +#define LE_FX_ENGINE_DSP_CORE_BASIC_H_ + +#include +#include "common/core/math.h" +#include "common/core/types.h" + +namespace le_fx { + +namespace sigmod { + +// Searchs for the interval that contains using a divide-and-conquer +// algorithm. +// X[]: a vector of sorted values (X[i+1] > X[i]) +// x: a value +// StartIndex: the minimum searched index +// EndIndex: the maximum searched index +// returns: the index that satisfies: X[i] <= x <= X[i+1] && +// StartIndex <= i <= (EndIndex-1) +template +int SearchIndex(const T x_data[], + T x, + int start_index, + int end_index); + +} // namespace sigmod + +} // namespace le_fx + +#include "dsp/core/basic-inl.h" + +#endif // LE_FX_ENGINE_DSP_CORE_BASIC_H_ diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h b/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h new file mode 100644 index 0000000..fed8c2a --- /dev/null +++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h @@ -0,0 +1,45 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_INL_H_ +#define LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_INL_H_ + +//#define LOG_NDEBUG 0 +#include + + +namespace le_fx { + + +inline void AdaptiveDynamicRangeCompression::set_knee_threshold(float decibel) { + // Converts to 1og-base + knee_threshold_in_decibel_ = decibel; + knee_threshold_ = 0.1151292546497023061569109358970308676362037658691406250f * + decibel + 10.39717719035538401328722102334722876548767089843750f; +} + + +inline void AdaptiveDynamicRangeCompression::set_knee_threshold_via_target_gain( + float target_gain) { + const float decibel = target_gain_to_knee_threshold_.Interpolate( + target_gain); + ALOGE("set_knee_threshold_via_target_gain: decibel =%.3f", decibel); + set_knee_threshold(decibel); +} + +} // namespace le_fx + + +#endif // LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_INL_H_ diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp b/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp new file mode 100644 index 0000000..2bbd043 --- /dev/null +++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp @@ -0,0 +1,106 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "common/core/math.h" +#include "common/core/types.h" +#include "dsp/core/basic.h" +#include "dsp/core/interpolation.h" +#include "dsp/core/dynamic_range_compression.h" + +//#define LOG_NDEBUG 0 +#include + + +namespace le_fx { + +// Definitions for static const class members declared in +// dynamic_range_compression.h. +const float AdaptiveDynamicRangeCompression::kMinAbsValue = 0.000001f; +const float AdaptiveDynamicRangeCompression::kMinLogAbsValue = + 0.032766999999999997517097227728299912996590137481689453125f; +const float AdaptiveDynamicRangeCompression::kFixedPointLimit = 32767.0f; +const float AdaptiveDynamicRangeCompression::kInverseFixedPointLimit = + 1.0f / AdaptiveDynamicRangeCompression::kFixedPointLimit; +const float AdaptiveDynamicRangeCompression::kDefaultKneeThresholdInDecibel = + -8.0f; +const float AdaptiveDynamicRangeCompression::kCompressionRatio = 7.0f; +const float AdaptiveDynamicRangeCompression::kTauAttack = 0.001f; +const float AdaptiveDynamicRangeCompression::kTauRelease = 0.015f; + +AdaptiveDynamicRangeCompression::AdaptiveDynamicRangeCompression() { + static const float kTargetGain[] = { + 1.0f, 2.0f, 3.0f, 4.0f, 5.0f }; + static const float kKneeThreshold[] = { + -8.0f, -8.0f, -8.5f, -9.0f, -10.0f }; + target_gain_to_knee_threshold_.Initialize( + &kTargetGain[0], &kKneeThreshold[0], + sizeof(kTargetGain) / sizeof(kTargetGain[0])); +} + +bool AdaptiveDynamicRangeCompression::Initialize( + float target_gain, float sampling_rate) { + set_knee_threshold_via_target_gain(target_gain); + sampling_rate_ = sampling_rate; + state_ = 0.0f; + compressor_gain_ = 1.0f; + if (kTauAttack > 0.0f) { + const float taufs = kTauAttack * sampling_rate_; + alpha_attack_ = std::exp(-1.0f / taufs); + } else { + alpha_attack_ = 0.0f; + } + if (kTauRelease > 0.0f) { + const float taufs = kTauRelease * sampling_rate_; + alpha_release_ = std::exp(-1.0f / taufs); + } else { + alpha_release_ = 0.0f; + } + // Feed-forward topology + slope_ = 1.0f / kCompressionRatio - 1.0f; + return true; +} + +float AdaptiveDynamicRangeCompression::Compress(float x) { + const float max_abs_x = std::max(std::fabs(x), kMinLogAbsValue); + const float max_abs_x_dB = math::fast_log(max_abs_x); + // Subtract Threshold from log-encoded input to get the amount of overshoot + const float overshoot = max_abs_x_dB - knee_threshold_; + // Hard half-wave rectifier + const float rect = std::max(overshoot, 0.0f); + // Multiply rectified overshoot with slope + const float cv = rect * slope_; + const float prev_state = state_; + if (cv <= state_) { + state_ = alpha_attack_ * state_ + (1.0f - alpha_attack_) * cv; + } else { + state_ = alpha_release_ * state_ + (1.0f - alpha_release_) * cv; + } + compressor_gain_ *= + math::ExpApproximationViaTaylorExpansionOrder5(state_ - prev_state); + x *= compressor_gain_; + if (x > kFixedPointLimit) { + return kFixedPointLimit; + } + if (x < -kFixedPointLimit) { + return -kFixedPointLimit; + } + return x; +} + +} // namespace le_fx + diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression.h 
b/media/libeffects/loudness/dsp/core/dynamic_range_compression.h new file mode 100644 index 0000000..4c015df --- /dev/null +++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression.h @@ -0,0 +1,116 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_H_ +#define LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_H_ + +#include "common/core/types.h" +#include "common/core/math.h" +#include "dsp/core/basic.h" +#include "dsp/core/interpolation.h" + +//#define LOG_NDEBUG 0 +#include + + +namespace le_fx { + +// An adaptive dynamic range compression algorithm. The gain adaptation is made +// at the logarithmic domain and it is based on a Branching-Smooth compensated +// digital peak detector with different time constants for attack and release. +class AdaptiveDynamicRangeCompression { + public: + AdaptiveDynamicRangeCompression(); + + // Initializes the compressor using prior information. It assumes that the + // input signal is speech from high-quality recordings that is scaled and then + // fed to the compressor. The compressor is tuned according to the target gain + // that is expected to be applied. + // + // Target gain receives values between 0.0 and 10.0. The knee threshold is + // reduced as the target gain increases in order to fit the increased range of + // values. + // + // Values between 1.0 and 2.0 will only mildly affect your signal. Higher + // values will reduce the dynamic range of the signal to the benefit of + // increased loudness. + // + // If nothing is known regarding the input, a `target_gain` of 1.0f is a + // relatively safe choice for many signals. + bool Initialize(float target_gain, float sampling_rate); + + // A fast version of the algorithm that uses approximate computations for the + // log(.) and exp(.). + float Compress(float x); + + // This version is slower than Compress(.) but faster than CompressSlow(.) + float CompressNormalSpeed(float x); + + // A slow version of the algorithm that is easier for further developement, + // tuning and debugging + float CompressSlow(float x); + + // Sets knee threshold (in decibel). + void set_knee_threshold(float decibel); + + // Sets knee threshold via the target gain using an experimentally derived + // relationship. + void set_knee_threshold_via_target_gain(float target_gain); + + private: + // The minimum accepted absolute input value and it's natural logarithm. This + // is to prevent numerical issues when the input is close to zero + static const float kMinAbsValue; + static const float kMinLogAbsValue; + // Fixed-point arithmetic limits + static const float kFixedPointLimit; + static const float kInverseFixedPointLimit; + // The default knee threshold in decibel. 
The knee threshold defines when the + // compressor is actually starting to compress the value of the input samples + static const float kDefaultKneeThresholdInDecibel; + // The compression ratio is the reciprocal of the slope of the line segment + // above the threshold (in the log-domain). The ratio controls the + // effectiveness of the compression. + static const float kCompressionRatio; + // The attack time of the envelope detector + static const float kTauAttack; + // The release time of the envelope detector + static const float kTauRelease; + + float sampling_rate_; + // the internal state of the envelope detector + float state_; + // the latest gain factor that was applied to the input signal + float compressor_gain_; + // attack constant for exponential dumping + float alpha_attack_; + // release constant for exponential dumping + float alpha_release_; + float slope_; + // The knee threshold + float knee_threshold_; + float knee_threshold_in_decibel_; + // This interpolator provides the function that relates target gain to knee + // threshold. + sigmod::InterpolatorLinear target_gain_to_knee_threshold_; + + LE_FX_DISALLOW_COPY_AND_ASSIGN(AdaptiveDynamicRangeCompression); +}; + +} // namespace le_fx + +#include "dsp/core/dynamic_range_compression-inl.h" + +#endif // LE_FX_ENGINE_DSP_CORE_DYNAMIC_RANGE_COMPRESSION_H_ diff --git a/media/libeffects/loudness/dsp/core/interpolation.h b/media/libeffects/loudness/dsp/core/interpolation.h new file mode 100644 index 0000000..23c287c --- /dev/null +++ b/media/libeffects/loudness/dsp/core/interpolation.h @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATION_H_ +#define LE_FX_ENGINE_DSP_CORE_INTERPOLATION_H_ + +#include "common/core/math.h" +#include "dsp/core/interpolator_base.h" +#include "dsp/core/interpolator_linear.h" + +#endif // LE_FX_ENGINE_DSP_CORE_INTERPOLATION_H_ + diff --git a/media/libeffects/loudness/dsp/core/interpolator_base-inl.h b/media/libeffects/loudness/dsp/core/interpolator_base-inl.h new file mode 100644 index 0000000..bd08b65 --- /dev/null +++ b/media/libeffects/loudness/dsp/core/interpolator_base-inl.h @@ -0,0 +1,180 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
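With the class declaration above in hand, a minimal usage sketch of the compressor follows (standalone illustration, not part of the patch; it assumes per-sample processing of fixed-point-scaled mono samples, which is consistent with the kFixedPointLimit clamp in Compress()):

#include <cstddef>
#include <cstdint>
#include "dsp/core/dynamic_range_compression.h"

void CompressBufferExample(int16_t* samples, size_t frame_count) {
    le_fx::AdaptiveDynamicRangeCompression drc;
    // Target gain is expected in [0.0, 10.0]; larger values compress harder.
    if (!drc.Initialize(3.0f /* target_gain */, 44100.0f /* sampling_rate */)) {
        return;
    }
    for (size_t i = 0; i < frame_count; ++i) {
        // Compress() takes and returns values in [-32767.0, 32767.0].
        samples[i] = static_cast<int16_t>(
                drc.Compress(static_cast<float>(samples[i])));
    }
}
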
+ */ + +#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_INL_H_ +#define LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_INL_H_ + +#include "dsp/core/basic.h" + +//#define LOG_NDEBUG 0 +#include + + +namespace le_fx { + +namespace sigmod { + +template +InterpolatorBase::InterpolatorBase() { + status_ = false; + cached_index_ = 0; + x_data_ = NULL; + y_data_ = NULL; + data_length_ = 0; + own_x_data_ = false; + x_start_offset_ = 0.0; + last_element_index_ = -1; + x_inverse_sampling_interval_ = 0.0; + state_ = NULL; +} + +template +InterpolatorBase::~InterpolatorBase() { + delete [] state_; + if (own_x_data_) { + delete [] x_data_; + } +} + +template +bool InterpolatorBase::Initialize(const vector &x_data, + const vector &y_data) { +#ifndef NDEBUG + if (x_data.size() != y_data.size()) { + LoggerError("InterpolatorBase::Initialize: xData size (%d) != yData size" + " (%d)", x_data.size(), y_data.size()); + } +#endif + return Initialize(&x_data[0], &y_data[0], x_data.size()); +} + +template +bool InterpolatorBase::Initialize(double x_start_offset, + double x_sampling_interval, + const vector &y_data) { + return Initialize(x_start_offset, + x_sampling_interval, + &y_data[0], + y_data.size()); +} + +template +bool InterpolatorBase::Initialize(double x_start_offset, + double x_sampling_interval, + const T *y_data, + int data_length) { + // Constructs and populate x-axis data: `x_data_` + T *x_data_tmp = new T[data_length]; + float time_offset = x_start_offset; + for (int n = 0; n < data_length; n++) { + x_data_tmp[n] = time_offset; + time_offset += x_sampling_interval; + } + Initialize(x_data_tmp, y_data, data_length); + // Sets-up the regularly sampled interpolation mode + x_start_offset_ = x_start_offset; + x_inverse_sampling_interval_ = 1.0 / x_sampling_interval; + own_x_data_ = true; + return status_; +} + + +template +bool InterpolatorBase::Initialize( + const T *x_data, const T *y_data, int data_length) { + // Default settings + cached_index_ = 0; + data_length_ = 0; + x_start_offset_ = 0; + x_inverse_sampling_interval_ = 0; + state_ = NULL; + // Input data is externally owned + own_x_data_ = false; + x_data_ = x_data; + y_data_ = y_data; + data_length_ = data_length; + last_element_index_ = data_length - 1; + // Check input data sanity + for (int n = 0; n < last_element_index_; ++n) { + if (x_data_[n + 1] <= x_data_[n]) { + ALOGE("InterpolatorBase::Initialize: xData are not ordered or " + "contain equal values (X[%d] <= X[%d]) (%.5e <= %.5e)", + n + 1, n, x_data_[n + 1], x_data_[n]); + status_ = false; + return false; + } + } + // Pre-compute internal state by calling the corresponding function of the + // derived class. 
+ status_ = static_cast(this)->SetInternalState(); + return status_; +} + +template +T InterpolatorBase::Interpolate(T x) { +#ifndef NDEBUG + if (cached_index_ < 0 || cached_index_ > data_length_ - 2) { + LoggerError("InterpolatorBase:Interpolate: CachedIndex_ out of bounds " + "[0, %d, %d]", cached_index_, data_length_ - 2); + } +#endif + // Search for the containing interval + if (x <= x_data_[cached_index_]) { + if (cached_index_ <= 0) { + cached_index_ = 0; + return y_data_[0]; + } + if (x >= x_data_[cached_index_ - 1]) { + cached_index_--; // Fast descending + } else { + if (x <= x_data_[0]) { + cached_index_ = 0; + return y_data_[0]; + } + cached_index_ = SearchIndex(x_data_, x, 0, cached_index_); + } + } else { + if (cached_index_ >= last_element_index_) { + cached_index_ = last_element_index_; + return y_data_[last_element_index_]; + } + if (x > x_data_[cached_index_ + 1]) { + if (cached_index_ + 2 > last_element_index_) { + cached_index_ = last_element_index_ - 1; + return y_data_[last_element_index_]; + } + if (x <= x_data_[cached_index_ + 2]) { + cached_index_++; // Fast ascending + } else { + if (x >= x_data_[last_element_index_]) { + cached_index_ = last_element_index_ - 1; + return y_data_[last_element_index_]; + } + cached_index_ = SearchIndex( + x_data_, x, cached_index_, last_element_index_); + } + } + } + // Compute interpolated value by calling the corresponding function of the + // derived class. + return static_cast(this)->MethodSpecificInterpolation(x); +} + +} // namespace sigmod + +} // namespace le_fx + +#endif // LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_INL_H_ diff --git a/media/libeffects/loudness/dsp/core/interpolator_base.h b/media/libeffects/loudness/dsp/core/interpolator_base.h new file mode 100644 index 0000000..0cd1a35 --- /dev/null +++ b/media/libeffects/loudness/dsp/core/interpolator_base.h @@ -0,0 +1,112 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_H_ +#define LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_H_ + +#include "common/core/types.h" + +namespace le_fx { + +namespace sigmod { + +// Interpolation base-class that provides the interface, while it is the derived +// class that provides the specific interpolation algorithm. The following list +// of interpolation algorithms are currently present: +// +// InterpolationSine: weighted interpolation between y_data[n] and +// y_data[n+1] using a sin(.) weighting factor from +// 0 to pi/4. +// InterpolationLinear: linear interpolation +// InterpolationSplines: spline-based interpolation +// +// Example (using derived spline-based interpolation class): +// InterpolatorSplines interp(x_data, y_data, data_length); +// for (int n = 0; n < data_length; n++) Y[n] = interp.Interpolate(X[n]); +// +template +class InterpolatorBase { + public: + InterpolatorBase(); + ~InterpolatorBase(); + + // Generic random-access interpolation with arbitrary spaced x-axis samples. 
+ // Below X[0], the interpolator returns Y[0]. Above X[data_length-1], it + // returns Y[data_length-1]. + T Interpolate(T x); + + bool get_status() const { + return status_; + } + + // Initializes internal buffers. + // x_data: [(data_length)x1] x-axis coordinates (searching axis) + // y_data: [(data_length)x1] y-axis coordinates (interpolation axis) + // data_length: number of points + // returns `true` if everything is ok, `false`, otherwise + bool Initialize(const T *x_data, const T *y_data, int data_length); + + // Initializes internal buffers. + // x_data: x-axis coordinates (searching axis) + // y_data: y-axis coordinates (interpolating axis) + // returns `true` if everything is ok, `false`, otherwise + bool Initialize(const vector &x_data, const vector &y_data); + + // Initialization for regularly sampled sequences, where: + // x_data[i] = x_start_offset + i * x_sampling_interval + bool Initialize(double x_start_offset, + double x_sampling_interval, + const vector &y_data); + + // Initialization for regularly sampled sequences, where: + // x_data[i] = x_start_offset + i * x_sampling_interval + bool Initialize(double x_start_offset, + double x_sampling_interval, + const T *y_data, + int data_length); + + protected: + // Is set to false if something goes wrong, and to true if everything is ok. + bool status_; + + // The start-index of the previously searched interval + int cached_index_; + + // Data points + const T *x_data_; // Externally or internally owned, depending on own_x_data_ + const T *y_data_; // Externally owned (always) + int data_length_; + // Index of the last element `data_length_ - 1` kept here for optimization + int last_element_index_; + bool own_x_data_; + // For regularly-samples sequences, keep only the boundaries and the intervals + T x_start_offset_; + float x_inverse_sampling_interval_; + + // Algorithm state (internally owned) + double *state_; + + private: + LE_FX_DISALLOW_COPY_AND_ASSIGN(InterpolatorBase); +}; + +} // namespace sigmod + +} // namespace le_fx + +#include "dsp/core/interpolator_base-inl.h" + +#endif // LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_BASE_H_ diff --git a/media/libeffects/loudness/dsp/core/interpolator_linear.h b/media/libeffects/loudness/dsp/core/interpolator_linear.h new file mode 100644 index 0000000..434698a --- /dev/null +++ b/media/libeffects/loudness/dsp/core/interpolator_linear.h @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_LINEAR_H_ +#define LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_LINEAR_H_ + +#include +#include "dsp/core/interpolator_base.h" + +namespace le_fx { + +namespace sigmod { + +// Linear interpolation class. 
+// +// The main functionality of this class is provided by it's base-class, so +// please refer to: InterpolatorBase +// +// Example: +// InterpolatorLinear interp(x_data, y_data, data_length); +// for (int n = 0; n < data_length; n++) Y[n] = interp.Interpolate(X[n]); +// +template +class InterpolatorLinear: public InterpolatorBase > { + public: + InterpolatorLinear() { } + ~InterpolatorLinear() { } + + protected: + // Provides the main implementation of the linear interpolation algorithm. + // Assumes that: X[cached_index_] < x < X[cached_index_ + 1] + T MethodSpecificInterpolation(T x); + + // Pre-compute internal state_ parameters. + bool SetInternalState(); + + private: + friend class InterpolatorBase >; + typedef InterpolatorBase > BaseClass; + using BaseClass::status_; + using BaseClass::cached_index_; + using BaseClass::x_data_; + using BaseClass::y_data_; + using BaseClass::data_length_; + using BaseClass::state_; + + LE_FX_DISALLOW_COPY_AND_ASSIGN(InterpolatorLinear); +}; + +template +inline T InterpolatorLinear::MethodSpecificInterpolation(T x) { + T dX = x_data_[cached_index_ + 1] - x_data_[cached_index_]; + T dY = y_data_[cached_index_ + 1] - y_data_[cached_index_]; + T dx = x - x_data_[cached_index_]; + return y_data_[cached_index_] + (dY * dx) / dX; +} + +template +bool InterpolatorLinear::SetInternalState() { + state_ = NULL; + return true; +} + +} // namespace sigmod + +} // namespace le_fx + +#endif // LE_FX_ENGINE_DSP_CORE_INTERPOLATOR_LINEAR_H_ -- cgit v1.1 From 530fdbdc1b5491f3fbf172752834d1515701e142 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Tue, 17 Sep 2013 19:07:40 -0700 Subject: Use changed MediaCodec.PARAMETER_KEY_VIDEO_BITRATE value Change-Id: I157bcafbf705865e66c81517b1eab10c3daa039e Signed-off-by: Lajos Molnar Bug: 10461617 --- media/libstagefright/ACodec.cpp | 2 +- media/libstagefright/wifi-display/source/Converter.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index 5c3abd0..bfb730c 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -4134,7 +4134,7 @@ bool ACodec::ExecutingState::onMessageReceived(const sp &msg) { status_t ACodec::setParameters(const sp ¶ms) { int32_t videoBitrate; - if (params->findInt32("videoBitrate", &videoBitrate)) { + if (params->findInt32("video-bitrate", &videoBitrate)) { OMX_VIDEO_CONFIG_BITRATETYPE configParams; InitOMXParams(&configParams); configParams.nPortIndex = kPortIndexOutput; diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index 6f23854..753b3ec 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -833,7 +833,7 @@ int32_t Converter::getVideoBitrate() const { void Converter::setVideoBitrate(int32_t bitRate) { if (mIsVideo && mEncoder != NULL && bitRate != mPrevVideoBitrate) { sp params = new AMessage; - params->setInt32("videoBitrate", bitRate); + params->setInt32("video-bitrate", bitRate); mEncoder->setParameters(params); -- cgit v1.1 From 8d0fda9660aee7059f802f400875247b01226084 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 18 Sep 2013 10:33:39 -0700 Subject: Partial fix for SoundPool not terminating SoundPool was waiting for EVENT_UNDERRUN only to indicate end of clip. In J, AudioTrack delivered both EVENT_UNDERRUN followed by EVENT_BUFFER_END. 
However, as of K, AudioTrack is only delivering EVENT_BUFFER_END (this lack of EVENT_UNDERRUN is another bug which still needs to be fixed). The workaround is to also respond to EVENT_BUFFER_END in SoundPool. Bug: 10787103 Change-Id: Id68a23bddd6dd9df6c49c55138197260d71ca468 --- media/libmedia/SoundPool.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index 0985164..5239b2f 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -740,8 +740,8 @@ void SoundChannel::process(int event, void *info, unsigned long toggle) b->size = count; //ALOGV("buffer=%p, [0]=%d", b->i16, b->i16[0]); } - } else if (event == AudioTrack::EVENT_UNDERRUN) { - ALOGV("process %p channel %d EVENT_UNDERRUN", this, mChannelID); + } else if (event == AudioTrack::EVENT_UNDERRUN || event == AudioTrack::EVENT_BUFFER_END) { + ALOGV("process %p channel %d EVENT_UNDERRUN or EVENT_BUFFER_END", this, mChannelID); mSoundPool->addToStopList(this); } else if (event == AudioTrack::EVENT_LOOP_END) { ALOGV("End loop %p channel %d count %d", this, mChannelID, *(int *)info); -- cgit v1.1 From 5baf2af52cd186633b7173196c1e4a4cd3435f22 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Thu, 12 Sep 2013 17:37:00 -0700 Subject: more support for audio effect offload Offloading of audio effects is now enabled for offloaded output threads. If an effect not supporting offload is enabled, the AudioTrack is invalidated so that it can be recreated in PCM mode. Fix some issues in effect proxy related to handling of effect commands to offloaded and non offloaded effects. Also fixed a bug on capture index in software Visualizer effect. Bug: 8174034. Change-Id: Ib23d3c2d5a652361b0aaec7faee09102f2b18fce --- media/libeffects/proxy/EffectProxy.cpp | 59 +++++++++++++--- media/libeffects/visualizer/EffectVisualizer.cpp | 4 +- services/audioflinger/AudioFlinger.cpp | 89 ++++++++++++++---------- services/audioflinger/AudioFlinger.h | 6 +- services/audioflinger/Effects.cpp | 49 +++++++++++-- services/audioflinger/Effects.h | 9 ++- services/audioflinger/Threads.cpp | 30 ++++++-- services/audioflinger/Tracks.cpp | 10 ++- 8 files changed, 186 insertions(+), 70 deletions(-) diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp index 77c6e89..41640da 100644 --- a/media/libeffects/proxy/EffectProxy.cpp +++ b/media/libeffects/proxy/EffectProxy.cpp @@ -48,6 +48,21 @@ static const effect_descriptor_t *const gDescriptors[] = &gProxyDescriptor, }; +static inline bool isGetterCmd(uint32_t cmdCode) +{ + switch (cmdCode) { + case EFFECT_CMD_GET_PARAM: + case EFFECT_CMD_GET_CONFIG: + case EFFECT_CMD_GET_CONFIG_REVERSE: + case EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS: + case EFFECT_CMD_GET_FEATURE_CONFIG: + return true; + default: + return false; + } +} + + int EffectProxyCreate(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, @@ -155,7 +170,6 @@ int Effect_process(effect_handle_t self, int index = pContext->index; // if the index refers to HW , do not do anything. Just return. 
if (index == SUB_FX_HOST) { - ALOGV("Calling CoreProcess"); ret = (*pContext->eHandle[index])->process(pContext->eHandle[index], inBuffer, outBuffer); } @@ -172,7 +186,7 @@ int Effect_command(effect_handle_t self, void *pReplyData) { EffectContext *pContext = (EffectContext *) self; - int status; + int status = 0; if (pContext == NULL) { ALOGV("Effect_command() Proxy context is NULL"); return -EINVAL; @@ -237,23 +251,46 @@ int Effect_command(effect_handle_t self, ALOGV("Effect_command: effect index is neither offload nor host"); return -EINVAL; } - ALOGV("Effect_command: pContext->eHandle[%d]: %p", - index, pContext->eHandle[index]); - if (pContext->eHandle[SUB_FX_HOST]) - (*pContext->eHandle[SUB_FX_HOST])->command( + + // Getter commands are only sent to the active sub effect. + uint32_t hostReplySize = replySize != NULL ? *replySize : 0; + bool hostReplied = false; + int hostStatus = 0; + uint32_t offloadReplySize = replySize != NULL ? *replySize : 0; + bool offloadReplied = false; + int offloadStatus = 0; + + if (pContext->eHandle[SUB_FX_HOST] && (!isGetterCmd(cmdCode) || index == SUB_FX_HOST)) { + hostStatus = (*pContext->eHandle[SUB_FX_HOST])->command( pContext->eHandle[SUB_FX_HOST], cmdCode, cmdSize, - pCmdData, replySize, pReplyData); - if (pContext->eHandle[SUB_FX_OFFLOAD]) { + pCmdData, replySize != NULL ? &hostReplySize : NULL, pReplyData); + hostReplied = true; + } + if (pContext->eHandle[SUB_FX_OFFLOAD] && (!isGetterCmd(cmdCode) || index == SUB_FX_OFFLOAD)) { // In case of SET CMD, when the offload stream is unavailable, // we will store the effect param values in the DSP effect wrapper. // When the offload effects get enabled, we send these values to the // DSP during Effect_config. // So,we send the params to DSP wrapper also - (*pContext->eHandle[SUB_FX_OFFLOAD])->command( + offloadStatus = (*pContext->eHandle[SUB_FX_OFFLOAD])->command( pContext->eHandle[SUB_FX_OFFLOAD], cmdCode, cmdSize, - pCmdData, replySize, pReplyData); + pCmdData, replySize != NULL ? 
&offloadReplySize : NULL, pReplyData); + offloadReplied = true; } - return 0; + // By convention the offloaded implementation reply is returned if command is processed by both + // host and offloaded sub effects + if (offloadReplied){ + status = offloadStatus; + if (replySize) { + *replySize = offloadReplySize; + } + } else if (hostReplied) { + status = hostStatus; + if (replySize) { + *replySize = hostReplySize; + } + } + return status; } /* end Effect_command */ diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp index e7eccf1..96935e3 100644 --- a/media/libeffects/visualizer/EffectVisualizer.cpp +++ b/media/libeffects/visualizer/EffectVisualizer.cpp @@ -61,7 +61,7 @@ struct VisualizerContext { uint32_t mCaptureSize; uint32_t mScalingMode; uint8_t mState; - uint8_t mLastCaptureIdx; + uint32_t mLastCaptureIdx; uint32_t mLatency; struct timespec mBufferUpdateTime; uint8_t mCaptureBuf[CAPTURE_BUF_SIZE]; @@ -499,7 +499,7 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, memcpy(pReplyData, pContext->mCaptureBuf + CAPTURE_BUF_SIZE + capturePoint, size); - pReplyData += size; + pReplyData = (char *)pReplyData + size; captureSize -= size; capturePoint = 0; } diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 626b5c2..8fbac42 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -98,7 +98,6 @@ size_t AudioFlinger::mTeeSinkOutputFrames = kTeeSinkOutputFramesDefault; size_t AudioFlinger::mTeeSinkTrackFrames = kTeeSinkTrackFramesDefault; #endif -//TODO: remove when effect offload is implemented // In order to avoid invalidating offloaded tracks each time a Visualizer is turned on and off // we define a minimum time during which a global effect is considered enabled. static const nsecs_t kMinGlobalEffectEnabletimeNs = seconds(7200); @@ -2084,20 +2083,6 @@ sp AudioFlinger::createEffect( goto Exit; } - if (io == 0) { - if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) { - // output must be specified by AudioPolicyManager when using session - // AUDIO_SESSION_OUTPUT_STAGE - lStatus = BAD_VALUE; - goto Exit; - } else if (sessionId == AUDIO_SESSION_OUTPUT_MIX) { - // if the output returned by getOutputForEffect() is removed before we lock the - // mutex below, the call to checkPlaybackThread_l(io) below will detect it - // and we will exit safely - io = AudioSystem::getOutputForEffect(&desc); - } - } - { Mutex::Autolock _l(mLock); @@ -2181,20 +2166,35 @@ sp AudioFlinger::createEffect( // because of code checking output when entering the function. 
// Note: io is never 0 when creating an effect on an input if (io == 0) { - // look for the thread where the specified audio session is present - for (size_t i = 0; i < mPlaybackThreads.size(); i++) { - if (mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId) != 0) { - io = mPlaybackThreads.keyAt(i); - break; - } + if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) { + // output must be specified by AudioPolicyManager when using session + // AUDIO_SESSION_OUTPUT_STAGE + lStatus = BAD_VALUE; + goto Exit; + } + if (sessionId == AUDIO_SESSION_OUTPUT_MIX) { + // if the output returned by getOutputForEffect() is removed before we lock the + // mutex below, the call to checkPlaybackThread_l(io) below will detect it + // and we will exit safely + io = AudioSystem::getOutputForEffect(&desc); + ALOGV("createEffect got output %d", io); } if (io == 0) { - for (size_t i = 0; i < mRecordThreads.size(); i++) { - if (mRecordThreads.valueAt(i)->hasAudioSession(sessionId) != 0) { - io = mRecordThreads.keyAt(i); + // look for the thread where the specified audio session is present + for (size_t i = 0; i < mPlaybackThreads.size(); i++) { + if (mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId) != 0) { + io = mPlaybackThreads.keyAt(i); break; } } + if (io == 0) { + for (size_t i = 0; i < mRecordThreads.size(); i++) { + if (mRecordThreads.valueAt(i)->hasAudioSession(sessionId) != 0) { + io = mRecordThreads.keyAt(i); + break; + } + } + } } // If no output thread contains the requested session ID, default to // first output. The effect chain will be moved to the correct output @@ -2254,9 +2254,7 @@ status_t AudioFlinger::moveEffects(int sessionId, audio_io_handle_t srcOutput, Mutex::Autolock _dl(dstThread->mLock); Mutex::Autolock _sl(srcThread->mLock); - moveEffectChain_l(sessionId, srcThread, dstThread, false); - - return NO_ERROR; + return moveEffectChain_l(sessionId, srcThread, dstThread, false); } // moveEffectChain_l must be called with both srcThread and dstThread mLocks held @@ -2283,13 +2281,18 @@ status_t AudioFlinger::moveEffectChain_l(int sessionId, // transfer all effects one by one so that new effect chain is created on new thread with // correct buffer sizes and audio parameters and effect engines reconfigured accordingly - audio_io_handle_t dstOutput = dstThread->id(); sp dstChain; uint32_t strategy = 0; // prevent compiler warning sp effect = chain->getEffectFromId_l(0); + Vector< sp > removed; + status_t status = NO_ERROR; while (effect != 0) { srcThread->removeEffect_l(effect); - dstThread->addEffect_l(effect); + removed.add(effect); + status = dstThread->addEffect_l(effect); + if (status != NO_ERROR) { + break; + } // removeEffect_l() has stopped the effect if it was active so it must be restarted if (effect->state() == EffectModule::ACTIVE || effect->state() == EffectModule::STOPPING) { @@ -2301,15 +2304,15 @@ status_t AudioFlinger::moveEffectChain_l(int sessionId, dstChain = effect->chain().promote(); if (dstChain == 0) { ALOGW("moveEffectChain_l() cannot get chain from effect %p", effect.get()); - srcThread->addEffect_l(effect); - return NO_INIT; + status = NO_INIT; + break; } strategy = dstChain->strategy(); } if (reRegister) { AudioSystem::unregisterEffect(effect->id()); AudioSystem::registerEffect(&effect->desc(), - dstOutput, + dstThread->id(), strategy, sessionId, effect->id()); @@ -2317,10 +2320,24 @@ status_t AudioFlinger::moveEffectChain_l(int sessionId, effect = chain->getEffectFromId_l(0); } - return NO_ERROR; + if (status != NO_ERROR) { + for (size_t i = 0; i < removed.size(); 
i++) { + srcThread->addEffect_l(removed[i]); + if (dstChain != 0 && reRegister) { + AudioSystem::unregisterEffect(removed[i]->id()); + AudioSystem::registerEffect(&removed[i]->desc(), + srcThread->id(), + strategy, + sessionId, + removed[i]->id()); + } + } + } + + return status; } -bool AudioFlinger::isGlobalEffectEnabled_l() +bool AudioFlinger::isNonOffloadableGlobalEffectEnabled_l() { if (mGlobalEffectEnableTime != 0 && ((systemTime() - mGlobalEffectEnableTime) < kMinGlobalEffectEnabletimeNs)) { @@ -2330,14 +2347,14 @@ bool AudioFlinger::isGlobalEffectEnabled_l() for (size_t i = 0; i < mPlaybackThreads.size(); i++) { sp ec = mPlaybackThreads.valueAt(i)->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX); - if (ec != 0 && ec->isEnabled()) { + if (ec != 0 && ec->isNonOffloadableEnabled()) { return true; } } return false; } -void AudioFlinger::onGlobalEffectEnable() +void AudioFlinger::onNonOffloadableGlobalEffectEnable() { Mutex::Autolock _l(mLock); diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 0992308..b41d480 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -466,9 +466,8 @@ private: void removeClient_l(pid_t pid); void removeNotificationClient(pid_t pid); - //TODO: remove when effect offload is implemented - bool isGlobalEffectEnabled_l(); - void onGlobalEffectEnable(); + bool isNonOffloadableGlobalEffectEnabled_l(); + void onNonOffloadableGlobalEffectEnable(); class AudioHwDevice { public: @@ -645,7 +644,6 @@ public: private: bool mIsLowRamDevice; bool mIsDeviceTypeKnown; - //TODO: remove when effect offload is implemented nsecs_t mGlobalEffectEnableTime; // when a global effect was last enabled }; diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp index 86671a9..0ca2107 100644 --- a/services/audioflinger/Effects.cpp +++ b/services/audioflinger/Effects.cpp @@ -764,6 +764,46 @@ bool AudioFlinger::EffectModule::purgeHandles() return enabled; } +status_t AudioFlinger::EffectModule::setOffloaded(bool offloaded, audio_io_handle_t io) +{ + Mutex::Autolock _l(mLock); + if (mStatus != NO_ERROR) { + return mStatus; + } + status_t status = NO_ERROR; + if ((mDescriptor.flags & EFFECT_FLAG_OFFLOAD_SUPPORTED) != 0) { + status_t cmdStatus; + uint32_t size = sizeof(status_t); + effect_offload_param_t cmd; + + cmd.isOffload = offloaded; + cmd.ioHandle = io; + status = (*mEffectInterface)->command(mEffectInterface, + EFFECT_CMD_OFFLOAD, + sizeof(effect_offload_param_t), + &cmd, + &size, + &cmdStatus); + if (status == NO_ERROR) { + status = cmdStatus; + } + mOffloaded = (status == NO_ERROR) ? 
offloaded : false; + } else { + if (offloaded) { + status = INVALID_OPERATION; + } + mOffloaded = false; + } + ALOGV("setOffloaded() offloaded %d io %d status %d", offloaded, io, status); + return status; +} + +bool AudioFlinger::EffectModule::isOffloaded() const +{ + Mutex::Autolock _l(mLock); + return mOffloaded; +} + void AudioFlinger::EffectModule::dump(int fd, const Vector& args) { const size_t SIZE = 256; @@ -932,14 +972,13 @@ status_t AudioFlinger::EffectHandle::enable() } mEnabled = false; } else { - //TODO: remove when effect offload is implemented - if (thread != 0) { + if (thread != 0 && !mEffect->isOffloadable()) { if ((thread->type() == ThreadBase::OFFLOAD)) { PlaybackThread *t = (PlaybackThread *)thread.get(); t->invalidateTracks(AUDIO_STREAM_MUSIC); } if (mEffect->sessionId() == AUDIO_SESSION_OUTPUT_MIX) { - thread->mAudioFlinger->onGlobalEffectEnable(); + thread->mAudioFlinger->onNonOffloadableGlobalEffectEnable(); } } } @@ -1728,12 +1767,12 @@ void AudioFlinger::EffectChain::checkSuspendOnEffectEnabled(const spisEnabled()) { + if (mEffects[i]->isEnabled() && !mEffects[i]->isOffloadable()) { return true; } } diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h index bac50f2..c35cff0 100644 --- a/services/audioflinger/Effects.h +++ b/services/audioflinger/Effects.h @@ -111,6 +111,10 @@ public: bool purgeHandles(); void lock() { mLock.lock(); } void unlock() { mLock.unlock(); } + bool isOffloadable() const + { return (mDescriptor.flags & EFFECT_FLAG_OFFLOAD_SUPPORTED) != 0; } + status_t setOffloaded(bool offloaded, audio_io_handle_t io); + bool isOffloaded() const; void dump(int fd, const Vector& args); @@ -144,6 +148,7 @@ mutable Mutex mLock; // mutex for process, commands and handl // sending disable command. uint32_t mDisableWaitCnt; // current process() calls count during disable period. bool mSuspended; // effect is suspended: temporarily disabled by framework + bool mOffloaded; // effect is currently offloaded to the audio DSP }; // The EffectHandle class implements the IEffect interface. 
It provides resources @@ -303,8 +308,8 @@ public: void clearInputBuffer(); - // At least one effect in the chain is enabled - bool isEnabled(); + // At least one non offloadable effect in the chain is enabled + bool isNonOffloadableEnabled(); void dump(int fd, const Vector& args); diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 4234965..7d0ecac 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -702,14 +702,22 @@ sp AudioFlinger::ThreadBase::createEffect_l( goto Exit; } - // Do not allow effects with session ID 0 on direct output or duplicating threads - // TODO: add rule for hw accelerated effects on direct outputs with non PCM format - if (sessionId == AUDIO_SESSION_OUTPUT_MIX && mType != MIXER) { - ALOGW("createEffect_l() Cannot add auxiliary effect %s to session %d", - desc->name, sessionId); - lStatus = BAD_VALUE; - goto Exit; + // Allow global effects only on offloaded and mixer threads + if (sessionId == AUDIO_SESSION_OUTPUT_MIX) { + switch (mType) { + case MIXER: + case OFFLOAD: + break; + case DIRECT: + case DUPLICATING: + case RECORD: + default: + ALOGW("createEffect_l() Cannot add global effect %s on thread %s", desc->name, mName); + lStatus = BAD_VALUE; + goto Exit; + } } + // Only Pre processor effects are allowed on input threads and only on input threads if ((mType == RECORD) != ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC)) { ALOGW("createEffect_l() effect %s (flags %08x) created on wrong thread type %d", @@ -752,6 +760,8 @@ sp AudioFlinger::ThreadBase::createEffect_l( if (lStatus != NO_ERROR) { goto Exit; } + effect->setOffloaded(mType == OFFLOAD, mId); + lStatus = chain->addEffect_l(effect); if (lStatus != NO_ERROR) { goto Exit; @@ -813,6 +823,10 @@ status_t AudioFlinger::ThreadBase::addEffect_l(const sp& effect) sp chain = getEffectChain_l(sessionId); bool chainCreated = false; + ALOGD_IF((mType == OFFLOAD) && !effect->isOffloadable(), + "addEffect_l() on offloaded thread %p: effect %s does not support offload flags %x", + this, effect->desc().name, effect->desc().flags); + if (chain == 0) { // create a new chain for this session ALOGV("addEffect_l() new effect chain for session %d", sessionId); @@ -829,6 +843,8 @@ status_t AudioFlinger::ThreadBase::addEffect_l(const sp& effect) return BAD_VALUE; } + effect->setOffloaded(mType == OFFLOAD, mId); + status_t status = chain->addEffect_l(effect); if (status != NO_ERROR) { if (chainCreated) { diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 3b1874e..57aad1e 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -553,12 +553,12 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev sp thread = mThread.promote(); if (thread != 0) { - //TODO: remove when effect offload is implemented if (isOffloaded()) { Mutex::Autolock _laf(thread->mAudioFlinger->mLock); Mutex::Autolock _lth(thread->mLock); sp ec = thread->getEffectChain_l(mSessionId); - if (thread->mAudioFlinger->isGlobalEffectEnabled_l() || (ec != 0 && ec->isEnabled())) { + if (thread->mAudioFlinger->isNonOffloadableGlobalEffectEnabled_l() || + (ec != 0 && ec->isNonOffloadableEnabled())) { invalidate(); return PERMISSION_DENIED; } @@ -797,7 +797,11 @@ status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId) return INVALID_OPERATION; } srcThread->removeEffect_l(effect); - playbackThread->addEffect_l(effect); + status = playbackThread->addEffect_l(effect); + if 
(status != NO_ERROR) { + srcThread->addEffect_l(effect); + return INVALID_OPERATION; + } // removeEffect_l() has stopped the effect if it was active so it must be restarted if (effect->state() == EffectModule::ACTIVE || effect->state() == EffectModule::STOPPING) { -- cgit v1.1 From 8c617cdb672ff5c71db7bfacbc45b2545d0cfc60 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 18 Sep 2013 10:33:39 -0700 Subject: Partial fix for SoundPool not terminating SoundPool was waiting for EVENT_UNDERRUN only to indicate end of clip. In J, AudioTrack delivered both EVENT_UNDERRUN followed by EVENT_BUFFER_END. However, as of K, AudioTrack is only delivering EVENT_BUFFER_END (this lack of EVENT_UNDERRUN is another bug which still needs to be fixed). The workaround is to also respond to EVENT_BUFFER_END in SoundPool. Bug: 10787103 Change-Id: Id68a23bddd6dd9df6c49c55138197260d71ca468 --- media/libmedia/SoundPool.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index 0985164..5239b2f 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -740,8 +740,8 @@ void SoundChannel::process(int event, void *info, unsigned long toggle) b->size = count; //ALOGV("buffer=%p, [0]=%d", b->i16, b->i16[0]); } - } else if (event == AudioTrack::EVENT_UNDERRUN) { - ALOGV("process %p channel %d EVENT_UNDERRUN", this, mChannelID); + } else if (event == AudioTrack::EVENT_UNDERRUN || event == AudioTrack::EVENT_BUFFER_END) { + ALOGV("process %p channel %d EVENT_UNDERRUN or EVENT_BUFFER_END", this, mChannelID); mSoundPool->addToStopList(this); } else if (event == AudioTrack::EVENT_LOOP_END) { ALOGV("End loop %p channel %d count %d", this, mChannelID, *(int *)info); -- cgit v1.1 From 8bbbd7da02fac3de40139af19f7cf7a7cc3cc824 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 18 Sep 2013 14:15:42 -0700 Subject: Workaround slow AudioTrack destruction Bug: 10809586 Change-Id: I5f30d4deb1233e8ade8967568e40684ef680c395 --- media/libmedia/SoundPool.cpp | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index 5239b2f..37b400c 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -537,6 +537,18 @@ void SoundChannel::init(SoundPool* soundPool) mSoundPool = soundPool; } +// This class is used to destroy a RefBase asynchronously +class AsyncDestructThread : public Thread +{ +public: + AsyncDestructThread(sp refBase) : mRefBase(refBase) { } +protected: + virtual ~AsyncDestructThread() { } +private: + virtual bool threadLoop() { return false; } + const sp mRefBase; +}; + // call with sound pool lock held void SoundChannel::play(const sp& sample, int nextChannelID, float leftVolume, float rightVolume, int priority, int loop, float rate) @@ -641,6 +653,17 @@ exit: if (status != NO_ERROR) { mAudioTrack.clear(); } + // FIXME AudioTrack destruction should not be slow + if (oldTrack != 0) { + // must be a raw reference to avoid a race after run() + AsyncDestructThread *adt = new AsyncDestructThread(oldTrack); + // guaranteed to not run destructor + oldTrack.clear(); + // after the run(), adt thread will hold a strong reference to oldTrack, + // and the only strong reference to itself + adt->run("AsyncDestruct"); + // do not delete adt here: adt thread destroys itself, and oldTrack if needed + } } void SoundChannel::nextEvent() -- cgit v1.1 From d1b28d41dbda203ffb420ba2e36cbe736b163ff8 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: 
Wed, 18 Sep 2013 18:47:13 -0700 Subject: audioflniger: fix leaked audiosession references acquireAudioSessionId() should not create session references if the caller is not listed in the known notification clients. It happens when the MediaPlayer or AudioTrack is created by the mediaserver on behalf of a client (e.g CameraService). In this case releaseAudioSessionId() can be called from a different pid and the session reference will not be removed. Also we will not be able to remove the reference if the client process dies. Bug: 10606426. Change-Id: Ibb5e27518daeb484a2e66302e4c6221cbaca5b70 --- services/audioflinger/AudioFlinger.cpp | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 8fbac42..f6e4c6a 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -264,6 +264,12 @@ void AudioFlinger::dumpClients(int fd, const Vector& args) } } + result.append("Notification Clients:\n"); + for (size_t i = 0; i < mNotificationClients.size(); ++i) { + snprintf(buffer, SIZE, " pid: %d\n", mNotificationClients.keyAt(i)); + result.append(buffer); + } + result.append("Global session refs:\n"); result.append(" session pid count\n"); for (size_t i = 0; i < mAudioSessionRefs.size(); i++) { @@ -1850,6 +1856,16 @@ void AudioFlinger::acquireAudioSessionId(int audioSession) Mutex::Autolock _l(mLock); pid_t caller = IPCThreadState::self()->getCallingPid(); ALOGV("acquiring %d from %d", audioSession, caller); + + // Ignore requests received from processes not known as notification client. The request + // is likely proxied by mediaserver (e.g CameraService) and releaseAudioSessionId() can be + // called from a different pid leaving a stale session reference. Also we don't know how + // to clear this reference if the client process dies. + if (mNotificationClients.indexOfKey(caller) < 0) { + ALOGV("acquireAudioSessionId() unknown client %d for session %d", caller, audioSession); + return; + } + size_t num = mAudioSessionRefs.size(); for (size_t i = 0; i< num; i++) { AudioSessionRef *ref = mAudioSessionRefs.editItemAt(i); @@ -1882,7 +1898,9 @@ void AudioFlinger::releaseAudioSessionId(int audioSession) return; } } - ALOGW("session id %d not found for pid %d", audioSession, caller); + // If the caller is mediaserver it is likely that the session being released was acquired + // on behalf of a process not in notification clients and we ignore the warning. + ALOGW_IF(caller != getpid_cached, "session id %d not found for pid %d", audioSession, caller); } void AudioFlinger::purgeStaleEffects_l() { -- cgit v1.1 From f8b2a6f7dea06234c7966798d9363d2d236488a6 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 17 Sep 2013 17:03:28 -0700 Subject: camera2: Tell all streams to ignore global device UI rotation - Also use android.sensor.orientation to set the right transform flags automatically. 
Bug: 10804238 Change-Id: I10caf8331f19e107c461696963cc10f597c91d83 --- .../libcameraservice/api2/CameraDeviceClient.cpp | 80 ++++++++++++++++++++++ .../libcameraservice/api2/CameraDeviceClient.h | 3 + 2 files changed, 83 insertions(+) diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp index 055ea12..83466cb 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp @@ -360,6 +360,26 @@ status_t CameraDeviceClient::createStream(int width, int height, int format, ALOGV("%s: Camera %d: Successfully created a new stream ID %d", __FUNCTION__, mCameraId, streamId); + + /** + * Set the stream transform flags to automatically + * rotate the camera stream for preview use cases. + */ + int32_t transform = 0; + res = getRotationTransformLocked(&transform); + + if (res != OK) { + // Error logged by getRotationTransformLocked. + return res; + } + + res = mDevice->setStreamTransform(streamId, transform); + if (res != OK) { + ALOGE("%s: Failed to set stream transform (stream id %d)", + __FUNCTION__, streamId); + return res; + } + return streamId; } @@ -560,4 +580,64 @@ bool CameraDeviceClient::enforceRequestPermissions(CameraMetadata& metadata) { return true; } +status_t CameraDeviceClient::getRotationTransformLocked(int32_t* transform) { + ALOGV("%s: begin", __FUNCTION__); + + if (transform == NULL) { + ALOGW("%s: null transform", __FUNCTION__); + return BAD_VALUE; + } + + *transform = 0; + + const CameraMetadata& staticInfo = mDevice->info(); + camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_SENSOR_ORIENTATION); + if (entry.count == 0) { + ALOGE("%s: Camera %d: Can't find android.sensor.orientation in " + "static metadata!", __FUNCTION__, mCameraId); + return INVALID_OPERATION; + } + + int32_t& flags = *transform; + + int orientation = entry.data.i32[0]; + switch (orientation) { + case 0: + flags = 0; + break; + case 90: + flags = NATIVE_WINDOW_TRANSFORM_ROT_90; + break; + case 180: + flags = NATIVE_WINDOW_TRANSFORM_ROT_180; + break; + case 270: + flags = NATIVE_WINDOW_TRANSFORM_ROT_270; + break; + default: + ALOGE("%s: Invalid HAL android.sensor.orientation value: %d", + __FUNCTION__, orientation); + return INVALID_OPERATION; + } + + /** + * This magic flag makes surfaceflinger un-rotate the buffers + * to counter the extra global device UI rotation whenever the user + * physically rotates the device. + * + * By doing this, the camera buffer always ends up aligned + * with the physical camera for a "see through" effect. + * + * In essence, the buffer only gets rotated during preview use-cases. + * The user is still responsible to re-create streams of the proper + * aspect ratio, or the preview will end up looking non-uniformly + * stretched. 
+ */ + flags |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY; + + ALOGV("%s: final transform = 0x%x", __FUNCTION__, flags); + + return OK; +} + } // namespace android diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h index c6b6336..b490924 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.h +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h @@ -120,6 +120,9 @@ protected: const CameraMetadata& frame); virtual void detachDevice(); + // Calculate the ANativeWindow transform from android.sensor.orientation + status_t getRotationTransformLocked(/*out*/int32_t* transform); + private: /** ICameraDeviceUser interface-related private members */ -- cgit v1.1 From 253dfdb983611b8375c9e0b0483eda03fa146028 Mon Sep 17 00:00:00 2001 From: Andy McFadden Date: Thu, 19 Sep 2013 11:08:33 -0700 Subject: Rotate fallback size If configuring the encoder for display-size recording fails, we drop back to 720p. This was done a bit too literally, and didn't look good in portrait mode. Rotate the fallback size as needed. Bug 10826876 Change-Id: Id3130471fc1467afa8bf91d75c9d4a49245253c3 --- cmds/screenrecord/screenrecord.cpp | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp index 68289a5..49999b5 100644 --- a/cmds/screenrecord/screenrecord.cpp +++ b/cmds/screenrecord/screenrecord.cpp @@ -464,12 +464,16 @@ static status_t recordScreen(const char* fileName) { err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer); if (err != NO_ERROR && !gSizeSpecified) { - if (gVideoWidth != kFallbackWidth && gVideoHeight != kFallbackHeight) { + // fallback is defined for landscape; swap if we're in portrait + bool needSwap = gVideoWidth < gVideoHeight; + uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth; + uint32_t newHeight = needSwap ? 
kFallbackWidth : kFallbackHeight; + if (gVideoWidth != newWidth && gVideoHeight != newHeight) { ALOGV("Retrying with 720p"); - fprintf(stderr, "WARNING: failed at %dx%d, retrying at 720p\n", - gVideoWidth, gVideoHeight); - gVideoWidth = kFallbackWidth; - gVideoHeight = kFallbackHeight; + fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n", + gVideoWidth, gVideoHeight, newWidth, newHeight); + gVideoWidth = newWidth; + gVideoHeight = newHeight; err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer); } } -- cgit v1.1 From 6e86faa1752b0ff584e5c432245494e9fb288182 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 18 Sep 2013 14:15:42 -0700 Subject: Workaround slow AudioTrack destruction Bug: 10809586 Change-Id: I5f30d4deb1233e8ade8967568e40684ef680c395 --- media/libmedia/SoundPool.cpp | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index 5239b2f..37b400c 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -537,6 +537,18 @@ void SoundChannel::init(SoundPool* soundPool) mSoundPool = soundPool; } +// This class is used to destroy a RefBase asynchronously +class AsyncDestructThread : public Thread +{ +public: + AsyncDestructThread(sp refBase) : mRefBase(refBase) { } +protected: + virtual ~AsyncDestructThread() { } +private: + virtual bool threadLoop() { return false; } + const sp mRefBase; +}; + // call with sound pool lock held void SoundChannel::play(const sp& sample, int nextChannelID, float leftVolume, float rightVolume, int priority, int loop, float rate) @@ -641,6 +653,17 @@ exit: if (status != NO_ERROR) { mAudioTrack.clear(); } + // FIXME AudioTrack destruction should not be slow + if (oldTrack != 0) { + // must be a raw reference to avoid a race after run() + AsyncDestructThread *adt = new AsyncDestructThread(oldTrack); + // guaranteed to not run destructor + oldTrack.clear(); + // after the run(), adt thread will hold a strong reference to oldTrack, + // and the only strong reference to itself + adt->run("AsyncDestruct"); + // do not delete adt here: adt thread destroys itself, and oldTrack if needed + } } void SoundChannel::nextEvent() -- cgit v1.1 From ede6c3b8b1147bc425f7b923882f559a513fe23b Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Thu, 19 Sep 2013 14:37:46 -0700 Subject: audioflinger: fix lost offload thread resume event It was possible that a resume request signaled by addTrack_l() while waiting for an async write callback is lost. This is because mSignalPending was not set and waitingAsyncCallback_l() would pause the thread loop before executing prepareTracks_l(). The fix consists in using signal_l() method to wake the thread loop o that mSignalPending is set. Also make sure that sleepTime is reset to 0 when resuming to make sure that we write any remaining bytes to the HAL. Bug: 10810347. 
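The race described here is the classic lost-wakeup problem: a condition-variable signal raised while the consumer thread is not blocked on that condition variable has no effect unless the request is also recorded in state the consumer rechecks. A generic sketch of the pattern the fix relies on (illustrative names only, not the actual AudioFlinger members):

#include <condition_variable>
#include <mutex>

static std::mutex gLock;
static std::condition_variable gWorkCV;
static bool gSignalPending = false;

void requestWork() {                     // analogous to signal_l()/broadcast_l()
    std::lock_guard<std::mutex> l(gLock);
    gSignalPending = true;               // remembered even if nobody waits yet
    gWorkCV.notify_all();
}

void threadLoopIteration() {
    std::unique_lock<std::mutex> l(gLock);
    // Re-checking the flag before sleeping means a request made while this
    // thread was busy (e.g. waiting for an async write callback) is not lost.
    gWorkCV.wait(l, [] { return gSignalPending; });
    gSignalPending = false;
    // ... prepare tracks, write, etc.
}
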
Change-Id: If9a3b22cc3b9e6eb384a56c48c40e6258d0896ad --- services/audioflinger/Threads.cpp | 59 +++++++++++++++++++++------------------ services/audioflinger/Threads.h | 4 ++- services/audioflinger/Tracks.cpp | 4 +-- 3 files changed, 37 insertions(+), 30 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 7d0ecac..9210330 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -960,6 +960,7 @@ AudioFlinger::PlaybackThread::PlaybackThread(const sp& audioFlinge mUseAsyncWrite(false), mWriteAckSequence(0), mDrainSequence(0), + mSignalPending(false), mScreenState(AudioFlinger::mScreenState), // index 0 is reserved for normal mixer's submix mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1), @@ -1348,14 +1349,14 @@ void AudioFlinger::PlaybackThread::setStreamVolume(audio_stream_type_t stream, f { Mutex::Autolock _l(mLock); mStreamTypes[stream].volume = value; - signal_l(); + broadcast_l(); } void AudioFlinger::PlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted) { Mutex::Autolock _l(mLock); mStreamTypes[stream].mute = muted; - signal_l(); + broadcast_l(); } float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) const @@ -1413,8 +1414,8 @@ status_t AudioFlinger::PlaybackThread::addTrack_l(const sp& track) status = NO_ERROR; } - ALOGV("mWaitWorkCV.broadcast"); - mWaitWorkCV.broadcast(); + ALOGV("signal playback thread"); + broadcast_l(); return status; } @@ -1455,14 +1456,14 @@ void AudioFlinger::PlaybackThread::removeTrack_l(const sp& track) } } -void AudioFlinger::PlaybackThread::signal_l() +void AudioFlinger::PlaybackThread::broadcast_l() { // Thread could be blocked waiting for async // so signal it to handle state changes immediately // If threadLoop is currently unlocked a signal of mWaitWorkCV will // be lost so we also flag to prevent it blocking on mWaitWorkCV mSignalPending = true; - mWaitWorkCV.signal(); + mWaitWorkCV.broadcast(); } String8 AudioFlinger::PlaybackThread::getParameters(const String8& keys) @@ -2143,7 +2144,6 @@ bool AudioFlinger::PlaybackThread::threadLoop() } saveOutputTracks(); - if (mSignalPending) { // A signal was raised while we were unlocked mSignalPending = false; @@ -2158,10 +2158,10 @@ bool AudioFlinger::PlaybackThread::threadLoop() acquireWakeLock_l(); standbyTime = systemTime() + standbyDelay; sleepTime = 0; - if (exitPending()) { - break; - } - } else if ((!mActiveTracks.size() && systemTime() > standbyTime) || + + continue; + } + if ((!mActiveTracks.size() && systemTime() > standbyTime) || isSuspended()) { // put audio hardware into standby after short delay if (shouldStandby_l()) { @@ -2203,7 +2203,6 @@ bool AudioFlinger::PlaybackThread::threadLoop() continue; } } - // mMixerStatusIgnoringFastTracks is also updated internally mMixerStatus = prepareTracks_l(&tracksToRemove); @@ -3855,13 +3854,14 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr Vector< sp > *tracksToRemove ) { - ALOGV("OffloadThread::prepareTracks_l"); size_t count = mActiveTracks.size(); mixer_state mixerStatus = MIXER_IDLE; bool doHwPause = false; bool doHwResume = false; + ALOGV("OffloadThread::prepareTracks_l active tracks %d", count); + // find out which tracks need to be processed for (size_t i = 0; i < count; i++) { sp t = mActiveTracks[i].promote(); @@ -3915,23 +3915,27 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr // make sure processVolume_l() will apply new volume even 
if 0 mLeftVolFloat = mRightVolFloat = -1.0; if (track->mState == TrackBase::RESUMING) { - if (mPausedBytesRemaining) { - // Need to continue write that was interrupted - mCurrentWriteLength = mPausedWriteLength; - mBytesRemaining = mPausedBytesRemaining; - mPausedBytesRemaining = 0; - } track->mState = TrackBase::ACTIVE; + if (last) { + if (mPausedBytesRemaining) { + // Need to continue write that was interrupted + mCurrentWriteLength = mPausedWriteLength; + mBytesRemaining = mPausedBytesRemaining; + mPausedBytesRemaining = 0; + } + if (mHwPaused) { + doHwResume = true; + mHwPaused = false; + // threadLoop_mix() will handle the case that we need to + // resume an interrupted write + } + // enable write to audio HAL + sleepTime = 0; + } } } if (last) { - if (mHwPaused) { - doHwResume = true; - mHwPaused = false; - // threadLoop_mix() will handle the case that we need to - // resume an interrupted write - } // reset retry count track->mRetryCount = kMaxTrackRetriesOffload; mActiveTrack = t; @@ -3948,9 +3952,9 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr // has been written ALOGV("OffloadThread: underrun and STOPPING_1 -> draining, STOPPING_2"); track->mState = TrackBase::STOPPING_2; // so presentation completes after drain - sleepTime = 0; - standbyTime = systemTime() + standbyDelay; if (last) { + sleepTime = 0; + standbyTime = systemTime() + standbyDelay; mixerStatus = MIXER_DRAIN_TRACK; mDrainSequence += 2; if (mHwPaused) { @@ -4337,6 +4341,7 @@ bool AudioFlinger::RecordThread::threadLoop() mStandby = false; } } + lockEffectChains_l(effectChains); } diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 3fe470c..b3d88e3 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -526,7 +526,7 @@ private: status_t addTrack_l(const sp& track); bool destroyTrack_l(const sp& track); void removeTrack_l(const sp& track); - void signal_l(); + void broadcast_l(); void readOutputParameters(); @@ -590,6 +590,8 @@ private: // Bit 0 is reset by the async callback thread calling resetDraining(). Out of sequence // callbacks are ignored. uint32_t mDrainSequence; + // A condition that must be evaluated by prepareTrack_l() has changed and we must not wait + // for async write callback in the thread loop before evaluating it bool mSignalPending; sp mCallbackThread; diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 57aad1e..2b8f0ab 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -651,7 +651,7 @@ void AudioFlinger::PlaybackThread::Track::pause() case RESUMING: mState = PAUSING; ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get()); - playbackThread->signal_l(); + playbackThread->broadcast_l(); break; default: @@ -711,7 +711,7 @@ void AudioFlinger::PlaybackThread::Track::flush() // before mixer thread can run. This is important when offloading // because the hardware buffer could hold a large amount of audio playbackThread->flushOutput_l(); - playbackThread->signal_l(); + playbackThread->broadcast_l(); } } -- cgit v1.1 From 75318ada7a94c5a67b31bbb9e010eac3e9660188 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Tue, 17 Sep 2013 17:03:28 -0700 Subject: camera2: Tell all streams to ignore global device UI rotation - Also use android.sensor.orientation to set the right transform flags automatically. 
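For reference, the mapping added here reduces to the following sketch (a hypothetical standalone helper, not code from the patch; the header named for the NATIVE_WINDOW_TRANSFORM_* flags is an assumption): translate android.sensor.orientation into a rotation flag and OR in the inverse-display flag so SurfaceFlinger undoes the global UI rotation for preview buffers.

    #include <cstdint>
    #include <system/window.h>  // assumed location of the NATIVE_WINDOW_TRANSFORM_* flags

    static bool orientationToTransform(int degrees, int32_t* flags) {
        switch (degrees) {
            case 0:   *flags = 0;                               break;
            case 90:  *flags = NATIVE_WINDOW_TRANSFORM_ROT_90;  break;
            case 180: *flags = NATIVE_WINDOW_TRANSFORM_ROT_180; break;
            case 270: *flags = NATIVE_WINDOW_TRANSFORM_ROT_270; break;
            default:  return false;  // unexpected HAL value
        }
        // Ask SurfaceFlinger to undo the device UI rotation so the stream stays
        // aligned with the physical sensor during preview.
        *flags |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY;
        return true;
    }

The real implementation, getRotationTransformLocked() in the diff below, additionally validates the android.sensor.orientation entry found in the static metadata.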
Bug: 10804238 Change-Id: I10caf8331f19e107c461696963cc10f597c91d83 --- .../libcameraservice/api2/CameraDeviceClient.cpp | 80 ++++++++++++++++++++++ .../libcameraservice/api2/CameraDeviceClient.h | 3 + 2 files changed, 83 insertions(+) diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp index 055ea12..83466cb 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp @@ -360,6 +360,26 @@ status_t CameraDeviceClient::createStream(int width, int height, int format, ALOGV("%s: Camera %d: Successfully created a new stream ID %d", __FUNCTION__, mCameraId, streamId); + + /** + * Set the stream transform flags to automatically + * rotate the camera stream for preview use cases. + */ + int32_t transform = 0; + res = getRotationTransformLocked(&transform); + + if (res != OK) { + // Error logged by getRotationTransformLocked. + return res; + } + + res = mDevice->setStreamTransform(streamId, transform); + if (res != OK) { + ALOGE("%s: Failed to set stream transform (stream id %d)", + __FUNCTION__, streamId); + return res; + } + return streamId; } @@ -560,4 +580,64 @@ bool CameraDeviceClient::enforceRequestPermissions(CameraMetadata& metadata) { return true; } +status_t CameraDeviceClient::getRotationTransformLocked(int32_t* transform) { + ALOGV("%s: begin", __FUNCTION__); + + if (transform == NULL) { + ALOGW("%s: null transform", __FUNCTION__); + return BAD_VALUE; + } + + *transform = 0; + + const CameraMetadata& staticInfo = mDevice->info(); + camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_SENSOR_ORIENTATION); + if (entry.count == 0) { + ALOGE("%s: Camera %d: Can't find android.sensor.orientation in " + "static metadata!", __FUNCTION__, mCameraId); + return INVALID_OPERATION; + } + + int32_t& flags = *transform; + + int orientation = entry.data.i32[0]; + switch (orientation) { + case 0: + flags = 0; + break; + case 90: + flags = NATIVE_WINDOW_TRANSFORM_ROT_90; + break; + case 180: + flags = NATIVE_WINDOW_TRANSFORM_ROT_180; + break; + case 270: + flags = NATIVE_WINDOW_TRANSFORM_ROT_270; + break; + default: + ALOGE("%s: Invalid HAL android.sensor.orientation value: %d", + __FUNCTION__, orientation); + return INVALID_OPERATION; + } + + /** + * This magic flag makes surfaceflinger un-rotate the buffers + * to counter the extra global device UI rotation whenever the user + * physically rotates the device. + * + * By doing this, the camera buffer always ends up aligned + * with the physical camera for a "see through" effect. + * + * In essence, the buffer only gets rotated during preview use-cases. + * The user is still responsible to re-create streams of the proper + * aspect ratio, or the preview will end up looking non-uniformly + * stretched. 
+ */ + flags |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY; + + ALOGV("%s: final transform = 0x%x", __FUNCTION__, flags); + + return OK; +} + } // namespace android diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h index c6b6336..b490924 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.h +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h @@ -120,6 +120,9 @@ protected: const CameraMetadata& frame); virtual void detachDevice(); + // Calculate the ANativeWindow transform from android.sensor.orientation + status_t getRotationTransformLocked(/*out*/int32_t* transform); + private: /** ICameraDeviceUser interface-related private members */ -- cgit v1.1 From accc147666bfd37fc8b4ef745f18a8c751555ec2 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 20 Sep 2013 09:36:34 -0700 Subject: audioflinger: implement getTimestamp() for offloaded tracks Bug: 9587132. Change-Id: Ie9d5f4cca96306d08bc9a2dbd6edd8953096702d --- services/audioflinger/Threads.cpp | 16 ++++++++++++++++ services/audioflinger/Threads.h | 2 ++ services/audioflinger/Tracks.cpp | 28 ++++++++++++++++------------ 3 files changed, 34 insertions(+), 12 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 7d0ecac..242e020 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2340,6 +2340,22 @@ void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp >& tra } +status_t AudioFlinger::PlaybackThread::getTimestamp_l(AudioTimestamp& timestamp) +{ + if (mNormalSink != 0) { + return mNormalSink->getTimestamp(timestamp); + } + if (mType == OFFLOAD && mOutput->stream->get_presentation_position) { + uint64_t position64; + int ret = mOutput->stream->get_presentation_position( + mOutput->stream, &position64, ×tamp.mTime); + if (ret == 0) { + timestamp.mPosition = (uint32_t)position64; + return NO_ERROR; + } + } + return INVALID_OPERATION; +} // ---------------------------------------------------------------------------- AudioFlinger::MixerThread::MixerThread(const sp& audioFlinger, AudioStreamOut* output, diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 3fe470c..f13cb54 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -466,6 +466,8 @@ public: // Return's the HAL's frame count i.e. fast mixer buffer size. 
size_t frameCountHAL() const { return mFrameCount; } + status_t getTimestamp_l(AudioTimestamp& timestamp); + protected: // updated by readOutputParameters() size_t mNormalFrameCount; // normal mixer and effects diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index 57aad1e..ccba014 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -757,19 +757,23 @@ status_t AudioFlinger::PlaybackThread::Track::getTimestamp(AudioTimestamp& times } Mutex::Autolock _l(thread->mLock); PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); - if (!playbackThread->mLatchQValid) { - return INVALID_OPERATION; - } - uint32_t unpresentedFrames = - ((int64_t) playbackThread->mLatchQ.mUnpresentedFrames * mSampleRate) / - playbackThread->mSampleRate; - uint32_t framesWritten = mAudioTrackServerProxy->framesReleased(); - if (framesWritten < unpresentedFrames) { - return INVALID_OPERATION; + if (!isOffloaded()) { + if (!playbackThread->mLatchQValid) { + return INVALID_OPERATION; + } + uint32_t unpresentedFrames = + ((int64_t) playbackThread->mLatchQ.mUnpresentedFrames * mSampleRate) / + playbackThread->mSampleRate; + uint32_t framesWritten = mAudioTrackServerProxy->framesReleased(); + if (framesWritten < unpresentedFrames) { + return INVALID_OPERATION; + } + timestamp.mPosition = framesWritten - unpresentedFrames; + timestamp.mTime = playbackThread->mLatchQ.mTimestamp.mTime; + return NO_ERROR; } - timestamp.mPosition = framesWritten - unpresentedFrames; - timestamp.mTime = playbackThread->mLatchQ.mTimestamp.mTime; - return NO_ERROR; + + return playbackThread->getTimestamp_l(timestamp); } status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId) -- cgit v1.1 From 0181fde7bd20238cb13ae2665f0e5bfe7c2d9ac8 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Fri, 20 Sep 2013 10:12:32 -0700 Subject: Camera2/3: For still captures, use widest FPS range available. Instead of using the application-selected preview FPS range for still captures, select the widest FPS range advertised by the device and use it for still captures instead, to give the HAL the widest range of exposure values to choose from possible. Bug: 10842868 Change-Id: I0e24f5eb713a4f8df116cd5979a84390cc0974d7 --- .../camera/libcameraservice/api1/Camera2Client.cpp | 4 +++ .../libcameraservice/api1/client2/Parameters.cpp | 40 ++++++++++++++++++++-- .../libcameraservice/api1/client2/Parameters.h | 1 + 3 files changed, 43 insertions(+), 2 deletions(-) diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp index e7f6c53..fb81468 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.cpp +++ b/services/camera/libcameraservice/api1/Camera2Client.cpp @@ -326,6 +326,10 @@ status_t Camera2Client::dump(int fd, const Vector& args) { result.appendFormat(" Video stabilization is %s\n", p.videoStabilization ? 
"enabled" : "disabled"); + result.appendFormat(" Selected still capture FPS range: %d - %d\n", + p.fastInfo.bestStillCaptureFpsRange[0], + p.fastInfo.bestStillCaptureFpsRange[1]); + result.append(" Current streams:\n"); result.appendFormat(" Preview stream ID: %d\n", getPreviewStreamId()); diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp index ad55feb..0705791 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.cpp +++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp @@ -852,6 +852,33 @@ status_t Parameters::buildFastInfo() { arrayHeight = activeArraySize.data.i32[3]; } else return NO_INIT; + // We'll set the target FPS range for still captures to be as wide + // as possible to give the HAL maximum latitude for exposure selection + camera_metadata_ro_entry_t availableFpsRanges = + staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2); + if (availableFpsRanges.count < 2 || availableFpsRanges.count % 2 != 0) { + return NO_INIT; + } + + int32_t bestStillCaptureFpsRange[2] = { + availableFpsRanges.data.i32[0], availableFpsRanges.data.i32[1] + }; + int32_t curRange = + bestStillCaptureFpsRange[1] - bestStillCaptureFpsRange[0]; + for (size_t i = 2; i < availableFpsRanges.count; i += 2) { + int32_t nextRange = + availableFpsRanges.data.i32[i + 1] - + availableFpsRanges.data.i32[i]; + if ( (nextRange > curRange) || // Maximize size of FPS range first + (nextRange == curRange && // Then minimize low-end FPS + bestStillCaptureFpsRange[0] > availableFpsRanges.data.i32[i])) { + + bestStillCaptureFpsRange[0] = availableFpsRanges.data.i32[i]; + bestStillCaptureFpsRange[1] = availableFpsRanges.data.i32[i + 1]; + curRange = nextRange; + } + } + camera_metadata_ro_entry_t availableFaceDetectModes = staticInfo(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 0, 0, false); @@ -971,6 +998,8 @@ status_t Parameters::buildFastInfo() { fastInfo.arrayWidth = arrayWidth; fastInfo.arrayHeight = arrayHeight; + fastInfo.bestStillCaptureFpsRange[0] = bestStillCaptureFpsRange[0]; + fastInfo.bestStillCaptureFpsRange[1] = bestStillCaptureFpsRange[1]; fastInfo.bestFaceDetectMode = bestFaceDetectMode; fastInfo.maxFaces = maxFaces; @@ -1709,8 +1738,15 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { &metadataMode, 1); if (res != OK) return res; - res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, - previewFpsRange, 2); + camera_metadata_entry_t intent = + request->find(ANDROID_CONTROL_CAPTURE_INTENT); + if (intent.data.u8[0] == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) { + res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + fastInfo.bestStillCaptureFpsRange, 2); + } else { + res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + previewFpsRange, 2); + } if (res != OK) return res; uint8_t reqWbLock = autoWhiteBalanceLock ? 
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h index a7111a3..b9ca7bf 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.h +++ b/services/camera/libcameraservice/api1/client2/Parameters.h @@ -179,6 +179,7 @@ struct Parameters { struct DeviceInfo { int32_t arrayWidth; int32_t arrayHeight; + int32_t bestStillCaptureFpsRange[2]; uint8_t bestFaceDetectMode; int32_t maxFaces; struct OverrideModes { -- cgit v1.1 From e2773bb17bc5d01e05a77b8913539575ebd04500 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 20 Sep 2013 18:12:06 +0000 Subject: Revert "Workaround slow AudioTrack destruction" This reverts commit 8bbbd7da02fac3de40139af19f7cf7a7cc3cc824. Change-Id: I269a6c445cbce33451b6a9e74223e36e6abbdbe0 --- media/libmedia/SoundPool.cpp | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index 37b400c..5239b2f 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -537,18 +537,6 @@ void SoundChannel::init(SoundPool* soundPool) mSoundPool = soundPool; } -// This class is used to destroy a RefBase asynchronously -class AsyncDestructThread : public Thread -{ -public: - AsyncDestructThread(sp refBase) : mRefBase(refBase) { } -protected: - virtual ~AsyncDestructThread() { } -private: - virtual bool threadLoop() { return false; } - const sp mRefBase; -}; - // call with sound pool lock held void SoundChannel::play(const sp& sample, int nextChannelID, float leftVolume, float rightVolume, int priority, int loop, float rate) @@ -653,17 +641,6 @@ exit: if (status != NO_ERROR) { mAudioTrack.clear(); } - // FIXME AudioTrack destruction should not be slow - if (oldTrack != 0) { - // must be a raw reference to avoid a race after run() - AsyncDestructThread *adt = new AsyncDestructThread(oldTrack); - // guaranteed to not run destructor - oldTrack.clear(); - // after the run(), adt thread will hold a strong reference to oldTrack, - // and the only strong reference to itself - adt->run("AsyncDestruct"); - // do not delete adt here: adt thread destroys itself, and oldTrack if needed - } } void SoundChannel::nextEvent() -- cgit v1.1 From 5a6cd224d07c05b496b6aca050ce5ecf96f125af Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Fri, 20 Sep 2013 09:20:45 -0700 Subject: Fix slow AudioTrack and AudioRecord destruction There were two causes for the slowness: When thread was paused, it used nanosleep and sleep. These usually run to completion (except for POSIX signal, which we avoid because it is low-level). Instead, replace the nanosleep and sleep by condition timed wait, as that can be made to return early by a condition signal. Another advantage of condition timed wait is that a condition wait was already being used at top of thread loop, so it is a simpler change. The AudioRecord destructor was missing a proxy interrupt that was correct in AudioTrack. This proxy interrupt is needed in case another thread is blocked in proxy obtainBuffer. Does not address the 1 second polling for NS_WHENEVER. 
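For illustration, a minimal sketch of why the condition timed wait helps, using standard C++ primitives instead of the Android Thread/Condition classes (names are illustrative only): nanosleep() normally runs to completion, while a timed wait on a condition variable returns as soon as the destructor path signals it.

    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    class CallbackThread {
    public:
        // Interruptible replacement for nanosleep(): returns early if exit is
        // requested while the thread is "sleeping".
        void pauseFor(std::chrono::nanoseconds ns) {
            std::unique_lock<std::mutex> lk(mLock);
            mCond.wait_for(lk, ns, [this] { return mExitRequested; });
        }

        // Called from the destructor path: with nanosleep() this would have to
        // wait out the full timeout; with a condition variable it wakes at once.
        void requestExit() {
            std::lock_guard<std::mutex> lk(mLock);
            mExitRequested = true;
            mCond.notify_one();
        }

    private:
        std::mutex mLock;
        std::condition_variable mCond;
        bool mExitRequested = false;
    };

pauseFor() plays the role the patch gives to pauseInternal() plus the wait at the top of threadLoop().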
Bug: 10822765 Change-Id: Id665994551e87e4d7da9c7b015f424fd7a0b5560 --- include/media/AudioRecord.h | 10 ++++---- include/media/AudioTrack.h | 10 ++++---- media/libmedia/AudioRecord.cpp | 54 +++++++++++++++++++++++------------------- media/libmedia/AudioTrack.cpp | 53 ++++++++++++++++++++++------------------- 4 files changed, 71 insertions(+), 56 deletions(-) diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index 62f0c64..052064d 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -398,18 +398,20 @@ private: void pause(); // suspend thread from execution at next loop boundary void resume(); // allow thread to execute, if not requested to exit - void pauseConditional(); - // like pause(), but only if prior resume() wasn't latched private: + void pauseInternal(nsecs_t ns = 0LL); + // like pause(), but only used internally within thread + friend class AudioRecord; virtual bool threadLoop(); AudioRecord& mReceiver; virtual ~AudioRecordThread(); Mutex mMyLock; // Thread::mLock is private Condition mMyCond; // Thread::mThreadExitedCondition is private - bool mPaused; // whether thread is currently paused - bool mResumeLatch; // whether next pauseConditional() will be a nop + bool mPaused; // whether thread is requested to pause at next loop entry + bool mPausedInt; // whether thread internally requests pause + nsecs_t mPausedNs; // if mPausedInt then associated timeout, otherwise ignored }; // body of AudioRecordThread::threadLoop() diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 453c106..22ad57e 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -598,18 +598,20 @@ protected: void pause(); // suspend thread from execution at next loop boundary void resume(); // allow thread to execute, if not requested to exit - void pauseConditional(); - // like pause(), but only if prior resume() wasn't latched private: + void pauseInternal(nsecs_t ns = 0LL); + // like pause(), but only used internally within thread + friend class AudioTrack; virtual bool threadLoop(); AudioTrack& mReceiver; virtual ~AudioTrackThread(); Mutex mMyLock; // Thread::mLock is private Condition mMyCond; // Thread::mThreadExitedCondition is private - bool mPaused; // whether thread is currently paused - bool mResumeLatch; // whether next pauseConditional() will be a nop + bool mPaused; // whether thread is requested to pause at next loop entry + bool mPausedInt; // whether thread internally requests pause + nsecs_t mPausedNs; // if mPausedInt then associated timeout, otherwise ignored }; // body of AudioTrackThread::threadLoop() diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index e934a3e..fb731b9 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -105,6 +105,7 @@ AudioRecord::~AudioRecord() // Otherwise the callback thread will never exit. 
stop(); if (mAudioRecordThread != 0) { + mProxy->interrupt(); mAudioRecordThread->requestExit(); // see comment in AudioRecord.h mAudioRecordThread->requestExitAndWait(); mAudioRecordThread.clear(); @@ -960,7 +961,7 @@ void AudioRecord::DeathNotifier::binderDied(const wp& who) // ========================================================================= AudioRecord::AudioRecordThread::AudioRecordThread(AudioRecord& receiver, bool bCanCallJava) - : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mResumeLatch(false) + : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL) { } @@ -977,25 +978,32 @@ bool AudioRecord::AudioRecordThread::threadLoop() // caller will check for exitPending() return true; } + if (mPausedInt) { + mPausedInt = false; + if (mPausedNs > 0) { + (void) mMyCond.waitRelative(mMyLock, mPausedNs); + } else { + mMyCond.wait(mMyLock); + } + return true; + } } nsecs_t ns = mReceiver.processAudioBuffer(this); switch (ns) { case 0: return true; - case NS_WHENEVER: - sleep(1); - return true; case NS_INACTIVE: - pauseConditional(); + pauseInternal(); return true; case NS_NEVER: return false; + case NS_WHENEVER: + // FIXME increase poll interval, or make event-driven + ns = 1000000000LL; + // fall through default: LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns); - struct timespec req; - req.tv_sec = ns / 1000000000LL; - req.tv_nsec = ns % 1000000000LL; - nanosleep(&req, NULL /*rem*/); + pauseInternal(ns); return true; } } @@ -1004,24 +1012,18 @@ void AudioRecord::AudioRecordThread::requestExit() { // must be in this order to avoid a race condition Thread::requestExit(); - resume(); + AutoMutex _l(mMyLock); + if (mPaused || mPausedInt) { + mPaused = false; + mPausedInt = false; + mMyCond.signal(); + } } void AudioRecord::AudioRecordThread::pause() { AutoMutex _l(mMyLock); mPaused = true; - mResumeLatch = false; -} - -void AudioRecord::AudioRecordThread::pauseConditional() -{ - AutoMutex _l(mMyLock); - if (mResumeLatch) { - mResumeLatch = false; - } else { - mPaused = true; - } } void AudioRecord::AudioRecordThread::resume() @@ -1029,13 +1031,17 @@ void AudioRecord::AudioRecordThread::resume() AutoMutex _l(mMyLock); if (mPaused) { mPaused = false; - mResumeLatch = false; mMyCond.signal(); - } else { - mResumeLatch = true; } } +void AudioRecord::AudioRecordThread::pauseInternal(nsecs_t ns) +{ + AutoMutex _l(mMyLock); + mPausedInt = true; + mPausedNs = ns; +} + // ------------------------------------------------------------------------- }; // namespace android diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 15249a4..fdcf911 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1782,7 +1782,7 @@ void AudioTrack::DeathNotifier::binderDied(const wp& who) // ========================================================================= AudioTrack::AudioTrackThread::AudioTrackThread(AudioTrack& receiver, bool bCanCallJava) - : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mResumeLatch(false) + : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL) { } @@ -1799,25 +1799,32 @@ bool AudioTrack::AudioTrackThread::threadLoop() // caller will check for exitPending() return true; } + if (mPausedInt) { + mPausedInt = false; + if (mPausedNs > 0) { + (void) mMyCond.waitRelative(mMyLock, mPausedNs); + } else { + mMyCond.wait(mMyLock); + } + return true; + } } nsecs_t ns = mReceiver.processAudioBuffer(this); switch (ns) { case 0: 
return true; - case NS_WHENEVER: - sleep(1); - return true; case NS_INACTIVE: - pauseConditional(); + pauseInternal(); return true; case NS_NEVER: return false; + case NS_WHENEVER: + // FIXME increase poll interval, or make event-driven + ns = 1000000000LL; + // fall through default: LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns); - struct timespec req; - req.tv_sec = ns / 1000000000LL; - req.tv_nsec = ns % 1000000000LL; - nanosleep(&req, NULL /*rem*/); + pauseInternal(ns); return true; } } @@ -1826,24 +1833,18 @@ void AudioTrack::AudioTrackThread::requestExit() { // must be in this order to avoid a race condition Thread::requestExit(); - resume(); + AutoMutex _l(mMyLock); + if (mPaused || mPausedInt) { + mPaused = false; + mPausedInt = false; + mMyCond.signal(); + } } void AudioTrack::AudioTrackThread::pause() { AutoMutex _l(mMyLock); mPaused = true; - mResumeLatch = false; -} - -void AudioTrack::AudioTrackThread::pauseConditional() -{ - AutoMutex _l(mMyLock); - if (mResumeLatch) { - mResumeLatch = false; - } else { - mPaused = true; - } } void AudioTrack::AudioTrackThread::resume() @@ -1851,11 +1852,15 @@ void AudioTrack::AudioTrackThread::resume() AutoMutex _l(mMyLock); if (mPaused) { mPaused = false; - mResumeLatch = false; mMyCond.signal(); - } else { - mResumeLatch = true; } } +void AudioTrack::AudioTrackThread::pauseInternal(nsecs_t ns) +{ + AutoMutex _l(mMyLock); + mPausedInt = true; + mPausedNs = ns; +} + }; // namespace android -- cgit v1.1 From 25150209a20906ea25b8cc233962e81432260ec5 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Fri, 20 Sep 2013 10:12:32 -0700 Subject: Camera2/3: For still captures, use widest FPS range available. Instead of using the application-selected preview FPS range for still captures, select the widest FPS range advertised by the device and use it for still captures instead, to give the HAL the widest range of exposure values to choose from possible. Bug: 10842868 Change-Id: I0e24f5eb713a4f8df116cd5979a84390cc0974d7 --- .../camera/libcameraservice/api1/Camera2Client.cpp | 4 +++ .../libcameraservice/api1/client2/Parameters.cpp | 40 ++++++++++++++++++++-- .../libcameraservice/api1/client2/Parameters.h | 1 + 3 files changed, 43 insertions(+), 2 deletions(-) diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp index e7f6c53..fb81468 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.cpp +++ b/services/camera/libcameraservice/api1/Camera2Client.cpp @@ -326,6 +326,10 @@ status_t Camera2Client::dump(int fd, const Vector& args) { result.appendFormat(" Video stabilization is %s\n", p.videoStabilization ? 
"enabled" : "disabled"); + result.appendFormat(" Selected still capture FPS range: %d - %d\n", + p.fastInfo.bestStillCaptureFpsRange[0], + p.fastInfo.bestStillCaptureFpsRange[1]); + result.append(" Current streams:\n"); result.appendFormat(" Preview stream ID: %d\n", getPreviewStreamId()); diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp index ad55feb..0705791 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.cpp +++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp @@ -852,6 +852,33 @@ status_t Parameters::buildFastInfo() { arrayHeight = activeArraySize.data.i32[3]; } else return NO_INIT; + // We'll set the target FPS range for still captures to be as wide + // as possible to give the HAL maximum latitude for exposure selection + camera_metadata_ro_entry_t availableFpsRanges = + staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2); + if (availableFpsRanges.count < 2 || availableFpsRanges.count % 2 != 0) { + return NO_INIT; + } + + int32_t bestStillCaptureFpsRange[2] = { + availableFpsRanges.data.i32[0], availableFpsRanges.data.i32[1] + }; + int32_t curRange = + bestStillCaptureFpsRange[1] - bestStillCaptureFpsRange[0]; + for (size_t i = 2; i < availableFpsRanges.count; i += 2) { + int32_t nextRange = + availableFpsRanges.data.i32[i + 1] - + availableFpsRanges.data.i32[i]; + if ( (nextRange > curRange) || // Maximize size of FPS range first + (nextRange == curRange && // Then minimize low-end FPS + bestStillCaptureFpsRange[0] > availableFpsRanges.data.i32[i])) { + + bestStillCaptureFpsRange[0] = availableFpsRanges.data.i32[i]; + bestStillCaptureFpsRange[1] = availableFpsRanges.data.i32[i + 1]; + curRange = nextRange; + } + } + camera_metadata_ro_entry_t availableFaceDetectModes = staticInfo(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 0, 0, false); @@ -971,6 +998,8 @@ status_t Parameters::buildFastInfo() { fastInfo.arrayWidth = arrayWidth; fastInfo.arrayHeight = arrayHeight; + fastInfo.bestStillCaptureFpsRange[0] = bestStillCaptureFpsRange[0]; + fastInfo.bestStillCaptureFpsRange[1] = bestStillCaptureFpsRange[1]; fastInfo.bestFaceDetectMode = bestFaceDetectMode; fastInfo.maxFaces = maxFaces; @@ -1709,8 +1738,15 @@ status_t Parameters::updateRequest(CameraMetadata *request) const { &metadataMode, 1); if (res != OK) return res; - res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, - previewFpsRange, 2); + camera_metadata_entry_t intent = + request->find(ANDROID_CONTROL_CAPTURE_INTENT); + if (intent.data.u8[0] == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) { + res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + fastInfo.bestStillCaptureFpsRange, 2); + } else { + res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + previewFpsRange, 2); + } if (res != OK) return res; uint8_t reqWbLock = autoWhiteBalanceLock ? 
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h index a7111a3..b9ca7bf 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.h +++ b/services/camera/libcameraservice/api1/client2/Parameters.h @@ -179,6 +179,7 @@ struct Parameters { struct DeviceInfo { int32_t arrayWidth; int32_t arrayHeight; + int32_t bestStillCaptureFpsRange[2]; uint8_t bestFaceDetectMode; int32_t maxFaces; struct OverrideModes { -- cgit v1.1 From 70b054021a3a22622c2710267362c5e235ed73bd Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Fri, 20 Sep 2013 16:32:26 -0700 Subject: Camera2/3: Properly disambiguate FPS range vs. single FPS setting The camera1 API allows for either setPreviewFrameRate or setPreviewFPSRange, so both values may or may not change in a single setParameter call. The disambiguation of which setting has been changed since the last setParameter() call was not fully correct, so a sequence of changes that only changed setPreviewFPSRange or didn't change either could be interpreted as as a change to setPreviewFrameRate. Bug: 10842868 Change-Id: I40baeced80a58f09f8a1742ece8dd5e141e9c1e3 --- .../libcameraservice/api1/client2/Parameters.cpp | 23 +++++++++++++++++----- .../libcameraservice/api1/client2/Parameters.h | 1 + 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp index 0705791..a6c1083 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.cpp +++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp @@ -184,6 +184,7 @@ status_t Parameters::initialize(const CameraMetadata *info) { // NOTE: Not scaled like FPS range values are. previewFps = fpsFromRange(previewFpsRange[0], previewFpsRange[1]); + lastSetPreviewFps = previewFps; params.set(CameraParameters::KEY_PREVIEW_FRAME_RATE, previewFps); @@ -1152,6 +1153,12 @@ status_t Parameters::set(const String8& paramString) { validatedParams.previewFps = fpsFromRange(validatedParams.previewFpsRange[0], validatedParams.previewFpsRange[1]); + + // Update our last-seen single preview FPS, needed for disambiguating + // when the application is intending to use the deprecated single-FPS + // setting vs. the range FPS setting + validatedParams.lastSetPreviewFps = newParams.getPreviewFrameRate(); + newParams.setPreviewFrameRate(validatedParams.previewFps); } @@ -1187,12 +1194,15 @@ status_t Parameters::set(const String8& paramString) { } } - // PREVIEW_FRAME_RATE - // Deprecated, only use if the preview fps range is unchanged this time. - // The single-value FPS is the same as the minimum of the range. + // PREVIEW_FRAME_RATE Deprecated, only use if the preview fps range is + // unchanged this time. The single-value FPS is the same as the minimum of + // the range. To detect whether the application has changed the value of + // previewFps, compare against their last-set preview FPS instead of the + // single FPS we may have synthesized from a range FPS set. 
if (!fpsRangeChanged) { validatedParams.previewFps = newParams.getPreviewFrameRate(); - if (validatedParams.previewFps != previewFps || recordingHintChanged) { + if (validatedParams.previewFps != lastSetPreviewFps || + recordingHintChanged) { camera_metadata_ro_entry_t availableFrameRates = staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES); /** @@ -1263,7 +1273,10 @@ status_t Parameters::set(const String8& paramString) { String8::format("%d,%d", validatedParams.previewFpsRange[0] * kFpsToApiScale, validatedParams.previewFpsRange[1] * kFpsToApiScale)); - + // Update our last-seen single preview FPS, needed for disambiguating + // when the application is intending to use the deprecated single-FPS + // setting vs. the range FPS setting + validatedParams.lastSetPreviewFps = validatedParams.previewFps; } // PICTURE_SIZE diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h index b9ca7bf..0505b0e 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.h +++ b/services/camera/libcameraservice/api1/client2/Parameters.h @@ -46,6 +46,7 @@ struct Parameters { int previewWidth, previewHeight; int32_t previewFpsRange[2]; + int lastSetPreviewFps; // the last single FPS value seen in a set call int previewFps; // deprecated, here only for tracking changes int previewFormat; -- cgit v1.1 From 4ad28ea0a76c7ecdfcc5b96af2b0afcb0c3e5120 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Fri, 20 Sep 2013 17:21:18 -0700 Subject: Camera2/3: Support new PASSIVE_UNFOCUSED AF state Bug: 10860639 Change-Id: I39be64abaf79b49b2d1f74e1c2ab2adf718ae701 --- services/camera/libcameraservice/api1/Camera2Client.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp index e7f6c53..a41cc36 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.cpp +++ b/services/camera/libcameraservice/api1/Camera2Client.cpp @@ -283,6 +283,7 @@ status_t Camera2Client::dump(int fd, const Vector& args) { CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE) CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN) CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED) + CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED) CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN) CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED) CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) @@ -1483,6 +1484,7 @@ void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) { case ANDROID_CONTROL_AF_STATE_INACTIVE: case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN: case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED: + case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED: default: // Unexpected in AUTO/MACRO mode ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d", @@ -1525,6 +1527,7 @@ void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) { afInMotion = true; // no break case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED: + case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED: // Stop passive scan, inform upstream if (l.mParameters.enableFocusMoveMessages) { sendMovingMessage = true; -- cgit v1.1 From 9d2c78c4798ffd8c276c1bf0eaa0b34bc255a2da Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Mon, 23 Sep 2013 12:29:42 -0700 Subject: AudioTrack: fix music resume Fix regression introduced by commit 5a6cd22 in AudioTrack resume: the callback thread was not signaled if paused internaly. Bug: 10895013. 
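For illustration, a condensed sketch of the two-flag pause involved here, in standard C++ with illustrative names: an externally requested pause and an internally requested (possibly timed) pause share one condition variable, so resume() has to clear and signal both, and the internal flag must be cleared only after the wait returns so that a resume() arriving mid-wait still sees it set.

    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    struct PausableLoop {
        std::mutex lock;
        std::condition_variable cond;
        bool paused = false;     // requested by the client
        bool pausedInt = false;  // requested internally, optionally timed
        std::chrono::nanoseconds pausedNs{0};

        void waitIfPaused() {
            std::unique_lock<std::mutex> lk(lock);
            while (paused) {
                cond.wait(lk);
            }
            if (pausedInt) {
                if (pausedNs.count() > 0) {
                    cond.wait_for(lk, pausedNs);
                } else {
                    cond.wait(lk);
                }
                pausedInt = false;  // cleared after the wait, as in the fix
            }
        }

        void resume() {
            std::lock_guard<std::mutex> lk(lock);
            if (paused || pausedInt) {  // checking only `paused` was the regression
                paused = false;
                pausedInt = false;
                cond.notify_one();
            }
        }
    };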
Change-Id: Ic356b115132d6fccbcee2d9bb855e92671dc20c5 --- media/libmedia/AudioRecord.cpp | 5 +++-- media/libmedia/AudioTrack.cpp | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index fb731b9..c5a7777 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -979,12 +979,12 @@ bool AudioRecord::AudioRecordThread::threadLoop() return true; } if (mPausedInt) { - mPausedInt = false; if (mPausedNs > 0) { (void) mMyCond.waitRelative(mMyLock, mPausedNs); } else { mMyCond.wait(mMyLock); } + mPausedInt = false; return true; } } @@ -1029,8 +1029,9 @@ void AudioRecord::AudioRecordThread::pause() void AudioRecord::AudioRecordThread::resume() { AutoMutex _l(mMyLock); - if (mPaused) { + if (mPaused || mPausedInt) { mPaused = false; + mPausedInt = false; mMyCond.signal(); } } diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index fdcf911..754a4e3 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1800,12 +1800,12 @@ bool AudioTrack::AudioTrackThread::threadLoop() return true; } if (mPausedInt) { - mPausedInt = false; if (mPausedNs > 0) { (void) mMyCond.waitRelative(mMyLock, mPausedNs); } else { mMyCond.wait(mMyLock); } + mPausedInt = false; return true; } } @@ -1850,8 +1850,9 @@ void AudioTrack::AudioTrackThread::pause() void AudioTrack::AudioTrackThread::resume() { AutoMutex _l(mMyLock); - if (mPaused) { + if (mPaused || mPausedInt) { mPaused = false; + mPausedInt = false; mMyCond.signal(); } } -- cgit v1.1 From 21e5456821e3c107d09b0629adbf23c9e06e7c0b Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Mon, 23 Sep 2013 12:08:05 -0700 Subject: fix incoming call audio start time. commit cec4abb fixing issue 6720482 had a flaw that would insert commmands at the wrong position in the audio policy service command thread queue causing other commands to be delayed more than expected. A change in track stop timing introduced by commit bfb1b83 made this issue visible when answering a call. Bug: 10860012. 
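For illustration, a simplified sketch of the intended queue behaviour (standalone code with illustrative types, not the AudioPolicyService implementation): a new command that supersedes a delayed command already in the queue inherits that command's deadline and is treated as delayed, so the caller does not block on its status, and it is inserted at the position matching that deadline so the other queued commands keep their expected execution times.

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct Command {
        int     kind;
        int64_t time;        // absolute time at which to execute
        bool    waitStatus;  // caller blocks for the result only if not delayed
    };

    static void insertCommand(std::vector<Command>& queue, Command cmd,
                              int64_t now, int64_t delayMs) {
        cmd.time = now + delayMs;
        for (auto it = queue.begin(); it != queue.end();) {
            if (it->kind == cmd.kind) {  // superseded by the new command
                cmd.time = it->time;     // keep the earlier deadline
                delayMs = 1;             // now delayed: do not wait for status
                it = queue.erase(it);
            } else {
                ++it;
            }
        }
        cmd.waitStatus = (delayMs == 0);
        // Insert at the position matching the (possibly inherited) timestamp.
        auto pos = std::upper_bound(queue.begin(), queue.end(), cmd,
                [](const Command& a, const Command& b) { return a.time < b.time; });
        queue.insert(pos, cmd);
    }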
Change-Id: Iec769112f14e13d13b64628b808e43663e33771c --- services/audioflinger/AudioPolicyService.cpp | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp index 4be292f..35e816b 100644 --- a/services/audioflinger/AudioPolicyService.cpp +++ b/services/audioflinger/AudioPolicyService.cpp @@ -975,7 +975,6 @@ void AudioPolicyService::AudioCommandThread::insertCommand_l(AudioCommand *comma { ssize_t i; // not size_t because i will count down to -1 Vector removedCommands; - nsecs_t time = 0; command->mTime = systemTime() + milliseconds(delayMs); // acquire wake lock to make sure delayed commands are processed @@ -1021,7 +1020,10 @@ void AudioPolicyService::AudioCommandThread::insertCommand_l(AudioCommand *comma } else { data2->mKeyValuePairs = param2.toString(); } - time = command2->mTime; + command->mTime = command2->mTime; + // force delayMs to non 0 so that code below does not request to wait for + // command status as the command is now delayed + delayMs = 1; } break; case SET_VOLUME: { @@ -1032,7 +1034,10 @@ void AudioPolicyService::AudioCommandThread::insertCommand_l(AudioCommand *comma ALOGV("Filtering out volume command on output %d for stream %d", data->mIO, data->mStream); removedCommands.add(command2); - time = command2->mTime; + command->mTime = command2->mTime; + // force delayMs to non 0 so that code below does not request to wait for + // command status as the command is now delayed + delayMs = 1; } break; case START_TONE: case STOP_TONE: @@ -1054,16 +1059,12 @@ void AudioPolicyService::AudioCommandThread::insertCommand_l(AudioCommand *comma } removedCommands.clear(); - // wait for status only if delay is 0 and command time was not modified above - if (delayMs == 0 && time == 0) { + // wait for status only if delay is 0 + if (delayMs == 0) { command->mWaitStatus = true; } else { command->mWaitStatus = false; } - // update command time if modified above - if (time != 0) { - command->mTime = time; - } // insert command at the right place according to its time stamp ALOGV("inserting command: %d at index %d, num commands %d", -- cgit v1.1 From 09647d29eaf429ce88c9c9709ff63dee62f2147a Mon Sep 17 00:00:00 2001 From: Jean-Michel Trivi Date: Fri, 20 Sep 2013 11:58:40 -0700 Subject: Add support for level measurements in Visualizer New commands to set a measurement mode and perform peak + RMS measurements. Bug 8413913 Change-Id: Ib25254065c79d365ebb34f9dc9caa0490e2d300d --- include/media/Visualizer.h | 9 ++ media/libeffects/visualizer/EffectVisualizer.cpp | 154 ++++++++++++++++++++--- media/libmedia/Visualizer.cpp | 68 ++++++++++ 3 files changed, 214 insertions(+), 17 deletions(-) diff --git a/include/media/Visualizer.h b/include/media/Visualizer.h index e429263..6167dd6 100644 --- a/include/media/Visualizer.h +++ b/include/media/Visualizer.h @@ -114,6 +114,14 @@ public: status_t setScalingMode(uint32_t mode); uint32_t getScalingMode() { return mScalingMode; } + // set which measurements are done on the audio buffers processed by the effect. + // valid measurements (mask): MEASUREMENT_MODE_PEAK_RMS + status_t setMeasurementMode(uint32_t mode); + uint32_t getMeasurementMode() { return mMeasurementMode; } + + // return a set of int32_t measurements + status_t getIntMeasurements(uint32_t type, uint32_t number, int32_t *measurements); + // return a capture in PCM 8 bit unsigned format. 
The size of the capture is equal to // getCaptureSize() status_t getWaveForm(uint8_t *waveform); @@ -156,6 +164,7 @@ private: uint32_t mCaptureSize; uint32_t mSampleRate; uint32_t mScalingMode; + uint32_t mMeasurementMode; capture_cbk_t mCaptureCallBack; void *mCaptureCbkUser; sp mCaptureThread; diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp index 96935e3..0f27cbf 100644 --- a/media/libeffects/visualizer/EffectVisualizer.cpp +++ b/media/libeffects/visualizer/EffectVisualizer.cpp @@ -22,6 +22,7 @@ #include #include #include +#include #include @@ -54,6 +55,18 @@ enum visualizer_state_e { #define CAPTURE_BUF_SIZE 65536 // "64k should be enough for everyone" +#define DISCARD_MEASUREMENTS_TIME_MS 2000 // discard measurements older than this number of ms + +// maximum number of buffers for which we keep track of the measurements +#define MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS 25 + + +struct BufferStats { + bool mIsValid; + uint16_t mPeakU16; // the positive peak of the absolute value of the samples in a buffer + float mRmsSquared; // the average square of the samples in a buffer +}; + struct VisualizerContext { const struct effect_interface_s *mItfe; effect_config_t mConfig; @@ -65,11 +78,34 @@ struct VisualizerContext { uint32_t mLatency; struct timespec mBufferUpdateTime; uint8_t mCaptureBuf[CAPTURE_BUF_SIZE]; + // for measurements + uint8_t mChannelCount; // to avoid recomputing it every time a buffer is processed + uint32_t mMeasurementMode; + uint8_t mMeasurementWindowSizeInBuffers; + uint8_t mMeasurementBufferIdx; + BufferStats mPastMeasurements[MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS]; }; // //--- Local functions // +uint32_t Visualizer_getDeltaTimeMsFromUpdatedTime(VisualizerContext* pContext) { + uint32_t deltaMs = 0; + if (pContext->mBufferUpdateTime.tv_sec != 0) { + struct timespec ts; + if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) { + time_t secs = ts.tv_sec - pContext->mBufferUpdateTime.tv_sec; + long nsec = ts.tv_nsec - pContext->mBufferUpdateTime.tv_nsec; + if (nsec < 0) { + --secs; + nsec += 1000000000; + } + deltaMs = secs * 1000 + nsec / 1000000; + } + } + return deltaMs; +} + void Visualizer_reset(VisualizerContext *pContext) { @@ -165,9 +201,21 @@ int Visualizer_init(VisualizerContext *pContext) pContext->mConfig.outputCfg.bufferProvider.cookie = NULL; pContext->mConfig.outputCfg.mask = EFFECT_CONFIG_ALL; + // visualization initialization pContext->mCaptureSize = VISUALIZER_CAPTURE_SIZE_MAX; pContext->mScalingMode = VISUALIZER_SCALING_MODE_NORMALIZED; + // measurement initialization + pContext->mChannelCount = popcount(pContext->mConfig.inputCfg.channels); + pContext->mMeasurementMode = MEASUREMENT_MODE_NONE; + pContext->mMeasurementWindowSizeInBuffers = MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS; + pContext->mMeasurementBufferIdx = 0; + for (uint8_t i=0 ; imMeasurementWindowSizeInBuffers ; i++) { + pContext->mPastMeasurements[i].mIsValid = false; + pContext->mPastMeasurements[i].mPeakU16 = 0; + pContext->mPastMeasurements[i].mRmsSquared = 0; + } + Visualizer_setConfig(pContext, &pContext->mConfig); return 0; @@ -270,6 +318,30 @@ int Visualizer_process( return -EINVAL; } + // perform measurements if needed + if (pContext->mMeasurementMode & MEASUREMENT_MODE_PEAK_RMS) { + // find the peak and RMS squared for the new buffer + uint32_t inIdx; + int16_t maxSample = 0; + float rmsSqAcc = 0; + for (inIdx = 0 ; inIdx < inBuffer->frameCount * pContext->mChannelCount ; inIdx++) { + if (inBuffer->s16[inIdx] > maxSample) { + 
maxSample = inBuffer->s16[inIdx]; + } else if (-inBuffer->s16[inIdx] > maxSample) { + maxSample = -inBuffer->s16[inIdx]; + } + rmsSqAcc += (inBuffer->s16[inIdx] * inBuffer->s16[inIdx]); + } + // store the measurement + pContext->mPastMeasurements[pContext->mMeasurementBufferIdx].mPeakU16 = (uint16_t)maxSample; + pContext->mPastMeasurements[pContext->mMeasurementBufferIdx].mRmsSquared = + rmsSqAcc / (inBuffer->frameCount * pContext->mChannelCount); + pContext->mPastMeasurements[pContext->mMeasurementBufferIdx].mIsValid = true; + if (++pContext->mMeasurementBufferIdx >= pContext->mMeasurementWindowSizeInBuffers) { + pContext->mMeasurementBufferIdx = 0; + } + } + // all code below assumes stereo 16 bit PCM output and input int32_t shift; @@ -423,6 +495,12 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, p->vsize = sizeof(uint32_t); *replySize += sizeof(uint32_t); break; + case VISUALIZER_PARAM_MEASUREMENT_MODE: + ALOGV("get mMeasurementMode = %d", pContext->mMeasurementMode); + *((uint32_t *)p->data + 1) = pContext->mMeasurementMode; + p->vsize = sizeof(uint32_t); + *replySize += sizeof(uint32_t); + break; default: p->status = -EINVAL; } @@ -452,6 +530,10 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, pContext->mLatency = *((uint32_t *)p->data + 1); ALOGV("set mLatency = %d", pContext->mLatency); break; + case VISUALIZER_PARAM_MEASUREMENT_MODE: + pContext->mMeasurementMode = *((uint32_t *)p->data + 1); + ALOGV("set mMeasurementMode = %d", pContext->mMeasurementMode); + break; default: *(int32_t *)pReplyData = -EINVAL; } @@ -470,24 +552,12 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, } if (pContext->mState == VISUALIZER_STATE_ACTIVE) { int32_t latencyMs = pContext->mLatency; - uint32_t deltaMs = 0; - if (pContext->mBufferUpdateTime.tv_sec != 0) { - struct timespec ts; - if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) { - time_t secs = ts.tv_sec - pContext->mBufferUpdateTime.tv_sec; - long nsec = ts.tv_nsec - pContext->mBufferUpdateTime.tv_nsec; - if (nsec < 0) { - --secs; - nsec += 1000000000; - } - deltaMs = secs * 1000 + nsec / 1000000; - latencyMs -= deltaMs; - if (latencyMs < 0) { - latencyMs = 0; - } - } + const uint32_t deltaMs = Visualizer_getDeltaTimeMsFromUpdatedTime(pContext); + latencyMs -= deltaMs; + if (latencyMs < 0) { + latencyMs = 0; } - uint32_t deltaSmpl = pContext->mConfig.inputCfg.samplingRate * latencyMs / 1000; + const uint32_t deltaSmpl = pContext->mConfig.inputCfg.samplingRate * latencyMs / 1000; int32_t capturePoint = pContext->mCaptureIdx - pContext->mCaptureSize - deltaSmpl; int32_t captureSize = pContext->mCaptureSize; @@ -525,6 +595,56 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, break; + case VISUALIZER_CMD_MEASURE: { + uint16_t peakU16 = 0; + float sumRmsSquared = 0.0f; + uint8_t nbValidMeasurements = 0; + // reset measurements if last measurement was too long ago (which implies stored + // measurements aren't relevant anymore and shouldn't bias the new one) + const int32_t delayMs = Visualizer_getDeltaTimeMsFromUpdatedTime(pContext); + if (delayMs > DISCARD_MEASUREMENTS_TIME_MS) { + ALOGE("Discarding measurements, last measurement is %dms old", delayMs); + for (uint8_t i=0 ; imMeasurementWindowSizeInBuffers ; i++) { + pContext->mPastMeasurements[i].mIsValid = false; + pContext->mPastMeasurements[i].mPeakU16 = 0; + pContext->mPastMeasurements[i].mRmsSquared = 0; + } + pContext->mMeasurementBufferIdx = 0; + } else { + 
// only use actual measurements, otherwise the first RMS measure happening before + // MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS have been played will always be artificially + // low + for (uint8_t i=0 ; i < pContext->mMeasurementWindowSizeInBuffers ; i++) { + if (pContext->mPastMeasurements[i].mIsValid) { + if (pContext->mPastMeasurements[i].mPeakU16 > peakU16) { + peakU16 = pContext->mPastMeasurements[i].mPeakU16; + } + if (pContext->mMeasurementWindowSizeInBuffers != 0) { + sumRmsSquared += pContext->mPastMeasurements[i].mRmsSquared; + } + nbValidMeasurements++; + } + } + } + float rms = nbValidMeasurements == 0 ? 0.0f : sqrtf(sumRmsSquared / nbValidMeasurements); + int32_t* pIntReplyData = (int32_t*)pReplyData; + // convert from I16 sample values to mB and write results + if (rms < 0.000016f) { + pIntReplyData[MEASUREMENT_IDX_RMS] = -9600; //-96dB + } else { + pIntReplyData[MEASUREMENT_IDX_RMS] = (int32_t) (2000 * log10(rms / 32767.0f)); + } + if (peakU16 == 0) { + pIntReplyData[MEASUREMENT_IDX_PEAK] = -9600; //-96dB + } else { + pIntReplyData[MEASUREMENT_IDX_PEAK] = (int32_t) (2000 * log10(peakU16 / 32767.0f)); + } + ALOGV("LEVEL_MONITOR_CMD_MEASURE peak=%d (%dmB), rms=%.1f (%dmB)", + peakU16, pIntReplyData[MEASUREMENT_IDX_PEAK], + rms, pIntReplyData[MEASUREMENT_IDX_RMS]); + } + break; + default: ALOGW("Visualizer_command invalid command %d",cmdCode); return -EINVAL; diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp index e519f13..c146b8d 100644 --- a/media/libmedia/Visualizer.cpp +++ b/media/libmedia/Visualizer.cpp @@ -43,6 +43,7 @@ Visualizer::Visualizer (int32_t priority, mCaptureSize(CAPTURE_SIZE_DEF), mSampleRate(44100000), mScalingMode(VISUALIZER_SCALING_MODE_NORMALIZED), + mMeasurementMode(MEASUREMENT_MODE_NONE), mCaptureCallBack(NULL), mCaptureCbkUser(NULL) { @@ -186,6 +187,73 @@ status_t Visualizer::setScalingMode(uint32_t mode) { return status; } +status_t Visualizer::setMeasurementMode(uint32_t mode) { + if ((mode != MEASUREMENT_MODE_NONE) + //Note: needs to be handled as a mask when more measurement modes are added + && ((mode & MEASUREMENT_MODE_PEAK_RMS) != mode)) { + return BAD_VALUE; + } + + Mutex::Autolock _l(mCaptureLock); + + uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2]; + effect_param_t *p = (effect_param_t *)buf32; + + p->psize = sizeof(uint32_t); + p->vsize = sizeof(uint32_t); + *(int32_t *)p->data = VISUALIZER_PARAM_MEASUREMENT_MODE; + *((int32_t *)p->data + 1)= mode; + status_t status = setParameter(p); + + ALOGV("setMeasurementMode mode %d status %d p->status %d", mode, status, p->status); + + if (status == NO_ERROR) { + status = p->status; + if (status == NO_ERROR) { + mMeasurementMode = mode; + } + } + return status; +} + +status_t Visualizer::getIntMeasurements(uint32_t type, uint32_t number, int32_t *measurements) { + if (mMeasurementMode == MEASUREMENT_MODE_NONE) { + ALOGE("Cannot retrieve int measurements, no measurement mode set"); + return INVALID_OPERATION; + } + if (!(mMeasurementMode & type)) { + // measurement type has not been set on this Visualizer + ALOGE("Cannot retrieve int measurements, requested measurement mode 0x%x not set(0x%x)", + type, mMeasurementMode); + return INVALID_OPERATION; + } + // only peak+RMS measurement supported + if ((type != MEASUREMENT_MODE_PEAK_RMS) + // for peak+RMS measurement, the results are 2 int32_t values + || (number != 2)) { + ALOGE("Cannot retrieve int measurements, MEASUREMENT_MODE_PEAK_RMS returns 2 ints, not %d", + number); + return BAD_VALUE; + } + + status_t status = 
NO_ERROR; + if (mEnabled) { + uint32_t replySize = number * sizeof(int32_t); + status = command(VISUALIZER_CMD_MEASURE, + sizeof(uint32_t) /*cmdSize*/, + &type /*cmdData*/, + &replySize, measurements); + ALOGV("getMeasurements() command returned %d", status); + if ((status == NO_ERROR) && (replySize == 0)) { + status = NOT_ENOUGH_DATA; + } + } else { + ALOGV("getMeasurements() disabled"); + return INVALID_OPERATION; + } + return status; +} + status_t Visualizer::getWaveForm(uint8_t *waveform) { if (waveform == NULL) { -- cgit v1.1 From 0ca25cbd26801c5ead00f1358eb157b2ad9dccac Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Mon, 23 Sep 2013 12:29:42 -0700 Subject: AudioTrack: fix music resume Fix regression introduced by commit 5a6cd22 in AudioTrack resume: the callback thread was not signaled if paused internaly. Bug: 10895013. Change-Id: Ic356b115132d6fccbcee2d9bb855e92671dc20c5 --- media/libmedia/AudioRecord.cpp | 5 +++-- media/libmedia/AudioTrack.cpp | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index fb731b9..c5a7777 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -979,12 +979,12 @@ bool AudioRecord::AudioRecordThread::threadLoop() return true; } if (mPausedInt) { - mPausedInt = false; if (mPausedNs > 0) { (void) mMyCond.waitRelative(mMyLock, mPausedNs); } else { mMyCond.wait(mMyLock); } + mPausedInt = false; return true; } } @@ -1029,8 +1029,9 @@ void AudioRecord::AudioRecordThread::pause() void AudioRecord::AudioRecordThread::resume() { AutoMutex _l(mMyLock); - if (mPaused) { + if (mPaused || mPausedInt) { mPaused = false; + mPausedInt = false; mMyCond.signal(); } } diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index fdcf911..754a4e3 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1800,12 +1800,12 @@ bool AudioTrack::AudioTrackThread::threadLoop() return true; } if (mPausedInt) { - mPausedInt = false; if (mPausedNs > 0) { (void) mMyCond.waitRelative(mMyLock, mPausedNs); } else { mMyCond.wait(mMyLock); } + mPausedInt = false; return true; } } @@ -1850,8 +1850,9 @@ void AudioTrack::AudioTrackThread::pause() void AudioTrack::AudioTrackThread::resume() { AutoMutex _l(mMyLock); - if (mPaused) { + if (mPaused || mPausedInt) { mPaused = false; + mPausedInt = false; mMyCond.signal(); } } -- cgit v1.1 From 664539d25180ab8f77e0521533ea2821cf28985f Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Mon, 23 Sep 2013 18:24:31 -0700 Subject: audioflinger: fix ro.audio.silent in offload again Commit 1abbdb4 was not working 100% of the times because it is possible that the offload thread loop never sleeps after being created in which case the property is never read. The loop now reads the property once when starting. Bug: 10899309. Change-Id: I2e2ca332f2d7086e59d65f6010378c4d9618ba9e --- services/audioflinger/Threads.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 5f36cab..b771e3b 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2115,6 +2115,8 @@ bool AudioFlinger::PlaybackThread::threadLoop() // and then that string will be logged at the next convenient opportunity. 
const char *logString = NULL; + checkSilentMode_l(); + while (!exitPending()) { cpuStats.sample(myName); -- cgit v1.1 From ffd5687c9ece8e28779793a20f06f99c7199ce44 Mon Sep 17 00:00:00 2001 From: Chong Zhang Date: Tue, 24 Sep 2013 10:04:42 -0700 Subject: Send kWhatConnected in onTimeUpdate() before first access unit Bug: 10642588 Change-Id: If2b4fbbf250d5307e304f31c7aa4ac480e279484 --- media/libstagefright/rtsp/MyHandler.h | 40 ++++++++++++++++++++--------------- 1 file changed, 23 insertions(+), 17 deletions(-) diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index 946f602..f4b5846 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -1681,6 +1681,26 @@ private: return true; } + void handleFirstAccessUnit() { + if (mFirstAccessUnit) { + sp msg = mNotify->dup(); + msg->setInt32("what", kWhatConnected); + msg->post(); + + if (mSeekable) { + for (size_t i = 0; i < mTracks.size(); ++i) { + TrackInfo *info = &mTracks.editItemAt(i); + + postNormalPlayTimeMapping( + i, + info->mNormalPlayTimeRTP, info->mNormalPlayTimeUs); + } + } + + mFirstAccessUnit = false; + } + } + void onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) { ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = 0x%016llx", trackIndex, rtpTime, ntpTime); @@ -1712,6 +1732,8 @@ private: } } if (mAllTracksHaveTime && dataReceivedOnAllChannels()) { + handleFirstAccessUnit(); + // Time is now established, lets start timestamping immediately for (size_t i = 0; i < mTracks.size(); ++i) { TrackInfo *trackInfo = &mTracks.editItemAt(i); @@ -1745,23 +1767,7 @@ private: return; } - if (mFirstAccessUnit) { - sp msg = mNotify->dup(); - msg->setInt32("what", kWhatConnected); - msg->post(); - - if (mSeekable) { - for (size_t i = 0; i < mTracks.size(); ++i) { - TrackInfo *info = &mTracks.editItemAt(i); - - postNormalPlayTimeMapping( - i, - info->mNormalPlayTimeRTP, info->mNormalPlayTimeUs); - } - } - - mFirstAccessUnit = false; - } + handleFirstAccessUnit(); TrackInfo *track = &mTracks.editItemAt(trackIndex); -- cgit v1.1 From 3d00aa6de95fb46e36f2bab4e3facdf0b96acf06 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Tue, 24 Sep 2013 09:53:27 -0700 Subject: soundpool: allocate shared memory heap by client Current SoundPool implementation allocates the shared memory heap containing decoded PCM samples in mediaserver process. When mediaserver process crashes, the shared memory heaps allocated by AudioCache cannot be mapped anymore in the new instance of mediaserver. This causes a silent failure to end playback of new sounds because AudioFlinger believes the new AudioTracks are opened in streaming mode and not static mode: it sees a NULL shared memory pointer when the track is created. The fix consists in allocating the memory heap in the client process. Thus the heap is not lost when mediaserver restarts. The global memory usage is the same as this is shared memory. Also added a way to detect that a shared memory is passed when the track is created but cannot be mapped on mediaserver side. Also fix a crash in SoundPool when ALOGV is enabled. Bug: 10894793. 
Change-Id: Ice6c66ec3b2a409d75dc903a508b6c6fbfb2e8a7 --- include/media/IMediaPlayerService.h | 8 ++- include/media/SoundPool.h | 3 + include/media/mediaplayer.h | 8 ++- media/libmedia/IAudioFlinger.cpp | 27 ++++++-- media/libmedia/IMediaPlayerService.cpp | 73 +++++++++++++++------- media/libmedia/SoundPool.cpp | 40 ++++++------ media/libmedia/mediaplayer.cpp | 24 ++++--- media/libmediaplayerservice/MediaPlayerService.cpp | 49 ++++++++------- media/libmediaplayerservice/MediaPlayerService.h | 13 ++-- 9 files changed, 162 insertions(+), 83 deletions(-) diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h index fef7af2..2998b37 100644 --- a/include/media/IMediaPlayerService.h +++ b/include/media/IMediaPlayerService.h @@ -49,8 +49,12 @@ public: virtual sp createMetadataRetriever() = 0; virtual sp create(const sp& client, int audioSessionId = 0) = 0; - virtual sp decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) = 0; - virtual sp decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) = 0; + virtual status_t decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, + audio_format_t* pFormat, + const sp& heap, size_t *pSize) = 0; + virtual status_t decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, + int* pNumChannels, audio_format_t* pFormat, + const sp& heap, size_t *pSize) = 0; virtual sp getOMX() = 0; virtual sp makeCrypto() = 0; virtual sp makeDrm() = 0; diff --git a/include/media/SoundPool.h b/include/media/SoundPool.h index 9e5654f..2dd78cc 100644 --- a/include/media/SoundPool.h +++ b/include/media/SoundPool.h @@ -22,6 +22,8 @@ #include #include #include +#include +#include namespace android { @@ -85,6 +87,7 @@ private: int64_t mLength; char* mUrl; sp mData; + sp mHeap; }; // stores pending events for stolen channels diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h index 923c8b2..2177c4c 100644 --- a/include/media/mediaplayer.h +++ b/include/media/mediaplayer.h @@ -223,8 +223,12 @@ public: bool isLooping(); status_t setVolume(float leftVolume, float rightVolume); void notify(int msg, int ext1, int ext2, const Parcel *obj = NULL); - static sp decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat); - static sp decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat); + static status_t decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, + audio_format_t* pFormat, + const sp& heap, size_t *pSize); + static status_t decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, + int* pNumChannels, audio_format_t* pFormat, + const sp& heap, size_t *pSize); status_t invoke(const Parcel& request, Parcel *reply); status_t setMetadataFilter(const Parcel& filter); status_t getMetadata(bool update_only, bool apply_filter, Parcel *metadata); diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index be818c6..68928f1 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -108,7 +108,12 @@ public: data.writeInt32(frameCount); track_flags_t lFlags = flags != NULL ? 
*flags : (track_flags_t) TRACK_DEFAULT; data.writeInt32(lFlags); - data.writeStrongBinder(sharedBuffer->asBinder()); + if (sharedBuffer != 0) { + data.writeInt32(true); + data.writeStrongBinder(sharedBuffer->asBinder()); + } else { + data.writeInt32(false); + } data.writeInt32((int32_t) output); data.writeInt32((int32_t) tid); int lSessionId = 0; @@ -738,15 +743,27 @@ status_t BnAudioFlinger::onTransact( audio_channel_mask_t channelMask = data.readInt32(); size_t frameCount = data.readInt32(); track_flags_t flags = (track_flags_t) data.readInt32(); - sp buffer = interface_cast(data.readStrongBinder()); + bool haveSharedBuffer = data.readInt32() != 0; + sp buffer; + if (haveSharedBuffer) { + buffer = interface_cast(data.readStrongBinder()); + } audio_io_handle_t output = (audio_io_handle_t) data.readInt32(); pid_t tid = (pid_t) data.readInt32(); int sessionId = data.readInt32(); String8 name; status_t status; - sp track = createTrack( - (audio_stream_type_t) streamType, sampleRate, format, - channelMask, frameCount, &flags, buffer, output, tid, &sessionId, name, &status); + sp track; + if ((haveSharedBuffer && (buffer == 0)) || + ((buffer != 0) && (buffer->pointer() == NULL))) { + ALOGW("CREATE_TRACK: cannot retrieve shared memory"); + status = DEAD_OBJECT; + } else { + track = createTrack( + (audio_stream_type_t) streamType, sampleRate, format, + channelMask, frameCount, &flags, buffer, output, tid, + &sessionId, name, &status); + } reply->writeInt32(flags); reply->writeInt32(sessionId); reply->writeString8(name); diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp index 74f574d..3c22b4c 100644 --- a/media/libmedia/IMediaPlayerService.cpp +++ b/media/libmedia/IMediaPlayerService.cpp @@ -86,30 +86,48 @@ public: return interface_cast(reply.readStrongBinder()); } - virtual sp decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) + virtual status_t decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, + audio_format_t* pFormat, + const sp& heap, size_t *pSize) { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); data.writeCString(url); - remote()->transact(DECODE_URL, data, &reply); - *pSampleRate = uint32_t(reply.readInt32()); - *pNumChannels = reply.readInt32(); - *pFormat = (audio_format_t) reply.readInt32(); - return interface_cast(reply.readStrongBinder()); + data.writeStrongBinder(heap->asBinder()); + status_t status = remote()->transact(DECODE_URL, data, &reply); + if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + if (status == NO_ERROR) { + *pSampleRate = uint32_t(reply.readInt32()); + *pNumChannels = reply.readInt32(); + *pFormat = (audio_format_t)reply.readInt32(); + *pSize = (size_t)reply.readInt32(); + } + } + return status; } - virtual sp decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) + virtual status_t decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, + int* pNumChannels, audio_format_t* pFormat, + const sp& heap, size_t *pSize) { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); data.writeFileDescriptor(fd); data.writeInt64(offset); data.writeInt64(length); - remote()->transact(DECODE_FD, data, &reply); - *pSampleRate = uint32_t(reply.readInt32()); - *pNumChannels = reply.readInt32(); - *pFormat = (audio_format_t) reply.readInt32(); - return interface_cast(reply.readStrongBinder()); + 
data.writeStrongBinder(heap->asBinder()); + status_t status = remote()->transact(DECODE_FD, data, &reply); + if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + if (status == NO_ERROR) { + *pSampleRate = uint32_t(reply.readInt32()); + *pNumChannels = reply.readInt32(); + *pFormat = (audio_format_t)reply.readInt32(); + *pSize = (size_t)reply.readInt32(); + } + } + return status; } virtual sp getOMX() { @@ -205,14 +223,19 @@ status_t BnMediaPlayerService::onTransact( case DECODE_URL: { CHECK_INTERFACE(IMediaPlayerService, data, reply); const char* url = data.readCString(); + sp heap = interface_cast(data.readStrongBinder()); uint32_t sampleRate; int numChannels; audio_format_t format; - sp player = decode(url, &sampleRate, &numChannels, &format); - reply->writeInt32(sampleRate); - reply->writeInt32(numChannels); - reply->writeInt32((int32_t) format); - reply->writeStrongBinder(player->asBinder()); + size_t size; + status_t status = decode(url, &sampleRate, &numChannels, &format, heap, &size); + reply->writeInt32(status); + if (status == NO_ERROR) { + reply->writeInt32(sampleRate); + reply->writeInt32(numChannels); + reply->writeInt32((int32_t)format); + reply->writeInt32((int32_t)size); + } return NO_ERROR; } break; case DECODE_FD: { @@ -220,14 +243,20 @@ status_t BnMediaPlayerService::onTransact( int fd = dup(data.readFileDescriptor()); int64_t offset = data.readInt64(); int64_t length = data.readInt64(); + sp heap = interface_cast(data.readStrongBinder()); uint32_t sampleRate; int numChannels; audio_format_t format; - sp player = decode(fd, offset, length, &sampleRate, &numChannels, &format); - reply->writeInt32(sampleRate); - reply->writeInt32(numChannels); - reply->writeInt32((int32_t) format); - reply->writeStrongBinder(player->asBinder()); + size_t size; + status_t status = decode(fd, offset, length, &sampleRate, &numChannels, &format, + heap, &size); + reply->writeInt32(status); + if (status == NO_ERROR) { + reply->writeInt32(sampleRate); + reply->writeInt32(numChannels); + reply->writeInt32((int32_t)format); + reply->writeInt32((int32_t)size); + } return NO_ERROR; } break; case CREATE_MEDIA_RECORDER: { diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index 5239b2f..8434d43 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -32,6 +32,8 @@ int kDefaultBufferCount = 4; uint32_t kMaxSampleRate = 48000; uint32_t kDefaultSampleRate = 44100; uint32_t kDefaultFrameCount = 1200; +size_t kDefaultHeapSize = 1024 * 1024; // 1MB + SoundPool::SoundPool(int maxChannels, audio_stream_type_t streamType, int srcQuality) { @@ -464,7 +466,6 @@ Sample::Sample(int sampleID, int fd, int64_t offset, int64_t length) void Sample::init() { - mData = 0; mSize = 0; mRefCount = 0; mSampleID = 0; @@ -482,7 +483,6 @@ Sample::~Sample() ALOGV("close(%d)", mFd); ::close(mFd); } - mData.clear(); free(mUrl); } @@ -491,44 +491,48 @@ status_t Sample::doLoad() uint32_t sampleRate; int numChannels; audio_format_t format; - sp p; + status_t status; + mHeap = new MemoryHeapBase(kDefaultHeapSize); + ALOGV("Start decode"); if (mUrl) { - p = MediaPlayer::decode(mUrl, &sampleRate, &numChannels, &format); + status = MediaPlayer::decode(mUrl, &sampleRate, &numChannels, &format, mHeap, &mSize); } else { - p = MediaPlayer::decode(mFd, mOffset, mLength, &sampleRate, &numChannels, &format); + status = MediaPlayer::decode(mFd, mOffset, mLength, &sampleRate, &numChannels, &format, + mHeap, &mSize); ALOGV("close(%d)", mFd); ::close(mFd); mFd = -1; } - if (p == 0) { + if 
(status != NO_ERROR) { ALOGE("Unable to load sample: %s", mUrl); - return -1; + goto error; } ALOGV("pointer = %p, size = %u, sampleRate = %u, numChannels = %d", - p->pointer(), p->size(), sampleRate, numChannels); + mHeap->getBase(), mSize, sampleRate, numChannels); if (sampleRate > kMaxSampleRate) { ALOGE("Sample rate (%u) out of range", sampleRate); - return - 1; + status = BAD_VALUE; + goto error; } if ((numChannels < 1) || (numChannels > 2)) { ALOGE("Sample channel count (%d) out of range", numChannels); - return - 1; + status = BAD_VALUE; + goto error; } - //_dumpBuffer(p->pointer(), p->size()); - uint8_t* q = static_cast(p->pointer()) + p->size() - 10; - //_dumpBuffer(q, 10, 10, false); - - mData = p; - mSize = p->size(); + mData = new MemoryBase(mHeap, 0, mSize); mSampleRate = sampleRate; mNumChannels = numChannels; mFormat = format; mState = READY; - return 0; + return NO_ERROR; + +error: + mHeap.clear(); + return status; } @@ -744,7 +748,7 @@ void SoundChannel::process(int event, void *info, unsigned long toggle) ALOGV("process %p channel %d EVENT_UNDERRUN or EVENT_BUFFER_END", this, mChannelID); mSoundPool->addToStopList(this); } else if (event == AudioTrack::EVENT_LOOP_END) { - ALOGV("End loop %p channel %d count %d", this, mChannelID, *(int *)info); + ALOGV("End loop %p channel %d", this, mChannelID); } } diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index 4323d0c..0f6d897 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -776,17 +776,20 @@ void MediaPlayer::notify(int msg, int ext1, int ext2, const Parcel *obj) } } -/*static*/ sp MediaPlayer::decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) +/*static*/ status_t MediaPlayer::decode(const char* url, uint32_t *pSampleRate, + int* pNumChannels, audio_format_t* pFormat, + const sp& heap, size_t *pSize) { ALOGV("decode(%s)", url); - sp p; + status_t status; const sp& service = getMediaPlayerService(); if (service != 0) { - p = service->decode(url, pSampleRate, pNumChannels, pFormat); + status = service->decode(url, pSampleRate, pNumChannels, pFormat, heap, pSize); } else { ALOGE("Unable to locate media service"); + status = DEAD_OBJECT; } - return p; + return status; } @@ -796,17 +799,22 @@ void MediaPlayer::died() notify(MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED, 0); } -/*static*/ sp MediaPlayer::decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) +/*static*/ status_t MediaPlayer::decode(int fd, int64_t offset, int64_t length, + uint32_t *pSampleRate, int* pNumChannels, + audio_format_t* pFormat, + const sp& heap, size_t *pSize) { ALOGV("decode(%d, %lld, %lld)", fd, offset, length); - sp p; + status_t status; const sp& service = getMediaPlayerService(); if (service != 0) { - p = service->decode(fd, offset, length, pSampleRate, pNumChannels, pFormat); + status = service->decode(fd, offset, length, pSampleRate, + pNumChannels, pFormat, heap, pSize); } else { ALOGE("Unable to locate media service"); + status = DEAD_OBJECT; } - return p; + return status; } diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index 0dabd37..9553458 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -319,8 +319,8 @@ status_t MediaPlayerService::AudioCache::dump(int fd, const Vector& ar result.append(" AudioCache\n"); if (mHeap != 0) { - snprintf(buffer, 255, 
" heap base(%p), size(%d), flags(%d), device(%s)\n", - mHeap->getBase(), mHeap->getSize(), mHeap->getFlags(), mHeap->getDevice()); + snprintf(buffer, 255, " heap base(%p), size(%d), flags(%d)\n", + mHeap->getBase(), mHeap->getSize(), mHeap->getFlags()); result.append(buffer); } snprintf(buffer, 255, " msec per frame(%f), channel count(%d), format(%d), frame count(%zd)\n", @@ -1176,13 +1176,13 @@ int Antagonizer::callbackThread(void* user) } #endif -static size_t kDefaultHeapSize = 1024 * 1024; // 1MB - -sp MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) +status_t MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, + audio_format_t* pFormat, + const sp& heap, size_t *pSize) { ALOGV("decode(%s)", url); - sp mem; sp player; + status_t status = BAD_VALUE; // Protect our precious, precious DRMd ringtones by only allowing // decoding of http, but not filesystem paths or content Uris. @@ -1190,7 +1190,7 @@ sp MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, i // filedescriptor for them and use that. if (url != NULL && strncmp(url, "http://", 7) != 0) { ALOGD("Can't decode %s by path, use filedescriptor instead", url); - return mem; + return BAD_VALUE; } player_type playerType = @@ -1198,7 +1198,7 @@ sp MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, i ALOGV("player type = %d", playerType); // create the right type of player - sp cache = new AudioCache(url); + sp cache = new AudioCache(heap); player = MediaPlayerFactory::createPlayer(playerType, cache.get(), cache->notify); if (player == NULL) goto Exit; if (player->hardwareOutput()) goto Exit; @@ -1224,22 +1224,27 @@ sp MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, i goto Exit; } - mem = new MemoryBase(cache->getHeap(), 0, cache->size()); + *pSize = cache->size(); *pSampleRate = cache->sampleRate(); *pNumChannels = cache->channelCount(); *pFormat = cache->format(); - ALOGV("return memory @ %p, sampleRate=%u, channelCount = %d, format = %d", mem->pointer(), *pSampleRate, *pNumChannels, *pFormat); + ALOGV("return size %d sampleRate=%u, channelCount = %d, format = %d", + *pSize, *pSampleRate, *pNumChannels, *pFormat); + status = NO_ERROR; Exit: if (player != 0) player->reset(); - return mem; + return status; } -sp MediaPlayerService::decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat) +status_t MediaPlayerService::decode(int fd, int64_t offset, int64_t length, + uint32_t *pSampleRate, int* pNumChannels, + audio_format_t* pFormat, + const sp& heap, size_t *pSize) { ALOGV("decode(%d, %lld, %lld)", fd, offset, length); - sp mem; sp player; + status_t status = BAD_VALUE; player_type playerType = MediaPlayerFactory::getPlayerType(NULL /* client */, fd, @@ -1248,7 +1253,7 @@ sp MediaPlayerService::decode(int fd, int64_t offset, int64_t length, u ALOGV("player type = %d", playerType); // create the right type of player - sp cache = new AudioCache("decode_fd"); + sp cache = new AudioCache(heap); player = MediaPlayerFactory::createPlayer(playerType, cache.get(), cache->notify); if (player == NULL) goto Exit; if (player->hardwareOutput()) goto Exit; @@ -1274,16 +1279,18 @@ sp MediaPlayerService::decode(int fd, int64_t offset, int64_t length, u goto Exit; } - mem = new MemoryBase(cache->getHeap(), 0, cache->size()); + *pSize = cache->size(); *pSampleRate = cache->sampleRate(); *pNumChannels = cache->channelCount(); *pFormat = cache->format(); - 
ALOGV("return memory @ %p, sampleRate=%u, channelCount = %d, format = %d", mem->pointer(), *pSampleRate, *pNumChannels, *pFormat); + ALOGV("return size %d, sampleRate=%u, channelCount = %d, format = %d", + *pSize, *pSampleRate, *pNumChannels, *pFormat); + status = NO_ERROR; Exit: if (player != 0) player->reset(); ::close(fd); - return mem; + return status; } @@ -1803,12 +1810,10 @@ int MediaPlayerService::AudioOutput::getSessionId() const #undef LOG_TAG #define LOG_TAG "AudioCache" -MediaPlayerService::AudioCache::AudioCache(const char* name) : - mChannelCount(0), mFrameCount(1024), mSampleRate(0), mSize(0), - mError(NO_ERROR), mCommandComplete(false) +MediaPlayerService::AudioCache::AudioCache(const sp& heap) : + mHeap(heap), mChannelCount(0), mFrameCount(1024), mSampleRate(0), mSize(0), + mError(NO_ERROR), mCommandComplete(false) { - // create ashmem heap - mHeap = new MemoryHeapBase(kDefaultHeapSize, 0, name); } uint32_t MediaPlayerService::AudioCache::latency () const diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index 7d27944..21f4117 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -177,7 +177,7 @@ class MediaPlayerService : public BnMediaPlayerService class AudioCache : public MediaPlayerBase::AudioSink { public: - AudioCache(const char* name); + AudioCache(const sp& heap); virtual ~AudioCache() {} virtual bool ready() const { return (mChannelCount > 0) && (mHeap->getHeapID() > 0); } @@ -224,7 +224,7 @@ class MediaPlayerService : public BnMediaPlayerService Mutex mLock; Condition mSignal; - sp mHeap; + sp mHeap; float mMsecsPerFrame; uint16_t mChannelCount; audio_format_t mFormat; @@ -247,8 +247,13 @@ public: virtual sp create(const sp& client, int audioSessionId); - virtual sp decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat); - virtual sp decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat); + virtual status_t decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, + audio_format_t* pFormat, + const sp& heap, size_t *pSize); + virtual status_t decode(int fd, int64_t offset, int64_t length, + uint32_t *pSampleRate, int* pNumChannels, + audio_format_t* pFormat, + const sp& heap, size_t *pSize); virtual sp getOMX(); virtual sp makeCrypto(); virtual sp makeDrm(); -- cgit v1.1 From e93cf2ca27ae6f4a81d4ef548bbf10a34db6d98f Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 24 Sep 2013 11:52:37 -0700 Subject: Cleanup openRecord error handling Bug: 10888816 Change-Id: I84897dd7d30b370640b54e928f230604b873cb68 --- include/media/IAudioFlinger.h | 3 +++ media/libmedia/AudioRecord.cpp | 9 +++++++-- media/libmedia/IAudioFlinger.cpp | 12 ++++++++++++ services/audioflinger/AudioFlinger.cpp | 4 ++++ services/audioflinger/Threads.cpp | 4 +++- 5 files changed, 29 insertions(+), 3 deletions(-) diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index 49f921b..eaf7780 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -53,6 +53,9 @@ public: }; typedef uint32_t track_flags_t; + // invariant on exit for all APIs that return an sp<>: + // (return value != 0) == (*status == NO_ERROR) + /* create an audio track and registers it with AudioFlinger. * return null if the track cannot be created. 
*/ diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index c5a7777..666fafa 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -474,7 +474,7 @@ status_t AudioRecord::openRecord_l(size_t epoch) ALOGE_IF(originalSessionId != 0 && mSessionId != originalSessionId, "session ID changed from %d to %d", originalSessionId, mSessionId); - if (record == 0) { + if (record == 0 || status != NO_ERROR) { ALOGE("AudioFlinger could not create record track, status: %d", status); AudioSystem::releaseInput(input); return status; @@ -484,6 +484,11 @@ status_t AudioRecord::openRecord_l(size_t epoch) ALOGE("Could not get control block"); return NO_INIT; } + void *iMemPointer = iMem->pointer(); + if (iMemPointer == NULL) { + ALOGE("Could not get control block pointer"); + return NO_INIT; + } if (mAudioRecord != 0) { mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this); mDeathNotifier.clear(); @@ -491,7 +496,7 @@ status_t AudioRecord::openRecord_l(size_t epoch) mInput = input; mAudioRecord = record; mCblkMemory = iMem; - audio_track_cblk_t* cblk = static_cast(iMem->pointer()); + audio_track_cblk_t* cblk = static_cast(iMemPointer); mCblk = cblk; // FIXME missing fast track frameCount logic mAwaitBoost = false; diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index be818c6..2cc35c6 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -179,6 +179,17 @@ public: } lStatus = reply.readInt32(); record = interface_cast(reply.readStrongBinder()); + if (lStatus == NO_ERROR) { + if (record == 0) { + ALOGE("openRecord should have returned an IAudioRecord"); + lStatus = UNKNOWN_ERROR; + } + } else { + if (record != 0) { + ALOGE("openRecord returned an IAudioRecord but with status %d", lStatus); + record.clear(); + } + } } if (status) { *status = lStatus; @@ -767,6 +778,7 @@ status_t BnAudioFlinger::onTransact( status_t status; sp record = openRecord(input, sampleRate, format, channelMask, frameCount, &flags, tid, &sessionId, &status); + LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR)); reply->writeInt32(flags); reply->writeInt32(sessionId); reply->writeInt32(status); diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index f6e4c6a..b8f2d0d 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1242,6 +1242,7 @@ sp AudioFlinger::openRecord( // check calling permissions if (!recordingAllowed()) { + ALOGE("openRecord() permission denied: recording not allowed"); lStatus = PERMISSION_DENIED; goto Exit; } @@ -1257,12 +1258,14 @@ sp AudioFlinger::openRecord( Mutex::Autolock _l(mLock); thread = checkRecordThread_l(input); if (thread == NULL) { + ALOGE("openRecord() checkRecordThread_l failed"); lStatus = BAD_VALUE; goto Exit; } if (deviceRequiresCaptureAudioOutputPermission(thread->inDevice()) && !captureAudioOutputAllowed()) { + ALOGE("openRecord() permission denied: capture not allowed"); lStatus = PERMISSION_DENIED; goto Exit; } @@ -1283,6 +1286,7 @@ sp AudioFlinger::openRecord( // The record track uses one track in mHardwareMixerThread by convention. 
recordTrack = thread->createRecordTrack_l(client, sampleRate, format, channelMask, frameCount, lSessionId, flags, tid, &lStatus); + LOG_ALWAYS_FATAL_IF((recordTrack != 0) != (lStatus == NO_ERROR)); } if (lStatus != NO_ERROR) { // remove local strong reference to Client before deleting the RecordTrack so that the diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 5f36cab..cba9ad1 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -4548,7 +4548,7 @@ sp AudioFlinger::RecordThread::createR lStatus = initCheck(); if (lStatus != NO_ERROR) { - ALOGE("Audio driver not initialized."); + ALOGE("createRecordTrack_l() audio driver not initialized"); goto Exit; } @@ -4612,7 +4612,9 @@ sp AudioFlinger::RecordThread::createR format, channelMask, frameCount, sessionId); if (track->getCblk() == 0) { + ALOGE("createRecordTrack_l() no control block"); lStatus = NO_MEMORY; + track.clear(); goto Exit; } mTracks.add(track); -- cgit v1.1 From ddfbfaeb00295fff7351711f0f044f17d6c40f3c Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 20 Sep 2013 12:27:32 -0700 Subject: fix oflload effect proxy commmand handling Implement a more generic command handling in offload effect proxy. All commands are sent to both sub effects but only the reply from the active one is returned to the caller. Bug: 8174034. Change-Id: Ia45f9933b3bf338257ec70b37732fa1578d26b9f --- media/libeffects/proxy/EffectProxy.cpp | 95 ++++++++++++++++++---------------- media/libeffects/proxy/EffectProxy.h | 5 ++ 2 files changed, 54 insertions(+), 46 deletions(-) diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp index 41640da..b3304b7 100644 --- a/media/libeffects/proxy/EffectProxy.cpp +++ b/media/libeffects/proxy/EffectProxy.cpp @@ -48,20 +48,6 @@ static const effect_descriptor_t *const gDescriptors[] = &gProxyDescriptor, }; -static inline bool isGetterCmd(uint32_t cmdCode) -{ - switch (cmdCode) { - case EFFECT_CMD_GET_PARAM: - case EFFECT_CMD_GET_CONFIG: - case EFFECT_CMD_GET_CONFIG_REVERSE: - case EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS: - case EFFECT_CMD_GET_FEATURE_CONFIG: - return true; - default: - return false; - } -} - int EffectProxyCreate(const effect_uuid_t *uuid, int32_t sessionId, @@ -80,6 +66,7 @@ int EffectProxyCreate(const effect_uuid_t *uuid, pContext->ioId = ioId; pContext->uuid = *uuid; pContext->common_itfe = &gEffectInterface; + // The sub effects will be created in effect_command when the first command // for the effect is received pContext->eHandle[SUB_FX_HOST] = pContext->eHandle[SUB_FX_OFFLOAD] = NULL; @@ -124,6 +111,10 @@ int EffectProxyCreate(const effect_uuid_t *uuid, uuid_print.node[1], uuid_print.node[2], uuid_print.node[3], uuid_print.node[4], uuid_print.node[5]); #endif + + pContext->replySize = PROXY_REPLY_SIZE_DEFAULT; + pContext->replyData = (char *)malloc(PROXY_REPLY_SIZE_DEFAULT); + *pHandle = (effect_handle_t)pContext; ALOGV("EffectCreate end"); return 0; @@ -137,6 +128,8 @@ int EffectProxyRelease(effect_handle_t handle) { } ALOGV("EffectRelease"); delete pContext->desc; + free(pContext->replyData); + if (pContext->eHandle[SUB_FX_HOST]) EffectRelease(pContext->eHandle[SUB_FX_HOST]); if (pContext->eHandle[SUB_FX_OFFLOAD]) @@ -253,43 +246,53 @@ int Effect_command(effect_handle_t self, } // Getter commands are only sent to the active sub effect. - uint32_t hostReplySize = replySize != NULL ? 
*replySize : 0; - bool hostReplied = false; - int hostStatus = 0; - uint32_t offloadReplySize = replySize != NULL ? *replySize : 0; - bool offloadReplied = false; - int offloadStatus = 0; + int *subStatus[SUB_FX_COUNT]; + uint32_t *subReplySize[SUB_FX_COUNT]; + void *subReplyData[SUB_FX_COUNT]; + uint32_t tmpSize; + int tmpStatus; - if (pContext->eHandle[SUB_FX_HOST] && (!isGetterCmd(cmdCode) || index == SUB_FX_HOST)) { - hostStatus = (*pContext->eHandle[SUB_FX_HOST])->command( - pContext->eHandle[SUB_FX_HOST], cmdCode, cmdSize, - pCmdData, replySize != NULL ? &hostReplySize : NULL, pReplyData); - hostReplied = true; - } - if (pContext->eHandle[SUB_FX_OFFLOAD] && (!isGetterCmd(cmdCode) || index == SUB_FX_OFFLOAD)) { - // In case of SET CMD, when the offload stream is unavailable, - // we will store the effect param values in the DSP effect wrapper. - // When the offload effects get enabled, we send these values to the - // DSP during Effect_config. - // So,we send the params to DSP wrapper also - offloadStatus = (*pContext->eHandle[SUB_FX_OFFLOAD])->command( - pContext->eHandle[SUB_FX_OFFLOAD], cmdCode, cmdSize, - pCmdData, replySize != NULL ? &offloadReplySize : NULL, pReplyData); - offloadReplied = true; + // grow temp reply buffer if needed + if (replySize != NULL) { + tmpSize = pContext->replySize; + while (tmpSize < *replySize && tmpSize < PROXY_REPLY_SIZE_MAX) { + tmpSize *= 2; + } + if (tmpSize > pContext->replySize) { + ALOGV("Effect_command grow reply buf to %d", tmpSize); + pContext->replyData = (char *)realloc(pContext->replyData, tmpSize); + pContext->replySize = tmpSize; + } + if (tmpSize > *replySize) { + tmpSize = *replySize; + } + } else { + tmpSize = 0; } - // By convention the offloaded implementation reply is returned if command is processed by both - // host and offloaded sub effects - if (offloadReplied){ - status = offloadStatus; - if (replySize) { - *replySize = offloadReplySize; + // tmpSize is now the actual reply size for the non active sub effect + + // Send command to sub effects. The command is sent to all sub effects so that their internal + // state is kept in sync. + // Only the reply from the active sub effect is returned to the caller. The reply from the + // other sub effect is lost in pContext->replyData + for (int i = 0; i < SUB_FX_COUNT; i++) { + if (pContext->eHandle[i] == NULL) { + continue; } - } else if (hostReplied) { - status = hostStatus; - if (replySize) { - *replySize = hostReplySize; + if (i == index) { + subStatus[i] = &status; + subReplySize[i] = replySize; + subReplyData[i] = pReplyData; + } else { + subStatus[i] = &tmpStatus; + subReplySize[i] = replySize == NULL ? NULL : &tmpSize; + subReplyData[i] = pReplyData == NULL ? 
NULL : pContext->replyData; } + *subStatus[i] = (*pContext->eHandle[i])->command( + pContext->eHandle[i], cmdCode, cmdSize, + pCmdData, subReplySize[i], subReplyData[i]); } + return status; } /* end Effect_command */ diff --git a/media/libeffects/proxy/EffectProxy.h b/media/libeffects/proxy/EffectProxy.h index 8992f93..acbe17e 100644 --- a/media/libeffects/proxy/EffectProxy.h +++ b/media/libeffects/proxy/EffectProxy.h @@ -57,6 +57,9 @@ const struct effect_interface_s gEffectInterface = { NULL, }; +#define PROXY_REPLY_SIZE_MAX (64 * 1024) // must be power of two +#define PROXY_REPLY_SIZE_DEFAULT 32 // must be power of two + struct EffectContext { const struct effect_interface_s *common_itfe; // Holds the itfe of the Proxy effect_descriptor_t* desc; // Points to the sub effect descriptors @@ -67,6 +70,8 @@ struct EffectContext { int32_t ioId; // The ioId in which the effect is created. // Stored in context to pass on to sub effect creation effect_uuid_t uuid; // UUID of the Proxy + char* replyData; // temporary buffer for non active sub effect command reply + uint32_t replySize; // current size of temporary reply buffer }; #if __cplusplus -- cgit v1.1 From 6fbc9ef121b081f888163190bb13cbac31599900 Mon Sep 17 00:00:00 2001 From: Jean-Michel Trivi Date: Tue, 24 Sep 2013 15:31:13 -0700 Subject: Fix log typos in Visualizer effect Fix errors in logs for Visualizer. Set loop counters on 32 bits Bug 8413913 Change-Id: Iad2140d003d15d45be46826a5e89baff14fe9e77 --- media/libeffects/visualizer/EffectVisualizer.cpp | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp index 0f27cbf..dc403ab 100644 --- a/media/libeffects/visualizer/EffectVisualizer.cpp +++ b/media/libeffects/visualizer/EffectVisualizer.cpp @@ -58,7 +58,7 @@ enum visualizer_state_e { #define DISCARD_MEASUREMENTS_TIME_MS 2000 // discard measurements older than this number of ms // maximum number of buffers for which we keep track of the measurements -#define MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS 25 +#define MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS 25 // note: buffer index is stored in uint8_t struct BufferStats { @@ -210,7 +210,7 @@ int Visualizer_init(VisualizerContext *pContext) pContext->mMeasurementMode = MEASUREMENT_MODE_NONE; pContext->mMeasurementWindowSizeInBuffers = MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS; pContext->mMeasurementBufferIdx = 0; - for (uint8_t i=0 ; imMeasurementWindowSizeInBuffers ; i++) { + for (uint32_t i=0 ; imMeasurementWindowSizeInBuffers ; i++) { pContext->mPastMeasurements[i].mIsValid = false; pContext->mPastMeasurements[i].mPeakU16 = 0; pContext->mPastMeasurements[i].mRmsSquared = 0; @@ -603,8 +603,8 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, // measurements aren't relevant anymore and shouldn't bias the new one) const int32_t delayMs = Visualizer_getDeltaTimeMsFromUpdatedTime(pContext); if (delayMs > DISCARD_MEASUREMENTS_TIME_MS) { - ALOGE("Discarding measurements, last measurement is %dms old", delayMs); - for (uint8_t i=0 ; imMeasurementWindowSizeInBuffers ; i++) { + ALOGV("Discarding measurements, last measurement is %dms old", delayMs); + for (uint32_t i=0 ; imMeasurementWindowSizeInBuffers ; i++) { pContext->mPastMeasurements[i].mIsValid = false; pContext->mPastMeasurements[i].mPeakU16 = 0; pContext->mPastMeasurements[i].mRmsSquared = 0; @@ -614,14 +614,12 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t 
cmdSize, // only use actual measurements, otherwise the first RMS measure happening before // MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS have been played will always be artificially // low - for (uint8_t i=0 ; i < pContext->mMeasurementWindowSizeInBuffers ; i++) { + for (uint32_t i=0 ; i < pContext->mMeasurementWindowSizeInBuffers ; i++) { if (pContext->mPastMeasurements[i].mIsValid) { if (pContext->mPastMeasurements[i].mPeakU16 > peakU16) { peakU16 = pContext->mPastMeasurements[i].mPeakU16; } - if (pContext->mMeasurementWindowSizeInBuffers != 0) { - sumRmsSquared += pContext->mPastMeasurements[i].mRmsSquared; - } + sumRmsSquared += pContext->mPastMeasurements[i].mRmsSquared; nbValidMeasurements++; } } @@ -639,7 +637,7 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize, } else { pIntReplyData[MEASUREMENT_IDX_PEAK] = (int32_t) (2000 * log10(peakU16 / 32767.0f)); } - ALOGV("LEVEL_MONITOR_CMD_MEASURE peak=%d (%dmB), rms=%.1f (%dmB)", + ALOGV("VISUALIZER_CMD_MEASURE peak=%d (%dmB), rms=%.1f (%dmB)", peakU16, pIntReplyData[MEASUREMENT_IDX_PEAK], rms, pIntReplyData[MEASUREMENT_IDX_RMS]); } -- cgit v1.1 From eb3c337a3d6c74ec857dfc8be7eeafe634614bcd Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Wed, 25 Sep 2013 12:25:29 -0700 Subject: fix deadlock in audioflinger::createEffect() commit 5baf2af5 introduced a regression by calling getOutputForEffect() with AudioFLinger main mutex locked. The locking order must always be AudioPolicyService mutex then AudioFlinger then ThreadBase mutex. Bug: 10916796. Change-Id: Ide34a2d84dbb06dbb35abd0640d91b01b0ac4d40 --- services/audioflinger/AudioFlinger.cpp | 39 ++++++++++++++++------------------ services/audioflinger/Effects.h | 4 ++++ 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index f6e4c6a..e70d566 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -2102,9 +2102,6 @@ sp AudioFlinger::createEffect( } { - Mutex::Autolock _l(mLock); - - if (!EffectIsNullUuid(&pDesc->uuid)) { // if uuid is specified, request effect descriptor lStatus = EffectGetDescriptor(&pDesc->uuid, &desc); @@ -2177,6 +2174,15 @@ sp AudioFlinger::createEffect( // return effect descriptor *pDesc = desc; + if (io == 0 && sessionId == AUDIO_SESSION_OUTPUT_MIX) { + // if the output returned by getOutputForEffect() is removed before we lock the + // mutex below, the call to checkPlaybackThread_l(io) below will detect it + // and we will exit safely + io = AudioSystem::getOutputForEffect(&desc); + ALOGV("createEffect got output %d", io); + } + + Mutex::Autolock _l(mLock); // If output is not specified try to find a matching audio session ID in one of the // output threads. 
@@ -2190,29 +2196,20 @@ sp AudioFlinger::createEffect( lStatus = BAD_VALUE; goto Exit; } - if (sessionId == AUDIO_SESSION_OUTPUT_MIX) { - // if the output returned by getOutputForEffect() is removed before we lock the - // mutex below, the call to checkPlaybackThread_l(io) below will detect it - // and we will exit safely - io = AudioSystem::getOutputForEffect(&desc); - ALOGV("createEffect got output %d", io); + // look for the thread where the specified audio session is present + for (size_t i = 0; i < mPlaybackThreads.size(); i++) { + if (mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId) != 0) { + io = mPlaybackThreads.keyAt(i); + break; + } } if (io == 0) { - // look for the thread where the specified audio session is present - for (size_t i = 0; i < mPlaybackThreads.size(); i++) { - if (mPlaybackThreads.valueAt(i)->hasAudioSession(sessionId) != 0) { - io = mPlaybackThreads.keyAt(i); + for (size_t i = 0; i < mRecordThreads.size(); i++) { + if (mRecordThreads.valueAt(i)->hasAudioSession(sessionId) != 0) { + io = mRecordThreads.keyAt(i); break; } } - if (io == 0) { - for (size_t i = 0; i < mRecordThreads.size(); i++) { - if (mRecordThreads.valueAt(i)->hasAudioSession(sessionId) != 0) { - io = mRecordThreads.keyAt(i); - break; - } - } - } } // If no output thread contains the requested session ID, default to // first output. The effect chain will be moved to the correct output diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h index c35cff0..b717857 100644 --- a/services/audioflinger/Effects.h +++ b/services/audioflinger/Effects.h @@ -25,6 +25,10 @@ // state changes or resource modifications. Always respect the following order // if multiple mutexes must be acquired to avoid cross deadlock: // AudioFlinger -> ThreadBase -> EffectChain -> EffectModule +// In addition, methods that lock the AudioPolicyService mutex (getOutputForEffect(), +// startOutput()...) should never be called with AudioFlinger or Threadbase mutex locked +// to avoid cross deadlock with other clients calling AudioPolicyService methods that in turn +// call AudioFlinger thus locking the same mutexes in the reverse order. // The EffectModule class is a wrapper object controlling the effect engine implementation // in the effect library. 
It prevents concurrent calls to process() and command() functions -- cgit v1.1 From 2b59be89dc245b6e2475d9e8b0c5f2392370e71e Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Wed, 25 Sep 2013 10:14:30 -0700 Subject: Camera: Implement getCameraCharacteristics Bug: 10904541 Bug: 10360518 Change-Id: Ie9ca6b3b0b5f2fe529e6b0decc193096e770a017 --- camera/ICameraService.cpp | 36 +++++++++++++++++++++ include/camera/ICameraService.h | 5 +++ services/camera/libcameraservice/CameraService.cpp | 37 ++++++++++++++++++++++ services/camera/libcameraservice/CameraService.h | 2 ++ 4 files changed, 80 insertions(+) diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp index 3debe22..5fc89fb 100644 --- a/camera/ICameraService.cpp +++ b/camera/ICameraService.cpp @@ -33,6 +33,7 @@ #include #include #include +#include namespace android { @@ -119,6 +120,29 @@ public: return result; } + // get camera characteristics (static metadata) + virtual status_t getCameraCharacteristics(int cameraId, + CameraMetadata* cameraInfo) { + Parcel data, reply; + data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); + data.writeInt32(cameraId); + remote()->transact(BnCameraService::GET_CAMERA_CHARACTERISTICS, data, &reply); + + if (readExceptionCode(reply)) return -EPROTO; + status_t result = reply.readInt32(); + + CameraMetadata out; + if (reply.readInt32() != 0) { + out.readFromParcel(&reply); + } + + if (cameraInfo != NULL) { + cameraInfo->swap(out); + } + + return result; + } + // connect to camera service (android.hardware.Camera) virtual status_t connect(const sp& cameraClient, int cameraId, const String16 &clientPackageName, int clientUid, @@ -239,6 +263,18 @@ status_t BnCameraService::onTransact( reply->writeInt32(cameraInfo.orientation); return NO_ERROR; } break; + case GET_CAMERA_CHARACTERISTICS: { + CHECK_INTERFACE(ICameraService, data, reply); + CameraMetadata info; + status_t result = getCameraCharacteristics(data.readInt32(), &info); + reply->writeNoException(); + reply->writeInt32(result); + + // out-variables are after exception and return value + reply->writeInt32(1); // means the parcelable is included + info.writeToParcel(reply); + return NO_ERROR; + } break; case CONNECT: { CHECK_INTERFACE(ICameraService, data, reply); sp cameraClient = diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h index 0e10699..f342122 100644 --- a/include/camera/ICameraService.h +++ b/include/camera/ICameraService.h @@ -30,6 +30,7 @@ class IProCameraCallbacks; class ICameraServiceListener; class ICameraDeviceUser; class ICameraDeviceCallbacks; +class CameraMetadata; class ICameraService : public IInterface { @@ -45,6 +46,7 @@ public: CONNECT_DEVICE, ADD_LISTENER, REMOVE_LISTENER, + GET_CAMERA_CHARACTERISTICS, }; enum { @@ -58,6 +60,9 @@ public: virtual status_t getCameraInfo(int cameraId, struct CameraInfo* cameraInfo) = 0; + virtual status_t getCameraCharacteristics(int cameraId, + CameraMetadata* cameraInfo) = 0; + // Returns 'OK' if operation succeeded // - Errors: ALREADY_EXISTS if the listener was already added virtual status_t addListener(const sp& listener) diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index fe16314..5e84aaf 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -225,6 +225,43 @@ status_t CameraService::getCameraInfo(int cameraId, return rc; } +status_t CameraService::getCameraCharacteristics(int cameraId, + CameraMetadata* 
cameraInfo) { + if (!cameraInfo) { + ALOGE("%s: cameraInfo is NULL", __FUNCTION__); + return BAD_VALUE; + } + + if (!mModule) { + ALOGE("%s: camera hardware module doesn't exist", __FUNCTION__); + return -ENODEV; + } + + if (mModule->common.module_api_version < CAMERA_MODULE_API_VERSION_2_0) { + // TODO: Remove this check once HAL1 shim is in place. + ALOGE("%s: Only HAL module version V2 or higher supports static metadata", __FUNCTION__); + return BAD_VALUE; + } + + if (cameraId < 0 || cameraId >= mNumberOfCameras) { + ALOGE("%s: Invalid camera id: %d", __FUNCTION__, cameraId); + return BAD_VALUE; + } + + int facing; + if (getDeviceVersion(cameraId, &facing) == CAMERA_DEVICE_API_VERSION_1_0) { + // TODO: Remove this check once HAL1 shim is in place. + ALOGE("%s: HAL1 doesn't support static metadata yet", __FUNCTION__); + return BAD_VALUE; + } + + struct camera_info info; + status_t ret = mModule->get_camera_info(cameraId, &info); + *cameraInfo = info.static_camera_characteristics; + + return ret; +} + int CameraService::getDeviceVersion(int cameraId, int* facing) { struct camera_info info; if (mModule->get_camera_info(cameraId, &info) != OK) { diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index b34a0f6..ad6a582 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -71,6 +71,8 @@ public: virtual int32_t getNumberOfCameras(); virtual status_t getCameraInfo(int cameraId, struct CameraInfo* cameraInfo); + virtual status_t getCameraCharacteristics(int cameraId, + CameraMetadata* cameraInfo); virtual status_t connect(const sp& cameraClient, int cameraId, const String16& clientPackageName, int clientUid, -- cgit v1.1 From fed6292af65a0b97b583ecbd3c232b3811a3f37b Mon Sep 17 00:00:00 2001 From: Jean-Michel Trivi Date: Wed, 25 Sep 2013 18:50:33 -0700 Subject: Don't call audio effect process on offloaded playback threads An audio effect process command is not meant to be called for playback threads belonging to offloaded playback. Bug 10933817 Change-Id: Idc7b6a0224791bbf8d61648474d3c288617c498f --- services/audioflinger/Effects.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp index 0ca2107..6e0354d 100644 --- a/services/audioflinger/Effects.cpp +++ b/services/audioflinger/Effects.cpp @@ -1279,9 +1279,10 @@ void AudioFlinger::EffectChain::process_l() } bool isGlobalSession = (mSessionId == AUDIO_SESSION_OUTPUT_MIX) || (mSessionId == AUDIO_SESSION_OUTPUT_STAGE); - // always process effects unless no more tracks are on the session and the effect tail - // has been rendered - bool doProcess = true; + // never process effects when: + // - on an OFFLOAD thread + // - no more tracks are on the session and the effect tail has been rendered + bool doProcess = (thread->type() != ThreadBase::OFFLOAD); if (!isGlobalSession) { bool tracksOnSession = (trackCnt() != 0); -- cgit v1.1 From cd0c4683947231a7d3dc7811bedb75c5a965103c Mon Sep 17 00:00:00 2001 From: Jean-Michel Trivi Date: Wed, 25 Sep 2013 18:43:55 -0700 Subject: LoudnessEnhancer compatible with stereo imaging Use a single compressor for both channels. Envelope of signal is determined by looking at both channels. 
Bug 8413913 Change-Id: Ia9b6f34923d2977c60a3352500b858dfa1fab33c --- .../libeffects/loudness/EffectLoudnessEnhancer.cpp | 42 +++++++++------------- .../dsp/core/dynamic_range_compression-inl.h | 2 +- .../dsp/core/dynamic_range_compression.cpp | 35 ++++++++++++++++++ .../loudness/dsp/core/dynamic_range_compression.h | 3 ++ 4 files changed, 56 insertions(+), 26 deletions(-) diff --git a/media/libeffects/loudness/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp index dfc25db..91ed677 100644 --- a/media/libeffects/loudness/EffectLoudnessEnhancer.cpp +++ b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp @@ -56,8 +56,7 @@ struct LoudnessEnhancerContext { int32_t mTargetGainmB;// target gain in mB // in this implementation, there is no coupling between the compression on the left and right // channels - le_fx::AdaptiveDynamicRangeCompression* mCompressorL; - le_fx::AdaptiveDynamicRangeCompression* mCompressorR; + le_fx::AdaptiveDynamicRangeCompression* mCompressor; }; // @@ -68,11 +67,10 @@ void LE_reset(LoudnessEnhancerContext *pContext) { ALOGV(" > LE_reset(%p)", pContext); - if ((pContext->mCompressorL != NULL) && (pContext->mCompressorR != NULL)) { + if (pContext->mCompressor != NULL) { float targetAmp = pow(10, pContext->mTargetGainmB/2000.0f); // mB to linear amplification ALOGV("LE_reset(): Target gain=%dmB <=> factor=%.2fX", pContext->mTargetGainmB, targetAmp); - pContext->mCompressorL->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate); - pContext->mCompressorR->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate); + pContext->mCompressor->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate); } else { ALOGE("LE_reset(%p): null compressors, can't apply target gain", pContext); } @@ -176,13 +174,9 @@ int LE_init(LoudnessEnhancerContext *pContext) float targetAmp = pow(10, pContext->mTargetGainmB/2000.0f); // mB to linear amplification ALOGV("LE_init(): Target gain=%dmB <=> factor=%.2fX", pContext->mTargetGainmB, targetAmp); - if (pContext->mCompressorL == NULL) { - pContext->mCompressorL = new le_fx::AdaptiveDynamicRangeCompression(); - pContext->mCompressorL->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate); - } - if (pContext->mCompressorR == NULL) { - pContext->mCompressorR = new le_fx::AdaptiveDynamicRangeCompression(); - pContext->mCompressorR->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate); + if (pContext->mCompressor == NULL) { + pContext->mCompressor = new le_fx::AdaptiveDynamicRangeCompression(); + pContext->mCompressor->Initialize(targetAmp, pContext->mConfig.inputCfg.samplingRate); } LE_setConfig(pContext, &pContext->mConfig); @@ -215,8 +209,7 @@ int LELib_Create(const effect_uuid_t *uuid, pContext->mItfe = &gLEInterface; pContext->mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED; - pContext->mCompressorL = NULL; - pContext->mCompressorR = NULL; + pContext->mCompressor = NULL; ret = LE_init(pContext); if (ret < 0) { ALOGW("LELib_Create() init failed"); @@ -242,13 +235,9 @@ int LELib_Release(effect_handle_t handle) { return -EINVAL; } pContext->mState = LOUDNESS_ENHANCER_STATE_UNINITIALIZED; - if (pContext->mCompressorL != NULL) { - delete pContext->mCompressorL; - pContext->mCompressorL = NULL; - } - if (pContext->mCompressorR != NULL) { - delete pContext->mCompressorR; - pContext->mCompressorR = NULL; + if (pContext->mCompressor != NULL) { + delete pContext->mCompressor; + pContext->mCompressor = NULL; } delete pContext; @@ -293,11 +282,14 @@ int LE_process( //ALOGV("LE about to 
process %d samples", inBuffer->frameCount); uint16_t inIdx; float inputAmp = pow(10, pContext->mTargetGainmB/2000.0f); + float leftSample, rightSample; for (inIdx = 0 ; inIdx < inBuffer->frameCount ; inIdx++) { - inBuffer->s16[2*inIdx] = pContext->mCompressorL->Compress( - inputAmp * (float)inBuffer->s16[2*inIdx]); - inBuffer->s16[2*inIdx +1] = pContext->mCompressorR->Compress( - inputAmp * (float)inBuffer->s16[2*inIdx +1]); + // makeup gain is applied on the input of the compressor + leftSample = inputAmp * (float)inBuffer->s16[2*inIdx]; + rightSample = inputAmp * (float)inBuffer->s16[2*inIdx +1]; + pContext->mCompressor->Compress(&leftSample, &rightSample); + inBuffer->s16[2*inIdx] = (int16_t) leftSample; + inBuffer->s16[2*inIdx +1] = (int16_t) rightSample; } if (inBuffer->raw != outBuffer->raw) { diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h b/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h index fed8c2a..da75ceb 100644 --- a/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h +++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h @@ -35,7 +35,7 @@ inline void AdaptiveDynamicRangeCompression::set_knee_threshold_via_target_gain( float target_gain) { const float decibel = target_gain_to_knee_threshold_.Interpolate( target_gain); - ALOGE("set_knee_threshold_via_target_gain: decibel =%.3f", decibel); + ALOGV("set_knee_threshold_via_target_gain: decibel =%.3fdB", decibel); set_knee_threshold(decibel); } diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp b/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp index 2bbd043..7bd068e 100644 --- a/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp +++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression.cpp @@ -102,5 +102,40 @@ float AdaptiveDynamicRangeCompression::Compress(float x) { return x; } +void AdaptiveDynamicRangeCompression::Compress(float *x1, float *x2) { + // Taking the maximum amplitude of both channels + const float max_abs_x = std::max(std::fabs(*x1), + std::max(std::fabs(*x2), kMinLogAbsValue)); + const float max_abs_x_dB = math::fast_log(max_abs_x); + // Subtract Threshold from log-encoded input to get the amount of overshoot + const float overshoot = max_abs_x_dB - knee_threshold_; + // Hard half-wave rectifier + const float rect = std::max(overshoot, 0.0f); + // Multiply rectified overshoot with slope + const float cv = rect * slope_; + const float prev_state = state_; + if (cv <= state_) { + state_ = alpha_attack_ * state_ + (1.0f - alpha_attack_) * cv; + } else { + state_ = alpha_release_ * state_ + (1.0f - alpha_release_) * cv; + } + compressor_gain_ *= + math::ExpApproximationViaTaylorExpansionOrder5(state_ - prev_state); + *x1 *= compressor_gain_; + if (*x1 > kFixedPointLimit) { + *x1 = kFixedPointLimit; + } + if (*x1 < -kFixedPointLimit) { + *x1 = -kFixedPointLimit; + } + *x2 *= compressor_gain_; + if (*x2 > kFixedPointLimit) { + *x2 = kFixedPointLimit; + } + if (*x2 < -kFixedPointLimit) { + *x2 = -kFixedPointLimit; + } +} + } // namespace le_fx diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression.h b/media/libeffects/loudness/dsp/core/dynamic_range_compression.h index 4c015df..2821a78 100644 --- a/media/libeffects/loudness/dsp/core/dynamic_range_compression.h +++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression.h @@ -55,6 +55,9 @@ class AdaptiveDynamicRangeCompression { // log(.) and exp(.). 
float Compress(float x); + // Stereo channel version of the compressor + void Compress(float *x1, float *x2); + // This version is slower than Compress(.) but faster than CompressSlow(.) float CompressNormalSpeed(float x); -- cgit v1.1 From 1da3b602130d71ac3bff1a1fdecdc5e0d7b9d701 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Thu, 26 Sep 2013 15:28:55 -0700 Subject: Camera2 API: Hack: Always use async mode with GLConsumer Until a cleaner solution is implemented, force buffer queues to consumers that set USAGE_HW_TEXTURE usage to be asynchronous. Bug: 10949105 Change-Id: I69e6b02b773831396767c282ce8c5936c5d41f03 --- .../libcameraservice/api2/CameraDeviceClient.cpp | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp index 83466cb..76d44bf 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp @@ -298,11 +298,28 @@ status_t CameraDeviceClient::createStream(int width, int height, int format, } } + // HACK b/10949105 + // Query consumer usage bits to set async operation mode for + // GLConsumer using controlledByApp parameter. + bool useAsync = false; + int32_t consumerUsage; + if ((res = bufferProducer->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, + &consumerUsage)) != OK) { + ALOGE("%s: Camera %d: Failed to query consumer usage", __FUNCTION__, + mCameraId); + return res; + } + if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) { + ALOGW("%s: Camera %d: Forcing asynchronous mode for stream", + __FUNCTION__, mCameraId); + useAsync = true; + } + sp binder; sp anw; if (bufferProducer != 0) { binder = bufferProducer->asBinder(); - anw = new Surface(bufferProducer); + anw = new Surface(bufferProducer, useAsync); } // TODO: remove w,h,f since we are ignoring them -- cgit v1.1 From 4de95592980dba88a35b3dc8f3fd045588387a4f Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Thu, 26 Sep 2013 15:28:21 -0700 Subject: audioflinger: fix crash when starting offload thread Passing a sp to parent thread to AsyncCallbackThread() constructor causes a strong reference to be acquired on the OffloadThread inside its constructor which causes an early launch of the thread loop with unpredictable consequences. Pass a wp to parent thread instead. Also move the creation of the AsyncCallbackThread to readOutputParameters() where mUseAsyncWrite is initialized which makes more sense. Also change the type of AsyncCallbackThread parent thread to PlaybackThread instead of OffloadThread to allow a broder use of non blocking write which in theory is not limited to audio offload use case. Bug: 8174034. 
Change-Id: I4b093b022030cd4f5eb8b8e477333e91098a6549 --- services/audioflinger/Threads.cpp | 14 +++++++------- services/audioflinger/Threads.h | 12 ++++++------ 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index b771e3b..2d9d485 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -1594,6 +1594,7 @@ void AudioFlinger::PlaybackThread::readOutputParameters() if (mOutput->stream->set_callback(mOutput->stream, AudioFlinger::PlaybackThread::asyncCallback, this) == 0) { mUseAsyncWrite = true; + mCallbackThread = new AudioFlinger::AsyncCallbackThread(this); } } @@ -3746,9 +3747,9 @@ void AudioFlinger::DirectOutputThread::cacheParameters_l() // ---------------------------------------------------------------------------- AudioFlinger::AsyncCallbackThread::AsyncCallbackThread( - const sp& offloadThread) + const wp& playbackThread) : Thread(false /*canCallJava*/), - mOffloadThread(offloadThread), + mPlaybackThread(playbackThread), mWriteAckSequence(0), mDrainSequence(0) { @@ -3783,13 +3784,13 @@ bool AudioFlinger::AsyncCallbackThread::threadLoop() mDrainSequence &= ~1; } { - sp offloadThread = mOffloadThread.promote(); - if (offloadThread != 0) { + sp playbackThread = mPlaybackThread.promote(); + if (playbackThread != 0) { if (writeAckSequence & 1) { - offloadThread->resetWriteBlocked(writeAckSequence >> 1); + playbackThread->resetWriteBlocked(writeAckSequence >> 1); } if (drainSequence & 1) { - offloadThread->resetDraining(drainSequence >> 1); + playbackThread->resetDraining(drainSequence >> 1); } } } @@ -3847,7 +3848,6 @@ AudioFlinger::OffloadThread::OffloadThread(const sp& audioFlinger, mHwPaused(false), mPausedBytesRemaining(0) { - mCallbackThread = new AudioFlinger::AsyncCallbackThread(this); } AudioFlinger::OffloadThread::~OffloadThread() diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 443b8d7..241424f 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -759,7 +759,7 @@ private: class AsyncCallbackThread : public Thread { public: - AsyncCallbackThread(const sp& offloadThread); + AsyncCallbackThread(const wp& playbackThread); virtual ~AsyncCallbackThread(); @@ -776,17 +776,17 @@ public: void resetDraining(); private: - wp mOffloadThread; + const wp mPlaybackThread; // mWriteAckSequence corresponds to the last write sequence passed by the offload thread via // setWriteBlocked(). The sequence is shifted one bit to the left and the lsb is used // to indicate that the callback has been received via resetWriteBlocked() - uint32_t mWriteAckSequence; + uint32_t mWriteAckSequence; // mDrainSequence corresponds to the last drain sequence passed by the offload thread via // setDraining(). The sequence is shifted one bit to the left and the lsb is used // to indicate that the callback has been received via resetDraining() - uint32_t mDrainSequence; - Condition mWaitWorkCV; - Mutex mLock; + uint32_t mDrainSequence; + Condition mWaitWorkCV; + Mutex mLock; }; class DuplicatingThread : public MixerThread { -- cgit v1.1 From bcfcfd4b3ccf255d07ba5c81d8a3c3152972ae6c Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Thu, 26 Sep 2013 15:28:55 -0700 Subject: Camera2 API: Hack: Always use async mode with GLConsumer Until a cleaner solution is implemented, force buffer queues to consumers that set USAGE_HW_TEXTURE usage to be asynchronous. 
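
Condensed to its essentials, the check added below reads as follows (a sketch drawn from the hunk itself; the full version also handles the query failing):

    // Sketch of the logic added to CameraDeviceClient::createStream().
    int32_t consumerUsage = 0;
    bufferProducer->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &consumerUsage);
    const bool useAsync = (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) != 0;
    // Per the commit message, the controlledByApp parameter is what forces
    // asynchronous operation for GLConsumer-backed queues.
    sp<ANativeWindow> anw = new Surface(bufferProducer, useAsync /*controlledByApp*/);
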
Bug: 10949105 Change-Id: I69e6b02b773831396767c282ce8c5936c5d41f03 --- .../libcameraservice/api2/CameraDeviceClient.cpp | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp index 83466cb..76d44bf 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp @@ -298,11 +298,28 @@ status_t CameraDeviceClient::createStream(int width, int height, int format, } } + // HACK b/10949105 + // Query consumer usage bits to set async operation mode for + // GLConsumer using controlledByApp parameter. + bool useAsync = false; + int32_t consumerUsage; + if ((res = bufferProducer->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, + &consumerUsage)) != OK) { + ALOGE("%s: Camera %d: Failed to query consumer usage", __FUNCTION__, + mCameraId); + return res; + } + if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) { + ALOGW("%s: Camera %d: Forcing asynchronous mode for stream", + __FUNCTION__, mCameraId); + useAsync = true; + } + sp binder; sp anw; if (bufferProducer != 0) { binder = bufferProducer->asBinder(); - anw = new Surface(bufferProducer); + anw = new Surface(bufferProducer, useAsync); } // TODO: remove w,h,f since we are ignoring them -- cgit v1.1 From 465da60d885c8fa4e7cea4626478574ce17a54a9 Mon Sep 17 00:00:00 2001 From: Johann Date: Thu, 26 Sep 2013 17:37:51 -0700 Subject: Indicate sync frames returned by encoder Set the appropriate OMX flag when the encoder generates a keyframe. This is necessary for any muxer which needs to indicate which frames are seekable. Bug: 8422347 Change-Id: I744a0b3023db24d3de2210bce82f41e50d259505 --- media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp index 5f2b5c8..16f0f30 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp @@ -803,6 +803,8 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) { outputBufferHeader->nTimeStamp = encoded_packet->data.frame.pts; outputBufferHeader->nFlags = 0; + if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY) + outputBufferHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME; outputBufferHeader->nOffset = 0; outputBufferHeader->nFilledLen = encoded_packet->data.frame.sz; memcpy(outputBufferHeader->pBuffer, -- cgit v1.1 From 91b0ca1a5bea44dd9b5196910186dd2927821994 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Thu, 26 Sep 2013 17:23:10 -0700 Subject: fix playback position after switching to offload After switching from offloaded track to PCM track while paused (e.g. when connecting A2DP), playback restarts from the beginning of the song when resuming. Save current position before recreating an AudioPlayer in AwesomePlayer::play_l() and seek to the saved position before starting playback. Also fix a problem where the position is not reported properly by AudioPlayer if a seek is pending and queried just after start and before the first buffer is read from the MediaSource. Bug: 8174034. 
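
Reduced to its core, the fallback path in play_l() now looks like this (names as in the hunks below; locking and error handling omitted):

    // Sketch of the offload -> software-decode fallback with position restore.
    int64_t curTimeUs = 0;
    getPosition(&curTimeUs);           // remember where the offloaded track was
    delete mAudioPlayer;               // tear down the offloaded player
    mAudioPlayer = NULL;
    // ... recreate mAudioSource with a software decoder ...
    mSeekNotificationSent = true;      // internal seek: treat its notification as already sent
    if (mExtractorFlags & MediaExtractor::CAN_SEEK) {
        seekTo_l(curTimeUs);           // resume from the saved position
    }
    createAudioPlayer_l();
    startAudioPlayer_l(false);
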
Change-Id: I254e65418ff903a9bf2e2111b89a00e2e54876c5 --- media/libstagefright/AudioPlayer.cpp | 34 +++++++++++++++++++++------------- media/libstagefright/AwesomePlayer.cpp | 7 +++++++ 2 files changed, 28 insertions(+), 13 deletions(-) diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp index e38e261..a8a8786 100644 --- a/media/libstagefright/AudioPlayer.cpp +++ b/media/libstagefright/AudioPlayer.cpp @@ -363,6 +363,7 @@ void AudioPlayer::reset() { mPositionTimeMediaUs = -1; mPositionTimeRealUs = -1; mSeeking = false; + mSeekTimeUs = 0; mReachedEOS = false; mFinalStatus = OK; mStarted = false; @@ -602,15 +603,24 @@ size_t AudioPlayer::fillBuffer(void *data, size_t size) { // need to adjust the mStartPosUs for offload decoding since parser // might not be able to get the exact seek time requested. - if (refreshSeekTime && useOffload()) { - if (postSeekComplete) { - ALOGV("fillBuffer is going to post SEEK_COMPLETE"); - mObserver->postAudioSeekComplete(); - postSeekComplete = false; - } + if (refreshSeekTime) { + if (useOffload()) { + if (postSeekComplete) { + ALOGV("fillBuffer is going to post SEEK_COMPLETE"); + mObserver->postAudioSeekComplete(); + postSeekComplete = false; + } - mStartPosUs = mPositionTimeMediaUs; - ALOGV("adjust seek time to: %.2f", mStartPosUs/ 1E6); + mStartPosUs = mPositionTimeMediaUs; + ALOGV("adjust seek time to: %.2f", mStartPosUs/ 1E6); + } + // clear seek time with mLock locked and once we have valid mPositionTimeMediaUs + // and mPositionTimeRealUs + // before clearing mSeekTimeUs check if a new seek request has been received while + // we were reading from the source with mLock released. + if (!mSeeking) { + mSeekTimeUs = 0; + } } if (!useOffload()) { @@ -741,12 +751,10 @@ int64_t AudioPlayer::getMediaTimeUs() { return mPositionTimeRealUs; } - if (mPositionTimeMediaUs < 0 || mPositionTimeRealUs < 0) { - if (mSeeking) { - return mSeekTimeUs; - } - return 0; + if (mPositionTimeMediaUs < 0 || mPositionTimeRealUs < 0) { + // mSeekTimeUs is either seek time while seeking or 0 if playback did not start. 
+ return mSeekTimeUs; } int64_t realTimeOffset = getRealTimeUsLocked() - mPositionTimeRealUs; diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 5fbee7e..9b0c69a 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -927,6 +927,9 @@ status_t AwesomePlayer::play_l() { if ((err != OK) && mOffloadAudio) { ALOGI("play_l() cannot create offload output, fallback to sw decode"); + int64_t curTimeUs; + getPosition(&curTimeUs); + delete mAudioPlayer; mAudioPlayer = NULL; // if the player was started it will take care of stopping the source when destroyed @@ -942,6 +945,10 @@ status_t AwesomePlayer::play_l() { if (err != OK) { mAudioSource.clear(); } else { + mSeekNotificationSent = true; + if (mExtractorFlags & MediaExtractor::CAN_SEEK) { + seekTo_l(curTimeUs); + } createAudioPlayer_l(); err = startAudioPlayer_l(false); } -- cgit v1.1 From 42c5ae81036d4002da3fe1e3b1016131ba737e74 Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Fri, 27 Sep 2013 12:13:52 -0700 Subject: Prevent onPrepared callback from being called twice b/10891995 Change-Id: If6845c832d114629282f0b03f904a37c3325208e --- media/libstagefright/AwesomePlayer.cpp | 36 +++++++++++++++------------- media/libstagefright/include/AwesomePlayer.h | 2 ++ 2 files changed, 21 insertions(+), 17 deletions(-) diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 5fbee7e..bcf9cd3 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -259,6 +259,7 @@ void AwesomePlayer::cancelPlayerEvents(bool keepNotifications) { mQueue.cancelEvent(mBufferingEvent->eventID()); mBufferingEventPending = false; + mAudioTearDown = false; } } @@ -2301,6 +2302,7 @@ void AwesomePlayer::abortPrepare(status_t err) { modifyFlags((PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED), CLEAR); mAsyncPrepareEvent = NULL; mPreparedCondition.broadcast(); + mAudioTearDown = false; } // static @@ -2374,6 +2376,20 @@ void AwesomePlayer::finishAsyncPrepare_l() { modifyFlags(PREPARED, SET); mAsyncPrepareEvent = NULL; mPreparedCondition.broadcast(); + + if (mAudioTearDown) { + if (mPrepareResult == OK) { + if (mExtractorFlags & MediaExtractor::CAN_SEEK) { + seekTo_l(mAudioTearDownPosition); + } + + if (mAudioTearDownWasPlaying) { + modifyFlags(CACHE_UNDERRUN, CLEAR); + play_l(); + } + } + mAudioTearDown = false; + } } uint32_t AwesomePlayer::flags() const { @@ -2791,7 +2807,7 @@ void AwesomePlayer::onAudioTearDownEvent() { ALOGV("onAudioTearDownEvent"); // stream info is cleared by reset_l() so copy what we need - const bool wasPlaying = (mFlags & PLAYING); + mAudioTearDownWasPlaying = (mFlags & PLAYING); KeyedVector uriHeaders(mUriHeaders); sp fileSource(mFileSource); @@ -2800,8 +2816,7 @@ void AwesomePlayer::onAudioTearDownEvent() { mStatsLock.unlock(); // get current position so we can start recreated stream from here - int64_t position = 0; - getPosition(&position); + getPosition(&mAudioTearDownPosition); // Reset and recreate reset_l(); @@ -2825,21 +2840,8 @@ void AwesomePlayer::onAudioTearDownEvent() { mAudioTearDown = true; mIsAsyncPrepare = true; - // Call parepare for the host decoding + // Call prepare for the host decoding beginPrepareAsync_l(); - - if (mPrepareResult == OK) { - if (mExtractorFlags & MediaExtractor::CAN_SEEK) { - seekTo_l(position); - } - - if (wasPlaying) { - modifyFlags(CACHE_UNDERRUN, CLEAR); - play_l(); - } - } - - mAudioTearDown = false; } } // namespace android diff --git 
a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h index b001cf4..271df8e 100644 --- a/media/libstagefright/include/AwesomePlayer.h +++ b/media/libstagefright/include/AwesomePlayer.h @@ -342,6 +342,8 @@ private: bool mOffloadAudio; bool mAudioTearDown; + bool mAudioTearDownWasPlaying; + int64_t mAudioTearDownPosition; status_t setVideoScalingMode(int32_t mode); status_t setVideoScalingMode_l(int32_t mode); -- cgit v1.1 From 5b8ce24b849f6cd5629b4ba508f7c78d6227d250 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 27 Sep 2013 14:50:48 -0700 Subject: Revert "Effect Offload Proxy for effects offload" This reverts commit 60c60df7db278d2fa5c90b0fa14f99a61d50272b. Change-Id: Iafba9e02a9f3bfde6248d802e96c4e649686a87d --- media/libeffects/data/audio_effects.conf | 2 +- media/libeffects/proxy/Android.mk | 34 ---- media/libeffects/proxy/EffectProxy.cpp | 338 ------------------------------- media/libeffects/proxy/EffectProxy.h | 80 -------- 4 files changed, 1 insertion(+), 453 deletions(-) delete mode 100644 media/libeffects/proxy/Android.mk delete mode 100644 media/libeffects/proxy/EffectProxy.cpp delete mode 100644 media/libeffects/proxy/EffectProxy.h diff --git a/media/libeffects/data/audio_effects.conf b/media/libeffects/data/audio_effects.conf index c3c4b67..f1c5f5b 100644 --- a/media/libeffects/data/audio_effects.conf +++ b/media/libeffects/data/audio_effects.conf @@ -10,7 +10,7 @@ libraries { # the HW and SW effects #proxy { - #path /system/lib/soundfx/libeffectproxy.so + #path /system/lib/soundfx/libProxy.so #} # This is the SW implementation library of the effect diff --git a/media/libeffects/proxy/Android.mk b/media/libeffects/proxy/Android.mk deleted file mode 100644 index 01b3be1..0000000 --- a/media/libeffects/proxy/Android.mk +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2013 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -LOCAL_PATH:= $(call my-dir) -include $(CLEAR_VARS) -LOCAL_MODULE:= libeffectproxy -LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx -LOCAL_MODULE_TAGS := optional - - -LOCAL_SRC_FILES := \ - EffectProxy.cpp - -LOCAL_CFLAGS+= -fvisibility=hidden - -LOCAL_SHARED_LIBRARIES := liblog libcutils libutils libdl libeffects - -LOCAL_C_INCLUDES := \ - system/media/audio_effects/include \ - bionic/libc/include - -include $(BUILD_SHARED_LIBRARY) - diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp deleted file mode 100644 index b3304b7..0000000 --- a/media/libeffects/proxy/EffectProxy.cpp +++ /dev/null @@ -1,338 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "EffectProxy" -//#define LOG_NDEBUG 0 - -#include -#include -#include -#include -#include -#include -#include -#include - -namespace android { -// This is a dummy proxy descriptor just to return to Factory during the initial -// GetDescriptor call. Later in the factory, it is replaced with the -// SW sub effect descriptor -const effect_descriptor_t gProxyDescriptor = { - EFFECT_UUID_INITIALIZER, // type - EFFECT_UUID_INITIALIZER, // uuid - EFFECT_CONTROL_API_VERSION, //version of effect control API - (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | - EFFECT_FLAG_VOLUME_CTRL), // effect capability flags - 0, // CPU load - 1, // Data memory - "Proxy", //effect name - "AOSP", //implementor name -}; - - -static const effect_descriptor_t *const gDescriptors[] = -{ - &gProxyDescriptor, -}; - - -int EffectProxyCreate(const effect_uuid_t *uuid, - int32_t sessionId, - int32_t ioId, - effect_handle_t *pHandle) { - - effect_descriptor_t* desc; - EffectContext* pContext; - if (pHandle == NULL || uuid == NULL) { - ALOGE("EffectProxyCreate() called with NULL pointer"); - return -EINVAL; - } - ALOGV("EffectProxyCreate start.."); - pContext = new EffectContext; - pContext->sessionId = sessionId; - pContext->ioId = ioId; - pContext->uuid = *uuid; - pContext->common_itfe = &gEffectInterface; - - // The sub effects will be created in effect_command when the first command - // for the effect is received - pContext->eHandle[SUB_FX_HOST] = pContext->eHandle[SUB_FX_OFFLOAD] = NULL; - - // Get the HW and SW sub effect descriptors from the effects factory - desc = new effect_descriptor_t[SUB_FX_COUNT]; - pContext->desc = new effect_descriptor_t[SUB_FX_COUNT]; - int retValue = EffectGetSubEffects(uuid, desc, - sizeof(effect_descriptor_t) * SUB_FX_COUNT); - // EffectGetSubEffects returns the number of sub-effects copied. - if (retValue != SUB_FX_COUNT) { - ALOGE("EffectCreate() could not get the sub effects"); - delete desc; - delete pContext->desc; - return -EINVAL; - } - // Check which is the HW descriptor and copy the descriptors - // to the Context desc array - // Also check if there is only one HW and one SW descriptor. - // HW descriptor alone has the HW_TUNNEL flag. 
- if ((desc[0].flags & EFFECT_FLAG_HW_ACC_TUNNEL) && - !(desc[1].flags & EFFECT_FLAG_HW_ACC_TUNNEL)) { - pContext->desc[SUB_FX_OFFLOAD] = desc[0]; - pContext->desc[SUB_FX_HOST] = desc[1]; - } - else if ((desc[1].flags & EFFECT_FLAG_HW_ACC_TUNNEL) && - !(desc[0].flags & EFFECT_FLAG_HW_ACC_TUNNEL)) { - pContext->desc[SUB_FX_HOST] = desc[0]; - pContext->desc[SUB_FX_OFFLOAD] = desc[1]; - } - delete desc; -#if (LOG_NDEBUG == 0) - effect_uuid_t uuid_print = pContext->desc[SUB_FX_HOST].uuid; - ALOGV("EffectCreate() UUID of HOST: %08X-%04X-%04X-%04X-%02X%02X%02X%02X" - "%02X%02X\n",uuid_print.timeLow, uuid_print.timeMid, - uuid_print.timeHiAndVersion, uuid_print.clockSeq, uuid_print.node[0], - uuid_print.node[1], uuid_print.node[2], uuid_print.node[3], - uuid_print.node[4], uuid_print.node[5]); - ALOGV("EffectCreate() UUID of OFFLOAD: %08X-%04X-%04X-%04X-%02X%02X%02X%02X" - "%02X%02X\n", uuid_print.timeLow, uuid_print.timeMid, - uuid_print.timeHiAndVersion, uuid_print.clockSeq, uuid_print.node[0], - uuid_print.node[1], uuid_print.node[2], uuid_print.node[3], - uuid_print.node[4], uuid_print.node[5]); -#endif - - pContext->replySize = PROXY_REPLY_SIZE_DEFAULT; - pContext->replyData = (char *)malloc(PROXY_REPLY_SIZE_DEFAULT); - - *pHandle = (effect_handle_t)pContext; - ALOGV("EffectCreate end"); - return 0; -} //end EffectProxyCreate - -int EffectProxyRelease(effect_handle_t handle) { - EffectContext * pContext = (EffectContext *)handle; - if (pContext == NULL) { - ALOGV("ERROR : EffectRelease called with NULL pointer"); - return -EINVAL; - } - ALOGV("EffectRelease"); - delete pContext->desc; - free(pContext->replyData); - - if (pContext->eHandle[SUB_FX_HOST]) - EffectRelease(pContext->eHandle[SUB_FX_HOST]); - if (pContext->eHandle[SUB_FX_OFFLOAD]) - EffectRelease(pContext->eHandle[SUB_FX_OFFLOAD]); - delete pContext; - pContext = NULL; - return 0; -} /*end EffectProxyRelease */ - -int EffectProxyGetDescriptor(const effect_uuid_t *uuid, - effect_descriptor_t *pDescriptor) { - const effect_descriptor_t *desc = NULL; - - if (pDescriptor == NULL || uuid == NULL) { - ALOGV("EffectGetDescriptor() called with NULL pointer"); - return -EINVAL; - } - desc = &gProxyDescriptor; - *pDescriptor = *desc; - return 0; -} /* end EffectProxyGetDescriptor */ - -/* Effect Control Interface Implementation: Process */ -int Effect_process(effect_handle_t self, - audio_buffer_t *inBuffer, - audio_buffer_t *outBuffer) { - - EffectContext *pContext = (EffectContext *) self; - int ret = 0; - if (pContext != NULL) { - int index = pContext->index; - // if the index refers to HW , do not do anything. Just return. 
- if (index == SUB_FX_HOST) { - ret = (*pContext->eHandle[index])->process(pContext->eHandle[index], - inBuffer, outBuffer); - } - } - return ret; -} /* end Effect_process */ - -/* Effect Control Interface Implementation: Command */ -int Effect_command(effect_handle_t self, - uint32_t cmdCode, - uint32_t cmdSize, - void *pCmdData, - uint32_t *replySize, - void *pReplyData) { - - EffectContext *pContext = (EffectContext *) self; - int status = 0; - if (pContext == NULL) { - ALOGV("Effect_command() Proxy context is NULL"); - return -EINVAL; - } - if (pContext->eHandle[SUB_FX_HOST] == NULL) { - ALOGV("Effect_command() Calling HOST EffectCreate"); - status = EffectCreate(&pContext->desc[SUB_FX_HOST].uuid, - pContext->sessionId, pContext->ioId, - &(pContext->eHandle[SUB_FX_HOST])); - if (status != NO_ERROR || (pContext->eHandle[SUB_FX_HOST] == NULL)) { - ALOGV("Effect_command() Error creating SW sub effect"); - return status; - } - } - if (pContext->eHandle[SUB_FX_OFFLOAD] == NULL) { - ALOGV("Effect_command() Calling OFFLOAD EffectCreate"); - status = EffectCreate(&pContext->desc[SUB_FX_OFFLOAD].uuid, - pContext->sessionId, pContext->ioId, - &(pContext->eHandle[SUB_FX_OFFLOAD])); - if (status != NO_ERROR || (pContext->eHandle[SUB_FX_OFFLOAD] == NULL)) { - ALOGV("Effect_command() Error creating HW effect"); - // Do not return error here as SW effect is created - // Return error if the CMD_OFFLOAD sends the index as OFFLOAD - } - pContext->index = SUB_FX_HOST; - } - // EFFECT_CMD_OFFLOAD used to (1) send whether the thread is offload or not - // (2) Send the ioHandle of the effectThread when the effect - // is moved from one type of thread to another. - // pCmdData points to a memory holding effect_offload_param_t structure - if (cmdCode == EFFECT_CMD_OFFLOAD) { - ALOGV("Effect_command() cmdCode = EFFECT_CMD_OFFLOAD"); - if (cmdSize == 0 || pCmdData == NULL) { - ALOGV("effectsOffload: Effect_command: CMD_OFFLOAD has no data"); - *(int*)pReplyData = FAILED_TRANSACTION; - return FAILED_TRANSACTION; - } - effect_offload_param_t* offloadParam = (effect_offload_param_t*)pCmdData; - // Assign the effect context index based on isOffload field of the structure - pContext->index = offloadParam->isOffload ? SUB_FX_OFFLOAD : SUB_FX_HOST; - // if the index is HW and the HW effect is unavailable, return error - // and reset the index to SW - if (pContext->eHandle[pContext->index] == NULL) { - ALOGV("Effect_command()CMD_OFFLOAD sub effect unavailable"); - *(int*)pReplyData = FAILED_TRANSACTION; - return FAILED_TRANSACTION; - } - pContext->ioId = offloadParam->ioHandle; - ALOGV("Effect_command()CMD_OFFLOAD index:%d io %d", pContext->index, pContext->ioId); - // Update the DSP wrapper with the new ioHandle. - // Pass the OFFLOAD command to the wrapper. - // The DSP wrapper needs to handle this CMD - if (pContext->eHandle[SUB_FX_OFFLOAD]) - status = (*pContext->eHandle[SUB_FX_OFFLOAD])->command( - pContext->eHandle[SUB_FX_OFFLOAD], cmdCode, cmdSize, - pCmdData, replySize, pReplyData); - return status; - } - - int index = pContext->index; - if (index != SUB_FX_HOST && index != SUB_FX_OFFLOAD) { - ALOGV("Effect_command: effect index is neither offload nor host"); - return -EINVAL; - } - - // Getter commands are only sent to the active sub effect. 
- int *subStatus[SUB_FX_COUNT]; - uint32_t *subReplySize[SUB_FX_COUNT]; - void *subReplyData[SUB_FX_COUNT]; - uint32_t tmpSize; - int tmpStatus; - - // grow temp reply buffer if needed - if (replySize != NULL) { - tmpSize = pContext->replySize; - while (tmpSize < *replySize && tmpSize < PROXY_REPLY_SIZE_MAX) { - tmpSize *= 2; - } - if (tmpSize > pContext->replySize) { - ALOGV("Effect_command grow reply buf to %d", tmpSize); - pContext->replyData = (char *)realloc(pContext->replyData, tmpSize); - pContext->replySize = tmpSize; - } - if (tmpSize > *replySize) { - tmpSize = *replySize; - } - } else { - tmpSize = 0; - } - // tmpSize is now the actual reply size for the non active sub effect - - // Send command to sub effects. The command is sent to all sub effects so that their internal - // state is kept in sync. - // Only the reply from the active sub effect is returned to the caller. The reply from the - // other sub effect is lost in pContext->replyData - for (int i = 0; i < SUB_FX_COUNT; i++) { - if (pContext->eHandle[i] == NULL) { - continue; - } - if (i == index) { - subStatus[i] = &status; - subReplySize[i] = replySize; - subReplyData[i] = pReplyData; - } else { - subStatus[i] = &tmpStatus; - subReplySize[i] = replySize == NULL ? NULL : &tmpSize; - subReplyData[i] = pReplyData == NULL ? NULL : pContext->replyData; - } - *subStatus[i] = (*pContext->eHandle[i])->command( - pContext->eHandle[i], cmdCode, cmdSize, - pCmdData, subReplySize[i], subReplyData[i]); - } - - return status; -} /* end Effect_command */ - - -/* Effect Control Interface Implementation: get_descriptor */ -int Effect_getDescriptor(effect_handle_t self, - effect_descriptor_t *pDescriptor) { - - EffectContext * pContext = (EffectContext *) self; - const effect_descriptor_t *desc; - - ALOGV("Effect_getDescriptor"); - if (pContext == NULL || pDescriptor == NULL) { - ALOGV("Effect_getDescriptor() invalid param"); - return -EINVAL; - } - if (pContext->desc == NULL) { - ALOGV("Effect_getDescriptor() could not get descriptor"); - return -EINVAL; - } - desc = &pContext->desc[SUB_FX_HOST]; - *pDescriptor = *desc; - pDescriptor->uuid = pContext->uuid; // Replace the uuid with the Proxy UUID - // Also set/clear the EFFECT_FLAG_OFFLOAD_SUPPORTED flag based on the sub effects availability - if (pContext->eHandle[SUB_FX_OFFLOAD] != NULL) - pDescriptor->flags |= EFFECT_FLAG_OFFLOAD_SUPPORTED; - else - pDescriptor->flags &= ~EFFECT_FLAG_OFFLOAD_SUPPORTED; - return 0; -} /* end Effect_getDescriptor */ - -} // namespace android - -__attribute__ ((visibility ("default"))) -audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { - tag : AUDIO_EFFECT_LIBRARY_TAG, - version : EFFECT_LIBRARY_API_VERSION, - name : "Effect Proxy", - implementor : "AOSP", - create_effect : android::EffectProxyCreate, - release_effect : android::EffectProxyRelease, - get_descriptor : android::EffectProxyGetDescriptor, -}; diff --git a/media/libeffects/proxy/EffectProxy.h b/media/libeffects/proxy/EffectProxy.h deleted file mode 100644 index acbe17e..0000000 --- a/media/libeffects/proxy/EffectProxy.h +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include -#include -namespace android { -enum { - SUB_FX_HOST, // Index of HOST in the descriptor and handle arrays - // of the Proxy context - SUB_FX_OFFLOAD, // Index of OFFLOAD in the descriptor and handle arrays - // of the Proxy context - SUB_FX_COUNT // The number of sub effects for a Proxy(1 HW, 1 SW) -}; -#if __cplusplus -extern "C" { -#endif - -int EffectProxyCreate(const effect_uuid_t *uuid, - int32_t sessionId, - int32_t ioId, - effect_handle_t *pHandle); -int EffectProxyRelease(effect_handle_t handle); -int EffectProxyGetDescriptor(const effect_uuid_t *uuid, - effect_descriptor_t *pDescriptor); -/* Effect Control Interface Implementation: Process */ -int Effect_process(effect_handle_t self, - audio_buffer_t *inBuffer, - audio_buffer_t *outBuffer); - -/* Effect Control Interface Implementation: Command */ -int Effect_command(effect_handle_t self, - uint32_t cmdCode, - uint32_t cmdSize, - void *pCmdData, - uint32_t *replySize, - void *pReplyData); -int Effect_getDescriptor(effect_handle_t self, - effect_descriptor_t *pDescriptor); - -const struct effect_interface_s gEffectInterface = { - Effect_process, - Effect_command, - Effect_getDescriptor, - NULL, -}; - -#define PROXY_REPLY_SIZE_MAX (64 * 1024) // must be power of two -#define PROXY_REPLY_SIZE_DEFAULT 32 // must be power of two - -struct EffectContext { - const struct effect_interface_s *common_itfe; // Holds the itfe of the Proxy - effect_descriptor_t* desc; // Points to the sub effect descriptors - effect_handle_t eHandle[SUB_FX_COUNT]; // The effect handles of the sub effects - int index; // The index that is currently active - HOST or OFFLOAD - int32_t sessionId; // The sessiond in which the effect is created. - // Stored in context to pass on to sub effect creation - int32_t ioId; // The ioId in which the effect is created. - // Stored in context to pass on to sub effect creation - effect_uuid_t uuid; // UUID of the Proxy - char* replyData; // temporary buffer for non active sub effect command reply - uint32_t replySize; // current size of temporary reply buffer -}; - -#if __cplusplus -} // extern "C" -#endif -} //namespace android -- cgit v1.1 From 83f400056ac913250f0926326ff78697c68d18a1 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 27 Sep 2013 14:53:24 -0700 Subject: Revert "Effects Factory changes for effects offload" This reverts commit 284c17e73bbff51cb5b1adcee98386d47733757a. 
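
For context, the factory API removed by this revert (and re-introduced in the commits that follow) is what the proxy uses at creation time to discover its two sub-effects; roughly, based on the re-added EffectProxyCreate() further below, with error paths trimmed:

    // SUB_FX_COUNT is 2: one software and one offloaded (HW tunneled) implementation.
    effect_descriptor_t descs[SUB_FX_COUNT];
    const int copied = EffectGetSubEffects(uuid, descs, sizeof(descs));
    if (copied != SUB_FX_COUNT) {
        return -EINVAL;   // the proxy needs both sub-effect descriptors
    }
    // The descriptor carrying EFFECT_FLAG_HW_ACC_TUNNEL is the offloaded sub-effect;
    // the other one is the software sub-effect.
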
Change-Id: I31db21e1ad4758b21356bfe4c4c64f15b2da8737 --- include/media/EffectsFactoryApi.h | 24 ---- media/libeffects/data/audio_effects.conf | 39 ------ media/libeffects/factory/EffectsFactory.c | 218 +----------------------------- media/libeffects/factory/EffectsFactory.h | 19 --- 4 files changed, 2 insertions(+), 298 deletions(-) diff --git a/include/media/EffectsFactoryApi.h b/include/media/EffectsFactoryApi.h index b1143b9..b1ed7b0 100644 --- a/include/media/EffectsFactoryApi.h +++ b/include/media/EffectsFactoryApi.h @@ -171,30 +171,6 @@ int EffectGetDescriptor(const effect_uuid_t *pEffectUuid, effect_descriptor_t *p //////////////////////////////////////////////////////////////////////////////// int EffectIsNullUuid(const effect_uuid_t *pEffectUuid); -//////////////////////////////////////////////////////////////////////////////// -// -// Function: EffectGetSubEffects -// -// Description: Returns the descriptors of the sub effects of the effect -// whose uuid is pointed to by first argument. -// -// Input: -// pEffectUuid: pointer to the effect uuid. -// size: size of the buffer pointed by pDescriptor. -// -// Input/Output: -// pDescriptor: address where to return the sub effect descriptors. -// -// Output: -// returned value: 0 successful operation. -// -ENODEV factory failed to initialize -// -EINVAL invalid pEffectUuid or pDescriptor -// -ENOENT no effect with this uuid found -// *pDescriptor: updated with the sub effect descriptors. -// -//////////////////////////////////////////////////////////////////////////////// -int EffectGetSubEffects(const effect_uuid_t *pEffectUuid, effect_descriptor_t *pDescriptors, size_t size); - #if __cplusplus } // extern "C" #endif diff --git a/media/libeffects/data/audio_effects.conf b/media/libeffects/data/audio_effects.conf index f1c5f5b..0c3c687 100644 --- a/media/libeffects/data/audio_effects.conf +++ b/media/libeffects/data/audio_effects.conf @@ -6,23 +6,6 @@ # } # } libraries { -# This is a proxy library that will be an abstraction for -# the HW and SW effects - - #proxy { - #path /system/lib/soundfx/libProxy.so - #} - -# This is the SW implementation library of the effect - #libSW { - #path /system/lib/soundfx/libswwrapper.so - #} - -# This is the HW implementation library for the effect - #libHW { - #path /system/lib/soundfx/libhwwrapper.so - #} - bundle { path /system/lib/soundfx/libbundlewrapper.so } @@ -63,28 +46,6 @@ libraries { # } effects { - -# additions for the proxy implementation -# Proxy implementation - #effectname { - #library proxy - #uuid xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - - # SW implemetation of the effect. Added as a node under the proxy to - # indicate this as a sub effect. - #libsw { - #library libSW - #uuid yyyyyyyy-yyyy-yyyy-yyyy-yyyyyyyyyyyy - #} End of SW effect - - # HW implementation of the effect. Added as a node under the proxy to - # indicate this as a sub effect. 
- #libhw { - #library libHW - #uuid zzzzzzzz-zzzz-zzzz-zzzz-zzzzzzzzzzzz - #}End of HW effect - #} End of effect proxy - bassboost { library bundle uuid 8631f300-72e2-11df-b57e-0002a5d5c51b diff --git a/media/libeffects/factory/EffectsFactory.c b/media/libeffects/factory/EffectsFactory.c index f8d6041..f158929 100644 --- a/media/libeffects/factory/EffectsFactory.c +++ b/media/libeffects/factory/EffectsFactory.c @@ -28,9 +28,6 @@ static list_elem_t *gEffectList; // list of effect_entry_t: all currently created effects static list_elem_t *gLibraryList; // list of lib_entry_t: all currently loaded libraries -// list of effect_descriptor and list of sub effects : all currently loaded -// It does not contain effects without sub effects. -static list_sub_elem_t *gSubEffectList; static pthread_mutex_t gLibLock = PTHREAD_MUTEX_INITIALIZER; // controls access to gLibraryList static uint32_t gNumEffects; // total number number of effects static list_elem_t *gCurLib; // current library in enumeration process @@ -53,8 +50,6 @@ static int loadLibraries(cnode *root); static int loadLibrary(cnode *root, const char *name); static int loadEffects(cnode *root); static int loadEffect(cnode *node); -// To get and add the effect pointed by the passed node to the gSubEffectList -static int addSubEffect(cnode *root); static lib_entry_t *getLibrary(const char *path); static void resetEffectEnumeration(); static uint32_t updateNumEffects(); @@ -62,10 +57,6 @@ static int findEffect(const effect_uuid_t *type, const effect_uuid_t *uuid, lib_entry_t **lib, effect_descriptor_t **desc); -// To search a subeffect in the gSubEffectList -int findSubEffect(const effect_uuid_t *uuid, - lib_entry_t **lib, - effect_descriptor_t **desc); static void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len); static int stringToUuid(const char *str, effect_uuid_t *uuid); static int uuidToString(const effect_uuid_t *uuid, char *str, size_t maxLen); @@ -296,12 +287,7 @@ int EffectCreate(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, eff ret = findEffect(NULL, uuid, &l, &d); if (ret < 0){ - // Sub effects are not associated with the library->effects, - // so, findEffect will fail. Search for the effect in gSubEffectList. - ret = findSubEffect(uuid, &l, &d); - if (ret < 0 ) { - goto exit; - } + goto exit; } // create effect in library @@ -368,27 +354,21 @@ int EffectRelease(effect_handle_t handle) } if (e1 == NULL) { ret = -ENOENT; - pthread_mutex_unlock(&gLibLock); goto exit; } // release effect in library if (fx->lib == NULL) { ALOGW("EffectRelease() fx %p library already unloaded", handle); - pthread_mutex_unlock(&gLibLock); } else { pthread_mutex_lock(&fx->lib->lock); - // Releasing the gLibLock here as the list access is over as the - // effect is removed from the list. - // If the gLibLock is not released, we will have a deadlock situation - // since we call the sub effect release inside the EffectRelease of Proxy - pthread_mutex_unlock(&gLibLock); fx->lib->desc->release_effect(fx->subItfe); pthread_mutex_unlock(&fx->lib->lock); } free(fx); exit: + pthread_mutex_unlock(&gLibLock); return ret; } @@ -400,49 +380,6 @@ int EffectIsNullUuid(const effect_uuid_t *uuid) return 1; } -// Function to get the sub effect descriptors of the effect whose uuid -// is pointed by the first argument. 
It searches the gSubEffectList for the -// matching uuid and then copies the corresponding sub effect descriptors -// to the inout param -int EffectGetSubEffects(const effect_uuid_t *uuid, - effect_descriptor_t *pDescriptors, size_t size) -{ - ALOGV("EffectGetSubEffects() UUID: %08X-%04X-%04X-%04X-%02X%02X%02X%02X%02X" - "%02X\n",uuid->timeLow, uuid->timeMid, uuid->timeHiAndVersion, - uuid->clockSeq, uuid->node[0], uuid->node[1],uuid->node[2], - uuid->node[3],uuid->node[4],uuid->node[5]); - - // Check if the size of the desc buffer is large enough for 2 subeffects - if ((uuid == NULL) || (pDescriptors == NULL) || - (size < 2*sizeof(effect_descriptor_t))) { - ALOGW("NULL pointer or insufficient memory. Cannot query subeffects"); - return -EINVAL; - } - int ret = init(); - if (ret < 0) - return ret; - list_sub_elem_t *e = gSubEffectList; - sub_effect_entry_t *subeffect; - effect_descriptor_t *d; - int count = 0; - while (e != NULL) { - d = (effect_descriptor_t*)e->object; - if (memcmp(uuid, &d->uuid, sizeof(effect_uuid_t)) == 0) { - ALOGV("EffectGetSubEffects: effect found in the list"); - list_elem_t *subefx = e->sub_elem; - while (subefx != NULL) { - subeffect = (sub_effect_entry_t*)subefx->object; - d = (effect_descriptor_t*)(subeffect->object); - pDescriptors[count++] = *d; - subefx = subefx->next; - } - ALOGV("EffectGetSubEffects end - copied the sub effect descriptors"); - return count; - } - e = e->next; - } - return -ENOENT; -} ///////////////////////////////////////////////// // Local functions ///////////////////////////////////////////////// @@ -566,65 +503,6 @@ error: return -EINVAL; } -// This will find the library and UUID tags of the sub effect pointed by the -// node, gets the effect descriptor and lib_entry_t and adds the subeffect - -// sub_entry_t to the gSubEffectList -int addSubEffect(cnode *root) -{ - ALOGV("addSubEffect"); - cnode *node; - effect_uuid_t uuid; - effect_descriptor_t *d; - lib_entry_t *l; - list_elem_t *e; - node = config_find(root, LIBRARY_TAG); - if (node == NULL) { - return -EINVAL; - } - l = getLibrary(node->value); - if (l == NULL) { - ALOGW("addSubEffect() could not get library %s", node->value); - return -EINVAL; - } - node = config_find(root, UUID_TAG); - if (node == NULL) { - return -EINVAL; - } - if (stringToUuid(node->value, &uuid) != 0) { - ALOGW("addSubEffect() invalid uuid %s", node->value); - return -EINVAL; - } - d = malloc(sizeof(effect_descriptor_t)); - if (l->desc->get_descriptor(&uuid, d) != 0) { - char s[40]; - uuidToString(&uuid, s, 40); - ALOGW("Error querying effect %s on lib %s", s, l->name); - free(d); - return -EINVAL; - } -#if (LOG_NDEBUG==0) - char s[256]; - dumpEffectDescriptor(d, s, 256); - ALOGV("addSubEffect() read descriptor %p:%s",d, s); -#endif - if (EFFECT_API_VERSION_MAJOR(d->apiVersion) != - EFFECT_API_VERSION_MAJOR(EFFECT_CONTROL_API_VERSION)) { - ALOGW("Bad API version %08x on lib %s", d->apiVersion, l->name); - free(d); - return -EINVAL; - } - sub_effect_entry_t *sub_effect = malloc(sizeof(sub_effect_entry_t)); - sub_effect->object = d; - // lib_entry_t is stored since the sub effects are not linked to the library - sub_effect->lib = l; - e = malloc(sizeof(list_elem_t)); - e->object = sub_effect; - e->next = gSubEffectList->sub_elem; - gSubEffectList->sub_elem = e; - ALOGV("addSubEffect end"); - return 0; -} - int loadEffects(cnode *root) { cnode *node; @@ -693,101 +571,9 @@ int loadEffect(cnode *root) e->next = l->effects; l->effects = e; - // After the UUID node in the config_tree, if node->next is valid, - // 
that would be sub effect node. - // Find the sub effects and add them to the gSubEffectList - node = node->next; - int count = 2; - bool hwSubefx = false, swSubefx = false; - list_sub_elem_t *sube = NULL; - if (node != NULL) { - ALOGV("Adding the effect to gEffectSubList as there are sub effects"); - sube = malloc(sizeof(list_sub_elem_t)); - sube->object = d; - sube->sub_elem = NULL; - sube->next = gSubEffectList; - gSubEffectList = sube; - } - while (node != NULL && count) { - if (addSubEffect(node)) { - ALOGW("loadEffect() could not add subEffect %s", node->value); - // Change the gSubEffectList to point to older list; - gSubEffectList = sube->next; - free(sube->sub_elem);// Free an already added sub effect - sube->sub_elem = NULL; - free(sube); - return -ENOENT; - } - sub_effect_entry_t *subEntry = (sub_effect_entry_t*)gSubEffectList->sub_elem->object; - effect_descriptor_t *subEffectDesc = (effect_descriptor_t*)(subEntry->object); - // Since we return a dummy descriptor for the proxy during - // get_descriptor call,we replace it with the correspoding - // sw effect descriptor, but with Proxy UUID - // check for Sw desc - if (!((subEffectDesc->flags & EFFECT_FLAG_HW_ACC_MASK) == - EFFECT_FLAG_HW_ACC_TUNNEL)) { - swSubefx = true; - *d = *subEffectDesc; - d->uuid = uuid; - ALOGV("loadEffect() Changed the Proxy desc"); - } else - hwSubefx = true; - count--; - node = node->next; - } - // 1 HW and 1 SW sub effect found. Set the offload flag in the Proxy desc - if (hwSubefx && swSubefx) { - d->flags |= EFFECT_FLAG_OFFLOAD_SUPPORTED; - } return 0; } -// Searches the sub effect matching to the specified uuid -// in the gSubEffectList. It gets the lib_entry_t for -// the matched sub_effect . Used in EffectCreate of sub effects -int findSubEffect(const effect_uuid_t *uuid, - lib_entry_t **lib, - effect_descriptor_t **desc) -{ - list_sub_elem_t *e = gSubEffectList; - list_elem_t *subefx; - sub_effect_entry_t *effect; - lib_entry_t *l = NULL; - effect_descriptor_t *d = NULL; - int found = 0; - int ret = 0; - - if (uuid == NULL) - return -EINVAL; - - while (e != NULL && !found) { - subefx = (list_elem_t*)(e->sub_elem); - while (subefx != NULL) { - effect = (sub_effect_entry_t*)subefx->object; - l = (lib_entry_t *)effect->lib; - d = (effect_descriptor_t *)effect->object; - if (memcmp(&d->uuid, uuid, sizeof(effect_uuid_t)) == 0) { - ALOGV("uuid matched"); - found = 1; - break; - } - subefx = subefx->next; - } - e = e->next; - } - if (!found) { - ALOGV("findSubEffect() effect not found"); - ret = -ENOENT; - } else { - ALOGV("findSubEffect() found effect: %s in lib %s", d->name, l->name); - *lib = l; - if (desc != NULL) { - *desc = d; - } - } - return ret; -} - lib_entry_t *getLibrary(const char *name) { list_elem_t *e; diff --git a/media/libeffects/factory/EffectsFactory.h b/media/libeffects/factory/EffectsFactory.h index 147ff18..c1d4319 100644 --- a/media/libeffects/factory/EffectsFactory.h +++ b/media/libeffects/factory/EffectsFactory.h @@ -32,15 +32,6 @@ typedef struct list_elem_s { struct list_elem_s *next; } list_elem_t; -// Structure used for storing effects with their sub effects. -// Used in creating gSubEffectList. 
Here, -// object holds the effect desc and the list sub_elem holds the sub effects -typedef struct list_sub_elem_s { - void *object; - list_elem_t *sub_elem; - struct list_sub_elem_s *next; -} list_sub_elem_t; - typedef struct lib_entry_s { audio_effect_library_t *desc; char *name; @@ -56,16 +47,6 @@ typedef struct effect_entry_s { lib_entry_t *lib; } effect_entry_t; -// Structure used to store the lib entry -// and the descriptor of the sub effects. -// The library entry is to be stored in case of -// sub effects as the sub effects are not linked -// to the library list - gLibraryList. -typedef struct sub_effect_entry_s { - lib_entry_t *lib; - void *object; -} sub_effect_entry_t; - #if __cplusplus } // extern "C" #endif -- cgit v1.1 From 2eab94f7dfd41a65e13aca379a1aed97447f8884 Mon Sep 17 00:00:00 2001 From: jpadmana Date: Tue, 4 Jun 2013 16:08:29 +0530 Subject: Effects Factory changes for effects offload audio_effects.conf - commented changes to illustrate the addition of Proxy and sub effects to the conf file Added an effectFactoryApi - EffectGetSubEffects for querying the sub effect descriptors from the factory. This api is used by the Proxy to get the sub effects Added functions and data structures in factory code for loading the sub effects gSubEffectList - has the Proxies and their corresponding sub effects - addSubEffect() - reads a sub effect node and adds to the gSubEffectList - findSubEffect() - searches through the gSubEffectList to find a SubEffect Bug: 8174034. Change-Id: Id7f6aa67c41db370d32beaf43a979ba4ac925928 Signed-off-by: jpadmana --- include/media/EffectsFactoryApi.h | 24 ++++ media/libeffects/data/audio_effects.conf | 39 ++++++ media/libeffects/factory/EffectsFactory.c | 218 +++++++++++++++++++++++++++++- media/libeffects/factory/EffectsFactory.h | 19 +++ 4 files changed, 298 insertions(+), 2 deletions(-) diff --git a/include/media/EffectsFactoryApi.h b/include/media/EffectsFactoryApi.h index b1ed7b0..b1143b9 100644 --- a/include/media/EffectsFactoryApi.h +++ b/include/media/EffectsFactoryApi.h @@ -171,6 +171,30 @@ int EffectGetDescriptor(const effect_uuid_t *pEffectUuid, effect_descriptor_t *p //////////////////////////////////////////////////////////////////////////////// int EffectIsNullUuid(const effect_uuid_t *pEffectUuid); +//////////////////////////////////////////////////////////////////////////////// +// +// Function: EffectGetSubEffects +// +// Description: Returns the descriptors of the sub effects of the effect +// whose uuid is pointed to by first argument. +// +// Input: +// pEffectUuid: pointer to the effect uuid. +// size: size of the buffer pointed by pDescriptor. +// +// Input/Output: +// pDescriptor: address where to return the sub effect descriptors. +// +// Output: +// returned value: 0 successful operation. +// -ENODEV factory failed to initialize +// -EINVAL invalid pEffectUuid or pDescriptor +// -ENOENT no effect with this uuid found +// *pDescriptor: updated with the sub effect descriptors. 
+// +//////////////////////////////////////////////////////////////////////////////// +int EffectGetSubEffects(const effect_uuid_t *pEffectUuid, effect_descriptor_t *pDescriptors, size_t size); + #if __cplusplus } // extern "C" #endif diff --git a/media/libeffects/data/audio_effects.conf b/media/libeffects/data/audio_effects.conf index 0c3c687..f1c5f5b 100644 --- a/media/libeffects/data/audio_effects.conf +++ b/media/libeffects/data/audio_effects.conf @@ -6,6 +6,23 @@ # } # } libraries { +# This is a proxy library that will be an abstraction for +# the HW and SW effects + + #proxy { + #path /system/lib/soundfx/libProxy.so + #} + +# This is the SW implementation library of the effect + #libSW { + #path /system/lib/soundfx/libswwrapper.so + #} + +# This is the HW implementation library for the effect + #libHW { + #path /system/lib/soundfx/libhwwrapper.so + #} + bundle { path /system/lib/soundfx/libbundlewrapper.so } @@ -46,6 +63,28 @@ libraries { # } effects { + +# additions for the proxy implementation +# Proxy implementation + #effectname { + #library proxy + #uuid xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + + # SW implemetation of the effect. Added as a node under the proxy to + # indicate this as a sub effect. + #libsw { + #library libSW + #uuid yyyyyyyy-yyyy-yyyy-yyyy-yyyyyyyyyyyy + #} End of SW effect + + # HW implementation of the effect. Added as a node under the proxy to + # indicate this as a sub effect. + #libhw { + #library libHW + #uuid zzzzzzzz-zzzz-zzzz-zzzz-zzzzzzzzzzzz + #}End of HW effect + #} End of effect proxy + bassboost { library bundle uuid 8631f300-72e2-11df-b57e-0002a5d5c51b diff --git a/media/libeffects/factory/EffectsFactory.c b/media/libeffects/factory/EffectsFactory.c index f158929..f8d6041 100644 --- a/media/libeffects/factory/EffectsFactory.c +++ b/media/libeffects/factory/EffectsFactory.c @@ -28,6 +28,9 @@ static list_elem_t *gEffectList; // list of effect_entry_t: all currently created effects static list_elem_t *gLibraryList; // list of lib_entry_t: all currently loaded libraries +// list of effect_descriptor and list of sub effects : all currently loaded +// It does not contain effects without sub effects. 
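// (Illustrative layout, not part of the patch: each gSubEffectList node stores the
//  proxy's effect_descriptor_t in `object` and chains one sub_effect_entry_t per
//  implementation in `sub_elem`, e.g.
//      gSubEffectList -> { proxy desc | sub_elem } -> { SW desc, lib } -> { HW desc, lib }
//  where each sub_effect_entry_t keeps its own lib_entry_t because sub-effects are
//  not linked into gLibraryList.)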
+static list_sub_elem_t *gSubEffectList; static pthread_mutex_t gLibLock = PTHREAD_MUTEX_INITIALIZER; // controls access to gLibraryList static uint32_t gNumEffects; // total number number of effects static list_elem_t *gCurLib; // current library in enumeration process @@ -50,6 +53,8 @@ static int loadLibraries(cnode *root); static int loadLibrary(cnode *root, const char *name); static int loadEffects(cnode *root); static int loadEffect(cnode *node); +// To get and add the effect pointed by the passed node to the gSubEffectList +static int addSubEffect(cnode *root); static lib_entry_t *getLibrary(const char *path); static void resetEffectEnumeration(); static uint32_t updateNumEffects(); @@ -57,6 +62,10 @@ static int findEffect(const effect_uuid_t *type, const effect_uuid_t *uuid, lib_entry_t **lib, effect_descriptor_t **desc); +// To search a subeffect in the gSubEffectList +int findSubEffect(const effect_uuid_t *uuid, + lib_entry_t **lib, + effect_descriptor_t **desc); static void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len); static int stringToUuid(const char *str, effect_uuid_t *uuid); static int uuidToString(const effect_uuid_t *uuid, char *str, size_t maxLen); @@ -287,7 +296,12 @@ int EffectCreate(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, eff ret = findEffect(NULL, uuid, &l, &d); if (ret < 0){ - goto exit; + // Sub effects are not associated with the library->effects, + // so, findEffect will fail. Search for the effect in gSubEffectList. + ret = findSubEffect(uuid, &l, &d); + if (ret < 0 ) { + goto exit; + } } // create effect in library @@ -354,21 +368,27 @@ int EffectRelease(effect_handle_t handle) } if (e1 == NULL) { ret = -ENOENT; + pthread_mutex_unlock(&gLibLock); goto exit; } // release effect in library if (fx->lib == NULL) { ALOGW("EffectRelease() fx %p library already unloaded", handle); + pthread_mutex_unlock(&gLibLock); } else { pthread_mutex_lock(&fx->lib->lock); + // Releasing the gLibLock here as the list access is over as the + // effect is removed from the list. + // If the gLibLock is not released, we will have a deadlock situation + // since we call the sub effect release inside the EffectRelease of Proxy + pthread_mutex_unlock(&gLibLock); fx->lib->desc->release_effect(fx->subItfe); pthread_mutex_unlock(&fx->lib->lock); } free(fx); exit: - pthread_mutex_unlock(&gLibLock); return ret; } @@ -380,6 +400,49 @@ int EffectIsNullUuid(const effect_uuid_t *uuid) return 1; } +// Function to get the sub effect descriptors of the effect whose uuid +// is pointed by the first argument. It searches the gSubEffectList for the +// matching uuid and then copies the corresponding sub effect descriptors +// to the inout param +int EffectGetSubEffects(const effect_uuid_t *uuid, + effect_descriptor_t *pDescriptors, size_t size) +{ + ALOGV("EffectGetSubEffects() UUID: %08X-%04X-%04X-%04X-%02X%02X%02X%02X%02X" + "%02X\n",uuid->timeLow, uuid->timeMid, uuid->timeHiAndVersion, + uuid->clockSeq, uuid->node[0], uuid->node[1],uuid->node[2], + uuid->node[3],uuid->node[4],uuid->node[5]); + + // Check if the size of the desc buffer is large enough for 2 subeffects + if ((uuid == NULL) || (pDescriptors == NULL) || + (size < 2*sizeof(effect_descriptor_t))) { + ALOGW("NULL pointer or insufficient memory. 
Cannot query subeffects"); + return -EINVAL; + } + int ret = init(); + if (ret < 0) + return ret; + list_sub_elem_t *e = gSubEffectList; + sub_effect_entry_t *subeffect; + effect_descriptor_t *d; + int count = 0; + while (e != NULL) { + d = (effect_descriptor_t*)e->object; + if (memcmp(uuid, &d->uuid, sizeof(effect_uuid_t)) == 0) { + ALOGV("EffectGetSubEffects: effect found in the list"); + list_elem_t *subefx = e->sub_elem; + while (subefx != NULL) { + subeffect = (sub_effect_entry_t*)subefx->object; + d = (effect_descriptor_t*)(subeffect->object); + pDescriptors[count++] = *d; + subefx = subefx->next; + } + ALOGV("EffectGetSubEffects end - copied the sub effect descriptors"); + return count; + } + e = e->next; + } + return -ENOENT; +} ///////////////////////////////////////////////// // Local functions ///////////////////////////////////////////////// @@ -503,6 +566,65 @@ error: return -EINVAL; } +// This will find the library and UUID tags of the sub effect pointed by the +// node, gets the effect descriptor and lib_entry_t and adds the subeffect - +// sub_entry_t to the gSubEffectList +int addSubEffect(cnode *root) +{ + ALOGV("addSubEffect"); + cnode *node; + effect_uuid_t uuid; + effect_descriptor_t *d; + lib_entry_t *l; + list_elem_t *e; + node = config_find(root, LIBRARY_TAG); + if (node == NULL) { + return -EINVAL; + } + l = getLibrary(node->value); + if (l == NULL) { + ALOGW("addSubEffect() could not get library %s", node->value); + return -EINVAL; + } + node = config_find(root, UUID_TAG); + if (node == NULL) { + return -EINVAL; + } + if (stringToUuid(node->value, &uuid) != 0) { + ALOGW("addSubEffect() invalid uuid %s", node->value); + return -EINVAL; + } + d = malloc(sizeof(effect_descriptor_t)); + if (l->desc->get_descriptor(&uuid, d) != 0) { + char s[40]; + uuidToString(&uuid, s, 40); + ALOGW("Error querying effect %s on lib %s", s, l->name); + free(d); + return -EINVAL; + } +#if (LOG_NDEBUG==0) + char s[256]; + dumpEffectDescriptor(d, s, 256); + ALOGV("addSubEffect() read descriptor %p:%s",d, s); +#endif + if (EFFECT_API_VERSION_MAJOR(d->apiVersion) != + EFFECT_API_VERSION_MAJOR(EFFECT_CONTROL_API_VERSION)) { + ALOGW("Bad API version %08x on lib %s", d->apiVersion, l->name); + free(d); + return -EINVAL; + } + sub_effect_entry_t *sub_effect = malloc(sizeof(sub_effect_entry_t)); + sub_effect->object = d; + // lib_entry_t is stored since the sub effects are not linked to the library + sub_effect->lib = l; + e = malloc(sizeof(list_elem_t)); + e->object = sub_effect; + e->next = gSubEffectList->sub_elem; + gSubEffectList->sub_elem = e; + ALOGV("addSubEffect end"); + return 0; +} + int loadEffects(cnode *root) { cnode *node; @@ -571,9 +693,101 @@ int loadEffect(cnode *root) e->next = l->effects; l->effects = e; + // After the UUID node in the config_tree, if node->next is valid, + // that would be sub effect node. 
+ // Find the sub effects and add them to the gSubEffectList + node = node->next; + int count = 2; + bool hwSubefx = false, swSubefx = false; + list_sub_elem_t *sube = NULL; + if (node != NULL) { + ALOGV("Adding the effect to gEffectSubList as there are sub effects"); + sube = malloc(sizeof(list_sub_elem_t)); + sube->object = d; + sube->sub_elem = NULL; + sube->next = gSubEffectList; + gSubEffectList = sube; + } + while (node != NULL && count) { + if (addSubEffect(node)) { + ALOGW("loadEffect() could not add subEffect %s", node->value); + // Change the gSubEffectList to point to older list; + gSubEffectList = sube->next; + free(sube->sub_elem);// Free an already added sub effect + sube->sub_elem = NULL; + free(sube); + return -ENOENT; + } + sub_effect_entry_t *subEntry = (sub_effect_entry_t*)gSubEffectList->sub_elem->object; + effect_descriptor_t *subEffectDesc = (effect_descriptor_t*)(subEntry->object); + // Since we return a dummy descriptor for the proxy during + // get_descriptor call,we replace it with the correspoding + // sw effect descriptor, but with Proxy UUID + // check for Sw desc + if (!((subEffectDesc->flags & EFFECT_FLAG_HW_ACC_MASK) == + EFFECT_FLAG_HW_ACC_TUNNEL)) { + swSubefx = true; + *d = *subEffectDesc; + d->uuid = uuid; + ALOGV("loadEffect() Changed the Proxy desc"); + } else + hwSubefx = true; + count--; + node = node->next; + } + // 1 HW and 1 SW sub effect found. Set the offload flag in the Proxy desc + if (hwSubefx && swSubefx) { + d->flags |= EFFECT_FLAG_OFFLOAD_SUPPORTED; + } return 0; } +// Searches the sub effect matching to the specified uuid +// in the gSubEffectList. It gets the lib_entry_t for +// the matched sub_effect . Used in EffectCreate of sub effects +int findSubEffect(const effect_uuid_t *uuid, + lib_entry_t **lib, + effect_descriptor_t **desc) +{ + list_sub_elem_t *e = gSubEffectList; + list_elem_t *subefx; + sub_effect_entry_t *effect; + lib_entry_t *l = NULL; + effect_descriptor_t *d = NULL; + int found = 0; + int ret = 0; + + if (uuid == NULL) + return -EINVAL; + + while (e != NULL && !found) { + subefx = (list_elem_t*)(e->sub_elem); + while (subefx != NULL) { + effect = (sub_effect_entry_t*)subefx->object; + l = (lib_entry_t *)effect->lib; + d = (effect_descriptor_t *)effect->object; + if (memcmp(&d->uuid, uuid, sizeof(effect_uuid_t)) == 0) { + ALOGV("uuid matched"); + found = 1; + break; + } + subefx = subefx->next; + } + e = e->next; + } + if (!found) { + ALOGV("findSubEffect() effect not found"); + ret = -ENOENT; + } else { + ALOGV("findSubEffect() found effect: %s in lib %s", d->name, l->name); + *lib = l; + if (desc != NULL) { + *desc = d; + } + } + return ret; +} + lib_entry_t *getLibrary(const char *name) { list_elem_t *e; diff --git a/media/libeffects/factory/EffectsFactory.h b/media/libeffects/factory/EffectsFactory.h index c1d4319..147ff18 100644 --- a/media/libeffects/factory/EffectsFactory.h +++ b/media/libeffects/factory/EffectsFactory.h @@ -32,6 +32,15 @@ typedef struct list_elem_s { struct list_elem_s *next; } list_elem_t; +// Structure used for storing effects with their sub effects. +// Used in creating gSubEffectList. 
Here, +// object holds the effect desc and the list sub_elem holds the sub effects +typedef struct list_sub_elem_s { + void *object; + list_elem_t *sub_elem; + struct list_sub_elem_s *next; +} list_sub_elem_t; + typedef struct lib_entry_s { audio_effect_library_t *desc; char *name; @@ -47,6 +56,16 @@ typedef struct effect_entry_s { lib_entry_t *lib; } effect_entry_t; +// Structure used to store the lib entry +// and the descriptor of the sub effects. +// The library entry is to be stored in case of +// sub effects as the sub effects are not linked +// to the library list - gLibraryList. +typedef struct sub_effect_entry_s { + lib_entry_t *lib; + void *object; +} sub_effect_entry_t; + #if __cplusplus } // extern "C" #endif -- cgit v1.1 From faca05e96744dfaa2f352e3dbb29eead4e55cfa0 Mon Sep 17 00:00:00 2001 From: jpadmana Date: Tue, 4 Jun 2013 16:03:29 +0530 Subject: Effect Offload Proxy for effects offload Effect Proxy abstracts the sub effects to the upper layers. It has the following functionalities: - creation and release of sub effects - routing the effect commands and process to the appropriate sub effect Bug: 8174034. Change-Id: Iec34b61104f0bbec4ef67c62f0710a5536dc325b Signed-off-by: jpadmana --- media/libeffects/data/audio_effects.conf | 2 +- media/libeffects/proxy/Android.mk | 34 ++++ media/libeffects/proxy/EffectProxy.cpp | 298 +++++++++++++++++++++++++++++++ media/libeffects/proxy/EffectProxy.h | 75 ++++++++ 4 files changed, 408 insertions(+), 1 deletion(-) create mode 100644 media/libeffects/proxy/Android.mk create mode 100644 media/libeffects/proxy/EffectProxy.cpp create mode 100644 media/libeffects/proxy/EffectProxy.h diff --git a/media/libeffects/data/audio_effects.conf b/media/libeffects/data/audio_effects.conf index f1c5f5b..c3c4b67 100644 --- a/media/libeffects/data/audio_effects.conf +++ b/media/libeffects/data/audio_effects.conf @@ -10,7 +10,7 @@ libraries { # the HW and SW effects #proxy { - #path /system/lib/soundfx/libProxy.so + #path /system/lib/soundfx/libeffectproxy.so #} # This is the SW implementation library of the effect diff --git a/media/libeffects/proxy/Android.mk b/media/libeffects/proxy/Android.mk new file mode 100644 index 0000000..01b3be1 --- /dev/null +++ b/media/libeffects/proxy/Android.mk @@ -0,0 +1,34 @@ +# Copyright 2013 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) +LOCAL_MODULE:= libeffectproxy +LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx +LOCAL_MODULE_TAGS := optional + + +LOCAL_SRC_FILES := \ + EffectProxy.cpp + +LOCAL_CFLAGS+= -fvisibility=hidden + +LOCAL_SHARED_LIBRARIES := liblog libcutils libutils libdl libeffects + +LOCAL_C_INCLUDES := \ + system/media/audio_effects/include \ + bionic/libc/include + +include $(BUILD_SHARED_LIBRARY) + diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp new file mode 100644 index 0000000..77c6e89 --- /dev/null +++ b/media/libeffects/proxy/EffectProxy.cpp @@ -0,0 +1,298 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "EffectProxy" +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include +#include +#include +#include + +namespace android { +// This is a dummy proxy descriptor just to return to Factory during the initial +// GetDescriptor call. Later in the factory, it is replaced with the +// SW sub effect descriptor +const effect_descriptor_t gProxyDescriptor = { + EFFECT_UUID_INITIALIZER, // type + EFFECT_UUID_INITIALIZER, // uuid + EFFECT_CONTROL_API_VERSION, //version of effect control API + (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | + EFFECT_FLAG_VOLUME_CTRL), // effect capability flags + 0, // CPU load + 1, // Data memory + "Proxy", //effect name + "AOSP", //implementor name +}; + + +static const effect_descriptor_t *const gDescriptors[] = +{ + &gProxyDescriptor, +}; + +int EffectProxyCreate(const effect_uuid_t *uuid, + int32_t sessionId, + int32_t ioId, + effect_handle_t *pHandle) { + + effect_descriptor_t* desc; + EffectContext* pContext; + if (pHandle == NULL || uuid == NULL) { + ALOGE("EffectProxyCreate() called with NULL pointer"); + return -EINVAL; + } + ALOGV("EffectProxyCreate start.."); + pContext = new EffectContext; + pContext->sessionId = sessionId; + pContext->ioId = ioId; + pContext->uuid = *uuid; + pContext->common_itfe = &gEffectInterface; + // The sub effects will be created in effect_command when the first command + // for the effect is received + pContext->eHandle[SUB_FX_HOST] = pContext->eHandle[SUB_FX_OFFLOAD] = NULL; + + // Get the HW and SW sub effect descriptors from the effects factory + desc = new effect_descriptor_t[SUB_FX_COUNT]; + pContext->desc = new effect_descriptor_t[SUB_FX_COUNT]; + int retValue = EffectGetSubEffects(uuid, desc, + sizeof(effect_descriptor_t) * SUB_FX_COUNT); + // EffectGetSubEffects returns the number of sub-effects copied. + if (retValue != SUB_FX_COUNT) { + ALOGE("EffectCreate() could not get the sub effects"); + delete desc; + delete pContext->desc; + return -EINVAL; + } + // Check which is the HW descriptor and copy the descriptors + // to the Context desc array + // Also check if there is only one HW and one SW descriptor. + // HW descriptor alone has the HW_TUNNEL flag. 
+ if ((desc[0].flags & EFFECT_FLAG_HW_ACC_TUNNEL) && + !(desc[1].flags & EFFECT_FLAG_HW_ACC_TUNNEL)) { + pContext->desc[SUB_FX_OFFLOAD] = desc[0]; + pContext->desc[SUB_FX_HOST] = desc[1]; + } + else if ((desc[1].flags & EFFECT_FLAG_HW_ACC_TUNNEL) && + !(desc[0].flags & EFFECT_FLAG_HW_ACC_TUNNEL)) { + pContext->desc[SUB_FX_HOST] = desc[0]; + pContext->desc[SUB_FX_OFFLOAD] = desc[1]; + } + delete desc; +#if (LOG_NDEBUG == 0) + effect_uuid_t uuid_print = pContext->desc[SUB_FX_HOST].uuid; + ALOGV("EffectCreate() UUID of HOST: %08X-%04X-%04X-%04X-%02X%02X%02X%02X" + "%02X%02X\n",uuid_print.timeLow, uuid_print.timeMid, + uuid_print.timeHiAndVersion, uuid_print.clockSeq, uuid_print.node[0], + uuid_print.node[1], uuid_print.node[2], uuid_print.node[3], + uuid_print.node[4], uuid_print.node[5]); + ALOGV("EffectCreate() UUID of OFFLOAD: %08X-%04X-%04X-%04X-%02X%02X%02X%02X" + "%02X%02X\n", uuid_print.timeLow, uuid_print.timeMid, + uuid_print.timeHiAndVersion, uuid_print.clockSeq, uuid_print.node[0], + uuid_print.node[1], uuid_print.node[2], uuid_print.node[3], + uuid_print.node[4], uuid_print.node[5]); +#endif + *pHandle = (effect_handle_t)pContext; + ALOGV("EffectCreate end"); + return 0; +} //end EffectProxyCreate + +int EffectProxyRelease(effect_handle_t handle) { + EffectContext * pContext = (EffectContext *)handle; + if (pContext == NULL) { + ALOGV("ERROR : EffectRelease called with NULL pointer"); + return -EINVAL; + } + ALOGV("EffectRelease"); + delete pContext->desc; + if (pContext->eHandle[SUB_FX_HOST]) + EffectRelease(pContext->eHandle[SUB_FX_HOST]); + if (pContext->eHandle[SUB_FX_OFFLOAD]) + EffectRelease(pContext->eHandle[SUB_FX_OFFLOAD]); + delete pContext; + pContext = NULL; + return 0; +} /*end EffectProxyRelease */ + +int EffectProxyGetDescriptor(const effect_uuid_t *uuid, + effect_descriptor_t *pDescriptor) { + const effect_descriptor_t *desc = NULL; + + if (pDescriptor == NULL || uuid == NULL) { + ALOGV("EffectGetDescriptor() called with NULL pointer"); + return -EINVAL; + } + desc = &gProxyDescriptor; + *pDescriptor = *desc; + return 0; +} /* end EffectProxyGetDescriptor */ + +/* Effect Control Interface Implementation: Process */ +int Effect_process(effect_handle_t self, + audio_buffer_t *inBuffer, + audio_buffer_t *outBuffer) { + + EffectContext *pContext = (EffectContext *) self; + int ret = 0; + if (pContext != NULL) { + int index = pContext->index; + // if the index refers to HW , do not do anything. Just return. 
+ if (index == SUB_FX_HOST) { + ALOGV("Calling CoreProcess"); + ret = (*pContext->eHandle[index])->process(pContext->eHandle[index], + inBuffer, outBuffer); + } + } + return ret; +} /* end Effect_process */ + +/* Effect Control Interface Implementation: Command */ +int Effect_command(effect_handle_t self, + uint32_t cmdCode, + uint32_t cmdSize, + void *pCmdData, + uint32_t *replySize, + void *pReplyData) { + + EffectContext *pContext = (EffectContext *) self; + int status; + if (pContext == NULL) { + ALOGV("Effect_command() Proxy context is NULL"); + return -EINVAL; + } + if (pContext->eHandle[SUB_FX_HOST] == NULL) { + ALOGV("Effect_command() Calling HOST EffectCreate"); + status = EffectCreate(&pContext->desc[SUB_FX_HOST].uuid, + pContext->sessionId, pContext->ioId, + &(pContext->eHandle[SUB_FX_HOST])); + if (status != NO_ERROR || (pContext->eHandle[SUB_FX_HOST] == NULL)) { + ALOGV("Effect_command() Error creating SW sub effect"); + return status; + } + } + if (pContext->eHandle[SUB_FX_OFFLOAD] == NULL) { + ALOGV("Effect_command() Calling OFFLOAD EffectCreate"); + status = EffectCreate(&pContext->desc[SUB_FX_OFFLOAD].uuid, + pContext->sessionId, pContext->ioId, + &(pContext->eHandle[SUB_FX_OFFLOAD])); + if (status != NO_ERROR || (pContext->eHandle[SUB_FX_OFFLOAD] == NULL)) { + ALOGV("Effect_command() Error creating HW effect"); + // Do not return error here as SW effect is created + // Return error if the CMD_OFFLOAD sends the index as OFFLOAD + } + pContext->index = SUB_FX_HOST; + } + // EFFECT_CMD_OFFLOAD used to (1) send whether the thread is offload or not + // (2) Send the ioHandle of the effectThread when the effect + // is moved from one type of thread to another. + // pCmdData points to a memory holding effect_offload_param_t structure + if (cmdCode == EFFECT_CMD_OFFLOAD) { + ALOGV("Effect_command() cmdCode = EFFECT_CMD_OFFLOAD"); + if (cmdSize == 0 || pCmdData == NULL) { + ALOGV("effectsOffload: Effect_command: CMD_OFFLOAD has no data"); + *(int*)pReplyData = FAILED_TRANSACTION; + return FAILED_TRANSACTION; + } + effect_offload_param_t* offloadParam = (effect_offload_param_t*)pCmdData; + // Assign the effect context index based on isOffload field of the structure + pContext->index = offloadParam->isOffload ? SUB_FX_OFFLOAD : SUB_FX_HOST; + // if the index is HW and the HW effect is unavailable, return error + // and reset the index to SW + if (pContext->eHandle[pContext->index] == NULL) { + ALOGV("Effect_command()CMD_OFFLOAD sub effect unavailable"); + *(int*)pReplyData = FAILED_TRANSACTION; + return FAILED_TRANSACTION; + } + pContext->ioId = offloadParam->ioHandle; + ALOGV("Effect_command()CMD_OFFLOAD index:%d io %d", pContext->index, pContext->ioId); + // Update the DSP wrapper with the new ioHandle. + // Pass the OFFLOAD command to the wrapper. 
+ // The DSP wrapper needs to handle this CMD + if (pContext->eHandle[SUB_FX_OFFLOAD]) + status = (*pContext->eHandle[SUB_FX_OFFLOAD])->command( + pContext->eHandle[SUB_FX_OFFLOAD], cmdCode, cmdSize, + pCmdData, replySize, pReplyData); + return status; + } + + int index = pContext->index; + if (index != SUB_FX_HOST && index != SUB_FX_OFFLOAD) { + ALOGV("Effect_command: effect index is neither offload nor host"); + return -EINVAL; + } + ALOGV("Effect_command: pContext->eHandle[%d]: %p", + index, pContext->eHandle[index]); + if (pContext->eHandle[SUB_FX_HOST]) + (*pContext->eHandle[SUB_FX_HOST])->command( + pContext->eHandle[SUB_FX_HOST], cmdCode, cmdSize, + pCmdData, replySize, pReplyData); + if (pContext->eHandle[SUB_FX_OFFLOAD]) { + // In case of SET CMD, when the offload stream is unavailable, + // we will store the effect param values in the DSP effect wrapper. + // When the offload effects get enabled, we send these values to the + // DSP during Effect_config. + // So,we send the params to DSP wrapper also + (*pContext->eHandle[SUB_FX_OFFLOAD])->command( + pContext->eHandle[SUB_FX_OFFLOAD], cmdCode, cmdSize, + pCmdData, replySize, pReplyData); + } + return 0; +} /* end Effect_command */ + + +/* Effect Control Interface Implementation: get_descriptor */ +int Effect_getDescriptor(effect_handle_t self, + effect_descriptor_t *pDescriptor) { + + EffectContext * pContext = (EffectContext *) self; + const effect_descriptor_t *desc; + + ALOGV("Effect_getDescriptor"); + if (pContext == NULL || pDescriptor == NULL) { + ALOGV("Effect_getDescriptor() invalid param"); + return -EINVAL; + } + if (pContext->desc == NULL) { + ALOGV("Effect_getDescriptor() could not get descriptor"); + return -EINVAL; + } + desc = &pContext->desc[SUB_FX_HOST]; + *pDescriptor = *desc; + pDescriptor->uuid = pContext->uuid; // Replace the uuid with the Proxy UUID + // Also set/clear the EFFECT_FLAG_OFFLOAD_SUPPORTED flag based on the sub effects availability + if (pContext->eHandle[SUB_FX_OFFLOAD] != NULL) + pDescriptor->flags |= EFFECT_FLAG_OFFLOAD_SUPPORTED; + else + pDescriptor->flags &= ~EFFECT_FLAG_OFFLOAD_SUPPORTED; + return 0; +} /* end Effect_getDescriptor */ + +} // namespace android + +__attribute__ ((visibility ("default"))) +audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { + tag : AUDIO_EFFECT_LIBRARY_TAG, + version : EFFECT_LIBRARY_API_VERSION, + name : "Effect Proxy", + implementor : "AOSP", + create_effect : android::EffectProxyCreate, + release_effect : android::EffectProxyRelease, + get_descriptor : android::EffectProxyGetDescriptor, +}; diff --git a/media/libeffects/proxy/EffectProxy.h b/media/libeffects/proxy/EffectProxy.h new file mode 100644 index 0000000..8992f93 --- /dev/null +++ b/media/libeffects/proxy/EffectProxy.h @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +namespace android { +enum { + SUB_FX_HOST, // Index of HOST in the descriptor and handle arrays + // of the Proxy context + SUB_FX_OFFLOAD, // Index of OFFLOAD in the descriptor and handle arrays + // of the Proxy context + SUB_FX_COUNT // The number of sub effects for a Proxy(1 HW, 1 SW) +}; +#if __cplusplus +extern "C" { +#endif + +int EffectProxyCreate(const effect_uuid_t *uuid, + int32_t sessionId, + int32_t ioId, + effect_handle_t *pHandle); +int EffectProxyRelease(effect_handle_t handle); +int EffectProxyGetDescriptor(const effect_uuid_t *uuid, + effect_descriptor_t *pDescriptor); +/* Effect Control Interface Implementation: Process */ +int Effect_process(effect_handle_t self, + audio_buffer_t *inBuffer, + audio_buffer_t *outBuffer); + +/* Effect Control Interface Implementation: Command */ +int Effect_command(effect_handle_t self, + uint32_t cmdCode, + uint32_t cmdSize, + void *pCmdData, + uint32_t *replySize, + void *pReplyData); +int Effect_getDescriptor(effect_handle_t self, + effect_descriptor_t *pDescriptor); + +const struct effect_interface_s gEffectInterface = { + Effect_process, + Effect_command, + Effect_getDescriptor, + NULL, +}; + +struct EffectContext { + const struct effect_interface_s *common_itfe; // Holds the itfe of the Proxy + effect_descriptor_t* desc; // Points to the sub effect descriptors + effect_handle_t eHandle[SUB_FX_COUNT]; // The effect handles of the sub effects + int index; // The index that is currently active - HOST or OFFLOAD + int32_t sessionId; // The sessiond in which the effect is created. + // Stored in context to pass on to sub effect creation + int32_t ioId; // The ioId in which the effect is created. + // Stored in context to pass on to sub effect creation + effect_uuid_t uuid; // UUID of the Proxy +}; + +#if __cplusplus +} // extern "C" +#endif +} //namespace android -- cgit v1.1 From eba9bf72fb5e036bb15ca4a1dc126883a2cb938d Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 27 Sep 2013 15:04:26 -0700 Subject: fix command handling in effect offload proxy Fix some issues in effect proxy related to handling of effect commands to offloaded and non offloaded effects. Also fixed a bug on capture index in software Visualizer effect. Bug: 8174034. Change-Id: I119458fea597cc3acbc0ef9ec315f67aa211cbd9 --- media/libeffects/proxy/EffectProxy.cpp | 59 +++++++++++++++++++++++++++------- 1 file changed, 48 insertions(+), 11 deletions(-) diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp index 77c6e89..41640da 100644 --- a/media/libeffects/proxy/EffectProxy.cpp +++ b/media/libeffects/proxy/EffectProxy.cpp @@ -48,6 +48,21 @@ static const effect_descriptor_t *const gDescriptors[] = &gProxyDescriptor, }; +static inline bool isGetterCmd(uint32_t cmdCode) +{ + switch (cmdCode) { + case EFFECT_CMD_GET_PARAM: + case EFFECT_CMD_GET_CONFIG: + case EFFECT_CMD_GET_CONFIG_REVERSE: + case EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS: + case EFFECT_CMD_GET_FEATURE_CONFIG: + return true; + default: + return false; + } +} + + int EffectProxyCreate(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, @@ -155,7 +170,6 @@ int Effect_process(effect_handle_t self, int index = pContext->index; // if the index refers to HW , do not do anything. Just return. 
if (index == SUB_FX_HOST) { - ALOGV("Calling CoreProcess"); ret = (*pContext->eHandle[index])->process(pContext->eHandle[index], inBuffer, outBuffer); } @@ -172,7 +186,7 @@ int Effect_command(effect_handle_t self, void *pReplyData) { EffectContext *pContext = (EffectContext *) self; - int status; + int status = 0; if (pContext == NULL) { ALOGV("Effect_command() Proxy context is NULL"); return -EINVAL; @@ -237,23 +251,46 @@ int Effect_command(effect_handle_t self, ALOGV("Effect_command: effect index is neither offload nor host"); return -EINVAL; } - ALOGV("Effect_command: pContext->eHandle[%d]: %p", - index, pContext->eHandle[index]); - if (pContext->eHandle[SUB_FX_HOST]) - (*pContext->eHandle[SUB_FX_HOST])->command( + + // Getter commands are only sent to the active sub effect. + uint32_t hostReplySize = replySize != NULL ? *replySize : 0; + bool hostReplied = false; + int hostStatus = 0; + uint32_t offloadReplySize = replySize != NULL ? *replySize : 0; + bool offloadReplied = false; + int offloadStatus = 0; + + if (pContext->eHandle[SUB_FX_HOST] && (!isGetterCmd(cmdCode) || index == SUB_FX_HOST)) { + hostStatus = (*pContext->eHandle[SUB_FX_HOST])->command( pContext->eHandle[SUB_FX_HOST], cmdCode, cmdSize, - pCmdData, replySize, pReplyData); - if (pContext->eHandle[SUB_FX_OFFLOAD]) { + pCmdData, replySize != NULL ? &hostReplySize : NULL, pReplyData); + hostReplied = true; + } + if (pContext->eHandle[SUB_FX_OFFLOAD] && (!isGetterCmd(cmdCode) || index == SUB_FX_OFFLOAD)) { // In case of SET CMD, when the offload stream is unavailable, // we will store the effect param values in the DSP effect wrapper. // When the offload effects get enabled, we send these values to the // DSP during Effect_config. // So,we send the params to DSP wrapper also - (*pContext->eHandle[SUB_FX_OFFLOAD])->command( + offloadStatus = (*pContext->eHandle[SUB_FX_OFFLOAD])->command( pContext->eHandle[SUB_FX_OFFLOAD], cmdCode, cmdSize, - pCmdData, replySize, pReplyData); + pCmdData, replySize != NULL ? &offloadReplySize : NULL, pReplyData); + offloadReplied = true; } - return 0; + // By convention the offloaded implementation reply is returned if command is processed by both + // host and offloaded sub effects + if (offloadReplied){ + status = offloadStatus; + if (replySize) { + *replySize = offloadReplySize; + } + } else if (hostReplied) { + status = hostStatus; + if (replySize) { + *replySize = hostReplySize; + } + } + return status; } /* end Effect_command */ -- cgit v1.1 From 5d6d86a4d102704f49b9235eaf282c428d7100b6 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 20 Sep 2013 12:27:32 -0700 Subject: fix oflload effect proxy commmand handling Implement a more generic command handling in offload effect proxy. All commands are sent to both sub effects but only the reply from the active one is returned to the caller. Bug: 8174034. 
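As an aside, the fan-out that this change introduces can be condensed into the sketch below. It is illustrative only, not the verbatim patch: it assumes the EffectContext, SUB_FX_COUNT and effect_handle_t definitions from EffectProxy.h and the audio_effect headers, and it elides the reply-buffer growth logic that the real code keeps in pContext->replyData.

    // Every live sub effect receives the command so its internal state stays
    // in sync; only the active sub effect's status and reply reach the caller,
    // while the other reply is written into a scratch buffer and dropped.
    static int fanOutCommand(EffectContext *ctx, uint32_t cmdCode,
                             uint32_t cmdSize, void *pCmdData,
                             uint32_t *replySize, void *pReplyData) {
        int activeStatus = 0;
        int scratchStatus = 0;
        uint32_t scratchSize = (replySize != NULL) ? *replySize : 0;
        for (int i = 0; i < SUB_FX_COUNT; i++) {
            if (ctx->eHandle[i] == NULL) {
                continue;
            }
            const bool active = (i == ctx->index);
            int *status = active ? &activeStatus : &scratchStatus;
            uint32_t *size = active ? replySize
                                    : (replySize != NULL ? &scratchSize : NULL);
            void *reply = active ? pReplyData
                                 : (pReplyData != NULL ? ctx->replyData : NULL);
            *status = (*ctx->eHandle[i])->command(ctx->eHandle[i], cmdCode,
                                                  cmdSize, pCmdData, size, reply);
        }
        return activeStatus;
    }

The design intent, per the patch, is that commands hit both sub effects so a later switch between host and offload finds them configured identically, while the caller only ever sees the reply of the sub effect that is currently processing audio.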
Change-Id: I28aa0f0d806e846332bc29801ee40d34e4ea0c43 --- media/libeffects/proxy/EffectProxy.cpp | 95 ++++++++++++++++++---------------- media/libeffects/proxy/EffectProxy.h | 5 ++ 2 files changed, 54 insertions(+), 46 deletions(-) diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp index 41640da..b3304b7 100644 --- a/media/libeffects/proxy/EffectProxy.cpp +++ b/media/libeffects/proxy/EffectProxy.cpp @@ -48,20 +48,6 @@ static const effect_descriptor_t *const gDescriptors[] = &gProxyDescriptor, }; -static inline bool isGetterCmd(uint32_t cmdCode) -{ - switch (cmdCode) { - case EFFECT_CMD_GET_PARAM: - case EFFECT_CMD_GET_CONFIG: - case EFFECT_CMD_GET_CONFIG_REVERSE: - case EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS: - case EFFECT_CMD_GET_FEATURE_CONFIG: - return true; - default: - return false; - } -} - int EffectProxyCreate(const effect_uuid_t *uuid, int32_t sessionId, @@ -80,6 +66,7 @@ int EffectProxyCreate(const effect_uuid_t *uuid, pContext->ioId = ioId; pContext->uuid = *uuid; pContext->common_itfe = &gEffectInterface; + // The sub effects will be created in effect_command when the first command // for the effect is received pContext->eHandle[SUB_FX_HOST] = pContext->eHandle[SUB_FX_OFFLOAD] = NULL; @@ -124,6 +111,10 @@ int EffectProxyCreate(const effect_uuid_t *uuid, uuid_print.node[1], uuid_print.node[2], uuid_print.node[3], uuid_print.node[4], uuid_print.node[5]); #endif + + pContext->replySize = PROXY_REPLY_SIZE_DEFAULT; + pContext->replyData = (char *)malloc(PROXY_REPLY_SIZE_DEFAULT); + *pHandle = (effect_handle_t)pContext; ALOGV("EffectCreate end"); return 0; @@ -137,6 +128,8 @@ int EffectProxyRelease(effect_handle_t handle) { } ALOGV("EffectRelease"); delete pContext->desc; + free(pContext->replyData); + if (pContext->eHandle[SUB_FX_HOST]) EffectRelease(pContext->eHandle[SUB_FX_HOST]); if (pContext->eHandle[SUB_FX_OFFLOAD]) @@ -253,43 +246,53 @@ int Effect_command(effect_handle_t self, } // Getter commands are only sent to the active sub effect. - uint32_t hostReplySize = replySize != NULL ? *replySize : 0; - bool hostReplied = false; - int hostStatus = 0; - uint32_t offloadReplySize = replySize != NULL ? *replySize : 0; - bool offloadReplied = false; - int offloadStatus = 0; + int *subStatus[SUB_FX_COUNT]; + uint32_t *subReplySize[SUB_FX_COUNT]; + void *subReplyData[SUB_FX_COUNT]; + uint32_t tmpSize; + int tmpStatus; - if (pContext->eHandle[SUB_FX_HOST] && (!isGetterCmd(cmdCode) || index == SUB_FX_HOST)) { - hostStatus = (*pContext->eHandle[SUB_FX_HOST])->command( - pContext->eHandle[SUB_FX_HOST], cmdCode, cmdSize, - pCmdData, replySize != NULL ? &hostReplySize : NULL, pReplyData); - hostReplied = true; - } - if (pContext->eHandle[SUB_FX_OFFLOAD] && (!isGetterCmd(cmdCode) || index == SUB_FX_OFFLOAD)) { - // In case of SET CMD, when the offload stream is unavailable, - // we will store the effect param values in the DSP effect wrapper. - // When the offload effects get enabled, we send these values to the - // DSP during Effect_config. - // So,we send the params to DSP wrapper also - offloadStatus = (*pContext->eHandle[SUB_FX_OFFLOAD])->command( - pContext->eHandle[SUB_FX_OFFLOAD], cmdCode, cmdSize, - pCmdData, replySize != NULL ? 
&offloadReplySize : NULL, pReplyData); - offloadReplied = true; + // grow temp reply buffer if needed + if (replySize != NULL) { + tmpSize = pContext->replySize; + while (tmpSize < *replySize && tmpSize < PROXY_REPLY_SIZE_MAX) { + tmpSize *= 2; + } + if (tmpSize > pContext->replySize) { + ALOGV("Effect_command grow reply buf to %d", tmpSize); + pContext->replyData = (char *)realloc(pContext->replyData, tmpSize); + pContext->replySize = tmpSize; + } + if (tmpSize > *replySize) { + tmpSize = *replySize; + } + } else { + tmpSize = 0; } - // By convention the offloaded implementation reply is returned if command is processed by both - // host and offloaded sub effects - if (offloadReplied){ - status = offloadStatus; - if (replySize) { - *replySize = offloadReplySize; + // tmpSize is now the actual reply size for the non active sub effect + + // Send command to sub effects. The command is sent to all sub effects so that their internal + // state is kept in sync. + // Only the reply from the active sub effect is returned to the caller. The reply from the + // other sub effect is lost in pContext->replyData + for (int i = 0; i < SUB_FX_COUNT; i++) { + if (pContext->eHandle[i] == NULL) { + continue; } - } else if (hostReplied) { - status = hostStatus; - if (replySize) { - *replySize = hostReplySize; + if (i == index) { + subStatus[i] = &status; + subReplySize[i] = replySize; + subReplyData[i] = pReplyData; + } else { + subStatus[i] = &tmpStatus; + subReplySize[i] = replySize == NULL ? NULL : &tmpSize; + subReplyData[i] = pReplyData == NULL ? NULL : pContext->replyData; } + *subStatus[i] = (*pContext->eHandle[i])->command( + pContext->eHandle[i], cmdCode, cmdSize, + pCmdData, subReplySize[i], subReplyData[i]); } + return status; } /* end Effect_command */ diff --git a/media/libeffects/proxy/EffectProxy.h b/media/libeffects/proxy/EffectProxy.h index 8992f93..acbe17e 100644 --- a/media/libeffects/proxy/EffectProxy.h +++ b/media/libeffects/proxy/EffectProxy.h @@ -57,6 +57,9 @@ const struct effect_interface_s gEffectInterface = { NULL, }; +#define PROXY_REPLY_SIZE_MAX (64 * 1024) // must be power of two +#define PROXY_REPLY_SIZE_DEFAULT 32 // must be power of two + struct EffectContext { const struct effect_interface_s *common_itfe; // Holds the itfe of the Proxy effect_descriptor_t* desc; // Points to the sub effect descriptors @@ -67,6 +70,8 @@ struct EffectContext { int32_t ioId; // The ioId in which the effect is created. // Stored in context to pass on to sub effect creation effect_uuid_t uuid; // UUID of the Proxy + char* replyData; // temporary buffer for non active sub effect command reply + uint32_t replySize; // current size of temporary reply buffer }; #if __cplusplus -- cgit v1.1 From 30c08634416a99a0f627e4de3a5f49dcf0a72fd3 Mon Sep 17 00:00:00 2001 From: Johann Date: Fri, 27 Sep 2013 17:42:12 -0700 Subject: Change VP8 encoder bitrate Allow the bitrate to be updated while the encoder is running. 
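As a usage-side illustration (not part of the patch), a client that already holds an OMX node for this encoder could push a new target bitrate roughly as in the sketch below. The structure layout comes from the standard OMX_VIDEO_CONFIG_BITRATETYPE definition; the output-port index of 1 and the IOMX handle names are assumptions for the example, and error handling is elided.

    #include <media/IOMX.h>
    #include <OMX_Index.h>
    #include <OMX_Video.h>
    #include <string.h>

    // Ask a running VP8 encoder instance for a new target bitrate via
    // OMX_IndexConfigVideoBitrate, which the setConfig() change below
    // starts to honor.
    static android::status_t requestBitrate(
            const android::sp<android::IOMX> &omx, android::IOMX::node_id node,
            OMX_U32 newBitrateBps) {
        OMX_VIDEO_CONFIG_BITRATETYPE config;
        memset(&config, 0, sizeof(config));
        config.nSize = sizeof(config);
        config.nVersion.s.nVersionMajor = 1;    // OMX IL 1.x, as elsewhere in libstagefright
        config.nPortIndex = 1;                  // encoder output port (assumed index)
        config.nEncodeBitrate = newBitrateBps;  // in bits per second
        return omx->setConfig(node, OMX_IndexConfigVideoBitrate,
                              &config, sizeof(config));
    }

At the application level this knob is normally reached through MediaCodec's setParameters() with a video-bitrate key rather than by talking to OMX directly.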
Bug: 8422347 Change-Id: I8427fe20921b00f92b8f99fe21691709fab354b0 --- .../codecs/on2/enc/SoftVPXEncoder.cpp | 32 ++++++++++++++++++++++ .../libstagefright/codecs/on2/enc/SoftVPXEncoder.h | 5 +++- 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp index 16f0f30..8375cac 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp @@ -141,6 +141,7 @@ SoftVPXEncoder::SoftVPXEncoder(const char *name, mWidth(176), mHeight(144), mBitrate(192000), // in bps + mBitrateUpdated(false), mBitrateControlMode(VPX_VBR), // variable bitrate mFrameDurationUs(33333), // Defaults to 30 fps mDCTPartitions(0), @@ -536,6 +537,22 @@ OMX_ERRORTYPE SoftVPXEncoder::setConfig( return OMX_ErrorNone; } + case OMX_IndexConfigVideoBitrate: + { + OMX_VIDEO_CONFIG_BITRATETYPE *params = + (OMX_VIDEO_CONFIG_BITRATETYPE *)_params; + + if (params->nPortIndex != kOutputPortIndex) { + return OMX_ErrorBadPortIndex; + } + + if (mBitrate != params->nEncodeBitrate) { + mBitrate = params->nEncodeBitrate; + mBitrateUpdated = true; + } + return OMX_ErrorNone; + } + default: return SimpleSoftOMXComponent::setConfig(index, _params); } @@ -779,6 +796,21 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { mKeyFrameRequested = false; } + if (mBitrateUpdated) { + mCodecConfiguration->rc_target_bitrate = mBitrate/1000; + vpx_codec_err_t res = vpx_codec_enc_config_set(mCodecContext, + mCodecConfiguration); + if (res != VPX_CODEC_OK) { + ALOGE("vp8 encoder failed to update bitrate: %s", + vpx_codec_err_to_string(res)); + notify(OMX_EventError, + OMX_ErrorUndefined, + 0, // Extra notification data + NULL); // Notification data pointer + } + mBitrateUpdated = false; + } + codec_return = vpx_codec_encode( mCodecContext, &raw_frame, diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h index 4ee5e51..076830f 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h @@ -128,7 +128,10 @@ private: int32_t mHeight; // Target bitrate set for the encoder, in bits per second. - int32_t mBitrate; + uint32_t mBitrate; + + // If a request for a change it bitrate has been received. + bool mBitrateUpdated; // Bitrate control mode, either constant or variable vpx_rc_mode mBitrateControlMode; -- cgit v1.1 From 59fe010bcc072597852454a2ec53d7b0a2002a3b Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 27 Sep 2013 18:48:26 -0700 Subject: fix volume and effect enable delay on offloaded tracks Volume: add a method to wake up the mediaserver playback thread when a volume command is received on an offloaded track. Effects: call effect chain process on offloaded playback threads asynchronously from writes to allow effect state updates while waiting for async write callback. Bug: 10796540. 
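The client-side half of the volume fix is small enough to condense into a sketch; this is illustrative, not the verbatim framework code, and it assumes the IAudioTrack binder interface as extended by this patch.

    #include <media/IAudioTrack.h>

    // After AudioTrack::setVolume() has written the new gain into the shared
    // control block (mProxy->setVolumeLR(...)), an offloaded track also pokes
    // the server. The call travels over binder (the new SIGNAL transaction) to
    // TrackHandle::signal(), then Track::signal(), which takes the playback
    // thread's lock and calls broadcast_l() so the offload thread wakes up and
    // applies the volume now instead of at the next async write callback.
    static void nudgeOffloadedTrack(const android::sp<android::IAudioTrack> &track,
                                    bool offloaded) {
        if (offloaded && track != 0) {
            track->signal();
        }
    }

The effects half needs no new interface: as the thread-loop change below shows, an offload thread runs the effect chains' process_l() on every iteration, even when no audio was written, so enable/disable state changes take effect while the thread is waiting for the async write callback.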
Change-Id: Id2747ae88783575d1d7ffd6fc86fbd054ab2c739 --- include/media/IAudioTrack.h | 3 +++ media/libmedia/AudioTrack.cpp | 3 +++ media/libmedia/IAudioTrack.cpp | 12 ++++++++++++ services/audioflinger/AudioFlinger.h | 1 + services/audioflinger/Effects.cpp | 22 +++++++++++++++++----- services/audioflinger/PlaybackTracks.h | 1 + services/audioflinger/Threads.cpp | 11 ++++++++++- services/audioflinger/Tracks.cpp | 16 ++++++++++++++++ 8 files changed, 63 insertions(+), 6 deletions(-) diff --git a/include/media/IAudioTrack.h b/include/media/IAudioTrack.h index afac4ae..5c8a484 100644 --- a/include/media/IAudioTrack.h +++ b/include/media/IAudioTrack.h @@ -90,6 +90,9 @@ public: /* Return NO_ERROR if timestamp is valid */ virtual status_t getTimestamp(AudioTimestamp& timestamp) = 0; + + /* Signal the playback thread for a change in control block */ + virtual void signal() = 0; }; // ---------------------------------------------------------------------------- diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 754a4e3..37d50cf 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -534,6 +534,9 @@ status_t AudioTrack::setVolume(float left, float right) mProxy->setVolumeLR((uint32_t(uint16_t(right * 0x1000)) << 16) | uint16_t(left * 0x1000)); + if (isOffloaded()) { + mAudioTrack->signal(); + } return NO_ERROR; } diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp index f0d75ba..3cd9cfd 100644 --- a/media/libmedia/IAudioTrack.cpp +++ b/media/libmedia/IAudioTrack.cpp @@ -41,6 +41,7 @@ enum { SET_MEDIA_TIME_TRANSFORM, SET_PARAMETERS, GET_TIMESTAMP, + SIGNAL, }; class BpAudioTrack : public BpInterface @@ -182,6 +183,12 @@ public: } return status; } + + virtual void signal() { + Parcel data, reply; + data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor()); + remote()->transact(SIGNAL, data, &reply); + } }; IMPLEMENT_META_INTERFACE(AudioTrack, "android.media.IAudioTrack"); @@ -269,6 +276,11 @@ status_t BnAudioTrack::onTransact( } return NO_ERROR; } break; + case SIGNAL: { + CHECK_INTERFACE(IAudioTrack, data, reply); + signal(); + return NO_ERROR; + } break; default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index b41d480..2aeb263 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -412,6 +412,7 @@ private: int target); virtual status_t setParameters(const String8& keyValuePairs); virtual status_t getTimestamp(AudioTimestamp& timestamp); + virtual void signal(); // signal playback thread for a change in control block virtual status_t onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags); diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp index 6e0354d..a8a5169 100644 --- a/services/audioflinger/Effects.cpp +++ b/services/audioflinger/Effects.cpp @@ -972,13 +972,20 @@ status_t AudioFlinger::EffectHandle::enable() } mEnabled = false; } else { - if (thread != 0 && !mEffect->isOffloadable()) { - if ((thread->type() == ThreadBase::OFFLOAD)) { + if (thread != 0) { + if (thread->type() == ThreadBase::OFFLOAD) { PlaybackThread *t = (PlaybackThread *)thread.get(); - t->invalidateTracks(AUDIO_STREAM_MUSIC); + Mutex::Autolock _l(t->mLock); + t->broadcast_l(); } - if (mEffect->sessionId() == AUDIO_SESSION_OUTPUT_MIX) { - thread->mAudioFlinger->onNonOffloadableGlobalEffectEnable(); + if (!mEffect->isOffloadable()) { + if 
(thread->type() == ThreadBase::OFFLOAD) { + PlaybackThread *t = (PlaybackThread *)thread.get(); + t->invalidateTracks(AUDIO_STREAM_MUSIC); + } + if (mEffect->sessionId() == AUDIO_SESSION_OUTPUT_MIX) { + thread->mAudioFlinger->onNonOffloadableGlobalEffectEnable(); + } } } } @@ -1009,6 +1016,11 @@ status_t AudioFlinger::EffectHandle::disable() sp thread = mEffect->thread().promote(); if (thread != 0) { thread->checkSuspendOnEffectEnabled(mEffect, false, mEffect->sessionId()); + if (thread->type() == ThreadBase::OFFLOAD) { + PlaybackThread *t = (PlaybackThread *)thread.get(); + Mutex::Autolock _l(t->mLock); + t->broadcast_l(); + } } return status; diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index f7ad6b1..a2e2511 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -60,6 +60,7 @@ public: int16_t *mainBuffer() const { return mMainBuffer; } int auxEffectId() const { return mAuxEffectId; } virtual status_t getTimestamp(AudioTimestamp& timestamp); + void signal(); // implement FastMixerState::VolumeProvider interface virtual uint32_t getVolumeLR(); diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 2d9d485..187adf3 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2238,12 +2238,21 @@ bool AudioFlinger::PlaybackThread::threadLoop() } // only process effects if we're going to write - if (sleepTime == 0) { + if (sleepTime == 0 && mType != OFFLOAD) { for (size_t i = 0; i < effectChains.size(); i ++) { effectChains[i]->process_l(); } } } + // Process effect chains for offloaded thread even if no audio + // was read from audio track: process only updates effect state + // and thus does have to be synchronized with audio writes but may have + // to be called while waiting for async write callback + if (mType == OFFLOAD) { + for (size_t i = 0; i < effectChains.size(); i ++) { + effectChains[i]->process_l(); + } + } // enable changes in effect chain unlockEffectChains(effectChains); diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index d8d325d..9c6e724 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -288,6 +288,12 @@ status_t AudioFlinger::TrackHandle::getTimestamp(AudioTimestamp& timestamp) return mTrack->getTimestamp(timestamp); } + +void AudioFlinger::TrackHandle::signal() +{ + return mTrack->signal(); +} + status_t AudioFlinger::TrackHandle::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { @@ -926,6 +932,16 @@ void AudioFlinger::PlaybackThread::Track::invalidate() mIsInvalid = true; } +void AudioFlinger::PlaybackThread::Track::signal() +{ + sp thread = mThread.promote(); + if (thread != 0) { + PlaybackThread *t = (PlaybackThread *)thread.get(); + Mutex::Autolock _l(t->mLock); + t->broadcast_l(); + } +} + // ---------------------------------------------------------------------------- sp -- cgit v1.1 From 9fc79c6fccc41255bb4f3538e2a21b01db8dc2dc Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Mon, 30 Sep 2013 12:07:57 -0700 Subject: Camera: Set default thumbnail size with matched aspect ratio Make default thumbnail size match the default still capture size aspect ratio. 
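The selection rule is simple enough to show as a standalone sketch; the real change adds Parameters::getMaxSizeForRatio() (see the diff below), which additionally falls back to the first advertised size when nothing matches. Names here are illustrative, and the 0.001 tolerance mirrors the ASPECT_RATIO_TOLERANCE constant introduced by the patch.

    #include <cmath>
    #include <cstddef>
    #include <cstdint>

    struct ThumbSize { int32_t width; int32_t height; };

    // Scan the flattened {w0, h0, w1, h1, ...} list of supported JPEG
    // thumbnail sizes and keep the widest entry whose aspect ratio is within
    // a small tolerance of the still-capture aspect ratio.
    static ThumbSize pickThumbnail(float stillRatio, const int32_t *sizes,
                                   size_t count, float tolerance = 0.001f) {
        ThumbSize best = {0, 0};
        for (size_t i = 0; i + 1 < count; i += 2) {
            if (sizes[i] <= 0 || sizes[i + 1] <= 0) continue;
            float ratio = static_cast<float>(sizes[i]) / sizes[i + 1];
            if (std::fabs(ratio - stillRatio) < tolerance && best.width < sizes[i]) {
                best.width = sizes[i];
                best.height = sizes[i + 1];
            }
        }
        return best;  // {0, 0} means no match; the patch then falls back to sizes[0..1]
    }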
Bug: 10885012 Change-Id: If46da9508d3b71992f0e14a35c600b7e8d347f4e --- .../libcameraservice/api1/client2/Parameters.cpp | 40 ++++++++++++++++++++-- .../libcameraservice/api1/client2/Parameters.h | 4 +++ 2 files changed, 41 insertions(+), 3 deletions(-) diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp index a6c1083..8e197a9 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.cpp +++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp @@ -250,9 +250,17 @@ status_t Parameters::initialize(const CameraMetadata *info) { staticInfo(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 4); if (!availableJpegThumbnailSizes.count) return NO_INIT; - // TODO: Pick default thumbnail size sensibly - jpegThumbSize[0] = availableJpegThumbnailSizes.data.i32[0]; - jpegThumbSize[1] = availableJpegThumbnailSizes.data.i32[1]; + // Pick the largest thumbnail size that matches still image aspect ratio. + ALOG_ASSERT(pictureWidth > 0 && pictureHeight > 0, + "Invalid picture size, %d x %d", pictureWidth, pictureHeight); + float picAspectRatio = static_cast(pictureWidth) / pictureHeight; + Size thumbnailSize = + getMaxSizeForRatio( + picAspectRatio, + &availableJpegThumbnailSizes.data.i32[0], + availableJpegThumbnailSizes.count); + jpegThumbSize[0] = thumbnailSize.width; + jpegThumbSize[1] = thumbnailSize.height; params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, jpegThumbSize[0]); @@ -2525,6 +2533,32 @@ status_t Parameters::getFilteredPreviewSizes(Size limit, Vector *sizes) { return OK; } +Parameters::Size Parameters::getMaxSizeForRatio( + float ratio, const int32_t* sizeArray, size_t count) { + ALOG_ASSERT(sizeArray != NULL, "size array shouldn't be NULL"); + ALOG_ASSERT(count >= 2 && count % 2 == 0, "count must be a positive even number"); + + Size maxSize = {0, 0}; + for (size_t i = 0; i < count; i += 2) { + if (sizeArray[i] > 0 && sizeArray[i+1] > 0) { + float curRatio = static_cast(sizeArray[i]) / sizeArray[i+1]; + if (fabs(curRatio - ratio) < ASPECT_RATIO_TOLERANCE && maxSize.width < sizeArray[i]) { + maxSize.width = sizeArray[i]; + maxSize.height = sizeArray[i+1]; + } + } + } + + if (maxSize.width == 0 || maxSize.height == 0) { + maxSize.width = sizeArray[0]; + maxSize.height = sizeArray[1]; + ALOGW("Unable to find the size to match the given aspect ratio %f." + "Fall back to %d x %d", ratio, maxSize.width, maxSize.height); + } + + return maxSize; +} + Parameters::CropRegion Parameters::calculateCropRegion( Parameters::CropRegion::Outputs outputs) const { diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h index 0505b0e..2e78c73 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.h +++ b/services/camera/libcameraservice/api1/client2/Parameters.h @@ -168,6 +168,8 @@ struct Parameters { // Max preview size allowed static const unsigned int MAX_PREVIEW_WIDTH = 1920; static const unsigned int MAX_PREVIEW_HEIGHT = 1080; + // Aspect ratio tolerance + static const float ASPECT_RATIO_TOLERANCE = 0.001; // Full static camera info, object owned by someone else, such as // Camera2Device. @@ -331,6 +333,8 @@ private: Vector availablePreviewSizes; // Get size list (that are no larger than limit) from static metadata. status_t getFilteredPreviewSizes(Size limit, Vector *sizes); + // Get max size (from the size array) that matches the given aspect ratio. 
+ Size getMaxSizeForRatio(float ratio, const int32_t* sizeArray, size_t count); }; // This class encapsulates the Parameters class so that it can only be accessed -- cgit v1.1 From c16c203047ca0f0d76573ead2c42764a78baf521 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Mon, 30 Sep 2013 13:18:55 -0700 Subject: MediaCodecInfo: report supported color formats for non-native-window mode Revert the change in behavior when checking for adaptive playback was introduced. Change-Id: I59dc2450a4299b912015f2e4c9ec018a19a20b84 Signed-off-by: Lajos Molnar Bug: 10921537 --- media/libstagefright/OMXCodec.cpp | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 7b37365..2c95ab4 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -4585,12 +4585,6 @@ status_t QueryCodec( caps->mFlags = 0; caps->mComponentName = componentName; - if (!isEncoder && !strncmp(mime, "video/", 6) && - omx->storeMetaDataInBuffers( - node, 1 /* port index */, OMX_TRUE) == OK) { - caps->mFlags |= CodecCapabilities::kFlagSupportsAdaptivePlayback; - } - OMX_VIDEO_PARAM_PROFILELEVELTYPE param; InitOMXParams(¶m); @@ -4626,6 +4620,12 @@ status_t QueryCodec( caps->mColorFormats.push(portFormat.eColorFormat); } + if (!isEncoder && !strncmp(mime, "video/", 6) && + omx->storeMetaDataInBuffers( + node, 1 /* port index */, OMX_TRUE) == OK) { + caps->mFlags |= CodecCapabilities::kFlagSupportsAdaptivePlayback; + } + CHECK_EQ(omx->freeNode(node), (status_t)OK); return OK; -- cgit v1.1 From 6218fdc2bef7b9c912bc4d132c12ee43b7b2dd37 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Wed, 25 Sep 2013 08:09:41 -0700 Subject: MediaTimeProvider support fixes Add MEDIA_SKIPPED event when resuming at a different time than seeked to. Send MEDIA_STARTED/PAUSED events only when playing (vs. when doing seek previews) Change-Id: I243ebf054303755ea8863229c3211694f2c204a7 Signed-off-by: Lajos Molnar Bug: 10954008 --- include/media/mediaplayer.h | 1 + media/libstagefright/AwesomePlayer.cpp | 20 +++++++++++++++++--- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h index 2177c4c..4c05fc3 100644 --- a/include/media/mediaplayer.h +++ b/include/media/mediaplayer.h @@ -45,6 +45,7 @@ enum media_event_type { MEDIA_STARTED = 6, MEDIA_PAUSED = 7, MEDIA_STOPPED = 8, + MEDIA_SKIPPED = 9, MEDIA_TIMED_TEXT = 99, MEDIA_ERROR = 100, MEDIA_INFO = 200, diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 3b516af..e7cfc78 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -1413,8 +1413,10 @@ status_t AwesomePlayer::seekTo_l(int64_t timeUs) { mSeekTimeUs = timeUs; modifyFlags((AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS), CLEAR); - notifyListener_l(MEDIA_PAUSED); - mMediaRenderingStartGeneration = ++mStartGeneration; + if (mFlags & PLAYING) { + notifyListener_l(MEDIA_PAUSED); + mMediaRenderingStartGeneration = ++mStartGeneration; + } seekAudioIfNecessary_l(); @@ -1659,6 +1661,16 @@ void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) { return; } + // If we paused, then seeked, then resumed, it is possible that we have + // signaled SEEK_COMPLETE at a copmletely different media time than where + // we are now resuming. Signal new position to media time provider. 
+ // Cannot signal another SEEK_COMPLETE, as existing clients may not expect + // multiple SEEK_COMPLETE responses to a single seek() request. + if (mSeekNotificationSent && abs(mSeekTimeUs - videoTimeUs) > 10000) { + // notify if we are resuming more than 10ms away from desired seek time + notifyListener_l(MEDIA_SKIPPED); + } + if (mAudioPlayer != NULL) { ALOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6); @@ -1930,7 +1942,9 @@ void AwesomePlayer::onVideoEvent() { notifyListener_l(MEDIA_INFO, MEDIA_INFO_RENDERING_START); } - notifyIfMediaStarted_l(); + if (mFlags & PLAYING) { + notifyIfMediaStarted_l(); + } } mVideoBuffer->release(); -- cgit v1.1 From 893068ad0ca0cce8428f5a358c86b81139399c07 Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Mon, 30 Sep 2013 19:31:39 -0700 Subject: Camera: drop stale callback buffer Sometimes, when preview size is changed and preview callback is enabled, stale callback buffer from previous size could be delivered to callback processor because stop preview doesn't guarantee consumer side buffers are properly cleaned up. We need drop these buffers to avoid sending wrong data to app side. Bug: 11009183 Change-Id: If9281e8c02481f883872ce58dfd8660a06d56a47 --- .../camera/libcameraservice/api1/client2/CallbackProcessor.cpp | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp index 9d8c4a1..d2ac79c 100644 --- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp @@ -312,6 +312,16 @@ status_t CallbackProcessor::processNewCallback(sp &client) { return OK; } + if (imgBuffer.width != static_cast(l.mParameters.previewWidth) || + imgBuffer.height != static_cast(l.mParameters.previewHeight)) { + ALOGW("%s: The preview size has changed to %d x %d from %d x %d, this buffer is" + " no longer valid, dropping",__FUNCTION__, + l.mParameters.previewWidth, l.mParameters.previewHeight, + imgBuffer.width, imgBuffer.height); + mCallbackConsumer->unlockBuffer(imgBuffer); + return OK; + } + previewFormat = l.mParameters.previewFormat; useFlexibleYuv = l.mParameters.fastInfo.useFlexibleYuv && (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP || -- cgit v1.1 From f05e50eb06d3f70e50fa7f44c1fd32128033b49d Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Tue, 1 Oct 2013 11:05:33 -0700 Subject: Camera: only return static metadata for HAL3 device This effectively disables the camera2 APIs for HAL2.x or lower HAL device. Bug: 10360518 Change-Id: I0f374ac533468c6181f5672c75088b7144b9c53d --- services/camera/libcameraservice/CameraService.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 5e84aaf..34a5b15 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -255,6 +255,12 @@ status_t CameraService::getCameraCharacteristics(int cameraId, return BAD_VALUE; } + if (getDeviceVersion(cameraId, &facing) <= CAMERA_DEVICE_API_VERSION_2_1) { + // Disable HAL2.x support for camera2 API for now. 
+ ALOGW("%s: HAL2.x doesn't support getCameraCharacteristics for now", __FUNCTION__); + return BAD_VALUE; + } + struct camera_info info; status_t ret = mModule->get_camera_info(cameraId, &info); *cameraInfo = info.static_camera_characteristics; -- cgit v1.1 From a504d2902445616f7c508e0c0dbb07e93316234d Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Mon, 30 Sep 2013 13:18:55 -0700 Subject: MediaCodecInfo: report supported color formats for non-native-window mode Revert the change in behavior when checking for adaptive playback was introduced. Change-Id: I59dc2450a4299b912015f2e4c9ec018a19a20b84 Signed-off-by: Lajos Molnar Bug: 10921537 --- media/libstagefright/OMXCodec.cpp | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 7b37365..2c95ab4 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -4585,12 +4585,6 @@ status_t QueryCodec( caps->mFlags = 0; caps->mComponentName = componentName; - if (!isEncoder && !strncmp(mime, "video/", 6) && - omx->storeMetaDataInBuffers( - node, 1 /* port index */, OMX_TRUE) == OK) { - caps->mFlags |= CodecCapabilities::kFlagSupportsAdaptivePlayback; - } - OMX_VIDEO_PARAM_PROFILELEVELTYPE param; InitOMXParams(¶m); @@ -4626,6 +4620,12 @@ status_t QueryCodec( caps->mColorFormats.push(portFormat.eColorFormat); } + if (!isEncoder && !strncmp(mime, "video/", 6) && + omx->storeMetaDataInBuffers( + node, 1 /* port index */, OMX_TRUE) == OK) { + caps->mFlags |= CodecCapabilities::kFlagSupportsAdaptivePlayback; + } + CHECK_EQ(omx->freeNode(node), (status_t)OK); return OK; -- cgit v1.1 From 6ea551fa13b69e5ce359a7dba7485d857a005304 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Wed, 2 Oct 2013 13:06:06 -0700 Subject: Remove obsolete miracast sink code and friends. 
Change-Id: I8bbb22fb0cfe2d73881d9f05bf8112ae86d8040b related-to-bug: 11047222 --- media/libstagefright/wifi-display/Android.mk | 70 -- .../libstagefright/wifi-display/MediaReceiver.cpp | 328 ------ media/libstagefright/wifi-display/MediaReceiver.h | 111 -- media/libstagefright/wifi-display/SNTPClient.cpp | 174 --- media/libstagefright/wifi-display/SNTPClient.h | 62 -- media/libstagefright/wifi-display/TimeSyncer.cpp | 337 ------ media/libstagefright/wifi-display/TimeSyncer.h | 109 -- media/libstagefright/wifi-display/nettest.cpp | 400 ------- .../wifi-display/rtp/RTPAssembler.cpp | 328 ------ .../libstagefright/wifi-display/rtp/RTPAssembler.h | 92 -- .../wifi-display/rtp/RTPReceiver.cpp | 1152 -------------------- .../libstagefright/wifi-display/rtp/RTPReceiver.h | 125 --- media/libstagefright/wifi-display/rtptest.cpp | 565 ---------- .../wifi-display/sink/DirectRenderer.cpp | 653 ----------- .../wifi-display/sink/DirectRenderer.h | 87 -- .../wifi-display/sink/WifiDisplaySink.cpp | 917 ---------------- .../wifi-display/sink/WifiDisplaySink.h | 195 ---- .../wifi-display/source/WifiDisplaySource.cpp | 16 +- .../wifi-display/source/WifiDisplaySource.h | 3 - media/libstagefright/wifi-display/udptest.cpp | 116 -- 20 files changed, 1 insertion(+), 5839 deletions(-) delete mode 100644 media/libstagefright/wifi-display/MediaReceiver.cpp delete mode 100644 media/libstagefright/wifi-display/MediaReceiver.h delete mode 100644 media/libstagefright/wifi-display/SNTPClient.cpp delete mode 100644 media/libstagefright/wifi-display/SNTPClient.h delete mode 100644 media/libstagefright/wifi-display/TimeSyncer.cpp delete mode 100644 media/libstagefright/wifi-display/TimeSyncer.h delete mode 100644 media/libstagefright/wifi-display/nettest.cpp delete mode 100644 media/libstagefright/wifi-display/rtp/RTPAssembler.cpp delete mode 100644 media/libstagefright/wifi-display/rtp/RTPAssembler.h delete mode 100644 media/libstagefright/wifi-display/rtp/RTPReceiver.cpp delete mode 100644 media/libstagefright/wifi-display/rtp/RTPReceiver.h delete mode 100644 media/libstagefright/wifi-display/rtptest.cpp delete mode 100644 media/libstagefright/wifi-display/sink/DirectRenderer.cpp delete mode 100644 media/libstagefright/wifi-display/sink/DirectRenderer.h delete mode 100644 media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp delete mode 100644 media/libstagefright/wifi-display/sink/WifiDisplaySink.h delete mode 100644 media/libstagefright/wifi-display/udptest.cpp diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index c7d107e..3abe8a8 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -3,16 +3,9 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ - MediaReceiver.cpp \ MediaSender.cpp \ Parameters.cpp \ - rtp/RTPAssembler.cpp \ - rtp/RTPReceiver.cpp \ rtp/RTPSender.cpp \ - sink/DirectRenderer.cpp \ - sink/WifiDisplaySink.cpp \ - SNTPClient.cpp \ - TimeSyncer.cpp \ source/Converter.cpp \ source/MediaPuller.cpp \ source/PlaybackSession.cpp \ @@ -63,66 +56,3 @@ LOCAL_SHARED_LIBRARIES:= \ LOCAL_MODULE:= wfd include $(BUILD_EXECUTABLE) - -################################################################################ - -include $(CLEAR_VARS) - -LOCAL_SRC_FILES:= \ - udptest.cpp \ - -LOCAL_SHARED_LIBRARIES:= \ - libbinder \ - libgui \ - libmedia \ - libstagefright \ - libstagefright_foundation \ - libstagefright_wfd \ - libutils \ - liblog \ - -LOCAL_MODULE:= udptest - -include 
$(BUILD_EXECUTABLE) - -################################################################################ - -include $(CLEAR_VARS) - -LOCAL_SRC_FILES:= \ - rtptest.cpp \ - -LOCAL_SHARED_LIBRARIES:= \ - libbinder \ - libgui \ - libmedia \ - libstagefright \ - libstagefright_foundation \ - libstagefright_wfd \ - libutils \ - liblog \ - -LOCAL_MODULE:= rtptest - -include $(BUILD_EXECUTABLE) - -################################################################################ - -include $(CLEAR_VARS) - -LOCAL_SRC_FILES:= \ - nettest.cpp \ - -LOCAL_SHARED_LIBRARIES:= \ - libbinder \ - libgui \ - libmedia \ - libstagefright \ - libstagefright_foundation \ - libstagefright_wfd \ - libutils \ - liblog \ - -LOCAL_MODULE:= nettest - -include $(BUILD_EXECUTABLE) diff --git a/media/libstagefright/wifi-display/MediaReceiver.cpp b/media/libstagefright/wifi-display/MediaReceiver.cpp deleted file mode 100644 index 5524235..0000000 --- a/media/libstagefright/wifi-display/MediaReceiver.cpp +++ /dev/null @@ -1,328 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "MediaReceiver" -#include - -#include "MediaReceiver.h" - -#include "AnotherPacketSource.h" -#include "rtp/RTPReceiver.h" - -#include -#include -#include -#include -#include -#include - -namespace android { - -MediaReceiver::MediaReceiver( - const sp &netSession, - const sp ¬ify) - : mNetSession(netSession), - mNotify(notify), - mMode(MODE_UNDEFINED), - mGeneration(0), - mInitStatus(OK), - mInitDoneCount(0) { -} - -MediaReceiver::~MediaReceiver() { -} - -ssize_t MediaReceiver::addTrack( - RTPReceiver::TransportMode rtpMode, - RTPReceiver::TransportMode rtcpMode, - int32_t *localRTPPort) { - if (mMode != MODE_UNDEFINED) { - return INVALID_OPERATION; - } - - size_t trackIndex = mTrackInfos.size(); - - TrackInfo info; - - sp notify = new AMessage(kWhatReceiverNotify, id()); - notify->setInt32("generation", mGeneration); - notify->setSize("trackIndex", trackIndex); - - info.mReceiver = new RTPReceiver(mNetSession, notify); - looper()->registerHandler(info.mReceiver); - - info.mReceiver->registerPacketType( - 33, RTPReceiver::PACKETIZATION_TRANSPORT_STREAM); - - info.mReceiver->registerPacketType( - 96, RTPReceiver::PACKETIZATION_AAC); - - info.mReceiver->registerPacketType( - 97, RTPReceiver::PACKETIZATION_H264); - - status_t err = info.mReceiver->initAsync( - rtpMode, - rtcpMode, - localRTPPort); - - if (err != OK) { - looper()->unregisterHandler(info.mReceiver->id()); - info.mReceiver.clear(); - - return err; - } - - mTrackInfos.push_back(info); - - return trackIndex; -} - -status_t MediaReceiver::connectTrack( - size_t trackIndex, - const char *remoteHost, - int32_t remoteRTPPort, - int32_t remoteRTCPPort) { - if (trackIndex >= mTrackInfos.size()) { - return -ERANGE; - } - - TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); - return info->mReceiver->connect(remoteHost, remoteRTPPort, remoteRTCPPort); -} - -status_t 
MediaReceiver::initAsync(Mode mode) { - if ((mode == MODE_TRANSPORT_STREAM || mode == MODE_TRANSPORT_STREAM_RAW) - && mTrackInfos.size() > 1) { - return INVALID_OPERATION; - } - - sp msg = new AMessage(kWhatInit, id()); - msg->setInt32("mode", mode); - msg->post(); - - return OK; -} - -void MediaReceiver::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatInit: - { - int32_t mode; - CHECK(msg->findInt32("mode", &mode)); - - CHECK_EQ(mMode, MODE_UNDEFINED); - mMode = (Mode)mode; - - if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) { - notifyInitDone(mInitStatus); - } - - mTSParser = new ATSParser( - ATSParser::ALIGNED_VIDEO_DATA - | ATSParser::TS_TIMESTAMPS_ARE_ABSOLUTE); - - mFormatKnownMask = 0; - break; - } - - case kWhatReceiverNotify: - { - int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - if (generation != mGeneration) { - break; - } - - onReceiverNotify(msg); - break; - } - - default: - TRESPASS(); - } -} - -void MediaReceiver::onReceiverNotify(const sp &msg) { - int32_t what; - CHECK(msg->findInt32("what", &what)); - - switch (what) { - case RTPReceiver::kWhatInitDone: - { - ++mInitDoneCount; - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - if (err != OK) { - mInitStatus = err; - ++mGeneration; - } - - if (mMode != MODE_UNDEFINED) { - if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) { - notifyInitDone(mInitStatus); - } - } - break; - } - - case RTPReceiver::kWhatError: - { - int32_t err; - CHECK(msg->findInt32("err", &err)); - - notifyError(err); - break; - } - - case RTPReceiver::kWhatAccessUnit: - { - size_t trackIndex; - CHECK(msg->findSize("trackIndex", &trackIndex)); - - sp accessUnit; - CHECK(msg->findBuffer("accessUnit", &accessUnit)); - - int32_t followsDiscontinuity; - if (!msg->findInt32( - "followsDiscontinuity", &followsDiscontinuity)) { - followsDiscontinuity = 0; - } - - if (mMode == MODE_TRANSPORT_STREAM) { - if (followsDiscontinuity) { - mTSParser->signalDiscontinuity( - ATSParser::DISCONTINUITY_TIME, NULL /* extra */); - } - - for (size_t offset = 0; - offset < accessUnit->size(); offset += 188) { - status_t err = mTSParser->feedTSPacket( - accessUnit->data() + offset, 188); - - if (err != OK) { - notifyError(err); - break; - } - } - - drainPackets(0 /* trackIndex */, ATSParser::VIDEO); - drainPackets(1 /* trackIndex */, ATSParser::AUDIO); - } else { - postAccessUnit(trackIndex, accessUnit, NULL); - } - break; - } - - case RTPReceiver::kWhatPacketLost: - { - notifyPacketLost(); - break; - } - - default: - TRESPASS(); - } -} - -void MediaReceiver::drainPackets( - size_t trackIndex, ATSParser::SourceType type) { - sp source = - static_cast( - mTSParser->getSource(type).get()); - - if (source == NULL) { - return; - } - - sp format; - if (!(mFormatKnownMask & (1ul << trackIndex))) { - sp meta = source->getFormat(); - CHECK(meta != NULL); - - CHECK_EQ((status_t)OK, convertMetaDataToMessage(meta, &format)); - - mFormatKnownMask |= 1ul << trackIndex; - } - - status_t finalResult; - while (source->hasBufferAvailable(&finalResult)) { - sp accessUnit; - status_t err = source->dequeueAccessUnit(&accessUnit); - if (err == OK) { - postAccessUnit(trackIndex, accessUnit, format); - format.clear(); - } else if (err != INFO_DISCONTINUITY) { - notifyError(err); - } - } - - if (finalResult != OK) { - notifyError(finalResult); - } -} - -void MediaReceiver::notifyInitDone(status_t err) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatInitDone); - notify->setInt32("err", err); - 
notify->post(); -} - -void MediaReceiver::notifyError(status_t err) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatError); - notify->setInt32("err", err); - notify->post(); -} - -void MediaReceiver::notifyPacketLost() { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatPacketLost); - notify->post(); -} - -void MediaReceiver::postAccessUnit( - size_t trackIndex, - const sp &accessUnit, - const sp &format) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatAccessUnit); - notify->setSize("trackIndex", trackIndex); - notify->setBuffer("accessUnit", accessUnit); - - if (format != NULL) { - notify->setMessage("format", format); - } - - notify->post(); -} - -status_t MediaReceiver::informSender( - size_t trackIndex, const sp ¶ms) { - if (trackIndex >= mTrackInfos.size()) { - return -ERANGE; - } - - TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); - return info->mReceiver->informSender(params); -} - -} // namespace android - - diff --git a/media/libstagefright/wifi-display/MediaReceiver.h b/media/libstagefright/wifi-display/MediaReceiver.h deleted file mode 100644 index afbb407..0000000 --- a/media/libstagefright/wifi-display/MediaReceiver.h +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include - -#include "ATSParser.h" -#include "rtp/RTPReceiver.h" - -namespace android { - -struct ABuffer; -struct ANetworkSession; -struct AMessage; -struct ATSParser; - -// This class facilitates receiving of media data for one or more tracks -// over RTP. Either a 1:1 track to RTP channel mapping is used or a single -// RTP channel provides the data for a transport stream that is consequently -// demuxed and its track's data provided to the observer. 
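As a rough illustration of the call sequence this header implies (the sketch below is not part of the deleted sources), an observer handler, much like the TestHandler in nettest.cpp, could drive the API declared here as follows. The names receiverNotify, kWhatMediaReceiverNotify, remoteHost, remoteRTPPort and remoteRTCPPort are placeholders, and the snippet assumes the usual AOSP foundation setup (an ALooper, an already started ANetworkSession named netSession, and ADebug's CHECK macros).

// Inside an AHandler subclass that owns netSession and runs on a looper;
// kWhatMediaReceiverNotify is an illustrative, handler-local constant.
sp<AMessage> receiverNotify = new AMessage(kWhatMediaReceiverNotify, id());

sp<MediaReceiver> receiver = new MediaReceiver(netSession, receiverNotify);
looper()->registerHandler(receiver);

int32_t localRTPPort;
ssize_t trackIndex = receiver->addTrack(
        RTPReceiver::TRANSPORT_UDP /* rtpMode */,
        RTPReceiver::TRANSPORT_UDP /* rtcpMode */,
        &localRTPPort);
CHECK_GE(trackIndex, 0);

CHECK_EQ((status_t)OK,
         receiver->connectTrack(
             trackIndex, remoteHost, remoteRTPPort, remoteRTCPPort));

// A single track carrying a transport stream that MediaReceiver demuxes.
CHECK_EQ((status_t)OK,
         receiver->initAsync(MediaReceiver::MODE_TRANSPORT_STREAM));

// The handler then waits for kWhatInitDone and consumes kWhatAccessUnit
// notifications posted to receiverNotify.

Note that initAsync() rejects the transport-stream modes once more than one track has been added, so MODE_ELEMENTARY_STREAMS is the mode to use when each track has its own RTP channel.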
-struct MediaReceiver : public AHandler { - enum { - kWhatInitDone, - kWhatError, - kWhatAccessUnit, - kWhatPacketLost, - }; - - MediaReceiver( - const sp &netSession, - const sp ¬ify); - - ssize_t addTrack( - RTPReceiver::TransportMode rtpMode, - RTPReceiver::TransportMode rtcpMode, - int32_t *localRTPPort); - - status_t connectTrack( - size_t trackIndex, - const char *remoteHost, - int32_t remoteRTPPort, - int32_t remoteRTCPPort); - - enum Mode { - MODE_UNDEFINED, - MODE_TRANSPORT_STREAM, - MODE_TRANSPORT_STREAM_RAW, - MODE_ELEMENTARY_STREAMS, - }; - status_t initAsync(Mode mode); - - status_t informSender(size_t trackIndex, const sp ¶ms); - -protected: - virtual void onMessageReceived(const sp &msg); - virtual ~MediaReceiver(); - -private: - enum { - kWhatInit, - kWhatReceiverNotify, - }; - - struct TrackInfo { - sp mReceiver; - }; - - sp mNetSession; - sp mNotify; - - Mode mMode; - int32_t mGeneration; - - Vector mTrackInfos; - - status_t mInitStatus; - size_t mInitDoneCount; - - sp mTSParser; - uint32_t mFormatKnownMask; - - void onReceiverNotify(const sp &msg); - - void drainPackets(size_t trackIndex, ATSParser::SourceType type); - - void notifyInitDone(status_t err); - void notifyError(status_t err); - void notifyPacketLost(); - - void postAccessUnit( - size_t trackIndex, - const sp &accessUnit, - const sp &format); - - DISALLOW_EVIL_CONSTRUCTORS(MediaReceiver); -}; - -} // namespace android - diff --git a/media/libstagefright/wifi-display/SNTPClient.cpp b/media/libstagefright/wifi-display/SNTPClient.cpp deleted file mode 100644 index 5c0af6a..0000000 --- a/media/libstagefright/wifi-display/SNTPClient.cpp +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#include "SNTPClient.h" - -#include -#include - -#include -#include -#include -#include -#include - -namespace android { - -SNTPClient::SNTPClient() { -} - -status_t SNTPClient::requestTime(const char *host) { - struct hostent *ent; - int64_t requestTimeNTP, requestTimeUs; - ssize_t n; - int64_t responseTimeUs, responseTimeNTP; - int64_t originateTimeNTP, receiveTimeNTP, transmitTimeNTP; - int64_t roundTripTimeNTP, clockOffsetNTP; - - status_t err = UNKNOWN_ERROR; - - int s = socket(AF_INET, SOCK_DGRAM, 0); - - if (s < 0) { - err = -errno; - - goto bail; - } - - ent = gethostbyname(host); - - if (ent == NULL) { - err = -ENOENT; - goto bail2; - } - - struct sockaddr_in hostAddr; - memset(hostAddr.sin_zero, 0, sizeof(hostAddr.sin_zero)); - hostAddr.sin_family = AF_INET; - hostAddr.sin_port = htons(kNTPPort); - hostAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr; - - uint8_t packet[kNTPPacketSize]; - memset(packet, 0, sizeof(packet)); - - packet[0] = kNTPModeClient | (kNTPVersion << 3); - - requestTimeNTP = getNowNTP(); - requestTimeUs = ALooper::GetNowUs(); - writeTimeStamp(&packet[kNTPTransmitTimeOffset], requestTimeNTP); - - n = sendto( - s, packet, sizeof(packet), 0, - (const struct sockaddr *)&hostAddr, sizeof(hostAddr)); - - if (n < 0) { - err = -errno; - goto bail2; - } - - memset(packet, 0, sizeof(packet)); - - do { - n = recv(s, packet, sizeof(packet), 0); - } while (n < 0 && errno == EINTR); - - if (n < 0) { - err = -errno; - goto bail2; - } - - responseTimeUs = ALooper::GetNowUs(); - - responseTimeNTP = requestTimeNTP + makeNTP(responseTimeUs - requestTimeUs); - - originateTimeNTP = readTimeStamp(&packet[kNTPOriginateTimeOffset]); - receiveTimeNTP = readTimeStamp(&packet[kNTPReceiveTimeOffset]); - transmitTimeNTP = readTimeStamp(&packet[kNTPTransmitTimeOffset]); - - roundTripTimeNTP = - makeNTP(responseTimeUs - requestTimeUs) - - (transmitTimeNTP - receiveTimeNTP); - - clockOffsetNTP = - ((receiveTimeNTP - originateTimeNTP) - + (transmitTimeNTP - responseTimeNTP)) / 2; - - mTimeReferenceNTP = responseTimeNTP + clockOffsetNTP; - mTimeReferenceUs = responseTimeUs; - mRoundTripTimeNTP = roundTripTimeNTP; - - err = OK; - -bail2: - close(s); - s = -1; - -bail: - return err; -} - -int64_t SNTPClient::adjustTimeUs(int64_t timeUs) const { - uint64_t nowNTP = - mTimeReferenceNTP + makeNTP(timeUs - mTimeReferenceUs); - - int64_t nowUs = - (nowNTP >> 32) * 1000000ll - + ((nowNTP & 0xffffffff) * 1000000ll) / (1ll << 32); - - nowUs -= ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll; - - return nowUs; -} - -// static -void SNTPClient::writeTimeStamp(uint8_t *dst, uint64_t ntpTime) { - *dst++ = (ntpTime >> 56) & 0xff; - *dst++ = (ntpTime >> 48) & 0xff; - *dst++ = (ntpTime >> 40) & 0xff; - *dst++ = (ntpTime >> 32) & 0xff; - *dst++ = (ntpTime >> 24) & 0xff; - *dst++ = (ntpTime >> 16) & 0xff; - *dst++ = (ntpTime >> 8) & 0xff; - *dst++ = ntpTime & 0xff; -} - -// static -uint64_t SNTPClient::readTimeStamp(const uint8_t *dst) { - return U64_AT(dst); -} - -// static -uint64_t SNTPClient::getNowNTP() { - struct timeval tv; - gettimeofday(&tv, NULL /* time zone */); - - uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec; - - nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll; - - return makeNTP(nowUs); -} - -// static -uint64_t SNTPClient::makeNTP(uint64_t deltaUs) { - uint64_t hi = deltaUs / 1000000ll; - uint64_t lo = ((1ll << 32) * (deltaUs % 1000000ll)) / 1000000ll; - - return (hi << 32) | lo; -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/SNTPClient.h 
b/media/libstagefright/wifi-display/SNTPClient.h deleted file mode 100644 index 967d1fc..0000000 --- a/media/libstagefright/wifi-display/SNTPClient.h +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef SNTP_CLIENT_H_ - -#define SNTP_CLIENT_H_ - -#include -#include - -namespace android { - -// Implementation of the SNTP (Simple Network Time Protocol) -struct SNTPClient { - SNTPClient(); - - status_t requestTime(const char *host); - - // given a time obtained from ALooper::GetNowUs() - // return the number of us elapsed since Jan 1 1970 00:00:00 (UTC). - int64_t adjustTimeUs(int64_t timeUs) const; - -private: - enum { - kNTPPort = 123, - kNTPPacketSize = 48, - kNTPModeClient = 3, - kNTPVersion = 3, - kNTPTransmitTimeOffset = 40, - kNTPOriginateTimeOffset = 24, - kNTPReceiveTimeOffset = 32, - }; - - uint64_t mTimeReferenceNTP; - int64_t mTimeReferenceUs; - int64_t mRoundTripTimeNTP; - - static void writeTimeStamp(uint8_t *dst, uint64_t ntpTime); - static uint64_t readTimeStamp(const uint8_t *dst); - - static uint64_t getNowNTP(); - static uint64_t makeNTP(uint64_t deltaUs); - - DISALLOW_EVIL_CONSTRUCTORS(SNTPClient); -}; - -} // namespace android - -#endif // SNTP_CLIENT_H_ diff --git a/media/libstagefright/wifi-display/TimeSyncer.cpp b/media/libstagefright/wifi-display/TimeSyncer.cpp deleted file mode 100644 index 0f4d93a..0000000 --- a/media/libstagefright/wifi-display/TimeSyncer.cpp +++ /dev/null @@ -1,337 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NEBUG 0 -#define LOG_TAG "TimeSyncer" -#include - -#include "TimeSyncer.h" - -#include -#include -#include -#include -#include -#include -#include - -namespace android { - -TimeSyncer::TimeSyncer( - const sp &netSession, const sp ¬ify) - : mNetSession(netSession), - mNotify(notify), - mIsServer(false), - mConnected(false), - mUDPSession(0), - mSeqNo(0), - mTotalTimeUs(0.0), - mPendingT1(0ll), - mTimeoutGeneration(0) { -} - -TimeSyncer::~TimeSyncer() { -} - -void TimeSyncer::startServer(unsigned localPort) { - sp msg = new AMessage(kWhatStartServer, id()); - msg->setInt32("localPort", localPort); - msg->post(); -} - -void TimeSyncer::startClient(const char *remoteHost, unsigned remotePort) { - sp msg = new AMessage(kWhatStartClient, id()); - msg->setString("remoteHost", remoteHost); - msg->setInt32("remotePort", remotePort); - msg->post(); -} - -void TimeSyncer::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatStartClient: - { - AString remoteHost; - CHECK(msg->findString("remoteHost", &remoteHost)); - - int32_t remotePort; - CHECK(msg->findInt32("remotePort", &remotePort)); - - sp notify = new AMessage(kWhatUDPNotify, id()); - - CHECK_EQ((status_t)OK, - mNetSession->createUDPSession( - 0 /* localPort */, - remoteHost.c_str(), - remotePort, - notify, - &mUDPSession)); - - postSendPacket(); - break; - } - - case kWhatStartServer: - { - mIsServer = true; - - int32_t localPort; - CHECK(msg->findInt32("localPort", &localPort)); - - sp notify = new AMessage(kWhatUDPNotify, id()); - - CHECK_EQ((status_t)OK, - mNetSession->createUDPSession( - localPort, notify, &mUDPSession)); - - break; - } - - case kWhatSendPacket: - { - if (mHistory.size() == 0) { - ALOGI("starting batch"); - } - - TimeInfo ti; - memset(&ti, 0, sizeof(ti)); - - ti.mT1 = ALooper::GetNowUs(); - - CHECK_EQ((status_t)OK, - mNetSession->sendRequest( - mUDPSession, &ti, sizeof(ti))); - - mPendingT1 = ti.mT1; - postTimeout(); - break; - } - - case kWhatTimedOut: - { - int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - - if (generation != mTimeoutGeneration) { - break; - } - - ALOGI("timed out, sending another request"); - postSendPacket(); - break; - } - - case kWhatUDPNotify: - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - ALOGE("An error occurred in session %d (%d, '%s/%s').", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - mNetSession->destroySession(sessionID); - - cancelTimeout(); - - notifyError(err); - break; - } - - case ANetworkSession::kWhatDatagram: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - sp packet; - CHECK(msg->findBuffer("data", &packet)); - - int64_t arrivalTimeUs; - CHECK(packet->meta()->findInt64( - "arrivalTimeUs", &arrivalTimeUs)); - - CHECK_EQ(packet->size(), sizeof(TimeInfo)); - - TimeInfo *ti = (TimeInfo *)packet->data(); - - if (mIsServer) { - if (!mConnected) { - AString fromAddr; - CHECK(msg->findString("fromAddr", &fromAddr)); - - int32_t fromPort; - CHECK(msg->findInt32("fromPort", &fromPort)); - - CHECK_EQ((status_t)OK, - mNetSession->connectUDPSession( - mUDPSession, fromAddr.c_str(), fromPort)); - - mConnected = true; - } - - ti->mT2 = arrivalTimeUs; - ti->mT3 = ALooper::GetNowUs(); - - CHECK_EQ((status_t)OK, - 
mNetSession->sendRequest( - mUDPSession, ti, sizeof(*ti))); - } else { - if (ti->mT1 != mPendingT1) { - break; - } - - cancelTimeout(); - mPendingT1 = 0; - - ti->mT4 = arrivalTimeUs; - - // One way delay for a packet to travel from client - // to server or back (assumed to be the same either way). - int64_t delay = - (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2; - - // Offset between the client clock (T1, T4) and the - // server clock (T2, T3) timestamps. - int64_t offset = - (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2; - - mHistory.push_back(*ti); - - ALOGV("delay = %lld us,\toffset %lld us", - delay, - offset); - - if (mHistory.size() < kNumPacketsPerBatch) { - postSendPacket(1000000ll / 30); - } else { - notifyOffset(); - - ALOGI("batch done"); - - mHistory.clear(); - postSendPacket(kBatchDelayUs); - } - } - break; - } - - default: - TRESPASS(); - } - - break; - } - - default: - TRESPASS(); - } -} - -void TimeSyncer::postSendPacket(int64_t delayUs) { - (new AMessage(kWhatSendPacket, id()))->post(delayUs); -} - -void TimeSyncer::postTimeout() { - sp msg = new AMessage(kWhatTimedOut, id()); - msg->setInt32("generation", mTimeoutGeneration); - msg->post(kTimeoutDelayUs); -} - -void TimeSyncer::cancelTimeout() { - ++mTimeoutGeneration; -} - -void TimeSyncer::notifyError(status_t err) { - if (mNotify == NULL) { - looper()->stop(); - return; - } - - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatError); - notify->setInt32("err", err); - notify->post(); -} - -// static -int TimeSyncer::CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2) { - int64_t rt1 = ti1->mT4 - ti1->mT1; - int64_t rt2 = ti2->mT4 - ti2->mT1; - - if (rt1 < rt2) { - return -1; - } else if (rt1 > rt2) { - return 1; - } - - return 0; -} - -void TimeSyncer::notifyOffset() { - mHistory.sort(CompareRountripTime); - - int64_t sum = 0ll; - size_t count = 0; - - // Only consider the third of the information associated with the best - // (smallest) roundtrip times. - for (size_t i = 0; i < mHistory.size() / 3; ++i) { - const TimeInfo *ti = &mHistory[i]; - -#if 0 - // One way delay for a packet to travel from client - // to server or back (assumed to be the same either way). - int64_t delay = - (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2; -#endif - - // Offset between the client clock (T1, T4) and the - // server clock (T2, T3) timestamps. - int64_t offset = - (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2; - - ALOGV("(%d) RT: %lld us, offset: %lld us", - i, ti->mT4 - ti->mT1, offset); - - sum += offset; - ++count; - } - - if (mNotify == NULL) { - ALOGI("avg. offset is %lld", sum / count); - return; - } - - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatTimeOffset); - notify->setInt64("offset", sum / count); - notify->post(); -} - -} // namespace android diff --git a/media/libstagefright/wifi-display/TimeSyncer.h b/media/libstagefright/wifi-display/TimeSyncer.h deleted file mode 100644 index 4e7571f..0000000 --- a/media/libstagefright/wifi-display/TimeSyncer.h +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef TIME_SYNCER_H_ - -#define TIME_SYNCER_H_ - -#include - -namespace android { - -struct ANetworkSession; - -/* - TimeSyncer allows us to synchronize time between a client and a server. - The client sends a UDP packet containing its send-time to the server, - the server sends that packet back to the client amended with information - about when it was received as well as the time the reply was sent back. - Finally the client receives the reply and has now enough information to - compute the clock offset between client and server assuming that packet - exchange is symmetric, i.e. time for a packet client->server and - server->client is roughly equal. - This exchange is repeated a number of times and the average offset computed - over the 30% of packets that had the lowest roundtrip times. - The offset is determined every 10 secs to account for slight differences in - clock frequency. -*/ -struct TimeSyncer : public AHandler { - enum { - kWhatError, - kWhatTimeOffset, - }; - TimeSyncer( - const sp &netSession, - const sp ¬ify); - - void startServer(unsigned localPort); - void startClient(const char *remoteHost, unsigned remotePort); - -protected: - virtual ~TimeSyncer(); - - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatStartServer, - kWhatStartClient, - kWhatUDPNotify, - kWhatSendPacket, - kWhatTimedOut, - }; - - struct TimeInfo { - int64_t mT1; // client timestamp at send - int64_t mT2; // server timestamp at receive - int64_t mT3; // server timestamp at send - int64_t mT4; // client timestamp at receive - }; - - enum { - kNumPacketsPerBatch = 30, - }; - static const int64_t kTimeoutDelayUs = 500000ll; - static const int64_t kBatchDelayUs = 60000000ll; // every minute - - sp mNetSession; - sp mNotify; - - bool mIsServer; - bool mConnected; - int32_t mUDPSession; - uint32_t mSeqNo; - double mTotalTimeUs; - - Vector mHistory; - - int64_t mPendingT1; - int32_t mTimeoutGeneration; - - void postSendPacket(int64_t delayUs = 0ll); - - void postTimeout(); - void cancelTimeout(); - - void notifyError(status_t err); - void notifyOffset(); - - static int CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2); - - DISALLOW_EVIL_CONSTRUCTORS(TimeSyncer); -}; - -} // namespace android - -#endif // TIME_SYNCER_H_ diff --git a/media/libstagefright/wifi-display/nettest.cpp b/media/libstagefright/wifi-display/nettest.cpp deleted file mode 100644 index 73c0d80..0000000 --- a/media/libstagefright/wifi-display/nettest.cpp +++ /dev/null @@ -1,400 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NEBUG 0 -#define LOG_TAG "nettest" -#include - -#include "TimeSyncer.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace android { - -struct TestHandler : public AHandler { - TestHandler(const sp &netSession); - - void listen(int32_t port); - void connect(const char *host, int32_t port); - -protected: - virtual ~TestHandler(); - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kTimeSyncerPort = 8123, - }; - - enum { - kWhatListen, - kWhatConnect, - kWhatTimeSyncerNotify, - kWhatNetNotify, - kWhatSendMore, - kWhatStop, - }; - - sp mNetSession; - sp mTimeSyncer; - - int32_t mServerSessionID; - int32_t mSessionID; - - int64_t mTimeOffsetUs; - bool mTimeOffsetValid; - - int32_t mCounter; - - int64_t mMaxDelayMs; - - void dumpDelay(int32_t counter, int64_t delayMs); - - DISALLOW_EVIL_CONSTRUCTORS(TestHandler); -}; - -TestHandler::TestHandler(const sp &netSession) - : mNetSession(netSession), - mServerSessionID(0), - mSessionID(0), - mTimeOffsetUs(-1ll), - mTimeOffsetValid(false), - mCounter(0), - mMaxDelayMs(-1ll) { -} - -TestHandler::~TestHandler() { -} - -void TestHandler::listen(int32_t port) { - sp msg = new AMessage(kWhatListen, id()); - msg->setInt32("port", port); - msg->post(); -} - -void TestHandler::connect(const char *host, int32_t port) { - sp msg = new AMessage(kWhatConnect, id()); - msg->setString("host", host); - msg->setInt32("port", port); - msg->post(); -} - -void TestHandler::dumpDelay(int32_t counter, int64_t delayMs) { - static const int64_t kMinDelayMs = 0; - static const int64_t kMaxDelayMs = 300; - - const char *kPattern = "########################################"; - size_t kPatternSize = strlen(kPattern); - - int n = (kPatternSize * (delayMs - kMinDelayMs)) - / (kMaxDelayMs - kMinDelayMs); - - if (n < 0) { - n = 0; - } else if ((size_t)n > kPatternSize) { - n = kPatternSize; - } - - if (delayMs > mMaxDelayMs) { - mMaxDelayMs = delayMs; - } - - ALOGI("[%d] (%4lld ms / %4lld ms) %s", - counter, - delayMs, - mMaxDelayMs, - kPattern + kPatternSize - n); -} - -void TestHandler::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatListen: - { - sp notify = new AMessage(kWhatTimeSyncerNotify, id()); - mTimeSyncer = new TimeSyncer(mNetSession, notify); - looper()->registerHandler(mTimeSyncer); - - notify = new AMessage(kWhatNetNotify, id()); - - int32_t port; - CHECK(msg->findInt32("port", &port)); - - struct in_addr ifaceAddr; - ifaceAddr.s_addr = INADDR_ANY; - - CHECK_EQ((status_t)OK, - mNetSession->createTCPDatagramSession( - ifaceAddr, - port, - notify, - &mServerSessionID)); - break; - } - - case kWhatConnect: - { - sp notify = new AMessage(kWhatTimeSyncerNotify, id()); - mTimeSyncer = new TimeSyncer(mNetSession, notify); - looper()->registerHandler(mTimeSyncer); - mTimeSyncer->startServer(kTimeSyncerPort); - - AString host; - CHECK(msg->findString("host", &host)); - - int32_t port; - CHECK(msg->findInt32("port", &port)); - - notify = new AMessage(kWhatNetNotify, id()); - - CHECK_EQ((status_t)OK, - mNetSession->createTCPDatagramSession( - 0 /* localPort */, - host.c_str(), - port, - notify, - &mSessionID)); - break; - } - - case kWhatNetNotify: - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatConnected: - { - ALOGI("kWhatConnected"); - - (new AMessage(kWhatSendMore, id()))->post(); - break; - } - - case ANetworkSession::kWhatClientConnected: - { - 
ALOGI("kWhatClientConnected"); - - CHECK_EQ(mSessionID, 0); - CHECK(msg->findInt32("sessionID", &mSessionID)); - - AString clientIP; - CHECK(msg->findString("client-ip", &clientIP)); - - mTimeSyncer->startClient(clientIP.c_str(), kTimeSyncerPort); - break; - } - - case ANetworkSession::kWhatDatagram: - { - sp packet; - CHECK(msg->findBuffer("data", &packet)); - - CHECK_EQ(packet->size(), 12u); - - int32_t counter = U32_AT(packet->data()); - int64_t timeUs = U64_AT(packet->data() + 4); - - if (mTimeOffsetValid) { - timeUs -= mTimeOffsetUs; - int64_t nowUs = ALooper::GetNowUs(); - int64_t delayMs = (nowUs - timeUs) / 1000ll; - - dumpDelay(counter, delayMs); - } else { - ALOGI("received %d", counter); - } - break; - } - - case ANetworkSession::kWhatError: - { - ALOGE("kWhatError"); - break; - } - - default: - TRESPASS(); - } - break; - } - - case kWhatTimeSyncerNotify: - { - CHECK(msg->findInt64("offset", &mTimeOffsetUs)); - mTimeOffsetValid = true; - break; - } - - case kWhatSendMore: - { - uint8_t buffer[4 + 8]; - buffer[0] = mCounter >> 24; - buffer[1] = (mCounter >> 16) & 0xff; - buffer[2] = (mCounter >> 8) & 0xff; - buffer[3] = mCounter & 0xff; - - int64_t nowUs = ALooper::GetNowUs(); - - buffer[4] = nowUs >> 56; - buffer[5] = (nowUs >> 48) & 0xff; - buffer[6] = (nowUs >> 40) & 0xff; - buffer[7] = (nowUs >> 32) & 0xff; - buffer[8] = (nowUs >> 24) & 0xff; - buffer[9] = (nowUs >> 16) & 0xff; - buffer[10] = (nowUs >> 8) & 0xff; - buffer[11] = nowUs & 0xff; - - ++mCounter; - - CHECK_EQ((status_t)OK, - mNetSession->sendRequest( - mSessionID, - buffer, - sizeof(buffer), - true /* timeValid */, - nowUs)); - - msg->post(100000ll); - break; - } - - case kWhatStop: - { - if (mSessionID != 0) { - mNetSession->destroySession(mSessionID); - mSessionID = 0; - } - - if (mServerSessionID != 0) { - mNetSession->destroySession(mServerSessionID); - mServerSessionID = 0; - } - - looper()->stop(); - break; - } - - default: - TRESPASS(); - } -} - -} // namespace android - -static void usage(const char *me) { - fprintf(stderr, - "usage: %s -c host:port\tconnect to remote host\n" - " -l port \tlisten\n", - me); -} - -int main(int argc, char **argv) { - using namespace android; - - // srand(time(NULL)); - - ProcessState::self()->startThreadPool(); - - DataSource::RegisterDefaultSniffers(); - - int32_t connectToPort = -1; - AString connectToHost; - - int32_t listenOnPort = -1; - - int res; - while ((res = getopt(argc, argv, "hc:l:")) >= 0) { - switch (res) { - case 'c': - { - const char *colonPos = strrchr(optarg, ':'); - - if (colonPos == NULL) { - usage(argv[0]); - exit(1); - } - - connectToHost.setTo(optarg, colonPos - optarg); - - char *end; - connectToPort = strtol(colonPos + 1, &end, 10); - - if (*end != '\0' || end == colonPos + 1 - || connectToPort < 0 || connectToPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - break; - } - - case 'l': - { - char *end; - listenOnPort = strtol(optarg, &end, 10); - - if (*end != '\0' || end == optarg - || listenOnPort < 0 || listenOnPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - break; - } - - case '?': - case 'h': - usage(argv[0]); - exit(1); - } - } - - if ((listenOnPort < 0 && connectToPort < 0) - || (listenOnPort >= 0 && connectToPort >= 0)) { - fprintf(stderr, - "You need to select either client or server mode.\n"); - exit(1); - } - - sp netSession = new ANetworkSession; - netSession->start(); - - sp looper = new ALooper; - - sp handler = new TestHandler(netSession); - looper->registerHandler(handler); 
- - if (listenOnPort) { - handler->listen(listenOnPort); - } - - if (connectToPort >= 0) { - handler->connect(connectToHost.c_str(), connectToPort); - } - - looper->start(true /* runOnCallingThread */); - - return 0; -} diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp deleted file mode 100644 index 7a96081..0000000 --- a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp +++ /dev/null @@ -1,328 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "RTPAssembler" -#include - -#include "RTPAssembler.h" - -#include -#include -#include -#include -#include - -namespace android { - -RTPReceiver::Assembler::Assembler(const sp ¬ify) - : mNotify(notify) { -} - -void RTPReceiver::Assembler::postAccessUnit( - const sp &accessUnit, bool followsDiscontinuity) { - sp notify = mNotify->dup(); - notify->setInt32("what", RTPReceiver::kWhatAccessUnit); - notify->setBuffer("accessUnit", accessUnit); - notify->setInt32("followsDiscontinuity", followsDiscontinuity); - notify->post(); -} -//////////////////////////////////////////////////////////////////////////////// - -RTPReceiver::TSAssembler::TSAssembler(const sp ¬ify) - : Assembler(notify), - mSawDiscontinuity(false) { -} - -void RTPReceiver::TSAssembler::signalDiscontinuity() { - mSawDiscontinuity = true; -} - -status_t RTPReceiver::TSAssembler::processPacket(const sp &packet) { - int32_t rtpTime; - CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); - - packet->meta()->setInt64("timeUs", (rtpTime * 100ll) / 9); - - postAccessUnit(packet, mSawDiscontinuity); - - if (mSawDiscontinuity) { - mSawDiscontinuity = false; - } - - return OK; -} - -//////////////////////////////////////////////////////////////////////////////// - -RTPReceiver::H264Assembler::H264Assembler(const sp ¬ify) - : Assembler(notify), - mState(0), - mIndicator(0), - mNALType(0), - mAccessUnitRTPTime(0) { -} - -void RTPReceiver::H264Assembler::signalDiscontinuity() { - reset(); -} - -status_t RTPReceiver::H264Assembler::processPacket(const sp &packet) { - status_t err = internalProcessPacket(packet); - - if (err != OK) { - reset(); - } - - return err; -} - -status_t RTPReceiver::H264Assembler::internalProcessPacket( - const sp &packet) { - const uint8_t *data = packet->data(); - size_t size = packet->size(); - - switch (mState) { - case 0: - { - if (size < 1 || (data[0] & 0x80)) { - ALOGV("Malformed H264 RTP packet (empty or F-bit set)"); - return ERROR_MALFORMED; - } - - unsigned nalType = data[0] & 0x1f; - if (nalType >= 1 && nalType <= 23) { - addSingleNALUnit(packet); - ALOGV("added single NAL packet"); - } else if (nalType == 28) { - // FU-A - unsigned indicator = data[0]; - CHECK((indicator & 0x1f) == 28); - - if (size < 2) { - ALOGV("Malformed H264 FU-A packet (single byte)"); - return ERROR_MALFORMED; - } - - if (!(data[1] & 0x80)) { - ALOGV("Malformed H264 FU-A packet (no start bit)"); - 
return ERROR_MALFORMED; - } - - mIndicator = data[0]; - mNALType = data[1] & 0x1f; - uint32_t nri = (data[0] >> 5) & 3; - - clearAccumulator(); - - uint8_t byte = mNALType | (nri << 5); - appendToAccumulator(&byte, 1); - appendToAccumulator(data + 2, size - 2); - - int32_t rtpTime; - CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); - mAccumulator->meta()->setInt32("rtp-time", rtpTime); - - if (data[1] & 0x40) { - // Huh? End bit also set on the first buffer. - addSingleNALUnit(mAccumulator); - clearAccumulator(); - - ALOGV("added FU-A"); - break; - } - - mState = 1; - } else if (nalType == 24) { - // STAP-A - - status_t err = addSingleTimeAggregationPacket(packet); - if (err != OK) { - return err; - } - } else { - ALOGV("Malformed H264 packet (unknown type %d)", nalType); - return ERROR_UNSUPPORTED; - } - break; - } - - case 1: - { - if (size < 2 - || data[0] != mIndicator - || (data[1] & 0x1f) != mNALType - || (data[1] & 0x80)) { - ALOGV("Malformed H264 FU-A packet (indicator, " - "type or start bit mismatch)"); - - return ERROR_MALFORMED; - } - - appendToAccumulator(data + 2, size - 2); - - if (data[1] & 0x40) { - addSingleNALUnit(mAccumulator); - - clearAccumulator(); - mState = 0; - - ALOGV("added FU-A"); - } - break; - } - - default: - TRESPASS(); - } - - int32_t marker; - CHECK(packet->meta()->findInt32("M", &marker)); - - if (marker) { - flushAccessUnit(); - } - - return OK; -} - -void RTPReceiver::H264Assembler::reset() { - mNALUnits.clear(); - - clearAccumulator(); - mState = 0; -} - -void RTPReceiver::H264Assembler::clearAccumulator() { - if (mAccumulator != NULL) { - // XXX Too expensive. - mAccumulator.clear(); - } -} - -void RTPReceiver::H264Assembler::appendToAccumulator( - const void *data, size_t size) { - if (mAccumulator == NULL) { - mAccumulator = new ABuffer(size); - memcpy(mAccumulator->data(), data, size); - return; - } - - if (mAccumulator->size() + size > mAccumulator->capacity()) { - sp buf = new ABuffer(mAccumulator->size() + size); - memcpy(buf->data(), mAccumulator->data(), mAccumulator->size()); - buf->setRange(0, mAccumulator->size()); - - int32_t rtpTime; - if (mAccumulator->meta()->findInt32("rtp-time", &rtpTime)) { - buf->meta()->setInt32("rtp-time", rtpTime); - } - - mAccumulator = buf; - } - - memcpy(mAccumulator->data() + mAccumulator->size(), data, size); - mAccumulator->setRange(0, mAccumulator->size() + size); -} - -void RTPReceiver::H264Assembler::addSingleNALUnit(const sp &packet) { - if (mNALUnits.empty()) { - int32_t rtpTime; - CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); - - mAccessUnitRTPTime = rtpTime; - } - - mNALUnits.push_back(packet); -} - -void RTPReceiver::H264Assembler::flushAccessUnit() { - if (mNALUnits.empty()) { - return; - } - - size_t totalSize = 0; - for (List >::iterator it = mNALUnits.begin(); - it != mNALUnits.end(); ++it) { - totalSize += 4 + (*it)->size(); - } - - sp accessUnit = new ABuffer(totalSize); - size_t offset = 0; - for (List >::iterator it = mNALUnits.begin(); - it != mNALUnits.end(); ++it) { - const sp nalUnit = *it; - - memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4); - - memcpy(accessUnit->data() + offset + 4, - nalUnit->data(), - nalUnit->size()); - - offset += 4 + nalUnit->size(); - } - - mNALUnits.clear(); - - accessUnit->meta()->setInt64("timeUs", mAccessUnitRTPTime * 100ll / 9ll); - postAccessUnit(accessUnit, false /* followsDiscontinuity */); -} - -status_t RTPReceiver::H264Assembler::addSingleTimeAggregationPacket( - const sp &packet) { - const uint8_t *data = packet->data(); 
- size_t size = packet->size(); - - if (size < 3) { - ALOGV("Malformed H264 STAP-A packet (too small)"); - return ERROR_MALFORMED; - } - - int32_t rtpTime; - CHECK(packet->meta()->findInt32("rtp-time", &rtpTime)); - - ++data; - --size; - while (size >= 2) { - size_t nalSize = (data[0] << 8) | data[1]; - - if (size < nalSize + 2) { - ALOGV("Malformed H264 STAP-A packet (incomplete NAL unit)"); - return ERROR_MALFORMED; - } - - sp unit = new ABuffer(nalSize); - memcpy(unit->data(), &data[2], nalSize); - - unit->meta()->setInt32("rtp-time", rtpTime); - - addSingleNALUnit(unit); - - data += 2 + nalSize; - size -= 2 + nalSize; - } - - if (size != 0) { - ALOGV("Unexpected padding at end of STAP-A packet."); - } - - ALOGV("added STAP-A"); - - return OK; -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.h b/media/libstagefright/wifi-display/rtp/RTPAssembler.h deleted file mode 100644 index e456d32..0000000 --- a/media/libstagefright/wifi-display/rtp/RTPAssembler.h +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef RTP_ASSEMBLER_H_ - -#define RTP_ASSEMBLER_H_ - -#include "RTPReceiver.h" - -namespace android { - -// A helper class to reassemble the payload of RTP packets into access -// units depending on the packetization scheme. 
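To make that contract concrete, here is a minimal hypothetical assembler (not part of the original header) mirroring the TSAssembler defined in RTPAssembler.cpp: it stamps each payload with a presentation time derived from the 90 kHz RTP clock and forwards it as a complete access unit. A real implementation would have to be declared as a nested type of RTPReceiver alongside TSAssembler and H264Assembler; the name PassthroughAssembler is illustrative only.

struct RTPReceiver::PassthroughAssembler : public RTPReceiver::Assembler {
    PassthroughAssembler(const sp<AMessage> &notify)
        : Assembler(notify),
          mSawDiscontinuity(false) {
    }

    virtual void signalDiscontinuity() {
        // Remember that packets were lost so the consumer can resync.
        mSawDiscontinuity = true;
    }

    virtual status_t processPacket(const sp<ABuffer> &packet) {
        int32_t rtpTime;
        CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));

        // RTP timestamps use a 90 kHz clock; scale to microseconds
        // (1,000,000 / 90,000 == 100 / 9).
        packet->meta()->setInt64("timeUs", (rtpTime * 100ll) / 9ll);

        // Treat every payload as a complete access unit.
        postAccessUnit(packet, mSawDiscontinuity);
        mSawDiscontinuity = false;

        return OK;
    }

private:
    bool mSawDiscontinuity;

    DISALLOW_EVIL_CONSTRUCTORS(PassthroughAssembler);
};

RTPReceiver::makeAssembler() would then need to map a registered payload type to this assembler, the way PACKETIZATION_TRANSPORT_STREAM currently maps to TSAssembler.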
-struct RTPReceiver::Assembler : public RefBase { - Assembler(const sp ¬ify); - - virtual void signalDiscontinuity() = 0; - virtual status_t processPacket(const sp &packet) = 0; - -protected: - virtual ~Assembler() {} - - void postAccessUnit( - const sp &accessUnit, bool followsDiscontinuity); - -private: - sp mNotify; - - DISALLOW_EVIL_CONSTRUCTORS(Assembler); -}; - -struct RTPReceiver::TSAssembler : public RTPReceiver::Assembler { - TSAssembler(const sp ¬ify); - - virtual void signalDiscontinuity(); - virtual status_t processPacket(const sp &packet); - -private: - bool mSawDiscontinuity; - - DISALLOW_EVIL_CONSTRUCTORS(TSAssembler); -}; - -struct RTPReceiver::H264Assembler : public RTPReceiver::Assembler { - H264Assembler(const sp ¬ify); - - virtual void signalDiscontinuity(); - virtual status_t processPacket(const sp &packet); - -private: - int32_t mState; - - uint8_t mIndicator; - uint8_t mNALType; - - sp mAccumulator; - - List > mNALUnits; - int32_t mAccessUnitRTPTime; - - status_t internalProcessPacket(const sp &packet); - - void addSingleNALUnit(const sp &packet); - status_t addSingleTimeAggregationPacket(const sp &packet); - - void flushAccessUnit(); - - void clearAccumulator(); - void appendToAccumulator(const void *data, size_t size); - - void reset(); - - DISALLOW_EVIL_CONSTRUCTORS(H264Assembler); -}; - -} // namespace android - -#endif // RTP_ASSEMBLER_H_ - diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp deleted file mode 100644 index 3b3bd63..0000000 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp +++ /dev/null @@ -1,1152 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "RTPReceiver" -#include - -#include "RTPAssembler.h" -#include "RTPReceiver.h" - -#include -#include -#include -#include -#include -#include -#include - -#define TRACK_PACKET_LOSS 0 - -namespace android { - -//////////////////////////////////////////////////////////////////////////////// - -struct RTPReceiver::Source : public AHandler { - Source(RTPReceiver *receiver, uint32_t ssrc); - - void onPacketReceived(uint16_t seq, const sp &buffer); - - void addReportBlock(uint32_t ssrc, const sp &buf); - -protected: - virtual ~Source(); - - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatRetransmit, - kWhatDeclareLost, - }; - - static const uint32_t kMinSequential = 2; - static const uint32_t kMaxDropout = 3000; - static const uint32_t kMaxMisorder = 100; - static const uint32_t kRTPSeqMod = 1u << 16; - static const int64_t kReportIntervalUs = 10000000ll; - - RTPReceiver *mReceiver; - uint32_t mSSRC; - bool mFirst; - uint16_t mMaxSeq; - uint32_t mCycles; - uint32_t mBaseSeq; - uint32_t mReceived; - uint32_t mExpectedPrior; - uint32_t mReceivedPrior; - - int64_t mFirstArrivalTimeUs; - int64_t mFirstRTPTimeUs; - - // Ordered by extended seq number. 
- List > mPackets; - - enum StatusBits { - STATUS_DECLARED_LOST = 1, - STATUS_REQUESTED_RETRANSMISSION = 2, - STATUS_ARRIVED_LATE = 4, - }; -#if TRACK_PACKET_LOSS - KeyedVector mLostPackets; -#endif - - void modifyPacketStatus(int32_t extSeqNo, uint32_t mask); - - int32_t mAwaitingExtSeqNo; - bool mRequestedRetransmission; - - int32_t mActivePacketType; - sp mActiveAssembler; - - int64_t mNextReportTimeUs; - - int32_t mNumDeclaredLost; - int32_t mNumDeclaredLostPrior; - - int32_t mRetransmitGeneration; - int32_t mDeclareLostGeneration; - bool mDeclareLostTimerPending; - - void queuePacket(const sp &packet); - void dequeueMore(); - - sp getNextPacket(); - void resync(); - - void postRetransmitTimer(int64_t delayUs); - void postDeclareLostTimer(int64_t delayUs); - void cancelTimers(); - - DISALLOW_EVIL_CONSTRUCTORS(Source); -}; - -//////////////////////////////////////////////////////////////////////////////// - -RTPReceiver::Source::Source(RTPReceiver *receiver, uint32_t ssrc) - : mReceiver(receiver), - mSSRC(ssrc), - mFirst(true), - mMaxSeq(0), - mCycles(0), - mBaseSeq(0), - mReceived(0), - mExpectedPrior(0), - mReceivedPrior(0), - mFirstArrivalTimeUs(-1ll), - mFirstRTPTimeUs(-1ll), - mAwaitingExtSeqNo(-1), - mRequestedRetransmission(false), - mActivePacketType(-1), - mNextReportTimeUs(-1ll), - mNumDeclaredLost(0), - mNumDeclaredLostPrior(0), - mRetransmitGeneration(0), - mDeclareLostGeneration(0), - mDeclareLostTimerPending(false) { -} - -RTPReceiver::Source::~Source() { -} - -void RTPReceiver::Source::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatRetransmit: - { - int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - - if (generation != mRetransmitGeneration) { - break; - } - - mRequestedRetransmission = true; - mReceiver->requestRetransmission(mSSRC, mAwaitingExtSeqNo); - - modifyPacketStatus( - mAwaitingExtSeqNo, STATUS_REQUESTED_RETRANSMISSION); - break; - } - - case kWhatDeclareLost: - { - int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - - if (generation != mDeclareLostGeneration) { - break; - } - - cancelTimers(); - - ALOGV("Lost packet extSeqNo %d %s", - mAwaitingExtSeqNo, - mRequestedRetransmission ? "*" : ""); - - mRequestedRetransmission = false; - if (mActiveAssembler != NULL) { - mActiveAssembler->signalDiscontinuity(); - } - - modifyPacketStatus(mAwaitingExtSeqNo, STATUS_DECLARED_LOST); - - // resync(); - ++mAwaitingExtSeqNo; - ++mNumDeclaredLost; - - mReceiver->notifyPacketLost(); - - dequeueMore(); - break; - } - - default: - TRESPASS(); - } -} - -void RTPReceiver::Source::onPacketReceived( - uint16_t seq, const sp &buffer) { - if (mFirst) { - buffer->setInt32Data(mCycles | seq); - queuePacket(buffer); - - mFirst = false; - mBaseSeq = seq; - mMaxSeq = seq; - ++mReceived; - return; - } - - uint16_t udelta = seq - mMaxSeq; - - if (udelta < kMaxDropout) { - // In order, with permissible gap. - - if (seq < mMaxSeq) { - // Sequence number wrapped - count another 64K cycle - mCycles += kRTPSeqMod; - } - - mMaxSeq = seq; - - ++mReceived; - } else if (udelta <= kRTPSeqMod - kMaxMisorder) { - // The sequence number made a very large jump - return; - } else { - // Duplicate or reordered packet. 
- } - - buffer->setInt32Data(mCycles | seq); - queuePacket(buffer); -} - -void RTPReceiver::Source::queuePacket(const sp &packet) { - int32_t newExtendedSeqNo = packet->int32Data(); - - if (mFirstArrivalTimeUs < 0ll) { - mFirstArrivalTimeUs = ALooper::GetNowUs(); - - uint32_t rtpTime; - CHECK(packet->meta()->findInt32("rtp-time", (int32_t *)&rtpTime)); - - mFirstRTPTimeUs = (rtpTime * 100ll) / 9ll; - } - - if (mAwaitingExtSeqNo >= 0 && newExtendedSeqNo < mAwaitingExtSeqNo) { - // We're no longer interested in these. They're old. - ALOGV("dropping stale extSeqNo %d", newExtendedSeqNo); - - modifyPacketStatus(newExtendedSeqNo, STATUS_ARRIVED_LATE); - return; - } - - if (mPackets.empty()) { - mPackets.push_back(packet); - dequeueMore(); - return; - } - - List >::iterator firstIt = mPackets.begin(); - List >::iterator it = --mPackets.end(); - for (;;) { - int32_t extendedSeqNo = (*it)->int32Data(); - - if (extendedSeqNo == newExtendedSeqNo) { - // Duplicate packet. - return; - } - - if (extendedSeqNo < newExtendedSeqNo) { - // Insert new packet after the one at "it". - mPackets.insert(++it, packet); - break; - } - - if (it == firstIt) { - // Insert new packet before the first existing one. - mPackets.insert(it, packet); - break; - } - - --it; - } - - dequeueMore(); -} - -void RTPReceiver::Source::dequeueMore() { - int64_t nowUs = ALooper::GetNowUs(); - if (mNextReportTimeUs < 0ll || nowUs >= mNextReportTimeUs) { - if (mNextReportTimeUs >= 0ll) { - uint32_t expected = (mMaxSeq | mCycles) - mBaseSeq + 1; - - uint32_t expectedInterval = expected - mExpectedPrior; - mExpectedPrior = expected; - - uint32_t receivedInterval = mReceived - mReceivedPrior; - mReceivedPrior = mReceived; - - int64_t lostInterval = - (int64_t)expectedInterval - (int64_t)receivedInterval; - - int32_t declaredLostInterval = - mNumDeclaredLost - mNumDeclaredLostPrior; - - mNumDeclaredLostPrior = mNumDeclaredLost; - - if (declaredLostInterval > 0) { - ALOGI("lost %lld packets (%.2f %%), declared %d lost\n", - lostInterval, - 100.0f * lostInterval / expectedInterval, - declaredLostInterval); - } - } - - mNextReportTimeUs = nowUs + kReportIntervalUs; - -#if TRACK_PACKET_LOSS - for (size_t i = 0; i < mLostPackets.size(); ++i) { - int32_t key = mLostPackets.keyAt(i); - uint32_t value = mLostPackets.valueAt(i); - - AString status; - if (value & STATUS_REQUESTED_RETRANSMISSION) { - status.append("retrans "); - } - if (value & STATUS_ARRIVED_LATE) { - status.append("arrived-late "); - } - ALOGI("Packet %d declared lost %s", key, status.c_str()); - } -#endif - } - - sp packet; - while ((packet = getNextPacket()) != NULL) { - if (mDeclareLostTimerPending) { - cancelTimers(); - } - - CHECK_GE(mAwaitingExtSeqNo, 0); -#if TRACK_PACKET_LOSS - mLostPackets.removeItem(mAwaitingExtSeqNo); -#endif - - int32_t packetType; - CHECK(packet->meta()->findInt32("PT", &packetType)); - - if (packetType != mActivePacketType) { - mActiveAssembler = mReceiver->makeAssembler(packetType); - mActivePacketType = packetType; - } - - if (mActiveAssembler != NULL) { - status_t err = mActiveAssembler->processPacket(packet); - if (err != OK) { - ALOGV("assembler returned error %d", err); - } - } - - ++mAwaitingExtSeqNo; - } - - if (mDeclareLostTimerPending) { - return; - } - - if (mPackets.empty()) { - return; - } - - CHECK_GE(mAwaitingExtSeqNo, 0); - - const sp &firstPacket = *mPackets.begin(); - - uint32_t rtpTime; - CHECK(firstPacket->meta()->findInt32( - "rtp-time", (int32_t *)&rtpTime)); - - - int64_t rtpUs = (rtpTime * 100ll) / 9ll; - - int64_t 
maxArrivalTimeUs = - mFirstArrivalTimeUs + rtpUs - mFirstRTPTimeUs; - - nowUs = ALooper::GetNowUs(); - - CHECK_LT(mAwaitingExtSeqNo, firstPacket->int32Data()); - - ALOGV("waiting for %d, comparing against %d, %lld us left", - mAwaitingExtSeqNo, - firstPacket->int32Data(), - maxArrivalTimeUs - nowUs); - - postDeclareLostTimer(maxArrivalTimeUs + kPacketLostAfterUs); - - if (kRequestRetransmissionAfterUs > 0ll) { - postRetransmitTimer( - maxArrivalTimeUs + kRequestRetransmissionAfterUs); - } -} - -sp RTPReceiver::Source::getNextPacket() { - if (mPackets.empty()) { - return NULL; - } - - int32_t extSeqNo = (*mPackets.begin())->int32Data(); - - if (mAwaitingExtSeqNo < 0) { - mAwaitingExtSeqNo = extSeqNo; - } else if (extSeqNo != mAwaitingExtSeqNo) { - return NULL; - } - - sp packet = *mPackets.begin(); - mPackets.erase(mPackets.begin()); - - return packet; -} - -void RTPReceiver::Source::resync() { - mAwaitingExtSeqNo = -1; -} - -void RTPReceiver::Source::addReportBlock( - uint32_t ssrc, const sp &buf) { - uint32_t extMaxSeq = mMaxSeq | mCycles; - uint32_t expected = extMaxSeq - mBaseSeq + 1; - - int64_t lost = (int64_t)expected - (int64_t)mReceived; - if (lost > 0x7fffff) { - lost = 0x7fffff; - } else if (lost < -0x800000) { - lost = -0x800000; - } - - uint32_t expectedInterval = expected - mExpectedPrior; - mExpectedPrior = expected; - - uint32_t receivedInterval = mReceived - mReceivedPrior; - mReceivedPrior = mReceived; - - int64_t lostInterval = expectedInterval - receivedInterval; - - uint8_t fractionLost; - if (expectedInterval == 0 || lostInterval <=0) { - fractionLost = 0; - } else { - fractionLost = (lostInterval << 8) / expectedInterval; - } - - uint8_t *ptr = buf->data() + buf->size(); - - ptr[0] = ssrc >> 24; - ptr[1] = (ssrc >> 16) & 0xff; - ptr[2] = (ssrc >> 8) & 0xff; - ptr[3] = ssrc & 0xff; - - ptr[4] = fractionLost; - - ptr[5] = (lost >> 16) & 0xff; - ptr[6] = (lost >> 8) & 0xff; - ptr[7] = lost & 0xff; - - ptr[8] = extMaxSeq >> 24; - ptr[9] = (extMaxSeq >> 16) & 0xff; - ptr[10] = (extMaxSeq >> 8) & 0xff; - ptr[11] = extMaxSeq & 0xff; - - // XXX TODO: - - ptr[12] = 0x00; // interarrival jitter - ptr[13] = 0x00; - ptr[14] = 0x00; - ptr[15] = 0x00; - - ptr[16] = 0x00; // last SR - ptr[17] = 0x00; - ptr[18] = 0x00; - ptr[19] = 0x00; - - ptr[20] = 0x00; // delay since last SR - ptr[21] = 0x00; - ptr[22] = 0x00; - ptr[23] = 0x00; - - buf->setRange(buf->offset(), buf->size() + 24); -} - -//////////////////////////////////////////////////////////////////////////////// - -RTPReceiver::RTPReceiver( - const sp &netSession, - const sp ¬ify, - uint32_t flags) - : mNetSession(netSession), - mNotify(notify), - mFlags(flags), - mRTPMode(TRANSPORT_UNDEFINED), - mRTCPMode(TRANSPORT_UNDEFINED), - mRTPSessionID(0), - mRTCPSessionID(0), - mRTPConnected(false), - mRTCPConnected(false), - mRTPClientSessionID(0), - mRTCPClientSessionID(0) { -} - -RTPReceiver::~RTPReceiver() { - if (mRTCPClientSessionID != 0) { - mNetSession->destroySession(mRTCPClientSessionID); - mRTCPClientSessionID = 0; - } - - if (mRTPClientSessionID != 0) { - mNetSession->destroySession(mRTPClientSessionID); - mRTPClientSessionID = 0; - } - - if (mRTCPSessionID != 0) { - mNetSession->destroySession(mRTCPSessionID); - mRTCPSessionID = 0; - } - - if (mRTPSessionID != 0) { - mNetSession->destroySession(mRTPSessionID); - mRTPSessionID = 0; - } -} - -status_t RTPReceiver::initAsync( - TransportMode rtpMode, - TransportMode rtcpMode, - int32_t *outLocalRTPPort) { - if (mRTPMode != TRANSPORT_UNDEFINED - || rtpMode == 
TRANSPORT_UNDEFINED - || rtpMode == TRANSPORT_NONE - || rtcpMode == TRANSPORT_UNDEFINED) { - return INVALID_OPERATION; - } - - CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED); - CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED); - - sp rtpNotify = new AMessage(kWhatRTPNotify, id()); - - sp rtcpNotify; - if (rtcpMode != TRANSPORT_NONE) { - rtcpNotify = new AMessage(kWhatRTCPNotify, id()); - } - - CHECK_EQ(mRTPSessionID, 0); - CHECK_EQ(mRTCPSessionID, 0); - - int32_t localRTPPort; - - struct in_addr ifaceAddr; - ifaceAddr.s_addr = INADDR_ANY; - - for (;;) { - localRTPPort = PickRandomRTPPort(); - - status_t err; - if (rtpMode == TRANSPORT_UDP) { - err = mNetSession->createUDPSession( - localRTPPort, - rtpNotify, - &mRTPSessionID); - } else { - CHECK_EQ(rtpMode, TRANSPORT_TCP); - err = mNetSession->createTCPDatagramSession( - ifaceAddr, - localRTPPort, - rtpNotify, - &mRTPSessionID); - } - - if (err != OK) { - continue; - } - - if (rtcpMode == TRANSPORT_NONE) { - break; - } else if (rtcpMode == TRANSPORT_UDP) { - err = mNetSession->createUDPSession( - localRTPPort + 1, - rtcpNotify, - &mRTCPSessionID); - } else { - CHECK_EQ(rtpMode, TRANSPORT_TCP); - err = mNetSession->createTCPDatagramSession( - ifaceAddr, - localRTPPort + 1, - rtcpNotify, - &mRTCPSessionID); - } - - if (err == OK) { - break; - } - - mNetSession->destroySession(mRTPSessionID); - mRTPSessionID = 0; - } - - mRTPMode = rtpMode; - mRTCPMode = rtcpMode; - *outLocalRTPPort = localRTPPort; - - return OK; -} - -status_t RTPReceiver::connect( - const char *remoteHost, int32_t remoteRTPPort, int32_t remoteRTCPPort) { - status_t err; - - if (mRTPMode == TRANSPORT_UDP) { - CHECK(!mRTPConnected); - - err = mNetSession->connectUDPSession( - mRTPSessionID, remoteHost, remoteRTPPort); - - if (err != OK) { - notifyInitDone(err); - return err; - } - - ALOGI("connectUDPSession RTP successful."); - - mRTPConnected = true; - } - - if (mRTCPMode == TRANSPORT_UDP) { - CHECK(!mRTCPConnected); - - err = mNetSession->connectUDPSession( - mRTCPSessionID, remoteHost, remoteRTCPPort); - - if (err != OK) { - notifyInitDone(err); - return err; - } - - scheduleSendRR(); - - ALOGI("connectUDPSession RTCP successful."); - - mRTCPConnected = true; - } - - if (mRTPConnected - && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) { - notifyInitDone(OK); - } - - return OK; -} - -status_t RTPReceiver::informSender(const sp ¶ms) { - if (!mRTCPConnected) { - return INVALID_OPERATION; - } - - int64_t avgLatencyUs; - CHECK(params->findInt64("avgLatencyUs", &avgLatencyUs)); - - int64_t maxLatencyUs; - CHECK(params->findInt64("maxLatencyUs", &maxLatencyUs)); - - sp buf = new ABuffer(28); - - uint8_t *ptr = buf->data(); - ptr[0] = 0x80 | 0; - ptr[1] = 204; // APP - ptr[2] = 0; - - CHECK((buf->size() % 4) == 0u); - ptr[3] = (buf->size() / 4) - 1; - - ptr[4] = kSourceID >> 24; // SSRC - ptr[5] = (kSourceID >> 16) & 0xff; - ptr[6] = (kSourceID >> 8) & 0xff; - ptr[7] = kSourceID & 0xff; - ptr[8] = 'l'; - ptr[9] = 'a'; - ptr[10] = 't'; - ptr[11] = 'e'; - - ptr[12] = avgLatencyUs >> 56; - ptr[13] = (avgLatencyUs >> 48) & 0xff; - ptr[14] = (avgLatencyUs >> 40) & 0xff; - ptr[15] = (avgLatencyUs >> 32) & 0xff; - ptr[16] = (avgLatencyUs >> 24) & 0xff; - ptr[17] = (avgLatencyUs >> 16) & 0xff; - ptr[18] = (avgLatencyUs >> 8) & 0xff; - ptr[19] = avgLatencyUs & 0xff; - - ptr[20] = maxLatencyUs >> 56; - ptr[21] = (maxLatencyUs >> 48) & 0xff; - ptr[22] = (maxLatencyUs >> 40) & 0xff; - ptr[23] = (maxLatencyUs >> 32) & 0xff; - ptr[24] = (maxLatencyUs >> 24) & 0xff; - ptr[25] = (maxLatencyUs >> 
16) & 0xff; - ptr[26] = (maxLatencyUs >> 8) & 0xff; - ptr[27] = maxLatencyUs & 0xff; - - mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); - - return OK; -} - -void RTPReceiver::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatRTPNotify: - case kWhatRTCPNotify: - onNetNotify(msg->what() == kWhatRTPNotify, msg); - break; - - case kWhatSendRR: - { - onSendRR(); - break; - } - - default: - TRESPASS(); - } -} - -void RTPReceiver::onNetNotify(bool isRTP, const sp &msg) { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - int32_t errorOccuredDuringSend; - CHECK(msg->findInt32("send", &errorOccuredDuringSend)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - ALOGE("An error occurred during %s in session %d " - "(%d, '%s' (%s)).", - errorOccuredDuringSend ? "send" : "receive", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - mNetSession->destroySession(sessionID); - - if (sessionID == mRTPSessionID) { - mRTPSessionID = 0; - } else if (sessionID == mRTCPSessionID) { - mRTCPSessionID = 0; - } else if (sessionID == mRTPClientSessionID) { - mRTPClientSessionID = 0; - } else if (sessionID == mRTCPClientSessionID) { - mRTCPClientSessionID = 0; - } - - if (!mRTPConnected - || (mRTCPMode != TRANSPORT_NONE && !mRTCPConnected)) { - notifyInitDone(err); - break; - } - - notifyError(err); - break; - } - - case ANetworkSession::kWhatDatagram: - { - sp data; - CHECK(msg->findBuffer("data", &data)); - - if (isRTP) { - if (mFlags & FLAG_AUTO_CONNECT) { - AString fromAddr; - CHECK(msg->findString("fromAddr", &fromAddr)); - - int32_t fromPort; - CHECK(msg->findInt32("fromPort", &fromPort)); - - CHECK_EQ((status_t)OK, - connect( - fromAddr.c_str(), fromPort, fromPort + 1)); - - mFlags &= ~FLAG_AUTO_CONNECT; - } - - onRTPData(data); - } else { - onRTCPData(data); - } - break; - } - - case ANetworkSession::kWhatClientConnected: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - if (isRTP) { - CHECK_EQ(mRTPMode, TRANSPORT_TCP); - - if (mRTPClientSessionID != 0) { - // We only allow a single client connection. - mNetSession->destroySession(sessionID); - sessionID = 0; - break; - } - - mRTPClientSessionID = sessionID; - mRTPConnected = true; - } else { - CHECK_EQ(mRTCPMode, TRANSPORT_TCP); - - if (mRTCPClientSessionID != 0) { - // We only allow a single client connection. - mNetSession->destroySession(sessionID); - sessionID = 0; - break; - } - - mRTCPClientSessionID = sessionID; - mRTCPConnected = true; - } - - if (mRTPConnected - && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) { - notifyInitDone(OK); - } - break; - } - } -} - -void RTPReceiver::notifyInitDone(status_t err) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatInitDone); - notify->setInt32("err", err); - notify->post(); -} - -void RTPReceiver::notifyError(status_t err) { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatError); - notify->setInt32("err", err); - notify->post(); -} - -void RTPReceiver::notifyPacketLost() { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatPacketLost); - notify->post(); -} - -status_t RTPReceiver::onRTPData(const sp &buffer) { - size_t size = buffer->size(); - if (size < 12) { - // Too short to be a valid RTP header. 
- return ERROR_MALFORMED; - } - - const uint8_t *data = buffer->data(); - - if ((data[0] >> 6) != 2) { - // Unsupported version. - return ERROR_UNSUPPORTED; - } - - if (data[0] & 0x20) { - // Padding present. - - size_t paddingLength = data[size - 1]; - - if (paddingLength + 12 > size) { - // If we removed this much padding we'd end up with something - // that's too short to be a valid RTP header. - return ERROR_MALFORMED; - } - - size -= paddingLength; - } - - int numCSRCs = data[0] & 0x0f; - - size_t payloadOffset = 12 + 4 * numCSRCs; - - if (size < payloadOffset) { - // Not enough data to fit the basic header and all the CSRC entries. - return ERROR_MALFORMED; - } - - if (data[0] & 0x10) { - // Header eXtension present. - - if (size < payloadOffset + 4) { - // Not enough data to fit the basic header, all CSRC entries - // and the first 4 bytes of the extension header. - - return ERROR_MALFORMED; - } - - const uint8_t *extensionData = &data[payloadOffset]; - - size_t extensionLength = - 4 * (extensionData[2] << 8 | extensionData[3]); - - if (size < payloadOffset + 4 + extensionLength) { - return ERROR_MALFORMED; - } - - payloadOffset += 4 + extensionLength; - } - - uint32_t srcId = U32_AT(&data[8]); - uint32_t rtpTime = U32_AT(&data[4]); - uint16_t seqNo = U16_AT(&data[2]); - - sp meta = buffer->meta(); - meta->setInt32("ssrc", srcId); - meta->setInt32("rtp-time", rtpTime); - meta->setInt32("PT", data[1] & 0x7f); - meta->setInt32("M", data[1] >> 7); - - buffer->setRange(payloadOffset, size - payloadOffset); - - ssize_t index = mSources.indexOfKey(srcId); - sp source; - if (index < 0) { - source = new Source(this, srcId); - looper()->registerHandler(source); - - mSources.add(srcId, source); - } else { - source = mSources.valueAt(index); - } - - source->onPacketReceived(seqNo, buffer); - - return OK; -} - -status_t RTPReceiver::onRTCPData(const sp &data) { - ALOGI("onRTCPData"); - return OK; -} - -void RTPReceiver::addSDES(const sp &buffer) { - uint8_t *data = buffer->data() + buffer->size(); - data[0] = 0x80 | 1; - data[1] = 202; // SDES - data[4] = kSourceID >> 24; // SSRC - data[5] = (kSourceID >> 16) & 0xff; - data[6] = (kSourceID >> 8) & 0xff; - data[7] = kSourceID & 0xff; - - size_t offset = 8; - - data[offset++] = 1; // CNAME - - AString cname = "stagefright@somewhere"; - data[offset++] = cname.size(); - - memcpy(&data[offset], cname.c_str(), cname.size()); - offset += cname.size(); - - data[offset++] = 6; // TOOL - - AString tool = "stagefright/1.0"; - data[offset++] = tool.size(); - - memcpy(&data[offset], tool.c_str(), tool.size()); - offset += tool.size(); - - data[offset++] = 0; - - if ((offset % 4) > 0) { - size_t count = 4 - (offset % 4); - switch (count) { - case 3: - data[offset++] = 0; - case 2: - data[offset++] = 0; - case 1: - data[offset++] = 0; - } - } - - size_t numWords = (offset / 4) - 1; - data[2] = numWords >> 8; - data[3] = numWords & 0xff; - - buffer->setRange(buffer->offset(), buffer->size() + offset); -} - -void RTPReceiver::scheduleSendRR() { - (new AMessage(kWhatSendRR, id()))->post(5000000ll); -} - -void RTPReceiver::onSendRR() { - sp buf = new ABuffer(kMaxUDPPacketSize); - buf->setRange(0, 0); - - uint8_t *ptr = buf->data(); - ptr[0] = 0x80 | 0; - ptr[1] = 201; // RR - ptr[2] = 0; - ptr[3] = 1; - ptr[4] = kSourceID >> 24; // SSRC - ptr[5] = (kSourceID >> 16) & 0xff; - ptr[6] = (kSourceID >> 8) & 0xff; - ptr[7] = kSourceID & 0xff; - - buf->setRange(0, 8); - - size_t numReportBlocks = 0; - for (size_t i = 0; i < mSources.size(); ++i) { - uint32_t ssrc = 
mSources.keyAt(i); - sp source = mSources.valueAt(i); - - if (numReportBlocks > 31 || buf->size() + 24 > buf->capacity()) { - // Cannot fit another report block. - break; - } - - source->addReportBlock(ssrc, buf); - ++numReportBlocks; - } - - ptr[0] |= numReportBlocks; // 5 bit - - size_t sizeInWordsMinus1 = 1 + 6 * numReportBlocks; - ptr[2] = sizeInWordsMinus1 >> 8; - ptr[3] = sizeInWordsMinus1 & 0xff; - - buf->setRange(0, (sizeInWordsMinus1 + 1) * 4); - - addSDES(buf); - - mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); - - scheduleSendRR(); -} - -status_t RTPReceiver::registerPacketType( - uint8_t packetType, PacketizationMode mode) { - mPacketTypes.add(packetType, mode); - - return OK; -} - -sp RTPReceiver::makeAssembler(uint8_t packetType) { - ssize_t index = mPacketTypes.indexOfKey(packetType); - if (index < 0) { - return NULL; - } - - PacketizationMode mode = mPacketTypes.valueAt(index); - - switch (mode) { - case PACKETIZATION_NONE: - case PACKETIZATION_TRANSPORT_STREAM: - return new TSAssembler(mNotify); - - case PACKETIZATION_H264: - return new H264Assembler(mNotify); - - default: - return NULL; - } -} - -void RTPReceiver::requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo) { - int32_t blp = 0; - - sp buf = new ABuffer(16); - buf->setRange(0, 0); - - uint8_t *ptr = buf->data(); - ptr[0] = 0x80 | 1; // generic NACK - ptr[1] = 205; // TSFB - ptr[2] = 0; - ptr[3] = 3; - ptr[8] = (senderSSRC >> 24) & 0xff; - ptr[9] = (senderSSRC >> 16) & 0xff; - ptr[10] = (senderSSRC >> 8) & 0xff; - ptr[11] = (senderSSRC & 0xff); - ptr[8] = (kSourceID >> 24) & 0xff; - ptr[9] = (kSourceID >> 16) & 0xff; - ptr[10] = (kSourceID >> 8) & 0xff; - ptr[11] = (kSourceID & 0xff); - ptr[12] = (extSeqNo >> 8) & 0xff; - ptr[13] = (extSeqNo & 0xff); - ptr[14] = (blp >> 8) & 0xff; - ptr[15] = (blp & 0xff); - - buf->setRange(0, 16); - - mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); -} - -void RTPReceiver::Source::modifyPacketStatus(int32_t extSeqNo, uint32_t mask) { -#if TRACK_PACKET_LOSS - ssize_t index = mLostPackets.indexOfKey(extSeqNo); - if (index < 0) { - mLostPackets.add(extSeqNo, mask); - } else { - mLostPackets.editValueAt(index) |= mask; - } -#endif -} - -void RTPReceiver::Source::postRetransmitTimer(int64_t timeUs) { - int64_t delayUs = timeUs - ALooper::GetNowUs(); - sp msg = new AMessage(kWhatRetransmit, id()); - msg->setInt32("generation", mRetransmitGeneration); - msg->post(delayUs); -} - -void RTPReceiver::Source::postDeclareLostTimer(int64_t timeUs) { - CHECK(!mDeclareLostTimerPending); - mDeclareLostTimerPending = true; - - int64_t delayUs = timeUs - ALooper::GetNowUs(); - sp msg = new AMessage(kWhatDeclareLost, id()); - msg->setInt32("generation", mDeclareLostGeneration); - msg->post(delayUs); -} - -void RTPReceiver::Source::cancelTimers() { - ++mRetransmitGeneration; - ++mDeclareLostGeneration; - mDeclareLostTimerPending = false; -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.h b/media/libstagefright/wifi-display/rtp/RTPReceiver.h deleted file mode 100644 index 240ab2e..0000000 --- a/media/libstagefright/wifi-display/rtp/RTPReceiver.h +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
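
The deleted RTPReceiver::onRTPData() above validates the 12-byte RTP fixed header before handing the payload to a per-SSRC Source: it checks the version, strips padding, skips the CSRC list and an optional header extension, then reads sequence number, timestamp and SSRC. A self-contained sketch of that parse, with hypothetical struct and function names and no ABuffer/AMessage dependencies:

    #include <cstddef>
    #include <cstdint>

    struct RtpHeaderInfo {
        uint32_t ssrc;
        uint32_t rtpTime;
        uint16_t seqNo;
        uint8_t payloadType;
        bool marker;
        size_t payloadOffset;
        size_t payloadSize;
    };

    // Returns false on malformed or unsupported packets, mirroring the checks
    // in the deleted RTPReceiver::onRTPData().
    static bool ParseRtpHeader(const uint8_t *data, size_t size, RtpHeaderInfo *out) {
        if (size < 12 || (data[0] >> 6) != 2) {
            return false;                      // too short, or not RTP version 2
        }

        if (data[0] & 0x20) {                  // padding bit set
            size_t paddingLength = data[size - 1];
            if (paddingLength + 12 > size) {
                return false;
            }
            size -= paddingLength;
        }

        size_t payloadOffset = 12 + 4 * (data[0] & 0x0f);   // skip CSRC entries
        if (size < payloadOffset) {
            return false;
        }

        if (data[0] & 0x10) {                  // header extension present
            if (size < payloadOffset + 4) {
                return false;
            }
            const uint8_t *ext = &data[payloadOffset];
            size_t extensionLength = 4 * ((ext[2] << 8) | ext[3]);
            if (size < payloadOffset + 4 + extensionLength) {
                return false;
            }
            payloadOffset += 4 + extensionLength;
        }

        out->seqNo = (data[2] << 8) | data[3];
        out->rtpTime = (uint32_t)data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7];
        out->ssrc = (uint32_t)data[8] << 24 | data[9] << 16 | data[10] << 8 | data[11];
        out->payloadType = data[1] & 0x7f;
        out->marker = (data[1] & 0x80) != 0;
        out->payloadOffset = payloadOffset;
        out->payloadSize = size - payloadOffset;
        return true;
    }
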
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef RTP_RECEIVER_H_ - -#define RTP_RECEIVER_H_ - -#include "RTPBase.h" - -#include - -namespace android { - -struct ABuffer; -struct ANetworkSession; - -// An object of this class facilitates receiving of media data on an RTP -// channel. The channel is established over a UDP or TCP connection depending -// on which "TransportMode" was chosen. In addition different RTP packetization -// schemes are supported such as "Transport Stream Packets over RTP", -// or "AVC/H.264 encapsulation as specified in RFC 3984 (non-interleaved mode)" -struct RTPReceiver : public RTPBase, public AHandler { - enum { - kWhatInitDone, - kWhatError, - kWhatAccessUnit, - kWhatPacketLost, - }; - - enum Flags { - FLAG_AUTO_CONNECT = 1, - }; - RTPReceiver( - const sp &netSession, - const sp ¬ify, - uint32_t flags = 0); - - status_t registerPacketType( - uint8_t packetType, PacketizationMode mode); - - status_t initAsync( - TransportMode rtpMode, - TransportMode rtcpMode, - int32_t *outLocalRTPPort); - - status_t connect( - const char *remoteHost, - int32_t remoteRTPPort, - int32_t remoteRTCPPort); - - status_t informSender(const sp ¶ms); - -protected: - virtual ~RTPReceiver(); - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatRTPNotify, - kWhatRTCPNotify, - kWhatSendRR, - }; - - enum { - kSourceID = 0xdeadbeef, - kPacketLostAfterUs = 100000, - kRequestRetransmissionAfterUs = -1, - }; - - struct Assembler; - struct H264Assembler; - struct Source; - struct TSAssembler; - - sp mNetSession; - sp mNotify; - uint32_t mFlags; - TransportMode mRTPMode; - TransportMode mRTCPMode; - int32_t mRTPSessionID; - int32_t mRTCPSessionID; - bool mRTPConnected; - bool mRTCPConnected; - - int32_t mRTPClientSessionID; // in TRANSPORT_TCP mode. - int32_t mRTCPClientSessionID; // in TRANSPORT_TCP mode. - - KeyedVector mPacketTypes; - KeyedVector > mSources; - - void onNetNotify(bool isRTP, const sp &msg); - status_t onRTPData(const sp &data); - status_t onRTCPData(const sp &data); - void onSendRR(); - - void scheduleSendRR(); - void addSDES(const sp &buffer); - - void notifyInitDone(status_t err); - void notifyError(status_t err); - void notifyPacketLost(); - - sp makeAssembler(uint8_t packetType); - - void requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo); - - DISALLOW_EVIL_CONSTRUCTORS(RTPReceiver); -}; - -} // namespace android - -#endif // RTP_RECEIVER_H_ diff --git a/media/libstagefright/wifi-display/rtptest.cpp b/media/libstagefright/wifi-display/rtptest.cpp deleted file mode 100644 index b902f29..0000000 --- a/media/libstagefright/wifi-display/rtptest.cpp +++ /dev/null @@ -1,565 +0,0 @@ -/* - * Copyright 2013, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
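
requestRetransmission() in the RTPReceiver.cpp removed above emits an RTCP transport-layer feedback packet (PT 205, FMT 1, the RFC 4585 "Generic NACK") naming a lost sequence number. Note that the deleted code writes bytes 8-11 twice (first with the sender's SSRC, then with kSourceID) and never fills the packet-sender SSRC at bytes 4-7. The hypothetical sketch below fills both SSRC fields as RFC 4585 lays them out:

    #include <cstdint>
    #include <vector>

    // Hypothetical builder for an RFC 4585 Generic NACK, the packet shape the
    // deleted RTPReceiver::requestRetransmission() sent over the RTCP session.
    static std::vector<uint8_t> MakeGenericNack(
            uint32_t senderSsrc, uint32_t mediaSsrc, uint16_t pid, uint16_t blp) {
        std::vector<uint8_t> pkt(16, 0);

        pkt[0] = 0x80 | 1;             // V=2, FMT=1 (generic NACK)
        pkt[1] = 205;                  // PT=RTPFB (transport-layer feedback)
        pkt[2] = 0;
        pkt[3] = 3;                    // length = 4 words minus one

        for (int i = 0; i < 4; ++i) {
            pkt[4 + i] = (senderSsrc >> (24 - 8 * i)) & 0xff;  // packet sender SSRC
            pkt[8 + i] = (mediaSsrc  >> (24 - 8 * i)) & 0xff;  // media source SSRC
        }

        pkt[12] = pid >> 8;            // packet ID (lost sequence number)
        pkt[13] = pid & 0xff;
        pkt[14] = blp >> 8;            // bitmask of following lost packets
        pkt[15] = blp & 0xff;

        return pkt;
    }
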
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NEBUG 0 -#define LOG_TAG "rtptest" -#include - -#include "rtp/RTPSender.h" -#include "rtp/RTPReceiver.h" -#include "TimeSyncer.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#define MEDIA_FILENAME "/sdcard/Frame Counter HD 30FPS_1080p.mp4" - -namespace android { - -struct PacketSource : public RefBase { - PacketSource() {} - - virtual sp getNextAccessUnit() = 0; - -protected: - virtual ~PacketSource() {} - -private: - DISALLOW_EVIL_CONSTRUCTORS(PacketSource); -}; - -struct MediaPacketSource : public PacketSource { - MediaPacketSource() - : mMaxSampleSize(1024 * 1024) { - mExtractor = new NuMediaExtractor; - CHECK_EQ((status_t)OK, - mExtractor->setDataSource(MEDIA_FILENAME)); - - bool haveVideo = false; - for (size_t i = 0; i < mExtractor->countTracks(); ++i) { - sp format; - CHECK_EQ((status_t)OK, mExtractor->getTrackFormat(i, &format)); - - AString mime; - CHECK(format->findString("mime", &mime)); - - if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str())) { - mExtractor->selectTrack(i); - haveVideo = true; - break; - } - } - - CHECK(haveVideo); - } - - virtual sp getNextAccessUnit() { - int64_t timeUs; - status_t err = mExtractor->getSampleTime(&timeUs); - - if (err != OK) { - return NULL; - } - - sp accessUnit = new ABuffer(mMaxSampleSize); - CHECK_EQ((status_t)OK, mExtractor->readSampleData(accessUnit)); - - accessUnit->meta()->setInt64("timeUs", timeUs); - - CHECK_EQ((status_t)OK, mExtractor->advance()); - - return accessUnit; - } - -protected: - virtual ~MediaPacketSource() { - } - -private: - sp mExtractor; - size_t mMaxSampleSize; - - DISALLOW_EVIL_CONSTRUCTORS(MediaPacketSource); -}; - -struct SimplePacketSource : public PacketSource { - SimplePacketSource() - : mCounter(0) { - } - - virtual sp getNextAccessUnit() { - sp buffer = new ABuffer(4); - uint8_t *dst = buffer->data(); - dst[0] = mCounter >> 24; - dst[1] = (mCounter >> 16) & 0xff; - dst[2] = (mCounter >> 8) & 0xff; - dst[3] = mCounter & 0xff; - - buffer->meta()->setInt64("timeUs", mCounter * 1000000ll / kFrameRate); - - ++mCounter; - - return buffer; - } - -protected: - virtual ~SimplePacketSource() { - } - -private: - enum { - kFrameRate = 30 - }; - - uint32_t mCounter; - - DISALLOW_EVIL_CONSTRUCTORS(SimplePacketSource); -}; - -struct TestHandler : public AHandler { - TestHandler(const sp &netSession); - - void listen(); - void connect(const char *host, int32_t port); - -protected: - virtual ~TestHandler(); - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatListen, - kWhatConnect, - kWhatReceiverNotify, - kWhatSenderNotify, - kWhatSendMore, - kWhatStop, - kWhatTimeSyncerNotify, - }; - -#if 1 - static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_UDP; - static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_UDP; -#else - static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_TCP; - static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_NONE; -#endif - -#if 1 - static const RTPBase::PacketizationMode kPacketizationMode - = 
RTPBase::PACKETIZATION_H264; -#else - static const RTPBase::PacketizationMode kPacketizationMode - = RTPBase::PACKETIZATION_NONE; -#endif - - sp mNetSession; - sp mSource; - sp mSender; - sp mReceiver; - - sp mTimeSyncer; - bool mTimeSyncerStarted; - - int64_t mFirstTimeRealUs; - int64_t mFirstTimeMediaUs; - - int64_t mTimeOffsetUs; - bool mTimeOffsetValid; - - status_t readMore(); - - DISALLOW_EVIL_CONSTRUCTORS(TestHandler); -}; - -TestHandler::TestHandler(const sp &netSession) - : mNetSession(netSession), - mTimeSyncerStarted(false), - mFirstTimeRealUs(-1ll), - mFirstTimeMediaUs(-1ll), - mTimeOffsetUs(-1ll), - mTimeOffsetValid(false) { -} - -TestHandler::~TestHandler() { -} - -void TestHandler::listen() { - sp msg = new AMessage(kWhatListen, id()); - msg->post(); -} - -void TestHandler::connect(const char *host, int32_t port) { - sp msg = new AMessage(kWhatConnect, id()); - msg->setString("host", host); - msg->setInt32("port", port); - msg->post(); -} - -static void dumpDelay(int64_t delayMs) { - static const int64_t kMinDelayMs = 0; - static const int64_t kMaxDelayMs = 300; - - const char *kPattern = "########################################"; - size_t kPatternSize = strlen(kPattern); - - int n = (kPatternSize * (delayMs - kMinDelayMs)) - / (kMaxDelayMs - kMinDelayMs); - - if (n < 0) { - n = 0; - } else if ((size_t)n > kPatternSize) { - n = kPatternSize; - } - - ALOGI("(%4lld ms) %s\n", - delayMs, - kPattern + kPatternSize - n); -} - -void TestHandler::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatListen: - { - sp notify = new AMessage(kWhatTimeSyncerNotify, id()); - mTimeSyncer = new TimeSyncer(mNetSession, notify); - looper()->registerHandler(mTimeSyncer); - - notify = new AMessage(kWhatReceiverNotify, id()); - mReceiver = new RTPReceiver( - mNetSession, notify, RTPReceiver::FLAG_AUTO_CONNECT); - looper()->registerHandler(mReceiver); - - CHECK_EQ((status_t)OK, - mReceiver->registerPacketType(33, kPacketizationMode)); - - int32_t receiverRTPPort; - CHECK_EQ((status_t)OK, - mReceiver->initAsync( - kRTPMode, - kRTCPMode, - &receiverRTPPort)); - - printf("picked receiverRTPPort %d\n", receiverRTPPort); - -#if 0 - CHECK_EQ((status_t)OK, - mReceiver->connect( - "127.0.0.1", senderRTPPort, senderRTPPort + 1)); -#endif - break; - } - - case kWhatConnect: - { - AString host; - CHECK(msg->findString("host", &host)); - - sp notify = new AMessage(kWhatTimeSyncerNotify, id()); - mTimeSyncer = new TimeSyncer(mNetSession, notify); - looper()->registerHandler(mTimeSyncer); - mTimeSyncer->startServer(8123); - - int32_t receiverRTPPort; - CHECK(msg->findInt32("port", &receiverRTPPort)); - -#if 1 - mSource = new MediaPacketSource; -#else - mSource = new SimplePacketSource; -#endif - - notify = new AMessage(kWhatSenderNotify, id()); - mSender = new RTPSender(mNetSession, notify); - - looper()->registerHandler(mSender); - - int32_t senderRTPPort; - CHECK_EQ((status_t)OK, - mSender->initAsync( - host.c_str(), - receiverRTPPort, - kRTPMode, - kRTCPMode == RTPBase::TRANSPORT_NONE - ? 
-1 : receiverRTPPort + 1, - kRTCPMode, - &senderRTPPort)); - - printf("picked senderRTPPort %d\n", senderRTPPort); - break; - } - - case kWhatSenderNotify: - { - ALOGI("kWhatSenderNotify"); - - int32_t what; - CHECK(msg->findInt32("what", &what)); - - switch (what) { - case RTPSender::kWhatInitDone: - { - int32_t err; - CHECK(msg->findInt32("err", &err)); - - ALOGI("RTPSender::initAsync completed w/ err %d", err); - - if (err == OK) { - err = readMore(); - - if (err != OK) { - (new AMessage(kWhatStop, id()))->post(); - } - } - break; - } - - case RTPSender::kWhatError: - break; - } - break; - } - - case kWhatReceiverNotify: - { - ALOGV("kWhatReceiverNotify"); - - int32_t what; - CHECK(msg->findInt32("what", &what)); - - switch (what) { - case RTPReceiver::kWhatInitDone: - { - int32_t err; - CHECK(msg->findInt32("err", &err)); - - ALOGI("RTPReceiver::initAsync completed w/ err %d", err); - break; - } - - case RTPReceiver::kWhatError: - break; - - case RTPReceiver::kWhatAccessUnit: - { -#if 0 - if (!mTimeSyncerStarted) { - mTimeSyncer->startClient("172.18.41.216", 8123); - mTimeSyncerStarted = true; - } - - sp accessUnit; - CHECK(msg->findBuffer("accessUnit", &accessUnit)); - - int64_t timeUs; - CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); - - if (mTimeOffsetValid) { - timeUs -= mTimeOffsetUs; - int64_t nowUs = ALooper::GetNowUs(); - int64_t delayMs = (nowUs - timeUs) / 1000ll; - - dumpDelay(delayMs); - } -#endif - break; - } - - case RTPReceiver::kWhatPacketLost: - ALOGV("kWhatPacketLost"); - break; - - default: - TRESPASS(); - } - break; - } - - case kWhatSendMore: - { - sp accessUnit; - CHECK(msg->findBuffer("accessUnit", &accessUnit)); - - CHECK_EQ((status_t)OK, - mSender->queueBuffer( - accessUnit, - 33, - kPacketizationMode)); - - status_t err = readMore(); - - if (err != OK) { - (new AMessage(kWhatStop, id()))->post(); - } - break; - } - - case kWhatStop: - { - if (mReceiver != NULL) { - looper()->unregisterHandler(mReceiver->id()); - mReceiver.clear(); - } - - if (mSender != NULL) { - looper()->unregisterHandler(mSender->id()); - mSender.clear(); - } - - mSource.clear(); - - looper()->stop(); - break; - } - - case kWhatTimeSyncerNotify: - { - CHECK(msg->findInt64("offset", &mTimeOffsetUs)); - mTimeOffsetValid = true; - break; - } - - default: - TRESPASS(); - } -} - -status_t TestHandler::readMore() { - sp accessUnit = mSource->getNextAccessUnit(); - - if (accessUnit == NULL) { - return ERROR_END_OF_STREAM; - } - - int64_t timeUs; - CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); - - int64_t nowUs = ALooper::GetNowUs(); - int64_t whenUs; - - if (mFirstTimeRealUs < 0ll) { - mFirstTimeRealUs = whenUs = nowUs; - mFirstTimeMediaUs = timeUs; - } else { - whenUs = mFirstTimeRealUs + timeUs - mFirstTimeMediaUs; - } - - accessUnit->meta()->setInt64("timeUs", whenUs); - - sp msg = new AMessage(kWhatSendMore, id()); - msg->setBuffer("accessUnit", accessUnit); - msg->post(whenUs - nowUs); - - return OK; -} - -} // namespace android - -static void usage(const char *me) { - fprintf(stderr, - "usage: %s -c host:port\tconnect to remote host\n" - " -l \tlisten\n", - me); -} - -int main(int argc, char **argv) { - using namespace android; - - // srand(time(NULL)); - - ProcessState::self()->startThreadPool(); - - DataSource::RegisterDefaultSniffers(); - - bool listen = false; - int32_t connectToPort = -1; - AString connectToHost; - - int res; - while ((res = getopt(argc, argv, "hc:l")) >= 0) { - switch (res) { - case 'c': - { - const char *colonPos = strrchr(optarg, ':'); - - if 
(colonPos == NULL) { - usage(argv[0]); - exit(1); - } - - connectToHost.setTo(optarg, colonPos - optarg); - - char *end; - connectToPort = strtol(colonPos + 1, &end, 10); - - if (*end != '\0' || end == colonPos + 1 - || connectToPort < 1 || connectToPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - break; - } - - case 'l': - { - listen = true; - break; - } - - case '?': - case 'h': - usage(argv[0]); - exit(1); - } - } - - if (!listen && connectToPort < 0) { - fprintf(stderr, - "You need to select either client or server mode.\n"); - exit(1); - } - - sp netSession = new ANetworkSession; - netSession->start(); - - sp looper = new ALooper; - - sp handler = new TestHandler(netSession); - looper->registerHandler(handler); - - if (listen) { - handler->listen(); - } - - if (connectToPort >= 0) { - handler->connect(connectToHost.c_str(), connectToPort); - } - - looper->start(true /* runOnCallingThread */); - - return 0; -} - diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp deleted file mode 100644 index cdb2267..0000000 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp +++ /dev/null @@ -1,653 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "DirectRenderer" -#include - -#include "DirectRenderer.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace android { - -/* - Drives the decoding process using a MediaCodec instance. Input buffers - queued by calls to "queueInputBuffer" are fed to the decoder as soon - as the decoder is ready for them, the client is notified about output - buffers as the decoder spits them out. -*/ -struct DirectRenderer::DecoderContext : public AHandler { - enum { - kWhatOutputBufferReady, - }; - DecoderContext(const sp ¬ify); - - status_t init( - const sp &format, - const sp &surfaceTex); - - void queueInputBuffer(const sp &accessUnit); - - status_t renderOutputBufferAndRelease(size_t index); - status_t releaseOutputBuffer(size_t index); - -protected: - virtual ~DecoderContext(); - - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatDecoderNotify, - }; - - sp mNotify; - sp mDecoderLooper; - sp mDecoder; - Vector > mDecoderInputBuffers; - Vector > mDecoderOutputBuffers; - List mDecoderInputBuffersAvailable; - bool mDecoderNotificationPending; - - List > mAccessUnits; - - void onDecoderNotify(); - void scheduleDecoderNotification(); - void queueDecoderInputBuffers(); - - void queueOutputBuffer( - size_t index, int64_t timeUs, const sp &buffer); - - DISALLOW_EVIL_CONSTRUCTORS(DecoderContext); -}; - -//////////////////////////////////////////////////////////////////////////////// - -/* - A "push" audio renderer. 
The primary function of this renderer is to use - an AudioTrack in push mode and making sure not to block the event loop - be ensuring that calls to AudioTrack::write never block. This is done by - estimating an upper bound of data that can be written to the AudioTrack - buffer without delay. -*/ -struct DirectRenderer::AudioRenderer : public AHandler { - AudioRenderer(const sp &decoderContext); - - void queueInputBuffer( - size_t index, int64_t timeUs, const sp &buffer); - -protected: - virtual ~AudioRenderer(); - virtual void onMessageReceived(const sp &msg); - -private: - enum { - kWhatPushAudio, - }; - - struct BufferInfo { - size_t mIndex; - int64_t mTimeUs; - sp mBuffer; - }; - - sp mDecoderContext; - sp mAudioTrack; - - List mInputBuffers; - bool mPushPending; - - size_t mNumFramesWritten; - - void schedulePushIfNecessary(); - void onPushAudio(); - - ssize_t writeNonBlocking(const uint8_t *data, size_t size); - - DISALLOW_EVIL_CONSTRUCTORS(AudioRenderer); -}; - -//////////////////////////////////////////////////////////////////////////////// - -DirectRenderer::DecoderContext::DecoderContext(const sp ¬ify) - : mNotify(notify), - mDecoderNotificationPending(false) { -} - -DirectRenderer::DecoderContext::~DecoderContext() { - if (mDecoder != NULL) { - mDecoder->release(); - mDecoder.clear(); - - mDecoderLooper->stop(); - mDecoderLooper.clear(); - } -} - -status_t DirectRenderer::DecoderContext::init( - const sp &format, - const sp &surfaceTex) { - CHECK(mDecoder == NULL); - - AString mime; - CHECK(format->findString("mime", &mime)); - - mDecoderLooper = new ALooper; - mDecoderLooper->setName("video codec looper"); - - mDecoderLooper->start( - false /* runOnCallingThread */, - false /* canCallJava */, - PRIORITY_DEFAULT); - - mDecoder = MediaCodec::CreateByType( - mDecoderLooper, mime.c_str(), false /* encoder */); - - CHECK(mDecoder != NULL); - - status_t err = mDecoder->configure( - format, - surfaceTex == NULL - ? 
NULL : new Surface(surfaceTex), - NULL /* crypto */, - 0 /* flags */); - CHECK_EQ(err, (status_t)OK); - - err = mDecoder->start(); - CHECK_EQ(err, (status_t)OK); - - err = mDecoder->getInputBuffers( - &mDecoderInputBuffers); - CHECK_EQ(err, (status_t)OK); - - err = mDecoder->getOutputBuffers( - &mDecoderOutputBuffers); - CHECK_EQ(err, (status_t)OK); - - scheduleDecoderNotification(); - - return OK; -} - -void DirectRenderer::DecoderContext::queueInputBuffer( - const sp &accessUnit) { - CHECK(mDecoder != NULL); - - mAccessUnits.push_back(accessUnit); - queueDecoderInputBuffers(); -} - -status_t DirectRenderer::DecoderContext::renderOutputBufferAndRelease( - size_t index) { - return mDecoder->renderOutputBufferAndRelease(index); -} - -status_t DirectRenderer::DecoderContext::releaseOutputBuffer(size_t index) { - return mDecoder->releaseOutputBuffer(index); -} - -void DirectRenderer::DecoderContext::queueDecoderInputBuffers() { - if (mDecoder == NULL) { - return; - } - - bool submittedMore = false; - - while (!mAccessUnits.empty() - && !mDecoderInputBuffersAvailable.empty()) { - size_t index = *mDecoderInputBuffersAvailable.begin(); - - mDecoderInputBuffersAvailable.erase( - mDecoderInputBuffersAvailable.begin()); - - sp srcBuffer = *mAccessUnits.begin(); - mAccessUnits.erase(mAccessUnits.begin()); - - const sp &dstBuffer = - mDecoderInputBuffers.itemAt(index); - - memcpy(dstBuffer->data(), srcBuffer->data(), srcBuffer->size()); - - int64_t timeUs; - CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs)); - - status_t err = mDecoder->queueInputBuffer( - index, - 0 /* offset */, - srcBuffer->size(), - timeUs, - 0 /* flags */); - CHECK_EQ(err, (status_t)OK); - - submittedMore = true; - } - - if (submittedMore) { - scheduleDecoderNotification(); - } -} - -void DirectRenderer::DecoderContext::onMessageReceived( - const sp &msg) { - switch (msg->what()) { - case kWhatDecoderNotify: - { - onDecoderNotify(); - break; - } - - default: - TRESPASS(); - } -} - -void DirectRenderer::DecoderContext::onDecoderNotify() { - mDecoderNotificationPending = false; - - for (;;) { - size_t index; - status_t err = mDecoder->dequeueInputBuffer(&index); - - if (err == OK) { - mDecoderInputBuffersAvailable.push_back(index); - } else if (err == -EAGAIN) { - break; - } else { - TRESPASS(); - } - } - - queueDecoderInputBuffers(); - - for (;;) { - size_t index; - size_t offset; - size_t size; - int64_t timeUs; - uint32_t flags; - status_t err = mDecoder->dequeueOutputBuffer( - &index, - &offset, - &size, - &timeUs, - &flags); - - if (err == OK) { - queueOutputBuffer( - index, timeUs, mDecoderOutputBuffers.itemAt(index)); - } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) { - err = mDecoder->getOutputBuffers( - &mDecoderOutputBuffers); - CHECK_EQ(err, (status_t)OK); - } else if (err == INFO_FORMAT_CHANGED) { - // We don't care. 
- } else if (err == -EAGAIN) { - break; - } else { - TRESPASS(); - } - } - - scheduleDecoderNotification(); -} - -void DirectRenderer::DecoderContext::scheduleDecoderNotification() { - if (mDecoderNotificationPending) { - return; - } - - sp notify = - new AMessage(kWhatDecoderNotify, id()); - - mDecoder->requestActivityNotification(notify); - mDecoderNotificationPending = true; -} - -void DirectRenderer::DecoderContext::queueOutputBuffer( - size_t index, int64_t timeUs, const sp &buffer) { - sp msg = mNotify->dup(); - msg->setInt32("what", kWhatOutputBufferReady); - msg->setSize("index", index); - msg->setInt64("timeUs", timeUs); - msg->setBuffer("buffer", buffer); - msg->post(); -} - -//////////////////////////////////////////////////////////////////////////////// - -DirectRenderer::AudioRenderer::AudioRenderer( - const sp &decoderContext) - : mDecoderContext(decoderContext), - mPushPending(false), - mNumFramesWritten(0) { - mAudioTrack = new AudioTrack( - AUDIO_STREAM_DEFAULT, - 48000.0f, - AUDIO_FORMAT_PCM, - AUDIO_CHANNEL_OUT_STEREO, - (int)0 /* frameCount */); - - CHECK_EQ((status_t)OK, mAudioTrack->initCheck()); - - mAudioTrack->start(); -} - -DirectRenderer::AudioRenderer::~AudioRenderer() { -} - -void DirectRenderer::AudioRenderer::queueInputBuffer( - size_t index, int64_t timeUs, const sp &buffer) { - BufferInfo info; - info.mIndex = index; - info.mTimeUs = timeUs; - info.mBuffer = buffer; - - mInputBuffers.push_back(info); - schedulePushIfNecessary(); -} - -void DirectRenderer::AudioRenderer::onMessageReceived( - const sp &msg) { - switch (msg->what()) { - case kWhatPushAudio: - { - onPushAudio(); - break; - } - - default: - break; - } -} - -void DirectRenderer::AudioRenderer::schedulePushIfNecessary() { - if (mPushPending || mInputBuffers.empty()) { - return; - } - - mPushPending = true; - - uint32_t numFramesPlayed; - CHECK_EQ(mAudioTrack->getPosition(&numFramesPlayed), - (status_t)OK); - - uint32_t numFramesPendingPlayout = mNumFramesWritten - numFramesPlayed; - - // This is how long the audio sink will have data to - // play back. - const float msecsPerFrame = 1000.0f / mAudioTrack->getSampleRate(); - - int64_t delayUs = - msecsPerFrame * numFramesPendingPlayout * 1000ll; - - // Let's give it more data after about half that time - // has elapsed. 
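
The deleted AudioRenderer avoids blocking in AudioTrack::write(): schedulePushIfNecessary() here estimates how long the already-written frames will keep the sink busy and re-posts itself after roughly half that time, while writeNonBlocking(), further down in the same file, clamps each write to the remaining buffer headroom. A pure-arithmetic sketch of that bookkeeping, with a hypothetical struct standing in for the AudioTrack queries:

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>

    // Hypothetical snapshot of the state the deleted AudioRenderer tracks.
    struct AudioSinkState {
        uint32_t frameCount;        // AudioTrack buffer size in frames
        uint32_t sampleRate;        // e.g. 48000
        uint32_t numFramesWritten;  // total frames handed to the track so far
        uint32_t numFramesPlayed;   // AudioTrack::getPosition() result
    };

    // How many bytes can be written right now without blocking
    // (cf. the deleted writeNonBlocking()).
    static size_t WritableBytes(const AudioSinkState &s, size_t frameSize, size_t wanted) {
        uint32_t framesPending = s.numFramesWritten - s.numFramesPlayed;
        if (framesPending >= s.frameCount) {
            return 0;               // buffer full, writing now would block
        }
        size_t framesAvailable = s.frameCount - framesPending;
        return std::min(wanted, framesAvailable * frameSize);
    }

    // Delay before asking for more data: about half the time the pending
    // frames take to play out (cf. the deleted schedulePushIfNecessary()).
    static int64_t NextPushDelayUs(const AudioSinkState &s) {
        uint32_t framesPending = s.numFramesWritten - s.numFramesPlayed;
        int64_t pendingUs = (int64_t)framesPending * 1000000ll / s.sampleRate;
        return pendingUs / 2;
    }
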
- (new AMessage(kWhatPushAudio, id()))->post(delayUs / 2); -} - -void DirectRenderer::AudioRenderer::onPushAudio() { - mPushPending = false; - - while (!mInputBuffers.empty()) { - const BufferInfo &info = *mInputBuffers.begin(); - - ssize_t n = writeNonBlocking( - info.mBuffer->data(), info.mBuffer->size()); - - if (n < (ssize_t)info.mBuffer->size()) { - CHECK_GE(n, 0); - - info.mBuffer->setRange( - info.mBuffer->offset() + n, info.mBuffer->size() - n); - break; - } - - mDecoderContext->releaseOutputBuffer(info.mIndex); - - mInputBuffers.erase(mInputBuffers.begin()); - } - - schedulePushIfNecessary(); -} - -ssize_t DirectRenderer::AudioRenderer::writeNonBlocking( - const uint8_t *data, size_t size) { - uint32_t numFramesPlayed; - status_t err = mAudioTrack->getPosition(&numFramesPlayed); - if (err != OK) { - return err; - } - - ssize_t numFramesAvailableToWrite = - mAudioTrack->frameCount() - (mNumFramesWritten - numFramesPlayed); - - size_t numBytesAvailableToWrite = - numFramesAvailableToWrite * mAudioTrack->frameSize(); - - if (size > numBytesAvailableToWrite) { - size = numBytesAvailableToWrite; - } - - CHECK_EQ(mAudioTrack->write(data, size), (ssize_t)size); - - size_t numFramesWritten = size / mAudioTrack->frameSize(); - mNumFramesWritten += numFramesWritten; - - return size; -} - -//////////////////////////////////////////////////////////////////////////////// - -DirectRenderer::DirectRenderer( - const sp &bufferProducer) - : mSurfaceTex(bufferProducer), - mVideoRenderPending(false), - mNumFramesLate(0), - mNumFrames(0) { -} - -DirectRenderer::~DirectRenderer() { -} - -void DirectRenderer::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatDecoderNotify: - { - onDecoderNotify(msg); - break; - } - - case kWhatRenderVideo: - { - onRenderVideo(); - break; - } - - case kWhatQueueAccessUnit: - onQueueAccessUnit(msg); - break; - - case kWhatSetFormat: - onSetFormat(msg); - break; - - default: - TRESPASS(); - } -} - -void DirectRenderer::setFormat(size_t trackIndex, const sp &format) { - sp msg = new AMessage(kWhatSetFormat, id()); - msg->setSize("trackIndex", trackIndex); - msg->setMessage("format", format); - msg->post(); -} - -void DirectRenderer::onSetFormat(const sp &msg) { - size_t trackIndex; - CHECK(msg->findSize("trackIndex", &trackIndex)); - - sp format; - CHECK(msg->findMessage("format", &format)); - - internalSetFormat(trackIndex, format); -} - -void DirectRenderer::internalSetFormat( - size_t trackIndex, const sp &format) { - CHECK_LT(trackIndex, 2u); - - CHECK(mDecoderContext[trackIndex] == NULL); - - sp notify = new AMessage(kWhatDecoderNotify, id()); - notify->setSize("trackIndex", trackIndex); - - mDecoderContext[trackIndex] = new DecoderContext(notify); - looper()->registerHandler(mDecoderContext[trackIndex]); - - CHECK_EQ((status_t)OK, - mDecoderContext[trackIndex]->init( - format, trackIndex == 0 ? 
mSurfaceTex : NULL)); - - if (trackIndex == 1) { - // Audio - mAudioRenderer = new AudioRenderer(mDecoderContext[1]); - looper()->registerHandler(mAudioRenderer); - } -} - -void DirectRenderer::queueAccessUnit( - size_t trackIndex, const sp &accessUnit) { - sp msg = new AMessage(kWhatQueueAccessUnit, id()); - msg->setSize("trackIndex", trackIndex); - msg->setBuffer("accessUnit", accessUnit); - msg->post(); -} - -void DirectRenderer::onQueueAccessUnit(const sp &msg) { - size_t trackIndex; - CHECK(msg->findSize("trackIndex", &trackIndex)); - - sp accessUnit; - CHECK(msg->findBuffer("accessUnit", &accessUnit)); - - CHECK_LT(trackIndex, 2u); - CHECK(mDecoderContext[trackIndex] != NULL); - - mDecoderContext[trackIndex]->queueInputBuffer(accessUnit); -} - -void DirectRenderer::onDecoderNotify(const sp &msg) { - size_t trackIndex; - CHECK(msg->findSize("trackIndex", &trackIndex)); - - int32_t what; - CHECK(msg->findInt32("what", &what)); - - switch (what) { - case DecoderContext::kWhatOutputBufferReady: - { - size_t index; - CHECK(msg->findSize("index", &index)); - - int64_t timeUs; - CHECK(msg->findInt64("timeUs", &timeUs)); - - sp buffer; - CHECK(msg->findBuffer("buffer", &buffer)); - - queueOutputBuffer(trackIndex, index, timeUs, buffer); - break; - } - - default: - TRESPASS(); - } -} - -void DirectRenderer::queueOutputBuffer( - size_t trackIndex, - size_t index, int64_t timeUs, const sp &buffer) { - if (trackIndex == 1) { - // Audio - mAudioRenderer->queueInputBuffer(index, timeUs, buffer); - return; - } - - OutputInfo info; - info.mIndex = index; - info.mTimeUs = timeUs; - info.mBuffer = buffer; - mVideoOutputBuffers.push_back(info); - - scheduleVideoRenderIfNecessary(); -} - -void DirectRenderer::scheduleVideoRenderIfNecessary() { - if (mVideoRenderPending || mVideoOutputBuffers.empty()) { - return; - } - - mVideoRenderPending = true; - - int64_t timeUs = (*mVideoOutputBuffers.begin()).mTimeUs; - int64_t nowUs = ALooper::GetNowUs(); - - int64_t delayUs = timeUs - nowUs; - - (new AMessage(kWhatRenderVideo, id()))->post(delayUs); -} - -void DirectRenderer::onRenderVideo() { - mVideoRenderPending = false; - - int64_t nowUs = ALooper::GetNowUs(); - - while (!mVideoOutputBuffers.empty()) { - const OutputInfo &info = *mVideoOutputBuffers.begin(); - - if (info.mTimeUs > nowUs) { - break; - } - - if (info.mTimeUs + 15000ll < nowUs) { - ++mNumFramesLate; - } - ++mNumFrames; - - status_t err = - mDecoderContext[0]->renderOutputBufferAndRelease(info.mIndex); - CHECK_EQ(err, (status_t)OK); - - mVideoOutputBuffers.erase(mVideoOutputBuffers.begin()); - } - - scheduleVideoRenderIfNecessary(); -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h deleted file mode 100644 index 07c2170..0000000 --- a/media/libstagefright/wifi-display/sink/DirectRenderer.h +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
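
DirectRenderer in the file removed above schedules video by presentation time: scheduleVideoRenderIfNecessary() posts a wakeup for the earliest queued timestamp, and onRenderVideo() releases every output buffer whose time has passed, counting frames more than 15 ms overdue as late. A self-contained sketch of that policy over a plain queue (hypothetical types; the actual MediaCodec release call is passed in as a callback):

    #include <cstddef>
    #include <cstdint>
    #include <deque>
    #include <functional>

    struct PendingFrame {
        size_t bufferIndex;   // decoder output buffer to render/release
        int64_t timeUs;       // target presentation time
    };

    struct VideoScheduler {
        std::deque<PendingFrame> queue;   // ordered by timeUs
        int32_t numFramesLate = 0;
        int32_t numFrames = 0;

        // How long to sleep before the next frame is due, or -1 if idle.
        int64_t nextDelayUs(int64_t nowUs) const {
            return queue.empty() ? -1 : queue.front().timeUs - nowUs;
        }

        // Render everything whose time has come, mirroring the deleted
        // onRenderVideo(): frames more than 15 ms past due count as late.
        void renderDue(int64_t nowUs, const std::function<void(size_t)> &render) {
            while (!queue.empty()) {
                const PendingFrame &f = queue.front();
                if (f.timeUs > nowUs) {
                    break;
                }
                if (f.timeUs + 15000ll < nowUs) {
                    ++numFramesLate;
                }
                ++numFrames;
                render(f.bufferIndex);
                queue.pop_front();
            }
        }
    };
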
- */ - -#ifndef DIRECT_RENDERER_H_ - -#define DIRECT_RENDERER_H_ - -#include - -namespace android { - -struct ABuffer; -struct IGraphicBufferProducer; - -// Renders audio and video data queued by calls to "queueAccessUnit". -struct DirectRenderer : public AHandler { - DirectRenderer(const sp &bufferProducer); - - void setFormat(size_t trackIndex, const sp &format); - void queueAccessUnit(size_t trackIndex, const sp &accessUnit); - -protected: - virtual void onMessageReceived(const sp &msg); - virtual ~DirectRenderer(); - -private: - struct DecoderContext; - struct AudioRenderer; - - enum { - kWhatDecoderNotify, - kWhatRenderVideo, - kWhatQueueAccessUnit, - kWhatSetFormat, - }; - - struct OutputInfo { - size_t mIndex; - int64_t mTimeUs; - sp mBuffer; - }; - - sp mSurfaceTex; - - sp mDecoderContext[2]; - List mVideoOutputBuffers; - - bool mVideoRenderPending; - - sp mAudioRenderer; - - int32_t mNumFramesLate; - int32_t mNumFrames; - - void onDecoderNotify(const sp &msg); - - void queueOutputBuffer( - size_t trackIndex, - size_t index, int64_t timeUs, const sp &buffer); - - void scheduleVideoRenderIfNecessary(); - void onRenderVideo(); - - void onSetFormat(const sp &msg); - void onQueueAccessUnit(const sp &msg); - - void internalSetFormat(size_t trackIndex, const sp &format); - - DISALLOW_EVIL_CONSTRUCTORS(DirectRenderer); -}; - -} // namespace android - -#endif // DIRECT_RENDERER_H_ diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp deleted file mode 100644 index bc88f1e..0000000 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ /dev/null @@ -1,917 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "WifiDisplaySink" -#include - -#include "WifiDisplaySink.h" - -#include "DirectRenderer.h" -#include "MediaReceiver.h" -#include "TimeSyncer.h" - -#include -#include -#include -#include -#include -#include -#include - -namespace android { - -// static -const AString WifiDisplaySink::sUserAgent = MakeUserAgent(); - -WifiDisplaySink::WifiDisplaySink( - uint32_t flags, - const sp &netSession, - const sp &bufferProducer, - const sp ¬ify) - : mState(UNDEFINED), - mFlags(flags), - mNetSession(netSession), - mSurfaceTex(bufferProducer), - mNotify(notify), - mUsingTCPTransport(false), - mUsingTCPInterleaving(false), - mSessionID(0), - mNextCSeq(1), - mIDRFrameRequestPending(false), - mTimeOffsetUs(0ll), - mTimeOffsetValid(false), - mSetupDeferred(false), - mLatencyCount(0), - mLatencySumUs(0ll), - mLatencyMaxUs(0ll), - mMaxDelayMs(-1ll) { - // We support any and all resolutions, but prefer 720p30 - mSinkSupportedVideoFormats.setNativeResolution( - VideoFormats::RESOLUTION_CEA, 5); // 1280 x 720 p30 - - mSinkSupportedVideoFormats.enableAll(); -} - -WifiDisplaySink::~WifiDisplaySink() { -} - -void WifiDisplaySink::start(const char *sourceHost, int32_t sourcePort) { - sp msg = new AMessage(kWhatStart, id()); - msg->setString("sourceHost", sourceHost); - msg->setInt32("sourcePort", sourcePort); - msg->post(); -} - -void WifiDisplaySink::start(const char *uri) { - sp msg = new AMessage(kWhatStart, id()); - msg->setString("setupURI", uri); - msg->post(); -} - -// static -bool WifiDisplaySink::ParseURL( - const char *url, AString *host, int32_t *port, AString *path, - AString *user, AString *pass) { - host->clear(); - *port = 0; - path->clear(); - user->clear(); - pass->clear(); - - if (strncasecmp("rtsp://", url, 7)) { - return false; - } - - const char *slashPos = strchr(&url[7], '/'); - - if (slashPos == NULL) { - host->setTo(&url[7]); - path->setTo("/"); - } else { - host->setTo(&url[7], slashPos - &url[7]); - path->setTo(slashPos); - } - - ssize_t atPos = host->find("@"); - - if (atPos >= 0) { - // Split of user:pass@ from hostname. 
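
WifiDisplaySink::ParseURL() here splits an rtsp:// URL into host, port (554 if absent), path and optional user:pass credentials. A standalone sketch of the same splitting over std::string (hypothetical function name; the deleted code matches the scheme case-insensitively):

    #include <cstdint>
    #include <cstdlib>
    #include <string>

    // Minimal rtsp:// splitter mirroring the deleted WifiDisplaySink::ParseURL().
    // Returns false if the scheme or port is malformed.
    static bool ParseRtspUrl(
            const std::string &url, std::string *host, int32_t *port,
            std::string *path, std::string *user, std::string *pass) {
        if (url.compare(0, 7, "rtsp://") != 0) {    // case-sensitive for brevity
            return false;
        }

        std::string rest = url.substr(7);

        size_t slashPos = rest.find('/');
        *path = (slashPos == std::string::npos) ? "/" : rest.substr(slashPos);
        *host = (slashPos == std::string::npos) ? rest : rest.substr(0, slashPos);

        size_t atPos = host->find('@');             // strip user[:pass]@
        if (atPos != std::string::npos) {
            std::string userPass = host->substr(0, atPos);
            host->erase(0, atPos + 1);
            size_t sepPos = userPass.find(':');
            if (sepPos == std::string::npos) {
                *user = userPass;
            } else {
                *user = userPass.substr(0, sepPos);
                *pass = userPass.substr(sepPos + 1);
            }
        }

        size_t colonPos = host->find(':');          // optional :port, default 554
        if (colonPos == std::string::npos) {
            *port = 554;
            return true;
        }

        char *end;
        unsigned long x = strtoul(host->c_str() + colonPos + 1, &end, 10);
        if (*end != '\0' || end == host->c_str() + colonPos + 1 || x == 0 || x >= 65536) {
            return false;
        }
        *port = (int32_t)x;
        host->erase(colonPos);
        return true;
    }
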
- - AString userPass(*host, 0, atPos); - host->erase(0, atPos + 1); - - ssize_t colonPos = userPass.find(":"); - - if (colonPos < 0) { - *user = userPass; - } else { - user->setTo(userPass, 0, colonPos); - pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1); - } - } - - const char *colonPos = strchr(host->c_str(), ':'); - - if (colonPos != NULL) { - char *end; - unsigned long x = strtoul(colonPos + 1, &end, 10); - - if (end == colonPos + 1 || *end != '\0' || x >= 65536) { - return false; - } - - *port = x; - - size_t colonOffset = colonPos - host->c_str(); - size_t trailing = host->size() - colonOffset; - host->erase(colonOffset, trailing); - } else { - *port = 554; - } - - return true; -} - -void WifiDisplaySink::onMessageReceived(const sp &msg) { - switch (msg->what()) { - case kWhatStart: - { - sleep(2); // XXX - - int32_t sourcePort; - CHECK(msg->findString("sourceHost", &mRTSPHost)); - CHECK(msg->findInt32("sourcePort", &sourcePort)); - - sp notify = new AMessage(kWhatRTSPNotify, id()); - - status_t err = mNetSession->createRTSPClient( - mRTSPHost.c_str(), sourcePort, notify, &mSessionID); - CHECK_EQ(err, (status_t)OK); - - mState = CONNECTING; - break; - } - - case kWhatRTSPNotify: - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - ALOGE("An error occurred in session %d (%d, '%s/%s').", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - if (sessionID == mSessionID) { - ALOGI("Lost control connection."); - - // The control connection is dead now. - mNetSession->destroySession(mSessionID); - mSessionID = 0; - - if (mNotify == NULL) { - looper()->stop(); - } else { - sp notify = mNotify->dup(); - notify->setInt32("what", kWhatDisconnected); - notify->post(); - } - } - break; - } - - case ANetworkSession::kWhatConnected: - { - ALOGI("We're now connected."); - mState = CONNECTED; - - if (mFlags & FLAG_SPECIAL_MODE) { - sp notify = new AMessage( - kWhatTimeSyncerNotify, id()); - - mTimeSyncer = new TimeSyncer(mNetSession, notify); - looper()->registerHandler(mTimeSyncer); - - mTimeSyncer->startClient(mRTSPHost.c_str(), 8123); - } - break; - } - - case ANetworkSession::kWhatData: - { - onReceiveClientData(msg); - break; - } - - default: - TRESPASS(); - } - break; - } - - case kWhatStop: - { - looper()->stop(); - break; - } - - case kWhatMediaReceiverNotify: - { - onMediaReceiverNotify(msg); - break; - } - - case kWhatTimeSyncerNotify: - { - int32_t what; - CHECK(msg->findInt32("what", &what)); - - if (what == TimeSyncer::kWhatTimeOffset) { - CHECK(msg->findInt64("offset", &mTimeOffsetUs)); - mTimeOffsetValid = true; - - if (mSetupDeferred) { - CHECK_EQ((status_t)OK, - sendSetup( - mSessionID, - "rtsp://x.x.x.x:x/wfd1.0/streamid=0")); - - mSetupDeferred = false; - } - } - break; - } - - case kWhatReportLateness: - { - if (mLatencyCount > 0) { - int64_t avgLatencyUs = mLatencySumUs / mLatencyCount; - - ALOGV("avg. 
latency = %lld ms (max %lld ms)", - avgLatencyUs / 1000ll, - mLatencyMaxUs / 1000ll); - - sp params = new AMessage; - params->setInt64("avgLatencyUs", avgLatencyUs); - params->setInt64("maxLatencyUs", mLatencyMaxUs); - mMediaReceiver->informSender(0 /* trackIndex */, params); - } - - mLatencyCount = 0; - mLatencySumUs = 0ll; - mLatencyMaxUs = 0ll; - - msg->post(kReportLatenessEveryUs); - break; - } - - default: - TRESPASS(); - } -} - -void WifiDisplaySink::dumpDelay(size_t trackIndex, int64_t timeUs) { - int64_t delayMs = (ALooper::GetNowUs() - timeUs) / 1000ll; - - if (delayMs > mMaxDelayMs) { - mMaxDelayMs = delayMs; - } - - static const int64_t kMinDelayMs = 0; - static const int64_t kMaxDelayMs = 300; - - const char *kPattern = "########################################"; - size_t kPatternSize = strlen(kPattern); - - int n = (kPatternSize * (delayMs - kMinDelayMs)) - / (kMaxDelayMs - kMinDelayMs); - - if (n < 0) { - n = 0; - } else if ((size_t)n > kPatternSize) { - n = kPatternSize; - } - - ALOGI("[%lld]: (%4lld ms / %4lld ms) %s", - timeUs / 1000, - delayMs, - mMaxDelayMs, - kPattern + kPatternSize - n); -} - -void WifiDisplaySink::onMediaReceiverNotify(const sp &msg) { - int32_t what; - CHECK(msg->findInt32("what", &what)); - - switch (what) { - case MediaReceiver::kWhatInitDone: - { - status_t err; - CHECK(msg->findInt32("err", &err)); - - ALOGI("MediaReceiver initialization completed w/ err %d", err); - break; - } - - case MediaReceiver::kWhatError: - { - status_t err; - CHECK(msg->findInt32("err", &err)); - - ALOGE("MediaReceiver signaled error %d", err); - break; - } - - case MediaReceiver::kWhatAccessUnit: - { - if (mRenderer == NULL) { - mRenderer = new DirectRenderer(mSurfaceTex); - looper()->registerHandler(mRenderer); - } - - sp accessUnit; - CHECK(msg->findBuffer("accessUnit", &accessUnit)); - - int64_t timeUs; - CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); - - if (!mTimeOffsetValid && !(mFlags & FLAG_SPECIAL_MODE)) { - mTimeOffsetUs = timeUs - ALooper::GetNowUs(); - mTimeOffsetValid = true; - } - - CHECK(mTimeOffsetValid); - - // We are the timesync _client_, - // client time = server time - time offset. 
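
The access-unit handling around this point maps each source timestamp into local time (client time = server time minus the measured offset), accumulates the average and maximum latency that the kWhatReportLateness timer above periodically forwards to the source, and then adds a fixed 220 ms rendering budget. A small sketch of that accounting with a hypothetical struct:

    #include <cstddef>
    #include <cstdint>

    // Tracks per-access-unit latency the way the deleted WifiDisplaySink does,
    // producing the values later reported back via informSender().
    struct LatencyTracker {
        int64_t timeOffsetUs = 0;     // server clock minus client clock
        int64_t sumUs = 0;
        int64_t maxUs = 0;
        size_t count = 0;

        // Returns the render time for this unit: the local arrival latency is
        // recorded, then a ~220 ms budget is added (the constant hard-coded in
        // the deleted code).
        int64_t onAccessUnit(int64_t sourceTimeUs, int64_t nowUs) {
            int64_t localTimeUs = sourceTimeUs - timeOffsetUs;
            int64_t delayUs = nowUs - localTimeUs;

            sumUs += delayUs;
            if (count == 0 || delayUs > maxUs) {
                maxUs = delayUs;
            }
            ++count;

            return localTimeUs + 220000ll;
        }

        int64_t averageUs() const { return count == 0 ? 0 : sumUs / count; }

        void reset() { sumUs = 0; maxUs = 0; count = 0; }
    };
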
- timeUs -= mTimeOffsetUs; - - size_t trackIndex; - CHECK(msg->findSize("trackIndex", &trackIndex)); - - int64_t nowUs = ALooper::GetNowUs(); - int64_t delayUs = nowUs - timeUs; - - mLatencySumUs += delayUs; - if (mLatencyCount == 0 || delayUs > mLatencyMaxUs) { - mLatencyMaxUs = delayUs; - } - ++mLatencyCount; - - // dumpDelay(trackIndex, timeUs); - - timeUs += 220000ll; // Assume 220 ms of latency - accessUnit->meta()->setInt64("timeUs", timeUs); - - sp format; - if (msg->findMessage("format", &format)) { - mRenderer->setFormat(trackIndex, format); - } - - mRenderer->queueAccessUnit(trackIndex, accessUnit); - break; - } - - case MediaReceiver::kWhatPacketLost: - { -#if 0 - if (!mIDRFrameRequestPending) { - ALOGI("requesting IDR frame"); - - sendIDRFrameRequest(mSessionID); - } -#endif - break; - } - - default: - TRESPASS(); - } -} - -void WifiDisplaySink::registerResponseHandler( - int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func) { - ResponseID id; - id.mSessionID = sessionID; - id.mCSeq = cseq; - mResponseHandlers.add(id, func); -} - -status_t WifiDisplaySink::sendM2(int32_t sessionID) { - AString request = "OPTIONS * RTSP/1.0\r\n"; - AppendCommonResponse(&request, mNextCSeq); - - request.append( - "Require: org.wfa.wfd1.0\r\n" - "\r\n"); - - status_t err = - mNetSession->sendRequest(sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, mNextCSeq, &WifiDisplaySink::onReceiveM2Response); - - ++mNextCSeq; - - return OK; -} - -status_t WifiDisplaySink::onReceiveM2Response( - int32_t sessionID, const sp &msg) { - int32_t statusCode; - if (!msg->getStatusCode(&statusCode)) { - return ERROR_MALFORMED; - } - - if (statusCode != 200) { - return ERROR_UNSUPPORTED; - } - - return OK; -} - -status_t WifiDisplaySink::onReceiveSetupResponse( - int32_t sessionID, const sp &msg) { - int32_t statusCode; - if (!msg->getStatusCode(&statusCode)) { - return ERROR_MALFORMED; - } - - if (statusCode != 200) { - return ERROR_UNSUPPORTED; - } - - if (!msg->findString("session", &mPlaybackSessionID)) { - return ERROR_MALFORMED; - } - - if (!ParsedMessage::GetInt32Attribute( - mPlaybackSessionID.c_str(), - "timeout", - &mPlaybackSessionTimeoutSecs)) { - mPlaybackSessionTimeoutSecs = -1; - } - - ssize_t colonPos = mPlaybackSessionID.find(";"); - if (colonPos >= 0) { - // Strip any options from the returned session id. - mPlaybackSessionID.erase( - colonPos, mPlaybackSessionID.size() - colonPos); - } - - status_t err = configureTransport(msg); - - if (err != OK) { - return err; - } - - mState = PAUSED; - - return sendPlay( - sessionID, - "rtsp://x.x.x.x:x/wfd1.0/streamid=0"); -} - -status_t WifiDisplaySink::configureTransport(const sp &msg) { - if (mUsingTCPTransport && !(mFlags & FLAG_SPECIAL_MODE)) { - // In "special" mode we still use a UDP RTCP back-channel that - // needs connecting. 
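
configureTransport(), continuing just below, pulls the source's ports out of the RTSP Transport header's server_port attribute and insists on an adjacent RTP/RTCP pair. A standalone sketch of that validation (hypothetical function name, plain sscanf as in the deleted code):

    #include <cstdint>
    #include <cstdio>

    // Parses the "server_port=<rtp>-<rtcp>" value from an RTSP Transport header,
    // applying the same sanity checks as the deleted configureTransport():
    // both ports in range and the RTCP port directly following the RTP port.
    static bool ParseServerPorts(const char *serverPortStr,
                                 int32_t *rtpPort, int32_t *rtcpPort) {
        int rtp, rtcp;
        if (sscanf(serverPortStr, "%d-%d", &rtp, &rtcp) != 2
                || rtp <= 0 || rtp > 65535
                || rtcp <= 0 || rtcp > 65535
                || rtcp != rtp + 1) {
            return false;
        }

        // The deleted code merely warns when the source picks an odd RTP port,
        // even though RFC 3550 suggests an even-RTP / odd-RTCP pairing.
        *rtpPort = rtp;
        *rtcpPort = rtcp;
        return true;
    }
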
- return OK; - } - - AString transport; - if (!msg->findString("transport", &transport)) { - ALOGE("Missing 'transport' field in SETUP response."); - return ERROR_MALFORMED; - } - - AString sourceHost; - if (!ParsedMessage::GetAttribute( - transport.c_str(), "source", &sourceHost)) { - sourceHost = mRTSPHost; - } - - AString serverPortStr; - if (!ParsedMessage::GetAttribute( - transport.c_str(), "server_port", &serverPortStr)) { - ALOGE("Missing 'server_port' in Transport field."); - return ERROR_MALFORMED; - } - - int rtpPort, rtcpPort; - if (sscanf(serverPortStr.c_str(), "%d-%d", &rtpPort, &rtcpPort) != 2 - || rtpPort <= 0 || rtpPort > 65535 - || rtcpPort <=0 || rtcpPort > 65535 - || rtcpPort != rtpPort + 1) { - ALOGE("Invalid server_port description '%s'.", - serverPortStr.c_str()); - - return ERROR_MALFORMED; - } - - if (rtpPort & 1) { - ALOGW("Server picked an odd numbered RTP port."); - } - - return mMediaReceiver->connectTrack( - 0 /* trackIndex */, sourceHost.c_str(), rtpPort, rtcpPort); -} - -status_t WifiDisplaySink::onReceivePlayResponse( - int32_t sessionID, const sp &msg) { - int32_t statusCode; - if (!msg->getStatusCode(&statusCode)) { - return ERROR_MALFORMED; - } - - if (statusCode != 200) { - return ERROR_UNSUPPORTED; - } - - mState = PLAYING; - - (new AMessage(kWhatReportLateness, id()))->post(kReportLatenessEveryUs); - - return OK; -} - -status_t WifiDisplaySink::onReceiveIDRFrameRequestResponse( - int32_t sessionID, const sp &msg) { - CHECK(mIDRFrameRequestPending); - mIDRFrameRequestPending = false; - - return OK; -} - -void WifiDisplaySink::onReceiveClientData(const sp &msg) { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - sp obj; - CHECK(msg->findObject("data", &obj)); - - sp data = - static_cast(obj.get()); - - ALOGV("session %d received '%s'", - sessionID, data->debugString().c_str()); - - AString method; - AString uri; - data->getRequestField(0, &method); - - int32_t cseq; - if (!data->findInt32("cseq", &cseq)) { - sendErrorResponse(sessionID, "400 Bad Request", -1 /* cseq */); - return; - } - - if (method.startsWith("RTSP/")) { - // This is a response. 
- - ResponseID id; - id.mSessionID = sessionID; - id.mCSeq = cseq; - - ssize_t index = mResponseHandlers.indexOfKey(id); - - if (index < 0) { - ALOGW("Received unsolicited server response, cseq %d", cseq); - return; - } - - HandleRTSPResponseFunc func = mResponseHandlers.valueAt(index); - mResponseHandlers.removeItemsAt(index); - - status_t err = (this->*func)(sessionID, data); - CHECK_EQ(err, (status_t)OK); - } else { - AString version; - data->getRequestField(2, &version); - if (!(version == AString("RTSP/1.0"))) { - sendErrorResponse(sessionID, "505 RTSP Version not supported", cseq); - return; - } - - if (method == "OPTIONS") { - onOptionsRequest(sessionID, cseq, data); - } else if (method == "GET_PARAMETER") { - onGetParameterRequest(sessionID, cseq, data); - } else if (method == "SET_PARAMETER") { - onSetParameterRequest(sessionID, cseq, data); - } else { - sendErrorResponse(sessionID, "405 Method Not Allowed", cseq); - } - } -} - -void WifiDisplaySink::onOptionsRequest( - int32_t sessionID, - int32_t cseq, - const sp &data) { - AString response = "RTSP/1.0 200 OK\r\n"; - AppendCommonResponse(&response, cseq); - response.append("Public: org.wfa.wfd1.0, GET_PARAMETER, SET_PARAMETER\r\n"); - response.append("\r\n"); - - status_t err = mNetSession->sendRequest(sessionID, response.c_str()); - CHECK_EQ(err, (status_t)OK); - - err = sendM2(sessionID); - CHECK_EQ(err, (status_t)OK); -} - -void WifiDisplaySink::onGetParameterRequest( - int32_t sessionID, - int32_t cseq, - const sp &data) { - AString body; - - if (mState == CONNECTED) { - mUsingTCPTransport = false; - mUsingTCPInterleaving = false; - - char val[PROPERTY_VALUE_MAX]; - if (property_get("media.wfd-sink.tcp-mode", val, NULL)) { - if (!strcasecmp("true", val) || !strcmp("1", val)) { - ALOGI("Using TCP unicast transport."); - mUsingTCPTransport = true; - mUsingTCPInterleaving = false; - } else if (!strcasecmp("interleaved", val)) { - ALOGI("Using TCP interleaved transport."); - mUsingTCPTransport = true; - mUsingTCPInterleaving = true; - } - } else if (mFlags & FLAG_SPECIAL_MODE) { - mUsingTCPTransport = true; - } - - body = "wfd_video_formats: "; - body.append(mSinkSupportedVideoFormats.getFormatSpec()); - - body.append( - "\r\nwfd_audio_codecs: AAC 0000000F 00\r\n" - "wfd_client_rtp_ports: RTP/AVP/"); - - if (mUsingTCPTransport) { - body.append("TCP;"); - if (mUsingTCPInterleaving) { - body.append("interleaved"); - } else { - body.append("unicast 19000 0"); - } - } else { - body.append("UDP;unicast 19000 0"); - } - - body.append(" mode=play\r\n"); - } - - AString response = "RTSP/1.0 200 OK\r\n"; - AppendCommonResponse(&response, cseq); - response.append("Content-Type: text/parameters\r\n"); - response.append(StringPrintf("Content-Length: %d\r\n", body.size())); - response.append("\r\n"); - response.append(body); - - status_t err = mNetSession->sendRequest(sessionID, response.c_str()); - CHECK_EQ(err, (status_t)OK); -} - -status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) { - sp notify = new AMessage(kWhatMediaReceiverNotify, id()); - - mMediaReceiverLooper = new ALooper; - mMediaReceiverLooper->setName("media_receiver"); - - mMediaReceiverLooper->start( - false /* runOnCallingThread */, - false /* canCallJava */, - PRIORITY_AUDIO); - - mMediaReceiver = new MediaReceiver(mNetSession, notify); - mMediaReceiverLooper->registerHandler(mMediaReceiver); - - RTPReceiver::TransportMode rtpMode = RTPReceiver::TRANSPORT_UDP; - if (mUsingTCPTransport) { - if (mUsingTCPInterleaving) { - rtpMode = 
RTPReceiver::TRANSPORT_TCP_INTERLEAVED; - } else { - rtpMode = RTPReceiver::TRANSPORT_TCP; - } - } - - int32_t localRTPPort; - status_t err = mMediaReceiver->addTrack( - rtpMode, RTPReceiver::TRANSPORT_UDP /* rtcpMode */, &localRTPPort); - - if (err == OK) { - err = mMediaReceiver->initAsync(MediaReceiver::MODE_TRANSPORT_STREAM); - } - - if (err != OK) { - mMediaReceiverLooper->unregisterHandler(mMediaReceiver->id()); - mMediaReceiver.clear(); - - mMediaReceiverLooper->stop(); - mMediaReceiverLooper.clear(); - - return err; - } - - AString request = StringPrintf("SETUP %s RTSP/1.0\r\n", uri); - - AppendCommonResponse(&request, mNextCSeq); - - if (rtpMode == RTPReceiver::TRANSPORT_TCP_INTERLEAVED) { - request.append("Transport: RTP/AVP/TCP;interleaved=0-1\r\n"); - } else if (rtpMode == RTPReceiver::TRANSPORT_TCP) { - if (mFlags & FLAG_SPECIAL_MODE) { - // This isn't quite true, since the RTP connection is through TCP - // and the RTCP connection through UDP... - request.append( - StringPrintf( - "Transport: RTP/AVP/TCP;unicast;client_port=%d-%d\r\n", - localRTPPort, localRTPPort + 1)); - } else { - request.append( - StringPrintf( - "Transport: RTP/AVP/TCP;unicast;client_port=%d\r\n", - localRTPPort)); - } - } else { - request.append( - StringPrintf( - "Transport: RTP/AVP/UDP;unicast;client_port=%d-%d\r\n", - localRTPPort, - localRTPPort + 1)); - } - - request.append("\r\n"); - - ALOGV("request = '%s'", request.c_str()); - - err = mNetSession->sendRequest(sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, mNextCSeq, &WifiDisplaySink::onReceiveSetupResponse); - - ++mNextCSeq; - - return OK; -} - -status_t WifiDisplaySink::sendPlay(int32_t sessionID, const char *uri) { - AString request = StringPrintf("PLAY %s RTSP/1.0\r\n", uri); - - AppendCommonResponse(&request, mNextCSeq); - - request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str())); - request.append("\r\n"); - - status_t err = - mNetSession->sendRequest(sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, mNextCSeq, &WifiDisplaySink::onReceivePlayResponse); - - ++mNextCSeq; - - return OK; -} - -status_t WifiDisplaySink::sendIDRFrameRequest(int32_t sessionID) { - CHECK(!mIDRFrameRequestPending); - - AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n"; - - AppendCommonResponse(&request, mNextCSeq); - - AString content = "wfd_idr_request\r\n"; - - request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str())); - request.append(StringPrintf("Content-Length: %d\r\n", content.size())); - request.append("\r\n"); - request.append(content); - - status_t err = - mNetSession->sendRequest(sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, - mNextCSeq, - &WifiDisplaySink::onReceiveIDRFrameRequestResponse); - - ++mNextCSeq; - - mIDRFrameRequestPending = true; - - return OK; -} - -void WifiDisplaySink::onSetParameterRequest( - int32_t sessionID, - int32_t cseq, - const sp &data) { - const char *content = data->getContent(); - - if (strstr(content, "wfd_trigger_method: SETUP\r\n") != NULL) { - if ((mFlags & FLAG_SPECIAL_MODE) && !mTimeOffsetValid) { - mSetupDeferred = true; - } else { - status_t err = - sendSetup( - sessionID, - "rtsp://x.x.x.x:x/wfd1.0/streamid=0"); - - CHECK_EQ(err, (status_t)OK); - } - } - - AString response = "RTSP/1.0 200 OK\r\n"; - AppendCommonResponse(&response, 
cseq); - response.append("\r\n"); - - status_t err = mNetSession->sendRequest(sessionID, response.c_str()); - CHECK_EQ(err, (status_t)OK); -} - -void WifiDisplaySink::sendErrorResponse( - int32_t sessionID, - const char *errorDetail, - int32_t cseq) { - AString response; - response.append("RTSP/1.0 "); - response.append(errorDetail); - response.append("\r\n"); - - AppendCommonResponse(&response, cseq); - - response.append("\r\n"); - - status_t err = mNetSession->sendRequest(sessionID, response.c_str()); - CHECK_EQ(err, (status_t)OK); -} - -// static -void WifiDisplaySink::AppendCommonResponse(AString *response, int32_t cseq) { - time_t now = time(NULL); - struct tm *now2 = gmtime(&now); - char buf[128]; - strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S %z", now2); - - response->append("Date: "); - response->append(buf); - response->append("\r\n"); - - response->append(StringPrintf("User-Agent: %s\r\n", sUserAgent.c_str())); - - if (cseq >= 0) { - response->append(StringPrintf("CSeq: %d\r\n", cseq)); - } -} - -} // namespace android diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h deleted file mode 100644 index dc1fc32..0000000 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef WIFI_DISPLAY_SINK_H_ - -#define WIFI_DISPLAY_SINK_H_ - -#include "VideoFormats.h" - -#include -#include -#include - -namespace android { - -struct AMessage; -struct DirectRenderer; -struct MediaReceiver; -struct ParsedMessage; -struct TimeSyncer; - -// Represents the RTSP client acting as a wifi display sink. -// Connects to a wifi display source and renders the incoming -// transport stream using a MediaPlayer instance. -struct WifiDisplaySink : public AHandler { - enum { - kWhatDisconnected, - }; - - enum Flags { - FLAG_SPECIAL_MODE = 1, - }; - - // If no notification message is specified (notify == NULL) - // the sink will stop its looper() once the session ends, - // otherwise it will post an appropriate notification but leave - // the looper() running. 
- WifiDisplaySink( - uint32_t flags, - const sp &netSession, - const sp &bufferProducer = NULL, - const sp ¬ify = NULL); - - void start(const char *sourceHost, int32_t sourcePort); - void start(const char *uri); - -protected: - virtual ~WifiDisplaySink(); - virtual void onMessageReceived(const sp &msg); - -private: - enum State { - UNDEFINED, - CONNECTING, - CONNECTED, - PAUSED, - PLAYING, - }; - - enum { - kWhatStart, - kWhatRTSPNotify, - kWhatStop, - kWhatMediaReceiverNotify, - kWhatTimeSyncerNotify, - kWhatReportLateness, - }; - - struct ResponseID { - int32_t mSessionID; - int32_t mCSeq; - - bool operator<(const ResponseID &other) const { - return mSessionID < other.mSessionID - || (mSessionID == other.mSessionID - && mCSeq < other.mCSeq); - } - }; - - typedef status_t (WifiDisplaySink::*HandleRTSPResponseFunc)( - int32_t sessionID, const sp &msg); - - static const int64_t kReportLatenessEveryUs = 1000000ll; - - static const AString sUserAgent; - - State mState; - uint32_t mFlags; - VideoFormats mSinkSupportedVideoFormats; - sp mNetSession; - sp mSurfaceTex; - sp mNotify; - sp mTimeSyncer; - bool mUsingTCPTransport; - bool mUsingTCPInterleaving; - AString mRTSPHost; - int32_t mSessionID; - - int32_t mNextCSeq; - - KeyedVector mResponseHandlers; - - sp mMediaReceiverLooper; - sp mMediaReceiver; - sp mRenderer; - - AString mPlaybackSessionID; - int32_t mPlaybackSessionTimeoutSecs; - - bool mIDRFrameRequestPending; - - int64_t mTimeOffsetUs; - bool mTimeOffsetValid; - - bool mSetupDeferred; - - size_t mLatencyCount; - int64_t mLatencySumUs; - int64_t mLatencyMaxUs; - - int64_t mMaxDelayMs; - - status_t sendM2(int32_t sessionID); - status_t sendSetup(int32_t sessionID, const char *uri); - status_t sendPlay(int32_t sessionID, const char *uri); - status_t sendIDRFrameRequest(int32_t sessionID); - - status_t onReceiveM2Response( - int32_t sessionID, const sp &msg); - - status_t onReceiveSetupResponse( - int32_t sessionID, const sp &msg); - - status_t configureTransport(const sp &msg); - - status_t onReceivePlayResponse( - int32_t sessionID, const sp &msg); - - status_t onReceiveIDRFrameRequestResponse( - int32_t sessionID, const sp &msg); - - void registerResponseHandler( - int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func); - - void onReceiveClientData(const sp &msg); - - void onOptionsRequest( - int32_t sessionID, - int32_t cseq, - const sp &data); - - void onGetParameterRequest( - int32_t sessionID, - int32_t cseq, - const sp &data); - - void onSetParameterRequest( - int32_t sessionID, - int32_t cseq, - const sp &data); - - void onMediaReceiverNotify(const sp &msg); - - void sendErrorResponse( - int32_t sessionID, - const char *errorDetail, - int32_t cseq); - - static void AppendCommonResponse(AString *response, int32_t cseq); - - bool ParseURL( - const char *url, AString *host, int32_t *port, AString *path, - AString *user, AString *pass); - - void dumpDelay(size_t trackIndex, int64_t timeUs); - - DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySink); -}; - -} // namespace android - -#endif // WIFI_DISPLAY_SINK_H_ diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index d72349d..05e4018 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -22,7 +22,6 @@ #include "PlaybackSession.h" #include "Parameters.h" #include "rtp/RTPSender.h" -#include "TimeSyncer.h" #include #include @@ -173,15 +172,7 @@ void 
WifiDisplaySource::onMessageReceived(const sp &msg) { } } - if (err == OK) { - sp notify = new AMessage(kWhatTimeSyncerNotify, id()); - mTimeSyncer = new TimeSyncer(mNetSession, notify); - looper()->registerHandler(mTimeSyncer); - - mTimeSyncer->startServer(8123); - - mState = AWAITING_CLIENT_CONNECTION; - } + mState = AWAITING_CLIENT_CONNECTION; sp response = new AMessage; response->setInt32("err", err); @@ -556,11 +547,6 @@ void WifiDisplaySource::onMessageReceived(const sp &msg) { break; } - case kWhatTimeSyncerNotify: - { - break; - } - default: TRESPASS(); } diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index 4f11712..750265f 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ -30,7 +30,6 @@ namespace android { struct IHDCP; struct IRemoteDisplayClient; struct ParsedMessage; -struct TimeSyncer; // Represents the RTSP server acting as a wifi display source. // Manages incoming connections, sets up Playback sessions as necessary. @@ -83,7 +82,6 @@ private: kWhatHDCPNotify, kWhatFinishStop2, kWhatTeardownTriggerTimedOut, - kWhatTimeSyncerNotify, }; struct ResponseID { @@ -120,7 +118,6 @@ private: sp mNetSession; sp mClient; AString mMediaPath; - sp mTimeSyncer; struct in_addr mInterfaceAddr; int32_t mSessionID; diff --git a/media/libstagefright/wifi-display/udptest.cpp b/media/libstagefright/wifi-display/udptest.cpp deleted file mode 100644 index 61eb9f9..0000000 --- a/media/libstagefright/wifi-display/udptest.cpp +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NEBUG 0 -#define LOG_TAG "udptest" -#include - -#include "TimeSyncer.h" - -#include -#include -#include - -namespace android { - -} // namespace android - -static void usage(const char *me) { - fprintf(stderr, - "usage: %s -c host[:port]\tconnect to test server\n" - " -l \tcreate a test server\n", - me); -} - -int main(int argc, char **argv) { - using namespace android; - - ProcessState::self()->startThreadPool(); - - int32_t localPort = -1; - int32_t connectToPort = -1; - AString connectToHost; - - int res; - while ((res = getopt(argc, argv, "hc:l:")) >= 0) { - switch (res) { - case 'c': - { - const char *colonPos = strrchr(optarg, ':'); - - if (colonPos == NULL) { - connectToHost = optarg; - connectToPort = 49152; - } else { - connectToHost.setTo(optarg, colonPos - optarg); - - char *end; - connectToPort = strtol(colonPos + 1, &end, 10); - - if (*end != '\0' || end == colonPos + 1 - || connectToPort < 1 || connectToPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - } - break; - } - - case 'l': - { - char *end; - localPort = strtol(optarg, &end, 10); - - if (*end != '\0' || end == optarg - || localPort < 1 || localPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - break; - } - - case '?': - case 'h': - usage(argv[0]); - exit(1); - } - } - - if (localPort < 0 && connectToPort < 0) { - fprintf(stderr, - "You need to select either client or server mode.\n"); - exit(1); - } - - sp netSession = new ANetworkSession; - netSession->start(); - - sp looper = new ALooper; - - sp handler = new TimeSyncer(netSession, NULL /* notify */); - looper->registerHandler(handler); - - if (localPort >= 0) { - handler->startServer(localPort); - } else { - handler->startClient(connectToHost.c_str(), connectToPort); - } - - looper->start(true /* runOnCallingThread */); - - return 0; -} - -- cgit v1.1 From f1e98d857ec377f2c9b916073d40732e6ebb7ced Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Fri, 6 Sep 2013 09:32:43 -0700 Subject: Camera API 2, Device 2/3: Implement idle and shutter callbacks - Update callback Binder interface - Rename frameId to be requestId to be consistent and disambiguate from frameNumber. 
- Implement shutter callback from HAL2/3 notify() - Add in-flight tracking to HAL2 - Add requestId to in-flight tracking - Report requestId from shutter callback - Implement idle callback from HAL3 process_capture_result - Add new idle tracker thread - Update all idle waiting to use the tracker - Add reporting from request thread, all streams to tracker - Remove existing idle waiting infrastructure Bug: 10549462 Change-Id: I867bfc248e3848c50e71527e3561fe92dc037958 --- camera/IProCameraCallbacks.cpp | 8 +- camera/ProCamera.cpp | 6 +- camera/camera2/ICameraDeviceCallbacks.cpp | 68 ++- camera/tests/ProCameraTests.cpp | 5 +- include/camera/IProCameraCallbacks.h | 2 +- include/camera/ProCamera.h | 2 +- include/camera/camera2/ICameraDeviceCallbacks.h | 22 +- services/camera/libcameraservice/Android.mk | 1 + .../api1/client2/CaptureSequencer.cpp | 4 +- .../api1/client2/CaptureSequencer.h | 2 +- .../libcameraservice/api1/client2/ZslProcessor.cpp | 2 +- .../libcameraservice/api1/client2/ZslProcessor.h | 2 +- .../api1/client2/ZslProcessor3.cpp | 2 +- .../libcameraservice/api1/client2/ZslProcessor3.h | 2 +- .../libcameraservice/api2/CameraDeviceClient.cpp | 43 +- .../libcameraservice/api2/CameraDeviceClient.h | 12 +- .../libcameraservice/api_pro/ProCamera2Client.cpp | 4 +- .../libcameraservice/api_pro/ProCamera2Client.h | 2 +- .../libcameraservice/common/Camera2ClientBase.cpp | 20 +- .../libcameraservice/common/Camera2ClientBase.h | 3 +- .../libcameraservice/common/CameraDeviceBase.h | 15 +- .../libcameraservice/common/FrameProcessorBase.h | 2 +- .../libcameraservice/device2/Camera2Device.cpp | 6 +- .../libcameraservice/device2/Camera2Device.h | 4 + .../libcameraservice/device3/Camera3Device.cpp | 539 ++++++++++++++------- .../libcameraservice/device3/Camera3Device.h | 98 +++- .../device3/Camera3IOStreamBase.cpp | 72 +-- .../libcameraservice/device3/Camera3IOStreamBase.h | 1 - .../device3/Camera3InputStream.cpp | 4 +- .../device3/Camera3OutputStream.cpp | 3 +- .../libcameraservice/device3/Camera3Stream.cpp | 37 +- .../libcameraservice/device3/Camera3Stream.h | 12 +- .../device3/Camera3StreamInterface.h | 10 +- .../libcameraservice/device3/StatusTracker.cpp | 219 +++++++++ .../libcameraservice/device3/StatusTracker.h | 130 +++++ 35 files changed, 1030 insertions(+), 334 deletions(-) create mode 100644 services/camera/libcameraservice/device3/StatusTracker.cpp create mode 100644 services/camera/libcameraservice/device3/StatusTracker.h diff --git a/camera/IProCameraCallbacks.cpp b/camera/IProCameraCallbacks.cpp index 0fdb85a..bd3d420 100644 --- a/camera/IProCameraCallbacks.cpp +++ b/camera/IProCameraCallbacks.cpp @@ -67,11 +67,11 @@ public: IBinder::FLAG_ONEWAY); } - void onResultReceived(int32_t frameId, camera_metadata* result) { + void onResultReceived(int32_t requestId, camera_metadata* result) { ALOGV("onResultReceived"); Parcel data, reply; data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor()); - data.writeInt32(frameId); + data.writeInt32(requestId); CameraMetadata::writeToParcel(data, result); remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY); } @@ -107,10 +107,10 @@ status_t BnProCameraCallbacks::onTransact( case RESULT_RECEIVED: { ALOGV("RESULT_RECEIVED"); CHECK_INTERFACE(IProCameraCallbacks, data, reply); - int32_t frameId = data.readInt32(); + int32_t requestId = data.readInt32(); camera_metadata_t *result = NULL; CameraMetadata::readFromParcel(data, &result); - onResultReceived(frameId, result); + onResultReceived(requestId, result); return NO_ERROR; 
break; } diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp index 577c760..ba5a48c 100644 --- a/camera/ProCamera.cpp +++ b/camera/ProCamera.cpp @@ -90,8 +90,8 @@ void ProCamera::onLockStatusChanged( } } -void ProCamera::onResultReceived(int32_t frameId, camera_metadata* result) { - ALOGV("%s: frameId = %d, result = %p", __FUNCTION__, frameId, result); +void ProCamera::onResultReceived(int32_t requestId, camera_metadata* result) { + ALOGV("%s: requestId = %d, result = %p", __FUNCTION__, requestId, result); sp listener; { @@ -112,7 +112,7 @@ void ProCamera::onResultReceived(int32_t frameId, camera_metadata* result) { result = tmp.release(); if (listener != NULL) { - listener->onResultReceived(frameId, result); + listener->onResultReceived(requestId, result); } else { free_camera_metadata(result); } diff --git a/camera/camera2/ICameraDeviceCallbacks.cpp b/camera/camera2/ICameraDeviceCallbacks.cpp index 3cec1f4..613358a 100644 --- a/camera/camera2/ICameraDeviceCallbacks.cpp +++ b/camera/camera2/ICameraDeviceCallbacks.cpp @@ -32,7 +32,9 @@ namespace android { enum { - NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION, + CAMERA_ERROR = IBinder::FIRST_CALL_TRANSACTION, + CAMERA_IDLE, + CAPTURE_STARTED, RESULT_RECEIVED, }; @@ -44,19 +46,37 @@ public: { } - // generic callback from camera service to app - void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) + void onDeviceError(CameraErrorCode errorCode) { - ALOGV("notifyCallback"); + ALOGV("onDeviceError"); Parcel data, reply; data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor()); - data.writeInt32(msgType); - data.writeInt32(ext1); - data.writeInt32(ext2); - remote()->transact(NOTIFY_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY); + data.writeInt32(static_cast(errorCode)); + remote()->transact(CAMERA_ERROR, data, &reply, IBinder::FLAG_ONEWAY); data.writeNoException(); } + void onDeviceIdle() + { + ALOGV("onDeviceIdle"); + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor()); + remote()->transact(CAMERA_IDLE, data, &reply, IBinder::FLAG_ONEWAY); + data.writeNoException(); + } + + void onCaptureStarted(int32_t requestId, int64_t timestamp) + { + ALOGV("onCaptureStarted"); + Parcel data, reply; + data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor()); + data.writeInt32(requestId); + data.writeInt64(timestamp); + remote()->transact(CAPTURE_STARTED, data, &reply, IBinder::FLAG_ONEWAY); + data.writeNoException(); + } + + void onResultReceived(int32_t requestId, const CameraMetadata& result) { ALOGV("onResultReceived"); Parcel data, reply; @@ -79,18 +99,33 @@ status_t BnCameraDeviceCallbacks::onTransact( { ALOGV("onTransact - code = %d", code); switch(code) { - case NOTIFY_CALLBACK: { - ALOGV("NOTIFY_CALLBACK"); + case CAMERA_ERROR: { + ALOGV("onDeviceError"); + CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply); + CameraErrorCode errorCode = + static_cast(data.readInt32()); + onDeviceError(errorCode); + data.readExceptionCode(); + return NO_ERROR; + } break; + case CAMERA_IDLE: { + ALOGV("onDeviceIdle"); CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply); - int32_t msgType = data.readInt32(); - int32_t ext1 = data.readInt32(); - int32_t ext2 = data.readInt32(); - notifyCallback(msgType, ext1, ext2); + onDeviceIdle(); + data.readExceptionCode(); + return NO_ERROR; + } break; + case CAPTURE_STARTED: { + ALOGV("onCaptureStarted"); + CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply); + int32_t requestId = data.readInt32(); + int64_t timestamp = 
data.readInt64(); + onCaptureStarted(requestId, timestamp); data.readExceptionCode(); return NO_ERROR; } break; case RESULT_RECEIVED: { - ALOGV("RESULT_RECEIVED"); + ALOGV("onResultReceived"); CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply); int32_t requestId = data.readInt32(); CameraMetadata result; @@ -102,8 +137,7 @@ status_t BnCameraDeviceCallbacks::onTransact( onResultReceived(requestId, result); data.readExceptionCode(); return NO_ERROR; - break; - } + } break; default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp index e9aa99d..1f5867a 100644 --- a/camera/tests/ProCameraTests.cpp +++ b/camera/tests/ProCameraTests.cpp @@ -284,9 +284,9 @@ protected: } } - virtual void onResultReceived(int32_t frameId, + virtual void onResultReceived(int32_t requestId, camera_metadata* request) { - dout << "Result received frameId = " << frameId + dout << "Result received requestId = " << requestId << ", requestPtr = " << (void*)request << std::endl; QueueEvent(RESULT_RECEIVED); free_camera_metadata(request); @@ -1276,4 +1276,3 @@ TEST_F(ProCameraTest, ServiceListenersFunctional) { } } } - diff --git a/include/camera/IProCameraCallbacks.h b/include/camera/IProCameraCallbacks.h index c774698..e8abb89 100644 --- a/include/camera/IProCameraCallbacks.h +++ b/include/camera/IProCameraCallbacks.h @@ -51,7 +51,7 @@ public: /** Missing by design: implementation is client-side in ProCamera.cpp **/ // virtual void onBufferReceived(int streamId, // const CpuConsumer::LockedBufer& buf); - virtual void onResultReceived(int32_t frameId, + virtual void onResultReceived(int32_t requestId, camera_metadata* result) = 0; }; diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h index d9ee662..83a3028 100644 --- a/include/camera/ProCamera.h +++ b/include/camera/ProCamera.h @@ -252,7 +252,7 @@ protected: virtual void onLockStatusChanged( IProCameraCallbacks::LockStatus newLockStatus); - virtual void onResultReceived(int32_t frameId, + virtual void onResultReceived(int32_t requestId, camera_metadata* result); private: ProCamera(int cameraId); diff --git a/include/camera/camera2/ICameraDeviceCallbacks.h b/include/camera/camera2/ICameraDeviceCallbacks.h index 041fa65..8dac4f2 100644 --- a/include/camera/camera2/ICameraDeviceCallbacks.h +++ b/include/camera/camera2/ICameraDeviceCallbacks.h @@ -35,13 +35,27 @@ class ICameraDeviceCallbacks : public IInterface public: DECLARE_META_INTERFACE(CameraDeviceCallbacks); + /** + * Error codes for CAMERA_MSG_ERROR + */ + enum CameraErrorCode { + ERROR_CAMERA_DISCONNECTED = 0, + ERROR_CAMERA_DEVICE = 1, + ERROR_CAMERA_SERVICE = 2 + }; + + // One way + virtual void onDeviceError(CameraErrorCode errorCode) = 0; + + // One way + virtual void onDeviceIdle() = 0; + // One way - virtual void notifyCallback(int32_t msgType, - int32_t ext1, - int32_t ext2) = 0; + virtual void onCaptureStarted(int32_t requestId, + int64_t timestamp) = 0; // One way - virtual void onResultReceived(int32_t frameId, + virtual void onResultReceived(int32_t requestId, const CameraMetadata& result) = 0; }; diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index d659ebb..d23f8b9 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -33,6 +33,7 @@ LOCAL_SRC_FILES:= \ device3/Camera3InputStream.cpp \ device3/Camera3OutputStream.cpp \ device3/Camera3ZslStream.cpp \ + device3/StatusTracker.cpp \ 
gui/RingBufferConsumer.cpp \ LOCAL_SHARED_LIBRARIES:= \ diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp index ca3198f..1a1b27b 100644 --- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp +++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp @@ -103,12 +103,12 @@ void CaptureSequencer::notifyAutoExposure(uint8_t newState, int triggerId) { } } -void CaptureSequencer::onFrameAvailable(int32_t frameId, +void CaptureSequencer::onFrameAvailable(int32_t requestId, const CameraMetadata &frame) { ALOGV("%s: Listener found new frame", __FUNCTION__); ATRACE_CALL(); Mutex::Autolock l(mInputMutex); - mNewFrameId = frameId; + mNewFrameId = requestId; mNewFrame = frame; if (!mNewFrameReceived) { mNewFrameReceived = true; diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h index 7ad461a..e1e6201 100644 --- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h +++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h @@ -62,7 +62,7 @@ class CaptureSequencer: void notifyAutoExposure(uint8_t newState, int triggerId); // Notifications from the frame processor - virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame); + virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame); // Notifications from the JPEG processor void onCaptureAvailable(nsecs_t timestamp, sp captureBuffer); diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp index 08ab357..4207ba9 100644 --- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp @@ -71,7 +71,7 @@ void ZslProcessor::onFrameAvailable() { } } -void ZslProcessor::onFrameAvailable(int32_t /*frameId*/, +void ZslProcessor::onFrameAvailable(int32_t /*requestId*/, const CameraMetadata &frame) { Mutex::Autolock l(mInputMutex); camera_metadata_ro_entry_t entry; diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h index 5fb178f..6d3cb85 100644 --- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h @@ -54,7 +54,7 @@ class ZslProcessor: // From mZslConsumer virtual void onFrameAvailable(); // From FrameProcessor - virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame); + virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame); virtual void onBufferReleased(buffer_handle_t *handle); diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp index 3e05091..776ebe2 100644 --- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp @@ -61,7 +61,7 @@ ZslProcessor3::~ZslProcessor3() { deleteStream(); } -void ZslProcessor3::onFrameAvailable(int32_t /*frameId*/, +void ZslProcessor3::onFrameAvailable(int32_t /*requestId*/, const CameraMetadata &frame) { Mutex::Autolock l(mInputMutex); camera_metadata_ro_entry_t entry; diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h index 
35b85f5..d2f8322 100644 --- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h @@ -51,7 +51,7 @@ class ZslProcessor3 : ~ZslProcessor3(); // From FrameProcessor - virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame); + virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame); /** **************************************** diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp index 76d44bf..72126c1 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp @@ -45,14 +45,6 @@ CameraDeviceClientBase::CameraDeviceClientBase( cameraId, cameraFacing, clientPid, clientUid, servicePid), mRemoteCallback(remoteCallback) { } -void CameraDeviceClientBase::notifyError() { - // Thread safe. Don't bother locking. - sp remoteCb = mRemoteCallback; - - if (remoteCb != 0) { - remoteCb->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0); - } -} // Interface used by CameraService @@ -164,7 +156,6 @@ status_t CameraDeviceClient::submitRequest(sp request, metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0], outputStreamIds.size()); - // TODO: @hide ANDROID_REQUEST_ID, or use another request token int32_t requestId = mRequestIdCounter++; metadata.update(ANDROID_REQUEST_ID, &requestId, /*size*/1); ALOGV("%s: Camera %d: Submitting request with ID %d", @@ -501,6 +492,34 @@ status_t CameraDeviceClient::dump(int fd, const Vector& args) { return dumpDevice(fd, args); } + +void CameraDeviceClient::notifyError() { + // Thread safe. Don't bother locking. + sp remoteCb = getRemoteCallback(); + + if (remoteCb != 0) { + remoteCb->onDeviceError(ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE); + } +} + +void CameraDeviceClient::notifyIdle() { + // Thread safe. Don't bother locking. + sp remoteCb = getRemoteCallback(); + + if (remoteCb != 0) { + remoteCb->onDeviceIdle(); + } +} + +void CameraDeviceClient::notifyShutter(int requestId, + nsecs_t timestamp) { + // Thread safe. Don't bother locking. + sp remoteCb = getRemoteCallback(); + if (remoteCb != 0) { + remoteCb->onCaptureStarted(requestId, timestamp); + } +} + // TODO: refactor the code below this with IProCameraUser. // it's 100% copy-pasted, so lets not change it right now to make it easier. 
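
For illustration, the receiving side of the reworked callback interface is a subclass of BnCameraDeviceCallbacks implementing the four one-way methods declared above. The sketch below is not part of the patch: the class name and log statements are hypothetical, and only the method signatures and the CameraErrorCode values are taken from ICameraDeviceCallbacks.h as changed here.

// Minimal sketch of a callback receiver for the new interface (illustrative only).
#define LOG_TAG "CallbackSketch"
#include <utils/Log.h>
#include <camera/CameraMetadata.h>
#include <camera/camera2/ICameraDeviceCallbacks.h>

namespace android {

class SketchDeviceCallbacks : public BnCameraDeviceCallbacks {
  public:
    // Device-wide failure; replaces the old generic notifyCallback(CAMERA_MSG_ERROR, ...).
    virtual void onDeviceError(CameraErrorCode errorCode) {
        ALOGE("device error %d", static_cast<int>(errorCode));
    }
    // Idle notification, driven by the new StatusTracker instead of client-side polling.
    virtual void onDeviceIdle() {
        ALOGI("device is idle");
    }
    // Shutter notification, now keyed by requestId rather than frame number.
    virtual void onCaptureStarted(int32_t requestId, int64_t timestamp) {
        ALOGI("capture for request %d started at %lld",
              requestId, static_cast<long long>(timestamp));
    }
    // Completed capture metadata for a request.
    virtual void onResultReceived(int32_t requestId, const CameraMetadata& result) {
        ALOGI("result for request %d, %zu entries", requestId, result.entryCount());
    }
};

} // namespace android

All four callbacks are one-way Binder transactions, so the service never blocks on the client; anything slow belongs on the client's own thread, not in these methods.
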
@@ -532,8 +551,8 @@ void CameraDeviceClient::detachDevice() { } /** Device-related methods */ -void CameraDeviceClient::onFrameAvailable(int32_t frameId, - const CameraMetadata& frame) { +void CameraDeviceClient::onFrameAvailable(int32_t requestId, + const CameraMetadata& frame) { ATRACE_CALL(); ALOGV("%s", __FUNCTION__); @@ -541,7 +560,7 @@ void CameraDeviceClient::onFrameAvailable(int32_t frameId, sp remoteCb = mRemoteCallback; if (remoteCb != NULL) { ALOGV("%s: frame = %p ", __FUNCTION__, &frame); - remoteCb->onResultReceived(frameId, frame); + remoteCb->onResultReceived(requestId, frame); } } diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h index b490924..b9c16aa 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.h +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h @@ -45,8 +45,6 @@ protected: uid_t clientUid, int servicePid); - virtual void notifyError(); - sp mRemoteCallback; }; @@ -112,11 +110,19 @@ public: virtual status_t dump(int fd, const Vector& args); /** + * Device listener interface + */ + + virtual void notifyIdle(); + virtual void notifyError(); + virtual void notifyShutter(int requestId, nsecs_t timestamp); + + /** * Interface used by independent components of CameraDeviceClient. */ protected: /** FilteredListener implementation **/ - virtual void onFrameAvailable(int32_t frameId, + virtual void onFrameAvailable(int32_t requestId, const CameraMetadata& frame); virtual void detachDevice(); diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp index 2b583e5..1a7a7a7 100644 --- a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp +++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp @@ -374,7 +374,7 @@ void ProCamera2Client::detachDevice() { } /** Device-related methods */ -void ProCamera2Client::onFrameAvailable(int32_t frameId, +void ProCamera2Client::onFrameAvailable(int32_t requestId, const CameraMetadata& frame) { ATRACE_CALL(); ALOGV("%s", __FUNCTION__); @@ -386,7 +386,7 @@ void ProCamera2Client::onFrameAvailable(int32_t frameId, CameraMetadata tmp(frame); camera_metadata_t* meta = tmp.release(); ALOGV("%s: meta = %p ", __FUNCTION__, meta); - mRemoteCallback->onResultReceived(frameId, meta); + mRemoteCallback->onResultReceived(requestId, meta); tmp.acquire(meta); } diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.h b/services/camera/libcameraservice/api_pro/ProCamera2Client.h index 0bf6784..8a0f547 100644 --- a/services/camera/libcameraservice/api_pro/ProCamera2Client.h +++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.h @@ -97,7 +97,7 @@ public: protected: /** FilteredListener implementation **/ - virtual void onFrameAvailable(int32_t frameId, + virtual void onFrameAvailable(int32_t requestId, const CameraMetadata& frame); virtual void detachDevice(); diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp index e808bf3..2d1253f 100644 --- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp +++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp @@ -226,13 +226,18 @@ void Camera2ClientBase::notifyError(int errorCode, int arg1, } template -void Camera2ClientBase::notifyShutter(int frameNumber, +void Camera2ClientBase::notifyIdle() { + ALOGV("Camera device is now idle"); +} + +template +void 
Camera2ClientBase::notifyShutter(int requestId, nsecs_t timestamp) { - (void)frameNumber; + (void)requestId; (void)timestamp; - ALOGV("%s: Shutter notification for frame %d at time %lld", __FUNCTION__, - frameNumber, timestamp); + ALOGV("%s: Shutter notification for request id %d at time %lld", + __FUNCTION__, requestId, timestamp); } template @@ -244,13 +249,6 @@ void Camera2ClientBase::notifyAutoFocus(uint8_t newState, ALOGV("%s: Autofocus state now %d, last trigger %d", __FUNCTION__, newState, triggerId); - typename SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - if (l.mRemoteCallback != 0) { - l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE, 1, 0); - } - if (l.mRemoteCallback != 0) { - l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, 1, 0); - } } template diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h index d23197c..61e44f0 100644 --- a/services/camera/libcameraservice/common/Camera2ClientBase.h +++ b/services/camera/libcameraservice/common/Camera2ClientBase.h @@ -62,7 +62,8 @@ public: */ virtual void notifyError(int errorCode, int arg1, int arg2); - virtual void notifyShutter(int frameNumber, nsecs_t timestamp); + virtual void notifyIdle(); + virtual void notifyShutter(int requestId, nsecs_t timestamp); virtual void notifyAutoFocus(uint8_t newState, int triggerId); virtual void notifyAutoExposure(uint8_t newState, int triggerId); virtual void notifyAutoWhitebalance(uint8_t newState, diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h index ebbd4ea..e80abf1 100644 --- a/services/camera/libcameraservice/common/CameraDeviceBase.h +++ b/services/camera/libcameraservice/common/CameraDeviceBase.h @@ -138,9 +138,18 @@ class CameraDeviceBase : public virtual RefBase { */ class NotificationListener { public: - // Refer to the Camera2 HAL definition for notification definitions + // The set of notifications is a merge of the notifications required for + // API1 and API2. + + // Required for API 1 and 2 virtual void notifyError(int errorCode, int arg1, int arg2) = 0; - virtual void notifyShutter(int frameNumber, nsecs_t timestamp) = 0; + + // Required only for API2 + virtual void notifyIdle() = 0; + virtual void notifyShutter(int requestId, + nsecs_t timestamp) = 0; + + // Required only for API1 virtual void notifyAutoFocus(uint8_t newState, int triggerId) = 0; virtual void notifyAutoExposure(uint8_t newState, int triggerId) = 0; virtual void notifyAutoWhitebalance(uint8_t newState, @@ -165,12 +174,14 @@ class CameraDeviceBase : public virtual RefBase { /** * Wait for a new frame to be produced, with timeout in nanoseconds. * Returns TIMED_OUT when no frame produced within the specified duration + * May be called concurrently to most methods, except for getNextFrame */ virtual status_t waitForNextFrame(nsecs_t timeout) = 0; /** * Get next metadata frame from the frame queue. Returns NULL if the queue * is empty; caller takes ownership of the metadata buffer. 
+ * May be called concurrently to most methods, except for waitForNextFrame */ virtual status_t getNextFrame(CameraMetadata *frame) = 0; diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.h b/services/camera/libcameraservice/common/FrameProcessorBase.h index 1e46beb..f96caff 100644 --- a/services/camera/libcameraservice/common/FrameProcessorBase.h +++ b/services/camera/libcameraservice/common/FrameProcessorBase.h @@ -39,7 +39,7 @@ class FrameProcessorBase: public Thread { virtual ~FrameProcessorBase(); struct FilteredListener: virtual public RefBase { - virtual void onFrameAvailable(int32_t frameId, + virtual void onFrameAvailable(int32_t requestId, const CameraMetadata &frame) = 0; }; diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp index fe2cd77..2bc1a8a 100644 --- a/services/camera/libcameraservice/device2/Camera2Device.cpp +++ b/services/camera/libcameraservice/device2/Camera2Device.cpp @@ -464,8 +464,10 @@ void Camera2Device::notificationCallback(int32_t msg_type, listener->notifyError(ext1, ext2, ext3); break; case CAMERA2_MSG_SHUTTER: { - nsecs_t timestamp = (nsecs_t)ext2 | ((nsecs_t)(ext3) << 32 ); - listener->notifyShutter(ext1, timestamp); + // TODO: Only needed for camera2 API, which is unsupported + // by HAL2 directly. + // nsecs_t timestamp = (nsecs_t)ext2 | ((nsecs_t)(ext3) << 32 ); + // listener->notifyShutter(requestId, timestamp); break; } case CAMERA2_MSG_AUTOFOCUS: diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h index 2aa22a2..1f53c56 100644 --- a/services/camera/libcameraservice/device2/Camera2Device.h +++ b/services/camera/libcameraservice/device2/Camera2Device.h @@ -28,6 +28,10 @@ namespace android { /** * CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_2_0 + * + * TODO for camera2 API implementation: + * Does not produce notifyShutter / notifyIdle callbacks to NotificationListener + * Use waitUntilDrained for idle. 
*/ class Camera2Device: public CameraDeviceBase { public: diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index b468eb3..ed6458c 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -82,6 +82,7 @@ int Camera3Device::getId() const { status_t Camera3Device::initialize(camera_module_t *module) { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId); @@ -159,9 +160,20 @@ status_t Camera3Device::initialize(camera_module_t *module) } } + /** Start up status tracker thread */ + mStatusTracker = new StatusTracker(this); + res = mStatusTracker->run(String8::format("C3Dev-%d-Status", mId).string()); + if (res != OK) { + SET_ERR_L("Unable to start status tracking thread: %s (%d)", + strerror(-res), res); + device->common.close(&device->common); + mStatusTracker.clear(); + return res; + } + /** Start up request queue thread */ - mRequestThread = new RequestThread(this, device); + mRequestThread = new RequestThread(this, mStatusTracker, device); res = mRequestThread->run(String8::format("C3Dev-%d-ReqQueue", mId).string()); if (res != OK) { SET_ERR_L("Unable to start request queue thread: %s (%d)", @@ -175,81 +187,130 @@ status_t Camera3Device::initialize(camera_module_t *module) mDeviceInfo = info.static_camera_characteristics; mHal3Device = device; - mStatus = STATUS_IDLE; + mStatus = STATUS_UNCONFIGURED; mNextStreamId = 0; mNeedConfig = true; + mPauseStateNotify = false; return OK; } status_t Camera3Device::disconnect() { ATRACE_CALL(); - Mutex::Autolock l(mLock); + Mutex::Autolock il(mInterfaceLock); ALOGV("%s: E", __FUNCTION__); status_t res = OK; - if (mStatus == STATUS_UNINITIALIZED) return res; - if (mStatus == STATUS_ACTIVE || - (mStatus == STATUS_ERROR && mRequestThread != NULL)) { - res = mRequestThread->clearRepeatingRequests(); - if (res != OK) { - SET_ERR_L("Can't stop streaming"); - // Continue to close device even in case of error - } else { - res = waitUntilDrainedLocked(); + { + Mutex::Autolock l(mLock); + if (mStatus == STATUS_UNINITIALIZED) return res; + + if (mStatus == STATUS_ACTIVE || + (mStatus == STATUS_ERROR && mRequestThread != NULL)) { + res = mRequestThread->clearRepeatingRequests(); if (res != OK) { - SET_ERR_L("Timeout waiting for HAL to drain"); + SET_ERR_L("Can't stop streaming"); // Continue to close device even in case of error + } else { + res = waitUntilStateThenRelock(/*active*/ false, kShutdownTimeout); + if (res != OK) { + SET_ERR_L("Timeout waiting for HAL to drain"); + // Continue to close device even in case of error + } } } + + if (mStatus == STATUS_ERROR) { + CLOGE("Shutting down in an error state"); + } + + if (mStatusTracker != NULL) { + mStatusTracker->requestExit(); + } + + if (mRequestThread != NULL) { + mRequestThread->requestExit(); + } + + mOutputStreams.clear(); + mInputStream.clear(); } - assert(mStatus == STATUS_IDLE || mStatus == STATUS_ERROR); - if (mStatus == STATUS_ERROR) { - CLOGE("Shutting down in an error state"); + // Joining done without holding mLock, otherwise deadlocks may ensue + // as the threads try to access parent state + if (mRequestThread != NULL && mStatus != STATUS_ERROR) { + // HAL may be in a bad state, so waiting for request thread + // (which may be stuck in the HAL processCaptureRequest call) + // could be dangerous. 
+ mRequestThread->join(); } - if (mRequestThread != NULL) { - mRequestThread->requestExit(); + if (mStatusTracker != NULL) { + mStatusTracker->join(); } - mOutputStreams.clear(); - mInputStream.clear(); + { + Mutex::Autolock l(mLock); - if (mRequestThread != NULL) { - if (mStatus != STATUS_ERROR) { - // HAL may be in a bad state, so waiting for request thread - // (which may be stuck in the HAL processCaptureRequest call) - // could be dangerous. - mRequestThread->join(); - } mRequestThread.clear(); - } + mStatusTracker.clear(); - if (mHal3Device != NULL) { - mHal3Device->common.close(&mHal3Device->common); - mHal3Device = NULL; - } + if (mHal3Device != NULL) { + mHal3Device->common.close(&mHal3Device->common); + mHal3Device = NULL; + } - mStatus = STATUS_UNINITIALIZED; + mStatus = STATUS_UNINITIALIZED; + } ALOGV("%s: X", __FUNCTION__); return res; } +// For dumping/debugging only - +// try to acquire a lock a few times, eventually give up to proceed with +// debug/dump operations +bool Camera3Device::tryLockSpinRightRound(Mutex& lock) { + bool gotLock = false; + for (size_t i = 0; i < kDumpLockAttempts; ++i) { + if (lock.tryLock() == NO_ERROR) { + gotLock = true; + break; + } else { + usleep(kDumpSleepDuration); + } + } + return gotLock; +} + status_t Camera3Device::dump(int fd, const Vector &args) { ATRACE_CALL(); (void)args; + + // Try to lock, but continue in case of failure (to avoid blocking in + // deadlocks) + bool gotInterfaceLock = tryLockSpinRightRound(mInterfaceLock); + bool gotLock = tryLockSpinRightRound(mLock); + + ALOGW_IF(!gotInterfaceLock, + "Camera %d: %s: Unable to lock interface lock, proceeding anyway", + mId, __FUNCTION__); + ALOGW_IF(!gotLock, + "Camera %d: %s: Unable to lock main lock, proceeding anyway", + mId, __FUNCTION__); + String8 lines; const char *status = mStatus == STATUS_ERROR ? "ERROR" : mStatus == STATUS_UNINITIALIZED ? "UNINITIALIZED" : - mStatus == STATUS_IDLE ? "IDLE" : + mStatus == STATUS_UNCONFIGURED ? "UNCONFIGURED" : + mStatus == STATUS_CONFIGURED ? "CONFIGURED" : mStatus == STATUS_ACTIVE ? 
"ACTIVE" : "Unknown"; + lines.appendFormat(" Device status: %s\n", status); if (mStatus == STATUS_ERROR) { lines.appendFormat(" Error cause: %s\n", mErrorCause.string()); @@ -285,7 +346,7 @@ status_t Camera3Device::dump(int fd, const Vector &args) { lines = String8(" Last request sent:\n"); write(fd, lines.string(), lines.size()); - CameraMetadata lastRequest = getLatestRequest(); + CameraMetadata lastRequest = getLatestRequestLocked(); lastRequest.dump(fd, /*verbosity*/2, /*indentation*/6); } @@ -295,6 +356,9 @@ status_t Camera3Device::dump(int fd, const Vector &args) { mHal3Device->ops->dump(mHal3Device, fd); } + if (gotLock) mLock.unlock(); + if (gotInterfaceLock) mInterfaceLock.unlock(); + return OK; } @@ -311,6 +375,8 @@ const CameraMetadata& Camera3Device::info() const { status_t Camera3Device::capture(CameraMetadata &request) { ATRACE_CALL(); + status_t res; + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); // TODO: take ownership of the request @@ -322,7 +388,9 @@ status_t Camera3Device::capture(CameraMetadata &request) { case STATUS_UNINITIALIZED: CLOGE("Device not initialized"); return INVALID_OPERATION; - case STATUS_IDLE: + case STATUS_UNCONFIGURED: + // May be lazily configuring streams, will check during setup + case STATUS_CONFIGURED: case STATUS_ACTIVE: // OK break; @@ -337,12 +405,23 @@ status_t Camera3Device::capture(CameraMetadata &request) { return BAD_VALUE; } - return mRequestThread->queueRequest(newRequest); + res = mRequestThread->queueRequest(newRequest); + if (res == OK) { + waitUntilStateThenRelock(/*active*/ true, kActiveTimeout); + if (res != OK) { + SET_ERR_L("Can't transition to active in %f seconds!", + kActiveTimeout/1e9); + } + ALOGV("Camera %d: Capture request enqueued", mId); + } + return res; } status_t Camera3Device::setStreamingRequest(const CameraMetadata &request) { ATRACE_CALL(); + status_t res; + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); switch (mStatus) { @@ -352,7 +431,9 @@ status_t Camera3Device::setStreamingRequest(const CameraMetadata &request) { case STATUS_UNINITIALIZED: CLOGE("Device not initialized"); return INVALID_OPERATION; - case STATUS_IDLE: + case STATUS_UNCONFIGURED: + // May be lazily configuring streams, will check during setup + case STATUS_CONFIGURED: case STATUS_ACTIVE: // OK break; @@ -370,7 +451,16 @@ status_t Camera3Device::setStreamingRequest(const CameraMetadata &request) { RequestList newRepeatingRequests; newRepeatingRequests.push_back(newRepeatingRequest); - return mRequestThread->setRepeatingRequests(newRepeatingRequests); + res = mRequestThread->setRepeatingRequests(newRepeatingRequests); + if (res == OK) { + waitUntilStateThenRelock(/*active*/ true, kActiveTimeout); + if (res != OK) { + SET_ERR_L("Can't transition to active in %f seconds!", + kActiveTimeout/1e9); + } + ALOGV("Camera %d: Repeating request set", mId); + } + return res; } @@ -378,12 +468,16 @@ sp Camera3Device::setUpRequestLocked( const CameraMetadata &request) { status_t res; - if (mStatus == STATUS_IDLE) { + if (mStatus == STATUS_UNCONFIGURED || mNeedConfig) { res = configureStreamsLocked(); if (res != OK) { SET_ERR_L("Can't set up streams: %s (%d)", strerror(-res), res); return NULL; } + if (mStatus == STATUS_UNCONFIGURED) { + CLOGE("No streams configured"); + return NULL; + } } sp newRequest = createCaptureRequest(request); @@ -392,6 +486,7 @@ sp Camera3Device::setUpRequestLocked( status_t Camera3Device::clearStreamingRequest() { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); switch 
(mStatus) { @@ -401,7 +496,8 @@ status_t Camera3Device::clearStreamingRequest() { case STATUS_UNINITIALIZED: CLOGE("Device not initialized"); return INVALID_OPERATION; - case STATUS_IDLE: + case STATUS_UNCONFIGURED: + case STATUS_CONFIGURED: case STATUS_ACTIVE: // OK break; @@ -409,12 +505,13 @@ status_t Camera3Device::clearStreamingRequest() { SET_ERR_L("Unexpected status: %d", mStatus); return INVALID_OPERATION; } - + ALOGV("Camera %d: Clearing repeating request", mId); return mRequestThread->clearRepeatingRequests(); } status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); return mRequestThread->waitUntilRequestProcessed(requestId, timeout); } @@ -422,7 +519,10 @@ status_t Camera3Device::waitUntilRequestReceived(int32_t requestId, nsecs_t time status_t Camera3Device::createInputStream( uint32_t width, uint32_t height, int format, int *id) { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); + ALOGV("Camera %d: Creating new input stream %d: %d x %d, format %d", + mId, mNextStreamId, width, height, format); status_t res; bool wasActive = false; @@ -434,26 +534,24 @@ status_t Camera3Device::createInputStream( case STATUS_UNINITIALIZED: ALOGE("%s: Device not initialized", __FUNCTION__); return INVALID_OPERATION; - case STATUS_IDLE: + case STATUS_UNCONFIGURED: + case STATUS_CONFIGURED: // OK break; case STATUS_ACTIVE: ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); - mRequestThread->setPaused(true); - res = waitUntilDrainedLocked(); + res = internalPauseAndWaitLocked(); if (res != OK) { - ALOGE("%s: Can't pause captures to reconfigure streams!", - __FUNCTION__); - mStatus = STATUS_ERROR; + SET_ERR_L("Can't pause captures to reconfigure streams!"); return res; } wasActive = true; break; default: - ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + SET_ERR_L("%s: Unexpected status: %d", mStatus); return INVALID_OPERATION; } - assert(mStatus == STATUS_IDLE); + assert(mStatus != STATUS_ACTIVE); if (mInputStream != 0) { ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__); @@ -462,6 +560,7 @@ status_t Camera3Device::createInputStream( sp newStream = new Camera3InputStream(mNextStreamId, width, height, format); + newStream->setStatusTracker(mStatusTracker); mInputStream = newStream; @@ -476,9 +575,10 @@ status_t Camera3Device::createInputStream( __FUNCTION__, mNextStreamId, strerror(-res), res); return res; } - mRequestThread->setPaused(false); + internalResumeLocked(); } + ALOGV("Camera %d: Created input stream", mId); return OK; } @@ -490,7 +590,10 @@ status_t Camera3Device::createZslStream( int *id, sp* zslStream) { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); + ALOGV("Camera %d: Creating ZSL stream %d: %d x %d, depth %d", + mId, mNextStreamId, width, height, depth); status_t res; bool wasActive = false; @@ -502,26 +605,24 @@ status_t Camera3Device::createZslStream( case STATUS_UNINITIALIZED: ALOGE("%s: Device not initialized", __FUNCTION__); return INVALID_OPERATION; - case STATUS_IDLE: + case STATUS_UNCONFIGURED: + case STATUS_CONFIGURED: // OK break; case STATUS_ACTIVE: ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); - mRequestThread->setPaused(true); - res = waitUntilDrainedLocked(); + res = internalPauseAndWaitLocked(); if (res != OK) { - ALOGE("%s: Can't pause captures to reconfigure streams!", - __FUNCTION__); - mStatus = STATUS_ERROR; + SET_ERR_L("Can't pause captures to 
reconfigure streams!"); return res; } wasActive = true; break; default: - ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus); + SET_ERR_L("Unexpected status: %d", mStatus); return INVALID_OPERATION; } - assert(mStatus == STATUS_IDLE); + assert(mStatus != STATUS_ACTIVE); if (mInputStream != 0) { ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__); @@ -530,6 +631,7 @@ status_t Camera3Device::createZslStream( sp newStream = new Camera3ZslStream(mNextStreamId, width, height, depth); + newStream->setStatusTracker(mStatusTracker); res = mOutputStreams.add(mNextStreamId, newStream); if (res < 0) { @@ -551,16 +653,20 @@ status_t Camera3Device::createZslStream( __FUNCTION__, mNextStreamId, strerror(-res), res); return res; } - mRequestThread->setPaused(false); + internalResumeLocked(); } + ALOGV("Camera %d: Created ZSL stream", mId); return OK; } status_t Camera3Device::createStream(sp consumer, uint32_t width, uint32_t height, int format, size_t size, int *id) { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); + ALOGV("Camera %d: Creating new stream %d: %d x %d, format %d, size %d", + mId, mNextStreamId, width, height, format, size); status_t res; bool wasActive = false; @@ -572,16 +678,15 @@ status_t Camera3Device::createStream(sp consumer, case STATUS_UNINITIALIZED: CLOGE("Device not initialized"); return INVALID_OPERATION; - case STATUS_IDLE: + case STATUS_UNCONFIGURED: + case STATUS_CONFIGURED: // OK break; case STATUS_ACTIVE: ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__); - mRequestThread->setPaused(true); - res = waitUntilDrainedLocked(); + res = internalPauseAndWaitLocked(); if (res != OK) { - ALOGE("%s: Can't pause captures to reconfigure streams!", - __FUNCTION__); + SET_ERR_L("Can't pause captures to reconfigure streams!"); return res; } wasActive = true; @@ -590,7 +695,7 @@ status_t Camera3Device::createStream(sp consumer, SET_ERR_L("Unexpected status: %d", mStatus); return INVALID_OPERATION; } - assert(mStatus == STATUS_IDLE); + assert(mStatus != STATUS_ACTIVE); sp newStream; if (format == HAL_PIXEL_FORMAT_BLOB) { @@ -600,6 +705,7 @@ status_t Camera3Device::createStream(sp consumer, newStream = new Camera3OutputStream(mNextStreamId, consumer, width, height, format); } + newStream->setStatusTracker(mStatusTracker); res = mOutputStreams.add(mNextStreamId, newStream); if (res < 0) { @@ -619,9 +725,9 @@ status_t Camera3Device::createStream(sp consumer, mNextStreamId, strerror(-res), res); return res; } - mRequestThread->setPaused(false); + internalResumeLocked(); } - + ALOGV("Camera %d: Created new stream", mId); return OK; } @@ -637,6 +743,7 @@ status_t Camera3Device::createReprocessStreamFromStream(int outputId, int *id) { status_t Camera3Device::getStreamInfo(int id, uint32_t *width, uint32_t *height, uint32_t *format) { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); switch (mStatus) { @@ -646,7 +753,8 @@ status_t Camera3Device::getStreamInfo(int id, case STATUS_UNINITIALIZED: CLOGE("Device not initialized!"); return INVALID_OPERATION; - case STATUS_IDLE: + case STATUS_UNCONFIGURED: + case STATUS_CONFIGURED: case STATUS_ACTIVE: // OK break; @@ -671,6 +779,7 @@ status_t Camera3Device::getStreamInfo(int id, status_t Camera3Device::setStreamTransform(int id, int transform) { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); switch (mStatus) { @@ -680,7 +789,8 @@ status_t Camera3Device::setStreamTransform(int id, case STATUS_UNINITIALIZED: CLOGE("Device not 
initialized"); return INVALID_OPERATION; - case STATUS_IDLE: + case STATUS_UNCONFIGURED: + case STATUS_CONFIGURED: case STATUS_ACTIVE: // OK break; @@ -701,6 +811,7 @@ status_t Camera3Device::setStreamTransform(int id, status_t Camera3Device::deleteStream(int id) { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); status_t res; @@ -708,7 +819,7 @@ status_t Camera3Device::deleteStream(int id) { // CameraDevice semantics require device to already be idle before // deleteStream is called, unlike for createStream. - if (mStatus != STATUS_IDLE) { + if (mStatus == STATUS_ACTIVE) { ALOGV("%s: Camera %d: Device not idle", __FUNCTION__, mId); return -EBUSY; } @@ -752,6 +863,7 @@ status_t Camera3Device::createDefaultRequest(int templateId, CameraMetadata *request) { ATRACE_CALL(); ALOGV("%s: for template %d", __FUNCTION__, templateId); + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); switch (mStatus) { @@ -761,7 +873,8 @@ status_t Camera3Device::createDefaultRequest(int templateId, case STATUS_UNINITIALIZED: CLOGE("Device is not initialized!"); return INVALID_OPERATION; - case STATUS_IDLE: + case STATUS_UNCONFIGURED: + case STATUS_CONFIGURED: case STATUS_ACTIVE: // OK break; @@ -787,61 +900,88 @@ status_t Camera3Device::createDefaultRequest(int templateId, status_t Camera3Device::waitUntilDrained() { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); - return waitUntilDrainedLocked(); -} - -status_t Camera3Device::waitUntilDrainedLocked() { - ATRACE_CALL(); - status_t res; - switch (mStatus) { case STATUS_UNINITIALIZED: - case STATUS_IDLE: + case STATUS_UNCONFIGURED: ALOGV("%s: Already idle", __FUNCTION__); return OK; + case STATUS_CONFIGURED: + // To avoid race conditions, check with tracker to be sure case STATUS_ERROR: case STATUS_ACTIVE: - // Need to shut down + // Need to verify shut down break; default: SET_ERR_L("Unexpected status: %d",mStatus); return INVALID_OPERATION; } - if (mRequestThread != NULL) { - res = mRequestThread->waitUntilPaused(kShutdownTimeout); - if (res != OK) { - SET_ERR_L("Can't stop request thread in %f seconds!", - kShutdownTimeout/1e9); - return res; - } - } - if (mInputStream != NULL) { - res = mInputStream->waitUntilIdle(kShutdownTimeout); - if (res != OK) { - SET_ERR_L("Can't idle input stream %d in %f seconds!", - mInputStream->getId(), kShutdownTimeout/1e9); - return res; - } + ALOGV("%s: Camera %d: Waiting until idle", __FUNCTION__, mId); + status_t res = waitUntilStateThenRelock(/*active*/ false, kShutdownTimeout); + return res; +} + +// Pause to reconfigure +status_t Camera3Device::internalPauseAndWaitLocked() { + mRequestThread->setPaused(true); + mPauseStateNotify = true; + + ALOGV("%s: Camera %d: Internal wait until idle", __FUNCTION__, mId); + status_t res = waitUntilStateThenRelock(/*active*/ false, kShutdownTimeout); + if (res != OK) { + SET_ERR_L("Can't idle device in %f seconds!", + kShutdownTimeout/1e9); } - for (size_t i = 0; i < mOutputStreams.size(); i++) { - res = mOutputStreams.editValueAt(i)->waitUntilIdle(kShutdownTimeout); - if (res != OK) { - SET_ERR_L("Can't idle output stream %d in %f seconds!", - mOutputStreams.keyAt(i), kShutdownTimeout/1e9); - return res; - } + + return res; +} + +// Resume after internalPauseAndWaitLocked +status_t Camera3Device::internalResumeLocked() { + status_t res; + + mRequestThread->setPaused(false); + + res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout); + if (res != OK) { + SET_ERR_L("Can't transition to active in %f seconds!", 
+ kActiveTimeout/1e9); } + mPauseStateNotify = false; + return OK; +} - if (mStatus != STATUS_ERROR) { - mStatus = STATUS_IDLE; +status_t Camera3Device::waitUntilStateThenRelock(bool active, + nsecs_t timeout) { + status_t res = OK; + if (active == (mStatus == STATUS_ACTIVE)) { + // Desired state already reached + return res; } - return OK; + bool stateSeen = false; + do { + mRecentStatusUpdates.clear(); + + res = mStatusChanged.waitRelative(mLock, timeout); + if (res != OK) break; + + // Check state change history during wait + for (size_t i = 0; i < mRecentStatusUpdates.size(); i++) { + if (active == (mRecentStatusUpdates[i] == STATUS_ACTIVE) ) { + stateSeen = true; + break; + } + } + } while (!stateSeen); + + return res; } + status_t Camera3Device::setNotifyCallback(NotificationListener *listener) { ATRACE_CALL(); Mutex::Autolock l(mOutputLock); @@ -893,6 +1033,7 @@ status_t Camera3Device::getNextFrame(CameraMetadata *frame) { status_t Camera3Device::triggerAutofocus(uint32_t id) { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id); // Mix-in this trigger into the next request and only the next request. @@ -913,6 +1054,7 @@ status_t Camera3Device::triggerAutofocus(uint32_t id) { status_t Camera3Device::triggerCancelAutofocus(uint32_t id) { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); ALOGV("%s: Triggering cancel autofocus, id %d", __FUNCTION__, id); // Mix-in this trigger into the next request and only the next request. @@ -933,6 +1075,7 @@ status_t Camera3Device::triggerCancelAutofocus(uint32_t id) { status_t Camera3Device::triggerPrecaptureMetering(uint32_t id) { ATRACE_CALL(); + Mutex::Autolock il(mInterfaceLock); ALOGV("%s: Triggering precapture metering, id %d", __FUNCTION__, id); // Mix-in this trigger into the next request and only the next request. @@ -963,7 +1106,7 @@ status_t Camera3Device::pushReprocessBuffer(int reprocessStreamId, status_t Camera3Device::flush() { ATRACE_CALL(); ALOGV("%s: Camera %d: Flushing all requests", __FUNCTION__, mId); - + Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); mRequestThread->clear(); @@ -971,6 +1114,41 @@ status_t Camera3Device::flush() { } /** + * Methods called by subclasses + */ + +void Camera3Device::notifyStatus(bool idle) { + { + // Need mLock to safely update state and synchronize to current + // state of methods in flight. + Mutex::Autolock l(mLock); + // We can get various system-idle notices from the status tracker + // while starting up. Only care about them if we've actually sent + // in some requests recently. + if (mStatus != STATUS_ACTIVE && mStatus != STATUS_CONFIGURED) { + return; + } + ALOGV("%s: Camera %d: Now %s", __FUNCTION__, mId, + idle ? "idle" : "active"); + mStatus = idle ? 
STATUS_CONFIGURED : STATUS_ACTIVE; + mRecentStatusUpdates.add(mStatus); + mStatusChanged.signal(); + + // Skip notifying listener if we're doing some user-transparent + // state changes + if (mPauseStateNotify) return; + } + NotificationListener *listener; + { + Mutex::Autolock l(mOutputLock); + listener = mListener; + } + if (idle && listener != NULL) { + listener->notifyIdle(); + } +} + +/** * Camera3Device private methods */ @@ -1046,18 +1224,18 @@ status_t Camera3Device::configureStreamsLocked() { ATRACE_CALL(); status_t res; - if (mStatus != STATUS_IDLE) { + if (mStatus != STATUS_UNCONFIGURED && mStatus != STATUS_CONFIGURED) { CLOGE("Not idle"); return INVALID_OPERATION; } if (!mNeedConfig) { ALOGV("%s: Skipping config, no stream changes", __FUNCTION__); - mStatus = STATUS_ACTIVE; return OK; } // Start configuring the streams + ALOGV("%s: Camera %d: Starting stream configuration", __FUNCTION__, mId); camera3_stream_configuration config; @@ -1139,11 +1317,18 @@ status_t Camera3Device::configureStreamsLocked() { // across configure_streams() calls mRequestThread->configurationComplete(); - // Finish configuring the streams lazily on first reference + // Update device state - mStatus = STATUS_ACTIVE; mNeedConfig = false; + if (config.num_streams > 0) { + mStatus = STATUS_CONFIGURED; + } else { + mStatus = STATUS_UNCONFIGURED; + } + + ALOGV("%s: Camera %d: Stream configuration complete", __FUNCTION__, mId); + return OK; } @@ -1190,12 +1375,12 @@ void Camera3Device::setErrorStateLockedV(const char *fmt, va_list args) { */ status_t Camera3Device::registerInFlight(int32_t frameNumber, - int32_t numBuffers) { + int32_t requestId, int32_t numBuffers) { ATRACE_CALL(); Mutex::Autolock l(mInFlightLock); ssize_t res; - res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers)); + res = mInFlightMap.add(frameNumber, InFlightRequest(requestId, numBuffers)); if (res < 0) return res; return OK; @@ -1378,12 +1563,17 @@ void Camera3Device::notify(const camera3_notify_msg *msg) { mNextShutterFrameNumber++; } + int32_t requestId = -1; + // Set timestamp for the request in the in-flight tracking + // and get the request ID to send upstream { Mutex::Autolock l(mInFlightLock); idx = mInFlightMap.indexOfKey(frameNumber); if (idx >= 0) { - mInFlightMap.editValueAt(idx).captureTimestamp = timestamp; + InFlightRequest &r = mInFlightMap.editValueAt(idx); + r.captureTimestamp = timestamp; + requestId = r.requestId; } } if (idx < 0) { @@ -1391,11 +1581,11 @@ void Camera3Device::notify(const camera3_notify_msg *msg) { frameNumber); break; } - ALOGVV("Camera %d: %s: Shutter fired for frame %d at %lld", - mId, __FUNCTION__, frameNumber, timestamp); + ALOGVV("Camera %d: %s: Shutter fired for frame %d (id %d) at %lld", + mId, __FUNCTION__, frameNumber, requestId, timestamp); // Call listener, if any if (listener != NULL) { - listener->notifyShutter(frameNumber, timestamp); + listener->notifyShutter(requestId, timestamp); } break; } @@ -1405,40 +1595,15 @@ void Camera3Device::notify(const camera3_notify_msg *msg) { } } -CameraMetadata Camera3Device::getLatestRequest() { +CameraMetadata Camera3Device::getLatestRequestLocked() { ALOGV("%s", __FUNCTION__); - bool locked = false; - - /** - * Why trylock instead of autolock? - * - * We want to be able to call this function from - * dumpsys, which often happens during deadlocks. 
- */ - for (size_t i = 0; i < kDumpLockAttempts; ++i) { - if (mLock.tryLock() == NO_ERROR) { - locked = true; - break; - } else { - usleep(kDumpSleepDuration); - } - } - - if (!locked) { - ALOGW("%s: Possible deadlock detected", __FUNCTION__); - } - CameraMetadata retVal; if (mRequestThread != NULL) { retVal = mRequestThread->getLatestRequest(); } - if (locked) { - mLock.unlock(); - } - return retVal; } @@ -1447,9 +1612,11 @@ CameraMetadata Camera3Device::getLatestRequest() { */ Camera3Device::RequestThread::RequestThread(wp parent, + sp statusTracker, camera3_device_t *hal3Device) : Thread(false), mParent(parent), + mStatusTracker(statusTracker), mHal3Device(hal3Device), mId(getId(parent)), mReconfigured(false), @@ -1457,6 +1624,7 @@ Camera3Device::RequestThread::RequestThread(wp parent, mPaused(true), mFrameNumber(0), mLatestRequestId(NAME_NOT_FOUND) { + mStatusId = statusTracker->addComponent(); } void Camera3Device::RequestThread::configurationComplete() { @@ -1562,19 +1730,6 @@ void Camera3Device::RequestThread::setPaused(bool paused) { mDoPauseSignal.signal(); } -status_t Camera3Device::RequestThread::waitUntilPaused(nsecs_t timeout) { - ATRACE_CALL(); - status_t res; - Mutex::Autolock l(mPauseLock); - while (!mPaused) { - res = mPausedSignal.waitRelative(mPauseLock, timeout); - if (res == TIMED_OUT) { - return res; - } - } - return OK; -} - status_t Camera3Device::RequestThread::waitUntilRequestProcessed( int32_t requestId, nsecs_t timeout) { Mutex::Autolock l(mLatestRequestMutex); @@ -1591,7 +1746,13 @@ status_t Camera3Device::RequestThread::waitUntilRequestProcessed( return OK; } - +void Camera3Device::RequestThread::requestExit() { + // Call parent to set up shutdown + Thread::requestExit(); + // The exit from any possible waits + mDoPauseSignal.signal(); + mRequestSignal.signal(); +} bool Camera3Device::RequestThread::threadLoop() { @@ -1613,6 +1774,18 @@ bool Camera3Device::RequestThread::threadLoop() { camera3_capture_request_t request = camera3_capture_request_t(); Vector outputBuffers; + // Get the request ID, if any + int requestId; + camera_metadata_entry_t requestIdEntry = + nextRequest->mSettings.find(ANDROID_REQUEST_ID); + if (requestIdEntry.count > 0) { + requestId = requestIdEntry.data.i32[0]; + } else { + ALOGW("%s: Did not have android.request.id set in the request", + __FUNCTION__); + requestId = NAME_NOT_FOUND; + } + // Insert any queued triggers (before metadata is locked) int32_t triggerCount; res = insertTriggers(nextRequest); @@ -1713,7 +1886,7 @@ bool Camera3Device::RequestThread::threadLoop() { return false; } - res = parent->registerInFlight(request.frame_number, + res = parent->registerInFlight(request.frame_number, requestId, request.num_output_buffers); if (res != OK) { SET_ERR("RequestThread: Unable to register new in-flight request:" @@ -1762,16 +1935,7 @@ bool Camera3Device::RequestThread::threadLoop() { { Mutex::Autolock al(mLatestRequestMutex); - camera_metadata_entry_t requestIdEntry = - nextRequest->mSettings.find(ANDROID_REQUEST_ID); - if (requestIdEntry.count > 0) { - mLatestRequestId = requestIdEntry.data.i32[0]; - } else { - ALOGW("%s: Did not have android.request.id set in the request", - __FUNCTION__); - mLatestRequestId = NAME_NOT_FOUND; - } - + mLatestRequestId = requestId; mLatestRequestSignal.signal(); } @@ -1790,8 +1954,6 @@ bool Camera3Device::RequestThread::threadLoop() { } } - - return true; } @@ -1849,12 +2011,17 @@ sp res = mRequestSignal.waitRelative(mRequestLock, kRequestTimeout); - if (res == TIMED_OUT) { - // Signal that we're 
paused by starvation + if ((mRequestQueue.empty() && mRepeatingRequests.empty()) || + exitPending()) { Mutex::Autolock pl(mPauseLock); if (mPaused == false) { + ALOGV("%s: RequestThread: Going idle", __FUNCTION__); mPaused = true; - mPausedSignal.signal(); + // Let the tracker know + sp statusTracker = mStatusTracker.promote(); + if (statusTracker != 0) { + statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE); + } } // Stop waiting for now and let thread management happen return NULL; @@ -1874,6 +2041,13 @@ sp // update internal pause state (capture/setRepeatingRequest unpause // directly). Mutex::Autolock pl(mPauseLock); + if (mPaused) { + ALOGV("%s: RequestThread: Unpaused", __FUNCTION__); + sp statusTracker = mStatusTracker.promote(); + if (statusTracker != 0) { + statusTracker->markComponentActive(mStatusId); + } + } mPaused = false; // Check if we've reconfigured since last time, and reset the preview @@ -1890,13 +2064,18 @@ bool Camera3Device::RequestThread::waitIfPaused() { status_t res; Mutex::Autolock l(mPauseLock); while (mDoPause) { - // Signal that we're paused by request if (mPaused == false) { mPaused = true; - mPausedSignal.signal(); + ALOGV("%s: RequestThread: Paused", __FUNCTION__); + // Let the tracker know + sp statusTracker = mStatusTracker.promote(); + if (statusTracker != 0) { + statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE); + } } + res = mDoPauseSignal.waitRelative(mPauseLock, kRequestTimeout); - if (res == TIMED_OUT) { + if (res == TIMED_OUT || exitPending()) { return true; } } @@ -1909,8 +2088,16 @@ void Camera3Device::RequestThread::unpauseForNewRequests() { // With work to do, mark thread as unpaused. // If paused by request (setPaused), don't resume, to avoid // extra signaling/waiting overhead to waitUntilPaused + mRequestSignal.signal(); Mutex::Autolock p(mPauseLock); if (!mDoPause) { + ALOGV("%s: RequestThread: Going active", __FUNCTION__); + if (mPaused) { + sp statusTracker = mStatusTracker.promote(); + if (statusTracker != 0) { + statusTracker->markComponentActive(mStatusId); + } + } mPaused = false; } } diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h index 61caf13..6295c80 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.h +++ b/services/camera/libcameraservice/device3/Camera3Device.h @@ -26,6 +26,7 @@ #include #include "common/CameraDeviceBase.h" +#include "device3/StatusTracker.h" /** * Function pointer types with C calling convention to @@ -126,29 +127,47 @@ class Camera3Device : virtual status_t flush(); + // Methods called by subclasses + void notifyStatus(bool idle); // updates from StatusTracker + private: static const size_t kDumpLockAttempts = 10; static const size_t kDumpSleepDuration = 100000; // 0.10 sec static const size_t kInFlightWarnLimit = 20; static const nsecs_t kShutdownTimeout = 5000000000; // 5 sec + static const nsecs_t kActiveTimeout = 500000000; // 500 ms struct RequestTrigger; + // A lock to enforce serialization on the input/configure side + // of the public interface. + // Only locked by public methods inherited from CameraDeviceBase. + // Not locked by methods guarded by mOutputLock, since they may act + // concurrently to the input/configure side of the interface. 
+ // Must be locked before mLock if both will be locked by a method + Mutex mInterfaceLock; + + // The main lock on internal state Mutex mLock; + // Camera device ID + const int mId; + /**** Scope for mLock ****/ - const int mId; camera3_device_t *mHal3Device; CameraMetadata mDeviceInfo; vendor_tag_query_ops_t mVendorTagOps; - enum { + enum Status { STATUS_ERROR, STATUS_UNINITIALIZED, - STATUS_IDLE, + STATUS_UNCONFIGURED, + STATUS_CONFIGURED, STATUS_ACTIVE } mStatus; + Vector mRecentStatusUpdates; + Condition mStatusChanged; // Tracking cause of fatal errors when in STATUS_ERROR String8 mErrorCause; @@ -162,6 +181,10 @@ class Camera3Device : int mNextStreamId; bool mNeedConfig; + // Whether to send state updates upstream + // Pause when doing transparent reconfiguration + bool mPauseStateNotify; + // Need to hold on to stream references until configure completes. Vector > mDeletedStreams; @@ -181,13 +204,34 @@ class Camera3Device : * * Takes mLock. */ - virtual CameraMetadata getLatestRequest(); + virtual CameraMetadata getLatestRequestLocked(); + + /** + * Pause processing and flush everything, but don't tell the clients. + * This is for reconfiguring outputs transparently when according to the + * CameraDeviceBase interface we shouldn't need to. + * Must be called with mLock and mInterfaceLock both held. + */ + status_t internalPauseAndWaitLocked(); /** - * Lock-held version of waitUntilDrained. Will transition to IDLE on - * success. + * Resume work after internalPauseAndWaitLocked() + * Must be called with mLock and mInterfaceLock both held. */ - status_t waitUntilDrainedLocked(); + status_t internalResumeLocked(); + + /** + * Wait until status tracker tells us we've transitioned to the target state + * set, which is either ACTIVE when active==true or IDLE (which is any + * non-ACTIVE state) when active==false. + * + * Needs to be called with mLock and mInterfaceLock held. This means there + * can ever only be one waiter at most. + * + * During the wait mLock is released. + * + */ + status_t waitUntilStateThenRelock(bool active, nsecs_t timeout); /** * Do common work for setting up a streaming or single capture request. @@ -217,6 +261,12 @@ class Camera3Device : void setErrorStateLocked(const char *fmt, ...); void setErrorStateLockedV(const char *fmt, va_list args); + /** + * Debugging trylock/spin method + * Try to acquire a lock a few times with sleeps between before giving up. + */ + bool tryLockSpinRightRound(Mutex& lock); + struct RequestTrigger { // Metadata tag number, e.g. android.control.aePrecaptureTrigger uint32_t metadataTag; @@ -242,6 +292,7 @@ class Camera3Device : public: RequestThread(wp parent, + sp statusTracker, camera3_device_t *hal3Device); /** @@ -279,13 +330,6 @@ class Camera3Device : void setPaused(bool paused); /** - * Wait until thread is paused, either due to setPaused(true) - * or due to lack of input requests. Returns TIMED_OUT in case - * the thread does not pause within the timeout. - */ - status_t waitUntilPaused(nsecs_t timeout); - - /** * Wait until thread processes the capture request with settings' * android.request.id == requestId. * @@ -295,6 +339,12 @@ class Camera3Device : status_t waitUntilRequestProcessed(int32_t requestId, nsecs_t timeout); /** + * Shut down the thread. Shutdown is asynchronous, so thread may + * still be running once this method returns. + */ + virtual void requestExit(); + + /** * Get the latest request that was sent to the HAL * with process_capture_request. 
*/ @@ -339,9 +389,12 @@ class Camera3Device : void setErrorState(const char *fmt, ...); wp mParent; + wp mStatusTracker; camera3_device_t *mHal3Device; - const int mId; + const int mId; // The camera ID + int mStatusId; // The RequestThread's component ID for + // status tracking Mutex mRequestLock; Condition mRequestSignal; @@ -381,6 +434,8 @@ class Camera3Device : */ struct InFlightRequest { + // android.request.id for the request + int requestId; // Set by notify() SHUTTER call. nsecs_t captureTimestamp; // Set by process_capture_result call with valid metadata @@ -389,13 +444,16 @@ class Camera3Device : // buffers int numBuffersLeft; + // Default constructor needed by KeyedVector InFlightRequest() : + requestId(0), captureTimestamp(0), haveResultMetadata(false), numBuffersLeft(0) { } - explicit InFlightRequest(int numBuffers) : + InFlightRequest(int id, int numBuffers) : + requestId(id), captureTimestamp(0), haveResultMetadata(false), numBuffersLeft(numBuffers) { @@ -407,7 +465,13 @@ class Camera3Device : Mutex mInFlightLock; // Protects mInFlightMap InFlightMap mInFlightMap; - status_t registerInFlight(int32_t frameNumber, int32_t numBuffers); + status_t registerInFlight(int32_t frameNumber, int32_t requestId, + int32_t numBuffers); + + /** + * Tracking for idle detection + */ + sp mStatusTracker; /** * Output result queue and current HAL device 3A state diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp index 0850566..727a8c9 100644 --- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp +++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp @@ -23,7 +23,8 @@ #include #include -#include "Camera3IOStreamBase.h" +#include "device3/Camera3IOStreamBase.h" +#include "device3/StatusTracker.h" namespace android { @@ -62,53 +63,6 @@ bool Camera3IOStreamBase::hasOutstandingBuffersLocked() const { return false; } -status_t Camera3IOStreamBase::waitUntilIdle(nsecs_t timeout) { - status_t res; - { - Mutex::Autolock l(mLock); - while (mDequeuedBufferCount > 0) { - if (timeout != TIMEOUT_NEVER) { - nsecs_t startTime = systemTime(); - res = mBufferReturnedSignal.waitRelative(mLock, timeout); - if (res == TIMED_OUT) { - return res; - } else if (res != OK) { - ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - nsecs_t deltaTime = systemTime() - startTime; - if (timeout <= deltaTime) { - timeout = 0; - } else { - timeout -= deltaTime; - } - } else { - res = mBufferReturnedSignal.wait(mLock); - if (res != OK) { - ALOGE("%s: Error waiting for outstanding buffers: %s (%d)", - __FUNCTION__, strerror(-res), res); - return res; - } - } - } - } - - // No lock - - unsigned int timeoutMs; - if (timeout == TIMEOUT_NEVER) { - timeoutMs = Fence::TIMEOUT_NEVER; - } else if (timeout == 0) { - timeoutMs = 0; - } else { - // Round up to wait at least 1 ms - timeoutMs = (timeout + 999999) / 1000000; - } - - return mCombinedFence->wait(timeoutMs); -} - void Camera3IOStreamBase::dump(int fd, const Vector &args) const { (void) args; String8 lines; @@ -190,6 +144,14 @@ void Camera3IOStreamBase::handoutBufferLocked(camera3_stream_buffer &buffer, buffer.release_fence = releaseFence; buffer.status = status; + // Inform tracker about becoming busy + if (mDequeuedBufferCount == 0 && mState != STATE_IN_CONFIG && + mState != STATE_IN_RECONFIG) { + sp statusTracker = mStatusTracker.promote(); + if (statusTracker != 0) { + 
statusTracker->markComponentActive(mStatusId); + } + } mDequeuedBufferCount++; } @@ -253,12 +215,24 @@ status_t Camera3IOStreamBase::returnAnyBufferLocked( res = returnBufferCheckedLocked(buffer, timestamp, output, &releaseFence); if (res != OK) { - return res; + // NO_INIT means the buffer queue is abandoned, so to be resilient, + // still want to decrement in-flight counts. + if (res != NO_INIT) { + return res; + } } mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); mDequeuedBufferCount--; + if (mDequeuedBufferCount == 0 && mState != STATE_IN_CONFIG && + mState != STATE_IN_RECONFIG) { + sp statusTracker = mStatusTracker.promote(); + if (statusTracker != 0) { + statusTracker->markComponentIdle(mStatusId, mCombinedFence); + } + } + mBufferReturnedSignal.signal(); if (output) { diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h index 9432a59..fcb9d04 100644 --- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h +++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h @@ -43,7 +43,6 @@ class Camera3IOStreamBase : * Camera3Stream interface */ - virtual status_t waitUntilIdle(nsecs_t timeout); virtual void dump(int fd, const Vector &args) const; protected: diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp index c80f512..5aa9a3e 100644 --- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp +++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp @@ -115,7 +115,6 @@ status_t Camera3InputStream::returnBufferCheckedLocked( bufferFound = true; bufferItem = tmp; mBuffersInFlight.erase(it); - mDequeuedBufferCount--; } } } @@ -148,12 +147,11 @@ status_t Camera3InputStream::returnBufferCheckedLocked( if (res != OK) { ALOGE("%s: Stream %d: Error releasing buffer back to buffer queue:" " %s (%d)", __FUNCTION__, mId, strerror(-res), res); - return res; } *releaseFenceOut = releaseFence; - return OK; + return res; } status_t Camera3InputStream::returnInputBufferLocked( diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp index 35cb5ba..41328fc 100644 --- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp +++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp @@ -198,12 +198,11 @@ status_t Camera3OutputStream::returnBufferCheckedLocked( mLock.lock(); if (res != OK) { close(anwReleaseFence); - return res; } *releaseFenceOut = releaseFence; - return OK; + return res; } void Camera3OutputStream::dump(int fd, const Vector &args) const { diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp index a6872aa..6d2cf94 100644 --- a/services/camera/libcameraservice/device3/Camera3Stream.cpp +++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp @@ -20,13 +20,18 @@ #include #include -#include "Camera3Stream.h" +#include "device3/Camera3Stream.h" +#include "device3/StatusTracker.h" namespace android { namespace camera3 { Camera3Stream::~Camera3Stream() { + sp statusTracker = mStatusTracker.promote(); + if (statusTracker != 0 && mStatusId != StatusTracker::NO_STATUS_ID) { + statusTracker->removeComponent(mStatusId); + } } Camera3Stream* Camera3Stream::cast(camera3_stream *stream) { @@ -44,7 +49,8 @@ Camera3Stream::Camera3Stream(int id, mId(id), 
mName(String8::format("Camera3Stream[%d]", id)), mMaxSize(maxSize), - mState(STATE_CONSTRUCTED) { + mState(STATE_CONSTRUCTED), + mStatusId(StatusTracker::NO_STATUS_ID) { camera3_stream::stream_type = type; camera3_stream::width = width; @@ -119,6 +125,15 @@ camera3_stream* Camera3Stream::startConfiguration() { return NULL; } + // Stop tracking if currently doing so + if (mStatusId != StatusTracker::NO_STATUS_ID) { + sp statusTracker = mStatusTracker.promote(); + if (statusTracker != 0) { + statusTracker->removeComponent(mStatusId); + } + mStatusId = StatusTracker::NO_STATUS_ID; + } + if (mState == STATE_CONSTRUCTED) { mState = STATE_IN_CONFIG; } else { // mState == STATE_CONFIGURED @@ -154,6 +169,12 @@ status_t Camera3Stream::finishConfiguration(camera3_device *hal3Device) { return INVALID_OPERATION; } + // Register for idle tracking + sp statusTracker = mStatusTracker.promote(); + if (statusTracker != 0) { + mStatusId = statusTracker->addComponent(); + } + // Check if the stream configuration is unchanged, and skip reallocation if // so. As documented in hardware/camera3.h:configure_streams(). if (mState == STATE_IN_RECONFIG && @@ -265,6 +286,18 @@ bool Camera3Stream::hasOutstandingBuffers() const { return hasOutstandingBuffersLocked(); } +status_t Camera3Stream::setStatusTracker(sp statusTracker) { + Mutex::Autolock l(mLock); + sp oldTracker = mStatusTracker.promote(); + if (oldTracker != 0 && mStatusId != StatusTracker::NO_STATUS_ID) { + oldTracker->removeComponent(mStatusId); + } + mStatusId = StatusTracker::NO_STATUS_ID; + mStatusTracker = statusTracker; + + return OK; +} + status_t Camera3Stream::disconnect() { ATRACE_CALL(); Mutex::Autolock l(mLock); diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h index b64fd86..6eeb721 100644 --- a/services/camera/libcameraservice/device3/Camera3Stream.h +++ b/services/camera/libcameraservice/device3/Camera3Stream.h @@ -190,12 +190,11 @@ class Camera3Stream : enum { TIMEOUT_NEVER = -1 }; + /** - * Wait until the HAL is done with all of this stream's buffers, including - * signalling all release fences. Returns TIMED_OUT if the timeout is exceeded, - * OK on success. Pass in TIMEOUT_NEVER for timeout to indicate an indefinite wait. + * Set the status tracker to notify about idle transitions */ - virtual status_t waitUntilIdle(nsecs_t timeout) = 0; + virtual status_t setStatusTracker(sp statusTracker); /** * Disconnect stream from its non-HAL endpoint. After this, @@ -267,6 +266,11 @@ class Camera3Stream : // INVALID_OPERATION if they cannot be obtained. virtual status_t getEndpointUsage(uint32_t *usage) = 0; + // Tracking for idle state + wp mStatusTracker; + // Status tracker component ID + int mStatusId; + private: uint32_t oldUsage; uint32_t oldMaxBuffers; diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h index 4768536..c93ae15 100644 --- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h +++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h @@ -26,6 +26,8 @@ namespace android { namespace camera3 { +class StatusTracker; + /** * An interface for managing a single stream of input and/or output data from * the camera device. 
@@ -128,13 +130,11 @@ class Camera3StreamInterface : public virtual RefBase { enum { TIMEOUT_NEVER = -1 }; + /** - * Wait until the HAL is done with all of this stream's buffers, including - * signalling all release fences. Returns TIMED_OUT if the timeout is - * exceeded, OK on success. Pass in TIMEOUT_NEVER for timeout to indicate - * an indefinite wait. + * Set the state tracker to use for signaling idle transitions. */ - virtual status_t waitUntilIdle(nsecs_t timeout) = 0; + virtual status_t setStatusTracker(sp statusTracker) = 0; /** * Disconnect stream from its non-HAL endpoint. After this, diff --git a/services/camera/libcameraservice/device3/StatusTracker.cpp b/services/camera/libcameraservice/device3/StatusTracker.cpp new file mode 100644 index 0000000..ab5419f --- /dev/null +++ b/services/camera/libcameraservice/device3/StatusTracker.cpp @@ -0,0 +1,219 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Camera3-Status" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +// This is needed for stdint.h to define INT64_MAX in C++ +#define __STDC_LIMIT_MACROS + +#include +#include +#include + +#include "device3/StatusTracker.h" +#include "device3/Camera3Device.h" + +namespace android { + +namespace camera3 { + +StatusTracker::StatusTracker(wp parent) : + mComponentsChanged(false), + mParent(parent), + mNextComponentId(0), + mIdleFence(new Fence()), + mDeviceState(IDLE) { +} + +StatusTracker::~StatusTracker() { +} + +int StatusTracker::addComponent() { + int id; + ssize_t err; + { + Mutex::Autolock l(mLock); + id = mNextComponentId++; + ALOGV("%s: Adding new component %d", __FUNCTION__, id); + + err = mStates.add(id, IDLE); + ALOGE_IF(err < 0, "%s: Can't add new component %d: %s (%d)", + __FUNCTION__, id, strerror(-err), err); + } + + if (err >= 0) { + Mutex::Autolock pl(mPendingLock); + mComponentsChanged = true; + mPendingChangeSignal.signal(); + } + + return err < 0 ? err : id; +} + +void StatusTracker::removeComponent(int id) { + ssize_t idx; + { + Mutex::Autolock l(mLock); + ALOGV("%s: Removing component %d", __FUNCTION__, id); + idx = mStates.removeItem(id); + } + + if (idx >= 0) { + Mutex::Autolock pl(mPendingLock); + mComponentsChanged = true; + mPendingChangeSignal.signal(); + } + + return; +} + + +void StatusTracker::markComponentIdle(int id, const sp& componentFence) { + markComponent(id, IDLE, componentFence); +} + +void StatusTracker::markComponentActive(int id) { + markComponent(id, ACTIVE, Fence::NO_FENCE); +} + +void StatusTracker::markComponent(int id, ComponentState state, + const sp& componentFence) { + ALOGV("%s: Component %d is now %s", __FUNCTION__, id, + state == IDLE ? 
"idle" : "active"); + Mutex::Autolock l(mPendingLock); + + StateChange newState = { + id, + state, + componentFence + }; + + mPendingChangeQueue.add(newState); + mPendingChangeSignal.signal(); +} + +void StatusTracker::requestExit() { + // First mark thread dead + Thread::requestExit(); + // Then exit any waits + mPendingChangeSignal.signal(); +} + +StatusTracker::ComponentState StatusTracker::getDeviceStateLocked() { + for (size_t i = 0; i < mStates.size(); i++) { + if (mStates.valueAt(i) == ACTIVE) { + ALOGV("%s: Component %d not idle", __FUNCTION__, + mStates.keyAt(i)); + return ACTIVE; + } + } + // - If not yet signaled, getSignalTime returns INT64_MAX + // - If invalid fence or error, returns -1 + // - Otherwise returns time of signalling. + // Treat -1 as 'signalled', since HAL may not be using fences, and want + // to be able to idle in case of errors. + nsecs_t signalTime = mIdleFence->getSignalTime(); + bool fencesDone = signalTime != INT64_MAX; + + ALOGV_IF(!fencesDone, "%s: Fences still to wait on", __FUNCTION__); + + return fencesDone ? IDLE : ACTIVE; +} + +bool StatusTracker::threadLoop() { + status_t res; + + // Wait for state updates + { + Mutex::Autolock pl(mPendingLock); + while (mPendingChangeQueue.size() == 0 && !mComponentsChanged) { + res = mPendingChangeSignal.waitRelative(mPendingLock, + kWaitDuration); + if (exitPending()) return false; + if (res != OK) { + if (res != TIMED_OUT) { + ALOGE("%s: Error waiting on state changes: %s (%d)", + __FUNCTION__, strerror(-res), res); + } + // TIMED_OUT is expected + break; + } + } + } + + // After new pending states appear, or timeout, check if we're idle. Even + // with timeout, need to check to account for fences that may still be + // clearing out + sp parent; + { + Mutex::Autolock pl(mPendingLock); + Mutex::Autolock l(mLock); + + // Collect all pending state updates and see if the device + // collectively transitions between idle and active for each one + + // First pass for changed components or fence completions + ComponentState prevState = getDeviceStateLocked(); + if (prevState != mDeviceState) { + // Only collect changes to overall device state + mStateTransitions.add(prevState); + } + // For each pending component state update, check if we've transitioned + // to a new overall device state + for (size_t i = 0; i < mPendingChangeQueue.size(); i++) { + const StateChange &newState = mPendingChangeQueue[i]; + ssize_t idx = mStates.indexOfKey(newState.id); + // Ignore notices for unknown components + if (idx >= 0) { + // Update single component state + mStates.replaceValueAt(idx, newState.state); + mIdleFence = Fence::merge(String8("idleFence"), + mIdleFence, newState.fence); + // .. and see if overall device state has changed + ComponentState newState = getDeviceStateLocked(); + if (newState != prevState) { + mStateTransitions.add(newState); + } + prevState = newState; + } + } + mPendingChangeQueue.clear(); + mComponentsChanged = false; + + // Store final state after all pending state changes are done with + + mDeviceState = prevState; + parent = mParent.promote(); + } + + // Notify parent for all intermediate transitions + if (mStateTransitions.size() > 0 && parent.get()) { + for (size_t i = 0; i < mStateTransitions.size(); i++) { + bool idle = (mStateTransitions[i] == IDLE); + ALOGV("Camera device is now %s", idle ? 
"idle" : "active"); + parent->notifyStatus(idle); + } + } + mStateTransitions.clear(); + + return true; +} + +} // namespace android + +} // namespace camera3 diff --git a/services/camera/libcameraservice/device3/StatusTracker.h b/services/camera/libcameraservice/device3/StatusTracker.h new file mode 100644 index 0000000..49cecb3 --- /dev/null +++ b/services/camera/libcameraservice/device3/StatusTracker.h @@ -0,0 +1,130 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA3_STATUSTRACKER_H +#define ANDROID_SERVERS_CAMERA3_STATUSTRACKER_H + +#include +#include +#include +#include +#include +#include +#include + +#include "common/CameraDeviceBase.h" + +namespace android { + +class Camera3Device; +class Fence; + +namespace camera3 { + +/** + * State tracking for idle and other collective state transitions. + * Collects idle notifications from different sources and calls the + * parent when all of them become idle. + * + * The parent is responsible for synchronizing the status updates with its + * internal state correctly, which means the notifyStatus call to the parent may + * block for a while. + */ +class StatusTracker: public Thread { + public: + StatusTracker(wp parent); + ~StatusTracker(); + + // An always-invalid component ID + static const int NO_STATUS_ID = -1; + + // Add a component to track; returns non-negative unique ID for the new + // component on success, negative error code on failure. + // New components start in the idle state. + int addComponent(); + + // Remove existing component from idle tracking. Ignores unknown IDs + void removeComponent(int id); + + // Set the state of a tracked component to be idle. Ignores unknown IDs; can + // accept a fence to wait on to complete idle. The fence is merged with any + // previous fences given, which means they all must signal before the + // component is considered idle. + void markComponentIdle(int id, const sp& componentFence); + + // Set the state of a tracked component to be active. Ignores unknown IDs. + void markComponentActive(int id); + + virtual void requestExit(); + protected: + + virtual bool threadLoop(); + + private: + enum ComponentState { + IDLE, + ACTIVE + }; + + void markComponent(int id, ComponentState state, + const sp& componentFence); + + // Guards mPendingChange, mPendingStates, mComponentsChanged + Mutex mPendingLock; + + Condition mPendingChangeSignal; + + struct StateChange { + int id; + ComponentState state; + sp fence; + }; + // A queue of yet-to-be-processed state changes to components + Vector mPendingChangeQueue; + bool mComponentsChanged; + + wp mParent; + + // Guards rest of internals. Must be locked after mPendingLock if both used. 
+ Mutex mLock; + + int mNextComponentId; + + // Current component states + KeyedVector mStates; + // Merged fence for all processed state changes + sp mIdleFence; + // Current overall device state + ComponentState mDeviceState; + + // Private to threadLoop + + // Determine current overall device state + // We're IDLE iff + // - All components are currently IDLE + // - The merged fence for all component updates has signalled + ComponentState getDeviceStateLocked(); + + Vector mStateTransitions; + + static const nsecs_t kWaitDuration = 250000000LL; // 250 ms +}; + +} // namespace camera3 + +} // namespace android + +#endif -- cgit v1.1 From a73c954d947748a3b6f630cf2c160fe55ec596e3 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Wed, 2 Oct 2013 11:25:20 -0700 Subject: MediaCodec: avoid codec EOS if flushing during port reconfiguration Change-Id: Ic2e93f38feeb2e906f8d6b400ed6df2b7580ca87 Signed-off-by: Lajos Molnar Bug: 11045434 --- media/libstagefright/ACodec.cpp | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index bfb730c..8158d70 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -3026,16 +3026,17 @@ void ACodec::BaseState::onInputBufferFilled(const sp &msg) { sp buffer; int32_t err = OK; bool eos = false; + PortMode mode = getPortMode(kPortIndexInput); if (!msg->findBuffer("buffer", &buffer)) { + /* these are unfilled buffers returned by client */ CHECK(msg->findInt32("err", &err)); ALOGV("[%s] saw error %d instead of an input buffer", mCodec->mComponentName.c_str(), err); buffer.clear(); - - eos = true; + mode = KEEP_BUFFERS; } int32_t tmp; @@ -3049,8 +3050,6 @@ void ACodec::BaseState::onInputBufferFilled(const sp &msg) { info->mStatus = BufferInfo::OWNED_BY_US; - PortMode mode = getPortMode(kPortIndexInput); - switch (mode) { case KEEP_BUFFERS: { -- cgit v1.1 From a306ee6bc1aef463f8984be26b8a4214490b6c55 Mon Sep 17 00:00:00 2001 From: Andreas Huber Date: Thu, 3 Oct 2013 08:43:20 -0700 Subject: Remove now unused wfd commandline tool. 
(also unbreaks the build) Change-Id: I03320342976e4e355cc6aeb3d2c485985613b3b5 related-to-bug: 11047222 --- media/libstagefright/wifi-display/Android.mk | 21 -- media/libstagefright/wifi-display/wfd.cpp | 363 --------------------------- 2 files changed, 384 deletions(-) delete mode 100644 media/libstagefright/wifi-display/wfd.cpp diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index 3abe8a8..f70454a 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -35,24 +35,3 @@ LOCAL_MODULE:= libstagefright_wfd LOCAL_MODULE_TAGS:= optional include $(BUILD_SHARED_LIBRARY) - -################################################################################ - -include $(CLEAR_VARS) - -LOCAL_SRC_FILES:= \ - wfd.cpp \ - -LOCAL_SHARED_LIBRARIES:= \ - libbinder \ - libgui \ - libmedia \ - libstagefright \ - libstagefright_foundation \ - libstagefright_wfd \ - libutils \ - liblog \ - -LOCAL_MODULE:= wfd - -include $(BUILD_EXECUTABLE) diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp deleted file mode 100644 index 52e4e26..0000000 --- a/media/libstagefright/wifi-display/wfd.cpp +++ /dev/null @@ -1,363 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "wfd" -#include - -#include "sink/WifiDisplaySink.h" -#include "source/WifiDisplaySource.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace android { - -static void usage(const char *me) { - fprintf(stderr, - "usage:\n" - " %s -c host[:port]\tconnect to wifi source\n" - " -u uri \tconnect to an rtsp uri\n" - " -l ip[:port] \tlisten on the specified port " - " -f(ilename) \tstream media " - "(create a sink)\n" - " -s(pecial) \trun in 'special' mode\n", - me); -} - -struct RemoteDisplayClient : public BnRemoteDisplayClient { - RemoteDisplayClient(); - - virtual void onDisplayConnected( - const sp &bufferProducer, - uint32_t width, - uint32_t height, - uint32_t flags, - uint32_t session); - - virtual void onDisplayDisconnected(); - virtual void onDisplayError(int32_t error); - - void waitUntilDone(); - -protected: - virtual ~RemoteDisplayClient(); - -private: - Mutex mLock; - Condition mCondition; - - bool mDone; - - sp mComposerClient; - sp mSurfaceTexture; - sp mDisplayBinder; - - DISALLOW_EVIL_CONSTRUCTORS(RemoteDisplayClient); -}; - -RemoteDisplayClient::RemoteDisplayClient() - : mDone(false) { - mComposerClient = new SurfaceComposerClient; - CHECK_EQ(mComposerClient->initCheck(), (status_t)OK); -} - -RemoteDisplayClient::~RemoteDisplayClient() { -} - -void RemoteDisplayClient::onDisplayConnected( - const sp &bufferProducer, - uint32_t width, - uint32_t height, - uint32_t flags, - uint32_t session) { - ALOGI("onDisplayConnected width=%u, height=%u, flags = 0x%08x, session = %d", - width, height, flags, session); - - if (bufferProducer != NULL) { - mSurfaceTexture = bufferProducer; - mDisplayBinder = mComposerClient->createDisplay( - String8("foo"), false /* secure */); - - SurfaceComposerClient::openGlobalTransaction(); - mComposerClient->setDisplaySurface(mDisplayBinder, mSurfaceTexture); - - Rect layerStackRect(1280, 720); // XXX fix this. 
- Rect displayRect(1280, 720); - - mComposerClient->setDisplayProjection( - mDisplayBinder, 0 /* 0 degree rotation */, - layerStackRect, - displayRect); - - SurfaceComposerClient::closeGlobalTransaction(); - } -} - -void RemoteDisplayClient::onDisplayDisconnected() { - ALOGI("onDisplayDisconnected"); - - Mutex::Autolock autoLock(mLock); - mDone = true; - mCondition.broadcast(); -} - -void RemoteDisplayClient::onDisplayError(int32_t error) { - ALOGI("onDisplayError error=%d", error); - - Mutex::Autolock autoLock(mLock); - mDone = true; - mCondition.broadcast(); -} - -void RemoteDisplayClient::waitUntilDone() { - Mutex::Autolock autoLock(mLock); - while (!mDone) { - mCondition.wait(mLock); - } -} - -static void createSource(const AString &addr, int32_t port) { - sp sm = defaultServiceManager(); - sp binder = sm->getService(String16("media.player")); - sp service = - interface_cast(binder); - - CHECK(service.get() != NULL); - - String8 iface; - iface.append(addr.c_str()); - iface.append(StringPrintf(":%d", port).c_str()); - - sp client = new RemoteDisplayClient; - sp display = - service->listenForRemoteDisplay(client, iface); - - client->waitUntilDone(); - - display->dispose(); - display.clear(); -} - -static void createFileSource( - const AString &addr, int32_t port, const char *path) { - sp session = new ANetworkSession; - session->start(); - - sp looper = new ALooper; - looper->start(); - - sp client = new RemoteDisplayClient; - sp source = new WifiDisplaySource(session, client, path); - looper->registerHandler(source); - - AString iface = StringPrintf("%s:%d", addr.c_str(), port); - CHECK_EQ((status_t)OK, source->start(iface.c_str())); - - client->waitUntilDone(); - - source->stop(); -} - -} // namespace android - -int main(int argc, char **argv) { - using namespace android; - - ProcessState::self()->startThreadPool(); - - DataSource::RegisterDefaultSniffers(); - - AString connectToHost; - int32_t connectToPort = -1; - AString uri; - - AString listenOnAddr; - int32_t listenOnPort = -1; - - AString path; - - bool specialMode = false; - - int res; - while ((res = getopt(argc, argv, "hc:l:u:f:s")) >= 0) { - switch (res) { - case 'c': - { - const char *colonPos = strrchr(optarg, ':'); - - if (colonPos == NULL) { - connectToHost = optarg; - connectToPort = WifiDisplaySource::kWifiDisplayDefaultPort; - } else { - connectToHost.setTo(optarg, colonPos - optarg); - - char *end; - connectToPort = strtol(colonPos + 1, &end, 10); - - if (*end != '\0' || end == colonPos + 1 - || connectToPort < 1 || connectToPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - } - break; - } - - case 'u': - { - uri = optarg; - break; - } - - case 'f': - { - path = optarg; - break; - } - - case 'l': - { - const char *colonPos = strrchr(optarg, ':'); - - if (colonPos == NULL) { - listenOnAddr = optarg; - listenOnPort = WifiDisplaySource::kWifiDisplayDefaultPort; - } else { - listenOnAddr.setTo(optarg, colonPos - optarg); - - char *end; - listenOnPort = strtol(colonPos + 1, &end, 10); - - if (*end != '\0' || end == colonPos + 1 - || listenOnPort < 1 || listenOnPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - } - break; - } - - case 's': - { - specialMode = true; - break; - } - - case '?': - case 'h': - default: - usage(argv[0]); - exit(1); - } - } - - if (connectToPort >= 0 && listenOnPort >= 0) { - fprintf(stderr, - "You can connect to a source or create one, " - "but not both at the same time.\n"); - exit(1); - } - - if (listenOnPort >= 0) { - if 
(path.empty()) { - createSource(listenOnAddr, listenOnPort); - } else { - createFileSource(listenOnAddr, listenOnPort, path.c_str()); - } - - exit(0); - } - - if (connectToPort < 0 && uri.empty()) { - fprintf(stderr, - "You need to select either source host or uri.\n"); - - exit(1); - } - - if (connectToPort >= 0 && !uri.empty()) { - fprintf(stderr, - "You need to either connect to a wfd host or an rtsp url, " - "not both.\n"); - exit(1); - } - - sp composerClient = new SurfaceComposerClient; - CHECK_EQ(composerClient->initCheck(), (status_t)OK); - - sp display(SurfaceComposerClient::getBuiltInDisplay( - ISurfaceComposer::eDisplayIdMain)); - DisplayInfo info; - SurfaceComposerClient::getDisplayInfo(display, &info); - ssize_t displayWidth = info.w; - ssize_t displayHeight = info.h; - - ALOGV("display is %d x %d\n", displayWidth, displayHeight); - - sp control = - composerClient->createSurface( - String8("A Surface"), - displayWidth, - displayHeight, - PIXEL_FORMAT_RGB_565, - 0); - - CHECK(control != NULL); - CHECK(control->isValid()); - - SurfaceComposerClient::openGlobalTransaction(); - CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK); - CHECK_EQ(control->show(), (status_t)OK); - SurfaceComposerClient::closeGlobalTransaction(); - - sp surface = control->getSurface(); - CHECK(surface != NULL); - - sp session = new ANetworkSession; - session->start(); - - sp looper = new ALooper; - - sp sink = new WifiDisplaySink( - specialMode ? WifiDisplaySink::FLAG_SPECIAL_MODE : 0 /* flags */, - session, - surface->getIGraphicBufferProducer()); - - looper->registerHandler(sink); - - if (connectToPort >= 0) { - sink->start(connectToHost.c_str(), connectToPort); - } else { - sink->start(uri.c_str()); - } - - looper->start(true /* runOnCallingThread */); - - composerClient->dispose(); - - return 0; -} -- cgit v1.1 From 56ce726019f700a95ce5b45beebceadae4836e30 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Thu, 2 May 2013 16:30:48 -0700 Subject: IOMX: Add prepareForAdaptivePlayback method prepareForAdaptivePlayback is the fallback mechanism to support seamless resolution change for devices that do not support dynamic output buffers. It is up to the codecs to handle this appropriately, but codecs that do not handle dynamic output buffers would request enough buffers up to the requested size in this method to avoid port reconfiguration on resolution changes. 
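As a rough illustration only (not part of this change; the node handle, port index and frame sizes below are placeholders), a client holding an IOMX connection could invoke the new fallback roughly as follows once this method is available:

    // Sketch: ask the decoder to pre-allocate output buffers large enough for
    // the largest expected resolution, so a mid-stream resolution change does
    // not force an output port reconfiguration.
    // Assumptions: 'omx' is an sp<IOMX> obtained via OMXClient, 'node' is an
    // already-allocated video decoder node, and 1 is the output port index
    // (as used elsewhere in this patch for video decoders).
    status_t err = omx->prepareForAdaptivePlayback(
            node, 1 /* port index */, OMX_TRUE,
            1920 /* maxFrameWidth */, 1080 /* maxFrameHeight */);
    if (err != OK) {
        // Component does not implement the extension; the caller falls back
        // to normal port reconfiguration on resolution changes.
    }
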
Change-Id: I58d4aa8ef1359ea3472735bbe9140c3132039b3d Signed-off-by: Lajos Molnar Bug: 10192531 Related-to-bug: 7093648 --- include/media/IOMX.h | 4 +++ media/libmedia/IOMX.cpp | 34 ++++++++++++++++++++++++++ media/libstagefright/OMXClient.cpp | 11 +++++++++ media/libstagefright/include/OMX.h | 4 +++ media/libstagefright/include/OMXNodeInstance.h | 4 +++ media/libstagefright/omx/OMX.cpp | 7 ++++++ media/libstagefright/omx/OMXNodeInstance.cpp | 34 ++++++++++++++++++++++++++ 7 files changed, 98 insertions(+) diff --git a/include/media/IOMX.h b/include/media/IOMX.h index db9093a..9c8451c 100644 --- a/include/media/IOMX.h +++ b/include/media/IOMX.h @@ -83,6 +83,10 @@ public: virtual status_t storeMetaDataInBuffers( node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0; + virtual status_t prepareForAdaptivePlayback( + node_id node, OMX_U32 portIndex, OMX_BOOL enable, + OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) = 0; + virtual status_t enableGraphicBuffers( node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0; diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp index ef99f4f..71ce320 100644 --- a/media/libmedia/IOMX.cpp +++ b/media/libmedia/IOMX.cpp @@ -43,6 +43,7 @@ enum { CREATE_INPUT_SURFACE, SIGNAL_END_OF_INPUT_STREAM, STORE_META_DATA_IN_BUFFERS, + PREPARE_FOR_ADAPTIVE_PLAYBACK, ALLOC_BUFFER, ALLOC_BUFFER_WITH_BACKUP, FREE_BUFFER, @@ -351,6 +352,22 @@ public: return err; } + virtual status_t prepareForAdaptivePlayback( + node_id node, OMX_U32 port_index, OMX_BOOL enable, + OMX_U32 max_width, OMX_U32 max_height) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(port_index); + data.writeInt32((int32_t)enable); + data.writeInt32(max_width); + data.writeInt32(max_height); + remote()->transact(PREPARE_FOR_ADAPTIVE_PLAYBACK, data, &reply); + + status_t err = reply.readInt32(); + return err; + } + virtual status_t allocateBuffer( node_id node, OMX_U32 port_index, size_t size, buffer_id *buffer, void **buffer_data) { @@ -770,6 +787,23 @@ status_t BnOMX::onTransact( return NO_ERROR; } + case PREPARE_FOR_ADAPTIVE_PLAYBACK: + { + CHECK_OMX_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + OMX_U32 port_index = data.readInt32(); + OMX_BOOL enable = (OMX_BOOL)data.readInt32(); + OMX_U32 max_width = data.readInt32(); + OMX_U32 max_height = data.readInt32(); + + status_t err = prepareForAdaptivePlayback( + node, port_index, enable, max_width, max_height); + reply->writeInt32(err); + + return NO_ERROR; + } + case ALLOC_BUFFER: { CHECK_OMX_INTERFACE(IOMX, data, reply); diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp index 9820ef5..9f9352d 100644 --- a/media/libstagefright/OMXClient.cpp +++ b/media/libstagefright/OMXClient.cpp @@ -69,6 +69,10 @@ struct MuxOMX : public IOMX { virtual status_t storeMetaDataInBuffers( node_id node, OMX_U32 port_index, OMX_BOOL enable); + virtual status_t prepareForAdaptivePlayback( + node_id node, OMX_U32 port_index, OMX_BOOL enable, + OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight); + virtual status_t enableGraphicBuffers( node_id node, OMX_U32 port_index, OMX_BOOL enable); @@ -268,6 +272,13 @@ status_t MuxOMX::storeMetaDataInBuffers( return getOMX(node)->storeMetaDataInBuffers(node, port_index, enable); } +status_t MuxOMX::prepareForAdaptivePlayback( + node_id node, OMX_U32 port_index, OMX_BOOL enable, + OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) { + return getOMX(node)->prepareForAdaptivePlayback( + node, 
port_index, enable, maxFrameWidth, maxFrameHeight); +} + status_t MuxOMX::enableGraphicBuffers( node_id node, OMX_U32 port_index, OMX_BOOL enable) { return getOMX(node)->enableGraphicBuffers(node, port_index, enable); diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h index 7e53af3..31a5077 100644 --- a/media/libstagefright/include/OMX.h +++ b/media/libstagefright/include/OMX.h @@ -71,6 +71,10 @@ public: virtual status_t storeMetaDataInBuffers( node_id node, OMX_U32 port_index, OMX_BOOL enable); + virtual status_t prepareForAdaptivePlayback( + node_id node, OMX_U32 portIndex, OMX_BOOL enable, + OMX_U32 max_frame_width, OMX_U32 max_frame_height); + virtual status_t useBuffer( node_id node, OMX_U32 port_index, const sp ¶ms, buffer_id *buffer); diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h index ae498b4..339179e 100644 --- a/media/libstagefright/include/OMXNodeInstance.h +++ b/media/libstagefright/include/OMXNodeInstance.h @@ -58,6 +58,10 @@ struct OMXNodeInstance { status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable); + status_t prepareForAdaptivePlayback( + OMX_U32 portIndex, OMX_BOOL enable, + OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight); + status_t useBuffer( OMX_U32 portIndex, const sp ¶ms, OMX::buffer_id *buffer); diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp index aaa9f89..84a0e10 100644 --- a/media/libstagefright/omx/OMX.cpp +++ b/media/libstagefright/omx/OMX.cpp @@ -331,6 +331,13 @@ status_t OMX::storeMetaDataInBuffers( return findInstance(node)->storeMetaDataInBuffers(port_index, enable); } +status_t OMX::prepareForAdaptivePlayback( + node_id node, OMX_U32 portIndex, OMX_BOOL enable, + OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight) { + return findInstance(node)->prepareForAdaptivePlayback( + portIndex, enable, maxFrameWidth, maxFrameHeight); +} + status_t OMX::useBuffer( node_id node, OMX_U32 port_index, const sp ¶ms, buffer_id *buffer) { diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index ef683a0..46e5d71 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -417,6 +417,40 @@ status_t OMXNodeInstance::storeMetaDataInBuffers_l( return err; } +status_t OMXNodeInstance::prepareForAdaptivePlayback( + OMX_U32 portIndex, OMX_BOOL enable, OMX_U32 maxFrameWidth, + OMX_U32 maxFrameHeight) { + Mutex::Autolock autolock(mLock); + + OMX_INDEXTYPE index; + OMX_STRING name = const_cast( + "OMX.google.android.index.prepareForAdaptivePlayback"); + + OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index); + if (err != OMX_ErrorNone) { + ALOGW_IF(enable, "OMX_GetExtensionIndex %s failed", name); + return StatusFromOMXError(err); + } + + PrepareForAdaptivePlaybackParams params; + params.nSize = sizeof(params); + params.nVersion.s.nVersionMajor = 1; + params.nVersion.s.nVersionMinor = 0; + params.nVersion.s.nRevision = 0; + params.nVersion.s.nStep = 0; + + params.nPortIndex = portIndex; + params.bEnable = enable; + params.nMaxFrameWidth = maxFrameWidth; + params.nMaxFrameHeight = maxFrameHeight; + if ((err = OMX_SetParameter(mHandle, index, ¶ms)) != OMX_ErrorNone) { + ALOGW("OMX_SetParameter failed for PrepareForAdaptivePlayback " + "with error %d (0x%08x)", err, err); + return UNKNOWN_ERROR; + } + return err; +} + status_t OMXNodeInstance::useBuffer( OMX_U32 portIndex, const sp ¶ms, OMX::buffer_id *buffer) { -- cgit v1.1 From 
fce0d1883cdbcb7d501625fb43844043cd28a267 Mon Sep 17 00:00:00 2001 From: Lajos Molnar Date: Thu, 2 May 2013 16:46:41 -0700 Subject: stagefright: enable adaptive playback based on codec format key Added support for prepareForAdaptivePlayback() call in configureCodec, if max-width and max-height keys are specified in the format. It is OK for this call to fail, if component does not implement adaptive playback. Change-Id: Ie15892bc666df103b635890a0fda799b204bb06c Signed-off-by: Lajos Molnar Bug: 7093648 Bug: 10192531 --- media/libstagefright/ACodec.cpp | 42 ++++++++++++++++++++++++++++++++++++++- media/libstagefright/OMXCodec.cpp | 12 +++++++---- 2 files changed, 49 insertions(+), 5 deletions(-) diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index bfb730c..9fa8a00 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -1106,9 +1106,49 @@ status_t ACodec::configureCodec( if (!encoder && video && haveNativeWindow) { err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_TRUE); if (err != OK) { - // allow failure ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", mComponentName.c_str(), err); + + // if adaptive playback has been requested, try JB fallback + // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS + // LARGE MEMORY REQUIREMENT + + // we will not do adaptive playback on software accessed + // surfaces as they never had to respond to changes in the + // crop window, and we don't trust that they will be able to. + int usageBits = 0; + bool canDoAdaptivePlayback; + + sp windowWrapper( + static_cast(obj.get())); + sp nativeWindow = windowWrapper->getNativeWindow(); + + if (nativeWindow->query( + nativeWindow.get(), + NATIVE_WINDOW_CONSUMER_USAGE_BITS, + &usageBits) != OK) { + canDoAdaptivePlayback = false; + } else { + canDoAdaptivePlayback = + (usageBits & + (GRALLOC_USAGE_SW_READ_MASK | + GRALLOC_USAGE_SW_WRITE_MASK)) == 0; + } + + int32_t maxWidth = 0, maxHeight = 0; + if (canDoAdaptivePlayback && + msg->findInt32("max-width", &maxWidth) && + msg->findInt32("max-height", &maxHeight)) { + ALOGV("[%s] prepareForAdaptivePlayback(%ldx%ld)", + mComponentName.c_str(), maxWidth, maxHeight); + + err = mOMX->prepareForAdaptivePlayback( + mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); + ALOGW_IF(err != OK, + "[%s] prepareForAdaptivePlayback failed w/ err %d", + mComponentName.c_str(), err); + } + // allow failure err = OK; } else { ALOGV("[%s] storeMetaDataInBuffers succeeded", mComponentName.c_str()); diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 2c95ab4..7f56af8 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -4620,10 +4620,14 @@ status_t QueryCodec( caps->mColorFormats.push(portFormat.eColorFormat); } - if (!isEncoder && !strncmp(mime, "video/", 6) && - omx->storeMetaDataInBuffers( - node, 1 /* port index */, OMX_TRUE) == OK) { - caps->mFlags |= CodecCapabilities::kFlagSupportsAdaptivePlayback; + if (!isEncoder && !strncmp(mime, "video/", 6)) { + if (omx->storeMetaDataInBuffers( + node, 1 /* port index */, OMX_TRUE) == OK || + omx->prepareForAdaptivePlayback( + node, 1 /* port index */, OMX_TRUE, + 1280 /* width */, 720 /* height */) == OK) { + caps->mFlags |= CodecCapabilities::kFlagSupportsAdaptivePlayback; + } } CHECK_EQ(omx->freeNode(node), (status_t)OK); -- cgit v1.1 From e14a5d6d2cc91dd2fc09ffdf7aa670b37da0795d Mon Sep 17 00:00:00 2001 From: Marco Nelissen Date: Thu, 3 Oct 2013 08:51:24 -0700 Subject: Better 
recording wake lock accounting Count wake lock use against the app that initiated the recording, not against the media server. b/10985160 Change-Id: Iae6e6c030b7f1c6081d9c79725d6990f60dadaaa --- services/audioflinger/Threads.cpp | 30 +++++++++++++++++++----------- services/audioflinger/Threads.h | 5 +++-- 2 files changed, 22 insertions(+), 13 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 2d9d485..c31babc 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -476,13 +476,13 @@ void AudioFlinger::ThreadBase::dumpEffectChains(int fd, const Vector& } } -void AudioFlinger::ThreadBase::acquireWakeLock() +void AudioFlinger::ThreadBase::acquireWakeLock(int uid) { Mutex::Autolock _l(mLock); - acquireWakeLock_l(); + acquireWakeLock_l(uid); } -void AudioFlinger::ThreadBase::acquireWakeLock_l() +void AudioFlinger::ThreadBase::acquireWakeLock_l(int uid) { if (mPowerManager == 0) { // use checkService() to avoid blocking if power service is not up yet @@ -497,10 +497,19 @@ void AudioFlinger::ThreadBase::acquireWakeLock_l() } if (mPowerManager != 0) { sp binder = new BBinder(); - status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK, - binder, - String16(mName), - String16("media")); + status_t status; + if (uid >= 0) { + mPowerManager->acquireWakeLockWithUid(POWERMANAGER_PARTIAL_WAKE_LOCK, + binder, + String16(mName), + String16("media"), + uid); + } else { + mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK, + binder, + String16(mName), + String16("media")); + } if (status == NO_ERROR) { mWakeLockToken = binder; } @@ -4274,7 +4283,7 @@ AudioFlinger::RecordThread::RecordThread(const sp& audioFlinger, snprintf(mName, kNameLength, "AudioIn_%X", id); readInputParameters(); - + mClientUid = IPCThreadState::self()->getCallingUid(); } @@ -4306,7 +4315,7 @@ bool AudioFlinger::RecordThread::threadLoop() nsecs_t lastWarning = 0; inputStandBy(); - acquireWakeLock(); + acquireWakeLock(mClientUid); // used to verify we've read at least once before evaluating how many bytes were read bool readOnce = false; @@ -4331,7 +4340,7 @@ bool AudioFlinger::RecordThread::threadLoop() // go to sleep mWaitWorkCV.wait(mLock); ALOGV("RecordThread: loop starting"); - acquireWakeLock_l(); + acquireWakeLock_l(mClientUid); continue; } if (mActiveTrack != 0) { @@ -4553,7 +4562,6 @@ sp AudioFlinger::RecordThread::createR ALOGE("Audio driver not initialized."); goto Exit; } - // client expresses a preference for FAST, but we get the final say if (*flags & IAudioFlinger::TRACK_FAST) { if ( diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 241424f..0cb3ef7 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -237,8 +237,8 @@ protected: effect_uuid_t mType; // effect type UUID }; - void acquireWakeLock(); - void acquireWakeLock_l(); + void acquireWakeLock(int uid = -1); + void acquireWakeLock_l(int uid = -1); void releaseWakeLock(); void releaseWakeLock_l(); void setEffectSuspended_l(const effect_uuid_t *type, @@ -951,4 +951,5 @@ private: // For dumpsys const sp mTeeSink; + int mClientUid; }; -- cgit v1.1 From 1d1f846c0dbaa36d0944e7b1e54cc07863e00a92 Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Wed, 2 Oct 2013 16:29:51 -0700 Subject: Camera3: track request status in inflight queue Bug: 9758581 Change-Id: I1d7135cd0932bd6b453acabfeb9a553985c887bc --- .../camera/libcameraservice/device3/Camera3Device.cpp | 17 ++++++++++++++++- 
.../camera/libcameraservice/device3/Camera3Device.h | 2 ++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index b468eb3..5201ffb 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -1232,7 +1232,12 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) { } InFlightRequest &request = mInFlightMap.editValueAt(idx); timestamp = request.captureTimestamp; - if (timestamp == 0) { + /** + * One of the following must happen before it's legal to call process_capture_result: + * - CAMERA3_MSG_SHUTTER (expected during normal operation) + * - CAMERA3_MSG_ERROR (expected during flush) + */ + if (request.requestStatus == OK && timestamp == 0) { SET_ERR("Called before shutter notify for frame %d", frameNumber); return; @@ -1356,6 +1361,16 @@ void Camera3Device::notify(const camera3_notify_msg *msg) { ALOGV("Camera %d: %s: HAL error, frame %d, stream %d: %d", mId, __FUNCTION__, msg->message.error.frame_number, streamId, msg->message.error.error_code); + + // Set request error status for the request in the in-flight tracking + { + Mutex::Autolock l(mInFlightLock); + ssize_t idx = mInFlightMap.indexOfKey(msg->message.error.frame_number); + if (idx >= 0) { + mInFlightMap.editValueAt(idx).requestStatus = msg->message.error.error_code; + } + } + if (listener != NULL) { listener->notifyError(msg->message.error.error_code, msg->message.error.frame_number, streamId); diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h index 61caf13..92c90f1 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.h +++ b/services/camera/libcameraservice/device3/Camera3Device.h @@ -383,6 +383,7 @@ class Camera3Device : struct InFlightRequest { // Set by notify() SHUTTER call. nsecs_t captureTimestamp; + int requestStatus; // Set by process_capture_result call with valid metadata bool haveResultMetadata; // Decremented by calls to process_capture_result with valid output @@ -391,6 +392,7 @@ class Camera3Device : InFlightRequest() : captureTimestamp(0), + requestStatus(OK), haveResultMetadata(false), numBuffersLeft(0) { } -- cgit v1.1 From 385e7509eb563c983647e72b1232225c2200435f Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 4 Oct 2013 08:36:52 -0700 Subject: fix offload audio effect proxy implementation uuid The proxy implementation UUID should not be the NULL UUID as AudioFlinger will reject effect creation if the AudioEffect is constructed by passing the implementation UUID and not the type UUID. Bug: 11070481. Change-Id: Ia9049d974e76303c5b63a607ee594b7dc1f182d4 --- media/libeffects/proxy/EffectProxy.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp index b3304b7..dd4ad08 100644 --- a/media/libeffects/proxy/EffectProxy.cpp +++ b/media/libeffects/proxy/EffectProxy.cpp @@ -30,9 +30,10 @@ namespace android { // This is a dummy proxy descriptor just to return to Factory during the initial // GetDescriptor call. 
Later in the factory, it is replaced with the // SW sub effect descriptor +// proxy UUID af8da7e0-2ca1-11e3-b71d-0002a5d5c51b const effect_descriptor_t gProxyDescriptor = { EFFECT_UUID_INITIALIZER, // type - EFFECT_UUID_INITIALIZER, // uuid + {0xaf8da7e0, 0x2ca1, 0x11e3, 0xb71d, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }}, // uuid EFFECT_CONTROL_API_VERSION, //version of effect control API (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_VOLUME_CTRL), // effect capability flags -- cgit v1.1 From 8db188489871c770d5d56cf67b0001222415db41 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Thu, 3 Oct 2013 17:54:26 -0700 Subject: TimedEventQueue takes a wake lock Take a wake lock when events present in a TimedEventQueue to prevent from going idle before all delayed events are processed. Bug: 11057387. Change-Id: I26a17df68068fde5e879a2fe7568dec439fc540f --- media/libstagefright/Android.mk | 1 + media/libstagefright/TimedEventQueue.cpp | 76 +++++++++++++++++++++++++- media/libstagefright/include/TimedEventQueue.h | 25 +++++++++ 3 files changed, 99 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index 1f68b51..6a2a696 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -89,6 +89,7 @@ LOCAL_SHARED_LIBRARIES := \ libutils \ libvorbisidec \ libz \ + libpowermanager LOCAL_STATIC_LIBRARIES := \ libstagefright_color_conversion \ diff --git a/media/libstagefright/TimedEventQueue.cpp b/media/libstagefright/TimedEventQueue.cpp index 7e9c4bf..74c5905 100644 --- a/media/libstagefright/TimedEventQueue.cpp +++ b/media/libstagefright/TimedEventQueue.cpp @@ -31,17 +31,24 @@ #include #include +#include +#include namespace android { TimedEventQueue::TimedEventQueue() : mNextEventID(1), mRunning(false), - mStopped(false) { + mStopped(false), + mDeathRecipient(new PMDeathRecipient(this)) { } TimedEventQueue::~TimedEventQueue() { stop(); + if (mPowerManager != 0) { + sp binder = mPowerManager->asBinder(); + binder->unlinkToDeath(mDeathRecipient); + } } void TimedEventQueue::start() { @@ -76,6 +83,11 @@ void TimedEventQueue::stop(bool flush) { void *dummy; pthread_join(mThread, &dummy); + // some events may be left in the queue if we did not flush and the wake lock + // must be released. 
+ if (!mQueue.empty()) { + releaseWakeLock_l(); + } mQueue.clear(); mRunning = false; @@ -117,6 +129,9 @@ TimedEventQueue::event_id TimedEventQueue::postTimedEvent( mQueueHeadChangedCondition.signal(); } + if (mQueue.empty()) { + acquireWakeLock_l(); + } mQueue.insert(it, item); mQueueNotEmptyCondition.signal(); @@ -172,7 +187,9 @@ void TimedEventQueue::cancelEvents( (*it).event->setEventID(0); it = mQueue.erase(it); - + if (mQueue.empty()) { + releaseWakeLock_l(); + } if (stopAfterFirstMatch) { return; } @@ -280,7 +297,9 @@ sp TimedEventQueue::removeEventFromQueue_l( event->setEventID(0); mQueue.erase(it); - + if (mQueue.empty()) { + releaseWakeLock_l(); + } return event; } } @@ -290,5 +309,56 @@ sp TimedEventQueue::removeEventFromQueue_l( return NULL; } +void TimedEventQueue::acquireWakeLock_l() +{ + if (mWakeLockToken != 0) { + return; + } + if (mPowerManager == 0) { + // use checkService() to avoid blocking if power service is not up yet + sp binder = + defaultServiceManager()->checkService(String16("power")); + if (binder == 0) { + ALOGW("cannot connect to the power manager service"); + } else { + mPowerManager = interface_cast(binder); + binder->linkToDeath(mDeathRecipient); + } + } + if (mPowerManager != 0) { + sp binder = new BBinder(); + status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK, + binder, + String16("TimedEventQueue"), + String16("media")); + if (status == NO_ERROR) { + mWakeLockToken = binder; + } + } +} + +void TimedEventQueue::releaseWakeLock_l() +{ + if (mWakeLockToken == 0) { + return; + } + if (mPowerManager != 0) { + mPowerManager->releaseWakeLock(mWakeLockToken, 0); + } + mWakeLockToken.clear(); +} + +void TimedEventQueue::clearPowerManager() +{ + Mutex::Autolock _l(mLock); + releaseWakeLock_l(); + mPowerManager.clear(); +} + +void TimedEventQueue::PMDeathRecipient::binderDied(const wp& who) +{ + mQueue->clearPowerManager(); +} + } // namespace android diff --git a/media/libstagefright/include/TimedEventQueue.h b/media/libstagefright/include/TimedEventQueue.h index 11f844c..4e49c83 100644 --- a/media/libstagefright/include/TimedEventQueue.h +++ b/media/libstagefright/include/TimedEventQueue.h @@ -23,6 +23,7 @@ #include #include #include +#include namespace android { @@ -57,6 +58,21 @@ struct TimedEventQueue { Event &operator=(const Event &); }; + class PMDeathRecipient : public IBinder::DeathRecipient { + public: + PMDeathRecipient(TimedEventQueue *queue) : mQueue(queue) {} + virtual ~PMDeathRecipient() {} + + // IBinder::DeathRecipient + virtual void binderDied(const wp& who); + + private: + PMDeathRecipient(const PMDeathRecipient&); + PMDeathRecipient& operator = (const PMDeathRecipient&); + + TimedEventQueue *mQueue; + }; + TimedEventQueue(); ~TimedEventQueue(); @@ -96,6 +112,8 @@ struct TimedEventQueue { static int64_t getRealTimeUs(); + void clearPowerManager(); + private: struct QueueItem { sp event; @@ -118,11 +136,18 @@ private: bool mRunning; bool mStopped; + sp mPowerManager; + sp mWakeLockToken; + const sp mDeathRecipient; + static void *ThreadWrapper(void *me); void threadEntry(); sp removeEventFromQueue_l(event_id id); + void acquireWakeLock_l(); + void releaseWakeLock_l(); + TimedEventQueue(const TimedEventQueue &); TimedEventQueue &operator=(const TimedEventQueue &); }; -- cgit v1.1 From 547789d25dc6bd6561553bcf6b384fb0d4fee834 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 4 Oct 2013 11:46:55 -0700 Subject: audioflinger: fix wake lock acquisition check Commit e14a5d6 introduced a bug in 
ThreadBase::acquireWakeLock() where the wake lock acquisition return code is not initialized before being checked causing the wake lock token to be sometines not set and potential failure to release the wake lock later. Bug: 10985160. Change-Id: Iffd40e773ae3bcfec3c148a844d5dbebbf474eaf --- services/audioflinger/Threads.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 8aae892..943a70e 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -499,13 +499,13 @@ void AudioFlinger::ThreadBase::acquireWakeLock_l(int uid) sp binder = new BBinder(); status_t status; if (uid >= 0) { - mPowerManager->acquireWakeLockWithUid(POWERMANAGER_PARTIAL_WAKE_LOCK, + status = mPowerManager->acquireWakeLockWithUid(POWERMANAGER_PARTIAL_WAKE_LOCK, binder, String16(mName), String16("media"), uid); } else { - mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK, + status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK, binder, String16(mName), String16("media")); -- cgit v1.1 From e69e81c460416a814486fe2852efecee4ae1a13a Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 4 Oct 2013 08:36:52 -0700 Subject: fix offload audio effect proxy implementation uuid The proxy implementation UUID should not be the NULL UUID as AudioFlinger will reject effect creation if the AudioEffect is constructed by passing the implementation UUID and not the type UUID. Bug: 11070481. Change-Id: Ia9049d974e76303c5b63a607ee594b7dc1f182d4 --- media/libeffects/proxy/EffectProxy.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp index b3304b7..dd4ad08 100644 --- a/media/libeffects/proxy/EffectProxy.cpp +++ b/media/libeffects/proxy/EffectProxy.cpp @@ -30,9 +30,10 @@ namespace android { // This is a dummy proxy descriptor just to return to Factory during the initial // GetDescriptor call. Later in the factory, it is replaced with the // SW sub effect descriptor +// proxy UUID af8da7e0-2ca1-11e3-b71d-0002a5d5c51b const effect_descriptor_t gProxyDescriptor = { EFFECT_UUID_INITIALIZER, // type - EFFECT_UUID_INITIALIZER, // uuid + {0xaf8da7e0, 0x2ca1, 0x11e3, 0xb71d, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }}, // uuid EFFECT_CONTROL_API_VERSION, //version of effect control API (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_VOLUME_CTRL), // effect capability flags -- cgit v1.1 From d1d48a0da103237b69cc44d6ceb5b4c67fbc841b Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Thu, 3 Oct 2013 17:54:26 -0700 Subject: TimedEventQueue takes a wake lock Take a wake lock when events present in a TimedEventQueue to prevent from going idle before all delayed events are processed. Bug: 11057387. 
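The invariant behind this change, as a minimal sketch with hypothetical names (not the actual TimedEventQueue code): the wake lock is held exactly while events are pending, so it is acquired when the first event is queued and released when the last one is removed.

    #include <list>

    struct Event { int id; };

    class SketchQueue {                               // hypothetical stand-in for TimedEventQueue
    public:
        void insert(const Event &e) {                 // caller holds the queue mutex
            if (mQueue.empty()) acquireWakeLock();    // first pending event: keep the device awake
            mQueue.push_back(e);
        }
        void removeFirst() {                          // caller holds the queue mutex
            if (mQueue.empty()) return;
            mQueue.pop_front();
            if (mQueue.empty()) releaseWakeLock();    // nothing left to process: allow suspend
        }
    private:
        void acquireWakeLock() { /* PowerManager acquireWakeLock call, elided */ }
        void releaseWakeLock() { /* PowerManager releaseWakeLock call, elided */ }
        std::list<Event> mQueue;
    };
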
Change-Id: I26a17df68068fde5e879a2fe7568dec439fc540f --- media/libstagefright/Android.mk | 1 + media/libstagefright/TimedEventQueue.cpp | 76 +++++++++++++++++++++++++- media/libstagefright/include/TimedEventQueue.h | 25 +++++++++ 3 files changed, 99 insertions(+), 3 deletions(-) diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index 1f68b51..6a2a696 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -89,6 +89,7 @@ LOCAL_SHARED_LIBRARIES := \ libutils \ libvorbisidec \ libz \ + libpowermanager LOCAL_STATIC_LIBRARIES := \ libstagefright_color_conversion \ diff --git a/media/libstagefright/TimedEventQueue.cpp b/media/libstagefright/TimedEventQueue.cpp index 7e9c4bf..74c5905 100644 --- a/media/libstagefright/TimedEventQueue.cpp +++ b/media/libstagefright/TimedEventQueue.cpp @@ -31,17 +31,24 @@ #include #include +#include +#include namespace android { TimedEventQueue::TimedEventQueue() : mNextEventID(1), mRunning(false), - mStopped(false) { + mStopped(false), + mDeathRecipient(new PMDeathRecipient(this)) { } TimedEventQueue::~TimedEventQueue() { stop(); + if (mPowerManager != 0) { + sp binder = mPowerManager->asBinder(); + binder->unlinkToDeath(mDeathRecipient); + } } void TimedEventQueue::start() { @@ -76,6 +83,11 @@ void TimedEventQueue::stop(bool flush) { void *dummy; pthread_join(mThread, &dummy); + // some events may be left in the queue if we did not flush and the wake lock + // must be released. + if (!mQueue.empty()) { + releaseWakeLock_l(); + } mQueue.clear(); mRunning = false; @@ -117,6 +129,9 @@ TimedEventQueue::event_id TimedEventQueue::postTimedEvent( mQueueHeadChangedCondition.signal(); } + if (mQueue.empty()) { + acquireWakeLock_l(); + } mQueue.insert(it, item); mQueueNotEmptyCondition.signal(); @@ -172,7 +187,9 @@ void TimedEventQueue::cancelEvents( (*it).event->setEventID(0); it = mQueue.erase(it); - + if (mQueue.empty()) { + releaseWakeLock_l(); + } if (stopAfterFirstMatch) { return; } @@ -280,7 +297,9 @@ sp TimedEventQueue::removeEventFromQueue_l( event->setEventID(0); mQueue.erase(it); - + if (mQueue.empty()) { + releaseWakeLock_l(); + } return event; } } @@ -290,5 +309,56 @@ sp TimedEventQueue::removeEventFromQueue_l( return NULL; } +void TimedEventQueue::acquireWakeLock_l() +{ + if (mWakeLockToken != 0) { + return; + } + if (mPowerManager == 0) { + // use checkService() to avoid blocking if power service is not up yet + sp binder = + defaultServiceManager()->checkService(String16("power")); + if (binder == 0) { + ALOGW("cannot connect to the power manager service"); + } else { + mPowerManager = interface_cast(binder); + binder->linkToDeath(mDeathRecipient); + } + } + if (mPowerManager != 0) { + sp binder = new BBinder(); + status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK, + binder, + String16("TimedEventQueue"), + String16("media")); + if (status == NO_ERROR) { + mWakeLockToken = binder; + } + } +} + +void TimedEventQueue::releaseWakeLock_l() +{ + if (mWakeLockToken == 0) { + return; + } + if (mPowerManager != 0) { + mPowerManager->releaseWakeLock(mWakeLockToken, 0); + } + mWakeLockToken.clear(); +} + +void TimedEventQueue::clearPowerManager() +{ + Mutex::Autolock _l(mLock); + releaseWakeLock_l(); + mPowerManager.clear(); +} + +void TimedEventQueue::PMDeathRecipient::binderDied(const wp& who) +{ + mQueue->clearPowerManager(); +} + } // namespace android diff --git a/media/libstagefright/include/TimedEventQueue.h b/media/libstagefright/include/TimedEventQueue.h index 
11f844c..4e49c83 100644 --- a/media/libstagefright/include/TimedEventQueue.h +++ b/media/libstagefright/include/TimedEventQueue.h @@ -23,6 +23,7 @@ #include #include #include +#include namespace android { @@ -57,6 +58,21 @@ struct TimedEventQueue { Event &operator=(const Event &); }; + class PMDeathRecipient : public IBinder::DeathRecipient { + public: + PMDeathRecipient(TimedEventQueue *queue) : mQueue(queue) {} + virtual ~PMDeathRecipient() {} + + // IBinder::DeathRecipient + virtual void binderDied(const wp& who); + + private: + PMDeathRecipient(const PMDeathRecipient&); + PMDeathRecipient& operator = (const PMDeathRecipient&); + + TimedEventQueue *mQueue; + }; + TimedEventQueue(); ~TimedEventQueue(); @@ -96,6 +112,8 @@ struct TimedEventQueue { static int64_t getRealTimeUs(); + void clearPowerManager(); + private: struct QueueItem { sp event; @@ -118,11 +136,18 @@ private: bool mRunning; bool mStopped; + sp mPowerManager; + sp mWakeLockToken; + const sp mDeathRecipient; + static void *ThreadWrapper(void *me); void threadEntry(); sp removeEventFromQueue_l(event_id id); + void acquireWakeLock_l(); + void releaseWakeLock_l(); + TimedEventQueue(const TimedEventQueue &); TimedEventQueue &operator=(const TimedEventQueue &); }; -- cgit v1.1 From ea0fadeb5d81ef3cb7f9db458c9033d628bdb86a Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Fri, 4 Oct 2013 16:23:48 -0700 Subject: audioflinger: offload: fix pause/flush/resume If a pause/flush/resume sequence is fast enough, resume is received while we are still in PAUSING state in which case it is a NOP. If this happens, flush is still forwarded to the audio HAL but is not preceeded by a pause which can cause problems to the audio DSP. It is necessary to preserve the flush as this sequence is typical to a seek. The fix consists in forcing a pause/resume when a flush request must be executed and the audio HAL has not been paused previously. Bug: 11081559. Change-Id: Ib84ed26d503a61c05933b923ec556b10cedfe140 --- services/audioflinger/Threads.cpp | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 943a70e..b618eff 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -3864,6 +3864,7 @@ AudioFlinger::OffloadThread::OffloadThread(const sp& audioFlinger, AudioStreamOut* output, audio_io_handle_t id, uint32_t device) : DirectOutputThread(audioFlinger, output, id, device, OFFLOAD), mHwPaused(false), + mFlushPending(false), mPausedBytesRemaining(0) { } @@ -4029,9 +4030,15 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr processVolume_l(track, last); } - // make sure the pause/flush/resume sequence is executed in the right order - if (doHwPause) { + // make sure the pause/flush/resume sequence is executed in the right order. + // If a flush is pending and a track is active but the HW is not paused, force a HW pause + // before flush and then resume HW. This can happen in case of pause/flush/resume + // if resume is received before pause is executed. 
+ if (doHwPause || (mFlushPending && !mHwPaused && (count != 0))) { mOutput->stream->pause(mOutput->stream); + if (!doHwPause) { + doHwResume = true; + } } if (mFlushPending) { flushHw_l(); -- cgit v1.1 From e8b75efbd743e8cf8e9799c87b7bca5423fef135 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Tue, 24 Sep 2013 11:52:37 -0700 Subject: Cleanup openRecord error handling Bug: 10888816 Change-Id: I84897dd7d30b370640b54e928f230604b873cb68 --- include/media/IAudioFlinger.h | 3 +++ media/libmedia/AudioRecord.cpp | 9 +++++++-- media/libmedia/IAudioFlinger.cpp | 12 ++++++++++++ services/audioflinger/AudioFlinger.cpp | 4 ++++ services/audioflinger/Threads.cpp | 4 +++- 5 files changed, 29 insertions(+), 3 deletions(-) diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index 49f921b..eaf7780 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -53,6 +53,9 @@ public: }; typedef uint32_t track_flags_t; + // invariant on exit for all APIs that return an sp<>: + // (return value != 0) == (*status == NO_ERROR) + /* create an audio track and registers it with AudioFlinger. * return null if the track cannot be created. */ diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index c5a7777..666fafa 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -474,7 +474,7 @@ status_t AudioRecord::openRecord_l(size_t epoch) ALOGE_IF(originalSessionId != 0 && mSessionId != originalSessionId, "session ID changed from %d to %d", originalSessionId, mSessionId); - if (record == 0) { + if (record == 0 || status != NO_ERROR) { ALOGE("AudioFlinger could not create record track, status: %d", status); AudioSystem::releaseInput(input); return status; @@ -484,6 +484,11 @@ status_t AudioRecord::openRecord_l(size_t epoch) ALOGE("Could not get control block"); return NO_INIT; } + void *iMemPointer = iMem->pointer(); + if (iMemPointer == NULL) { + ALOGE("Could not get control block pointer"); + return NO_INIT; + } if (mAudioRecord != 0) { mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this); mDeathNotifier.clear(); @@ -491,7 +496,7 @@ status_t AudioRecord::openRecord_l(size_t epoch) mInput = input; mAudioRecord = record; mCblkMemory = iMem; - audio_track_cblk_t* cblk = static_cast(iMem->pointer()); + audio_track_cblk_t* cblk = static_cast(iMemPointer); mCblk = cblk; // FIXME missing fast track frameCount logic mAwaitBoost = false; diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index 68928f1..448a82e 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -184,6 +184,17 @@ public: } lStatus = reply.readInt32(); record = interface_cast(reply.readStrongBinder()); + if (lStatus == NO_ERROR) { + if (record == 0) { + ALOGE("openRecord should have returned an IAudioRecord"); + lStatus = UNKNOWN_ERROR; + } + } else { + if (record != 0) { + ALOGE("openRecord returned an IAudioRecord but with status %d", lStatus); + record.clear(); + } + } } if (status) { *status = lStatus; @@ -784,6 +795,7 @@ status_t BnAudioFlinger::onTransact( status_t status; sp record = openRecord(input, sampleRate, format, channelMask, frameCount, &flags, tid, &sessionId, &status); + LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR)); reply->writeInt32(flags); reply->writeInt32(sessionId); reply->writeInt32(status); diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index e70d566..a9c9b56 100644 --- a/services/audioflinger/AudioFlinger.cpp 
+++ b/services/audioflinger/AudioFlinger.cpp @@ -1242,6 +1242,7 @@ sp AudioFlinger::openRecord( // check calling permissions if (!recordingAllowed()) { + ALOGE("openRecord() permission denied: recording not allowed"); lStatus = PERMISSION_DENIED; goto Exit; } @@ -1257,12 +1258,14 @@ sp AudioFlinger::openRecord( Mutex::Autolock _l(mLock); thread = checkRecordThread_l(input); if (thread == NULL) { + ALOGE("openRecord() checkRecordThread_l failed"); lStatus = BAD_VALUE; goto Exit; } if (deviceRequiresCaptureAudioOutputPermission(thread->inDevice()) && !captureAudioOutputAllowed()) { + ALOGE("openRecord() permission denied: capture not allowed"); lStatus = PERMISSION_DENIED; goto Exit; } @@ -1283,6 +1286,7 @@ sp AudioFlinger::openRecord( // The record track uses one track in mHardwareMixerThread by convention. recordTrack = thread->createRecordTrack_l(client, sampleRate, format, channelMask, frameCount, lSessionId, flags, tid, &lStatus); + LOG_ALWAYS_FATAL_IF((recordTrack != 0) != (lStatus == NO_ERROR)); } if (lStatus != NO_ERROR) { // remove local strong reference to Client before deleting the RecordTrack so that the diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index b618eff..c46242d 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -4575,7 +4575,7 @@ sp AudioFlinger::RecordThread::createR lStatus = initCheck(); if (lStatus != NO_ERROR) { - ALOGE("Audio driver not initialized."); + ALOGE("createRecordTrack_l() audio driver not initialized"); goto Exit; } // client expresses a preference for FAST, but we get the final say @@ -4638,7 +4638,9 @@ sp AudioFlinger::RecordThread::createR format, channelMask, frameCount, sessionId); if (track->getCblk() == 0) { + ALOGE("createRecordTrack_l() no control block"); lStatus = NO_MEMORY; + track.clear(); goto Exit; } mTracks.add(track); -- cgit v1.1 From 215bb3499c7eeea6303e55fac66452f2574c022a Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Mon, 7 Oct 2013 18:01:46 -0700 Subject: camera2: Don't race while dumping last frame metadata Bug: 11095203 Change-Id: Icfb31e1719634b62004d6c15a95a9316e9642e4c --- services/camera/libcameraservice/common/FrameProcessorBase.cpp | 10 +++++++++- services/camera/libcameraservice/common/FrameProcessorBase.h | 1 + 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp index e7b440a..52906ee 100644 --- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp +++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp @@ -66,7 +66,14 @@ status_t FrameProcessorBase::removeListener(int32_t minId, void FrameProcessorBase::dump(int fd, const Vector& /*args*/) { String8 result(" Latest received frame:\n"); write(fd, result.string(), result.size()); - mLastFrame.dump(fd, 2, 6); + + CameraMetadata lastFrame; + { + // Don't race while dumping metadata + Mutex::Autolock al(mLastFrameMutex); + lastFrame = CameraMetadata(mLastFrame); + } + lastFrame.dump(fd, 2, 6); } bool FrameProcessorBase::threadLoop() { @@ -113,6 +120,7 @@ void FrameProcessorBase::processNewFrames(const sp &device) { } if (!frame.isEmpty()) { + Mutex::Autolock al(mLastFrameMutex); mLastFrame.acquire(frame); } } diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.h b/services/camera/libcameraservice/common/FrameProcessorBase.h index f96caff..4d80ebf 100644 --- 
a/services/camera/libcameraservice/common/FrameProcessorBase.h +++ b/services/camera/libcameraservice/common/FrameProcessorBase.h @@ -58,6 +58,7 @@ class FrameProcessorBase: public Thread { virtual bool threadLoop(); Mutex mInputMutex; + Mutex mLastFrameMutex; struct RangeListener { int32_t minId; -- cgit v1.1 From aab5b08cb4a3b5a47daece6168f41ec918020739 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Tue, 8 Oct 2013 09:22:31 -0700 Subject: AwesomePlayer: do not send events when paused. When streaming audio and paused, AwesomePlayer should stop sending BufferingEvents as they will keep a wake lock for no reason. TimedEventQueue should always acquire the wakelock with mediaserver identity so that it is released with the same identity by the event handler thread. Bug: 11104408. Change-Id: Ied0e03acd6ad2f5a4c0ec82d5c2aa4e1c6da772c --- media/libstagefright/AwesomePlayer.cpp | 8 +++++++- media/libstagefright/TimedEventQueue.cpp | 6 ++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index e7cfc78..be6719a 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -798,7 +798,9 @@ void AwesomePlayer::onBufferingUpdate() { } } - postBufferingEvent_l(); + if (mFlags & (PLAYING | PREPARING)) { + postBufferingEvent_l(); + } } void AwesomePlayer::sendCacheStats() { @@ -1001,6 +1003,10 @@ status_t AwesomePlayer::play_l() { } addBatteryData(params); + if (isStreamingHTTP()) { + postBufferingEvent_l(); + } + return OK; } diff --git a/media/libstagefright/TimedEventQueue.cpp b/media/libstagefright/TimedEventQueue.cpp index 74c5905..6a16bb4 100644 --- a/media/libstagefright/TimedEventQueue.cpp +++ b/media/libstagefright/TimedEventQueue.cpp @@ -33,6 +33,8 @@ #include #include #include +#include +#include namespace android { @@ -327,10 +329,12 @@ void TimedEventQueue::acquireWakeLock_l() } if (mPowerManager != 0) { sp binder = new BBinder(); + int64_t token = IPCThreadState::self()->clearCallingIdentity(); status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK, binder, String16("TimedEventQueue"), String16("media")); + IPCThreadState::self()->restoreCallingIdentity(token); if (status == NO_ERROR) { mWakeLockToken = binder; } @@ -343,7 +347,9 @@ void TimedEventQueue::releaseWakeLock_l() return; } if (mPowerManager != 0) { + int64_t token = IPCThreadState::self()->clearCallingIdentity(); mPowerManager->releaseWakeLock(mWakeLockToken, 0); + IPCThreadState::self()->restoreCallingIdentity(token); } mWakeLockToken.clear(); } -- cgit v1.1 From 07d2169d46f3536add6044dbf106967a1982252f Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Tue, 24 Sep 2013 18:04:19 -0700 Subject: Camera3: Improve resilience to stream failures - Handle binder connection drops (DEAD_OBJECT) for streams - Just log an error, don't go to an error state when queue/dequeue fails Bug: 10347526 Change-Id: Ib463ffe15d58db444cf5d8cad176a201c7d1facc --- .../camera/libcameraservice/device3/Camera3Device.cpp | 8 ++++---- .../libcameraservice/device3/Camera3IOStreamBase.cpp | 18 +++++++++--------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index 1f853ab..303823c 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -1506,7 +1506,7 @@ void 
Camera3Device::processCaptureResult(const camera3_capture_result *result) { // Note: stream may be deallocated at this point, if this buffer was the // last reference to it. if (res != OK) { - SET_ERR("Can't return buffer %d for frame %d to its stream: " + ALOGE("Can't return buffer %d for frame %d to its stream: " " %s (%d)", i, frameNumber, strerror(-res), res); } } @@ -1867,7 +1867,7 @@ bool Camera3Device::RequestThread::threadLoop() { request.input_buffer = &inputBuffer; res = nextRequest->mInputStream->getInputBuffer(&inputBuffer); if (res != OK) { - SET_ERR("RequestThread: Can't get input buffer, skipping request:" + ALOGE("RequestThread: Can't get input buffer, skipping request:" " %s (%d)", strerror(-res), res); cleanUpFailedRequest(request, nextRequest, outputBuffers); return true; @@ -1883,8 +1883,8 @@ bool Camera3Device::RequestThread::threadLoop() { res = nextRequest->mOutputStreams.editItemAt(i)-> getBuffer(&outputBuffers.editItemAt(i)); if (res != OK) { - SET_ERR("RequestThread: Can't get output buffer, skipping request:" - "%s (%d)", strerror(-res), res); + ALOGE("RequestThread: Can't get output buffer, skipping request:" + " %s (%d)", strerror(-res), res); cleanUpFailedRequest(request, nextRequest, outputBuffers); return true; } diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp index 727a8c9..da51228 100644 --- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp +++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp @@ -214,19 +214,19 @@ status_t Camera3IOStreamBase::returnAnyBufferLocked( sp releaseFence; res = returnBufferCheckedLocked(buffer, timestamp, output, &releaseFence); - if (res != OK) { - // NO_INIT means the buffer queue is abandoned, so to be resilient, - // still want to decrement in-flight counts. - if (res != NO_INIT) { - return res; - } - } + // Res may be an error, but we still want to decrement our owned count + // to enable clean shutdown. So we'll just return the error but otherwise + // carry on - mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); + if (releaseFence != 0) { + mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence); + } mDequeuedBufferCount--; if (mDequeuedBufferCount == 0 && mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) { + ALOGV("%s: Stream %d: All buffers returned; now idle", __FUNCTION__, + mId); sp statusTracker = mStatusTracker.promote(); if (statusTracker != 0) { statusTracker->markComponentIdle(mStatusId, mCombinedFence); @@ -239,7 +239,7 @@ status_t Camera3IOStreamBase::returnAnyBufferLocked( mLastTimestamp = timestamp; } - return OK; + return res; } -- cgit v1.1 From cc27e117ed01c9a2b4def5a9c7a3103af83ee47e Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Thu, 3 Oct 2013 16:12:43 -0700 Subject: Camera: Save 1 frame time for takePicture call Move the latest request signal call right before submitting request to HAL, which avoids unnecessary 1 frame time wait. Also fix request status constructor issue, and the log tag for Camera2Client. 
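The frame-time saving comes from publishing the request ID to waiters before, rather than after, the blocking HAL submit. A minimal sketch with hypothetical names, using std::mutex and std::condition_variable in place of the Android Mutex/Condition:

    #include <mutex>
    #include <condition_variable>

    struct RequestLoopSketch {
        std::mutex latestRequestMutex;
        std::condition_variable latestRequestSignal;
        int latestRequestId = -1;

        void submitOne(int requestId) {
            {
                std::lock_guard<std::mutex> lock(latestRequestMutex);
                latestRequestId = requestId;       // previously published only after the submit
                latestRequestSignal.notify_one();  // wake waitUntilRequestProcessed() waiters now
            }
            processCaptureRequest(requestId);      // blocks until the HAL can take the next request
        }

        void processCaptureRequest(int /*requestId*/) { /* HAL submission, elided */ }
    };
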
Bug: 10570497 Change-Id: I0b1818658bee9983b117e49a5a00f1aab515c567 --- services/camera/libcameraservice/api1/Camera2Client.cpp | 2 +- .../camera/libcameraservice/device3/Camera3Device.cpp | 17 ++++++++--------- .../camera/libcameraservice/device3/Camera3Device.h | 1 + 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp index 09829ea..9b1c947 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.cpp +++ b/services/camera/libcameraservice/api1/Camera2Client.cpp @@ -14,7 +14,7 @@ * limitations under the License. */ -#define LOG_TAG "Camera2" +#define LOG_TAG "Camera2Client" #define ATRACE_TAG ATRACE_TAG_CAMERA //#define LOG_NDEBUG 0 diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index 303823c..81bbeee 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -1910,6 +1910,14 @@ bool Camera3Device::RequestThread::threadLoop() { return false; } + // Inform waitUntilRequestProcessed thread of a new request ID + { + Mutex::Autolock al(mLatestRequestMutex); + + mLatestRequestId = requestId; + mLatestRequestSignal.signal(); + } + // Submit request and block until ready for next one ATRACE_ASYNC_BEGIN("frame capture", request.frame_number); ATRACE_BEGIN("camera3->process_capture_request"); @@ -1945,15 +1953,6 @@ bool Camera3Device::RequestThread::threadLoop() { } mPrevTriggers = triggerCount; - // Read android.request.id from the request settings metadata - // - inform waitUntilRequestProcessed thread of a new request ID - { - Mutex::Autolock al(mLatestRequestMutex); - - mLatestRequestId = requestId; - mLatestRequestSignal.signal(); - } - // Return input buffer back to framework if (request.input_buffer != NULL) { Camera3Stream *stream = diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h index c2b0867..12252c8 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.h +++ b/services/camera/libcameraservice/device3/Camera3Device.h @@ -457,6 +457,7 @@ class Camera3Device : InFlightRequest(int id, int numBuffers) : requestId(id), captureTimestamp(0), + requestStatus(OK), haveResultMetadata(false), numBuffersLeft(numBuffers) { } -- cgit v1.1 From 2b07e0207da44d7b3cc63c369fd10c9f12a5e2cd Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Thu, 10 Oct 2013 15:13:09 -0700 Subject: Camera2/3: Add more tracing for API1 - AF state - Async autofocus completion - Async takePicture completion - Stages of takePicture Bug: 10570497 Change-Id: Ida9fedd81aa4ee3ae3fb8dfada858a3bc3c213a3 --- .../camera/libcameraservice/api1/Camera2Client.cpp | 33 ++++++++++++++++++++++ .../camera/libcameraservice/api1/Camera2Client.h | 4 +++ .../api1/client2/CaptureSequencer.cpp | 13 ++++++++- .../api1/client2/CaptureSequencer.h | 1 + .../libcameraservice/api1/client2/Parameters.cpp | 3 ++ .../libcameraservice/api1/client2/Parameters.h | 3 ++ .../libcameraservice/device3/Camera3Device.cpp | 1 - 7 files changed, 56 insertions(+), 2 deletions(-) diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp index 09829ea..315fe25 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.cpp +++ b/services/camera/libcameraservice/api1/Camera2Client.cpp @@ -1149,6 +1149,8 @@ 
status_t Camera2Client::autoFocus() { l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter; triggerId = l.mParameters.currentAfTriggerId; } + ATRACE_ASYNC_BEGIN(kAutofocusLabel, triggerId); + syncWithDevice(); mDevice->triggerAutofocus(triggerId); @@ -1171,6 +1173,12 @@ status_t Camera2Client::cancelAutoFocus() { l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) { return OK; } + + // An active AF trigger is canceled + if (l.mParameters.afTriggerCounter == l.mParameters.currentAfTriggerId) { + ATRACE_ASYNC_END(kAutofocusLabel, l.mParameters.currentAfTriggerId); + } + triggerId = ++l.mParameters.afTriggerCounter; // When using triggerAfWithAuto quirk, may need to reset focus mode to @@ -1199,6 +1207,7 @@ status_t Camera2Client::takePicture(int msgType) { status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; + int takePictureCounter; { SharedParameters::Lock l(mParameters); switch (l.mParameters.state) { @@ -1237,8 +1246,11 @@ status_t Camera2Client::takePicture(int msgType) { __FUNCTION__, mCameraId, strerror(-res), res); return res; } + takePictureCounter = ++l.mParameters.takePictureCounter; } + ATRACE_ASYNC_BEGIN(kTakepictureLabel, takePictureCounter); + // Need HAL to have correct settings before (possibly) triggering precapture syncWithDevice(); @@ -1466,7 +1478,24 @@ void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) { bool afInMotion = false; { SharedParameters::Lock l(mParameters); + // Trace end of AF state + char tmp[32]; + if (l.mParameters.afStateCounter > 0) { + camera_metadata_enum_snprint( + ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp)); + ATRACE_ASYNC_END(tmp, l.mParameters.afStateCounter); + } + + // Update state l.mParameters.focusState = newState; + l.mParameters.afStateCounter++; + + // Trace start of AF state + + camera_metadata_enum_snprint( + ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp)); + ATRACE_ASYNC_BEGIN(tmp, l.mParameters.afStateCounter); + switch (l.mParameters.focusMode) { case Parameters::FOCUS_MODE_AUTO: case Parameters::FOCUS_MODE_MACRO: @@ -1560,6 +1589,7 @@ void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) { } } if (sendCompletedMessage) { + ATRACE_ASYNC_END(kAutofocusLabel, triggerId); SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); if (l.mRemoteCallback != 0) { l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, @@ -1769,4 +1799,7 @@ status_t Camera2Client::updateProcessorStream(sp processor, return res; } +const char* Camera2Client::kAutofocusLabel = "autofocus"; +const char* Camera2Client::kTakepictureLabel = "take_picture"; + } // namespace android diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h index 53629a1..fe0bf74 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.h +++ b/services/camera/libcameraservice/api1/Camera2Client.h @@ -136,6 +136,10 @@ public: static const int32_t kCaptureRequestIdStart = 30000000; static const int32_t kCaptureRequestIdEnd = 40000000; + // Constant strings for ATRACE logging + static const char* kAutofocusLabel; + static const char* kTakepictureLabel; + private: /** ICamera interface-related private members */ typedef camera2::Parameters Parameters; diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp index 1a1b27b..c8920bb 100644 --- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp +++ 
b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp @@ -43,6 +43,7 @@ CaptureSequencer::CaptureSequencer(wp client): mShutterNotified(false), mClient(client), mCaptureState(IDLE), + mStateTransitionCount(0), mTriggerId(0), mTimeoutCount(0), mCaptureId(Camera2Client::kCaptureRequestIdStart), @@ -198,8 +199,14 @@ bool CaptureSequencer::threadLoop() { Mutex::Autolock l(mStateMutex); if (currentState != mCaptureState) { + if (mCaptureState != IDLE) { + ATRACE_ASYNC_END(kStateNames[mCaptureState], mStateTransitionCount); + } mCaptureState = currentState; - ATRACE_INT("cam2_capt_state", mCaptureState); + mStateTransitionCount++; + if (mCaptureState != IDLE) { + ATRACE_ASYNC_BEGIN(kStateNames[mCaptureState], mStateTransitionCount); + } ALOGV("Camera %d: New capture state %s", client->getCameraId(), kStateNames[mCaptureState]); mStateChanged.signal(); @@ -243,6 +250,7 @@ CaptureSequencer::CaptureState CaptureSequencer::manageDone(sp &c mBusy = false; } + int takePictureCounter = 0; { SharedParameters::Lock l(client->getParameters()); switch (l.mParameters.state) { @@ -270,6 +278,7 @@ CaptureSequencer::CaptureState CaptureSequencer::manageDone(sp &c Parameters::getStateName(l.mParameters.state)); res = INVALID_OPERATION; } + takePictureCounter = l.mParameters.takePictureCounter; } sp processor = mZslProcessor.promote(); if (processor != 0) { @@ -282,6 +291,8 @@ CaptureSequencer::CaptureState CaptureSequencer::manageDone(sp &c * Fire the jpegCallback in Camera#takePicture(..., jpegCallback) */ if (mCaptureBuffer != 0 && res == OK) { + ATRACE_ASYNC_END(Camera2Client::kTakepictureLabel, takePictureCounter); + Camera2Client::SharedCameraCallbacks::Lock l(client->mSharedCameraCallbacks); ALOGV("%s: Sending still image to client", __FUNCTION__); diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h index e1e6201..9fb4ee7 100644 --- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.h +++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h @@ -125,6 +125,7 @@ class CaptureSequencer: NUM_CAPTURE_STATES } mCaptureState; static const char* kStateNames[]; + int mStateTransitionCount; Mutex mStateMutex; // Guards mCaptureState Condition mStateChanged; diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp index 8e197a9..9317aa3 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.cpp +++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp @@ -805,11 +805,14 @@ status_t Parameters::initialize(const CameraMetadata *info) { enableFocusMoveMessages = false; afTriggerCounter = 1; + afStateCounter = 0; currentAfTriggerId = -1; afInMotion = false; precaptureTriggerCounter = 1; + takePictureCounter = 0; + previewCallbackFlags = 0; previewCallbackOneShot = false; previewCallbackSurface = false; diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h index 2e78c73..bcbdb99 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.h +++ b/services/camera/libcameraservice/api1/client2/Parameters.h @@ -141,11 +141,14 @@ struct Parameters { bool enableFocusMoveMessages; int afTriggerCounter; + int afStateCounter; int currentAfTriggerId; bool afInMotion; int precaptureTriggerCounter; + int takePictureCounter; + uint32_t previewCallbackFlags; bool previewCallbackOneShot; bool 
previewCallbackSurface; diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index 303823c..92b390d 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -999,7 +999,6 @@ bool Camera3Device::willNotify3A() { } status_t Camera3Device::waitForNextFrame(nsecs_t timeout) { - ATRACE_CALL(); status_t res; Mutex::Autolock l(mOutputLock); -- cgit v1.1 From b790abf4d17f1c6865af7eb1595ec94dc0306447 Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Fri, 11 Oct 2013 10:26:52 -0700 Subject: Camera: Skip AE precapture when possible Skip AE precapture when AE is already converged for still capture use case. This could save still capture latency 6-7 frame time for AE already converged case. Bug: 10712902 Change-Id: Ie5512618b76e5d87c62c57c3d96d2004c604e29e --- .../libcameraservice/api1/client2/CaptureSequencer.cpp | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp index 1a1b27b..4260c3a 100644 --- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp +++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp @@ -379,11 +379,23 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardStart( sp &client) { ATRACE_CALL(); + bool isAeConverged = false; // Get the onFrameAvailable callback when the requestID == mCaptureId client->registerFrameListener(mCaptureId, mCaptureId + 1, this); + + { + Mutex::Autolock l(mInputMutex); + isAeConverged = (mAEState == ANDROID_CONTROL_AE_STATE_CONVERGED); + } + { SharedParameters::Lock l(client->getParameters()); + // Skip AE precapture when it is already converged and not in force flash mode. + if (l.mParameters.flashMode != Parameters::FLASH_MODE_ON && isAeConverged) { + return STANDARD_CAPTURE; + } + mTriggerId = l.mParameters.precaptureTriggerCounter++; } client->getCameraDevice()->triggerPrecaptureMetering(mTriggerId); -- cgit v1.1 From bf33cd55b1a27e9144055ecfbf026cc5516031d6 Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Fri, 11 Oct 2013 10:26:52 -0700 Subject: Camera: Skip AE precapture when possible Skip AE precapture when AE is already converged for still capture use case. This could save still capture latency 6-7 frame time for AE already converged case. Bug: 10712902 Change-Id: Ie5512618b76e5d87c62c57c3d96d2004c604e29e --- .../libcameraservice/api1/client2/CaptureSequencer.cpp | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp index 1a1b27b..4260c3a 100644 --- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp +++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp @@ -379,11 +379,23 @@ CaptureSequencer::CaptureState CaptureSequencer::manageStandardStart( sp &client) { ATRACE_CALL(); + bool isAeConverged = false; // Get the onFrameAvailable callback when the requestID == mCaptureId client->registerFrameListener(mCaptureId, mCaptureId + 1, this); + + { + Mutex::Autolock l(mInputMutex); + isAeConverged = (mAEState == ANDROID_CONTROL_AE_STATE_CONVERGED); + } + { SharedParameters::Lock l(client->getParameters()); + // Skip AE precapture when it is already converged and not in force flash mode. 
+ if (l.mParameters.flashMode != Parameters::FLASH_MODE_ON && isAeConverged) { + return STANDARD_CAPTURE; + } + mTriggerId = l.mParameters.precaptureTriggerCounter++; } client->getCameraDevice()->triggerPrecaptureMetering(mTriggerId); -- cgit v1.1 From 15ad2470b2f2ac34473eb568b606ad75e8e63ac6 Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Fri, 11 Oct 2013 16:21:11 -0700 Subject: Camera: Fix deadlock in Camera3OutputStream process capture request thread, process capture result and setParameter binder threads can run into circular locking situation when acquiring StreamingProcessor lock, Camera3Stream lock, and bufferQueue lock. Releasing the Camera3Stream lock briefly in process capture request thread getbuffer call can break this deadlock. Bug: 11016037 Change-Id: If08d4b134c26be26039b1d5363759e60f911bad6 --- .../libcameraservice/device3/Camera3OutputStream.cpp | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp index 41328fc..682755d 100644 --- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp +++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp @@ -92,7 +92,22 @@ status_t Camera3OutputStream::getBufferLocked(camera3_stream_buffer *buffer) { ANativeWindowBuffer* anb; int fenceFd; - res = mConsumer->dequeueBuffer(mConsumer.get(), &anb, &fenceFd); + /** + * Release the lock briefly to avoid deadlock for below scenario: + * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring(). + * This thread acquired StreamingProcessor lock and try to lock Camera3Stream lock. + * Thread 2: Camera3Stream::returnBuffer->StreamingProcessor::onFrameAvailable(). + * This thread acquired Camera3Stream lock and bufferQueue lock, and try to lock + * StreamingProcessor lock. + * Thread 3: Camera3Stream::getBuffer(). This thread acquired Camera3Stream lock + * and try to lock bufferQueue lock. + * Then there is circular locking dependency. + */ + sp currentConsumer = mConsumer; + mLock.unlock(); + + res = currentConsumer->dequeueBuffer(currentConsumer.get(), &anb, &fenceFd); + mLock.lock(); if (res != OK) { ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)", __FUNCTION__, mId, strerror(-res), res); -- cgit v1.1 From b3cb72a17d9a472883e9e2faa18b42eac533fe99 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Sat, 12 Oct 2013 17:05:19 -0700 Subject: SoundPool: handle new audio track event If the AudioTrack is torn down, SoundPool will never receive the buffer end event and the track will stay active for ever. The fix consists in stopping the AudioTrack when a new audiotrack event is received. Bug: 11193583. 
Change-Id: I9876eb2a8f75c601368f669acd67b0accf6e2736 --- media/libmedia/SoundPool.cpp | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index 8434d43..22e9fad 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -744,11 +744,16 @@ void SoundChannel::process(int event, void *info, unsigned long toggle) b->size = count; //ALOGV("buffer=%p, [0]=%d", b->i16, b->i16[0]); } - } else if (event == AudioTrack::EVENT_UNDERRUN || event == AudioTrack::EVENT_BUFFER_END) { - ALOGV("process %p channel %d EVENT_UNDERRUN or EVENT_BUFFER_END", this, mChannelID); + } else if (event == AudioTrack::EVENT_UNDERRUN || event == AudioTrack::EVENT_BUFFER_END || + event == AudioTrack::EVENT_NEW_IAUDIOTRACK) { + ALOGV("process %p channel %d event %s", + this, mChannelID, (event == AudioTrack::EVENT_UNDERRUN) ? "UNDERRUN" : + (event == AudioTrack::EVENT_BUFFER_END) ? "BUFFER_END" : "NEW_IAUDIOTRACK"); mSoundPool->addToStopList(this); } else if (event == AudioTrack::EVENT_LOOP_END) { ALOGV("End loop %p channel %d", this, mChannelID); + } else { + ALOGW("SoundChannel::process unexpected event %d", event); } } -- cgit v1.1 From 014e7fa2e90827d911c37bb0ce4d2e10e14d0bb3 Mon Sep 17 00:00:00 2001 From: Narayan Kamath Date: Mon, 14 Oct 2013 15:03:38 +0100 Subject: Make Audio wakelock names less unique. These wakelocks now show up in the client process, where they might cause wakelock overflows if AudioFlinger threads die often. The client process should be agnostic of audio flinger threading, so don't make the wakelock names rely on audioflinger thread names. Wakelock names now depend only on the audio flinger thread type, and not the ID itself. bug: 11200551 Change-Id: Ia1f71b569cb7502d617b77596abc245f461a6ec0 --- services/audioflinger/Threads.cpp | 23 +++++++++++++++++++++-- services/audioflinger/Threads.h | 2 ++ 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index c46242d..47dcca6 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -482,6 +482,25 @@ void AudioFlinger::ThreadBase::acquireWakeLock(int uid) acquireWakeLock_l(uid); } +String16 AudioFlinger::ThreadBase::getWakeLockTag() +{ + switch (mType) { + case MIXER: + return String16("AudioMix"); + case DIRECT: + return String16("AudioDirectOut"); + case DUPLICATING: + return String16("AudioDup"); + case RECORD: + return String16("AudioIn"); + case OFFLOAD: + return String16("AudioOffload"); + default: + ALOG_ASSERT(false); + return String16("AudioUnknown"); + } +} + void AudioFlinger::ThreadBase::acquireWakeLock_l(int uid) { if (mPowerManager == 0) { @@ -501,13 +520,13 @@ void AudioFlinger::ThreadBase::acquireWakeLock_l(int uid) if (uid >= 0) { status = mPowerManager->acquireWakeLockWithUid(POWERMANAGER_PARTIAL_WAKE_LOCK, binder, - String16(mName), + getWakeLockTag(), String16("media"), uid); } else { status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK, binder, - String16(mName), + getWakeLockTag(), String16("media")); } if (status == NO_ERROR) { diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 0cb3ef7..802b784 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -251,6 +251,8 @@ protected: // check if some effects must be suspended when an effect chain is added void checkSuspendOnAddEffectChain_l(const sp& chain); + String16 getWakeLockTag(); + virtual 
void preExit() { } friend class AudioFlinger; // for mEffectChains -- cgit v1.1 From d576687570f19f3956d91c1d3da10d965e34c407 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Tue, 15 Oct 2013 16:11:24 -0700 Subject: AwesomePlayer: fix concurrent pause and teardown There was a race condition if a pause request was received while we were executing a teardown event. Although we hold a mutex while processing the teardown event, there is a step of the prepare sequence where we release the mutex if streaming. In this case, the pause request is executed but ignored because the player state is still preparing. At the end of the teardown event processing we restore previous playback state and resume. The fix consists in clearing the saved teardown playback state when a pause request is received while processing a teardown event. Bug: 11225491. Change-Id: If0e61855ce5a336322f1ba8e5559bdc190beeb76 --- media/libstagefright/AwesomePlayer.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index be6719a..c912f75 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -1225,6 +1225,12 @@ status_t AwesomePlayer::pause() { status_t AwesomePlayer::pause_l(bool at_eos) { if (!(mFlags & PLAYING)) { + if (mAudioTearDown && mAudioTearDownWasPlaying) { + ALOGV("pause_l() during teardown and finishSetDataSource_l() mFlags %x" , mFlags); + mAudioTearDownWasPlaying = false; + notifyListener_l(MEDIA_PAUSED); + mMediaRenderingStartGeneration = ++mStartGeneration; + } return OK; } -- cgit v1.1 From afada1e56cf0f91be5e44678850fcead2a70cca2 Mon Sep 17 00:00:00 2001 From: Zhijun He Date: Wed, 16 Oct 2013 15:09:17 -0700 Subject: Camera: don't do hw support check for ZSL Only use camera.disable_zsl_mode to control disable/enable ZSL. 
Bug: 11258054 Change-Id: Ibf10b959d0913f7dedb59d89b571e9bf66fe978a --- .../libcameraservice/api1/client2/Parameters.cpp | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp index 9317aa3..8a4e75c 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.cpp +++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp @@ -817,21 +817,13 @@ status_t Parameters::initialize(const CameraMetadata *info) { previewCallbackOneShot = false; previewCallbackSurface = false; - camera_metadata_ro_entry_t supportedHardwareLevel = - staticInfo(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, 0, 0, false); - if (!supportedHardwareLevel.count || (supportedHardwareLevel.data.u8[0] == - ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED)) { - ALOGI("Camera %d: ZSL mode disabled for limited mode HALs", cameraId); + char value[PROPERTY_VALUE_MAX]; + property_get("camera.disable_zsl_mode", value, "0"); + if (!strcmp(value,"1")) { + ALOGI("Camera %d: Disabling ZSL mode", cameraId); zslMode = false; } else { - char value[PROPERTY_VALUE_MAX]; - property_get("camera.disable_zsl_mode", value, "0"); - if (!strcmp(value,"1")) { - ALOGI("Camera %d: Disabling ZSL mode", cameraId); - zslMode = false; - } else { - zslMode = true; - } + zslMode = true; } lightFx = LIGHTFX_NONE; -- cgit v1.1 From cc21e4f1e41dfa17e7e2bef995fcd22c45f6bcd0 Mon Sep 17 00:00:00 2001 From: Eric Laurent Date: Wed, 16 Oct 2013 15:12:32 -0700 Subject: AudioTrack: fix head position after restore The head position transfered to the new track by restoreTrack_l() must take into account the frames that are dropped from the old track to avoid a non recoverable offset in the playback head position returned to applications. Bug: 11230062. Change-Id: I51143a08b95e8f264ed709ae2054360315f2b8b1 --- include/private/media/AudioTrackShared.h | 2 ++ media/libmedia/AudioTrack.cpp | 4 +++- media/libmedia/AudioTrackShared.cpp | 21 +++++++++++++++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index fe258ad..395f164 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -256,6 +256,8 @@ public: return mEpoch; } + size_t getFramesFilled(); + private: size_t mEpoch; }; diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 37d50cf..507f9bc 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1662,7 +1662,9 @@ status_t AudioTrack::restoreTrack_l(const char *from) // if the new IAudioTrack is created, createTrack_l() will modify the // following member variables: mAudioTrack, mCblkMemory and mCblk. // It will also delete the strong references on previous IAudioTrack and IMemory - size_t position = mProxy->getPosition(); + + // take the frames that will be lost by track recreation into account in saved position + size_t position = mProxy->getPosition() + mProxy->getFramesFilled(); mNewPosition = position + mUpdatePeriod; size_t bufferPosition = mStaticProxy != NULL ? 
mStaticProxy->getBufferPosition() : 0; result = createTrack_l(mStreamType, diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index 4fd92b2..da73d65 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -316,6 +316,27 @@ size_t ClientProxy::getMisalignment() (mFrameCountP2 - 1); } +size_t ClientProxy::getFramesFilled() { + audio_track_cblk_t* cblk = mCblk; + int32_t front; + int32_t rear; + + if (mIsOut) { + front = android_atomic_acquire_load(&cblk->u.mStreaming.mFront); + rear = cblk->u.mStreaming.mRear; + } else { + rear = android_atomic_acquire_load(&cblk->u.mStreaming.mRear); + front = cblk->u.mStreaming.mFront; + } + ssize_t filled = rear - front; + // pipe should not be overfull + if (!(0 <= filled && (size_t) filled <= mFrameCount)) { + ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled); + return 0; + } + return (size_t)filled; +} + // --------------------------------------------------------------------------- void AudioTrackClientProxy::flush() -- cgit v1.1 From 598de6c701e989385eeffa7c5dfd61f0459a2631 Mon Sep 17 00:00:00 2001 From: Glenn Kasten Date: Wed, 16 Oct 2013 17:02:13 -0700 Subject: Fix race condition in AudioTrack::pause followed by start Bug: 11148722 Change-Id: Iec88f00c8510363d4418e4b8d5b34feb06ecf04d --- include/media/AudioTrack.h | 1 + media/libmedia/AudioTrack.cpp | 15 ++++++++------- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 22ad57e..f2f9c22 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -612,6 +612,7 @@ protected: bool mPaused; // whether thread is requested to pause at next loop entry bool mPausedInt; // whether thread internally requests pause nsecs_t mPausedNs; // if mPausedInt then associated timeout, otherwise ignored + bool mIgnoreNextPausedInt; // whether to ignore next mPausedInt request }; // body of AudioTrackThread::threadLoop() diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 37d50cf..80f5155 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -1785,7 +1785,8 @@ void AudioTrack::DeathNotifier::binderDied(const wp& who) // ========================================================================= AudioTrack::AudioTrackThread::AudioTrackThread(AudioTrack& receiver, bool bCanCallJava) - : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL) + : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL), + mIgnoreNextPausedInt(false) { } @@ -1802,6 +1803,10 @@ bool AudioTrack::AudioTrackThread::threadLoop() // caller will check for exitPending() return true; } + if (mIgnoreNextPausedInt) { + mIgnoreNextPausedInt = false; + mPausedInt = false; + } if (mPausedInt) { if (mPausedNs > 0) { (void) mMyCond.waitRelative(mMyLock, mPausedNs); @@ -1836,12 +1841,7 @@ void AudioTrack::AudioTrackThread::requestExit() { // must be in this order to avoid a race condition Thread::requestExit(); - AutoMutex _l(mMyLock); - if (mPaused || mPausedInt) { - mPaused = false; - mPausedInt = false; - mMyCond.signal(); - } + resume(); } void AudioTrack::AudioTrackThread::pause() @@ -1853,6 +1853,7 @@ void AudioTrack::AudioTrackThread::pause() void AudioTrack::AudioTrackThread::resume() { AutoMutex _l(mMyLock); + mIgnoreNextPausedInt = true; if (mPaused || mPausedInt) { mPaused = false; mPausedInt = false; -- cgit v1.1